Emdash source with visual editor image upload fix
Fixes: 1. media.ts: wrap placeholder generation in try-catch 2. toolbar.ts: check r.ok, display error message in popover
This commit is contained in:
62
packages/core/src/after.ts
Normal file
62
packages/core/src/after.ts
Normal file
@@ -0,0 +1,62 @@
|
||||
/**
|
||||
* Defer work past the HTTP response.
|
||||
*
|
||||
* Use for bookkeeping that doesn't need to complete before the client
|
||||
* gets bytes — writes that record state, maintenance queries, cache
|
||||
* refreshes. `after()` hands the promise to the host's lifetime
|
||||
* extender when one is available (Cloudflare's `waitUntil` under
|
||||
* workerd), or fires-and-forgets on Node (the process lives for the
|
||||
* next request anyway).
|
||||
*
|
||||
* Host binding is resolved lazily via a dynamic import of the
|
||||
* `virtual:emdash/wait-until` virtual module. Lazy — rather than a
|
||||
* static top-level import — so tools that walk the dist in a plain
|
||||
* Node loader (`astro check`, Vitest, etc.) don't trip over the
|
||||
* `virtual:` scheme: they'd only fail if they actually called
|
||||
* `after()`, which they don't during type-checking.
|
||||
*/
|
||||
|
||||
export type WaitUntilFn = (promise: Promise<unknown>) => void;
|
||||
|
||||
// Resolves to the host's waitUntil if the adapter provided one, or
|
||||
// null otherwise. Kicked off once at module load; subsequent `after()`
|
||||
// calls see the cached result without re-importing.
|
||||
const waitUntilReady: Promise<WaitUntilFn | null> = (async () => {
|
||||
try {
|
||||
// @ts-ignore - virtual module, generated by the Astro integration
|
||||
const mod = (await import("virtual:emdash/wait-until")) as {
|
||||
waitUntil?: WaitUntilFn;
|
||||
};
|
||||
return mod.waitUntil ?? null;
|
||||
} catch {
|
||||
// No virtual module available (Node-side tooling, tests without the
|
||||
// integration in scope). Fire-and-forget is the safe fallback.
|
||||
return null;
|
||||
}
|
||||
})();
|
||||
// Surface rejections without making the module-load fail.
|
||||
waitUntilReady.catch(() => {});
|
||||
|
||||
/**
|
||||
* Schedule `fn` to run without blocking the response.
|
||||
*
|
||||
* Errors are caught and logged — a deferred task should never surface
|
||||
* as an unhandled rejection because the response is long gone. Callers
|
||||
* that care about errors should handle them inside `fn`.
|
||||
*/
|
||||
export function after(fn: () => void | Promise<void>): void {
|
||||
const promise = Promise.resolve()
|
||||
.then(fn)
|
||||
.catch((error) => {
|
||||
console.error("[emdash] deferred task failed:", error);
|
||||
});
|
||||
|
||||
// Defer the lifetime-extender handoff to the microtask that resolves
|
||||
// waitUntilReady. On workerd this is effectively instant (the virtual
|
||||
// module is already loaded in the bundle); on Node the promise
|
||||
// resolves to null, so this is just one extra microtask and no-op.
|
||||
void waitUntilReady.then((waitUntil) => {
|
||||
if (waitUntil) waitUntil(promise);
|
||||
return null;
|
||||
});
|
||||
}
|
||||
37
packages/core/src/api/auth-storage.ts
Normal file
37
packages/core/src/api/auth-storage.ts
Normal file
@@ -0,0 +1,37 @@
|
||||
/**
|
||||
* Auth provider storage helper.
|
||||
*
|
||||
* Gives auth provider routes access to plugin-style storage collections
|
||||
* namespaced under `auth:<providerId>`. Reuses the existing `_plugin_storage`
|
||||
* table and `PluginStorageRepository` infrastructure.
|
||||
*/
|
||||
|
||||
import type { Kysely } from "kysely";
|
||||
|
||||
import type { Database } from "../database/types.js";
|
||||
import { createStorageAccess } from "../plugins/context.js";
|
||||
import type { StorageCollection, StorageCollectionConfig } from "../plugins/types.js";
|
||||
|
||||
/**
|
||||
* Get storage collections for an auth provider.
|
||||
*
|
||||
* Returns a record of `StorageCollection` instances, one per declared
|
||||
* collection in the provider's `storage` config. Data is stored in the
|
||||
* shared `_plugin_storage` table under the namespace `auth:<providerId>`.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const storage = getAuthProviderStorage(emdash.db, "atproto", {
|
||||
* states: { indexes: [] },
|
||||
* sessions: { indexes: [] },
|
||||
* });
|
||||
* const session = await storage.sessions.get(sessionId);
|
||||
* ```
|
||||
*/
|
||||
export function getAuthProviderStorage(
|
||||
db: Kysely<Database>,
|
||||
providerId: string,
|
||||
storageConfig: Record<string, StorageCollectionConfig>,
|
||||
): Record<string, StorageCollection> {
|
||||
return createStorageAccess(db, `auth:${providerId}`, storageConfig);
|
||||
}
|
||||
63
packages/core/src/api/authorize.ts
Normal file
63
packages/core/src/api/authorize.ts
Normal file
@@ -0,0 +1,63 @@
|
||||
/**
|
||||
* Authorization helpers for API routes
|
||||
*
|
||||
* Thin wrappers around @emdash-cms/auth RBAC that return HTTP responses.
|
||||
* Auth middleware handles authentication; these handle authorization.
|
||||
*/
|
||||
|
||||
import type { Permission, RoleLevel } from "@emdash-cms/auth";
|
||||
import { hasPermission, canActOnOwn } from "@emdash-cms/auth";
|
||||
|
||||
import { apiError } from "./error.js";
|
||||
|
||||
// Minimal user shape these helpers need — anything with an id and a role
// level qualifies, so callers aren't coupled to a concrete session type.
interface UserLike {
  // Compared against resource owner ids in requireOwnerPerm.
  id: string;
  // RBAC role level consumed by hasPermission / canActOnOwn.
  role: RoleLevel;
}
|
||||
|
||||
/**
|
||||
* Check if user has a permission. Returns a 401/403 Response if not, or null if authorized.
|
||||
*
|
||||
* Usage:
|
||||
* ```ts
|
||||
* const denied = requirePerm(user, "schema:manage");
|
||||
* if (denied) return denied;
|
||||
* ```
|
||||
*/
|
||||
export function requirePerm(
|
||||
user: UserLike | null | undefined,
|
||||
permission: Permission,
|
||||
): Response | null {
|
||||
if (!user) {
|
||||
return apiError("UNAUTHORIZED", "Authentication required", 401);
|
||||
}
|
||||
if (!hasPermission(user, permission)) {
|
||||
return apiError("FORBIDDEN", "Insufficient permissions", 403);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if user can act on a resource, considering ownership.
|
||||
* Returns a 401/403 Response if not, or null if authorized.
|
||||
*
|
||||
* Usage:
|
||||
* ```ts
|
||||
* const denied = requireOwnerPerm(user, item.authorId, "content:edit_own", "content:edit_any");
|
||||
* if (denied) return denied;
|
||||
* ```
|
||||
*/
|
||||
export function requireOwnerPerm(
|
||||
user: UserLike | null | undefined,
|
||||
ownerId: string,
|
||||
ownPermission: Permission,
|
||||
anyPermission: Permission,
|
||||
): Response | null {
|
||||
if (!user) {
|
||||
return apiError("UNAUTHORIZED", "Authentication required", 401);
|
||||
}
|
||||
if (!canActOnOwn(user, ownerId, ownPermission, anyPermission)) {
|
||||
return apiError("FORBIDDEN", "Insufficient permissions", 403);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
59
packages/core/src/api/csrf.ts
Normal file
59
packages/core/src/api/csrf.ts
Normal file
@@ -0,0 +1,59 @@
|
||||
/**
|
||||
* CSRF protection utilities.
|
||||
*
|
||||
* Two mechanisms:
|
||||
* 1. Custom header check (X-EmDash-Request: 1) — used for authenticated API routes.
|
||||
* Browsers block cross-origin custom headers, so presence proves same-origin.
|
||||
* 2. Origin check — used for public API routes that skip auth. Compares the Origin
|
||||
* header against the request origin. Same approach as Astro's `checkOrigin`.
|
||||
*/
|
||||
|
||||
import { apiError } from "./error.js";
|
||||
|
||||
/**
|
||||
* Origin-based CSRF check for public API routes that skip auth.
|
||||
*
|
||||
* State-changing requests (POST/PUT/DELETE) to public endpoints must either:
|
||||
* 1. Include the X-EmDash-Request: 1 header (custom header blocked cross-origin), OR
|
||||
* 2. Have an Origin header matching the request origin (or the configured public origin)
|
||||
*
|
||||
* This prevents cross-origin form submissions (which can't set custom headers)
|
||||
* and cross-origin fetch (blocked by CORS unless allowed). Same-origin requests
|
||||
* always include a matching Origin header.
|
||||
*
|
||||
* Returns a 403 Response if the check fails, or null if allowed.
|
||||
*
|
||||
* @param request The incoming request
|
||||
* @param url The request URL (internal origin)
|
||||
* @param publicOrigin The public-facing origin from config.siteUrl. Must be
|
||||
* `undefined` when absent — never `null` or `""` (security invariant H-1a).
|
||||
*/
|
||||
export function checkPublicCsrf(
|
||||
request: Request,
|
||||
url: URL,
|
||||
publicOrigin?: string,
|
||||
): Response | null {
|
||||
// Custom header present — browser blocks cross-origin custom headers
|
||||
const csrfHeader = request.headers.get("X-EmDash-Request");
|
||||
if (csrfHeader === "1") return null;
|
||||
|
||||
// Check Origin header — present on all POST/PUT/DELETE from browsers
|
||||
const origin = request.headers.get("Origin");
|
||||
if (origin) {
|
||||
try {
|
||||
const originUrl = new URL(origin);
|
||||
// Accept if Origin matches either the internal or public origin
|
||||
if (originUrl.origin === url.origin) return null;
|
||||
if (publicOrigin && originUrl.origin === publicOrigin) return null;
|
||||
} catch {
|
||||
// Malformed Origin — fall through to reject
|
||||
}
|
||||
|
||||
return apiError("CSRF_REJECTED", "Cross-origin request blocked", 403);
|
||||
}
|
||||
|
||||
// No Origin header — non-browser client (curl, server-to-server).
|
||||
// Allow these through since CSRF is a browser-specific attack vector.
|
||||
// Server-to-server requests don't carry ambient credentials (cookies).
|
||||
return null;
|
||||
}
|
||||
105
packages/core/src/api/error.ts
Normal file
105
packages/core/src/api/error.ts
Normal file
@@ -0,0 +1,105 @@
|
||||
/**
|
||||
* Standardized API error responses.
|
||||
*
|
||||
* All API routes should use these utilities instead of inline
|
||||
* `new Response(JSON.stringify({ error: ... }), ...)` patterns.
|
||||
*/
|
||||
|
||||
import { InvalidCursorError } from "../database/repositories/types.js";
|
||||
import { mapErrorStatus } from "./errors.js";
|
||||
import type { ApiResult } from "./types.js";
|
||||
|
||||
// Re-export everything from errors.ts so existing `import { mapErrorStatus } from "./error.js"` still works
|
||||
export * from "./errors.js";
|
||||
|
||||
/**
|
||||
* Standard cache headers for all API responses.
|
||||
*
|
||||
* Cache-Control: private, no-store -- prevents CDN/proxy caching of authenticated data.
|
||||
* no-store already tells caches not to store the response, so Vary is unnecessary.
|
||||
*/
|
||||
// Attached to every API response (success and error alike) so CDNs and
// proxies never cache authenticated payloads.
const API_CACHE_HEADERS: HeadersInit = {
  "Cache-Control": "private, no-store",
};
|
||||
|
||||
/**
|
||||
* Create a standardized error response.
|
||||
*
|
||||
* Always returns `{ error: { code, message } }` with correct Content-Type.
|
||||
* Use this for all error responses in API routes.
|
||||
*/
|
||||
export function apiError(code: string, message: string, status: number): Response {
|
||||
return Response.json({ error: { code, message } }, { status, headers: API_CACHE_HEADERS });
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a standardized success response.
|
||||
*
|
||||
* Always returns `{ data: T }` with correct status code.
|
||||
* Use this for all success responses in API routes.
|
||||
*/
|
||||
export function apiSuccess<T>(data: T, status = 200): Response {
|
||||
return Response.json({ data }, { status, headers: API_CACHE_HEADERS });
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle an unknown error in a catch block.
|
||||
*
|
||||
* - Logs the full error server-side
|
||||
* - Returns a generic message to the client (never leaks error.message)
|
||||
* - Use `fallbackMessage` for the public-facing message
|
||||
* - Use `fallbackCode` for the error code
|
||||
*/
|
||||
export function handleError(
|
||||
error: unknown,
|
||||
fallbackMessage: string,
|
||||
fallbackCode: string,
|
||||
): Response {
|
||||
// Bubble malformed-cursor errors as a structured 400 instead of a
|
||||
// generic 500.
|
||||
if (error instanceof InvalidCursorError) {
|
||||
return apiError("INVALID_CURSOR", error.message, 400);
|
||||
}
|
||||
console.error(`[${fallbackCode}]`, error);
|
||||
return apiError(fallbackCode, fallbackMessage, 500);
|
||||
}
|
||||
|
||||
/**
|
||||
* Standard initialization check.
|
||||
*
|
||||
* Returns an error response if EmDash is not initialized, or null if OK.
|
||||
* Usage: `const err = requireInit(emdash); if (err) return err;`
|
||||
*/
|
||||
export function requireInit(emdash: unknown): Response | null {
|
||||
if (!emdash || typeof emdash !== "object") {
|
||||
return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Standard database check.
|
||||
*
|
||||
* Returns an error response if the database is not available, or null if OK.
|
||||
* Usage: `const err = requireDb(emdash?.db); if (err) return err;`
|
||||
*/
|
||||
export function requireDb(db: unknown): Response | null {
|
||||
if (!db) {
|
||||
return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert an ApiResult into an HTTP Response.
|
||||
*
|
||||
* Collapses the handler-to-response boilerplate:
|
||||
* - Success: returns `apiSuccess(result.data, successStatus)`
|
||||
* - Error: returns `apiError(code, message, mapErrorStatus(code))`
|
||||
*/
|
||||
export function unwrapResult<T>(result: ApiResult<T>, successStatus = 200): Response {
|
||||
if (!result.success) {
|
||||
return apiError(result.error.code, result.error.message, mapErrorStatus(result.error.code));
|
||||
}
|
||||
return apiSuccess(result.data, successStatus);
|
||||
}
|
||||
453
packages/core/src/api/errors.ts
Normal file
453
packages/core/src/api/errors.ts
Normal file
@@ -0,0 +1,453 @@
|
||||
/**
|
||||
* Typed error codes and status mapping for the EmDash REST API.
|
||||
*
|
||||
* All handler-level and route-level error codes are defined here.
|
||||
* Routes and handlers should import error codes from this module
|
||||
* instead of using ad-hoc strings.
|
||||
*/
|
||||
|
||||
// Central registry of machine-readable error codes. Keys intentionally
// equal their values, so `ErrorCode.X` and the literal string "X" are
// interchangeable; `mapErrorStatus` relies on this when translating codes
// to HTTP statuses.
export const ErrorCode = {
  // Shared (used across domains)
  NOT_FOUND: "NOT_FOUND",
  VALIDATION_ERROR: "VALIDATION_ERROR",
  INVALID_INPUT: "INVALID_INPUT",
  INVALID_JSON: "INVALID_JSON",
  INVALID_CURSOR: "INVALID_CURSOR",
  CONFLICT: "CONFLICT",
  SLUG_CONFLICT: "SLUG_CONFLICT",
  NOT_CONFIGURED: "NOT_CONFIGURED",
  UNAUTHORIZED: "UNAUTHORIZED",
  FORBIDDEN: "FORBIDDEN",
  RATE_LIMITED: "RATE_LIMITED",
  NOT_AUTHENTICATED: "NOT_AUTHENTICATED",
  NOT_IMPLEMENTED: "NOT_IMPLEMENTED",
  NOT_SUPPORTED: "NOT_SUPPORTED",
  MISSING_PARAM: "MISSING_PARAM",
  CSRF_REJECTED: "CSRF_REJECTED",

  // Content
  CONTENT_CREATE_ERROR: "CONTENT_CREATE_ERROR",
  CONTENT_UPDATE_ERROR: "CONTENT_UPDATE_ERROR",
  CONTENT_DELETE_ERROR: "CONTENT_DELETE_ERROR",
  CONTENT_LIST_ERROR: "CONTENT_LIST_ERROR",
  CONTENT_GET_ERROR: "CONTENT_GET_ERROR",
  CONTENT_DUPLICATE_ERROR: "CONTENT_DUPLICATE_ERROR",
  CONTENT_RESTORE_ERROR: "CONTENT_RESTORE_ERROR",
  CONTENT_PUBLISH_ERROR: "CONTENT_PUBLISH_ERROR",
  CONTENT_UNPUBLISH_ERROR: "CONTENT_UNPUBLISH_ERROR",
  CONTENT_SCHEDULE_ERROR: "CONTENT_SCHEDULE_ERROR",
  CONTENT_UNSCHEDULE_ERROR: "CONTENT_UNSCHEDULE_ERROR",
  CONTENT_DISCARD_DRAFT_ERROR: "CONTENT_DISCARD_DRAFT_ERROR",
  CONTENT_COMPARE_ERROR: "CONTENT_COMPARE_ERROR",
  CONTENT_TRANSLATIONS_ERROR: "CONTENT_TRANSLATIONS_ERROR",
  CONTENT_COUNT_ERROR: "CONTENT_COUNT_ERROR",

  // Revisions
  REVISION_LIST_ERROR: "REVISION_LIST_ERROR",
  REVISION_GET_ERROR: "REVISION_GET_ERROR",
  REVISION_RESTORE_ERROR: "REVISION_RESTORE_ERROR",
  INVALID_REVISION: "INVALID_REVISION",

  // Schema
  SCHEMA_LIST_ERROR: "SCHEMA_LIST_ERROR",
  SCHEMA_GET_ERROR: "SCHEMA_GET_ERROR",
  SCHEMA_CREATE_ERROR: "SCHEMA_CREATE_ERROR",
  SCHEMA_UPDATE_ERROR: "SCHEMA_UPDATE_ERROR",
  SCHEMA_DELETE_ERROR: "SCHEMA_DELETE_ERROR",
  SCHEMA_EXPORT_ERROR: "SCHEMA_EXPORT_ERROR",
  SCHEMA_FIELD_LIST_ERROR: "SCHEMA_FIELD_LIST_ERROR",
  SCHEMA_FIELD_GET_ERROR: "SCHEMA_FIELD_GET_ERROR",
  SCHEMA_FIELD_CREATE_ERROR: "SCHEMA_FIELD_CREATE_ERROR",
  SCHEMA_FIELD_UPDATE_ERROR: "SCHEMA_FIELD_UPDATE_ERROR",
  SCHEMA_FIELD_DELETE_ERROR: "SCHEMA_FIELD_DELETE_ERROR",
  SCHEMA_FIELD_REORDER_ERROR: "SCHEMA_FIELD_REORDER_ERROR",
  ORPHAN_LIST_ERROR: "ORPHAN_LIST_ERROR",
  ORPHAN_REGISTER_ERROR: "ORPHAN_REGISTER_ERROR",
  COLLECTION_EXISTS: "COLLECTION_EXISTS",
  COLLECTION_NOT_FOUND: "COLLECTION_NOT_FOUND",
  TABLE_NOT_FOUND: "TABLE_NOT_FOUND",
  FIELD_EXISTS: "FIELD_EXISTS",
  RESERVED_SLUG: "RESERVED_SLUG",
  INVALID_SLUG: "INVALID_SLUG",
  CREATE_FAILED: "CREATE_FAILED",
  UPDATE_FAILED: "UPDATE_FAILED",
  REGISTER_FAILED: "REGISTER_FAILED",

  // Media
  MEDIA_LIST_ERROR: "MEDIA_LIST_ERROR",
  MEDIA_GET_ERROR: "MEDIA_GET_ERROR",
  MEDIA_CREATE_ERROR: "MEDIA_CREATE_ERROR",
  MEDIA_UPDATE_ERROR: "MEDIA_UPDATE_ERROR",
  MEDIA_DELETE_ERROR: "MEDIA_DELETE_ERROR",
  NO_STORAGE: "NO_STORAGE",
  NO_FILE: "NO_FILE",
  INVALID_TYPE: "INVALID_TYPE",
  UPLOAD_ERROR: "UPLOAD_ERROR",
  UPLOAD_URL_ERROR: "UPLOAD_URL_ERROR",
  CONFIRM_ERROR: "CONFIRM_ERROR",
  CONFIRM_FAILED: "CONFIRM_FAILED",
  FILE_NOT_FOUND: "FILE_NOT_FOUND",
  INVALID_STATE: "INVALID_STATE",
  FILE_SERVE_ERROR: "FILE_SERVE_ERROR",
  STORAGE_NOT_CONFIGURED: "STORAGE_NOT_CONFIGURED",
  PROVIDER_LIST_ERROR: "PROVIDER_LIST_ERROR",
  PROVIDER_UPLOAD_ERROR: "PROVIDER_UPLOAD_ERROR",
  PROVIDER_GET_ERROR: "PROVIDER_GET_ERROR",
  PROVIDER_DELETE_ERROR: "PROVIDER_DELETE_ERROR",

  // Comments
  COMMENT_LIST_ERROR: "COMMENT_LIST_ERROR",
  COMMENT_GET_ERROR: "COMMENT_GET_ERROR",
  COMMENT_STATUS_ERROR: "COMMENT_STATUS_ERROR",
  COMMENT_DELETE_ERROR: "COMMENT_DELETE_ERROR",
  COMMENT_BULK_ERROR: "COMMENT_BULK_ERROR",
  COMMENT_INBOX_ERROR: "COMMENT_INBOX_ERROR",
  COMMENT_COUNTS_ERROR: "COMMENT_COUNTS_ERROR",
  COMMENT_CREATE_ERROR: "COMMENT_CREATE_ERROR",
  COMMENTS_DISABLED: "COMMENTS_DISABLED",
  COMMENTS_CLOSED: "COMMENTS_CLOSED",
  COMMENT_REJECTED: "COMMENT_REJECTED",

  // Auth
  ACCOUNT_DISABLED: "ACCOUNT_DISABLED",
  ADMIN_EXISTS: "ADMIN_EXISTS",
  SETUP_COMPLETE: "SETUP_COMPLETE",
  CREDENTIAL_EXISTS: "CREDENTIAL_EXISTS",
  CHALLENGE_EXPIRED: "CHALLENGE_EXPIRED",
  PASSKEY_REGISTER_ERROR: "PASSKEY_REGISTER_ERROR",
  PASSKEY_REGISTER_OPTIONS_ERROR: "PASSKEY_REGISTER_OPTIONS_ERROR",
  PASSKEY_OPTIONS_ERROR: "PASSKEY_OPTIONS_ERROR",
  PASSKEY_VERIFY_ERROR: "PASSKEY_VERIFY_ERROR",
  PASSKEY_LIST_ERROR: "PASSKEY_LIST_ERROR",
  PASSKEY_RENAME_ERROR: "PASSKEY_RENAME_ERROR",
  PASSKEY_DELETE_ERROR: "PASSKEY_DELETE_ERROR",
  PASSKEY_LIMIT: "PASSKEY_LIMIT",
  LAST_PASSKEY: "LAST_PASSKEY",
  LOGOUT_ERROR: "LOGOUT_ERROR",
  SELF_ROLE_CHANGE: "SELF_ROLE_CHANGE",
  EMAIL_IN_USE: "EMAIL_IN_USE",
  EMAIL_NOT_CONFIGURED: "EMAIL_NOT_CONFIGURED",
  USER_EXISTS: "USER_EXISTS",
  INVALID_TOKEN: "INVALID_TOKEN",
  TOKEN_EXPIRED: "TOKEN_EXPIRED",
  DOMAIN_NOT_ALLOWED: "DOMAIN_NOT_ALLOWED",
  INVITE_CREATE_ERROR: "INVITE_CREATE_ERROR",
  INVITE_VALIDATE_ERROR: "INVITE_VALIDATE_ERROR",
  INVITE_COMPLETE_ERROR: "INVITE_COMPLETE_ERROR",
  SIGNUP_VERIFY_ERROR: "SIGNUP_VERIFY_ERROR",
  SIGNUP_COMPLETE_ERROR: "SIGNUP_COMPLETE_ERROR",
  RECOVERY_SEND_ERROR: "RECOVERY_SEND_ERROR",
  USER_LIST_ERROR: "USER_LIST_ERROR",
  USER_DETAIL_ERROR: "USER_DETAIL_ERROR",
  USER_UPDATE_ERROR: "USER_UPDATE_ERROR",
  USER_DISABLE_ERROR: "USER_DISABLE_ERROR",
  USER_ENABLE_ERROR: "USER_ENABLE_ERROR",

  // OAuth (internal codes -- distinct from RFC OAuthErrorCode)
  UNSUPPORTED_RESPONSE_TYPE: "UNSUPPORTED_RESPONSE_TYPE",
  INVALID_REDIRECT_URI: "INVALID_REDIRECT_URI",
  INVALID_CLIENT: "INVALID_CLIENT",
  INVALID_SCOPE: "INVALID_SCOPE",
  AUTHORIZATION_ERROR: "AUTHORIZATION_ERROR",
  INVALID_GRANT: "INVALID_GRANT",
  UNSUPPORTED_GRANT_TYPE: "UNSUPPORTED_GRANT_TYPE",
  INVALID_CODE: "INVALID_CODE",
  EXPIRED_CODE: "EXPIRED_CODE",
  INSUFFICIENT_ROLE: "INSUFFICIENT_ROLE",
  INSUFFICIENT_SCOPE: "INSUFFICIENT_SCOPE",
  INSUFFICIENT_PERMISSIONS: "INSUFFICIENT_PERMISSIONS",
  TOKEN_EXCHANGE_ERROR: "TOKEN_EXCHANGE_ERROR",
  TOKEN_REFRESH_ERROR: "TOKEN_REFRESH_ERROR",
  TOKEN_REVOKE_ERROR: "TOKEN_REVOKE_ERROR",
  TOKEN_CREATE_ERROR: "TOKEN_CREATE_ERROR",
  TOKEN_LIST_ERROR: "TOKEN_LIST_ERROR",
  TOKEN_ERROR: "TOKEN_ERROR",
  DEVICE_CODE_ERROR: "DEVICE_CODE_ERROR",
  AUTHORIZE_ERROR: "AUTHORIZE_ERROR",
  CLIENT_LIST_ERROR: "CLIENT_LIST_ERROR",
  CLIENT_GET_ERROR: "CLIENT_GET_ERROR",
  CLIENT_CREATE_ERROR: "CLIENT_CREATE_ERROR",
  CLIENT_UPDATE_ERROR: "CLIENT_UPDATE_ERROR",
  CLIENT_DELETE_ERROR: "CLIENT_DELETE_ERROR",

  // Allowed domains
  DOMAIN_LIST_ERROR: "DOMAIN_LIST_ERROR",
  DOMAIN_CREATE_ERROR: "DOMAIN_CREATE_ERROR",
  DOMAIN_UPDATE_ERROR: "DOMAIN_UPDATE_ERROR",
  DOMAIN_DELETE_ERROR: "DOMAIN_DELETE_ERROR",

  // Plugins / Marketplace
  PLUGIN_LIST_ERROR: "PLUGIN_LIST_ERROR",
  PLUGIN_GET_ERROR: "PLUGIN_GET_ERROR",
  PLUGIN_ENABLE_ERROR: "PLUGIN_ENABLE_ERROR",
  PLUGIN_DISABLE_ERROR: "PLUGIN_DISABLE_ERROR",
  PLUGIN_ID_CONFLICT: "PLUGIN_ID_CONFLICT",
  MARKETPLACE_NOT_CONFIGURED: "MARKETPLACE_NOT_CONFIGURED",
  MARKETPLACE_UNAVAILABLE: "MARKETPLACE_UNAVAILABLE",
  MARKETPLACE_ERROR: "MARKETPLACE_ERROR",
  SANDBOX_NOT_AVAILABLE: "SANDBOX_NOT_AVAILABLE",
  ALREADY_INSTALLED: "ALREADY_INSTALLED",
  ALREADY_UP_TO_DATE: "ALREADY_UP_TO_DATE",
  NO_VERSION: "NO_VERSION",
  MANIFEST_MISMATCH: "MANIFEST_MISMATCH",
  MANIFEST_VERSION_MISMATCH: "MANIFEST_VERSION_MISMATCH",
  AUDIT_FAILED: "AUDIT_FAILED",
  CHECKSUM_MISMATCH: "CHECKSUM_MISMATCH",
  INVALID_BUNDLE: "INVALID_BUNDLE",
  BUNDLE_EXTRACT_FAILED: "BUNDLE_EXTRACT_FAILED",
  BUNDLE_DOWNLOAD_FAILED: "BUNDLE_DOWNLOAD_FAILED",
  CAPABILITY_ESCALATION: "CAPABILITY_ESCALATION",
  ROUTE_VISIBILITY_ESCALATION: "ROUTE_VISIBILITY_ESCALATION",
  INSTALL_FAILED: "INSTALL_FAILED",
  UNINSTALL_FAILED: "UNINSTALL_FAILED",
  SEARCH_FAILED: "SEARCH_FAILED",
  GET_PLUGIN_FAILED: "GET_PLUGIN_FAILED",
  GET_THEME_FAILED: "GET_THEME_FAILED",
  THEME_SEARCH_FAILED: "THEME_SEARCH_FAILED",
  UPDATE_CHECK_FAILED: "UPDATE_CHECK_FAILED",
  EXCLUSIVE_HOOKS_LIST_ERROR: "EXCLUSIVE_HOOKS_LIST_ERROR",
  EXCLUSIVE_HOOK_SET_ERROR: "EXCLUSIVE_HOOK_SET_ERROR",

  // Menus
  MENU_LIST_ERROR: "MENU_LIST_ERROR",
  MENU_CREATE_ERROR: "MENU_CREATE_ERROR",
  MENU_GET_ERROR: "MENU_GET_ERROR",
  MENU_UPDATE_ERROR: "MENU_UPDATE_ERROR",
  MENU_DELETE_ERROR: "MENU_DELETE_ERROR",
  MENU_ITEM_CREATE_ERROR: "MENU_ITEM_CREATE_ERROR",
  MENU_ITEM_UPDATE_ERROR: "MENU_ITEM_UPDATE_ERROR",
  MENU_ITEM_DELETE_ERROR: "MENU_ITEM_DELETE_ERROR",
  MENU_REORDER_ERROR: "MENU_REORDER_ERROR",

  // Taxonomies
  TAXONOMY_LIST_ERROR: "TAXONOMY_LIST_ERROR",
  TAXONOMY_CREATE_ERROR: "TAXONOMY_CREATE_ERROR",
  TERM_LIST_ERROR: "TERM_LIST_ERROR",
  TERM_CREATE_ERROR: "TERM_CREATE_ERROR",
  TERM_GET_ERROR: "TERM_GET_ERROR",
  TERM_UPDATE_ERROR: "TERM_UPDATE_ERROR",
  TERM_DELETE_ERROR: "TERM_DELETE_ERROR",
  TERMS_GET_ERROR: "TERMS_GET_ERROR",
  TERMS_SET_ERROR: "TERMS_SET_ERROR",

  // Sections
  SECTION_LIST_ERROR: "SECTION_LIST_ERROR",
  SECTION_CREATE_ERROR: "SECTION_CREATE_ERROR",
  SECTION_GET_ERROR: "SECTION_GET_ERROR",
  SECTION_UPDATE_ERROR: "SECTION_UPDATE_ERROR",
  SECTION_DELETE_ERROR: "SECTION_DELETE_ERROR",

  // Redirects
  REDIRECT_LIST_ERROR: "REDIRECT_LIST_ERROR",
  REDIRECT_CREATE_ERROR: "REDIRECT_CREATE_ERROR",
  REDIRECT_GET_ERROR: "REDIRECT_GET_ERROR",
  REDIRECT_UPDATE_ERROR: "REDIRECT_UPDATE_ERROR",
  REDIRECT_DELETE_ERROR: "REDIRECT_DELETE_ERROR",
  NOT_FOUND_LIST_ERROR: "NOT_FOUND_LIST_ERROR",
  NOT_FOUND_SUMMARY_ERROR: "NOT_FOUND_SUMMARY_ERROR",
  NOT_FOUND_CLEAR_ERROR: "NOT_FOUND_CLEAR_ERROR",
  NOT_FOUND_PRUNE_ERROR: "NOT_FOUND_PRUNE_ERROR",

  // Widgets
  WIDGET_AREA_LIST_ERROR: "WIDGET_AREA_LIST_ERROR",
  WIDGET_AREA_CREATE_ERROR: "WIDGET_AREA_CREATE_ERROR",
  WIDGET_AREA_GET_ERROR: "WIDGET_AREA_GET_ERROR",
  WIDGET_AREA_DELETE_ERROR: "WIDGET_AREA_DELETE_ERROR",
  WIDGET_CREATE_ERROR: "WIDGET_CREATE_ERROR",
  WIDGET_UPDATE_ERROR: "WIDGET_UPDATE_ERROR",
  WIDGET_DELETE_ERROR: "WIDGET_DELETE_ERROR",
  WIDGET_REORDER_ERROR: "WIDGET_REORDER_ERROR",
  WIDGET_COMPONENTS_ERROR: "WIDGET_COMPONENTS_ERROR",

  // Setup
  ALREADY_CONFIGURED: "ALREADY_CONFIGURED",
  INVALID_SEED: "INVALID_SEED",
  INVALID_REDIRECT: "INVALID_REDIRECT",
  SETUP_ERROR: "SETUP_ERROR",
  SETUP_STATUS_ERROR: "SETUP_STATUS_ERROR",
  SETUP_ADMIN_ERROR: "SETUP_ADMIN_ERROR",
  SETUP_VERIFY_ERROR: "SETUP_VERIFY_ERROR",
  DEV_BYPASS_ERROR: "DEV_BYPASS_ERROR",
  DEV_RESET_ERROR: "DEV_RESET_ERROR",
  MIGRATION_ERROR: "MIGRATION_ERROR",
  SEED_ERROR: "SEED_ERROR",

  // Settings
  SETTINGS_READ_ERROR: "SETTINGS_READ_ERROR",
  SETTINGS_UPDATE_ERROR: "SETTINGS_UPDATE_ERROR",
  EMAIL_SETTINGS_READ_ERROR: "EMAIL_SETTINGS_READ_ERROR",
  EMAIL_TEST_ERROR: "EMAIL_TEST_ERROR",

  // Search
  SEARCH_ERROR: "SEARCH_ERROR",
  STATS_ERROR: "STATS_ERROR",
  SUGGESTION_ERROR: "SUGGESTION_ERROR",
  REBUILD_ERROR: "REBUILD_ERROR",

  // Import
  WXR_ANALYZE_ERROR: "WXR_ANALYZE_ERROR",
  WXR_PREPARE_ERROR: "WXR_PREPARE_ERROR",
  WXR_IMPORT_ERROR: "WXR_IMPORT_ERROR",
  IMPORT_ERROR: "IMPORT_ERROR",
  REWRITE_ERROR: "REWRITE_ERROR",
  WP_PLUGIN_ANALYZE_ERROR: "WP_PLUGIN_ANALYZE_ERROR",
  WP_PLUGIN_IMPORT_ERROR: "WP_PLUGIN_IMPORT_ERROR",
  SSRF_BLOCKED: "SSRF_BLOCKED",
  PROBE_ERROR: "PROBE_ERROR",

  // Dashboard
  DASHBOARD_ERROR: "DASHBOARD_ERROR",
  DASHBOARD_STATS_ERROR: "DASHBOARD_STATS_ERROR",

  // Misc
  SNAPSHOT_ERROR: "SNAPSHOT_ERROR",
  TYPEGEN_ERROR: "TYPEGEN_ERROR",
  SITEMAP_ERROR: "SITEMAP_ERROR",
  NO_DB: "NO_DB",
  INVALID_REQUEST: "INVALID_REQUEST",
  UNKNOWN_ACTION: "UNKNOWN_ACTION",
} as const;

// Union of every error-code string above (e.g. "NOT_FOUND" | "FORBIDDEN" | ...).
export type ErrorCode = (typeof ErrorCode)[keyof typeof ErrorCode];
|
||||
|
||||
/**
|
||||
* OAuth RFC 6749 error codes.
|
||||
*
|
||||
* These MUST be lowercase per the RFC spec. Used only by OAuth token endpoints.
|
||||
* Separate from ErrorCode to prevent mixing conventions.
|
||||
*/
|
||||
export const OAuthErrorCode = {
  INVALID_GRANT: "invalid_grant", // RFC 6749 §5.2
  UNSUPPORTED_GRANT_TYPE: "unsupported_grant_type", // RFC 6749 §5.2
  EXPIRED_TOKEN: "expired_token", // device flow (RFC 8628 §3.5)
  ACCESS_DENIED: "access_denied", // RFC 6749 §4.1.2.1
  AUTHORIZATION_PENDING: "authorization_pending", // device flow (RFC 8628 §3.5)
} as const;

// Union of the lowercase RFC error strings above.
export type OAuthErrorCode = (typeof OAuthErrorCode)[keyof typeof OAuthErrorCode];
|
||||
|
||||
/**
|
||||
* Map a handler error code to an HTTP status code.
|
||||
*
|
||||
* Shared codes have explicit mappings. Domain-specific `*_ERROR` codes
|
||||
* (used in catch blocks via handleError) default to 500. Everything else
|
||||
* defaults to 400 (client error).
|
||||
*/
|
||||
export function mapErrorStatus(code: string | undefined): number {
|
||||
switch (code) {
|
||||
// 400 Bad Request
|
||||
case ErrorCode.VALIDATION_ERROR:
|
||||
case ErrorCode.INVALID_INPUT:
|
||||
case ErrorCode.INVALID_JSON:
|
||||
case ErrorCode.INVALID_CURSOR:
|
||||
case ErrorCode.MISSING_PARAM:
|
||||
case ErrorCode.INVALID_REQUEST:
|
||||
case ErrorCode.NOT_SUPPORTED:
|
||||
case ErrorCode.INVALID_SLUG:
|
||||
case ErrorCode.RESERVED_SLUG:
|
||||
case ErrorCode.INVALID_TYPE:
|
||||
case ErrorCode.NO_FILE:
|
||||
case ErrorCode.INVALID_STATE:
|
||||
case ErrorCode.INVALID_SEED:
|
||||
case ErrorCode.INVALID_REDIRECT:
|
||||
case ErrorCode.INVALID_TOKEN:
|
||||
case ErrorCode.INVALID_REVISION:
|
||||
case ErrorCode.INVALID_CODE:
|
||||
case ErrorCode.CHALLENGE_EXPIRED:
|
||||
case ErrorCode.EXPIRED_CODE:
|
||||
case ErrorCode.LAST_PASSKEY:
|
||||
case ErrorCode.PASSKEY_LIMIT:
|
||||
case ErrorCode.ADMIN_EXISTS:
|
||||
case ErrorCode.SETUP_COMPLETE:
|
||||
case ErrorCode.SELF_ROLE_CHANGE:
|
||||
case ErrorCode.SSRF_BLOCKED:
|
||||
case ErrorCode.UNKNOWN_ACTION:
|
||||
return 400;
|
||||
|
||||
// 401 Unauthorized
|
||||
case ErrorCode.UNAUTHORIZED:
|
||||
case ErrorCode.NOT_AUTHENTICATED:
|
||||
return 401;
|
||||
|
||||
// 403 Forbidden
|
||||
case ErrorCode.FORBIDDEN:
|
||||
case ErrorCode.CSRF_REJECTED:
|
||||
case ErrorCode.ACCOUNT_DISABLED:
|
||||
case ErrorCode.COMMENTS_DISABLED:
|
||||
case ErrorCode.COMMENTS_CLOSED:
|
||||
case ErrorCode.COMMENT_REJECTED:
|
||||
case ErrorCode.DOMAIN_NOT_ALLOWED:
|
||||
case ErrorCode.INSUFFICIENT_ROLE:
|
||||
case ErrorCode.INSUFFICIENT_SCOPE:
|
||||
case ErrorCode.INSUFFICIENT_PERMISSIONS:
|
||||
case ErrorCode.CAPABILITY_ESCALATION:
|
||||
case ErrorCode.ROUTE_VISIBILITY_ESCALATION:
|
||||
case ErrorCode.AUDIT_FAILED:
|
||||
return 403;
|
||||
|
||||
// 404 Not Found
|
||||
case ErrorCode.NOT_FOUND:
|
||||
case ErrorCode.TABLE_NOT_FOUND:
|
||||
case ErrorCode.COLLECTION_NOT_FOUND:
|
||||
case ErrorCode.FILE_NOT_FOUND:
|
||||
case ErrorCode.NO_VERSION:
|
||||
return 404;
|
||||
|
||||
// 409 Conflict
|
||||
case ErrorCode.CONFLICT:
|
||||
case ErrorCode.SLUG_CONFLICT:
|
||||
case ErrorCode.COLLECTION_EXISTS:
|
||||
case ErrorCode.FIELD_EXISTS:
|
||||
case ErrorCode.CREDENTIAL_EXISTS:
|
||||
case ErrorCode.EMAIL_IN_USE:
|
||||
case ErrorCode.USER_EXISTS:
|
||||
case ErrorCode.PLUGIN_ID_CONFLICT:
|
||||
case ErrorCode.ALREADY_INSTALLED:
|
||||
case ErrorCode.ALREADY_CONFIGURED:
|
||||
case ErrorCode.ALREADY_UP_TO_DATE:
|
||||
return 409;
|
||||
|
||||
// 410 Gone
|
||||
case ErrorCode.TOKEN_EXPIRED:
|
||||
return 410;
|
||||
|
||||
// 422 Unprocessable Entity
|
||||
case ErrorCode.CHECKSUM_MISMATCH:
|
||||
case ErrorCode.INVALID_BUNDLE:
|
||||
case ErrorCode.BUNDLE_EXTRACT_FAILED:
|
||||
return 422;
|
||||
|
||||
// 429 Too Many Requests
|
||||
case ErrorCode.RATE_LIMITED:
|
||||
return 429;
|
||||
|
||||
// 500 Internal Server Error
|
||||
case ErrorCode.NOT_CONFIGURED:
|
||||
case ErrorCode.NO_STORAGE:
|
||||
case ErrorCode.NO_DB:
|
||||
case ErrorCode.STORAGE_NOT_CONFIGURED:
|
||||
case ErrorCode.EMAIL_NOT_CONFIGURED:
|
||||
return 500;
|
||||
|
||||
// 501 Not Implemented
|
||||
case ErrorCode.NOT_IMPLEMENTED:
|
||||
return 501;
|
||||
|
||||
// 502 Bad Gateway
|
||||
case ErrorCode.BUNDLE_DOWNLOAD_FAILED:
|
||||
return 502;
|
||||
|
||||
// 503 Service Unavailable
|
||||
case ErrorCode.MARKETPLACE_UNAVAILABLE:
|
||||
case ErrorCode.MARKETPLACE_NOT_CONFIGURED:
|
||||
case ErrorCode.SANDBOX_NOT_AVAILABLE:
|
||||
return 503;
|
||||
|
||||
// Domain-specific *_ERROR codes are catch-block codes -- always 500.
|
||||
// WARNING: If adding a new code that ends in _ERROR but represents a
|
||||
// client error (4xx), add it to an explicit case above or it will
|
||||
// be incorrectly mapped to 500.
|
||||
default:
|
||||
return code?.endsWith("_ERROR") ? 500 : 400;
|
||||
}
|
||||
}
|
||||
9
packages/core/src/api/escape.ts
Normal file
9
packages/core/src/api/escape.ts
Normal file
@@ -0,0 +1,9 @@
|
||||
/** HTML-escape a string to prevent XSS when interpolated into HTML/JS */
|
||||
export function escapeHtml(str: string): string {
|
||||
return str
|
||||
.replaceAll("&", "&")
|
||||
.replaceAll("<", "<")
|
||||
.replaceAll(">", ">")
|
||||
.replaceAll('"', """)
|
||||
.replaceAll("'", "'");
|
||||
}
|
||||
240
packages/core/src/api/handlers/api-tokens.ts
Normal file
240
packages/core/src/api/handlers/api-tokens.ts
Normal file
@@ -0,0 +1,240 @@
|
||||
/**
|
||||
* API token management handlers.
|
||||
*
|
||||
* Creates, lists, and revokes Personal Access Tokens (PATs).
|
||||
* Token format: ec_pat_<base64url>
|
||||
* Only the SHA-256 hash is stored — raw token shown once at creation.
|
||||
*/
|
||||
|
||||
import type { Kysely } from "kysely";
|
||||
import { ulid } from "ulidx";
|
||||
|
||||
import { hashApiToken, generatePrefixedToken } from "../../auth/api-tokens.js";
|
||||
import type { Database } from "../../database/types.js";
|
||||
import type { ApiResult } from "../types.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Types
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
 * Metadata describing a stored API token.
 * Never includes the raw token or its hash — only display-safe fields.
 */
export interface ApiTokenInfo {
  // ULID primary key (see handleApiTokenCreate)
  id: string;
  // Human-readable label chosen at creation
  name: string;
  // Short display prefix from generatePrefixedToken — presumably the
  // leading characters of the raw token; confirm in auth/api-tokens.js
  prefix: string;
  // Granted scope strings (stored JSON-encoded in the DB)
  scopes: string[];
  // Owning user's id
  userId: string;
  // ISO datetime after which the token is rejected, or null = no expiry
  expiresAt: string | null;
  // Set on successful lookup in resolveApiToken; null if never used
  lastUsedAt: string | null;
  createdAt: string;
}
|
||||
|
||||
/**
 * Result of creating a token: the raw value (returned exactly once)
 * plus its display metadata.
 */
export interface ApiTokenCreateResult {
  /** The raw token — shown once, never stored */
  token: string;
  /** Token metadata */
  info: ApiTokenInfo;
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Handlers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Create a new API token for a user.
|
||||
*/
|
||||
export async function handleApiTokenCreate(
|
||||
db: Kysely<Database>,
|
||||
userId: string,
|
||||
input: {
|
||||
name: string;
|
||||
scopes: string[];
|
||||
expiresAt?: string;
|
||||
},
|
||||
): Promise<ApiResult<ApiTokenCreateResult>> {
|
||||
try {
|
||||
const id = ulid();
|
||||
const { raw, hash, prefix } = generatePrefixedToken("ec_pat_");
|
||||
|
||||
await db
|
||||
.insertInto("_emdash_api_tokens")
|
||||
.values({
|
||||
id,
|
||||
name: input.name,
|
||||
token_hash: hash,
|
||||
prefix,
|
||||
user_id: userId,
|
||||
scopes: JSON.stringify(input.scopes),
|
||||
expires_at: input.expiresAt ?? null,
|
||||
})
|
||||
.execute();
|
||||
|
||||
const info: ApiTokenInfo = {
|
||||
id,
|
||||
name: input.name,
|
||||
prefix,
|
||||
scopes: input.scopes,
|
||||
userId,
|
||||
expiresAt: input.expiresAt ?? null,
|
||||
lastUsedAt: null,
|
||||
createdAt: new Date().toISOString(),
|
||||
};
|
||||
|
||||
return { success: true, data: { token: raw, info } };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "TOKEN_CREATE_ERROR",
|
||||
message: "Failed to create API token",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* List all API tokens for a user (never returns the raw token or hash).
|
||||
*/
|
||||
export async function handleApiTokenList(
|
||||
db: Kysely<Database>,
|
||||
userId: string,
|
||||
): Promise<ApiResult<{ items: ApiTokenInfo[] }>> {
|
||||
try {
|
||||
const rows = await db
|
||||
.selectFrom("_emdash_api_tokens")
|
||||
.select([
|
||||
"id",
|
||||
"name",
|
||||
"prefix",
|
||||
"scopes",
|
||||
"user_id",
|
||||
"expires_at",
|
||||
"last_used_at",
|
||||
"created_at",
|
||||
])
|
||||
.where("user_id", "=", userId)
|
||||
.orderBy("created_at", "desc")
|
||||
.execute();
|
||||
|
||||
const items: ApiTokenInfo[] = rows.map((row) => ({
|
||||
id: row.id,
|
||||
name: row.name,
|
||||
prefix: row.prefix,
|
||||
scopes: JSON.parse(row.scopes) as string[],
|
||||
userId: row.user_id,
|
||||
expiresAt: row.expires_at,
|
||||
lastUsedAt: row.last_used_at,
|
||||
createdAt: row.created_at,
|
||||
}));
|
||||
|
||||
return { success: true, data: { items } };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "TOKEN_LIST_ERROR",
|
||||
message: "Failed to list API tokens",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Revoke (delete) an API token.
|
||||
*/
|
||||
export async function handleApiTokenRevoke(
|
||||
db: Kysely<Database>,
|
||||
tokenId: string,
|
||||
userId: string,
|
||||
): Promise<ApiResult<{ revoked: boolean }>> {
|
||||
try {
|
||||
const result = await db
|
||||
.deleteFrom("_emdash_api_tokens")
|
||||
.where("id", "=", tokenId)
|
||||
.where("user_id", "=", userId)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (result.numDeletedRows === 0n) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: "Token not found" },
|
||||
};
|
||||
}
|
||||
|
||||
return { success: true, data: { revoked: true } };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "TOKEN_REVOKE_ERROR",
|
||||
message: "Failed to revoke API token",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve a raw API token (ec_pat_...) to a user ID and scopes.
|
||||
* Updates last_used_at on successful lookup.
|
||||
* Returns null if the token is invalid or expired.
|
||||
*/
|
||||
export async function resolveApiToken(
|
||||
db: Kysely<Database>,
|
||||
rawToken: string,
|
||||
): Promise<{ userId: string; scopes: string[] } | null> {
|
||||
const hash = hashApiToken(rawToken);
|
||||
|
||||
const row = await db
|
||||
.selectFrom("_emdash_api_tokens")
|
||||
.select(["id", "user_id", "scopes", "expires_at"])
|
||||
.where("token_hash", "=", hash)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (!row) return null;
|
||||
|
||||
// Check expiry
|
||||
if (row.expires_at && new Date(row.expires_at) < new Date()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Update last_used_at (fire-and-forget, don't block the request)
|
||||
db.updateTable("_emdash_api_tokens")
|
||||
.set({ last_used_at: new Date().toISOString() })
|
||||
.where("id", "=", row.id)
|
||||
.execute()
|
||||
.catch(() => {}); // Non-critical, swallow errors
|
||||
|
||||
return {
|
||||
userId: row.user_id,
|
||||
scopes: JSON.parse(row.scopes) as string[],
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve an OAuth access token (ec_oat_...) to a user ID and scopes.
|
||||
* Returns null if the token is invalid or expired.
|
||||
*/
|
||||
export async function resolveOAuthToken(
|
||||
db: Kysely<Database>,
|
||||
rawToken: string,
|
||||
): Promise<{ userId: string; scopes: string[] } | null> {
|
||||
const hash = hashApiToken(rawToken);
|
||||
|
||||
const row = await db
|
||||
.selectFrom("_emdash_oauth_tokens")
|
||||
.select(["user_id", "scopes", "expires_at", "token_type"])
|
||||
.where("token_hash", "=", hash)
|
||||
.where("token_type", "=", "access")
|
||||
.executeTakeFirst();
|
||||
|
||||
if (!row) return null;
|
||||
|
||||
// Check expiry
|
||||
if (new Date(row.expires_at) < new Date()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return {
|
||||
userId: row.user_id,
|
||||
scopes: JSON.parse(row.scopes) as string[],
|
||||
};
|
||||
}
|
||||
329
packages/core/src/api/handlers/comments.ts
Normal file
329
packages/core/src/api/handlers/comments.ts
Normal file
@@ -0,0 +1,329 @@
|
||||
/**
|
||||
* Comment handlers — business logic for comment API routes.
|
||||
*
|
||||
* Standalone functions that return ApiResult<T>. Routes are thin wrappers.
|
||||
*/
|
||||
|
||||
import type { Kysely } from "kysely";
|
||||
|
||||
import { CommentRepository } from "../../database/repositories/comment.js";
|
||||
import type { Comment, CommentStatus, PublicComment } from "../../database/repositories/comment.js";
|
||||
import { InvalidCursorError } from "../../database/repositories/types.js";
|
||||
import type { Database } from "../../database/types.js";
|
||||
import type { ApiResult } from "../types.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Public: List approved comments for content
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export async function handleCommentList(
|
||||
db: Kysely<Database>,
|
||||
collection: string,
|
||||
contentId: string,
|
||||
options: { limit?: number; cursor?: string; threaded?: boolean } = {},
|
||||
): Promise<ApiResult<{ items: PublicComment[]; nextCursor?: string; total: number }>> {
|
||||
try {
|
||||
const repo = new CommentRepository(db);
|
||||
|
||||
// Get total approved count
|
||||
const total = await repo.countByContent(collection, contentId, "approved");
|
||||
|
||||
let publicItems: PublicComment[];
|
||||
let nextCursor: string | undefined;
|
||||
|
||||
if (options.threaded) {
|
||||
// Threaded mode: fetch all approved comments (capped) so threading
|
||||
// doesn't lose children that would fall on later pages.
|
||||
const MAX_THREADED = 500;
|
||||
const result = await repo.findByContent(collection, contentId, {
|
||||
status: "approved",
|
||||
limit: MAX_THREADED,
|
||||
});
|
||||
const threaded = CommentRepository.assembleThreads(result.items);
|
||||
publicItems = threaded.map((c) => CommentRepository.toPublicComment(c));
|
||||
// No cursor for threaded mode — all comments returned at once
|
||||
} else {
|
||||
const result = await repo.findByContent(collection, contentId, {
|
||||
status: "approved",
|
||||
limit: options.limit,
|
||||
cursor: options.cursor,
|
||||
});
|
||||
publicItems = result.items.map((c) => CommentRepository.toPublicComment(c));
|
||||
nextCursor = result.nextCursor;
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: {
|
||||
items: publicItems,
|
||||
nextCursor,
|
||||
total,
|
||||
},
|
||||
};
|
||||
} catch (error) {
|
||||
if (error instanceof InvalidCursorError) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "INVALID_CURSOR", message: error.message },
|
||||
};
|
||||
}
|
||||
console.error("Comment list error:", error);
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "COMMENT_LIST_ERROR",
|
||||
message: "Failed to list comments",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Admin: Moderation inbox
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export async function handleCommentInbox(
|
||||
db: Kysely<Database>,
|
||||
options: {
|
||||
status?: CommentStatus;
|
||||
collection?: string;
|
||||
search?: string;
|
||||
limit?: number;
|
||||
cursor?: string;
|
||||
} = {},
|
||||
): Promise<ApiResult<{ items: Comment[]; nextCursor?: string }>> {
|
||||
try {
|
||||
const repo = new CommentRepository(db);
|
||||
const status = options.status ?? "pending";
|
||||
|
||||
const result = await repo.findByStatus(status, {
|
||||
collection: options.collection,
|
||||
search: options.search,
|
||||
limit: options.limit,
|
||||
cursor: options.cursor,
|
||||
});
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: {
|
||||
items: result.items,
|
||||
nextCursor: result.nextCursor,
|
||||
},
|
||||
};
|
||||
} catch (error) {
|
||||
if (error instanceof InvalidCursorError) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "INVALID_CURSOR", message: error.message },
|
||||
};
|
||||
}
|
||||
console.error("Comment inbox error:", error);
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "COMMENT_INBOX_ERROR",
|
||||
message: "Failed to list comments",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Admin: Status counts for inbox badges
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export async function handleCommentCounts(
|
||||
db: Kysely<Database>,
|
||||
): Promise<ApiResult<Record<CommentStatus, number>>> {
|
||||
try {
|
||||
const repo = new CommentRepository(db);
|
||||
const counts = await repo.countByStatus();
|
||||
return { success: true, data: counts };
|
||||
} catch (error) {
|
||||
console.error("Comment counts error:", error);
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "COMMENT_COUNTS_ERROR",
|
||||
message: "Failed to get comment counts",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Admin: Get single comment detail
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export async function handleCommentGet(
|
||||
db: Kysely<Database>,
|
||||
id: string,
|
||||
): Promise<ApiResult<Comment>> {
|
||||
try {
|
||||
const repo = new CommentRepository(db);
|
||||
const comment = await repo.findById(id);
|
||||
|
||||
if (!comment) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: `Comment not found: ${id}` },
|
||||
};
|
||||
}
|
||||
|
||||
return { success: true, data: comment };
|
||||
} catch (error) {
|
||||
console.error("Comment get error:", error);
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "COMMENT_GET_ERROR",
|
||||
message: "Failed to get comment",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Admin: Change comment status
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export async function handleCommentStatusChange(
|
||||
db: Kysely<Database>,
|
||||
id: string,
|
||||
status: CommentStatus,
|
||||
): Promise<ApiResult<Comment>> {
|
||||
try {
|
||||
const repo = new CommentRepository(db);
|
||||
const updated = await repo.updateStatus(id, status);
|
||||
|
||||
if (!updated) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: `Comment not found: ${id}` },
|
||||
};
|
||||
}
|
||||
|
||||
return { success: true, data: updated };
|
||||
} catch (error) {
|
||||
console.error("Comment status change error:", error);
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "COMMENT_STATUS_ERROR",
|
||||
message: "Failed to update comment status",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Admin: Hard delete comment
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export async function handleCommentDelete(
|
||||
db: Kysely<Database>,
|
||||
id: string,
|
||||
): Promise<ApiResult<{ deleted: true }>> {
|
||||
try {
|
||||
const repo = new CommentRepository(db);
|
||||
const deleted = await repo.delete(id);
|
||||
|
||||
if (!deleted) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: `Comment not found: ${id}` },
|
||||
};
|
||||
}
|
||||
|
||||
return { success: true, data: { deleted: true } };
|
||||
} catch (error) {
|
||||
console.error("Comment delete error:", error);
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "COMMENT_DELETE_ERROR",
|
||||
message: "Failed to delete comment",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Admin: Bulk operations
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export async function handleCommentBulk(
|
||||
db: Kysely<Database>,
|
||||
ids: string[],
|
||||
action: "approve" | "spam" | "trash" | "delete",
|
||||
): Promise<ApiResult<{ affected: number }>> {
|
||||
try {
|
||||
const repo = new CommentRepository(db);
|
||||
|
||||
let affected: number;
|
||||
if (action === "delete") {
|
||||
affected = await repo.bulkDelete(ids);
|
||||
} else {
|
||||
const statusMap: Record<string, CommentStatus> = {
|
||||
approve: "approved",
|
||||
spam: "spam",
|
||||
trash: "trash",
|
||||
};
|
||||
affected = await repo.bulkUpdateStatus(ids, statusMap[action]);
|
||||
}
|
||||
|
||||
return { success: true, data: { affected } };
|
||||
} catch (error) {
|
||||
console.error("Comment bulk error:", error);
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "COMMENT_BULK_ERROR",
|
||||
message: "Failed to perform bulk operation",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Anti-spam: Rate limiting
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Check if an IP has exceeded the comment rate limit.
|
||||
* Uses ip_hash in the comments table — no separate counter storage.
|
||||
*/
|
||||
export async function checkRateLimit(
|
||||
db: Kysely<Database>,
|
||||
ipHash: string,
|
||||
maxPerWindow: number = 5,
|
||||
windowMinutes: number = 10,
|
||||
): Promise<boolean> {
|
||||
const cutoff = new Date(Date.now() - windowMinutes * 60 * 1000).toISOString();
|
||||
|
||||
// Count recent comments from this IP
|
||||
const result = await db
|
||||
.selectFrom("_emdash_comments")
|
||||
.select((eb) => eb.fn.count("id").as("count"))
|
||||
.where("ip_hash", "=", ipHash)
|
||||
.where("created_at", ">", cutoff)
|
||||
.executeTakeFirst();
|
||||
|
||||
const count = Number(result?.count ?? 0);
|
||||
return count >= maxPerWindow;
|
||||
}
|
||||
|
||||
/**
|
||||
* Hash an IP address for storage (never store cleartext IPs).
|
||||
*
|
||||
* Uses full SHA-256 with a site-specific salt to prevent rainbow-table
|
||||
* recovery of IPs. The salt must be provided by the caller — typically
|
||||
* via `resolveSecretsCached(db).ipSalt` from `#config/secrets.js`. The
|
||||
* salt is generated and persisted on first need so it's stable across
|
||||
* requests within a deployment but unique per install.
|
||||
*/
|
||||
export async function hashIp(ip: string, salt: string): Promise<string> {
|
||||
const data = `ip:${salt}:${ip}`;
|
||||
const buf = await crypto.subtle.digest("SHA-256", new TextEncoder().encode(data));
|
||||
return Array.from(new Uint8Array(buf), (b) => b.toString(16).padStart(2, "0")).join("");
|
||||
}
|
||||
1482
packages/core/src/api/handlers/content.ts
Normal file
1482
packages/core/src/api/handlers/content.ts
Normal file
File diff suppressed because it is too large
Load Diff
201
packages/core/src/api/handlers/dashboard.ts
Normal file
201
packages/core/src/api/handlers/dashboard.ts
Normal file
@@ -0,0 +1,201 @@
|
||||
/**
|
||||
* Dashboard stats handler
|
||||
*
|
||||
* Returns summary data for the admin dashboard in a single request:
|
||||
* collection content counts, media count, user count, and recent
|
||||
* content across all collections.
|
||||
*/
|
||||
|
||||
import { sql, type Kysely } from "kysely";
|
||||
|
||||
import { ContentRepository } from "../../database/repositories/content.js";
|
||||
import { MediaRepository } from "../../database/repositories/media.js";
|
||||
import { UserRepository } from "../../database/repositories/user.js";
|
||||
import type { Database } from "../../database/types.js";
|
||||
import { validateIdentifier } from "../../database/validate.js";
|
||||
import type { ApiResult } from "../types.js";
|
||||
|
||||
/** Content counts for one collection, shown on the dashboard. */
export interface CollectionStats {
  // Collection slug (identifier from _emdash_collections)
  slug: string;
  // Human-readable collection label
  label: string;
  // All items regardless of status
  total: number;
  published: number;
  draft: number;
}
|
||||
|
||||
/** A recently-updated content item, camelCase shape for the API. */
export interface RecentItem {
  id: string;
  // Slug of the collection the item belongs to
  collection: string;
  // Display label of that collection
  collectionLabel: string;
  // Best display string: title field if the collection has one,
  // else slug, else id (see fetchRecentItems COALESCE)
  title: string;
  slug: string | null;
  status: string;
  // Last-update timestamp as stored in the DB
  updatedAt: string;
  authorId: string | null;
}
|
||||
|
||||
/** Aggregate payload for the admin dashboard, returned in one request. */
export interface DashboardStats {
  collections: CollectionStats[];
  mediaCount: number;
  userCount: number;
  // Up to 10 most recently updated items across all collections
  recentItems: RecentItem[];
}
|
||||
|
||||
/**
|
||||
* Fetch dashboard statistics.
|
||||
*
|
||||
* Queries are intentionally lightweight — counts use indexed columns,
|
||||
* and recent items are capped at 10.
|
||||
*/
|
||||
export async function handleDashboardStats(
|
||||
db: Kysely<Database>,
|
||||
): Promise<ApiResult<DashboardStats>> {
|
||||
try {
|
||||
// Discover collections from the system table
|
||||
const collections = await db
|
||||
.selectFrom("_emdash_collections")
|
||||
.select(["slug", "label"])
|
||||
.orderBy("slug", "asc")
|
||||
.execute();
|
||||
|
||||
// Gather per-collection counts in parallel
|
||||
const contentRepo = new ContentRepository(db);
|
||||
const collectionStats: CollectionStats[] = await Promise.all(
|
||||
collections.map(async (col) => {
|
||||
const stats = await contentRepo.getStats(col.slug);
|
||||
return {
|
||||
slug: col.slug,
|
||||
label: col.label,
|
||||
total: stats.total,
|
||||
published: stats.published,
|
||||
draft: stats.draft,
|
||||
};
|
||||
}),
|
||||
);
|
||||
|
||||
// Media and user counts
|
||||
const mediaRepo = new MediaRepository(db);
|
||||
const userRepo = new UserRepository(db);
|
||||
const [mediaCount, userCount] = await Promise.all([mediaRepo.count(), userRepo.count()]);
|
||||
|
||||
// Recent items across all collections (last 10 updated, any status)
|
||||
const recentItems = await fetchRecentItems(db, collections);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: {
|
||||
collections: collectionStats,
|
||||
mediaCount,
|
||||
userCount,
|
||||
recentItems,
|
||||
},
|
||||
};
|
||||
} catch (error) {
|
||||
console.error("Dashboard stats error:", error);
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "DASHBOARD_STATS_ERROR",
|
||||
message: "Failed to load dashboard statistics",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/** Raw row shape from the UNION ALL query — all snake_case. */
interface RecentItemRow {
  id: string;
  // Collection slug, injected as a SQL literal per UNION branch
  collection: string;
  // Collection label, likewise injected per branch
  collection_label: string;
  // COALESCE(title, slug, id) — best available display string
  title: string;
  slug: string | null;
  status: string;
  updated_at: string;
  author_id: string | null;
}
|
||||
|
||||
/**
 * Fetch the 10 most recently updated items across all collections.
 *
 * Uses UNION ALL over each ec_* table. The query is safe because
 * collection slugs come from the system table and are validated
 * (validateIdentifier throws on anything unsafe to interpolate).
 *
 * `title` is not a standard column — it's a user-defined field. We query
 * `_emdash_fields` to discover which collections have one and fall back
 * to `slug` (which is always present) otherwise.
 */
async function fetchRecentItems(
  db: Kysely<Database>,
  collections: Array<{ slug: string; label: string }>,
): Promise<RecentItem[]> {
  // No collections → nothing to union; avoid building an empty query.
  if (collections.length === 0) return [];

  // Discover which collections have a "title" column
  const titleFields = await db
    .selectFrom("_emdash_fields as f")
    .innerJoin("_emdash_collections as c", "c.id", "f.collection_id")
    .select(["c.slug as collection_slug"])
    .where("f.slug", "=", "title")
    .execute();

  const collectionsWithTitle = new Set(titleFields.map((r) => r.collection_slug));

  // Build a UNION ALL query across all content tables.
  // Each branch is wrapped in SELECT * FROM (...) so the inner
  // ORDER BY + LIMIT is valid SQLite (bare ORDER BY inside UNION
  // branches is a syntax error in SQLite).
  const subQueries = collections.map((col) => {
    // Throws on unsafe slugs before they reach sql.ref below.
    validateIdentifier(col.slug);
    const table = `ec_${col.slug}`;
    const hasTitle = collectionsWithTitle.has(col.slug);

    // Use title column if it exists, otherwise fall back to slug → id.
    // All output uses snake_case to avoid SQLite quoting issues on D1.
    const titleExpr = hasTitle ? sql`COALESCE(title, slug, id)` : sql`COALESCE(slug, id)`;

    // Per-branch LIMIT 10 keeps the union small: the global top-10 is
    // necessarily a subset of each branch's top-10.
    return sql<RecentItemRow>`
      SELECT * FROM (
        SELECT
          id,
          ${sql.lit(col.slug)} AS collection,
          ${sql.lit(col.label)} AS collection_label,
          ${titleExpr} AS title,
          slug,
          status,
          updated_at,
          author_id
        FROM ${sql.ref(table)}
        WHERE deleted_at IS NULL
        ORDER BY updated_at DESC
        LIMIT 10
      )
    `;
  });

  // Combine with UNION ALL
  // eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion -- noUncheckedIndexedAccess
  let combined = subQueries[0]!;
  for (let i = 1; i < subQueries.length; i++) {
    // eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion -- noUncheckedIndexedAccess
    combined = sql<RecentItemRow>`${combined} UNION ALL ${subQueries[i]!}`;
  }

  // Final sort + limit across all branches
  const result = await sql<RecentItemRow>`
    SELECT * FROM (${combined})
    ORDER BY updated_at DESC
    LIMIT 10
  `.execute(db);

  // Map snake_case DB rows → camelCase API shape
  return result.rows.map((row) => ({
    id: row.id,
    collection: row.collection,
    collectionLabel: row.collection_label,
    title: row.title,
    slug: row.slug,
    status: row.status,
    updatedAt: row.updated_at,
    authorId: row.author_id,
  }));
}
|
||||
707
packages/core/src/api/handlers/device-flow.ts
Normal file
707
packages/core/src/api/handlers/device-flow.ts
Normal file
@@ -0,0 +1,707 @@
|
||||
/**
|
||||
* OAuth Device Flow handlers (RFC 8628).
|
||||
*
|
||||
* EmDash acts as an OAuth 2.0 authorization server. The CLI requests
|
||||
* a device code, displays a URL + user code, and polls for a token.
|
||||
* The user opens a browser, logs in, enters the code, and the CLI gets
|
||||
* an access + refresh token pair.
|
||||
*
|
||||
* Uses arctic for code generation and @emdash-cms/auth for token utilities.
|
||||
*/
|
||||
|
||||
import { clampScopes } from "@emdash-cms/auth";
|
||||
import type { RoleLevel } from "@emdash-cms/auth";
|
||||
import { generateCodeVerifier } from "arctic";
|
||||
import type { Kysely } from "kysely";
|
||||
|
||||
import {
|
||||
generatePrefixedToken,
|
||||
hashApiToken,
|
||||
TOKEN_PREFIXES,
|
||||
VALID_SCOPES,
|
||||
} from "../../auth/api-tokens.js";
|
||||
import { withTransaction } from "../../database/transaction.js";
|
||||
import type { Database } from "../../database/types.js";
|
||||
import type { ApiResult } from "../types.js";
|
||||
import { lookupOAuthClient } from "./oauth-clients.js";
|
||||
import { lookupUserRoleAndStatus } from "./oauth-user-lookup.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Constants
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** Device codes expire after 15 minutes */
const DEVICE_CODE_TTL_SECONDS = 15 * 60;

/** Default polling interval in seconds */
const DEFAULT_INTERVAL = 5;

/** RFC 8628 §3.5: interval increase on slow_down */
const SLOW_DOWN_INCREMENT = 5;

/** Maximum slow_down interval cap (seconds) */
const MAX_SLOW_DOWN_INTERVAL = 60;

/** Access token TTL: 1 hour */
const ACCESS_TOKEN_TTL_SECONDS = 60 * 60;

/** Refresh token TTL: 90 days */
const REFRESH_TOKEN_TTL_SECONDS = 90 * 24 * 60 * 60;

/** Default scopes for CLI login — granted when the client requests none
 * (see normalizeScopes). */
const DEFAULT_SCOPES = [
  "content:read",
  "content:write",
  "media:read",
  "media:write",
  "schema:read",
] as const;

/** Pattern to normalize user codes (strip hyphens) */
const HYPHEN_PATTERN = /-/g;

/** Characters for user codes (uppercase, no ambiguous chars like 0/O, 1/I).
 * 32 characters, so `byte % 32` sampling in generateUserCode is unbiased. */
const USER_CODE_CHARS = "ABCDEFGHJKLMNPQRSTUVWXYZ23456789";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Types
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** Device authorization response (snake_case per RFC 8628 §3.2). */
export interface DeviceCodeResponse {
  // Opaque code the CLI polls the token endpoint with
  device_code: string;
  // Short human-readable code (XXXX-XXXX) the user enters in the browser
  user_code: string;
  verification_uri: string;
  // Seconds until the device code expires
  expires_in: number;
  // Minimum seconds the client should wait between polls
  interval: number;
}
|
||||
|
||||
/** Token response returned once the device flow is authorized. */
export interface TokenResponse {
  access_token: string;
  refresh_token: string;
  token_type: "Bearer";
  // Access-token lifetime in seconds
  expires_in: number;
  // Granted scopes — presumably a single space-delimited string per
  // OAuth convention; confirm where the response is assembled
  scope: string;
}
|
||||
|
||||
// RFC 8628 error codes returned to the polling client from the token
// endpoint while the flow is pending, throttled, expired, or denied.
export type DeviceFlowError =
  | "authorization_pending"
  | "slow_down"
  | "expired_token"
  | "access_denied";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** Generate a short human-readable user code (XXXX-XXXX) */
|
||||
function generateUserCode(): string {
|
||||
const bytes = new Uint8Array(8);
|
||||
crypto.getRandomValues(bytes);
|
||||
const chars = Array.from(bytes, (b) => USER_CODE_CHARS[b % USER_CODE_CHARS.length]).join("");
|
||||
return `${chars.slice(0, 4)}-${chars.slice(4, 8)}`;
|
||||
}
|
||||
|
||||
/** Get an ISO datetime string offset from now */
|
||||
function expiresAt(seconds: number): string {
|
||||
return new Date(Date.now() + seconds * 1000).toISOString();
|
||||
}
|
||||
|
||||
/** Validate and normalize scopes. Returns validated scope list. */
|
||||
function normalizeScopes(requested?: string[]): string[] {
|
||||
if (!requested || requested.length === 0) {
|
||||
return [...DEFAULT_SCOPES];
|
||||
}
|
||||
const validSet = new Set<string>(VALID_SCOPES);
|
||||
return requested.filter((s) => validSet.has(s));
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Handlers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* POST /oauth/device/code
|
||||
*
|
||||
* Issue a device code + user code. The CLI displays the user code
|
||||
* and tells the user to open the verification URI.
|
||||
*/
|
||||
export async function handleDeviceCodeRequest(
|
||||
db: Kysely<Database>,
|
||||
input: {
|
||||
client_id?: string;
|
||||
scope?: string;
|
||||
},
|
||||
verificationUri: string,
|
||||
): Promise<ApiResult<DeviceCodeResponse>> {
|
||||
try {
|
||||
// Note: client_id is accepted but not validated against _emdash_oauth_clients
|
||||
// because the CLI uses a well-known built-in client ID ("emdash-cli") that
|
||||
// isn't stored in the DB. Full client_id validation + scope clamping for the
|
||||
// device flow is tracked as a follow-up.
|
||||
|
||||
// Parse and validate scopes
|
||||
const requestedScopes = input.scope ? input.scope.split(" ").filter(Boolean) : [];
|
||||
const scopes = normalizeScopes(requestedScopes);
|
||||
|
||||
if (scopes.length === 0) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "INVALID_SCOPE", message: "No valid scopes requested" },
|
||||
};
|
||||
}
|
||||
|
||||
const deviceCode = generateCodeVerifier();
|
||||
const userCode = generateUserCode();
|
||||
const expires = expiresAt(DEVICE_CODE_TTL_SECONDS);
|
||||
|
||||
await db
|
||||
.insertInto("_emdash_device_codes")
|
||||
.values({
|
||||
device_code: deviceCode,
|
||||
user_code: userCode,
|
||||
scopes: JSON.stringify(scopes),
|
||||
status: "pending",
|
||||
expires_at: expires,
|
||||
interval: DEFAULT_INTERVAL,
|
||||
})
|
||||
.execute();
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: {
|
||||
device_code: deviceCode,
|
||||
user_code: userCode,
|
||||
verification_uri: verificationUri,
|
||||
expires_in: DEVICE_CODE_TTL_SECONDS,
|
||||
interval: DEFAULT_INTERVAL,
|
||||
},
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "DEVICE_CODE_ERROR",
|
||||
message: "Failed to create device code",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * POST /oauth/device/token
 *
 * CLI polls this endpoint with the device_code. Returns:
 * - 200 with tokens if authorized
 * - 400 with error "authorization_pending" while waiting
 * - 400 with error "slow_down" if polling too fast
 * - 400 with error "expired_token" if the code expired
 * - 400 with error "access_denied" if the user denied
 *
 * @param db - Database handle.
 * @param input - The device_code being polled and the RFC 8628 grant_type URN.
 * @returns Token pair on success; otherwise a failure result that may carry
 *   `deviceFlowError` (and `deviceFlowInterval` for slow_down) so the route
 *   layer can map it to the RFC 8628 wire error.
 */
export async function handleDeviceTokenExchange(
  db: Kysely<Database>,
  input: {
    device_code: string;
    grant_type: string;
  },
): Promise<
  ApiResult<TokenResponse> & { deviceFlowError?: DeviceFlowError; deviceFlowInterval?: number }
> {
  try {
    // Validate grant_type (must be the RFC 8628 device-code grant URN)
    if (input.grant_type !== "urn:ietf:params:oauth:grant-type:device_code") {
      return {
        success: false,
        error: { code: "UNSUPPORTED_GRANT_TYPE", message: "Invalid grant_type" },
      };
    }

    // Look up the device code
    const row = await db
      .selectFrom("_emdash_device_codes")
      .selectAll()
      .where("device_code", "=", input.device_code)
      .executeTakeFirst();

    if (!row) {
      return {
        success: false,
        error: { code: "INVALID_GRANT", message: "Invalid device code" },
      };
    }

    // Snapshot "now" once so the expiry and slow_down checks below agree.
    const now = new Date();

    // Check expiry
    if (new Date(row.expires_at) < now) {
      // Clean up expired code
      await db
        .deleteFrom("_emdash_device_codes")
        .where("device_code", "=", input.device_code)
        .execute();

      return {
        success: false,
        deviceFlowError: "expired_token",
        error: { code: "expired_token", message: "The device code has expired" },
      };
    }

    // Check status
    if (row.status === "denied") {
      // Clean up denied code
      await db
        .deleteFrom("_emdash_device_codes")
        .where("device_code", "=", input.device_code)
        .execute();

      return {
        success: false,
        deviceFlowError: "access_denied",
        error: { code: "access_denied", message: "The user denied the request" },
      };
    }

    if (row.status === "pending") {
      // RFC 8628 §3.5: slow_down enforcement during polling phase.
      // Only applies while waiting for authorization — once authorized,
      // the final exchange proceeds without throttling.
      if (row.last_polled_at) {
        const lastPolled = new Date(row.last_polled_at);
        const elapsedSeconds = (now.getTime() - lastPolled.getTime()) / 1000;

        if (elapsedSeconds < row.interval) {
          // Too fast — increase interval by 5s per RFC 8628 §3.5, capped at 60s
          const newInterval = Math.min(row.interval + SLOW_DOWN_INCREMENT, MAX_SLOW_DOWN_INTERVAL);
          await db
            .updateTable("_emdash_device_codes")
            .set({
              interval: newInterval,
              last_polled_at: now.toISOString(),
            })
            .where("device_code", "=", input.device_code)
            .execute();

          return {
            success: false,
            deviceFlowError: "slow_down",
            deviceFlowInterval: newInterval,
            error: { code: "slow_down", message: "Polling too fast" },
          };
        }
      }

      // Update last_polled_at for future slow_down checks
      await db
        .updateTable("_emdash_device_codes")
        .set({ last_polled_at: now.toISOString() })
        .where("device_code", "=", input.device_code)
        .execute();

      return {
        success: false,
        deviceFlowError: "authorization_pending",
        error: { code: "authorization_pending", message: "Authorization pending" },
      };
    }

    // Any status other than "authorized" (or a missing user_id) at this
    // point is an unexpected state — treat as an invalid grant.
    if (row.status !== "authorized" || !row.user_id) {
      return {
        success: false,
        error: { code: "INVALID_GRANT", message: "Invalid device code state" },
      };
    }

    // Generate tokens before consuming the device code so that if
    // generation fails, the code is still available for retry.
    const accessToken = generatePrefixedToken(TOKEN_PREFIXES.OAUTH_ACCESS);
    const accessExpires = expiresAt(ACCESS_TOKEN_TTL_SECONDS);
    const refreshToken = generatePrefixedToken(TOKEN_PREFIXES.OAUTH_REFRESH);
    const refreshExpires = expiresAt(REFRESH_TOKEN_TTL_SECONDS);

    // Atomically consume the device code and create tokens in a single
    // transaction. DELETE...RETURNING prevents TOCTOU: two concurrent
    // requests race on the DELETE, only one gets a row back. Wrapping
    // in a transaction ensures the code isn't consumed if token storage fails.
    const result = await withTransaction(db, async (trx) => {
      const consumed = await trx
        .deleteFrom("_emdash_device_codes")
        .where("device_code", "=", input.device_code)
        .where("status", "=", "authorized")
        .returningAll()
        .executeTakeFirst();

      // Lost the race (another poll consumed it first) → signal caller.
      if (!consumed) return null;

      if (!consumed.user_id) return null;

      const scopes = JSON.parse(consumed.scopes) as string[];

      // Access-token row links back to its refresh token via refresh_token_hash
      // so revoking the refresh token can cascade.
      await trx
        .insertInto("_emdash_oauth_tokens")
        .values({
          token_hash: accessToken.hash,
          token_type: "access",
          user_id: consumed.user_id,
          scopes: JSON.stringify(scopes),
          client_type: "cli",
          expires_at: accessExpires,
          refresh_token_hash: refreshToken.hash,
        })
        .execute();

      await trx
        .insertInto("_emdash_oauth_tokens")
        .values({
          token_hash: refreshToken.hash,
          token_type: "refresh",
          user_id: consumed.user_id,
          scopes: JSON.stringify(scopes),
          client_type: "cli",
          expires_at: refreshExpires,
          refresh_token_hash: null,
        })
        .execute();

      return { scopes };
    });

    if (!result) {
      return {
        success: false,
        error: { code: "INVALID_GRANT", message: "Device code already consumed" },
      };
    }

    return {
      success: true,
      data: {
        access_token: accessToken.raw,
        refresh_token: refreshToken.raw,
        token_type: "Bearer",
        expires_in: ACCESS_TOKEN_TTL_SECONDS,
        scope: result.scopes.join(" "),
      },
    };
  } catch {
    return {
      success: false,
      error: {
        code: "TOKEN_EXCHANGE_ERROR",
        message: "Failed to exchange device code",
      },
    };
  }
}
|
||||
/**
|
||||
* POST /oauth/device/authorize
|
||||
*
|
||||
* The user submits the user_code after logging in via the browser.
|
||||
* This authorizes the device code, allowing the CLI to exchange it for tokens.
|
||||
*
|
||||
* Scopes are clamped to the user's role at this point. The stored scopes
|
||||
* are replaced with the intersection of requested scopes and the scopes
|
||||
* the user's role permits. This prevents scope escalation.
|
||||
*/
|
||||
export async function handleDeviceAuthorize(
|
||||
db: Kysely<Database>,
|
||||
userId: string,
|
||||
userRole: RoleLevel,
|
||||
input: {
|
||||
user_code: string;
|
||||
action?: "approve" | "deny";
|
||||
},
|
||||
): Promise<ApiResult<{ authorized: boolean }>> {
|
||||
try {
|
||||
// Normalize user code (strip hyphens, uppercase)
|
||||
const normalizedCode = input.user_code.replace(HYPHEN_PATTERN, "").toUpperCase();
|
||||
|
||||
// Look up the device code by user_code
|
||||
const row = await db
|
||||
.selectFrom("_emdash_device_codes")
|
||||
.selectAll()
|
||||
.where("status", "=", "pending")
|
||||
.execute();
|
||||
|
||||
// Find the matching code (strip hyphens for comparison)
|
||||
const match = row.find(
|
||||
(r) => r.user_code.replace(HYPHEN_PATTERN, "").toUpperCase() === normalizedCode,
|
||||
);
|
||||
|
||||
if (!match) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "INVALID_CODE", message: "Invalid or expired code" },
|
||||
};
|
||||
}
|
||||
|
||||
// Check expiry
|
||||
if (new Date(match.expires_at) < new Date()) {
|
||||
await db
|
||||
.deleteFrom("_emdash_device_codes")
|
||||
.where("device_code", "=", match.device_code)
|
||||
.execute();
|
||||
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "EXPIRED_CODE", message: "This code has expired" },
|
||||
};
|
||||
}
|
||||
|
||||
const action = input.action ?? "approve";
|
||||
|
||||
if (action === "deny") {
|
||||
await db
|
||||
.updateTable("_emdash_device_codes")
|
||||
.set({ status: "denied" })
|
||||
.where("device_code", "=", match.device_code)
|
||||
.execute();
|
||||
|
||||
return { success: true, data: { authorized: false } };
|
||||
}
|
||||
|
||||
// Clamp requested scopes to those the user's role permits.
|
||||
// effective_scopes = requested_scopes ∩ scopesForRole(user.role)
|
||||
const requestedScopes = JSON.parse(match.scopes) as string[];
|
||||
const effectiveScopes = clampScopes(requestedScopes, userRole);
|
||||
|
||||
if (effectiveScopes.length === 0) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "INSUFFICIENT_ROLE",
|
||||
message: "Your role does not permit any of the requested scopes",
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// Approve: set user_id, status, and clamped scopes
|
||||
await db
|
||||
.updateTable("_emdash_device_codes")
|
||||
.set({
|
||||
status: "authorized",
|
||||
user_id: userId,
|
||||
scopes: JSON.stringify(effectiveScopes),
|
||||
})
|
||||
.where("device_code", "=", match.device_code)
|
||||
.execute();
|
||||
|
||||
return { success: true, data: { authorized: true } };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "AUTHORIZE_ERROR",
|
||||
message: "Failed to authorize device",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * POST /oauth/token/refresh
 *
 * Exchange a refresh token for a new access token.
 * The refresh token itself is not rotated (per spec: optional rotation).
 *
 * @param db - Database handle.
 * @param input - The raw refresh token and grant_type ("refresh_token").
 * @returns A new access token (with the same refresh token echoed back)
 *   on success, or a failure result with an OAuth-style error code.
 */
export async function handleTokenRefresh(
  db: Kysely<Database>,
  input: {
    refresh_token: string;
    grant_type: string;
  },
): Promise<ApiResult<TokenResponse>> {
  try {
    if (input.grant_type !== "refresh_token") {
      return {
        success: false,
        error: { code: "UNSUPPORTED_GRANT_TYPE", message: "Invalid grant_type" },
      };
    }

    // Cheap prefix check before hashing/querying: refresh tokens are
    // issued with a fixed prefix, so anything else can be rejected early.
    if (!input.refresh_token.startsWith(TOKEN_PREFIXES.OAUTH_REFRESH)) {
      return {
        success: false,
        error: { code: "INVALID_GRANT", message: "Invalid refresh token format" },
      };
    }

    // Tokens are stored hashed; look up by hash, never by raw value.
    const refreshHash = hashApiToken(input.refresh_token);

    const row = await db
      .selectFrom("_emdash_oauth_tokens")
      .selectAll()
      .where("token_hash", "=", refreshHash)
      .where("token_type", "=", "refresh")
      .executeTakeFirst();

    if (!row) {
      return {
        success: false,
        error: { code: "INVALID_GRANT", message: "Invalid refresh token" },
      };
    }

    // Check expiry
    if (new Date(row.expires_at) < new Date()) {
      // Clean up expired refresh token and its access tokens
      await db.deleteFrom("_emdash_oauth_tokens").where("token_hash", "=", refreshHash).execute();
      await db
        .deleteFrom("_emdash_oauth_tokens")
        .where("refresh_token_hash", "=", refreshHash)
        .execute();

      return {
        success: false,
        error: { code: "INVALID_GRANT", message: "Refresh token expired" },
      };
    }

    // SEC-42: Revalidate user role before issuing new access token.
    // SEC-43: Reject refresh if user is disabled or deleted.
    const userInfo = await lookupUserRoleAndStatus(db, row.user_id);
    if (!userInfo) {
      // User no longer exists — revoke all their tokens
      await db.deleteFrom("_emdash_oauth_tokens").where("user_id", "=", row.user_id).execute();
      return {
        success: false,
        error: { code: "INVALID_GRANT", message: "User not found" },
      };
    }

    if (userInfo.disabled) {
      // User is disabled — revoke all their tokens
      await db.deleteFrom("_emdash_oauth_tokens").where("user_id", "=", row.user_id).execute();
      return {
        success: false,
        error: { code: "INVALID_GRANT", message: "User account is disabled" },
      };
    }

    // Revalidate stored scopes against the user's current role.
    // A demoted user's refresh token may carry stale elevated scopes.
    const storedScopes = JSON.parse(row.scopes) as string[];
    let scopes = clampScopes(storedScopes, userInfo.role);

    // SEC-41: Intersect with the client's registered scopes (if any).
    // Same check as the approval path — a client registered with limited
    // scopes should never receive elevated scopes on refresh, even if the
    // user's role would allow them.
    if (row.client_id) {
      const client = await lookupOAuthClient(db, row.client_id);
      if (client?.scopes?.length) {
        scopes = scopes.filter((s: string) => client.scopes!.includes(s));
      }
    }

    if (scopes.length === 0) {
      // User's role no longer supports any of the token's scopes — revoke
      // both the refresh token and any access tokens minted from it.
      await db.deleteFrom("_emdash_oauth_tokens").where("token_hash", "=", refreshHash).execute();
      await db
        .deleteFrom("_emdash_oauth_tokens")
        .where("refresh_token_hash", "=", refreshHash)
        .execute();
      return {
        success: false,
        error: {
          code: "INVALID_GRANT",
          message: "User role no longer supports any of the token's scopes",
        },
      };
    }

    // Delete old access tokens for this refresh token
    await db
      .deleteFrom("_emdash_oauth_tokens")
      .where("refresh_token_hash", "=", refreshHash)
      .where("token_type", "=", "access")
      .execute();

    // Generate new access token
    const accessToken = generatePrefixedToken(TOKEN_PREFIXES.OAUTH_ACCESS);
    const accessExpires = expiresAt(ACCESS_TOKEN_TTL_SECONDS);

    await db
      .insertInto("_emdash_oauth_tokens")
      .values({
        token_hash: accessToken.hash,
        token_type: "access",
        user_id: row.user_id,
        scopes: JSON.stringify(scopes),
        client_type: row.client_type,
        expires_at: accessExpires,
        refresh_token_hash: refreshHash,
      })
      .execute();

    return {
      success: true,
      data: {
        access_token: accessToken.raw,
        refresh_token: input.refresh_token, // Return same refresh token
        token_type: "Bearer",
        expires_in: ACCESS_TOKEN_TTL_SECONDS,
        scope: scopes.join(" "),
      },
    };
  } catch {
    return {
      success: false,
      error: {
        code: "TOKEN_REFRESH_ERROR",
        message: "Failed to refresh token",
      },
    };
  }
}
|
||||
/**
|
||||
* POST /oauth/token/revoke
|
||||
*
|
||||
* Revoke an access or refresh token. If a refresh token is revoked,
|
||||
* also revoke all associated access tokens.
|
||||
*
|
||||
* Per RFC 7009, this endpoint always returns 200 (even for invalid tokens).
|
||||
*/
|
||||
export async function handleTokenRevoke(
|
||||
db: Kysely<Database>,
|
||||
input: {
|
||||
token: string;
|
||||
},
|
||||
): Promise<ApiResult<{ revoked: boolean }>> {
|
||||
try {
|
||||
const hash = hashApiToken(input.token);
|
||||
|
||||
// Look up the token
|
||||
const row = await db
|
||||
.selectFrom("_emdash_oauth_tokens")
|
||||
.select(["token_hash", "token_type", "refresh_token_hash"])
|
||||
.where("token_hash", "=", hash)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (!row) {
|
||||
// Per RFC 7009: always 200, even for invalid tokens
|
||||
return { success: true, data: { revoked: true } };
|
||||
}
|
||||
|
||||
if (row.token_type === "refresh") {
|
||||
// Revoke refresh token and all its access tokens
|
||||
await db.deleteFrom("_emdash_oauth_tokens").where("refresh_token_hash", "=", hash).execute();
|
||||
await db.deleteFrom("_emdash_oauth_tokens").where("token_hash", "=", hash).execute();
|
||||
} else {
|
||||
// Revoke just the access token
|
||||
await db.deleteFrom("_emdash_oauth_tokens").where("token_hash", "=", hash).execute();
|
||||
}
|
||||
|
||||
return { success: true, data: { revoked: true } };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "TOKEN_REVOKE_ERROR",
|
||||
message: "Failed to revoke token",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
170
packages/core/src/api/handlers/index.ts
Normal file
170
packages/core/src/api/handlers/index.ts
Normal file
@@ -0,0 +1,170 @@
|
||||
/**
|
||||
* API handler implementations for EmDash REST endpoints
|
||||
*
|
||||
* Re-exports all handlers from their respective modules
|
||||
*/
|
||||
|
||||
// Content handlers
|
||||
export {
|
||||
handleContentList,
|
||||
handleContentGet,
|
||||
handleContentGetIncludingTrashed,
|
||||
handleContentCreate,
|
||||
handleContentUpdate,
|
||||
handleContentDuplicate,
|
||||
handleContentDelete,
|
||||
handleContentRestore,
|
||||
handleContentPermanentDelete,
|
||||
handleContentListTrashed,
|
||||
handleContentCountTrashed,
|
||||
handleContentSchedule,
|
||||
handleContentUnschedule,
|
||||
handleContentPublish,
|
||||
handleContentUnpublish,
|
||||
handleContentCountScheduled,
|
||||
handleContentDiscardDraft,
|
||||
handleContentCompare,
|
||||
handleContentTranslations,
|
||||
type TrashedContentItem,
|
||||
} from "./content.js";
|
||||
|
||||
// Dashboard stats
|
||||
export {
|
||||
handleDashboardStats,
|
||||
type CollectionStats,
|
||||
type DashboardStats,
|
||||
type RecentItem,
|
||||
} from "./dashboard.js";
|
||||
|
||||
// Manifest generation
|
||||
export { generateManifest } from "./manifest.js";
|
||||
|
||||
// Revision handlers
|
||||
export {
|
||||
handleRevisionList,
|
||||
handleRevisionGet,
|
||||
handleRevisionRestore,
|
||||
type RevisionListResponse,
|
||||
type RevisionResponse,
|
||||
} from "./revision.js";
|
||||
|
||||
// Media handlers
|
||||
export {
|
||||
handleMediaList,
|
||||
handleMediaGet,
|
||||
handleMediaCreate,
|
||||
handleMediaUpdate,
|
||||
handleMediaDelete,
|
||||
type MediaListResponse,
|
||||
type MediaResponse,
|
||||
} from "./media.js";
|
||||
|
||||
// Schema handlers
|
||||
export {
|
||||
handleSchemaCollectionList,
|
||||
handleSchemaCollectionGet,
|
||||
handleSchemaCollectionCreate,
|
||||
handleSchemaCollectionUpdate,
|
||||
handleSchemaCollectionDelete,
|
||||
handleSchemaFieldList,
|
||||
handleSchemaFieldGet,
|
||||
handleSchemaFieldCreate,
|
||||
handleSchemaFieldUpdate,
|
||||
handleSchemaFieldDelete,
|
||||
handleSchemaFieldReorder,
|
||||
handleOrphanedTableList,
|
||||
handleOrphanedTableRegister,
|
||||
type CollectionListResponse,
|
||||
type CollectionResponse,
|
||||
type CollectionWithFieldsResponse,
|
||||
type FieldListResponse,
|
||||
type FieldResponse,
|
||||
type OrphanedTable,
|
||||
type OrphanedTableListResponse,
|
||||
} from "./schema.js";
|
||||
|
||||
// SEO handlers
|
||||
export {
|
||||
handleSitemapData,
|
||||
type SitemapCollectionData,
|
||||
type SitemapContentEntry,
|
||||
type SitemapDataResponse,
|
||||
} from "./seo.js";
|
||||
|
||||
// Plugin handlers
|
||||
export {
|
||||
handlePluginList,
|
||||
handlePluginGet,
|
||||
handlePluginEnable,
|
||||
handlePluginDisable,
|
||||
type PluginInfo,
|
||||
type PluginListResponse,
|
||||
type PluginResponse,
|
||||
} from "./plugins.js";
|
||||
|
||||
// Menu handlers
|
||||
export {
|
||||
handleMenuList,
|
||||
handleMenuCreate,
|
||||
handleMenuGet,
|
||||
handleMenuUpdate,
|
||||
handleMenuDelete,
|
||||
handleMenuItemCreate,
|
||||
handleMenuItemUpdate,
|
||||
handleMenuItemDelete,
|
||||
handleMenuItemReorder,
|
||||
handleMenuSetItems,
|
||||
type MenuListItem,
|
||||
type MenuWithItems,
|
||||
type CreateMenuItemInput,
|
||||
type UpdateMenuItemInput,
|
||||
type ReorderItem,
|
||||
type MenuSetItemsInput,
|
||||
} from "./menus.js";
|
||||
|
||||
// Section handlers
|
||||
export {
|
||||
handleSectionList,
|
||||
handleSectionCreate,
|
||||
handleSectionGet,
|
||||
handleSectionUpdate,
|
||||
handleSectionDelete,
|
||||
type SectionListResponse,
|
||||
} from "./sections.js";
|
||||
|
||||
// Settings handlers
|
||||
export { handleSettingsGet, handleSettingsUpdate } from "./settings.js";
|
||||
|
||||
// Taxonomy handlers
|
||||
export {
|
||||
handleTaxonomyList,
|
||||
handleTermList,
|
||||
handleTermCreate,
|
||||
handleTermGet,
|
||||
handleTermUpdate,
|
||||
handleTermDelete,
|
||||
type TaxonomyDef,
|
||||
type TaxonomyListResponse,
|
||||
type TermData,
|
||||
type TermWithCount,
|
||||
type TermListResponse,
|
||||
type TermResponse,
|
||||
type TermGetResponse,
|
||||
} from "./taxonomies.js";
|
||||
|
||||
// Marketplace handlers
|
||||
export {
|
||||
handleMarketplaceInstall,
|
||||
handleMarketplaceUpdate,
|
||||
handleMarketplaceUninstall,
|
||||
handleMarketplaceUpdateCheck,
|
||||
handleMarketplaceSearch,
|
||||
handleMarketplaceGetPlugin,
|
||||
handleThemeSearch,
|
||||
handleThemeGetDetail,
|
||||
loadBundleFromR2,
|
||||
type MarketplaceInstallResult,
|
||||
type MarketplaceUpdateResult,
|
||||
type MarketplaceUpdateCheck,
|
||||
type MarketplaceUninstallResult,
|
||||
} from "./marketplace.js";
|
||||
158
packages/core/src/api/handlers/manifest.ts
Normal file
158
packages/core/src/api/handlers/manifest.ts
Normal file
@@ -0,0 +1,158 @@
|
||||
/**
|
||||
* Manifest generation handlers
|
||||
*/
|
||||
|
||||
import { hashString } from "../../utils/hash.js";
|
||||
import type { ManifestResponse, FieldDescriptor } from "../types.js";
|
||||
|
||||
/** Pattern to add spaces before capital letters */
const CAMEL_CASE_PATTERN = /([A-Z])/g;
// Matches the first character of a string (used by formatLabel to uppercase it).
const FIRST_CHAR_PATTERN = /^./;

// Collection definition shape for manifest generation
interface CollectionDefinition {
  // Zod-like schema: Zod object schemas expose their shape via _def.shape(),
  // plain-object schemas via a direct .shape record.
  schema: {
    _def?: { shape?: () => Record<string, unknown> };
    shape?: Record<string, unknown>;
  };
  // Admin-UI presentation metadata for the collection.
  admin: {
    label: string;
    labelSingular?: string;
    supports?: string[];
  };
}
// Collection name → definition, as passed to generateManifest.
type CollectionMap = Record<string, CollectionDefinition>;
|
||||
/**
|
||||
* Generate admin manifest from collections
|
||||
*/
|
||||
export async function generateManifest(
|
||||
collections: CollectionMap,
|
||||
plugins: Record<
|
||||
string,
|
||||
{
|
||||
adminPages?: Array<{ path: string; component: string }>;
|
||||
widgets?: string[];
|
||||
}
|
||||
> = {},
|
||||
): Promise<ManifestResponse> {
|
||||
const manifestCollections: ManifestResponse["collections"] = {};
|
||||
|
||||
for (const [name, definition] of Object.entries(collections)) {
|
||||
// Extract field descriptors from Zod schema
|
||||
const fields = extractFieldDescriptors(definition.schema);
|
||||
|
||||
manifestCollections[name] = {
|
||||
label: definition.admin.label,
|
||||
labelSingular: definition.admin.labelSingular || definition.admin.label,
|
||||
supports: definition.admin.supports || [],
|
||||
fields,
|
||||
};
|
||||
}
|
||||
|
||||
// Generate hash from collections (for cache invalidation)
|
||||
const hash = await hashString(JSON.stringify(manifestCollections));
|
||||
|
||||
return {
|
||||
version: "0.1.0",
|
||||
hash,
|
||||
collections: manifestCollections,
|
||||
plugins,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract field descriptors from Zod schema
|
||||
* Note: This is a simplified implementation that handles common types
|
||||
*/
|
||||
function extractFieldDescriptors(schema: {
|
||||
_def?: { shape?: () => Record<string, unknown> };
|
||||
shape?: Record<string, unknown>;
|
||||
}): Record<string, FieldDescriptor> {
|
||||
const fields: Record<string, FieldDescriptor> = {};
|
||||
|
||||
// Handle Zod object schema
|
||||
const shape = typeof schema._def?.shape === "function" ? schema._def.shape() : schema.shape || {};
|
||||
|
||||
for (const [name, fieldSchema] of Object.entries(shape)) {
|
||||
fields[name] = extractFieldType(name, fieldSchema);
|
||||
}
|
||||
|
||||
return fields;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract field type from Zod schema
|
||||
*/
|
||||
/** Type guard: check if a value is a non-null object */
|
||||
function isObject(value: unknown): value is Record<string, unknown> {
|
||||
return typeof value === "object" && value !== null;
|
||||
}
|
||||
|
||||
function extractFieldType(name: string, schema: unknown): FieldDescriptor {
|
||||
if (!isObject(schema)) {
|
||||
return { kind: "string", label: formatLabel(name) };
|
||||
}
|
||||
|
||||
// Check for custom field markers
|
||||
if (schema.isPortableText) {
|
||||
return { kind: "portableText", label: formatLabel(name) };
|
||||
}
|
||||
if (schema.isImage) {
|
||||
return { kind: "image", label: formatLabel(name) };
|
||||
}
|
||||
if (schema.isReference) {
|
||||
return { kind: "reference", label: formatLabel(name) };
|
||||
}
|
||||
|
||||
// Handle standard Zod types
|
||||
const def = isObject(schema._def) ? schema._def : undefined;
|
||||
const typeName = typeof def?.typeName === "string" ? def.typeName : undefined;
|
||||
|
||||
switch (typeName) {
|
||||
case "ZodString":
|
||||
return { kind: "string", label: formatLabel(name) };
|
||||
case "ZodNumber":
|
||||
return { kind: "number", label: formatLabel(name) };
|
||||
case "ZodBoolean":
|
||||
return { kind: "boolean", label: formatLabel(name) };
|
||||
case "ZodDate":
|
||||
return { kind: "datetime", label: formatLabel(name) };
|
||||
case "ZodEnum": {
|
||||
const values = Array.isArray(def?.values) ? def.values : [];
|
||||
return {
|
||||
kind: "select",
|
||||
label: formatLabel(name),
|
||||
options: values
|
||||
.filter((v): v is string => typeof v === "string")
|
||||
.map((v) => ({
|
||||
value: v,
|
||||
label: v.charAt(0).toUpperCase() + v.slice(1),
|
||||
})),
|
||||
};
|
||||
}
|
||||
case "ZodArray":
|
||||
return { kind: "array", label: formatLabel(name) };
|
||||
case "ZodObject":
|
||||
return { kind: "object", label: formatLabel(name) };
|
||||
case "ZodOptional":
|
||||
case "ZodDefault":
|
||||
// Unwrap optional/default types
|
||||
if (def?.innerType) {
|
||||
return extractFieldType(name, def.innerType);
|
||||
}
|
||||
return { kind: "string", label: formatLabel(name) };
|
||||
default:
|
||||
return { kind: "string", label: formatLabel(name) };
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Format field name as label
|
||||
*/
|
||||
function formatLabel(name: string): string {
|
||||
return name
|
||||
.replace(CAMEL_CASE_PATTERN, " $1")
|
||||
.replace(FIRST_CHAR_PATTERN, (str) => str.toUpperCase())
|
||||
.trim();
|
||||
}
|
||||
940
packages/core/src/api/handlers/marketplace.ts
Normal file
940
packages/core/src/api/handlers/marketplace.ts
Normal file
@@ -0,0 +1,940 @@
|
||||
/**
|
||||
* Marketplace plugin handlers
|
||||
*
|
||||
* Business logic for installing, updating, uninstalling, and checking
|
||||
* updates for marketplace plugins. Routes are thin wrappers around these.
|
||||
*/
|
||||
|
||||
import type { Kysely } from "kysely";
|
||||
|
||||
import type { Database } from "../../database/types.js";
|
||||
import { validatePluginIdentifier } from "../../database/validate.js";
|
||||
import { pluginManifestSchema } from "../../plugins/manifest-schema.js";
|
||||
import { normalizeManifestRoute } from "../../plugins/manifest-schema.js";
|
||||
import {
|
||||
createMarketplaceClient,
|
||||
MarketplaceError,
|
||||
MarketplaceUnavailableError,
|
||||
type MarketplaceClient,
|
||||
type MarketplacePluginDetail,
|
||||
type MarketplaceSearchOpts,
|
||||
type MarketplaceThemeSearchOpts,
|
||||
type MarketplaceVersionSummary,
|
||||
type PluginBundle,
|
||||
} from "../../plugins/marketplace.js";
|
||||
import type { SandboxRunner } from "../../plugins/sandbox/types.js";
|
||||
import { PluginStateRepository } from "../../plugins/state.js";
|
||||
import { normalizeCapabilities } from "../../plugins/types.js";
|
||||
import type { PluginManifest } from "../../plugins/types.js";
|
||||
import { EmDashStorageError } from "../../storage/types.js";
|
||||
import type { Storage } from "../../storage/types.js";
|
||||
import type { ApiResult } from "../types.js";
|
||||
|
||||
// ── Types ──────────────────────────────────────────────────────────
|
||||
|
||||
/** Result of installing a marketplace plugin. */
export interface MarketplaceInstallResult {
  pluginId: string;
  version: string;
  // Capabilities declared by the installed version's manifest.
  capabilities: string[];
}

/** Result of updating an installed plugin to a new version. */
export interface MarketplaceUpdateResult {
  pluginId: string;
  oldVersion: string;
  newVersion: string;
  // Capabilities gained/lost between the old and new manifests.
  capabilityChanges: {
    added: string[];
    removed: string[];
  };
  // Routes that changed from private to public in the new version, if any.
  routeVisibilityChanges?: {
    newlyPublic: string[];
  };
}

/** Per-plugin update availability summary. */
export interface MarketplaceUpdateCheck {
  pluginId: string;
  // Currently installed version string.
  installed: string;
  // Latest version available in the marketplace.
  latest: string;
  hasUpdate: boolean;
  hasCapabilityChanges: boolean;
  capabilityChanges?: {
    added: string[];
    removed: string[];
  };
  hasRouteVisibilityChanges: boolean;
  routeVisibilityChanges?: {
    newlyPublic: string[];
  };
}

/** Result of uninstalling a plugin. */
export interface MarketplaceUninstallResult {
  pluginId: string;
  // Whether the plugin's stored data was deleted along with the plugin.
  dataDeleted: boolean;
}
|
||||
// ── Helpers ────────────────────────────────────────────────────────
|
||||
|
||||
/** Semver-like pattern: digits, dots, hyphens, plus signs (e.g. 1.0.0, 1.0.0-beta.1) */
|
||||
const VERSION_PATTERN = /^[a-z0-9][a-z0-9._+-]*$/i;
|
||||
|
||||
function validateVersion(version: string): void {
|
||||
if (version.includes("..")) throw new Error("Invalid version format");
|
||||
if (!VERSION_PATTERN.test(version)) {
|
||||
throw new Error("Invalid version format");
|
||||
}
|
||||
}
|
||||
|
||||
function getClient(
|
||||
marketplaceUrl: string | undefined,
|
||||
siteOrigin?: string,
|
||||
): MarketplaceClient | null {
|
||||
if (!marketplaceUrl) return null;
|
||||
return createMarketplaceClient(marketplaceUrl, siteOrigin);
|
||||
}
|
||||
|
||||
function diffCapabilities(
|
||||
oldCaps: string[],
|
||||
newCaps: string[],
|
||||
): { added: string[]; removed: string[] } {
|
||||
// Normalize both sides before diffing so that an installed v1 manifest
|
||||
// declaring `read:content` and an upgrade v2 manifest declaring
|
||||
// `content:read` produces an empty diff — users should not see a
|
||||
// spurious "capability changed" prompt for a pure rename.
|
||||
const oldNorm = normalizeCapabilities(oldCaps);
|
||||
const newNorm = normalizeCapabilities(newCaps);
|
||||
const oldSet = new Set(oldNorm);
|
||||
const newSet = new Set(newNorm);
|
||||
return {
|
||||
added: newNorm.filter((c) => !oldSet.has(c)),
|
||||
removed: oldNorm.filter((c) => !newSet.has(c)),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Diff route visibility between two manifests.
|
||||
* Returns routes that changed from private to public (newly exposed).
|
||||
*/
|
||||
function diffRouteVisibility(
|
||||
oldManifest: PluginManifest | undefined,
|
||||
newManifest: PluginManifest,
|
||||
): { newlyPublic: string[] } {
|
||||
const oldPublicRoutes = new Set<string>();
|
||||
if (oldManifest) {
|
||||
for (const entry of oldManifest.routes) {
|
||||
const normalized = normalizeManifestRoute(entry);
|
||||
if (normalized.public === true) {
|
||||
oldPublicRoutes.add(normalized.name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const newlyPublic: string[] = [];
|
||||
for (const entry of newManifest.routes) {
|
||||
const normalized = normalizeManifestRoute(entry);
|
||||
if (normalized.public === true && !oldPublicRoutes.has(normalized.name)) {
|
||||
newlyPublic.push(normalized.name);
|
||||
}
|
||||
}
|
||||
|
||||
return { newlyPublic };
|
||||
}
|
||||
|
||||
async function resolveVersionMetadata(
|
||||
client: MarketplaceClient,
|
||||
pluginId: string,
|
||||
pluginDetail: MarketplacePluginDetail,
|
||||
version: string,
|
||||
): Promise<MarketplaceVersionSummary | null> {
|
||||
if (pluginDetail.latestVersion?.version === version) {
|
||||
return {
|
||||
version: pluginDetail.latestVersion.version,
|
||||
minEmDashVersion: pluginDetail.latestVersion.minEmDashVersion,
|
||||
bundleSize: pluginDetail.latestVersion.bundleSize,
|
||||
checksum: pluginDetail.latestVersion.checksum,
|
||||
changelog: pluginDetail.latestVersion.changelog,
|
||||
capabilities: pluginDetail.latestVersion.capabilities,
|
||||
status: pluginDetail.latestVersion.status,
|
||||
auditVerdict: pluginDetail.latestVersion.audit?.verdict ?? null,
|
||||
imageAuditVerdict: pluginDetail.latestVersion.imageAudit?.verdict ?? null,
|
||||
publishedAt: pluginDetail.latestVersion.publishedAt,
|
||||
};
|
||||
}
|
||||
|
||||
const versions = await client.getVersions(pluginId);
|
||||
return versions.find((v) => v.version === version) ?? null;
|
||||
}
|
||||
|
||||
function validateBundleIdentity(
|
||||
bundle: PluginBundle,
|
||||
pluginId: string,
|
||||
version: string,
|
||||
): ApiResult<never> | null {
|
||||
if (bundle.manifest.id !== pluginId) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "MANIFEST_MISMATCH",
|
||||
message: `Bundle manifest ID (${bundle.manifest.id}) does not match requested plugin (${pluginId})`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (bundle.manifest.version !== version) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "MANIFEST_VERSION_MISMATCH",
|
||||
message: `Bundle manifest version (${bundle.manifest.version}) does not match requested version (${version})`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/** Store a plugin bundle's files in site-local R2 storage */
|
||||
async function storeBundleInR2(
|
||||
storage: Storage,
|
||||
pluginId: string,
|
||||
version: string,
|
||||
bundle: PluginBundle,
|
||||
): Promise<void> {
|
||||
validatePluginIdentifier(pluginId, "plugin ID");
|
||||
validateVersion(version);
|
||||
const prefix = `marketplace/${pluginId}/${version}`;
|
||||
|
||||
// Store manifest
|
||||
await storage.upload({
|
||||
key: `${prefix}/manifest.json`,
|
||||
body: new TextEncoder().encode(JSON.stringify(bundle.manifest)),
|
||||
contentType: "application/json",
|
||||
});
|
||||
|
||||
// Store backend code
|
||||
await storage.upload({
|
||||
key: `${prefix}/backend.js`,
|
||||
body: new TextEncoder().encode(bundle.backendCode),
|
||||
contentType: "application/javascript",
|
||||
});
|
||||
|
||||
// Store admin code if present
|
||||
if (bundle.adminCode) {
|
||||
await storage.upload({
|
||||
key: `${prefix}/admin.js`,
|
||||
body: new TextEncoder().encode(bundle.adminCode),
|
||||
contentType: "application/javascript",
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/** Read a ReadableStream to string */
|
||||
async function streamToText(stream: ReadableStream<Uint8Array>): Promise<string> {
|
||||
return new Response(stream).text();
|
||||
}
|
||||
|
||||
/**
 * Load a plugin bundle from site-local R2 storage.
 *
 * Reads `marketplace/<pluginId>/<version>/{manifest.json,backend.js}` and,
 * when present, `admin.js`. The manifest is re-validated with Zod on every
 * load so a corrupted or tampered stored manifest is treated the same as a
 * missing bundle.
 *
 * @returns The parsed bundle, or null when any required file is missing or
 *   the manifest fails schema validation. Never throws for storage errors.
 * @throws Only for invalid `pluginId`/`version` inputs (validation happens
 *   before the try block).
 */
export async function loadBundleFromR2(
  storage: Storage,
  pluginId: string,
  version: string,
): Promise<{ manifest: PluginManifest; backendCode: string; adminCode?: string } | null> {
  // Validate both key components up front so the prefix cannot be escaped.
  validatePluginIdentifier(pluginId, "plugin ID");
  validateVersion(version);
  const prefix = `marketplace/${pluginId}/${version}`;

  try {
    const manifestResult = await storage.download(`${prefix}/manifest.json`);
    const backendResult = await storage.download(`${prefix}/backend.js`);

    const manifestText = await streamToText(manifestResult.body);
    const backendCode = await streamToText(backendResult.body);
    const parsed: unknown = JSON.parse(manifestText);
    const result = pluginManifestSchema.safeParse(parsed);
    if (!result.success) return null;
    // Elements are validated as unknown[] by Zod; cast to PluginManifest
    // for the Element[] type (Block Kit validation happens at render time).
    // eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion -- Zod types elements as unknown[]; Element type validated at render time
    const manifest = result.data as unknown as PluginManifest;

    // Try to load admin code (optional)
    let adminCode: string | undefined;
    try {
      const adminResult = await storage.download(`${prefix}/admin.js`);
      adminCode = await streamToText(adminResult.body);
    } catch {
      // admin.js is optional
    }

    return { manifest, backendCode, adminCode };
  } catch {
    // Missing required file, unreadable stream, or invalid JSON — all map
    // to "no bundle available".
    return null;
  }
}
|
||||
|
||||
/** Delete a plugin bundle from site-local R2 storage */
|
||||
async function deleteBundleFromR2(
|
||||
storage: Storage,
|
||||
pluginId: string,
|
||||
version: string,
|
||||
): Promise<void> {
|
||||
validatePluginIdentifier(pluginId, "plugin ID");
|
||||
validateVersion(version);
|
||||
const prefix = `marketplace/${pluginId}/${version}`;
|
||||
const files = ["manifest.json", "backend.js", "admin.js"];
|
||||
|
||||
for (const file of files) {
|
||||
try {
|
||||
await storage.delete(`${prefix}/${file}`);
|
||||
} catch {
|
||||
// Ignore missing files
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ── Install ────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Install a marketplace plugin onto this site.
 *
 * Pipeline, in order: precondition checks (marketplace configured, storage
 * present, sandbox runner available) → duplicate/ID-conflict checks →
 * resolve the requested version's marketplace metadata → audit-verdict
 * gate → bundle download with checksum and manifest-identity verification →
 * persist bundle to R2 and plugin state to the database → fire-and-forget
 * install stat.
 *
 * Never rejects: every failure mode is mapped to a failed ApiResult with a
 * stable error code.
 *
 * @param opts.version Specific version to install; defaults to latest.
 * @param opts.configuredPluginIds IDs of configured (trusted) plugins, used
 *   to refuse shadowing installs.
 * @param opts.siteOrigin Forwarded to the marketplace client.
 */
export async function handleMarketplaceInstall(
  db: Kysely<Database>,
  storage: Storage | null,
  sandboxRunner: SandboxRunner | null,
  marketplaceUrl: string | undefined,
  pluginId: string,
  opts?: { version?: string; configuredPluginIds?: Set<string>; siteOrigin?: string },
): Promise<ApiResult<MarketplaceInstallResult>> {
  const client = getClient(marketplaceUrl, opts?.siteOrigin);
  if (!client) {
    return {
      success: false,
      error: {
        code: "MARKETPLACE_NOT_CONFIGURED",
        message: "Marketplace is not configured",
      },
    };
  }

  if (!storage) {
    return {
      success: false,
      error: {
        code: "STORAGE_NOT_CONFIGURED",
        message: "Storage is required for marketplace plugin installation",
      },
    };
  }

  if (!sandboxRunner || !sandboxRunner.isAvailable()) {
    return {
      success: false,
      error: {
        code: "SANDBOX_NOT_AVAILABLE",
        message: "Sandbox runner is required for marketplace plugins",
      },
    };
  }

  try {
    // Check if already installed
    const stateRepo = new PluginStateRepository(db);
    const existing = await stateRepo.get(pluginId);
    if (existing && existing.source === "marketplace") {
      return {
        success: false,
        error: {
          code: "ALREADY_INSTALLED",
          message: `Plugin ${pluginId} is already installed`,
        },
      };
    }

    // Block installation if a configured (trusted) plugin with the same ID exists.
    // Without this check, the sandboxed plugin could shadow the trusted plugin's
    // route handlers while auth decisions are made against the trusted plugin's metadata.
    if (opts?.configuredPluginIds?.has(pluginId)) {
      return {
        success: false,
        error: {
          code: "PLUGIN_ID_CONFLICT",
          message: `Cannot install marketplace plugin "${pluginId}" — a configured plugin with the same ID already exists`,
        },
      };
    }

    // Fetch plugin detail from marketplace
    const pluginDetail = await client.getPlugin(pluginId);
    const version = opts?.version ?? pluginDetail.latestVersion?.version;
    if (!version) {
      return {
        success: false,
        error: {
          code: "NO_VERSION",
          message: `No published versions found for plugin ${pluginId}`,
        },
      };
    }

    const versionMetadata = await resolveVersionMetadata(client, pluginId, pluginDetail, version);
    if (!versionMetadata) {
      return {
        success: false,
        error: {
          code: "NO_VERSION",
          message: `Version ${version} was not found for plugin ${pluginId}`,
        },
      };
    }

    // Block installation of plugins that haven't passed audit.
    // Both "fail" (explicitly malicious) and "warn" (audit error or
    // inconclusive) are non-installable — only "pass" or null (no audit
    // ran) are allowed through.
    if (versionMetadata.auditVerdict === "fail" || versionMetadata.auditVerdict === "warn") {
      return {
        success: false,
        error: {
          code: "AUDIT_FAILED",
          message:
            versionMetadata.auditVerdict === "fail"
              ? "Plugin failed security audit and cannot be installed"
              : "Plugin audit was inconclusive and cannot be installed until reviewed",
        },
      };
    }

    // Download and extract bundle
    const bundle = await client.downloadBundle(pluginId, version);

    // Verify checksum matches marketplace-published checksum
    if (versionMetadata.checksum && bundle.checksum !== versionMetadata.checksum) {
      return {
        success: false,
        error: {
          code: "CHECKSUM_MISMATCH",
          message: "Bundle checksum does not match marketplace record. Download may be corrupted.",
        },
      };
    }

    const bundleIdentityError = validateBundleIdentity(bundle, pluginId, version);
    if (bundleIdentityError) return bundleIdentityError;

    // Store bundle in site-local R2
    await storeBundleInR2(storage, pluginId, version, bundle);

    // Write plugin state
    await stateRepo.upsert(pluginId, version, "active", {
      source: "marketplace",
      marketplaceVersion: version,
      displayName: pluginDetail.name,
      description: pluginDetail.description ?? undefined,
    });

    // Fire-and-forget install stat
    client.reportInstall(pluginId, version).catch(() => {
      // Intentional: never fails the install
    });
    return {
      success: true,
      data: {
        pluginId,
        version,
        capabilities: bundle.manifest.capabilities,
      },
    };
  } catch (err) {
    // Map known error classes to stable codes; anything unrecognized
    // becomes a generic INSTALL_FAILED after being logged.
    if (err instanceof MarketplaceUnavailableError) {
      return {
        success: false,
        error: {
          code: "MARKETPLACE_UNAVAILABLE",
          message: "Plugin marketplace is currently unavailable",
        },
      };
    }
    if (err instanceof MarketplaceError) {
      return {
        success: false,
        error: {
          code: err.code ?? "MARKETPLACE_ERROR",
          message: err.message,
        },
      };
    }
    if (err instanceof EmDashStorageError) {
      return {
        success: false,
        error: {
          code: err.code ?? "STORAGE_ERROR",
          message: "Storage error while installing plugin",
        },
      };
    }
    // Last resort for duck-typed errors that carry a string `code` but are
    // not instances of the classes above (e.g. cross-realm errors).
    if (err && typeof err === "object" && "code" in err) {
      const code = (err as { code?: unknown }).code;
      if (typeof code === "string" && code.trim()) {
        return {
          success: false,
          error: {
            code,
            message: "Failed to install plugin from marketplace",
          },
        };
      }
    }
    console.error("Failed to install marketplace plugin:", err);
    return {
      success: false,
      error: {
        code: "INSTALL_FAILED",
        message: "Failed to install plugin from marketplace",
      },
    };
  }
}
|
||||
|
||||
// ── Update ─────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Update an installed marketplace plugin to a new version.
 *
 * Mirrors the install pipeline (download, checksum, manifest-identity
 * checks) and adds two consent gates computed against the currently stored
 * bundle: a CAPABILITY_ESCALATION error when the new version adds
 * capabilities and `confirmCapabilityChanges` is not set, and a
 * ROUTE_VISIBILITY_ESCALATION error when previously private routes become
 * public and `confirmRouteVisibilityChanges` is not set. The old bundle is
 * deleted from R2 best-effort after the new state is written.
 *
 * Never rejects: all failures are returned as failed ApiResults.
 */
export async function handleMarketplaceUpdate(
  db: Kysely<Database>,
  storage: Storage | null,
  sandboxRunner: SandboxRunner | null,
  marketplaceUrl: string | undefined,
  pluginId: string,
  opts?: {
    version?: string;
    confirmCapabilityChanges?: boolean;
    confirmRouteVisibilityChanges?: boolean;
  },
): Promise<ApiResult<MarketplaceUpdateResult>> {
  const client = getClient(marketplaceUrl);
  if (!client) {
    return {
      success: false,
      error: { code: "MARKETPLACE_NOT_CONFIGURED", message: "Marketplace is not configured" },
    };
  }
  if (!storage) {
    return {
      success: false,
      error: { code: "STORAGE_NOT_CONFIGURED", message: "Storage is required" },
    };
  }
  if (!sandboxRunner || !sandboxRunner.isAvailable()) {
    return {
      success: false,
      error: { code: "SANDBOX_NOT_AVAILABLE", message: "Sandbox runner is required" },
    };
  }

  try {
    const stateRepo = new PluginStateRepository(db);
    const existing = await stateRepo.get(pluginId);
    if (!existing || existing.source !== "marketplace") {
      return {
        success: false,
        error: {
          code: "NOT_FOUND",
          message: `No marketplace plugin found: ${pluginId}`,
        },
      };
    }

    const oldVersion = existing.marketplaceVersion ?? existing.version;

    // Get target version
    const pluginDetail = await client.getPlugin(pluginId);
    const newVersion = opts?.version ?? pluginDetail.latestVersion?.version;
    if (!newVersion) {
      return {
        success: false,
        error: { code: "NO_VERSION", message: "No newer version available" },
      };
    }

    if (newVersion === oldVersion) {
      return {
        success: false,
        error: { code: "ALREADY_UP_TO_DATE", message: "Plugin is already up to date" },
      };
    }

    const versionMetadata = await resolveVersionMetadata(
      client,
      pluginId,
      pluginDetail,
      newVersion,
    );
    if (!versionMetadata) {
      return {
        success: false,
        error: {
          code: "NO_VERSION",
          message: `Version ${newVersion} was not found for plugin ${pluginId}`,
        },
      };
    }

    // Download new bundle
    const bundle = await client.downloadBundle(pluginId, newVersion);

    // Verify checksum matches marketplace-published checksum for this version
    if (versionMetadata.checksum && bundle.checksum !== versionMetadata.checksum) {
      return {
        success: false,
        error: {
          code: "CHECKSUM_MISMATCH",
          message: "Bundle checksum does not match marketplace record. Download may be corrupted.",
        },
      };
    }

    const bundleIdentityError = validateBundleIdentity(bundle, pluginId, newVersion);
    if (bundleIdentityError) return bundleIdentityError;

    // Diff capabilities and route visibility against old version.
    // oldBundle may be null (e.g. bundle missing from R2); in that case the
    // old capability list is treated as empty, so every capability in the
    // new manifest counts as an addition.
    const oldBundle = await loadBundleFromR2(storage, pluginId, oldVersion);
    const oldCaps = oldBundle?.manifest.capabilities ?? [];
    const capabilityChanges = diffCapabilities(oldCaps, bundle.manifest.capabilities);
    const hasEscalation = capabilityChanges.added.length > 0;

    // If capabilities escalated, require explicit confirmation
    if (hasEscalation && !opts?.confirmCapabilityChanges) {
      return {
        success: false,
        error: {
          code: "CAPABILITY_ESCALATION",
          message: "Plugin update requires new capabilities",
          details: { capabilityChanges },
        },
      };
    }

    // Diff route visibility — routes going from private to public are a
    // security-sensitive change that exposes unauthenticated endpoints.
    const routeVisibilityChanges = diffRouteVisibility(oldBundle?.manifest, bundle.manifest);
    const hasNewPublicRoutes = routeVisibilityChanges.newlyPublic.length > 0;

    if (hasNewPublicRoutes && !opts?.confirmRouteVisibilityChanges) {
      return {
        success: false,
        error: {
          code: "ROUTE_VISIBILITY_ESCALATION",
          message: "Plugin update exposes new public (unauthenticated) routes",
          details: { routeVisibilityChanges, capabilityChanges },
        },
      };
    }

    // Store new bundle
    await storeBundleInR2(storage, pluginId, newVersion, bundle);

    // Update state
    await stateRepo.upsert(pluginId, newVersion, "active", {
      source: "marketplace",
      marketplaceVersion: newVersion,
      displayName: pluginDetail.name,
      description: pluginDetail.description ?? undefined,
    });

    // Clean up old bundle from R2 (best-effort)
    deleteBundleFromR2(storage, pluginId, oldVersion).catch(() => {});

    return {
      success: true,
      data: {
        pluginId,
        oldVersion,
        newVersion,
        capabilityChanges,
        routeVisibilityChanges: hasNewPublicRoutes ? routeVisibilityChanges : undefined,
      },
    };
  } catch (err) {
    if (err instanceof MarketplaceUnavailableError) {
      return {
        success: false,
        error: { code: "MARKETPLACE_UNAVAILABLE", message: "Marketplace is unavailable" },
      };
    }
    if (err instanceof MarketplaceError) {
      return {
        success: false,
        error: { code: err.code ?? "MARKETPLACE_ERROR", message: err.message },
      };
    }
    console.error("Failed to update marketplace plugin:", err);
    return {
      success: false,
      error: { code: "UPDATE_FAILED", message: "Failed to update plugin" },
    };
  }
}
|
||||
|
||||
// ── Uninstall ──────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Uninstall a marketplace plugin: remove its bundle from R2 (when storage
 * is configured), optionally purge its `_plugin_storage` rows, then delete
 * its state row.
 *
 * @param opts.deleteData When true, also delete the plugin's stored data;
 *   reflected in `dataDeleted` on the result.
 */
export async function handleMarketplaceUninstall(
  db: Kysely<Database>,
  storage: Storage | null,
  pluginId: string,
  opts?: { deleteData?: boolean },
): Promise<ApiResult<MarketplaceUninstallResult>> {
  try {
    const stateRepo = new PluginStateRepository(db);
    const existing = await stateRepo.get(pluginId);
    if (!existing || existing.source !== "marketplace") {
      return {
        success: false,
        error: {
          code: "NOT_FOUND",
          message: `No marketplace plugin found: ${pluginId}`,
        },
      };
    }

    const version = existing.marketplaceVersion ?? existing.version;

    // Delete bundle from site R2
    if (storage) {
      await deleteBundleFromR2(storage, pluginId, version);
    }

    // Optionally delete plugin storage data
    let dataDeleted = false;
    if (opts?.deleteData) {
      try {
        await db.deleteFrom("_plugin_storage").where("plugin_id", "=", pluginId).execute();
        dataDeleted = true;
      } catch {
        // Plugin storage table may not have data for this plugin
      }
    }

    // Delete state row
    await stateRepo.delete(pluginId);

    return {
      success: true,
      data: { pluginId, dataDeleted },
    };
  } catch (err) {
    console.error("Failed to uninstall marketplace plugin:", err);
    return {
      success: false,
      error: {
        code: "UNINSTALL_FAILED",
        message: "Failed to uninstall plugin",
      },
    };
  }
}
|
||||
|
||||
// ── Update check ───────────────────────────────────────────────────
|
||||
|
||||
export async function handleMarketplaceUpdateCheck(
|
||||
db: Kysely<Database>,
|
||||
marketplaceUrl: string | undefined,
|
||||
): Promise<ApiResult<{ items: MarketplaceUpdateCheck[] }>> {
|
||||
const client = getClient(marketplaceUrl);
|
||||
if (!client) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "MARKETPLACE_NOT_CONFIGURED", message: "Marketplace is not configured" },
|
||||
};
|
||||
}
|
||||
|
||||
try {
|
||||
const stateRepo = new PluginStateRepository(db);
|
||||
const marketplacePlugins = await stateRepo.getMarketplacePlugins();
|
||||
|
||||
const items: MarketplaceUpdateCheck[] = [];
|
||||
|
||||
for (const plugin of marketplacePlugins) {
|
||||
try {
|
||||
const detail = await client.getPlugin(plugin.pluginId);
|
||||
const latest = detail.latestVersion?.version;
|
||||
const installed = plugin.marketplaceVersion ?? plugin.version;
|
||||
|
||||
if (!latest) continue;
|
||||
|
||||
const hasUpdate = latest !== installed;
|
||||
let capabilityChanges: { added: string[]; removed: string[] } | undefined;
|
||||
let hasCapabilityChanges = false;
|
||||
|
||||
if (hasUpdate && detail.latestVersion) {
|
||||
const oldCaps = detail.capabilities ?? [];
|
||||
const newCaps = detail.latestVersion.capabilities ?? [];
|
||||
capabilityChanges = diffCapabilities(oldCaps, newCaps);
|
||||
hasCapabilityChanges =
|
||||
capabilityChanges.added.length > 0 || capabilityChanges.removed.length > 0;
|
||||
}
|
||||
|
||||
items.push({
|
||||
pluginId: plugin.pluginId,
|
||||
installed,
|
||||
latest: latest ?? installed,
|
||||
hasUpdate,
|
||||
hasCapabilityChanges,
|
||||
capabilityChanges: hasCapabilityChanges ? capabilityChanges : undefined,
|
||||
// Route visibility changes require downloading both bundles to compare
|
||||
// manifests, which is too expensive for a preview check. The actual
|
||||
// enforcement happens at update time in handleMarketplaceUpdate.
|
||||
hasRouteVisibilityChanges: false,
|
||||
});
|
||||
} catch (err) {
|
||||
// Skip plugins that can't be checked (marketplace down, plugin delisted)
|
||||
console.warn(`Failed to check updates for ${plugin.pluginId}:`, err);
|
||||
}
|
||||
}
|
||||
|
||||
return { success: true, data: { items } };
|
||||
} catch (err) {
|
||||
if (err instanceof MarketplaceUnavailableError) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "MARKETPLACE_UNAVAILABLE", message: "Marketplace is unavailable" },
|
||||
};
|
||||
}
|
||||
console.error("Failed to check marketplace updates:", err);
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "UPDATE_CHECK_FAILED", message: "Failed to check for updates" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// ── Proxy ──────────────────────────────────────────────────────────
|
||||
|
||||
export async function handleMarketplaceSearch(
|
||||
marketplaceUrl: string | undefined,
|
||||
query?: string,
|
||||
opts?: MarketplaceSearchOpts,
|
||||
): Promise<ApiResult<unknown>> {
|
||||
const client = getClient(marketplaceUrl);
|
||||
if (!client) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "MARKETPLACE_NOT_CONFIGURED", message: "Marketplace is not configured" },
|
||||
};
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await client.search(query, opts);
|
||||
return { success: true, data: result };
|
||||
} catch (err) {
|
||||
if (err instanceof MarketplaceUnavailableError) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "MARKETPLACE_UNAVAILABLE", message: "Marketplace is unavailable" },
|
||||
};
|
||||
}
|
||||
console.error("Failed to search marketplace:", err);
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "SEARCH_FAILED", message: "Failed to search marketplace" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export async function handleMarketplaceGetPlugin(
|
||||
marketplaceUrl: string | undefined,
|
||||
pluginId: string,
|
||||
): Promise<ApiResult<unknown>> {
|
||||
const client = getClient(marketplaceUrl);
|
||||
if (!client) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "MARKETPLACE_NOT_CONFIGURED", message: "Marketplace is not configured" },
|
||||
};
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await client.getPlugin(pluginId);
|
||||
return { success: true, data: result };
|
||||
} catch (err) {
|
||||
if (err instanceof MarketplaceError && err.status === 404) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: `Plugin not found: ${pluginId}` },
|
||||
};
|
||||
}
|
||||
if (err instanceof MarketplaceUnavailableError) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "MARKETPLACE_UNAVAILABLE", message: "Marketplace is unavailable" },
|
||||
};
|
||||
}
|
||||
console.error("Failed to get marketplace plugin:", err);
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "GET_PLUGIN_FAILED", message: "Failed to get plugin details" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// ── Theme proxy handlers ──────────────────────────────────────────
|
||||
|
||||
export async function handleThemeSearch(
|
||||
marketplaceUrl: string | undefined,
|
||||
query?: string,
|
||||
opts?: MarketplaceThemeSearchOpts,
|
||||
): Promise<ApiResult<unknown>> {
|
||||
const client = getClient(marketplaceUrl);
|
||||
if (!client) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "MARKETPLACE_NOT_CONFIGURED", message: "Marketplace is not configured" },
|
||||
};
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await client.searchThemes(query, opts);
|
||||
return { success: true, data: result };
|
||||
} catch (err) {
|
||||
if (err instanceof MarketplaceUnavailableError) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "MARKETPLACE_UNAVAILABLE", message: "Marketplace is unavailable" },
|
||||
};
|
||||
}
|
||||
console.error("Failed to search themes:", err);
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "THEME_SEARCH_FAILED", message: "Failed to search themes" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export async function handleThemeGetDetail(
|
||||
marketplaceUrl: string | undefined,
|
||||
themeId: string,
|
||||
): Promise<ApiResult<unknown>> {
|
||||
const client = getClient(marketplaceUrl);
|
||||
if (!client) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "MARKETPLACE_NOT_CONFIGURED", message: "Marketplace is not configured" },
|
||||
};
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await client.getTheme(themeId);
|
||||
return { success: true, data: result };
|
||||
} catch (err) {
|
||||
if (err instanceof MarketplaceError && err.status === 404) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: `Theme not found: ${themeId}` },
|
||||
};
|
||||
}
|
||||
if (err instanceof MarketplaceUnavailableError) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "MARKETPLACE_UNAVAILABLE", message: "Marketplace is unavailable" },
|
||||
};
|
||||
}
|
||||
console.error("Failed to get marketplace theme:", err);
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "GET_THEME_FAILED", message: "Failed to get theme details" },
|
||||
};
|
||||
}
|
||||
}
|
||||
214
packages/core/src/api/handlers/media.ts
Normal file
214
packages/core/src/api/handlers/media.ts
Normal file
@@ -0,0 +1,214 @@
|
||||
/**
|
||||
* Media CRUD handlers
|
||||
*/
|
||||
|
||||
import type { Kysely } from "kysely";
|
||||
|
||||
import { MediaRepository, type MediaItem } from "../../database/repositories/media.js";
|
||||
import { InvalidCursorError } from "../../database/repositories/types.js";
|
||||
import type { Database } from "../../database/types.js";
|
||||
import type { ApiResult } from "../types.js";
|
||||
|
||||
/** One page of media items plus the cursor for fetching the next page. */
export interface MediaListResponse {
  items: MediaItem[];
  // Opaque pagination cursor; absent when this is the last page.
  nextCursor?: string;
}

/** Wrapper around a single media item. */
export interface MediaResponse {
  item: MediaItem;
}
|
||||
|
||||
/**
|
||||
* List media items
|
||||
*/
|
||||
export async function handleMediaList(
|
||||
db: Kysely<Database>,
|
||||
params: {
|
||||
cursor?: string;
|
||||
limit?: number;
|
||||
mimeType?: string;
|
||||
},
|
||||
): Promise<ApiResult<MediaListResponse>> {
|
||||
try {
|
||||
const repo = new MediaRepository(db);
|
||||
const result = await repo.findMany({
|
||||
cursor: params.cursor,
|
||||
limit: Math.min(params.limit || 50, 100),
|
||||
mimeType: params.mimeType,
|
||||
});
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: {
|
||||
items: result.items,
|
||||
nextCursor: result.nextCursor,
|
||||
},
|
||||
};
|
||||
} catch (error) {
|
||||
if (error instanceof InvalidCursorError) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "INVALID_CURSOR", message: error.message },
|
||||
};
|
||||
}
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "MEDIA_LIST_ERROR",
|
||||
message: "Failed to list media",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get single media item
|
||||
*/
|
||||
export async function handleMediaGet(
|
||||
db: Kysely<Database>,
|
||||
id: string,
|
||||
): Promise<ApiResult<MediaResponse>> {
|
||||
try {
|
||||
const repo = new MediaRepository(db);
|
||||
const item = await repo.findById(id);
|
||||
|
||||
if (!item) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "NOT_FOUND",
|
||||
message: `Media item not found: ${id}`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { item },
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "MEDIA_GET_ERROR",
|
||||
message: "Failed to get media",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create media item (after file upload)
|
||||
*/
|
||||
export async function handleMediaCreate(
|
||||
db: Kysely<Database>,
|
||||
input: {
|
||||
filename: string;
|
||||
mimeType: string;
|
||||
size?: number;
|
||||
width?: number;
|
||||
height?: number;
|
||||
alt?: string;
|
||||
storageKey: string;
|
||||
contentHash?: string;
|
||||
blurhash?: string;
|
||||
dominantColor?: string;
|
||||
authorId?: string;
|
||||
},
|
||||
): Promise<ApiResult<MediaResponse>> {
|
||||
try {
|
||||
const repo = new MediaRepository(db);
|
||||
const item = await repo.create(input);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { item },
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "MEDIA_CREATE_ERROR",
|
||||
message: "Failed to create media",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update media metadata
|
||||
*/
|
||||
export async function handleMediaUpdate(
|
||||
db: Kysely<Database>,
|
||||
id: string,
|
||||
input: {
|
||||
alt?: string;
|
||||
caption?: string;
|
||||
width?: number;
|
||||
height?: number;
|
||||
},
|
||||
): Promise<ApiResult<MediaResponse>> {
|
||||
try {
|
||||
const repo = new MediaRepository(db);
|
||||
const item = await repo.update(id, input);
|
||||
|
||||
if (!item) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "NOT_FOUND",
|
||||
message: `Media item not found: ${id}`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { item },
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "MEDIA_UPDATE_ERROR",
|
||||
message: "Failed to update media",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete media item
|
||||
*/
|
||||
export async function handleMediaDelete(
|
||||
db: Kysely<Database>,
|
||||
id: string,
|
||||
): Promise<ApiResult<{ deleted: true }>> {
|
||||
try {
|
||||
const repo = new MediaRepository(db);
|
||||
const deleted = await repo.delete(id);
|
||||
|
||||
if (!deleted) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "NOT_FOUND",
|
||||
message: `Media item not found: ${id}`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { deleted: true },
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "MEDIA_DELETE_ERROR",
|
||||
message: "Failed to delete media",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
635
packages/core/src/api/handlers/menus.ts
Normal file
635
packages/core/src/api/handlers/menus.ts
Normal file
@@ -0,0 +1,635 @@
|
||||
/**
|
||||
* Menu CRUD handlers
|
||||
*
|
||||
* Business logic for menu and menu-item endpoints.
|
||||
* Routes are thin wrappers that parse input, check auth, and call these.
|
||||
*/
|
||||
|
||||
import type { Kysely } from "kysely";
|
||||
import { ulid } from "ulidx";
|
||||
|
||||
import { withTransaction } from "../../database/transaction.js";
|
||||
import type { Database, MenuItemTable, MenuTable } from "../../database/types.js";
|
||||
import type { ApiResult } from "../types.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Response types
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Row shapes as returned to API consumers: the table types' timestamp
// columns are re-declared as `string` here.
// NOTE(review): assumes the driver yields these columns as ISO text —
// confirm against database/types.ts.
type MenuRow = Omit<MenuTable, "created_at" | "updated_at"> & {
  created_at: string;
  updated_at: string;
};

type MenuItemRow = Omit<MenuItemTable, "created_at"> & {
  created_at: string;
};

/** Menu summary for list views, with a per-menu item count. */
export interface MenuListItem extends MenuRow {
  itemCount: number;
}

/** A menu plus its items, ordered by sort_order (see handleMenuGet). */
export interface MenuWithItems extends MenuRow {
  items: MenuItemRow[];
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Menu handlers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* List all menus with item counts.
|
||||
*/
|
||||
export async function handleMenuList(db: Kysely<Database>): Promise<ApiResult<MenuListItem[]>> {
|
||||
try {
|
||||
// Single query: LEFT JOIN + GROUP BY for the per-menu item count.
|
||||
// Avoids the N+1 of one count query per menu.
|
||||
const rows = await db
|
||||
.selectFrom("_emdash_menus as m")
|
||||
.leftJoin("_emdash_menu_items as i", "i.menu_id", "m.id")
|
||||
.select(({ fn }) => [
|
||||
"m.id",
|
||||
"m.name",
|
||||
"m.label",
|
||||
"m.created_at",
|
||||
"m.updated_at",
|
||||
fn.count<number>("i.id").as("itemCount"),
|
||||
])
|
||||
.groupBy(["m.id", "m.name", "m.label", "m.created_at", "m.updated_at"])
|
||||
.orderBy("m.name", "asc")
|
||||
.execute();
|
||||
|
||||
// SQLite returns count as `number`, but some dialects (Postgres)
|
||||
// return `string` from a count() aggregate. Normalize to number.
|
||||
const menusWithCounts: MenuListItem[] = rows.map((row) => ({
|
||||
id: row.id,
|
||||
name: row.name,
|
||||
label: row.label,
|
||||
created_at: row.created_at,
|
||||
updated_at: row.updated_at,
|
||||
itemCount: typeof row.itemCount === "string" ? Number(row.itemCount) : row.itemCount,
|
||||
}));
|
||||
|
||||
return { success: true, data: menusWithCounts };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "MENU_LIST_ERROR", message: "Failed to fetch menus" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new menu.
|
||||
*/
|
||||
export async function handleMenuCreate(
|
||||
db: Kysely<Database>,
|
||||
input: { name: string; label: string },
|
||||
): Promise<ApiResult<MenuRow>> {
|
||||
try {
|
||||
const existing = await db
|
||||
.selectFrom("_emdash_menus")
|
||||
.select("id")
|
||||
.where("name", "=", input.name)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (existing) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "CONFLICT", message: `Menu with name "${input.name}" already exists` },
|
||||
};
|
||||
}
|
||||
|
||||
const id = ulid();
|
||||
await db
|
||||
.insertInto("_emdash_menus")
|
||||
.values({
|
||||
id,
|
||||
name: input.name,
|
||||
label: input.label,
|
||||
})
|
||||
.execute();
|
||||
|
||||
const menu = await db
|
||||
.selectFrom("_emdash_menus")
|
||||
.selectAll()
|
||||
.where("id", "=", id)
|
||||
.executeTakeFirstOrThrow();
|
||||
|
||||
return { success: true, data: menu };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "MENU_CREATE_ERROR", message: "Failed to create menu" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a single menu with all its items.
|
||||
*/
|
||||
export async function handleMenuGet(
|
||||
db: Kysely<Database>,
|
||||
name: string,
|
||||
): Promise<ApiResult<MenuWithItems>> {
|
||||
try {
|
||||
const menu = await db
|
||||
.selectFrom("_emdash_menus")
|
||||
.selectAll()
|
||||
.where("name", "=", name)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (!menu) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: `Menu '${name}' not found` },
|
||||
};
|
||||
}
|
||||
|
||||
const items = await db
|
||||
.selectFrom("_emdash_menu_items")
|
||||
.selectAll()
|
||||
.where("menu_id", "=", menu.id)
|
||||
.orderBy("sort_order", "asc")
|
||||
.execute();
|
||||
|
||||
return { success: true, data: { ...menu, items } };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "MENU_GET_ERROR", message: "Failed to fetch menu" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update a menu's metadata.
|
||||
*/
|
||||
export async function handleMenuUpdate(
|
||||
db: Kysely<Database>,
|
||||
name: string,
|
||||
input: { label?: string },
|
||||
): Promise<ApiResult<MenuRow>> {
|
||||
try {
|
||||
const menu = await db
|
||||
.selectFrom("_emdash_menus")
|
||||
.select("id")
|
||||
.where("name", "=", name)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (!menu) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: `Menu '${name}' not found` },
|
||||
};
|
||||
}
|
||||
|
||||
if (input.label) {
|
||||
await db
|
||||
.updateTable("_emdash_menus")
|
||||
.set({ label: input.label })
|
||||
.where("id", "=", menu.id)
|
||||
.execute();
|
||||
}
|
||||
|
||||
const updated = await db
|
||||
.selectFrom("_emdash_menus")
|
||||
.selectAll()
|
||||
.where("id", "=", menu.id)
|
||||
.executeTakeFirstOrThrow();
|
||||
|
||||
return { success: true, data: updated };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "MENU_UPDATE_ERROR", message: "Failed to update menu" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a menu and its items (cascade).
|
||||
*/
|
||||
export async function handleMenuDelete(
|
||||
db: Kysely<Database>,
|
||||
name: string,
|
||||
): Promise<ApiResult<{ deleted: true }>> {
|
||||
try {
|
||||
const menu = await db
|
||||
.selectFrom("_emdash_menus")
|
||||
.select("id")
|
||||
.where("name", "=", name)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (!menu) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: `Menu '${name}' not found` },
|
||||
};
|
||||
}
|
||||
|
||||
// D1 has FOREIGN KEYS off by default, so the migration's `ON DELETE
|
||||
// CASCADE` won't fire there. Delete items explicitly first — this is
|
||||
// idempotent on SQLite/Postgres where the cascade also fires.
|
||||
await db.deleteFrom("_emdash_menu_items").where("menu_id", "=", menu.id).execute();
|
||||
await db.deleteFrom("_emdash_menus").where("id", "=", menu.id).execute();
|
||||
|
||||
return { success: true, data: { deleted: true } };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "MENU_DELETE_ERROR", message: "Failed to delete menu" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Menu item handlers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** Input for adding one item to a menu. */
export interface CreateMenuItemInput {
  type: string;
  label: string;
  referenceCollection?: string;
  referenceId?: string;
  customUrl?: string;
  target?: string;
  titleAttr?: string;
  cssClasses?: string;
  // `undefined` makes the item top-level.
  parentId?: string;
  // When omitted, the handler appends after the current max sort_order
  // among siblings (see handleMenuItemCreate).
  sortOrder?: number;
}
|
||||
|
||||
/**
|
||||
* Add an item to a menu.
|
||||
*/
|
||||
export async function handleMenuItemCreate(
|
||||
db: Kysely<Database>,
|
||||
menuName: string,
|
||||
input: CreateMenuItemInput,
|
||||
): Promise<ApiResult<MenuItemRow>> {
|
||||
try {
|
||||
const menu = await db
|
||||
.selectFrom("_emdash_menus")
|
||||
.select("id")
|
||||
.where("name", "=", menuName)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (!menu) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: "Menu not found" },
|
||||
};
|
||||
}
|
||||
|
||||
let sortOrder = input.sortOrder ?? 0;
|
||||
if (input.sortOrder === undefined) {
|
||||
const maxOrder = await db
|
||||
.selectFrom("_emdash_menu_items")
|
||||
.select(({ fn }) => fn.max("sort_order").as("max"))
|
||||
.where("menu_id", "=", menu.id)
|
||||
.where("parent_id", "is", input.parentId ?? null)
|
||||
.executeTakeFirst();
|
||||
|
||||
// eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- Kysely fn.max returns unknown; always a number for sort_order column
|
||||
sortOrder = ((maxOrder?.max as number) ?? -1) + 1;
|
||||
}
|
||||
|
||||
const id = ulid();
|
||||
await db
|
||||
.insertInto("_emdash_menu_items")
|
||||
.values({
|
||||
id,
|
||||
menu_id: menu.id,
|
||||
parent_id: input.parentId ?? null,
|
||||
sort_order: sortOrder,
|
||||
type: input.type,
|
||||
reference_collection: input.referenceCollection ?? null,
|
||||
reference_id: input.referenceId ?? null,
|
||||
custom_url: input.customUrl ?? null,
|
||||
label: input.label,
|
||||
title_attr: input.titleAttr ?? null,
|
||||
target: input.target ?? null,
|
||||
css_classes: input.cssClasses ?? null,
|
||||
})
|
||||
.execute();
|
||||
|
||||
const item = await db
|
||||
.selectFrom("_emdash_menu_items")
|
||||
.selectAll()
|
||||
.where("id", "=", id)
|
||||
.executeTakeFirstOrThrow();
|
||||
|
||||
return { success: true, data: item };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "MENU_ITEM_CREATE_ERROR", message: "Failed to create menu item" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/** Partial update for a menu item; only defined fields are written. */
export interface UpdateMenuItemInput {
  label?: string;
  customUrl?: string;
  target?: string;
  titleAttr?: string;
  cssClasses?: string;
  // `null` re-parents the item to top level; `undefined` leaves it unchanged.
  parentId?: string | null;
  sortOrder?: number;
}
|
||||
|
||||
/**
|
||||
* Update a menu item.
|
||||
*/
|
||||
export async function handleMenuItemUpdate(
|
||||
db: Kysely<Database>,
|
||||
menuName: string,
|
||||
itemId: string,
|
||||
input: UpdateMenuItemInput,
|
||||
): Promise<ApiResult<MenuItemRow>> {
|
||||
try {
|
||||
const menu = await db
|
||||
.selectFrom("_emdash_menus")
|
||||
.select("id")
|
||||
.where("name", "=", menuName)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (!menu) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: "Menu not found" },
|
||||
};
|
||||
}
|
||||
|
||||
const item = await db
|
||||
.selectFrom("_emdash_menu_items")
|
||||
.select("id")
|
||||
.where("id", "=", itemId)
|
||||
.where("menu_id", "=", menu.id)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (!item) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: "Menu item not found" },
|
||||
};
|
||||
}
|
||||
|
||||
const updates: Record<string, unknown> = {};
|
||||
if (input.label !== undefined) updates.label = input.label;
|
||||
if (input.customUrl !== undefined) updates.custom_url = input.customUrl;
|
||||
if (input.target !== undefined) updates.target = input.target;
|
||||
if (input.titleAttr !== undefined) updates.title_attr = input.titleAttr;
|
||||
if (input.cssClasses !== undefined) updates.css_classes = input.cssClasses;
|
||||
if (input.parentId !== undefined) updates.parent_id = input.parentId;
|
||||
if (input.sortOrder !== undefined) updates.sort_order = input.sortOrder;
|
||||
|
||||
if (Object.keys(updates).length > 0) {
|
||||
await db.updateTable("_emdash_menu_items").set(updates).where("id", "=", itemId).execute();
|
||||
}
|
||||
|
||||
const updated = await db
|
||||
.selectFrom("_emdash_menu_items")
|
||||
.selectAll()
|
||||
.where("id", "=", itemId)
|
||||
.executeTakeFirstOrThrow();
|
||||
|
||||
return { success: true, data: updated };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "MENU_ITEM_UPDATE_ERROR", message: "Failed to update menu item" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a menu item.
|
||||
*/
|
||||
export async function handleMenuItemDelete(
|
||||
db: Kysely<Database>,
|
||||
menuName: string,
|
||||
itemId: string,
|
||||
): Promise<ApiResult<{ deleted: true }>> {
|
||||
try {
|
||||
const menu = await db
|
||||
.selectFrom("_emdash_menus")
|
||||
.select("id")
|
||||
.where("name", "=", menuName)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (!menu) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: "Menu not found" },
|
||||
};
|
||||
}
|
||||
|
||||
const result = await db
|
||||
.deleteFrom("_emdash_menu_items")
|
||||
.where("id", "=", itemId)
|
||||
.where("menu_id", "=", menu.id)
|
||||
.execute();
|
||||
|
||||
if (result[0]?.numDeletedRows === 0n) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: "Menu item not found" },
|
||||
};
|
||||
}
|
||||
|
||||
return { success: true, data: { deleted: true } };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "MENU_ITEM_DELETE_ERROR", message: "Failed to delete menu item" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/** One entry in a batch reorder: new parent and position for an item. */
export interface ReorderItem {
  id: string;
  // `null` means top-level.
  parentId: string | null;
  sortOrder: number;
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Atomic-replace menu items (used by the MCP `menu_set_items` tool)
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** One menu item in an atomic-replace payload (see handleMenuSetItems). */
export interface MenuSetItemsInput {
  label: string;
  type: "custom" | "page" | "post" | "taxonomy" | "collection";
  customUrl?: string;
  referenceCollection?: string;
  referenceId?: string;
  titleAttr?: string;
  target?: string;
  cssClasses?: string;
  /**
   * Index of the parent item in this same array. Must be strictly less
   * than the current item's index so the insert order resolves parents
   * before children. `undefined` makes the item top-level.
   */
  parentIndex?: number;
}
|
||||
|
||||
/**
 * Replace the entire set of items for a menu in one atomic transaction.
 *
 * Existing items are deleted and the new list is inserted in the order
 * provided. `parentIndex` references resolve to actual parent IDs as the
 * insert proceeds.
 */
export async function handleMenuSetItems(
  db: Kysely<Database>,
  menuName: string,
  items: MenuSetItemsInput[],
): Promise<ApiResult<{ name: string; itemCount: number }>> {
  // Validate parentIndex references — must be strictly earlier so
  // the array can be inserted in order with parents resolved first.
  // Negative indices are out of range; only Zod's `.nonnegative()` at
  // the MCP boundary catches them today, so guard explicitly here for
  // any caller that bypasses Zod (REST routes, direct handler use).
  for (let i = 0; i < items.length; i++) {
    const item = items[i];
    if (item?.parentIndex !== undefined) {
      if (item.parentIndex < 0 || item.parentIndex >= i) {
        return {
          success: false,
          error: {
            code: "VALIDATION_ERROR",
            message: `item[${i}].parentIndex (${item.parentIndex}) must reference an earlier item`,
          },
        };
      }
    }
  }

  try {
    // Sentinel for "menu not found" thrown from inside the transaction
    // so the rollback fires before we return the structured error.
    const notFoundSentinel = Symbol("menu-not-found");

    try {
      await withTransaction(db, async (trx) => {
        // Existence check INSIDE the transaction so a concurrent
        // menu_delete between lookup and write can't leave orphan
        // items on D1 (FKs disabled by default).
        const menu = await trx
          .selectFrom("_emdash_menus")
          .select("id")
          .where("name", "=", menuName)
          .executeTakeFirst();

        if (!menu) {
          throw notFoundSentinel;
        }

        // Full replace: wipe the old items, then insert the new list
        // in array order (array index doubles as sort_order).
        await trx.deleteFrom("_emdash_menu_items").where("menu_id", "=", menu.id).execute();

        const insertedIds: string[] = [];
        for (let i = 0; i < items.length; i++) {
          const item = items[i];
          if (!item) continue;
          const id = ulid();
          // parentIndex was validated above to point at an earlier entry,
          // whose generated id is already in insertedIds.
          const parentId =
            item.parentIndex !== undefined ? (insertedIds[item.parentIndex] ?? null) : null;
          await trx
            .insertInto("_emdash_menu_items")
            .values({
              id,
              menu_id: menu.id,
              parent_id: parentId,
              sort_order: i,
              type: item.type,
              reference_collection: item.referenceCollection ?? null,
              reference_id: item.referenceId ?? null,
              custom_url: item.customUrl ?? null,
              label: item.label,
              title_attr: item.titleAttr ?? null,
              target: item.target ?? null,
              css_classes: item.cssClasses ?? null,
            })
            .execute();
          insertedIds.push(id);
        }

        // Bump the menu's updated_at to reflect the item change.
        await trx
          .updateTable("_emdash_menus")
          .set({ updated_at: new Date().toISOString() })
          .where("id", "=", menu.id)
          .execute();
      });
    } catch (error) {
      if (error === notFoundSentinel) {
        return {
          success: false,
          error: { code: "NOT_FOUND", message: `Menu '${menuName}' not found` },
        };
      }
      throw error;
    }

    return { success: true, data: { name: menuName, itemCount: items.length } };
  } catch (error) {
    console.error("[emdash] handleMenuSetItems failed:", error);
    return {
      success: false,
      error: { code: "MENU_SET_ITEMS_ERROR", message: "Failed to set menu items" },
    };
  }
}
|
||||
|
||||
/**
|
||||
* Batch reorder menu items.
|
||||
*/
|
||||
export async function handleMenuItemReorder(
|
||||
db: Kysely<Database>,
|
||||
menuName: string,
|
||||
items: ReorderItem[],
|
||||
): Promise<ApiResult<MenuItemRow[]>> {
|
||||
try {
|
||||
const menu = await db
|
||||
.selectFrom("_emdash_menus")
|
||||
.select("id")
|
||||
.where("name", "=", menuName)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (!menu) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: "Menu not found" },
|
||||
};
|
||||
}
|
||||
|
||||
const updatedItems = await withTransaction(db, async (trx) => {
|
||||
for (const item of items) {
|
||||
await trx
|
||||
.updateTable("_emdash_menu_items")
|
||||
.set({
|
||||
parent_id: item.parentId,
|
||||
sort_order: item.sortOrder,
|
||||
})
|
||||
.where("id", "=", item.id)
|
||||
.where("menu_id", "=", menu.id)
|
||||
.execute();
|
||||
}
|
||||
|
||||
return trx
|
||||
.selectFrom("_emdash_menu_items")
|
||||
.selectAll()
|
||||
.where("menu_id", "=", menu.id)
|
||||
.orderBy("sort_order", "asc")
|
||||
.execute();
|
||||
});
|
||||
|
||||
return { success: true, data: updatedItems };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "MENU_REORDER_ERROR", message: "Failed to reorder menu items" },
|
||||
};
|
||||
}
|
||||
}
|
||||
438
packages/core/src/api/handlers/oauth-authorization.ts
Normal file
438
packages/core/src/api/handlers/oauth-authorization.ts
Normal file
@@ -0,0 +1,438 @@
|
||||
/**
|
||||
* OAuth 2.1 Authorization Code + PKCE handlers.
|
||||
*
|
||||
* Implements the server side of the authorization code grant for MCP clients
|
||||
* (Claude Desktop, VS Code, etc.) per the MCP authorization spec (draft).
|
||||
*
|
||||
* Uses arctic for PKCE challenge generation and @emdash-cms/auth for token
|
||||
* utilities. Token infrastructure is shared with the device flow.
|
||||
*/
|
||||
|
||||
import { clampScopes, computeS256Challenge, secureCompare } from "@emdash-cms/auth";
|
||||
import type { RoleLevel } from "@emdash-cms/auth";
|
||||
import { generateCodeVerifier } from "arctic";
|
||||
import type { Kysely } from "kysely";
|
||||
|
||||
import {
|
||||
generatePrefixedToken,
|
||||
hashApiToken,
|
||||
TOKEN_PREFIXES,
|
||||
VALID_SCOPES,
|
||||
} from "../../auth/api-tokens.js";
|
||||
import { withTransaction } from "../../database/transaction.js";
|
||||
import type { Database } from "../../database/types.js";
|
||||
import { validateRedirectUri } from "../oauth/redirect-uri.js";
|
||||
import type { ApiResult } from "../types.js";
|
||||
import { lookupOAuthClient, validateClientRedirectUri } from "./oauth-clients.js";
|
||||
import { lookupUserRoleAndStatus } from "./oauth-user-lookup.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Constants
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// All TTLs below are fed to expiresAt() to produce ISO expiry timestamps.

/** Authorization codes expire after 10 minutes (RFC 6749 §4.1.2 recommends short-lived) */
const AUTH_CODE_TTL_SECONDS = 10 * 60;

/** Access token TTL: 1 hour */
const ACCESS_TOKEN_TTL_SECONDS = 60 * 60;

/** Refresh token TTL: 90 days */
const REFRESH_TOKEN_TTL_SECONDS = 90 * 24 * 60 * 60;
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Types
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** Query parameters of an OAuth 2.1 authorization request (RFC 6749 §4.1.1 + PKCE). */
export interface AuthorizationParams {
  response_type: string;
  client_id: string;
  redirect_uri: string;
  scope?: string;
  state?: string;
  code_challenge: string;
  // Only "S256" is accepted (see handleAuthorizationApproval).
  code_challenge_method: string;
  // Resource indicator, stored with the code and re-checked at exchange.
  resource?: string;
}
|
||||
|
||||
/** Body of the authorization_code token request (RFC 6749 §4.1.3 + PKCE verifier). */
export interface TokenExchangeParams {
  grant_type: string;
  code: string;
  redirect_uri: string;
  client_id: string;
  code_verifier: string;
  resource?: string;
}
|
||||
|
||||
/** Successful token endpoint response (RFC 6749 §5.1 shape). */
export interface TokenResponse {
  access_token: string;
  refresh_token: string;
  token_type: "Bearer";
  // Seconds until access_token expiry.
  expires_in: number;
  // Space-delimited granted scopes.
  scope: string;
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
function expiresAt(seconds: number): string {
|
||||
return new Date(Date.now() + seconds * 1000).toISOString();
|
||||
}
|
||||
|
||||
export { validateRedirectUri };
|
||||
|
||||
/**
|
||||
* Validate and normalize scopes. Returns validated scope list.
|
||||
*/
|
||||
function normalizeScopes(requested?: string): string[] {
|
||||
if (!requested) return [];
|
||||
|
||||
const validSet = new Set<string>(VALID_SCOPES);
|
||||
const scopes = requested
|
||||
.split(" ")
|
||||
.filter(Boolean)
|
||||
.filter((s) => validSet.has(s));
|
||||
|
||||
return scopes;
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Handlers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
 * Process an authorization request after the user approves consent.
 *
 * Generates an authorization code, stores it with the PKCE challenge,
 * and returns the redirect URL with the code appended.
 *
 * Scopes are clamped to the user's role to prevent scope escalation.
 */
export async function handleAuthorizationApproval(
  db: Kysely<Database>,
  userId: string,
  userRole: RoleLevel,
  params: AuthorizationParams,
): Promise<ApiResult<{ redirect_url: string }>> {
  try {
    // Validate response_type — only the authorization-code grant is offered.
    if (params.response_type !== "code") {
      return {
        success: false,
        error: {
          code: "UNSUPPORTED_RESPONSE_TYPE",
          message: "Only response_type=code is supported",
        },
      };
    }

    // Validate redirect_uri scheme/host (basic security check)
    const uriError = validateRedirectUri(params.redirect_uri);
    if (uriError) {
      return {
        success: false,
        error: { code: "INVALID_REDIRECT_URI", message: uriError },
      };
    }

    // Look up the registered OAuth client
    const client = await lookupOAuthClient(db, params.client_id);
    if (!client) {
      return {
        success: false,
        error: {
          code: "INVALID_CLIENT",
          message: "Unknown client_id",
        },
      };
    }

    // Validate redirect_uri against client's registered URIs
    const clientUriError = validateClientRedirectUri(params.redirect_uri, client.redirectUris);
    if (clientUriError) {
      return {
        success: false,
        error: { code: "INVALID_REDIRECT_URI", message: clientUriError },
      };
    }

    // Validate code_challenge_method — anything other than S256 (e.g.
    // PKCE "plain") is rejected outright.
    if (params.code_challenge_method !== "S256") {
      return {
        success: false,
        error: {
          code: "INVALID_REQUEST",
          message: "Only S256 code_challenge_method is supported",
        },
      };
    }

    // Validate code_challenge is present
    if (!params.code_challenge) {
      return {
        success: false,
        error: { code: "INVALID_REQUEST", message: "code_challenge is required" },
      };
    }

    // Validate scopes, then clamp to user's role
    const userScopes = clampScopes(normalizeScopes(params.scope), userRole);

    // SEC-41: Intersect with client's registered scopes (if restricted).
    // A client registered with scopes: ["content:read"] should never receive
    // admin or schema:write, regardless of the approving user's role.
    const clientScopes = client.scopes;
    const scopes = clientScopes?.length
      ? userScopes.filter((s: string) => clientScopes.includes(s))
      : userScopes;

    if (scopes.length === 0) {
      return {
        success: false,
        error: { code: "INVALID_SCOPE", message: "No valid scopes requested" },
      };
    }

    // Generate authorization code (high entropy, base64url)
    const code = generateCodeVerifier(); // 32 bytes random, base64url
    // Only the hash is persisted; the raw code travels solely in the redirect.
    const codeHash = hashApiToken(code);

    // Store the authorization code
    await db
      .insertInto("_emdash_authorization_codes")
      .values({
        code_hash: codeHash,
        client_id: params.client_id,
        redirect_uri: params.redirect_uri,
        user_id: userId,
        scopes: JSON.stringify(scopes),
        code_challenge: params.code_challenge,
        code_challenge_method: params.code_challenge_method,
        resource: params.resource ?? null,
        expires_at: expiresAt(AUTH_CODE_TTL_SECONDS),
      })
      .execute();

    // Build the redirect URL (state is echoed back only when supplied)
    const redirectUrl = new URL(params.redirect_uri);
    redirectUrl.searchParams.set("code", code);
    if (params.state) {
      redirectUrl.searchParams.set("state", params.state);
    }

    return {
      success: true,
      data: { redirect_url: redirectUrl.toString() },
    };
  } catch (error) {
    console.error("Authorization error:", error);
    return {
      success: false,
      error: {
        code: "AUTHORIZATION_ERROR",
        message: "Failed to process authorization",
      },
    };
  }
}
|
||||
|
||||
/**
 * Exchange an authorization code for access + refresh tokens.
 *
 * Validates the code, verifies PKCE, and issues tokens using the same
 * infrastructure as the device flow (ec_oat_*, ec_ort_*).
 */
export async function handleAuthorizationCodeExchange(
  db: Kysely<Database>,
  params: TokenExchangeParams,
): Promise<ApiResult<TokenResponse>> {
  try {
    // Validate grant_type
    if (params.grant_type !== "authorization_code") {
      return {
        success: false,
        error: { code: "unsupported_grant_type", message: "Invalid grant_type" },
      };
    }

    // SEC-39: Atomically consume the authorization code using DELETE...RETURNING.
    // This prevents TOCTOU double-exchange: two concurrent requests with the
    // same code will race on the DELETE, and only one will get a row back.
    const codeHash = hashApiToken(params.code);

    const row = await db
      .deleteFrom("_emdash_authorization_codes")
      .where("code_hash", "=", codeHash)
      .returningAll()
      .executeTakeFirst();

    if (!row) {
      return {
        success: false,
        error: { code: "invalid_grant", message: "Invalid authorization code" },
      };
    }

    // Check expiry. The code was already consumed by the DELETE above, so
    // an expired code is gone for good — a retry fails as invalid.
    if (new Date(row.expires_at) < new Date()) {
      return {
        success: false,
        error: { code: "invalid_grant", message: "Authorization code expired" },
      };
    }

    // Verify redirect_uri matches exactly
    if (row.redirect_uri !== params.redirect_uri) {
      return {
        success: false,
        error: { code: "invalid_grant", message: "redirect_uri mismatch" },
      };
    }

    // Verify client_id matches
    if (row.client_id !== params.client_id) {
      return {
        success: false,
        error: { code: "invalid_grant", message: "client_id mismatch" },
      };
    }

    // PKCE verification: SHA256(code_verifier) must match stored code_challenge
    // Use constant-time comparison to prevent timing side-channels
    const derivedChallenge = computeS256Challenge(params.code_verifier);
    if (!secureCompare(derivedChallenge, row.code_challenge)) {
      return {
        success: false,
        error: { code: "invalid_grant", message: "PKCE verification failed" },
      };
    }

    // Verify resource matches (only enforced when both sides supplied one)
    if (row.resource && params.resource && row.resource !== params.resource) {
      return {
        success: false,
        error: { code: "invalid_grant", message: "resource mismatch" },
      };
    }

    // Revalidate user role before issuing tokens (same pattern as handleTokenRefresh).
    // The user's role may have changed since the authorization code was issued.
    const userInfo = await lookupUserRoleAndStatus(db, row.user_id);
    if (!userInfo) {
      return {
        success: false,
        error: { code: "invalid_grant", message: "User not found" },
      };
    }

    if (userInfo.disabled) {
      return {
        success: false,
        error: { code: "invalid_grant", message: "User account is disabled" },
      };
    }

    // Re-clamp scopes against the user's current role
    const storedScopes = JSON.parse(row.scopes) as string[];
    let scopes = clampScopes(storedScopes, userInfo.role);

    // Intersect with client's registered scopes (if restricted)
    const client = await lookupOAuthClient(db, row.client_id);
    if (client?.scopes?.length) {
      scopes = scopes.filter((s: string) => client.scopes!.includes(s));
    }

    if (scopes.length === 0) {
      return {
        success: false,
        error: {
          code: "invalid_grant",
          message: "User role no longer supports any of the requested scopes",
        },
      };
    }

    // Issue tokens (same as device flow)
    const accessToken = generatePrefixedToken(TOKEN_PREFIXES.OAUTH_ACCESS);
    const accessExpires = expiresAt(ACCESS_TOKEN_TTL_SECONDS);

    const refreshToken = generatePrefixedToken(TOKEN_PREFIXES.OAUTH_REFRESH);
    const refreshExpires = expiresAt(REFRESH_TOKEN_TTL_SECONDS);

    // Atomically store both tokens in a transaction. The access row links
    // to its refresh token via refresh_token_hash; the refresh row stands alone.
    await withTransaction(db, async (trx) => {
      await trx
        .insertInto("_emdash_oauth_tokens")
        .values({
          token_hash: accessToken.hash,
          token_type: "access",
          user_id: row.user_id,
          scopes: JSON.stringify(scopes),
          client_type: "mcp",
          expires_at: accessExpires,
          refresh_token_hash: refreshToken.hash,
          client_id: row.client_id,
        })
        .execute();

      await trx
        .insertInto("_emdash_oauth_tokens")
        .values({
          token_hash: refreshToken.hash,
          token_type: "refresh",
          user_id: row.user_id,
          scopes: JSON.stringify(scopes),
          client_type: "mcp",
          expires_at: refreshExpires,
          refresh_token_hash: null,
          client_id: row.client_id,
        })
        .execute();
    });

    return {
      success: true,
      data: {
        access_token: accessToken.raw,
        refresh_token: refreshToken.raw,
        token_type: "Bearer",
        expires_in: ACCESS_TOKEN_TTL_SECONDS,
        scope: scopes.join(" "),
      },
    };
  } catch (error) {
    console.error("Token exchange error:", error);
    return {
      success: false,
      error: {
        code: "TOKEN_EXCHANGE_ERROR",
        message: "Failed to exchange authorization code",
      },
    };
  }
}
|
||||
|
||||
/**
|
||||
* Build the authorization denied redirect URL.
|
||||
*/
|
||||
export function buildDeniedRedirect(redirectUri: string, state?: string): string {
|
||||
const url = new URL(redirectUri);
|
||||
url.searchParams.set("error", "access_denied");
|
||||
url.searchParams.set("error_description", "The user denied the authorization request");
|
||||
if (state) {
|
||||
url.searchParams.set("state", state);
|
||||
}
|
||||
return url.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Clean up expired authorization codes.
|
||||
*/
|
||||
export async function cleanupExpiredAuthorizationCodes(db: Kysely<Database>): Promise<number> {
|
||||
const result = await db
|
||||
.deleteFrom("_emdash_authorization_codes")
|
||||
.where("expires_at", "<", new Date().toISOString())
|
||||
.executeTakeFirst();
|
||||
|
||||
return Number(result.numDeletedRows);
|
||||
}
|
||||
385
packages/core/src/api/handlers/oauth-clients.ts
Normal file
385
packages/core/src/api/handlers/oauth-clients.ts
Normal file
@@ -0,0 +1,385 @@
|
||||
/**
|
||||
* OAuth client management handlers.
|
||||
*
|
||||
* CRUD operations for registered OAuth clients. Each client has a set
|
||||
* of pre-registered redirect URIs. The authorization endpoint rejects
|
||||
* any redirect_uri not in the client's registered set.
|
||||
*/
|
||||
|
||||
import type { Kysely } from "kysely";
|
||||
|
||||
import type { Database } from "../../database/types.js";
|
||||
import { validateRedirectUri } from "../oauth/redirect-uri.js";
|
||||
import type { ApiResult } from "../types.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** Parse a JSON string column into a typed value. */
|
||||
function parseJsonColumn<T>(value: string): T {
|
||||
// eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- JSON.parse returns unknown, callers provide the expected shape
|
||||
return JSON.parse(value) as T;
|
||||
}
|
||||
|
||||
function validateRegisteredRedirectUris(redirectUris: string[]): string | null {
|
||||
for (const redirectUri of redirectUris) {
|
||||
const error = validateRedirectUri(redirectUri);
|
||||
if (error) {
|
||||
return `Invalid redirect URI: ${error}`;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Types
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** Serialized, API-facing view of a registered OAuth client. */
export interface OAuthClientInfo {
  /** Client identifier chosen at registration (primary key). */
  id: string;
  /** Human-readable display name. */
  name: string;
  /** Registered redirect URIs; the authorization endpoint rejects any other URI. */
  redirectUris: string[];
  /** Allowed scopes, or null when the client is unrestricted. */
  scopes: string[] | null;
  /** ISO-8601 creation timestamp. */
  createdAt: string;
  /** ISO-8601 last-update timestamp. */
  updatedAt: string;
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Handlers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Create a new OAuth client.
|
||||
*/
|
||||
export async function handleOAuthClientCreate(
|
||||
db: Kysely<Database>,
|
||||
input: {
|
||||
id: string;
|
||||
name: string;
|
||||
redirectUris: string[];
|
||||
scopes?: string[] | null;
|
||||
},
|
||||
): Promise<ApiResult<OAuthClientInfo>> {
|
||||
try {
|
||||
if (input.redirectUris.length === 0) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "VALIDATION_ERROR",
|
||||
message: "At least one redirect URI is required",
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const redirectUriError = validateRegisteredRedirectUris(input.redirectUris);
|
||||
if (redirectUriError) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "VALIDATION_ERROR",
|
||||
message: redirectUriError,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// Check for duplicate client ID
|
||||
const existing = await db
|
||||
.selectFrom("_emdash_oauth_clients")
|
||||
.select("id")
|
||||
.where("id", "=", input.id)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (existing) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "CONFLICT", message: "OAuth client with this ID already exists" },
|
||||
};
|
||||
}
|
||||
|
||||
const now = new Date().toISOString();
|
||||
|
||||
await db
|
||||
.insertInto("_emdash_oauth_clients")
|
||||
.values({
|
||||
id: input.id,
|
||||
name: input.name,
|
||||
redirect_uris: JSON.stringify(input.redirectUris),
|
||||
scopes: input.scopes && input.scopes.length > 0 ? JSON.stringify(input.scopes) : null,
|
||||
})
|
||||
.execute();
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: {
|
||||
id: input.id,
|
||||
name: input.name,
|
||||
redirectUris: input.redirectUris,
|
||||
scopes: input.scopes && input.scopes.length > 0 ? input.scopes : null,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
},
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "CLIENT_CREATE_ERROR",
|
||||
message: "Failed to create OAuth client",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* List all registered OAuth clients.
|
||||
*/
|
||||
export async function handleOAuthClientList(
|
||||
db: Kysely<Database>,
|
||||
): Promise<ApiResult<{ items: OAuthClientInfo[] }>> {
|
||||
try {
|
||||
const rows = await db
|
||||
.selectFrom("_emdash_oauth_clients")
|
||||
.selectAll()
|
||||
.orderBy("created_at", "desc")
|
||||
.execute();
|
||||
|
||||
const items: OAuthClientInfo[] = rows.map((row) => ({
|
||||
id: row.id,
|
||||
name: row.name,
|
||||
redirectUris: parseJsonColumn<string[]>(row.redirect_uris),
|
||||
scopes: row.scopes ? parseJsonColumn<string[]>(row.scopes) : null,
|
||||
createdAt: row.created_at,
|
||||
updatedAt: row.updated_at,
|
||||
}));
|
||||
|
||||
return { success: true, data: { items } };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "CLIENT_LIST_ERROR",
|
||||
message: "Failed to list OAuth clients",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a single OAuth client by ID.
|
||||
*/
|
||||
export async function handleOAuthClientGet(
|
||||
db: Kysely<Database>,
|
||||
clientId: string,
|
||||
): Promise<ApiResult<OAuthClientInfo>> {
|
||||
try {
|
||||
const row = await db
|
||||
.selectFrom("_emdash_oauth_clients")
|
||||
.selectAll()
|
||||
.where("id", "=", clientId)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (!row) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: "OAuth client not found" },
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: {
|
||||
id: row.id,
|
||||
name: row.name,
|
||||
redirectUris: parseJsonColumn<string[]>(row.redirect_uris),
|
||||
scopes: row.scopes ? parseJsonColumn<string[]>(row.scopes) : null,
|
||||
createdAt: row.created_at,
|
||||
updatedAt: row.updated_at,
|
||||
},
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "CLIENT_GET_ERROR",
|
||||
message: "Failed to get OAuth client",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update an OAuth client.
|
||||
*/
|
||||
export async function handleOAuthClientUpdate(
|
||||
db: Kysely<Database>,
|
||||
clientId: string,
|
||||
input: {
|
||||
name?: string;
|
||||
redirectUris?: string[];
|
||||
scopes?: string[] | null;
|
||||
},
|
||||
): Promise<ApiResult<OAuthClientInfo>> {
|
||||
try {
|
||||
const existing = await db
|
||||
.selectFrom("_emdash_oauth_clients")
|
||||
.selectAll()
|
||||
.where("id", "=", clientId)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (!existing) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: "OAuth client not found" },
|
||||
};
|
||||
}
|
||||
|
||||
if (input.redirectUris !== undefined && input.redirectUris.length === 0) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "VALIDATION_ERROR",
|
||||
message: "At least one redirect URI is required",
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (input.redirectUris !== undefined) {
|
||||
const redirectUriError = validateRegisteredRedirectUris(input.redirectUris);
|
||||
if (redirectUriError) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "VALIDATION_ERROR",
|
||||
message: redirectUriError,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
const updates: Record<string, string | null> = {
|
||||
updated_at: new Date().toISOString(),
|
||||
};
|
||||
|
||||
if (input.name !== undefined) {
|
||||
updates.name = input.name;
|
||||
}
|
||||
if (input.redirectUris !== undefined) {
|
||||
updates.redirect_uris = JSON.stringify(input.redirectUris);
|
||||
}
|
||||
if (input.scopes !== undefined) {
|
||||
updates.scopes =
|
||||
input.scopes && input.scopes.length > 0 ? JSON.stringify(input.scopes) : null;
|
||||
}
|
||||
|
||||
await db.updateTable("_emdash_oauth_clients").set(updates).where("id", "=", clientId).execute();
|
||||
|
||||
// Fetch the updated row
|
||||
const updated = await db
|
||||
.selectFrom("_emdash_oauth_clients")
|
||||
.selectAll()
|
||||
.where("id", "=", clientId)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (!updated) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: "OAuth client not found after update" },
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: {
|
||||
id: updated.id,
|
||||
name: updated.name,
|
||||
redirectUris: parseJsonColumn<string[]>(updated.redirect_uris),
|
||||
scopes: updated.scopes ? parseJsonColumn<string[]>(updated.scopes) : null,
|
||||
createdAt: updated.created_at,
|
||||
updatedAt: updated.updated_at,
|
||||
},
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "CLIENT_UPDATE_ERROR",
|
||||
message: "Failed to update OAuth client",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete an OAuth client.
|
||||
*/
|
||||
export async function handleOAuthClientDelete(
|
||||
db: Kysely<Database>,
|
||||
clientId: string,
|
||||
): Promise<ApiResult<{ deleted: true }>> {
|
||||
try {
|
||||
const result = await db
|
||||
.deleteFrom("_emdash_oauth_clients")
|
||||
.where("id", "=", clientId)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (result.numDeletedRows === 0n) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: "OAuth client not found" },
|
||||
};
|
||||
}
|
||||
|
||||
return { success: true, data: { deleted: true } };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "CLIENT_DELETE_ERROR",
|
||||
message: "Failed to delete OAuth client",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Lookup helpers (used by authorization handler)
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Look up a registered OAuth client by ID.
|
||||
* Returns the client's redirect URIs or null if the client is not registered.
|
||||
*/
|
||||
export async function lookupOAuthClient(
|
||||
db: Kysely<Database>,
|
||||
clientId: string,
|
||||
): Promise<{ redirectUris: string[]; scopes: string[] | null } | null> {
|
||||
const row = await db
|
||||
.selectFrom("_emdash_oauth_clients")
|
||||
.select(["redirect_uris", "scopes"])
|
||||
.where("id", "=", clientId)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (!row) return null;
|
||||
|
||||
return {
|
||||
redirectUris: parseJsonColumn<string[]>(row.redirect_uris),
|
||||
scopes: row.scopes ? parseJsonColumn<string[]>(row.scopes) : null,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate that a redirect URI is in the client's registered set.
|
||||
*
|
||||
* Comparison is exact string match (per RFC 6749 §3.1.2.3).
|
||||
* Returns null if valid, or an error message if not.
|
||||
*/
|
||||
export function validateClientRedirectUri(
|
||||
redirectUri: string,
|
||||
allowedUris: string[],
|
||||
): string | null {
|
||||
if (allowedUris.includes(redirectUri)) {
|
||||
return null; // OK
|
||||
}
|
||||
return "redirect_uri is not registered for this client";
|
||||
}
|
||||
39
packages/core/src/api/handlers/oauth-user-lookup.ts
Normal file
39
packages/core/src/api/handlers/oauth-user-lookup.ts
Normal file
@@ -0,0 +1,39 @@
|
||||
/**
|
||||
* Shared user lookup for OAuth token operations.
|
||||
*
|
||||
* Extracts user role and disabled status from the database. Used by
|
||||
* handleTokenRefresh() to revalidate scopes against the user's current
|
||||
* role and reject disabled users.
|
||||
*/
|
||||
|
||||
import { toRoleLevel, type RoleLevel } from "@emdash-cms/auth";
|
||||
import type { Kysely } from "kysely";
|
||||
|
||||
import type { Database } from "../../database/types.js";
|
||||
|
||||
/** Snapshot of a user's current role and account status. */
export interface UserRoleAndStatus {
  /** Role level resolved from the users.role column via toRoleLevel(). */
  role: RoleLevel;
  /** True when the users.disabled column equals 1. */
  disabled: boolean;
}
|
||||
|
||||
/**
|
||||
* Look up a user's current role and disabled status.
|
||||
* Returns null if the user doesn't exist.
|
||||
*/
|
||||
export async function lookupUserRoleAndStatus(
|
||||
db: Kysely<Database>,
|
||||
userId: string,
|
||||
): Promise<UserRoleAndStatus | null> {
|
||||
const row = await db
|
||||
.selectFrom("users")
|
||||
.select(["role", "disabled"])
|
||||
.where("id", "=", userId)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (!row) return null;
|
||||
|
||||
return {
|
||||
role: toRoleLevel(row.role),
|
||||
disabled: row.disabled === 1,
|
||||
};
|
||||
}
|
||||
254
packages/core/src/api/handlers/plugins.ts
Normal file
254
packages/core/src/api/handlers/plugins.ts
Normal file
@@ -0,0 +1,254 @@
|
||||
/**
|
||||
* Plugin management handlers
|
||||
*/
|
||||
|
||||
import type { Kysely } from "kysely";
|
||||
|
||||
import type { Database } from "../../database/types.js";
|
||||
import { PluginStateRepository, type PluginState, type PluginStatus } from "../../plugins/state.js";
|
||||
import type { ResolvedPlugin } from "../../plugins/types.js";
|
||||
import type { ApiResult } from "../types.js";
|
||||
|
||||
/** API-facing summary of a plugin merged with its persisted state. */
export interface PluginInfo {
  id: string;
  /** Display name; falls back to the plugin ID when no state records one. */
  name: string;
  version: string;
  /** Not populated for v2 plugins. */
  package?: string;
  /** Convenience flag: true exactly when status is "active". */
  enabled: boolean;
  status: PluginStatus;
  /** Where the plugin came from; treated as "config" when unrecorded. */
  source?: "config" | "marketplace";
  marketplaceVersion?: string;
  capabilities: string[];
  hasAdminPages: boolean;
  hasDashboardWidgets: boolean;
  hasHooks: boolean;
  /** ISO-8601 install timestamp from the plugin state table, when known. */
  installedAt?: string;
  /** ISO-8601 activation timestamp, when known. */
  activatedAt?: string;
  /** ISO-8601 deactivation timestamp, when known. */
  deactivatedAt?: string;
  /** Description of what the plugin does */
  description?: string;
  /** URL to the plugin icon on the marketplace */
  iconUrl?: string;
}
|
||||
|
||||
/** Response payload for the plugin list endpoint. */
export interface PluginListResponse {
  items: PluginInfo[];
}

/** Response payload for single-plugin endpoints. */
export interface PluginResponse {
  item: PluginInfo;
}
|
||||
|
||||
function marketplaceIconUrl(marketplaceUrl: string, pluginId: string): string {
|
||||
return `${marketplaceUrl}/api/v1/plugins/${encodeURIComponent(pluginId)}/icon`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get plugin info from configured plugin and database state
|
||||
*/
|
||||
function buildPluginInfo(
|
||||
plugin: ResolvedPlugin,
|
||||
state: PluginState | null,
|
||||
marketplaceUrl?: string,
|
||||
): PluginInfo {
|
||||
// If no state exists, plugin is considered active (default on first run)
|
||||
const status = state?.status ?? "active";
|
||||
const enabled = status === "active";
|
||||
const isMarketplace = (state?.source ?? "config") === "marketplace";
|
||||
|
||||
return {
|
||||
id: plugin.id,
|
||||
name: state?.displayName || plugin.id,
|
||||
version: plugin.version,
|
||||
package: undefined, // v2 doesn't have package field
|
||||
enabled,
|
||||
status,
|
||||
source: state?.source ?? "config",
|
||||
marketplaceVersion: state?.marketplaceVersion ?? undefined,
|
||||
capabilities: plugin.capabilities,
|
||||
hasAdminPages: (plugin.admin.pages?.length ?? 0) > 0,
|
||||
hasDashboardWidgets: (plugin.admin.widgets?.length ?? 0) > 0,
|
||||
hasHooks: Object.keys(plugin.hooks ?? {}).length > 0,
|
||||
installedAt: state?.installedAt?.toISOString(),
|
||||
activatedAt: state?.activatedAt?.toISOString() ?? undefined,
|
||||
deactivatedAt: state?.deactivatedAt?.toISOString() ?? undefined,
|
||||
description: state?.description ?? undefined,
|
||||
iconUrl:
|
||||
isMarketplace && marketplaceUrl ? marketplaceIconUrl(marketplaceUrl, plugin.id) : undefined,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* List all configured plugins with their state
|
||||
*/
|
||||
export async function handlePluginList(
|
||||
db: Kysely<Database>,
|
||||
configuredPlugins: ResolvedPlugin[],
|
||||
marketplaceUrl?: string,
|
||||
): Promise<ApiResult<PluginListResponse>> {
|
||||
try {
|
||||
const stateRepo = new PluginStateRepository(db);
|
||||
const allStates = await stateRepo.getAll();
|
||||
const stateMap = new Map(allStates.map((s) => [s.pluginId, s]));
|
||||
|
||||
const configuredIds = new Set(configuredPlugins.map((p) => p.id));
|
||||
|
||||
const items = configuredPlugins.map((plugin) => {
|
||||
const state = stateMap.get(plugin.id) ?? null;
|
||||
return buildPluginInfo(plugin, state, marketplaceUrl);
|
||||
});
|
||||
|
||||
// Include marketplace-installed plugins that aren't in the configured plugins list
|
||||
for (const state of allStates) {
|
||||
if (state.source !== "marketplace") continue;
|
||||
if (configuredIds.has(state.pluginId)) continue;
|
||||
|
||||
items.push({
|
||||
id: state.pluginId,
|
||||
name: state.displayName || state.pluginId,
|
||||
version: state.marketplaceVersion ?? state.version,
|
||||
enabled: state.status === "active",
|
||||
status: state.status,
|
||||
source: "marketplace",
|
||||
marketplaceVersion: state.marketplaceVersion ?? undefined,
|
||||
capabilities: [],
|
||||
hasAdminPages: false,
|
||||
hasDashboardWidgets: false,
|
||||
hasHooks: false,
|
||||
installedAt: state.installedAt?.toISOString(),
|
||||
activatedAt: state.activatedAt?.toISOString() ?? undefined,
|
||||
deactivatedAt: state.deactivatedAt?.toISOString() ?? undefined,
|
||||
description: state.description ?? undefined,
|
||||
iconUrl: marketplaceUrl ? marketplaceIconUrl(marketplaceUrl, state.pluginId) : undefined,
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { items },
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "PLUGIN_LIST_ERROR",
|
||||
message: "Failed to list plugins",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a single plugin's info
|
||||
*/
|
||||
export async function handlePluginGet(
|
||||
db: Kysely<Database>,
|
||||
configuredPlugins: ResolvedPlugin[],
|
||||
pluginId: string,
|
||||
marketplaceUrl?: string,
|
||||
): Promise<ApiResult<PluginResponse>> {
|
||||
try {
|
||||
const plugin = configuredPlugins.find((p) => p.id === pluginId);
|
||||
if (!plugin) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "NOT_FOUND",
|
||||
message: `Plugin not found: ${pluginId}`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const stateRepo = new PluginStateRepository(db);
|
||||
const state = await stateRepo.get(pluginId);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { item: buildPluginInfo(plugin, state, marketplaceUrl) },
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "PLUGIN_GET_ERROR",
|
||||
message: "Failed to get plugin",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Enable a plugin
|
||||
*/
|
||||
export async function handlePluginEnable(
|
||||
db: Kysely<Database>,
|
||||
configuredPlugins: ResolvedPlugin[],
|
||||
pluginId: string,
|
||||
): Promise<ApiResult<PluginResponse>> {
|
||||
try {
|
||||
const plugin = configuredPlugins.find((p) => p.id === pluginId);
|
||||
if (!plugin) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "NOT_FOUND",
|
||||
message: `Plugin not found: ${pluginId}`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const stateRepo = new PluginStateRepository(db);
|
||||
const state = await stateRepo.enable(pluginId, plugin.version);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { item: buildPluginInfo(plugin, state) },
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "PLUGIN_ENABLE_ERROR",
|
||||
message: "Failed to enable plugin",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Disable a plugin
|
||||
*/
|
||||
export async function handlePluginDisable(
|
||||
db: Kysely<Database>,
|
||||
configuredPlugins: ResolvedPlugin[],
|
||||
pluginId: string,
|
||||
): Promise<ApiResult<PluginResponse>> {
|
||||
try {
|
||||
const plugin = configuredPlugins.find((p) => p.id === pluginId);
|
||||
if (!plugin) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "NOT_FOUND",
|
||||
message: `Plugin not found: ${pluginId}`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const stateRepo = new PluginStateRepository(db);
|
||||
const state = await stateRepo.disable(pluginId, plugin.version);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { item: buildPluginInfo(plugin, state) },
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "PLUGIN_DISABLE_ERROR",
|
||||
message: "Failed to disable plugin",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
465
packages/core/src/api/handlers/redirects.ts
Normal file
465
packages/core/src/api/handlers/redirects.ts
Normal file
@@ -0,0 +1,465 @@
|
||||
/**
|
||||
* Redirect CRUD and 404 log handlers
|
||||
*/
|
||||
|
||||
import type { Kysely } from "kysely";
|
||||
|
||||
import { OptionsRepository } from "../../database/repositories/options.js";
|
||||
import {
|
||||
RedirectRepository,
|
||||
type Redirect,
|
||||
type NotFoundEntry,
|
||||
type NotFoundSummary,
|
||||
} from "../../database/repositories/redirect.js";
|
||||
import { InvalidCursorError } from "../../database/repositories/types.js";
|
||||
import type { FindManyResult } from "../../database/repositories/types.js";
|
||||
import type { Database } from "../../database/types.js";
|
||||
import { wouldCreateLoop, detectLoops, type RedirectEdge } from "../../redirects/loops.js";
|
||||
import { validatePattern, validateDestinationParams, isPattern } from "../../redirects/patterns.js";
|
||||
import type { ApiResult } from "../types.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Redirects
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* List redirects with cursor pagination and optional filters
|
||||
*/
|
||||
export async function handleRedirectList(
|
||||
db: Kysely<Database>,
|
||||
params: {
|
||||
cursor?: string;
|
||||
limit?: number;
|
||||
search?: string;
|
||||
group?: string;
|
||||
enabled?: boolean;
|
||||
auto?: boolean;
|
||||
},
|
||||
): Promise<ApiResult<FindManyResult<Redirect> & { loopRedirectIds?: string[] }>> {
|
||||
try {
|
||||
const repo = new RedirectRepository(db);
|
||||
const result = await repo.findMany(params);
|
||||
|
||||
const loopRedirectIds = await getLoopRedirectIds(db);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: {
|
||||
...result,
|
||||
...(loopRedirectIds.length > 0 ? { loopRedirectIds } : {}),
|
||||
},
|
||||
};
|
||||
} catch (error) {
|
||||
if (error instanceof InvalidCursorError) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "INVALID_CURSOR", message: error.message },
|
||||
};
|
||||
}
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "REDIRECT_LIST_ERROR", message: "Failed to fetch redirects" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a redirect rule
|
||||
*/
|
||||
export async function handleRedirectCreate(
|
||||
db: Kysely<Database>,
|
||||
input: {
|
||||
source: string;
|
||||
destination: string;
|
||||
type?: number;
|
||||
enabled?: boolean;
|
||||
groupName?: string | null;
|
||||
},
|
||||
): Promise<ApiResult<Redirect>> {
|
||||
try {
|
||||
const repo = new RedirectRepository(db);
|
||||
|
||||
// Source and destination must differ
|
||||
if (input.source === input.destination) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "VALIDATION_ERROR",
|
||||
message: "Source and destination must be different",
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// If source looks like a pattern, validate it
|
||||
const sourceIsPattern = isPattern(input.source);
|
||||
if (sourceIsPattern) {
|
||||
const patternError = validatePattern(input.source);
|
||||
if (patternError) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "VALIDATION_ERROR", message: `Invalid source pattern: ${patternError}` },
|
||||
};
|
||||
}
|
||||
|
||||
// Validate destination params reference valid source params
|
||||
const destError = validateDestinationParams(input.source, input.destination);
|
||||
if (destError) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "VALIDATION_ERROR", message: destError },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// Check for duplicate source (exact match only for non-patterns)
|
||||
const existing = await repo.findBySource(input.source);
|
||||
if (existing) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "CONFLICT",
|
||||
message: `A redirect from "${input.source}" already exists`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// Check for redirect loops (skip if creating as disabled)
|
||||
if (input.enabled !== false) {
|
||||
const edges = toEdges(await repo.findAllEnabled());
|
||||
const loopPath = wouldCreateLoop(input.source, input.destination, edges);
|
||||
if (loopPath) return loopError(loopPath);
|
||||
}
|
||||
|
||||
const redirect = await repo.create({
|
||||
source: input.source,
|
||||
destination: input.destination,
|
||||
type: input.type ?? 301,
|
||||
isPattern: sourceIsPattern,
|
||||
enabled: input.enabled ?? true,
|
||||
groupName: input.groupName ?? null,
|
||||
});
|
||||
|
||||
return { success: true, data: redirect };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "REDIRECT_CREATE_ERROR", message: "Failed to create redirect" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a redirect by ID
|
||||
*/
|
||||
export async function handleRedirectGet(
|
||||
db: Kysely<Database>,
|
||||
id: string,
|
||||
): Promise<ApiResult<Redirect>> {
|
||||
try {
|
||||
const repo = new RedirectRepository(db);
|
||||
const redirect = await repo.findById(id);
|
||||
|
||||
if (!redirect) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: `Redirect "${id}" not found` },
|
||||
};
|
||||
}
|
||||
|
||||
return { success: true, data: redirect };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "REDIRECT_GET_ERROR", message: "Failed to fetch redirect" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update a redirect by ID
|
||||
*/
|
||||
export async function handleRedirectUpdate(
|
||||
db: Kysely<Database>,
|
||||
id: string,
|
||||
input: {
|
||||
source?: string;
|
||||
destination?: string;
|
||||
type?: number;
|
||||
enabled?: boolean;
|
||||
groupName?: string | null;
|
||||
},
|
||||
): Promise<ApiResult<Redirect>> {
|
||||
try {
|
||||
const repo = new RedirectRepository(db);
|
||||
|
||||
const existing = await repo.findById(id);
|
||||
if (!existing) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: `Redirect "${id}" not found` },
|
||||
};
|
||||
}
|
||||
|
||||
const newSource = input.source ?? existing.source;
|
||||
const newDest = input.destination ?? existing.destination;
|
||||
|
||||
// Source and destination must differ
|
||||
if (newSource === newDest) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "VALIDATION_ERROR",
|
||||
message: "Source and destination must be different",
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// If source is changing, validate patterns
|
||||
if (input.source !== undefined) {
|
||||
const sourceIsPattern = isPattern(input.source);
|
||||
if (sourceIsPattern) {
|
||||
const patternError = validatePattern(input.source);
|
||||
if (patternError) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "VALIDATION_ERROR",
|
||||
message: `Invalid source pattern: ${patternError}`,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// Check for duplicate source (exclude self)
|
||||
const dup = await repo.findBySource(input.source);
|
||||
if (dup && dup.id !== id) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "CONFLICT",
|
||||
message: `A redirect from "${input.source}" already exists`,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// Validate destination params against the (possibly updated) source
|
||||
const newSourceIsPattern = isPattern(newSource);
|
||||
if (newSourceIsPattern) {
|
||||
const destError = validateDestinationParams(newSource, newDest);
|
||||
if (destError) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "VALIDATION_ERROR", message: destError },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// Check for redirect loops if source or destination changed
|
||||
if (input.source !== undefined || input.destination !== undefined) {
|
||||
const edges = toEdges(await repo.findAllEnabled());
|
||||
const loopPath = wouldCreateLoop(newSource, newDest, edges, id);
|
||||
if (loopPath) return loopError(loopPath);
|
||||
}
|
||||
|
||||
const updated = await repo.update(id, {
|
||||
source: input.source,
|
||||
destination: input.destination,
|
||||
type: input.type,
|
||||
enabled: input.enabled,
|
||||
groupName: input.groupName,
|
||||
});
|
||||
|
||||
if (!updated) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "REDIRECT_UPDATE_ERROR", message: "Failed to update redirect" },
|
||||
};
|
||||
}
|
||||
|
||||
// Recompute cache — redirect was modified, so re-fetch
|
||||
await updateLoopCache(db);
|
||||
|
||||
return { success: true, data: updated };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "REDIRECT_UPDATE_ERROR", message: "Failed to update redirect" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a redirect by ID
|
||||
*/
|
||||
export async function handleRedirectDelete(
|
||||
db: Kysely<Database>,
|
||||
id: string,
|
||||
): Promise<ApiResult<{ deleted: true }>> {
|
||||
try {
|
||||
const repo = new RedirectRepository(db);
|
||||
const deleted = await repo.delete(id);
|
||||
|
||||
if (!deleted) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: `Redirect "${id}" not found` },
|
||||
};
|
||||
}
|
||||
|
||||
await updateLoopCache(db);
|
||||
|
||||
return { success: true, data: { deleted: true } };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "REDIRECT_DELETE_ERROR", message: "Failed to delete redirect" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Loop analysis cache
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
function loopError(loopPath: string[]): ApiResult<never> {
|
||||
const hops = loopPath
|
||||
.slice(0, -1)
|
||||
.map((p, i) => `${p} \u2192 ${loopPath[i + 1]}`)
|
||||
.join("\n");
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "VALIDATION_ERROR",
|
||||
message: `This redirect would create a loop:\n${hops}`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
function toEdges(redirects: Redirect[]): RedirectEdge[] {
|
||||
return redirects.map((r) => ({
|
||||
id: r.id,
|
||||
source: r.source,
|
||||
destination: r.destination,
|
||||
enabled: r.enabled,
|
||||
isPattern: r.isPattern,
|
||||
}));
|
||||
}
|
||||
|
||||
// Options-table key under which the cached list of loop-forming redirect
// IDs is stored (written by updateLoopCache, read by getLoopRedirectIds).
const LOOP_CACHE_KEY = "_redirect_loop_ids";
|
||||
|
||||
/**
|
||||
* Recompute loop redirect IDs and store in the options table.
|
||||
*/
|
||||
async function updateLoopCache(db: Kysely<Database>): Promise<void> {
|
||||
try {
|
||||
const options = new OptionsRepository(db);
|
||||
const edges = toEdges(await new RedirectRepository(db).findAllEnabled());
|
||||
const loopRedirectIds = detectLoops(edges);
|
||||
await options.set(LOOP_CACHE_KEY, loopRedirectIds);
|
||||
} catch (error) {
|
||||
console.error("Failed to update redirect loop cache:", error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Get loop redirect IDs from cache, computing lazily on first access.
 */
async function getLoopRedirectIds(db: Kysely<Database>): Promise<string[]> {
  try {
    const options = new OptionsRepository(db);
    // Strict null check on purpose: an empty array is a valid cached value
    // ("no loops") and must not trigger recomputation. Only a missing key
    // does. (Assumes options.get returns null — not undefined — for a
    // missing key; verify against OptionsRepository.)
    const cached = await options.get<string[]>(LOOP_CACHE_KEY);
    if (cached !== null) return cached;

    // First access after upgrade — compute and cache
    await updateLoopCache(db);
    // updateLoopCache swallows its own errors, so the key may still be
    // absent here; fall back to "no known loops" rather than failing.
    return (await options.get<string[]>(LOOP_CACHE_KEY)) ?? [];
  } catch {
    // Any storage failure degrades to "no known loops".
    return [];
  }
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// 404 Log
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* List 404 log entries with cursor pagination
|
||||
*/
|
||||
export async function handleNotFoundList(
|
||||
db: Kysely<Database>,
|
||||
params: { cursor?: string; limit?: number; search?: string },
|
||||
): Promise<ApiResult<FindManyResult<NotFoundEntry>>> {
|
||||
try {
|
||||
const repo = new RedirectRepository(db);
|
||||
const result = await repo.find404s(params);
|
||||
return { success: true, data: result };
|
||||
} catch (error) {
|
||||
if (error instanceof InvalidCursorError) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "INVALID_CURSOR", message: error.message },
|
||||
};
|
||||
}
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND_LIST_ERROR", message: "Failed to fetch 404 log" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get 404 summary (grouped by path, sorted by count)
|
||||
*/
|
||||
export async function handleNotFoundSummary(
|
||||
db: Kysely<Database>,
|
||||
limit?: number,
|
||||
): Promise<ApiResult<{ items: NotFoundSummary[] }>> {
|
||||
try {
|
||||
const repo = new RedirectRepository(db);
|
||||
const items = await repo.get404Summary(limit);
|
||||
return { success: true, data: { items } };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND_SUMMARY_ERROR", message: "Failed to fetch 404 summary" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear all 404 log entries
|
||||
*/
|
||||
export async function handleNotFoundClear(
|
||||
db: Kysely<Database>,
|
||||
): Promise<ApiResult<{ deleted: number }>> {
|
||||
try {
|
||||
const repo = new RedirectRepository(db);
|
||||
const deleted = await repo.clear404s();
|
||||
return { success: true, data: { deleted } };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND_CLEAR_ERROR", message: "Failed to clear 404 log" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Prune 404 log entries older than a given date
|
||||
*/
|
||||
export async function handleNotFoundPrune(
|
||||
db: Kysely<Database>,
|
||||
olderThan: string,
|
||||
): Promise<ApiResult<{ deleted: number }>> {
|
||||
try {
|
||||
const repo = new RedirectRepository(db);
|
||||
const deleted = await repo.prune404s(olderThan);
|
||||
return { success: true, data: { deleted } };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND_PRUNE_ERROR", message: "Failed to prune 404 log" },
|
||||
};
|
||||
}
|
||||
}
|
||||
154
packages/core/src/api/handlers/revision.ts
Normal file
154
packages/core/src/api/handlers/revision.ts
Normal file
@@ -0,0 +1,154 @@
|
||||
/**
|
||||
* Revision history handlers
|
||||
*/
|
||||
|
||||
import type { Kysely } from "kysely";
|
||||
|
||||
import { ContentRepository } from "../../database/repositories/content.js";
|
||||
import { RevisionRepository, type Revision } from "../../database/repositories/revision.js";
|
||||
import { withTransaction } from "../../database/transaction.js";
|
||||
import type { Database } from "../../database/types.js";
|
||||
import type { ApiResult, ContentResponse } from "../types.js";
|
||||
|
||||
/** Paged list of revisions for a single content entry. */
export interface RevisionListResponse {
  // One page of revisions (ordering decided by RevisionRepository.findByEntry).
  items: Revision[];
  // Total revision count for the entry, independent of the page size.
  total: number;
}

/** Wrapper for a single revision. */
export interface RevisionResponse {
  item: Revision;
}
|
||||
|
||||
/**
|
||||
* List revisions for a content entry
|
||||
*/
|
||||
export async function handleRevisionList(
|
||||
db: Kysely<Database>,
|
||||
collection: string,
|
||||
entryId: string,
|
||||
params: { limit?: number } = {},
|
||||
): Promise<ApiResult<RevisionListResponse>> {
|
||||
try {
|
||||
const repo = new RevisionRepository(db);
|
||||
const [items, total] = await Promise.all([
|
||||
repo.findByEntry(collection, entryId, { limit: Math.min(params.limit || 50, 100) }),
|
||||
repo.countByEntry(collection, entryId),
|
||||
]);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { items, total },
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "REVISION_LIST_ERROR",
|
||||
message: "Failed to list revisions",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a specific revision
|
||||
*/
|
||||
export async function handleRevisionGet(
|
||||
db: Kysely<Database>,
|
||||
revisionId: string,
|
||||
): Promise<ApiResult<RevisionResponse>> {
|
||||
try {
|
||||
const repo = new RevisionRepository(db);
|
||||
const item = await repo.findById(revisionId);
|
||||
|
||||
if (!item) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "NOT_FOUND",
|
||||
message: `Revision not found: ${revisionId}`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { item },
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "REVISION_GET_ERROR",
|
||||
message: "Failed to get revision",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Restore a revision (updates content to this revision's data and creates new revision)
 *
 * @param revisionId - the revision to restore
 * @param callerUserId - recorded as the author of the restore-marker revision
 * @returns the updated content item, or NOT_FOUND / REVISION_RESTORE_ERROR
 */
export async function handleRevisionRestore(
  db: Kysely<Database>,
  revisionId: string,
  callerUserId: string,
): Promise<ApiResult<ContentResponse>> {
  try {
    const revisionRepo = new RevisionRepository(db);

    // Get the revision
    const revision = await revisionRepo.findById(revisionId);
    if (!revision) {
      return {
        success: false,
        error: {
          code: "NOT_FOUND",
          message: `Revision not found: ${revisionId}`,
        },
      };
    }

    // Extract _slug from revision data (stored as metadata, not a real column)
    const { _slug, ...fieldData } = revision.data;

    // Atomically update content and create a new revision to record the restore.
    // If either operation fails, neither is committed (on engines that support
    // transactions; on D1, withTransaction falls back to sequential execution).
    const item = await withTransaction(db, async (trx) => {
      const trxContentRepo = new ContentRepository(trx);
      const trxRevisionRepo = new RevisionRepository(trx);

      // Only restore the slug if it was captured as a string; otherwise
      // leave the current slug untouched.
      const updated = await trxContentRepo.update(revision.collection, revision.entryId, {
        data: fieldData,
        slug: typeof _slug === "string" ? _slug : undefined,
      });

      // Record the restore as a new revision (full data incl. _slug) so the
      // history shows what was restored and by whom.
      await trxRevisionRepo.create({
        collection: revision.collection,
        entryId: revision.entryId,
        data: revision.data,
        authorId: callerUserId,
      });

      return updated;
    });

    // Fire-and-forget: prune old revisions to prevent unbounded growth
    // (keeps the most recent 50; errors are intentionally swallowed).
    const pruneRepo = new RevisionRepository(db);
    void pruneRepo.pruneOldRevisions(revision.collection, revision.entryId, 50).catch(() => {});

    return {
      success: true,
      data: { item },
    };
  } catch {
    return {
      success: false,
      error: {
        code: "REVISION_RESTORE_ERROR",
        message: "Failed to restore revision",
      },
    };
  }
}
|
||||
534
packages/core/src/api/handlers/schema.ts
Normal file
534
packages/core/src/api/handlers/schema.ts
Normal file
@@ -0,0 +1,534 @@
|
||||
/**
|
||||
* Schema/collection management handlers
|
||||
*/
|
||||
|
||||
import type { Kysely } from "kysely";
|
||||
|
||||
import type { Database } from "../../database/types.js";
|
||||
import {
|
||||
SchemaRegistry,
|
||||
SchemaError,
|
||||
type Collection,
|
||||
type Field,
|
||||
type CreateCollectionInput,
|
||||
type UpdateCollectionInput,
|
||||
type CreateFieldInput,
|
||||
type UpdateFieldInput,
|
||||
type CollectionWithFields,
|
||||
} from "../../schema/index.js";
|
||||
import type { ApiResult } from "../types.js";
|
||||
|
||||
/** All registered collections. */
export interface CollectionListResponse {
  items: Collection[];
}

/** A single collection without its field definitions. */
export interface CollectionResponse {
  item: Collection;
}

/** A collection together with its field definitions. */
export interface CollectionWithFieldsResponse {
  item: CollectionWithFields;
}

/** Field definitions for one collection. */
export interface FieldListResponse {
  items: Field[];
}

/** A single field definition. */
export interface FieldResponse {
  item: Field;
}
|
||||
|
||||
/**
|
||||
* List all collections
|
||||
*/
|
||||
export async function handleSchemaCollectionList(
|
||||
db: Kysely<Database>,
|
||||
): Promise<ApiResult<CollectionListResponse>> {
|
||||
try {
|
||||
const registry = new SchemaRegistry(db);
|
||||
const items = await registry.listCollections();
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { items },
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "SCHEMA_LIST_ERROR",
|
||||
message: "Failed to list collections",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a collection by slug
|
||||
*/
|
||||
export async function handleSchemaCollectionGet(
|
||||
db: Kysely<Database>,
|
||||
slug: string,
|
||||
options?: { includeFields?: boolean },
|
||||
): Promise<ApiResult<CollectionResponse | CollectionWithFieldsResponse>> {
|
||||
try {
|
||||
const registry = new SchemaRegistry(db);
|
||||
|
||||
if (options?.includeFields) {
|
||||
const item = await registry.getCollectionWithFields(slug);
|
||||
if (!item) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "NOT_FOUND",
|
||||
message: `Collection not found: ${slug}`,
|
||||
},
|
||||
};
|
||||
}
|
||||
return {
|
||||
success: true,
|
||||
data: { item },
|
||||
};
|
||||
}
|
||||
|
||||
const item = await registry.getCollection(slug);
|
||||
if (!item) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "NOT_FOUND",
|
||||
message: `Collection not found: ${slug}`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { item },
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "SCHEMA_GET_ERROR",
|
||||
message: "Failed to get collection",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Create a collection
 *
 * SchemaError from the registry (validation, conflicts, ...) is surfaced
 * with its own code/message/details; any other failure is logged and
 * returned as a generic SCHEMA_CREATE_ERROR.
 */
export async function handleSchemaCollectionCreate(
  db: Kysely<Database>,
  input: CreateCollectionInput,
): Promise<ApiResult<CollectionResponse>> {
  try {
    const registry = new SchemaRegistry(db);
    const item = await registry.createCollection(input);

    return {
      success: true,
      data: { item },
    };
  } catch (error) {
    // Expected, structured failures from the registry pass through as-is.
    if (error instanceof SchemaError) {
      return {
        success: false,
        error: {
          code: error.code,
          message: error.message,
          details: error.details,
        },
      };
    }
    // Unexpected failure: log server-side; the client gets a generic code.
    console.error("[emdash] Failed to create collection:", error);
    return {
      success: false,
      error: {
        code: "SCHEMA_CREATE_ERROR",
        message: "Failed to create collection",
      },
    };
  }
}
|
||||
|
||||
/**
|
||||
* Update a collection
|
||||
*/
|
||||
export async function handleSchemaCollectionUpdate(
|
||||
db: Kysely<Database>,
|
||||
slug: string,
|
||||
input: UpdateCollectionInput,
|
||||
): Promise<ApiResult<CollectionResponse>> {
|
||||
try {
|
||||
const registry = new SchemaRegistry(db);
|
||||
const item = await registry.updateCollection(slug, input);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { item },
|
||||
};
|
||||
} catch (error) {
|
||||
if (error instanceof SchemaError) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: error.code,
|
||||
message: error.message,
|
||||
details: error.details,
|
||||
},
|
||||
};
|
||||
}
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "SCHEMA_UPDATE_ERROR",
|
||||
message: "Failed to update collection",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a collection
|
||||
*/
|
||||
export async function handleSchemaCollectionDelete(
|
||||
db: Kysely<Database>,
|
||||
slug: string,
|
||||
options?: { force?: boolean },
|
||||
): Promise<ApiResult<{ success: boolean }>> {
|
||||
try {
|
||||
const registry = new SchemaRegistry(db);
|
||||
await registry.deleteCollection(slug, options);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { success: true },
|
||||
};
|
||||
} catch (error) {
|
||||
if (error instanceof SchemaError) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: error.code,
|
||||
message: error.message,
|
||||
details: error.details,
|
||||
},
|
||||
};
|
||||
}
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "SCHEMA_DELETE_ERROR",
|
||||
message: "Failed to delete collection",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* List fields for a collection
|
||||
*/
|
||||
export async function handleSchemaFieldList(
|
||||
db: Kysely<Database>,
|
||||
collectionSlug: string,
|
||||
): Promise<ApiResult<FieldListResponse>> {
|
||||
try {
|
||||
const registry = new SchemaRegistry(db);
|
||||
const collection = await registry.getCollection(collectionSlug);
|
||||
|
||||
if (!collection) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "NOT_FOUND",
|
||||
message: `Collection not found: ${collectionSlug}`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const items = await registry.listFields(collection.id);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { items },
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "SCHEMA_FIELD_LIST_ERROR",
|
||||
message: "Failed to list fields",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a field
|
||||
*/
|
||||
export async function handleSchemaFieldGet(
|
||||
db: Kysely<Database>,
|
||||
collectionSlug: string,
|
||||
fieldSlug: string,
|
||||
): Promise<ApiResult<FieldResponse>> {
|
||||
try {
|
||||
const registry = new SchemaRegistry(db);
|
||||
const item = await registry.getField(collectionSlug, fieldSlug);
|
||||
|
||||
if (!item) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "NOT_FOUND",
|
||||
message: `Field not found: ${fieldSlug} in collection ${collectionSlug}`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { item },
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "SCHEMA_FIELD_GET_ERROR",
|
||||
message: "Failed to get field",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a field
|
||||
*/
|
||||
export async function handleSchemaFieldCreate(
|
||||
db: Kysely<Database>,
|
||||
collectionSlug: string,
|
||||
input: CreateFieldInput,
|
||||
): Promise<ApiResult<FieldResponse>> {
|
||||
try {
|
||||
const registry = new SchemaRegistry(db);
|
||||
const item = await registry.createField(collectionSlug, input);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { item },
|
||||
};
|
||||
} catch (error) {
|
||||
if (error instanceof SchemaError) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: error.code,
|
||||
message: error.message,
|
||||
details: error.details,
|
||||
},
|
||||
};
|
||||
}
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "SCHEMA_FIELD_CREATE_ERROR",
|
||||
message: "Failed to create field",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update a field
|
||||
*/
|
||||
export async function handleSchemaFieldUpdate(
|
||||
db: Kysely<Database>,
|
||||
collectionSlug: string,
|
||||
fieldSlug: string,
|
||||
input: UpdateFieldInput,
|
||||
): Promise<ApiResult<FieldResponse>> {
|
||||
try {
|
||||
const registry = new SchemaRegistry(db);
|
||||
const item = await registry.updateField(collectionSlug, fieldSlug, input);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { item },
|
||||
};
|
||||
} catch (error) {
|
||||
if (error instanceof SchemaError) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: error.code,
|
||||
message: error.message,
|
||||
details: error.details,
|
||||
},
|
||||
};
|
||||
}
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "SCHEMA_FIELD_UPDATE_ERROR",
|
||||
message: "Failed to update field",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a field
|
||||
*/
|
||||
export async function handleSchemaFieldDelete(
|
||||
db: Kysely<Database>,
|
||||
collectionSlug: string,
|
||||
fieldSlug: string,
|
||||
): Promise<ApiResult<{ success: boolean }>> {
|
||||
try {
|
||||
const registry = new SchemaRegistry(db);
|
||||
await registry.deleteField(collectionSlug, fieldSlug);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { success: true },
|
||||
};
|
||||
} catch (error) {
|
||||
if (error instanceof SchemaError) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: error.code,
|
||||
message: error.message,
|
||||
details: error.details,
|
||||
},
|
||||
};
|
||||
}
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "SCHEMA_FIELD_DELETE_ERROR",
|
||||
message: "Failed to delete field",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Reorder fields
|
||||
*/
|
||||
export async function handleSchemaFieldReorder(
|
||||
db: Kysely<Database>,
|
||||
collectionSlug: string,
|
||||
fieldSlugs: string[],
|
||||
): Promise<ApiResult<{ success: boolean }>> {
|
||||
try {
|
||||
const registry = new SchemaRegistry(db);
|
||||
await registry.reorderFields(collectionSlug, fieldSlugs);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { success: true },
|
||||
};
|
||||
} catch (error) {
|
||||
if (error instanceof SchemaError) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: error.code,
|
||||
message: error.message,
|
||||
details: error.details,
|
||||
},
|
||||
};
|
||||
}
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "SCHEMA_FIELD_REORDER_ERROR",
|
||||
message: "Failed to reorder fields",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================
|
||||
// Orphaned Table Discovery
|
||||
// ============================================
|
||||
|
||||
/** A content table present in the database but not registered as a collection. */
export interface OrphanedTable {
  // Candidate collection slug derived for the table.
  slug: string;
  // Physical table name in the database.
  tableName: string;
  // Number of rows currently in the table.
  rowCount: number;
}

/** List of discovered orphaned tables. */
export interface OrphanedTableListResponse {
  items: OrphanedTable[];
}
|
||||
|
||||
/**
|
||||
* List orphaned content tables
|
||||
*/
|
||||
export async function handleOrphanedTableList(
|
||||
db: Kysely<Database>,
|
||||
): Promise<ApiResult<OrphanedTableListResponse>> {
|
||||
try {
|
||||
const registry = new SchemaRegistry(db);
|
||||
const items = await registry.discoverOrphanedTables();
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { items },
|
||||
};
|
||||
} catch (error) {
|
||||
console.error("[emdash] Failed to list orphaned tables:", error);
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "ORPHAN_LIST_ERROR",
|
||||
message: "Failed to list orphaned tables",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Register an orphaned table as a collection
|
||||
*/
|
||||
export async function handleOrphanedTableRegister(
|
||||
db: Kysely<Database>,
|
||||
slug: string,
|
||||
options?: {
|
||||
label?: string;
|
||||
labelSingular?: string;
|
||||
description?: string;
|
||||
},
|
||||
): Promise<ApiResult<CollectionResponse>> {
|
||||
try {
|
||||
const registry = new SchemaRegistry(db);
|
||||
const item = await registry.registerOrphanedTable(slug, options);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { item },
|
||||
};
|
||||
} catch (error) {
|
||||
if (error instanceof SchemaError) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: error.code,
|
||||
message: error.message,
|
||||
details: error.details,
|
||||
},
|
||||
};
|
||||
}
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "ORPHAN_REGISTER_ERROR",
|
||||
message: "Failed to register orphaned table",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
296
packages/core/src/api/handlers/sections.ts
Normal file
296
packages/core/src/api/handlers/sections.ts
Normal file
@@ -0,0 +1,296 @@
|
||||
/**
|
||||
* Section CRUD handlers
|
||||
*/
|
||||
|
||||
import type { Kysely } from "kysely";
|
||||
import { ulid } from "ulidx";
|
||||
|
||||
import { InvalidCursorError } from "../../database/repositories/types.js";
|
||||
import type { FindManyResult } from "../../database/repositories/types.js";
|
||||
import type { Database } from "../../database/types.js";
|
||||
import {
|
||||
getSectionById,
|
||||
getSectionWithDb,
|
||||
getSectionsWithDb,
|
||||
type Section,
|
||||
type GetSectionsOptions,
|
||||
} from "../../sections/index.js";
|
||||
import type { ApiResult } from "../types.js";
|
||||
|
||||
// Allowed section slugs: lowercase letters, digits, and hyphens only.
const SLUG_PATTERN = /^[a-z0-9-]+$/;

// Cursor-paginated section list, reusing the repository paging shape.
export type SectionListResponse = FindManyResult<Section>;
|
||||
|
||||
/**
|
||||
* List sections with optional filters
|
||||
*/
|
||||
export async function handleSectionList(
|
||||
db: Kysely<Database>,
|
||||
params: GetSectionsOptions,
|
||||
): Promise<ApiResult<SectionListResponse>> {
|
||||
try {
|
||||
const result = await getSectionsWithDb(db, {
|
||||
source: params.source,
|
||||
search: params.search,
|
||||
limit: params.limit,
|
||||
cursor: params.cursor,
|
||||
});
|
||||
|
||||
return { success: true, data: result };
|
||||
} catch (error) {
|
||||
if (error instanceof InvalidCursorError) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "INVALID_CURSOR", message: error.message },
|
||||
};
|
||||
}
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "SECTION_LIST_ERROR", message: "Failed to fetch sections" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Create a section.
 *
 * Validates the slug format, rejects duplicate slugs, inserts the row,
 * then re-reads it to return the fully hydrated section.
 *
 * @returns the created section, or VALIDATION_ERROR / CONFLICT /
 *   SECTION_CREATE_ERROR
 */
export async function handleSectionCreate(
  db: Kysely<Database>,
  input: {
    slug: string;
    title: string;
    description?: string;
    keywords?: string[];
    content: unknown[];
    previewMediaId?: string;
    source?: string;
    themeId?: string;
  },
): Promise<ApiResult<Section>> {
  try {
    // Validate slug format
    if (!SLUG_PATTERN.test(input.slug)) {
      return {
        success: false,
        error: {
          code: "VALIDATION_ERROR",
          message: "slug must only contain lowercase letters, numbers, and hyphens",
        },
      };
    }

    // Check if slug already exists
    // NOTE(review): check-then-insert is racy under concurrent creates;
    // a unique index on slug would close the gap — confirm the schema.
    const existing = await db
      .selectFrom("_emdash_sections")
      .select("id")
      .where("slug", "=", input.slug)
      .executeTakeFirst();

    if (existing) {
      return {
        success: false,
        error: {
          code: "CONFLICT",
          message: `Section with slug "${input.slug}" already exists`,
        },
      };
    }

    const id = ulid();
    const now = new Date().toISOString();

    // keywords/content are stored as JSON text; unset optionals become NULL.
    await db
      .insertInto("_emdash_sections")
      .values({
        id,
        slug: input.slug,
        title: input.title,
        description: input.description ?? null,
        keywords: input.keywords ? JSON.stringify(input.keywords) : null,
        content: JSON.stringify(input.content),
        preview_media_id: input.previewMediaId ?? null,
        source: input.source ?? "user",
        theme_id: input.themeId ?? null,
        created_at: now,
        updated_at: now,
      })
      .execute();

    // Re-read through the section loader so the response matches GET.
    const section = await getSectionById(id, db);
    if (!section) {
      return {
        success: false,
        error: { code: "SECTION_CREATE_ERROR", message: "Failed to fetch created section" },
      };
    }

    return { success: true, data: section };
  } catch {
    return {
      success: false,
      error: { code: "SECTION_CREATE_ERROR", message: "Failed to create section" },
    };
  }
}
|
||||
|
||||
/**
|
||||
* Get a section by slug
|
||||
*/
|
||||
export async function handleSectionGet(
|
||||
db: Kysely<Database>,
|
||||
slug: string,
|
||||
): Promise<ApiResult<Section>> {
|
||||
try {
|
||||
const section = await getSectionWithDb(slug, db);
|
||||
|
||||
if (!section) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: `Section "${slug}" not found` },
|
||||
};
|
||||
}
|
||||
|
||||
return { success: true, data: section };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "SECTION_GET_ERROR", message: "Failed to fetch section" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Update a section by slug.
 *
 * Only fields present in `input` are written; a slug change is validated
 * and checked for collisions first. Returns the re-read, fully hydrated
 * section.
 */
export async function handleSectionUpdate(
  db: Kysely<Database>,
  slug: string,
  input: {
    slug?: string;
    title?: string;
    description?: string;
    keywords?: string[];
    content?: unknown[];
    previewMediaId?: string | null;
  },
): Promise<ApiResult<Section>> {
  try {
    // Check if section exists
    const existing = await db
      .selectFrom("_emdash_sections")
      .select(["id", "source"])
      .where("slug", "=", slug)
      .executeTakeFirst();

    if (!existing) {
      return {
        success: false,
        error: { code: "NOT_FOUND", message: `Section "${slug}" not found` },
      };
    }

    // Validate new slug if changing
    if (input.slug && input.slug !== slug) {
      if (!SLUG_PATTERN.test(input.slug)) {
        return {
          success: false,
          error: {
            code: "VALIDATION_ERROR",
            message: "slug must only contain lowercase letters, numbers, and hyphens",
          },
        };
      }

      // Check if new slug already exists
      const slugExists = await db
        .selectFrom("_emdash_sections")
        .select("id")
        .where("slug", "=", input.slug)
        .executeTakeFirst();

      if (slugExists) {
        return {
          success: false,
          error: {
            code: "CONFLICT",
            message: `Section with slug "${input.slug}" already exists`,
          },
        };
      }
    }

    // Build update object — `undefined` means "leave unchanged";
    // `previewMediaId: null` explicitly clears the preview.
    const updates: Record<string, unknown> = {
      updated_at: new Date().toISOString(),
    };

    if (input.slug !== undefined) updates.slug = input.slug;
    if (input.title !== undefined) updates.title = input.title;
    if (input.description !== undefined) updates.description = input.description;
    if (input.keywords !== undefined) updates.keywords = JSON.stringify(input.keywords);
    if (input.content !== undefined) updates.content = JSON.stringify(input.content);
    if (input.previewMediaId !== undefined) updates.preview_media_id = input.previewMediaId;

    await db.updateTable("_emdash_sections").set(updates).where("id", "=", existing.id).execute();

    // Re-read through the section loader so the response matches GET.
    const section = await getSectionById(existing.id, db);
    if (!section) {
      return {
        success: false,
        error: { code: "SECTION_UPDATE_ERROR", message: "Failed to fetch updated section" },
      };
    }

    return { success: true, data: section };
  } catch {
    return {
      success: false,
      error: { code: "SECTION_UPDATE_ERROR", message: "Failed to update section" },
    };
  }
}
|
||||
|
||||
/**
|
||||
* Delete a section by slug
|
||||
*/
|
||||
export async function handleSectionDelete(
|
||||
db: Kysely<Database>,
|
||||
slug: string,
|
||||
): Promise<ApiResult<{ deleted: true }>> {
|
||||
try {
|
||||
// Check if section exists and get source
|
||||
const existing = await db
|
||||
.selectFrom("_emdash_sections")
|
||||
.select(["id", "source", "theme_id"])
|
||||
.where("slug", "=", slug)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (!existing) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: `Section "${slug}" not found` },
|
||||
};
|
||||
}
|
||||
|
||||
// Prevent deleting theme sections
|
||||
if (existing.source === "theme") {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "FORBIDDEN",
|
||||
message:
|
||||
"Cannot delete theme-provided sections. Edit the section to create a user copy, then delete that.",
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
await db.deleteFrom("_emdash_sections").where("id", "=", existing.id).execute();
|
||||
|
||||
return { success: true, data: { deleted: true } };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "SECTION_DELETE_ERROR", message: "Failed to delete section" },
|
||||
};
|
||||
}
|
||||
}
|
||||
142
packages/core/src/api/handlers/seo.ts
Normal file
142
packages/core/src/api/handlers/seo.ts
Normal file
@@ -0,0 +1,142 @@
|
||||
/**
|
||||
* SEO Handlers
|
||||
*
|
||||
* Business logic for sitemap generation and robots.txt.
|
||||
*/
|
||||
|
||||
import { sql, type Kysely } from "kysely";
|
||||
|
||||
import type { Database } from "../../database/types.js";
|
||||
import { validateIdentifier } from "../../database/validate.js";
|
||||
import type { ApiResult } from "../types.js";
|
||||
|
||||
/** Raw content data for sitemap generation — the route builds the actual URLs */
|
||||
export interface SitemapContentEntry {
|
||||
/** Content ID (ULID) */
|
||||
id: string;
|
||||
/** Content slug, or null when the entry has no slug */
|
||||
slug: string | null;
|
||||
/** ISO date of last modification */
|
||||
updatedAt: string;
|
||||
}
|
||||
|
||||
/** Per-collection sitemap data with entries and URL pattern */
|
||||
export interface SitemapCollectionData {
|
||||
/** Collection slug (e.g., "post", "page") */
|
||||
collection: string;
|
||||
/** URL pattern with {slug} placeholder, or null for default /{collection}/{slug} */
|
||||
urlPattern: string | null;
|
||||
/** Most recent updated_at across all entries (for sitemap index lastmod) */
|
||||
lastmod: string;
|
||||
/** Individual content entries */
|
||||
entries: SitemapContentEntry[];
|
||||
}
|
||||
|
||||
export interface SitemapDataResponse {
|
||||
collections: SitemapCollectionData[];
|
||||
}
|
||||
|
||||
/** Maximum entries per sitemap (per spec) */
|
||||
const SITEMAP_MAX_ENTRIES = 50_000;
|
||||
|
||||
/**
|
||||
* Collect all published, indexable content across SEO-enabled collections
|
||||
* for sitemap generation, grouped by collection.
|
||||
*
|
||||
* Only includes content from collections with `has_seo = 1`.
|
||||
* Excludes content with `seo_no_index = 1` in the `_emdash_seo` table.
|
||||
*
|
||||
* Returns raw data grouped per collection. The caller (route) is
|
||||
* responsible for building absolute URLs — this handler does NOT
|
||||
* assume a URL structure.
|
||||
*/
|
||||
/**
 * Collect all published, indexable content across SEO-enabled collections
 * for sitemap generation, grouped by collection.
 *
 * Only includes content from collections with `has_seo = 1`.
 * Excludes content with `seo_no_index = 1` in the `_emdash_seo` table.
 *
 * Returns raw data grouped per collection. The caller (route) is
 * responsible for building absolute URLs — this handler does NOT
 * assume a URL structure.
 *
 * @param db             Kysely database handle
 * @param collectionSlug When set, only return data for this collection.
 * @returns Per-collection sitemap data, or a SITEMAP_ERROR result.
 */
export async function handleSitemapData(
  db: Kysely<Database>,
  /** When set, only return data for this collection. */
  collectionSlug?: string,
): Promise<ApiResult<SitemapDataResponse>> {
  try {
    // Find SEO-enabled collections (optionally filtered)
    let query = db
      .selectFrom("_emdash_collections")
      .select(["slug", "url_pattern"])
      .where("has_seo", "=", 1);

    if (collectionSlug) {
      query = query.where("slug", "=", collectionSlug);
    }

    const collections = await query.execute();

    const result: SitemapCollectionData[] = [];

    for (const col of collections) {
      // Validate the slug before using it as a table name identifier.
      // Should always pass (slugs are validated on creation), but
      // guards against corrupted DB data.
      try {
        validateIdentifier(col.slug, "collection slug");
      } catch {
        console.warn(`[SITEMAP] Skipping collection with invalid slug: ${col.slug}`);
        continue;
      }

      // Content tables follow the ec_<collection-slug> naming convention.
      const tableName = `ec_${col.slug}`;

      // Query published, non-deleted content.
      // LEFT JOIN _emdash_seo to check noindex flag.
      // Content without an SEO row is assumed indexable (default).
      // Wrapped in try/catch so a missing/broken table doesn't fail the
      // entire sitemap — we skip that collection and continue.
      try {
        const rows = await sql<{
          slug: string | null;
          id: string;
          updated_at: string;
        }>`
          SELECT c.slug, c.id, c.updated_at
          FROM ${sql.ref(tableName)} c
          LEFT JOIN _emdash_seo s
            ON s.collection = ${col.slug}
            AND s.content_id = c.id
          WHERE c.status = 'published'
            AND c.deleted_at IS NULL
            AND (s.seo_no_index IS NULL OR s.seo_no_index = 0)
          ORDER BY c.updated_at DESC
          LIMIT ${SITEMAP_MAX_ENTRIES}
        `.execute(db);

        // Collections with no indexable content are omitted entirely.
        if (rows.rows.length === 0) continue;

        const entries: SitemapContentEntry[] = [];
        for (const row of rows.rows) {
          entries.push({
            id: row.id,
            slug: row.slug,
            updatedAt: row.updated_at,
          });
        }

        result.push({
          collection: col.slug,
          urlPattern: col.url_pattern,
          // Rows are ordered by updated_at DESC, so first row is the latest
          lastmod: rows.rows[0].updated_at,
          entries,
        });
      } catch (err) {
        // Table missing or query error — skip this collection
        console.warn(`[SITEMAP] Failed to query collection "${col.slug}":`, err);
        continue;
      }
    }

    return { success: true, data: { collections: result } };
  } catch (error) {
    console.error("[SITEMAP_ERROR]", error);
    return {
      success: false,
      error: { code: "SITEMAP_ERROR", message: "Failed to generate sitemap data" },
    };
  }
}
|
||||
49
packages/core/src/api/handlers/settings.ts
Normal file
49
packages/core/src/api/handlers/settings.ts
Normal file
@@ -0,0 +1,49 @@
|
||||
/**
|
||||
* Settings handlers
|
||||
*/
|
||||
|
||||
import type { Kysely } from "kysely";
|
||||
|
||||
import type { Database } from "../../database/types.js";
|
||||
import { getSiteSettingsWithDb, setSiteSettings } from "../../settings/index.js";
|
||||
import type { SiteSettings } from "../../settings/types.js";
|
||||
import type { Storage } from "../../storage/types.js";
|
||||
import type { ApiResult } from "../types.js";
|
||||
|
||||
/**
|
||||
* Get all site settings
|
||||
*/
|
||||
export async function handleSettingsGet(
|
||||
db: Kysely<Database>,
|
||||
storage: Storage | null,
|
||||
): Promise<ApiResult<Partial<SiteSettings>>> {
|
||||
try {
|
||||
const settings = await getSiteSettingsWithDb(db, storage);
|
||||
return { success: true, data: settings };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "SETTINGS_READ_ERROR", message: "Failed to get settings" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update site settings
|
||||
*/
|
||||
export async function handleSettingsUpdate(
|
||||
db: Kysely<Database>,
|
||||
storage: Storage | null,
|
||||
input: Partial<SiteSettings>,
|
||||
): Promise<ApiResult<Partial<SiteSettings>>> {
|
||||
try {
|
||||
await setSiteSettings(input, db);
|
||||
const updatedSettings = await getSiteSettingsWithDb(db, storage);
|
||||
return { success: true, data: updatedSettings };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "SETTINGS_UPDATE_ERROR", message: "Failed to update settings" },
|
||||
};
|
||||
}
|
||||
}
|
||||
350
packages/core/src/api/handlers/snapshot.ts
Normal file
350
packages/core/src/api/handlers/snapshot.ts
Normal file
@@ -0,0 +1,350 @@
|
||||
/**
|
||||
* Snapshot handler — generates a portable database snapshot.
|
||||
*
|
||||
* Returns all content tables, schema definitions, and supporting data
|
||||
* needed to render content in an isolated preview database.
|
||||
*
|
||||
* Used by:
|
||||
* - DO preview database (EmDashPreviewDB.populateFromSnapshot)
|
||||
* - Future: CLI export, backup, site migration
|
||||
*/
|
||||
|
||||
import type { Kysely } from "kysely";
|
||||
import { sql } from "kysely";
|
||||
|
||||
import type { Database } from "../../database/types.js";
|
||||
|
||||
// ── Preview signature verification ──────────────────────────────
|
||||
|
||||
/**
|
||||
* Verify HMAC-SHA256 preview signature using crypto.subtle.
|
||||
* Returns true if the signature is valid and not expired.
|
||||
*/
|
||||
export async function verifyPreviewSignature(
|
||||
source: string,
|
||||
exp: number,
|
||||
sig: string,
|
||||
secret: string,
|
||||
): Promise<boolean> {
|
||||
if (exp < Date.now() / 1000) return false;
|
||||
|
||||
const encoder = new TextEncoder();
|
||||
const key = await crypto.subtle.importKey(
|
||||
"raw",
|
||||
encoder.encode(secret),
|
||||
{ name: "HMAC", hash: "SHA-256" },
|
||||
false,
|
||||
["verify"],
|
||||
);
|
||||
|
||||
const sigBytes = new Uint8Array(sig.length / 2);
|
||||
for (let i = 0; i < sig.length; i += 2) {
|
||||
sigBytes[i / 2] = parseInt(sig.substring(i, i + 2), 16);
|
||||
}
|
||||
|
||||
return crypto.subtle.verify("HMAC", key, sigBytes, encoder.encode(`${source}:${exp}`));
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse an X-Preview-Signature header value into its components.
|
||||
*
|
||||
* Format: "source:exp:sig" where source is a URL (contains colons),
|
||||
* exp is a unix timestamp, and sig is 64 hex chars.
|
||||
*
|
||||
* Parses from the right since source URLs contain colons.
|
||||
*
|
||||
* @returns Parsed components, or null if the format is invalid
|
||||
*/
|
||||
export function parsePreviewSignatureHeader(
|
||||
header: string,
|
||||
): { source: string; exp: number; sig: string } | null {
|
||||
const lastColon = header.lastIndexOf(":");
|
||||
if (lastColon <= 0) return null;
|
||||
|
||||
const sig = header.substring(lastColon + 1);
|
||||
if (sig.length !== 64) return null;
|
||||
|
||||
const rest = header.substring(0, lastColon);
|
||||
const secondLastColon = rest.lastIndexOf(":");
|
||||
if (secondLastColon <= 0) return null;
|
||||
|
||||
const source = rest.substring(0, secondLastColon);
|
||||
const exp = parseInt(rest.substring(secondLastColon + 1), 10);
|
||||
|
||||
if (isNaN(exp) || source.length === 0) return null;
|
||||
|
||||
return { source, exp, sig };
|
||||
}
|
||||
|
||||
// ── Media URL rewriting ─────────────────────────────────────────
|
||||
|
||||
const MEDIA_FILE_PREFIX = "/_emdash/api/media/file/";
|
||||
|
||||
/**
|
||||
* Parse a JSON string value and inject `src` for local media objects.
|
||||
* Returns the original string if it's not a local media value.
|
||||
*/
|
||||
function injectMediaSrc(jsonStr: string, origin: string): string {
|
||||
try {
|
||||
const obj = JSON.parse(jsonStr);
|
||||
if (typeof obj !== "object" || obj === null || Array.isArray(obj)) return jsonStr;
|
||||
if (injectMediaSrcInto(obj, origin)) {
|
||||
return JSON.stringify(obj);
|
||||
}
|
||||
return jsonStr;
|
||||
} catch {
|
||||
return jsonStr;
|
||||
}
|
||||
}
|
||||
|
||||
function isRecord(value: unknown): value is Record<string, unknown> {
|
||||
return typeof value === "object" && value !== null && !Array.isArray(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Recursively walk an object and inject `src` into local media values.
|
||||
* Returns true if any modifications were made.
|
||||
*/
|
||||
function injectMediaSrcInto(obj: Record<string, unknown>, origin: string): boolean {
|
||||
let modified = false;
|
||||
|
||||
// Check if this object itself is a local media value
|
||||
if ((obj.provider === "local" || (!obj.provider && obj.id && obj.meta)) && !obj.src) {
|
||||
const meta = isRecord(obj.meta) ? obj.meta : undefined;
|
||||
const storageKey = meta?.storageKey ?? obj.id;
|
||||
if (typeof storageKey === "string" && storageKey) {
|
||||
obj.src = `${origin}${MEDIA_FILE_PREFIX}${storageKey}`;
|
||||
modified = true;
|
||||
}
|
||||
}
|
||||
|
||||
// Recurse into nested objects/arrays (e.g. Portable Text with image blocks)
|
||||
for (const value of Object.values(obj)) {
|
||||
if (Array.isArray(value)) {
|
||||
for (const item of value) {
|
||||
if (isRecord(item)) {
|
||||
if (injectMediaSrcInto(item, origin)) {
|
||||
modified = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (isRecord(value)) {
|
||||
if (injectMediaSrcInto(value, origin)) {
|
||||
modified = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return modified;
|
||||
}
|
||||
|
||||
// ── Snapshot generation ─────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Safe identifier pattern for snapshot table names.
|
||||
* More permissive than validateIdentifier() — allows leading underscores
|
||||
* (needed for system tables like _emdash_collections).
|
||||
*/
|
||||
const SAFE_TABLE_NAME = /^[a-z_][a-z0-9_]*$/;
|
||||
|
||||
/** Snapshot shape consumed by the DO preview database */
|
||||
export interface Snapshot {
|
||||
tables: Record<string, Record<string, unknown>[]>;
|
||||
schema: Record<
|
||||
string,
|
||||
{
|
||||
columns: string[];
|
||||
types?: Record<string, string>;
|
||||
}
|
||||
>;
|
||||
generatedAt: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* System tables included in snapshots.
|
||||
* Content tables (ec_*) are discovered dynamically.
|
||||
*/
|
||||
const SYSTEM_TABLES = [
|
||||
"_emdash_collections",
|
||||
"_emdash_fields",
|
||||
"_emdash_taxonomy_defs",
|
||||
"_emdash_menus",
|
||||
"_emdash_menu_items",
|
||||
"_emdash_sections",
|
||||
"_emdash_widget_areas",
|
||||
"_emdash_widgets",
|
||||
"_emdash_seo",
|
||||
"_emdash_migrations",
|
||||
"taxonomies",
|
||||
"content_taxonomies",
|
||||
"media",
|
||||
"options",
|
||||
"revisions",
|
||||
];
|
||||
|
||||
/**
|
||||
* Table name prefixes excluded from snapshots (auth/security data).
|
||||
*/
|
||||
const EXCLUDED_PREFIXES = [
|
||||
"_emdash_api_tokens",
|
||||
"_emdash_oauth_tokens",
|
||||
"_emdash_authorization_codes",
|
||||
"_emdash_device_codes",
|
||||
"_emdash_migrations_lock",
|
||||
"_plugin_",
|
||||
"users",
|
||||
"sessions",
|
||||
"credentials",
|
||||
"challenges",
|
||||
];
|
||||
|
||||
/**
|
||||
* Options key prefixes safe for inclusion in snapshots.
|
||||
*
|
||||
* The options table contains plugin secrets (plugin:*), passkey challenges
|
||||
* (emdash:passkey_pending:*), and setup state that must not leak to
|
||||
* preview databases. Only site-level rendering settings are needed.
|
||||
*/
|
||||
const SAFE_OPTIONS_PREFIXES = ["site:"];
|
||||
|
||||
function isExcluded(tableName: string): boolean {
|
||||
return EXCLUDED_PREFIXES.some((prefix) => tableName.startsWith(prefix));
|
||||
}
|
||||
|
||||
/** Column info from PRAGMA table_info */
|
||||
interface ColumnInfo {
|
||||
name: string;
|
||||
type: string;
|
||||
}
|
||||
|
||||
export interface GenerateSnapshotOptions {
|
||||
/** Include draft and trashed content (default: false) */
|
||||
includeDrafts?: boolean;
|
||||
/** Origin URL for absolutizing local media URLs (e.g. "https://mysite.com") */
|
||||
origin?: string;
|
||||
}
|
||||
|
||||
/**
 * Generate a portable database snapshot.
 *
 * Discovers ec_* content tables dynamically, exports system tables
 * needed for rendering, and includes schema info for table recreation.
 *
 * @param db      Kysely database handle (SQLite — relies on sqlite_master
 *                and PRAGMA table_info)
 * @param options Draft inclusion and media-URL origin; see
 *                {@link GenerateSnapshotOptions}
 * @returns Tables, per-table schema, and a generation timestamp
 */
export async function generateSnapshot(
  db: Kysely<Database>,
  options?: GenerateSnapshotOptions,
): Promise<Snapshot> {
  const includeDrafts = options?.includeDrafts ?? false;

  // Discover all ec_* content tables
  const tableResult = await sql<{ name: string }>`
    SELECT name FROM sqlite_master
    WHERE type = 'table'
    AND name LIKE 'ec_%'
    ORDER BY name
  `.execute(db);

  const contentTables = tableResult.rows.map((r) => r.name);

  // Build list of all tables to export
  const allTables = [...contentTables, ...SYSTEM_TABLES];

  const tables: Record<string, Record<string, unknown>[]> = {};
  const schema: Record<string, { columns: string[]; types?: Record<string, string> }> = {};

  for (const tableName of allTables) {
    // Auth/security tables never leave the primary database.
    if (isExcluded(tableName)) continue;

    // Validate identifier before interpolating into sql.raw().
    // SYSTEM_TABLES are hardcoded and safe, but ec_* names come from
    // sqlite_master and must be validated.
    if (!SAFE_TABLE_NAME.test(tableName)) continue;

    try {
      // Get column info via PRAGMA
      const pragmaResult = await sql<ColumnInfo>`
        PRAGMA table_info(${sql.raw(`"${tableName}"`)})
      `.execute(db);

      // No columns means the table doesn't exist — skip it.
      if (pragmaResult.rows.length === 0) continue;

      const columns = pragmaResult.rows.map((r) => r.name);
      const types: Record<string, string> = {};
      for (const row of pragmaResult.rows) {
        // SQLite may report an empty declared type; default to TEXT.
        types[row.name] = row.type || "TEXT";
      }

      schema[tableName] = { columns, types };

      // Fetch rows
      let rows: Record<string, unknown>[];

      if (tableName.startsWith("ec_")) {
        if (includeDrafts) {
          // Include all non-deleted content (published, draft, scheduled)
          rows = (
            await sql<Record<string, unknown>>`
              SELECT * FROM ${sql.raw(`"${tableName}"`)}
              WHERE deleted_at IS NULL
            `.execute(db)
          ).rows;
        } else {
          // Only export published content
          rows = (
            await sql<Record<string, unknown>>`
              SELECT * FROM ${sql.raw(`"${tableName}"`)}
              WHERE deleted_at IS NULL
              AND (status = 'published' OR (status = 'scheduled' AND scheduled_at <= datetime('now')))
            `.execute(db)
          ).rows;
        }
      } else if (tableName === "options") {
        // Filter options to safe rendering-only prefixes.
        // Excludes plugin secrets, passkey challenges, and setup state.
        rows = (
          await sql<Record<string, unknown>>`
            SELECT * FROM ${sql.raw(`"${tableName}"`)}
          `.execute(db)
        ).rows.filter((row) => {
          const name = typeof row.name === "string" ? row.name : "";
          return SAFE_OPTIONS_PREFIXES.some((prefix) => name.startsWith(prefix));
        });
      } else {
        // System/supporting tables are exported whole.
        rows = (
          await sql<Record<string, unknown>>`
            SELECT * FROM ${sql.raw(`"${tableName}"`)}
          `.execute(db)
        ).rows;
      }

      // Empty tables are omitted to keep the snapshot compact.
      if (rows.length > 0) {
        tables[tableName] = rows;
      }
    } catch {
      // Table might not exist yet (e.g. pre-migration) — skip silently
    }
  }

  // Absolutize local media URLs in content tables so snapshots are portable.
  // Local image fields are stored as JSON with provider:"local" and
  // meta.storageKey but no src — the URL is derived at render time.
  // For snapshots consumed by external preview services, inject src now.
  if (options?.origin) {
    const origin = options.origin;
    for (const [tableName, rows] of Object.entries(tables)) {
      if (!tableName.startsWith("ec_")) continue;
      for (const row of rows) {
        for (const [col, value] of Object.entries(row)) {
          // Cheap pre-filter: only JSON-object-looking strings are parsed.
          if (typeof value !== "string" || !value.startsWith("{")) continue;
          row[col] = injectMediaSrc(value, origin);
        }
      }
    }
  }

  return {
    tables,
    schema,
    generatedAt: new Date().toISOString(),
  };
}
|
||||
645
packages/core/src/api/handlers/taxonomies.ts
Normal file
645
packages/core/src/api/handlers/taxonomies.ts
Normal file
@@ -0,0 +1,645 @@
|
||||
/**
|
||||
* Taxonomy and term CRUD handlers
|
||||
*/
|
||||
|
||||
import type { Kysely } from "kysely";
|
||||
import { ulid } from "ulidx";
|
||||
|
||||
import { TaxonomyRepository } from "../../database/repositories/taxonomy.js";
|
||||
import type { Database } from "../../database/types.js";
|
||||
import { invalidateTermCache } from "../../taxonomies/index.js";
|
||||
import type { ApiResult } from "../types.js";
|
||||
|
||||
/** Taxonomy name validation pattern: lowercase alphanumeric + underscores, starts with letter */
|
||||
const NAME_PATTERN = /^[a-z][a-z0-9_]*$/;
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Response types
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export interface TaxonomyDef {
|
||||
id: string;
|
||||
name: string;
|
||||
label: string;
|
||||
labelSingular?: string;
|
||||
hierarchical: boolean;
|
||||
collections: string[];
|
||||
}
|
||||
|
||||
export interface TaxonomyListResponse {
|
||||
taxonomies: TaxonomyDef[];
|
||||
}
|
||||
|
||||
export interface TermData {
|
||||
id: string;
|
||||
name: string;
|
||||
slug: string;
|
||||
label: string;
|
||||
parentId: string | null;
|
||||
description?: string;
|
||||
}
|
||||
|
||||
export interface TermWithCount extends TermData {
|
||||
count: number;
|
||||
children: TermWithCount[];
|
||||
}
|
||||
|
||||
export interface TermListResponse {
|
||||
terms: TermWithCount[];
|
||||
}
|
||||
|
||||
export interface TermResponse {
|
||||
term: TermData;
|
||||
}
|
||||
|
||||
export interface TermGetResponse {
|
||||
term: TermData & {
|
||||
count: number;
|
||||
children: Array<{ id: string; slug: string; label: string }>;
|
||||
};
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Build tree structure from flat terms
|
||||
*/
|
||||
function buildTree(flatTerms: TermWithCount[]): TermWithCount[] {
|
||||
const map = new Map<string, TermWithCount>();
|
||||
const roots: TermWithCount[] = [];
|
||||
|
||||
for (const term of flatTerms) {
|
||||
map.set(term.id, term);
|
||||
}
|
||||
|
||||
for (const term of flatTerms) {
|
||||
if (term.parentId && map.has(term.parentId)) {
|
||||
map.get(term.parentId)!.children.push(term);
|
||||
} else {
|
||||
roots.push(term);
|
||||
}
|
||||
}
|
||||
|
||||
return roots;
|
||||
}
|
||||
|
||||
/**
|
||||
* Look up a taxonomy definition by name, returning a NOT_FOUND error if missing.
|
||||
*/
|
||||
async function requireTaxonomyDef(
|
||||
db: Kysely<Database>,
|
||||
name: string,
|
||||
): Promise<
|
||||
| { success: true; def: { hierarchical: number } }
|
||||
| { success: false; error: { code: string; message: string } }
|
||||
> {
|
||||
const def = await db
|
||||
.selectFrom("_emdash_taxonomy_defs")
|
||||
.selectAll()
|
||||
.where("name", "=", name)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (!def) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: `Taxonomy '${name}' not found` },
|
||||
};
|
||||
}
|
||||
|
||||
return { success: true, def };
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Handlers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* List all taxonomy definitions
|
||||
*/
|
||||
export async function handleTaxonomyList(
|
||||
db: Kysely<Database>,
|
||||
): Promise<ApiResult<TaxonomyListResponse>> {
|
||||
try {
|
||||
const [rows, collectionRows] = await Promise.all([
|
||||
db.selectFrom("_emdash_taxonomy_defs").selectAll().execute(),
|
||||
db.selectFrom("_emdash_collections").select("slug").execute(),
|
||||
]);
|
||||
|
||||
// Filter orphan collection references on read so the response stays
|
||||
// consistent with `schema_list_collections`. Storage is untouched —
|
||||
// re-creating the collection re-links automatically.
|
||||
const realCollections = new Set(collectionRows.map((r) => r.slug));
|
||||
|
||||
const taxonomies: TaxonomyDef[] = rows.map((row) => {
|
||||
const stored: string[] = row.collections ? JSON.parse(row.collections) : [];
|
||||
return {
|
||||
id: row.id,
|
||||
name: row.name,
|
||||
label: row.label,
|
||||
labelSingular: row.label_singular ?? undefined,
|
||||
hierarchical: row.hierarchical === 1,
|
||||
collections: stored.filter((slug) => realCollections.has(slug)),
|
||||
};
|
||||
});
|
||||
|
||||
return { success: true, data: { taxonomies } };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "TAXONOMY_LIST_ERROR", message: "Failed to list taxonomies" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Create a new taxonomy definition.
 *
 * Validates the name format and the referenced collections, rejects
 * duplicates (both via a pre-check and by catching the UNIQUE constraint
 * for concurrent inserts), then persists the definition.
 *
 * @param db    Kysely database handle
 * @param input name/label plus optional hierarchical flag and collection slugs
 * @returns The created taxonomy, or VALIDATION_ERROR / CONFLICT /
 *          TAXONOMY_CREATE_ERROR.
 */
export async function handleTaxonomyCreate(
  db: Kysely<Database>,
  input: { name: string; label: string; hierarchical?: boolean; collections?: string[] },
): Promise<ApiResult<{ taxonomy: TaxonomyDef }>> {
  try {
    // Validate name format
    if (!NAME_PATTERN.test(input.name)) {
      return {
        success: false,
        error: {
          code: "VALIDATION_ERROR",
          message:
            "Taxonomy name must start with a letter and contain only lowercase letters, numbers, and underscores",
        },
      };
    }

    // De-duplicate the requested collection links up front.
    const collections = [...new Set(input.collections ?? [])];

    // Validate that referenced collections exist
    if (collections.length > 0) {
      const existingCollections = await db
        .selectFrom("_emdash_collections")
        .select("slug")
        .where("slug", "in", collections)
        .execute();

      const existingSlugs = new Set(existingCollections.map((c) => c.slug));
      const invalid = collections.filter((c) => !existingSlugs.has(c));
      if (invalid.length > 0) {
        return {
          success: false,
          error: {
            code: "VALIDATION_ERROR",
            message: `Unknown collection(s): ${invalid.join(", ")}`,
          },
        };
      }
    }

    // Check for duplicate name
    const existing = await db
      .selectFrom("_emdash_taxonomy_defs")
      .selectAll()
      .where("name", "=", input.name)
      .executeTakeFirst();

    if (existing) {
      return {
        success: false,
        error: {
          code: "CONFLICT",
          message: `Taxonomy '${input.name}' already exists`,
        },
      };
    }

    const id = ulid();

    await db
      .insertInto("_emdash_taxonomy_defs")
      .values({
        id,
        name: input.name,
        label: input.label,
        label_singular: null,
        // Stored as 0/1 integer (SQLite has no boolean type).
        hierarchical: input.hierarchical ? 1 : 0,
        collections: JSON.stringify(collections),
      })
      .execute();

    return {
      success: true,
      data: {
        taxonomy: {
          id,
          name: input.name,
          label: input.label,
          hierarchical: input.hierarchical ?? false,
          collections,
        },
      },
    };
  } catch (error) {
    // Handle UNIQUE constraint violation from concurrent duplicate inserts
    if (error instanceof Error && error.message.includes("UNIQUE constraint failed")) {
      return {
        success: false,
        error: {
          code: "CONFLICT",
          message: `Taxonomy '${input.name}' already exists`,
        },
      };
    }
    return {
      success: false,
      error: { code: "TAXONOMY_CREATE_ERROR", message: "Failed to create taxonomy" },
    };
  }
}
|
||||
|
||||
/**
 * List all terms for a taxonomy (returns tree for hierarchical taxonomies).
 *
 * @param db           Kysely database handle
 * @param taxonomyName Name of the taxonomy whose terms to list
 * @returns Terms with per-term entry counts — nested as a tree when the
 *          taxonomy is hierarchical, flat otherwise — or NOT_FOUND /
 *          TERM_LIST_ERROR.
 */
export async function handleTermList(
  db: Kysely<Database>,
  taxonomyName: string,
): Promise<ApiResult<TermListResponse>> {
  try {
    const lookup = await requireTaxonomyDef(db, taxonomyName);
    if (!lookup.success) return lookup;

    const repo = new TaxonomyRepository(db);
    const terms = await repo.findByName(taxonomyName);

    // Batch count entries per term in a single query (replaces N+1 pattern)
    const termIds = terms.map((t) => t.id);
    const counts = await repo.countEntriesForTerms(termIds);

    const termData: TermWithCount[] = terms.map((term) => ({
      id: term.id,
      name: term.name,
      slug: term.slug,
      label: term.label,
      parentId: term.parentId,
      // `description` lives inside the term's free-form data blob.
      description: typeof term.data?.description === "string" ? term.data.description : undefined,
      children: [],
      count: counts.get(term.id) ?? 0,
    }));

    // hierarchical is stored as a 0/1 integer column.
    const isHierarchical = lookup.def.hierarchical === 1;
    const result = isHierarchical ? buildTree(termData) : termData;

    return { success: true, data: { terms: result } };
  } catch {
    return {
      success: false,
      error: { code: "TERM_LIST_ERROR", message: "Failed to list terms" },
    };
  }
}
|
||||
|
||||
/**
 * Validate a parent term reference for create/update.
 *
 * Returns `null` on success or a structured error message that callers
 * wrap in their own ApiResult.
 *
 * - `parentId === undefined` -> no-op (no parent change requested).
 * - `parentId === null` -> caller intends to detach; no-op here.
 * - parent must exist (FK exists -> term row not soft-deleted).
 * - parent must live in the same taxonomy.
 * - if `termId` is provided (update path), reject `parentId === termId`
 *   (self-parent) and walk up the parent chain to detect cycles.
 *
 * @param repo         Taxonomy repository for term lookups
 * @param taxonomyName Taxonomy the (prospective) child belongs to
 * @param termId       Id of the term being updated, or undefined on create
 * @param parentId     Proposed parent id (null/undefined = no parent)
 * @returns null when valid, else a VALIDATION_ERROR payload
 */
async function validateParentTerm(
  repo: TaxonomyRepository,
  taxonomyName: string,
  termId: string | undefined,
  parentId: string | null | undefined,
): Promise<{ code: "VALIDATION_ERROR"; message: string } | null> {
  if (parentId === undefined || parentId === null) return null;

  if (termId !== undefined && parentId === termId) {
    return {
      code: "VALIDATION_ERROR",
      message: "A term cannot be its own parent",
    };
  }

  const parent = await repo.findById(parentId);
  if (!parent) {
    return {
      code: "VALIDATION_ERROR",
      message: `Parent term '${parentId}' not found`,
    };
  }
  if (parent.name !== taxonomyName) {
    return {
      code: "VALIDATION_ERROR",
      message: `Parent term '${parentId}' belongs to taxonomy '${parent.name}', not '${taxonomyName}'`,
    };
  }

  // Walk up the parent chain. Two checks fold into one walk:
  // - Cycle detection (only on update — a non-existent term-being-
  //   created can't be its own ancestor): if the walk revisits termId
  //   the proposed parent makes the term a descendant of itself.
  // - Depth bound: refuse to extend a chain past MAX_DEPTH ancestors.
  //   Runs on both create and update so a malicious or buggy caller
  //   can't grow the tree without limit.
  //
  // The depth-exceeded error fires only when we hit the limit AND there
  // was still chain to walk — a legitimate chain of exactly MAX_DEPTH
  // ancestors exits with `cursor === null` and is accepted.
  const MAX_DEPTH = 100;
  let cursor: string | null = parent.parentId;
  let steps = 0;
  while (cursor !== null && steps < MAX_DEPTH) {
    if (termId !== undefined && cursor === termId) {
      return {
        code: "VALIDATION_ERROR",
        message: "Cycle detected: cannot make a descendant the parent",
      };
    }
    const next = await repo.findById(cursor);
    // Dangling parentId mid-chain: stop walking and accept what we saw.
    if (!next) break;
    cursor = next.parentId;
    steps++;
  }
  if (cursor !== null && steps >= MAX_DEPTH) {
    return {
      code: "VALIDATION_ERROR",
      message: "Parent chain exceeds maximum depth",
    };
  }

  return null;
}
|
||||
|
||||
/**
 * Create a new term in a taxonomy.
 *
 * Validates the taxonomy exists, rejects slug conflicts, validates the
 * optional parent (existence + same taxonomy), then persists the term
 * and invalidates the term cache.
 *
 * @param db           Kysely database handle
 * @param taxonomyName Taxonomy the new term belongs to
 * @param input        slug/label plus optional parentId and description
 * @returns The created term, or NOT_FOUND / CONFLICT / VALIDATION_ERROR /
 *          TERM_CREATE_ERROR.
 */
export async function handleTermCreate(
  db: Kysely<Database>,
  taxonomyName: string,
  input: { slug: string; label: string; parentId?: string | null; description?: string },
): Promise<ApiResult<TermResponse>> {
  try {
    const lookup = await requireTaxonomyDef(db, taxonomyName);
    if (!lookup.success) return lookup;

    const repo = new TaxonomyRepository(db);

    // Coerce empty-string parentId to undefined (treat as "no parent").
    const parentId =
      input.parentId === "" || input.parentId === undefined ? undefined : input.parentId;

    // Check for slug conflict
    const existing = await repo.findBySlug(taxonomyName, input.slug);
    if (existing) {
      return {
        success: false,
        error: {
          code: "CONFLICT",
          message: `Term with slug '${input.slug}' already exists in taxonomy '${taxonomyName}'`,
        },
      };
    }

    // Validate parentId: must exist AND belong to the same taxonomy.
    // (Cycle check is N/A on create — the term doesn't exist yet.)
    const parentError = await validateParentTerm(repo, taxonomyName, undefined, parentId);
    if (parentError) {
      return { success: false, error: parentError };
    }

    const term = await repo.create({
      name: taxonomyName,
      slug: input.slug,
      label: input.label,
      parentId: parentId ?? undefined,
      // Description is stored inside the term's free-form data blob.
      data: input.description ? { description: input.description } : undefined,
    });

    // New term means `hasAnyTermAssignments` may flip from false->true next
    // time an entry is tagged. Clear the cache so the next read re-probes.
    invalidateTermCache();

    return {
      success: true,
      data: {
        term: {
          id: term.id,
          name: term.name,
          slug: term.slug,
          label: term.label,
          parentId: term.parentId,
          description:
            typeof term.data?.description === "string" ? term.data.description : undefined,
        },
      },
    };
  } catch {
    return {
      success: false,
      error: { code: "TERM_CREATE_ERROR", message: "Failed to create term" },
    };
  }
}
|
||||
|
||||
/**
|
||||
* Get a single term by slug
|
||||
*/
|
||||
export async function handleTermGet(
|
||||
db: Kysely<Database>,
|
||||
taxonomyName: string,
|
||||
termSlug: string,
|
||||
): Promise<ApiResult<TermGetResponse>> {
|
||||
try {
|
||||
const repo = new TaxonomyRepository(db);
|
||||
const term = await repo.findBySlug(taxonomyName, termSlug);
|
||||
|
||||
if (!term) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "NOT_FOUND",
|
||||
message: `Term '${termSlug}' not found in taxonomy '${taxonomyName}'`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const count = await repo.countEntriesWithTerm(term.id);
|
||||
const children = await repo.findChildren(term.id);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: {
|
||||
term: {
|
||||
id: term.id,
|
||||
name: term.name,
|
||||
slug: term.slug,
|
||||
label: term.label,
|
||||
parentId: term.parentId,
|
||||
description:
|
||||
typeof term.data?.description === "string" ? term.data.description : undefined,
|
||||
count,
|
||||
children: children.map((c) => ({
|
||||
id: c.id,
|
||||
slug: c.slug,
|
||||
label: c.label,
|
||||
})),
|
||||
},
|
||||
},
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "TERM_GET_ERROR", message: "Failed to get term" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Update a term.
 *
 * @param db           Kysely database handle.
 * @param taxonomyName Taxonomy the term belongs to.
 * @param termSlug     Current slug used to locate the term.
 * @param input        Partial update; empty-string slug/parentId mean "no
 *                     change", `null` parentId means "detach from parent".
 * @returns Updated term on success; NOT_FOUND, CONFLICT, VALIDATION_ERROR,
 *          or TERM_UPDATE_ERROR on failure.
 */
export async function handleTermUpdate(
  db: Kysely<Database>,
  taxonomyName: string,
  termSlug: string,
  input: { slug?: string; label?: string; parentId?: string | null; description?: string },
): Promise<ApiResult<TermResponse>> {
  try {
    const repo = new TaxonomyRepository(db);
    const term = await repo.findBySlug(taxonomyName, termSlug);

    if (!term) {
      return {
        success: false,
        error: {
          code: "NOT_FOUND",
          message: `Term '${termSlug}' not found in taxonomy '${taxonomyName}'`,
        },
      };
    }

    // Coerce empty-string slug/parentId to undefined (treat as "no change").
    // `null` parentId is a valid request meaning "detach from parent".
    const newSlug = input.slug === "" || input.slug === undefined ? undefined : input.slug;
    const newParentId =
      input.parentId === "" || input.parentId === undefined ? undefined : input.parentId;

    // Check if new slug conflicts
    if (newSlug !== undefined && newSlug !== termSlug) {
      const existing = await repo.findBySlug(taxonomyName, newSlug);
      if (existing && existing.id !== term.id) {
        return {
          success: false,
          error: {
            code: "CONFLICT",
            message: `Term with slug '${newSlug}' already exists in taxonomy '${taxonomyName}'`,
          },
        };
      }
    }

    // Validate parentId: existence, same-taxonomy, no self-parent, no cycle.
    const parentError = await validateParentTerm(repo, taxonomyName, term.id, newParentId);
    if (parentError) {
      return { success: false, error: parentError };
    }

    // NOTE(review): when a description is provided, `data` is replaced with a
    // fresh one-key object — presumably `repo.update` merges or `data` only
    // ever holds `description`; confirm against the repository implementation.
    const updated = await repo.update(term.id, {
      slug: newSlug,
      label: input.label,
      parentId: newParentId,
      data: input.description !== undefined ? { description: input.description } : undefined,
    });

    // Term label/slug changes are reflected in hydrated entry.data.terms —
    // invalidate so the next read doesn't short-circuit on a stale probe.
    // (Invalidated even if `updated` is falsy below — a spurious extra
    // invalidation is harmless.)
    invalidateTermCache();

    if (!updated) {
      return {
        success: false,
        error: { code: "TERM_UPDATE_ERROR", message: "Failed to update term" },
      };
    }

    return {
      success: true,
      data: {
        term: {
          id: updated.id,
          name: updated.name,
          slug: updated.slug,
          label: updated.label,
          parentId: updated.parentId,
          description:
            typeof updated.data?.description === "string" ? updated.data.description : undefined,
        },
      },
    };
  } catch {
    return {
      success: false,
      error: { code: "TERM_UPDATE_ERROR", message: "Failed to update term" },
    };
  }
}
|
||||
|
||||
/**
|
||||
* Delete a term
|
||||
*/
|
||||
export async function handleTermDelete(
|
||||
db: Kysely<Database>,
|
||||
taxonomyName: string,
|
||||
termSlug: string,
|
||||
): Promise<ApiResult<{ deleted: true }>> {
|
||||
try {
|
||||
const repo = new TaxonomyRepository(db);
|
||||
const term = await repo.findBySlug(taxonomyName, termSlug);
|
||||
|
||||
if (!term) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "NOT_FOUND",
|
||||
message: `Term '${termSlug}' not found in taxonomy '${taxonomyName}'`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// Prevent deletion of terms with children
|
||||
const children = await repo.findChildren(term.id);
|
||||
if (children.length > 0) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "VALIDATION_ERROR",
|
||||
message: "Cannot delete term with children. Delete children first.",
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const deleted = await repo.delete(term.id);
|
||||
if (!deleted) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "TERM_DELETE_ERROR", message: "Failed to delete term" },
|
||||
};
|
||||
}
|
||||
|
||||
// Deleting a term cascades to content_taxonomies; invalidate so
|
||||
// hydration no longer sees the stale assignments.
|
||||
invalidateTermCache();
|
||||
|
||||
return { success: true, data: { deleted: true } };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "TERM_DELETE_ERROR", message: "Failed to delete term" },
|
||||
};
|
||||
}
|
||||
}
|
||||
212
packages/core/src/api/handlers/validation.ts
Normal file
212
packages/core/src/api/handlers/validation.ts
Normal file
@@ -0,0 +1,212 @@
|
||||
/**
|
||||
* Field-level validation for content create / update.
|
||||
*
|
||||
* Wires the existing `generateZodSchema()` pipeline (`schema/zod-generator.ts`)
|
||||
* into the handler boundary so REST and MCP both get the same enforcement:
|
||||
*
|
||||
* - required fields must be present and non-empty
|
||||
* - select / multiSelect values must match the configured options
|
||||
* - reference fields must resolve to a real, non-trashed target
|
||||
*
|
||||
* Errors surface as `{ code: "VALIDATION_ERROR", message }` with all
|
||||
* offending fields listed in one message so callers can fix everything in
|
||||
* a single round trip.
|
||||
*/
|
||||
|
||||
import { sql, type Kysely } from "kysely";
|
||||
|
||||
import type { Database } from "../../database/types.js";
|
||||
import { validateIdentifier } from "../../database/validate.js";
|
||||
import { SchemaRegistry } from "../../schema/registry.js";
|
||||
import type { Field } from "../../schema/types.js";
|
||||
import { generateZodSchema } from "../../schema/zod-generator.js";
|
||||
import { chunks, SQL_BATCH_SIZE } from "../../utils/chunks.js";
|
||||
import { isMissingTableError } from "../../utils/db-errors.js";
|
||||
|
||||
type ValidationResult =
|
||||
| { ok: true }
|
||||
| { ok: false; error: { code: "VALIDATION_ERROR" | "COLLECTION_NOT_FOUND"; message: string } };
|
||||
|
||||
/** Treat `undefined`, `null`, and `""` as "not set". */
|
||||
function isMissing(value: unknown): boolean {
|
||||
return value === undefined || value === null || value === "";
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve the target collection slug for a reference field.
|
||||
*
|
||||
* Schema-defined reference fields (the static `reference()` factory in
|
||||
* `fields/reference.ts`) put the target in `options.collection`. The MCP
|
||||
* `schema_create_field` tool also puts it there. Tests and some admin paths
|
||||
* stash it inside `validation.collection` directly; we accept both.
|
||||
*/
|
||||
function getReferenceTargetCollection(field: Field): string | undefined {
|
||||
const fromOptions = field.options?.collection;
|
||||
if (typeof fromOptions === "string" && fromOptions.length > 0) return fromOptions;
|
||||
const validation = field.validation;
|
||||
if (validation && "collection" in validation) {
|
||||
const fromValidation: unknown = (validation as { collection?: unknown }).collection;
|
||||
if (typeof fromValidation === "string" && fromValidation.length > 0) return fromValidation;
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Format a Zod issue path into a human-readable field reference, e.g.
|
||||
* `tags`, `tags.1`, `image.alt`.
|
||||
*/
|
||||
function formatIssuePath(path: ReadonlyArray<PropertyKey>): string {
|
||||
if (path.length === 0) return "(root)";
|
||||
return path.map((seg) => String(seg)).join(".");
|
||||
}
|
||||
|
||||
/**
 * Validate `data` against the collection's field definitions.
 *
 * `partial: true` switches Zod into partial mode so updates can include
 * only the fields being changed without tripping required-field errors on
 * fields the caller didn't touch. Required fields that ARE present in
 * partial-mode data still get the empty-string check below.
 *
 * @param db         Kysely database handle.
 * @param collection Collection slug to validate against.
 * @param data       Candidate content payload (field slug -> value).
 * @param options    `partial` enables update-mode validation.
 * @returns `{ ok: true }` when clean; otherwise a single error whose message
 *          lists every offending field, `; `-joined, so callers can fix all
 *          issues in one round trip.
 */
export async function validateContentData(
  db: Kysely<Database>,
  collection: string,
  data: Record<string, unknown>,
  options: { partial?: boolean } = {},
): Promise<ValidationResult> {
  const registry = new SchemaRegistry(db);
  const collectionWithFields = await registry.getCollectionWithFields(collection);
  if (!collectionWithFields) {
    return {
      ok: false,
      error: {
        code: "COLLECTION_NOT_FOUND",
        message: `Collection '${collection}' not found`,
      },
    };
  }

  // Accumulates one human-readable line per problem; joined at the end.
  const issues: string[] = [];

  // Detect unknown keys explicitly so callers get a useful error rather
  // than silently dropped data. Leading-underscore keys (e.g. `_slug`,
  // `_rev`) are reserved for internal handler/runtime use and aren't real
  // fields; skip them.
  const knownFields = new Set(collectionWithFields.fields.map((f) => f.slug));
  for (const key of Object.keys(data)) {
    if (key.startsWith("_")) continue;
    if (!knownFields.has(key)) {
      issues.push(`${key}: unknown field on collection '${collection}'`);
    }
  }

  // Zod handles type, enum, length and missing-required (in non-partial
  // mode) checks. Empty-string handling for required string fields is
  // done as a separate pass below since Zod's `z.string()` accepts "".
  const baseSchema = generateZodSchema(collectionWithFields);
  const schema = options.partial ? baseSchema.partial() : baseSchema;
  const parsed = schema.safeParse(data);
  if (!parsed.success) {
    for (const issue of parsed.error.issues) {
      issues.push(`${formatIssuePath(issue.path)}: ${issue.message}`);
    }
  }

  // Empty-string-on-required check. In create mode (partial=false) Zod
  // already catches missing/null for required fields, but `z.string()`
  // happily accepts "". In update mode (partial=true) the field is only
  // checked if it's present in `data`.
  for (const field of collectionWithFields.fields) {
    if (!field.required) continue;
    const present = Object.hasOwn(data, field.slug);
    if (options.partial && !present) continue;
    if (data[field.slug] === "") {
      issues.push(`${field.slug}: required (empty value not allowed)`);
    }
  }

  // Reference target existence. Only check fields that:
  // - have a value (non-missing) in `data`
  // - have a resolvable target collection
  // - in partial mode: are present in `data`
  // Batch one IN-query per target collection to keep round-trips low.
  const refsByTarget = new Map<string, { field: string; id: string }[]>();
  for (const field of collectionWithFields.fields) {
    if (field.type !== "reference") continue;
    if (options.partial && !Object.hasOwn(data, field.slug)) continue;
    const value = data[field.slug];
    if (isMissing(value)) continue;
    if (typeof value !== "string") continue; // Zod will have flagged this already
    const target = getReferenceTargetCollection(field);
    if (!target) continue;
    const list = refsByTarget.get(target) ?? [];
    list.push({ field: field.slug, id: value });
    refsByTarget.set(target, list);
  }

  for (const [target, refs] of refsByTarget) {
    // Validate the target collection slug before interpolating into raw
    // SQL — defense-in-depth even though slugs are already validated at
    // schema-create time.
    try {
      validateIdentifier(target, "reference target collection");
    } catch {
      for (const ref of refs) {
        issues.push(`${ref.field}: invalid reference target collection '${target}'`);
      }
      continue;
    }

    // Dedupe before querying — the same id may be referenced by several fields.
    const ids = [...new Set(refs.map((r) => r.id))];
    const tableName = `ec_${target}`;

    // Chunk the IN clause to stay below D1's bind-parameter limit. One
    // reference per request is the common case today; chunking makes the
    // helper safe if a future multiSelect-of-references is added.
    const found = new Set<string>();
    let targetTableMissing = false;
    for (const idChunk of chunks(ids, SQL_BATCH_SIZE)) {
      try {
        const rows = await sql<{ id: string }>`
          SELECT id FROM ${sql.ref(tableName)}
          WHERE id IN (${sql.join(idChunk)})
            AND deleted_at IS NULL
        `.execute(db);
        for (const row of rows.rows) {
          found.add(row.id);
        }
      } catch (error) {
        // Missing table = the target collection table doesn't exist
        // (orphan reference). Treat all those references as missing.
        // Any other DB error (permissions, connection, syntax) must
        // propagate — silently dropping data integrity errors as
        // "not found" is exactly the bug F5 fixes.
        if (isMissingTableError(error)) {
          targetTableMissing = true;
          break;
        }
        throw error;
      }
    }
    if (targetTableMissing) {
      for (const ref of refs) {
        issues.push(`${ref.field}: target '${ref.id}' not found in collection '${target}'`);
      }
      continue;
    }
    for (const ref of refs) {
      if (!found.has(ref.id)) {
        issues.push(`${ref.field}: target '${ref.id}' not found in collection '${target}'`);
      }
    }
  }

  if (issues.length === 0) return { ok: true };
  return {
    ok: false,
    error: {
      code: "VALIDATION_ERROR",
      message: issues.join("; "),
    },
  };
}
|
||||
6
packages/core/src/api/index.ts
Normal file
6
packages/core/src/api/index.ts
Normal file
@@ -0,0 +1,6 @@
|
||||
export * from "./types.js";
|
||||
export * from "./handlers/index.js";
|
||||
export * from "./parse.js";
|
||||
export * from "./schemas/index.js";
|
||||
export * from "./error.js";
|
||||
export * from "./errors.js";
|
||||
34
packages/core/src/api/oauth/redirect-uri.ts
Normal file
34
packages/core/src/api/oauth/redirect-uri.ts
Normal file
@@ -0,0 +1,34 @@
|
||||
/**
|
||||
* Validate a redirect URI per OAuth 2.1 security requirements.
|
||||
*
|
||||
* Allows localhost / loopback redirect URIs over HTTP for native clients,
|
||||
* and any HTTPS URL for web-based flows.
|
||||
*/
|
||||
export function validateRedirectUri(uri: string): string | null {
|
||||
try {
|
||||
const url = new URL(uri);
|
||||
|
||||
// Reject protocol-relative URLs
|
||||
if (uri.startsWith("//")) {
|
||||
return "Protocol-relative redirect URIs are not allowed";
|
||||
}
|
||||
|
||||
// Allow localhost/loopback over HTTP (for desktop MCP clients)
|
||||
if (url.protocol === "http:") {
|
||||
const host = url.hostname;
|
||||
if (host === "127.0.0.1" || host === "localhost" || host === "[::1]") {
|
||||
return null;
|
||||
}
|
||||
return "HTTP redirect URIs are only allowed for localhost";
|
||||
}
|
||||
|
||||
// Allow HTTPS
|
||||
if (url.protocol === "https:") {
|
||||
return null;
|
||||
}
|
||||
|
||||
return `Unsupported redirect URI scheme: ${url.protocol}`;
|
||||
} catch {
|
||||
return "Invalid redirect URI";
|
||||
}
|
||||
}
|
||||
2379
packages/core/src/api/openapi/document.ts
Normal file
2379
packages/core/src/api/openapi/document.ts
Normal file
File diff suppressed because it is too large
Load Diff
1
packages/core/src/api/openapi/index.ts
Normal file
1
packages/core/src/api/openapi/index.ts
Normal file
@@ -0,0 +1 @@
|
||||
export { generateOpenApiDocument } from "./document.js";
|
||||
139
packages/core/src/api/parse.ts
Normal file
139
packages/core/src/api/parse.ts
Normal file
@@ -0,0 +1,139 @@
|
||||
/**
|
||||
* Request body and query parameter parsing with Zod validation.
|
||||
*
|
||||
* All API routes should use these utilities instead of `request.json() as T`
|
||||
* or raw `url.searchParams.get()` with manual coercion.
|
||||
*/
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { apiError } from "./error.js";
|
||||
|
||||
/** Maximum allowed JSON request body size (10 MB). */
|
||||
const MAX_BODY_SIZE = 10 * 1024 * 1024;
|
||||
|
||||
/**
|
||||
* Result of parsing: either the validated data or an error Response.
|
||||
* Routes should check `if (result instanceof Response) return result;`
|
||||
*/
|
||||
export type ParseResult<T> = T | Response;
|
||||
|
||||
/**
|
||||
* Parse and validate a JSON request body against a Zod schema.
|
||||
*
|
||||
* Returns the validated data on success, or a 400 Response on failure.
|
||||
* Replaces all `(await request.json()) as T` casts.
|
||||
*/
|
||||
export async function parseBody<T extends z.ZodType>(
|
||||
request: Request,
|
||||
schema: T,
|
||||
): Promise<ParseResult<z.infer<T>>> {
|
||||
// Best-effort size check via Content-Length (can be absent with chunked encoding)
|
||||
const contentLength = request.headers.get("Content-Length");
|
||||
if (contentLength && parseInt(contentLength, 10) > MAX_BODY_SIZE) {
|
||||
return apiError("PAYLOAD_TOO_LARGE", "Request body too large", 413);
|
||||
}
|
||||
|
||||
let raw: unknown;
|
||||
try {
|
||||
raw = await request.json();
|
||||
} catch {
|
||||
return apiError("INVALID_JSON", "Request body must be valid JSON", 400);
|
||||
}
|
||||
|
||||
return validate(schema, raw);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse and validate an optional JSON request body.
|
||||
*
|
||||
* Returns `defaultValue` if the body is empty, or the validated data if present.
|
||||
* For endpoints where the body is optional (e.g., preview-url, confirm).
|
||||
*/
|
||||
export async function parseOptionalBody<T extends z.ZodType>(
|
||||
request: Request,
|
||||
schema: T,
|
||||
defaultValue: z.infer<T>,
|
||||
): Promise<ParseResult<z.infer<T>>> {
|
||||
// Best-effort size check via Content-Length (can be absent with chunked encoding)
|
||||
const contentLength = request.headers.get("Content-Length");
|
||||
if (contentLength && parseInt(contentLength, 10) > MAX_BODY_SIZE) {
|
||||
return apiError("PAYLOAD_TOO_LARGE", "Request body too large", 413);
|
||||
}
|
||||
|
||||
let text: string;
|
||||
try {
|
||||
text = await request.text();
|
||||
} catch {
|
||||
return defaultValue;
|
||||
}
|
||||
|
||||
if (!text.trim()) {
|
||||
return defaultValue;
|
||||
}
|
||||
|
||||
let raw: unknown;
|
||||
try {
|
||||
raw = JSON.parse(text);
|
||||
} catch {
|
||||
return apiError("INVALID_JSON", "Request body must be valid JSON", 400);
|
||||
}
|
||||
|
||||
return validate(schema, raw);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse and validate URL search params against a Zod schema.
|
||||
*
|
||||
* Converts searchParams to a plain object before validation.
|
||||
* Zod coercion handles string -> number/boolean conversion.
|
||||
* Replaces manual `url.searchParams.get()` + `parseInt()` patterns.
|
||||
*/
|
||||
export function parseQuery<T extends z.ZodType>(url: URL, schema: T): ParseResult<z.infer<T>> {
|
||||
const raw: Record<string, string> = {};
|
||||
for (const [key, value] of url.searchParams) {
|
||||
raw[key] = value;
|
||||
}
|
||||
return validate(schema, raw);
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate raw data against a schema. Returns data or error Response.
|
||||
*/
|
||||
function validate<T extends z.ZodType>(schema: T, data: unknown): ParseResult<z.infer<T>> {
|
||||
const result = schema.safeParse(data);
|
||||
|
||||
if (result.success) {
|
||||
return result.data as z.infer<T>;
|
||||
}
|
||||
|
||||
// Format Zod errors into a readable structure
|
||||
const issues = result.error.issues.map((issue: z.ZodIssue) => ({
|
||||
path: issue.path.join("."),
|
||||
message: issue.message,
|
||||
}));
|
||||
|
||||
return Response.json(
|
||||
{
|
||||
error: {
|
||||
code: "VALIDATION_ERROR",
|
||||
message: "Invalid request data",
|
||||
details: { issues },
|
||||
},
|
||||
},
|
||||
{
|
||||
status: 400,
|
||||
headers: {
|
||||
"Cache-Control": "private, no-store",
|
||||
},
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
/**
 * Type guard to check if a ParseResult is an error Response.
 * Usage: `if (isParseError(result)) return result;`
 *
 * Sound as long as validated success values are never themselves `Response`
 * instances — presumably true for schema-parsed JSON data, but worth keeping
 * in mind if a schema ever produces one.
 */
export function isParseError<T>(result: ParseResult<T>): result is Response {
  return result instanceof Response;
}
|
||||
133
packages/core/src/api/public-url.ts
Normal file
133
packages/core/src/api/public-url.ts
Normal file
@@ -0,0 +1,133 @@
|
||||
/**
|
||||
* Public URL helpers for reverse-proxy deployments.
|
||||
*
|
||||
* Behind a TLS-terminating proxy the internal request URL
|
||||
* (`http://localhost:4321`) differs from the browser-facing origin
|
||||
* (`https://mysite.example.com`). These pure helpers resolve the
|
||||
* correct public origin from config, falling back to the request URL.
|
||||
*
|
||||
* Workers-safe: no Node.js imports.
|
||||
*/
|
||||
|
||||
/** Minimal config shape — avoids importing the full EmDashConfig type tree. */
|
||||
interface SiteUrlConfig {
|
||||
siteUrl?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve siteUrl from runtime environment variables.
|
||||
*
|
||||
* Uses process.env (not import.meta.env) because Vite statically replaces
|
||||
* import.meta.env at build time, baking out any env vars not present during
|
||||
* the build. Container deployments set env vars at runtime, so we must read
|
||||
* process.env which Vite leaves untouched.
|
||||
*
|
||||
* On Cloudflare Workers process.env is unavailable (returns undefined),
|
||||
* so the fallback chain continues to url.origin.
|
||||
*
|
||||
* Caches after first call.
|
||||
*/
|
||||
let _envSiteUrl: string | undefined | null = null;
|
||||
|
||||
/** @internal Reset cached env values — test-only. */
export function _resetEnvCache(): void {
  // Both caches use `null` as the "not yet resolved" sentinel; resetting to
  // null forces the next getEnvSiteUrl()/getEnvAllowedOrigins() call to
  // re-read process.env.
  _envSiteUrl = null;
  _envAllowedOrigins = null;
}
|
||||
|
||||
// Returns the origin configured via EMDASH_SITE_URL / SITE_URL, or undefined.
// Cache sentinel protocol: `null` = not resolved yet, `""` = resolved to
// "unset or invalid" (so bad input isn't re-parsed on every call).
function getEnvSiteUrl(): string | undefined {
  if (_envSiteUrl !== null) return _envSiteUrl || undefined;
  try {
    // process.env is available on Node.js; undefined on Workers
    // EMDASH_SITE_URL takes precedence over the generic SITE_URL.
    const value =
      (typeof process !== "undefined" && process.env?.EMDASH_SITE_URL) ||
      (typeof process !== "undefined" && process.env?.SITE_URL) ||
      "";
    if (value) {
      const parsed = new URL(value);
      if (parsed.protocol !== "http:" && parsed.protocol !== "https:") {
        // Non-http(s) scheme: remember "invalid" and report unset.
        _envSiteUrl = "";
        return undefined;
      }
      // Normalize to origin — drops any path/query the operator included.
      _envSiteUrl = parsed.origin;
    } else {
      _envSiteUrl = "";
    }
  } catch {
    // Unparseable URL (or process access threw): treat as unset.
    _envSiteUrl = "";
  }
  return _envSiteUrl || undefined;
}
|
||||
|
||||
/**
|
||||
* Return the public-facing origin for the site.
|
||||
*
|
||||
* Resolution order:
|
||||
* 1. `config.siteUrl` (set in astro.config.mjs, origin-normalized at startup)
|
||||
* 2. `EMDASH_SITE_URL` or `SITE_URL` env var (resolved at runtime for containers)
|
||||
* 3. `url.origin` (internal request URL — correct when no proxy)
|
||||
*
|
||||
* @param url The request URL (`new URL(request.url)` or `Astro.url`)
|
||||
* @param config The EmDash config (from `locals.emdash?.config`)
|
||||
* @returns Origin string, e.g. `"https://mysite.example.com"`
|
||||
*/
|
||||
export function getPublicOrigin(url: URL, config?: SiteUrlConfig): string {
|
||||
return config?.siteUrl || getEnvSiteUrl() || url.origin;
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve additional accepted passkey origins from runtime environment.
|
||||
*
|
||||
* Reads `EMDASH_ALLOWED_ORIGINS` (comma-separated list of origins) for
|
||||
* multi-origin deployments where the same RP is reachable under several
|
||||
* hostnames sharing the registrable parent domain (e.g. apex + preview).
|
||||
*
|
||||
* Each entry is parsed via `new URL()` and reduced to its `origin`. Unlike
|
||||
* `getEnvSiteUrl` (which silently falls back to `url.origin` on bad input),
|
||||
* this throws on any unparseable or non-http(s) entry — `EMDASH_ALLOWED_ORIGINS`
|
||||
* is an allowlist for passkey verification, so silently dropping a typo would
|
||||
* surface as "I can't authenticate on this origin" with no diagnostic. Fail
|
||||
* loud at first read.
|
||||
*
|
||||
* Uses `process.env` (Vite leaves it untouched at runtime). Result is cached
|
||||
* on success.
|
||||
*/
|
||||
let _envAllowedOrigins: string[] | null = null;
|
||||
|
||||
export function getEnvAllowedOrigins(): string[] {
|
||||
if (_envAllowedOrigins !== null) return _envAllowedOrigins;
|
||||
const raw = typeof process !== "undefined" ? process.env?.EMDASH_ALLOWED_ORIGINS || "" : "";
|
||||
const parsed: string[] = [];
|
||||
for (const entry of raw.split(",")) {
|
||||
const trimmed = entry.trim();
|
||||
if (!trimmed) continue;
|
||||
let u: URL;
|
||||
try {
|
||||
u = new URL(trimmed);
|
||||
} catch (e) {
|
||||
throw new Error(`EmDash config error in EMDASH_ALLOWED_ORIGINS: invalid URL: "${trimmed}"`, {
|
||||
cause: e,
|
||||
});
|
||||
}
|
||||
if (u.protocol !== "http:" && u.protocol !== "https:") {
|
||||
throw new Error(
|
||||
`EmDash config error in EMDASH_ALLOWED_ORIGINS: origin must be http or https: "${trimmed}" (got ${u.protocol})`,
|
||||
);
|
||||
}
|
||||
parsed.push(u.origin);
|
||||
}
|
||||
_envAllowedOrigins = parsed;
|
||||
return parsed;
|
||||
}
|
||||
|
||||
/**
|
||||
* Build a full public URL by appending a path to the public origin.
|
||||
*
|
||||
* @param url The request URL
|
||||
* @param config The EmDash config
|
||||
* @param path Path to append (must start with `/`)
|
||||
* @returns Full URL string, e.g. `"https://mysite.example.com/_emdash/admin/login"`
|
||||
*/
|
||||
export function getPublicUrl(url: URL, config: SiteUrlConfig | undefined, path: string): string {
|
||||
return `${getPublicOrigin(url, config)}${path}`;
|
||||
}
|
||||
14
packages/core/src/api/redirect.ts
Normal file
14
packages/core/src/api/redirect.ts
Normal file
@@ -0,0 +1,14 @@
|
||||
/**
|
||||
* Validate that a redirect URL is a safe local path.
|
||||
*
|
||||
* Rejects:
|
||||
* - Protocol-relative URLs (`//evil.com`)
|
||||
* - Backslash bypass (`/\evil.com` — browsers normalize `\` to `/` in Location headers)
|
||||
* - Absolute URLs (`https://evil.com`)
|
||||
* - Empty / nullish values
|
||||
*/
|
||||
export function isSafeRedirect(url: string | null | undefined): url is string {
|
||||
return (
|
||||
typeof url === "string" && url.startsWith("/") && !url.startsWith("//") && !url.includes("\\")
|
||||
);
|
||||
}
|
||||
67
packages/core/src/api/rev.ts
Normal file
67
packages/core/src/api/rev.ts
Normal file
@@ -0,0 +1,67 @@
|
||||
/**
|
||||
* Opaque _rev token generation and validation.
|
||||
*
|
||||
* Format: base64("version:updated_at")
|
||||
* Stateless — server decodes and checks both components.
|
||||
*
|
||||
* Rules:
|
||||
* - No _rev sent → blind write (backwards-compatible)
|
||||
* - _rev matches → write proceeds, new _rev returned
|
||||
* - _rev mismatch → 409 Conflict
|
||||
*/
|
||||
|
||||
import type { ContentItem } from "../database/repositories/types.js";
|
||||
import { encodeBase64, decodeBase64 } from "../utils/base64.js";
|
||||
|
||||
/**
|
||||
* Generate a _rev token from a content item's version and updatedAt.
|
||||
*/
|
||||
export function encodeRev(item: ContentItem): string {
|
||||
return encodeBase64(`${item.version}:${item.updatedAt}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Decode a _rev token into its components.
|
||||
* Returns null if the token is malformed.
|
||||
*/
|
||||
export function decodeRev(rev: string): { version: number; updatedAt: string } | null {
|
||||
try {
|
||||
const decoded = decodeBase64(rev);
|
||||
const colonIdx = decoded.indexOf(":");
|
||||
if (colonIdx === -1) return null;
|
||||
|
||||
const version = parseInt(decoded.slice(0, colonIdx), 10);
|
||||
const updatedAt = decoded.slice(colonIdx + 1);
|
||||
|
||||
if (isNaN(version) || !updatedAt) return null;
|
||||
return { version, updatedAt };
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate a _rev token against a content item.
|
||||
* Returns null if valid (or if no _rev provided), or an error message if invalid.
|
||||
*/
|
||||
export function validateRev(
|
||||
rev: string | undefined,
|
||||
item: ContentItem,
|
||||
): { valid: true } | { valid: false; message: string } {
|
||||
// No _rev = blind write (backwards-compatible)
|
||||
if (!rev) return { valid: true };
|
||||
|
||||
const decoded = decodeRev(rev);
|
||||
if (!decoded) {
|
||||
return { valid: false, message: "Malformed _rev token" };
|
||||
}
|
||||
|
||||
if (decoded.version !== item.version || decoded.updatedAt !== item.updatedAt) {
|
||||
return {
|
||||
valid: false,
|
||||
message: "Content has been modified since last read (version conflict)",
|
||||
};
|
||||
}
|
||||
|
||||
return { valid: true };
|
||||
}
|
||||
14
packages/core/src/api/route-utils.ts
Normal file
14
packages/core/src/api/route-utils.ts
Normal file
@@ -0,0 +1,14 @@
|
||||
/**
 * Public API route utilities for auth provider routes.
 *
 * This module re-exports the utilities that auth provider route handlers
 * need from core. Auth providers (plugins) import these via `emdash/api/route-utils`.
 */

// Response envelope / error helpers for route handlers.
export { apiError, apiSuccess, handleError } from "./error.js";
// Request body / query-string parsing with schema validation.
export { parseBody, parseQuery, isParseError } from "./parse.js";
export type { ParseResult } from "./parse.js";
// Marks first-run setup as finished.
export { finalizeSetup } from "./setup-complete.js";
// Key-value options store used by providers for their own settings.
export { OptionsRepository } from "../database/repositories/options.js";
// Per-provider persistent storage handle.
export { getAuthProviderStorage } from "./auth-storage.js";
// Canonical public origin for building absolute URLs (callbacks, emails).
export { getPublicOrigin } from "./public-url.js";
|
||||
119
packages/core/src/api/schemas/auth.ts
Normal file
119
packages/core/src/api/schemas/auth.ts
Normal file
@@ -0,0 +1,119 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { roleLevel } from "./common.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// WebAuthn credential schemas (matching @emdash-cms/auth/passkey types)
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Transports a browser may report for an authenticator (WebAuthn
// AuthenticatorTransport values).
const authenticatorTransport = z.enum(["usb", "nfc", "ble", "internal", "hybrid"]);

/** RegistrationResponse — sent by the browser after navigator.credentials.create() */
const registrationCredential = z.object({
  id: z.string(),
  rawId: z.string(),
  type: z.literal("public-key"),
  // Attestation payload; fields are base64url-encoded strings on the wire.
  response: z.object({
    clientDataJSON: z.string(),
    attestationObject: z.string(),
    transports: z.array(authenticatorTransport).optional(),
  }),
  authenticatorAttachment: z.enum(["platform", "cross-platform"]).optional(),
});

/** AuthenticationResponse — sent by the browser after navigator.credentials.get() */
const authenticationCredential = z.object({
  id: z.string(),
  rawId: z.string(),
  type: z.literal("public-key"),
  // Assertion payload; userHandle is only present for discoverable credentials.
  response: z.object({
    clientDataJSON: z.string(),
    authenticatorData: z.string(),
    signature: z.string(),
    userHandle: z.string().optional(),
  }),
  authenticatorAttachment: z.enum(["platform", "cross-platform"]).optional(),
});
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Auth: Input schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** Signup step 1: request a verification token for an email address. */
export const signupRequestBody = z
  .object({
    email: z.string().email(),
  })
  .meta({ id: "SignupRequestBody" });

/** Signup step 2: redeem the token and register the new passkey credential. */
export const signupCompleteBody = z
  .object({
    token: z.string().min(1),
    credential: registrationCredential,
    name: z.string().optional(),
  })
  .meta({ id: "SignupCompleteBody" });

/** Create an invite for an email, optionally pinning the invitee's role. */
export const inviteCreateBody = z
  .object({
    email: z.string().email(),
    role: roleLevel.optional(),
  })
  .meta({ id: "InviteCreateBody" });

/** Request WebAuthn registration options while redeeming an invite token. */
export const inviteRegisterOptionsBody = z
  .object({
    token: z.string().min(1),
    name: z.string().optional(),
  })
  .meta({ id: "InviteRegisterOptionsBody" });

/** Complete an invite: token plus the newly created passkey credential. */
export const inviteCompleteBody = z
  .object({
    token: z.string().min(1),
    credential: registrationCredential,
    name: z.string().optional(),
  })
  .meta({ id: "InviteCompleteBody" });

/** Request a magic sign-in link by email. */
export const magicLinkSendBody = z
  .object({
    email: z.string().email(),
  })
  .meta({ id: "MagicLinkSendBody" });

/** Request passkey authentication options; email narrows to one account. */
export const passkeyOptionsBody = z
  .object({
    email: z.string().email().optional(),
  })
  .meta({ id: "PasskeyOptionsBody" });

/** Verify a passkey authentication assertion. */
export const passkeyVerifyBody = z
  .object({
    credential: authenticationCredential,
  })
  .meta({ id: "PasskeyVerifyBody" });

/** Request registration options for adding a passkey to the current account. */
export const passkeyRegisterOptionsBody = z
  .object({
    name: z.string().optional(),
  })
  .meta({ id: "PasskeyRegisterOptionsBody" });

/** Verify a newly registered passkey for the current account. */
export const passkeyRegisterVerifyBody = z
  .object({
    credential: registrationCredential,
    name: z.string().optional(),
  })
  .meta({ id: "PasskeyRegisterVerifyBody" });

/** Rename an existing passkey. */
export const passkeyRenameBody = z
  .object({
    name: z.string().min(1),
  })
  .meta({ id: "PasskeyRenameBody" });

/** Generic action dispatch on the /auth/me endpoint; action is a free-form verb. */
export const authMeActionBody = z
  .object({
    action: z.string().min(1),
  })
  .meta({ id: "AuthMeActionBody" });
|
||||
85
packages/core/src/api/schemas/bylines.ts
Normal file
85
packages/core/src/api/schemas/bylines.ts
Normal file
@@ -0,0 +1,85 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { cursorPaginationQuery, httpUrl } from "./common.js";
|
||||
|
||||
/** Slug pattern: lowercase letters, digits, and hyphens; must start with a letter */
const bylineSlugPattern = /^[a-z][a-z0-9-]*$/;

/** A byline as returned by the API; timestamps are ISO strings. */
export const bylineSummarySchema = z
  .object({
    id: z.string(),
    slug: z.string(),
    displayName: z.string(),
    bio: z.string().nullable(),
    avatarMediaId: z.string().nullable(),
    websiteUrl: z.string().nullable(),
    userId: z.string().nullable(),
    isGuest: z.boolean(),
    createdAt: z.string(),
    updatedAt: z.string(),
  })
  .meta({ id: "BylineSummary" });

/** A byline attached to a content item, with ordering and an optional role. */
export const bylineCreditSchema = z
  .object({
    byline: bylineSummarySchema,
    sortOrder: z.number().int(),
    roleLabel: z.string().nullable(),
    source: z.enum(["explicit", "inferred"]).optional().meta({
      description: "Whether this credit was explicitly assigned or inferred from authorId",
    }),
  })
  .meta({ id: "BylineCredit" });

/** Input shape for assigning a byline to content (write side of a credit). */
export const contentBylineInputSchema = z
  .object({
    bylineId: z.string().min(1),
    roleLabel: z.string().nullish(),
  })
  .meta({ id: "ContentBylineInput" });

/** List query: cursor pagination plus byline-specific filters. */
export const bylinesListQuery = cursorPaginationQuery
  .extend({
    search: z.string().optional(),
    isGuest: z.coerce.boolean().optional(),
    userId: z.string().optional(),
  })
  .meta({ id: "BylinesListQuery" });

/** Create a byline; slug and displayName are required. */
export const bylineCreateBody = z
  .object({
    slug: z
      .string()
      .min(1)
      .regex(bylineSlugPattern, "Slug must contain only lowercase letters, digits, and hyphens"),
    displayName: z.string().min(1),
    bio: z.string().nullish(),
    avatarMediaId: z.string().nullish(),
    websiteUrl: httpUrl.nullish(),
    userId: z.string().nullish(),
    isGuest: z.boolean().optional(),
  })
  .meta({ id: "BylineCreateBody" });

/** Partial update; same field rules as create, but everything optional. */
export const bylineUpdateBody = z
  .object({
    slug: z
      .string()
      .min(1)
      .regex(bylineSlugPattern, "Slug must contain only lowercase letters, digits, and hyphens")
      .optional(),
    displayName: z.string().min(1).optional(),
    bio: z.string().nullish(),
    avatarMediaId: z.string().nullish(),
    websiteUrl: httpUrl.nullish(),
    userId: z.string().nullish(),
    isGuest: z.boolean().optional(),
  })
  .meta({ id: "BylineUpdateBody" });

/** Cursor-paginated byline list response. */
export const bylineListResponseSchema = z
  .object({
    items: z.array(bylineSummarySchema),
    nextCursor: z.string().optional(),
  })
  .meta({ id: "BylineListResponse" });
|
||||
117
packages/core/src/api/schemas/comments.ts
Normal file
117
packages/core/src/api/schemas/comments.ts
Normal file
@@ -0,0 +1,117 @@
|
||||
import { z } from "zod";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Comments: Input schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** Public comment submission; length caps guard against abuse. */
export const createCommentBody = z
  .object({
    authorName: z.string().min(1).max(100),
    authorEmail: z.string().email(),
    body: z.string().min(1).max(5000),
    parentId: z.string().optional(),
    /** Honeypot field — hidden in the form, filled only by bots */
    website_url: z.string().optional(),
  })
  .meta({ id: "CreateCommentBody" });

/** Moderation: set one comment's status. */
export const commentStatusBody = z
  .object({
    status: z.enum(["approved", "pending", "spam", "trash"]),
  })
  .meta({ id: "CommentStatusBody" });

/** Bulk moderation over up to 100 comments at a time. */
export const commentBulkBody = z
  .object({
    ids: z.array(z.string().min(1)).min(1).max(100),
    action: z.enum(["approve", "spam", "trash", "delete"]),
  })
  .meta({ id: "CommentBulkBody" });

/** Admin list query: status/collection/search filters plus cursor pagination. */
export const commentListQuery = z
  .object({
    status: z.enum(["pending", "approved", "spam", "trash"]).optional(),
    collection: z.string().optional(),
    search: z.string().optional(),
    limit: z.coerce.number().int().min(1).max(100).optional().default(50),
    cursor: z.string().max(2048).optional(),
  })
  .meta({ id: "CommentListQuery" });
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Comments: Response schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Moderation states a comment can be in.
const commentStatusValues = z.enum(["pending", "approved", "spam", "trash"]);

/**
 * Public-facing comment (no email/IP).
 *
 * `replies` is recursive in practice (each reply can have replies), but we
 * model it as a single level here to avoid circular type inference issues
 * with tsgo. OpenAPI consumers should treat replies as the same shape.
 */
export const publicCommentSchema: z.ZodObject<{
  id: z.ZodString;
  authorName: z.ZodString;
  isRegisteredUser: z.ZodBoolean;
  body: z.ZodString;
  parentId: z.ZodNullable<z.ZodString>;
  createdAt: z.ZodString;
  replies: z.ZodOptional<z.ZodArray<z.ZodAny>>;
}> = z
  .object({
    id: z.string(),
    authorName: z.string(),
    isRegisteredUser: z.boolean(),
    body: z.string(),
    parentId: z.string().nullable(),
    createdAt: z.string(),
    replies: z.array(z.any()).optional(),
  })
  .meta({ id: "PublicComment" });

/** Admin comment with full details */
export const commentSchema = z
  .object({
    id: z.string(),
    collection: z.string(),
    contentId: z.string(),
    authorName: z.string(),
    authorEmail: z.string(),
    body: z.string(),
    status: commentStatusValues,
    parentId: z.string().nullable(),
    // Hashed (not raw) IP; nullable when not captured.
    ipHash: z.string().nullable(),
    createdAt: z.string(),
    updatedAt: z.string(),
  })
  .meta({ id: "Comment" });

/** Public list response; includes total for rendering comment counts. */
export const publicCommentListResponseSchema = z
  .object({
    items: z.array(publicCommentSchema),
    nextCursor: z.string().optional(),
    total: z.number().int(),
  })
  .meta({ id: "PublicCommentListResponse" });

/** Admin list response, cursor-paginated. */
export const adminCommentListResponseSchema = z
  .object({
    items: z.array(commentSchema),
    nextCursor: z.string().optional(),
  })
  .meta({ id: "AdminCommentListResponse" });

/** Per-status counts for the moderation dashboard. */
export const commentCountsResponseSchema = z
  .object({
    pending: z.number().int(),
    approved: z.number().int(),
    spam: z.number().int(),
    trash: z.number().int(),
  })
  .meta({ id: "CommentCountsResponse" });

/** Result of a bulk moderation action: number of comments affected. */
export const commentBulkResponseSchema = z
  .object({ affected: z.number().int() })
  .meta({ id: "CommentBulkResponse" });
|
||||
89
packages/core/src/api/schemas/common.ts
Normal file
89
packages/core/src/api/schemas/common.ts
Normal file
@@ -0,0 +1,89 @@
|
||||
import { z } from "zod";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Role level
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** Valid role level values */
export const VALID_ROLE_LEVELS = new Set([10, 20, 30, 40, 50]);

/** Role level — coerces string/number to valid RoleLevel (10|20|30|40|50) */
export const roleLevel = z.coerce
  .number()
  .int()
  // Type predicate narrows the output type to the literal union.
  .refine((n): n is 10 | 20 | 30 | 40 | 50 => VALID_ROLE_LEVELS.has(n), {
    message: "Invalid role level. Must be 10, 20, 30, 40, or 50",
  });

// ---------------------------------------------------------------------------
// Pagination
// ---------------------------------------------------------------------------

/** Pagination query params — cursor-based */
export const cursorPaginationQuery = z
  .object({
    // 2048 cap bounds the size of the opaque cursor accepted from clients.
    cursor: z.string().max(2048).optional().meta({ description: "Opaque cursor for pagination" }),
    limit: z.coerce.number().int().min(1).max(100).optional().default(50).meta({
      description: "Maximum number of items to return (1-100, default 50)",
    }),
  })
  .meta({ id: "CursorPaginationQuery" });

/** Pagination query params — offset-based */
export const offsetPaginationQuery = z
  .object({
    limit: z.coerce.number().int().min(1).max(100).optional().default(50),
    offset: z.coerce.number().int().min(0).optional().default(0),
  })
  .meta({ id: "OffsetPaginationQuery" });

// ---------------------------------------------------------------------------
// Shared primitives
// ---------------------------------------------------------------------------

/** Slug pattern: lowercase letters, digits, underscores; starts with letter */
export const slugPattern = /^[a-z][a-z0-9_]*$/;

/** Matches http(s) scheme at start of URL */
const HTTP_SCHEME_RE = /^https?:\/\//i;

/** Validates that a URL string uses http or https scheme. Rejects javascript:/data: URI XSS vectors. */
export const httpUrl = z
  .string()
  .url()
  .refine((url) => HTTP_SCHEME_RE.test(url), "URL must use http or https");

/** BCP 47 locale code — language with optional script/region subtags (e.g. en, en-US, pt-BR, es-419, zh-Hant) */
export const localeCode = z
  .string()
  // Case-insensitive match, then normalized to lowercase for storage/lookup.
  .regex(/^[a-z]{2,3}(-[a-z0-9]{2,8})*$/i, "Invalid locale code")
  .transform((v) => v.toLowerCase());

// ---------------------------------------------------------------------------
// OpenAPI: Shared response schemas
// ---------------------------------------------------------------------------

/** Standard API error response */
export const apiErrorSchema = z
  .object({
    error: z.object({
      code: z.string().meta({ description: "Machine-readable error code", example: "NOT_FOUND" }),
      message: z.string().meta({ description: "Human-readable error message" }),
    }),
  })
  .meta({ id: "ApiError" });

/** Wrap a data schema in the standard success envelope: { data: T } */
export function successEnvelope<T extends z.ZodType>(dataSchema: T) {
  return z.object({ data: dataSchema });
}

/** Standard delete response */
export const deleteResponseSchema = z.object({ deleted: z.literal(true) }).meta({
  id: "DeleteResponse",
});

/** Standard count response */
export const countResponseSchema = z
  .object({ count: z.number().int().min(0) })
  .meta({ id: "CountResponse" });
|
||||
216
packages/core/src/api/schemas/content.ts
Normal file
216
packages/core/src/api/schemas/content.ts
Normal file
@@ -0,0 +1,216 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { bylineSummarySchema, bylineCreditSchema, contentBylineInputSchema } from "./bylines.js";
|
||||
import { cursorPaginationQuery, httpUrl, localeCode } from "./common.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Content: Input schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** SEO input — per-content meta fields */
export const contentSeoInput = z
  .object({
    title: z.string().max(200).nullish(),
    description: z.string().max(500).nullish(),
    image: z.string().nullish(),
    canonical: httpUrl.nullish(),
    noIndex: z.boolean().optional(),
  })
  .meta({ id: "ContentSeoInput" });

/** Content list query: cursor pagination plus status/order/locale filters. */
export const contentListQuery = cursorPaginationQuery
  .extend({
    status: z.string().optional(),
    orderBy: z.string().optional(),
    order: z.enum(["asc", "desc"]).optional(),
    locale: localeCode.optional(),
  })
  .meta({ id: "ContentListQuery" });

/** ISO 8601 datetime for `publishedAt` / `createdAt`. Routes gate writes behind `content:publish_any`. */
const contentDateOverride = z.iso
  .datetime({ offset: true, message: "must be an ISO 8601 datetime" })
  .nullish();

/** Create content; only "draft" may be set as status at creation time. */
export const contentCreateBody = z
  .object({
    data: z.record(z.string(), z.unknown()),
    slug: z.string().nullish(),
    status: z.enum(["draft"]).optional(),
    bylines: z.array(contentBylineInputSchema).optional(),
    locale: localeCode.optional(),
    translationOf: z.string().optional(),
    seo: contentSeoInput.optional(),
    publishedAt: contentDateOverride,
    createdAt: contentDateOverride,
  })
  .meta({ id: "ContentCreateBody" });

/** Update content; `_rev` enables optimistic-concurrency checks. */
export const contentUpdateBody = z
  .object({
    data: z.record(z.string(), z.unknown()).optional(),
    slug: z.string().nullish(),
    status: z.enum(["draft"]).optional(),
    authorId: z.string().nullish(),
    bylines: z.array(contentBylineInputSchema).optional(),
    _rev: z
      .string()
      .optional()
      .meta({ description: "Opaque revision token for optimistic concurrency" }),
    skipRevision: z.boolean().optional(),
    seo: contentSeoInput.optional(),
    publishedAt: contentDateOverride,
  })
  .meta({ id: "ContentUpdateBody" });

/** Schedule a future publish. */
export const contentScheduleBody = z
  .object({
    scheduledAt: z.string().min(1, "scheduledAt is required").meta({
      description: "ISO 8601 datetime for scheduled publishing",
      example: "2025-06-15T09:00:00Z",
    }),
  })
  .meta({ id: "ContentScheduleBody" });

/** Publish content, optionally backdating the publish timestamp. */
export const contentPublishBody = z
  .object({
    // .optional() rather than .nullish(): publishing has no semantic
    // meaning for `null` (you can't "clear" a publish timestamp by
    // publishing). Tightening the schema here means callers either
    // pass a valid datetime or omit the field, and the route doesn't
    // have to silently drop a null that snuck through.
    publishedAt: z.iso
      .datetime({ offset: true, message: "must be an ISO 8601 datetime" })
      .optional()
      .meta({
        description:
          "Optional ISO 8601 datetime to backdate the publish (e.g. when migrating content). Requires content:publish_any permission. Without this, existing published_at is preserved on re-publish.",
      }),
  })
  .meta({ id: "ContentPublishBody" });

/** Request a signed preview URL; expiresIn accepts a duration string or seconds. */
export const contentPreviewUrlBody = z
  .object({
    expiresIn: z.union([z.string(), z.number()]).optional(),
    pathPattern: z.string().optional(),
  })
  .meta({ id: "ContentPreviewUrlBody" });

/** Replace the set of taxonomy terms attached to a content item. */
export const contentTermsBody = z
  .object({
    termIds: z.array(z.string()),
  })
  .meta({ id: "ContentTermsBody" });

/** Trash listing uses plain cursor pagination with no extra filters. */
export const contentTrashQuery = cursorPaginationQuery;
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Content: Response schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** SEO metadata on a content item */
export const contentSeoSchema = z
  .object({
    title: z.string().nullable(),
    description: z.string().nullable(),
    image: z.string().nullable(),
    canonical: z.string().nullable(),
    noIndex: z.boolean(),
  })
  .meta({ id: "ContentSeo" });

/** A single content item as returned by the API */
export const contentItemSchema = z
  .object({
    id: z.string(),
    type: z.string().meta({ description: "Collection slug this item belongs to" }),
    slug: z.string().nullable(),
    status: z.string().meta({ description: "draft, published, or scheduled" }),
    data: z.record(z.string(), z.unknown()).meta({
      description: "User-defined field values",
    }),
    authorId: z.string().nullable(),
    primaryBylineId: z.string().nullable(),
    byline: bylineSummarySchema.nullable().optional(),
    bylines: z.array(bylineCreditSchema).optional(),
    createdAt: z.string(),
    updatedAt: z.string(),
    publishedAt: z.string().nullable(),
    scheduledAt: z.string().nullable(),
    liveRevisionId: z.string().nullable(),
    draftRevisionId: z.string().nullable(),
    version: z.number().int(),
    locale: z.string().nullable(),
    translationGroup: z.string().nullable(),
    seo: contentSeoSchema.optional(),
  })
  .meta({ id: "ContentItem" });

/** Response for single content item endpoints (get, create, update) */
export const contentResponseSchema = z
  .object({
    item: contentItemSchema,
    _rev: z
      .string()
      .optional()
      .meta({ description: "Opaque revision token for optimistic concurrency" }),
  })
  .meta({ id: "ContentResponse" });

/** Response for content list endpoints */
export const contentListResponseSchema = z
  .object({
    items: z.array(contentItemSchema),
    nextCursor: z.string().optional(),
  })
  .meta({ id: "ContentListResponse" });

/** Trashed content item */
export const trashedContentItemSchema = z
  .object({
    id: z.string(),
    type: z.string(),
    slug: z.string().nullable(),
    status: z.string(),
    data: z.record(z.string(), z.unknown()),
    authorId: z.string().nullable(),
    createdAt: z.string(),
    updatedAt: z.string(),
    publishedAt: z.string().nullable(),
    deletedAt: z.string(),
  })
  .meta({ id: "TrashedContentItem" });

/** Response for trashed content list */
export const trashedContentListResponseSchema = z
  .object({
    items: z.array(trashedContentItemSchema),
    nextCursor: z.string().optional(),
  })
  .meta({ id: "TrashedContentListResponse" });

/** Response for content compare (live vs draft) */
export const contentCompareResponseSchema = z
  .object({
    hasChanges: z.boolean(),
    live: z.record(z.string(), z.unknown()).nullable(),
    draft: z.record(z.string(), z.unknown()).nullable(),
  })
  .meta({ id: "ContentCompareResponse" });

/** Translation summary for a content item */
export const contentTranslationSchema = z.object({
  id: z.string(),
  locale: z.string().nullable(),
  slug: z.string().nullable(),
  status: z.string(),
  updatedAt: z.string(),
});

/** Response for content translations endpoint */
export const contentTranslationsResponseSchema = z
  .object({
    translationGroup: z.string(),
    translations: z.array(contentTranslationSchema),
  })
  .meta({ id: "ContentTranslationsResponse" });
|
||||
52
packages/core/src/api/schemas/import.ts
Normal file
52
packages/core/src/api/schemas/import.ts
Normal file
@@ -0,0 +1,52 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { httpUrl } from "./common.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Import
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** Probe a remote site before import; URL must be http(s). */
export const importProbeBody = z.object({
  url: httpUrl,
});

/** Analyze a WordPress site via its companion plugin (token-authenticated). */
export const wpPluginAnalyzeBody = z.object({
  url: httpUrl,
  token: z.string().min(1),
});

/** Execute a WP plugin import with a free-form config object. */
export const wpPluginExecuteBody = z.object({
  url: httpUrl,
  token: z.string().min(1),
  config: z.record(z.string(), z.unknown()),
});

/** Map WP post types to local collections, with optional field definitions. */
export const wpPrepareBody = z.object({
  postTypes: z.array(
    z.object({
      name: z.string().min(1),
      collection: z.string().min(1),
      fields: z
        .array(
          z.object({
            slug: z.string().min(1),
            label: z.string().min(1),
            type: z.string().min(1),
            required: z.boolean(),
            searchable: z.boolean().optional(),
          }),
        )
        .optional(),
    }),
  ),
});

/** Import WP media attachments; `stream` opts into streamed progress. */
export const wpMediaImportBody = z.object({
  attachments: z.array(z.record(z.string(), z.unknown())),
  stream: z.boolean().optional(),
});

/** Rewrite old→new URLs in imported content, optionally scoped to collections. */
export const wpRewriteUrlsBody = z.object({
  urlMap: z.record(z.string(), z.string()),
  collections: z.array(z.string()).optional(),
});
|
||||
17
packages/core/src/api/schemas/index.ts
Normal file
17
packages/core/src/api/schemas/index.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
// Barrel module: aggregates every API schema module into one import surface.
export * from "./common.js";
export * from "./content.js";
export * from "./media.js";
export * from "./schema.js";
export * from "./comments.js";
export * from "./auth.js";
export * from "./menus.js";
export * from "./taxonomies.js";
export * from "./sections.js";
export * from "./settings.js";
export * from "./search.js";
export * from "./import.js";
export * from "./setup.js";
export * from "./users.js";
export * from "./widgets.js";
export * from "./redirects.js";
export * from "./bylines.js";
|
||||
127
packages/core/src/api/schemas/media.ts
Normal file
127
packages/core/src/api/schemas/media.ts
Normal file
@@ -0,0 +1,127 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { cursorPaginationQuery } from "./common.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Media: Input schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** Media list query: cursor pagination plus MIME-type filter. */
export const mediaListQuery = cursorPaginationQuery
  .extend({
    mimeType: z.string().optional(),
  })
  .meta({ id: "MediaListQuery" });

/** Editable media metadata; dimensions must be positive integers. */
export const mediaUpdateBody = z
  .object({
    alt: z.string().optional(),
    caption: z.string().optional(),
    width: z.number().int().positive().optional(),
    height: z.number().int().positive().optional(),
  })
  .meta({ id: "MediaUpdateBody" });

/** Default maximum allowed file upload size (50 MB). */
export const DEFAULT_MAX_UPLOAD_SIZE = 50 * 1024 * 1024;
|
||||
|
||||
export function formatFileSize(bytes: number): string {
|
||||
if (bytes < 1024) return `${bytes}B`;
|
||||
if (bytes < 1024 * 1024) return `${Math.floor(bytes / 1024)}KB`;
|
||||
return `${Math.floor(bytes / 1024 / 1024)}MB`;
|
||||
}
|
||||
|
||||
export function mediaUploadUrlBody(maxSize: number) {
|
||||
if (!Number.isFinite(maxSize) || maxSize <= 0) {
|
||||
throw new Error(`EmDash: maxUploadSize must be a positive finite number, got ${maxSize}`);
|
||||
}
|
||||
return z
|
||||
.object({
|
||||
filename: z.string().min(1, "filename is required"),
|
||||
contentType: z.string().min(1, "contentType is required"),
|
||||
size: z
|
||||
.number()
|
||||
.int()
|
||||
.positive()
|
||||
.max(maxSize, `File size must not exceed ${formatFileSize(maxSize)}`),
|
||||
contentHash: z.string().optional(),
|
||||
})
|
||||
.meta({ id: "MediaUploadUrlBody" });
|
||||
}
|
||||
|
||||
/** Confirm a completed upload, optionally reporting final size/dimensions. */
export const mediaConfirmBody = z
  .object({
    size: z.number().int().positive().optional(),
    width: z.number().int().positive().optional(),
    height: z.number().int().positive().optional(),
  })
  .meta({ id: "MediaConfirmBody" });

/** Query for listing media from an external provider. */
export const mediaProviderListQuery = cursorPaginationQuery
  .extend({
    query: z.string().optional(),
    mimeType: z.string().optional(),
  })
  .meta({ id: "MediaProviderListQuery" });

// ---------------------------------------------------------------------------
// Media: Response schemas
// ---------------------------------------------------------------------------

// Upload lifecycle state of a media record.
const mediaStatusSchema = z.enum(["pending", "ready", "failed"]);

/** A media item as stored and returned by the API. */
export const mediaItemSchema = z
  .object({
    id: z.string(),
    filename: z.string(),
    mimeType: z.string(),
    size: z.number().nullable(),
    width: z.number().nullable(),
    height: z.number().nullable(),
    alt: z.string().nullable(),
    caption: z.string().nullable(),
    storageKey: z.string(),
    status: mediaStatusSchema,
    contentHash: z.string().nullable(),
    blurhash: z.string().nullable(),
    dominantColor: z.string().nullable(),
    createdAt: z.string(),
    authorId: z.string().nullable(),
  })
  .meta({ id: "MediaItem" });

/** Single media item envelope. */
export const mediaResponseSchema = z
  .object({ item: mediaItemSchema })
  .meta({ id: "MediaResponse" });

/** Cursor-paginated media list. */
export const mediaListResponseSchema = z
  .object({
    items: z.array(mediaItemSchema),
    nextCursor: z.string().optional(),
  })
  .meta({ id: "MediaListResponse" });

/** Pre-signed upload instructions: PUT to uploadUrl with the given headers. */
export const mediaUploadUrlResponseSchema = z
  .object({
    uploadUrl: z.string(),
    method: z.literal("PUT"),
    headers: z.record(z.string(), z.string()),
    mediaId: z.string(),
    storageKey: z.string(),
    expiresAt: z.string(),
  })
  .meta({ id: "MediaUploadUrlResponse" });

/** Dedupe short-circuit: the file already exists (matched by content hash). */
export const mediaExistingResponseSchema = z
  .object({
    existing: z.literal(true),
    mediaId: z.string(),
    storageKey: z.string(),
    url: z.string(),
  })
  .meta({ id: "MediaExistingResponse" });

/** Confirmed upload: the stored item plus its public URL. */
export const mediaConfirmResponseSchema = z
  .object({
    item: mediaItemSchema.extend({ url: z.string() }),
  })
  .meta({ id: "MediaConfirmResponse" });
|
||||
121
packages/core/src/api/schemas/menus.ts
Normal file
121
packages/core/src/api/schemas/menus.ts
Normal file
@@ -0,0 +1,121 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { isSafeHref } from "../../utils/url.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Menus: Input schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Menu item kind is a free-form, non-empty string (e.g. custom vs reference).
const menuItemType = z.string().min(1);

// Href that passed isSafeHref — blocks javascript:/data: style schemes.
const safeHref = z
  .string()
  .trim()
  .refine(
    isSafeHref,
    "URL must use http, https, mailto, tel, a relative path, or a fragment identifier",
  );

/** Create a menu: machine name plus display label. */
export const createMenuBody = z
  .object({
    name: z.string().min(1),
    label: z.string().min(1),
  })
  .meta({ id: "CreateMenuBody" });

/** Update a menu; only the label is editable. */
export const updateMenuBody = z
  .object({
    label: z.string().min(1).optional(),
  })
  .meta({ id: "UpdateMenuBody" });

/** Create a menu item; reference* fields point at content, customUrl is free. */
export const createMenuItemBody = z
  .object({
    type: menuItemType,
    label: z.string().min(1),
    referenceCollection: z.string().optional(),
    referenceId: z.string().optional(),
    customUrl: safeHref.optional(),
    target: z.string().optional(),
    titleAttr: z.string().optional(),
    cssClasses: z.string().optional(),
    parentId: z.string().optional(),
    sortOrder: z.number().int().min(0).optional(),
  })
  .meta({ id: "CreateMenuItemBody" });

/** Update a menu item; parentId null detaches it to the top level. */
export const updateMenuItemBody = z
  .object({
    label: z.string().min(1).optional(),
    customUrl: safeHref.optional(),
    target: z.string().optional(),
    titleAttr: z.string().optional(),
    cssClasses: z.string().optional(),
    parentId: z.string().nullish(),
    sortOrder: z.number().int().min(0).optional(),
  })
  .meta({ id: "UpdateMenuItemBody" });

/** Query param identifying the item to delete. */
export const menuItemDeleteQuery = z.object({
  id: z.string().min(1),
});

/** Query param identifying the item to update. */
export const menuItemUpdateQuery = z.object({
  id: z.string().min(1),
});

/** Bulk reorder: every item's new parent and position in one payload. */
export const reorderMenuItemsBody = z
  .object({
    items: z.array(
      z.object({
        id: z.string().min(1),
        parentId: z.string().nullable(),
        sortOrder: z.number().int().min(0),
      }),
    ),
  })
  .meta({ id: "ReorderMenuItemsBody" });

// ---------------------------------------------------------------------------
// Menus: Response schemas
// ---------------------------------------------------------------------------

// NOTE: response schemas use snake_case keys — presumably mirroring the
// database row shape rather than the camelCase input bodies.
export const menuSchema = z
  .object({
    id: z.string(),
    name: z.string(),
    label: z.string(),
    created_at: z.string(),
    updated_at: z.string(),
  })
  .meta({ id: "Menu" });

export const menuItemSchema = z
  .object({
    id: z.string(),
    menu_id: z.string(),
    parent_id: z.string().nullable(),
    sort_order: z.number().int(),
    type: z.string(),
    reference_collection: z.string().nullable(),
    reference_id: z.string().nullable(),
    custom_url: z.string().nullable(),
    label: z.string(),
    title_attr: z.string().nullable(),
    target: z.string().nullable(),
    css_classes: z.string().nullable(),
    created_at: z.string(),
  })
  .meta({ id: "MenuItem" });
|
||||
|
||||
export const menuListItemSchema = menuSchema
|
||||
.extend({
|
||||
itemCount: z.number().int(),
|
||||
})
|
||||
.meta({ id: "MenuListItem" });
|
||||
|
||||
export const menuWithItemsSchema = menuSchema
|
||||
.extend({
|
||||
items: z.array(menuItemSchema),
|
||||
})
|
||||
.meta({ id: "MenuWithItems" });
|
||||
156
packages/core/src/api/schemas/redirects.ts
Normal file
156
packages/core/src/api/schemas/redirects.ts
Normal file
@@ -0,0 +1,156 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { cursorPaginationQuery } from "./common.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Redirects: Input schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
const redirectType = z.coerce
|
||||
.number()
|
||||
.int()
|
||||
.refine((n) => [301, 302, 307, 308].includes(n), {
|
||||
message: "Redirect type must be 301, 302, 307, or 308",
|
||||
});
|
||||
|
||||
/** Matches CR or LF characters */
|
||||
const CRLF = /[\r\n]/;
|
||||
|
||||
/** Path must start with / and not be protocol-relative, contain no CRLF, and no path traversal */
|
||||
const urlPath = z
|
||||
.string()
|
||||
.min(1)
|
||||
.refine((s) => s.startsWith("/") && !s.startsWith("//"), {
|
||||
message: "Must be a path starting with / (no protocol-relative URLs)",
|
||||
})
|
||||
.refine((s) => !CRLF.test(s), {
|
||||
message: "URL must not contain newline characters",
|
||||
})
|
||||
.refine(
|
||||
(s) => {
|
||||
try {
|
||||
return !decodeURIComponent(s).split("/").includes("..");
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
},
|
||||
{ message: "URL must not contain path traversal segments" },
|
||||
);
|
||||
|
||||
export const createRedirectBody = z
|
||||
.object({
|
||||
source: urlPath,
|
||||
destination: urlPath,
|
||||
type: redirectType.optional().default(301),
|
||||
enabled: z.boolean().optional().default(true),
|
||||
groupName: z.string().nullish(),
|
||||
})
|
||||
.meta({ id: "CreateRedirectBody" });
|
||||
|
||||
export const updateRedirectBody = z
|
||||
.object({
|
||||
source: urlPath.optional(),
|
||||
destination: urlPath.optional(),
|
||||
type: redirectType.optional(),
|
||||
enabled: z.boolean().optional(),
|
||||
groupName: z.string().nullish(),
|
||||
})
|
||||
.refine((o) => Object.values(o).some((v) => v !== undefined), {
|
||||
message: "At least one field must be provided",
|
||||
})
|
||||
.meta({ id: "UpdateRedirectBody" });
|
||||
|
||||
export const redirectsListQuery = cursorPaginationQuery
|
||||
.extend({
|
||||
search: z.string().optional(),
|
||||
group: z.string().optional(),
|
||||
enabled: z
|
||||
.enum(["true", "false"])
|
||||
.transform((v) => v === "true")
|
||||
.optional(),
|
||||
auto: z
|
||||
.enum(["true", "false"])
|
||||
.transform((v) => v === "true")
|
||||
.optional(),
|
||||
})
|
||||
.meta({ id: "RedirectsListQuery" });
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// 404 Log: Input schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export const notFoundListQuery = cursorPaginationQuery
|
||||
.extend({
|
||||
search: z.string().optional(),
|
||||
})
|
||||
.meta({ id: "NotFoundListQuery" });
|
||||
|
||||
export const notFoundSummaryQuery = z.object({
|
||||
limit: z.coerce.number().int().min(1).max(100).optional().default(50),
|
||||
});
|
||||
|
||||
export const notFoundPruneBody = z
|
||||
.object({
|
||||
olderThan: z.string().datetime({ message: "olderThan must be an ISO 8601 datetime" }),
|
||||
})
|
||||
.meta({ id: "NotFoundPruneBody" });
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Redirects: Response schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export const redirectSchema = z
|
||||
.object({
|
||||
id: z.string(),
|
||||
source: z.string(),
|
||||
destination: z.string(),
|
||||
type: z.number().int(),
|
||||
isPattern: z.boolean(),
|
||||
enabled: z.boolean(),
|
||||
hits: z.number().int(),
|
||||
lastHitAt: z.string().nullable(),
|
||||
groupName: z.string().nullable(),
|
||||
auto: z.boolean(),
|
||||
createdAt: z.string(),
|
||||
updatedAt: z.string(),
|
||||
})
|
||||
.meta({ id: "Redirect" });
|
||||
|
||||
export const redirectListResponseSchema = z
|
||||
.object({
|
||||
items: z.array(redirectSchema),
|
||||
nextCursor: z.string().optional(),
|
||||
loopRedirectIds: z.array(z.string()).optional(),
|
||||
})
|
||||
.meta({ id: "RedirectListResponse" });
|
||||
|
||||
export const notFoundEntrySchema = z
|
||||
.object({
|
||||
id: z.string(),
|
||||
path: z.string(),
|
||||
referrer: z.string().nullable(),
|
||||
userAgent: z.string().nullable(),
|
||||
ip: z.string().nullable(),
|
||||
createdAt: z.string(),
|
||||
})
|
||||
.meta({ id: "NotFoundEntry" });
|
||||
|
||||
export const notFoundListResponseSchema = z
|
||||
.object({
|
||||
items: z.array(notFoundEntrySchema),
|
||||
nextCursor: z.string().optional(),
|
||||
})
|
||||
.meta({ id: "NotFoundListResponse" });
|
||||
|
||||
export const notFoundSummarySchema = z
|
||||
.object({
|
||||
path: z.string(),
|
||||
count: z.number().int(),
|
||||
lastSeen: z.string(),
|
||||
topReferrer: z.string().nullable(),
|
||||
})
|
||||
.meta({ id: "NotFoundSummary" });
|
||||
|
||||
export const notFoundSummaryResponseSchema = z
|
||||
.object({ items: z.array(notFoundSummarySchema) })
|
||||
.meta({ id: "NotFoundSummaryResponse" });
|
||||
216
packages/core/src/api/schemas/schema.ts
Normal file
216
packages/core/src/api/schemas/schema.ts
Normal file
@@ -0,0 +1,216 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { slugPattern } from "./common.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Schema (collections & fields): Input schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
const collectionSupportValues = z.enum(["drafts", "revisions", "preview", "scheduling", "search"]);
|
||||
|
||||
const collectionSourcePattern = /^(template:.+|import:.+|manual|discovered|seed)$/;
|
||||
|
||||
const fieldTypeValues = z.enum([
|
||||
"string",
|
||||
"text",
|
||||
"url",
|
||||
"number",
|
||||
"integer",
|
||||
"boolean",
|
||||
"datetime",
|
||||
"select",
|
||||
"multiSelect",
|
||||
"portableText",
|
||||
"image",
|
||||
"file",
|
||||
"reference",
|
||||
"json",
|
||||
"slug",
|
||||
"repeater",
|
||||
]);
|
||||
|
||||
const repeaterSubFieldSchema = z.object({
|
||||
slug: z.string().min(1).max(63).regex(slugPattern, "Invalid slug format"),
|
||||
type: z.enum(["string", "text", "number", "integer", "boolean", "datetime", "select"]),
|
||||
label: z.string().min(1),
|
||||
required: z.boolean().optional(),
|
||||
options: z.array(z.string()).optional(),
|
||||
});
|
||||
|
||||
const fieldValidation = z
|
||||
.object({
|
||||
required: z.boolean().optional(),
|
||||
min: z.number().optional(),
|
||||
max: z.number().optional(),
|
||||
minLength: z.number().int().min(0).optional(),
|
||||
maxLength: z.number().int().min(0).optional(),
|
||||
pattern: z.string().optional(),
|
||||
options: z.array(z.string()).optional(),
|
||||
subFields: z.array(repeaterSubFieldSchema).min(1).optional(),
|
||||
minItems: z.number().int().min(0).optional(),
|
||||
maxItems: z.number().int().min(1).optional(),
|
||||
})
|
||||
.optional();
|
||||
|
||||
const fieldWidgetOptions = z.record(z.string(), z.unknown()).optional();
|
||||
|
||||
export const createCollectionBody = z
|
||||
.object({
|
||||
slug: z.string().min(1).max(63).regex(slugPattern, "Invalid slug format"),
|
||||
label: z.string().min(1),
|
||||
labelSingular: z.string().optional(),
|
||||
description: z.string().optional(),
|
||||
icon: z.string().optional(),
|
||||
supports: z.array(collectionSupportValues).optional(),
|
||||
source: z.string().regex(collectionSourcePattern).optional(),
|
||||
urlPattern: z.string().optional(),
|
||||
hasSeo: z.boolean().optional(),
|
||||
})
|
||||
.meta({ id: "CreateCollectionBody" });
|
||||
|
||||
export const updateCollectionBody = z
|
||||
.object({
|
||||
label: z.string().min(1).optional(),
|
||||
labelSingular: z.string().optional(),
|
||||
description: z.string().optional(),
|
||||
icon: z.string().optional(),
|
||||
supports: z.array(collectionSupportValues).optional(),
|
||||
urlPattern: z.string().nullish(),
|
||||
hasSeo: z.boolean().optional(),
|
||||
commentsEnabled: z.boolean().optional(),
|
||||
commentsModeration: z.enum(["all", "first_time", "none"]).optional(),
|
||||
commentsClosedAfterDays: z.number().int().min(0).optional(),
|
||||
commentsAutoApproveUsers: z.boolean().optional(),
|
||||
})
|
||||
.meta({ id: "UpdateCollectionBody" });
|
||||
|
||||
export const createFieldBody = z
|
||||
.object({
|
||||
slug: z.string().min(1).max(63).regex(slugPattern, "Invalid slug format"),
|
||||
label: z.string().min(1),
|
||||
type: fieldTypeValues,
|
||||
required: z.boolean().optional(),
|
||||
unique: z.boolean().optional(),
|
||||
defaultValue: z.unknown().optional(),
|
||||
validation: fieldValidation,
|
||||
widget: z.string().optional(),
|
||||
options: fieldWidgetOptions,
|
||||
sortOrder: z.number().int().min(0).optional(),
|
||||
searchable: z.boolean().optional(),
|
||||
translatable: z.boolean().optional(),
|
||||
})
|
||||
.meta({ id: "CreateFieldBody" });
|
||||
|
||||
export const updateFieldBody = z
|
||||
.object({
|
||||
label: z.string().min(1).optional(),
|
||||
required: z.boolean().optional(),
|
||||
unique: z.boolean().optional(),
|
||||
defaultValue: z.unknown().optional(),
|
||||
validation: fieldValidation,
|
||||
widget: z.string().optional(),
|
||||
options: fieldWidgetOptions,
|
||||
sortOrder: z.number().int().min(0).optional(),
|
||||
searchable: z.boolean().optional(),
|
||||
translatable: z.boolean().optional(),
|
||||
})
|
||||
.meta({ id: "UpdateFieldBody" });
|
||||
|
||||
export const fieldReorderBody = z
|
||||
.object({
|
||||
fieldSlugs: z.array(z.string().min(1)),
|
||||
})
|
||||
.meta({ id: "FieldReorderBody" });
|
||||
|
||||
export const orphanRegisterBody = z
|
||||
.object({
|
||||
label: z.string().optional(),
|
||||
labelSingular: z.string().optional(),
|
||||
description: z.string().optional(),
|
||||
})
|
||||
.meta({ id: "OrphanRegisterBody" });
|
||||
|
||||
export const schemaExportQuery = z.object({
|
||||
format: z.string().optional(),
|
||||
});
|
||||
|
||||
export const collectionGetQuery = z.object({
|
||||
includeFields: z
|
||||
.string()
|
||||
.transform((v) => v === "true")
|
||||
.optional(),
|
||||
});
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Schema: Response schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export const collectionSchema = z
|
||||
.object({
|
||||
id: z.string(),
|
||||
slug: z.string(),
|
||||
label: z.string(),
|
||||
labelSingular: z.string().nullable(),
|
||||
description: z.string().nullable(),
|
||||
icon: z.string().nullable(),
|
||||
supports: z.array(z.string()),
|
||||
source: z.string().nullable(),
|
||||
urlPattern: z.string().nullable(),
|
||||
hasSeo: z.boolean(),
|
||||
createdAt: z.string(),
|
||||
updatedAt: z.string(),
|
||||
})
|
||||
.meta({ id: "Collection" });
|
||||
|
||||
export const fieldSchema = z
|
||||
.object({
|
||||
id: z.string(),
|
||||
collectionId: z.string(),
|
||||
slug: z.string(),
|
||||
label: z.string(),
|
||||
type: fieldTypeValues,
|
||||
required: z.boolean(),
|
||||
unique: z.boolean(),
|
||||
defaultValue: z.unknown().nullable(),
|
||||
validation: z.record(z.string(), z.unknown()).nullable(),
|
||||
widget: z.string().nullable(),
|
||||
options: z.record(z.string(), z.unknown()).nullable(),
|
||||
sortOrder: z.number().int(),
|
||||
searchable: z.boolean(),
|
||||
translatable: z.boolean(),
|
||||
createdAt: z.string(),
|
||||
updatedAt: z.string(),
|
||||
})
|
||||
.meta({ id: "Field" });
|
||||
|
||||
export const collectionResponseSchema = z
|
||||
.object({ item: collectionSchema })
|
||||
.meta({ id: "CollectionResponse" });
|
||||
|
||||
export const collectionWithFieldsResponseSchema = z
|
||||
.object({
|
||||
item: collectionSchema.extend({ fields: z.array(fieldSchema) }),
|
||||
})
|
||||
.meta({ id: "CollectionWithFieldsResponse" });
|
||||
|
||||
export const collectionListResponseSchema = z
|
||||
.object({ items: z.array(collectionSchema) })
|
||||
.meta({ id: "CollectionListResponse" });
|
||||
|
||||
export const fieldResponseSchema = z.object({ item: fieldSchema }).meta({ id: "FieldResponse" });
|
||||
|
||||
export const fieldListResponseSchema = z
|
||||
.object({ items: z.array(fieldSchema) })
|
||||
.meta({ id: "FieldListResponse" });
|
||||
|
||||
export const orphanedTableSchema = z
|
||||
.object({
|
||||
slug: z.string(),
|
||||
tableName: z.string(),
|
||||
rowCount: z.number().int(),
|
||||
})
|
||||
.meta({ id: "OrphanedTable" });
|
||||
|
||||
export const orphanedTableListResponseSchema = z
|
||||
.object({ items: z.array(orphanedTableSchema) })
|
||||
.meta({ id: "OrphanedTableListResponse" });
|
||||
63
packages/core/src/api/schemas/search.ts
Normal file
63
packages/core/src/api/schemas/search.ts
Normal file
@@ -0,0 +1,63 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { localeCode } from "./common.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Search: Input schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export const searchQuery = z
|
||||
.object({
|
||||
q: z.string().min(1),
|
||||
collections: z.string().optional(),
|
||||
status: z.string().optional(),
|
||||
locale: localeCode.optional(),
|
||||
limit: z.coerce.number().int().min(1).max(100).optional(),
|
||||
})
|
||||
.meta({ id: "SearchQuery" });
|
||||
|
||||
export const searchSuggestQuery = z
|
||||
.object({
|
||||
q: z.string().min(1),
|
||||
collections: z.string().optional(),
|
||||
locale: localeCode.optional(),
|
||||
limit: z.coerce.number().int().min(1).max(20).optional(),
|
||||
})
|
||||
.meta({ id: "SearchSuggestQuery" });
|
||||
|
||||
export const searchRebuildBody = z
|
||||
.object({
|
||||
collection: z.string().min(1),
|
||||
})
|
||||
.meta({ id: "SearchRebuildBody" });
|
||||
|
||||
export const searchEnableBody = z
|
||||
.object({
|
||||
collection: z.string().min(1),
|
||||
enabled: z.boolean(),
|
||||
weights: z.record(z.string(), z.number()).optional(),
|
||||
})
|
||||
.meta({ id: "SearchEnableBody" });
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Search: Response schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export const searchResultSchema = z
|
||||
.object({
|
||||
collection: z.string(),
|
||||
id: z.string(),
|
||||
slug: z.string().nullable(),
|
||||
locale: z.string(),
|
||||
title: z.string().optional(),
|
||||
snippet: z.string().optional(),
|
||||
score: z.number(),
|
||||
})
|
||||
.meta({ id: "SearchResult" });
|
||||
|
||||
export const searchResponseSchema = z
|
||||
.object({
|
||||
items: z.array(searchResultSchema),
|
||||
nextCursor: z.string().optional(),
|
||||
})
|
||||
.meta({ id: "SearchResponse" });
|
||||
67
packages/core/src/api/schemas/sections.ts
Normal file
67
packages/core/src/api/schemas/sections.ts
Normal file
@@ -0,0 +1,67 @@
|
||||
import { z } from "zod";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Sections: Input schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
const sectionSource = z.enum(["theme", "user", "import"]);
|
||||
|
||||
export const sectionsListQuery = z
|
||||
.object({
|
||||
source: sectionSource.optional(),
|
||||
search: z.string().optional(),
|
||||
limit: z.coerce.number().int().min(1).max(100).optional().default(50),
|
||||
cursor: z.string().max(2048).optional(),
|
||||
})
|
||||
.meta({ id: "SectionsListQuery" });
|
||||
|
||||
export const createSectionBody = z
|
||||
.object({
|
||||
slug: z.string().min(1),
|
||||
title: z.string().min(1),
|
||||
description: z.string().optional(),
|
||||
keywords: z.array(z.string()).optional(),
|
||||
content: z.array(z.record(z.string(), z.unknown())),
|
||||
previewMediaId: z.string().optional(),
|
||||
source: z.enum(["user", "import"]).optional(),
|
||||
themeId: z.string().optional(),
|
||||
})
|
||||
.meta({ id: "CreateSectionBody" });
|
||||
|
||||
export const updateSectionBody = z
|
||||
.object({
|
||||
slug: z.string().min(1).optional(),
|
||||
title: z.string().min(1).optional(),
|
||||
description: z.string().optional(),
|
||||
keywords: z.array(z.string()).optional(),
|
||||
content: z.array(z.record(z.string(), z.unknown())).optional(),
|
||||
previewMediaId: z.string().nullish(),
|
||||
})
|
||||
.meta({ id: "UpdateSectionBody" });
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Sections: Response schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export const sectionSchema = z
|
||||
.object({
|
||||
id: z.string(),
|
||||
slug: z.string(),
|
||||
title: z.string(),
|
||||
description: z.string().nullable(),
|
||||
keywords: z.array(z.string()).nullable(),
|
||||
content: z.array(z.record(z.string(), z.unknown())),
|
||||
previewMediaId: z.string().nullable(),
|
||||
source: z.string(),
|
||||
themeId: z.string().nullable(),
|
||||
createdAt: z.string(),
|
||||
updatedAt: z.string(),
|
||||
})
|
||||
.meta({ id: "Section" });
|
||||
|
||||
export const sectionListResponseSchema = z
|
||||
.object({
|
||||
items: z.array(sectionSchema),
|
||||
nextCursor: z.string().optional(),
|
||||
})
|
||||
.meta({ id: "SectionListResponse" });
|
||||
63
packages/core/src/api/schemas/settings.ts
Normal file
63
packages/core/src/api/schemas/settings.ts
Normal file
@@ -0,0 +1,63 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { httpUrl } from "./common.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Settings: Input schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
const mediaReference = z.object({
|
||||
mediaId: z.string(),
|
||||
alt: z.string().optional(),
|
||||
});
|
||||
|
||||
const socialSettings = z.object({
|
||||
twitter: z.string().optional(),
|
||||
github: z.string().optional(),
|
||||
facebook: z.string().optional(),
|
||||
instagram: z.string().optional(),
|
||||
linkedin: z.string().optional(),
|
||||
youtube: z.string().optional(),
|
||||
});
|
||||
|
||||
const seoSettings = z.object({
|
||||
titleSeparator: z.string().max(10).optional(),
|
||||
defaultOgImage: mediaReference.optional(),
|
||||
robotsTxt: z.string().max(5000).optional(),
|
||||
googleVerification: z.string().max(100).optional(),
|
||||
bingVerification: z.string().max(100).optional(),
|
||||
});
|
||||
|
||||
export const settingsUpdateBody = z
|
||||
.object({
|
||||
title: z.string().optional(),
|
||||
tagline: z.string().optional(),
|
||||
logo: mediaReference.optional(),
|
||||
favicon: mediaReference.optional(),
|
||||
url: z.union([httpUrl, z.literal("")]).optional(),
|
||||
postsPerPage: z.number().int().min(1).max(100).optional(),
|
||||
dateFormat: z.string().optional(),
|
||||
timezone: z.string().optional(),
|
||||
social: socialSettings.optional(),
|
||||
seo: seoSettings.optional(),
|
||||
})
|
||||
.meta({ id: "SettingsUpdateBody" });
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Settings: Response schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export const siteSettingsSchema = z
|
||||
.object({
|
||||
title: z.string().optional(),
|
||||
tagline: z.string().optional(),
|
||||
logo: mediaReference.optional(),
|
||||
favicon: mediaReference.optional(),
|
||||
url: z.string().optional(),
|
||||
postsPerPage: z.number().int().optional(),
|
||||
dateFormat: z.string().optional(),
|
||||
timezone: z.string().optional(),
|
||||
social: socialSettings.optional(),
|
||||
seo: seoSettings.optional(),
|
||||
})
|
||||
.meta({ id: "SiteSettings" });
|
||||
45
packages/core/src/api/schemas/setup.ts
Normal file
45
packages/core/src/api/schemas/setup.ts
Normal file
@@ -0,0 +1,45 @@
|
||||
import { z } from "zod";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Setup
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** Registration credential — duplicated reference for setup flow.
|
||||
* The canonical definition lives in auth.ts but setup needs it independently
|
||||
* because setup runs before auth is configured. */
|
||||
const authenticatorTransport = z.enum(["usb", "nfc", "ble", "internal", "hybrid"]);
|
||||
|
||||
const registrationCredential = z.object({
|
||||
id: z.string(),
|
||||
rawId: z.string(),
|
||||
type: z.literal("public-key"),
|
||||
response: z.object({
|
||||
clientDataJSON: z.string(),
|
||||
attestationObject: z.string(),
|
||||
transports: z.array(authenticatorTransport).optional(),
|
||||
}),
|
||||
authenticatorAttachment: z.enum(["platform", "cross-platform"]).optional(),
|
||||
});
|
||||
|
||||
export const setupBody = z.object({
|
||||
title: z.string().min(1),
|
||||
tagline: z.string().optional(),
|
||||
includeContent: z.boolean(),
|
||||
});
|
||||
|
||||
export const setupAdminBody = z.object({
|
||||
email: z.string().email(),
|
||||
name: z.string().optional(),
|
||||
});
|
||||
|
||||
export const setupAdminVerifyBody = z.object({
|
||||
credential: registrationCredential,
|
||||
});
|
||||
|
||||
export const atprotoLoginBody = z.object({
|
||||
handle: z.string().trim().min(1),
|
||||
});
|
||||
|
||||
export const setupAtprotoAdminBody = z.object({
|
||||
handle: z.string().trim().min(1),
|
||||
});
|
||||
113
packages/core/src/api/schemas/taxonomies.ts
Normal file
113
packages/core/src/api/schemas/taxonomies.ts
Normal file
@@ -0,0 +1,113 @@
|
||||
import { z } from "zod";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Taxonomy definitions: Input schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** Collection slug format: lowercase alphanumeric + underscores, starts with letter */
|
||||
const collectionSlugPattern = /^[a-z][a-z0-9_]*$/;
|
||||
|
||||
export const createTaxonomyDefBody = z
|
||||
.object({
|
||||
name: z
|
||||
.string()
|
||||
.min(1)
|
||||
.max(63)
|
||||
.regex(/^[a-z][a-z0-9_]*$/, "Name must be lowercase alphanumeric with underscores"),
|
||||
label: z.string().min(1).max(200),
|
||||
hierarchical: z.boolean().optional().default(false),
|
||||
collections: z
|
||||
.array(
|
||||
z.string().min(1).max(63).regex(collectionSlugPattern, "Invalid collection slug format"),
|
||||
)
|
||||
.max(100)
|
||||
.optional()
|
||||
.default([]),
|
||||
})
|
||||
.meta({ id: "CreateTaxonomyDefBody" });
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Taxonomy terms: Input schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export const createTermBody = z
|
||||
.object({
|
||||
slug: z.string().min(1),
|
||||
label: z.string().min(1),
|
||||
parentId: z.string().nullish(),
|
||||
description: z.string().optional(),
|
||||
})
|
||||
.meta({ id: "CreateTermBody" });
|
||||
|
||||
export const updateTermBody = z
|
||||
.object({
|
||||
slug: z.string().min(1).optional(),
|
||||
label: z.string().min(1).optional(),
|
||||
parentId: z.string().nullish(),
|
||||
description: z.string().optional(),
|
||||
})
|
||||
.meta({ id: "UpdateTermBody" });
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Taxonomies: Response schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export const taxonomyDefSchema = z
|
||||
.object({
|
||||
id: z.string(),
|
||||
name: z.string(),
|
||||
label: z.string(),
|
||||
labelSingular: z.string().optional(),
|
||||
hierarchical: z.boolean(),
|
||||
collections: z.array(z.string()),
|
||||
})
|
||||
.meta({ id: "TaxonomyDef" });
|
||||
|
||||
export const taxonomyListResponseSchema = z
|
||||
.object({ taxonomies: z.array(taxonomyDefSchema) })
|
||||
.meta({ id: "TaxonomyListResponse" });
|
||||
|
||||
export const termSchema = z
|
||||
.object({
|
||||
id: z.string(),
|
||||
name: z.string(),
|
||||
slug: z.string(),
|
||||
label: z.string(),
|
||||
parentId: z.string().nullable(),
|
||||
description: z.string().optional(),
|
||||
})
|
||||
.meta({ id: "Term" });
|
||||
|
||||
export const termWithCountSchema: z.ZodType = z
|
||||
.object({
|
||||
id: z.string(),
|
||||
name: z.string(),
|
||||
slug: z.string(),
|
||||
label: z.string(),
|
||||
parentId: z.string().nullable(),
|
||||
description: z.string().optional(),
|
||||
count: z.number().int(),
|
||||
children: z.array(z.lazy(() => termWithCountSchema)),
|
||||
})
|
||||
.meta({ id: "TermWithCount" });
|
||||
|
||||
export const termListResponseSchema = z
|
||||
.object({ terms: z.array(termWithCountSchema) })
|
||||
.meta({ id: "TermListResponse" });
|
||||
|
||||
export const termResponseSchema = z.object({ term: termSchema }).meta({ id: "TermResponse" });
|
||||
|
||||
export const termGetResponseSchema = z
|
||||
.object({
|
||||
term: termSchema.extend({
|
||||
count: z.number().int(),
|
||||
children: z.array(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
slug: z.string(),
|
||||
label: z.string(),
|
||||
}),
|
||||
),
|
||||
}),
|
||||
})
|
||||
.meta({ id: "TermGetResponse" });
|
||||
96
packages/core/src/api/schemas/users.ts
Normal file
96
packages/core/src/api/schemas/users.ts
Normal file
@@ -0,0 +1,96 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { roleLevel } from "./common.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Admin / Users: Input schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export const usersListQuery = z
|
||||
.object({
|
||||
search: z.string().optional(),
|
||||
role: z.string().optional(),
|
||||
cursor: z.string().max(2048).optional(),
|
||||
limit: z.coerce.number().int().min(1).max(100).optional().default(50),
|
||||
})
|
||||
.meta({ id: "UsersListQuery" });
|
||||
|
||||
export const userUpdateBody = z
|
||||
.object({
|
||||
name: z.string().optional(),
|
||||
email: z.string().email().optional(),
|
||||
role: roleLevel.optional(),
|
||||
})
|
||||
.meta({ id: "UserUpdateBody" });
|
||||
|
||||
export const allowedDomainCreateBody = z
|
||||
.object({
|
||||
domain: z.string().min(1),
|
||||
defaultRole: roleLevel,
|
||||
})
|
||||
.meta({ id: "AllowedDomainCreateBody" });
|
||||
|
||||
export const allowedDomainUpdateBody = z
|
||||
.object({
|
||||
enabled: z.boolean().optional(),
|
||||
defaultRole: roleLevel.optional(),
|
||||
})
|
||||
.meta({ id: "AllowedDomainUpdateBody" });
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Admin / Users: Response schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export const userSchema = z
|
||||
.object({
|
||||
id: z.string(),
|
||||
email: z.string(),
|
||||
name: z.string().nullable(),
|
||||
avatarUrl: z.string().nullable(),
|
||||
role: z.number().int(),
|
||||
emailVerified: z.boolean(),
|
||||
disabled: z.boolean(),
|
||||
createdAt: z.string(),
|
||||
updatedAt: z.string(),
|
||||
lastLogin: z.string().nullable(),
|
||||
credentialCount: z.number().int().optional(),
|
||||
oauthProviders: z.array(z.string()).optional(),
|
||||
})
|
||||
.meta({ id: "User" });
|
||||
|
||||
export const userListResponseSchema = z
|
||||
.object({
|
||||
items: z.array(userSchema),
|
||||
nextCursor: z.string().optional(),
|
||||
})
|
||||
.meta({ id: "UserListResponse" });
|
||||
|
||||
export const userDetailSchema = z
|
||||
.object({
|
||||
id: z.string(),
|
||||
email: z.string(),
|
||||
name: z.string().nullable(),
|
||||
avatarUrl: z.string().nullable(),
|
||||
role: z.number().int(),
|
||||
emailVerified: z.boolean(),
|
||||
disabled: z.boolean(),
|
||||
createdAt: z.string(),
|
||||
updatedAt: z.string(),
|
||||
lastLogin: z.string().nullable(),
|
||||
credentials: z.array(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
name: z.string().nullable(),
|
||||
deviceType: z.string().nullable(),
|
||||
createdAt: z.string(),
|
||||
lastUsedAt: z.string(),
|
||||
}),
|
||||
),
|
||||
oauthAccounts: z.array(
|
||||
z.object({
|
||||
provider: z.string(),
|
||||
createdAt: z.string(),
|
||||
}),
|
||||
),
|
||||
})
|
||||
.meta({ id: "UserDetail" });
|
||||
82
packages/core/src/api/schemas/widgets.ts
Normal file
82
packages/core/src/api/schemas/widgets.ts
Normal file
@@ -0,0 +1,82 @@
|
||||
import { z } from "zod";
|
||||
|
||||
// ---------------------------------------------------------------------------
// Widgets: Input schemas
// ---------------------------------------------------------------------------

// Discriminator for the three widget flavors: inline rich content, a
// reference to a named menu, or a plugin-provided component.
const widgetType = z.enum(["content", "menu", "component"]);

/** Request body for creating a widget area (a named slot widgets live in). */
export const createWidgetAreaBody = z
  .object({
    name: z.string().min(1),
    label: z.string().min(1),
    description: z.string().optional(),
  })
  .meta({ id: "CreateWidgetAreaBody" });

/**
 * Request body for creating a widget.
 *
 * NOTE(review): the type-specific fields (`content`, `menuName`,
 * `componentId`, `componentProps`) are optional regardless of `type` —
 * presumably cross-field validation happens in the handler; confirm.
 */
export const createWidgetBody = z
  .object({
    type: widgetType,
    title: z.string().optional(),
    content: z.array(z.record(z.string(), z.unknown())).optional(),
    menuName: z.string().optional(),
    componentId: z.string().optional(),
    componentProps: z.record(z.string(), z.unknown()).optional(),
  })
  .meta({ id: "CreateWidgetBody" });

/** Request body for updating a widget — the create shape with `type` optional. */
export const updateWidgetBody = z
  .object({
    type: widgetType.optional(),
    title: z.string().optional(),
    content: z.array(z.record(z.string(), z.unknown())).optional(),
    menuName: z.string().optional(),
    componentId: z.string().optional(),
    componentProps: z.record(z.string(), z.unknown()).optional(),
  })
  .meta({ id: "UpdateWidgetBody" });

/** Request body for reordering: all widget ids of an area in their new order. */
export const reorderWidgetsBody = z
  .object({
    widgetIds: z.array(z.string().min(1)),
  })
  .meta({ id: "ReorderWidgetsBody" });
|
||||
|
||||
// ---------------------------------------------------------------------------
// Widgets: Response schemas
// ---------------------------------------------------------------------------

/**
 * A widget area as returned by the API.
 *
 * NOTE(review): timestamps here are snake_case (`created_at`) while the
 * user schemas in this package use camelCase — looks like it mirrors the
 * raw row shape; confirm before changing either side.
 */
export const widgetAreaSchema = z
  .object({
    id: z.string(),
    name: z.string(),
    label: z.string(),
    description: z.string().nullable(),
    created_at: z.string(),
    updated_at: z.string(),
  })
  .meta({ id: "WidgetArea" });

/** A widget as returned by the API; mirrors the create-body shape plus `id`. */
export const widgetSchema = z
  .object({
    id: z.string(),
    type: widgetType,
    title: z.string().optional(),
    content: z.array(z.record(z.string(), z.unknown())).optional(),
    menuName: z.string().optional(),
    componentId: z.string().optional(),
    componentProps: z.record(z.string(), z.unknown()).optional(),
  })
  .meta({ id: "Widget" });

/** Widget area with its widgets inlined. */
export const widgetAreaWithWidgetsSchema = widgetAreaSchema
  .extend({
    widgets: z.array(widgetSchema),
  })
  .meta({ id: "WidgetAreaWithWidgets" });

/** Widget area with widgets plus a precomputed widget count. */
export const widgetAreaWithWidgetsAndCountSchema = widgetAreaWithWidgetsSchema
  .extend({
    widgetCount: z.number().int(),
  })
  .meta({ id: "WidgetAreaWithWidgetsAndCount" });
|
||||
40
packages/core/src/api/setup-complete.ts
Normal file
40
packages/core/src/api/setup-complete.ts
Normal file
@@ -0,0 +1,40 @@
|
||||
/**
|
||||
* Shared setup completion logic.
|
||||
*
|
||||
* Called by OAuth callbacks and the passkey verify step when the first user
|
||||
* is created during setup. Persists site title/tagline from setup state
|
||||
* and marks setup as complete.
|
||||
*/
|
||||
|
||||
import type { Kysely } from "kysely";
|
||||
|
||||
import { OptionsRepository } from "../database/repositories/options.js";
|
||||
import type { Database } from "../database/types.js";
|
||||
|
||||
/**
|
||||
* Finalize setup after the first admin user is created.
|
||||
*
|
||||
* Reads the setup_state option (written by the setup wizard's step 1),
|
||||
* persists site_title and site_tagline, then marks setup complete.
|
||||
*
|
||||
* Safe to call multiple times — checks setup_complete first and no-ops
|
||||
* if already done.
|
||||
*/
|
||||
export async function finalizeSetup(db: Kysely<Database>): Promise<void> {
|
||||
const options = new OptionsRepository(db);
|
||||
|
||||
const setupComplete = await options.get("emdash:setup_complete");
|
||||
if (setupComplete === true || setupComplete === "true") return;
|
||||
|
||||
// Persist site title/tagline from setup state (stored in step 1)
|
||||
const setupState = await options.get<Record<string, unknown>>("emdash:setup_state");
|
||||
if (setupState?.title && typeof setupState.title === "string") {
|
||||
await options.set("emdash:site_title", setupState.title);
|
||||
}
|
||||
if (setupState?.tagline && typeof setupState.tagline === "string") {
|
||||
await options.set("emdash:site_tagline", setupState.tagline);
|
||||
}
|
||||
|
||||
await options.set("emdash:setup_complete", true);
|
||||
await options.delete("emdash:setup_state");
|
||||
}
|
||||
25
packages/core/src/api/site-url.ts
Normal file
25
packages/core/src/api/site-url.ts
Normal file
@@ -0,0 +1,25 @@
|
||||
/**
|
||||
* Resolve the canonical site base URL for use in outbound links (emails, etc.).
|
||||
*
|
||||
* Uses the stored `emdash:site_url` (set during setup on the real domain)
|
||||
* so that Host header spoofing in later requests cannot redirect users to
|
||||
* attacker-controlled domains.
|
||||
*
|
||||
* Falls back to the request URL only if no stored value exists (pre-setup).
|
||||
*/
|
||||
|
||||
import type { Kysely } from "kysely";
|
||||
|
||||
import { OptionsRepository } from "../database/repositories/options.js";
|
||||
import type { Database } from "../database/types.js";
|
||||
|
||||
export async function getSiteBaseUrl(db: Kysely<Database>, request: Request): Promise<string> {
|
||||
const options = new OptionsRepository(db);
|
||||
const storedUrl = await options.get<string>("emdash:site_url");
|
||||
if (storedUrl) {
|
||||
return `${storedUrl}/_emdash`;
|
||||
}
|
||||
// Fallback: derive from request (only reached before setup completes)
|
||||
const url = new URL(request.url);
|
||||
return `${url.protocol}//${url.host}/_emdash`;
|
||||
}
|
||||
86
packages/core/src/api/types.ts
Normal file
86
packages/core/src/api/types.ts
Normal file
@@ -0,0 +1,86 @@
|
||||
/**
|
||||
* API types for EmDash REST endpoints
|
||||
*/
|
||||
|
||||
import type { ContentItem } from "../database/repositories/types.js";
|
||||
|
||||
/**
 * List response with cursor pagination
 */
export interface ListResponse<T> {
  items: T[];
  /** Opaque cursor for the next page; absent when this is the last page. */
  nextCursor?: string;
}

/**
 * Content API responses
 */
// NOTE(review): empty interface — `type ContentListResponse =
// ListResponse<ContentItem>` would be equivalent for consumers; kept as an
// interface in case callers rely on declaration merging.
export interface ContentListResponse extends ListResponse<ContentItem> {}

export interface ContentResponse {
  item: ContentItem;
  /** Opaque revision token for optimistic concurrency */
  _rev?: string;
}

/**
 * Manifest API response
 *
 * Describes collections and plugins so the admin UI can render editors.
 */
export interface ManifestResponse {
  version: string;
  /** Content hash of the manifest; lets clients cache and detect changes. */
  hash: string;
  collections: Record<
    string,
    {
      label: string;
      labelSingular: string;
      supports: string[];
      fields: Record<string, FieldDescriptor>;
    }
  >;
  plugins: Record<
    string,
    {
      adminPages?: Array<{ path: string; component: string }>;
      widgets?: string[];
    }
  >;
}

/** Describes a single editable field within a collection. */
export interface FieldDescriptor {
  kind: string;
  label?: string;
  required?: boolean;
  /**
   * For `select` / `multiSelect`: the list of enum choices.
   * For `json` fields driven by a plugin `widget`: arbitrary widget config.
   */
  options?: Array<{ value: string; label: string }> | Record<string, unknown>;
}

/**
 * Discriminated union for handler results.
 *
 * Handlers return `ApiResult<T>` -- either `{ success: true, data: T }` or
 * `{ success: false, error: { code, message } }`. The `success` literal
 * enables TypeScript narrowing on `.data`.
 *
 * The generic `E` parameter defaults to `string` and can be narrowed to a
 * specific error-code union (e.g. OAuth token-endpoint error codes).
 *
 * Use `unwrapResult()` from `error.ts` to convert to an HTTP Response.
 */
export type ApiResult<T, E extends string = string> =
  | { success: true; data: T }
  | {
      success: false;
      error: { code: E; message: string; details?: Record<string, unknown> };
    };

/**
 * API request context
 */
export interface ApiContext {
  /** Authenticated user id, when a session or token was presented. */
  userId?: string;
  /** Role of the authenticated user; absent for anonymous requests. */
  userRole?: string;
}
|
||||
27
packages/core/src/astro/index.ts
Normal file
27
packages/core/src/astro/index.ts
Normal file
@@ -0,0 +1,27 @@
|
||||
/**
 * emdash/astro
 *
 * Astro integration for EmDash CMS (build-time only)
 *
 * For runtime APIs (loader, query functions, dialects), import from "emdash" directly.
 * For Cloudflare-specific adapters (d1, r2, access), import from "@emdash-cms/cloudflare".
 */

// Locals types (for typing Astro.locals in API routes)
export type {
  EmDashHandlers,
  EmDashManifest,
  MediaItem,
  ContentItem,
  ManifestCollection,
} from "./types.js";

// Storage adapters (for integration config)
// Note: For R2 bindings, use `r2()` from `@emdash-cms/cloudflare`
export { local, s3 } from "./storage/index.js";
export type { StorageDescriptor, LocalStorageConfig, S3StorageConfig } from "./storage/index.js";

// Integration (build-time only - the emdash() function uses Node.js APIs)
// The default export is the emdash() integration factory.
export { default } from "./integration/index.js";
export { getStoredConfig } from "./integration/runtime.js";
export type { EmDashConfig, ResolvedPlugin } from "./integration/runtime.js";
|
||||
178
packages/core/src/astro/integration/font-provider.ts
Normal file
178
packages/core/src/astro/integration/font-provider.ts
Normal file
@@ -0,0 +1,178 @@
|
||||
/**
|
||||
* EmDash Noto Sans font provider
|
||||
*
|
||||
* A custom Astro font provider that wraps Google Fonts to resolve
|
||||
* multiple Noto Sans families (Latin, Arabic, JP, etc.) under a
|
||||
* single logical font entry. This lets all @font-face blocks share
|
||||
* the same font-family name, so the browser picks the right file
|
||||
* per character via unicode-range.
|
||||
*
|
||||
* Without this, registering "Noto Sans" and "Noto Sans Arabic" as
|
||||
* separate font entries on the same cssVariable triggers an Astro
|
||||
* warning and the last entry overwrites the first.
|
||||
*/
|
||||
|
||||
import { fontProviders } from "astro/config";
|
||||
|
||||
/**
 * All subset names used by Google Fonts CSS responses.
 * Passed when resolving extra script families so the unifont
 * provider doesn't filter out any faces.
 */
const ALL_GOOGLE_SUBSETS = [
  "arabic",
  "armenian",
  "bengali",
  "chinese-simplified",
  "chinese-traditional",
  "chinese-hongkong",
  "cyrillic",
  "cyrillic-ext",
  "devanagari",
  "ethiopic",
  // NOTE(review): "farsi" matches the script key in NOTO_SCRIPT_FAMILIES
  // rather than a documented Google Fonts subset name — confirm against a
  // live CSS response.
  "farsi",
  "georgian",
  "greek",
  "greek-ext",
  "gujarati",
  "gurmukhi",
  "hebrew",
  "japanese",
  "kannada",
  "khmer",
  "korean",
  "lao",
  "latin",
  "latin-ext",
  "malayalam",
  "math",
  "myanmar",
  "oriya",
  "sinhala",
  "symbols",
  "tamil",
  "telugu",
  "thai",
  "tibetan",
  "vietnamese",
];
|
||||
|
||||
/**
 * Known Noto Sans and Sans script families on Google Fonts.
 * Maps user-friendly script names to Google Fonts family names.
 */
const NOTO_SCRIPT_FAMILIES: Record<string, string> = {
  arabic: "Noto Sans Arabic",
  armenian: "Noto Sans Armenian",
  bengali: "Noto Sans Bengali",
  "chinese-simplified": "Noto Sans SC",
  "chinese-traditional": "Noto Sans TC",
  "chinese-hongkong": "Noto Sans HK",
  devanagari: "Noto Sans Devanagari",
  ethiopic: "Noto Sans Ethiopic",
  // NOTE(review): non-Noto family for Farsi — presumably intentional;
  // confirm before "normalizing" it to a Noto family.
  farsi: "Vazirmatn",
  georgian: "Noto Sans Georgian",
  gujarati: "Noto Sans Gujarati",
  gurmukhi: "Noto Sans Gurmukhi",
  hebrew: "Noto Sans Hebrew",
  japanese: "Noto Sans JP",
  kannada: "Noto Sans Kannada",
  khmer: "Noto Sans Khmer",
  korean: "Noto Sans KR",
  lao: "Noto Sans Lao",
  malayalam: "Noto Sans Malayalam",
  myanmar: "Noto Sans Myanmar",
  oriya: "Noto Sans Oriya",
  sinhala: "Noto Sans Sinhala",
  tamil: "Noto Sans Tamil",
  telugu: "Noto Sans Telugu",
  thai: "Noto Sans Thai",
  tibetan: "Noto Sans Tibetan",
};
|
||||
|
||||
/** Options for the notoSans() font provider. */
export interface NotoSansProviderOptions {
  /**
   * Additional Noto Sans script families to include.
   * Use script names like "arabic", "japanese", "chinese-simplified".
   *
   * @see {@link NOTO_SCRIPT_FAMILIES} for the full list of supported scripts.
   */
  scripts?: string[];
}

// Use ReturnType to get the provider type without importing it directly.
// The Astro FontProvider type is not part of the public API surface.
type GoogleProvider = ReturnType<typeof fontProviders.google>;
|
||||
|
||||
/**
 * Create a font provider that resolves Noto Sans plus additional
 * script-specific Noto families from Google Fonts, all under one
 * font-family name.
 *
 * @param options - optional extra scripts to resolve; unknown script names
 *   are warned about and skipped.
 * @returns a provider with the same shape as `fontProviders.google()`.
 */
export function notoSans(options?: NotoSansProviderOptions): GoogleProvider {
  // Create a single Google provider instance to share initialization
  const googleProvider = fontProviders.google();

  return {
    name: "emdash-noto",
    async init(context) {
      // Delegate setup to the wrapped Google provider (if it defines init).
      await googleProvider.init?.(context);
    },
    async resolveFont(resolveFontOptions) {
      // Resolve the base Noto Sans (Latin, Cyrillic, Greek, etc.)
      const base = await googleProvider.resolveFont(resolveFontOptions);
      const baseFonts = base?.fonts ?? [];

      // No extra scripts requested: return the base result untouched.
      if (!options?.scripts?.length) {
        return base;
      }

      // Collect subset names already covered by the base font so we
      // can filter out duplicate faces from extra script families.
      // e.g. Noto Sans Arabic includes latin/latin-ext faces that
      // would otherwise override the base Noto Sans latin faces.
      const baseSubsets = new Set(baseFonts.map((f) => f.meta?.subset).filter(Boolean));

      // Resolve additional script families
      const extraFonts = await Promise.all(
        options.scripts.map(async (script) => {
          const family = NOTO_SCRIPT_FAMILIES[script];
          if (!family) {
            // Silently skip subset names that are already covered
            // by the base Noto Sans font (latin, cyrillic, etc.)
            if (ALL_GOOGLE_SUBSETS.includes(script)) {
              return undefined;
            }
            console.warn(
              `[emdash] Unknown Noto Sans script "${script}". ` +
                `Available: ${Object.keys(NOTO_SCRIPT_FAMILIES).join(", ")}`,
            );
            return undefined;
          }
          return googleProvider.resolveFont({
            ...resolveFontOptions,
            familyName: family,
            // Pass all known subset names so the unifont provider
            // doesn't filter out any faces. Each script family
            // only returns faces for its own subsets anyway.
            subsets: ALL_GOOGLE_SUBSETS,
          });
        }),
      );

      // Merge, dropping faces from extra fonts that duplicate base subsets
      const extraFaces = extraFonts.flatMap((r) =>
        (r?.fonts ?? []).filter((f) => !f.meta?.subset || !baseSubsets.has(f.meta.subset)),
      );

      return {
        fonts: [...baseFonts, ...extraFaces],
      };
    },
  };
}
|
||||
|
||||
/** Get the list of available Noto Sans script names */
|
||||
export function getAvailableNotoScripts(): string[] {
|
||||
return Object.keys(NOTO_SCRIPT_FAMILIES);
|
||||
}
|
||||
414
packages/core/src/astro/integration/index.ts
Normal file
414
packages/core/src/astro/integration/index.ts
Normal file
@@ -0,0 +1,414 @@
|
||||
/**
|
||||
* EmDash Astro Integration
|
||||
*
|
||||
* This integration:
|
||||
* - Injects the admin shell route at /_emdash/admin/[...path].astro
|
||||
* - Sets up REST API endpoints under /_emdash/api/*
|
||||
* - Configures middleware to provide database and manifest
|
||||
*
|
||||
* NOTE: This file is for build-time only. Runtime utilities are in runtime.ts
|
||||
* to avoid bundling Node.js-only code into the production build.
|
||||
*/
|
||||
|
||||
import type { AstroIntegration, AstroIntegrationLogger } from "astro";
|
||||
|
||||
import { validateAllowedOrigins, validateOriginShape } from "../../auth/allowed-origins.js";
|
||||
import type { ResolvedPlugin } from "../../plugins/types.js";
|
||||
import { local } from "../storage/adapters.js";
|
||||
import { notoSans } from "./font-provider.js";
|
||||
import {
|
||||
injectCoreRoutes,
|
||||
injectBuiltinAuthRoutes,
|
||||
injectAuthProviderRoutes,
|
||||
injectMcpRoute,
|
||||
} from "./routes.js";
|
||||
import type { EmDashConfig, PluginDescriptor } from "./runtime.js";
|
||||
import { createViteConfig } from "./vite-config.js";
|
||||
|
||||
// Re-export runtime types and functions
|
||||
export type {
|
||||
EmDashConfig,
|
||||
PluginDescriptor,
|
||||
SandboxedPluginDescriptor,
|
||||
ResolvedPlugin,
|
||||
} from "./runtime.js";
|
||||
export { getStoredConfig } from "./runtime.js";
|
||||
|
||||
/** Default storage: Local filesystem in .emdash directory */
const DEFAULT_STORAGE = local({
  // Uploads land in a project-local hidden directory...
  directory: "./.emdash/uploads",
  // ...and are served back through the media file API route.
  baseUrl: "/_emdash/api/media/file",
});
|
||||
|
||||
// Terminal formatting
|
||||
const dim = (s: string) => `\x1b[2m${s}\x1b[22m`;
|
||||
const bold = (s: string) => `\x1b[1m${s}\x1b[22m`;
|
||||
const cyan = (s: string) => `\x1b[36m${s}\x1b[39m`;
|
||||
|
||||
/** Print the EmDash startup banner */
function printBanner(_logger: AstroIntegrationLogger): void {
  // Uses console.log directly (not the Astro logger) so the banner prints
  // without log-level/timestamp prefixes; the underscore-prefixed logger
  // parameter is kept to match the hook's call signature.
  const banner = `

  ${bold(cyan("— E M D A S H —"))}
  `;
  console.log(banner);
}
|
||||
|
||||
/** Print route injection summary */
function printRoutesSummary(_logger: AstroIntegrationLogger): void {
  // Uses console.log directly so the summary prints without log prefixes.
  console.log(`\n  ${dim("›")} Admin UI  ${cyan("/_emdash/admin")}`);
  console.log(`  ${dim("›")} API  ${cyan("/_emdash/api/*")}`);
  console.log("");
}
|
||||
|
||||
/**
 * Create the EmDash Astro integration
 *
 * Validates the user config (marketplace URL, siteUrl, allowedOrigins,
 * plugin descriptors), applies defaults, and returns the integration whose
 * hooks inject routes, middleware, fonts and the serialized config.
 *
 * @param config - user-supplied EmDash configuration (all fields optional).
 * @returns the AstroIntegration consumed by astro.config.
 */
export function emdash(config: EmDashConfig = {}): AstroIntegration {
  // Apply defaults
  const resolvedConfig: EmDashConfig = {
    ...config,
    storage: config.storage ?? DEFAULT_STORAGE,
  };

  // Validate marketplace URL
  if (resolvedConfig.marketplace) {
    const url = resolvedConfig.marketplace;
    try {
      const parsed = new URL(url);
      const isLocalhost = parsed.hostname === "localhost" || parsed.hostname === "127.0.0.1";
      if (parsed.protocol !== "https:" && !isLocalhost) {
        // Thrown inside try on purpose: it is not a TypeError, so the
        // catch below rethrows it unchanged.
        throw new Error(
          `Marketplace URL must use HTTPS (got ${parsed.protocol}). ` +
            `Only localhost URLs are allowed over HTTP.`,
        );
      }
    } catch (e) {
      // new URL() throws TypeError on unparseable input.
      if (e instanceof TypeError) {
        throw new Error(`Invalid marketplace URL: "${url}"`, { cause: e });
      }
      throw e;
    }
    if (!resolvedConfig.sandboxRunner) {
      throw new Error(
        "Marketplace requires `sandboxRunner` to be configured. " +
          "Marketplace plugins run in sandboxed V8 isolates.",
      );
    }
  }

  // Validate siteUrl if provided in astro.config.mjs.
  // Env-var fallback (EMDASH_SITE_URL / SITE_URL) is handled at runtime by
  // getPublicOrigin() in api/public-url.ts — NOT here — so Docker images built
  // without a domain can pick it up at container start via process.env.
  if (resolvedConfig.siteUrl) {
    const raw = resolvedConfig.siteUrl;
    try {
      const parsed = new URL(raw);
      if (parsed.protocol !== "http:" && parsed.protocol !== "https:") {
        throw new Error(`siteUrl must be http or https (got ${parsed.protocol})`);
      }
      // Always store origin-normalized value (no path) — security invariant L-1
      resolvedConfig.siteUrl = parsed.origin;
    } catch (e) {
      if (e instanceof TypeError) {
        throw new Error(`Invalid siteUrl: "${raw}"`, { cause: e });
      }
      throw e;
    }
  }

  // Validate config.allowedOrigins shape at startup (per-entry rules: parseable,
  // http(s), no trailing dots, no empty labels). The siteUrl-dependent rules
  // (Rule A: requires siteUrl; Rule B: must be a subdomain of siteUrl) are
  // deferred to runtime when config.siteUrl is absent — EMDASH_SITE_URL may
  // supply it post-build, just like the env-var fallback for siteUrl above.
  // When config.siteUrl IS present, run the full validator here for fail-fast.
  if (resolvedConfig.allowedOrigins?.length) {
    const tagged = resolvedConfig.allowedOrigins.map((origin) => ({
      origin,
      source: "config.allowedOrigins" as const,
    }));
    resolvedConfig.allowedOrigins = resolvedConfig.siteUrl
      ? validateAllowedOrigins(resolvedConfig.siteUrl, tagged)
      : validateOriginShape(tagged);
  }

  // Plugin descriptors from config
  const pluginDescriptors = resolvedConfig.plugins ?? [];
  const sandboxedDescriptors = resolvedConfig.sandboxed ?? [];

  // Validate all plugin descriptors
  for (const descriptor of [...pluginDescriptors, ...sandboxedDescriptors]) {
    // Standard-format plugins can't use features that require trusted mode
    if (descriptor.format === "standard") {
      if (descriptor.adminEntry) {
        throw new Error(
          `Plugin "${descriptor.id}" is standard format but declares adminEntry. ` +
            `Standard plugins use Block Kit for admin UI, not React components. ` +
            `Remove adminEntry or change format to "native".`,
        );
      }
      if (descriptor.componentsEntry) {
        throw new Error(
          `Plugin "${descriptor.id}" is standard format but declares componentsEntry. ` +
            `Portable Text block components require native format. ` +
            `Remove componentsEntry or change format to "native".`,
        );
      }
    }
  }

  // Validate: non-standard plugins cannot be placed in sandboxed: []
  for (const descriptor of sandboxedDescriptors) {
    if (descriptor.format !== "standard") {
      throw new Error(
        `Plugin "${descriptor.id}" uses the native format and cannot be placed in ` +
          `\`sandboxed: []\`. Native plugins can only run in \`plugins: []\`. ` +
          `To sandbox this plugin, convert it to the standard format.`,
      );
    }
  }

  // Resolved plugins (populated at build time by importing entrypoints)
  // NOTE(review): _resolvedPlugins is neither written nor read anywhere in
  // this function — confirm whether plugin resolution moved elsewhere
  // before removing it.
  let _resolvedPlugins: ResolvedPlugin[] = [];

  // Serialize config for virtual module (database/storage/auth - plugins handled separately)
  // i18n is populated in astro:config:setup from astroConfig.i18n
  const serializableConfig: Record<string, unknown> = {
    database: resolvedConfig.database,
    storage: resolvedConfig.storage,
    auth: resolvedConfig.auth,
    authProviders: resolvedConfig.authProviders,
    marketplace: resolvedConfig.marketplace,
    siteUrl: resolvedConfig.siteUrl,
    trustedProxyHeaders: resolvedConfig.trustedProxyHeaders,
    maxUploadSize: resolvedConfig.maxUploadSize,
    admin: resolvedConfig.admin,
  };

  // Determine auth mode for route injection
  // Check if auth is an AuthDescriptor (has entrypoint) indicating external auth
  const useExternalAuth = !!(resolvedConfig.auth && "entrypoint" in resolvedConfig.auth);

  return {
    name: "emdash",
    hooks: {
      "astro:config:setup": ({
        injectRoute,
        addMiddleware,
        logger,
        updateConfig,
        config: astroConfig,
        command,
      }) => {
        printBanner(logger);
        // Extract i18n config from Astro config
        // Astro locales can be strings OR { path, codes } objects — normalize to paths
        if (astroConfig.i18n) {
          const routing = astroConfig.i18n.routing;
          serializableConfig.i18n = {
            defaultLocale: astroConfig.i18n.defaultLocale,
            locales: astroConfig.i18n.locales.map((l) => (typeof l === "string" ? l : l.path)),
            fallback: astroConfig.i18n.fallback,
            prefixDefaultLocale:
              typeof routing === "object" ? (routing.prefixDefaultLocale ?? false) : false,
          };
        }

        // Disable Astro's built-in checkOrigin -- EmDash's own CSRF
        // layer (checkPublicCsrf in api/csrf.ts) handles origin
        // validation with dual-origin support: it accepts both the
        // internal origin AND the public origin from getPublicOrigin(),
        // which resolves siteUrl from config or env vars at runtime.
        // Astro's check can't do this because allowedDomains is baked
        // at build time, which breaks Docker deployments where the
        // domain is only known at container start via EMDASH_SITE_URL.
        //
        // When siteUrl is known at build time, also set allowedDomains
        // so Astro.url reflects the public origin (helps user template
        // code that reads Astro.url directly).
        const securityConfig: Record<string, unknown> = {
          checkOrigin: false,
          ...(resolvedConfig.siteUrl
            ? { allowedDomains: [{ hostname: new URL(resolvedConfig.siteUrl).hostname }] }
            : {}),
        };

        // Inject default Noto Sans font for the admin UI.
        // Uses the Astro Font API so fonts are downloaded at build time
        // and self-hosted (no runtime CDN requests).
        //
        // The admin CSS references var(--font-emdash) with a system font
        // fallback. Users can add extra script coverage (Arabic, CJK, etc.)
        // by passing fonts.scripts in the emdash() config. The custom
        // notoSans provider resolves all script families from Google Fonts
        // under a single font-family name, so they stack via unicode-range.
        const fontsConfig = resolvedConfig.fonts;
        const emdashFonts =
          fontsConfig === false
            ? []
            : [
                {
                  provider: notoSans({
                    scripts: fontsConfig?.scripts,
                  }),
                  name: "Noto Sans",
                  cssVariable: "--font-emdash",
                  weights: ["100 900" as const],
                  styles: ["normal" as const, "italic" as const],
                  subsets: [
                    "latin" as const,
                    "latin-ext" as const,
                    "cyrillic" as const,
                    "cyrillic-ext" as const,
                    "devanagari" as const,
                    "greek" as const,
                    "greek-ext" as const,
                    "vietnamese" as const,
                  ],
                  fallbacks: ["ui-sans-serif", "system-ui", "sans-serif"],
                },
              ];

        updateConfig({
          security: securityConfig,
          // fonts is a valid AstroConfig key but may not be in the
          // type definition for the minimum supported Astro version
          ...({ fonts: emdashFonts } as Record<string, unknown>),
          vite: createViteConfig(
            {
              serializableConfig,
              resolvedConfig,
              pluginDescriptors,
              astroConfig,
            },
            command,
          ),
        });

        // Inject all core routes
        injectCoreRoutes(injectRoute);

        // Inject routes from pluggable auth providers (authProviders config)
        if (resolvedConfig.authProviders?.length) {
          injectAuthProviderRoutes(injectRoute, resolvedConfig.authProviders);
        }

        // Inject passkey/oauth/magic-link routes unless transparent external auth is active
        if (!useExternalAuth) {
          injectBuiltinAuthRoutes(injectRoute);
        }

        // Inject MCP endpoint (always on — bearer-token-only, no cost if unused)
        if (resolvedConfig.mcp !== false) {
          injectMcpRoute(injectRoute);
        }

        // In playground mode, inject the playground middleware FIRST.
        // It sets up a per-session DO database in ALS before anything
        // else runs, so the runtime init middleware sees a real DB.
        if (resolvedConfig.playground) {
          addMiddleware({
            entrypoint: resolvedConfig.playground.middlewareEntrypoint,
            order: "pre",
          });
        }

        // Add middleware to provide database and manifest
        addMiddleware({
          entrypoint: "emdash/middleware",
          order: "pre",
        });

        // Add redirect middleware (runs after runtime init, before setup/auth)
        addMiddleware({
          entrypoint: "emdash/middleware/redirect",
          order: "pre",
        });

        // Skip setup and auth in playground mode -- the playground middleware
        // handles session creation and injects an anonymous admin user.
        if (!resolvedConfig.playground) {
          addMiddleware({
            entrypoint: "emdash/middleware/setup",
            order: "pre",
          });

          addMiddleware({
            entrypoint: "emdash/middleware/auth",
            order: "pre",
          });
        }

        // Add request context middleware (runs after auth, on ALL routes)
        // Sets up ALS-based context for query functions (edit mode, preview)
        addMiddleware({
          entrypoint: "emdash/middleware/request-context",
          order: "pre",
        });

        printRoutesSummary(logger);
      },
      "astro:server:setup": ({ server, logger }) => {
        // Generate types once the server is listening.
        // The endpoint returns the types content; we write the file here
        // (in Node) because workerd has no real filesystem access.
        server.httpServer?.once("listening", async () => {
          const { writeFile, readFile } = await import("node:fs/promises");
          const { resolve } = await import("node:path");

          const address = server.httpServer?.address();
          if (!address || typeof address === "string") return;

          const port = address.port;
          const typegenUrl = `http://localhost:${port}/_emdash/api/typegen`;
          const outputPath = resolve(process.cwd(), "emdash-env.d.ts");

          try {
            const response = await fetch(typegenUrl, {
              method: "POST",
              headers: { "Content-Type": "application/json" },
            });

            if (!response.ok) {
              const body = await response.text().catch(() => "");
              logger.warn(`Typegen failed: ${response.status} ${body.slice(0, 200)}`);
              return;
            }

            const { data: result } = (await response.json()) as {
              data: {
                types: string;
                hash: string;
                collections: number;
              };
            };

            // Only write if content changed
            let needsWrite = true;
            try {
              const existing = await readFile(outputPath, "utf-8");
              if (existing === result.types) needsWrite = false;
            } catch {
              // File doesn't exist yet
            }

            if (needsWrite) {
              await writeFile(outputPath, result.types, "utf-8");
              logger.info(`Generated emdash-env.d.ts (${result.collections} collections)`);
            }
          } catch (error) {
            // Best-effort: typegen failure must not kill the dev server.
            const msg = error instanceof Error ? error.message : String(error);
            logger.warn(`Typegen failed: ${msg}`);
          }
        });
      },
      "astro:build:done": ({ logger }) => {
        logger.info("Build complete");
      },
    },
  };
}
|
||||
|
||||
export default emdash;
|
||||
889
packages/core/src/astro/integration/routes.ts
Normal file
889
packages/core/src/astro/integration/routes.ts
Normal file
@@ -0,0 +1,889 @@
|
||||
/**
|
||||
* Route Injection
|
||||
*
|
||||
* Defines and injects all EmDash routes into the Astro application.
|
||||
*/
|
||||
|
||||
import { createRequire } from "node:module";
|
||||
import { dirname, resolve } from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
|
||||
/**
|
||||
* Resolve path to a route file in the package
|
||||
* Uses Node.js APIs - only call at build time
|
||||
*/
|
||||
function resolveRoute(route: string): string {
|
||||
// Lazy initialization to avoid running Node.js code at import time
|
||||
// This prevents issues when the module is bundled for Cloudflare Workers
|
||||
const require = createRequire(import.meta.url);
|
||||
const __dirname = dirname(fileURLToPath(import.meta.url));
|
||||
|
||||
try {
|
||||
// Try to resolve as package export
|
||||
return require.resolve(`emdash/routes/${route}`);
|
||||
} catch {
|
||||
// Fallback to relative path (for development)
|
||||
return resolve(__dirname, "../routes", route);
|
||||
}
|
||||
}
|
||||
|
||||
/** Route injection callback: receives a URL pattern and the module entrypoint that serves it. */
type InjectRoute = (route: { pattern: string; entrypoint: string }) => void;
|
||||
|
||||
/**
|
||||
* Injects all core EmDash routes.
|
||||
*/
|
||||
export function injectCoreRoutes(injectRoute: InjectRoute): void {
|
||||
// Inject admin shell route
|
||||
injectRoute({
|
||||
pattern: "/_emdash/admin/[...path]",
|
||||
entrypoint: resolveRoute("admin.astro"),
|
||||
});
|
||||
|
||||
// Inject API routes
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/manifest",
|
||||
entrypoint: resolveRoute("api/manifest.ts"),
|
||||
});
|
||||
|
||||
// Auth mode endpoint (public — used by the login page to pick the right UI)
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/auth/mode",
|
||||
entrypoint: resolveRoute("api/auth/mode.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/dashboard",
|
||||
entrypoint: resolveRoute("api/dashboard.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/content/[collection]",
|
||||
entrypoint: resolveRoute("api/content/[collection]/index.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/content/[collection]/[id]",
|
||||
entrypoint: resolveRoute("api/content/[collection]/[id].ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/content/[collection]/[id]/revisions",
|
||||
entrypoint: resolveRoute("api/content/[collection]/[id]/revisions.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/content/[collection]/[id]/preview-url",
|
||||
entrypoint: resolveRoute("api/content/[collection]/[id]/preview-url.ts"),
|
||||
});
|
||||
|
||||
// Trash/restore routes
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/content/[collection]/trash",
|
||||
entrypoint: resolveRoute("api/content/[collection]/trash.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/content/[collection]/[id]/restore",
|
||||
entrypoint: resolveRoute("api/content/[collection]/[id]/restore.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/content/[collection]/[id]/permanent",
|
||||
entrypoint: resolveRoute("api/content/[collection]/[id]/permanent.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/content/[collection]/[id]/duplicate",
|
||||
entrypoint: resolveRoute("api/content/[collection]/[id]/duplicate.ts"),
|
||||
});
|
||||
|
||||
// Publishing routes
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/content/[collection]/[id]/publish",
|
||||
entrypoint: resolveRoute("api/content/[collection]/[id]/publish.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/content/[collection]/[id]/unpublish",
|
||||
entrypoint: resolveRoute("api/content/[collection]/[id]/unpublish.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/content/[collection]/[id]/discard-draft",
|
||||
entrypoint: resolveRoute("api/content/[collection]/[id]/discard-draft.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/content/[collection]/[id]/compare",
|
||||
entrypoint: resolveRoute("api/content/[collection]/[id]/compare.ts"),
|
||||
});
|
||||
|
||||
// i18n translation routes
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/content/[collection]/[id]/translations",
|
||||
entrypoint: resolveRoute("api/content/[collection]/[id]/translations.ts"),
|
||||
});
|
||||
|
||||
// Scheduled publishing routes
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/content/[collection]/[id]/schedule",
|
||||
entrypoint: resolveRoute("api/content/[collection]/[id]/schedule.ts"),
|
||||
});
|
||||
|
||||
// Revision management routes (for restore, etc.)
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/revisions/[revisionId]",
|
||||
entrypoint: resolveRoute("api/revisions/[revisionId]/index.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/revisions/[revisionId]/restore",
|
||||
entrypoint: resolveRoute("api/revisions/[revisionId]/restore.ts"),
|
||||
});
|
||||
|
||||
// Media API routes
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/media",
|
||||
entrypoint: resolveRoute("api/media.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/media/upload-url",
|
||||
entrypoint: resolveRoute("api/media/upload-url.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/media/file/[...key]",
|
||||
entrypoint: resolveRoute("api/media/file/[...key].ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/media/[id]",
|
||||
entrypoint: resolveRoute("api/media/[id].ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/media/[id]/confirm",
|
||||
entrypoint: resolveRoute("api/media/[id]/confirm.ts"),
|
||||
});
|
||||
|
||||
// Media provider routes
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/media/providers",
|
||||
entrypoint: resolveRoute("api/media/providers/index.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/media/providers/[providerId]",
|
||||
entrypoint: resolveRoute("api/media/providers/[providerId]/index.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/media/providers/[providerId]/[itemId]",
|
||||
entrypoint: resolveRoute("api/media/providers/[providerId]/[itemId].ts"),
|
||||
});
|
||||
|
||||
// Import API routes
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/import/probe",
|
||||
entrypoint: resolveRoute("api/import/probe.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/import/wordpress/analyze",
|
||||
entrypoint: resolveRoute("api/import/wordpress/analyze.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/import/wordpress/prepare",
|
||||
entrypoint: resolveRoute("api/import/wordpress/prepare.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/import/wordpress/execute",
|
||||
entrypoint: resolveRoute("api/import/wordpress/execute.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/import/wordpress/media",
|
||||
entrypoint: resolveRoute("api/import/wordpress/media.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/import/wordpress/rewrite-urls",
|
||||
entrypoint: resolveRoute("api/import/wordpress/rewrite-urls.ts"),
|
||||
});
|
||||
|
||||
// WordPress Plugin (EmDash Exporter) direct import routes
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/import/wordpress-plugin/analyze",
|
||||
entrypoint: resolveRoute("api/import/wordpress-plugin/analyze.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/import/wordpress-plugin/execute",
|
||||
entrypoint: resolveRoute("api/import/wordpress-plugin/execute.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/import/wordpress-plugin/callback",
|
||||
entrypoint: resolveRoute("api/import/wordpress-plugin/callback.ts"),
|
||||
});
|
||||
|
||||
// Schema API routes
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/schema",
|
||||
entrypoint: resolveRoute("api/schema/index.ts"),
|
||||
});
|
||||
|
||||
// Typegen endpoint (dev-only)
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/typegen",
|
||||
entrypoint: resolveRoute("api/typegen.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/schema/collections",
|
||||
entrypoint: resolveRoute("api/schema/collections/index.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/schema/collections/[slug]",
|
||||
entrypoint: resolveRoute("api/schema/collections/[slug]/index.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/schema/collections/[slug]/fields",
|
||||
entrypoint: resolveRoute("api/schema/collections/[slug]/fields/index.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/schema/collections/[slug]/fields/reorder",
|
||||
entrypoint: resolveRoute("api/schema/collections/[slug]/fields/reorder.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/schema/collections/[slug]/fields/[fieldSlug]",
|
||||
entrypoint: resolveRoute("api/schema/collections/[slug]/fields/[fieldSlug].ts"),
|
||||
});
|
||||
|
||||
// Orphaned tables discovery
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/schema/orphans",
|
||||
entrypoint: resolveRoute("api/schema/orphans/index.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/schema/orphans/[slug]",
|
||||
entrypoint: resolveRoute("api/schema/orphans/[slug].ts"),
|
||||
});
|
||||
|
||||
// Site settings route
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/settings",
|
||||
entrypoint: resolveRoute("api/settings.ts"),
|
||||
});
|
||||
|
||||
// Email settings route
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/settings/email",
|
||||
entrypoint: resolveRoute("api/settings/email.ts"),
|
||||
});
|
||||
|
||||
// Snapshot route (for DO preview database population)
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/snapshot",
|
||||
entrypoint: resolveRoute("api/snapshot.ts"),
|
||||
});
|
||||
|
||||
// Taxonomy API routes
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/taxonomies",
|
||||
entrypoint: resolveRoute("api/taxonomies/index.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/taxonomies/[name]/terms",
|
||||
entrypoint: resolveRoute("api/taxonomies/[name]/terms/index.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/taxonomies/[name]/terms/[slug]",
|
||||
entrypoint: resolveRoute("api/taxonomies/[name]/terms/[slug].ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/content/[collection]/[id]/terms/[taxonomy]",
|
||||
entrypoint: resolveRoute("api/content/[collection]/[id]/terms/[taxonomy].ts"),
|
||||
});
|
||||
|
||||
// Plugin management routes (under /admin to avoid conflict with plugin API routes)
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/admin/plugins",
|
||||
entrypoint: resolveRoute("api/admin/plugins/index.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/admin/plugins/[id]",
|
||||
entrypoint: resolveRoute("api/admin/plugins/[id]/index.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/admin/plugins/[id]/enable",
|
||||
entrypoint: resolveRoute("api/admin/plugins/[id]/enable.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/admin/plugins/[id]/disable",
|
||||
entrypoint: resolveRoute("api/admin/plugins/[id]/disable.ts"),
|
||||
});
|
||||
|
||||
// Marketplace plugin routes
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/admin/plugins/marketplace",
|
||||
entrypoint: resolveRoute("api/admin/plugins/marketplace/index.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/admin/plugins/marketplace/[id]",
|
||||
entrypoint: resolveRoute("api/admin/plugins/marketplace/[id]/index.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/admin/plugins/marketplace/[id]/icon",
|
||||
entrypoint: resolveRoute("api/admin/plugins/marketplace/[id]/icon.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/admin/plugins/marketplace/[id]/install",
|
||||
entrypoint: resolveRoute("api/admin/plugins/marketplace/[id]/install.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/admin/plugins/[id]/update",
|
||||
entrypoint: resolveRoute("api/admin/plugins/[id]/update.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/admin/plugins/[id]/uninstall",
|
||||
entrypoint: resolveRoute("api/admin/plugins/[id]/uninstall.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/admin/plugins/updates",
|
||||
entrypoint: resolveRoute("api/admin/plugins/updates.ts"),
|
||||
});
|
||||
|
||||
// Exclusive hooks admin routes
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/admin/hooks/exclusive",
|
||||
entrypoint: resolveRoute("api/admin/hooks/exclusive/index.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/admin/hooks/exclusive/[hookName]",
|
||||
entrypoint: resolveRoute("api/admin/hooks/exclusive/[hookName].ts"),
|
||||
});
|
||||
|
||||
// Theme marketplace routes
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/admin/themes/marketplace",
|
||||
entrypoint: resolveRoute("api/admin/themes/marketplace/index.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/admin/themes/marketplace/[id]",
|
||||
entrypoint: resolveRoute("api/admin/themes/marketplace/[id]/index.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/admin/themes/marketplace/[id]/thumbnail",
|
||||
entrypoint: resolveRoute("api/admin/themes/marketplace/[id]/thumbnail.ts"),
|
||||
});
|
||||
|
||||
// Theme preview signing (local, not proxied)
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/themes/preview",
|
||||
entrypoint: resolveRoute("api/themes/preview.ts"),
|
||||
});
|
||||
|
||||
// User management routes
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/admin/users",
|
||||
entrypoint: resolveRoute("api/admin/users/index.ts"),
|
||||
});
|
||||
|
||||
// Bylines routes
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/admin/bylines",
|
||||
entrypoint: resolveRoute("api/admin/bylines/index.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/admin/bylines/[id]",
|
||||
entrypoint: resolveRoute("api/admin/bylines/[id]/index.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/admin/users/[id]",
|
||||
entrypoint: resolveRoute("api/admin/users/[id]/index.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/admin/users/[id]/disable",
|
||||
entrypoint: resolveRoute("api/admin/users/[id]/disable.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/admin/users/[id]/enable",
|
||||
entrypoint: resolveRoute("api/admin/users/[id]/enable.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/admin/users/[id]/send-recovery",
|
||||
entrypoint: resolveRoute("api/admin/users/[id]/send-recovery.ts"),
|
||||
});
|
||||
|
||||
// API token admin routes
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/admin/api-tokens",
|
||||
entrypoint: resolveRoute("api/admin/api-tokens/index.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/admin/api-tokens/[id]",
|
||||
entrypoint: resolveRoute("api/admin/api-tokens/[id].ts"),
|
||||
});
|
||||
|
||||
// OAuth client admin routes
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/admin/oauth-clients",
|
||||
entrypoint: resolveRoute("api/admin/oauth-clients/index.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/admin/oauth-clients/[id]",
|
||||
entrypoint: resolveRoute("api/admin/oauth-clients/[id].ts"),
|
||||
});
|
||||
|
||||
// OAuth Device Flow routes
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/oauth/device/code",
|
||||
entrypoint: resolveRoute("api/oauth/device/code.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/oauth/device/token",
|
||||
entrypoint: resolveRoute("api/oauth/device/token.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/oauth/device/authorize",
|
||||
entrypoint: resolveRoute("api/oauth/device/authorize.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/oauth/token/refresh",
|
||||
entrypoint: resolveRoute("api/oauth/token/refresh.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/oauth/token/revoke",
|
||||
entrypoint: resolveRoute("api/oauth/token/revoke.ts"),
|
||||
});
|
||||
|
||||
// Auth discovery endpoint
|
||||
injectRoute({
|
||||
pattern: "/_emdash/.well-known/auth",
|
||||
entrypoint: resolveRoute("api/well-known/auth.ts"),
|
||||
});
|
||||
|
||||
// OAuth 2.1 Authorization Code flow routes
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/oauth/token",
|
||||
entrypoint: resolveRoute("api/oauth/token.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/oauth/authorize",
|
||||
entrypoint: resolveRoute("api/oauth/authorize.ts"),
|
||||
});
|
||||
|
||||
// OAuth discovery endpoints (RFC 9728, RFC 8414)
|
||||
injectRoute({
|
||||
pattern: "/.well-known/oauth-protected-resource",
|
||||
entrypoint: resolveRoute("api/well-known/oauth-protected-resource.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/.well-known/oauth-authorization-server/_emdash",
|
||||
entrypoint: resolveRoute("api/well-known/oauth-authorization-server.ts"),
|
||||
});
|
||||
|
||||
// RFC 7591 Dynamic Client Registration
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/oauth/register",
|
||||
entrypoint: resolveRoute("api/oauth/register.ts"),
|
||||
});
|
||||
|
||||
// Plugin-defined API routes
|
||||
// All plugin routes are handled by a single catch-all handler
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/plugins/[pluginId]/[...path]",
|
||||
entrypoint: resolveRoute("api/plugins/[pluginId]/[...path].ts"),
|
||||
});
|
||||
|
||||
// Menu API routes
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/menus",
|
||||
entrypoint: resolveRoute("api/menus/index.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/menus/[name]",
|
||||
entrypoint: resolveRoute("api/menus/[name].ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/menus/[name]/items",
|
||||
entrypoint: resolveRoute("api/menus/[name]/items.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/menus/[name]/reorder",
|
||||
entrypoint: resolveRoute("api/menus/[name]/reorder.ts"),
|
||||
});
|
||||
|
||||
// Widget area routes
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/widget-areas",
|
||||
entrypoint: resolveRoute("api/widget-areas/index.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/widget-components",
|
||||
entrypoint: resolveRoute("api/widget-components.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/widget-areas/[name]",
|
||||
entrypoint: resolveRoute("api/widget-areas/[name].ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/widget-areas/[name]/widgets",
|
||||
entrypoint: resolveRoute("api/widget-areas/[name]/widgets.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/widget-areas/[name]/widgets/[id]",
|
||||
entrypoint: resolveRoute("api/widget-areas/[name]/widgets/[id].ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/widget-areas/[name]/reorder",
|
||||
entrypoint: resolveRoute("api/widget-areas/[name]/reorder.ts"),
|
||||
});
|
||||
|
||||
// Section routes
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/sections",
|
||||
entrypoint: resolveRoute("api/sections/index.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/sections/[slug]",
|
||||
entrypoint: resolveRoute("api/sections/[slug].ts"),
|
||||
});
|
||||
|
||||
// Redirect routes
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/redirects",
|
||||
entrypoint: resolveRoute("api/redirects/index.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/redirects/404s/summary",
|
||||
entrypoint: resolveRoute("api/redirects/404s/summary.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/redirects/404s",
|
||||
entrypoint: resolveRoute("api/redirects/404s/index.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/redirects/[id]",
|
||||
entrypoint: resolveRoute("api/redirects/[id].ts"),
|
||||
});
|
||||
|
||||
// Search routes
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/search",
|
||||
entrypoint: resolveRoute("api/search/index.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/search/suggest",
|
||||
entrypoint: resolveRoute("api/search/suggest.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/search/stats",
|
||||
entrypoint: resolveRoute("api/search/stats.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/search/rebuild",
|
||||
entrypoint: resolveRoute("api/search/rebuild.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/search/enable",
|
||||
entrypoint: resolveRoute("api/search/enable.ts"),
|
||||
});
|
||||
|
||||
// Comment routes (public)
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/comments/[collection]/[contentId]",
|
||||
entrypoint: resolveRoute("api/comments/[collection]/[contentId]/index.ts"),
|
||||
});
|
||||
|
||||
// Comment routes (admin)
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/admin/comments",
|
||||
entrypoint: resolveRoute("api/admin/comments/index.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/admin/comments/counts",
|
||||
entrypoint: resolveRoute("api/admin/comments/counts.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/admin/comments/bulk",
|
||||
entrypoint: resolveRoute("api/admin/comments/bulk.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/admin/comments/[id]/status",
|
||||
entrypoint: resolveRoute("api/admin/comments/[id]/status.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/admin/comments/[id]",
|
||||
entrypoint: resolveRoute("api/admin/comments/[id].ts"),
|
||||
});
|
||||
|
||||
// SEO routes (public, at site root)
|
||||
injectRoute({
|
||||
pattern: "/sitemap.xml",
|
||||
entrypoint: resolveRoute("sitemap.xml.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/sitemap-[collection].xml",
|
||||
entrypoint: resolveRoute("sitemap-[collection].xml.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/robots.txt",
|
||||
entrypoint: resolveRoute("robots.txt.ts"),
|
||||
});
|
||||
|
||||
// Setup wizard API routes
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/setup/status",
|
||||
entrypoint: resolveRoute("api/setup/status.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/setup",
|
||||
entrypoint: resolveRoute("api/setup/index.ts"),
|
||||
});
|
||||
|
||||
// Auth API routes
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/setup/admin",
|
||||
entrypoint: resolveRoute("api/setup/admin.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/setup/admin/verify",
|
||||
entrypoint: resolveRoute("api/setup/admin-verify.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/setup/dev-bypass",
|
||||
entrypoint: resolveRoute("api/setup/dev-bypass.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/setup/dev-reset",
|
||||
entrypoint: resolveRoute("api/setup/dev-reset.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/dev/emails",
|
||||
entrypoint: resolveRoute("api/dev/emails.ts"),
|
||||
});
|
||||
|
||||
// Current user endpoint (always available)
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/auth/me",
|
||||
entrypoint: resolveRoute("api/auth/me.ts"),
|
||||
});
|
||||
|
||||
// Logout is always available (though behavior differs by auth mode)
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/auth/logout",
|
||||
entrypoint: resolveRoute("api/auth/logout.ts"),
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Injects the MCP (Model Context Protocol) server route.
|
||||
* Only injected when `mcp: true` is set in the EmDash config.
|
||||
*/
|
||||
export function injectMcpRoute(injectRoute: InjectRoute): void {
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/mcp",
|
||||
entrypoint: resolveRoute("api/mcp.ts"),
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Injects routes from pluggable auth providers.
|
||||
*
|
||||
* Each provider declares the routes it needs in its `AuthProviderDescriptor.routes` array.
|
||||
* Routes are injected at build time so Vite can bundle them.
|
||||
*/
|
||||
export function injectAuthProviderRoutes(
|
||||
injectRoute: InjectRoute,
|
||||
providers: Array<{ routes?: Array<{ pattern: string; entrypoint: string }> }>,
|
||||
): void {
|
||||
for (const provider of providers) {
|
||||
if (provider.routes) {
|
||||
for (const route of provider.routes) {
|
||||
injectRoute({
|
||||
pattern: route.pattern,
|
||||
entrypoint: route.entrypoint,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Injects passkey/oauth/magic-link auth routes.
|
||||
* Only used when NOT using external auth.
|
||||
*/
|
||||
export function injectBuiltinAuthRoutes(injectRoute: InjectRoute): void {
|
||||
// Passkey authentication routes
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/auth/passkey/options",
|
||||
entrypoint: resolveRoute("api/auth/passkey/options.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/auth/passkey/verify",
|
||||
entrypoint: resolveRoute("api/auth/passkey/verify.ts"),
|
||||
});
|
||||
|
||||
// Passkey management routes (authenticated users)
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/auth/passkey",
|
||||
entrypoint: resolveRoute("api/auth/passkey/index.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/auth/passkey/register/options",
|
||||
entrypoint: resolveRoute("api/auth/passkey/register/options.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/auth/passkey/register/verify",
|
||||
entrypoint: resolveRoute("api/auth/passkey/register/verify.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/auth/passkey/[id]",
|
||||
entrypoint: resolveRoute("api/auth/passkey/[id].ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/auth/dev-bypass",
|
||||
entrypoint: resolveRoute("api/auth/dev-bypass.ts"),
|
||||
});
|
||||
|
||||
// Invite routes
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/auth/invite",
|
||||
entrypoint: resolveRoute("api/auth/invite/index.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/auth/invite/accept",
|
||||
entrypoint: resolveRoute("api/auth/invite/accept.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/auth/invite/complete",
|
||||
entrypoint: resolveRoute("api/auth/invite/complete.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/auth/invite/register-options",
|
||||
entrypoint: resolveRoute("api/auth/invite/register-options.ts"),
|
||||
});
|
||||
|
||||
// Magic link routes
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/auth/magic-link/send",
|
||||
entrypoint: resolveRoute("api/auth/magic-link/send.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/auth/magic-link/verify",
|
||||
entrypoint: resolveRoute("api/auth/magic-link/verify.ts"),
|
||||
});
|
||||
|
||||
// OAuth routes
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/auth/oauth/[provider]",
|
||||
entrypoint: resolveRoute("api/auth/oauth/[provider].ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/auth/oauth/[provider]/callback",
|
||||
entrypoint: resolveRoute("api/auth/oauth/[provider]/callback.ts"),
|
||||
});
|
||||
|
||||
// Self-signup routes
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/auth/signup/request",
|
||||
entrypoint: resolveRoute("api/auth/signup/request.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/auth/signup/verify",
|
||||
entrypoint: resolveRoute("api/auth/signup/verify.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/auth/signup/complete",
|
||||
entrypoint: resolveRoute("api/auth/signup/complete.ts"),
|
||||
});
|
||||
|
||||
// Allowed domains admin routes (only relevant for passkey mode)
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/admin/allowed-domains",
|
||||
entrypoint: resolveRoute("api/admin/allowed-domains/index.ts"),
|
||||
});
|
||||
|
||||
injectRoute({
|
||||
pattern: "/_emdash/api/admin/allowed-domains/[domain]",
|
||||
entrypoint: resolveRoute("api/admin/allowed-domains/[domain].ts"),
|
||||
});
|
||||
}
|
||||
512
packages/core/src/astro/integration/runtime.ts
Normal file
512
packages/core/src/astro/integration/runtime.ts
Normal file
@@ -0,0 +1,512 @@
|
||||
/**
|
||||
* Runtime utilities for EmDash
|
||||
*
|
||||
* This file contains functions that are used at runtime (in middleware, routes, etc.)
|
||||
* and must work in all environments including Cloudflare Workers.
|
||||
*
|
||||
* DO NOT import Node.js-only modules here (fs, path, module, etc.)
|
||||
*/
|
||||
|
||||
import type { AuthDescriptor, AuthProviderDescriptor } from "../../auth/types.js";
|
||||
import type { DatabaseDescriptor } from "../../db/adapters.js";
|
||||
import type { MediaProviderDescriptor } from "../../media/types.js";
|
||||
import type { ResolvedPlugin } from "../../plugins/types.js";
|
||||
import type { StorageDescriptor } from "../storage/types.js";
|
||||
|
||||
export type { ResolvedPlugin };
|
||||
export type { MediaProviderDescriptor };
|
||||
|
||||
/**
 * Admin page definition (copied from plugins/types to avoid circular deps)
 */
export interface PluginAdminPage {
  /** Route path for the page, e.g. "/settings" (see the PluginDescriptor example in this file). */
  path: string;
  /** Label displayed for the page in admin navigation. */
  label: string;
  /** Optional icon identifier — NOTE(review): rendering semantics not visible here; confirm in the admin UI. */
  icon?: string;
}
|
||||
|
||||
/**
 * Dashboard widget definition (copied from plugins/types to avoid circular deps)
 */
export interface PluginDashboardWidget {
  /** Unique identifier for the widget. */
  id: string;
  /** Widget width on the dashboard grid; behavior when omitted is decided by the dashboard renderer — TODO confirm default. */
  size?: "full" | "half" | "third";
  /** Optional heading shown with the widget. */
  title?: string;
}
|
||||
|
||||
/**
|
||||
* Plugin descriptor - returned by plugin factory functions
|
||||
*
|
||||
* Contains all static metadata needed for manifest and admin UI,
|
||||
* plus the entrypoint for runtime instantiation.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* export function myPlugin(options?: MyPluginOptions): PluginDescriptor {
|
||||
* return {
|
||||
* id: "my-plugin",
|
||||
* version: "1.0.0",
|
||||
* entrypoint: "@my-org/emdash-plugin-foo",
|
||||
* options: options ?? {},
|
||||
* adminEntry: "@my-org/emdash-plugin-foo/admin",
|
||||
* adminPages: [{ path: "/settings", label: "Settings" }],
|
||||
* };
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
/**
|
||||
* Storage collection declaration for sandboxed plugins
|
||||
*/
|
||||
export interface StorageCollectionDeclaration {
|
||||
indexes?: string[];
|
||||
uniqueIndexes?: string[];
|
||||
}
|
||||
|
||||
export interface PluginDescriptor<TOptions = Record<string, unknown>> {
  /** Unique plugin identifier */
  id: string;
  /** Plugin version (semver) */
  version: string;
  /** Module specifier to import (e.g., "@emdash-cms/plugin-api-test") */
  entrypoint: string;
  /**
   * Options to pass to createPlugin(). Native format only.
   * Standard-format plugins configure themselves via KV settings
   * and Block Kit admin pages -- not constructor options.
   */
  options?: TOptions;
  /**
   * Plugin format. Determines how the entrypoint is loaded:
   * - `"standard"` -- exports `definePlugin({ hooks, routes })` as default.
   *   Wrapped with `adaptSandboxEntry` for in-process execution. Can run in both
   *   `plugins: []` (in-process) and `sandboxed: []` (isolate).
   * - `"native"` -- exports `createPlugin(options)` returning a `ResolvedPlugin`.
   *   Can only run in `plugins: []`. Cannot be sandboxed or published to marketplace.
   *
   * Defaults to `"native"` when unset.
   */
  format?: "standard" | "native";
  /** Admin UI module specifier (e.g., "@emdash-cms/plugin-audit-log/admin") */
  adminEntry?: string;
  /** Module specifier for site-side Astro rendering components (must export `blockComponents`) */
  componentsEntry?: string;
  /** Admin pages for navigation */
  adminPages?: PluginAdminPage[];
  /** Dashboard widgets */
  adminWidgets?: PluginDashboardWidget[];

  // === Sandbox-specific fields (for sandboxed plugins) ===

  /**
   * Capabilities the plugin requests.
   * For standard-format plugins, capabilities are enforced in both trusted and
   * sandboxed modes via the PluginContextFactory.
   */
  capabilities?: string[];
  /**
   * Allowed hosts for network:fetch capability.
   * Supports wildcards like "*.example.com".
   */
  allowedHosts?: string[];
  /**
   * Storage collections the plugin declares.
   * Sandboxed plugins can only access declared collections.
   */
  storage?: Record<string, StorageCollectionDeclaration>;
}
|
||||
|
||||
/**
 * Sandboxed plugin descriptor - same format as PluginDescriptor
 *
 * These run in isolated V8 isolates via Worker Loader on Cloudflare.
 * The `entrypoint` is resolved to a file and bundled at build time.
 *
 * This is a plain alias: the two types are interchangeable; the name exists
 * to make `sandboxed: []` call sites read clearly.
 */
export type SandboxedPluginDescriptor<TOptions = Record<string, unknown>> =
  PluginDescriptor<TOptions>;
|
||||
|
||||
export interface EmDashConfig {
|
||||
/**
|
||||
* Database configuration
|
||||
*
|
||||
* Use one of the adapter functions:
|
||||
* - `sqlite({ url: "file:./data.db" })` - Local SQLite
|
||||
* - `libsql({ url: "...", authToken: "..." })` - Turso/libSQL
|
||||
* - `d1({ binding: "DB" })` - Cloudflare D1
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* import { sqlite } from "emdash/db";
|
||||
*
|
||||
* emdash({
|
||||
* database: sqlite({ url: "file:./data.db" }),
|
||||
* })
|
||||
* ```
|
||||
*/
|
||||
database?: DatabaseDescriptor;
|
||||
/**
|
||||
* Storage configuration (for media)
|
||||
*/
|
||||
storage?: StorageDescriptor;
|
||||
/**
|
||||
* Trusted plugins to load (run in main isolate)
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* import { auditLogPlugin } from "@emdash-cms/plugin-audit-log";
|
||||
* import { webhookNotifierPlugin } from "@emdash-cms/plugin-webhook-notifier";
|
||||
*
|
||||
* emdash({
|
||||
* plugins: [
|
||||
* auditLogPlugin(),
|
||||
* webhookNotifierPlugin({ url: "https://example.com/webhook" }),
|
||||
* ],
|
||||
* })
|
||||
* ```
|
||||
*/
|
||||
plugins?: PluginDescriptor[];
|
||||
/**
|
||||
* Sandboxed plugins to load (run in isolated V8 isolates)
|
||||
*
|
||||
* Only works on Cloudflare with Worker Loader enabled.
|
||||
* Uses the same format as `plugins` - the difference is where they run.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* import { untrustedPlugin } from "some-third-party-plugin";
|
||||
*
|
||||
* emdash({
|
||||
* plugins: [trustedPlugin()], // runs in host
|
||||
* sandboxed: [untrustedPlugin()], // runs in isolate
|
||||
* sandboxRunner: "@emdash-cms/sandbox-cloudflare",
|
||||
* })
|
||||
* ```
|
||||
*/
|
||||
sandboxed?: SandboxedPluginDescriptor[];
|
||||
/**
|
||||
* Module that exports the sandbox runner factory.
|
||||
* Required if using sandboxed plugins.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* emdash({
|
||||
* sandboxRunner: "@emdash-cms/sandbox-cloudflare",
|
||||
* })
|
||||
* ```
|
||||
*/
|
||||
sandboxRunner?: string;
|
||||
|
||||
/**
|
||||
* Authentication configuration
|
||||
*
|
||||
* Use an auth adapter function from a platform package:
|
||||
* - `access({ teamDomain: "..." })` from `@emdash-cms/cloudflare`
|
||||
*
|
||||
* When an external auth provider is configured, passkey auth is disabled.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* import { access } from "@emdash-cms/cloudflare";
|
||||
*
|
||||
* emdash({
|
||||
* auth: access({
|
||||
* teamDomain: "myteam.cloudflareaccess.com",
|
||||
* audience: "abc123...",
|
||||
* roleMapping: {
|
||||
* "Admins": 50,
|
||||
* "Editors": 30,
|
||||
* },
|
||||
* }),
|
||||
* })
|
||||
* ```
|
||||
*/
|
||||
auth?: AuthDescriptor;
|
||||
|
||||
/**
|
||||
* Pluggable auth providers (login methods on the login page).
|
||||
*
|
||||
* Auth providers appear as options alongside passkey on the login page
|
||||
* and setup wizard. Any provider can be used to create the initial
|
||||
* admin account. Passkey is built-in; providers listed here are additive.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* import { atproto } from "@emdash-cms/auth-atproto";
|
||||
*
|
||||
* emdash({
|
||||
* authProviders: [atproto()],
|
||||
* })
|
||||
* ```
|
||||
*/
|
||||
authProviders?: AuthProviderDescriptor[];
|
||||
|
||||
/**
|
||||
* MCP (Model Context Protocol) server endpoint.
|
||||
*
|
||||
* Exposes an MCP Streamable HTTP server at `/_emdash/api/mcp`
|
||||
* that allows AI agents and tools to interact with the CMS using
|
||||
* the standardized MCP protocol.
|
||||
*
|
||||
* Enabled by default. The endpoint requires bearer token auth, so
|
||||
* it has no effect unless the user creates an API token and
|
||||
* configures a client. Set to `false` to disable.
|
||||
*
|
||||
* @default true
|
||||
*/
|
||||
mcp?: boolean;
|
||||
|
||||
/**
|
||||
* Plugin marketplace URL
|
||||
*
|
||||
* When set, enables the marketplace features: browse, install, update,
|
||||
* and uninstall plugins from a remote marketplace.
|
||||
*
|
||||
* Must be an HTTPS URL in production, or localhost/127.0.0.1 in dev.
|
||||
* Requires `sandboxRunner` to be configured (marketplace plugins run sandboxed).
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* emdash({
|
||||
* marketplace: "https://marketplace.emdashcms.com",
|
||||
* sandboxRunner: "@emdash-cms/sandbox-cloudflare",
|
||||
* })
|
||||
* ```
|
||||
*/
|
||||
marketplace?: string;
|
||||
|
||||
/**
|
||||
* Maximum allowed media file upload size in bytes.
|
||||
*
|
||||
* Applies to both direct multipart uploads and signed-URL uploads.
|
||||
* When unset, defaults to 52_428_800 (50 MB).
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* emdash({ maxUploadSize: 100 * 1024 * 1024 }) // 100 MB
|
||||
* ```
|
||||
*/
|
||||
maxUploadSize?: number;
|
||||
|
||||
/**
|
||||
* Public browser-facing origin for the site.
|
||||
*
|
||||
* Use when `Astro.url` / `request.url` do not match what users open — common with a
|
||||
* **TLS-terminating reverse proxy**: the app often sees `http://` on the internal hop
|
||||
* while the browser uses `https://`, which breaks WebAuthn, CSRF, OAuth, and redirect URLs.
|
||||
*
|
||||
* Set to the full origin users type in the address bar (no path), e.g.
|
||||
* `https://mysite.example.com`. When not set, falls back to environment variables
|
||||
* `EMDASH_SITE_URL` > `SITE_URL`, then to the request URL's origin.
|
||||
*
|
||||
* Replaces `passkeyPublicOrigin` (which only fixed passkeys).
|
||||
*/
|
||||
siteUrl?: string;
|
||||
|
||||
/**
|
||||
* Additional origins accepted by passkey verification.
|
||||
*
|
||||
* When the same EmDash deployment is reachable under several hostnames sharing
|
||||
* a registrable parent (e.g. `https://example.com` plus
|
||||
* `https://preview.example.com`), the canonical `siteUrl` defines the `rpId`
|
||||
* and the entries here are the *additional* origins from which assertions
|
||||
* are accepted. Each entry must be the same hostname as `siteUrl` or a
|
||||
* subdomain of it — WebAuthn requires `rpId` to be a registrable suffix of
|
||||
* every origin.
|
||||
*
|
||||
* Merged at runtime with the `EMDASH_ALLOWED_ORIGINS` env var (comma-separated).
|
||||
* Validation:
|
||||
* - Config-declared entries are shape-checked at Astro startup.
|
||||
* - Subdomain relationship to `siteUrl` is checked at startup when
|
||||
* `siteUrl` is also config-declared, otherwise at first passkey
|
||||
* verification (since `siteUrl` may come from `EMDASH_SITE_URL`).
|
||||
*
|
||||
* Mismatches throw with a source-attributed message naming
|
||||
* `config.allowedOrigins` or `EMDASH_ALLOWED_ORIGINS`.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* emdash({
|
||||
* siteUrl: "https://example.com",
|
||||
* allowedOrigins: ["https://preview.example.com"],
|
||||
* })
|
||||
* ```
|
||||
*/
|
||||
allowedOrigins?: string[];
|
||||
/**
|
||||
* Headers to trust for client IP resolution when running behind a reverse
|
||||
* proxy. The first header in this list that is present on the request
|
||||
* wins. Applies to rate limiting for auth endpoints and comment
|
||||
* submission.
|
||||
*
|
||||
* Common values:
|
||||
* - `x-real-ip` — nginx, Caddy, Traefik
|
||||
* - `fly-client-ip` — Fly.io
|
||||
* - `x-forwarded-for` — generic (first entry is used)
|
||||
*
|
||||
* Only set this when you **control the reverse proxy**. Untrusted
|
||||
* clients can set any header they like; trusting headers from an open
|
||||
* network is an IP-spoofing vulnerability that defeats rate limiting.
|
||||
*
|
||||
* On Cloudflare the `cf` object on the request is used automatically —
|
||||
* you normally don't need to set this. Leave unset (or empty) to
|
||||
* preserve the default: IP is resolved only when the request came
|
||||
* through Cloudflare's edge.
|
||||
*
|
||||
* Falls back to `EMDASH_TRUSTED_PROXY_HEADERS` env var (comma-separated)
|
||||
* when this option is not set, so operators can configure at deploy
|
||||
* time without touching the Astro config.
|
||||
*/
|
||||
trustedProxyHeaders?: string[];
|
||||
|
||||
/**
|
||||
* Enable playground mode for ephemeral "try EmDash" sites.
|
||||
*
|
||||
* When set, the integration injects a playground middleware (order: "pre")
|
||||
* that runs BEFORE the normal EmDash middleware chain. It creates an
|
||||
* isolated Durable Object database per session, runs migrations, applies
|
||||
* the seed, creates an anonymous admin user, and sets the DB in ALS.
|
||||
* By the time the runtime middleware runs, the database is fully ready.
|
||||
*
|
||||
* Setup and auth middleware are skipped (the playground handles both).
|
||||
*
|
||||
* Requires `@emdash-cms/cloudflare` as a dependency and a DO binding
|
||||
* in wrangler.jsonc.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* emdash({
|
||||
* database: playgroundDatabase({ binding: "PLAYGROUND_DB" }),
|
||||
* playground: {
|
||||
* middlewareEntrypoint: "@emdash-cms/cloudflare/db/playground-middleware",
|
||||
* },
|
||||
* })
|
||||
* ```
|
||||
*/
|
||||
playground?: {
|
||||
/** Module path for the playground middleware. */
|
||||
middlewareEntrypoint: string;
|
||||
};
|
||||
|
||||
/**
|
||||
* Media providers for browsing and uploading media
|
||||
*
|
||||
* The local media provider (using storage adapter) is available by default.
|
||||
* Additional providers can be added for external services like Unsplash,
|
||||
* Cloudinary, Mux, Cloudflare Images, etc.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* import { cloudflareImages, cloudflareStream } from "@emdash-cms/cloudflare";
|
||||
* import { unsplash } from "@emdash-cms/provider-unsplash";
|
||||
*
|
||||
* emdash({
|
||||
* mediaProviders: [
|
||||
* cloudflareImages({ accountId: "..." }),
|
||||
* cloudflareStream({ accountId: "..." }),
|
||||
* unsplash({ accessKey: "..." }),
|
||||
* ],
|
||||
* })
|
||||
* ```
|
||||
*/
|
||||
mediaProviders?: MediaProviderDescriptor[];
|
||||
|
||||
/**
|
||||
* Admin UI font configuration.
|
||||
*
|
||||
* By default, EmDash loads Noto Sans via the Astro Font API, covering
|
||||
* Latin, Latin Extended, Cyrillic, Cyrillic Extended, Greek, Greek
|
||||
* Extended, Devanagari, and Vietnamese. Fonts are downloaded from
|
||||
* Google at build time and self-hosted, so there are no runtime CDN
|
||||
* requests.
|
||||
*
|
||||
* To add support for additional writing systems (Arabic, CJK, etc.),
|
||||
* pass script names. EmDash resolves the matching Noto Sans variant
|
||||
* from Google Fonts and merges all script faces under a single
|
||||
* font-family, so the browser downloads only the glyphs it needs
|
||||
* via unicode-range.
|
||||
*
|
||||
* Set to `false` to disable font injection entirely and use system fonts.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* // Add Arabic and Japanese support
|
||||
* emdash({
|
||||
* fonts: {
|
||||
* scripts: ["arabic", "japanese"],
|
||||
* },
|
||||
* })
|
||||
* ```
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* // Disable web fonts entirely (use system fonts)
|
||||
* emdash({
|
||||
* fonts: false,
|
||||
* })
|
||||
* ```
|
||||
*/
|
||||
fonts?:
|
||||
| false
|
||||
| {
|
||||
/**
|
||||
* Additional Noto Sans script families to include.
|
||||
*
|
||||
* Available scripts: arabic, armenian, bengali, chinese-simplified,
|
||||
* chinese-traditional, chinese-hongkong, devanagari, ethiopic, farsi,
|
||||
* georgian, gujarati, gurmukhi, hebrew, japanese, kannada, khmer,
|
||||
* korean, lao, malayalam, myanmar, oriya, sinhala, tamil, telugu,
|
||||
* thai, tibetan.
|
||||
*/
|
||||
scripts?: string[];
|
||||
};
|
||||
|
||||
/**
|
||||
* Admin UI branding (white-labeling).
|
||||
*
|
||||
* Overrides the default EmDash logo and name in the admin panel.
|
||||
* Use this to white-label the CMS for agency or enterprise deployments.
|
||||
* These settings are separate from the public site settings (title, logo,
|
||||
* favicon) which remain available for SEO and front-end use.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* emdash({
|
||||
* admin: {
|
||||
* logo: "/images/agency-logo.webp",
|
||||
* siteName: "AgencyX CMS",
|
||||
* favicon: "/favicon.ico",
|
||||
* },
|
||||
* })
|
||||
* ```
|
||||
*/
|
||||
admin?: {
|
||||
/** URL or path to a custom logo image for the admin UI (login page, sidebar). */
|
||||
logo?: string;
|
||||
/** Custom name displayed in the admin sidebar and browser tab. */
|
||||
siteName?: string;
|
||||
/** URL or path to a custom favicon for the admin panel. */
|
||||
favicon?: string;
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Get stored config from global
|
||||
* This is set by the virtual module at build time
|
||||
*/
|
||||
export function getStoredConfig(): EmDashConfig | null {
|
||||
return globalThis.__emdashConfig || null;
|
||||
}
|
||||
|
||||
/**
 * Set stored config in global
 * Called by the integration at config time
 *
 * Stored on `globalThis` so both config-time and runtime code can reach it
 * without an import cycle. Note: on Cloudflare Workers, globals don't persist
 * between build time and runtime (see generatePluginsModule), so runtime code
 * there must rely on the virtual config module instead.
 */
export function setStoredConfig(config: EmDashConfig): void {
  globalThis.__emdashConfig = config;
}
|
||||
|
||||
// Declare global type
declare global {
  // `var` (not let/const) is required for the declaration to attach to globalThis.
  // eslint-disable-next-line no-var
  var __emdashConfig: EmDashConfig | undefined;
}
|
||||
554
packages/core/src/astro/integration/virtual-modules.ts
Normal file
554
packages/core/src/astro/integration/virtual-modules.ts
Normal file
@@ -0,0 +1,554 @@
|
||||
/**
|
||||
* Virtual Module Generators
|
||||
*
|
||||
* Functions that generate virtual module content for Vite.
|
||||
* These modules statically import configured dependencies
|
||||
* so Vite can properly resolve and bundle them.
|
||||
*/
|
||||
|
||||
import { readFileSync } from "node:fs";
|
||||
import { createRequire } from "node:module";
|
||||
import { resolve } from "node:path";
|
||||
|
||||
import type { AuthProviderDescriptor } from "../../auth/types.js";
|
||||
import type { MediaProviderDescriptor } from "../../media/types.js";
|
||||
import { defaultSeed } from "../../seed/default.js";
|
||||
import type { PluginDescriptor } from "./runtime.js";
|
||||
|
||||
/** Matches TypeScript/JSX source file extensions (.ts, .tsx, .mts, .cts, .jsx). */
const TS_SOURCE_EXT_RE = /^\.(ts|tsx|mts|cts|jsx)$/;

/** Pattern to remove scoped package prefix from plugin ID (e.g. "@org/plugin-") */
const SCOPED_PREFIX_PATTERN = /^@[^/]+\/plugin-/;

/** Pattern to remove emdash-plugin- prefix from plugin ID */
const EMDASH_PREFIX_PATTERN = /^emdash-plugin-/;

// Virtual module IDs.
// Each "virtual:emdash/*" specifier is paired with a "\0"-prefixed resolved
// ID — the Rollup/Vite convention marking a module as virtual so other
// plugins don't try to resolve it against the filesystem.
export const VIRTUAL_CONFIG_ID = "virtual:emdash/config";
export const RESOLVED_VIRTUAL_CONFIG_ID = "\0" + VIRTUAL_CONFIG_ID;

export const VIRTUAL_DIALECT_ID = "virtual:emdash/dialect";
export const RESOLVED_VIRTUAL_DIALECT_ID = "\0" + VIRTUAL_DIALECT_ID;

export const VIRTUAL_STORAGE_ID = "virtual:emdash/storage";
export const RESOLVED_VIRTUAL_STORAGE_ID = "\0" + VIRTUAL_STORAGE_ID;

export const VIRTUAL_ADMIN_REGISTRY_ID = "virtual:emdash/admin-registry";
export const RESOLVED_VIRTUAL_ADMIN_REGISTRY_ID = "\0" + VIRTUAL_ADMIN_REGISTRY_ID;

export const VIRTUAL_PLUGINS_ID = "virtual:emdash/plugins";
export const RESOLVED_VIRTUAL_PLUGINS_ID = "\0" + VIRTUAL_PLUGINS_ID;

export const VIRTUAL_SANDBOX_RUNNER_ID = "virtual:emdash/sandbox-runner";
export const RESOLVED_VIRTUAL_SANDBOX_RUNNER_ID = "\0" + VIRTUAL_SANDBOX_RUNNER_ID;

export const VIRTUAL_SANDBOXED_PLUGINS_ID = "virtual:emdash/sandboxed-plugins";
export const RESOLVED_VIRTUAL_SANDBOXED_PLUGINS_ID = "\0" + VIRTUAL_SANDBOXED_PLUGINS_ID;

export const VIRTUAL_AUTH_ID = "virtual:emdash/auth";
export const RESOLVED_VIRTUAL_AUTH_ID = "\0" + VIRTUAL_AUTH_ID;

export const VIRTUAL_AUTH_PROVIDERS_ID = "virtual:emdash/auth-providers";
export const RESOLVED_VIRTUAL_AUTH_PROVIDERS_ID = "\0" + VIRTUAL_AUTH_PROVIDERS_ID;

export const VIRTUAL_MEDIA_PROVIDERS_ID = "virtual:emdash/media-providers";
export const RESOLVED_VIRTUAL_MEDIA_PROVIDERS_ID = "\0" + VIRTUAL_MEDIA_PROVIDERS_ID;

export const VIRTUAL_BLOCK_COMPONENTS_ID = "virtual:emdash/block-components";
export const RESOLVED_VIRTUAL_BLOCK_COMPONENTS_ID = "\0" + VIRTUAL_BLOCK_COMPONENTS_ID;

export const VIRTUAL_SEED_ID = "virtual:emdash/seed";
export const RESOLVED_VIRTUAL_SEED_ID = "\0" + VIRTUAL_SEED_ID;

export const VIRTUAL_WAIT_UNTIL_ID = "virtual:emdash/wait-until";
export const RESOLVED_VIRTUAL_WAIT_UNTIL_ID = "\0" + VIRTUAL_WAIT_UNTIL_ID;
|
||||
|
||||
/**
|
||||
* Generates the config virtual module.
|
||||
*/
|
||||
export function generateConfigModule(serializableConfig: Record<string, unknown>): string {
|
||||
return `export default ${JSON.stringify(serializableConfig)};`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates the dialect virtual module.
|
||||
*
|
||||
* Adapters that set `supportsRequestScope: true` on their descriptor are
|
||||
* expected to export `createRequestScopedDb` from their runtime entrypoint;
|
||||
* the generator re-exports it so middleware can ask for a per-request Kysely
|
||||
* (used for D1 Sessions API, bookmark cookies, read-replica routing). Other
|
||||
* adapters get a stub that returns null.
|
||||
*/
|
||||
export function generateDialectModule(opts: {
|
||||
entrypoint?: string;
|
||||
type?: string;
|
||||
supportsRequestScope: boolean;
|
||||
}): string {
|
||||
const { entrypoint, supportsRequestScope } = opts;
|
||||
if (!entrypoint) {
|
||||
return [
|
||||
`export const createDialect = undefined;`,
|
||||
`export const dialectType = "sqlite";`,
|
||||
`export const createRequestScopedDb = (_opts) => null;`,
|
||||
].join("\n");
|
||||
}
|
||||
const type = opts.type ?? "sqlite";
|
||||
|
||||
if (supportsRequestScope) {
|
||||
return `
|
||||
import { createDialect as _createDialect } from "${entrypoint}";
|
||||
export { createRequestScopedDb } from "${entrypoint}";
|
||||
export const createDialect = _createDialect;
|
||||
export const dialectType = ${JSON.stringify(type)};
|
||||
`;
|
||||
}
|
||||
|
||||
return `
|
||||
import { createDialect as _createDialect } from "${entrypoint}";
|
||||
export const createDialect = _createDialect;
|
||||
export const dialectType = ${JSON.stringify(type)};
|
||||
export const createRequestScopedDb = (_opts) => null;
|
||||
`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates the storage virtual module.
|
||||
* Statically imports the configured storage adapter.
|
||||
*/
|
||||
export function generateStorageModule(storageEntrypoint?: string): string {
|
||||
if (!storageEntrypoint) {
|
||||
return `export const createStorage = undefined;`;
|
||||
}
|
||||
return `
|
||||
import { createStorage as _createStorage } from "${storageEntrypoint}";
|
||||
export const createStorage = _createStorage;
|
||||
`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates the auth virtual module.
|
||||
* Statically imports the configured auth provider.
|
||||
*/
|
||||
export function generateAuthModule(authEntrypoint?: string): string {
|
||||
if (!authEntrypoint) {
|
||||
return `export const authenticate = undefined;`;
|
||||
}
|
||||
return `
|
||||
import { authenticate as _authenticate } from "${authEntrypoint}";
|
||||
export const authenticate = _authenticate;
|
||||
`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates the auth providers module.
|
||||
*
|
||||
* Statically imports each auth provider's `adminEntry` module and exports
|
||||
* a registry keyed by provider ID. The admin UI uses this to render
|
||||
* provider-specific login buttons/forms and setup steps.
|
||||
*
|
||||
* Follows the same pattern as `generateAdminRegistryModule()` for plugins.
|
||||
*/
|
||||
export function generateAuthProvidersModule(descriptors: AuthProviderDescriptor[]): string {
|
||||
const withAdmin = descriptors.filter((d) => d.adminEntry);
|
||||
|
||||
if (withAdmin.length === 0) {
|
||||
return `export const authProviders = {};`;
|
||||
}
|
||||
|
||||
const imports: string[] = [];
|
||||
const entries: string[] = [];
|
||||
|
||||
withAdmin.forEach((descriptor, index) => {
|
||||
const varName = `authProvider${index}`;
|
||||
imports.push(`import * as ${varName} from ${JSON.stringify(descriptor.adminEntry)};`);
|
||||
entries.push(
|
||||
` ${JSON.stringify(descriptor.id)}: { ...${varName}, id: ${JSON.stringify(descriptor.id)}, label: ${JSON.stringify(descriptor.label)} },`,
|
||||
);
|
||||
});
|
||||
|
||||
return `
|
||||
// Auto-generated auth provider registry
|
||||
${imports.join("\n")}
|
||||
|
||||
export const authProviders = {
|
||||
${entries.join("\n")}
|
||||
};
|
||||
`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates the plugins module.
|
||||
* Imports and instantiates all plugins at runtime.
|
||||
*
|
||||
* Handles two plugin formats:
|
||||
* - **Native**: imports `createPlugin` and calls it with options
|
||||
* - **Standard**: imports the default export and wraps it with `adaptSandboxEntry`
|
||||
*
|
||||
* The format is determined by `descriptor.format`:
|
||||
* - `"standard"` -- uses adaptSandboxEntry
|
||||
* - `"native"` or undefined -- uses createPlugin
|
||||
*
|
||||
* This is critical for Cloudflare Workers where globals don't persist
|
||||
* between build time and runtime.
|
||||
*/
|
||||
export function generatePluginsModule(descriptors: PluginDescriptor[]): string {
|
||||
if (descriptors.length === 0) {
|
||||
return `export const plugins = [];`;
|
||||
}
|
||||
|
||||
const imports: string[] = [];
|
||||
const instantiations: string[] = [];
|
||||
|
||||
// Track whether we need the adapter import
|
||||
let needsAdapter = false;
|
||||
|
||||
descriptors.forEach((descriptor, index) => {
|
||||
if (descriptor.format === "standard") {
|
||||
// Standard format: import default export, wrap with adaptSandboxEntry
|
||||
needsAdapter = true;
|
||||
const varName = `pluginDef${index}`;
|
||||
imports.push(`import ${varName} from "${descriptor.entrypoint}";`);
|
||||
instantiations.push(
|
||||
`adaptSandboxEntry(${varName}, ${JSON.stringify({
|
||||
id: descriptor.id,
|
||||
version: descriptor.version,
|
||||
capabilities: descriptor.capabilities,
|
||||
allowedHosts: descriptor.allowedHosts,
|
||||
storage: descriptor.storage,
|
||||
adminPages: descriptor.adminPages,
|
||||
adminWidgets: descriptor.adminWidgets,
|
||||
})})`,
|
||||
);
|
||||
} else {
|
||||
// Native format: import createPlugin and call with options
|
||||
const varName = `createPlugin${index}`;
|
||||
imports.push(`import { createPlugin as ${varName} } from "${descriptor.entrypoint}";`);
|
||||
instantiations.push(`${varName}(${JSON.stringify(descriptor.options ?? {})})`);
|
||||
}
|
||||
});
|
||||
|
||||
const adapterImport = needsAdapter
|
||||
? `import { adaptSandboxEntry } from "emdash/plugins/adapt-sandbox-entry";\n`
|
||||
: "";
|
||||
|
||||
return `
|
||||
// Auto-generated plugins module
|
||||
// Imports and instantiates all configured plugins at runtime
|
||||
|
||||
${adapterImport}${imports.join("\n")}
|
||||
|
||||
/** Resolved plugins array */
|
||||
export const plugins = [
|
||||
${instantiations.join(",\n ")}
|
||||
];
|
||||
`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates the admin registry module.
|
||||
* Uses adminEntry from plugin descriptors to statically import admin modules.
|
||||
*/
|
||||
export function generateAdminRegistryModule(descriptors: PluginDescriptor[]): string {
|
||||
// Filter to descriptors with admin entries
|
||||
const adminDescriptors = descriptors.filter((d) => d.adminEntry);
|
||||
|
||||
if (adminDescriptors.length === 0) {
|
||||
return `export const pluginAdmins = {};`;
|
||||
}
|
||||
|
||||
const imports: string[] = [];
|
||||
const entries: string[] = [];
|
||||
|
||||
adminDescriptors.forEach((descriptor, index) => {
|
||||
const varName = `admin${index}`;
|
||||
// Use explicit ID from descriptor if available, otherwise derive from entrypoint
|
||||
const pluginId =
|
||||
descriptor.id ??
|
||||
descriptor.entrypoint.replace(SCOPED_PREFIX_PATTERN, "").replace(EMDASH_PREFIX_PATTERN, "");
|
||||
|
||||
imports.push(`import * as ${varName} from "${descriptor.adminEntry}";`);
|
||||
entries.push(` "${pluginId}": ${varName},`);
|
||||
});
|
||||
|
||||
return `
|
||||
// Auto-generated plugin admin registry
|
||||
${imports.join("\n")}
|
||||
|
||||
export const pluginAdmins = {
|
||||
${entries.join("\n")}
|
||||
};
|
||||
`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates the sandbox runner module.
|
||||
* Imports the configured sandbox runner factory or provides a noop default.
|
||||
*/
|
||||
export function generateSandboxRunnerModule(sandboxRunner?: string): string {
|
||||
if (!sandboxRunner) {
|
||||
// No sandbox runner configured - use noop
|
||||
return `
|
||||
// No sandbox runner configured - sandboxed plugins disabled
|
||||
import { createNoopSandboxRunner } from "emdash";
|
||||
|
||||
export const createSandboxRunner = createNoopSandboxRunner;
|
||||
export const sandboxEnabled = false;
|
||||
`;
|
||||
}
|
||||
|
||||
return `
|
||||
// Auto-generated sandbox runner module
|
||||
import { createSandboxRunner as _createSandboxRunner } from "${sandboxRunner}";
|
||||
|
||||
export const createSandboxRunner = _createSandboxRunner;
|
||||
export const sandboxEnabled = true;
|
||||
`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates the media providers module.
|
||||
* Imports and instantiates configured media providers at runtime.
|
||||
*/
|
||||
export function generateMediaProvidersModule(descriptors: MediaProviderDescriptor[]): string {
|
||||
// Always include local provider by default unless explicitly disabled
|
||||
const localDisabled = descriptors.some((d) => d.id === "local" && d.config.enabled === false);
|
||||
|
||||
const imports: string[] = [];
|
||||
const entries: string[] = [];
|
||||
|
||||
// Add local provider first if not disabled
|
||||
if (!localDisabled) {
|
||||
imports.push(
|
||||
`import { createMediaProvider as createLocalProvider } from "emdash/media/local-runtime";`,
|
||||
);
|
||||
entries.push(`{
|
||||
id: "local",
|
||||
name: "Library",
|
||||
icon: "folder",
|
||||
capabilities: { browse: true, search: false, upload: true, delete: true },
|
||||
createProvider: (ctx) => createLocalProvider({ ...ctx, enabled: true }),
|
||||
}`);
|
||||
}
|
||||
|
||||
// Add custom providers
|
||||
descriptors
|
||||
.filter((d) => d.id !== "local" || d.config.enabled !== false)
|
||||
.filter((d) => d.id !== "local") // Skip local if we already added it
|
||||
.forEach((descriptor, index) => {
|
||||
const varName = `createProvider${index}`;
|
||||
imports.push(`import { createMediaProvider as ${varName} } from "${descriptor.entrypoint}";`);
|
||||
entries.push(`{
|
||||
id: ${JSON.stringify(descriptor.id)},
|
||||
name: ${JSON.stringify(descriptor.name)},
|
||||
icon: ${JSON.stringify(descriptor.icon)},
|
||||
capabilities: ${JSON.stringify(descriptor.capabilities)},
|
||||
createProvider: (ctx) => ${varName}({ ...${JSON.stringify(descriptor.config)}, ...ctx }),
|
||||
}`);
|
||||
});
|
||||
|
||||
return `
|
||||
// Auto-generated media providers module
|
||||
${imports.join("\n")}
|
||||
|
||||
/** Media provider descriptors with factory functions */
|
||||
export const mediaProviders = [
|
||||
${entries.join(",\n ")}
|
||||
];
|
||||
`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates the block components module.
|
||||
* Collects and merges `blockComponents` exports from plugin component entries.
|
||||
*/
|
||||
export function generateBlockComponentsModule(descriptors: PluginDescriptor[]): string {
|
||||
const withComponents = descriptors.filter((d) => d.componentsEntry);
|
||||
if (withComponents.length === 0) {
|
||||
return `export const pluginBlockComponents = {};`;
|
||||
}
|
||||
|
||||
const imports: string[] = [];
|
||||
const spreads: string[] = [];
|
||||
withComponents.forEach((d, i) => {
|
||||
imports.push(`import { blockComponents as bc${i} } from "${d.componentsEntry}";`);
|
||||
spreads.push(`...bc${i}`);
|
||||
});
|
||||
|
||||
return `${imports.join("\n")}\nexport const pluginBlockComponents = { ${spreads.join(", ")} };`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates the wait-until virtual module.
|
||||
*
|
||||
* Under @astrojs/cloudflare, re-exports `waitUntil` from `cloudflare:workers`
|
||||
* so `after(fn)` in core can extend the worker's lifetime past the response
|
||||
* for deferred bookkeeping. For any other adapter, exports `undefined` —
|
||||
* Node's long-lived event loop keeps deferred promises running without a
|
||||
* lifetime extender.
|
||||
*
|
||||
* Keeping the adapter check here — rather than in core — means core itself
|
||||
* has no Cloudflare-specific imports or code paths.
|
||||
*/
|
||||
export function generateWaitUntilModule(adapterName: string | undefined): string {
|
||||
if (adapterName === "@astrojs/cloudflare") {
|
||||
return `export { waitUntil } from "cloudflare:workers";`;
|
||||
}
|
||||
return `export const waitUntil = undefined;`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates the seed virtual module.
|
||||
* Reads the user's seed file at build time (in Node context) and embeds it,
|
||||
* so the runtime doesn't need filesystem access (required for workerd).
|
||||
*
|
||||
* Search order:
|
||||
* 1. `.emdash/seed.json`
|
||||
* 2. `package.json` → `emdash.seed` reference
|
||||
* 3. `seed/seed.json` (conventional template path)
|
||||
*
|
||||
* Exports `userSeed` (user's seed or null) and `seed` (user's seed or default).
|
||||
*
|
||||
* When no user seed is found, falls back to the built-in default seed and
|
||||
* (if `warnOnFallback` is true) logs a warning so misconfiguration is visible
|
||||
* during `astro dev`. Build/preview/sync stay silent so sites that
|
||||
* intentionally use the default seed (e.g. the blank template) don't
|
||||
* generate noisy logs.
|
||||
*/
|
||||
export function generateSeedModule(projectRoot: string, warnOnFallback = false): string {
|
||||
let userSeedJson: string | null = null;
|
||||
|
||||
// Try .emdash/seed.json
|
||||
try {
|
||||
const seedPath = resolve(projectRoot, ".emdash", "seed.json");
|
||||
const content = readFileSync(seedPath, "utf-8");
|
||||
JSON.parse(content); // validate
|
||||
userSeedJson = content;
|
||||
} catch {
|
||||
// Not found, try next
|
||||
}
|
||||
|
||||
// Try package.json → emdash.seed reference
|
||||
if (!userSeedJson) {
|
||||
try {
|
||||
const pkgPath = resolve(projectRoot, "package.json");
|
||||
const pkgContent = readFileSync(pkgPath, "utf-8");
|
||||
const pkg: { emdash?: { seed?: string } } = JSON.parse(pkgContent);
|
||||
|
||||
if (pkg.emdash?.seed) {
|
||||
const seedPath = resolve(projectRoot, pkg.emdash.seed);
|
||||
const content = readFileSync(seedPath, "utf-8");
|
||||
JSON.parse(content); // validate
|
||||
userSeedJson = content;
|
||||
}
|
||||
} catch {
|
||||
// Not found
|
||||
}
|
||||
}
|
||||
|
||||
// Try conventional seed/seed.json fallback
|
||||
if (!userSeedJson) {
|
||||
try {
|
||||
const seedPath = resolve(projectRoot, "seed", "seed.json");
|
||||
const content = readFileSync(seedPath, "utf-8");
|
||||
JSON.parse(content); // validate
|
||||
userSeedJson = content;
|
||||
} catch {
|
||||
// Not found
|
||||
}
|
||||
}
|
||||
|
||||
if (userSeedJson) {
|
||||
return [`export const userSeed = ${userSeedJson};`, `export const seed = userSeed;`].join("\n");
|
||||
}
|
||||
|
||||
// No user seed — inline the default. Caller (the Vite plugin) gates this
|
||||
// to dev-only so production builds stay quiet for sites that intentionally
|
||||
// rely on the default seed.
|
||||
if (warnOnFallback) {
|
||||
console.warn(
|
||||
"[emdash] No user seed found at .emdash/seed.json, package.json#emdash.seed, or seed/seed.json. Falling back to the built-in default seed; the setup wizard will not offer demo content for this site.",
|
||||
);
|
||||
}
|
||||
return [
|
||||
`export const userSeed = null;`,
|
||||
`export const seed = ${JSON.stringify(defaultSeed)};`,
|
||||
].join("\n");
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve a module specifier from the project's context.
|
||||
* Uses Node.js require.resolve with the project root as base.
|
||||
*/
|
||||
function resolveModulePathFromProject(specifier: string, projectRoot: string): string {
|
||||
// Create require from the project's package.json location
|
||||
const projectPackageJson = resolve(projectRoot, "package.json");
|
||||
const require = createRequire(projectPackageJson);
|
||||
return require.resolve(specifier);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates the sandboxed plugins module.
|
||||
* Resolves plugin entrypoints to files, reads them, and embeds the code.
|
||||
*
|
||||
* At runtime, middleware uses SandboxRunner to load these into isolates.
|
||||
*/
|
||||
export function generateSandboxedPluginsModule(
|
||||
sandboxed: PluginDescriptor[],
|
||||
projectRoot: string,
|
||||
): string {
|
||||
if (sandboxed.length === 0) {
|
||||
return `
|
||||
// No sandboxed plugins configured
|
||||
export const sandboxedPlugins = [];
|
||||
`;
|
||||
}
|
||||
|
||||
const pluginEntries: string[] = [];
|
||||
|
||||
for (const descriptor of sandboxed) {
|
||||
const bundleSpecifier = descriptor.entrypoint;
|
||||
|
||||
// Resolve the bundle to a file path using project's require context
|
||||
const filePath = resolveModulePathFromProject(bundleSpecifier, projectRoot);
|
||||
|
||||
const ext = filePath.slice(filePath.lastIndexOf("."));
|
||||
if (TS_SOURCE_EXT_RE.test(ext)) {
|
||||
throw new Error(
|
||||
`Sandboxed plugin "${descriptor.id}" entrypoint "${bundleSpecifier}" resolves to ` +
|
||||
`unbuilt source (${filePath}). Sandbox entries must be pre-built JavaScript. ` +
|
||||
`Ensure the plugin's package.json exports point to built files (e.g. dist/*.mjs) ` +
|
||||
`and run the plugin's build step before building the site.`,
|
||||
);
|
||||
}
|
||||
|
||||
const code = readFileSync(filePath, "utf-8");
|
||||
|
||||
// Create the plugin entry with embedded code and sandbox config
|
||||
pluginEntries.push(`{
|
||||
id: ${JSON.stringify(descriptor.id)},
|
||||
version: ${JSON.stringify(descriptor.version)},
|
||||
options: ${JSON.stringify(descriptor.options ?? {})},
|
||||
capabilities: ${JSON.stringify(descriptor.capabilities ?? [])},
|
||||
allowedHosts: ${JSON.stringify(descriptor.allowedHosts ?? [])},
|
||||
storage: ${JSON.stringify(descriptor.storage ?? {})},
|
||||
adminPages: ${JSON.stringify(descriptor.adminPages ?? [])},
|
||||
adminWidgets: ${JSON.stringify(descriptor.adminWidgets ?? [])},
|
||||
adminEntry: ${JSON.stringify(descriptor.adminEntry)},
|
||||
// Code read from: ${filePath}
|
||||
code: ${JSON.stringify(code)},
|
||||
}`);
|
||||
}
|
||||
|
||||
return `
|
||||
// Auto-generated sandboxed plugins module
|
||||
// Plugin code is embedded at build time
|
||||
|
||||
/**
|
||||
* Sandboxed plugin entries with embedded code.
|
||||
* Loaded at runtime via SandboxRunner.
|
||||
*/
|
||||
export const sandboxedPlugins = [
|
||||
${pluginEntries.join(",\n ")}
|
||||
];
|
||||
`;
|
||||
}
|
||||
435
packages/core/src/astro/integration/vite-config.ts
Normal file
435
packages/core/src/astro/integration/vite-config.ts
Normal file
@@ -0,0 +1,435 @@
|
||||
/**
|
||||
* Vite Plugin Configuration
|
||||
*
|
||||
* Defines the Vite plugin that handles virtual modules and other
|
||||
* Vite-specific configuration for EmDash.
|
||||
*/
|
||||
|
||||
import { existsSync } from "node:fs";
|
||||
import { createRequire } from "node:module";
|
||||
import { dirname, isAbsolute, relative, resolve } from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
|
||||
import type { AstroConfig } from "astro";
|
||||
import type { Plugin } from "vite";
|
||||
|
||||
import { COMMIT, VERSION } from "../../version.js";
|
||||
import type { EmDashConfig, PluginDescriptor } from "./runtime.js";
|
||||
import {
|
||||
VIRTUAL_CONFIG_ID,
|
||||
RESOLVED_VIRTUAL_CONFIG_ID,
|
||||
VIRTUAL_DIALECT_ID,
|
||||
RESOLVED_VIRTUAL_DIALECT_ID,
|
||||
VIRTUAL_STORAGE_ID,
|
||||
RESOLVED_VIRTUAL_STORAGE_ID,
|
||||
VIRTUAL_ADMIN_REGISTRY_ID,
|
||||
RESOLVED_VIRTUAL_ADMIN_REGISTRY_ID,
|
||||
VIRTUAL_PLUGINS_ID,
|
||||
RESOLVED_VIRTUAL_PLUGINS_ID,
|
||||
VIRTUAL_SANDBOX_RUNNER_ID,
|
||||
RESOLVED_VIRTUAL_SANDBOX_RUNNER_ID,
|
||||
VIRTUAL_SANDBOXED_PLUGINS_ID,
|
||||
RESOLVED_VIRTUAL_SANDBOXED_PLUGINS_ID,
|
||||
VIRTUAL_AUTH_ID,
|
||||
RESOLVED_VIRTUAL_AUTH_ID,
|
||||
VIRTUAL_AUTH_PROVIDERS_ID,
|
||||
RESOLVED_VIRTUAL_AUTH_PROVIDERS_ID,
|
||||
VIRTUAL_MEDIA_PROVIDERS_ID,
|
||||
RESOLVED_VIRTUAL_MEDIA_PROVIDERS_ID,
|
||||
VIRTUAL_BLOCK_COMPONENTS_ID,
|
||||
RESOLVED_VIRTUAL_BLOCK_COMPONENTS_ID,
|
||||
VIRTUAL_SEED_ID,
|
||||
RESOLVED_VIRTUAL_SEED_ID,
|
||||
VIRTUAL_WAIT_UNTIL_ID,
|
||||
RESOLVED_VIRTUAL_WAIT_UNTIL_ID,
|
||||
generateSeedModule,
|
||||
generateWaitUntilModule,
|
||||
generateConfigModule,
|
||||
generateDialectModule,
|
||||
generateStorageModule,
|
||||
generateAuthModule,
|
||||
generateAuthProvidersModule,
|
||||
generatePluginsModule,
|
||||
generateAdminRegistryModule,
|
||||
generateSandboxRunnerModule,
|
||||
generateSandboxedPluginsModule,
|
||||
generateMediaProvidersModule,
|
||||
generateBlockComponentsModule,
|
||||
} from "./virtual-modules.js";
|
||||
|
||||
// Matches relative locale catalog paths like "de/messages.mjs" or
// "pt-BR/messages.mjs" (either path separator); capture group 1 is the
// locale code. Used by linguiMacroPlugin's resolveId hook below.
const LOCALE_MESSAGES_RE = /[/\\]([a-z]{2}(?:-[A-Z]{2})?)[/\\]messages\.mjs$/;
|
||||
/**
|
||||
* Vite plugin that compiles Lingui macros in admin source files.
|
||||
* Only active in dev mode when the admin package is aliased to source for HMR.
|
||||
* @babel/core is dynamically imported from admin's devDependencies —
|
||||
* not declared by core, never ships to end users.
|
||||
*/
|
||||
function linguiMacroPlugin(adminSourcePath: string, adminDistPath: string): Plugin {
|
||||
// Resolve @babel/core from admin's devDependencies, not core's.
|
||||
const adminRequire = createRequire(resolve(adminDistPath, "index.js"));
|
||||
const babelCorePath = adminRequire.resolve("@babel/core");
|
||||
|
||||
return {
|
||||
name: "emdash-lingui-macro",
|
||||
enforce: "pre",
|
||||
resolveId(id, importer) {
|
||||
// Redirect relative locale catalog imports (e.g. ./de/messages.mjs) from
|
||||
// within admin source to the compiled dist/locales/ directory, since
|
||||
// lingui compile only runs during build — not in dev watch mode.
|
||||
if (!importer?.startsWith(adminSourcePath)) return;
|
||||
const match = id.match(LOCALE_MESSAGES_RE);
|
||||
if (match?.[1]) {
|
||||
return resolve(adminDistPath, "locales", match[1], "messages.mjs");
|
||||
}
|
||||
},
|
||||
async transform(code, id) {
|
||||
if (!id.startsWith(adminSourcePath) || !code.includes("@lingui")) return;
|
||||
const { transformAsync } = (await import(babelCorePath)) as typeof import("@babel/core");
|
||||
const result = await transformAsync(code, {
|
||||
filename: id,
|
||||
plugins: ["@lingui/babel-plugin-lingui-macro"],
|
||||
parserOpts: { plugins: ["jsx", "typescript"] },
|
||||
});
|
||||
if (!result?.code) return;
|
||||
return { code: result.code, map: result.map ?? undefined };
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve path to the admin package dist directory.
|
||||
* Used for Vite alias to ensure the package is found in pnpm's isolated node_modules.
|
||||
*/
|
||||
function resolveAdminDist(): string {
|
||||
const require = createRequire(import.meta.url);
|
||||
const adminPath = require.resolve("@emdash-cms/admin");
|
||||
// Return the directory containing the built package (dist/)
|
||||
return dirname(adminPath);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check whether child is inside parent without relying on simple prefix checks.
|
||||
*/
|
||||
function isInside(parent: string, child: string): boolean {
|
||||
const relativePath = relative(parent, child);
|
||||
return relativePath === "" || (!relativePath.startsWith("..") && !isAbsolute(relativePath));
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve path to the admin package source directory.
|
||||
* In dev mode inside this repo, we alias @emdash-cms/admin to the source so
|
||||
* Vite processes it directly — giving instant HMR instead of requiring a
|
||||
* rebuild + restart. External apps should use the built package surface.
|
||||
*/
|
||||
function resolveAdminSource(projectRoot: string): string | undefined {
|
||||
const require = createRequire(import.meta.url);
|
||||
const adminPath = require.resolve("@emdash-cms/admin");
|
||||
// dist/index.js -> go up to package root, then into src/
|
||||
const packageRoot = resolve(dirname(adminPath), "..");
|
||||
const repoRoot = resolve(packageRoot, "..", "..");
|
||||
const srcEntry = resolve(packageRoot, "src", "index.ts");
|
||||
|
||||
try {
|
||||
if (existsSync(srcEntry) && isInside(repoRoot, projectRoot)) {
|
||||
return resolve(packageRoot, "src");
|
||||
}
|
||||
} catch {
|
||||
// Not in local repo — fall back to dist
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
 * Options shared by createVirtualModulesPlugin and createViteConfig,
 * assembled by the EmDash Astro integration.
 */
export interface VitePluginOptions {
  /** Serializable config (database, storage, auth descriptors) — safe to embed in generated module source */
  serializableConfig: Record<string, unknown>;
  /** Resolved EmDash config */
  resolvedConfig: EmDashConfig;
  /** Plugin descriptors (trusted plugins; sandboxed ones live on resolvedConfig.sandboxed) */
  pluginDescriptors: PluginDescriptor[];
  /** Astro config */
  astroConfig: AstroConfig;
}
|
||||
|
||||
/**
|
||||
* Creates the EmDash virtual modules Vite plugin.
|
||||
*/
|
||||
export function createVirtualModulesPlugin(options: VitePluginOptions): Plugin {
|
||||
const { serializableConfig, resolvedConfig, pluginDescriptors, astroConfig } = options;
|
||||
|
||||
let viteCommand: "build" | "serve" | undefined;
|
||||
|
||||
return {
|
||||
name: "emdash-virtual-modules",
|
||||
configResolved(config) {
|
||||
viteCommand = config.command;
|
||||
},
|
||||
resolveId(id: string) {
|
||||
if (id === VIRTUAL_CONFIG_ID) {
|
||||
return RESOLVED_VIRTUAL_CONFIG_ID;
|
||||
}
|
||||
if (id === VIRTUAL_DIALECT_ID) {
|
||||
return RESOLVED_VIRTUAL_DIALECT_ID;
|
||||
}
|
||||
if (id === VIRTUAL_STORAGE_ID) {
|
||||
return RESOLVED_VIRTUAL_STORAGE_ID;
|
||||
}
|
||||
if (id === VIRTUAL_ADMIN_REGISTRY_ID) {
|
||||
return RESOLVED_VIRTUAL_ADMIN_REGISTRY_ID;
|
||||
}
|
||||
if (id === VIRTUAL_PLUGINS_ID) {
|
||||
return RESOLVED_VIRTUAL_PLUGINS_ID;
|
||||
}
|
||||
if (id === VIRTUAL_SANDBOX_RUNNER_ID) {
|
||||
return RESOLVED_VIRTUAL_SANDBOX_RUNNER_ID;
|
||||
}
|
||||
if (id === VIRTUAL_SANDBOXED_PLUGINS_ID) {
|
||||
return RESOLVED_VIRTUAL_SANDBOXED_PLUGINS_ID;
|
||||
}
|
||||
if (id === VIRTUAL_AUTH_ID) {
|
||||
return RESOLVED_VIRTUAL_AUTH_ID;
|
||||
}
|
||||
if (id === VIRTUAL_AUTH_PROVIDERS_ID) {
|
||||
return RESOLVED_VIRTUAL_AUTH_PROVIDERS_ID;
|
||||
}
|
||||
if (id === VIRTUAL_MEDIA_PROVIDERS_ID) {
|
||||
return RESOLVED_VIRTUAL_MEDIA_PROVIDERS_ID;
|
||||
}
|
||||
if (id === VIRTUAL_BLOCK_COMPONENTS_ID) {
|
||||
return RESOLVED_VIRTUAL_BLOCK_COMPONENTS_ID;
|
||||
}
|
||||
if (id === VIRTUAL_SEED_ID) {
|
||||
return RESOLVED_VIRTUAL_SEED_ID;
|
||||
}
|
||||
if (id === VIRTUAL_WAIT_UNTIL_ID) {
|
||||
return RESOLVED_VIRTUAL_WAIT_UNTIL_ID;
|
||||
}
|
||||
},
|
||||
load(id: string) {
|
||||
if (id === RESOLVED_VIRTUAL_CONFIG_ID) {
|
||||
return generateConfigModule(serializableConfig);
|
||||
}
|
||||
// Generate a module that statically imports the configured dialect
|
||||
// This allows Vite to properly resolve and bundle it
|
||||
if (id === RESOLVED_VIRTUAL_DIALECT_ID) {
|
||||
return generateDialectModule({
|
||||
entrypoint: resolvedConfig.database?.entrypoint,
|
||||
type: resolvedConfig.database?.type,
|
||||
supportsRequestScope: resolvedConfig.database?.supportsRequestScope ?? false,
|
||||
});
|
||||
}
|
||||
// Generate a module that statically imports the configured storage
|
||||
if (id === RESOLVED_VIRTUAL_STORAGE_ID) {
|
||||
return generateStorageModule(resolvedConfig.storage?.entrypoint);
|
||||
}
|
||||
// Generate plugins module that imports and instantiates all plugins
|
||||
if (id === RESOLVED_VIRTUAL_PLUGINS_ID) {
|
||||
return generatePluginsModule(pluginDescriptors);
|
||||
}
|
||||
// Generate admin registry module with plugin components
|
||||
if (id === RESOLVED_VIRTUAL_ADMIN_REGISTRY_ID) {
|
||||
// Include both trusted and sandboxed plugins
|
||||
const allDescriptors = [...pluginDescriptors, ...(resolvedConfig.sandboxed ?? [])];
|
||||
return generateAdminRegistryModule(allDescriptors);
|
||||
}
|
||||
// Generate sandbox runner module
|
||||
if (id === RESOLVED_VIRTUAL_SANDBOX_RUNNER_ID) {
|
||||
return generateSandboxRunnerModule(resolvedConfig.sandboxRunner);
|
||||
}
|
||||
// Generate sandboxed plugins config module
|
||||
if (id === RESOLVED_VIRTUAL_SANDBOXED_PLUGINS_ID) {
|
||||
// Pass project root for proper module resolution
|
||||
const projectRoot = fileURLToPath(astroConfig.root);
|
||||
return generateSandboxedPluginsModule(resolvedConfig.sandboxed ?? [], projectRoot);
|
||||
}
|
||||
// Generate auth module that statically imports the configured auth provider
|
||||
if (id === RESOLVED_VIRTUAL_AUTH_ID) {
|
||||
const authDescriptor = resolvedConfig.auth;
|
||||
if (!authDescriptor || !("entrypoint" in authDescriptor)) {
|
||||
return generateAuthModule(undefined);
|
||||
}
|
||||
return generateAuthModule(authDescriptor.entrypoint);
|
||||
}
|
||||
// Generate auth providers module (pluggable login methods)
|
||||
if (id === RESOLVED_VIRTUAL_AUTH_PROVIDERS_ID) {
|
||||
return generateAuthProvidersModule(resolvedConfig.authProviders ?? []);
|
||||
}
|
||||
// Generate media providers module
|
||||
if (id === RESOLVED_VIRTUAL_MEDIA_PROVIDERS_ID) {
|
||||
return generateMediaProvidersModule(resolvedConfig.mediaProviders ?? []);
|
||||
}
|
||||
// Generate block components module (plugin rendering components for PortableText)
|
||||
if (id === RESOLVED_VIRTUAL_BLOCK_COMPONENTS_ID) {
|
||||
return generateBlockComponentsModule(pluginDescriptors);
|
||||
}
|
||||
// Generate seed module — embeds user seed or default at build time
|
||||
if (id === RESOLVED_VIRTUAL_SEED_ID) {
|
||||
const projectRoot = fileURLToPath(astroConfig.root);
|
||||
return generateSeedModule(projectRoot, viteCommand === "serve");
|
||||
}
|
||||
// Generate wait-until module — re-exports cloudflare:workers'
|
||||
// waitUntil under the Cloudflare adapter, undefined otherwise.
|
||||
if (id === RESOLVED_VIRTUAL_WAIT_UNTIL_ID) {
|
||||
return generateWaitUntilModule(astroConfig.adapter?.name);
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Modules that contain native Node.js addons or Node-only code.
 * These must be external in SSR to avoid bundling failures on Node.
 * On Cloudflare, the adapter handles its own externalization — setting
 * ssr.external there conflicts with @cloudflare/vite-plugin's validation.
 *
 * Also reused below in optimizeDeps.exclude (non-Cloudflare branch) so the
 * dev pre-bundler never tries to process these either.
 */
const NODE_NATIVE_EXTERNALS = [
  "better-sqlite3",
  "bindings",
  "file-uri-to-path",
  "@libsql/kysely-libsql",
  "pg",
];
|
||||
|
||||
/**
|
||||
* Detect whether the Cloudflare adapter is being used.
|
||||
*/
|
||||
function isCloudflareAdapter(astroConfig: AstroConfig): boolean {
|
||||
return astroConfig.adapter?.name === "@astrojs/cloudflare";
|
||||
}
|
||||
|
||||
/**
 * Creates the Vite config update for EmDash.
 *
 * @param options - integration state (configs, plugin descriptors, Astro config)
 * @param command - the Astro command being run; "dev" enables the
 *   source-alias/HMR path for the admin package
 * @returns a partial Vite config that the integration merges into Astro's
 */
export function createViteConfig(
  options: VitePluginOptions,
  command: "dev" | "build" | "preview" | "sync",
): NonNullable<AstroConfig["vite"]> {
  const adminDistPath = resolveAdminDist();
  const cloudflare = isCloudflareAdapter(options.astroConfig);
  const isDev = command === "dev";
  const projectRoot = fileURLToPath(options.astroConfig.root);

  // Alias admin to its source tree only in dev AND inside the local monorepo
  // (resolveAdminSource returns undefined otherwise).
  const adminSourcePath = isDev ? resolveAdminSource(projectRoot) : undefined;
  const useSource = adminSourcePath !== undefined;

  return {
    // Astro SSR routes resolve version.ts from source (not tsdown dist),
    // so Vite needs its own define pass for the __EMDASH_*__ placeholders.
    define: {
      __EMDASH_VERSION__: JSON.stringify(VERSION),
      __EMDASH_COMMIT__: JSON.stringify(COMMIT),
      __EMDASH_PSEUDO_LOCALE__: JSON.stringify(
        isDev && process.env["EMDASH_PSEUDO_LOCALE"] === "1",
      ),
    },
    resolve: {
      dedupe: ["@emdash-cms/admin", "react", "react-dom"],
      // Array form so more-specific entries are checked first.
      // The styles.css alias must come before the package alias, otherwise
      // Vite's prefix matching on "@emdash-cms/admin" would resolve
      // "@emdash-cms/admin/styles.css" through the source directory.
      alias: [
        { find: "@emdash-cms/admin/styles.css", replacement: resolve(adminDistPath, "styles.css") },
        { find: "@emdash-cms/admin", replacement: useSource ? adminSourcePath : adminDistPath },
        // `use-sync-external-store/shim` is a React <18 polyfill that ships
        // only as CJS. It's pulled in transitively by `@tiptap/react`. With
        // pnpm's virtual store the file lives under .pnpm/, where Vite's
        // dep scanner can't reach it for pre-bundling — so the browser is
        // served raw `module.exports` and hydration fails with
        // `SyntaxError: ... does not provide an export named
        // 'useSyncExternalStore'`. Redirect both shim entry points to the
        // main `use-sync-external-store` package, which on React >=18
        // (our peer-dep floor) delegates to React's built-in hook.
        {
          find: "use-sync-external-store/shim/index.js",
          replacement: "use-sync-external-store",
        },
        { find: "use-sync-external-store/shim", replacement: "use-sync-external-store" },
      ],
    },
    // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- Monorepo has both vite 6 (docs) and vite 7 (core). tsgo resolves correctly.
    plugins: [
      createVirtualModulesPlugin(options),
      // In dev mode with source alias, compile Lingui macros on the fly
      // and redirect locale .mjs imports to dist/.
      // In production, macros are pre-compiled by tsdown in the admin package.
      ...(useSource ? [linguiMacroPlugin(adminSourcePath, adminDistPath)] : []),
    ] as NonNullable<AstroConfig["vite"]>["plugins"],
    // Handle native modules for SSR.
    // On Node: external keeps native addons out of the SSR bundle.
    // On Cloudflare: skip — the adapter handles externalization, and setting
    // ssr.external conflicts with @cloudflare/vite-plugin's resolve.external validation.
    ssr: cloudflare
      ? {
          noExternal: ["emdash", "@emdash-cms/admin"],
          // Pre-bundle EmDash's runtime deps for workerd. Without this,
          // Vite discovers them one-by-one on first request, causing workerd
          // to enter "worker cancelled" state on cold cache.
          optimizeDeps: {
            // Exclude EmDash virtual modules from esbuild's dependency
            // scan. These are resolved by the Vite plugin at transform time,
            // but esbuild encounters them when crawling emdash's dist files
            // during pre-bundling and can't resolve them. Vite's exclude
            // uses prefix matching (id.startsWith(m + "/")), so
            // "virtual:emdash" matches all "virtual:emdash/*" imports.
            exclude: ["virtual:emdash"],
            include: [
              // EmDash direct deps
              "emdash > @portabletext/toolkit",
              "emdash > @unpic/placeholder",
              "emdash > blurhash",
              "emdash > croner",
              "emdash > image-size",
              "emdash > jose",
              "emdash > jpeg-js",
              "emdash > kysely",
              "emdash > mime/lite",
              "emdash > modern-tar",
              "emdash > sanitize-html",
              "emdash > ulidx",
              "emdash > upng-js",
              "emdash > astro-portabletext",
              "emdash > sax",
              // Deeper transitive deps
              "emdash > sanitize-html > parse5",
              "emdash > @emdash-cms/gutenberg-to-portable-text > @wordpress/block-serialization-default-parser",
              "emdash > @emdash-cms/auth > @oslojs/crypto/ecdsa",
              "emdash > @emdash-cms/auth > @oslojs/crypto/sha2",
              "emdash > @emdash-cms/auth > @oslojs/webauthn",
              // MCP SDK — server/index.js statically imports ajv (CJS-only).
              // Pre-bundling converts CJS to ESM so workerd can load it.
              "emdash > @modelcontextprotocol/sdk > ajv",
              "emdash > @modelcontextprotocol/sdk > ajv-formats",
              // React (commonly used, may be hoisted)
              "react",
              "react/jsx-dev-runtime",
              "react/jsx-runtime",
              "react-dom",
              "react-dom/server",
              // Top-level deps (use astro > path for pnpm compat)
              "astro > zod/v4",
              "astro > zod/v4/core",
              "@emdash-cms/cloudflare > kysely-d1",
              // Astro internal deps not covered by @astrojs/cloudflare adapter
              "astro/virtual-modules/middleware.js",
              "astro/virtual-modules/live-config",
              "astro/content/runtime",
              "astro/assets/utils/inferRemoteSize.js",
              "astro/assets/fonts/runtime.js",
              "@astrojs/cloudflare/image-service",
            ],
          },
        }
      : {
          external: NODE_NATIVE_EXTERNALS,
          noExternal: ["emdash", "@emdash-cms/admin"],
        },
    optimizeDeps: {
      // When using source, don't pre-bundle JS — let Vite transform on the fly for HMR.
      // When using dist, pre-bundle to avoid re-optimization on first hydration.
      include: useSource
        ? ["@astrojs/react/client.js"]
        : ["@emdash-cms/admin", "@astrojs/react/client.js"],
      exclude: cloudflare ? ["virtual:emdash"] : [...NODE_NATIVE_EXTERNALS, "virtual:emdash"],
    },
  };
}
|
||||
561
packages/core/src/astro/middleware.ts
Normal file
561
packages/core/src/astro/middleware.ts
Normal file
@@ -0,0 +1,561 @@
|
||||
/**
|
||||
* EmDash middleware
|
||||
*
|
||||
* Thin wrapper that initializes EmDashRuntime and attaches it to locals.
|
||||
* All heavy lifting happens in EmDashRuntime.
|
||||
*/
|
||||
|
||||
import { defineMiddleware } from "astro:middleware";
|
||||
import type { Kysely } from "kysely";
|
||||
// Import from virtual modules (populated by integration at build time)
|
||||
// @ts-ignore - virtual module
|
||||
import virtualConfig from "virtual:emdash/config";
|
||||
// @ts-ignore - virtual module
|
||||
import {
|
||||
createDialect as virtualCreateDialect,
|
||||
createRequestScopedDb as virtualCreateRequestScopedDb,
|
||||
} from "virtual:emdash/dialect";
|
||||
import type { RequestScopedDbOpts } from "virtual:emdash/dialect";
|
||||
// @ts-ignore - virtual module
|
||||
import { mediaProviders as virtualMediaProviders } from "virtual:emdash/media-providers";
|
||||
// @ts-ignore - virtual module
|
||||
import { plugins as virtualPlugins } from "virtual:emdash/plugins";
|
||||
import {
|
||||
createSandboxRunner as virtualCreateSandboxRunner,
|
||||
sandboxEnabled as virtualSandboxEnabled,
|
||||
// @ts-ignore - virtual module
|
||||
} from "virtual:emdash/sandbox-runner";
|
||||
// @ts-ignore - virtual module
|
||||
import { sandboxedPlugins as virtualSandboxedPlugins } from "virtual:emdash/sandboxed-plugins";
|
||||
// @ts-ignore - virtual module
|
||||
import { createStorage as virtualCreateStorage } from "virtual:emdash/storage";
|
||||
|
||||
import {
|
||||
createRecorder,
|
||||
flushRecorder,
|
||||
isInstrumentationEnabled,
|
||||
} from "../database/instrumentation.js";
|
||||
import {
|
||||
EmDashRuntime,
|
||||
type RuntimeDependencies,
|
||||
type SandboxedPluginEntry,
|
||||
type MediaProviderEntry,
|
||||
} from "../emdash-runtime.js";
|
||||
import { setI18nConfig } from "../i18n/config.js";
|
||||
import type { Database, Storage } from "../index.js";
|
||||
import { createPublicMediaUrlResolver } from "../media/url.js";
|
||||
import type { SandboxRunner } from "../plugins/sandbox/types.js";
|
||||
import type { ResolvedPlugin } from "../plugins/types.js";
|
||||
import { invalidateUrlPatternCache } from "../query.js";
|
||||
import { getRequestContext, runWithContext } from "../request-context.js";
|
||||
import type { EmDashConfig } from "./integration/runtime.js";
|
||||
import type { EmDashHandlers } from "./types.js";
|
||||
|
||||
// --- Worker-lifetime module state (reset only when the worker restarts) ---

// Cached runtime instance (persists across requests within worker)
let runtimeInstance: EmDashRuntime | null = null;
// Whether initialization is in progress (prevents concurrent init attempts)
let runtimeInitializing = false;

/** Whether i18n config has been initialized from the virtual module */
let i18nInitialized = false;

/**
 * Whether we've verified the database has been set up.
 * On a fresh deployment the first request may hit a public page, bypassing
 * runtime init. Without this check, template helpers like getSiteSettings()
 * would query an empty database and crash. Once verified (or once the runtime
 * has initialized via an admin/API request), this stays true for the worker's
 * lifetime.
 */
let setupVerified = false;
|
||||
|
||||
/**
|
||||
* Get EmDash configuration from virtual module
|
||||
*/
|
||||
function getConfig(): EmDashConfig | null {
|
||||
if (virtualConfig && typeof virtualConfig === "object") {
|
||||
// Initialize i18n config on first access (once per worker lifetime)
|
||||
if (!i18nInitialized) {
|
||||
i18nInitialized = true;
|
||||
// eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- virtual module checked as object above
|
||||
const config = virtualConfig as Record<string, unknown>;
|
||||
if (config.i18n && typeof config.i18n === "object") {
|
||||
setI18nConfig(
|
||||
// eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- runtime-checked above
|
||||
config.i18n as {
|
||||
defaultLocale: string;
|
||||
locales: string[];
|
||||
fallback?: Record<string, string>;
|
||||
},
|
||||
);
|
||||
} else {
|
||||
setI18nConfig(null);
|
||||
}
|
||||
}
|
||||
|
||||
// eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- virtual module import is untyped (@ts-ignore above)
|
||||
return virtualConfig as EmDashConfig;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get plugins from virtual module
|
||||
*/
|
||||
function getPlugins(): ResolvedPlugin[] {
|
||||
// eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- virtual module import is untyped (@ts-ignore above)
|
||||
return (virtualPlugins as ResolvedPlugin[]) || [];
|
||||
}
|
||||
|
||||
/**
 * Build runtime dependencies from virtual modules.
 *
 * Every virtual import is untyped (see the @ts-ignore'd imports above), so
 * each field is cast to the shape RuntimeDependencies expects; array-valued
 * entries fall back to [] when the virtual module exported nothing.
 */
function buildDependencies(config: EmDashConfig): RuntimeDependencies {
  return {
    config,
    plugins: getPlugins(),
    // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- virtual module import is untyped (@ts-ignore above)
    createDialect: virtualCreateDialect as (config: Record<string, unknown>) => unknown,
    // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- virtual module import is untyped (@ts-ignore above)
    createStorage: virtualCreateStorage as ((config: Record<string, unknown>) => Storage) | null,
    // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- virtual module import is untyped (@ts-ignore above)
    sandboxEnabled: virtualSandboxEnabled as boolean,
    // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- virtual module import is untyped (@ts-ignore above)
    sandboxedPluginEntries: (virtualSandboxedPlugins as SandboxedPluginEntry[]) || [],
    // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- virtual module import is untyped (@ts-ignore above)
    createSandboxRunner: virtualCreateSandboxRunner as
      | ((opts: { db: Kysely<Database> }) => SandboxRunner)
      | null,
    // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- virtual module import is untyped (@ts-ignore above)
    mediaProviderEntries: (virtualMediaProviders as MediaProviderEntry[]) || [],
  };
}
|
||||
|
||||
/**
|
||||
* Get or create the runtime instance.
|
||||
*
|
||||
* When `initTimings` is provided, any timing samples recorded during a
|
||||
* genuine cold init are appended. Subsequent warm calls (hitting the
|
||||
* cached instance) push nothing — callers should treat an empty array
|
||||
* as "warm, nothing to report".
|
||||
*/
|
||||
async function getRuntime(
|
||||
config: EmDashConfig,
|
||||
initTimings?: Array<{ name: string; dur: number; desc?: string }>,
|
||||
): Promise<EmDashRuntime> {
|
||||
// Return cached instance if available
|
||||
if (runtimeInstance) {
|
||||
return runtimeInstance;
|
||||
}
|
||||
|
||||
// If another request is already initializing, wait and retry.
|
||||
// We don't share the promise across requests because workerd flags
|
||||
// cross-request promise resolution (causes warnings + potential hangs).
|
||||
if (runtimeInitializing) {
|
||||
// Poll until the initializing request finishes
|
||||
await new Promise((resolve) => setTimeout(resolve, 50));
|
||||
return getRuntime(config, initTimings);
|
||||
}
|
||||
|
||||
runtimeInitializing = true;
|
||||
try {
|
||||
const deps = buildDependencies(config);
|
||||
const runtime = await EmDashRuntime.create(deps, initTimings);
|
||||
runtimeInstance = runtime;
|
||||
return runtime;
|
||||
} finally {
|
||||
runtimeInitializing = false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Astro attaches AstroCookies to outgoing responses via a well-known global
 * symbol. Cloning a Response (`new Response(body, init)`) drops non-header
 * metadata, so any middleware that wraps the response must explicitly forward
 * this symbol or `cookies.set()` calls will be silently dropped.
 *
 * Forwarded in finalizeResponse below.
 */
const ASTRO_COOKIES_SYMBOL = Symbol.for("astro.cookies");
|
||||
|
||||
/**
|
||||
* Baseline security headers applied to all responses.
|
||||
* Admin routes get additional headers (strict CSP) from auth middleware.
|
||||
*/
|
||||
function finalizeResponse(
|
||||
response: Response,
|
||||
serverTimings?: Array<{ name: string; dur: number; desc?: string }>,
|
||||
): Response {
|
||||
const res = new Response(response.body, response);
|
||||
const astroCookies = Reflect.get(response, ASTRO_COOKIES_SYMBOL);
|
||||
if (astroCookies !== undefined) {
|
||||
Reflect.set(res, ASTRO_COOKIES_SYMBOL, astroCookies);
|
||||
}
|
||||
res.headers.set("X-Content-Type-Options", "nosniff");
|
||||
res.headers.set("Referrer-Policy", "strict-origin-when-cross-origin");
|
||||
res.headers.set("Permissions-Policy", "camera=(), microphone=(), geolocation=(), payment=()");
|
||||
if (!res.headers.has("Content-Security-Policy")) {
|
||||
res.headers.set("X-Frame-Options", "SAMEORIGIN");
|
||||
}
|
||||
if (serverTimings && serverTimings.length > 0) {
|
||||
res.headers.set(
|
||||
"Server-Timing",
|
||||
serverTimings
|
||||
.map((t) => {
|
||||
const dur = Math.round(t.dur);
|
||||
return t.desc ? `${t.name};dur=${dur};desc="${t.desc}"` : `${t.name};dur=${dur}`;
|
||||
})
|
||||
.join(", "),
|
||||
);
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
/** Public routes that require the runtime (sitemap, robots.txt, etc.) */
const PUBLIC_RUNTIME_ROUTES = new Set(["/sitemap.xml", "/robots.txt"]);
// Per-collection sitemap files, e.g. /sitemap-posts.xml — collection slugs are
// lowercase alphanumeric/underscore and must start with a letter.
const SITEMAP_COLLECTION_RE = /^\/sitemap-[a-z][a-z0-9_]*\.xml$/;
|
||||
|
||||
/**
 * Ask the configured database adapter for a per-request scoped Kysely. The
 * adapter encapsulates any per-request semantics (D1 sessions, read-replica
 * routing, bookmark cookies, etc.); core just forwards the cookie jar and
 * request flags and wraps next() in ALS if a scope was returned.
 *
 * @param opts - Cookie jar, URL, and request flags forwarded to the adapter.
 * @returns The scoped db plus a `commit()` callback, or null when the adapter
 *   does not provide per-request scoping.
 */
function createRequestScopedDb(
  opts: RequestScopedDbOpts,
): { db: Kysely<Database>; commit: () => void } | null {
  // Adapters without request scoping simply omit the virtual export.
  if (typeof virtualCreateRequestScopedDb !== "function") return null;
  // eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion -- adapter returns Kysely<unknown>; cast to Database since core owns that type
  const fn = virtualCreateRequestScopedDb as (
    o: RequestScopedDbOpts,
  ) => { db: Kysely<Database>; commit: () => void } | null;
  return fn(opts);
}
|
||||
|
||||
/**
 * Core request middleware.
 *
 * Decides per-request whether the EmDash runtime is needed, initializes it
 * (a cached singleton — see getRuntime), attaches handler bindings to
 * `locals.emdash`, and wires the request-scoped database context (ALS)
 * around page rendering. Anonymous public requests take a fast path that
 * attaches only the page-contribution handlers.
 */
export const onRequest = defineMiddleware(async (context, next) => {
  const { request, locals, cookies } = context;
  const url = context.url;

  // Fast path: routes outside /_emdash/ that plugins inject (e.g.,
  // /.well-known/atproto-client-metadata.json) skip the entire runtime
  // init + middleware chain. External servers fetch these with tight
  // timeouts (~1-2s) so they must respond quickly even on cold starts.
  if (!url.pathname.startsWith("/_emdash") && virtualConfig?.authProviders) {
    const isPluginFastRoute = virtualConfig.authProviders.some(
      (p: { routes?: { pattern?: string }[] }) =>
        p.routes?.some((r: { pattern?: string }) => r.pattern && url.pathname === r.pattern),
    );
    if (isPluginFastRoute) {
      return finalizeResponse(await next());
    }
  }

  // Perf instrumentation is opt-in; no recorder is allocated when disabled.
  const queryRecorder = isInstrumentationEnabled()
    ? createRecorder(url.pathname, request.method, request.headers.get("x-perf-phase") ?? "default")
    : undefined;

  const run = async (): Promise<Response> => {
    // Process /_emdash routes and public routes with an active session
    // (logged-in editors need the runtime for toolbar/visual editing on public pages)
    const isEmDashRoute = url.pathname.startsWith("/_emdash");
    const isPublicRuntimeRoute =
      PUBLIC_RUNTIME_ROUTES.has(url.pathname) || SITEMAP_COLLECTION_RE.test(url.pathname);

    // Check for edit mode cookie - editors viewing public pages need the runtime
    // so auth middleware can verify their session for visual editing
    const hasEditCookie = cookies.get("emdash-edit-mode")?.value === "true";
    const hasPreviewToken = url.searchParams.has("_preview");

    // Playground mode: the playground middleware stashes the per-session DO database
    // on locals.__playgroundDb. When present, use runWithContext() to make it
    // available to getDb() and the runtime's db getter via the correct ALS instance.
    const playgroundDb = locals.__playgroundDb;

    // Read the Astro session user once up-front. Both the anonymous fast path
    // and the full doInit path need this, and the session store is network-backed
    // (KV / Durable Object) so we want to avoid re-fetching on the hot path.
    // Skipped entirely for:
    // - prerendered requests (no session at build time)
    // - requests without an `astro-session` cookie (no session to look up)
    // The cookie check matters on Cloudflare Workers, where Astro's session
    // backend is KV: calling session.get() on every anonymous public request
    // turns normal traffic into a flood of KV read misses. See #733.
    const hasSessionCookie = cookies.get("astro-session") !== undefined;
    const sessionUser =
      context.isPrerendered || !hasSessionCookie ? null : await context.session?.get("user");

    if (!isEmDashRoute && !isPublicRuntimeRoute && !hasEditCookie && !hasPreviewToken) {
      if (!sessionUser && !playgroundDb) {
        const timings: Array<{ name: string; dur: number; desc?: string }> = [];
        const mwStart = performance.now();

        // On a fresh deployment the database may be completely empty.
        // Public pages call getSiteSettings() / getMenu() via getDb(), which
        // bypasses runtime init and would crash with "no such table: options".
        // Do a one-time lightweight probe using the same getDb() instance the
        // page will use: if the migrations table doesn't exist, no migrations
        // have ever run -- redirect to the setup wizard.
        if (!setupVerified) {
          const t0 = performance.now();
          try {
            const { getDb } = await import("../loader.js");
            const db = await getDb();
            await db
              .selectFrom("_emdash_migrations" as keyof Database)
              .selectAll()
              .limit(1)
              .execute();
            setupVerified = true;
          } catch {
            // Table doesn't exist -> fresh database, redirect to setup
            return context.redirect("/_emdash/admin/setup");
          }
          timings.push({ name: "setup", dur: performance.now() - t0, desc: "Setup probe" });
        }

        // Initialize the runtime for page:metadata and page:fragments hooks.
        // The runtime is a cached singleton — after the first request,
        // getRuntime() is just a null-check. This enables SEO plugins to
        // contribute meta tags for all visitors, not just logged-in editors.
        const config = getConfig();
        if (config) {
          // Sub-phase timings are populated only on the cold init. Warm
          // requests hit the cached runtime and leave this empty.
          const initSubTimings: Array<{ name: string; dur: number; desc?: string }> = [];
          const t0 = performance.now();
          try {
            const runtime = await getRuntime(config, initSubTimings);
            setupVerified = true;
            // eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion -- partial object; getPageRuntime() only checks for the page-contribution methods
            locals.emdash = {
              collectPageMetadata: runtime.collectPageMetadata.bind(runtime),
              collectPageFragments: runtime.collectPageFragments.bind(runtime),
              getPublicMediaUrl: createPublicMediaUrlResolver(runtime.storage),
            } as EmDashHandlers;
          } catch {
            // Non-fatal — EmDashHead will fall back to base SEO contributions
          }
          timings.push({ name: "rt", dur: performance.now() - t0, desc: "Runtime init" });
          // Append cold-only sub-phase timings so the breakdown is visible
          // in Server-Timing (rt.db, rt.fts, rt.plugins, rt.site,
          // rt.sandbox, rt.market, rt.hooks, rt.cron).
          for (const sub of initSubTimings) timings.push(sub);
        }

        // Even on the anonymous fast path we ask the adapter for a per-request
        // scoped db. For D1 with read replication this routes anonymous reads
        // to the nearest replica; for other adapters it's a no-op.
        const anonScoped = createRequestScopedDb({
          config: config?.database?.config,
          isAuthenticated: false,
          isWrite: request.method !== "GET" && request.method !== "HEAD",
          cookies,
          url,
        });
        // Render the page and append render/total timings before finalizing.
        const runAnon = async () => {
          const t0 = performance.now();
          const response = await next();
          timings.push({ name: "render", dur: performance.now() - t0, desc: "Page render" });
          timings.push({ name: "mw", dur: performance.now() - mwStart, desc: "Total middleware" });
          return finalizeResponse(response, timings);
        };
        if (anonScoped) {
          // Preserve any outer ALS context (e.g. queryRecorder) while swapping
          // in the request-scoped db.
          const parent = getRequestContext();
          const ctx = parent
            ? { ...parent, db: anonScoped.db }
            : { editMode: false, db: anonScoped.db };
          return runWithContext(ctx, async () => {
            const response = await runAnon();
            // commit() lets the adapter persist per-request state after the
            // render (e.g. a D1 bookmark cookie for read-your-writes).
            anonScoped.commit();
            return response;
          });
        }
        return runAnon();
      }
    }

    const config = getConfig();
    if (!config) {
      console.error("EmDash: No configuration found");
      return finalizeResponse(await next());
    }

    // In playground mode, wrap the entire runtime init + request handling in
    // runWithContext so that getDatabase() and all init queries use the real
    // DO database via the same AsyncLocalStorage instance as the loader.
    const doInit = async () => {
      const timings: Array<{ name: string; dur: number; desc?: string }> = [];
      const mwStart = performance.now();

      try {
        // Get or create runtime. Sub-phase timings (rt.db, rt.fts, rt.plugins,
        // rt.site, rt.sandbox, rt.market, rt.hooks, rt.cron) are populated
        // only on the cold init — subsequent warm calls find the cached
        // instance and `initSubTimings` stays empty.
        const initSubTimings: Array<{ name: string; dur: number; desc?: string }> = [];
        let t0 = performance.now();
        const runtime = await getRuntime(config, initSubTimings);
        timings.push({ name: "rt", dur: performance.now() - t0, desc: "Runtime init" });
        // Forward any sub-phase samples so cold-start breakdown is visible
        // in Server-Timing. Each phase appears prefixed "rt." to distinguish
        // from the aggregate "rt" timing above.
        for (const sub of initSubTimings) timings.push(sub);

        // Runtime init runs migrations, so the DB is guaranteed set up
        setupVerified = true;

        // The manifest is no longer pre-loaded here. It's admin-only
        // content that public/anonymous requests never read, and
        // loading it on every request put logged-out hot paths on
        // the same staleness budget as admin operations. Admin
        // routes call `emdash.getManifest()` directly.

        // Attach to locals for route handlers
        locals.emdash = {
          // Content handlers
          handleContentList: runtime.handleContentList.bind(runtime),
          handleContentGet: runtime.handleContentGet.bind(runtime),
          handleContentCreate: runtime.handleContentCreate.bind(runtime),
          handleContentUpdate: runtime.handleContentUpdate.bind(runtime),
          handleContentDelete: runtime.handleContentDelete.bind(runtime),

          // Trash handlers
          handleContentListTrashed: runtime.handleContentListTrashed.bind(runtime),
          handleContentRestore: runtime.handleContentRestore.bind(runtime),
          handleContentPermanentDelete: runtime.handleContentPermanentDelete.bind(runtime),
          handleContentCountTrashed: runtime.handleContentCountTrashed.bind(runtime),
          handleContentGetIncludingTrashed: runtime.handleContentGetIncludingTrashed.bind(runtime),

          // Duplicate handler
          handleContentDuplicate: runtime.handleContentDuplicate.bind(runtime),

          // Publishing & Scheduling handlers
          handleContentPublish: runtime.handleContentPublish.bind(runtime),
          handleContentUnpublish: runtime.handleContentUnpublish.bind(runtime),
          handleContentSchedule: runtime.handleContentSchedule.bind(runtime),
          handleContentUnschedule: runtime.handleContentUnschedule.bind(runtime),
          handleContentCountScheduled: runtime.handleContentCountScheduled.bind(runtime),
          handleContentDiscardDraft: runtime.handleContentDiscardDraft.bind(runtime),
          handleContentCompare: runtime.handleContentCompare.bind(runtime),
          handleContentTranslations: runtime.handleContentTranslations.bind(runtime),

          // Media handlers
          handleMediaList: runtime.handleMediaList.bind(runtime),
          handleMediaGet: runtime.handleMediaGet.bind(runtime),
          handleMediaCreate: runtime.handleMediaCreate.bind(runtime),
          handleMediaUpdate: runtime.handleMediaUpdate.bind(runtime),
          handleMediaDelete: runtime.handleMediaDelete.bind(runtime),

          // Revision handlers
          handleRevisionList: runtime.handleRevisionList.bind(runtime),
          handleRevisionGet: runtime.handleRevisionGet.bind(runtime),
          handleRevisionRestore: runtime.handleRevisionRestore.bind(runtime),

          // Plugin routes
          handlePluginApiRoute: runtime.handlePluginApiRoute.bind(runtime),
          getPluginRouteMeta: runtime.getPluginRouteMeta.bind(runtime),

          // Media provider methods
          getMediaProvider: runtime.getMediaProvider.bind(runtime),
          getMediaProviderList: runtime.getMediaProviderList.bind(runtime),

          // Page contribution methods (for EmDashHead/EmDashBodyStart/EmDashBodyEnd)
          collectPageMetadata: runtime.collectPageMetadata.bind(runtime),
          collectPageFragments: runtime.collectPageFragments.bind(runtime),

          // Lazy search index health check — search endpoints call this
          // before querying so a crash-corrupted index gets repaired on
          // first use rather than stalling every cold start.
          ensureSearchHealthy: runtime.ensureSearchHealthy.bind(runtime),

          // Direct access (for advanced use cases)
          storage: runtime.storage,
          db: runtime.db,
          getPublicMediaUrl: createPublicMediaUrlResolver(runtime.storage),
          hooks: runtime.hooks,
          email: runtime.email,
          configuredPlugins: runtime.configuredPlugins,

          // Configuration (for checking database type, auth mode, etc.)
          config,

          // Lazy manifest accessor — admin-only consumers call this on
          // demand. `requestCached` inside `getManifest` dedupes within
          // a single request.
          getManifest: runtime.getManifest.bind(runtime),

          // Clear the URL pattern cache after schema mutations that
          // affect collection URL patterns.
          invalidateUrlPatternCache,

          // Sandbox runner (for marketplace plugin install/update)
          getSandboxRunner: runtime.getSandboxRunner.bind(runtime),

          // Sync marketplace plugin states (after install/update/uninstall)
          syncMarketplacePlugins: runtime.syncMarketplacePlugins.bind(runtime),

          // Update plugin enabled/disabled status and rebuild hook pipeline
          setPluginStatus: runtime.setPluginStatus.bind(runtime),
        };
      } catch (error) {
        // Non-fatal: downstream handlers see locals.emdash undefined and
        // surface their own errors.
        console.error("EmDash middleware error:", error);
      }

      // Ask the adapter for a request-scoped db. When it returns one, we stash
      // it in ALS so the runtime's db getter and loader's getDb() pick it up,
      // then call commit() after next() so the adapter can persist any
      // per-request state (e.g. a D1 bookmark cookie for read-your-writes).
      const scoped = createRequestScopedDb({
        config: config?.database?.config,
        isAuthenticated: !!sessionUser,
        isWrite: request.method !== "GET" && request.method !== "HEAD",
        cookies: context.cookies,
        url,
      });

      // Render the page and append render/total timings before finalizing.
      const renderAndFinalize = async () => {
        const t0 = performance.now();
        const response = await next();
        timings.push({ name: "render", dur: performance.now() - t0, desc: "Page render" });
        timings.push({ name: "mw", dur: performance.now() - mwStart, desc: "Total middleware" });
        return finalizeResponse(response, timings);
      };

      if (scoped) {
        // Preserve any outer ALS context while swapping in the scoped db.
        const parent = getRequestContext();
        const ctx = parent ? { ...parent, db: scoped.db } : { editMode: false, db: scoped.db };
        return runWithContext(ctx, async () => {
          const response = await renderAndFinalize();
          scoped.commit();
          return response;
        });
      }

      return renderAndFinalize();
    }; // end doInit

    if (playgroundDb) {
      // Read the edit-mode cookie to determine if visual editing is active.
      // Default to false -- editing is opt-in via the playground toolbar toggle.
      const editMode = context.cookies.get("emdash-edit-mode")?.value === "true";
      // Playground DBs are per-session isolated instances whose schema is
      // independent of the configured one — flag as isolated so schema-
      // derived caches (manifest, taxonomy defs) rebuild against it.
      const parent = getRequestContext();
      const ctx = parent
        ? { ...parent, editMode, db: playgroundDb, dbIsIsolated: true }
        : { editMode, db: playgroundDb, dbIsIsolated: true };
      return runWithContext(ctx, doInit);
    }
    return doInit();
  };

  if (queryRecorder) {
    try {
      return await runWithContext({ editMode: false, queryRecorder }, run);
    } finally {
      // Always flush, even when the request throws.
      flushRecorder(queryRecorder);
    }
  }
  return run();
});
|
||||
|
||||
export default onRequest;
|
||||
808
packages/core/src/astro/middleware/auth.ts
Normal file
808
packages/core/src/astro/middleware/auth.ts
Normal file
@@ -0,0 +1,808 @@
|
||||
/**
|
||||
* Auth middleware for admin routes
|
||||
*
|
||||
* Checks if the user is authenticated and has appropriate permissions.
|
||||
* Supports two auth modes:
|
||||
* - Passkey (default): Session-based auth with passkey login
|
||||
* - External providers: JWT-based auth (Cloudflare Access, etc.)
|
||||
*
|
||||
* This middleware runs AFTER the setup middleware - so if we get here,
|
||||
* we know setup is complete and users exist.
|
||||
*/
|
||||
|
||||
import type { User, RoleLevel } from "@emdash-cms/auth";
|
||||
import { createKyselyAdapter } from "@emdash-cms/auth/adapters/kysely";
|
||||
import { defineMiddleware } from "astro:middleware";
|
||||
import { ulid } from "ulidx";
|
||||
// Import auth provider via virtual module (statically bundled)
|
||||
// This avoids dynamic import issues in Cloudflare Workers
|
||||
import { authenticate as virtualAuthenticate } from "virtual:emdash/auth";
|
||||
// @ts-ignore - virtual module
|
||||
import virtualConfig from "virtual:emdash/config";
|
||||
|
||||
import { checkPublicCsrf } from "../../api/csrf.js";
|
||||
import { apiError } from "../../api/error.js";
|
||||
import { getPublicOrigin } from "../../api/public-url.js";
|
||||
|
||||
/** Cache headers for middleware error responses (matches API_CACHE_HEADERS in api/error.ts) */
// NOTE(review): this const sits between two import groups — consider moving it
// below the final import so the file's imports stay contiguous.
const MW_CACHE_HEADERS = {
  "Cache-Control": "private, no-store",
} as const;
|
||||
import { resolveApiToken, resolveOAuthToken } from "../../api/handlers/api-tokens.js";
|
||||
import { hasScope } from "../../auth/api-tokens.js";
|
||||
import { getAuthMode, type ExternalAuthMode } from "../../auth/mode.js";
|
||||
import type { ExternalAuthConfig } from "../../auth/types.js";
|
||||
import type { EmDashHandlers } from "../types.js";
|
||||
import { buildEmDashCsp } from "./csp.js";
|
||||
|
||||
declare global {
  namespace App {
    interface Locals {
      /** Authenticated user, populated by this middleware when credentials resolve. */
      user?: User;
      /** Token scopes when authenticated via API token or OAuth token. Undefined for session auth. */
      tokenScopes?: string[];
      /** Runtime handler surface attached by the core middleware (absent on fast paths). */
      emdash?: EmDashHandlers;
    }
    interface SessionData {
      // Only the user id is stored in the session payload.
      user: { id: string };
      // Whether the post-login welcome flow has been shown for this session.
      hasSeenWelcome: boolean;
    }
  }
}
|
||||
|
||||
// Role level constants (matching @emdash-cms/auth)
const ROLE_ADMIN = 50;
// Bearer-only MCP endpoint; unauthenticated requests to it get the
// OAuth-discovery-style 401 (see mcpUnauthorizedResponse).
const MCP_ENDPOINT_PATH = "/_emdash/api/mcp";
|
||||
|
||||
function isUnsafeMethod(method: string): boolean {
|
||||
return method !== "GET" && method !== "HEAD" && method !== "OPTIONS";
|
||||
}
|
||||
|
||||
function csrfRejectedResponse(): Response {
|
||||
return new Response(
|
||||
JSON.stringify({ error: { code: "CSRF_REJECTED", message: "Missing required header" } }),
|
||||
{
|
||||
status: 403,
|
||||
headers: { "Content-Type": "application/json", ...MW_CACHE_HEADERS },
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
function mcpUnauthorizedResponse(
|
||||
url: URL,
|
||||
config?: Parameters<typeof getPublicOrigin>[1],
|
||||
): Response {
|
||||
const origin = getPublicOrigin(url, config);
|
||||
return Response.json(
|
||||
{ error: { code: "NOT_AUTHENTICATED", message: "Not authenticated" } },
|
||||
{
|
||||
status: 401,
|
||||
headers: {
|
||||
"WWW-Authenticate": `Bearer resource_metadata="${origin}/.well-known/oauth-protected-resource"`,
|
||||
...MW_CACHE_HEADERS,
|
||||
},
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
/**
 * API routes that skip auth — each handles its own access control.
 *
 * Prefix entries match any path starting with that prefix.
 * Exact entries (no trailing slash or wildcard) match that path only.
 */
const PUBLIC_API_PREFIXES = [
  "/_emdash/api/setup",
  "/_emdash/api/auth/login",
  "/_emdash/api/auth/register",
  "/_emdash/api/auth/dev-bypass",
  "/_emdash/api/auth/signup/",
  "/_emdash/api/auth/magic-link/",
  "/_emdash/api/auth/invite/",
  "/_emdash/api/auth/oauth/",
  "/_emdash/api/oauth/device/token",
  "/_emdash/api/oauth/device/code",
  // NOTE(review): "/_emdash/api/oauth/token" also appears in PUBLIC_API_EXACT
  // below — the duplicate is harmless but one entry could be dropped.
  "/_emdash/api/oauth/token",
  "/_emdash/api/oauth/register",
  "/_emdash/api/comments/",
  "/_emdash/api/media/file/",
  "/_emdash/.well-known/",
];
|
||||
|
||||
const PUBLIC_API_EXACT = new Set([
  "/_emdash/api/auth/passkey/options",
  "/_emdash/api/auth/passkey/verify",
  "/_emdash/api/auth/mode",
  // NOTE(review): also listed in PUBLIC_API_PREFIXES — redundant but harmless.
  "/_emdash/api/oauth/token",
  "/_emdash/api/snapshot",
  // Public site search — read-only. The query layer hardcodes status='published'
  // so unauthenticated callers only see published content. Admin endpoints
  // (/enable, /rebuild, /stats) remain private because they're not in this set.
  "/_emdash/api/search",
]);
|
||||
|
||||
// Build merged public routes at module load from auth provider descriptors.
|
||||
// Routes ending with "/" are treated as prefixes; all others are exact matches.
|
||||
const { exact: _providerExactRoutes, prefixes: _providerPrefixRoutes } = (() => {
|
||||
const exact = new Set<string>();
|
||||
const prefixes: string[] = [];
|
||||
if (!virtualConfig?.authProviders) return { exact, prefixes };
|
||||
for (const route of virtualConfig.authProviders.flatMap((p) => p.publicRoutes ?? [])) {
|
||||
if (route.endsWith("/")) {
|
||||
prefixes.push(route);
|
||||
} else {
|
||||
exact.add(route);
|
||||
}
|
||||
}
|
||||
return { exact, prefixes };
|
||||
})();
|
||||
|
||||
/**
 * OAuth protocol endpoints that are CSRF-exempt by design.
 *
 * These are RFC-defined endpoints (RFC 6749 §3.2, RFC 7591 §3, RFC 8628 §3.1/§3.4)
 * specified to be called cross-origin by external clients (MCP clients, CLIs,
 * native apps). They authenticate each request on its own merits:
 *
 * - /oauth/token: requires PKCE code_verifier, device_code, or refresh_token
 * - /oauth/register: RFC 7591 dynamic client registration — anonymous by design
 * - /oauth/device/code: RFC 8628 device flow initiation — anonymous by design
 * - /oauth/device/token: requires device_code the client already holds
 *
 * None of these rely on ambient cookie credentials, so browser-based CSRF
 * attacks have nothing to exploit. The endpoints themselves advertise
 * `Access-Control-Allow-Origin: *`. Note: /oauth/device/authorize (the user
 * consent step) is NOT in this list — it is session-authenticated.
 *
 * Keep in sync with the matching entries in PUBLIC_API_PREFIXES /
 * PUBLIC_API_EXACT above.
 */
const CSRF_EXEMPT_PUBLIC_ROUTES = new Set([
  "/_emdash/api/oauth/token",
  "/_emdash/api/oauth/register",
  "/_emdash/api/oauth/device/code",
  "/_emdash/api/oauth/device/token",
]);
|
||||
|
||||
function isPublicEmDashRoute(pathname: string): boolean {
|
||||
if (PUBLIC_API_EXACT.has(pathname)) return true;
|
||||
if (PUBLIC_API_PREFIXES.some((p) => pathname.startsWith(p))) return true;
|
||||
if (_providerExactRoutes.has(pathname)) return true;
|
||||
if (_providerPrefixRoutes.some((p) => pathname.startsWith(p))) return true;
|
||||
if (import.meta.env.DEV && pathname === "/_emdash/api/typegen") return true;
|
||||
return false;
|
||||
}
|
||||
|
||||
/** True when the path is an RFC-defined OAuth endpoint exempt from CSRF checks. */
function isCsrfExemptPublicRoute(pathname: string): boolean {
  return CSRF_EXEMPT_PUBLIC_ROUTES.has(pathname);
}
|
||||
|
||||
/**
 * Auth gate for /_emdash admin and API routes.
 *
 * Ordering matters: public API / plugin / setup routes are dispatched first
 * (CSRF-checked but never auth-blocked), then Bearer-token auth, then the
 * custom-header CSRF check, and finally session/external auth via
 * handleEmDashAuth. Strict CSP is stamped on /_emdash responses in prod.
 */
export const onRequest = defineMiddleware(async (context, next) => {
  const { url } = context;

  // Only check auth on admin routes and API routes
  const isAdminRoute = url.pathname.startsWith("/_emdash/admin");
  const isSetupRoute = url.pathname.startsWith("/_emdash/admin/setup");
  const isApiRoute = url.pathname.startsWith("/_emdash/api");
  const isPublicApiRoute = isPublicEmDashRoute(url.pathname);

  const isPublicRoute = !isAdminRoute && !isApiRoute;

  // Public API routes skip auth but still need CSRF protection on state-changing methods.
  // We check Origin header against the request host (same approach as Astro's checkOrigin).
  // This prevents cross-origin form submissions and fetch requests from malicious sites.
  if (isPublicApiRoute) {
    const method = context.request.method.toUpperCase();
    if (
      isUnsafeMethod(method) &&
      !isCsrfExemptPublicRoute(url.pathname) // OAuth protocol endpoints — cross-origin by design
    ) {
      const publicOrigin = getPublicOrigin(url, context.locals.emdash?.config);
      const csrfError = checkPublicCsrf(context.request, url, publicOrigin);
      if (csrfError) return csrfError;
    }
    return next();
  }

  // Plugin routes: soft auth (resolve user if credentials present, but never block).
  // The catch-all handler decides per-route whether auth is required (public vs private).
  // Public plugin routes that accept POST are vulnerable to cross-origin form submissions,
  // so we apply the same Origin-based CSRF check as other public routes.
  const isPluginRoute = url.pathname.startsWith("/_emdash/api/plugins/");
  if (isPluginRoute) {
    const method = context.request.method.toUpperCase();
    if (method !== "GET" && method !== "HEAD" && method !== "OPTIONS") {
      const publicOrigin = getPublicOrigin(url, context.locals.emdash?.config);
      const csrfError = checkPublicCsrf(context.request, url, publicOrigin);
      if (csrfError) return csrfError;
    }
    return handlePluginRouteAuth(context, next);
  }

  // Setup routes: skip auth but still enforce CSRF on state-changing methods
  if (isSetupRoute) {
    const method = context.request.method.toUpperCase();
    if (method !== "GET" && method !== "HEAD" && method !== "OPTIONS") {
      const csrfHeader = context.request.headers.get("X-EmDash-Request");
      if (csrfHeader !== "1") {
        return new Response(
          JSON.stringify({
            error: { code: "CSRF_REJECTED", message: "Missing required header" },
          }),
          {
            status: 403,
            headers: { "Content-Type": "application/json", ...MW_CACHE_HEADERS },
          },
        );
      }
    }
    return next();
  }

  // For public routes: soft auth check (set locals.user if session exists, but never block)
  if (isPublicRoute) {
    return handlePublicRouteAuth(context, next);
  }

  // --- Everything below is /_emdash (admin + API) ---

  // Try Bearer token auth first (API tokens and OAuth tokens).
  // If successful, skip CSRF (tokens aren't ambient credentials like cookies).
  const bearerResult = await handleBearerAuth(context);

  if (bearerResult === "invalid") {
    const headers: Record<string, string> = {
      "Content-Type": "application/json",
      ...MW_CACHE_HEADERS,
    };
    // Add WWW-Authenticate header on MCP endpoint 401s to trigger OAuth discovery
    if (url.pathname === "/_emdash/api/mcp") {
      const origin = getPublicOrigin(url, context.locals.emdash?.config);
      headers["WWW-Authenticate"] =
        `Bearer resource_metadata="${origin}/.well-known/oauth-protected-resource"`;
    }
    return new Response(
      JSON.stringify({ error: { code: "INVALID_TOKEN", message: "Invalid or expired token" } }),
      { status: 401, headers },
    );
  }

  const isTokenAuth = bearerResult === "authenticated";

  // MCP discovery/tooling is bearer-only. Session/external auth should never
  // be consulted for this endpoint, and unauthenticated requests must return
  // the OAuth discovery-style 401 response.
  const method = context.request.method.toUpperCase();
  const isMcpEndpoint = url.pathname === MCP_ENDPOINT_PATH;
  if (isMcpEndpoint && !isTokenAuth) {
    return mcpUnauthorizedResponse(url, context.locals.emdash?.config);
  }

  // CSRF protection: require X-EmDash-Request header on state-changing requests.
  // Skip for token-authenticated requests (tokens aren't ambient credentials).
  // Browsers block cross-origin custom headers, so this prevents CSRF without tokens.
  // OAuth authorize consent is exempt: it's a standard HTML form POST that can't
  // include custom headers. The consent flow is protected by session + single-use codes.
  const isOAuthConsent = url.pathname.startsWith("/_emdash/oauth/authorize");
  if (
    isApiRoute &&
    !isTokenAuth &&
    !isOAuthConsent &&
    isUnsafeMethod(method) &&
    !isPublicApiRoute // defensive: public API routes already returned above
  ) {
    const csrfHeader = context.request.headers.get("X-EmDash-Request");
    if (csrfHeader !== "1") {
      return csrfRejectedResponse();
    }
  }

  // If already authenticated via Bearer token, enforce scope then skip session/external auth
  if (isTokenAuth) {
    // Enforce API token scopes based on URL pattern + HTTP method
    const scopeError = enforceTokenScope(url.pathname, method, context.locals.tokenScopes);
    if (scopeError) return scopeError;

    const response = await next();
    if (!import.meta.env.DEV) {
      response.headers.set("Content-Security-Policy", buildEmDashCsp());
    }
    return response;
  }

  const response = await handleEmDashAuth(context, next);

  // Set strict CSP on all /_emdash responses (prod only)
  if (!import.meta.env.DEV) {
    response.headers.set("Content-Security-Policy", buildEmDashCsp());
  }

  return response;
});
|
||||
|
||||
/**
|
||||
* Auth handling for /_emdash routes. Returns a Response from either
|
||||
* an auth error/redirect or the downstream route handler.
|
||||
*/
|
||||
async function handleEmDashAuth(
|
||||
context: Parameters<Parameters<typeof defineMiddleware>[0]>[0],
|
||||
next: Parameters<Parameters<typeof defineMiddleware>[0]>[1],
|
||||
): Promise<Response> {
|
||||
const { url, locals } = context;
|
||||
const { emdash } = locals;
|
||||
|
||||
const isPublicAdminRoute =
|
||||
url.pathname.startsWith("/_emdash/admin/login") ||
|
||||
url.pathname.startsWith("/_emdash/admin/invite/accept");
|
||||
const isApiRoute = url.pathname.startsWith("/_emdash/api");
|
||||
|
||||
if (!emdash?.db) {
|
||||
// No database - let the admin handle this error
|
||||
return next();
|
||||
}
|
||||
|
||||
// Determine auth mode from config
|
||||
const authMode = getAuthMode(emdash.config);
|
||||
|
||||
if (authMode.type === "external") {
|
||||
// In dev mode, fall back to passkey auth since external JWT won't be present
|
||||
if (import.meta.env.DEV) {
|
||||
if (isPublicAdminRoute) {
|
||||
return next();
|
||||
}
|
||||
|
||||
return handlePasskeyAuth(context, next, isApiRoute);
|
||||
}
|
||||
|
||||
// External auth provider (Cloudflare Access, etc.)
|
||||
return handleExternalAuth(context, next, authMode, isApiRoute);
|
||||
}
|
||||
|
||||
// Passkey authentication (default)
|
||||
if (isPublicAdminRoute) {
|
||||
return next();
|
||||
}
|
||||
|
||||
return handlePasskeyAuth(context, next, isApiRoute);
|
||||
}
|
||||
|
||||
/**
|
||||
* Soft auth for plugin routes: resolve user from Bearer token or session if present,
|
||||
* but never block unauthenticated requests. The catch-all handler checks route
|
||||
* metadata to decide whether auth is required (public vs private routes).
|
||||
*/
|
||||
async function handlePluginRouteAuth(
|
||||
context: Parameters<Parameters<typeof defineMiddleware>[0]>[0],
|
||||
next: Parameters<Parameters<typeof defineMiddleware>[0]>[1],
|
||||
): Promise<Response> {
|
||||
const { locals } = context;
|
||||
const { emdash } = locals;
|
||||
|
||||
try {
|
||||
// Try Bearer token auth first (API tokens and OAuth tokens)
|
||||
const bearerResult = await handleBearerAuth(context);
|
||||
if (bearerResult === "authenticated") {
|
||||
// User and tokenScopes are set on locals by handleBearerAuth
|
||||
return next();
|
||||
}
|
||||
if (bearerResult === "invalid") {
|
||||
// A token was presented but is invalid/expired — return 401 so the
|
||||
// caller knows their token is bad (don't silently downgrade to no-auth).
|
||||
return new Response(
|
||||
JSON.stringify({ error: { code: "INVALID_TOKEN", message: "Invalid or expired token" } }),
|
||||
{
|
||||
status: 401,
|
||||
headers: { "Content-Type": "application/json", ...MW_CACHE_HEADERS },
|
||||
},
|
||||
);
|
||||
}
|
||||
// "none" — no token presented, try session auth below.
|
||||
} catch (error) {
|
||||
console.error("Plugin route bearer auth error:", error);
|
||||
}
|
||||
|
||||
try {
|
||||
// Try session auth (sets locals.user if session exists)
|
||||
const { session } = context;
|
||||
const sessionUser = await session?.get("user");
|
||||
if (sessionUser?.id && emdash?.db) {
|
||||
const adapter = createKyselyAdapter(emdash.db);
|
||||
const user = await adapter.getUserById(sessionUser.id);
|
||||
if (user && !user.disabled) {
|
||||
locals.user = user;
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
// Log but don't block — public routes should still work without session
|
||||
console.error("Plugin route session auth error:", error);
|
||||
}
|
||||
|
||||
return next();
|
||||
}
|
||||
|
||||
/**
|
||||
* Soft auth check for public routes with edit mode cookie.
|
||||
* Checks the session and sets locals.user if valid, but never blocks the request.
|
||||
*/
|
||||
async function handlePublicRouteAuth(
|
||||
context: Parameters<Parameters<typeof defineMiddleware>[0]>[0],
|
||||
next: Parameters<Parameters<typeof defineMiddleware>[0]>[1],
|
||||
): Promise<Response> {
|
||||
const { locals, session } = context;
|
||||
const { emdash } = locals;
|
||||
|
||||
try {
|
||||
const sessionUser = await session?.get("user");
|
||||
if (sessionUser?.id && emdash?.db) {
|
||||
const adapter = createKyselyAdapter(emdash.db);
|
||||
const user = await adapter.getUserById(sessionUser.id);
|
||||
if (user && !user.disabled) {
|
||||
locals.user = user;
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Silently continue — public page should render normally
|
||||
}
|
||||
|
||||
return next();
|
||||
}
|
||||
|
||||
/**
 * Handle external auth provider authentication (Cloudflare Access, etc.)
 *
 * Flow: verify via the provider's `authenticate` (from the virtual module),
 * then find-or-provision the local user row, sync name/role if configured,
 * reject disabled accounts, and persist the identity to the session.
 *
 * @param context - Astro middleware context (locals, request, session)
 * @param next - downstream handler continuation
 * @param authMode - resolved external auth mode (provider entrypoint + config)
 * @param _isApiRoute - unused here; external auth always fails with plain-text responses
 */
async function handleExternalAuth(
  context: Parameters<Parameters<typeof defineMiddleware>[0]>[0],
  next: Parameters<Parameters<typeof defineMiddleware>[0]>[1],
  authMode: ExternalAuthMode,
  _isApiRoute: boolean,
): Promise<Response> {
  const { locals, request } = context;
  const { emdash } = locals;

  try {
    // Use the authenticate function from the virtual module
    // (statically imported at build time to work with Cloudflare Workers)
    if (typeof virtualAuthenticate !== "function") {
      throw new Error(
        `Auth provider ${authMode.entrypoint} does not export an authenticate function`,
      );
    }

    // Authenticate via the provider
    const authResult = await virtualAuthenticate(request, authMode.config);

    // Get external auth config for auto-provision settings
    // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- narrowing AuthModeConfig to ExternalAuthConfig after provider check
    const externalConfig = authMode.config as ExternalAuthConfig;

    // Find or create user
    const adapter = createKyselyAdapter(emdash!.db);
    let user = await adapter.getUserByEmail(authResult.email);

    if (!user) {
      // User doesn't exist
      if (externalConfig.autoProvision === false) {
        // Auto-provisioning explicitly disabled — unknown identities are rejected.
        return new Response("User not authorized", {
          status: 403,
          headers: { "Content-Type": "text/plain", ...MW_CACHE_HEADERS },
        });
      }

      // Check if this is the first user (they become admin)
      const userCount = await emdash!.db
        .selectFrom("users")
        .select(emdash!.db.fn.count("id").as("count"))
        .executeTakeFirst();

      const isFirstUser = Number(userCount?.count ?? 0) === 0;
      const role = isFirstUser ? ROLE_ADMIN : authResult.role;

      // Create user
      const now = new Date().toISOString();
      const newUser = {
        id: ulid(),
        email: authResult.email,
        name: authResult.name,
        role,
        email_verified: 1, // provider already verified the email
        created_at: now,
        updated_at: now,
      };

      await emdash!.db.insertInto("users").values(newUser).execute();

      // Re-read so `user` carries the canonical row shape from the adapter.
      user = await adapter.getUserByEmail(authResult.email);

      console.log(
        `[external-auth] Provisioned user: ${authResult.email} (role: ${role}, first: ${isFirstUser})`,
      );
    } else {
      // User exists - check if we need to sync anything
      const updates: Record<string, unknown> = {};
      let newName: string | undefined;
      let newRole: RoleLevel | undefined;

      // Sync name from provider if provider provides one and local differs
      if (authResult.name && user.name !== authResult.name) {
        newName = authResult.name;
        updates.name = newName;
      }

      // Sync role if enabled
      if (externalConfig.syncRoles && user.role !== authResult.role) {
        newRole = authResult.role;
        updates.role = newRole;
      }

      if (Object.keys(updates).length > 0) {
        updates.updated_at = new Date().toISOString();
        await emdash!.db.updateTable("users").set(updates).where("id", "=", user.id).execute();

        // Mirror the DB write into the in-memory user so downstream sees fresh values.
        user = {
          ...user,
          ...(newName ? { name: newName } : {}),
          ...(newRole ? { role: newRole } : {}),
        };

        console.log(
          `[external-auth] Updated user ${authResult.email}:`,
          Object.keys(updates).filter((k) => k !== "updated_at"),
        );
      }
    }

    if (!user) {
      // This shouldn't happen, but handle it gracefully
      return new Response("Failed to provision user", {
        status: 500,
        headers: { "Content-Type": "text/plain", ...MW_CACHE_HEADERS },
      });
    }

    // Check if user is disabled locally
    if (user.disabled) {
      return new Response("Account disabled", {
        status: 403,
        headers: { "Content-Type": "text/plain", ...MW_CACHE_HEADERS },
      });
    }

    // Set user in locals
    locals.user = user;

    // Persist to session so public pages can identify the user
    // (external auth headers are only verified on /_emdash routes)
    const { session } = context;
    session?.set("user", { id: user.id });

    return next();
  } catch (error) {
    // Any failure (provider rejection, DB error, missing authenticate export)
    // collapses to a generic 401 — details go to the server log only.
    console.error("[external-auth] Auth error:", error);

    return new Response("Authentication failed", {
      status: 401,
      headers: { "Content-Type": "text/plain", ...MW_CACHE_HEADERS },
    });
  }
}
|
||||
|
||||
/**
|
||||
* Try to authenticate via Bearer token (API token or OAuth token).
|
||||
*
|
||||
* Returns:
|
||||
* - "authenticated" if token is valid and user is resolved
|
||||
* - "invalid" if a token was provided but is invalid/expired
|
||||
* - "none" if no Bearer token was provided
|
||||
*/
|
||||
async function handleBearerAuth(
|
||||
context: Parameters<Parameters<typeof defineMiddleware>[0]>[0],
|
||||
): Promise<"authenticated" | "invalid" | "none"> {
|
||||
const authHeader = context.request.headers.get("Authorization");
|
||||
if (!authHeader?.startsWith("Bearer ")) return "none";
|
||||
|
||||
const token = authHeader.slice(7);
|
||||
if (!token) return "none";
|
||||
|
||||
const { locals } = context;
|
||||
const { emdash } = locals;
|
||||
if (!emdash?.db) return "none";
|
||||
|
||||
// Resolve token based on prefix
|
||||
let resolved: { userId: string; scopes: string[] } | null = null;
|
||||
|
||||
if (token.startsWith("ec_pat_")) {
|
||||
resolved = await resolveApiToken(emdash.db, token);
|
||||
} else if (token.startsWith("ec_oat_")) {
|
||||
resolved = await resolveOAuthToken(emdash.db, token);
|
||||
} else {
|
||||
// Unknown token format
|
||||
return "invalid";
|
||||
}
|
||||
|
||||
if (!resolved) return "invalid";
|
||||
|
||||
// Look up the user
|
||||
const adapter = createKyselyAdapter(emdash.db);
|
||||
const user = await adapter.getUserById(resolved.userId);
|
||||
|
||||
if (!user || user.disabled) return "invalid";
|
||||
|
||||
// Set user and scopes on locals
|
||||
locals.user = user;
|
||||
locals.tokenScopes = resolved.scopes;
|
||||
|
||||
return "authenticated";
|
||||
}
|
||||
|
||||
/**
 * Handle passkey (session-based) authentication
 *
 * Resolves the session user, re-validates it against the database on every
 * request (so deletions/disables take effect immediately), and either lets
 * the request through or responds with a 401 (API) / login redirect (pages).
 *
 * @param context - Astro middleware context (url, locals, session)
 * @param next - downstream handler continuation
 * @param isApiRoute - true for /_emdash/api/* — failures become JSON 401/403
 *   instead of login redirects
 */
async function handlePasskeyAuth(
  context: Parameters<Parameters<typeof defineMiddleware>[0]>[0],
  next: Parameters<Parameters<typeof defineMiddleware>[0]>[1],
  isApiRoute: boolean,
): Promise<Response> {
  const { url, locals, session } = context;
  const { emdash } = locals;

  try {
    // Check session for user (session.get returns a Promise)
    const sessionUser = await session?.get("user");

    if (!sessionUser?.id) {
      if (isApiRoute) {
        return Response.json(
          { error: { code: "NOT_AUTHENTICATED", message: "Not authenticated" } },
          { status: 401, headers: MW_CACHE_HEADERS },
        );
      }
      // Page request: send to login, preserving the originally requested path.
      const loginUrl = new URL("/_emdash/admin/login", getPublicOrigin(url, emdash?.config));
      loginUrl.searchParams.set("redirect", url.pathname);
      return context.redirect(loginUrl.toString());
    }

    // Get full user from database
    const adapter = createKyselyAdapter(emdash!.db);
    const user = await adapter.getUserById(sessionUser.id);

    if (!user) {
      // User no longer exists - clear session
      session?.destroy();
      if (isApiRoute) {
        return Response.json(
          { error: { code: "NOT_FOUND", message: "User not found" } },
          { status: 401, headers: MW_CACHE_HEADERS },
        );
      }
      const loginUrl = new URL("/_emdash/admin/login", getPublicOrigin(url, emdash?.config));
      return context.redirect(loginUrl.toString());
    }

    // Check if user is disabled
    if (user.disabled) {
      session?.destroy();
      if (isApiRoute) {
        return apiError("ACCOUNT_DISABLED", "Account disabled", 403);
      }
      // Tell the login page why the user was bounced.
      const loginUrl = new URL("/_emdash/admin/login", getPublicOrigin(url, emdash?.config));
      loginUrl.searchParams.set("error", "account_disabled");
      return context.redirect(loginUrl.toString());
    }

    // Set user in locals for use by routes
    locals.user = user;
  } catch (error) {
    console.error("Auth middleware error:", error);
    // On error, redirect to login
    return context.redirect("/_emdash/admin/login");
  }

  return next();
}
|
||||
|
||||
// =============================================================================
|
||||
// Token scope enforcement
|
||||
// =============================================================================
|
||||
|
||||
/**
 * Scope rules: ordered list of (pathPrefix, method, requiredScope) tuples.
 * First matching rule wins. Methods: "*" = any, "WRITE" = POST/PUT/PATCH/DELETE.
 *
 * Routes not matched by any rule default to "admin" scope (fail-closed).
 *
 * Order matters: more specific prefixes (e.g. /media/file) must appear
 * before the broader prefix (/media) that would otherwise shadow them.
 */
const SCOPE_RULES: Array<[prefix: string, method: string, scope: string]> = [
  // Content routes
  ["/_emdash/api/content", "GET", "content:read"],
  ["/_emdash/api/content", "WRITE", "content:write"],

  // Media routes (excluding /file/ which is public)
  ["/_emdash/api/media/file", "*", "media:read"], // public anyway, but scope if token-authed
  ["/_emdash/api/media", "GET", "media:read"],
  ["/_emdash/api/media", "WRITE", "media:write"],

  // Schema routes
  ["/_emdash/api/schema", "GET", "schema:read"],
  ["/_emdash/api/schema", "WRITE", "schema:write"],

  // Taxonomy, menu, section, widget, revision — all content domain
  // GET uses content:read (implicit from taxonomies:read / menus:read via role).
  // WRITE uses the granular scope so tokens with only taxonomies:manage or
  // menus:manage are not rejected. content:write implicitly grants these via
  // IMPLICIT_SCOPE_GRANTS in @emdash-cms/auth.
  ["/_emdash/api/taxonomies", "GET", "content:read"],
  ["/_emdash/api/taxonomies", "WRITE", "taxonomies:manage"],
  ["/_emdash/api/menus", "GET", "content:read"],
  ["/_emdash/api/menus", "WRITE", "menus:manage"],
  ["/_emdash/api/sections", "GET", "content:read"],
  ["/_emdash/api/sections", "WRITE", "content:write"],
  ["/_emdash/api/widget-areas", "GET", "content:read"],
  ["/_emdash/api/widget-areas", "WRITE", "content:write"],
  ["/_emdash/api/revisions", "GET", "content:read"],
  ["/_emdash/api/revisions", "WRITE", "content:write"],

  // Search
  ["/_emdash/api/search", "GET", "content:read"],
  ["/_emdash/api/search", "WRITE", "admin"],

  // Import, admin, plugins — all require admin scope
  ["/_emdash/api/import", "*", "admin"],
  ["/_emdash/api/admin", "*", "admin"],
  ["/_emdash/api/plugins", "*", "admin"],

  // Settings — use granular scopes so tokens with settings:read or
  // settings:manage are not rejected at the middleware level.
  ["/_emdash/api/settings", "GET", "settings:read"],
  ["/_emdash/api/settings", "WRITE", "settings:manage"],

  // MCP endpoint — scopes enforced per-tool inside mcp/server.ts
  ["/_emdash/api/mcp", "*", "content:read"],
];

// HTTP methods treated as mutating by the "WRITE" rule method above.
const WRITE_METHODS = new Set(["POST", "PUT", "PATCH", "DELETE"]);
|
||||
|
||||
/**
|
||||
* Enforce API token scopes based on the request URL and HTTP method.
|
||||
* Returns a 403 Response if the scope is insufficient, or null if allowed.
|
||||
*
|
||||
* Session-authenticated requests (tokenScopes === undefined) are never checked.
|
||||
*/
|
||||
function enforceTokenScope(
|
||||
pathname: string,
|
||||
method: string,
|
||||
tokenScopes: string[] | undefined,
|
||||
): Response | null {
|
||||
// Session auth — implicit full access, no scope restrictions
|
||||
if (!tokenScopes) return null;
|
||||
|
||||
const isWrite = WRITE_METHODS.has(method);
|
||||
|
||||
for (const [prefix, ruleMethod, scope] of SCOPE_RULES) {
|
||||
// Match exact prefix or prefix followed by /
|
||||
if (pathname !== prefix && !pathname.startsWith(prefix + "/")) continue;
|
||||
|
||||
// Check method match
|
||||
if (ruleMethod === "*" || (ruleMethod === "WRITE" && isWrite) || ruleMethod === method) {
|
||||
if (hasScope(tokenScopes, scope)) return null;
|
||||
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
error: {
|
||||
code: "INSUFFICIENT_SCOPE",
|
||||
message: `Token lacks required scope: ${scope}`,
|
||||
},
|
||||
}),
|
||||
{ status: 403, headers: { "Content-Type": "application/json", ...MW_CACHE_HEADERS } },
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// No rule matched — default to admin scope (fail-closed)
|
||||
if (hasScope(tokenScopes, "admin")) return null;
|
||||
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
error: {
|
||||
code: "INSUFFICIENT_SCOPE",
|
||||
message: "Token lacks required scope: admin",
|
||||
},
|
||||
}),
|
||||
{ status: 403, headers: { "Content-Type": "application/json", ...MW_CACHE_HEADERS } },
|
||||
);
|
||||
}
|
||||
25
packages/core/src/astro/middleware/csp.ts
Normal file
25
packages/core/src/astro/middleware/csp.ts
Normal file
@@ -0,0 +1,25 @@
|
||||
/**
|
||||
* Strict Content-Security-Policy for /_emdash routes (admin + API).
|
||||
*
|
||||
* Applied via middleware header rather than Astro's built-in CSP because
|
||||
* Astro's auto-hashing defeats 'unsafe-inline' (CSP3 ignores 'unsafe-inline'
|
||||
* when hashes are present), which would break user-facing pages.
|
||||
*
|
||||
* img-src allows any HTTPS origin because the admin renders user content that
|
||||
* may reference external images (migrations, external hosting, embeds).
|
||||
* Plugin security does not rely on img-src -- plugins run in V8 isolates with
|
||||
* no DOM access, and connect-src 'self' blocks fetch-based exfiltration.
|
||||
*/
|
||||
export function buildEmDashCsp(): string {
|
||||
return [
|
||||
"default-src 'self'",
|
||||
"script-src 'self' 'unsafe-inline'",
|
||||
"style-src 'self' 'unsafe-inline'",
|
||||
"connect-src 'self'",
|
||||
"form-action 'self'",
|
||||
"frame-ancestors 'none'",
|
||||
"img-src 'self' https: data: blob:",
|
||||
"object-src 'none'",
|
||||
"base-uri 'self'",
|
||||
].join("; ");
|
||||
}
|
||||
117
packages/core/src/astro/middleware/redirect.ts
Normal file
117
packages/core/src/astro/middleware/redirect.ts
Normal file
@@ -0,0 +1,117 @@
|
||||
/**
|
||||
* Redirect middleware
|
||||
*
|
||||
* Intercepts incoming requests and checks for matching redirect rules.
|
||||
* Runs after runtime init (needs db) but before setup/auth (should handle
|
||||
* ALL routes, including public ones, and should be fast).
|
||||
*
|
||||
* Skip paths:
|
||||
* - /_emdash/* (admin UI, API routes, auth endpoints)
|
||||
* - /_image (Astro image optimization)
|
||||
* - Static assets (files with extensions)
|
||||
*
|
||||
* 404 logging happens post-response: if next() returns 404 and the path
|
||||
* wasn't already matched by a redirect, log it.
|
||||
*/
|
||||
|
||||
import { defineMiddleware } from "astro:middleware";
|
||||
|
||||
import { RedirectRepository } from "../../database/repositories/redirect.js";
|
||||
import { getDb } from "../../loader.js";
|
||||
import {
|
||||
getCachedRedirects,
|
||||
matchCachedPatterns,
|
||||
setCachedRedirects,
|
||||
} from "../../redirects/cache.js";
|
||||
|
||||
/** Paths that should never be intercepted by redirects (admin UI/API, Astro image service) */
const SKIP_PREFIXES = ["/_emdash", "/_image"];

/** Static asset extensions -- don't redirect file requests (matches a 1-10 char extension at path end) */
const ASSET_EXTENSION = /\.\w{1,10}$/;

/** HTTP status codes a stored redirect rule is allowed to use. */
type RedirectCode = 301 | 302 | 303 | 307 | 308;
|
||||
|
||||
function isRedirectCode(code: number): code is RedirectCode {
|
||||
return code === 301 || code === 302 || code === 303 || code === 307 || code === 308;
|
||||
}
|
||||
|
||||
// Redirect middleware entry point: exact-match lookup, then pattern match,
// then 404 logging on the fall-through response. All DB work is cached per
// process; the warm path issues zero queries.
export const onRequest = defineMiddleware(async (context, next) => {
  const { pathname } = context.url;

  // Skip internal paths and static assets
  if (SKIP_PREFIXES.some((prefix) => pathname.startsWith(prefix))) {
    return next();
  }
  if (ASSET_EXTENSION.test(pathname)) {
    return next();
  }

  // Public visitors hit the runtime's anonymous fast path, which intentionally
  // omits `db` from `locals.emdash` to keep the public render boundary minimal
  // (issue #808). Fall back to `getDb()`, which transparently returns the
  // per-request scoped db (set in ALS by the runtime middleware) or the
  // singleton — same path the loader and template helpers use.
  let db = context.locals.emdash?.db;
  if (!db) {
    try {
      db = await getDb();
    } catch {
      // No database reachable at all — nothing to redirect against.
      return next();
    }
  }

  try {
    const repo = new RedirectRepository(db);

    // One query loads both exact and pattern rules into the cache; warm
    // requests issue zero queries. Empty-redirect sites cache an empty
    // Map + array, so the next request returns immediately without probing.
    let cached = getCachedRedirects();
    if (!cached) {
      const all = await repo.findAllEnabled();
      cached = setCachedRedirects(all);
    }

    // 1. Exact match (O(1) Map lookup)
    const exact = cached.exact.get(pathname);
    if (exact) {
      const dest = exact.destination;
      // Reject protocol-relative ("//host") and backslash-escaped destinations
      // to avoid open redirects from a compromised rules table.
      if (dest.startsWith("//") || dest.startsWith("/\\")) return next();
      // Hit counting is fire-and-forget; a failed write must not block the redirect.
      repo.recordHit(exact.id).catch(() => {});
      const code = isRedirectCode(exact.type) ? exact.type : 301;
      return context.redirect(dest, code);
    }

    // 2. Pattern match (compile once, match every request)
    const patternMatch = matchCachedPatterns(cached.patterns, pathname);
    if (patternMatch) {
      const { redirect, destination } = patternMatch;
      // Same open-redirect guard as the exact-match branch.
      if (destination.startsWith("//") || destination.startsWith("/\\")) return next();
      repo.recordHit(redirect.id).catch(() => {});
      const code = isRedirectCode(redirect.type) ? redirect.type : 301;
      return context.redirect(destination, code);
    }

    // No redirect matched -- proceed and check for 404
    const response = await next();

    // Log 404s for unmatched paths (fire-and-forget)
    if (response.status === 404) {
      const referrer = context.request.headers.get("referer") ?? null;
      const userAgent = context.request.headers.get("user-agent") ?? null;
      repo
        .log404({
          path: pathname,
          referrer,
          userAgent,
        })
        .catch(() => {});
    }

    return response;
  } catch {
    // If the redirects table doesn't exist yet (pre-migration), skip silently
    return next();
  }
});
|
||||
151
packages/core/src/astro/middleware/request-context.ts
Normal file
151
packages/core/src/astro/middleware/request-context.ts
Normal file
@@ -0,0 +1,151 @@
|
||||
/**
|
||||
* EmDash Request Context Middleware
|
||||
*
|
||||
* Sets up AsyncLocalStorage-based request context for query functions.
|
||||
* Skips ALS entirely for logged-out users with no CMS signals (fast path).
|
||||
*
|
||||
* Handles:
|
||||
* - Preview tokens: _preview query param with signed HMAC token
|
||||
* - Edit mode: emdash-edit-mode cookie (for visual editing)
|
||||
* - Toolbar injection: floating pill for authenticated editors
|
||||
*/
|
||||
|
||||
import { defineMiddleware } from "astro:middleware";
|
||||
|
||||
import { resolveSecretsCached } from "#config/secrets.js";
|
||||
|
||||
import { verifyPreviewToken, parseContentId } from "../../preview/tokens.js";
|
||||
import { getRequestContext, runWithContext } from "../../request-context.js";
|
||||
import { renderToolbar } from "../../visual-editing/toolbar.js";
|
||||
|
||||
/**
|
||||
* Inject toolbar HTML into a response if it's an HTML page.
|
||||
* Returns the original response if not HTML.
|
||||
*/
|
||||
async function injectToolbar(response: Response, toolbarHtml: string): Promise<Response> {
|
||||
const contentType = response.headers.get("content-type");
|
||||
if (!contentType?.includes("text/html")) return response;
|
||||
|
||||
const html = await response.text();
|
||||
if (!html.includes("</body>")) return new Response(html, response);
|
||||
|
||||
const injected = html.replace("</body>", `${toolbarHtml}</body>`);
|
||||
return new Response(injected, {
|
||||
status: response.status,
|
||||
headers: response.headers,
|
||||
});
|
||||
}
|
||||
|
||||
// Request-context middleware entry point. Decides, per request, whether any
// CMS machinery (edit mode, preview, toolbar) is needed; the common anonymous
// case returns immediately with zero overhead.
export const onRequest = defineMiddleware(async (context, next) => {
  const { cookies, url } = context;

  // Skip /_emdash routes (admin has its own UI, no rendering context needed)
  if (url.pathname.startsWith("/_emdash")) {
    return next();
  }

  // Check for authenticated editor (role >= 30)
  const { user } = context.locals;
  const isEditor = !!user && user.role >= 30;

  // Playground mode: the playground middleware (from @emdash-cms/cloudflare) stashes
  // the per-session DO database on locals.__playgroundDb. We set it via ALS here
  // (same module instance as the loader) so getDb() picks it up correctly.
  //
  // `dbIsIsolated: true` tells schema-derived caches (manifest, taxonomy defs,
  // byline/term existence probes) to bypass module-scope memoization — each
  // playground session is its own database with its own schema, so a cached
  // value from another session would be wrong.
  const playgroundDb = context.locals.__playgroundDb;
  if (playgroundDb) {
    // Check if playground user has toggled edit mode on
    const hasEditCookie = cookies.get("emdash-edit-mode")?.value === "true";
    return runWithContext({ editMode: hasEditCookie, db: playgroundDb, dbIsIsolated: true }, () =>
      next(),
    );
  }

  // Fast path: check for CMS signals before doing any work
  const hasEditCookie = cookies.get("emdash-edit-mode")?.value === "true";
  const hasPreviewToken = url.searchParams.has("_preview");

  // No CMS signals and not an editor → skip everything (zero overhead)
  if (!hasEditCookie && !hasPreviewToken && !isEditor) {
    return next();
  }

  // Determine edit mode: cookie AND authenticated editor
  const editMode = hasEditCookie && isEditor;

  // Read locale from Astro's i18n routing
  // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- Astro context includes currentLocale when i18n is configured
  const locale = (context as { currentLocale?: string }).currentLocale;

  // Verify preview token if present.
  // The preview secret is resolved via `resolveSecretsCached`: env wins,
  // otherwise a DB-stored value is read (or generated on first need).
  // `emdash.db` is set by the runtime middleware which runs first; the
  // only path where it's missing is a runtime-init failure.
  let preview: { collection: string; id: string } | undefined;
  if (hasPreviewToken) {
    const db = context.locals.emdash?.db;
    if (db) {
      const { previewSecret } = await resolveSecretsCached(db);
      const result = await verifyPreviewToken({ url, secret: previewSecret });
      // Invalid/expired tokens simply leave `preview` unset — page renders published content.
      if (result.valid) {
        const { collection, id } = parseContentId(result.payload.cid);
        preview = { collection, id };
      }
    } else {
      console.warn(
        "[emdash] Preview token present but EmDash runtime not initialized; preview disabled.",
      );
    }
  }

  // If we have CMS signals, wrap in ALS context
  const needsContext = hasEditCookie || hasPreviewToken;

  if (needsContext) {
    // Merge with any outer ALS context (e.g. the per-request D1 session db
    // set by the runtime middleware). `storage.run()` replaces the store
    // wholesale, so without the spread the outer `db` would be lost and
    // loaders would fall back to the singleton non-session dialect.
    const parent = getRequestContext();
    return runWithContext({ ...parent, editMode, preview, locale }, async () => {
      let response = await next();

      // Preview responses must not be cached -- draft content could leak past token expiry.
      // Clone the response before modifying headers — the original may be immutable.
      if (preview) {
        response = new Response(response.body, response);
        response.headers.set("Cache-Control", "private, no-store");
      }

      // Inject toolbar for authenticated editors
      if (isEditor) {
        const toolbarHtml = renderToolbar({
          editMode,
          isPreview: !!preview,
        });
        return injectToolbar(response, toolbarHtml);
      }

      return response;
    });
  }

  // Editor without CMS signals — no ALS needed, but inject toolbar
  if (isEditor) {
    const response = await next();
    const toolbarHtml = renderToolbar({
      editMode: false,
      isPreview: false,
    });
    return injectToolbar(response, toolbarHtml);
  }

  return next();
});
|
||||
|
||||
export default onRequest;
|
||||
89
packages/core/src/astro/middleware/setup.ts
Normal file
89
packages/core/src/astro/middleware/setup.ts
Normal file
@@ -0,0 +1,89 @@
|
||||
/**
|
||||
* Setup detection middleware
|
||||
*
|
||||
* Redirects to setup wizard if the site hasn't been set up yet.
|
||||
* Checks both "emdash:setup_complete" option AND user existence.
|
||||
*
|
||||
* Detection logic (in order):
|
||||
* 1. Does options table exist? No → setup needed
|
||||
* 2. Is setup_complete true? No → setup needed
|
||||
* 3. In passkey mode: Are there any users? No → setup needed
|
||||
* In Access mode: Skip user check (first user created on first login)
|
||||
* 4. Proceed to admin
|
||||
*/
|
||||
|
||||
import { defineMiddleware } from "astro:middleware";
|
||||
|
||||
import { getAuthMode } from "../../auth/mode.js";
|
||||
|
||||
// Setup-detection middleware entry point: bounce admin requests to the setup
// wizard until setup_complete is recorded (and, in passkey mode, at least one
// user exists).
export const onRequest = defineMiddleware(async (context, next) => {
  // Only check setup on admin routes (but not the setup page itself)
  const isAdminRoute = context.url.pathname.startsWith("/_emdash/admin");
  const isSetupRoute = context.url.pathname.startsWith("/_emdash/admin/setup");

  if (isAdminRoute && !isSetupRoute) {
    // Check if setup is complete
    const { emdash } = context.locals;

    if (!emdash?.db) {
      // No database configured - let the admin handle this error
      return next();
    }

    try {
      // Check setup_complete flag
      const setupComplete = await emdash.db
        .selectFrom("options")
        .select("value")
        .where("name", "=", "emdash:setup_complete")
        .executeTakeFirst();

      // Value is JSON-encoded, parse it. Accepts both boolean true and string "true"
      const isComplete =
        setupComplete &&
        (() => {
          try {
            const parsed = JSON.parse(setupComplete.value);
            return parsed === true || parsed === "true";
          } catch {
            // Malformed stored value — treat as not set up rather than crash.
            return false;
          }
        })();

      if (!isComplete) {
        // Redirect to setup wizard
        return context.redirect("/_emdash/admin/setup");
      }

      // Check auth mode - user verification differs by mode
      const authMode = getAuthMode(emdash.config);

      // In passkey mode, verify users exist
      // In Access mode, skip this check - first user is created on first Access login
      if (authMode.type === "passkey") {
        // Setup is marked complete, but verify users exist
        // This catches edge case where setup_complete is true but no users
        const userCount = await emdash.db
          .selectFrom("users")
          .select((eb) => eb.fn.countAll<number>().as("count"))
          .executeTakeFirstOrThrow();

        if (userCount.count === 0) {
          // No users - need to complete admin creation
          return context.redirect("/_emdash/admin/setup");
        }
      }
    } catch (error) {
      // If the options table doesn't exist yet, redirect to setup
      // This handles fresh installations where migrations haven't run
      // NOTE(review): matching on the SQLite error message text — assumes a
      // SQLite-family backend; verify this string holds for other dialects.
      if (error instanceof Error && error.message.includes("no such table")) {
        return context.redirect("/_emdash/admin/setup");
      }

      // Other errors - let the admin handle them
      console.error("Setup middleware error:", error);
    }
  }

  return next();
});
|
||||
30
packages/core/src/astro/routes/PluginRegistry.tsx
Normal file
30
packages/core/src/astro/routes/PluginRegistry.tsx
Normal file
@@ -0,0 +1,30 @@
|
||||
/**
|
||||
* Admin Wrapper
|
||||
*
|
||||
* Imports plugin admin modules from the virtual module and passes them
|
||||
* to AdminApp via props. This ensures plugin components are bundled
|
||||
* together with the admin app and available via React context.
|
||||
*/
|
||||
|
||||
import { AdminApp } from "@emdash-cms/admin";
|
||||
import type { Messages } from "@lingui/core";
|
||||
// @ts-ignore - virtual module generated by integration
|
||||
import { pluginAdmins } from "virtual:emdash/admin-registry";
|
||||
// @ts-ignore - virtual module generated by integration
|
||||
import { authProviders } from "virtual:emdash/auth-providers";
|
||||
|
||||
/**
 * Props forwarded from the Astro admin shell route.
 * `locale` and `messages` come from server-side locale negotiation.
 */
interface AdminWrapperProps {
  locale: string;
  messages: Messages;
}

/**
 * Bridges the integration's virtual-module registries (plugin admin
 * panels, auth providers) into the React admin SPA, so they are bundled
 * with the admin entry point and reachable via context inside AdminApp.
 */
export default function AdminWrapper(props: AdminWrapperProps) {
  const { locale, messages } = props;
  return (
    <AdminApp
      locale={locale}
      messages={messages}
      pluginAdmins={pluginAdmins}
      authProviders={authProviders}
    />
  );
}
|
||||
98
packages/core/src/astro/routes/admin.astro
Normal file
98
packages/core/src/astro/routes/admin.astro
Normal file
@@ -0,0 +1,98 @@
|
||||
---
/**
 * Admin shell route - injected by EmDash integration
 *
 * This page serves the EmDash admin React SPA.
 * AdminWrapper imports plugin admin modules and passes them to AdminApp.
 */
import "@emdash-cms/admin/styles.css";
// Use package-qualified import so Astro generates a proper module URL
// (relative imports resolve to absolute paths which break client hydration)
import AdminWrapper from "emdash/routes/PluginRegistry";
import { Font } from "astro:assets";

// Always server-rendered: locale and branding depend on the request.
export const prerender = false;

import { resolveLocale, loadMessages, getLocaleDir } from "@emdash-cms/admin/locales";

// Negotiate locale from the incoming request; dir handles RTL locales.
const resolvedLocale = resolveLocale(Astro.request);
const resolvedDir = getLocaleDir(resolvedLocale);
const messages = await loadMessages(resolvedLocale);

// Optional per-site branding (site name, favicon) from the EmDash config.
const adminConfig = Astro.locals.emdash?.config?.admin;
const pageTitle = adminConfig?.siteName ? `${adminConfig.siteName} Admin` : "EmDash Admin";
---

<!doctype html>
<html lang={resolvedLocale} dir={resolvedDir}>
  <head>
    <meta charset="UTF-8" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <Font cssVariable="--font-emdash" />
    {adminConfig?.favicon ? (
      <link rel="icon" href={adminConfig.favicon} />
    ) : (
      <link
        rel="icon"
        href="data:image/svg+xml,<svg width='75' height='75' viewBox='0 0 75 75' fill='none' xmlns='http://www.w3.org/2000/svg'> <g clip-path='url(%23clip0_50_99)'> <rect x='3' y='3' width='69' height='69' rx='10.518' stroke='url(%23paint0_linear_50_99)' stroke-width='6'/> <rect x='18' y='34' width='39.3661' height='6.56101' fill='url(%23paint1_linear_50_99)'/> </g> <defs> <linearGradient id='paint0_linear_50_99' x1='-42.9996' y1='124' x2='92.4233' y2='-41.7456' gradientUnits='userSpaceOnUse'> <stop stop-color='%230F006B'/> <stop offset='0.0833333' stop-color='%23281A81'/> <stop offset='0.166667' stop-color='%235D0C83'/> <stop offset='0.25' stop-color='%23911475'/> <stop offset='0.333333' stop-color='%23CE2F55'/> <stop offset='0.416667' stop-color='%23FF6633'/> <stop offset='0.5' stop-color='%23F6821F'/> <stop offset='0.583333' stop-color='%23FBAD41'/> <stop offset='0.666667' stop-color='%23FFCD89'/> <stop offset='0.75' stop-color='%23FFE9CB'/> <stop offset='0.833333' stop-color='%23FFF7EC'/> <stop offset='0.916667' stop-color='%23FFF8EE'/> <stop offset='1' stop-color='white'/> </linearGradient> <linearGradient id='paint1_linear_50_99' x1='91.4992' y1='27.4982' x2='28.1217' y2='54.1775' gradientUnits='userSpaceOnUse'> <stop stop-color='white'/> <stop offset='0.129253' stop-color='%23FFF8EE'/> <stop offset='0.617058' stop-color='%23FBAD41'/> <stop offset='0.848019' stop-color='%23F6821F'/> <stop offset='1' stop-color='%23FF6633'/> </linearGradient> <clipPath id='clip0_50_99'> <rect width='75' height='75' fill='white'/> </clipPath> </defs> </svg>"
      />
    )}
    <title>{pageTitle}</title>
  </head>
  <body class="isolate">
    <div id="admin-root" class="min-h-screen">
      <div id="emdash-boot-loader">
        <style>
          #emdash-boot-loader {
            display: flex;
            align-items: center;
            justify-content: center;
            min-height: 100vh;
            color-scheme: light dark;
            background: light-dark(hsl(0 0% 100%), hsl(222.2 84% 4.9%));
          }
          #emdash-boot-loader .loader-inner {
            text-align: center;
          }
          #emdash-boot-loader .spinner {
            width: 24px;
            height: 24px;
            margin: 0 auto;
            border: 2.5px solid
              light-dark(
                hsl(215.4 16.3% 46.9% / 0.3),
                hsl(215 20.2% 65.1% / 0.3)
              );
            border-top-color: light-dark(
              hsl(215.4 16.3% 46.9%),
              hsl(215 20.2% 65.1%)
            );
            border-radius: 50%;
            animation: emdash-spin 0.8s linear infinite;
          }
          #emdash-boot-loader p {
            margin-top: 1rem;
            font-family: var(
              --font-emdash,
              ui-sans-serif,
              system-ui,
              sans-serif
            );
            font-size: 0.875rem;
            color: light-dark(hsl(215.4 16.3% 46.9%), hsl(215 20.2% 65.1%));
          }
          @keyframes emdash-spin {
            to {
              transform: rotate(360deg);
            }
          }
        </style>
        <div class="loader-inner">
          <div class="spinner"></div>
          <p>{adminConfig?.siteName ? `Loading ${adminConfig.siteName}...` : "Loading EmDash..."}</p>
        </div>
      </div>
      <AdminWrapper client:only="react" locale={resolvedLocale} messages={messages} />
    </div>
  </body>
</html>
|
||||
@@ -0,0 +1,112 @@
|
||||
/**
|
||||
* PATCH/DELETE /_emdash/api/admin/allowed-domains/[domain]
|
||||
*
|
||||
* Admin endpoints for managing a specific allowed domain.
|
||||
* PATCH - Update domain settings (enabled, defaultRole)
|
||||
* DELETE - Remove an allowed domain
|
||||
*/
|
||||
|
||||
import type { APIRoute } from "astro";
|
||||
|
||||
export const prerender = false;
|
||||
|
||||
import { Role, roleFromLevel } from "@emdash-cms/auth";
|
||||
import { createKyselyAdapter } from "@emdash-cms/auth/adapters/kysely";
|
||||
|
||||
import { apiError, apiSuccess, handleError } from "#api/error.js";
|
||||
import { isParseError, parseBody } from "#api/parse.js";
|
||||
import { allowedDomainUpdateBody } from "#api/schemas.js";
|
||||
|
||||
/**
|
||||
* PATCH - Update domain settings
|
||||
*/
|
||||
export const PATCH: APIRoute = async ({ params, request, locals }) => {
|
||||
const { emdash, user } = locals;
|
||||
const { domain } = params;
|
||||
|
||||
if (!emdash?.db) {
|
||||
return apiError("NOT_CONFIGURED", "Database not configured", 500);
|
||||
}
|
||||
|
||||
if (!domain) {
|
||||
return apiError("VALIDATION_ERROR", "Domain is required", 400);
|
||||
}
|
||||
|
||||
if (!user || user.role < Role.ADMIN) {
|
||||
return apiError("FORBIDDEN", "Admin privileges required", 403);
|
||||
}
|
||||
|
||||
const adapter = createKyselyAdapter(emdash.db);
|
||||
|
||||
try {
|
||||
const body = await parseBody(request, allowedDomainUpdateBody);
|
||||
if (isParseError(body)) return body;
|
||||
|
||||
// Check if domain exists
|
||||
const existing = await adapter.getAllowedDomain(domain);
|
||||
if (!existing) {
|
||||
return apiError("NOT_FOUND", "Domain not found", 404);
|
||||
}
|
||||
|
||||
// Role is already validated as RoleLevel by Zod schema
|
||||
const defaultRole = body.defaultRole;
|
||||
|
||||
// Update domain
|
||||
const enabled = body.enabled ?? existing.enabled;
|
||||
await adapter.updateAllowedDomain(domain, enabled, defaultRole);
|
||||
|
||||
// Fetch updated domain
|
||||
const updated = await adapter.getAllowedDomain(domain);
|
||||
|
||||
return apiSuccess({
|
||||
success: true,
|
||||
domain: updated
|
||||
? {
|
||||
domain: updated.domain,
|
||||
defaultRole: updated.defaultRole,
|
||||
roleName: roleFromLevel(updated.defaultRole),
|
||||
enabled: updated.enabled,
|
||||
createdAt: updated.createdAt.toISOString(),
|
||||
}
|
||||
: null,
|
||||
});
|
||||
} catch (error) {
|
||||
return handleError(error, "Failed to update allowed domain", "DOMAIN_UPDATE_ERROR");
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* DELETE - Remove an allowed domain
|
||||
*/
|
||||
export const DELETE: APIRoute = async ({ params, locals }) => {
|
||||
const { emdash, user } = locals;
|
||||
const { domain } = params;
|
||||
|
||||
if (!emdash?.db) {
|
||||
return apiError("NOT_CONFIGURED", "Database not configured", 500);
|
||||
}
|
||||
|
||||
if (!domain) {
|
||||
return apiError("VALIDATION_ERROR", "Domain is required", 400);
|
||||
}
|
||||
|
||||
if (!user || user.role < Role.ADMIN) {
|
||||
return apiError("FORBIDDEN", "Admin privileges required", 403);
|
||||
}
|
||||
|
||||
const adapter = createKyselyAdapter(emdash.db);
|
||||
|
||||
try {
|
||||
// Check if domain exists (optional - delete is idempotent)
|
||||
const existing = await adapter.getAllowedDomain(domain);
|
||||
if (!existing) {
|
||||
return apiError("NOT_FOUND", "Domain not found", 404);
|
||||
}
|
||||
|
||||
await adapter.deleteAllowedDomain(domain);
|
||||
|
||||
return apiSuccess({ success: true });
|
||||
} catch (error) {
|
||||
return handleError(error, "Failed to delete allowed domain", "DOMAIN_DELETE_ERROR");
|
||||
}
|
||||
};
|
||||
@@ -0,0 +1,108 @@
|
||||
/**
|
||||
* GET/POST /_emdash/api/admin/allowed-domains
|
||||
*
|
||||
* Admin endpoints for managing allowed signup domains.
|
||||
* GET - List all allowed domains
|
||||
* POST - Add a new allowed domain
|
||||
*/
|
||||
|
||||
import type { APIRoute } from "astro";
|
||||
|
||||
export const prerender = false;
|
||||
|
||||
import { Role, roleFromLevel } from "@emdash-cms/auth";
|
||||
import { createKyselyAdapter } from "@emdash-cms/auth/adapters/kysely";
|
||||
|
||||
import { apiError, apiSuccess, handleError } from "#api/error.js";
|
||||
import { isParseError, parseBody } from "#api/parse.js";
|
||||
import { allowedDomainCreateBody } from "#api/schemas.js";
|
||||
|
||||
const DOMAIN_REGEX = /^[a-zA-Z0-9][a-zA-Z0-9-]*(\.[a-zA-Z0-9-]+)+$/;
|
||||
|
||||
/**
|
||||
* GET - List all allowed domains
|
||||
*/
|
||||
export const GET: APIRoute = async ({ locals }) => {
|
||||
const { emdash, user } = locals;
|
||||
|
||||
if (!emdash?.db) {
|
||||
return apiError("NOT_CONFIGURED", "Database not configured", 500);
|
||||
}
|
||||
|
||||
if (!user || user.role < Role.ADMIN) {
|
||||
return apiError("FORBIDDEN", "Admin privileges required", 403);
|
||||
}
|
||||
|
||||
const adapter = createKyselyAdapter(emdash.db);
|
||||
|
||||
try {
|
||||
const domains = await adapter.getAllowedDomains();
|
||||
|
||||
return apiSuccess({
|
||||
domains: domains.map((d) => ({
|
||||
domain: d.domain,
|
||||
defaultRole: d.defaultRole,
|
||||
roleName: roleFromLevel(d.defaultRole),
|
||||
enabled: d.enabled,
|
||||
createdAt: d.createdAt.toISOString(),
|
||||
})),
|
||||
});
|
||||
} catch (error) {
|
||||
return handleError(error, "Failed to list allowed domains", "DOMAIN_LIST_ERROR");
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* POST - Add a new allowed domain
|
||||
*/
|
||||
export const POST: APIRoute = async ({ request, locals }) => {
|
||||
const { emdash, user } = locals;
|
||||
|
||||
if (!emdash?.db) {
|
||||
return apiError("NOT_CONFIGURED", "Database not configured", 500);
|
||||
}
|
||||
|
||||
if (!user || user.role < Role.ADMIN) {
|
||||
return apiError("FORBIDDEN", "Admin privileges required", 403);
|
||||
}
|
||||
|
||||
const adapter = createKyselyAdapter(emdash.db);
|
||||
|
||||
try {
|
||||
const body = await parseBody(request, allowedDomainCreateBody);
|
||||
if (isParseError(body)) return body;
|
||||
|
||||
// Role is already validated as RoleLevel by Zod schema
|
||||
const defaultRole = body.defaultRole;
|
||||
|
||||
// Validate domain format (no protocol, just domain)
|
||||
const cleanDomain = body.domain.toLowerCase().trim();
|
||||
if (!DOMAIN_REGEX.test(cleanDomain)) {
|
||||
return apiError("VALIDATION_ERROR", "Invalid domain format", 400);
|
||||
}
|
||||
|
||||
// Check if domain already exists
|
||||
const existing = await adapter.getAllowedDomain(cleanDomain);
|
||||
if (existing) {
|
||||
return apiError("CONFLICT", "Domain already exists", 409);
|
||||
}
|
||||
|
||||
const domain = await adapter.createAllowedDomain(cleanDomain, defaultRole);
|
||||
|
||||
return apiSuccess(
|
||||
{
|
||||
success: true,
|
||||
domain: {
|
||||
domain: domain.domain,
|
||||
defaultRole: domain.defaultRole,
|
||||
roleName: roleFromLevel(domain.defaultRole),
|
||||
enabled: domain.enabled,
|
||||
createdAt: domain.createdAt.toISOString(),
|
||||
},
|
||||
},
|
||||
201,
|
||||
);
|
||||
} catch (error) {
|
||||
return handleError(error, "Failed to create allowed domain", "DOMAIN_CREATE_ERROR");
|
||||
}
|
||||
};
|
||||
40
packages/core/src/astro/routes/api/admin/api-tokens/[id].ts
Normal file
40
packages/core/src/astro/routes/api/admin/api-tokens/[id].ts
Normal file
@@ -0,0 +1,40 @@
|
||||
/**
|
||||
* Single API token endpoint
|
||||
*
|
||||
* DELETE /_emdash/api/admin/api-tokens/:id — Revoke a token
|
||||
*/
|
||||
|
||||
import { Role } from "@emdash-cms/auth";
|
||||
import type { APIRoute } from "astro";
|
||||
|
||||
import { apiError, handleError, unwrapResult } from "#api/error.js";
|
||||
import { handleApiTokenRevoke } from "#api/handlers/api-tokens.js";
|
||||
|
||||
export const prerender = false;
|
||||
|
||||
/**
|
||||
* Revoke (delete) an API token.
|
||||
*/
|
||||
export const DELETE: APIRoute = async ({ params, locals }) => {
|
||||
const { emdash, user } = locals;
|
||||
|
||||
if (!emdash?.db) {
|
||||
return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
|
||||
}
|
||||
|
||||
if (!user || user.role < Role.ADMIN) {
|
||||
return apiError("FORBIDDEN", "Admin privileges required", 403);
|
||||
}
|
||||
|
||||
const tokenId = params.id;
|
||||
if (!tokenId) {
|
||||
return apiError("VALIDATION_ERROR", "Token ID is required", 400);
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await handleApiTokenRevoke(emdash.db, tokenId, user.id);
|
||||
return unwrapResult(result);
|
||||
} catch (error) {
|
||||
return handleError(error, "Failed to revoke API token", "TOKEN_REVOKE_ERROR");
|
||||
}
|
||||
};
|
||||
68
packages/core/src/astro/routes/api/admin/api-tokens/index.ts
Normal file
68
packages/core/src/astro/routes/api/admin/api-tokens/index.ts
Normal file
@@ -0,0 +1,68 @@
|
||||
/**
|
||||
* API token management endpoints
|
||||
*
|
||||
* GET /_emdash/api/admin/api-tokens — List tokens for current user
|
||||
* POST /_emdash/api/admin/api-tokens — Create a new token
|
||||
*/
|
||||
|
||||
import { Role } from "@emdash-cms/auth";
|
||||
import type { APIRoute } from "astro";
|
||||
import { z } from "zod";
|
||||
|
||||
import { apiError, handleError, unwrapResult } from "#api/error.js";
|
||||
import { handleApiTokenCreate, handleApiTokenList } from "#api/handlers/api-tokens.js";
|
||||
import { isParseError, parseBody } from "#api/parse.js";
|
||||
import { VALID_SCOPES } from "#auth/api-tokens.js";
|
||||
|
||||
export const prerender = false;
|
||||
|
||||
// Request body for POST: a non-empty name (≤100 chars), at least one
// scope drawn from VALID_SCOPES, and an optional ISO datetime expiry.
const createTokenSchema = z.object({
  name: z.string().min(1).max(100),
  scopes: z.array(z.enum(VALID_SCOPES)).min(1),
  expiresAt: z.string().datetime().optional(),
});
|
||||
|
||||
/**
|
||||
* List API tokens for the current user.
|
||||
* Admins can list all tokens (future: add ?userId= filter).
|
||||
*/
|
||||
export const GET: APIRoute = async ({ locals }) => {
|
||||
const { emdash, user } = locals;
|
||||
|
||||
if (!emdash?.db) {
|
||||
return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
|
||||
}
|
||||
|
||||
if (!user || user.role < Role.ADMIN) {
|
||||
return apiError("FORBIDDEN", "Admin privileges required", 403);
|
||||
}
|
||||
|
||||
const result = await handleApiTokenList(emdash.db, user.id);
|
||||
return unwrapResult(result);
|
||||
};
|
||||
|
||||
/**
|
||||
* Create a new API token.
|
||||
* Returns the raw token once — it cannot be retrieved again.
|
||||
*/
|
||||
export const POST: APIRoute = async ({ request, locals }) => {
|
||||
const { emdash, user } = locals;
|
||||
|
||||
if (!emdash?.db) {
|
||||
return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
|
||||
}
|
||||
|
||||
if (!user || user.role < Role.ADMIN) {
|
||||
return apiError("FORBIDDEN", "Admin privileges required", 403);
|
||||
}
|
||||
|
||||
try {
|
||||
const body = await parseBody(request, createTokenSchema);
|
||||
if (isParseError(body)) return body;
|
||||
|
||||
const result = await handleApiTokenCreate(emdash.db, user.id, body);
|
||||
return unwrapResult(result, 201);
|
||||
} catch (error) {
|
||||
return handleError(error, "Failed to create API token", "TOKEN_CREATE_ERROR");
|
||||
}
|
||||
};
|
||||
@@ -0,0 +1,90 @@
|
||||
import { Role } from "@emdash-cms/auth";
|
||||
import type { APIRoute } from "astro";
|
||||
|
||||
import { requirePerm } from "#api/authorize.js";
|
||||
import { apiError, apiSuccess, handleError } from "#api/error.js";
|
||||
import { isParseError, parseBody } from "#api/parse.js";
|
||||
import { bylineUpdateBody } from "#api/schemas.js";
|
||||
import { invalidateBylineCache } from "#bylines/index.js";
|
||||
import { BylineRepository } from "#db/repositories/byline.js";
|
||||
|
||||
export const prerender = false;
|
||||
|
||||
function requireEditor(user: { role: number } | undefined): Response | null {
|
||||
if (!user || user.role < Role.EDITOR) {
|
||||
return apiError("FORBIDDEN", "Editor privileges required", 403);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
export const GET: APIRoute = async ({ params, locals }) => {
|
||||
const { emdash, user } = locals;
|
||||
// Read access uses content:read so all authenticated roles can view byline data
|
||||
const denied = requirePerm(user, "content:read");
|
||||
if (denied) return denied;
|
||||
|
||||
if (!emdash?.db) {
|
||||
return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
|
||||
}
|
||||
|
||||
try {
|
||||
const repo = new BylineRepository(emdash.db);
|
||||
const byline = await repo.findById(params.id!);
|
||||
if (!byline) return apiError("NOT_FOUND", "Byline not found", 404);
|
||||
return apiSuccess(byline);
|
||||
} catch (error) {
|
||||
return handleError(error, "Failed to get byline", "BYLINE_GET_ERROR");
|
||||
}
|
||||
};
|
||||
|
||||
export const PUT: APIRoute = async ({ params, request, locals }) => {
|
||||
const { emdash, user } = locals;
|
||||
const denied = requireEditor(user);
|
||||
if (denied) return denied;
|
||||
|
||||
if (!emdash?.db) {
|
||||
return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
|
||||
}
|
||||
|
||||
const body = await parseBody(request, bylineUpdateBody);
|
||||
if (isParseError(body)) return body;
|
||||
|
||||
try {
|
||||
const repo = new BylineRepository(emdash.db);
|
||||
const byline = await repo.update(params.id!, {
|
||||
slug: body.slug,
|
||||
displayName: body.displayName,
|
||||
bio: body.bio ?? null,
|
||||
avatarMediaId: body.avatarMediaId ?? null,
|
||||
websiteUrl: body.websiteUrl ?? null,
|
||||
userId: body.userId ?? null,
|
||||
isGuest: body.isGuest,
|
||||
});
|
||||
|
||||
if (!byline) return apiError("NOT_FOUND", "Byline not found", 404);
|
||||
invalidateBylineCache();
|
||||
return apiSuccess(byline);
|
||||
} catch (error) {
|
||||
return handleError(error, "Failed to update byline", "BYLINE_UPDATE_ERROR");
|
||||
}
|
||||
};
|
||||
|
||||
export const DELETE: APIRoute = async ({ params, locals }) => {
|
||||
const { emdash, user } = locals;
|
||||
const denied = requireEditor(user);
|
||||
if (denied) return denied;
|
||||
|
||||
if (!emdash?.db) {
|
||||
return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
|
||||
}
|
||||
|
||||
try {
|
||||
const repo = new BylineRepository(emdash.db);
|
||||
const deleted = await repo.delete(params.id!);
|
||||
if (!deleted) return apiError("NOT_FOUND", "Byline not found", 404);
|
||||
invalidateBylineCache();
|
||||
return apiSuccess({ deleted: true });
|
||||
} catch (error) {
|
||||
return handleError(error, "Failed to delete byline", "BYLINE_DELETE_ERROR");
|
||||
}
|
||||
};
|
||||
74
packages/core/src/astro/routes/api/admin/bylines/index.ts
Normal file
74
packages/core/src/astro/routes/api/admin/bylines/index.ts
Normal file
@@ -0,0 +1,74 @@
|
||||
import { Role } from "@emdash-cms/auth";
|
||||
import type { APIRoute } from "astro";
|
||||
|
||||
import { requirePerm } from "#api/authorize.js";
|
||||
import { apiError, apiSuccess, handleError } from "#api/error.js";
|
||||
import { isParseError, parseBody, parseQuery } from "#api/parse.js";
|
||||
import { bylineCreateBody, bylinesListQuery } from "#api/schemas.js";
|
||||
import { invalidateBylineCache } from "#bylines/index.js";
|
||||
import { BylineRepository } from "#db/repositories/byline.js";
|
||||
|
||||
export const prerender = false;
|
||||
|
||||
export const GET: APIRoute = async ({ url, locals }) => {
|
||||
const { emdash, user } = locals;
|
||||
|
||||
if (!emdash?.db) {
|
||||
return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
|
||||
}
|
||||
|
||||
// Read access uses content:read so all authenticated roles can view byline data
|
||||
const denied = requirePerm(user, "content:read");
|
||||
if (denied) return denied;
|
||||
|
||||
const query = parseQuery(url, bylinesListQuery);
|
||||
if (isParseError(query)) return query;
|
||||
|
||||
try {
|
||||
const repo = new BylineRepository(emdash.db);
|
||||
const result = await repo.findMany({
|
||||
search: query.search,
|
||||
isGuest: query.isGuest,
|
||||
userId: query.userId,
|
||||
cursor: query.cursor,
|
||||
limit: query.limit,
|
||||
});
|
||||
|
||||
return apiSuccess(result);
|
||||
} catch (error) {
|
||||
return handleError(error, "Failed to list bylines", "BYLINE_LIST_ERROR");
|
||||
}
|
||||
};
|
||||
|
||||
export const POST: APIRoute = async ({ request, locals }) => {
|
||||
const { emdash, user } = locals;
|
||||
|
||||
if (!emdash?.db) {
|
||||
return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
|
||||
}
|
||||
|
||||
if (!user || user.role < Role.EDITOR) {
|
||||
return apiError("FORBIDDEN", "Editor privileges required", 403);
|
||||
}
|
||||
|
||||
const body = await parseBody(request, bylineCreateBody);
|
||||
if (isParseError(body)) return body;
|
||||
|
||||
try {
|
||||
const repo = new BylineRepository(emdash.db);
|
||||
const byline = await repo.create({
|
||||
slug: body.slug,
|
||||
displayName: body.displayName,
|
||||
bio: body.bio ?? null,
|
||||
avatarMediaId: body.avatarMediaId ?? null,
|
||||
websiteUrl: body.websiteUrl ?? null,
|
||||
userId: body.userId ?? null,
|
||||
isGuest: body.isGuest,
|
||||
});
|
||||
|
||||
invalidateBylineCache();
|
||||
return apiSuccess(byline, 201);
|
||||
} catch (error) {
|
||||
return handleError(error, "Failed to create byline", "BYLINE_CREATE_ERROR");
|
||||
}
|
||||
};
|
||||
64
packages/core/src/astro/routes/api/admin/comments/[id].ts
Normal file
64
packages/core/src/astro/routes/api/admin/comments/[id].ts
Normal file
@@ -0,0 +1,64 @@
|
||||
/**
|
||||
* Single comment admin endpoints
|
||||
*
|
||||
* GET /_emdash/api/admin/comments/:id - Get comment detail
|
||||
* DELETE /_emdash/api/admin/comments/:id - Hard delete (ADMIN only)
|
||||
*/
|
||||
|
||||
import type { APIRoute } from "astro";
|
||||
|
||||
import { requirePerm } from "#api/authorize.js";
|
||||
import { apiError, handleError, requireDb, unwrapResult } from "#api/error.js";
|
||||
import { handleCommentGet, handleCommentDelete } from "#api/handlers/comments.js";
|
||||
|
||||
export const prerender = false;
|
||||
|
||||
/**
|
||||
* Get single comment detail (includes moderation_metadata)
|
||||
*/
|
||||
export const GET: APIRoute = async ({ params, locals }) => {
|
||||
const { emdash, user } = locals;
|
||||
const { id } = params;
|
||||
|
||||
if (!id) {
|
||||
return apiError("VALIDATION_ERROR", "Comment ID required", 400);
|
||||
}
|
||||
|
||||
const dbErr = requireDb(emdash?.db);
|
||||
if (dbErr) return dbErr;
|
||||
|
||||
const denied = requirePerm(user, "comments:moderate");
|
||||
if (denied) return denied;
|
||||
|
||||
try {
|
||||
const result = await handleCommentGet(emdash.db, id);
|
||||
return unwrapResult(result);
|
||||
} catch (error) {
|
||||
return handleError(error, "Failed to get comment", "COMMENT_GET_ERROR");
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Hard delete a comment (ADMIN only)
|
||||
*/
|
||||
export const DELETE: APIRoute = async ({ params, locals }) => {
|
||||
const { emdash, user } = locals;
|
||||
const { id } = params;
|
||||
|
||||
if (!id) {
|
||||
return apiError("VALIDATION_ERROR", "Comment ID required", 400);
|
||||
}
|
||||
|
||||
const dbErr = requireDb(emdash?.db);
|
||||
if (dbErr) return dbErr;
|
||||
|
||||
const denied = requirePerm(user, "comments:delete");
|
||||
if (denied) return denied;
|
||||
|
||||
try {
|
||||
const result = await handleCommentDelete(emdash.db, id);
|
||||
return unwrapResult(result);
|
||||
} catch (error) {
|
||||
return handleError(error, "Failed to delete comment", "COMMENT_DELETE_ERROR");
|
||||
}
|
||||
};
|
||||
116
packages/core/src/astro/routes/api/admin/comments/[id]/status.ts
Normal file
116
packages/core/src/astro/routes/api/admin/comments/[id]/status.ts
Normal file
@@ -0,0 +1,116 @@
|
||||
/**
|
||||
* Comment status change
|
||||
*
|
||||
* PUT /_emdash/api/admin/comments/:id/status - Change comment status
|
||||
*/
|
||||
|
||||
import type { APIRoute } from "astro";
|
||||
|
||||
import { requirePerm } from "#api/authorize.js";
|
||||
import { apiError, apiSuccess, handleError, requireDb, unwrapResult } from "#api/error.js";
|
||||
import { handleCommentGet } from "#api/handlers/comments.js";
|
||||
import { isParseError, parseBody } from "#api/parse.js";
|
||||
import { commentStatusBody } from "#api/schemas.js";
|
||||
import { getSiteBaseUrl } from "#api/site-url.js";
|
||||
import { lookupContentAuthor, sendCommentNotification } from "#comments/notifications.js";
|
||||
import { moderateComment, type CommentHookRunner } from "#comments/service.js";
|
||||
import type { CommentStatus } from "#db/repositories/comment.js";
|
||||
import type { ModerationDecision } from "#plugins/types.js";
|
||||
|
||||
export const prerender = false;
|
||||
|
||||
/**
 * Change a comment's moderation status.
 *
 * Reads the comment's current status first, delegates the transition to
 * moderateComment with a hook runner built from emdash.hooks, and — when
 * the status becomes "approved" for the first time and email is
 * configured — best-effort notifies the content author.
 */
export const PUT: APIRoute = async ({ params, request, locals }) => {
  const { emdash, user } = locals;
  const { id } = params;

  if (!id) {
    return apiError("VALIDATION_ERROR", "Comment ID required", 400);
  }

  const dbErr = requireDb(emdash?.db);
  if (dbErr) return dbErr;

  const denied = requirePerm(user, "comments:moderate");
  if (denied) return denied;

  try {
    const body = await parseBody(request, commentStatusBody);
    if (isParseError(body)) return body;

    const newStatus = body.status as CommentStatus;

    // Build hook runner for the service
    const hookRunner: CommentHookRunner = {
      async runBeforeCreate(event) {
        return emdash.hooks.runCommentBeforeCreate(event);
      },
      async runModerate(event) {
        // Exclusive hook: at most one plugin decides. Missing or failing
        // moderator falls back to a "pending" decision with a reason.
        const result = await emdash.hooks.invokeExclusiveHook("comment:moderate", event);
        if (!result) return { status: "pending" as const, reason: "No moderator configured" };
        if (result.error) return { status: "pending" as const, reason: "Moderation error" };
        return result.result as ModerationDecision;
      },
      fireAfterCreate(event) {
        // Fire-and-forget: after* hooks must not block or fail the request.
        emdash.hooks
          .runCommentAfterCreate(event)
          .catch((err) =>
            console.error(
              "[comments] afterCreate error:",
              err instanceof Error ? err.message : err,
            ),
          );
      },
      fireAfterModerate(event) {
        emdash.hooks
          .runCommentAfterModerate(event)
          .catch((err) =>
            console.error(
              "[comments] afterModerate error:",
              err instanceof Error ? err.message : err,
            ),
          );
      },
    };

    // Read the comment before updating so we know the previous status
    const existing = await handleCommentGet(emdash.db, id);
    if (!existing.success) {
      return unwrapResult(existing);
    }
    const previousStatus = existing.data.status;

    // NOTE(review): user! assumes requirePerm above rejects requests
    // without an authenticated user — confirm against requirePerm.
    const updated = await moderateComment(
      emdash.db,
      id,
      newStatus,
      { id: user!.id, name: user!.name ?? null },
      hookRunner,
    );

    if (!updated) {
      return apiError("NOT_FOUND", "Comment not found", 404);
    }

    // Send notification when a comment is newly approved
    if (newStatus === "approved" && previousStatus !== "approved" && emdash.email) {
      try {
        const adminBaseUrl = await getSiteBaseUrl(emdash.db, request);
        const content = await lookupContentAuthor(emdash.db, updated.collection, updated.contentId);
        if (content?.author) {
          await sendCommentNotification({
            email: emdash.email,
            comment: updated,
            contentAuthor: content.author,
            adminBaseUrl,
          });
        }
      } catch (err) {
        // Notification failure is logged but never fails the status change.
        console.error("[comments] notification error:", err instanceof Error ? err.message : err);
      }
    }

    return apiSuccess(updated);
  } catch (error) {
    return handleError(error, "Failed to update comment status", "COMMENT_STATUS_ERROR");
  }
};
|
||||
42
packages/core/src/astro/routes/api/admin/comments/bulk.ts
Normal file
42
packages/core/src/astro/routes/api/admin/comments/bulk.ts
Normal file
@@ -0,0 +1,42 @@
|
||||
/**
|
||||
* Bulk comment operations
|
||||
*
|
||||
* POST /_emdash/api/admin/comments/bulk - Bulk status change or delete
|
||||
*/
|
||||
|
||||
import type { APIRoute } from "astro";
|
||||
|
||||
import { requirePerm } from "#api/authorize.js";
|
||||
import { handleError, requireDb, unwrapResult } from "#api/error.js";
|
||||
import { handleCommentBulk } from "#api/handlers/comments.js";
|
||||
import { isParseError, parseBody } from "#api/parse.js";
|
||||
import { commentBulkBody } from "#api/schemas.js";
|
||||
|
||||
export const prerender = false;
|
||||
|
||||
export const POST: APIRoute = async ({ request, locals }) => {
|
||||
const { emdash, user } = locals;
|
||||
|
||||
const dbErr = requireDb(emdash?.db);
|
||||
if (dbErr) return dbErr;
|
||||
|
||||
try {
|
||||
const body = await parseBody(request, commentBulkBody);
|
||||
if (isParseError(body)) return body;
|
||||
|
||||
// Bulk delete requires ADMIN, bulk status change requires EDITOR
|
||||
if (body.action === "delete") {
|
||||
const denied = requirePerm(user, "comments:delete");
|
||||
if (denied) return denied;
|
||||
} else {
|
||||
const denied = requirePerm(user, "comments:moderate");
|
||||
if (denied) return denied;
|
||||
}
|
||||
|
||||
const result = await handleCommentBulk(emdash.db, body.ids, body.action);
|
||||
|
||||
return unwrapResult(result);
|
||||
} catch (error) {
|
||||
return handleError(error, "Failed to perform bulk operation", "COMMENT_BULK_ERROR");
|
||||
}
|
||||
};
|
||||
30
packages/core/src/astro/routes/api/admin/comments/counts.ts
Normal file
30
packages/core/src/astro/routes/api/admin/comments/counts.ts
Normal file
@@ -0,0 +1,30 @@
|
||||
/**
|
||||
* Comment status counts for inbox badges
|
||||
*
|
||||
* GET /_emdash/api/admin/comments/counts
|
||||
*/
|
||||
|
||||
import type { APIRoute } from "astro";
|
||||
|
||||
import { requirePerm } from "#api/authorize.js";
|
||||
import { handleError, requireDb, unwrapResult } from "#api/error.js";
|
||||
import { handleCommentCounts } from "#api/handlers/comments.js";
|
||||
|
||||
export const prerender = false;
|
||||
|
||||
export const GET: APIRoute = async ({ locals }) => {
|
||||
const { emdash, user } = locals;
|
||||
|
||||
const dbErr = requireDb(emdash?.db);
|
||||
if (dbErr) return dbErr;
|
||||
|
||||
const denied = requirePerm(user, "comments:moderate");
|
||||
if (denied) return denied;
|
||||
|
||||
try {
|
||||
const result = await handleCommentCounts(emdash.db);
|
||||
return unwrapResult(result);
|
||||
} catch (error) {
|
||||
return handleError(error, "Failed to get comment counts", "COMMENT_COUNTS_ERROR");
|
||||
}
|
||||
};
|
||||
46
packages/core/src/astro/routes/api/admin/comments/index.ts
Normal file
46
packages/core/src/astro/routes/api/admin/comments/index.ts
Normal file
@@ -0,0 +1,46 @@
|
||||
/**
|
||||
* Admin comment inbox
|
||||
*
|
||||
* GET /_emdash/api/admin/comments - List comments (filterable by status, collection, search)
|
||||
*/
|
||||
|
||||
import type { APIRoute } from "astro";
|
||||
|
||||
import { requirePerm } from "#api/authorize.js";
|
||||
import { handleError, requireDb, unwrapResult } from "#api/error.js";
|
||||
import { handleCommentInbox } from "#api/handlers/comments.js";
|
||||
import { isParseError, parseQuery } from "#api/parse.js";
|
||||
import { commentListQuery } from "#api/schemas.js";
|
||||
import type { CommentStatus } from "#db/repositories/comment.js";
|
||||
|
||||
export const prerender = false;
|
||||
|
||||
/**
|
||||
* List comments for moderation inbox
|
||||
*/
|
||||
export const GET: APIRoute = async ({ url, locals }) => {
|
||||
const { emdash, user } = locals;
|
||||
|
||||
const dbErr = requireDb(emdash?.db);
|
||||
if (dbErr) return dbErr;
|
||||
|
||||
const denied = requirePerm(user, "comments:moderate");
|
||||
if (denied) return denied;
|
||||
|
||||
try {
|
||||
const query = parseQuery(url, commentListQuery);
|
||||
if (isParseError(query)) return query;
|
||||
|
||||
const result = await handleCommentInbox(emdash.db, {
|
||||
status: query.status as CommentStatus | undefined,
|
||||
collection: query.collection,
|
||||
search: query.search,
|
||||
limit: query.limit,
|
||||
cursor: query.cursor,
|
||||
});
|
||||
|
||||
return unwrapResult(result);
|
||||
} catch (error) {
|
||||
return handleError(error, "Failed to list comments", "COMMENT_INBOX_ERROR");
|
||||
}
|
||||
};
|
||||
@@ -0,0 +1,91 @@
|
||||
/**
|
||||
* Exclusive hook selection endpoint
|
||||
*
|
||||
* PUT /_emdash/api/admin/hooks/exclusive/:hookName
|
||||
*
|
||||
* Sets or clears the selected provider for an exclusive hook.
|
||||
* Body: { pluginId: string | null }
|
||||
* Requires settings:manage permission.
|
||||
*/
|
||||
|
||||
import type { APIRoute } from "astro";
|
||||
import { z } from "zod";
|
||||
|
||||
import { requirePerm } from "#api/authorize.js";
|
||||
import { apiError, apiSuccess, handleError } from "#api/error.js";
|
||||
import { isParseError, parseBody } from "#api/parse.js";
|
||||
import { OptionsRepository } from "#db/repositories/options.js";
|
||||
|
||||
export const prerender = false;
|
||||
|
||||
/** Hook name format: namespace:action (e.g., "content:beforeSave") */
|
||||
const HOOK_NAME_RE = /^[a-z]+:[a-zA-Z]+$/;
|
||||
|
||||
const setSelectionSchema = z.object({
|
||||
pluginId: z.string().min(1).nullable(),
|
||||
});
|
||||
|
||||
export const PUT: APIRoute = async ({ params, request, locals }) => {
|
||||
const { emdash, user } = locals;
|
||||
|
||||
if (!emdash?.db) {
|
||||
return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
|
||||
}
|
||||
|
||||
const denied = requirePerm(user, "settings:manage");
|
||||
if (denied) return denied;
|
||||
|
||||
const hookName = params.hookName;
|
||||
if (!hookName) {
|
||||
return apiError("VALIDATION_ERROR", "Hook name is required", 400);
|
||||
}
|
||||
|
||||
// Validate hook name format: must be namespace:action (e.g., "content:beforeSave")
|
||||
if (!HOOK_NAME_RE.test(hookName)) {
|
||||
return apiError("VALIDATION_ERROR", "Invalid hook name format", 400);
|
||||
}
|
||||
|
||||
try {
|
||||
const pipeline = emdash.hooks;
|
||||
|
||||
// Verify this is actually an exclusive hook
|
||||
if (!pipeline.isExclusiveHook(hookName)) {
|
||||
return apiError("NOT_FOUND", `Hook '${hookName}' is not a registered exclusive hook`, 404);
|
||||
}
|
||||
|
||||
const body = await parseBody(request, setSelectionSchema);
|
||||
if (isParseError(body)) return body;
|
||||
|
||||
const optionsRepo = new OptionsRepository(emdash.db);
|
||||
const optionKey = `emdash:exclusive_hook:${hookName}`;
|
||||
|
||||
if (body.pluginId === null) {
|
||||
// Clear the selection
|
||||
await optionsRepo.delete(optionKey);
|
||||
pipeline.clearExclusiveSelection(hookName);
|
||||
} else {
|
||||
// Validate that the pluginId is an actual provider for this hook
|
||||
const providers = pipeline.getExclusiveHookProviders(hookName);
|
||||
const isValidProvider = providers.some(
|
||||
(p: { pluginId: string }) => p.pluginId === body.pluginId,
|
||||
);
|
||||
if (!isValidProvider) {
|
||||
return apiError(
|
||||
"VALIDATION_ERROR",
|
||||
`Plugin '${body.pluginId}' is not a provider for hook '${hookName}'`,
|
||||
400,
|
||||
);
|
||||
}
|
||||
|
||||
await optionsRepo.set(optionKey, body.pluginId);
|
||||
pipeline.setExclusiveSelection(hookName, body.pluginId);
|
||||
}
|
||||
|
||||
return apiSuccess({
|
||||
hookName,
|
||||
selectedPluginId: body.pluginId,
|
||||
});
|
||||
} catch (error) {
|
||||
return handleError(error, "Failed to set exclusive hook selection", "EXCLUSIVE_HOOK_SET_ERROR");
|
||||
}
|
||||
};
|
||||
@@ -0,0 +1,51 @@
|
||||
/**
|
||||
* Exclusive hooks list endpoint
|
||||
*
|
||||
* GET /_emdash/api/admin/hooks/exclusive
|
||||
*
|
||||
* Lists all exclusive hooks with their providers and current selections.
|
||||
* Requires admin role.
|
||||
*/
|
||||
|
||||
import type { APIRoute } from "astro";
|
||||
|
||||
import { requirePerm } from "#api/authorize.js";
|
||||
import { apiError, apiSuccess, handleError } from "#api/error.js";
|
||||
import { OptionsRepository } from "#db/repositories/options.js";
|
||||
|
||||
export const prerender = false;
|
||||
|
||||
export const GET: APIRoute = async ({ locals }) => {
|
||||
const { emdash, user } = locals;
|
||||
|
||||
if (!emdash?.db) {
|
||||
return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
|
||||
}
|
||||
|
||||
const denied = requirePerm(user, "settings:manage");
|
||||
if (denied) return denied;
|
||||
|
||||
try {
|
||||
const pipeline = emdash.hooks;
|
||||
const exclusiveHookNames = pipeline.getRegisteredExclusiveHooks();
|
||||
const optionsRepo = new OptionsRepository(emdash.db);
|
||||
|
||||
const hooks = [];
|
||||
for (const hookName of exclusiveHookNames) {
|
||||
const providers = pipeline.getExclusiveHookProviders(hookName);
|
||||
const selection = await optionsRepo.get<string>(`emdash:exclusive_hook:${hookName}`);
|
||||
|
||||
hooks.push({
|
||||
hookName,
|
||||
providers: providers.map((provider: { pluginId: string }) => ({
|
||||
pluginId: provider.pluginId,
|
||||
})),
|
||||
selectedPluginId: selection,
|
||||
});
|
||||
}
|
||||
|
||||
return apiSuccess({ items: hooks });
|
||||
} catch (error) {
|
||||
return handleError(error, "Failed to list exclusive hooks", "EXCLUSIVE_HOOKS_LIST_ERROR");
|
||||
}
|
||||
};
|
||||
110
packages/core/src/astro/routes/api/admin/oauth-clients/[id].ts
Normal file
110
packages/core/src/astro/routes/api/admin/oauth-clients/[id].ts
Normal file
@@ -0,0 +1,110 @@
|
||||
/**
|
||||
* Single OAuth client endpoints
|
||||
*
|
||||
* GET /_emdash/api/admin/oauth-clients/:id — Get a client
|
||||
* PUT /_emdash/api/admin/oauth-clients/:id — Update a client
|
||||
* DELETE /_emdash/api/admin/oauth-clients/:id — Delete a client
|
||||
*/
|
||||
|
||||
import { Role } from "@emdash-cms/auth";
|
||||
import type { APIRoute } from "astro";
|
||||
import { z } from "zod";
|
||||
|
||||
import { apiError, handleError, unwrapResult } from "#api/error.js";
|
||||
import {
|
||||
handleOAuthClientDelete,
|
||||
handleOAuthClientGet,
|
||||
handleOAuthClientUpdate,
|
||||
} from "#api/handlers/oauth-clients.js";
|
||||
import { isParseError, parseBody } from "#api/parse.js";
|
||||
|
||||
export const prerender = false;
|
||||
|
||||
const updateClientSchema = z.object({
|
||||
name: z.string().min(1).max(255).optional(),
|
||||
redirectUris: z
|
||||
.array(z.string().url("Each redirect URI must be a valid URL"))
|
||||
.min(1, "At least one redirect URI is required")
|
||||
.optional(),
|
||||
scopes: z.array(z.string()).nullable().optional(),
|
||||
});
|
||||
|
||||
/**
|
||||
* Get a single OAuth client.
|
||||
*/
|
||||
export const GET: APIRoute = async ({ params, locals }) => {
|
||||
const { emdash, user } = locals;
|
||||
|
||||
if (!emdash?.db) {
|
||||
return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
|
||||
}
|
||||
|
||||
if (!user || user.role < Role.ADMIN) {
|
||||
return apiError("FORBIDDEN", "Admin privileges required", 403);
|
||||
}
|
||||
|
||||
const clientId = params.id;
|
||||
if (!clientId) {
|
||||
return apiError("VALIDATION_ERROR", "Client ID is required", 400);
|
||||
}
|
||||
|
||||
const result = await handleOAuthClientGet(emdash.db, clientId);
|
||||
return unwrapResult(result);
|
||||
};
|
||||
|
||||
/**
|
||||
* Update an OAuth client.
|
||||
*/
|
||||
export const PUT: APIRoute = async ({ params, request, locals }) => {
|
||||
const { emdash, user } = locals;
|
||||
|
||||
if (!emdash?.db) {
|
||||
return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
|
||||
}
|
||||
|
||||
if (!user || user.role < Role.ADMIN) {
|
||||
return apiError("FORBIDDEN", "Admin privileges required", 403);
|
||||
}
|
||||
|
||||
const clientId = params.id;
|
||||
if (!clientId) {
|
||||
return apiError("VALIDATION_ERROR", "Client ID is required", 400);
|
||||
}
|
||||
|
||||
try {
|
||||
const body = await parseBody(request, updateClientSchema);
|
||||
if (isParseError(body)) return body;
|
||||
|
||||
const result = await handleOAuthClientUpdate(emdash.db, clientId, body);
|
||||
return unwrapResult(result);
|
||||
} catch (error) {
|
||||
return handleError(error, "Failed to update OAuth client", "CLIENT_UPDATE_ERROR");
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Delete an OAuth client.
|
||||
*/
|
||||
export const DELETE: APIRoute = async ({ params, locals }) => {
|
||||
const { emdash, user } = locals;
|
||||
|
||||
if (!emdash?.db) {
|
||||
return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
|
||||
}
|
||||
|
||||
if (!user || user.role < Role.ADMIN) {
|
||||
return apiError("FORBIDDEN", "Admin privileges required", 403);
|
||||
}
|
||||
|
||||
const clientId = params.id;
|
||||
if (!clientId) {
|
||||
return apiError("VALIDATION_ERROR", "Client ID is required", 400);
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await handleOAuthClientDelete(emdash.db, clientId);
|
||||
return unwrapResult(result);
|
||||
} catch (error) {
|
||||
return handleError(error, "Failed to delete OAuth client", "CLIENT_DELETE_ERROR");
|
||||
}
|
||||
};
|
||||
@@ -0,0 +1,71 @@
|
||||
/**
|
||||
* OAuth client management endpoints
|
||||
*
|
||||
* GET /_emdash/api/admin/oauth-clients — List all registered OAuth clients
|
||||
* POST /_emdash/api/admin/oauth-clients — Register a new OAuth client
|
||||
*/
|
||||
|
||||
import { Role } from "@emdash-cms/auth";
|
||||
import type { APIRoute } from "astro";
|
||||
import { z } from "zod";
|
||||
|
||||
import { apiError, handleError, unwrapResult } from "#api/error.js";
|
||||
import { handleOAuthClientCreate, handleOAuthClientList } from "#api/handlers/oauth-clients.js";
|
||||
import { isParseError, parseBody } from "#api/parse.js";
|
||||
|
||||
export const prerender = false;
|
||||
|
||||
const createClientSchema = z.object({
|
||||
id: z
|
||||
.string()
|
||||
.min(1, "Client ID is required")
|
||||
.max(255, "Client ID must be at most 255 characters"),
|
||||
name: z.string().min(1, "Name is required").max(255, "Name must be at most 255 characters"),
|
||||
redirectUris: z
|
||||
.array(z.string().url("Each redirect URI must be a valid URL"))
|
||||
.min(1, "At least one redirect URI is required"),
|
||||
scopes: z.array(z.string()).optional(),
|
||||
});
|
||||
|
||||
/**
|
||||
* List all registered OAuth clients.
|
||||
*/
|
||||
export const GET: APIRoute = async ({ locals }) => {
|
||||
const { emdash, user } = locals;
|
||||
|
||||
if (!emdash?.db) {
|
||||
return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
|
||||
}
|
||||
|
||||
if (!user || user.role < Role.ADMIN) {
|
||||
return apiError("FORBIDDEN", "Admin privileges required", 403);
|
||||
}
|
||||
|
||||
const result = await handleOAuthClientList(emdash.db);
|
||||
return unwrapResult(result);
|
||||
};
|
||||
|
||||
/**
|
||||
* Register a new OAuth client.
|
||||
*/
|
||||
export const POST: APIRoute = async ({ request, locals }) => {
|
||||
const { emdash, user } = locals;
|
||||
|
||||
if (!emdash?.db) {
|
||||
return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
|
||||
}
|
||||
|
||||
if (!user || user.role < Role.ADMIN) {
|
||||
return apiError("FORBIDDEN", "Admin privileges required", 403);
|
||||
}
|
||||
|
||||
try {
|
||||
const body = await parseBody(request, createClientSchema);
|
||||
if (isParseError(body)) return body;
|
||||
|
||||
const result = await handleOAuthClientCreate(emdash.db, body);
|
||||
return unwrapResult(result, 201);
|
||||
} catch (error) {
|
||||
return handleError(error, "Failed to create OAuth client", "CLIENT_CREATE_ERROR");
|
||||
}
|
||||
};
|
||||
@@ -0,0 +1,39 @@
|
||||
/**
|
||||
* Plugin disable endpoint
|
||||
*
|
||||
* POST /_emdash/api/admin/plugins/:id/disable - Disable a plugin
|
||||
*/
|
||||
|
||||
import type { APIRoute } from "astro";
|
||||
|
||||
import { requirePerm } from "#api/authorize.js";
|
||||
import { apiError, unwrapResult } from "#api/error.js";
|
||||
import { handlePluginDisable } from "#api/index.js";
|
||||
import { setCronTasksEnabled } from "#plugins/cron.js";
|
||||
|
||||
export const prerender = false;
|
||||
|
||||
export const POST: APIRoute = async ({ params, locals }) => {
|
||||
const { emdash, user } = locals;
|
||||
const { id } = params;
|
||||
|
||||
if (!emdash?.db) {
|
||||
return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
|
||||
}
|
||||
|
||||
const denied = requirePerm(user, "plugins:manage");
|
||||
if (denied) return denied;
|
||||
|
||||
if (!id) {
|
||||
return apiError("INVALID_REQUEST", "Plugin ID required", 400);
|
||||
}
|
||||
|
||||
const result = await handlePluginDisable(emdash.db, emdash.configuredPlugins, id);
|
||||
|
||||
if (!result.success) return unwrapResult(result);
|
||||
|
||||
await emdash.setPluginStatus(id, "inactive");
|
||||
await setCronTasksEnabled(emdash.db, id, false);
|
||||
|
||||
return unwrapResult(result);
|
||||
};
|
||||
@@ -0,0 +1,39 @@
|
||||
/**
|
||||
* Plugin enable endpoint
|
||||
*
|
||||
* POST /_emdash/api/admin/plugins/:id/enable - Enable a plugin
|
||||
*/
|
||||
|
||||
import type { APIRoute } from "astro";
|
||||
|
||||
import { requirePerm } from "#api/authorize.js";
|
||||
import { apiError, unwrapResult } from "#api/error.js";
|
||||
import { handlePluginEnable } from "#api/index.js";
|
||||
import { setCronTasksEnabled } from "#plugins/cron.js";
|
||||
|
||||
export const prerender = false;
|
||||
|
||||
export const POST: APIRoute = async ({ params, locals }) => {
|
||||
const { emdash, user } = locals;
|
||||
const { id } = params;
|
||||
|
||||
if (!emdash?.db) {
|
||||
return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
|
||||
}
|
||||
|
||||
const denied = requirePerm(user, "plugins:manage");
|
||||
if (denied) return denied;
|
||||
|
||||
if (!id) {
|
||||
return apiError("INVALID_REQUEST", "Plugin ID required", 400);
|
||||
}
|
||||
|
||||
const result = await handlePluginEnable(emdash.db, emdash.configuredPlugins, id);
|
||||
|
||||
if (!result.success) return unwrapResult(result);
|
||||
|
||||
await emdash.setPluginStatus(id, "active");
|
||||
await setCronTasksEnabled(emdash.db, id, true);
|
||||
|
||||
return unwrapResult(result);
|
||||
};
|
||||
@@ -0,0 +1,38 @@
|
||||
/**
|
||||
* Plugin management single plugin endpoint
|
||||
*
|
||||
* GET /_emdash/api/admin/plugins/:id - Get plugin details
|
||||
*/
|
||||
|
||||
import type { APIRoute } from "astro";
|
||||
|
||||
import { requirePerm } from "#api/authorize.js";
|
||||
import { apiError, unwrapResult } from "#api/error.js";
|
||||
import { handlePluginGet } from "#api/index.js";
|
||||
|
||||
export const prerender = false;
|
||||
|
||||
export const GET: APIRoute = async ({ params, locals }) => {
|
||||
const { emdash, user } = locals;
|
||||
const { id } = params;
|
||||
|
||||
if (!emdash?.db) {
|
||||
return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
|
||||
}
|
||||
|
||||
const denied = requirePerm(user, "plugins:read");
|
||||
if (denied) return denied;
|
||||
|
||||
if (!id) {
|
||||
return apiError("INVALID_REQUEST", "Plugin ID required", 400);
|
||||
}
|
||||
|
||||
const result = await handlePluginGet(
|
||||
emdash.db,
|
||||
emdash.configuredPlugins,
|
||||
id,
|
||||
emdash.config.marketplace,
|
||||
);
|
||||
|
||||
return unwrapResult(result);
|
||||
};
|
||||
@@ -0,0 +1,48 @@
|
||||
/**
|
||||
* Marketplace plugin uninstall endpoint
|
||||
*
|
||||
* POST /_emdash/api/admin/plugins/:id/uninstall - Uninstall a marketplace plugin
|
||||
*/
|
||||
|
||||
import type { APIRoute } from "astro";
|
||||
import { z } from "zod";
|
||||
|
||||
import { requirePerm } from "#api/authorize.js";
|
||||
import { apiError, unwrapResult } from "#api/error.js";
|
||||
import { handleMarketplaceUninstall } from "#api/index.js";
|
||||
import { isParseError, parseOptionalBody } from "#api/parse.js";
|
||||
|
||||
export const prerender = false;
|
||||
|
||||
const uninstallBodySchema = z.object({
|
||||
deleteData: z.boolean().optional(),
|
||||
});
|
||||
|
||||
export const POST: APIRoute = async ({ params, request, locals }) => {
|
||||
const { emdash, user } = locals;
|
||||
const { id } = params;
|
||||
|
||||
if (!emdash?.db) {
|
||||
return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
|
||||
}
|
||||
|
||||
const denied = requirePerm(user, "plugins:manage");
|
||||
if (denied) return denied;
|
||||
|
||||
if (!id) {
|
||||
return apiError("INVALID_REQUEST", "Plugin ID required", 400);
|
||||
}
|
||||
|
||||
const body = await parseOptionalBody(request, uninstallBodySchema, {});
|
||||
if (isParseError(body)) return body;
|
||||
|
||||
const result = await handleMarketplaceUninstall(emdash.db, emdash.storage, id, {
|
||||
deleteData: body.deleteData ?? false,
|
||||
});
|
||||
|
||||
if (!result.success) return unwrapResult(result);
|
||||
|
||||
await emdash.syncMarketplacePlugins();
|
||||
|
||||
return unwrapResult(result);
|
||||
};
|
||||
@@ -0,0 +1,59 @@
|
||||
/**
|
||||
* Marketplace plugin update endpoint
|
||||
*
|
||||
* POST /_emdash/api/admin/plugins/:id/update - Update a marketplace plugin
|
||||
*/
|
||||
|
||||
import type { APIRoute } from "astro";
|
||||
import { z } from "zod";
|
||||
|
||||
import { requirePerm } from "#api/authorize.js";
|
||||
import { apiError, unwrapResult } from "#api/error.js";
|
||||
import { handleMarketplaceUpdate } from "#api/index.js";
|
||||
import { isParseError, parseOptionalBody } from "#api/parse.js";
|
||||
|
||||
export const prerender = false;
|
||||
|
||||
const updateBodySchema = z.object({
|
||||
version: z.string().min(1).optional(),
|
||||
confirmCapabilityChanges: z.boolean().optional(),
|
||||
confirmRouteVisibilityChanges: z.boolean().optional(),
|
||||
});
|
||||
|
||||
export const POST: APIRoute = async ({ params, request, locals }) => {
|
||||
const { emdash, user } = locals;
|
||||
const { id } = params;
|
||||
|
||||
if (!emdash?.db) {
|
||||
return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
|
||||
}
|
||||
|
||||
const denied = requirePerm(user, "plugins:manage");
|
||||
if (denied) return denied;
|
||||
|
||||
if (!id) {
|
||||
return apiError("INVALID_REQUEST", "Plugin ID required", 400);
|
||||
}
|
||||
|
||||
const body = await parseOptionalBody(request, updateBodySchema, {});
|
||||
if (isParseError(body)) return body;
|
||||
|
||||
const result = await handleMarketplaceUpdate(
|
||||
emdash.db,
|
||||
emdash.storage,
|
||||
emdash.getSandboxRunner(),
|
||||
emdash.config.marketplace,
|
||||
id,
|
||||
{
|
||||
version: body.version,
|
||||
confirmCapabilityChanges: body.confirmCapabilityChanges,
|
||||
confirmRouteVisibilityChanges: body.confirmRouteVisibilityChanges,
|
||||
},
|
||||
);
|
||||
|
||||
if (!result.success) return unwrapResult(result);
|
||||
|
||||
await emdash.syncMarketplacePlugins();
|
||||
|
||||
return unwrapResult(result);
|
||||
};
|
||||
32
packages/core/src/astro/routes/api/admin/plugins/index.ts
Normal file
32
packages/core/src/astro/routes/api/admin/plugins/index.ts
Normal file
@@ -0,0 +1,32 @@
|
||||
/**
|
||||
* Plugin management list endpoint
|
||||
*
|
||||
* GET /_emdash/api/admin/plugins - List all configured plugins with state
|
||||
*/
|
||||
|
||||
import type { APIRoute } from "astro";
|
||||
|
||||
import { requirePerm } from "#api/authorize.js";
|
||||
import { apiError, unwrapResult } from "#api/error.js";
|
||||
import { handlePluginList } from "#api/index.js";
|
||||
|
||||
export const prerender = false;
|
||||
|
||||
export const GET: APIRoute = async ({ locals }) => {
|
||||
const { emdash, user } = locals;
|
||||
|
||||
if (!emdash?.db) {
|
||||
return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
|
||||
}
|
||||
|
||||
const denied = requirePerm(user, "plugins:read");
|
||||
if (denied) return denied;
|
||||
|
||||
const result = await handlePluginList(
|
||||
emdash.db,
|
||||
emdash.configuredPlugins,
|
||||
emdash.config.marketplace,
|
||||
);
|
||||
|
||||
return unwrapResult(result);
|
||||
};
|
||||
@@ -0,0 +1,61 @@
|
||||
/**
|
||||
* Marketplace plugin icon proxy
|
||||
*
|
||||
* GET /_emdash/api/admin/plugins/marketplace/:id/icon - Proxy icon from marketplace
|
||||
*
|
||||
* Avoids CORS/auth issues when the marketplace Worker is behind Cloudflare Access
|
||||
* or on a different origin. The admin UI uses this instead of linking directly.
|
||||
*/
|
||||
|
||||
import type { APIRoute } from "astro";
|
||||
|
||||
import { requirePerm } from "#api/authorize.js";
|
||||
import { apiError } from "#api/error.js";
|
||||
|
||||
export const prerender = false;
|
||||
|
||||
export const GET: APIRoute = async ({ params, url, locals }) => {
|
||||
const { emdash, user } = locals;
|
||||
const { id } = params;
|
||||
|
||||
if (!emdash?.db) {
|
||||
return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
|
||||
}
|
||||
|
||||
const denied = requirePerm(user, "plugins:read");
|
||||
if (denied) return denied;
|
||||
|
||||
const marketplaceUrl = emdash.config.marketplace;
|
||||
if (!marketplaceUrl || !id) {
|
||||
return apiError("NOT_CONFIGURED", "Marketplace not configured", 400);
|
||||
}
|
||||
|
||||
const width = url.searchParams.get("w");
|
||||
const target = new URL(`/api/v1/plugins/${encodeURIComponent(id)}/icon`, marketplaceUrl);
|
||||
if (width) target.searchParams.set("w", width);
|
||||
|
||||
try {
|
||||
const resp = await fetch(target.href);
|
||||
if (!resp.ok) {
|
||||
// Allowlist: only forward Content-Type from upstream.
|
||||
// Never copy all upstream headers (denylist approach leaks
|
||||
// headers we haven't anticipated).
|
||||
return new Response(resp.body, {
|
||||
status: resp.status,
|
||||
headers: {
|
||||
"Content-Type": resp.headers.get("Content-Type") ?? "application/octet-stream",
|
||||
"Cache-Control": "private, no-store",
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
return new Response(resp.body, {
|
||||
headers: {
|
||||
"Content-Type": resp.headers.get("Content-Type") ?? "image/png",
|
||||
"Cache-Control": "private, no-store",
|
||||
},
|
||||
});
|
||||
} catch {
|
||||
return apiError("PROXY_ERROR", "Failed to fetch icon", 502);
|
||||
}
|
||||
};
|
||||
@@ -0,0 +1,33 @@
|
||||
/**
|
||||
* Marketplace plugin detail proxy endpoint
|
||||
*
|
||||
* GET /_emdash/api/admin/plugins/marketplace/:id - Get plugin details
|
||||
*/
|
||||
|
||||
import type { APIRoute } from "astro";
|
||||
|
||||
import { requirePerm } from "#api/authorize.js";
|
||||
import { apiError, unwrapResult } from "#api/error.js";
|
||||
import { handleMarketplaceGetPlugin } from "#api/index.js";
|
||||
|
||||
export const prerender = false;
|
||||
|
||||
export const GET: APIRoute = async ({ params, locals }) => {
|
||||
const { emdash, user } = locals;
|
||||
const { id } = params;
|
||||
|
||||
if (!emdash?.db) {
|
||||
return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
|
||||
}
|
||||
|
||||
const denied = requirePerm(user, "plugins:read");
|
||||
if (denied) return denied;
|
||||
|
||||
if (!id) {
|
||||
return apiError("INVALID_REQUEST", "Plugin ID required", 400);
|
||||
}
|
||||
|
||||
const result = await handleMarketplaceGetPlugin(emdash.config.marketplace, id);
|
||||
|
||||
return unwrapResult(result);
|
||||
};
|
||||
@@ -0,0 +1,64 @@
|
||||
/**
|
||||
* Marketplace plugin install endpoint
|
||||
*
|
||||
* POST /_emdash/api/admin/plugins/marketplace/:id/install - Install a marketplace plugin
|
||||
*/
|
||||
|
||||
import type { APIRoute } from "astro";
|
||||
import { z } from "zod";
|
||||
|
||||
import { requirePerm } from "#api/authorize.js";
|
||||
import { apiError, handleError, unwrapResult } from "#api/error.js";
|
||||
import { handleMarketplaceInstall } from "#api/index.js";
|
||||
import { isParseError, parseOptionalBody } from "#api/parse.js";
|
||||
|
||||
export const prerender = false;
|
||||
|
||||
const installBodySchema = z.object({
|
||||
version: z.string().min(1).optional(),
|
||||
});
|
||||
|
||||
export const POST: APIRoute = async ({ params, request, locals }) => {
|
||||
try {
|
||||
const { emdash, user } = locals;
|
||||
const { id } = params;
|
||||
|
||||
if (!emdash?.db) {
|
||||
return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
|
||||
}
|
||||
|
||||
const denied = requirePerm(user, "plugins:manage");
|
||||
if (denied) return denied;
|
||||
|
||||
if (!id) {
|
||||
return apiError("INVALID_REQUEST", "Plugin ID required", 400);
|
||||
}
|
||||
|
||||
const body = await parseOptionalBody(request, installBodySchema, {});
|
||||
if (isParseError(body)) return body;
|
||||
|
||||
const configuredPluginIds = new Set<string>(
|
||||
emdash.configuredPlugins.map((p: { id: string }) => p.id),
|
||||
);
|
||||
|
||||
const siteOrigin = new URL(request.url).origin;
|
||||
|
||||
const result = await handleMarketplaceInstall(
|
||||
emdash.db,
|
||||
emdash.storage,
|
||||
emdash.getSandboxRunner(),
|
||||
emdash.config.marketplace,
|
||||
id,
|
||||
{ version: body.version, configuredPluginIds, siteOrigin },
|
||||
);
|
||||
|
||||
if (!result.success) return unwrapResult(result);
|
||||
|
||||
await emdash.syncMarketplacePlugins();
|
||||
|
||||
return unwrapResult(result, 201);
|
||||
} catch (error) {
|
||||
console.error("[marketplace-install] Unhandled error:", error);
|
||||
return handleError(error, "Failed to install plugin from marketplace", "INSTALL_FAILED");
|
||||
}
|
||||
};
|
||||
@@ -0,0 +1,38 @@
|
||||
/**
|
||||
* Marketplace search proxy endpoint
|
||||
*
|
||||
* GET /_emdash/api/admin/plugins/marketplace - Search marketplace plugins
|
||||
*/
|
||||
|
||||
import type { APIRoute } from "astro";
|
||||
|
||||
import { requirePerm } from "#api/authorize.js";
|
||||
import { apiError, unwrapResult } from "#api/error.js";
|
||||
import { handleMarketplaceSearch } from "#api/index.js";
|
||||
|
||||
export const prerender = false;
|
||||
|
||||
export const GET: APIRoute = async ({ url, locals }) => {
|
||||
const { emdash, user } = locals;
|
||||
|
||||
if (!emdash?.db) {
|
||||
return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
|
||||
}
|
||||
|
||||
const denied = requirePerm(user, "plugins:read");
|
||||
if (denied) return denied;
|
||||
|
||||
const query = url.searchParams.get("q") ?? undefined;
|
||||
const category = url.searchParams.get("category") ?? undefined;
|
||||
const cursor = url.searchParams.get("cursor") ?? undefined;
|
||||
const limitParam = url.searchParams.get("limit");
|
||||
const limit = limitParam ? Math.min(Math.max(1, parseInt(limitParam, 10) || 50), 100) : undefined;
|
||||
|
||||
const result = await handleMarketplaceSearch(emdash.config.marketplace, query, {
|
||||
category,
|
||||
cursor,
|
||||
limit,
|
||||
});
|
||||
|
||||
return unwrapResult(result);
|
||||
};
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user