first commit
This commit is contained in:
63
packages/core/src/api/authorize.ts
Normal file
63
packages/core/src/api/authorize.ts
Normal file
@@ -0,0 +1,63 @@
|
||||
/**
|
||||
* Authorization helpers for API routes
|
||||
*
|
||||
* Thin wrappers around @emdashcms/auth RBAC that return HTTP responses.
|
||||
* Auth middleware handles authentication; these handle authorization.
|
||||
*/
|
||||
|
||||
import type { Permission, RoleLevel } from "@emdashcms/auth";
|
||||
import { hasPermission, canActOnOwn } from "@emdashcms/auth";
|
||||
|
||||
import { apiError } from "./error.js";
|
||||
|
||||
interface UserLike {
|
||||
id: string;
|
||||
role: RoleLevel;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if user has a permission. Returns a 401/403 Response if not, or null if authorized.
|
||||
*
|
||||
* Usage:
|
||||
* ```ts
|
||||
* const denied = requirePerm(user, "schema:manage");
|
||||
* if (denied) return denied;
|
||||
* ```
|
||||
*/
|
||||
export function requirePerm(
|
||||
user: UserLike | null | undefined,
|
||||
permission: Permission,
|
||||
): Response | null {
|
||||
if (!user) {
|
||||
return apiError("UNAUTHORIZED", "Authentication required", 401);
|
||||
}
|
||||
if (!hasPermission(user, permission)) {
|
||||
return apiError("FORBIDDEN", "Insufficient permissions", 403);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if user can act on a resource, considering ownership.
|
||||
* Returns a 401/403 Response if not, or null if authorized.
|
||||
*
|
||||
* Usage:
|
||||
* ```ts
|
||||
* const denied = requireOwnerPerm(user, item.authorId, "content:edit_own", "content:edit_any");
|
||||
* if (denied) return denied;
|
||||
* ```
|
||||
*/
|
||||
export function requireOwnerPerm(
|
||||
user: UserLike | null | undefined,
|
||||
ownerId: string,
|
||||
ownPermission: Permission,
|
||||
anyPermission: Permission,
|
||||
): Response | null {
|
||||
if (!user) {
|
||||
return apiError("UNAUTHORIZED", "Authentication required", 401);
|
||||
}
|
||||
if (!canActOnOwn(user, ownerId, ownPermission, anyPermission)) {
|
||||
return apiError("FORBIDDEN", "Insufficient permissions", 403);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
48
packages/core/src/api/csrf.ts
Normal file
48
packages/core/src/api/csrf.ts
Normal file
@@ -0,0 +1,48 @@
|
||||
/**
|
||||
* CSRF protection utilities.
|
||||
*
|
||||
* Two mechanisms:
|
||||
* 1. Custom header check (X-EmDash-Request: 1) — used for authenticated API routes.
|
||||
* Browsers block cross-origin custom headers, so presence proves same-origin.
|
||||
* 2. Origin check — used for public API routes that skip auth. Compares the Origin
|
||||
* header against the request origin. Same approach as Astro's `checkOrigin`.
|
||||
*/
|
||||
|
||||
import { apiError } from "./error.js";
|
||||
|
||||
/**
|
||||
* Origin-based CSRF check for public API routes that skip auth.
|
||||
*
|
||||
* State-changing requests (POST/PUT/DELETE) to public endpoints must either:
|
||||
* 1. Include the X-EmDash-Request: 1 header (custom header blocked cross-origin), OR
|
||||
* 2. Have an Origin header matching the request origin
|
||||
*
|
||||
* This prevents cross-origin form submissions (which can't set custom headers)
|
||||
* and cross-origin fetch (blocked by CORS unless allowed). Same-origin requests
|
||||
* always include a matching Origin header.
|
||||
*
|
||||
* Returns a 403 Response if the check fails, or null if allowed.
|
||||
*/
|
||||
export function checkPublicCsrf(request: Request, url: URL): Response | null {
|
||||
// Custom header present — browser blocks cross-origin custom headers
|
||||
const csrfHeader = request.headers.get("X-EmDash-Request");
|
||||
if (csrfHeader === "1") return null;
|
||||
|
||||
// Check Origin header — present on all POST/PUT/DELETE from browsers
|
||||
const origin = request.headers.get("Origin");
|
||||
if (origin) {
|
||||
try {
|
||||
const originUrl = new URL(origin);
|
||||
if (originUrl.origin === url.origin) return null;
|
||||
} catch {
|
||||
// Malformed Origin — fall through to reject
|
||||
}
|
||||
|
||||
return apiError("CSRF_REJECTED", "Cross-origin request blocked", 403);
|
||||
}
|
||||
|
||||
// No Origin header — non-browser client (curl, server-to-server).
|
||||
// Allow these through since CSRF is a browser-specific attack vector.
|
||||
// Server-to-server requests don't carry ambient credentials (cookies).
|
||||
return null;
|
||||
}
|
||||
99
packages/core/src/api/error.ts
Normal file
99
packages/core/src/api/error.ts
Normal file
@@ -0,0 +1,99 @@
|
||||
/**
|
||||
* Standardized API error responses.
|
||||
*
|
||||
* All API routes should use these utilities instead of inline
|
||||
* `new Response(JSON.stringify({ error: ... }), ...)` patterns.
|
||||
*/
|
||||
|
||||
import { mapErrorStatus } from "./errors.js";
|
||||
import type { ApiResult } from "./types.js";
|
||||
|
||||
// Re-export everything from errors.ts so existing `import { mapErrorStatus } from "./error.js"` still works
|
||||
export * from "./errors.js";
|
||||
|
||||
/**
|
||||
* Standard cache headers for all API responses.
|
||||
*
|
||||
* Cache-Control: private, no-store -- prevents CDN/proxy caching of authenticated data.
|
||||
* no-store already tells caches not to store the response, so Vary is unnecessary.
|
||||
*/
|
||||
const API_CACHE_HEADERS: HeadersInit = {
|
||||
"Cache-Control": "private, no-store",
|
||||
};
|
||||
|
||||
/**
|
||||
* Create a standardized error response.
|
||||
*
|
||||
* Always returns `{ error: { code, message } }` with correct Content-Type.
|
||||
* Use this for all error responses in API routes.
|
||||
*/
|
||||
export function apiError(code: string, message: string, status: number): Response {
|
||||
return Response.json({ error: { code, message } }, { status, headers: API_CACHE_HEADERS });
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a standardized success response.
|
||||
*
|
||||
* Always returns `{ data: T }` with correct status code.
|
||||
* Use this for all success responses in API routes.
|
||||
*/
|
||||
export function apiSuccess<T>(data: T, status = 200): Response {
|
||||
return Response.json({ data }, { status, headers: API_CACHE_HEADERS });
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle an unknown error in a catch block.
|
||||
*
|
||||
* - Logs the full error server-side
|
||||
* - Returns a generic message to the client (never leaks error.message)
|
||||
* - Use `fallbackMessage` for the public-facing message
|
||||
* - Use `fallbackCode` for the error code
|
||||
*/
|
||||
export function handleError(
|
||||
error: unknown,
|
||||
fallbackMessage: string,
|
||||
fallbackCode: string,
|
||||
): Response {
|
||||
console.error(`[${fallbackCode}]`, error);
|
||||
return apiError(fallbackCode, fallbackMessage, 500);
|
||||
}
|
||||
|
||||
/**
|
||||
* Standard initialization check.
|
||||
*
|
||||
* Returns an error response if EmDash is not initialized, or null if OK.
|
||||
* Usage: `const err = requireInit(emdash); if (err) return err;`
|
||||
*/
|
||||
export function requireInit(emdash: unknown): Response | null {
|
||||
if (!emdash || typeof emdash !== "object") {
|
||||
return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Standard database check.
|
||||
*
|
||||
* Returns an error response if the database is not available, or null if OK.
|
||||
* Usage: `const err = requireDb(emdash?.db); if (err) return err;`
|
||||
*/
|
||||
export function requireDb(db: unknown): Response | null {
|
||||
if (!db) {
|
||||
return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert an ApiResult into an HTTP Response.
|
||||
*
|
||||
* Collapses the handler-to-response boilerplate:
|
||||
* - Success: returns `apiSuccess(result.data, successStatus)`
|
||||
* - Error: returns `apiError(code, message, mapErrorStatus(code))`
|
||||
*/
|
||||
export function unwrapResult<T>(result: ApiResult<T>, successStatus = 200): Response {
|
||||
if (!result.success) {
|
||||
return apiError(result.error.code, result.error.message, mapErrorStatus(result.error.code));
|
||||
}
|
||||
return apiSuccess(result.data, successStatus);
|
||||
}
|
||||
445
packages/core/src/api/errors.ts
Normal file
445
packages/core/src/api/errors.ts
Normal file
@@ -0,0 +1,445 @@
|
||||
/**
|
||||
* Typed error codes and status mapping for the EmDash REST API.
|
||||
*
|
||||
* All handler-level and route-level error codes are defined here.
|
||||
* Routes and handlers should import error codes from this module
|
||||
* instead of using ad-hoc strings.
|
||||
*/
|
||||
|
||||
export const ErrorCode = {
|
||||
// Shared (used across domains)
|
||||
NOT_FOUND: "NOT_FOUND",
|
||||
VALIDATION_ERROR: "VALIDATION_ERROR",
|
||||
INVALID_INPUT: "INVALID_INPUT",
|
||||
INVALID_JSON: "INVALID_JSON",
|
||||
CONFLICT: "CONFLICT",
|
||||
NOT_CONFIGURED: "NOT_CONFIGURED",
|
||||
UNAUTHORIZED: "UNAUTHORIZED",
|
||||
FORBIDDEN: "FORBIDDEN",
|
||||
RATE_LIMITED: "RATE_LIMITED",
|
||||
NOT_AUTHENTICATED: "NOT_AUTHENTICATED",
|
||||
NOT_IMPLEMENTED: "NOT_IMPLEMENTED",
|
||||
NOT_SUPPORTED: "NOT_SUPPORTED",
|
||||
MISSING_PARAM: "MISSING_PARAM",
|
||||
CSRF_REJECTED: "CSRF_REJECTED",
|
||||
|
||||
// Content
|
||||
CONTENT_CREATE_ERROR: "CONTENT_CREATE_ERROR",
|
||||
CONTENT_UPDATE_ERROR: "CONTENT_UPDATE_ERROR",
|
||||
CONTENT_DELETE_ERROR: "CONTENT_DELETE_ERROR",
|
||||
CONTENT_LIST_ERROR: "CONTENT_LIST_ERROR",
|
||||
CONTENT_GET_ERROR: "CONTENT_GET_ERROR",
|
||||
CONTENT_DUPLICATE_ERROR: "CONTENT_DUPLICATE_ERROR",
|
||||
CONTENT_RESTORE_ERROR: "CONTENT_RESTORE_ERROR",
|
||||
CONTENT_PUBLISH_ERROR: "CONTENT_PUBLISH_ERROR",
|
||||
CONTENT_UNPUBLISH_ERROR: "CONTENT_UNPUBLISH_ERROR",
|
||||
CONTENT_SCHEDULE_ERROR: "CONTENT_SCHEDULE_ERROR",
|
||||
CONTENT_UNSCHEDULE_ERROR: "CONTENT_UNSCHEDULE_ERROR",
|
||||
CONTENT_DISCARD_DRAFT_ERROR: "CONTENT_DISCARD_DRAFT_ERROR",
|
||||
CONTENT_COMPARE_ERROR: "CONTENT_COMPARE_ERROR",
|
||||
CONTENT_TRANSLATIONS_ERROR: "CONTENT_TRANSLATIONS_ERROR",
|
||||
CONTENT_COUNT_ERROR: "CONTENT_COUNT_ERROR",
|
||||
|
||||
// Revisions
|
||||
REVISION_LIST_ERROR: "REVISION_LIST_ERROR",
|
||||
REVISION_GET_ERROR: "REVISION_GET_ERROR",
|
||||
REVISION_RESTORE_ERROR: "REVISION_RESTORE_ERROR",
|
||||
INVALID_REVISION: "INVALID_REVISION",
|
||||
|
||||
// Schema
|
||||
SCHEMA_LIST_ERROR: "SCHEMA_LIST_ERROR",
|
||||
SCHEMA_GET_ERROR: "SCHEMA_GET_ERROR",
|
||||
SCHEMA_CREATE_ERROR: "SCHEMA_CREATE_ERROR",
|
||||
SCHEMA_UPDATE_ERROR: "SCHEMA_UPDATE_ERROR",
|
||||
SCHEMA_DELETE_ERROR: "SCHEMA_DELETE_ERROR",
|
||||
SCHEMA_EXPORT_ERROR: "SCHEMA_EXPORT_ERROR",
|
||||
SCHEMA_FIELD_LIST_ERROR: "SCHEMA_FIELD_LIST_ERROR",
|
||||
SCHEMA_FIELD_GET_ERROR: "SCHEMA_FIELD_GET_ERROR",
|
||||
SCHEMA_FIELD_CREATE_ERROR: "SCHEMA_FIELD_CREATE_ERROR",
|
||||
SCHEMA_FIELD_UPDATE_ERROR: "SCHEMA_FIELD_UPDATE_ERROR",
|
||||
SCHEMA_FIELD_DELETE_ERROR: "SCHEMA_FIELD_DELETE_ERROR",
|
||||
SCHEMA_FIELD_REORDER_ERROR: "SCHEMA_FIELD_REORDER_ERROR",
|
||||
ORPHAN_LIST_ERROR: "ORPHAN_LIST_ERROR",
|
||||
ORPHAN_REGISTER_ERROR: "ORPHAN_REGISTER_ERROR",
|
||||
COLLECTION_EXISTS: "COLLECTION_EXISTS",
|
||||
COLLECTION_NOT_FOUND: "COLLECTION_NOT_FOUND",
|
||||
TABLE_NOT_FOUND: "TABLE_NOT_FOUND",
|
||||
FIELD_EXISTS: "FIELD_EXISTS",
|
||||
RESERVED_SLUG: "RESERVED_SLUG",
|
||||
INVALID_SLUG: "INVALID_SLUG",
|
||||
CREATE_FAILED: "CREATE_FAILED",
|
||||
UPDATE_FAILED: "UPDATE_FAILED",
|
||||
REGISTER_FAILED: "REGISTER_FAILED",
|
||||
|
||||
// Media
|
||||
MEDIA_LIST_ERROR: "MEDIA_LIST_ERROR",
|
||||
MEDIA_GET_ERROR: "MEDIA_GET_ERROR",
|
||||
MEDIA_CREATE_ERROR: "MEDIA_CREATE_ERROR",
|
||||
MEDIA_UPDATE_ERROR: "MEDIA_UPDATE_ERROR",
|
||||
MEDIA_DELETE_ERROR: "MEDIA_DELETE_ERROR",
|
||||
NO_STORAGE: "NO_STORAGE",
|
||||
NO_FILE: "NO_FILE",
|
||||
INVALID_TYPE: "INVALID_TYPE",
|
||||
UPLOAD_ERROR: "UPLOAD_ERROR",
|
||||
UPLOAD_URL_ERROR: "UPLOAD_URL_ERROR",
|
||||
CONFIRM_ERROR: "CONFIRM_ERROR",
|
||||
CONFIRM_FAILED: "CONFIRM_FAILED",
|
||||
FILE_NOT_FOUND: "FILE_NOT_FOUND",
|
||||
INVALID_STATE: "INVALID_STATE",
|
||||
FILE_SERVE_ERROR: "FILE_SERVE_ERROR",
|
||||
STORAGE_NOT_CONFIGURED: "STORAGE_NOT_CONFIGURED",
|
||||
PROVIDER_LIST_ERROR: "PROVIDER_LIST_ERROR",
|
||||
PROVIDER_UPLOAD_ERROR: "PROVIDER_UPLOAD_ERROR",
|
||||
PROVIDER_GET_ERROR: "PROVIDER_GET_ERROR",
|
||||
PROVIDER_DELETE_ERROR: "PROVIDER_DELETE_ERROR",
|
||||
|
||||
// Comments
|
||||
COMMENT_LIST_ERROR: "COMMENT_LIST_ERROR",
|
||||
COMMENT_GET_ERROR: "COMMENT_GET_ERROR",
|
||||
COMMENT_STATUS_ERROR: "COMMENT_STATUS_ERROR",
|
||||
COMMENT_DELETE_ERROR: "COMMENT_DELETE_ERROR",
|
||||
COMMENT_BULK_ERROR: "COMMENT_BULK_ERROR",
|
||||
COMMENT_INBOX_ERROR: "COMMENT_INBOX_ERROR",
|
||||
COMMENT_COUNTS_ERROR: "COMMENT_COUNTS_ERROR",
|
||||
COMMENT_CREATE_ERROR: "COMMENT_CREATE_ERROR",
|
||||
COMMENTS_DISABLED: "COMMENTS_DISABLED",
|
||||
COMMENTS_CLOSED: "COMMENTS_CLOSED",
|
||||
COMMENT_REJECTED: "COMMENT_REJECTED",
|
||||
|
||||
// Auth
|
||||
ACCOUNT_DISABLED: "ACCOUNT_DISABLED",
|
||||
ADMIN_EXISTS: "ADMIN_EXISTS",
|
||||
SETUP_COMPLETE: "SETUP_COMPLETE",
|
||||
CREDENTIAL_EXISTS: "CREDENTIAL_EXISTS",
|
||||
CHALLENGE_EXPIRED: "CHALLENGE_EXPIRED",
|
||||
PASSKEY_REGISTER_ERROR: "PASSKEY_REGISTER_ERROR",
|
||||
PASSKEY_REGISTER_OPTIONS_ERROR: "PASSKEY_REGISTER_OPTIONS_ERROR",
|
||||
PASSKEY_OPTIONS_ERROR: "PASSKEY_OPTIONS_ERROR",
|
||||
PASSKEY_VERIFY_ERROR: "PASSKEY_VERIFY_ERROR",
|
||||
PASSKEY_LIST_ERROR: "PASSKEY_LIST_ERROR",
|
||||
PASSKEY_RENAME_ERROR: "PASSKEY_RENAME_ERROR",
|
||||
PASSKEY_DELETE_ERROR: "PASSKEY_DELETE_ERROR",
|
||||
PASSKEY_LIMIT: "PASSKEY_LIMIT",
|
||||
LAST_PASSKEY: "LAST_PASSKEY",
|
||||
LOGOUT_ERROR: "LOGOUT_ERROR",
|
||||
SELF_ROLE_CHANGE: "SELF_ROLE_CHANGE",
|
||||
EMAIL_IN_USE: "EMAIL_IN_USE",
|
||||
EMAIL_NOT_CONFIGURED: "EMAIL_NOT_CONFIGURED",
|
||||
USER_EXISTS: "USER_EXISTS",
|
||||
INVALID_TOKEN: "INVALID_TOKEN",
|
||||
TOKEN_EXPIRED: "TOKEN_EXPIRED",
|
||||
DOMAIN_NOT_ALLOWED: "DOMAIN_NOT_ALLOWED",
|
||||
INVITE_CREATE_ERROR: "INVITE_CREATE_ERROR",
|
||||
INVITE_VALIDATE_ERROR: "INVITE_VALIDATE_ERROR",
|
||||
INVITE_COMPLETE_ERROR: "INVITE_COMPLETE_ERROR",
|
||||
SIGNUP_VERIFY_ERROR: "SIGNUP_VERIFY_ERROR",
|
||||
SIGNUP_COMPLETE_ERROR: "SIGNUP_COMPLETE_ERROR",
|
||||
RECOVERY_SEND_ERROR: "RECOVERY_SEND_ERROR",
|
||||
USER_LIST_ERROR: "USER_LIST_ERROR",
|
||||
USER_DETAIL_ERROR: "USER_DETAIL_ERROR",
|
||||
USER_UPDATE_ERROR: "USER_UPDATE_ERROR",
|
||||
USER_DISABLE_ERROR: "USER_DISABLE_ERROR",
|
||||
USER_ENABLE_ERROR: "USER_ENABLE_ERROR",
|
||||
|
||||
// OAuth (internal codes -- distinct from RFC OAuthErrorCode)
|
||||
UNSUPPORTED_RESPONSE_TYPE: "UNSUPPORTED_RESPONSE_TYPE",
|
||||
INVALID_REDIRECT_URI: "INVALID_REDIRECT_URI",
|
||||
INVALID_CLIENT: "INVALID_CLIENT",
|
||||
INVALID_SCOPE: "INVALID_SCOPE",
|
||||
AUTHORIZATION_ERROR: "AUTHORIZATION_ERROR",
|
||||
INVALID_GRANT: "INVALID_GRANT",
|
||||
UNSUPPORTED_GRANT_TYPE: "UNSUPPORTED_GRANT_TYPE",
|
||||
INVALID_CODE: "INVALID_CODE",
|
||||
EXPIRED_CODE: "EXPIRED_CODE",
|
||||
INSUFFICIENT_ROLE: "INSUFFICIENT_ROLE",
|
||||
TOKEN_EXCHANGE_ERROR: "TOKEN_EXCHANGE_ERROR",
|
||||
TOKEN_REFRESH_ERROR: "TOKEN_REFRESH_ERROR",
|
||||
TOKEN_REVOKE_ERROR: "TOKEN_REVOKE_ERROR",
|
||||
TOKEN_CREATE_ERROR: "TOKEN_CREATE_ERROR",
|
||||
TOKEN_LIST_ERROR: "TOKEN_LIST_ERROR",
|
||||
TOKEN_ERROR: "TOKEN_ERROR",
|
||||
DEVICE_CODE_ERROR: "DEVICE_CODE_ERROR",
|
||||
AUTHORIZE_ERROR: "AUTHORIZE_ERROR",
|
||||
CLIENT_LIST_ERROR: "CLIENT_LIST_ERROR",
|
||||
CLIENT_GET_ERROR: "CLIENT_GET_ERROR",
|
||||
CLIENT_CREATE_ERROR: "CLIENT_CREATE_ERROR",
|
||||
CLIENT_UPDATE_ERROR: "CLIENT_UPDATE_ERROR",
|
||||
CLIENT_DELETE_ERROR: "CLIENT_DELETE_ERROR",
|
||||
|
||||
// Allowed domains
|
||||
DOMAIN_LIST_ERROR: "DOMAIN_LIST_ERROR",
|
||||
DOMAIN_CREATE_ERROR: "DOMAIN_CREATE_ERROR",
|
||||
DOMAIN_UPDATE_ERROR: "DOMAIN_UPDATE_ERROR",
|
||||
DOMAIN_DELETE_ERROR: "DOMAIN_DELETE_ERROR",
|
||||
|
||||
// Plugins / Marketplace
|
||||
PLUGIN_LIST_ERROR: "PLUGIN_LIST_ERROR",
|
||||
PLUGIN_GET_ERROR: "PLUGIN_GET_ERROR",
|
||||
PLUGIN_ENABLE_ERROR: "PLUGIN_ENABLE_ERROR",
|
||||
PLUGIN_DISABLE_ERROR: "PLUGIN_DISABLE_ERROR",
|
||||
PLUGIN_ID_CONFLICT: "PLUGIN_ID_CONFLICT",
|
||||
MARKETPLACE_NOT_CONFIGURED: "MARKETPLACE_NOT_CONFIGURED",
|
||||
MARKETPLACE_UNAVAILABLE: "MARKETPLACE_UNAVAILABLE",
|
||||
MARKETPLACE_ERROR: "MARKETPLACE_ERROR",
|
||||
SANDBOX_NOT_AVAILABLE: "SANDBOX_NOT_AVAILABLE",
|
||||
ALREADY_INSTALLED: "ALREADY_INSTALLED",
|
||||
ALREADY_UP_TO_DATE: "ALREADY_UP_TO_DATE",
|
||||
NO_VERSION: "NO_VERSION",
|
||||
MANIFEST_MISMATCH: "MANIFEST_MISMATCH",
|
||||
MANIFEST_VERSION_MISMATCH: "MANIFEST_VERSION_MISMATCH",
|
||||
AUDIT_FAILED: "AUDIT_FAILED",
|
||||
CHECKSUM_MISMATCH: "CHECKSUM_MISMATCH",
|
||||
INVALID_BUNDLE: "INVALID_BUNDLE",
|
||||
BUNDLE_EXTRACT_FAILED: "BUNDLE_EXTRACT_FAILED",
|
||||
BUNDLE_DOWNLOAD_FAILED: "BUNDLE_DOWNLOAD_FAILED",
|
||||
CAPABILITY_ESCALATION: "CAPABILITY_ESCALATION",
|
||||
ROUTE_VISIBILITY_ESCALATION: "ROUTE_VISIBILITY_ESCALATION",
|
||||
INSTALL_FAILED: "INSTALL_FAILED",
|
||||
UNINSTALL_FAILED: "UNINSTALL_FAILED",
|
||||
SEARCH_FAILED: "SEARCH_FAILED",
|
||||
GET_PLUGIN_FAILED: "GET_PLUGIN_FAILED",
|
||||
GET_THEME_FAILED: "GET_THEME_FAILED",
|
||||
THEME_SEARCH_FAILED: "THEME_SEARCH_FAILED",
|
||||
UPDATE_CHECK_FAILED: "UPDATE_CHECK_FAILED",
|
||||
EXCLUSIVE_HOOKS_LIST_ERROR: "EXCLUSIVE_HOOKS_LIST_ERROR",
|
||||
EXCLUSIVE_HOOK_SET_ERROR: "EXCLUSIVE_HOOK_SET_ERROR",
|
||||
|
||||
// Menus
|
||||
MENU_LIST_ERROR: "MENU_LIST_ERROR",
|
||||
MENU_CREATE_ERROR: "MENU_CREATE_ERROR",
|
||||
MENU_GET_ERROR: "MENU_GET_ERROR",
|
||||
MENU_UPDATE_ERROR: "MENU_UPDATE_ERROR",
|
||||
MENU_DELETE_ERROR: "MENU_DELETE_ERROR",
|
||||
MENU_ITEM_CREATE_ERROR: "MENU_ITEM_CREATE_ERROR",
|
||||
MENU_ITEM_UPDATE_ERROR: "MENU_ITEM_UPDATE_ERROR",
|
||||
MENU_ITEM_DELETE_ERROR: "MENU_ITEM_DELETE_ERROR",
|
||||
MENU_REORDER_ERROR: "MENU_REORDER_ERROR",
|
||||
|
||||
// Taxonomies
|
||||
TAXONOMY_LIST_ERROR: "TAXONOMY_LIST_ERROR",
|
||||
TAXONOMY_CREATE_ERROR: "TAXONOMY_CREATE_ERROR",
|
||||
TERM_LIST_ERROR: "TERM_LIST_ERROR",
|
||||
TERM_CREATE_ERROR: "TERM_CREATE_ERROR",
|
||||
TERM_GET_ERROR: "TERM_GET_ERROR",
|
||||
TERM_UPDATE_ERROR: "TERM_UPDATE_ERROR",
|
||||
TERM_DELETE_ERROR: "TERM_DELETE_ERROR",
|
||||
TERMS_GET_ERROR: "TERMS_GET_ERROR",
|
||||
TERMS_SET_ERROR: "TERMS_SET_ERROR",
|
||||
|
||||
// Sections
|
||||
SECTION_LIST_ERROR: "SECTION_LIST_ERROR",
|
||||
SECTION_CREATE_ERROR: "SECTION_CREATE_ERROR",
|
||||
SECTION_GET_ERROR: "SECTION_GET_ERROR",
|
||||
SECTION_UPDATE_ERROR: "SECTION_UPDATE_ERROR",
|
||||
SECTION_DELETE_ERROR: "SECTION_DELETE_ERROR",
|
||||
|
||||
// Redirects
|
||||
REDIRECT_LIST_ERROR: "REDIRECT_LIST_ERROR",
|
||||
REDIRECT_CREATE_ERROR: "REDIRECT_CREATE_ERROR",
|
||||
REDIRECT_GET_ERROR: "REDIRECT_GET_ERROR",
|
||||
REDIRECT_UPDATE_ERROR: "REDIRECT_UPDATE_ERROR",
|
||||
REDIRECT_DELETE_ERROR: "REDIRECT_DELETE_ERROR",
|
||||
NOT_FOUND_LIST_ERROR: "NOT_FOUND_LIST_ERROR",
|
||||
NOT_FOUND_SUMMARY_ERROR: "NOT_FOUND_SUMMARY_ERROR",
|
||||
NOT_FOUND_CLEAR_ERROR: "NOT_FOUND_CLEAR_ERROR",
|
||||
NOT_FOUND_PRUNE_ERROR: "NOT_FOUND_PRUNE_ERROR",
|
||||
|
||||
// Widgets
|
||||
WIDGET_AREA_LIST_ERROR: "WIDGET_AREA_LIST_ERROR",
|
||||
WIDGET_AREA_CREATE_ERROR: "WIDGET_AREA_CREATE_ERROR",
|
||||
WIDGET_AREA_GET_ERROR: "WIDGET_AREA_GET_ERROR",
|
||||
WIDGET_AREA_DELETE_ERROR: "WIDGET_AREA_DELETE_ERROR",
|
||||
WIDGET_CREATE_ERROR: "WIDGET_CREATE_ERROR",
|
||||
WIDGET_UPDATE_ERROR: "WIDGET_UPDATE_ERROR",
|
||||
WIDGET_DELETE_ERROR: "WIDGET_DELETE_ERROR",
|
||||
WIDGET_REORDER_ERROR: "WIDGET_REORDER_ERROR",
|
||||
WIDGET_COMPONENTS_ERROR: "WIDGET_COMPONENTS_ERROR",
|
||||
|
||||
// Setup
|
||||
ALREADY_CONFIGURED: "ALREADY_CONFIGURED",
|
||||
INVALID_SEED: "INVALID_SEED",
|
||||
INVALID_REDIRECT: "INVALID_REDIRECT",
|
||||
SETUP_ERROR: "SETUP_ERROR",
|
||||
SETUP_STATUS_ERROR: "SETUP_STATUS_ERROR",
|
||||
SETUP_ADMIN_ERROR: "SETUP_ADMIN_ERROR",
|
||||
SETUP_VERIFY_ERROR: "SETUP_VERIFY_ERROR",
|
||||
DEV_BYPASS_ERROR: "DEV_BYPASS_ERROR",
|
||||
DEV_RESET_ERROR: "DEV_RESET_ERROR",
|
||||
MIGRATION_ERROR: "MIGRATION_ERROR",
|
||||
SEED_ERROR: "SEED_ERROR",
|
||||
|
||||
// Settings
|
||||
SETTINGS_READ_ERROR: "SETTINGS_READ_ERROR",
|
||||
SETTINGS_UPDATE_ERROR: "SETTINGS_UPDATE_ERROR",
|
||||
EMAIL_SETTINGS_READ_ERROR: "EMAIL_SETTINGS_READ_ERROR",
|
||||
EMAIL_TEST_ERROR: "EMAIL_TEST_ERROR",
|
||||
|
||||
// Search
|
||||
SEARCH_ERROR: "SEARCH_ERROR",
|
||||
STATS_ERROR: "STATS_ERROR",
|
||||
SUGGESTION_ERROR: "SUGGESTION_ERROR",
|
||||
REBUILD_ERROR: "REBUILD_ERROR",
|
||||
|
||||
// Import
|
||||
WXR_ANALYZE_ERROR: "WXR_ANALYZE_ERROR",
|
||||
WXR_PREPARE_ERROR: "WXR_PREPARE_ERROR",
|
||||
WXR_IMPORT_ERROR: "WXR_IMPORT_ERROR",
|
||||
IMPORT_ERROR: "IMPORT_ERROR",
|
||||
REWRITE_ERROR: "REWRITE_ERROR",
|
||||
WP_PLUGIN_ANALYZE_ERROR: "WP_PLUGIN_ANALYZE_ERROR",
|
||||
WP_PLUGIN_IMPORT_ERROR: "WP_PLUGIN_IMPORT_ERROR",
|
||||
SSRF_BLOCKED: "SSRF_BLOCKED",
|
||||
PROBE_ERROR: "PROBE_ERROR",
|
||||
|
||||
// Dashboard
|
||||
DASHBOARD_ERROR: "DASHBOARD_ERROR",
|
||||
DASHBOARD_STATS_ERROR: "DASHBOARD_STATS_ERROR",
|
||||
|
||||
// Misc
|
||||
SNAPSHOT_ERROR: "SNAPSHOT_ERROR",
|
||||
TYPEGEN_ERROR: "TYPEGEN_ERROR",
|
||||
SITEMAP_ERROR: "SITEMAP_ERROR",
|
||||
NO_DB: "NO_DB",
|
||||
INVALID_REQUEST: "INVALID_REQUEST",
|
||||
UNKNOWN_ACTION: "UNKNOWN_ACTION",
|
||||
} as const;
|
||||
|
||||
export type ErrorCode = (typeof ErrorCode)[keyof typeof ErrorCode];
|
||||
|
||||
/**
|
||||
* OAuth RFC 6749 error codes.
|
||||
*
|
||||
* These MUST be lowercase per the RFC spec. Used only by OAuth token endpoints.
|
||||
* Separate from ErrorCode to prevent mixing conventions.
|
||||
*/
|
||||
export const OAuthErrorCode = {
|
||||
INVALID_GRANT: "invalid_grant",
|
||||
UNSUPPORTED_GRANT_TYPE: "unsupported_grant_type",
|
||||
EXPIRED_TOKEN: "expired_token",
|
||||
ACCESS_DENIED: "access_denied",
|
||||
AUTHORIZATION_PENDING: "authorization_pending",
|
||||
} as const;
|
||||
|
||||
export type OAuthErrorCode = (typeof OAuthErrorCode)[keyof typeof OAuthErrorCode];
|
||||
|
||||
/**
|
||||
* Map a handler error code to an HTTP status code.
|
||||
*
|
||||
* Shared codes have explicit mappings. Domain-specific `*_ERROR` codes
|
||||
* (used in catch blocks via handleError) default to 500. Everything else
|
||||
* defaults to 400 (client error).
|
||||
*/
|
||||
export function mapErrorStatus(code: string | undefined): number {
|
||||
switch (code) {
|
||||
// 400 Bad Request
|
||||
case ErrorCode.VALIDATION_ERROR:
|
||||
case ErrorCode.INVALID_INPUT:
|
||||
case ErrorCode.INVALID_JSON:
|
||||
case ErrorCode.MISSING_PARAM:
|
||||
case ErrorCode.INVALID_REQUEST:
|
||||
case ErrorCode.NOT_SUPPORTED:
|
||||
case ErrorCode.INVALID_SLUG:
|
||||
case ErrorCode.RESERVED_SLUG:
|
||||
case ErrorCode.INVALID_TYPE:
|
||||
case ErrorCode.NO_FILE:
|
||||
case ErrorCode.INVALID_STATE:
|
||||
case ErrorCode.INVALID_SEED:
|
||||
case ErrorCode.INVALID_REDIRECT:
|
||||
case ErrorCode.INVALID_TOKEN:
|
||||
case ErrorCode.INVALID_REVISION:
|
||||
case ErrorCode.INVALID_CODE:
|
||||
case ErrorCode.CHALLENGE_EXPIRED:
|
||||
case ErrorCode.EXPIRED_CODE:
|
||||
case ErrorCode.LAST_PASSKEY:
|
||||
case ErrorCode.PASSKEY_LIMIT:
|
||||
case ErrorCode.ADMIN_EXISTS:
|
||||
case ErrorCode.SETUP_COMPLETE:
|
||||
case ErrorCode.SELF_ROLE_CHANGE:
|
||||
case ErrorCode.SSRF_BLOCKED:
|
||||
case ErrorCode.UNKNOWN_ACTION:
|
||||
return 400;
|
||||
|
||||
// 401 Unauthorized
|
||||
case ErrorCode.UNAUTHORIZED:
|
||||
case ErrorCode.NOT_AUTHENTICATED:
|
||||
return 401;
|
||||
|
||||
// 403 Forbidden
|
||||
case ErrorCode.FORBIDDEN:
|
||||
case ErrorCode.CSRF_REJECTED:
|
||||
case ErrorCode.ACCOUNT_DISABLED:
|
||||
case ErrorCode.COMMENTS_DISABLED:
|
||||
case ErrorCode.COMMENTS_CLOSED:
|
||||
case ErrorCode.COMMENT_REJECTED:
|
||||
case ErrorCode.DOMAIN_NOT_ALLOWED:
|
||||
case ErrorCode.INSUFFICIENT_ROLE:
|
||||
case ErrorCode.CAPABILITY_ESCALATION:
|
||||
case ErrorCode.ROUTE_VISIBILITY_ESCALATION:
|
||||
case ErrorCode.AUDIT_FAILED:
|
||||
return 403;
|
||||
|
||||
// 404 Not Found
|
||||
case ErrorCode.NOT_FOUND:
|
||||
case ErrorCode.TABLE_NOT_FOUND:
|
||||
case ErrorCode.COLLECTION_NOT_FOUND:
|
||||
case ErrorCode.FILE_NOT_FOUND:
|
||||
case ErrorCode.NO_VERSION:
|
||||
return 404;
|
||||
|
||||
// 409 Conflict
|
||||
case ErrorCode.CONFLICT:
|
||||
case ErrorCode.COLLECTION_EXISTS:
|
||||
case ErrorCode.FIELD_EXISTS:
|
||||
case ErrorCode.CREDENTIAL_EXISTS:
|
||||
case ErrorCode.EMAIL_IN_USE:
|
||||
case ErrorCode.USER_EXISTS:
|
||||
case ErrorCode.PLUGIN_ID_CONFLICT:
|
||||
case ErrorCode.ALREADY_INSTALLED:
|
||||
case ErrorCode.ALREADY_CONFIGURED:
|
||||
case ErrorCode.ALREADY_UP_TO_DATE:
|
||||
return 409;
|
||||
|
||||
// 410 Gone
|
||||
case ErrorCode.TOKEN_EXPIRED:
|
||||
return 410;
|
||||
|
||||
// 422 Unprocessable Entity
|
||||
case ErrorCode.CHECKSUM_MISMATCH:
|
||||
case ErrorCode.INVALID_BUNDLE:
|
||||
case ErrorCode.BUNDLE_EXTRACT_FAILED:
|
||||
return 422;
|
||||
|
||||
// 429 Too Many Requests
|
||||
case ErrorCode.RATE_LIMITED:
|
||||
return 429;
|
||||
|
||||
// 500 Internal Server Error
|
||||
case ErrorCode.NOT_CONFIGURED:
|
||||
case ErrorCode.NO_STORAGE:
|
||||
case ErrorCode.NO_DB:
|
||||
case ErrorCode.STORAGE_NOT_CONFIGURED:
|
||||
case ErrorCode.EMAIL_NOT_CONFIGURED:
|
||||
return 500;
|
||||
|
||||
// 501 Not Implemented
|
||||
case ErrorCode.NOT_IMPLEMENTED:
|
||||
return 501;
|
||||
|
||||
// 502 Bad Gateway
|
||||
case ErrorCode.BUNDLE_DOWNLOAD_FAILED:
|
||||
return 502;
|
||||
|
||||
// 503 Service Unavailable
|
||||
case ErrorCode.MARKETPLACE_UNAVAILABLE:
|
||||
case ErrorCode.MARKETPLACE_NOT_CONFIGURED:
|
||||
case ErrorCode.SANDBOX_NOT_AVAILABLE:
|
||||
return 503;
|
||||
|
||||
// Domain-specific *_ERROR codes are catch-block codes -- always 500.
|
||||
// WARNING: If adding a new code that ends in _ERROR but represents a
|
||||
// client error (4xx), add it to an explicit case above or it will
|
||||
// be incorrectly mapped to 500.
|
||||
default:
|
||||
return code?.endsWith("_ERROR") ? 500 : 400;
|
||||
}
|
||||
}
|
||||
9
packages/core/src/api/escape.ts
Normal file
9
packages/core/src/api/escape.ts
Normal file
@@ -0,0 +1,9 @@
|
||||
/** HTML-escape a string to prevent XSS when interpolated into HTML/JS */
|
||||
export function escapeHtml(str: string): string {
|
||||
return str
|
||||
.replaceAll("&", "&")
|
||||
.replaceAll("<", "<")
|
||||
.replaceAll(">", ">")
|
||||
.replaceAll('"', """)
|
||||
.replaceAll("'", "'");
|
||||
}
|
||||
240
packages/core/src/api/handlers/api-tokens.ts
Normal file
240
packages/core/src/api/handlers/api-tokens.ts
Normal file
@@ -0,0 +1,240 @@
|
||||
/**
|
||||
* API token management handlers.
|
||||
*
|
||||
* Creates, lists, and revokes Personal Access Tokens (PATs).
|
||||
* Token format: ec_pat_<base64url>
|
||||
* Only the SHA-256 hash is stored — raw token shown once at creation.
|
||||
*/
|
||||
|
||||
import type { Kysely } from "kysely";
|
||||
import { ulid } from "ulidx";
|
||||
|
||||
import { hashApiToken, generatePrefixedToken } from "../../auth/api-tokens.js";
|
||||
import type { Database } from "../../database/types.js";
|
||||
import type { ApiResult } from "../types.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Types
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export interface ApiTokenInfo {
|
||||
id: string;
|
||||
name: string;
|
||||
prefix: string;
|
||||
scopes: string[];
|
||||
userId: string;
|
||||
expiresAt: string | null;
|
||||
lastUsedAt: string | null;
|
||||
createdAt: string;
|
||||
}
|
||||
|
||||
export interface ApiTokenCreateResult {
|
||||
/** The raw token — shown once, never stored */
|
||||
token: string;
|
||||
/** Token metadata */
|
||||
info: ApiTokenInfo;
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Handlers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Create a new API token for a user.
|
||||
*/
|
||||
export async function handleApiTokenCreate(
|
||||
db: Kysely<Database>,
|
||||
userId: string,
|
||||
input: {
|
||||
name: string;
|
||||
scopes: string[];
|
||||
expiresAt?: string;
|
||||
},
|
||||
): Promise<ApiResult<ApiTokenCreateResult>> {
|
||||
try {
|
||||
const id = ulid();
|
||||
const { raw, hash, prefix } = generatePrefixedToken("ec_pat_");
|
||||
|
||||
await db
|
||||
.insertInto("_emdash_api_tokens")
|
||||
.values({
|
||||
id,
|
||||
name: input.name,
|
||||
token_hash: hash,
|
||||
prefix,
|
||||
user_id: userId,
|
||||
scopes: JSON.stringify(input.scopes),
|
||||
expires_at: input.expiresAt ?? null,
|
||||
})
|
||||
.execute();
|
||||
|
||||
const info: ApiTokenInfo = {
|
||||
id,
|
||||
name: input.name,
|
||||
prefix,
|
||||
scopes: input.scopes,
|
||||
userId,
|
||||
expiresAt: input.expiresAt ?? null,
|
||||
lastUsedAt: null,
|
||||
createdAt: new Date().toISOString(),
|
||||
};
|
||||
|
||||
return { success: true, data: { token: raw, info } };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "TOKEN_CREATE_ERROR",
|
||||
message: "Failed to create API token",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* List all API tokens for a user (never returns the raw token or hash).
|
||||
*/
|
||||
export async function handleApiTokenList(
|
||||
db: Kysely<Database>,
|
||||
userId: string,
|
||||
): Promise<ApiResult<{ items: ApiTokenInfo[] }>> {
|
||||
try {
|
||||
const rows = await db
|
||||
.selectFrom("_emdash_api_tokens")
|
||||
.select([
|
||||
"id",
|
||||
"name",
|
||||
"prefix",
|
||||
"scopes",
|
||||
"user_id",
|
||||
"expires_at",
|
||||
"last_used_at",
|
||||
"created_at",
|
||||
])
|
||||
.where("user_id", "=", userId)
|
||||
.orderBy("created_at", "desc")
|
||||
.execute();
|
||||
|
||||
const items: ApiTokenInfo[] = rows.map((row) => ({
|
||||
id: row.id,
|
||||
name: row.name,
|
||||
prefix: row.prefix,
|
||||
scopes: JSON.parse(row.scopes) as string[],
|
||||
userId: row.user_id,
|
||||
expiresAt: row.expires_at,
|
||||
lastUsedAt: row.last_used_at,
|
||||
createdAt: row.created_at,
|
||||
}));
|
||||
|
||||
return { success: true, data: { items } };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "TOKEN_LIST_ERROR",
|
||||
message: "Failed to list API tokens",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Revoke (delete) an API token.
|
||||
*/
|
||||
export async function handleApiTokenRevoke(
|
||||
db: Kysely<Database>,
|
||||
tokenId: string,
|
||||
userId: string,
|
||||
): Promise<ApiResult<{ revoked: boolean }>> {
|
||||
try {
|
||||
const result = await db
|
||||
.deleteFrom("_emdash_api_tokens")
|
||||
.where("id", "=", tokenId)
|
||||
.where("user_id", "=", userId)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (result.numDeletedRows === 0n) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: "Token not found" },
|
||||
};
|
||||
}
|
||||
|
||||
return { success: true, data: { revoked: true } };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "TOKEN_REVOKE_ERROR",
|
||||
message: "Failed to revoke API token",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve a raw API token (ec_pat_...) to a user ID and scopes.
|
||||
* Updates last_used_at on successful lookup.
|
||||
* Returns null if the token is invalid or expired.
|
||||
*/
|
||||
export async function resolveApiToken(
|
||||
db: Kysely<Database>,
|
||||
rawToken: string,
|
||||
): Promise<{ userId: string; scopes: string[] } | null> {
|
||||
const hash = hashApiToken(rawToken);
|
||||
|
||||
const row = await db
|
||||
.selectFrom("_emdash_api_tokens")
|
||||
.select(["id", "user_id", "scopes", "expires_at"])
|
||||
.where("token_hash", "=", hash)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (!row) return null;
|
||||
|
||||
// Check expiry
|
||||
if (row.expires_at && new Date(row.expires_at) < new Date()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Update last_used_at (fire-and-forget, don't block the request)
|
||||
db.updateTable("_emdash_api_tokens")
|
||||
.set({ last_used_at: new Date().toISOString() })
|
||||
.where("id", "=", row.id)
|
||||
.execute()
|
||||
.catch(() => {}); // Non-critical, swallow errors
|
||||
|
||||
return {
|
||||
userId: row.user_id,
|
||||
scopes: JSON.parse(row.scopes) as string[],
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve an OAuth access token (ec_oat_...) to a user ID and scopes.
|
||||
* Returns null if the token is invalid or expired.
|
||||
*/
|
||||
export async function resolveOAuthToken(
|
||||
db: Kysely<Database>,
|
||||
rawToken: string,
|
||||
): Promise<{ userId: string; scopes: string[] } | null> {
|
||||
const hash = hashApiToken(rawToken);
|
||||
|
||||
const row = await db
|
||||
.selectFrom("_emdash_oauth_tokens")
|
||||
.select(["user_id", "scopes", "expires_at", "token_type"])
|
||||
.where("token_hash", "=", hash)
|
||||
.where("token_type", "=", "access")
|
||||
.executeTakeFirst();
|
||||
|
||||
if (!row) return null;
|
||||
|
||||
// Check expiry
|
||||
if (new Date(row.expires_at) < new Date()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return {
|
||||
userId: row.user_id,
|
||||
scopes: JSON.parse(row.scopes) as string[],
|
||||
};
|
||||
}
|
||||
314
packages/core/src/api/handlers/comments.ts
Normal file
314
packages/core/src/api/handlers/comments.ts
Normal file
@@ -0,0 +1,314 @@
|
||||
/**
|
||||
* Comment handlers — business logic for comment API routes.
|
||||
*
|
||||
* Standalone functions that return ApiResult<T>. Routes are thin wrappers.
|
||||
*/
|
||||
|
||||
import type { Kysely } from "kysely";
|
||||
|
||||
import { CommentRepository } from "../../database/repositories/comment.js";
|
||||
import type { Comment, CommentStatus, PublicComment } from "../../database/repositories/comment.js";
|
||||
import type { Database } from "../../database/types.js";
|
||||
import type { ApiResult } from "../types.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Public: List approved comments for content
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export async function handleCommentList(
|
||||
db: Kysely<Database>,
|
||||
collection: string,
|
||||
contentId: string,
|
||||
options: { limit?: number; cursor?: string; threaded?: boolean } = {},
|
||||
): Promise<ApiResult<{ items: PublicComment[]; nextCursor?: string; total: number }>> {
|
||||
try {
|
||||
const repo = new CommentRepository(db);
|
||||
|
||||
// Get total approved count
|
||||
const total = await repo.countByContent(collection, contentId, "approved");
|
||||
|
||||
let publicItems: PublicComment[];
|
||||
let nextCursor: string | undefined;
|
||||
|
||||
if (options.threaded) {
|
||||
// Threaded mode: fetch all approved comments (capped) so threading
|
||||
// doesn't lose children that would fall on later pages.
|
||||
const MAX_THREADED = 500;
|
||||
const result = await repo.findByContent(collection, contentId, {
|
||||
status: "approved",
|
||||
limit: MAX_THREADED,
|
||||
});
|
||||
const threaded = CommentRepository.assembleThreads(result.items);
|
||||
publicItems = threaded.map((c) => CommentRepository.toPublicComment(c));
|
||||
// No cursor for threaded mode — all comments returned at once
|
||||
} else {
|
||||
const result = await repo.findByContent(collection, contentId, {
|
||||
status: "approved",
|
||||
limit: options.limit,
|
||||
cursor: options.cursor,
|
||||
});
|
||||
publicItems = result.items.map((c) => CommentRepository.toPublicComment(c));
|
||||
nextCursor = result.nextCursor;
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: {
|
||||
items: publicItems,
|
||||
nextCursor,
|
||||
total,
|
||||
},
|
||||
};
|
||||
} catch (error) {
|
||||
console.error("Comment list error:", error);
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "COMMENT_LIST_ERROR",
|
||||
message: "Failed to list comments",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Admin: Moderation inbox
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export async function handleCommentInbox(
|
||||
db: Kysely<Database>,
|
||||
options: {
|
||||
status?: CommentStatus;
|
||||
collection?: string;
|
||||
search?: string;
|
||||
limit?: number;
|
||||
cursor?: string;
|
||||
} = {},
|
||||
): Promise<ApiResult<{ items: Comment[]; nextCursor?: string }>> {
|
||||
try {
|
||||
const repo = new CommentRepository(db);
|
||||
const status = options.status ?? "pending";
|
||||
|
||||
const result = await repo.findByStatus(status, {
|
||||
collection: options.collection,
|
||||
search: options.search,
|
||||
limit: options.limit,
|
||||
cursor: options.cursor,
|
||||
});
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: {
|
||||
items: result.items,
|
||||
nextCursor: result.nextCursor,
|
||||
},
|
||||
};
|
||||
} catch (error) {
|
||||
console.error("Comment inbox error:", error);
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "COMMENT_INBOX_ERROR",
|
||||
message: "Failed to list comments",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Admin: Status counts for inbox badges
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export async function handleCommentCounts(
|
||||
db: Kysely<Database>,
|
||||
): Promise<ApiResult<Record<CommentStatus, number>>> {
|
||||
try {
|
||||
const repo = new CommentRepository(db);
|
||||
const counts = await repo.countByStatus();
|
||||
return { success: true, data: counts };
|
||||
} catch (error) {
|
||||
console.error("Comment counts error:", error);
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "COMMENT_COUNTS_ERROR",
|
||||
message: "Failed to get comment counts",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Admin: Get single comment detail
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export async function handleCommentGet(
|
||||
db: Kysely<Database>,
|
||||
id: string,
|
||||
): Promise<ApiResult<Comment>> {
|
||||
try {
|
||||
const repo = new CommentRepository(db);
|
||||
const comment = await repo.findById(id);
|
||||
|
||||
if (!comment) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: `Comment not found: ${id}` },
|
||||
};
|
||||
}
|
||||
|
||||
return { success: true, data: comment };
|
||||
} catch (error) {
|
||||
console.error("Comment get error:", error);
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "COMMENT_GET_ERROR",
|
||||
message: "Failed to get comment",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Admin: Change comment status
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export async function handleCommentStatusChange(
|
||||
db: Kysely<Database>,
|
||||
id: string,
|
||||
status: CommentStatus,
|
||||
): Promise<ApiResult<Comment>> {
|
||||
try {
|
||||
const repo = new CommentRepository(db);
|
||||
const updated = await repo.updateStatus(id, status);
|
||||
|
||||
if (!updated) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: `Comment not found: ${id}` },
|
||||
};
|
||||
}
|
||||
|
||||
return { success: true, data: updated };
|
||||
} catch (error) {
|
||||
console.error("Comment status change error:", error);
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "COMMENT_STATUS_ERROR",
|
||||
message: "Failed to update comment status",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Admin: Hard delete comment
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export async function handleCommentDelete(
|
||||
db: Kysely<Database>,
|
||||
id: string,
|
||||
): Promise<ApiResult<{ deleted: true }>> {
|
||||
try {
|
||||
const repo = new CommentRepository(db);
|
||||
const deleted = await repo.delete(id);
|
||||
|
||||
if (!deleted) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: `Comment not found: ${id}` },
|
||||
};
|
||||
}
|
||||
|
||||
return { success: true, data: { deleted: true } };
|
||||
} catch (error) {
|
||||
console.error("Comment delete error:", error);
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "COMMENT_DELETE_ERROR",
|
||||
message: "Failed to delete comment",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Admin: Bulk operations
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export async function handleCommentBulk(
|
||||
db: Kysely<Database>,
|
||||
ids: string[],
|
||||
action: "approve" | "spam" | "trash" | "delete",
|
||||
): Promise<ApiResult<{ affected: number }>> {
|
||||
try {
|
||||
const repo = new CommentRepository(db);
|
||||
|
||||
let affected: number;
|
||||
if (action === "delete") {
|
||||
affected = await repo.bulkDelete(ids);
|
||||
} else {
|
||||
const statusMap: Record<string, CommentStatus> = {
|
||||
approve: "approved",
|
||||
spam: "spam",
|
||||
trash: "trash",
|
||||
};
|
||||
affected = await repo.bulkUpdateStatus(ids, statusMap[action]);
|
||||
}
|
||||
|
||||
return { success: true, data: { affected } };
|
||||
} catch (error) {
|
||||
console.error("Comment bulk error:", error);
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "COMMENT_BULK_ERROR",
|
||||
message: "Failed to perform bulk operation",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Anti-spam: Rate limiting
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Check if an IP has exceeded the comment rate limit.
|
||||
* Uses ip_hash in the comments table — no separate counter storage.
|
||||
*/
|
||||
export async function checkRateLimit(
|
||||
db: Kysely<Database>,
|
||||
ipHash: string,
|
||||
maxPerWindow: number = 5,
|
||||
windowMinutes: number = 10,
|
||||
): Promise<boolean> {
|
||||
const cutoff = new Date(Date.now() - windowMinutes * 60 * 1000).toISOString();
|
||||
|
||||
// Count recent comments from this IP
|
||||
const result = await db
|
||||
.selectFrom("_emdash_comments")
|
||||
.select((eb) => eb.fn.count("id").as("count"))
|
||||
.where("ip_hash", "=", ipHash)
|
||||
.where("created_at", ">", cutoff)
|
||||
.executeTakeFirst();
|
||||
|
||||
const count = Number(result?.count ?? 0);
|
||||
return count >= maxPerWindow;
|
||||
}
|
||||
|
||||
/**
|
||||
* Hash an IP address for storage (never store cleartext IPs).
|
||||
*
|
||||
* Uses full SHA-256 with an application salt to prevent rainbow-table
|
||||
* recovery of IPs. The caller should pass a site-specific secret;
|
||||
* falls back to a static salt if none is provided.
|
||||
*/
|
||||
export async function hashIp(ip: string, salt: string = "emdash-ip-salt"): Promise<string> {
|
||||
const data = `ip:${salt}:${ip}`;
|
||||
const buf = await crypto.subtle.digest("SHA-256", new TextEncoder().encode(data));
|
||||
return Array.from(new Uint8Array(buf), (b) => b.toString(16).padStart(2, "0")).join("");
|
||||
}
|
||||
1315
packages/core/src/api/handlers/content.ts
Normal file
1315
packages/core/src/api/handlers/content.ts
Normal file
File diff suppressed because it is too large
Load Diff
205
packages/core/src/api/handlers/dashboard.ts
Normal file
205
packages/core/src/api/handlers/dashboard.ts
Normal file
@@ -0,0 +1,205 @@
|
||||
/**
|
||||
* Dashboard stats handler
|
||||
*
|
||||
* Returns summary data for the admin dashboard in a single request:
|
||||
* collection content counts, media count, user count, and recent
|
||||
* content across all collections.
|
||||
*/
|
||||
|
||||
import { sql, type Kysely } from "kysely";
|
||||
|
||||
import { ContentRepository } from "../../database/repositories/content.js";
|
||||
import { MediaRepository } from "../../database/repositories/media.js";
|
||||
import { UserRepository } from "../../database/repositories/user.js";
|
||||
import type { Database } from "../../database/types.js";
|
||||
import { validateIdentifier } from "../../database/validate.js";
|
||||
import type { ApiResult } from "../types.js";
|
||||
|
||||
/** Per-collection content counts shown on the admin dashboard. */
export interface CollectionStats {
  slug: string;
  label: string;
  total: number; // count with no status filter
  published: number; // items with status "published"
  draft: number; // items with status "draft"
}

/** One recently-updated content item, flattened across collections. */
export interface RecentItem {
  id: string;
  collection: string; // collection slug the item belongs to
  collectionLabel: string;
  title: string; // user-defined title field, or slug/id fallback
  slug: string | null;
  status: string;
  updatedAt: string;
  authorId: string | null;
}

/** Full payload returned by handleDashboardStats in a single request. */
export interface DashboardStats {
  collections: CollectionStats[];
  mediaCount: number;
  userCount: number;
  recentItems: RecentItem[]; // capped at 10, newest first
}
|
||||
|
||||
/**
|
||||
* Fetch dashboard statistics.
|
||||
*
|
||||
* Queries are intentionally lightweight — counts use indexed columns,
|
||||
* and recent items are capped at 10.
|
||||
*/
|
||||
export async function handleDashboardStats(
|
||||
db: Kysely<Database>,
|
||||
): Promise<ApiResult<DashboardStats>> {
|
||||
try {
|
||||
// Discover collections from the system table
|
||||
const collections = await db
|
||||
.selectFrom("_emdash_collections")
|
||||
.select(["slug", "label"])
|
||||
.orderBy("slug", "asc")
|
||||
.execute();
|
||||
|
||||
// Gather per-collection counts in parallel
|
||||
const contentRepo = new ContentRepository(db);
|
||||
const collectionStats: CollectionStats[] = await Promise.all(
|
||||
collections.map(async (col) => {
|
||||
const [total, published, draft] = await Promise.all([
|
||||
contentRepo.count(col.slug),
|
||||
contentRepo.count(col.slug, { status: "published" }),
|
||||
contentRepo.count(col.slug, { status: "draft" }),
|
||||
]);
|
||||
return {
|
||||
slug: col.slug,
|
||||
label: col.label,
|
||||
total,
|
||||
published,
|
||||
draft,
|
||||
};
|
||||
}),
|
||||
);
|
||||
|
||||
// Media and user counts
|
||||
const mediaRepo = new MediaRepository(db);
|
||||
const userRepo = new UserRepository(db);
|
||||
const [mediaCount, userCount] = await Promise.all([mediaRepo.count(), userRepo.count()]);
|
||||
|
||||
// Recent items across all collections (last 10 updated, any status)
|
||||
const recentItems = await fetchRecentItems(db, collections);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: {
|
||||
collections: collectionStats,
|
||||
mediaCount,
|
||||
userCount,
|
||||
recentItems,
|
||||
},
|
||||
};
|
||||
} catch (error) {
|
||||
console.error("Dashboard stats error:", error);
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "DASHBOARD_STATS_ERROR",
|
||||
message: "Failed to load dashboard statistics",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/** Raw row shape from the UNION ALL query — all snake_case. */
interface RecentItemRow {
  id: string;
  collection: string; // collection slug, injected as a SQL literal
  collection_label: string; // collection label, injected as a SQL literal
  title: string; // COALESCE(title, slug, id) or COALESCE(slug, id)
  slug: string | null;
  status: string;
  updated_at: string;
  author_id: string | null;
}
|
||||
|
||||
/**
|
||||
* Fetch the 10 most recently updated items across all collections.
|
||||
*
|
||||
* Uses UNION ALL over each ec_* table. The query is safe because
|
||||
* collection slugs come from the system table and are validated.
|
||||
*
|
||||
* `title` is not a standard column — it's a user-defined field. We query
|
||||
* `_emdash_fields` to discover which collections have one and fall back
|
||||
* to `slug` (which is always present) otherwise.
|
||||
*/
|
||||
async function fetchRecentItems(
|
||||
db: Kysely<Database>,
|
||||
collections: Array<{ slug: string; label: string }>,
|
||||
): Promise<RecentItem[]> {
|
||||
if (collections.length === 0) return [];
|
||||
|
||||
// Discover which collections have a "title" column
|
||||
const titleFields = await db
|
||||
.selectFrom("_emdash_fields as f")
|
||||
.innerJoin("_emdash_collections as c", "c.id", "f.collection_id")
|
||||
.select(["c.slug as collection_slug"])
|
||||
.where("f.slug", "=", "title")
|
||||
.execute();
|
||||
|
||||
const collectionsWithTitle = new Set(titleFields.map((r) => r.collection_slug));
|
||||
|
||||
// Build a UNION ALL query across all content tables.
|
||||
// Each branch is wrapped in SELECT * FROM (...) so the inner
|
||||
// ORDER BY + LIMIT is valid SQLite (bare ORDER BY inside UNION
|
||||
// branches is a syntax error in SQLite).
|
||||
const subQueries = collections.map((col) => {
|
||||
validateIdentifier(col.slug);
|
||||
const table = `ec_${col.slug}`;
|
||||
const hasTitle = collectionsWithTitle.has(col.slug);
|
||||
|
||||
// Use title column if it exists, otherwise fall back to slug → id.
|
||||
// All output uses snake_case to avoid SQLite quoting issues on D1.
|
||||
const titleExpr = hasTitle ? sql`COALESCE(title, slug, id)` : sql`COALESCE(slug, id)`;
|
||||
|
||||
return sql<RecentItemRow>`
|
||||
SELECT * FROM (
|
||||
SELECT
|
||||
id,
|
||||
${sql.lit(col.slug)} AS collection,
|
||||
${sql.lit(col.label)} AS collection_label,
|
||||
${titleExpr} AS title,
|
||||
slug,
|
||||
status,
|
||||
updated_at,
|
||||
author_id
|
||||
FROM ${sql.ref(table)}
|
||||
WHERE deleted_at IS NULL
|
||||
ORDER BY updated_at DESC
|
||||
LIMIT 10
|
||||
)
|
||||
`;
|
||||
});
|
||||
|
||||
// Combine with UNION ALL
|
||||
// eslint-disable-next-line typescript-eslint(no-unnecessary-type-assertion) -- noUncheckedIndexedAccess
|
||||
let combined = subQueries[0]!;
|
||||
for (let i = 1; i < subQueries.length; i++) {
|
||||
// eslint-disable-next-line typescript-eslint(no-unnecessary-type-assertion) -- noUncheckedIndexedAccess
|
||||
combined = sql<RecentItemRow>`${combined} UNION ALL ${subQueries[i]!}`;
|
||||
}
|
||||
|
||||
// Final sort + limit across all branches
|
||||
const result = await sql<RecentItemRow>`
|
||||
SELECT * FROM (${combined})
|
||||
ORDER BY updated_at DESC
|
||||
LIMIT 10
|
||||
`.execute(db);
|
||||
|
||||
// Map snake_case DB rows → camelCase API shape
|
||||
return result.rows.map((row) => ({
|
||||
id: row.id,
|
||||
collection: row.collection,
|
||||
collectionLabel: row.collection_label,
|
||||
title: row.title,
|
||||
slug: row.slug,
|
||||
status: row.status,
|
||||
updatedAt: row.updated_at,
|
||||
authorId: row.author_id,
|
||||
}));
|
||||
}
|
||||
687
packages/core/src/api/handlers/device-flow.ts
Normal file
687
packages/core/src/api/handlers/device-flow.ts
Normal file
@@ -0,0 +1,687 @@
|
||||
/**
|
||||
* OAuth Device Flow handlers (RFC 8628).
|
||||
*
|
||||
* EmDash acts as an OAuth 2.0 authorization server. The CLI requests
|
||||
* a device code, displays a URL + user code, and polls for a token.
|
||||
* The user opens a browser, logs in, enters the code, and the CLI gets
|
||||
* an access + refresh token pair.
|
||||
*
|
||||
* Uses arctic for code generation and @emdashcms/auth for token utilities.
|
||||
*/
|
||||
|
||||
import { clampScopes } from "@emdashcms/auth";
|
||||
import type { RoleLevel } from "@emdashcms/auth";
|
||||
import { generateCodeVerifier } from "arctic";
|
||||
import type { Kysely } from "kysely";
|
||||
|
||||
import {
|
||||
generatePrefixedToken,
|
||||
hashApiToken,
|
||||
TOKEN_PREFIXES,
|
||||
VALID_SCOPES,
|
||||
} from "../../auth/api-tokens.js";
|
||||
import type { Database } from "../../database/types.js";
|
||||
import type { ApiResult } from "../types.js";
|
||||
import { lookupOAuthClient } from "./oauth-clients.js";
|
||||
import { lookupUserRoleAndStatus } from "./oauth-user-lookup.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
// Constants
// ---------------------------------------------------------------------------

/** Device codes expire after 15 minutes */
const DEVICE_CODE_TTL_SECONDS = 15 * 60;

/** Default polling interval in seconds */
const DEFAULT_INTERVAL = 5;

/** RFC 8628 §3.5: interval increase on slow_down */
const SLOW_DOWN_INCREMENT = 5;

/** Maximum slow_down interval cap (seconds) */
const MAX_SLOW_DOWN_INTERVAL = 60;

/** Access token TTL: 1 hour */
const ACCESS_TOKEN_TTL_SECONDS = 60 * 60;

/** Refresh token TTL: 90 days */
const REFRESH_TOKEN_TTL_SECONDS = 90 * 24 * 60 * 60;

/** Default scopes for CLI login (used when the client requests none) */
const DEFAULT_SCOPES = [
  "content:read",
  "content:write",
  "media:read",
  "media:write",
  "schema:read",
] as const;

/** Pattern to normalize user codes (strip hyphens) */
const HYPHEN_PATTERN = /-/g;

/**
 * Characters for user codes (uppercase, no ambiguous chars like 0/O, 1/I).
 * Exactly 32 characters, so sampling with `byte % length` over a random
 * byte (0–255) is uniform — 256 is an exact multiple of 32.
 */
const USER_CODE_CHARS = "ABCDEFGHJKLMNPQRSTUVWXYZ23456789";
|
||||
|
||||
// ---------------------------------------------------------------------------
// Types
// ---------------------------------------------------------------------------

/** Response body for POST /oauth/device/code (RFC 8628 device authorization). */
export interface DeviceCodeResponse {
  device_code: string; // opaque code the CLI polls the token endpoint with
  user_code: string; // short human code (XXXX-XXXX) the user enters in a browser
  verification_uri: string; // URL the user opens to enter the code
  expires_in: number; // seconds until the codes expire
  interval: number; // minimum polling interval, in seconds
}

/** Token response returned on a successful device-code exchange. */
export interface TokenResponse {
  access_token: string;
  refresh_token: string;
  token_type: "Bearer";
  expires_in: number; // access-token lifetime, in seconds
  scope: string; // space-separated granted scopes
}

// RFC 8628 error codes
export type DeviceFlowError =
  | "authorization_pending"
  | "slow_down"
  | "expired_token"
  | "access_denied";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** Generate a short human-readable user code (XXXX-XXXX) */
|
||||
function generateUserCode(): string {
|
||||
const bytes = new Uint8Array(8);
|
||||
crypto.getRandomValues(bytes);
|
||||
const chars = Array.from(bytes, (b) => USER_CODE_CHARS[b % USER_CODE_CHARS.length]).join("");
|
||||
return `${chars.slice(0, 4)}-${chars.slice(4, 8)}`;
|
||||
}
|
||||
|
||||
/** Get an ISO datetime string offset from now */
|
||||
function expiresAt(seconds: number): string {
|
||||
return new Date(Date.now() + seconds * 1000).toISOString();
|
||||
}
|
||||
|
||||
/** Validate and normalize scopes. Returns validated scope list. */
|
||||
function normalizeScopes(requested?: string[]): string[] {
|
||||
if (!requested || requested.length === 0) {
|
||||
return [...DEFAULT_SCOPES];
|
||||
}
|
||||
const validSet = new Set<string>(VALID_SCOPES);
|
||||
return requested.filter((s) => validSet.has(s));
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Handlers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* POST /oauth/device/code
|
||||
*
|
||||
* Issue a device code + user code. The CLI displays the user code
|
||||
* and tells the user to open the verification URI.
|
||||
*/
|
||||
export async function handleDeviceCodeRequest(
|
||||
db: Kysely<Database>,
|
||||
input: {
|
||||
client_id?: string;
|
||||
scope?: string;
|
||||
},
|
||||
verificationUri: string,
|
||||
): Promise<ApiResult<DeviceCodeResponse>> {
|
||||
try {
|
||||
// Parse and validate scopes
|
||||
const requestedScopes = input.scope ? input.scope.split(" ").filter(Boolean) : [];
|
||||
const scopes = normalizeScopes(requestedScopes);
|
||||
|
||||
if (scopes.length === 0) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "INVALID_SCOPE", message: "No valid scopes requested" },
|
||||
};
|
||||
}
|
||||
|
||||
const deviceCode = generateCodeVerifier();
|
||||
const userCode = generateUserCode();
|
||||
const expires = expiresAt(DEVICE_CODE_TTL_SECONDS);
|
||||
|
||||
await db
|
||||
.insertInto("_emdash_device_codes")
|
||||
.values({
|
||||
device_code: deviceCode,
|
||||
user_code: userCode,
|
||||
scopes: JSON.stringify(scopes),
|
||||
status: "pending",
|
||||
expires_at: expires,
|
||||
interval: DEFAULT_INTERVAL,
|
||||
})
|
||||
.execute();
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: {
|
||||
device_code: deviceCode,
|
||||
user_code: userCode,
|
||||
verification_uri: verificationUri,
|
||||
expires_in: DEVICE_CODE_TTL_SECONDS,
|
||||
interval: DEFAULT_INTERVAL,
|
||||
},
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "DEVICE_CODE_ERROR",
|
||||
message: "Failed to create device code",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * POST /oauth/device/token
 *
 * CLI polls this endpoint with the device_code. Returns:
 * - 200 with tokens if authorized
 * - 400 with error "authorization_pending" while waiting
 * - 400 with error "slow_down" if polling too fast
 * - 400 with error "expired_token" if the code expired
 * - 400 with error "access_denied" if the user denied
 *
 * The extra `deviceFlowError` / `deviceFlowInterval` fields carry the
 * RFC 8628 error code and updated interval alongside the generic ApiResult
 * error — presumably so the route layer can build the OAuth-shaped
 * response body; confirm against the route that calls this.
 */
export async function handleDeviceTokenExchange(
  db: Kysely<Database>,
  input: {
    device_code: string;
    grant_type: string;
  },
): Promise<
  ApiResult<TokenResponse> & { deviceFlowError?: DeviceFlowError; deviceFlowInterval?: number }
> {
  try {
    // Validate grant_type — only the RFC 8628 device_code grant is accepted.
    if (input.grant_type !== "urn:ietf:params:oauth:grant-type:device_code") {
      return {
        success: false,
        error: { code: "UNSUPPORTED_GRANT_TYPE", message: "Invalid grant_type" },
      };
    }

    // Look up the device code
    const row = await db
      .selectFrom("_emdash_device_codes")
      .selectAll()
      .where("device_code", "=", input.device_code)
      .executeTakeFirst();

    if (!row) {
      return {
        success: false,
        error: { code: "INVALID_GRANT", message: "Invalid device code" },
      };
    }

    const now = new Date();

    // Check expiry — expired codes are removed and can never be exchanged,
    // regardless of their status.
    if (new Date(row.expires_at) < now) {
      // Clean up expired code
      await db
        .deleteFrom("_emdash_device_codes")
        .where("device_code", "=", input.device_code)
        .execute();

      return {
        success: false,
        deviceFlowError: "expired_token",
        error: { code: "expired_token", message: "The device code has expired" },
      };
    }

    // Check status — a denial is terminal: the code is consumed immediately.
    if (row.status === "denied") {
      // Clean up denied code
      await db
        .deleteFrom("_emdash_device_codes")
        .where("device_code", "=", input.device_code)
        .execute();

      return {
        success: false,
        deviceFlowError: "access_denied",
        error: { code: "access_denied", message: "The user denied the request" },
      };
    }

    if (row.status === "pending") {
      // RFC 8628 §3.5: slow_down enforcement during polling phase.
      // Only applies while waiting for authorization — once authorized,
      // the final exchange proceeds without throttling.
      if (row.last_polled_at) {
        const lastPolled = new Date(row.last_polled_at);
        const elapsedSeconds = (now.getTime() - lastPolled.getTime()) / 1000;

        if (elapsedSeconds < row.interval) {
          // Too fast — increase interval by 5s per RFC 8628 §3.5, capped at 60s.
          // The new interval is persisted so repeat offenders keep backing off,
          // and returned to the client via deviceFlowInterval.
          const newInterval = Math.min(row.interval + SLOW_DOWN_INCREMENT, MAX_SLOW_DOWN_INTERVAL);
          await db
            .updateTable("_emdash_device_codes")
            .set({
              interval: newInterval,
              last_polled_at: now.toISOString(),
            })
            .where("device_code", "=", input.device_code)
            .execute();

          return {
            success: false,
            deviceFlowError: "slow_down",
            deviceFlowInterval: newInterval,
            error: { code: "slow_down", message: "Polling too fast" },
          };
        }
      }

      // Update last_polled_at for future slow_down checks
      await db
        .updateTable("_emdash_device_codes")
        .set({ last_polled_at: now.toISOString() })
        .where("device_code", "=", input.device_code)
        .execute();

      return {
        success: false,
        deviceFlowError: "authorization_pending",
        error: { code: "authorization_pending", message: "Authorization pending" },
      };
    }

    // Defensive guard: any status other than authorized (or an authorized
    // row missing its user) is treated as an invalid grant.
    if (row.status !== "authorized" || !row.user_id) {
      return {
        success: false,
        error: { code: "INVALID_GRANT", message: "Invalid device code state" },
      };
    }

    // Authorized! Generate tokens. Scopes were stored on the device code
    // at authorization time.
    const scopes = JSON.parse(row.scopes) as string[];

    // Generate access token
    const accessToken = generatePrefixedToken(TOKEN_PREFIXES.OAUTH_ACCESS);
    const accessExpires = expiresAt(ACCESS_TOKEN_TTL_SECONDS);

    // Generate refresh token
    const refreshToken = generatePrefixedToken(TOKEN_PREFIXES.OAUTH_REFRESH);
    const refreshExpires = expiresAt(REFRESH_TOKEN_TTL_SECONDS);

    // Store both tokens. Only hashes are persisted — the raw values are
    // returned to the client once and never stored. The access row links to
    // its refresh token via refresh_token_hash.
    // NOTE(review): these two inserts and the delete below are not wrapped
    // in a transaction — a failure partway leaves a partial token pair;
    // confirm whether the driver (e.g. D1) supports one here.
    await db
      .insertInto("_emdash_oauth_tokens")
      .values({
        token_hash: accessToken.hash,
        token_type: "access",
        user_id: row.user_id,
        scopes: JSON.stringify(scopes),
        client_type: "cli",
        expires_at: accessExpires,
        refresh_token_hash: refreshToken.hash,
      })
      .execute();

    await db
      .insertInto("_emdash_oauth_tokens")
      .values({
        token_hash: refreshToken.hash,
        token_type: "refresh",
        user_id: row.user_id,
        scopes: JSON.stringify(scopes),
        client_type: "cli",
        expires_at: refreshExpires,
        refresh_token_hash: null,
      })
      .execute();

    // Consume the device code (delete it) — each code is single-use.
    await db
      .deleteFrom("_emdash_device_codes")
      .where("device_code", "=", input.device_code)
      .execute();

    return {
      success: true,
      data: {
        access_token: accessToken.raw,
        refresh_token: refreshToken.raw,
        token_type: "Bearer",
        expires_in: ACCESS_TOKEN_TTL_SECONDS,
        scope: scopes.join(" "),
      },
    };
  } catch {
    return {
      success: false,
      error: {
        code: "TOKEN_EXCHANGE_ERROR",
        message: "Failed to exchange device code",
      },
    };
  }
}
|
||||
|
||||
/**
|
||||
* POST /oauth/device/authorize
|
||||
*
|
||||
* The user submits the user_code after logging in via the browser.
|
||||
* This authorizes the device code, allowing the CLI to exchange it for tokens.
|
||||
*
|
||||
* Scopes are clamped to the user's role at this point. The stored scopes
|
||||
* are replaced with the intersection of requested scopes and the scopes
|
||||
* the user's role permits. This prevents scope escalation.
|
||||
*/
|
||||
export async function handleDeviceAuthorize(
  db: Kysely<Database>,
  userId: string,
  userRole: RoleLevel,
  input: {
    user_code: string;
    action?: "approve" | "deny";
  },
): Promise<ApiResult<{ authorized: boolean }>> {
  try {
    // Normalize user code (strip hyphens, uppercase)
    const normalizedCode = input.user_code.replace(HYPHEN_PATTERN, "").toUpperCase();

    // Look up the device code by user_code.
    // NOTE(review): this fetches every pending device code and scans in JS
    // because stored user_codes may contain display hyphens; presumably the
    // pending set stays small — confirm this table cannot grow unbounded.
    const row = await db
      .selectFrom("_emdash_device_codes")
      .selectAll()
      .where("status", "=", "pending")
      .execute();

    // Find the matching code (strip hyphens for comparison)
    const match = row.find(
      (r) => r.user_code.replace(HYPHEN_PATTERN, "").toUpperCase() === normalizedCode,
    );

    if (!match) {
      return {
        success: false,
        error: { code: "INVALID_CODE", message: "Invalid or expired code" },
      };
    }

    // Check expiry — expired codes are deleted eagerly so they can never be
    // approved or exchanged later.
    if (new Date(match.expires_at) < new Date()) {
      await db
        .deleteFrom("_emdash_device_codes")
        .where("device_code", "=", match.device_code)
        .execute();

      return {
        success: false,
        error: { code: "EXPIRED_CODE", message: "This code has expired" },
      };
    }

    // Default to approval when the caller omits an explicit action.
    const action = input.action ?? "approve";

    if (action === "deny") {
      // Mark denied (rather than delete) — presumably so the polling client
      // can distinguish denial from an unknown code; confirm against the
      // token-exchange handler's status handling.
      await db
        .updateTable("_emdash_device_codes")
        .set({ status: "denied" })
        .where("device_code", "=", match.device_code)
        .execute();

      return { success: true, data: { authorized: false } };
    }

    // Clamp requested scopes to those the user's role permits.
    // effective_scopes = requested_scopes ∩ scopesForRole(user.role)
    const requestedScopes = JSON.parse(match.scopes) as string[];
    const effectiveScopes = clampScopes(requestedScopes, userRole);

    if (effectiveScopes.length === 0) {
      return {
        success: false,
        error: {
          code: "INSUFFICIENT_ROLE",
          message: "Your role does not permit any of the requested scopes",
        },
      };
    }

    // Approve: set user_id, status, and clamped scopes. The stored scopes
    // are REPLACED with the clamped set, preventing scope escalation at
    // exchange time (see the function-level doc comment above).
    await db
      .updateTable("_emdash_device_codes")
      .set({
        status: "authorized",
        user_id: userId,
        scopes: JSON.stringify(effectiveScopes),
      })
      .where("device_code", "=", match.device_code)
      .execute();

    return { success: true, data: { authorized: true } };
  } catch {
    // Deliberately opaque: internal/DB errors are never leaked to callers
    // of the device-authorization endpoint.
    return {
      success: false,
      error: {
        code: "AUTHORIZE_ERROR",
        message: "Failed to authorize device",
      },
    };
  }
}
|
||||
|
||||
/**
|
||||
* POST /oauth/token/refresh
|
||||
*
|
||||
* Exchange a refresh token for a new access token.
|
||||
* The refresh token itself is not rotated (per spec: optional rotation).
|
||||
*/
|
||||
export async function handleTokenRefresh(
  db: Kysely<Database>,
  input: {
    refresh_token: string;
    grant_type: string;
  },
): Promise<ApiResult<TokenResponse>> {
  try {
    if (input.grant_type !== "refresh_token") {
      return {
        success: false,
        error: { code: "UNSUPPORTED_GRANT_TYPE", message: "Invalid grant_type" },
      };
    }

    // Cheap format check before hashing: all refresh tokens carry a fixed prefix.
    if (!input.refresh_token.startsWith(TOKEN_PREFIXES.OAUTH_REFRESH)) {
      return {
        success: false,
        error: { code: "INVALID_GRANT", message: "Invalid refresh token format" },
      };
    }

    // Only the hash is stored; the raw token never touches the database.
    const refreshHash = hashApiToken(input.refresh_token);

    const row = await db
      .selectFrom("_emdash_oauth_tokens")
      .selectAll()
      .where("token_hash", "=", refreshHash)
      .where("token_type", "=", "refresh")
      .executeTakeFirst();

    if (!row) {
      return {
        success: false,
        error: { code: "INVALID_GRANT", message: "Invalid refresh token" },
      };
    }

    // Check expiry
    if (new Date(row.expires_at) < new Date()) {
      // Clean up expired refresh token and its access tokens
      // (access rows link back via refresh_token_hash).
      await db.deleteFrom("_emdash_oauth_tokens").where("token_hash", "=", refreshHash).execute();
      await db
        .deleteFrom("_emdash_oauth_tokens")
        .where("refresh_token_hash", "=", refreshHash)
        .execute();

      return {
        success: false,
        error: { code: "INVALID_GRANT", message: "Refresh token expired" },
      };
    }

    // SEC-42: Revalidate user role before issuing new access token.
    // SEC-43: Reject refresh if user is disabled or deleted.
    const userInfo = await lookupUserRoleAndStatus(db, row.user_id);
    if (!userInfo) {
      // User no longer exists — revoke all their tokens
      await db.deleteFrom("_emdash_oauth_tokens").where("user_id", "=", row.user_id).execute();
      return {
        success: false,
        error: { code: "INVALID_GRANT", message: "User not found" },
      };
    }

    if (userInfo.disabled) {
      // User is disabled — revoke all their tokens
      await db.deleteFrom("_emdash_oauth_tokens").where("user_id", "=", row.user_id).execute();
      return {
        success: false,
        error: { code: "INVALID_GRANT", message: "User account is disabled" },
      };
    }

    // Revalidate stored scopes against the user's current role.
    // A demoted user's refresh token may carry stale elevated scopes.
    const storedScopes = JSON.parse(row.scopes) as string[];
    let scopes = clampScopes(storedScopes, userInfo.role);

    // SEC-41: Intersect with the client's registered scopes (if any).
    // Same check as the approval path — a client registered with limited
    // scopes should never receive elevated scopes on refresh, even if the
    // user's role would allow them.
    if (row.client_id) {
      const client = await lookupOAuthClient(db, row.client_id);
      if (client?.scopes?.length) {
        scopes = scopes.filter((s: string) => client.scopes!.includes(s));
      }
    }

    if (scopes.length === 0) {
      // User's role no longer supports any of the token's scopes — revoke
      // the refresh token and every access token derived from it.
      await db.deleteFrom("_emdash_oauth_tokens").where("token_hash", "=", refreshHash).execute();
      await db
        .deleteFrom("_emdash_oauth_tokens")
        .where("refresh_token_hash", "=", refreshHash)
        .execute();
      return {
        success: false,
        error: {
          code: "INVALID_GRANT",
          message: "User role no longer supports any of the token's scopes",
        },
      };
    }

    // Delete old access tokens for this refresh token
    await db
      .deleteFrom("_emdash_oauth_tokens")
      .where("refresh_token_hash", "=", refreshHash)
      .where("token_type", "=", "access")
      .execute();

    // Generate new access token
    const accessToken = generatePrefixedToken(TOKEN_PREFIXES.OAUTH_ACCESS);
    const accessExpires = expiresAt(ACCESS_TOKEN_TTL_SECONDS);

    // The new access row stores refresh_token_hash so that revoking the
    // refresh token later cascades to this access token.
    await db
      .insertInto("_emdash_oauth_tokens")
      .values({
        token_hash: accessToken.hash,
        token_type: "access",
        user_id: row.user_id,
        scopes: JSON.stringify(scopes),
        client_type: row.client_type,
        expires_at: accessExpires,
        refresh_token_hash: refreshHash,
      })
      .execute();

    return {
      success: true,
      data: {
        access_token: accessToken.raw,
        refresh_token: input.refresh_token, // Return same refresh token
        token_type: "Bearer",
        expires_in: ACCESS_TOKEN_TTL_SECONDS,
        scope: scopes.join(" "),
      },
    };
  } catch {
    // Opaque failure — internal errors are never leaked to token-endpoint callers.
    return {
      success: false,
      error: {
        code: "TOKEN_REFRESH_ERROR",
        message: "Failed to refresh token",
      },
    };
  }
}
|
||||
|
||||
/**
|
||||
* POST /oauth/token/revoke
|
||||
*
|
||||
* Revoke an access or refresh token. If a refresh token is revoked,
|
||||
* also revoke all associated access tokens.
|
||||
*
|
||||
* Per RFC 7009, this endpoint always returns 200 (even for invalid tokens).
|
||||
*/
|
||||
export async function handleTokenRevoke(
|
||||
db: Kysely<Database>,
|
||||
input: {
|
||||
token: string;
|
||||
},
|
||||
): Promise<ApiResult<{ revoked: boolean }>> {
|
||||
try {
|
||||
const hash = hashApiToken(input.token);
|
||||
|
||||
// Look up the token
|
||||
const row = await db
|
||||
.selectFrom("_emdash_oauth_tokens")
|
||||
.select(["token_hash", "token_type", "refresh_token_hash"])
|
||||
.where("token_hash", "=", hash)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (!row) {
|
||||
// Per RFC 7009: always 200, even for invalid tokens
|
||||
return { success: true, data: { revoked: true } };
|
||||
}
|
||||
|
||||
if (row.token_type === "refresh") {
|
||||
// Revoke refresh token and all its access tokens
|
||||
await db
|
||||
.deleteFrom("_emdash_oauth_tokens")
|
||||
.where("refresh_token_hash", "=", hash)
|
||||
.execute();
|
||||
await db.deleteFrom("_emdash_oauth_tokens").where("token_hash", "=", hash).execute();
|
||||
} else {
|
||||
// Revoke just the access token
|
||||
await db.deleteFrom("_emdash_oauth_tokens").where("token_hash", "=", hash).execute();
|
||||
}
|
||||
|
||||
return { success: true, data: { revoked: true } };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "TOKEN_REVOKE_ERROR",
|
||||
message: "Failed to revoke token",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
163
packages/core/src/api/handlers/index.ts
Normal file
163
packages/core/src/api/handlers/index.ts
Normal file
@@ -0,0 +1,163 @@
|
||||
/**
|
||||
* API handler implementations for EmDash REST endpoints
|
||||
*
|
||||
* Re-exports all handlers from their respective modules
|
||||
*/
|
||||
|
||||
// Content handlers
|
||||
export {
|
||||
handleContentList,
|
||||
handleContentGet,
|
||||
handleContentGetIncludingTrashed,
|
||||
handleContentCreate,
|
||||
handleContentUpdate,
|
||||
handleContentDuplicate,
|
||||
handleContentDelete,
|
||||
handleContentRestore,
|
||||
handleContentPermanentDelete,
|
||||
handleContentListTrashed,
|
||||
handleContentCountTrashed,
|
||||
handleContentSchedule,
|
||||
handleContentUnschedule,
|
||||
handleContentPublish,
|
||||
handleContentUnpublish,
|
||||
handleContentCountScheduled,
|
||||
handleContentDiscardDraft,
|
||||
handleContentCompare,
|
||||
handleContentTranslations,
|
||||
type TrashedContentItem,
|
||||
} from "./content.js";
|
||||
|
||||
// Dashboard stats
|
||||
export {
|
||||
handleDashboardStats,
|
||||
type CollectionStats,
|
||||
type DashboardStats,
|
||||
type RecentItem,
|
||||
} from "./dashboard.js";
|
||||
|
||||
// Manifest generation
|
||||
export { generateManifest } from "./manifest.js";
|
||||
|
||||
// Revision handlers
|
||||
export {
|
||||
handleRevisionList,
|
||||
handleRevisionGet,
|
||||
handleRevisionRestore,
|
||||
type RevisionListResponse,
|
||||
type RevisionResponse,
|
||||
} from "./revision.js";
|
||||
|
||||
// Media handlers
|
||||
export {
|
||||
handleMediaList,
|
||||
handleMediaGet,
|
||||
handleMediaCreate,
|
||||
handleMediaUpdate,
|
||||
handleMediaDelete,
|
||||
type MediaListResponse,
|
||||
type MediaResponse,
|
||||
} from "./media.js";
|
||||
|
||||
// Schema handlers
|
||||
export {
|
||||
handleSchemaCollectionList,
|
||||
handleSchemaCollectionGet,
|
||||
handleSchemaCollectionCreate,
|
||||
handleSchemaCollectionUpdate,
|
||||
handleSchemaCollectionDelete,
|
||||
handleSchemaFieldList,
|
||||
handleSchemaFieldGet,
|
||||
handleSchemaFieldCreate,
|
||||
handleSchemaFieldUpdate,
|
||||
handleSchemaFieldDelete,
|
||||
handleSchemaFieldReorder,
|
||||
handleOrphanedTableList,
|
||||
handleOrphanedTableRegister,
|
||||
type CollectionListResponse,
|
||||
type CollectionResponse,
|
||||
type CollectionWithFieldsResponse,
|
||||
type FieldListResponse,
|
||||
type FieldResponse,
|
||||
type OrphanedTable,
|
||||
type OrphanedTableListResponse,
|
||||
} from "./schema.js";
|
||||
|
||||
// SEO handlers
|
||||
export { handleSitemapData, type SitemapContentEntry, type SitemapDataResponse } from "./seo.js";
|
||||
|
||||
// Plugin handlers
|
||||
export {
|
||||
handlePluginList,
|
||||
handlePluginGet,
|
||||
handlePluginEnable,
|
||||
handlePluginDisable,
|
||||
type PluginInfo,
|
||||
type PluginListResponse,
|
||||
type PluginResponse,
|
||||
} from "./plugins.js";
|
||||
|
||||
// Menu handlers
|
||||
export {
|
||||
handleMenuList,
|
||||
handleMenuCreate,
|
||||
handleMenuGet,
|
||||
handleMenuUpdate,
|
||||
handleMenuDelete,
|
||||
handleMenuItemCreate,
|
||||
handleMenuItemUpdate,
|
||||
handleMenuItemDelete,
|
||||
handleMenuItemReorder,
|
||||
type MenuListItem,
|
||||
type MenuWithItems,
|
||||
type CreateMenuItemInput,
|
||||
type UpdateMenuItemInput,
|
||||
type ReorderItem,
|
||||
} from "./menus.js";
|
||||
|
||||
// Section handlers
|
||||
export {
|
||||
handleSectionList,
|
||||
handleSectionCreate,
|
||||
handleSectionGet,
|
||||
handleSectionUpdate,
|
||||
handleSectionDelete,
|
||||
type SectionListResponse,
|
||||
} from "./sections.js";
|
||||
|
||||
// Settings handlers
|
||||
export { handleSettingsGet, handleSettingsUpdate } from "./settings.js";
|
||||
|
||||
// Taxonomy handlers
|
||||
export {
|
||||
handleTaxonomyList,
|
||||
handleTermList,
|
||||
handleTermCreate,
|
||||
handleTermGet,
|
||||
handleTermUpdate,
|
||||
handleTermDelete,
|
||||
type TaxonomyDef,
|
||||
type TaxonomyListResponse,
|
||||
type TermData,
|
||||
type TermWithCount,
|
||||
type TermListResponse,
|
||||
type TermResponse,
|
||||
type TermGetResponse,
|
||||
} from "./taxonomies.js";
|
||||
|
||||
// Marketplace handlers
|
||||
export {
|
||||
handleMarketplaceInstall,
|
||||
handleMarketplaceUpdate,
|
||||
handleMarketplaceUninstall,
|
||||
handleMarketplaceUpdateCheck,
|
||||
handleMarketplaceSearch,
|
||||
handleMarketplaceGetPlugin,
|
||||
handleThemeSearch,
|
||||
handleThemeGetDetail,
|
||||
loadBundleFromR2,
|
||||
type MarketplaceInstallResult,
|
||||
type MarketplaceUpdateResult,
|
||||
type MarketplaceUpdateCheck,
|
||||
type MarketplaceUninstallResult,
|
||||
} from "./marketplace.js";
|
||||
158
packages/core/src/api/handlers/manifest.ts
Normal file
158
packages/core/src/api/handlers/manifest.ts
Normal file
@@ -0,0 +1,158 @@
|
||||
/**
|
||||
* Manifest generation handlers
|
||||
*/
|
||||
|
||||
import { hashString } from "../../utils/hash.js";
|
||||
import type { ManifestResponse, FieldDescriptor } from "../types.js";
|
||||
|
||||
/** Pattern to add spaces before capital letters */
const CAMEL_CASE_PATTERN = /([A-Z])/g;
/** Pattern matching the first character of a string (used to uppercase it) */
const FIRST_CHAR_PATTERN = /^./;

// Collection definition shape for manifest generation
interface CollectionDefinition {
  // Zod-style schema: newer Zod exposes the object shape via _def.shape(),
  // older variants via a plain .shape property (see extractFieldDescriptors).
  schema: {
    _def?: { shape?: () => Record<string, unknown> };
    shape?: Record<string, unknown>;
  };
  // Admin UI metadata for the collection
  admin: {
    label: string;
    labelSingular?: string;
    supports?: string[];
  };
}
// Map of collection name -> definition, as consumed by generateManifest
type CollectionMap = Record<string, CollectionDefinition>;
|
||||
|
||||
/**
|
||||
* Generate admin manifest from collections
|
||||
*/
|
||||
export async function generateManifest(
|
||||
collections: CollectionMap,
|
||||
plugins: Record<
|
||||
string,
|
||||
{
|
||||
adminPages?: Array<{ path: string; component: string }>;
|
||||
widgets?: string[];
|
||||
}
|
||||
> = {},
|
||||
): Promise<ManifestResponse> {
|
||||
const manifestCollections: ManifestResponse["collections"] = {};
|
||||
|
||||
for (const [name, definition] of Object.entries(collections)) {
|
||||
// Extract field descriptors from Zod schema
|
||||
const fields = extractFieldDescriptors(definition.schema);
|
||||
|
||||
manifestCollections[name] = {
|
||||
label: definition.admin.label,
|
||||
labelSingular: definition.admin.labelSingular || definition.admin.label,
|
||||
supports: definition.admin.supports || [],
|
||||
fields,
|
||||
};
|
||||
}
|
||||
|
||||
// Generate hash from collections (for cache invalidation)
|
||||
const hash = await hashString(JSON.stringify(manifestCollections));
|
||||
|
||||
return {
|
||||
version: "0.1.0",
|
||||
hash,
|
||||
collections: manifestCollections,
|
||||
plugins,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract field descriptors from Zod schema
|
||||
* Note: This is a simplified implementation that handles common types
|
||||
*/
|
||||
function extractFieldDescriptors(schema: {
|
||||
_def?: { shape?: () => Record<string, unknown> };
|
||||
shape?: Record<string, unknown>;
|
||||
}): Record<string, FieldDescriptor> {
|
||||
const fields: Record<string, FieldDescriptor> = {};
|
||||
|
||||
// Handle Zod object schema
|
||||
const shape = typeof schema._def?.shape === "function" ? schema._def.shape() : schema.shape || {};
|
||||
|
||||
for (const [name, fieldSchema] of Object.entries(shape)) {
|
||||
fields[name] = extractFieldType(name, fieldSchema);
|
||||
}
|
||||
|
||||
return fields;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract field type from Zod schema
|
||||
*/
|
||||
/** Type guard: check if a value is a non-null object */
|
||||
function isObject(value: unknown): value is Record<string, unknown> {
|
||||
return typeof value === "object" && value !== null;
|
||||
}
|
||||
|
||||
function extractFieldType(name: string, schema: unknown): FieldDescriptor {
|
||||
if (!isObject(schema)) {
|
||||
return { kind: "string", label: formatLabel(name) };
|
||||
}
|
||||
|
||||
// Check for custom field markers
|
||||
if (schema.isPortableText) {
|
||||
return { kind: "portableText", label: formatLabel(name) };
|
||||
}
|
||||
if (schema.isImage) {
|
||||
return { kind: "image", label: formatLabel(name) };
|
||||
}
|
||||
if (schema.isReference) {
|
||||
return { kind: "reference", label: formatLabel(name) };
|
||||
}
|
||||
|
||||
// Handle standard Zod types
|
||||
const def = isObject(schema._def) ? schema._def : undefined;
|
||||
const typeName = typeof def?.typeName === "string" ? def.typeName : undefined;
|
||||
|
||||
switch (typeName) {
|
||||
case "ZodString":
|
||||
return { kind: "string", label: formatLabel(name) };
|
||||
case "ZodNumber":
|
||||
return { kind: "number", label: formatLabel(name) };
|
||||
case "ZodBoolean":
|
||||
return { kind: "boolean", label: formatLabel(name) };
|
||||
case "ZodDate":
|
||||
return { kind: "datetime", label: formatLabel(name) };
|
||||
case "ZodEnum": {
|
||||
const values = Array.isArray(def?.values) ? def.values : [];
|
||||
return {
|
||||
kind: "select",
|
||||
label: formatLabel(name),
|
||||
options: values
|
||||
.filter((v): v is string => typeof v === "string")
|
||||
.map((v) => ({
|
||||
value: v,
|
||||
label: v.charAt(0).toUpperCase() + v.slice(1),
|
||||
})),
|
||||
};
|
||||
}
|
||||
case "ZodArray":
|
||||
return { kind: "array", label: formatLabel(name) };
|
||||
case "ZodObject":
|
||||
return { kind: "object", label: formatLabel(name) };
|
||||
case "ZodOptional":
|
||||
case "ZodDefault":
|
||||
// Unwrap optional/default types
|
||||
if (def?.innerType) {
|
||||
return extractFieldType(name, def.innerType);
|
||||
}
|
||||
return { kind: "string", label: formatLabel(name) };
|
||||
default:
|
||||
return { kind: "string", label: formatLabel(name) };
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Format field name as label
|
||||
*/
|
||||
function formatLabel(name: string): string {
|
||||
return name
|
||||
.replace(CAMEL_CASE_PATTERN, " $1")
|
||||
.replace(FIRST_CHAR_PATTERN, (str) => str.toUpperCase())
|
||||
.trim();
|
||||
}
|
||||
930
packages/core/src/api/handlers/marketplace.ts
Normal file
930
packages/core/src/api/handlers/marketplace.ts
Normal file
@@ -0,0 +1,930 @@
|
||||
/**
|
||||
* Marketplace plugin handlers
|
||||
*
|
||||
* Business logic for installing, updating, uninstalling, and checking
|
||||
* updates for marketplace plugins. Routes are thin wrappers around these.
|
||||
*/
|
||||
|
||||
import type { Kysely } from "kysely";
|
||||
|
||||
import type { Database } from "../../database/types.js";
|
||||
import { validatePluginIdentifier } from "../../database/validate.js";
|
||||
import { pluginManifestSchema } from "../../plugins/manifest-schema.js";
|
||||
import { normalizeManifestRoute } from "../../plugins/manifest-schema.js";
|
||||
import {
|
||||
createMarketplaceClient,
|
||||
MarketplaceError,
|
||||
MarketplaceUnavailableError,
|
||||
type MarketplaceClient,
|
||||
type MarketplacePluginDetail,
|
||||
type MarketplaceSearchOpts,
|
||||
type MarketplaceThemeSearchOpts,
|
||||
type MarketplaceVersionSummary,
|
||||
type PluginBundle,
|
||||
} from "../../plugins/marketplace.js";
|
||||
import type { SandboxRunner } from "../../plugins/sandbox/types.js";
|
||||
import { PluginStateRepository } from "../../plugins/state.js";
|
||||
import type { PluginManifest } from "../../plugins/types.js";
|
||||
import { EmDashStorageError } from "../../storage/types.js";
|
||||
import type { Storage } from "../../storage/types.js";
|
||||
import type { ApiResult } from "../types.js";
|
||||
|
||||
// ── Types ──────────────────────────────────────────────────────────

/** Result of a successful marketplace plugin installation. */
export interface MarketplaceInstallResult {
  pluginId: string;
  // Version actually installed (requested, or the marketplace's latest)
  version: string;
  // Capabilities declared by the installed version's manifest
  capabilities: string[];
}

/** Result of a successful in-place plugin update. */
export interface MarketplaceUpdateResult {
  pluginId: string;
  oldVersion: string;
  newVersion: string;
  // Capabilities gained/lost between old and new versions (see diffCapabilities)
  capabilityChanges: {
    added: string[];
    removed: string[];
  };
  // Routes that became publicly reachable in the new version
  // (see diffRouteVisibility); omitted when nothing changed
  routeVisibilityChanges?: {
    newlyPublic: string[];
  };
}

/** Result of checking one installed plugin for an available update. */
export interface MarketplaceUpdateCheck {
  pluginId: string;
  // Currently installed version
  installed: string;
  // Latest version published on the marketplace
  latest: string;
  hasUpdate: boolean;
  hasCapabilityChanges: boolean;
  capabilityChanges?: {
    added: string[];
    removed: string[];
  };
  hasRouteVisibilityChanges: boolean;
  routeVisibilityChanges?: {
    newlyPublic: string[];
  };
}

/** Result of uninstalling a marketplace plugin. */
export interface MarketplaceUninstallResult {
  pluginId: string;
  // Presumably true when plugin-owned data was also removed —
  // confirm against the uninstall handler
  dataDeleted: boolean;
}
|
||||
|
||||
// ── Helpers ────────────────────────────────────────────────────────
|
||||
|
||||
/** Semver-like pattern: digits, dots, hyphens, plus signs (e.g. 1.0.0, 1.0.0-beta.1) */
|
||||
const VERSION_PATTERN = /^[a-z0-9][a-z0-9._+-]*$/i;
|
||||
|
||||
function validateVersion(version: string): void {
|
||||
if (version.includes("..")) throw new Error("Invalid version format");
|
||||
if (!VERSION_PATTERN.test(version)) {
|
||||
throw new Error("Invalid version format");
|
||||
}
|
||||
}
|
||||
|
||||
function getClient(marketplaceUrl: string | undefined): MarketplaceClient | null {
|
||||
if (!marketplaceUrl) return null;
|
||||
return createMarketplaceClient(marketplaceUrl);
|
||||
}
|
||||
|
||||
function diffCapabilities(
|
||||
oldCaps: string[],
|
||||
newCaps: string[],
|
||||
): { added: string[]; removed: string[] } {
|
||||
const oldSet = new Set(oldCaps);
|
||||
const newSet = new Set(newCaps);
|
||||
return {
|
||||
added: newCaps.filter((c) => !oldSet.has(c)),
|
||||
removed: oldCaps.filter((c) => !newSet.has(c)),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Diff route visibility between two manifests.
|
||||
* Returns routes that changed from private to public (newly exposed).
|
||||
*/
|
||||
function diffRouteVisibility(
|
||||
oldManifest: PluginManifest | undefined,
|
||||
newManifest: PluginManifest,
|
||||
): { newlyPublic: string[] } {
|
||||
const oldPublicRoutes = new Set<string>();
|
||||
if (oldManifest) {
|
||||
for (const entry of oldManifest.routes) {
|
||||
const normalized = normalizeManifestRoute(entry);
|
||||
if (normalized.public === true) {
|
||||
oldPublicRoutes.add(normalized.name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const newlyPublic: string[] = [];
|
||||
for (const entry of newManifest.routes) {
|
||||
const normalized = normalizeManifestRoute(entry);
|
||||
if (normalized.public === true && !oldPublicRoutes.has(normalized.name)) {
|
||||
newlyPublic.push(normalized.name);
|
||||
}
|
||||
}
|
||||
|
||||
return { newlyPublic };
|
||||
}
|
||||
|
||||
async function resolveVersionMetadata(
|
||||
client: MarketplaceClient,
|
||||
pluginId: string,
|
||||
pluginDetail: MarketplacePluginDetail,
|
||||
version: string,
|
||||
): Promise<MarketplaceVersionSummary | null> {
|
||||
if (pluginDetail.latestVersion?.version === version) {
|
||||
return {
|
||||
version: pluginDetail.latestVersion.version,
|
||||
minEmDashVersion: pluginDetail.latestVersion.minEmDashVersion,
|
||||
bundleSize: pluginDetail.latestVersion.bundleSize,
|
||||
checksum: pluginDetail.latestVersion.checksum,
|
||||
changelog: pluginDetail.latestVersion.changelog,
|
||||
capabilities: pluginDetail.latestVersion.capabilities,
|
||||
status: pluginDetail.latestVersion.status,
|
||||
auditVerdict: pluginDetail.latestVersion.audit?.verdict ?? null,
|
||||
imageAuditVerdict: pluginDetail.latestVersion.imageAudit?.verdict ?? null,
|
||||
publishedAt: pluginDetail.latestVersion.publishedAt,
|
||||
};
|
||||
}
|
||||
|
||||
const versions = await client.getVersions(pluginId);
|
||||
return versions.find((v) => v.version === version) ?? null;
|
||||
}
|
||||
|
||||
function validateBundleIdentity(
|
||||
bundle: PluginBundle,
|
||||
pluginId: string,
|
||||
version: string,
|
||||
): ApiResult<never> | null {
|
||||
if (bundle.manifest.id !== pluginId) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "MANIFEST_MISMATCH",
|
||||
message: `Bundle manifest ID (${bundle.manifest.id}) does not match requested plugin (${pluginId})`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (bundle.manifest.version !== version) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "MANIFEST_VERSION_MISMATCH",
|
||||
message: `Bundle manifest version (${bundle.manifest.version}) does not match requested version (${version})`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/** Store a plugin bundle's files in site-local R2 storage */
|
||||
async function storeBundleInR2(
|
||||
storage: Storage,
|
||||
pluginId: string,
|
||||
version: string,
|
||||
bundle: PluginBundle,
|
||||
): Promise<void> {
|
||||
validatePluginIdentifier(pluginId, "plugin ID");
|
||||
validateVersion(version);
|
||||
const prefix = `marketplace/${pluginId}/${version}`;
|
||||
|
||||
// Store manifest
|
||||
await storage.upload({
|
||||
key: `${prefix}/manifest.json`,
|
||||
body: new TextEncoder().encode(JSON.stringify(bundle.manifest)),
|
||||
contentType: "application/json",
|
||||
});
|
||||
|
||||
// Store backend code
|
||||
await storage.upload({
|
||||
key: `${prefix}/backend.js`,
|
||||
body: new TextEncoder().encode(bundle.backendCode),
|
||||
contentType: "application/javascript",
|
||||
});
|
||||
|
||||
// Store admin code if present
|
||||
if (bundle.adminCode) {
|
||||
await storage.upload({
|
||||
key: `${prefix}/admin.js`,
|
||||
body: new TextEncoder().encode(bundle.adminCode),
|
||||
contentType: "application/javascript",
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/** Read a ReadableStream to string */
|
||||
async function streamToText(stream: ReadableStream<Uint8Array>): Promise<string> {
|
||||
return new Response(stream).text();
|
||||
}
|
||||
|
||||
/** Load a plugin bundle from site-local R2 storage */
export async function loadBundleFromR2(
  storage: Storage,
  pluginId: string,
  version: string,
): Promise<{ manifest: PluginManifest; backendCode: string; adminCode?: string } | null> {
  // Both identifiers become path segments below; validate to block traversal.
  validatePluginIdentifier(pluginId, "plugin ID");
  validateVersion(version);
  const prefix = `marketplace/${pluginId}/${version}`;

  try {
    const manifestResult = await storage.download(`${prefix}/manifest.json`);
    const backendResult = await storage.download(`${prefix}/backend.js`);

    const manifestText = await streamToText(manifestResult.body);
    const backendCode = await streamToText(backendResult.body);
    const parsed: unknown = JSON.parse(manifestText);
    // Re-validate the stored manifest on every load; a null return means the
    // stored bundle is unusable, not merely missing.
    const result = pluginManifestSchema.safeParse(parsed);
    if (!result.success) return null;
    // Elements are validated as unknown[] by Zod; cast to PluginManifest
    // for the Element[] type (Block Kit validation happens at render time).
    // eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion -- Zod types elements as unknown[]; Element type validated at render time
    const manifest = result.data as unknown as PluginManifest;

    // Try to load admin code (optional)
    let adminCode: string | undefined;
    try {
      const adminResult = await storage.download(`${prefix}/admin.js`);
      adminCode = await streamToText(adminResult.body);
    } catch {
      // admin.js is optional
    }

    return { manifest, backendCode, adminCode };
  } catch {
    // Missing files, storage errors, and malformed JSON all collapse to null.
    return null;
  }
}
|
||||
|
||||
/** Delete a plugin bundle from site-local R2 storage */
|
||||
async function deleteBundleFromR2(
|
||||
storage: Storage,
|
||||
pluginId: string,
|
||||
version: string,
|
||||
): Promise<void> {
|
||||
validatePluginIdentifier(pluginId, "plugin ID");
|
||||
validateVersion(version);
|
||||
const prefix = `marketplace/${pluginId}/${version}`;
|
||||
const files = ["manifest.json", "backend.js", "admin.js"];
|
||||
|
||||
for (const file of files) {
|
||||
try {
|
||||
await storage.delete(`${prefix}/${file}`);
|
||||
} catch {
|
||||
// Ignore missing files
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ── Install ────────────────────────────────────────────────────────
|
||||
|
||||
export async function handleMarketplaceInstall(
|
||||
db: Kysely<Database>,
|
||||
storage: Storage | null,
|
||||
sandboxRunner: SandboxRunner | null,
|
||||
marketplaceUrl: string | undefined,
|
||||
pluginId: string,
|
||||
opts?: { version?: string; configuredPluginIds?: Set<string> },
|
||||
): Promise<ApiResult<MarketplaceInstallResult>> {
|
||||
const client = getClient(marketplaceUrl);
|
||||
if (!client) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "MARKETPLACE_NOT_CONFIGURED",
|
||||
message: "Marketplace is not configured",
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (!storage) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "STORAGE_NOT_CONFIGURED",
|
||||
message: "Storage is required for marketplace plugin installation",
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (!sandboxRunner || !sandboxRunner.isAvailable()) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "SANDBOX_NOT_AVAILABLE",
|
||||
message: "Sandbox runner is required for marketplace plugins",
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
try {
|
||||
// Check if already installed
|
||||
const stateRepo = new PluginStateRepository(db);
|
||||
const existing = await stateRepo.get(pluginId);
|
||||
if (existing && existing.source === "marketplace") {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "ALREADY_INSTALLED",
|
||||
message: `Plugin ${pluginId} is already installed`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// Block installation if a configured (trusted) plugin with the same ID exists.
|
||||
// Without this check, the sandboxed plugin could shadow the trusted plugin's
|
||||
// route handlers while auth decisions are made against the trusted plugin's metadata.
|
||||
if (opts?.configuredPluginIds?.has(pluginId)) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "PLUGIN_ID_CONFLICT",
|
||||
message: `Cannot install marketplace plugin "${pluginId}" — a configured plugin with the same ID already exists`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// Fetch plugin detail from marketplace
|
||||
const pluginDetail = await client.getPlugin(pluginId);
|
||||
const version = opts?.version ?? pluginDetail.latestVersion?.version;
|
||||
if (!version) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "NO_VERSION",
|
||||
message: `No published versions found for plugin ${pluginId}`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const versionMetadata = await resolveVersionMetadata(client, pluginId, pluginDetail, version);
|
||||
if (!versionMetadata) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "NO_VERSION",
|
||||
message: `Version ${version} was not found for plugin ${pluginId}`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// Block installation of plugins that haven't passed audit.
|
||||
// Both "fail" (explicitly malicious) and "warn" (audit error or
|
||||
// inconclusive) are non-installable — only "pass" or null (no audit
|
||||
// ran) are allowed through.
|
||||
if (versionMetadata.auditVerdict === "fail" || versionMetadata.auditVerdict === "warn") {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "AUDIT_FAILED",
|
||||
message:
|
||||
versionMetadata.auditVerdict === "fail"
|
||||
? "Plugin failed security audit and cannot be installed"
|
||||
: "Plugin audit was inconclusive and cannot be installed until reviewed",
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// Download and extract bundle
|
||||
const bundle = await client.downloadBundle(pluginId, version);
|
||||
|
||||
// Verify checksum matches marketplace-published checksum
|
||||
if (versionMetadata.checksum && bundle.checksum !== versionMetadata.checksum) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "CHECKSUM_MISMATCH",
|
||||
message: "Bundle checksum does not match marketplace record. Download may be corrupted.",
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const bundleIdentityError = validateBundleIdentity(bundle, pluginId, version);
|
||||
if (bundleIdentityError) return bundleIdentityError;
|
||||
|
||||
// Store bundle in site-local R2
|
||||
await storeBundleInR2(storage, pluginId, version, bundle);
|
||||
|
||||
// Write plugin state
|
||||
await stateRepo.upsert(pluginId, version, "active", {
|
||||
source: "marketplace",
|
||||
marketplaceVersion: version,
|
||||
displayName: pluginDetail.name,
|
||||
description: pluginDetail.description ?? undefined,
|
||||
});
|
||||
|
||||
// Fire-and-forget install stat
|
||||
client.reportInstall(pluginId, version).catch(() => {
|
||||
// Intentional: never fails the install
|
||||
});
|
||||
return {
|
||||
success: true,
|
||||
data: {
|
||||
pluginId,
|
||||
version,
|
||||
capabilities: bundle.manifest.capabilities,
|
||||
},
|
||||
};
|
||||
} catch (err) {
|
||||
if (err instanceof MarketplaceUnavailableError) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "MARKETPLACE_UNAVAILABLE",
|
||||
message: "Plugin marketplace is currently unavailable",
|
||||
},
|
||||
};
|
||||
}
|
||||
if (err instanceof MarketplaceError) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: err.code ?? "MARKETPLACE_ERROR",
|
||||
message: err.message,
|
||||
},
|
||||
};
|
||||
}
|
||||
if (err instanceof EmDashStorageError) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: err.code ?? "STORAGE_ERROR",
|
||||
message: "Storage error while installing plugin",
|
||||
},
|
||||
};
|
||||
}
|
||||
if (err && typeof err === "object" && "code" in err) {
|
||||
const code = (err as { code?: unknown }).code;
|
||||
if (typeof code === "string" && code.trim()) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code,
|
||||
message: "Failed to install plugin from marketplace",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
console.error("Failed to install marketplace plugin:", err);
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "INSTALL_FAILED",
|
||||
message: "Failed to install plugin from marketplace",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// ── Update ─────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Update an installed marketplace plugin to a new version.
 *
 * The order of operations is security-sensitive:
 *  1. Validate environment (marketplace client, storage, sandbox runner).
 *  2. Verify the plugin is an installed *marketplace* plugin.
 *  3. Resolve the target version's metadata and verify the downloaded
 *     bundle's checksum and identity against the marketplace record.
 *  4. Diff capabilities and route visibility against the installed bundle
 *     and require explicit caller confirmation for any escalation.
 *  5. Persist the new bundle and state, then best-effort delete the old one.
 *
 * @param db             Site database handle.
 * @param storage        Site-local bundle storage; required for updates.
 * @param sandboxRunner  Sandbox used to run marketplace plugins; required.
 * @param marketplaceUrl Marketplace base URL; undefined means not configured.
 * @param pluginId       Plugin to update.
 * @param opts           Optional target version plus escalation confirmations.
 * @returns Success with old/new versions and the capability diff, or a coded
 *          error result. Never throws — all failures map to error codes.
 */
export async function handleMarketplaceUpdate(
  db: Kysely<Database>,
  storage: Storage | null,
  sandboxRunner: SandboxRunner | null,
  marketplaceUrl: string | undefined,
  pluginId: string,
  opts?: {
    version?: string;
    confirmCapabilityChanges?: boolean;
    confirmRouteVisibilityChanges?: boolean;
  },
): Promise<ApiResult<MarketplaceUpdateResult>> {
  const client = getClient(marketplaceUrl);
  if (!client) {
    return {
      success: false,
      error: { code: "MARKETPLACE_NOT_CONFIGURED", message: "Marketplace is not configured" },
    };
  }
  if (!storage) {
    return {
      success: false,
      error: { code: "STORAGE_NOT_CONFIGURED", message: "Storage is required" },
    };
  }
  if (!sandboxRunner || !sandboxRunner.isAvailable()) {
    return {
      success: false,
      error: { code: "SANDBOX_NOT_AVAILABLE", message: "Sandbox runner is required" },
    };
  }

  try {
    const stateRepo = new PluginStateRepository(db);
    const existing = await stateRepo.get(pluginId);
    // Only plugins installed *from the marketplace* are updatable here;
    // configured/local plugins with the same ID are deliberately excluded.
    if (!existing || existing.source !== "marketplace") {
      return {
        success: false,
        error: {
          code: "NOT_FOUND",
          message: `No marketplace plugin found: ${pluginId}`,
        },
      };
    }

    const oldVersion = existing.marketplaceVersion ?? existing.version;

    // Get target version
    const pluginDetail = await client.getPlugin(pluginId);
    const newVersion = opts?.version ?? pluginDetail.latestVersion?.version;
    if (!newVersion) {
      return {
        success: false,
        error: { code: "NO_VERSION", message: "No newer version available" },
      };
    }

    if (newVersion === oldVersion) {
      return {
        success: false,
        error: { code: "ALREADY_UP_TO_DATE", message: "Plugin is already up to date" },
      };
    }

    const versionMetadata = await resolveVersionMetadata(
      client,
      pluginId,
      pluginDetail,
      newVersion,
    );
    if (!versionMetadata) {
      return {
        success: false,
        error: {
          code: "NO_VERSION",
          message: `Version ${newVersion} was not found for plugin ${pluginId}`,
        },
      };
    }

    // Download new bundle
    const bundle = await client.downloadBundle(pluginId, newVersion);

    // Verify checksum matches marketplace-published checksum for this version
    if (versionMetadata.checksum && bundle.checksum !== versionMetadata.checksum) {
      return {
        success: false,
        error: {
          code: "CHECKSUM_MISMATCH",
          message: "Bundle checksum does not match marketplace record. Download may be corrupted.",
        },
      };
    }

    const bundleIdentityError = validateBundleIdentity(bundle, pluginId, newVersion);
    if (bundleIdentityError) return bundleIdentityError;

    // Diff capabilities and route visibility against old version.
    // NOTE: oldBundle may be null (e.g. old bundle missing from storage);
    // then oldCaps is treated as empty and every capability counts as added.
    const oldBundle = await loadBundleFromR2(storage, pluginId, oldVersion);
    const oldCaps = oldBundle?.manifest.capabilities ?? [];
    const capabilityChanges = diffCapabilities(oldCaps, bundle.manifest.capabilities);
    const hasEscalation = capabilityChanges.added.length > 0;

    // If capabilities escalated, require explicit confirmation
    if (hasEscalation && !opts?.confirmCapabilityChanges) {
      return {
        success: false,
        error: {
          code: "CAPABILITY_ESCALATION",
          message: "Plugin update requires new capabilities",
          details: { capabilityChanges },
        },
      };
    }

    // Diff route visibility — routes going from private to public are a
    // security-sensitive change that exposes unauthenticated endpoints.
    const routeVisibilityChanges = diffRouteVisibility(oldBundle?.manifest, bundle.manifest);
    const hasNewPublicRoutes = routeVisibilityChanges.newlyPublic.length > 0;

    if (hasNewPublicRoutes && !opts?.confirmRouteVisibilityChanges) {
      return {
        success: false,
        error: {
          code: "ROUTE_VISIBILITY_ESCALATION",
          message: "Plugin update exposes new public (unauthenticated) routes",
          details: { routeVisibilityChanges, capabilityChanges },
        },
      };
    }

    // Store new bundle
    await storeBundleInR2(storage, pluginId, newVersion, bundle);

    // Update state
    await stateRepo.upsert(pluginId, newVersion, "active", {
      source: "marketplace",
      marketplaceVersion: newVersion,
      displayName: pluginDetail.name,
      description: pluginDetail.description ?? undefined,
    });

    // Clean up old bundle from R2 (best-effort; failure leaves an orphan
    // bundle but never fails the update)
    deleteBundleFromR2(storage, pluginId, oldVersion).catch(() => {});

    return {
      success: true,
      data: {
        pluginId,
        oldVersion,
        newVersion,
        capabilityChanges,
        routeVisibilityChanges: hasNewPublicRoutes ? routeVisibilityChanges : undefined,
      },
    };
  } catch (err) {
    if (err instanceof MarketplaceUnavailableError) {
      return {
        success: false,
        error: { code: "MARKETPLACE_UNAVAILABLE", message: "Marketplace is unavailable" },
      };
    }
    if (err instanceof MarketplaceError) {
      return {
        success: false,
        error: { code: err.code ?? "MARKETPLACE_ERROR", message: err.message },
      };
    }
    console.error("Failed to update marketplace plugin:", err);
    return {
      success: false,
      error: { code: "UPDATE_FAILED", message: "Failed to update plugin" },
    };
  }
}
|
||||
|
||||
// ── Uninstall ──────────────────────────────────────────────────────
|
||||
|
||||
export async function handleMarketplaceUninstall(
|
||||
db: Kysely<Database>,
|
||||
storage: Storage | null,
|
||||
pluginId: string,
|
||||
opts?: { deleteData?: boolean },
|
||||
): Promise<ApiResult<MarketplaceUninstallResult>> {
|
||||
try {
|
||||
const stateRepo = new PluginStateRepository(db);
|
||||
const existing = await stateRepo.get(pluginId);
|
||||
if (!existing || existing.source !== "marketplace") {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "NOT_FOUND",
|
||||
message: `No marketplace plugin found: ${pluginId}`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const version = existing.marketplaceVersion ?? existing.version;
|
||||
|
||||
// Delete bundle from site R2
|
||||
if (storage) {
|
||||
await deleteBundleFromR2(storage, pluginId, version);
|
||||
}
|
||||
|
||||
// Optionally delete plugin storage data
|
||||
let dataDeleted = false;
|
||||
if (opts?.deleteData) {
|
||||
try {
|
||||
await db.deleteFrom("_plugin_storage").where("plugin_id", "=", pluginId).execute();
|
||||
dataDeleted = true;
|
||||
} catch {
|
||||
// Plugin storage table may not have data for this plugin
|
||||
}
|
||||
}
|
||||
|
||||
// Delete state row
|
||||
await stateRepo.delete(pluginId);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { pluginId, dataDeleted },
|
||||
};
|
||||
} catch (err) {
|
||||
console.error("Failed to uninstall marketplace plugin:", err);
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "UNINSTALL_FAILED",
|
||||
message: "Failed to uninstall plugin",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// ── Update check ───────────────────────────────────────────────────
|
||||
|
||||
export async function handleMarketplaceUpdateCheck(
|
||||
db: Kysely<Database>,
|
||||
marketplaceUrl: string | undefined,
|
||||
): Promise<ApiResult<{ items: MarketplaceUpdateCheck[] }>> {
|
||||
const client = getClient(marketplaceUrl);
|
||||
if (!client) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "MARKETPLACE_NOT_CONFIGURED", message: "Marketplace is not configured" },
|
||||
};
|
||||
}
|
||||
|
||||
try {
|
||||
const stateRepo = new PluginStateRepository(db);
|
||||
const marketplacePlugins = await stateRepo.getMarketplacePlugins();
|
||||
|
||||
const items: MarketplaceUpdateCheck[] = [];
|
||||
|
||||
for (const plugin of marketplacePlugins) {
|
||||
try {
|
||||
const detail = await client.getPlugin(plugin.pluginId);
|
||||
const latest = detail.latestVersion?.version;
|
||||
const installed = plugin.marketplaceVersion ?? plugin.version;
|
||||
|
||||
if (!latest) continue;
|
||||
|
||||
const hasUpdate = latest !== installed;
|
||||
let capabilityChanges: { added: string[]; removed: string[] } | undefined;
|
||||
let hasCapabilityChanges = false;
|
||||
|
||||
if (hasUpdate && detail.latestVersion) {
|
||||
const oldCaps = detail.capabilities ?? [];
|
||||
const newCaps = detail.latestVersion.capabilities ?? [];
|
||||
capabilityChanges = diffCapabilities(oldCaps, newCaps);
|
||||
hasCapabilityChanges =
|
||||
capabilityChanges.added.length > 0 || capabilityChanges.removed.length > 0;
|
||||
}
|
||||
|
||||
items.push({
|
||||
pluginId: plugin.pluginId,
|
||||
installed,
|
||||
latest: latest ?? installed,
|
||||
hasUpdate,
|
||||
hasCapabilityChanges,
|
||||
capabilityChanges: hasCapabilityChanges ? capabilityChanges : undefined,
|
||||
// Route visibility changes require downloading both bundles to compare
|
||||
// manifests, which is too expensive for a preview check. The actual
|
||||
// enforcement happens at update time in handleMarketplaceUpdate.
|
||||
hasRouteVisibilityChanges: false,
|
||||
});
|
||||
} catch (err) {
|
||||
// Skip plugins that can't be checked (marketplace down, plugin delisted)
|
||||
console.warn(`Failed to check updates for ${plugin.pluginId}:`, err);
|
||||
}
|
||||
}
|
||||
|
||||
return { success: true, data: { items } };
|
||||
} catch (err) {
|
||||
if (err instanceof MarketplaceUnavailableError) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "MARKETPLACE_UNAVAILABLE", message: "Marketplace is unavailable" },
|
||||
};
|
||||
}
|
||||
console.error("Failed to check marketplace updates:", err);
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "UPDATE_CHECK_FAILED", message: "Failed to check for updates" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// ── Proxy ──────────────────────────────────────────────────────────
|
||||
|
||||
export async function handleMarketplaceSearch(
|
||||
marketplaceUrl: string | undefined,
|
||||
query?: string,
|
||||
opts?: MarketplaceSearchOpts,
|
||||
): Promise<ApiResult<unknown>> {
|
||||
const client = getClient(marketplaceUrl);
|
||||
if (!client) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "MARKETPLACE_NOT_CONFIGURED", message: "Marketplace is not configured" },
|
||||
};
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await client.search(query, opts);
|
||||
return { success: true, data: result };
|
||||
} catch (err) {
|
||||
if (err instanceof MarketplaceUnavailableError) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "MARKETPLACE_UNAVAILABLE", message: "Marketplace is unavailable" },
|
||||
};
|
||||
}
|
||||
console.error("Failed to search marketplace:", err);
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "SEARCH_FAILED", message: "Failed to search marketplace" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export async function handleMarketplaceGetPlugin(
|
||||
marketplaceUrl: string | undefined,
|
||||
pluginId: string,
|
||||
): Promise<ApiResult<unknown>> {
|
||||
const client = getClient(marketplaceUrl);
|
||||
if (!client) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "MARKETPLACE_NOT_CONFIGURED", message: "Marketplace is not configured" },
|
||||
};
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await client.getPlugin(pluginId);
|
||||
return { success: true, data: result };
|
||||
} catch (err) {
|
||||
if (err instanceof MarketplaceError && err.status === 404) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: `Plugin not found: ${pluginId}` },
|
||||
};
|
||||
}
|
||||
if (err instanceof MarketplaceUnavailableError) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "MARKETPLACE_UNAVAILABLE", message: "Marketplace is unavailable" },
|
||||
};
|
||||
}
|
||||
console.error("Failed to get marketplace plugin:", err);
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "GET_PLUGIN_FAILED", message: "Failed to get plugin details" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// ── Theme proxy handlers ──────────────────────────────────────────
|
||||
|
||||
export async function handleThemeSearch(
|
||||
marketplaceUrl: string | undefined,
|
||||
query?: string,
|
||||
opts?: MarketplaceThemeSearchOpts,
|
||||
): Promise<ApiResult<unknown>> {
|
||||
const client = getClient(marketplaceUrl);
|
||||
if (!client) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "MARKETPLACE_NOT_CONFIGURED", message: "Marketplace is not configured" },
|
||||
};
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await client.searchThemes(query, opts);
|
||||
return { success: true, data: result };
|
||||
} catch (err) {
|
||||
if (err instanceof MarketplaceUnavailableError) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "MARKETPLACE_UNAVAILABLE", message: "Marketplace is unavailable" },
|
||||
};
|
||||
}
|
||||
console.error("Failed to search themes:", err);
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "THEME_SEARCH_FAILED", message: "Failed to search themes" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export async function handleThemeGetDetail(
|
||||
marketplaceUrl: string | undefined,
|
||||
themeId: string,
|
||||
): Promise<ApiResult<unknown>> {
|
||||
const client = getClient(marketplaceUrl);
|
||||
if (!client) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "MARKETPLACE_NOT_CONFIGURED", message: "Marketplace is not configured" },
|
||||
};
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await client.getTheme(themeId);
|
||||
return { success: true, data: result };
|
||||
} catch (err) {
|
||||
if (err instanceof MarketplaceError && err.status === 404) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: `Theme not found: ${themeId}` },
|
||||
};
|
||||
}
|
||||
if (err instanceof MarketplaceUnavailableError) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "MARKETPLACE_UNAVAILABLE", message: "Marketplace is unavailable" },
|
||||
};
|
||||
}
|
||||
console.error("Failed to get marketplace theme:", err);
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "GET_THEME_FAILED", message: "Failed to get theme details" },
|
||||
};
|
||||
}
|
||||
}
|
||||
207
packages/core/src/api/handlers/media.ts
Normal file
207
packages/core/src/api/handlers/media.ts
Normal file
@@ -0,0 +1,207 @@
|
||||
/**
|
||||
* Media CRUD handlers
|
||||
*/
|
||||
|
||||
import type { Kysely } from "kysely";
|
||||
|
||||
import { MediaRepository, type MediaItem } from "../../database/repositories/media.js";
|
||||
import type { Database } from "../../database/types.js";
|
||||
import type { ApiResult } from "../types.js";
|
||||
|
||||
/** Paginated media listing payload. */
export interface MediaListResponse {
  /** Page of media rows. */
  items: MediaItem[];
  /** Opaque cursor for the next page; absent when this is the last page. */
  nextCursor?: string;
}

/** Single-item media payload returned by get/create/update handlers. */
export interface MediaResponse {
  item: MediaItem;
}
|
||||
|
||||
/**
|
||||
* List media items
|
||||
*/
|
||||
export async function handleMediaList(
|
||||
db: Kysely<Database>,
|
||||
params: {
|
||||
cursor?: string;
|
||||
limit?: number;
|
||||
mimeType?: string;
|
||||
},
|
||||
): Promise<ApiResult<MediaListResponse>> {
|
||||
try {
|
||||
const repo = new MediaRepository(db);
|
||||
const result = await repo.findMany({
|
||||
cursor: params.cursor,
|
||||
limit: Math.min(params.limit || 50, 100),
|
||||
mimeType: params.mimeType,
|
||||
});
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: {
|
||||
items: result.items,
|
||||
nextCursor: result.nextCursor,
|
||||
},
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "MEDIA_LIST_ERROR",
|
||||
message: "Failed to list media",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get single media item
|
||||
*/
|
||||
export async function handleMediaGet(
|
||||
db: Kysely<Database>,
|
||||
id: string,
|
||||
): Promise<ApiResult<MediaResponse>> {
|
||||
try {
|
||||
const repo = new MediaRepository(db);
|
||||
const item = await repo.findById(id);
|
||||
|
||||
if (!item) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "NOT_FOUND",
|
||||
message: `Media item not found: ${id}`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { item },
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "MEDIA_GET_ERROR",
|
||||
message: "Failed to get media",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create media item (after file upload)
|
||||
*/
|
||||
export async function handleMediaCreate(
|
||||
db: Kysely<Database>,
|
||||
input: {
|
||||
filename: string;
|
||||
mimeType: string;
|
||||
size?: number;
|
||||
width?: number;
|
||||
height?: number;
|
||||
alt?: string;
|
||||
storageKey: string;
|
||||
contentHash?: string;
|
||||
blurhash?: string;
|
||||
dominantColor?: string;
|
||||
authorId?: string;
|
||||
},
|
||||
): Promise<ApiResult<MediaResponse>> {
|
||||
try {
|
||||
const repo = new MediaRepository(db);
|
||||
const item = await repo.create(input);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { item },
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "MEDIA_CREATE_ERROR",
|
||||
message: "Failed to create media",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update media metadata
|
||||
*/
|
||||
export async function handleMediaUpdate(
|
||||
db: Kysely<Database>,
|
||||
id: string,
|
||||
input: {
|
||||
alt?: string;
|
||||
caption?: string;
|
||||
width?: number;
|
||||
height?: number;
|
||||
},
|
||||
): Promise<ApiResult<MediaResponse>> {
|
||||
try {
|
||||
const repo = new MediaRepository(db);
|
||||
const item = await repo.update(id, input);
|
||||
|
||||
if (!item) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "NOT_FOUND",
|
||||
message: `Media item not found: ${id}`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { item },
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "MEDIA_UPDATE_ERROR",
|
||||
message: "Failed to update media",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete media item
|
||||
*/
|
||||
export async function handleMediaDelete(
|
||||
db: Kysely<Database>,
|
||||
id: string,
|
||||
): Promise<ApiResult<{ deleted: true }>> {
|
||||
try {
|
||||
const repo = new MediaRepository(db);
|
||||
const deleted = await repo.delete(id);
|
||||
|
||||
if (!deleted) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "NOT_FOUND",
|
||||
message: `Media item not found: ${id}`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { deleted: true },
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "MEDIA_DELETE_ERROR",
|
||||
message: "Failed to delete media",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
493
packages/core/src/api/handlers/menus.ts
Normal file
493
packages/core/src/api/handlers/menus.ts
Normal file
@@ -0,0 +1,493 @@
|
||||
/**
|
||||
* Menu CRUD handlers
|
||||
*
|
||||
* Business logic for menu and menu-item endpoints.
|
||||
* Routes are thin wrappers that parse input, check auth, and call these.
|
||||
*/
|
||||
|
||||
import type { Kysely } from "kysely";
|
||||
import { ulid } from "ulidx";
|
||||
|
||||
import type { Database, MenuItemTable, MenuTable } from "../../database/types.js";
|
||||
import type { ApiResult } from "../types.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Response types
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Menu row as returned to API clients: timestamp columns are overridden to
// plain strings (presumably the driver serializes them as ISO strings rather
// than the type declared on MenuTable — TODO confirm against the driver).
type MenuRow = Omit<MenuTable, "created_at" | "updated_at"> & {
  created_at: string;
  updated_at: string;
};

// Menu-item row with the same string-timestamp override for created_at.
type MenuItemRow = Omit<MenuItemTable, "created_at"> & {
  created_at: string;
};

/** Menu summary used by the list endpoint: row plus its item count. */
export interface MenuListItem extends MenuRow {
  itemCount: number;
}

/** Full menu payload: row plus its items ordered by sort_order. */
export interface MenuWithItems extends MenuRow {
  items: MenuItemRow[];
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Menu handlers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* List all menus with item counts.
|
||||
*/
|
||||
export async function handleMenuList(db: Kysely<Database>): Promise<ApiResult<MenuListItem[]>> {
|
||||
try {
|
||||
const menus = await db
|
||||
.selectFrom("_emdash_menus")
|
||||
.select(["id", "name", "label", "created_at", "updated_at"])
|
||||
.orderBy("name", "asc")
|
||||
.execute();
|
||||
|
||||
const menusWithCounts = await Promise.all(
|
||||
menus.map(async (menu) => {
|
||||
const { count } = await db
|
||||
.selectFrom("_emdash_menu_items")
|
||||
.select(({ fn }) => fn.countAll<number>().as("count"))
|
||||
.where("menu_id", "=", menu.id)
|
||||
.executeTakeFirstOrThrow();
|
||||
|
||||
return {
|
||||
...menu,
|
||||
itemCount: count,
|
||||
};
|
||||
}),
|
||||
);
|
||||
|
||||
return { success: true, data: menusWithCounts };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "MENU_LIST_ERROR", message: "Failed to fetch menus" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new menu.
|
||||
*/
|
||||
export async function handleMenuCreate(
|
||||
db: Kysely<Database>,
|
||||
input: { name: string; label: string },
|
||||
): Promise<ApiResult<MenuRow>> {
|
||||
try {
|
||||
const existing = await db
|
||||
.selectFrom("_emdash_menus")
|
||||
.select("id")
|
||||
.where("name", "=", input.name)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (existing) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "CONFLICT", message: `Menu with name "${input.name}" already exists` },
|
||||
};
|
||||
}
|
||||
|
||||
const id = ulid();
|
||||
await db
|
||||
.insertInto("_emdash_menus")
|
||||
.values({
|
||||
id,
|
||||
name: input.name,
|
||||
label: input.label,
|
||||
})
|
||||
.execute();
|
||||
|
||||
const menu = await db
|
||||
.selectFrom("_emdash_menus")
|
||||
.selectAll()
|
||||
.where("id", "=", id)
|
||||
.executeTakeFirstOrThrow();
|
||||
|
||||
return { success: true, data: menu };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "MENU_CREATE_ERROR", message: "Failed to create menu" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a single menu with all its items.
|
||||
*/
|
||||
export async function handleMenuGet(
|
||||
db: Kysely<Database>,
|
||||
name: string,
|
||||
): Promise<ApiResult<MenuWithItems>> {
|
||||
try {
|
||||
const menu = await db
|
||||
.selectFrom("_emdash_menus")
|
||||
.selectAll()
|
||||
.where("name", "=", name)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (!menu) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: "Menu not found" },
|
||||
};
|
||||
}
|
||||
|
||||
const items = await db
|
||||
.selectFrom("_emdash_menu_items")
|
||||
.selectAll()
|
||||
.where("menu_id", "=", menu.id)
|
||||
.orderBy("sort_order", "asc")
|
||||
.execute();
|
||||
|
||||
return { success: true, data: { ...menu, items } };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "MENU_GET_ERROR", message: "Failed to fetch menu" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update a menu's metadata.
|
||||
*/
|
||||
export async function handleMenuUpdate(
|
||||
db: Kysely<Database>,
|
||||
name: string,
|
||||
input: { label?: string },
|
||||
): Promise<ApiResult<MenuRow>> {
|
||||
try {
|
||||
const menu = await db
|
||||
.selectFrom("_emdash_menus")
|
||||
.select("id")
|
||||
.where("name", "=", name)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (!menu) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: "Menu not found" },
|
||||
};
|
||||
}
|
||||
|
||||
if (input.label) {
|
||||
await db
|
||||
.updateTable("_emdash_menus")
|
||||
.set({ label: input.label })
|
||||
.where("id", "=", menu.id)
|
||||
.execute();
|
||||
}
|
||||
|
||||
const updated = await db
|
||||
.selectFrom("_emdash_menus")
|
||||
.selectAll()
|
||||
.where("id", "=", menu.id)
|
||||
.executeTakeFirstOrThrow();
|
||||
|
||||
return { success: true, data: updated };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "MENU_UPDATE_ERROR", message: "Failed to update menu" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a menu and its items (cascade).
|
||||
*/
|
||||
export async function handleMenuDelete(
|
||||
db: Kysely<Database>,
|
||||
name: string,
|
||||
): Promise<ApiResult<{ deleted: true }>> {
|
||||
try {
|
||||
const menu = await db
|
||||
.selectFrom("_emdash_menus")
|
||||
.select("id")
|
||||
.where("name", "=", name)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (!menu) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: "Menu not found" },
|
||||
};
|
||||
}
|
||||
|
||||
await db.deleteFrom("_emdash_menus").where("id", "=", menu.id).execute();
|
||||
|
||||
return { success: true, data: { deleted: true } };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "MENU_DELETE_ERROR", message: "Failed to delete menu" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Menu item handlers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** Input for adding an item to a menu (see handleMenuItemCreate). */
export interface CreateMenuItemInput {
  // Item kind stored in the `type` column (e.g. reference vs custom link —
  // exact values are defined by callers; confirm against route validation).
  type: string;
  // Display label (required).
  label: string;
  // For reference-type items: target collection and record id.
  referenceCollection?: string;
  referenceId?: string;
  // For custom-link items: the raw URL.
  customUrl?: string;
  // Link target attribute (e.g. "_blank").
  target?: string;
  // HTML title attribute.
  titleAttr?: string;
  // Space-separated CSS classes applied to the rendered item.
  cssClasses?: string;
  // Parent item id for nested menus; omitted/null means top level.
  parentId?: string;
  // Explicit position; when omitted the item is appended after its siblings.
  sortOrder?: number;
}
|
||||
|
||||
/**
 * Add an item to a menu.
 *
 * Resolves the menu by name, computes a sort order (appending to the end of
 * the target sibling group when the caller omits one), inserts the row, and
 * returns the freshly inserted row.
 */
export async function handleMenuItemCreate(
  db: Kysely<Database>,
  menuName: string,
  input: CreateMenuItemInput,
): Promise<ApiResult<MenuItemRow>> {
  try {
    const menu = await db
      .selectFrom("_emdash_menus")
      .select("id")
      .where("name", "=", menuName)
      .executeTakeFirst();

    if (!menu) {
      return {
        success: false,
        error: { code: "NOT_FOUND", message: "Menu not found" },
      };
    }

    let sortOrder = input.sortOrder ?? 0;
    if (input.sortOrder === undefined) {
      // No explicit position: append after the current max sort_order of the
      // sibling group (same menu, same parent; parent NULL = top level).
      const maxOrder = await db
        .selectFrom("_emdash_menu_items")
        .select(({ fn }) => fn.max("sort_order").as("max"))
        .where("menu_id", "=", menu.id)
        .where("parent_id", "is", input.parentId ?? null)
        .executeTakeFirst();

      // max is NULL when the group is empty, so ?? -1 makes the first item 0.
      // eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion -- Kysely fn.max returns unknown; always a number for sort_order column
      sortOrder = ((maxOrder?.max as number) ?? -1) + 1;
    }

    const id = ulid();
    await db
      .insertInto("_emdash_menu_items")
      .values({
        id,
        menu_id: menu.id,
        parent_id: input.parentId ?? null,
        sort_order: sortOrder,
        type: input.type,
        reference_collection: input.referenceCollection ?? null,
        reference_id: input.referenceId ?? null,
        custom_url: input.customUrl ?? null,
        label: input.label,
        title_attr: input.titleAttr ?? null,
        target: input.target ?? null,
        css_classes: input.cssClasses ?? null,
      })
      .execute();

    // Re-read the row so DB-applied defaults are included in the response.
    const item = await db
      .selectFrom("_emdash_menu_items")
      .selectAll()
      .where("id", "=", id)
      .executeTakeFirstOrThrow();

    return { success: true, data: item };
  } catch {
    return {
      success: false,
      error: { code: "MENU_ITEM_CREATE_ERROR", message: "Failed to create menu item" },
    };
  }
}
|
||||
|
||||
/**
 * Partial update for a menu item. Only fields that are present (not
 * undefined) are written.
 */
export interface UpdateMenuItemInput {
  label?: string;
  customUrl?: string;
  target?: string;
  titleAttr?: string;
  cssClasses?: string;
  /** New parent item ID, or null to move the item to the top level. */
  parentId?: string | null;
  /** New position among siblings. */
  sortOrder?: number;
}
|
||||
|
||||
/**
|
||||
* Update a menu item.
|
||||
*/
|
||||
export async function handleMenuItemUpdate(
|
||||
db: Kysely<Database>,
|
||||
menuName: string,
|
||||
itemId: string,
|
||||
input: UpdateMenuItemInput,
|
||||
): Promise<ApiResult<MenuItemRow>> {
|
||||
try {
|
||||
const menu = await db
|
||||
.selectFrom("_emdash_menus")
|
||||
.select("id")
|
||||
.where("name", "=", menuName)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (!menu) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: "Menu not found" },
|
||||
};
|
||||
}
|
||||
|
||||
const item = await db
|
||||
.selectFrom("_emdash_menu_items")
|
||||
.select("id")
|
||||
.where("id", "=", itemId)
|
||||
.where("menu_id", "=", menu.id)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (!item) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: "Menu item not found" },
|
||||
};
|
||||
}
|
||||
|
||||
const updates: Record<string, unknown> = {};
|
||||
if (input.label !== undefined) updates.label = input.label;
|
||||
if (input.customUrl !== undefined) updates.custom_url = input.customUrl;
|
||||
if (input.target !== undefined) updates.target = input.target;
|
||||
if (input.titleAttr !== undefined) updates.title_attr = input.titleAttr;
|
||||
if (input.cssClasses !== undefined) updates.css_classes = input.cssClasses;
|
||||
if (input.parentId !== undefined) updates.parent_id = input.parentId;
|
||||
if (input.sortOrder !== undefined) updates.sort_order = input.sortOrder;
|
||||
|
||||
if (Object.keys(updates).length > 0) {
|
||||
await db.updateTable("_emdash_menu_items").set(updates).where("id", "=", itemId).execute();
|
||||
}
|
||||
|
||||
const updated = await db
|
||||
.selectFrom("_emdash_menu_items")
|
||||
.selectAll()
|
||||
.where("id", "=", itemId)
|
||||
.executeTakeFirstOrThrow();
|
||||
|
||||
return { success: true, data: updated };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "MENU_ITEM_UPDATE_ERROR", message: "Failed to update menu item" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a menu item.
|
||||
*/
|
||||
export async function handleMenuItemDelete(
|
||||
db: Kysely<Database>,
|
||||
menuName: string,
|
||||
itemId: string,
|
||||
): Promise<ApiResult<{ deleted: true }>> {
|
||||
try {
|
||||
const menu = await db
|
||||
.selectFrom("_emdash_menus")
|
||||
.select("id")
|
||||
.where("name", "=", menuName)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (!menu) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: "Menu not found" },
|
||||
};
|
||||
}
|
||||
|
||||
const result = await db
|
||||
.deleteFrom("_emdash_menu_items")
|
||||
.where("id", "=", itemId)
|
||||
.where("menu_id", "=", menu.id)
|
||||
.execute();
|
||||
|
||||
if (result[0]?.numDeletedRows === 0n) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: "Menu item not found" },
|
||||
};
|
||||
}
|
||||
|
||||
return { success: true, data: { deleted: true } };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "MENU_ITEM_DELETE_ERROR", message: "Failed to delete menu item" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/** One entry in a batch reorder: the item's new parent and position. */
export interface ReorderItem {
  id: string;
  /** New parent item ID, or null for top level. */
  parentId: string | null;
  /** New position among siblings. */
  sortOrder: number;
}
|
||||
|
||||
/**
|
||||
* Batch reorder menu items.
|
||||
*/
|
||||
export async function handleMenuItemReorder(
|
||||
db: Kysely<Database>,
|
||||
menuName: string,
|
||||
items: ReorderItem[],
|
||||
): Promise<ApiResult<MenuItemRow[]>> {
|
||||
try {
|
||||
const menu = await db
|
||||
.selectFrom("_emdash_menus")
|
||||
.select("id")
|
||||
.where("name", "=", menuName)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (!menu) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: "Menu not found" },
|
||||
};
|
||||
}
|
||||
|
||||
for (const item of items) {
|
||||
await db
|
||||
.updateTable("_emdash_menu_items")
|
||||
.set({
|
||||
parent_id: item.parentId,
|
||||
sort_order: item.sortOrder,
|
||||
})
|
||||
.where("id", "=", item.id)
|
||||
.where("menu_id", "=", menu.id)
|
||||
.execute();
|
||||
}
|
||||
|
||||
const updatedItems = await db
|
||||
.selectFrom("_emdash_menu_items")
|
||||
.selectAll()
|
||||
.where("menu_id", "=", menu.id)
|
||||
.orderBy("sort_order", "asc")
|
||||
.execute();
|
||||
|
||||
return { success: true, data: updatedItems };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "MENU_REORDER_ERROR", message: "Failed to reorder menu items" },
|
||||
};
|
||||
}
|
||||
}
|
||||
429
packages/core/src/api/handlers/oauth-authorization.ts
Normal file
429
packages/core/src/api/handlers/oauth-authorization.ts
Normal file
@@ -0,0 +1,429 @@
|
||||
/**
|
||||
* OAuth 2.1 Authorization Code + PKCE handlers.
|
||||
*
|
||||
* Implements the server side of the authorization code grant for MCP clients
|
||||
* (Claude Desktop, VS Code, etc.) per the MCP authorization spec (draft).
|
||||
*
|
||||
* Uses arctic for PKCE challenge generation and @emdashcms/auth for token
|
||||
* utilities. Token infrastructure is shared with the device flow.
|
||||
*/
|
||||
|
||||
import { clampScopes, computeS256Challenge } from "@emdashcms/auth";
|
||||
import type { RoleLevel } from "@emdashcms/auth";
|
||||
import { generateCodeVerifier } from "arctic";
|
||||
import type { Kysely } from "kysely";
|
||||
|
||||
import {
|
||||
generatePrefixedToken,
|
||||
hashApiToken,
|
||||
TOKEN_PREFIXES,
|
||||
VALID_SCOPES,
|
||||
} from "../../auth/api-tokens.js";
|
||||
import type { Database } from "../../database/types.js";
|
||||
import type { ApiResult } from "../types.js";
|
||||
import { lookupOAuthClient, validateClientRedirectUri } from "./oauth-clients.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
// Constants
// ---------------------------------------------------------------------------

/** Authorization codes expire after 10 minutes (RFC 6749 §4.1.2 recommends short-lived) */
const AUTH_CODE_TTL_SECONDS = 10 * 60;

/** Access token TTL: 1 hour (reported to clients via `expires_in`) */
const ACCESS_TOKEN_TTL_SECONDS = 60 * 60;

/** Refresh token TTL: 90 days */
const REFRESH_TOKEN_TTL_SECONDS = 90 * 24 * 60 * 60;
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Types
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** Query parameters of an OAuth 2.1 authorization request (RFC 6749 §4.1.1 + PKCE). */
export interface AuthorizationParams {
  /** Must be "code" — only the authorization code grant is supported. */
  response_type: string;
  client_id: string;
  redirect_uri: string;
  /** Space-delimited scope list (optional). */
  scope?: string;
  /** Opaque client state, echoed back on the redirect. */
  state?: string;
  /** PKCE code challenge. */
  code_challenge: string;
  /** Must be "S256"; "plain" is not accepted. */
  code_challenge_method: string;
  /** Resource indicator (RFC 8707), optional. */
  resource?: string;
}
|
||||
|
||||
/** Form parameters of the token endpoint request (RFC 6749 §4.1.3 + PKCE). */
export interface TokenExchangeParams {
  /** Must be "authorization_code". */
  grant_type: string;
  /** The code previously issued by the authorization endpoint. */
  code: string;
  /** Must exactly match the redirect_uri the code was issued for. */
  redirect_uri: string;
  client_id: string;
  /** PKCE verifier; its S256 hash must match the stored challenge. */
  code_verifier: string;
  resource?: string;
}

/** Successful token endpoint response body (RFC 6749 §5.1). */
export interface TokenResponse {
  access_token: string;
  refresh_token: string;
  token_type: "Bearer";
  /** Access token lifetime in seconds. */
  expires_in: number;
  /** Space-delimited granted scopes. */
  scope: string;
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
function expiresAt(seconds: number): string {
|
||||
return new Date(Date.now() + seconds * 1000).toISOString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate a redirect URI per OAuth 2.1 security requirements.
|
||||
* Allows localhost (loopback) over HTTP, and any HTTPS URL.
|
||||
*/
|
||||
export function validateRedirectUri(uri: string): string | null {
|
||||
try {
|
||||
const url = new URL(uri);
|
||||
|
||||
// Reject protocol-relative URLs
|
||||
if (uri.startsWith("//")) {
|
||||
return "Protocol-relative redirect URIs are not allowed";
|
||||
}
|
||||
|
||||
// Allow localhost/loopback over HTTP (for desktop MCP clients)
|
||||
if (url.protocol === "http:") {
|
||||
const host = url.hostname;
|
||||
if (host === "127.0.0.1" || host === "localhost" || host === "[::1]") {
|
||||
return null; // OK
|
||||
}
|
||||
return "HTTP redirect URIs are only allowed for localhost";
|
||||
}
|
||||
|
||||
// Allow HTTPS
|
||||
if (url.protocol === "https:") {
|
||||
return null; // OK
|
||||
}
|
||||
|
||||
return `Unsupported redirect URI scheme: ${url.protocol}`;
|
||||
} catch {
|
||||
return "Invalid redirect URI";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate and normalize scopes. Returns validated scope list.
|
||||
*/
|
||||
function normalizeScopes(requested?: string): string[] {
|
||||
if (!requested) return [];
|
||||
|
||||
const validSet = new Set<string>(VALID_SCOPES);
|
||||
const scopes = requested
|
||||
.split(" ")
|
||||
.filter(Boolean)
|
||||
.filter((s) => validSet.has(s));
|
||||
|
||||
return scopes;
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Handlers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
 * Process an authorization request after the user approves consent.
 *
 * Validation order: response_type, redirect URI shape, client registration,
 * redirect URI vs. the client's registered set, PKCE method/presence, scopes.
 * On success an authorization code is generated, stored (hashed) with the
 * PKCE challenge, and the redirect URL with the code appended is returned.
 *
 * Scopes are clamped to the user's role to prevent scope escalation, then
 * intersected with the client's registered scopes (if restricted).
 *
 * @param db       Database handle.
 * @param userId   ID of the user who approved consent.
 * @param userRole Role used to clamp the granted scopes.
 * @param params   Raw authorization request parameters.
 * @returns ApiResult with the full redirect URL, or a coded error.
 */
export async function handleAuthorizationApproval(
  db: Kysely<Database>,
  userId: string,
  userRole: RoleLevel,
  params: AuthorizationParams,
): Promise<ApiResult<{ redirect_url: string }>> {
  try {
    // Validate response_type — only the authorization code grant is supported.
    if (params.response_type !== "code") {
      return {
        success: false,
        error: {
          code: "UNSUPPORTED_RESPONSE_TYPE",
          message: "Only response_type=code is supported",
        },
      };
    }

    // Validate redirect_uri scheme/host (basic security check)
    const uriError = validateRedirectUri(params.redirect_uri);
    if (uriError) {
      return {
        success: false,
        error: { code: "INVALID_REDIRECT_URI", message: uriError },
      };
    }

    // Look up the registered OAuth client
    const client = await lookupOAuthClient(db, params.client_id);
    if (!client) {
      return {
        success: false,
        error: {
          code: "INVALID_CLIENT",
          message: "Unknown client_id",
        },
      };
    }

    // Validate redirect_uri against client's registered URIs (exact match)
    const clientUriError = validateClientRedirectUri(params.redirect_uri, client.redirectUris);
    if (clientUriError) {
      return {
        success: false,
        error: { code: "INVALID_REDIRECT_URI", message: clientUriError },
      };
    }

    // Validate code_challenge_method — "plain" is deliberately rejected.
    if (params.code_challenge_method !== "S256") {
      return {
        success: false,
        error: {
          code: "INVALID_REQUEST",
          message: "Only S256 code_challenge_method is supported",
        },
      };
    }

    // Validate code_challenge is present (PKCE is mandatory here)
    if (!params.code_challenge) {
      return {
        success: false,
        error: { code: "INVALID_REQUEST", message: "code_challenge is required" },
      };
    }

    // Validate scopes, then clamp to user's role
    const userScopes = clampScopes(normalizeScopes(params.scope), userRole);

    // SEC-41: Intersect with client's registered scopes (if restricted).
    // A client registered with scopes: ["content:read"] should never receive
    // admin or schema:write, regardless of the approving user's role.
    const clientScopes = client.scopes;
    const scopes = clientScopes?.length
      ? userScopes.filter((s: string) => clientScopes.includes(s))
      : userScopes;

    // An empty result (nothing requested, or everything filtered out) is an error.
    if (scopes.length === 0) {
      return {
        success: false,
        error: { code: "INVALID_SCOPE", message: "No valid scopes requested" },
      };
    }

    // Generate authorization code (high entropy, base64url)
    const code = generateCodeVerifier(); // 32 bytes random, base64url
    // Only the hash is persisted; the raw code exists solely in the redirect,
    // so a database leak cannot be replayed at the token endpoint.
    const codeHash = hashApiToken(code);

    // Store the authorization code
    await db
      .insertInto("_emdash_authorization_codes")
      .values({
        code_hash: codeHash,
        client_id: params.client_id,
        redirect_uri: params.redirect_uri,
        user_id: userId,
        scopes: JSON.stringify(scopes),
        code_challenge: params.code_challenge,
        code_challenge_method: params.code_challenge_method,
        resource: params.resource ?? null,
        expires_at: expiresAt(AUTH_CODE_TTL_SECONDS),
      })
      .execute();

    // Build the redirect URL, echoing state back if the client sent one.
    const redirectUrl = new URL(params.redirect_uri);
    redirectUrl.searchParams.set("code", code);
    if (params.state) {
      redirectUrl.searchParams.set("state", params.state);
    }

    return {
      success: true,
      data: { redirect_url: redirectUrl.toString() },
    };
  } catch (error) {
    console.error("Authorization error:", error);
    return {
      success: false,
      error: {
        code: "AUTHORIZATION_ERROR",
        message: "Failed to process authorization",
      },
    };
  }
}
|
||||
|
||||
/**
 * Exchange an authorization code for access + refresh tokens.
 *
 * Validates the code, verifies PKCE, and issues tokens using the same
 * infrastructure as the device flow (ec_oat_*, ec_ort_*).
 *
 * The code is consumed (deleted) before any check, so every failure path
 * below also invalidates it — a rejected exchange cannot be retried.
 *
 * @param db     Database handle.
 * @param params Token endpoint form parameters.
 * @returns ApiResult with the token response, or a coded error. Error codes
 *   on the grant path are lowercase per RFC 6749 §5.2.
 */
export async function handleAuthorizationCodeExchange(
  db: Kysely<Database>,
  params: TokenExchangeParams,
): Promise<ApiResult<TokenResponse>> {
  try {
    // Validate grant_type
    if (params.grant_type !== "authorization_code") {
      return {
        success: false,
        error: { code: "unsupported_grant_type", message: "Invalid grant_type" },
      };
    }

    // SEC-39: Atomically consume the authorization code using DELETE...RETURNING.
    // This prevents TOCTOU double-exchange: two concurrent requests with the
    // same code will race on the DELETE, and only one will get a row back.
    const codeHash = hashApiToken(params.code);

    const row = await db
      .deleteFrom("_emdash_authorization_codes")
      .where("code_hash", "=", codeHash)
      .returningAll()
      .executeTakeFirst();

    if (!row) {
      return {
        success: false,
        error: { code: "invalid_grant", message: "Invalid authorization code" },
      };
    }

    // Check expiry (the row is already deleted — an expired code is simply gone)
    if (new Date(row.expires_at) < new Date()) {
      return {
        success: false,
        error: { code: "invalid_grant", message: "Authorization code expired" },
      };
    }

    // Verify redirect_uri matches exactly (RFC 6749 §4.1.3)
    if (row.redirect_uri !== params.redirect_uri) {
      return {
        success: false,
        error: { code: "invalid_grant", message: "redirect_uri mismatch" },
      };
    }

    // Verify client_id matches the one the code was issued to
    if (row.client_id !== params.client_id) {
      return {
        success: false,
        error: { code: "invalid_grant", message: "client_id mismatch" },
      };
    }

    // PKCE verification: SHA256(code_verifier) must match stored code_challenge
    const derivedChallenge = computeS256Challenge(params.code_verifier);
    if (derivedChallenge !== row.code_challenge) {
      return {
        success: false,
        error: { code: "invalid_grant", message: "PKCE verification failed" },
      };
    }

    // Verify resource matches (only when both sides supplied one)
    if (row.resource && params.resource && row.resource !== params.resource) {
      return {
        success: false,
        error: { code: "invalid_grant", message: "resource mismatch" },
      };
    }

    // Issue tokens (same as device flow). row.scopes was written by this
    // module as JSON.stringify(string[]), so the cast is trusted.
    const scopes = JSON.parse(row.scopes) as string[];

    const accessToken = generatePrefixedToken(TOKEN_PREFIXES.OAUTH_ACCESS);
    const accessExpires = expiresAt(ACCESS_TOKEN_TTL_SECONDS);

    const refreshToken = generatePrefixedToken(TOKEN_PREFIXES.OAUTH_REFRESH);
    const refreshExpires = expiresAt(REFRESH_TOKEN_TTL_SECONDS);

    // Store access token; its row links to the refresh token via refresh_token_hash.
    // NOTE(review): the two inserts below are not in a transaction — a crash
    // between them leaves an access token without its refresh token row;
    // consider db.transaction(). Confirm intended semantics before changing.
    await db
      .insertInto("_emdash_oauth_tokens")
      .values({
        token_hash: accessToken.hash,
        token_type: "access",
        user_id: row.user_id,
        scopes: JSON.stringify(scopes),
        client_type: "mcp",
        expires_at: accessExpires,
        refresh_token_hash: refreshToken.hash,
        client_id: row.client_id,
      })
      .execute();

    // Store refresh token
    await db
      .insertInto("_emdash_oauth_tokens")
      .values({
        token_hash: refreshToken.hash,
        token_type: "refresh",
        user_id: row.user_id,
        scopes: JSON.stringify(scopes),
        client_type: "mcp",
        expires_at: refreshExpires,
        refresh_token_hash: null,
        client_id: row.client_id,
      })
      .execute();

    return {
      success: true,
      data: {
        access_token: accessToken.raw,
        refresh_token: refreshToken.raw,
        token_type: "Bearer",
        expires_in: ACCESS_TOKEN_TTL_SECONDS,
        scope: scopes.join(" "),
      },
    };
  } catch (error) {
    console.error("Token exchange error:", error);
    return {
      success: false,
      error: {
        code: "TOKEN_EXCHANGE_ERROR",
        message: "Failed to exchange authorization code",
      },
    };
  }
}
|
||||
|
||||
/**
|
||||
* Build the authorization denied redirect URL.
|
||||
*/
|
||||
export function buildDeniedRedirect(redirectUri: string, state?: string): string {
|
||||
const url = new URL(redirectUri);
|
||||
url.searchParams.set("error", "access_denied");
|
||||
url.searchParams.set("error_description", "The user denied the authorization request");
|
||||
if (state) {
|
||||
url.searchParams.set("state", state);
|
||||
}
|
||||
return url.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Clean up expired authorization codes.
|
||||
*/
|
||||
export async function cleanupExpiredAuthorizationCodes(db: Kysely<Database>): Promise<number> {
|
||||
const result = await db
|
||||
.deleteFrom("_emdash_authorization_codes")
|
||||
.where("expires_at", "<", new Date().toISOString())
|
||||
.executeTakeFirst();
|
||||
|
||||
return Number(result.numDeletedRows);
|
||||
}
|
||||
353
packages/core/src/api/handlers/oauth-clients.ts
Normal file
353
packages/core/src/api/handlers/oauth-clients.ts
Normal file
@@ -0,0 +1,353 @@
|
||||
/**
|
||||
* OAuth client management handlers.
|
||||
*
|
||||
* CRUD operations for registered OAuth clients. Each client has a set
|
||||
* of pre-registered redirect URIs. The authorization endpoint rejects
|
||||
* any redirect_uri not in the client's registered set.
|
||||
*/
|
||||
|
||||
import type { Kysely } from "kysely";
|
||||
|
||||
import type { Database } from "../../database/types.js";
|
||||
import type { ApiResult } from "../types.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
 * Parse a JSON string column into a typed value.
 *
 * The cast is unchecked: only use this on columns that this module itself
 * wrote with JSON.stringify. Throws SyntaxError on malformed JSON.
 */
function parseJsonColumn<T>(value: string): T {
  // eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion -- JSON.parse returns unknown, callers provide the expected shape
  return JSON.parse(value) as T;
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Types
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** Public view of a registered OAuth client (JSON columns parsed into arrays). */
export interface OAuthClientInfo {
  id: string;
  name: string;
  /** Exact-match allowlist of redirect URIs. */
  redirectUris: string[];
  /** Scope restriction; null means the client is not scope-restricted. */
  scopes: string[] | null;
  createdAt: string;
  updatedAt: string;
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Handlers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
 * Create a new OAuth client.
 *
 * Requires at least one redirect URI. `scopes` null/omitted stores NULL,
 * meaning the client has no scope restriction.
 */
export async function handleOAuthClientCreate(
  db: Kysely<Database>,
  input: {
    id: string;
    name: string;
    redirectUris: string[];
    scopes?: string[] | null;
  },
): Promise<ApiResult<OAuthClientInfo>> {
  try {
    if (input.redirectUris.length === 0) {
      return {
        success: false,
        error: {
          code: "VALIDATION_ERROR",
          message: "At least one redirect URI is required",
        },
      };
    }

    // Check for duplicate client ID.
    // NOTE(review): check-then-insert is racy — two concurrent creates with
    // the same ID can both pass this check. A unique/PK constraint on id
    // would make the loser's insert throw (surfacing as CLIENT_CREATE_ERROR);
    // confirm the schema enforces one.
    const existing = await db
      .selectFrom("_emdash_oauth_clients")
      .select("id")
      .where("id", "=", input.id)
      .executeTakeFirst();

    if (existing) {
      return {
        success: false,
        error: { code: "CONFLICT", message: "OAuth client with this ID already exists" },
      };
    }

    const now = new Date().toISOString();

    await db
      .insertInto("_emdash_oauth_clients")
      .values({
        id: input.id,
        name: input.name,
        redirect_uris: JSON.stringify(input.redirectUris),
        scopes: input.scopes ? JSON.stringify(input.scopes) : null,
      })
      .execute();

    // Timestamps are not written by the insert; `now` is returned on the
    // assumption that DB column defaults produce roughly the same value —
    // TODO confirm against the table schema.
    return {
      success: true,
      data: {
        id: input.id,
        name: input.name,
        redirectUris: input.redirectUris,
        scopes: input.scopes ?? null,
        createdAt: now,
        updatedAt: now,
      },
    };
  } catch {
    return {
      success: false,
      error: {
        code: "CLIENT_CREATE_ERROR",
        message: "Failed to create OAuth client",
      },
    };
  }
}
|
||||
|
||||
/**
|
||||
* List all registered OAuth clients.
|
||||
*/
|
||||
export async function handleOAuthClientList(
|
||||
db: Kysely<Database>,
|
||||
): Promise<ApiResult<{ items: OAuthClientInfo[] }>> {
|
||||
try {
|
||||
const rows = await db
|
||||
.selectFrom("_emdash_oauth_clients")
|
||||
.selectAll()
|
||||
.orderBy("created_at", "desc")
|
||||
.execute();
|
||||
|
||||
const items: OAuthClientInfo[] = rows.map((row) => ({
|
||||
id: row.id,
|
||||
name: row.name,
|
||||
redirectUris: parseJsonColumn<string[]>(row.redirect_uris),
|
||||
scopes: row.scopes ? parseJsonColumn<string[]>(row.scopes) : null,
|
||||
createdAt: row.created_at,
|
||||
updatedAt: row.updated_at,
|
||||
}));
|
||||
|
||||
return { success: true, data: { items } };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "CLIENT_LIST_ERROR",
|
||||
message: "Failed to list OAuth clients",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a single OAuth client by ID.
|
||||
*/
|
||||
export async function handleOAuthClientGet(
|
||||
db: Kysely<Database>,
|
||||
clientId: string,
|
||||
): Promise<ApiResult<OAuthClientInfo>> {
|
||||
try {
|
||||
const row = await db
|
||||
.selectFrom("_emdash_oauth_clients")
|
||||
.selectAll()
|
||||
.where("id", "=", clientId)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (!row) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: "OAuth client not found" },
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: {
|
||||
id: row.id,
|
||||
name: row.name,
|
||||
redirectUris: parseJsonColumn<string[]>(row.redirect_uris),
|
||||
scopes: row.scopes ? parseJsonColumn<string[]>(row.scopes) : null,
|
||||
createdAt: row.created_at,
|
||||
updatedAt: row.updated_at,
|
||||
},
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "CLIENT_GET_ERROR",
|
||||
message: "Failed to get OAuth client",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update an OAuth client.
|
||||
*/
|
||||
export async function handleOAuthClientUpdate(
|
||||
db: Kysely<Database>,
|
||||
clientId: string,
|
||||
input: {
|
||||
name?: string;
|
||||
redirectUris?: string[];
|
||||
scopes?: string[] | null;
|
||||
},
|
||||
): Promise<ApiResult<OAuthClientInfo>> {
|
||||
try {
|
||||
const existing = await db
|
||||
.selectFrom("_emdash_oauth_clients")
|
||||
.selectAll()
|
||||
.where("id", "=", clientId)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (!existing) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: "OAuth client not found" },
|
||||
};
|
||||
}
|
||||
|
||||
if (input.redirectUris !== undefined && input.redirectUris.length === 0) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "VALIDATION_ERROR",
|
||||
message: "At least one redirect URI is required",
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const updates: Record<string, string> = {
|
||||
updated_at: new Date().toISOString(),
|
||||
};
|
||||
|
||||
if (input.name !== undefined) {
|
||||
updates.name = input.name;
|
||||
}
|
||||
if (input.redirectUris !== undefined) {
|
||||
updates.redirect_uris = JSON.stringify(input.redirectUris);
|
||||
}
|
||||
if (input.scopes !== undefined) {
|
||||
updates.scopes = input.scopes ? JSON.stringify(input.scopes) : "";
|
||||
}
|
||||
|
||||
await db
|
||||
.updateTable("_emdash_oauth_clients")
|
||||
.set(updates)
|
||||
.where("id", "=", clientId)
|
||||
.execute();
|
||||
|
||||
// Fetch the updated row
|
||||
const updated = await db
|
||||
.selectFrom("_emdash_oauth_clients")
|
||||
.selectAll()
|
||||
.where("id", "=", clientId)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (!updated) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: "OAuth client not found after update" },
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: {
|
||||
id: updated.id,
|
||||
name: updated.name,
|
||||
redirectUris: parseJsonColumn<string[]>(updated.redirect_uris),
|
||||
scopes: updated.scopes ? parseJsonColumn<string[]>(updated.scopes) : null,
|
||||
createdAt: updated.created_at,
|
||||
updatedAt: updated.updated_at,
|
||||
},
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "CLIENT_UPDATE_ERROR",
|
||||
message: "Failed to update OAuth client",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete an OAuth client.
|
||||
*/
|
||||
export async function handleOAuthClientDelete(
|
||||
db: Kysely<Database>,
|
||||
clientId: string,
|
||||
): Promise<ApiResult<{ deleted: true }>> {
|
||||
try {
|
||||
const result = await db
|
||||
.deleteFrom("_emdash_oauth_clients")
|
||||
.where("id", "=", clientId)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (result.numDeletedRows === 0n) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: "OAuth client not found" },
|
||||
};
|
||||
}
|
||||
|
||||
return { success: true, data: { deleted: true } };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "CLIENT_DELETE_ERROR",
|
||||
message: "Failed to delete OAuth client",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Lookup helpers (used by authorization handler)
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Look up a registered OAuth client by ID.
|
||||
* Returns the client's redirect URIs or null if the client is not registered.
|
||||
*/
|
||||
export async function lookupOAuthClient(
|
||||
db: Kysely<Database>,
|
||||
clientId: string,
|
||||
): Promise<{ redirectUris: string[]; scopes: string[] | null } | null> {
|
||||
const row = await db
|
||||
.selectFrom("_emdash_oauth_clients")
|
||||
.select(["redirect_uris", "scopes"])
|
||||
.where("id", "=", clientId)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (!row) return null;
|
||||
|
||||
return {
|
||||
redirectUris: parseJsonColumn<string[]>(row.redirect_uris),
|
||||
scopes: row.scopes ? parseJsonColumn<string[]>(row.scopes) : null,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate that a redirect URI is in the client's registered set.
|
||||
*
|
||||
* Comparison is exact string match (per RFC 6749 §3.1.2.3).
|
||||
* Returns null if valid, or an error message if not.
|
||||
*/
|
||||
export function validateClientRedirectUri(
|
||||
redirectUri: string,
|
||||
allowedUris: string[],
|
||||
): string | null {
|
||||
if (allowedUris.includes(redirectUri)) {
|
||||
return null; // OK
|
||||
}
|
||||
return "redirect_uri is not registered for this client";
|
||||
}
|
||||
39
packages/core/src/api/handlers/oauth-user-lookup.ts
Normal file
39
packages/core/src/api/handlers/oauth-user-lookup.ts
Normal file
@@ -0,0 +1,39 @@
|
||||
/**
|
||||
* Shared user lookup for OAuth token operations.
|
||||
*
|
||||
* Extracts user role and disabled status from the database. Used by
|
||||
* handleTokenRefresh() to revalidate scopes against the user's current
|
||||
* role and reject disabled users.
|
||||
*/
|
||||
|
||||
import { toRoleLevel, type RoleLevel } from "@emdashcms/auth";
|
||||
import type { Kysely } from "kysely";
|
||||
|
||||
import type { Database } from "../../database/types.js";
|
||||
|
||||
/** A user's current role and account status, as read from the users table. */
export interface UserRoleAndStatus {
  role: RoleLevel;
  /** True when the account is disabled (stored as an integer flag, 1 = disabled). */
  disabled: boolean;
}
|
||||
|
||||
/**
|
||||
* Look up a user's current role and disabled status.
|
||||
* Returns null if the user doesn't exist.
|
||||
*/
|
||||
export async function lookupUserRoleAndStatus(
|
||||
db: Kysely<Database>,
|
||||
userId: string,
|
||||
): Promise<UserRoleAndStatus | null> {
|
||||
const row = await db
|
||||
.selectFrom("users")
|
||||
.select(["role", "disabled"])
|
||||
.where("id", "=", userId)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (!row) return null;
|
||||
|
||||
return {
|
||||
role: toRoleLevel(row.role),
|
||||
disabled: row.disabled === 1,
|
||||
};
|
||||
}
|
||||
254
packages/core/src/api/handlers/plugins.ts
Normal file
254
packages/core/src/api/handlers/plugins.ts
Normal file
@@ -0,0 +1,254 @@
|
||||
/**
|
||||
* Plugin management handlers
|
||||
*/
|
||||
|
||||
import type { Kysely } from "kysely";
|
||||
|
||||
import type { Database } from "../../database/types.js";
|
||||
import { PluginStateRepository, type PluginState, type PluginStatus } from "../../plugins/state.js";
|
||||
import type { ResolvedPlugin } from "../../plugins/types.js";
|
||||
import type { ApiResult } from "../types.js";
|
||||
|
||||
/** API-facing view of a plugin: resolved module data merged with DB state. */
export interface PluginInfo {
  id: string;
  /** Display name; falls back to the plugin id when no state row exists. */
  name: string;
  version: string;
  /** Always undefined in v2 — see buildPluginInfo(); kept for response-shape compatibility. */
  package?: string;
  /** Convenience flag: true exactly when status === "active". */
  enabled: boolean;
  status: PluginStatus;
  /** Where the plugin came from; defaults to "config" when no state row exists. */
  source?: "config" | "marketplace";
  marketplaceVersion?: string;
  capabilities: string[];
  hasAdminPages: boolean;
  hasDashboardWidgets: boolean;
  hasHooks: boolean;
  /** ISO-8601 timestamp, when recorded in plugin state. */
  installedAt?: string;
  /** ISO-8601 timestamp, when recorded in plugin state. */
  activatedAt?: string;
  /** ISO-8601 timestamp, when recorded in plugin state. */
  deactivatedAt?: string;
  /** Description of what the plugin does */
  description?: string;
  /** URL to the plugin icon on the marketplace */
  iconUrl?: string;
}

/** Response body for the plugin list endpoint. */
export interface PluginListResponse {
  items: PluginInfo[];
}

/** Response body for single-plugin endpoints. */
export interface PluginResponse {
  item: PluginInfo;
}
|
||||
|
||||
function marketplaceIconUrl(marketplaceUrl: string, pluginId: string): string {
|
||||
return `${marketplaceUrl}/api/v1/plugins/${encodeURIComponent(pluginId)}/icon`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get plugin info from configured plugin and database state
|
||||
*/
|
||||
function buildPluginInfo(
|
||||
plugin: ResolvedPlugin,
|
||||
state: PluginState | null,
|
||||
marketplaceUrl?: string,
|
||||
): PluginInfo {
|
||||
// If no state exists, plugin is considered active (default on first run)
|
||||
const status = state?.status ?? "active";
|
||||
const enabled = status === "active";
|
||||
const isMarketplace = (state?.source ?? "config") === "marketplace";
|
||||
|
||||
return {
|
||||
id: plugin.id,
|
||||
name: state?.displayName || plugin.id,
|
||||
version: plugin.version,
|
||||
package: undefined, // v2 doesn't have package field
|
||||
enabled,
|
||||
status,
|
||||
source: state?.source ?? "config",
|
||||
marketplaceVersion: state?.marketplaceVersion ?? undefined,
|
||||
capabilities: plugin.capabilities,
|
||||
hasAdminPages: (plugin.admin.pages?.length ?? 0) > 0,
|
||||
hasDashboardWidgets: (plugin.admin.widgets?.length ?? 0) > 0,
|
||||
hasHooks: Object.keys(plugin.hooks ?? {}).length > 0,
|
||||
installedAt: state?.installedAt?.toISOString(),
|
||||
activatedAt: state?.activatedAt?.toISOString() ?? undefined,
|
||||
deactivatedAt: state?.deactivatedAt?.toISOString() ?? undefined,
|
||||
description: state?.description ?? undefined,
|
||||
iconUrl:
|
||||
isMarketplace && marketplaceUrl ? marketplaceIconUrl(marketplaceUrl, plugin.id) : undefined,
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * List all configured plugins with their state.
 *
 * Returns configured plugins first (each merged with any persisted state),
 * then appends marketplace-installed plugins that are not in the configured
 * list. Marketplace-only entries have no resolved module to inspect, so
 * their capability/admin flags are reported empty/false.
 */
export async function handlePluginList(
  db: Kysely<Database>,
  configuredPlugins: ResolvedPlugin[],
  marketplaceUrl?: string,
): Promise<ApiResult<PluginListResponse>> {
  try {
    const stateRepo = new PluginStateRepository(db);
    const allStates = await stateRepo.getAll();
    // Index persisted state by plugin id for O(1) lookup during the merge.
    const stateMap = new Map(allStates.map((s) => [s.pluginId, s]));

    const configuredIds = new Set(configuredPlugins.map((p) => p.id));

    const items = configuredPlugins.map((plugin) => {
      const state = stateMap.get(plugin.id) ?? null;
      return buildPluginInfo(plugin, state, marketplaceUrl);
    });

    // Include marketplace-installed plugins that aren't in the configured plugins list
    for (const state of allStates) {
      if (state.source !== "marketplace") continue;
      if (configuredIds.has(state.pluginId)) continue;

      items.push({
        id: state.pluginId,
        name: state.displayName || state.pluginId,
        version: state.marketplaceVersion ?? state.version,
        enabled: state.status === "active",
        status: state.status,
        source: "marketplace",
        marketplaceVersion: state.marketplaceVersion ?? undefined,
        // No resolved module for marketplace-only entries, so nothing to inspect.
        capabilities: [],
        hasAdminPages: false,
        hasDashboardWidgets: false,
        hasHooks: false,
        installedAt: state.installedAt?.toISOString(),
        activatedAt: state.activatedAt?.toISOString() ?? undefined,
        deactivatedAt: state.deactivatedAt?.toISOString() ?? undefined,
        description: state.description ?? undefined,
        iconUrl: marketplaceUrl ? marketplaceIconUrl(marketplaceUrl, state.pluginId) : undefined,
      });
    }

    return {
      success: true,
      data: { items },
    };
  } catch {
    // Deliberately generic: any DB/state failure surfaces as one API error code.
    return {
      success: false,
      error: {
        code: "PLUGIN_LIST_ERROR",
        message: "Failed to list plugins",
      },
    };
  }
}
|
||||
|
||||
/**
|
||||
* Get a single plugin's info
|
||||
*/
|
||||
export async function handlePluginGet(
|
||||
db: Kysely<Database>,
|
||||
configuredPlugins: ResolvedPlugin[],
|
||||
pluginId: string,
|
||||
marketplaceUrl?: string,
|
||||
): Promise<ApiResult<PluginResponse>> {
|
||||
try {
|
||||
const plugin = configuredPlugins.find((p) => p.id === pluginId);
|
||||
if (!plugin) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "NOT_FOUND",
|
||||
message: `Plugin not found: ${pluginId}`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const stateRepo = new PluginStateRepository(db);
|
||||
const state = await stateRepo.get(pluginId);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { item: buildPluginInfo(plugin, state, marketplaceUrl) },
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "PLUGIN_GET_ERROR",
|
||||
message: "Failed to get plugin",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Enable a plugin
|
||||
*/
|
||||
export async function handlePluginEnable(
|
||||
db: Kysely<Database>,
|
||||
configuredPlugins: ResolvedPlugin[],
|
||||
pluginId: string,
|
||||
): Promise<ApiResult<PluginResponse>> {
|
||||
try {
|
||||
const plugin = configuredPlugins.find((p) => p.id === pluginId);
|
||||
if (!plugin) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "NOT_FOUND",
|
||||
message: `Plugin not found: ${pluginId}`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const stateRepo = new PluginStateRepository(db);
|
||||
const state = await stateRepo.enable(pluginId, plugin.version);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { item: buildPluginInfo(plugin, state) },
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "PLUGIN_ENABLE_ERROR",
|
||||
message: "Failed to enable plugin",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Disable a plugin
|
||||
*/
|
||||
export async function handlePluginDisable(
|
||||
db: Kysely<Database>,
|
||||
configuredPlugins: ResolvedPlugin[],
|
||||
pluginId: string,
|
||||
): Promise<ApiResult<PluginResponse>> {
|
||||
try {
|
||||
const plugin = configuredPlugins.find((p) => p.id === pluginId);
|
||||
if (!plugin) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "NOT_FOUND",
|
||||
message: `Plugin not found: ${pluginId}`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const stateRepo = new PluginStateRepository(db);
|
||||
const state = await stateRepo.disable(pluginId, plugin.version);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { item: buildPluginInfo(plugin, state) },
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "PLUGIN_DISABLE_ERROR",
|
||||
message: "Failed to disable plugin",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
360
packages/core/src/api/handlers/redirects.ts
Normal file
360
packages/core/src/api/handlers/redirects.ts
Normal file
@@ -0,0 +1,360 @@
|
||||
/**
|
||||
* Redirect CRUD and 404 log handlers
|
||||
*/
|
||||
|
||||
import type { Kysely } from "kysely";
|
||||
|
||||
import {
|
||||
RedirectRepository,
|
||||
type Redirect,
|
||||
type NotFoundEntry,
|
||||
type NotFoundSummary,
|
||||
} from "../../database/repositories/redirect.js";
|
||||
import type { FindManyResult } from "../../database/repositories/types.js";
|
||||
import type { Database } from "../../database/types.js";
|
||||
import { validatePattern, validateDestinationParams, isPattern } from "../../redirects/patterns.js";
|
||||
import type { ApiResult } from "../types.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Redirects
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* List redirects with cursor pagination and optional filters
|
||||
*/
|
||||
export async function handleRedirectList(
|
||||
db: Kysely<Database>,
|
||||
params: {
|
||||
cursor?: string;
|
||||
limit?: number;
|
||||
search?: string;
|
||||
group?: string;
|
||||
enabled?: boolean;
|
||||
auto?: boolean;
|
||||
},
|
||||
): Promise<ApiResult<FindManyResult<Redirect>>> {
|
||||
try {
|
||||
const repo = new RedirectRepository(db);
|
||||
const result = await repo.findMany(params);
|
||||
return { success: true, data: result };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "REDIRECT_LIST_ERROR", message: "Failed to fetch redirects" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Create a redirect rule.
 *
 * Validation, in order:
 *  1. source must differ from destination (an identical pair would loop);
 *  2. if the source looks like a pattern, its syntax is validated and the
 *     destination's :param references are checked against the source;
 *  3. the source string must not already be registered.
 *
 * @returns The created redirect, or a VALIDATION_ERROR / CONFLICT /
 *   REDIRECT_CREATE_ERROR result.
 */
export async function handleRedirectCreate(
  db: Kysely<Database>,
  input: {
    source: string;
    destination: string;
    type?: number;
    enabled?: boolean;
    groupName?: string | null;
  },
): Promise<ApiResult<Redirect>> {
  try {
    const repo = new RedirectRepository(db);

    // Source and destination must differ
    if (input.source === input.destination) {
      return {
        success: false,
        error: {
          code: "VALIDATION_ERROR",
          message: "Source and destination must be different",
        },
      };
    }

    // If source looks like a pattern, validate it
    const sourceIsPattern = isPattern(input.source);
    if (sourceIsPattern) {
      const patternError = validatePattern(input.source);
      if (patternError) {
        return {
          success: false,
          error: { code: "VALIDATION_ERROR", message: `Invalid source pattern: ${patternError}` },
        };
      }

      // Validate destination params reference valid source params
      const destError = validateDestinationParams(input.source, input.destination);
      if (destError) {
        return {
          success: false,
          error: { code: "VALIDATION_ERROR", message: destError },
        };
      }
    }

    // Check for duplicate source. NOTE(review): findBySource appears to be an
    // exact string match (patterns compare by their literal text) — confirm in
    // RedirectRepository before relying on overlap detection here.
    const existing = await repo.findBySource(input.source);
    if (existing) {
      return {
        success: false,
        error: {
          code: "CONFLICT",
          message: `A redirect from "${input.source}" already exists`,
        },
      };
    }

    const redirect = await repo.create({
      source: input.source,
      destination: input.destination,
      type: input.type ?? 301, // HTTP 301 (permanent) is the default
      isPattern: sourceIsPattern,
      enabled: input.enabled ?? true,
      groupName: input.groupName ?? null,
    });

    return { success: true, data: redirect };
  } catch {
    return {
      success: false,
      error: { code: "REDIRECT_CREATE_ERROR", message: "Failed to create redirect" },
    };
  }
}
|
||||
|
||||
/**
|
||||
* Get a redirect by ID
|
||||
*/
|
||||
export async function handleRedirectGet(
|
||||
db: Kysely<Database>,
|
||||
id: string,
|
||||
): Promise<ApiResult<Redirect>> {
|
||||
try {
|
||||
const repo = new RedirectRepository(db);
|
||||
const redirect = await repo.findById(id);
|
||||
|
||||
if (!redirect) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: `Redirect "${id}" not found` },
|
||||
};
|
||||
}
|
||||
|
||||
return { success: true, data: redirect };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "REDIRECT_GET_ERROR", message: "Failed to fetch redirect" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Update a redirect by ID.
 *
 * Validates the merged (existing + patch) values: source must still differ
 * from destination; a changed source has its pattern syntax and uniqueness
 * re-checked; destination :params are validated against whichever source
 * will be in effect after the update.
 *
 * @returns The updated redirect, or NOT_FOUND / VALIDATION_ERROR /
 *   CONFLICT / REDIRECT_UPDATE_ERROR.
 */
export async function handleRedirectUpdate(
  db: Kysely<Database>,
  id: string,
  input: {
    source?: string;
    destination?: string;
    type?: number;
    enabled?: boolean;
    groupName?: string | null;
  },
): Promise<ApiResult<Redirect>> {
  try {
    const repo = new RedirectRepository(db);

    const existing = await repo.findById(id);
    if (!existing) {
      return {
        success: false,
        error: { code: "NOT_FOUND", message: `Redirect "${id}" not found` },
      };
    }

    // Effective values after the patch is applied.
    const newSource = input.source ?? existing.source;
    const newDest = input.destination ?? existing.destination;

    // Source and destination must differ
    if (newSource === newDest) {
      return {
        success: false,
        error: {
          code: "VALIDATION_ERROR",
          message: "Source and destination must be different",
        },
      };
    }

    // If source is changing, validate patterns
    if (input.source !== undefined) {
      const sourceIsPattern = isPattern(input.source);
      if (sourceIsPattern) {
        const patternError = validatePattern(input.source);
        if (patternError) {
          return {
            success: false,
            error: {
              code: "VALIDATION_ERROR",
              message: `Invalid source pattern: ${patternError}`,
            },
          };
        }
      }

      // Check for duplicate source (exclude self)
      const dup = await repo.findBySource(input.source);
      if (dup && dup.id !== id) {
        return {
          success: false,
          error: {
            code: "CONFLICT",
            message: `A redirect from "${input.source}" already exists`,
          },
        };
      }
    }

    // Validate destination params against the (possibly updated) source
    if (isPattern(newSource)) {
      const destError = validateDestinationParams(newSource, newDest);
      if (destError) {
        return {
          success: false,
          error: { code: "VALIDATION_ERROR", message: destError },
        };
      }
    }

    const updated = await repo.update(id, {
      source: input.source,
      destination: input.destination,
      type: input.type,
      enabled: input.enabled,
      groupName: input.groupName,
    });

    // repo.update returns a falsy value when the row vanished mid-request.
    if (!updated) {
      return {
        success: false,
        error: { code: "REDIRECT_UPDATE_ERROR", message: "Failed to update redirect" },
      };
    }

    return { success: true, data: updated };
  } catch {
    return {
      success: false,
      error: { code: "REDIRECT_UPDATE_ERROR", message: "Failed to update redirect" },
    };
  }
}
|
||||
|
||||
/**
|
||||
* Delete a redirect by ID
|
||||
*/
|
||||
export async function handleRedirectDelete(
|
||||
db: Kysely<Database>,
|
||||
id: string,
|
||||
): Promise<ApiResult<{ deleted: true }>> {
|
||||
try {
|
||||
const repo = new RedirectRepository(db);
|
||||
const deleted = await repo.delete(id);
|
||||
|
||||
if (!deleted) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: `Redirect "${id}" not found` },
|
||||
};
|
||||
}
|
||||
|
||||
return { success: true, data: { deleted: true } };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "REDIRECT_DELETE_ERROR", message: "Failed to delete redirect" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// 404 Log
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* List 404 log entries with cursor pagination
|
||||
*/
|
||||
export async function handleNotFoundList(
|
||||
db: Kysely<Database>,
|
||||
params: { cursor?: string; limit?: number; search?: string },
|
||||
): Promise<ApiResult<FindManyResult<NotFoundEntry>>> {
|
||||
try {
|
||||
const repo = new RedirectRepository(db);
|
||||
const result = await repo.find404s(params);
|
||||
return { success: true, data: result };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND_LIST_ERROR", message: "Failed to fetch 404 log" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get 404 summary (grouped by path, sorted by count)
|
||||
*/
|
||||
export async function handleNotFoundSummary(
|
||||
db: Kysely<Database>,
|
||||
limit?: number,
|
||||
): Promise<ApiResult<{ items: NotFoundSummary[] }>> {
|
||||
try {
|
||||
const repo = new RedirectRepository(db);
|
||||
const items = await repo.get404Summary(limit);
|
||||
return { success: true, data: { items } };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND_SUMMARY_ERROR", message: "Failed to fetch 404 summary" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear all 404 log entries
|
||||
*/
|
||||
export async function handleNotFoundClear(
|
||||
db: Kysely<Database>,
|
||||
): Promise<ApiResult<{ deleted: number }>> {
|
||||
try {
|
||||
const repo = new RedirectRepository(db);
|
||||
const deleted = await repo.clear404s();
|
||||
return { success: true, data: { deleted } };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND_CLEAR_ERROR", message: "Failed to clear 404 log" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Prune 404 log entries older than a given date
|
||||
*/
|
||||
export async function handleNotFoundPrune(
|
||||
db: Kysely<Database>,
|
||||
olderThan: string,
|
||||
): Promise<ApiResult<{ deleted: number }>> {
|
||||
try {
|
||||
const repo = new RedirectRepository(db);
|
||||
const deleted = await repo.prune404s(olderThan);
|
||||
return { success: true, data: { deleted } };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND_PRUNE_ERROR", message: "Failed to prune 404 log" },
|
||||
};
|
||||
}
|
||||
}
|
||||
145
packages/core/src/api/handlers/revision.ts
Normal file
145
packages/core/src/api/handlers/revision.ts
Normal file
@@ -0,0 +1,145 @@
|
||||
/**
|
||||
* Revision history handlers
|
||||
*/
|
||||
|
||||
import type { Kysely } from "kysely";
|
||||
|
||||
import { ContentRepository } from "../../database/repositories/content.js";
|
||||
import { RevisionRepository, type Revision } from "../../database/repositories/revision.js";
|
||||
import type { Database } from "../../database/types.js";
|
||||
import type { ApiResult, ContentResponse } from "../types.js";
|
||||
|
||||
/** Response body for revision listing: one page of items plus the total count. */
export interface RevisionListResponse {
  items: Revision[];
  /** Total number of revisions for the entry, independent of the page size. */
  total: number;
}

/** Response body for single-revision endpoints. */
export interface RevisionResponse {
  item: Revision;
}
|
||||
|
||||
/**
|
||||
* List revisions for a content entry
|
||||
*/
|
||||
export async function handleRevisionList(
|
||||
db: Kysely<Database>,
|
||||
collection: string,
|
||||
entryId: string,
|
||||
params: { limit?: number } = {},
|
||||
): Promise<ApiResult<RevisionListResponse>> {
|
||||
try {
|
||||
const repo = new RevisionRepository(db);
|
||||
const [items, total] = await Promise.all([
|
||||
repo.findByEntry(collection, entryId, { limit: Math.min(params.limit || 50, 100) }),
|
||||
repo.countByEntry(collection, entryId),
|
||||
]);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { items, total },
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "REVISION_LIST_ERROR",
|
||||
message: "Failed to list revisions",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a specific revision
|
||||
*/
|
||||
export async function handleRevisionGet(
|
||||
db: Kysely<Database>,
|
||||
revisionId: string,
|
||||
): Promise<ApiResult<RevisionResponse>> {
|
||||
try {
|
||||
const repo = new RevisionRepository(db);
|
||||
const item = await repo.findById(revisionId);
|
||||
|
||||
if (!item) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "NOT_FOUND",
|
||||
message: `Revision not found: ${revisionId}`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { item },
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "REVISION_GET_ERROR",
|
||||
message: "Failed to get revision",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Restore a revision (updates content to this revision's data and creates new revision)
|
||||
*/
|
||||
export async function handleRevisionRestore(
|
||||
db: Kysely<Database>,
|
||||
revisionId: string,
|
||||
callerUserId: string,
|
||||
): Promise<ApiResult<ContentResponse>> {
|
||||
try {
|
||||
const revisionRepo = new RevisionRepository(db);
|
||||
const contentRepo = new ContentRepository(db);
|
||||
|
||||
// Get the revision
|
||||
const revision = await revisionRepo.findById(revisionId);
|
||||
if (!revision) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "NOT_FOUND",
|
||||
message: `Revision not found: ${revisionId}`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// Extract _slug from revision data (stored as metadata, not a real column)
|
||||
const { _slug, ...fieldData } = revision.data;
|
||||
|
||||
// Update the content with the revision's data
|
||||
const item = await contentRepo.update(revision.collection, revision.entryId, {
|
||||
data: fieldData,
|
||||
slug: typeof _slug === "string" ? _slug : undefined,
|
||||
});
|
||||
|
||||
// Create a new revision to record the restore, attributed to the caller
|
||||
await revisionRepo.create({
|
||||
collection: revision.collection,
|
||||
entryId: revision.entryId,
|
||||
data: revision.data,
|
||||
authorId: callerUserId,
|
||||
});
|
||||
|
||||
// Fire-and-forget: prune old revisions to prevent unbounded growth
|
||||
void revisionRepo.pruneOldRevisions(revision.collection, revision.entryId, 50).catch(() => {});
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { item },
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "REVISION_RESTORE_ERROR",
|
||||
message: "Failed to restore revision",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
534
packages/core/src/api/handlers/schema.ts
Normal file
534
packages/core/src/api/handlers/schema.ts
Normal file
@@ -0,0 +1,534 @@
|
||||
/**
|
||||
* Schema/collection management handlers
|
||||
*/
|
||||
|
||||
import type { Kysely } from "kysely";
|
||||
|
||||
import type { Database } from "../../database/types.js";
|
||||
import {
|
||||
SchemaRegistry,
|
||||
SchemaError,
|
||||
type Collection,
|
||||
type Field,
|
||||
type CreateCollectionInput,
|
||||
type UpdateCollectionInput,
|
||||
type CreateFieldInput,
|
||||
type UpdateFieldInput,
|
||||
type CollectionWithFields,
|
||||
} from "../../schema/index.js";
|
||||
import type { ApiResult } from "../types.js";
|
||||
|
||||
/** Response body for the collection list endpoint. */
export interface CollectionListResponse {
  items: Collection[];
}

/** Response body for single-collection endpoints. */
export interface CollectionResponse {
  item: Collection;
}

/** Single-collection response variant that embeds the field definitions. */
export interface CollectionWithFieldsResponse {
  item: CollectionWithFields;
}

/** Response body for the field list endpoint. */
export interface FieldListResponse {
  items: Field[];
}

/** Response body for single-field endpoints. */
export interface FieldResponse {
  item: Field;
}
|
||||
|
||||
/**
|
||||
* List all collections
|
||||
*/
|
||||
export async function handleSchemaCollectionList(
|
||||
db: Kysely<Database>,
|
||||
): Promise<ApiResult<CollectionListResponse>> {
|
||||
try {
|
||||
const registry = new SchemaRegistry(db);
|
||||
const items = await registry.listCollections();
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { items },
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "SCHEMA_LIST_ERROR",
|
||||
message: "Failed to list collections",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a collection by slug
|
||||
*/
|
||||
export async function handleSchemaCollectionGet(
|
||||
db: Kysely<Database>,
|
||||
slug: string,
|
||||
options?: { includeFields?: boolean },
|
||||
): Promise<ApiResult<CollectionResponse | CollectionWithFieldsResponse>> {
|
||||
try {
|
||||
const registry = new SchemaRegistry(db);
|
||||
|
||||
if (options?.includeFields) {
|
||||
const item = await registry.getCollectionWithFields(slug);
|
||||
if (!item) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "NOT_FOUND",
|
||||
message: `Collection not found: ${slug}`,
|
||||
},
|
||||
};
|
||||
}
|
||||
return {
|
||||
success: true,
|
||||
data: { item },
|
||||
};
|
||||
}
|
||||
|
||||
const item = await registry.getCollection(slug);
|
||||
if (!item) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "NOT_FOUND",
|
||||
message: `Collection not found: ${slug}`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { item },
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "SCHEMA_GET_ERROR",
|
||||
message: "Failed to get collection",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Create a collection.
 *
 * SchemaError from the registry (validation, duplicates, etc.) is passed
 * through with its own code/message/details; any other failure is logged
 * and mapped to SCHEMA_CREATE_ERROR.
 */
export async function handleSchemaCollectionCreate(
  db: Kysely<Database>,
  input: CreateCollectionInput,
): Promise<ApiResult<CollectionResponse>> {
  try {
    const registry = new SchemaRegistry(db);
    const item = await registry.createCollection(input);

    return {
      success: true,
      data: { item },
    };
  } catch (error) {
    if (error instanceof SchemaError) {
      return {
        success: false,
        error: {
          code: error.code,
          message: error.message,
          details: error.details,
        },
      };
    }
    // Unexpected failure: log the cause before returning the generic code.
    console.error("[emdash] Failed to create collection:", error);
    return {
      success: false,
      error: {
        code: "SCHEMA_CREATE_ERROR",
        message: "Failed to create collection",
      },
    };
  }
}
|
||||
|
||||
/**
|
||||
* Update a collection
|
||||
*/
|
||||
export async function handleSchemaCollectionUpdate(
|
||||
db: Kysely<Database>,
|
||||
slug: string,
|
||||
input: UpdateCollectionInput,
|
||||
): Promise<ApiResult<CollectionResponse>> {
|
||||
try {
|
||||
const registry = new SchemaRegistry(db);
|
||||
const item = await registry.updateCollection(slug, input);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { item },
|
||||
};
|
||||
} catch (error) {
|
||||
if (error instanceof SchemaError) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: error.code,
|
||||
message: error.message,
|
||||
details: error.details,
|
||||
},
|
||||
};
|
||||
}
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "SCHEMA_UPDATE_ERROR",
|
||||
message: "Failed to update collection",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a collection
|
||||
*/
|
||||
export async function handleSchemaCollectionDelete(
|
||||
db: Kysely<Database>,
|
||||
slug: string,
|
||||
options?: { force?: boolean },
|
||||
): Promise<ApiResult<{ success: boolean }>> {
|
||||
try {
|
||||
const registry = new SchemaRegistry(db);
|
||||
await registry.deleteCollection(slug, options);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { success: true },
|
||||
};
|
||||
} catch (error) {
|
||||
if (error instanceof SchemaError) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: error.code,
|
||||
message: error.message,
|
||||
details: error.details,
|
||||
},
|
||||
};
|
||||
}
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "SCHEMA_DELETE_ERROR",
|
||||
message: "Failed to delete collection",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* List fields for a collection
|
||||
*/
|
||||
export async function handleSchemaFieldList(
|
||||
db: Kysely<Database>,
|
||||
collectionSlug: string,
|
||||
): Promise<ApiResult<FieldListResponse>> {
|
||||
try {
|
||||
const registry = new SchemaRegistry(db);
|
||||
const collection = await registry.getCollection(collectionSlug);
|
||||
|
||||
if (!collection) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "NOT_FOUND",
|
||||
message: `Collection not found: ${collectionSlug}`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const items = await registry.listFields(collection.id);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { items },
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "SCHEMA_FIELD_LIST_ERROR",
|
||||
message: "Failed to list fields",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a field
|
||||
*/
|
||||
export async function handleSchemaFieldGet(
|
||||
db: Kysely<Database>,
|
||||
collectionSlug: string,
|
||||
fieldSlug: string,
|
||||
): Promise<ApiResult<FieldResponse>> {
|
||||
try {
|
||||
const registry = new SchemaRegistry(db);
|
||||
const item = await registry.getField(collectionSlug, fieldSlug);
|
||||
|
||||
if (!item) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "NOT_FOUND",
|
||||
message: `Field not found: ${fieldSlug} in collection ${collectionSlug}`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { item },
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "SCHEMA_FIELD_GET_ERROR",
|
||||
message: "Failed to get field",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a field
|
||||
*/
|
||||
export async function handleSchemaFieldCreate(
|
||||
db: Kysely<Database>,
|
||||
collectionSlug: string,
|
||||
input: CreateFieldInput,
|
||||
): Promise<ApiResult<FieldResponse>> {
|
||||
try {
|
||||
const registry = new SchemaRegistry(db);
|
||||
const item = await registry.createField(collectionSlug, input);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { item },
|
||||
};
|
||||
} catch (error) {
|
||||
if (error instanceof SchemaError) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: error.code,
|
||||
message: error.message,
|
||||
details: error.details,
|
||||
},
|
||||
};
|
||||
}
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "SCHEMA_FIELD_CREATE_ERROR",
|
||||
message: "Failed to create field",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update a field
|
||||
*/
|
||||
export async function handleSchemaFieldUpdate(
|
||||
db: Kysely<Database>,
|
||||
collectionSlug: string,
|
||||
fieldSlug: string,
|
||||
input: UpdateFieldInput,
|
||||
): Promise<ApiResult<FieldResponse>> {
|
||||
try {
|
||||
const registry = new SchemaRegistry(db);
|
||||
const item = await registry.updateField(collectionSlug, fieldSlug, input);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { item },
|
||||
};
|
||||
} catch (error) {
|
||||
if (error instanceof SchemaError) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: error.code,
|
||||
message: error.message,
|
||||
details: error.details,
|
||||
},
|
||||
};
|
||||
}
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "SCHEMA_FIELD_UPDATE_ERROR",
|
||||
message: "Failed to update field",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a field
|
||||
*/
|
||||
export async function handleSchemaFieldDelete(
|
||||
db: Kysely<Database>,
|
||||
collectionSlug: string,
|
||||
fieldSlug: string,
|
||||
): Promise<ApiResult<{ success: boolean }>> {
|
||||
try {
|
||||
const registry = new SchemaRegistry(db);
|
||||
await registry.deleteField(collectionSlug, fieldSlug);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { success: true },
|
||||
};
|
||||
} catch (error) {
|
||||
if (error instanceof SchemaError) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: error.code,
|
||||
message: error.message,
|
||||
details: error.details,
|
||||
},
|
||||
};
|
||||
}
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "SCHEMA_FIELD_DELETE_ERROR",
|
||||
message: "Failed to delete field",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Reorder fields
|
||||
*/
|
||||
export async function handleSchemaFieldReorder(
|
||||
db: Kysely<Database>,
|
||||
collectionSlug: string,
|
||||
fieldSlugs: string[],
|
||||
): Promise<ApiResult<{ success: boolean }>> {
|
||||
try {
|
||||
const registry = new SchemaRegistry(db);
|
||||
await registry.reorderFields(collectionSlug, fieldSlugs);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { success: true },
|
||||
};
|
||||
} catch (error) {
|
||||
if (error instanceof SchemaError) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: error.code,
|
||||
message: error.message,
|
||||
details: error.details,
|
||||
},
|
||||
};
|
||||
}
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "SCHEMA_FIELD_REORDER_ERROR",
|
||||
message: "Failed to reorder fields",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================
// Orphaned Table Discovery
// ============================================

/**
 * A content table discovered in the database that has no registered
 * collection definition (as reported by SchemaRegistry.discoverOrphanedTables).
 */
export interface OrphanedTable {
  // Collection-style slug for the table
  slug: string;
  // Physical table name in the database
  tableName: string;
  // Number of rows currently in the table
  rowCount: number;
}

/** Response payload for handleOrphanedTableList */
export interface OrphanedTableListResponse {
  items: OrphanedTable[];
}
|
||||
|
||||
/**
|
||||
* List orphaned content tables
|
||||
*/
|
||||
export async function handleOrphanedTableList(
|
||||
db: Kysely<Database>,
|
||||
): Promise<ApiResult<OrphanedTableListResponse>> {
|
||||
try {
|
||||
const registry = new SchemaRegistry(db);
|
||||
const items = await registry.discoverOrphanedTables();
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { items },
|
||||
};
|
||||
} catch (error) {
|
||||
console.error("[emdash] Failed to list orphaned tables:", error);
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "ORPHAN_LIST_ERROR",
|
||||
message: "Failed to list orphaned tables",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Register an orphaned table as a collection
|
||||
*/
|
||||
export async function handleOrphanedTableRegister(
|
||||
db: Kysely<Database>,
|
||||
slug: string,
|
||||
options?: {
|
||||
label?: string;
|
||||
labelSingular?: string;
|
||||
description?: string;
|
||||
},
|
||||
): Promise<ApiResult<CollectionResponse>> {
|
||||
try {
|
||||
const registry = new SchemaRegistry(db);
|
||||
const item = await registry.registerOrphanedTable(slug, options);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: { item },
|
||||
};
|
||||
} catch (error) {
|
||||
if (error instanceof SchemaError) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: error.code,
|
||||
message: error.message,
|
||||
details: error.details,
|
||||
},
|
||||
};
|
||||
}
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "ORPHAN_REGISTER_ERROR",
|
||||
message: "Failed to register orphaned table",
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
289
packages/core/src/api/handlers/sections.ts
Normal file
289
packages/core/src/api/handlers/sections.ts
Normal file
@@ -0,0 +1,289 @@
|
||||
/**
|
||||
* Section CRUD handlers
|
||||
*/
|
||||
|
||||
import type { Kysely } from "kysely";
|
||||
import { ulid } from "ulidx";
|
||||
|
||||
import type { FindManyResult } from "../../database/repositories/types.js";
|
||||
import type { Database } from "../../database/types.js";
|
||||
import {
|
||||
getSectionById,
|
||||
getSectionWithDb,
|
||||
getSectionsWithDb,
|
||||
type Section,
|
||||
type GetSectionsOptions,
|
||||
} from "../../sections/index.js";
|
||||
import type { ApiResult } from "../types.js";
|
||||
|
||||
// Slug format enforced on section create/update: lowercase letters,
// digits, and hyphens only (non-empty).
const SLUG_PATTERN = /^[a-z0-9-]+$/;

/** Paginated section list, shaped like the repository layer's FindManyResult. */
export type SectionListResponse = FindManyResult<Section>;
|
||||
|
||||
/**
|
||||
* List sections with optional filters
|
||||
*/
|
||||
export async function handleSectionList(
|
||||
db: Kysely<Database>,
|
||||
params: GetSectionsOptions,
|
||||
): Promise<ApiResult<SectionListResponse>> {
|
||||
try {
|
||||
const result = await getSectionsWithDb(db, {
|
||||
source: params.source,
|
||||
search: params.search,
|
||||
limit: params.limit,
|
||||
cursor: params.cursor,
|
||||
});
|
||||
|
||||
return { success: true, data: result };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "SECTION_LIST_ERROR", message: "Failed to fetch sections" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Create a section
 *
 * Flow: validate slug format → reject duplicate slugs → insert the row →
 * re-read it via getSectionById so the response uses the same
 * deserialization path as reads.
 *
 * NOTE(review): the uniqueness check and the insert are separate
 * statements, so a concurrent create with the same slug could race
 * between them — confirm a DB unique constraint backs this up.
 */
export async function handleSectionCreate(
  db: Kysely<Database>,
  input: {
    slug: string;
    title: string;
    description?: string;
    keywords?: string[];
    content: unknown[];
    previewMediaId?: string;
    source?: string;
    themeId?: string;
  },
): Promise<ApiResult<Section>> {
  try {
    // Validate slug format
    if (!SLUG_PATTERN.test(input.slug)) {
      return {
        success: false,
        error: {
          code: "VALIDATION_ERROR",
          message: "slug must only contain lowercase letters, numbers, and hyphens",
        },
      };
    }

    // Check if slug already exists
    const existing = await db
      .selectFrom("_emdash_sections")
      .select("id")
      .where("slug", "=", input.slug)
      .executeTakeFirst();

    if (existing) {
      return {
        success: false,
        error: {
          code: "CONFLICT",
          message: `Section with slug "${input.slug}" already exists`,
        },
      };
    }

    const id = ulid();
    const now = new Date().toISOString();

    // Arrays/objects are persisted as JSON strings; absent optionals as NULL.
    // Sections created via the API default to source "user".
    await db
      .insertInto("_emdash_sections")
      .values({
        id,
        slug: input.slug,
        title: input.title,
        description: input.description ?? null,
        keywords: input.keywords ? JSON.stringify(input.keywords) : null,
        content: JSON.stringify(input.content),
        preview_media_id: input.previewMediaId ?? null,
        source: input.source ?? "user",
        theme_id: input.themeId ?? null,
        created_at: now,
        updated_at: now,
      })
      .execute();

    // Read back the created row so the caller gets the canonical shape.
    const section = await getSectionById(id, db);
    if (!section) {
      return {
        success: false,
        error: { code: "SECTION_CREATE_ERROR", message: "Failed to fetch created section" },
      };
    }

    return { success: true, data: section };
  } catch {
    return {
      success: false,
      error: { code: "SECTION_CREATE_ERROR", message: "Failed to create section" },
    };
  }
}
|
||||
|
||||
/**
|
||||
* Get a section by slug
|
||||
*/
|
||||
export async function handleSectionGet(
|
||||
db: Kysely<Database>,
|
||||
slug: string,
|
||||
): Promise<ApiResult<Section>> {
|
||||
try {
|
||||
const section = await getSectionWithDb(slug, db);
|
||||
|
||||
if (!section) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: `Section "${slug}" not found` },
|
||||
};
|
||||
}
|
||||
|
||||
return { success: true, data: section };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "SECTION_GET_ERROR", message: "Failed to fetch section" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Update a section by slug
 *
 * Supports renaming (with format + uniqueness validation on the new slug)
 * and partial updates of title/description/keywords/content/preview media.
 * Only fields present on the input are written.
 *
 * NOTE(review): `source` is selected from the existing row but never used,
 * and — unlike handleSectionDelete — theme-provided sections are NOT
 * blocked here. Confirm whether editing a theme section in place is
 * intended.
 */
export async function handleSectionUpdate(
  db: Kysely<Database>,
  slug: string,
  input: {
    slug?: string;
    title?: string;
    description?: string;
    keywords?: string[];
    content?: unknown[];
    previewMediaId?: string | null;
  },
): Promise<ApiResult<Section>> {
  try {
    // Check if section exists
    const existing = await db
      .selectFrom("_emdash_sections")
      .select(["id", "source"])
      .where("slug", "=", slug)
      .executeTakeFirst();

    if (!existing) {
      return {
        success: false,
        error: { code: "NOT_FOUND", message: `Section "${slug}" not found` },
      };
    }

    // Validate new slug if changing
    if (input.slug && input.slug !== slug) {
      if (!SLUG_PATTERN.test(input.slug)) {
        return {
          success: false,
          error: {
            code: "VALIDATION_ERROR",
            message: "slug must only contain lowercase letters, numbers, and hyphens",
          },
        };
      }

      // Check if new slug already exists
      const slugExists = await db
        .selectFrom("_emdash_sections")
        .select("id")
        .where("slug", "=", input.slug)
        .executeTakeFirst();

      if (slugExists) {
        return {
          success: false,
          error: {
            code: "CONFLICT",
            message: `Section with slug "${input.slug}" already exists`,
          },
        };
      }
    }

    // Build update object — only touch columns the caller provided.
    // previewMediaId explicitly accepts null to clear the preview image.
    const updates: Record<string, unknown> = {
      updated_at: new Date().toISOString(),
    };

    if (input.slug !== undefined) updates.slug = input.slug;
    if (input.title !== undefined) updates.title = input.title;
    if (input.description !== undefined) updates.description = input.description;
    if (input.keywords !== undefined) updates.keywords = JSON.stringify(input.keywords);
    if (input.content !== undefined) updates.content = JSON.stringify(input.content);
    if (input.previewMediaId !== undefined) updates.preview_media_id = input.previewMediaId;

    await db.updateTable("_emdash_sections").set(updates).where("id", "=", existing.id).execute();

    // Read back so the response uses the canonical deserialized shape.
    const section = await getSectionById(existing.id, db);
    if (!section) {
      return {
        success: false,
        error: { code: "SECTION_UPDATE_ERROR", message: "Failed to fetch updated section" },
      };
    }

    return { success: true, data: section };
  } catch {
    return {
      success: false,
      error: { code: "SECTION_UPDATE_ERROR", message: "Failed to update section" },
    };
  }
}
|
||||
|
||||
/**
|
||||
* Delete a section by slug
|
||||
*/
|
||||
export async function handleSectionDelete(
|
||||
db: Kysely<Database>,
|
||||
slug: string,
|
||||
): Promise<ApiResult<{ deleted: true }>> {
|
||||
try {
|
||||
// Check if section exists and get source
|
||||
const existing = await db
|
||||
.selectFrom("_emdash_sections")
|
||||
.select(["id", "source", "theme_id"])
|
||||
.where("slug", "=", slug)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (!existing) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: `Section "${slug}" not found` },
|
||||
};
|
||||
}
|
||||
|
||||
// Prevent deleting theme sections
|
||||
if (existing.source === "theme") {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "FORBIDDEN",
|
||||
message:
|
||||
"Cannot delete theme-provided sections. Edit the section to create a user copy, then delete that.",
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
await db.deleteFrom("_emdash_sections").where("id", "=", existing.id).execute();
|
||||
|
||||
return { success: true, data: { deleted: true } };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "SECTION_DELETE_ERROR", message: "Failed to delete section" },
|
||||
};
|
||||
}
|
||||
}
|
||||
115
packages/core/src/api/handlers/seo.ts
Normal file
115
packages/core/src/api/handlers/seo.ts
Normal file
@@ -0,0 +1,115 @@
|
||||
/**
|
||||
* SEO Handlers
|
||||
*
|
||||
* Business logic for sitemap generation and robots.txt.
|
||||
*/
|
||||
|
||||
import { sql, type Kysely } from "kysely";
|
||||
|
||||
import type { Database } from "../../database/types.js";
|
||||
import { validateIdentifier } from "../../database/validate.js";
|
||||
import type { ApiResult } from "../types.js";
|
||||
|
||||
/** Raw content data for sitemap generation — the route builds the actual URLs */
export interface SitemapContentEntry {
  /** Collection slug (e.g., "post", "page") */
  collection: string;
  /** Content slug when present, otherwise the content ID */
  identifier: string;
  /** ISO date of last modification */
  updatedAt: string;
}

/** Payload returned by handleSitemapData */
export interface SitemapDataResponse {
  entries: SitemapContentEntry[];
}

/** Maximum entries per sitemap (per the sitemaps.org spec limit) */
const SITEMAP_MAX_ENTRIES = 50_000;
|
||||
|
||||
/**
 * Collect all published, indexable content across SEO-enabled collections
 * for sitemap generation.
 *
 * Only includes content from collections with `has_seo = 1`.
 * Excludes content with `seo_no_index = 1` in the `_emdash_seo` table.
 *
 * Returns raw data (collection + identifier + date). The caller (route)
 * is responsible for building absolute URLs — this handler does NOT
 * assume a URL structure.
 */
export async function handleSitemapData(
  db: Kysely<Database>,
): Promise<ApiResult<SitemapDataResponse>> {
  try {
    // Find all SEO-enabled collections
    const collections = await db
      .selectFrom("_emdash_collections")
      .select(["slug"])
      .where("has_seo", "=", 1)
      .execute();

    const entries: SitemapContentEntry[] = [];

    for (const col of collections) {
      // Hard cap across ALL collections combined (sitemap spec limit).
      if (entries.length >= SITEMAP_MAX_ENTRIES) break;

      // Validate the slug before using it as a table name identifier.
      // Should always pass (slugs are validated on creation), but
      // guards against corrupted DB data.
      try {
        validateIdentifier(col.slug, "collection slug");
      } catch {
        console.warn(`[SITEMAP] Skipping collection with invalid slug: ${col.slug}`);
        continue;
      }

      const tableName = `ec_${col.slug}`;
      const remaining = SITEMAP_MAX_ENTRIES - entries.length;

      // Query published, non-deleted content.
      // LEFT JOIN _emdash_seo to check noindex flag.
      // Content without an SEO row is assumed indexable (default).
      // Wrapped in try/catch so a missing/broken table doesn't fail the
      // entire sitemap — we skip that collection and continue.
      try {
        const rows = await sql<{
          slug: string | null;
          id: string;
          updated_at: string;
        }>`
          SELECT c.slug, c.id, c.updated_at
          FROM ${sql.ref(tableName)} c
          LEFT JOIN _emdash_seo s
            ON s.collection = ${col.slug}
            AND s.content_id = c.id
          WHERE c.status = 'published'
            AND c.deleted_at IS NULL
            AND (s.seo_no_index IS NULL OR s.seo_no_index = 0)
          ORDER BY c.updated_at DESC
          LIMIT ${remaining}
        `.execute(db);

        for (const row of rows.rows) {
          entries.push({
            collection: col.slug,
            // Prefer the human-readable slug; fall back to the row id.
            identifier: row.slug || row.id,
            updatedAt: row.updated_at,
          });
        }
      } catch (err) {
        // Table missing or query error — skip this collection
        console.warn(`[SITEMAP] Failed to query collection "${col.slug}":`, err);
        continue;
      }
    }

    return { success: true, data: { entries } };
  } catch (error) {
    console.error("[SITEMAP_ERROR]", error);
    return {
      success: false,
      error: { code: "SITEMAP_ERROR", message: "Failed to generate sitemap data" },
    };
  }
}
|
||||
49
packages/core/src/api/handlers/settings.ts
Normal file
49
packages/core/src/api/handlers/settings.ts
Normal file
@@ -0,0 +1,49 @@
|
||||
/**
|
||||
* Settings handlers
|
||||
*/
|
||||
|
||||
import type { Kysely } from "kysely";
|
||||
|
||||
import type { Database } from "../../database/types.js";
|
||||
import { getSiteSettingsWithDb, setSiteSettings } from "../../settings/index.js";
|
||||
import type { SiteSettings } from "../../settings/types.js";
|
||||
import type { Storage } from "../../storage/types.js";
|
||||
import type { ApiResult } from "../types.js";
|
||||
|
||||
/**
|
||||
* Get all site settings
|
||||
*/
|
||||
export async function handleSettingsGet(
|
||||
db: Kysely<Database>,
|
||||
storage: Storage | null,
|
||||
): Promise<ApiResult<Partial<SiteSettings>>> {
|
||||
try {
|
||||
const settings = await getSiteSettingsWithDb(db, storage);
|
||||
return { success: true, data: settings };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "SETTINGS_READ_ERROR", message: "Failed to get settings" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update site settings
|
||||
*/
|
||||
export async function handleSettingsUpdate(
|
||||
db: Kysely<Database>,
|
||||
storage: Storage | null,
|
||||
input: Partial<SiteSettings>,
|
||||
): Promise<ApiResult<Partial<SiteSettings>>> {
|
||||
try {
|
||||
await setSiteSettings(input, db);
|
||||
const updatedSettings = await getSiteSettingsWithDb(db, storage);
|
||||
return { success: true, data: updatedSettings };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "SETTINGS_UPDATE_ERROR", message: "Failed to update settings" },
|
||||
};
|
||||
}
|
||||
}
|
||||
350
packages/core/src/api/handlers/snapshot.ts
Normal file
350
packages/core/src/api/handlers/snapshot.ts
Normal file
@@ -0,0 +1,350 @@
|
||||
/**
|
||||
* Snapshot handler — generates a portable database snapshot.
|
||||
*
|
||||
* Returns all content tables, schema definitions, and supporting data
|
||||
* needed to render content in an isolated preview database.
|
||||
*
|
||||
* Used by:
|
||||
* - DO preview database (EmDashPreviewDB.populateFromSnapshot)
|
||||
* - Future: CLI export, backup, site migration
|
||||
*/
|
||||
|
||||
import type { Kysely } from "kysely";
|
||||
import { sql } from "kysely";
|
||||
|
||||
import type { Database } from "../../database/types.js";
|
||||
|
||||
// ─<><E29480> Preview signature verification ──────────────────────────────
|
||||
|
||||
/**
|
||||
* Verify HMAC-SHA256 preview signature using crypto.subtle.
|
||||
* Returns true if the signature is valid and not expired.
|
||||
*/
|
||||
export async function verifyPreviewSignature(
|
||||
source: string,
|
||||
exp: number,
|
||||
sig: string,
|
||||
secret: string,
|
||||
): Promise<boolean> {
|
||||
if (exp < Date.now() / 1000) return false;
|
||||
|
||||
const encoder = new TextEncoder();
|
||||
const key = await crypto.subtle.importKey(
|
||||
"raw",
|
||||
encoder.encode(secret),
|
||||
{ name: "HMAC", hash: "SHA-256" },
|
||||
false,
|
||||
["verify"],
|
||||
);
|
||||
|
||||
const sigBytes = new Uint8Array(sig.length / 2);
|
||||
for (let i = 0; i < sig.length; i += 2) {
|
||||
sigBytes[i / 2] = parseInt(sig.substring(i, i + 2), 16);
|
||||
}
|
||||
|
||||
return crypto.subtle.verify("HMAC", key, sigBytes, encoder.encode(`${source}:${exp}`));
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse an X-Preview-Signature header value into its components.
|
||||
*
|
||||
* Format: "source:exp:sig" where source is a URL (contains colons),
|
||||
* exp is a unix timestamp, and sig is 64 hex chars.
|
||||
*
|
||||
* Parses from the right since source URLs contain colons.
|
||||
*
|
||||
* @returns Parsed components, or null if the format is invalid
|
||||
*/
|
||||
export function parsePreviewSignatureHeader(
|
||||
header: string,
|
||||
): { source: string; exp: number; sig: string } | null {
|
||||
const lastColon = header.lastIndexOf(":");
|
||||
if (lastColon <= 0) return null;
|
||||
|
||||
const sig = header.substring(lastColon + 1);
|
||||
if (sig.length !== 64) return null;
|
||||
|
||||
const rest = header.substring(0, lastColon);
|
||||
const secondLastColon = rest.lastIndexOf(":");
|
||||
if (secondLastColon <= 0) return null;
|
||||
|
||||
const source = rest.substring(0, secondLastColon);
|
||||
const exp = parseInt(rest.substring(secondLastColon + 1), 10);
|
||||
|
||||
if (isNaN(exp) || source.length === 0) return null;
|
||||
|
||||
return { source, exp, sig };
|
||||
}
|
||||
|
||||
// ── Media URL rewriting ─────────────────────────────────────────
|
||||
|
||||
const MEDIA_FILE_PREFIX = "/_emdash/api/media/file/";
|
||||
|
||||
/**
|
||||
* Parse a JSON string value and inject `src` for local media objects.
|
||||
* Returns the original string if it's not a local media value.
|
||||
*/
|
||||
function injectMediaSrc(jsonStr: string, origin: string): string {
|
||||
try {
|
||||
const obj = JSON.parse(jsonStr);
|
||||
if (typeof obj !== "object" || obj === null || Array.isArray(obj)) return jsonStr;
|
||||
if (injectMediaSrcInto(obj, origin)) {
|
||||
return JSON.stringify(obj);
|
||||
}
|
||||
return jsonStr;
|
||||
} catch {
|
||||
return jsonStr;
|
||||
}
|
||||
}
|
||||
|
||||
function isRecord(value: unknown): value is Record<string, unknown> {
|
||||
return typeof value === "object" && value !== null && !Array.isArray(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Recursively walk an object and inject `src` into local media values.
|
||||
* Returns true if any modifications were made.
|
||||
*/
|
||||
function injectMediaSrcInto(obj: Record<string, unknown>, origin: string): boolean {
|
||||
let modified = false;
|
||||
|
||||
// Check if this object itself is a local media value
|
||||
if ((obj.provider === "local" || (!obj.provider && obj.id && obj.meta)) && !obj.src) {
|
||||
const meta = isRecord(obj.meta) ? obj.meta : undefined;
|
||||
const storageKey = meta?.storageKey ?? obj.id;
|
||||
if (typeof storageKey === "string" && storageKey) {
|
||||
obj.src = `${origin}${MEDIA_FILE_PREFIX}${storageKey}`;
|
||||
modified = true;
|
||||
}
|
||||
}
|
||||
|
||||
// Recurse into nested objects/arrays (e.g. Portable Text with image blocks)
|
||||
for (const value of Object.values(obj)) {
|
||||
if (Array.isArray(value)) {
|
||||
for (const item of value) {
|
||||
if (isRecord(item)) {
|
||||
if (injectMediaSrcInto(item, origin)) {
|
||||
modified = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (isRecord(value)) {
|
||||
if (injectMediaSrcInto(value, origin)) {
|
||||
modified = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return modified;
|
||||
}
|
||||
|
||||
// ── Snapshot generation ─────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Safe identifier pattern for snapshot table names.
|
||||
* More permissive than validateIdentifier() — allows leading underscores
|
||||
* (needed for system tables like _emdash_collections).
|
||||
*/
|
||||
const SAFE_TABLE_NAME = /^[a-z_][a-z0-9_]*$/;
|
||||
|
||||
/** Snapshot shape consumed by the DO preview database */
|
||||
export interface Snapshot {
|
||||
tables: Record<string, Record<string, unknown>[]>;
|
||||
schema: Record<
|
||||
string,
|
||||
{
|
||||
columns: string[];
|
||||
types?: Record<string, string>;
|
||||
}
|
||||
>;
|
||||
generatedAt: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* System tables included in snapshots.
|
||||
* Content tables (ec_*) are discovered dynamically.
|
||||
*/
|
||||
const SYSTEM_TABLES = [
|
||||
"_emdash_collections",
|
||||
"_emdash_fields",
|
||||
"_emdash_taxonomy_defs",
|
||||
"_emdash_menus",
|
||||
"_emdash_menu_items",
|
||||
"_emdash_sections",
|
||||
"_emdash_widget_areas",
|
||||
"_emdash_widgets",
|
||||
"_emdash_seo",
|
||||
"_emdash_migrations",
|
||||
"taxonomies",
|
||||
"content_taxonomies",
|
||||
"media",
|
||||
"options",
|
||||
"revisions",
|
||||
];
|
||||
|
||||
/**
|
||||
* Table name prefixes excluded from snapshots (auth/security data).
|
||||
*/
|
||||
const EXCLUDED_PREFIXES = [
|
||||
"_emdash_api_tokens",
|
||||
"_emdash_oauth_tokens",
|
||||
"_emdash_authorization_codes",
|
||||
"_emdash_device_codes",
|
||||
"_emdash_migrations_lock",
|
||||
"_plugin_",
|
||||
"users",
|
||||
"sessions",
|
||||
"credentials",
|
||||
"challenges",
|
||||
];
|
||||
|
||||
/**
|
||||
* Options key prefixes safe for inclusion in snapshots.
|
||||
*
|
||||
* The options table contains plugin secrets (plugin:*), passkey challenges
|
||||
* (emdash:passkey_pending:*), and setup state that must not leak to
|
||||
* preview databases. Only site-level rendering settings are needed.
|
||||
*/
|
||||
const SAFE_OPTIONS_PREFIXES = ["site:"];
|
||||
|
||||
function isExcluded(tableName: string): boolean {
|
||||
return EXCLUDED_PREFIXES.some((prefix) => tableName.startsWith(prefix));
|
||||
}
|
||||
|
||||
/** Column info from PRAGMA table_info */
interface ColumnInfo {
  // Column name
  name: string;
  // Declared SQLite type; may be empty for untyped columns
  type: string;
}

export interface GenerateSnapshotOptions {
  /**
   * Include draft and scheduled (not-yet-live) content (default: false).
   * Soft-deleted rows are always excluded regardless of this flag —
   * generateSnapshot filters `deleted_at IS NULL` in both branches.
   */
  includeDrafts?: boolean;
  /** Origin URL for absolutizing local media URLs (e.g. "https://mysite.com") */
  origin?: string;
}
|
||||
|
||||
/**
 * Generate a portable database snapshot.
 *
 * Discovers ec_* content tables dynamically, exports system tables
 * needed for rendering, and includes schema info for table recreation.
 *
 * @param db - Kysely connection to the live database
 * @param options - Draft inclusion and media-URL origin
 * @returns Snapshot of rows + column schema + generation timestamp
 */
export async function generateSnapshot(
  db: Kysely<Database>,
  options?: GenerateSnapshotOptions,
): Promise<Snapshot> {
  const includeDrafts = options?.includeDrafts ?? false;

  // Discover all ec_* content tables
  const tableResult = await sql<{ name: string }>`
    SELECT name FROM sqlite_master
    WHERE type = 'table'
      AND name LIKE 'ec_%'
    ORDER BY name
  `.execute(db);

  const contentTables = tableResult.rows.map((r) => r.name);

  // Build list of all tables to export
  const allTables = [...contentTables, ...SYSTEM_TABLES];

  const tables: Record<string, Record<string, unknown>[]> = {};
  const schema: Record<string, { columns: string[]; types?: Record<string, string> }> = {};

  for (const tableName of allTables) {
    // Never export auth/security tables.
    if (isExcluded(tableName)) continue;

    // Validate identifier before interpolating into sql.raw().
    // SYSTEM_TABLES are hardcoded and safe, but ec_* names come from
    // sqlite_master and must be validated.
    if (!SAFE_TABLE_NAME.test(tableName)) continue;

    try {
      // Get column info via PRAGMA
      const pragmaResult = await sql<ColumnInfo>`
        PRAGMA table_info(${sql.raw(`"${tableName}"`)})
      `.execute(db);

      // Zero PRAGMA rows means the table doesn't exist — skip it.
      if (pragmaResult.rows.length === 0) continue;

      const columns = pragmaResult.rows.map((r) => r.name);
      const types: Record<string, string> = {};
      for (const row of pragmaResult.rows) {
        // Untyped SQLite columns default to TEXT in the snapshot schema.
        types[row.name] = row.type || "TEXT";
      }

      schema[tableName] = { columns, types };

      // Fetch rows
      let rows: Record<string, unknown>[];

      if (tableName.startsWith("ec_")) {
        if (includeDrafts) {
          // Include all non-deleted content (published, draft, scheduled)
          rows = (
            await sql<Record<string, unknown>>`
              SELECT * FROM ${sql.raw(`"${tableName}"`)}
              WHERE deleted_at IS NULL
            `.execute(db)
          ).rows;
        } else {
          // Only export published content
          rows = (
            await sql<Record<string, unknown>>`
              SELECT * FROM ${sql.raw(`"${tableName}"`)}
              WHERE deleted_at IS NULL
                AND (status = 'published' OR (status = 'scheduled' AND scheduled_at <= datetime('now')))
            `.execute(db)
          ).rows;
        }
      } else if (tableName === "options") {
        // Filter options to safe rendering-only prefixes.
        // Excludes plugin secrets, passkey challenges, and setup state.
        rows = (
          await sql<Record<string, unknown>>`
            SELECT * FROM ${sql.raw(`"${tableName}"`)}
          `.execute(db)
        ).rows.filter((row) => {
          const name = typeof row.name === "string" ? row.name : "";
          return SAFE_OPTIONS_PREFIXES.some((prefix) => name.startsWith(prefix));
        });
      } else {
        // Other system tables are exported wholesale.
        rows = (
          await sql<Record<string, unknown>>`
            SELECT * FROM ${sql.raw(`"${tableName}"`)}
          `.execute(db)
        ).rows;
      }

      // Empty tables are omitted from `tables` (schema is still recorded).
      if (rows.length > 0) {
        tables[tableName] = rows;
      }
    } catch {
      // Table might not exist yet (e.g. pre-migration) — skip silently
    }
  }

  // Absolutize local media URLs in content tables so snapshots are portable.
  // Local image fields are stored as JSON with provider:"local" and
  // meta.storageKey but no src — the URL is derived at render time.
  // For snapshots consumed by external preview services, inject src now.
  if (options?.origin) {
    const origin = options.origin;
    for (const [tableName, rows] of Object.entries(tables)) {
      if (!tableName.startsWith("ec_")) continue;
      for (const row of rows) {
        for (const [col, value] of Object.entries(row)) {
          // Cheap pre-filter: only JSON-object-looking strings get parsed.
          if (typeof value !== "string" || !value.startsWith("{")) continue;
          row[col] = injectMediaSrc(value, origin);
        }
      }
    }
  }

  return {
    tables,
    schema,
    generatedAt: new Date().toISOString(),
  };
}
|
||||
523
packages/core/src/api/handlers/taxonomies.ts
Normal file
523
packages/core/src/api/handlers/taxonomies.ts
Normal file
@@ -0,0 +1,523 @@
|
||||
/**
|
||||
* Taxonomy and term CRUD handlers
|
||||
*/
|
||||
|
||||
import type { Kysely } from "kysely";
|
||||
import { ulid } from "ulidx";
|
||||
|
||||
import { TaxonomyRepository } from "../../database/repositories/taxonomy.js";
|
||||
import type { Database } from "../../database/types.js";
|
||||
import type { ApiResult } from "../types.js";
|
||||
|
||||
/** Taxonomy name validation pattern: lowercase alphanumeric + underscores, starts with letter */
const NAME_PATTERN = /^[a-z][a-z0-9_]*$/;

// ---------------------------------------------------------------------------
// Response types
// ---------------------------------------------------------------------------

/** A taxonomy definition, deserialized from _emdash_taxonomy_defs for API responses. */
export interface TaxonomyDef {
  id: string;
  // Machine name (matches NAME_PATTERN)
  name: string;
  // Display label
  label: string;
  // Singular display label when distinct from `label`
  labelSingular?: string;
  // Whether terms may nest under parent terms
  hierarchical: boolean;
  // Collection slugs this taxonomy is attached to
  collections: string[];
}

/** Response payload for handleTaxonomyList */
export interface TaxonomyListResponse {
  taxonomies: TaxonomyDef[];
}

/** A single term, without usage counts. */
export interface TermData {
  id: string;
  name: string;
  slug: string;
  label: string;
  // Parent term id, or null for root terms
  parentId: string | null;
  description?: string;
}

/** A term annotated with its content count and nested children (see buildTree). */
export interface TermWithCount extends TermData {
  count: number;
  children: TermWithCount[];
}

export interface TermListResponse {
  terms: TermWithCount[];
}

export interface TermResponse {
  term: TermData;
}

/** Single-term detail: count plus a shallow summary of direct children. */
export interface TermGetResponse {
  term: TermData & {
    count: number;
    children: Array<{ id: string; slug: string; label: string }>;
  };
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Build tree structure from flat terms
|
||||
*/
|
||||
function buildTree(flatTerms: TermWithCount[]): TermWithCount[] {
|
||||
const map = new Map<string, TermWithCount>();
|
||||
const roots: TermWithCount[] = [];
|
||||
|
||||
for (const term of flatTerms) {
|
||||
map.set(term.id, term);
|
||||
}
|
||||
|
||||
for (const term of flatTerms) {
|
||||
if (term.parentId && map.has(term.parentId)) {
|
||||
map.get(term.parentId)!.children.push(term);
|
||||
} else {
|
||||
roots.push(term);
|
||||
}
|
||||
}
|
||||
|
||||
return roots;
|
||||
}
|
||||
|
||||
/**
|
||||
* Look up a taxonomy definition by name, returning a NOT_FOUND error if missing.
|
||||
*/
|
||||
async function requireTaxonomyDef(
|
||||
db: Kysely<Database>,
|
||||
name: string,
|
||||
): Promise<
|
||||
| { success: true; def: { hierarchical: number } }
|
||||
| { success: false; error: { code: string; message: string } }
|
||||
> {
|
||||
const def = await db
|
||||
.selectFrom("_emdash_taxonomy_defs")
|
||||
.selectAll()
|
||||
.where("name", "=", name)
|
||||
.executeTakeFirst();
|
||||
|
||||
if (!def) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: `Taxonomy '${name}' not found` },
|
||||
};
|
||||
}
|
||||
|
||||
return { success: true, def };
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Handlers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* List all taxonomy definitions
|
||||
*/
|
||||
export async function handleTaxonomyList(
|
||||
db: Kysely<Database>,
|
||||
): Promise<ApiResult<TaxonomyListResponse>> {
|
||||
try {
|
||||
const rows = await db.selectFrom("_emdash_taxonomy_defs").selectAll().execute();
|
||||
|
||||
const taxonomies: TaxonomyDef[] = rows.map((row) => ({
|
||||
id: row.id,
|
||||
name: row.name,
|
||||
label: row.label,
|
||||
labelSingular: row.label_singular ?? undefined,
|
||||
hierarchical: row.hierarchical === 1,
|
||||
collections: row.collections ? JSON.parse(row.collections) : [],
|
||||
}));
|
||||
|
||||
return { success: true, data: { taxonomies } };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "TAXONOMY_LIST_ERROR", message: "Failed to list taxonomies" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Create a new taxonomy definition.
 *
 * Validates the name format, verifies that every referenced collection
 * exists, and rejects duplicate names. The duplicate pre-check is
 * check-then-insert, so a concurrent duplicate that slips past it is
 * caught via the UNIQUE-constraint error in the catch block.
 *
 * Note: `label_singular` is always persisted as null here, so the response
 * omits `labelSingular`.
 */
export async function handleTaxonomyCreate(
  db: Kysely<Database>,
  input: { name: string; label: string; hierarchical?: boolean; collections?: string[] },
): Promise<ApiResult<{ taxonomy: TaxonomyDef }>> {
  try {
    // Validate name format
    if (!NAME_PATTERN.test(input.name)) {
      return {
        success: false,
        error: {
          code: "VALIDATION_ERROR",
          message:
            "Taxonomy name must start with a letter and contain only lowercase letters, numbers, and underscores",
        },
      };
    }

    // De-duplicate collection slugs before validating/persisting
    const collections = [...new Set(input.collections ?? [])];

    // Validate that referenced collections exist
    if (collections.length > 0) {
      const existingCollections = await db
        .selectFrom("_emdash_collections")
        .select("slug")
        .where("slug", "in", collections)
        .execute();

      const existingSlugs = new Set(existingCollections.map((c) => c.slug));
      const invalid = collections.filter((c) => !existingSlugs.has(c));
      if (invalid.length > 0) {
        return {
          success: false,
          error: {
            code: "VALIDATION_ERROR",
            message: `Unknown collection(s): ${invalid.join(", ")}`,
          },
        };
      }
    }

    // Check for duplicate name
    const existing = await db
      .selectFrom("_emdash_taxonomy_defs")
      .selectAll()
      .where("name", "=", input.name)
      .executeTakeFirst();

    if (existing) {
      return {
        success: false,
        error: {
          code: "CONFLICT",
          message: `Taxonomy '${input.name}' already exists`,
        },
      };
    }

    const id = ulid();

    // Persist; hierarchical is stored as a 0/1 integer, collections as JSON text.
    await db
      .insertInto("_emdash_taxonomy_defs")
      .values({
        id,
        name: input.name,
        label: input.label,
        label_singular: null,
        hierarchical: input.hierarchical ? 1 : 0,
        collections: JSON.stringify(collections),
      })
      .execute();

    return {
      success: true,
      data: {
        taxonomy: {
          id,
          name: input.name,
          label: input.label,
          hierarchical: input.hierarchical ?? false,
          collections,
        },
      },
    };
  } catch (error) {
    // Handle UNIQUE constraint violation from concurrent duplicate inserts
    if (error instanceof Error && error.message.includes("UNIQUE constraint failed")) {
      return {
        success: false,
        error: {
          code: "CONFLICT",
          message: `Taxonomy '${input.name}' already exists`,
        },
      };
    }
    return {
      success: false,
      error: { code: "TAXONOMY_CREATE_ERROR", message: "Failed to create taxonomy" },
    };
  }
}
|
||||
|
||||
/**
|
||||
* List all terms for a taxonomy (returns tree for hierarchical taxonomies)
|
||||
*/
|
||||
export async function handleTermList(
|
||||
db: Kysely<Database>,
|
||||
taxonomyName: string,
|
||||
): Promise<ApiResult<TermListResponse>> {
|
||||
try {
|
||||
const lookup = await requireTaxonomyDef(db, taxonomyName);
|
||||
if (!lookup.success) return lookup;
|
||||
|
||||
const repo = new TaxonomyRepository(db);
|
||||
const terms = await repo.findByName(taxonomyName);
|
||||
|
||||
// Get counts for each term
|
||||
const counts = new Map<string, number>();
|
||||
for (const term of terms) {
|
||||
const count = await repo.countEntriesWithTerm(term.id);
|
||||
counts.set(term.id, count);
|
||||
}
|
||||
|
||||
const termData: TermWithCount[] = terms.map((term) => ({
|
||||
id: term.id,
|
||||
name: term.name,
|
||||
slug: term.slug,
|
||||
label: term.label,
|
||||
parentId: term.parentId,
|
||||
description: typeof term.data?.description === "string" ? term.data.description : undefined,
|
||||
children: [],
|
||||
count: counts.get(term.id) ?? 0,
|
||||
}));
|
||||
|
||||
const isHierarchical = lookup.def.hierarchical === 1;
|
||||
const result = isHierarchical ? buildTree(termData) : termData;
|
||||
|
||||
return { success: true, data: { terms: result } };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "TERM_LIST_ERROR", message: "Failed to list terms" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Create a new term in a taxonomy.
 *
 * Fails with NOT_FOUND if the taxonomy does not exist and CONFLICT if a
 * term with the same slug already exists in it.
 *
 * NOTE(review): `parentId` is passed through without checking that it
 * references an existing term in this taxonomy — confirm whether
 * repo.create enforces that, or orphaned parent references are possible.
 */
export async function handleTermCreate(
  db: Kysely<Database>,
  taxonomyName: string,
  input: { slug: string; label: string; parentId?: string | null; description?: string },
): Promise<ApiResult<TermResponse>> {
  try {
    const lookup = await requireTaxonomyDef(db, taxonomyName);
    if (!lookup.success) return lookup;

    const repo = new TaxonomyRepository(db);

    // Check for slug conflict
    const existing = await repo.findBySlug(taxonomyName, input.slug);
    if (existing) {
      return {
        success: false,
        error: {
          code: "CONFLICT",
          message: `Term with slug '${input.slug}' already exists in taxonomy '${taxonomyName}'`,
        },
      };
    }

    // Description is stored inside the term's data blob; a falsy (empty)
    // description results in no data blob at all.
    const term = await repo.create({
      name: taxonomyName,
      slug: input.slug,
      label: input.label,
      parentId: input.parentId ?? undefined,
      data: input.description ? { description: input.description } : undefined,
    });

    return {
      success: true,
      data: {
        term: {
          id: term.id,
          name: term.name,
          slug: term.slug,
          label: term.label,
          parentId: term.parentId,
          description:
            typeof term.data?.description === "string" ? term.data.description : undefined,
        },
      },
    };
  } catch {
    return {
      success: false,
      error: { code: "TERM_CREATE_ERROR", message: "Failed to create term" },
    };
  }
}
|
||||
|
||||
/**
|
||||
* Get a single term by slug
|
||||
*/
|
||||
export async function handleTermGet(
|
||||
db: Kysely<Database>,
|
||||
taxonomyName: string,
|
||||
termSlug: string,
|
||||
): Promise<ApiResult<TermGetResponse>> {
|
||||
try {
|
||||
const repo = new TaxonomyRepository(db);
|
||||
const term = await repo.findBySlug(taxonomyName, termSlug);
|
||||
|
||||
if (!term) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "NOT_FOUND",
|
||||
message: `Term '${termSlug}' not found in taxonomy '${taxonomyName}'`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const count = await repo.countEntriesWithTerm(term.id);
|
||||
const children = await repo.findChildren(term.id);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: {
|
||||
term: {
|
||||
id: term.id,
|
||||
name: term.name,
|
||||
slug: term.slug,
|
||||
label: term.label,
|
||||
parentId: term.parentId,
|
||||
description:
|
||||
typeof term.data?.description === "string" ? term.data.description : undefined,
|
||||
count,
|
||||
children: children.map((c) => ({
|
||||
id: c.id,
|
||||
slug: c.slug,
|
||||
label: c.label,
|
||||
})),
|
||||
},
|
||||
},
|
||||
};
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "TERM_GET_ERROR", message: "Failed to get term" },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Update a term (slug, label, parent, and/or description).
 *
 * Fails with NOT_FOUND if the term does not exist and CONFLICT if the new
 * slug is already taken by a different term in the same taxonomy.
 */
export async function handleTermUpdate(
  db: Kysely<Database>,
  taxonomyName: string,
  termSlug: string,
  input: { slug?: string; label?: string; parentId?: string | null; description?: string },
): Promise<ApiResult<TermResponse>> {
  try {
    const repo = new TaxonomyRepository(db);
    const term = await repo.findBySlug(taxonomyName, termSlug);

    if (!term) {
      return {
        success: false,
        error: {
          code: "NOT_FOUND",
          message: `Term '${termSlug}' not found in taxonomy '${taxonomyName}'`,
        },
      };
    }

    // Check if new slug conflicts
    if (input.slug && input.slug !== termSlug) {
      const existing = await repo.findBySlug(taxonomyName, input.slug);
      if (existing && existing.id !== term.id) {
        return {
          success: false,
          error: {
            code: "CONFLICT",
            message: `Term with slug '${input.slug}' already exists in taxonomy '${taxonomyName}'`,
          },
        };
      }
    }

    // NOTE(review): when a description is supplied the data blob is
    // replaced wholesale with { description } — if term.data can hold
    // other keys, they would be dropped. Confirm repo.update's merge
    // semantics.
    const updated = await repo.update(term.id, {
      slug: input.slug,
      label: input.label,
      parentId: input.parentId,
      data: input.description !== undefined ? { description: input.description } : undefined,
    });

    if (!updated) {
      return {
        success: false,
        error: { code: "TERM_UPDATE_ERROR", message: "Failed to update term" },
      };
    }

    return {
      success: true,
      data: {
        term: {
          id: updated.id,
          name: updated.name,
          slug: updated.slug,
          label: updated.label,
          parentId: updated.parentId,
          description:
            typeof updated.data?.description === "string" ? updated.data.description : undefined,
        },
      },
    };
  } catch {
    return {
      success: false,
      error: { code: "TERM_UPDATE_ERROR", message: "Failed to update term" },
    };
  }
}
|
||||
|
||||
/**
|
||||
* Delete a term
|
||||
*/
|
||||
export async function handleTermDelete(
|
||||
db: Kysely<Database>,
|
||||
taxonomyName: string,
|
||||
termSlug: string,
|
||||
): Promise<ApiResult<{ deleted: true }>> {
|
||||
try {
|
||||
const repo = new TaxonomyRepository(db);
|
||||
const term = await repo.findBySlug(taxonomyName, termSlug);
|
||||
|
||||
if (!term) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "NOT_FOUND",
|
||||
message: `Term '${termSlug}' not found in taxonomy '${taxonomyName}'`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// Prevent deletion of terms with children
|
||||
const children = await repo.findChildren(term.id);
|
||||
if (children.length > 0) {
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
code: "VALIDATION_ERROR",
|
||||
message: "Cannot delete term with children. Delete children first.",
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const deleted = await repo.delete(term.id);
|
||||
if (!deleted) {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "TERM_DELETE_ERROR", message: "Failed to delete term" },
|
||||
};
|
||||
}
|
||||
|
||||
return { success: true, data: { deleted: true } };
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
error: { code: "TERM_DELETE_ERROR", message: "Failed to delete term" },
|
||||
};
|
||||
}
|
||||
}
|
||||
6
packages/core/src/api/index.ts
Normal file
6
packages/core/src/api/index.ts
Normal file
@@ -0,0 +1,6 @@
|
||||
export * from "./types.js";
|
||||
export * from "./handlers/index.js";
|
||||
export * from "./parse.js";
|
||||
export * from "./schemas/index.js";
|
||||
export * from "./error.js";
|
||||
export * from "./errors.js";
|
||||
2368
packages/core/src/api/openapi/document.ts
Normal file
2368
packages/core/src/api/openapi/document.ts
Normal file
File diff suppressed because it is too large
Load Diff
1
packages/core/src/api/openapi/index.ts
Normal file
1
packages/core/src/api/openapi/index.ts
Normal file
@@ -0,0 +1 @@
|
||||
export { generateOpenApiDocument } from "./document.js";
|
||||
139
packages/core/src/api/parse.ts
Normal file
139
packages/core/src/api/parse.ts
Normal file
@@ -0,0 +1,139 @@
|
||||
/**
|
||||
* Request body and query parameter parsing with Zod validation.
|
||||
*
|
||||
* All API routes should use these utilities instead of `request.json() as T`
|
||||
* or raw `url.searchParams.get()` with manual coercion.
|
||||
*/
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { apiError } from "./error.js";
|
||||
|
||||
/** Maximum allowed JSON request body size (10 MB). */
const MAX_BODY_SIZE = 10 * 1024 * 1024;

/**
 * Result of parsing: either the validated data or an error Response.
 * Routes should check `if (result instanceof Response) return result;`
 *
 * NOTE(review): the discrimination relies on no schema ever validating to
 * a Response instance — presumably true for all current schemas; verify
 * if a schema of that shape is ever added.
 */
export type ParseResult<T> = T | Response;
|
||||
|
||||
/**
|
||||
* Parse and validate a JSON request body against a Zod schema.
|
||||
*
|
||||
* Returns the validated data on success, or a 400 Response on failure.
|
||||
* Replaces all `(await request.json()) as T` casts.
|
||||
*/
|
||||
export async function parseBody<T extends z.ZodType>(
|
||||
request: Request,
|
||||
schema: T,
|
||||
): Promise<ParseResult<z.infer<T>>> {
|
||||
// Best-effort size check via Content-Length (can be absent with chunked encoding)
|
||||
const contentLength = request.headers.get("Content-Length");
|
||||
if (contentLength && parseInt(contentLength, 10) > MAX_BODY_SIZE) {
|
||||
return apiError("PAYLOAD_TOO_LARGE", "Request body too large", 413);
|
||||
}
|
||||
|
||||
let raw: unknown;
|
||||
try {
|
||||
raw = await request.json();
|
||||
} catch {
|
||||
return apiError("INVALID_JSON", "Request body must be valid JSON", 400);
|
||||
}
|
||||
|
||||
return validate(schema, raw);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse and validate an optional JSON request body.
|
||||
*
|
||||
* Returns `defaultValue` if the body is empty, or the validated data if present.
|
||||
* For endpoints where the body is optional (e.g., preview-url, confirm).
|
||||
*/
|
||||
export async function parseOptionalBody<T extends z.ZodType>(
|
||||
request: Request,
|
||||
schema: T,
|
||||
defaultValue: z.infer<T>,
|
||||
): Promise<ParseResult<z.infer<T>>> {
|
||||
// Best-effort size check via Content-Length (can be absent with chunked encoding)
|
||||
const contentLength = request.headers.get("Content-Length");
|
||||
if (contentLength && parseInt(contentLength, 10) > MAX_BODY_SIZE) {
|
||||
return apiError("PAYLOAD_TOO_LARGE", "Request body too large", 413);
|
||||
}
|
||||
|
||||
let text: string;
|
||||
try {
|
||||
text = await request.text();
|
||||
} catch {
|
||||
return defaultValue;
|
||||
}
|
||||
|
||||
if (!text.trim()) {
|
||||
return defaultValue;
|
||||
}
|
||||
|
||||
let raw: unknown;
|
||||
try {
|
||||
raw = JSON.parse(text);
|
||||
} catch {
|
||||
return apiError("INVALID_JSON", "Request body must be valid JSON", 400);
|
||||
}
|
||||
|
||||
return validate(schema, raw);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse and validate URL search params against a Zod schema.
|
||||
*
|
||||
* Converts searchParams to a plain object before validation.
|
||||
* Zod coercion handles string -> number/boolean conversion.
|
||||
* Replaces manual `url.searchParams.get()` + `parseInt()` patterns.
|
||||
*/
|
||||
export function parseQuery<T extends z.ZodType>(url: URL, schema: T): ParseResult<z.infer<T>> {
|
||||
const raw: Record<string, string> = {};
|
||||
for (const [key, value] of url.searchParams) {
|
||||
raw[key] = value;
|
||||
}
|
||||
return validate(schema, raw);
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate raw data against a schema. Returns data or error Response.
|
||||
*/
|
||||
function validate<T extends z.ZodType>(schema: T, data: unknown): ParseResult<z.infer<T>> {
|
||||
const result = schema.safeParse(data);
|
||||
|
||||
if (result.success) {
|
||||
return result.data as z.infer<T>;
|
||||
}
|
||||
|
||||
// Format Zod errors into a readable structure
|
||||
const issues = result.error.issues.map((issue: z.ZodIssue) => ({
|
||||
path: issue.path.join("."),
|
||||
message: issue.message,
|
||||
}));
|
||||
|
||||
return Response.json(
|
||||
{
|
||||
error: {
|
||||
code: "VALIDATION_ERROR",
|
||||
message: "Invalid request data",
|
||||
details: { issues },
|
||||
},
|
||||
},
|
||||
{
|
||||
status: 400,
|
||||
headers: {
|
||||
"Cache-Control": "private, no-store",
|
||||
},
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Type guard to check if a ParseResult is an error Response.
|
||||
* Usage: `if (isParseError(result)) return result;`
|
||||
*/
|
||||
export function isParseError<T>(result: ParseResult<T>): result is Response {
|
||||
return result instanceof Response;
|
||||
}
|
||||
14
packages/core/src/api/redirect.ts
Normal file
14
packages/core/src/api/redirect.ts
Normal file
@@ -0,0 +1,14 @@
|
||||
/**
|
||||
* Validate that a redirect URL is a safe local path.
|
||||
*
|
||||
* Rejects:
|
||||
* - Protocol-relative URLs (`//evil.com`)
|
||||
* - Backslash bypass (`/\evil.com` — browsers normalize `\` to `/` in Location headers)
|
||||
* - Absolute URLs (`https://evil.com`)
|
||||
* - Empty / nullish values
|
||||
*/
|
||||
export function isSafeRedirect(url: string | null | undefined): url is string {
|
||||
return (
|
||||
typeof url === "string" && url.startsWith("/") && !url.startsWith("//") && !url.includes("\\")
|
||||
);
|
||||
}
|
||||
67
packages/core/src/api/rev.ts
Normal file
67
packages/core/src/api/rev.ts
Normal file
@@ -0,0 +1,67 @@
|
||||
/**
|
||||
* Opaque _rev token generation and validation.
|
||||
*
|
||||
* Format: base64("version:updated_at")
|
||||
* Stateless — server decodes and checks both components.
|
||||
*
|
||||
* Rules:
|
||||
* - No _rev sent → blind write (backwards-compatible)
|
||||
* - _rev matches → write proceeds, new _rev returned
|
||||
* - _rev mismatch → 409 Conflict
|
||||
*/
|
||||
|
||||
import type { ContentItem } from "../database/repositories/types.js";
|
||||
import { encodeBase64, decodeBase64 } from "../utils/base64.js";
|
||||
|
||||
/**
 * Generate a _rev token from a content item's version and updatedAt.
 *
 * The token is base64("version:updated_at") — opaque to clients and
 * decoded server-side by decodeRev/validateRev.
 */
export function encodeRev(item: ContentItem): string {
  return encodeBase64(`${item.version}:${item.updatedAt}`);
}
|
||||
|
||||
/**
|
||||
* Decode a _rev token into its components.
|
||||
* Returns null if the token is malformed.
|
||||
*/
|
||||
export function decodeRev(rev: string): { version: number; updatedAt: string } | null {
|
||||
try {
|
||||
const decoded = decodeBase64(rev);
|
||||
const colonIdx = decoded.indexOf(":");
|
||||
if (colonIdx === -1) return null;
|
||||
|
||||
const version = parseInt(decoded.slice(0, colonIdx), 10);
|
||||
const updatedAt = decoded.slice(colonIdx + 1);
|
||||
|
||||
if (isNaN(version) || !updatedAt) return null;
|
||||
return { version, updatedAt };
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate a _rev token against a content item.
|
||||
* Returns null if valid (or if no _rev provided), or an error message if invalid.
|
||||
*/
|
||||
export function validateRev(
|
||||
rev: string | undefined,
|
||||
item: ContentItem,
|
||||
): { valid: true } | { valid: false; message: string } {
|
||||
// No _rev = blind write (backwards-compatible)
|
||||
if (!rev) return { valid: true };
|
||||
|
||||
const decoded = decodeRev(rev);
|
||||
if (!decoded) {
|
||||
return { valid: false, message: "Malformed _rev token" };
|
||||
}
|
||||
|
||||
if (decoded.version !== item.version || decoded.updatedAt !== item.updatedAt) {
|
||||
return {
|
||||
valid: false,
|
||||
message: "Content has been modified since last read (version conflict)",
|
||||
};
|
||||
}
|
||||
|
||||
return { valid: true };
|
||||
}
|
||||
112
packages/core/src/api/schemas/auth.ts
Normal file
112
packages/core/src/api/schemas/auth.ts
Normal file
@@ -0,0 +1,112 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { roleLevel } from "./common.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// WebAuthn credential schemas (matching @emdashcms/auth/passkey types)
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** Transport hints a WebAuthn authenticator may advertise. */
const authenticatorTransport = z.enum(["usb", "nfc", "ble", "internal", "hybrid"]);

/** RegistrationResponse — sent by the browser after navigator.credentials.create() */
const registrationCredential = z.object({
  id: z.string(),
  rawId: z.string(),
  type: z.literal("public-key"),
  response: z.object({
    clientDataJSON: z.string(),
    attestationObject: z.string(),
    transports: z.array(authenticatorTransport).optional(),
  }),
  authenticatorAttachment: z.enum(["platform", "cross-platform"]).optional(),
});

/** AuthenticationResponse — sent by the browser after navigator.credentials.get() */
const authenticationCredential = z.object({
  id: z.string(),
  rawId: z.string(),
  type: z.literal("public-key"),
  response: z.object({
    clientDataJSON: z.string(),
    authenticatorData: z.string(),
    signature: z.string(),
    userHandle: z.string().optional(),
  }),
  authenticatorAttachment: z.enum(["platform", "cross-platform"]).optional(),
});

// ---------------------------------------------------------------------------
// Auth: Input schemas
// ---------------------------------------------------------------------------

/** Begin signup: just an email address. */
export const signupRequestBody = z
  .object({
    email: z.string().email(),
  })
  .meta({ id: "SignupRequestBody" });

/** Complete signup with the emailed token and a newly created passkey. */
export const signupCompleteBody = z
  .object({
    token: z.string().min(1),
    credential: registrationCredential,
    name: z.string().optional(),
  })
  .meta({ id: "SignupCompleteBody" });

/** Create an invite; role defaults server-side when omitted. */
export const inviteCreateBody = z
  .object({
    email: z.string().email(),
    role: roleLevel.optional(),
  })
  .meta({ id: "InviteCreateBody" });

/** Accept an invite with its token and a newly created passkey. */
export const inviteCompleteBody = z
  .object({
    token: z.string().min(1),
    credential: registrationCredential,
    name: z.string().optional(),
  })
  .meta({ id: "InviteCompleteBody" });

/** Request a magic sign-in link. */
export const magicLinkSendBody = z
  .object({
    email: z.string().email(),
  })
  .meta({ id: "MagicLinkSendBody" });

/** Request passkey authentication options; email is optional (discoverable credentials). */
export const passkeyOptionsBody = z
  .object({
    email: z.string().email().optional(),
  })
  .meta({ id: "PasskeyOptionsBody" });

/** Verify a passkey authentication assertion. */
export const passkeyVerifyBody = z
  .object({
    credential: authenticationCredential,
  })
  .meta({ id: "PasskeyVerifyBody" });

/** Request registration options for adding a new passkey. */
export const passkeyRegisterOptionsBody = z
  .object({
    name: z.string().optional(),
  })
  .meta({ id: "PasskeyRegisterOptionsBody" });

/** Verify a passkey registration attestation. */
export const passkeyRegisterVerifyBody = z
  .object({
    credential: registrationCredential,
    name: z.string().optional(),
  })
  .meta({ id: "PasskeyRegisterVerifyBody" });

/** Rename an existing passkey. */
export const passkeyRenameBody = z
  .object({
    name: z.string().min(1),
  })
  .meta({ id: "PasskeyRenameBody" });

/** Generic action envelope for POST /auth/me — the action string is dispatched server-side. */
export const authMeActionBody = z
  .object({
    action: z.string().min(1),
  })
  .meta({ id: "AuthMeActionBody" });
|
||||
85
packages/core/src/api/schemas/bylines.ts
Normal file
85
packages/core/src/api/schemas/bylines.ts
Normal file
@@ -0,0 +1,85 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { cursorPaginationQuery, httpUrl } from "./common.js";
|
||||
|
||||
/** Slug pattern: lowercase letters, digits, and hyphens; must start with a letter */
const bylineSlugPattern = /^[a-z][a-z0-9-]*$/;

/** A byline (author credit identity), as returned by the API. */
export const bylineSummarySchema = z
  .object({
    id: z.string(),
    slug: z.string(),
    displayName: z.string(),
    bio: z.string().nullable(),
    avatarMediaId: z.string().nullable(),
    websiteUrl: z.string().nullable(),
    userId: z.string().nullable(),
    isGuest: z.boolean(),
    createdAt: z.string(),
    updatedAt: z.string(),
  })
  .meta({ id: "BylineSummary" });

/** A byline attached to a piece of content, with ordering and optional role. */
export const bylineCreditSchema = z
  .object({
    byline: bylineSummarySchema,
    sortOrder: z.number().int(),
    roleLabel: z.string().nullable(),
    source: z.enum(["explicit", "inferred"]).optional().meta({
      description: "Whether this credit was explicitly assigned or inferred from authorId",
    }),
  })
  .meta({ id: "BylineCredit" });

/** Input shape for assigning a byline to content. */
export const contentBylineInputSchema = z
  .object({
    bylineId: z.string().min(1),
    roleLabel: z.string().nullish(),
  })
  .meta({ id: "ContentBylineInput" });

/**
 * Query params for listing bylines (cursor pagination + filters).
 *
 * NOTE(review): `z.coerce.boolean()` coerces any non-empty string —
 * including "false" — to true; confirm `?isGuest=false` behaves as
 * callers expect.
 */
export const bylinesListQuery = cursorPaginationQuery
  .extend({
    search: z.string().optional(),
    isGuest: z.coerce.boolean().optional(),
    userId: z.string().optional(),
  })
  .meta({ id: "BylinesListQuery" });

/** Body for creating a byline; slug format is enforced here. */
export const bylineCreateBody = z
  .object({
    slug: z
      .string()
      .min(1)
      .regex(bylineSlugPattern, "Slug must contain only lowercase letters, digits, and hyphens"),
    displayName: z.string().min(1),
    bio: z.string().nullish(),
    avatarMediaId: z.string().nullish(),
    websiteUrl: httpUrl.nullish(),
    userId: z.string().nullish(),
    isGuest: z.boolean().optional(),
  })
  .meta({ id: "BylineCreateBody" });

/** Body for updating a byline; every field is optional (partial update). */
export const bylineUpdateBody = z
  .object({
    slug: z
      .string()
      .min(1)
      .regex(bylineSlugPattern, "Slug must contain only lowercase letters, digits, and hyphens")
      .optional(),
    displayName: z.string().min(1).optional(),
    bio: z.string().nullish(),
    avatarMediaId: z.string().nullish(),
    websiteUrl: httpUrl.nullish(),
    userId: z.string().nullish(),
    isGuest: z.boolean().optional(),
  })
  .meta({ id: "BylineUpdateBody" });

/** Cursor-paginated byline list response. */
export const bylineListResponseSchema = z
  .object({
    items: z.array(bylineSummarySchema),
    nextCursor: z.string().optional(),
  })
  .meta({ id: "BylineListResponse" });
|
||||
117
packages/core/src/api/schemas/comments.ts
Normal file
117
packages/core/src/api/schemas/comments.ts
Normal file
@@ -0,0 +1,117 @@
|
||||
import { z } from "zod";
|
||||
|
||||
// ---------------------------------------------------------------------------
// Comments: Input schemas
// ---------------------------------------------------------------------------

/** Body for submitting a public comment. */
export const createCommentBody = z
  .object({
    authorName: z.string().min(1).max(100),
    authorEmail: z.string().email(),
    body: z.string().min(1).max(5000),
    parentId: z.string().optional(),
    /** Honeypot field — hidden in the form, filled only by bots */
    website_url: z.string().optional(),
  })
  .meta({ id: "CreateCommentBody" });

/** Body for moderating a single comment's status. */
export const commentStatusBody = z
  .object({
    status: z.enum(["approved", "pending", "spam", "trash"]),
  })
  .meta({ id: "CommentStatusBody" });

/** Body for bulk moderation (1-100 comment ids per request). */
export const commentBulkBody = z
  .object({
    ids: z.array(z.string().min(1)).min(1).max(100),
    action: z.enum(["approve", "spam", "trash", "delete"]),
  })
  .meta({ id: "CommentBulkBody" });

/** Query params for the admin comment list. */
export const commentListQuery = z
  .object({
    status: z.enum(["pending", "approved", "spam", "trash"]).optional(),
    collection: z.string().optional(),
    search: z.string().optional(),
    limit: z.coerce.number().int().min(1).max(100).optional(),
    cursor: z.string().optional(),
  })
  .meta({ id: "CommentListQuery" });

// ---------------------------------------------------------------------------
// Comments: Response schemas
// ---------------------------------------------------------------------------

const commentStatusValues = z.enum(["pending", "approved", "spam", "trash"]);

/**
 * Public-facing comment (no email/IP).
 *
 * `replies` is recursive in practice (each reply can have replies), but we
 * model it as a single level here to avoid circular type inference issues
 * with tsgo. OpenAPI consumers should treat replies as the same shape.
 */
export const publicCommentSchema: z.ZodObject<{
  id: z.ZodString;
  authorName: z.ZodString;
  isRegisteredUser: z.ZodBoolean;
  body: z.ZodString;
  parentId: z.ZodNullable<z.ZodString>;
  createdAt: z.ZodString;
  replies: z.ZodOptional<z.ZodArray<z.ZodAny>>;
}> = z
  .object({
    id: z.string(),
    authorName: z.string(),
    isRegisteredUser: z.boolean(),
    body: z.string(),
    parentId: z.string().nullable(),
    createdAt: z.string(),
    replies: z.array(z.any()).optional(),
  })
  .meta({ id: "PublicComment" });

/** Admin comment with full details */
export const commentSchema = z
  .object({
    id: z.string(),
    collection: z.string(),
    contentId: z.string(),
    authorName: z.string(),
    authorEmail: z.string(),
    body: z.string(),
    status: commentStatusValues,
    parentId: z.string().nullable(),
    // Hashed (not raw) submitter IP; null when unavailable.
    ipHash: z.string().nullable(),
    createdAt: z.string(),
    updatedAt: z.string(),
  })
  .meta({ id: "Comment" });

/** Cursor-paginated public comment list, with a total count. */
export const publicCommentListResponseSchema = z
  .object({
    items: z.array(publicCommentSchema),
    nextCursor: z.string().optional(),
    total: z.number().int(),
  })
  .meta({ id: "PublicCommentListResponse" });

/** Cursor-paginated admin comment list. */
export const adminCommentListResponseSchema = z
  .object({
    items: z.array(commentSchema),
    nextCursor: z.string().optional(),
  })
  .meta({ id: "AdminCommentListResponse" });

/** Per-status comment counts for the moderation UI. */
export const commentCountsResponseSchema = z
  .object({
    pending: z.number().int(),
    approved: z.number().int(),
    spam: z.number().int(),
    trash: z.number().int(),
  })
  .meta({ id: "CommentCountsResponse" });

/** Result of a bulk moderation action: number of comments affected. */
export const commentBulkResponseSchema = z
  .object({ affected: z.number().int() })
  .meta({ id: "CommentBulkResponse" });
|
||||
89
packages/core/src/api/schemas/common.ts
Normal file
89
packages/core/src/api/schemas/common.ts
Normal file
@@ -0,0 +1,89 @@
|
||||
import { z } from "zod";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Role level
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** Valid role level values */
export const VALID_ROLE_LEVELS = new Set([10, 20, 30, 40, 50]);

/** Role level — coerces string/number to valid RoleLevel (10|20|30|40|50) */
export const roleLevel = z.coerce
  .number()
  .int()
  // The type predicate narrows the output to the literal union, so consumers
  // get RoleLevel instead of plain number after parsing.
  .refine((n): n is 10 | 20 | 30 | 40 | 50 => VALID_ROLE_LEVELS.has(n), {
    message: "Invalid role level. Must be 10, 20, 30, 40, or 50",
  });

// ---------------------------------------------------------------------------
// Pagination
// ---------------------------------------------------------------------------

/** Pagination query params — cursor-based */
export const cursorPaginationQuery = z
  .object({
    cursor: z.string().optional().meta({ description: "Opaque cursor for pagination" }),
    // z.coerce handles the string values that arrive from query strings.
    limit: z.coerce.number().int().min(1).max(100).optional().default(50).meta({
      description: "Maximum number of items to return (1-100, default 50)",
    }),
  })
  .meta({ id: "CursorPaginationQuery" });

/** Pagination query params — offset-based */
export const offsetPaginationQuery = z
  .object({
    limit: z.coerce.number().int().min(1).max(100).optional().default(50),
    offset: z.coerce.number().int().min(0).optional().default(0),
  })
  .meta({ id: "OffsetPaginationQuery" });

// ---------------------------------------------------------------------------
// Shared primitives
// ---------------------------------------------------------------------------

/** Slug pattern: lowercase letters, digits, underscores; starts with letter */
export const slugPattern = /^[a-z][a-z0-9_]*$/;

/** Matches http(s) scheme at start of URL */
const HTTP_SCHEME_RE = /^https?:\/\//i;

/** Validates that a URL string uses http or https scheme. Rejects javascript:/data: URI XSS vectors. */
export const httpUrl = z
  .string()
  .url()
  .refine((url) => HTTP_SCHEME_RE.test(url), "URL must use http or https");

/** BCP 47 locale code — language with optional script/region subtags (e.g. en, en-US, pt-BR, es-419, zh-Hant) */
export const localeCode = z
  .string()
  .regex(/^[a-z]{2,3}(-[a-z0-9]{2,8})*$/i, "Invalid locale code")
  // Normalizes to lowercase for case-insensitive comparisons.
  // NOTE(review): this drops BCP 47 canonical casing (en-US → en-us) — confirm
  // downstream consumers expect lowercased codes.
  .transform((v) => v.toLowerCase());

// ---------------------------------------------------------------------------
// OpenAPI: Shared response schemas
// ---------------------------------------------------------------------------

/** Standard API error response */
export const apiErrorSchema = z
  .object({
    error: z.object({
      code: z.string().meta({ description: "Machine-readable error code", example: "NOT_FOUND" }),
      message: z.string().meta({ description: "Human-readable error message" }),
    }),
  })
  .meta({ id: "ApiError" });

/** Wrap a data schema in the standard success envelope: { data: T } */
export function successEnvelope<T extends z.ZodType>(dataSchema: T) {
  return z.object({ data: dataSchema });
}

/** Standard delete response */
export const deleteResponseSchema = z.object({ deleted: z.literal(true) }).meta({
  id: "DeleteResponse",
});

/** Standard count response */
export const countResponseSchema = z
  .object({ count: z.number().int().min(0) })
  .meta({ id: "CountResponse" });
|
||||
191
packages/core/src/api/schemas/content.ts
Normal file
191
packages/core/src/api/schemas/content.ts
Normal file
@@ -0,0 +1,191 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { bylineSummarySchema, bylineCreditSchema, contentBylineInputSchema } from "./bylines.js";
|
||||
import { cursorPaginationQuery, httpUrl, localeCode } from "./common.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Content: Input schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** SEO input — per-content meta fields */
export const contentSeoInput = z
  .object({
    // nullish (null | undefined) lets callers explicitly clear a field with null.
    title: z.string().max(200).nullish(),
    description: z.string().max(500).nullish(),
    // Image reference — stored as an opaque string here (media id or URL; not validated).
    image: z.string().nullish(),
    // Canonical URL must be http(s) when provided.
    canonical: httpUrl.nullish(),
    noIndex: z.boolean().optional(),
  })
  .meta({ id: "ContentSeoInput" });

/** Query params for listing content — pagination plus filtering/sorting. */
export const contentListQuery = cursorPaginationQuery
  .extend({
    status: z.string().optional(),
    orderBy: z.string().optional(),
    order: z.enum(["asc", "desc"]).optional(),
    locale: localeCode.optional(),
  })
  .meta({ id: "ContentListQuery" });

/** Body for creating a content item. */
export const contentCreateBody = z
  .object({
    // User-defined field values; shape is validated elsewhere against the collection schema.
    data: z.record(z.string(), z.unknown()),
    slug: z.string().nullish(),
    status: z.string().optional(),
    bylines: z.array(contentBylineInputSchema).optional(),
    locale: localeCode.optional(),
    // Id of an existing item this one translates — presumably links into its
    // translation group; verify against the handler.
    translationOf: z.string().optional(),
    seo: contentSeoInput.optional(),
  })
  .meta({ id: "ContentCreateBody" });

/** Body for updating a content item — all fields optional (partial update). */
export const contentUpdateBody = z
  .object({
    data: z.record(z.string(), z.unknown()).optional(),
    slug: z.string().nullish(),
    status: z.string().optional(),
    // null clears the author; undefined leaves it unchanged.
    authorId: z.string().nullish(),
    bylines: z.array(contentBylineInputSchema).optional(),
    _rev: z
      .string()
      .optional()
      .meta({ description: "Opaque revision token for optimistic concurrency" }),
    // When true, the update presumably skips creating a revision record — confirm in handler.
    skipRevision: z.boolean().optional(),
    seo: contentSeoInput.optional(),
  })
  .meta({ id: "ContentUpdateBody" });
|
||||
|
||||
export const contentScheduleBody = z
|
||||
.object({
|
||||
scheduledAt: z.string().min(1, "scheduledAt is required").meta({
|
||||
description: "ISO 8601 datetime for scheduled publishing",
|
||||
example: "2025-06-15T09:00:00Z",
|
||||
}),
|
||||
})
|
||||
.meta({ id: "ContentScheduleBody" });
|
||||
|
||||
/** Body for generating a signed preview URL for draft content. */
export const contentPreviewUrlBody = z
  .object({
    // Accepts either a duration string or a number — units/format are decided
    // by the handler; not validated here.
    expiresIn: z.union([z.string(), z.number()]).optional(),
    pathPattern: z.string().optional(),
  })
  .meta({ id: "ContentPreviewUrlBody" });

/** Body for replacing the taxonomy terms attached to a content item. */
export const contentTermsBody = z
  .object({
    termIds: z.array(z.string()),
  })
  .meta({ id: "ContentTermsBody" });

/** Trash listing uses plain cursor pagination with no extra filters. */
export const contentTrashQuery = cursorPaginationQuery;
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Content: Response schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** SEO metadata on a content item */
export const contentSeoSchema = z
  .object({
    title: z.string().nullable(),
    description: z.string().nullable(),
    image: z.string().nullable(),
    canonical: z.string().nullable(),
    noIndex: z.boolean(),
  })
  .meta({ id: "ContentSeo" });

/** A single content item as returned by the API */
export const contentItemSchema = z
  .object({
    id: z.string(),
    type: z.string().meta({ description: "Collection slug this item belongs to" }),
    slug: z.string().nullable(),
    status: z.string().meta({ description: "draft, published, or scheduled" }),
    data: z.record(z.string(), z.unknown()).meta({
      description: "User-defined field values",
    }),
    authorId: z.string().nullable(),
    primaryBylineId: z.string().nullable(),
    // byline / bylines are only populated when the endpoint expands them.
    byline: bylineSummarySchema.nullable().optional(),
    bylines: z.array(bylineCreditSchema).optional(),
    createdAt: z.string(),
    updatedAt: z.string(),
    publishedAt: z.string().nullable(),
    scheduledAt: z.string().nullable(),
    // Revision pointers for the published and working copies.
    liveRevisionId: z.string().nullable(),
    draftRevisionId: z.string().nullable(),
    version: z.number().int(),
    locale: z.string().nullable(),
    // Items sharing a translationGroup are translations of each other.
    translationGroup: z.string().nullable(),
    seo: contentSeoSchema.optional(),
  })
  .meta({ id: "ContentItem" });

/** Response for single content item endpoints (get, create, update) */
export const contentResponseSchema = z
  .object({
    item: contentItemSchema,
    _rev: z
      .string()
      .optional()
      .meta({ description: "Opaque revision token for optimistic concurrency" }),
  })
  .meta({ id: "ContentResponse" });

/** Response for content list endpoints */
export const contentListResponseSchema = z
  .object({
    items: z.array(contentItemSchema),
    // Present only when more pages exist.
    nextCursor: z.string().optional(),
  })
  .meta({ id: "ContentListResponse" });

/** Trashed content item — a reduced ContentItem plus its deletion timestamp. */
export const trashedContentItemSchema = z
  .object({
    id: z.string(),
    type: z.string(),
    slug: z.string().nullable(),
    status: z.string(),
    data: z.record(z.string(), z.unknown()),
    authorId: z.string().nullable(),
    createdAt: z.string(),
    updatedAt: z.string(),
    publishedAt: z.string().nullable(),
    // Non-null by construction: only soft-deleted rows appear here.
    deletedAt: z.string(),
  })
  .meta({ id: "TrashedContentItem" });

/** Response for trashed content list */
export const trashedContentListResponseSchema = z
  .object({
    items: z.array(trashedContentItemSchema),
    nextCursor: z.string().optional(),
  })
  .meta({ id: "TrashedContentListResponse" });

/** Response for content compare (live vs draft) */
export const contentCompareResponseSchema = z
  .object({
    hasChanges: z.boolean(),
    // Either side may be null (e.g. never published, or no draft).
    live: z.record(z.string(), z.unknown()).nullable(),
    draft: z.record(z.string(), z.unknown()).nullable(),
  })
  .meta({ id: "ContentCompareResponse" });

/** Translation summary for a content item */
export const contentTranslationSchema = z.object({
  id: z.string(),
  locale: z.string().nullable(),
  slug: z.string().nullable(),
  status: z.string(),
  updatedAt: z.string(),
});

/** Response for content translations endpoint */
export const contentTranslationsResponseSchema = z
  .object({
    translationGroup: z.string(),
    translations: z.array(contentTranslationSchema),
  })
  .meta({ id: "ContentTranslationsResponse" });
|
||||
52
packages/core/src/api/schemas/import.ts
Normal file
52
packages/core/src/api/schemas/import.ts
Normal file
@@ -0,0 +1,52 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { httpUrl } from "./common.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Import
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** Probe a remote site (e.g. a WordPress install) before importing from it. */
export const importProbeBody = z.object({
  // httpUrl rejects non-http(s) schemes — see common.ts.
  url: httpUrl,
});

/** Analyze a WordPress site via its companion plugin; token authenticates the plugin call. */
export const wpPluginAnalyzeBody = z.object({
  url: httpUrl,
  token: z.string().min(1),
});

/** Execute a WordPress plugin import with a previously-built config. */
export const wpPluginExecuteBody = z.object({
  url: httpUrl,
  token: z.string().min(1),
  // Import configuration — opaque here, interpreted by the import handler.
  config: z.record(z.string(), z.unknown()),
});

/**
 * Prepare a WordPress import: map WP post types onto local collections,
 * optionally with explicit field definitions per post type.
 */
export const wpPrepareBody = z.object({
  postTypes: z.array(
    z.object({
      // WP post type name and the target collection slug it maps to.
      name: z.string().min(1),
      collection: z.string().min(1),
      fields: z
        .array(
          z.object({
            slug: z.string().min(1),
            label: z.string().min(1),
            type: z.string().min(1),
            required: z.boolean(),
            searchable: z.boolean().optional(),
          }),
        )
        .optional(),
    }),
  ),
});

/** Import WordPress media attachments; `stream` presumably toggles streamed progress — confirm in handler. */
export const wpMediaImportBody = z.object({
  // Raw WP attachment objects — shape is validated downstream.
  attachments: z.array(z.record(z.string(), z.unknown())),
  stream: z.boolean().optional(),
});

/** Rewrite old URLs inside imported content using an old→new URL map. */
export const wpRewriteUrlsBody = z.object({
  urlMap: z.record(z.string(), z.string()),
  // When omitted, presumably all collections are rewritten — confirm in handler.
  collections: z.array(z.string()).optional(),
});
|
||||
17
packages/core/src/api/schemas/index.ts
Normal file
17
packages/core/src/api/schemas/index.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
// Barrel module re-exporting every API schema module. With `export *`,
// identically-named symbols in two modules would be silently dropped, so
// schema names must stay unique across these files.
export * from "./common.js";
export * from "./content.js";
export * from "./media.js";
export * from "./schema.js";
export * from "./comments.js";
export * from "./auth.js";
export * from "./menus.js";
export * from "./taxonomies.js";
export * from "./sections.js";
export * from "./settings.js";
export * from "./search.js";
export * from "./import.js";
export * from "./setup.js";
export * from "./users.js";
export * from "./widgets.js";
export * from "./redirects.js";
export * from "./bylines.js";
|
||||
116
packages/core/src/api/schemas/media.ts
Normal file
116
packages/core/src/api/schemas/media.ts
Normal file
@@ -0,0 +1,116 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { cursorPaginationQuery } from "./common.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Media: Input schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** Query params for listing media — pagination plus an optional MIME-type filter. */
export const mediaListQuery = cursorPaginationQuery
  .extend({
    mimeType: z.string().optional(),
  })
  .meta({ id: "MediaListQuery" });

/** Body for updating media metadata (all fields optional). */
export const mediaUpdateBody = z
  .object({
    alt: z.string().optional(),
    caption: z.string().optional(),
    width: z.number().int().positive().optional(),
    height: z.number().int().positive().optional(),
  })
  .meta({ id: "MediaUpdateBody" });

/** Maximum allowed file upload size (50 MB). */
const MAX_UPLOAD_SIZE = 50 * 1024 * 1024;

/** Body for requesting a presigned upload URL. */
export const mediaUploadUrlBody = z
  .object({
    filename: z.string().min(1, "filename is required"),
    contentType: z.string().min(1, "contentType is required"),
    size: z
      .number()
      .int()
      .positive()
      .max(MAX_UPLOAD_SIZE, `File size must not exceed ${MAX_UPLOAD_SIZE / 1024 / 1024}MB`),
    // Optional content hash — enables the dedupe path that returns
    // mediaExistingResponseSchema instead of a new upload URL.
    contentHash: z.string().optional(),
  })
  .meta({ id: "MediaUploadUrlBody" });

/** Body for confirming a completed upload, with optionally-known final dimensions. */
export const mediaConfirmBody = z
  .object({
    size: z.number().int().positive().optional(),
    width: z.number().int().positive().optional(),
    height: z.number().int().positive().optional(),
  })
  .meta({ id: "MediaConfirmBody" });

/** Query params for browsing an external media provider. */
export const mediaProviderListQuery = cursorPaginationQuery
  .extend({
    query: z.string().optional(),
    mimeType: z.string().optional(),
  })
  .meta({ id: "MediaProviderListQuery" });

// ---------------------------------------------------------------------------
// Media: Response schemas
// ---------------------------------------------------------------------------

// Upload lifecycle: pending (URL issued) → ready (confirmed) or failed.
const mediaStatusSchema = z.enum(["pending", "ready", "failed"]);

/** A media item as returned by the API. */
export const mediaItemSchema = z
  .object({
    id: z.string(),
    filename: z.string(),
    mimeType: z.string(),
    // Nullable: unknown until the upload is confirmed.
    size: z.number().nullable(),
    width: z.number().nullable(),
    height: z.number().nullable(),
    alt: z.string().nullable(),
    caption: z.string().nullable(),
    storageKey: z.string(),
    status: mediaStatusSchema,
    contentHash: z.string().nullable(),
    // Placeholder data for progressive image loading.
    blurhash: z.string().nullable(),
    dominantColor: z.string().nullable(),
    createdAt: z.string(),
    authorId: z.string().nullable(),
  })
  .meta({ id: "MediaItem" });

/** Single media item response. */
export const mediaResponseSchema = z
  .object({ item: mediaItemSchema })
  .meta({ id: "MediaResponse" });

/** Paginated media list response. */
export const mediaListResponseSchema = z
  .object({
    items: z.array(mediaItemSchema),
    nextCursor: z.string().optional(),
  })
  .meta({ id: "MediaListResponse" });

/** Presigned-upload response: PUT to uploadUrl with the given headers before expiresAt. */
export const mediaUploadUrlResponseSchema = z
  .object({
    uploadUrl: z.string(),
    method: z.literal("PUT"),
    headers: z.record(z.string(), z.string()),
    mediaId: z.string(),
    storageKey: z.string(),
    expiresAt: z.string(),
  })
  .meta({ id: "MediaUploadUrlResponse" });

/** Returned instead of an upload URL when a matching contentHash already exists (dedupe). */
export const mediaExistingResponseSchema = z
  .object({
    existing: z.literal(true),
    mediaId: z.string(),
    storageKey: z.string(),
    url: z.string(),
  })
  .meta({ id: "MediaExistingResponse" });

/** Confirm response — the media item plus its now-resolvable public URL. */
export const mediaConfirmResponseSchema = z
  .object({
    item: mediaItemSchema.extend({ url: z.string() }),
  })
  .meta({ id: "MediaConfirmResponse" });
|
||||
111
packages/core/src/api/schemas/menus.ts
Normal file
111
packages/core/src/api/schemas/menus.ts
Normal file
@@ -0,0 +1,111 @@
|
||||
import { z } from "zod";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Menus: Input schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Menu item type is an open string (e.g. reference vs custom URL); valid
// values are enforced by the handler, not this schema.
const menuItemType = z.string().min(1);

/** Body for creating a menu. `name` is the machine identifier, `label` the display name. */
export const createMenuBody = z
  .object({
    name: z.string().min(1),
    label: z.string().min(1),
  })
  .meta({ id: "CreateMenuBody" });

/** Body for updating a menu — only the label can change; `name` is immutable here. */
export const updateMenuBody = z
  .object({
    label: z.string().min(1).optional(),
  })
  .meta({ id: "UpdateMenuBody" });

/** Body for creating a menu item. Either a content reference or a customUrl is expected, per type. */
export const createMenuItemBody = z
  .object({
    type: menuItemType,
    label: z.string().min(1),
    // For reference-type items: the collection and item being linked.
    referenceCollection: z.string().optional(),
    referenceId: z.string().optional(),
    // For custom-type items. NOTE(review): not validated with httpUrl — confirm
    // relative paths are intentionally allowed here.
    customUrl: z.string().optional(),
    target: z.string().optional(),
    titleAttr: z.string().optional(),
    cssClasses: z.string().optional(),
    parentId: z.string().optional(),
    sortOrder: z.number().int().min(0).optional(),
  })
  .meta({ id: "CreateMenuItemBody" });

/** Body for updating a menu item — partial; parentId accepts null to detach from a parent. */
export const updateMenuItemBody = z
  .object({
    label: z.string().min(1).optional(),
    customUrl: z.string().optional(),
    target: z.string().optional(),
    titleAttr: z.string().optional(),
    cssClasses: z.string().optional(),
    parentId: z.string().nullish(),
    sortOrder: z.number().int().min(0).optional(),
  })
  .meta({ id: "UpdateMenuItemBody" });

/** Query param identifying the menu item to delete. */
export const menuItemDeleteQuery = z.object({
  id: z.string().min(1),
});

/** Query param identifying the menu item to update. */
export const menuItemUpdateQuery = z.object({
  id: z.string().min(1),
});

/** Body for bulk-reordering menu items — full (id, parentId, sortOrder) triples. */
export const reorderMenuItemsBody = z
  .object({
    items: z.array(
      z.object({
        id: z.string().min(1),
        parentId: z.string().nullable(),
        sortOrder: z.number().int().min(0),
      }),
    ),
  })
  .meta({ id: "ReorderMenuItemsBody" });

// ---------------------------------------------------------------------------
// Menus: Response schemas
// ---------------------------------------------------------------------------

// NOTE(review): menu response shapes use snake_case keys (created_at, menu_id, …)
// unlike the camelCase used elsewhere in these schemas — presumably raw DB rows
// are returned directly; confirm this is intentional API surface.

export const menuSchema = z
  .object({
    id: z.string(),
    name: z.string(),
    label: z.string(),
    created_at: z.string(),
    updated_at: z.string(),
  })
  .meta({ id: "Menu" });

export const menuItemSchema = z
  .object({
    id: z.string(),
    menu_id: z.string(),
    parent_id: z.string().nullable(),
    sort_order: z.number().int(),
    type: z.string(),
    reference_collection: z.string().nullable(),
    reference_id: z.string().nullable(),
    custom_url: z.string().nullable(),
    label: z.string(),
    title_attr: z.string().nullable(),
    target: z.string().nullable(),
    css_classes: z.string().nullable(),
    created_at: z.string(),
  })
  .meta({ id: "MenuItem" });

/** Menu list entry — menu plus a count of its items. */
export const menuListItemSchema = menuSchema
  .extend({
    itemCount: z.number().int(),
  })
  .meta({ id: "MenuListItem" });

/** Menu with its full (flat) item list. */
export const menuWithItemsSchema = menuSchema
  .extend({
    items: z.array(menuItemSchema),
  })
  .meta({ id: "MenuWithItems" });
|
||||
155
packages/core/src/api/schemas/redirects.ts
Normal file
155
packages/core/src/api/schemas/redirects.ts
Normal file
@@ -0,0 +1,155 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { cursorPaginationQuery } from "./common.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Redirects: Input schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
const redirectType = z.coerce
|
||||
.number()
|
||||
.int()
|
||||
.refine((n) => [301, 302, 307, 308].includes(n), {
|
||||
message: "Redirect type must be 301, 302, 307, or 308",
|
||||
});
|
||||
|
||||
/** Matches CR or LF characters */
|
||||
const CRLF = /[\r\n]/;
|
||||
|
||||
/** Path must start with / and not be protocol-relative, contain no CRLF, and no path traversal */
|
||||
const urlPath = z
|
||||
.string()
|
||||
.min(1)
|
||||
.refine((s) => s.startsWith("/") && !s.startsWith("//"), {
|
||||
message: "Must be a path starting with / (no protocol-relative URLs)",
|
||||
})
|
||||
.refine((s) => !CRLF.test(s), {
|
||||
message: "URL must not contain newline characters",
|
||||
})
|
||||
.refine(
|
||||
(s) => {
|
||||
try {
|
||||
return !decodeURIComponent(s).split("/").includes("..");
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
},
|
||||
{ message: "URL must not contain path traversal segments" },
|
||||
);
|
||||
|
||||
/** Body for creating a redirect. Source/destination are validated site-relative paths. */
export const createRedirectBody = z
  .object({
    source: urlPath,
    destination: urlPath,
    // Defaults to a permanent (301) redirect.
    type: redirectType.optional().default(301),
    enabled: z.boolean().optional().default(true),
    groupName: z.string().nullish(),
  })
  .meta({ id: "CreateRedirectBody" });

/** Body for updating a redirect — partial, but at least one field must be present. */
export const updateRedirectBody = z
  .object({
    source: urlPath.optional(),
    destination: urlPath.optional(),
    type: redirectType.optional(),
    enabled: z.boolean().optional(),
    groupName: z.string().nullish(),
  })
  // Reject an empty update body (every value undefined).
  .refine((o) => Object.values(o).some((v) => v !== undefined), {
    message: "At least one field must be provided",
  })
  .meta({ id: "UpdateRedirectBody" });

/** Query params for listing redirects — pagination plus filters. */
export const redirectsListQuery = cursorPaginationQuery
  .extend({
    search: z.string().optional(),
    group: z.string().optional(),
    // Boolean flags arrive as "true"/"false" strings in the query string.
    enabled: z
      .enum(["true", "false"])
      .transform((v) => v === "true")
      .optional(),
    auto: z
      .enum(["true", "false"])
      .transform((v) => v === "true")
      .optional(),
  })
  .meta({ id: "RedirectsListQuery" });

// ---------------------------------------------------------------------------
// 404 Log: Input schemas
// ---------------------------------------------------------------------------

/** Query params for listing logged 404s. */
export const notFoundListQuery = cursorPaginationQuery
  .extend({
    search: z.string().optional(),
  })
  .meta({ id: "NotFoundListQuery" });

/** Query params for the aggregated 404 summary (top paths). */
export const notFoundSummaryQuery = z.object({
  limit: z.coerce.number().int().min(1).max(100).optional().default(50),
});

/** Body for pruning 404 log entries older than a cutoff datetime. */
export const notFoundPruneBody = z
  .object({
    olderThan: z.string().datetime({ message: "olderThan must be an ISO 8601 datetime" }),
  })
  .meta({ id: "NotFoundPruneBody" });
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Redirects: Response schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** A redirect rule as returned by the API. */
export const redirectSchema = z
  .object({
    id: z.string(),
    source: z.string(),
    destination: z.string(),
    // HTTP status code (301/302/307/308).
    type: z.number().int(),
    isPattern: z.boolean(),
    enabled: z.boolean(),
    // Hit tracking for analytics.
    hits: z.number().int(),
    lastHitAt: z.string().nullable(),
    groupName: z.string().nullable(),
    // True when the redirect was created automatically (e.g. after a slug change) —
    // presumably; confirm against the redirects service.
    auto: z.boolean(),
    createdAt: z.string(),
    updatedAt: z.string(),
  })
  .meta({ id: "Redirect" });

/** Paginated redirect list response. */
export const redirectListResponseSchema = z
  .object({
    items: z.array(redirectSchema),
    nextCursor: z.string().optional(),
  })
  .meta({ id: "RedirectListResponse" });

/** A single logged 404 request. */
export const notFoundEntrySchema = z
  .object({
    id: z.string(),
    path: z.string(),
    referrer: z.string().nullable(),
    userAgent: z.string().nullable(),
    ip: z.string().nullable(),
    createdAt: z.string(),
  })
  .meta({ id: "NotFoundEntry" });

/** Paginated 404 log response. */
export const notFoundListResponseSchema = z
  .object({
    items: z.array(notFoundEntrySchema),
    nextCursor: z.string().optional(),
  })
  .meta({ id: "NotFoundListResponse" });

/** Aggregated 404 stats for a single path. */
export const notFoundSummarySchema = z
  .object({
    path: z.string(),
    count: z.number().int(),
    lastSeen: z.string(),
    topReferrer: z.string().nullable(),
  })
  .meta({ id: "NotFoundSummary" });

/** Response for the 404 summary endpoint. */
export const notFoundSummaryResponseSchema = z
  .object({ items: z.array(notFoundSummarySchema) })
  .meta({ id: "NotFoundSummaryResponse" });
|
||||
203
packages/core/src/api/schemas/schema.ts
Normal file
203
packages/core/src/api/schemas/schema.ts
Normal file
@@ -0,0 +1,203 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { slugPattern } from "./common.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Schema (collections & fields): Input schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Features a collection can opt into.
const collectionSupportValues = z.enum(["drafts", "revisions", "preview", "scheduling", "search"]);

// Where a collection definition came from: a template/import (with a qualifier),
// or one of the fixed origins.
const collectionSourcePattern = /^(template:.+|import:.+|manual|discovered|seed)$/;

// The closed set of field types a collection field may use.
const fieldTypeValues = z.enum([
  "string",
  "text",
  "number",
  "integer",
  "boolean",
  "datetime",
  "select",
  "multiSelect",
  "portableText",
  "image",
  "file",
  "reference",
  "json",
  "slug",
]);

// Per-field validation rules; which keys apply depends on the field type
// (e.g. options for select/multiSelect, min/max for numbers).
const fieldValidation = z
  .object({
    required: z.boolean().optional(),
    min: z.number().optional(),
    max: z.number().optional(),
    minLength: z.number().int().min(0).optional(),
    maxLength: z.number().int().min(0).optional(),
    pattern: z.string().optional(),
    options: z.array(z.string()).optional(),
  })
  .optional();

// Widget-specific options — opaque to the schema layer.
const fieldWidgetOptions = z.record(z.string(), z.unknown()).optional();

/** Body for creating a collection. Slug length is capped at 63 (it becomes part of a table name — presumably; confirm). */
export const createCollectionBody = z
  .object({
    slug: z.string().min(1).max(63).regex(slugPattern, "Invalid slug format"),
    label: z.string().min(1),
    labelSingular: z.string().optional(),
    description: z.string().optional(),
    icon: z.string().optional(),
    supports: z.array(collectionSupportValues).optional(),
    source: z.string().regex(collectionSourcePattern).optional(),
    urlPattern: z.string().optional(),
    hasSeo: z.boolean().optional(),
  })
  .meta({ id: "CreateCollectionBody" });

/** Body for updating a collection — slug and source are immutable; comment settings live here too. */
export const updateCollectionBody = z
  .object({
    label: z.string().min(1).optional(),
    labelSingular: z.string().optional(),
    description: z.string().optional(),
    icon: z.string().optional(),
    supports: z.array(collectionSupportValues).optional(),
    // null clears the pattern; undefined leaves it unchanged.
    urlPattern: z.string().nullish(),
    hasSeo: z.boolean().optional(),
    commentsEnabled: z.boolean().optional(),
    commentsModeration: z.enum(["all", "first_time", "none"]).optional(),
    // 0 presumably means "never close" — confirm in handler.
    commentsClosedAfterDays: z.number().int().min(0).optional(),
    commentsAutoApproveUsers: z.boolean().optional(),
  })
  .meta({ id: "UpdateCollectionBody" });

/** Body for creating a field on a collection. */
export const createFieldBody = z
  .object({
    slug: z.string().min(1).max(63).regex(slugPattern, "Invalid slug format"),
    label: z.string().min(1),
    type: fieldTypeValues,
    required: z.boolean().optional(),
    unique: z.boolean().optional(),
    defaultValue: z.unknown().optional(),
    validation: fieldValidation,
    widget: z.string().optional(),
    options: fieldWidgetOptions,
    sortOrder: z.number().int().min(0).optional(),
    searchable: z.boolean().optional(),
    translatable: z.boolean().optional(),
  })
  .meta({ id: "CreateFieldBody" });

/** Body for updating a field — slug and type are immutable after creation. */
export const updateFieldBody = z
  .object({
    label: z.string().min(1).optional(),
    required: z.boolean().optional(),
    unique: z.boolean().optional(),
    defaultValue: z.unknown().optional(),
    validation: fieldValidation,
    widget: z.string().optional(),
    options: fieldWidgetOptions,
    sortOrder: z.number().int().min(0).optional(),
    searchable: z.boolean().optional(),
    translatable: z.boolean().optional(),
  })
  .meta({ id: "UpdateFieldBody" });

/** Body for reordering a collection's fields — full ordered list of field slugs. */
export const fieldReorderBody = z
  .object({
    fieldSlugs: z.array(z.string().min(1)),
  })
  .meta({ id: "FieldReorderBody" });

/** Body for registering an orphaned table as a collection. */
export const orphanRegisterBody = z
  .object({
    label: z.string().optional(),
    labelSingular: z.string().optional(),
    description: z.string().optional(),
  })
  .meta({ id: "OrphanRegisterBody" });

/** Query params for the schema export endpoint. */
export const schemaExportQuery = z.object({
  format: z.string().optional(),
});
|
||||
|
||||
export const collectionGetQuery = z.object({
|
||||
includeFields: z
|
||||
.string()
|
||||
.transform((v) => v === "true")
|
||||
.optional(),
|
||||
});
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Schema: Response schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** A collection definition as returned by the API. */
export const collectionSchema = z
  .object({
    id: z.string(),
    slug: z.string(),
    label: z.string(),
    labelSingular: z.string().nullable(),
    description: z.string().nullable(),
    icon: z.string().nullable(),
    // Enabled features (see collectionSupportValues for the input-side set).
    supports: z.array(z.string()),
    source: z.string().nullable(),
    urlPattern: z.string().nullable(),
    hasSeo: z.boolean(),
    createdAt: z.string(),
    updatedAt: z.string(),
  })
  .meta({ id: "Collection" });

/** A field definition as returned by the API. */
export const fieldSchema = z
  .object({
    id: z.string(),
    collectionId: z.string(),
    slug: z.string(),
    label: z.string(),
    type: fieldTypeValues,
    required: z.boolean(),
    unique: z.boolean(),
    defaultValue: z.unknown().nullable(),
    validation: z.record(z.string(), z.unknown()).nullable(),
    widget: z.string().nullable(),
    options: z.record(z.string(), z.unknown()).nullable(),
    sortOrder: z.number().int(),
    searchable: z.boolean(),
    translatable: z.boolean(),
    createdAt: z.string(),
    updatedAt: z.string(),
  })
  .meta({ id: "Field" });

/** Single collection response (fields not included). */
export const collectionResponseSchema = z
  .object({ item: collectionSchema })
  .meta({ id: "CollectionResponse" });

/** Single collection response with its fields expanded. */
export const collectionWithFieldsResponseSchema = z
  .object({
    item: collectionSchema.extend({ fields: z.array(fieldSchema) }),
  })
  .meta({ id: "CollectionWithFieldsResponse" });

/** Collection list response (not paginated). */
export const collectionListResponseSchema = z
  .object({ items: z.array(collectionSchema) })
  .meta({ id: "CollectionListResponse" });

/** Single field response. */
export const fieldResponseSchema = z.object({ item: fieldSchema }).meta({ id: "FieldResponse" });

/** Field list response (not paginated). */
export const fieldListResponseSchema = z
  .object({ items: z.array(fieldSchema) })
  .meta({ id: "FieldListResponse" });

/** A content table with no matching collection definition. */
export const orphanedTableSchema = z
  .object({
    slug: z.string(),
    tableName: z.string(),
    rowCount: z.number().int(),
  })
  .meta({ id: "OrphanedTable" });

/** Orphaned table list response. */
export const orphanedTableListResponseSchema = z
  .object({ items: z.array(orphanedTableSchema) })
  .meta({ id: "OrphanedTableListResponse" });
|
||||
63
packages/core/src/api/schemas/search.ts
Normal file
63
packages/core/src/api/schemas/search.ts
Normal file
@@ -0,0 +1,63 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { localeCode } from "./common.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Search: Input schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export const searchQuery = z
|
||||
.object({
|
||||
q: z.string().min(1),
|
||||
collections: z.string().optional(),
|
||||
status: z.string().optional(),
|
||||
locale: localeCode.optional(),
|
||||
limit: z.coerce.number().int().min(1).max(100).optional(),
|
||||
})
|
||||
.meta({ id: "SearchQuery" });
|
||||
|
||||
export const searchSuggestQuery = z
|
||||
.object({
|
||||
q: z.string().min(1),
|
||||
collections: z.string().optional(),
|
||||
locale: localeCode.optional(),
|
||||
limit: z.coerce.number().int().min(1).max(20).optional(),
|
||||
})
|
||||
.meta({ id: "SearchSuggestQuery" });
|
||||
|
||||
export const searchRebuildBody = z
|
||||
.object({
|
||||
collection: z.string().min(1),
|
||||
})
|
||||
.meta({ id: "SearchRebuildBody" });
|
||||
|
||||
export const searchEnableBody = z
|
||||
.object({
|
||||
collection: z.string().min(1),
|
||||
enabled: z.boolean(),
|
||||
weights: z.record(z.string(), z.number()).optional(),
|
||||
})
|
||||
.meta({ id: "SearchEnableBody" });
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Search: Response schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export const searchResultSchema = z
|
||||
.object({
|
||||
collection: z.string(),
|
||||
id: z.string(),
|
||||
slug: z.string().nullable(),
|
||||
locale: z.string(),
|
||||
title: z.string().optional(),
|
||||
snippet: z.string().optional(),
|
||||
score: z.number(),
|
||||
})
|
||||
.meta({ id: "SearchResult" });
|
||||
|
||||
export const searchResponseSchema = z
|
||||
.object({
|
||||
items: z.array(searchResultSchema),
|
||||
nextCursor: z.string().optional(),
|
||||
})
|
||||
.meta({ id: "SearchResponse" });
|
||||
67
packages/core/src/api/schemas/sections.ts
Normal file
67
packages/core/src/api/schemas/sections.ts
Normal file
@@ -0,0 +1,67 @@
|
||||
import { z } from "zod";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Sections: Input schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
const sectionSource = z.enum(["theme", "user", "import"]);
|
||||
|
||||
export const sectionsListQuery = z
|
||||
.object({
|
||||
source: sectionSource.optional(),
|
||||
search: z.string().optional(),
|
||||
limit: z.coerce.number().int().min(1).max(100).optional(),
|
||||
cursor: z.string().optional(),
|
||||
})
|
||||
.meta({ id: "SectionsListQuery" });
|
||||
|
||||
export const createSectionBody = z
|
||||
.object({
|
||||
slug: z.string().min(1),
|
||||
title: z.string().min(1),
|
||||
description: z.string().optional(),
|
||||
keywords: z.array(z.string()).optional(),
|
||||
content: z.array(z.record(z.string(), z.unknown())),
|
||||
previewMediaId: z.string().optional(),
|
||||
source: sectionSource.optional(),
|
||||
themeId: z.string().optional(),
|
||||
})
|
||||
.meta({ id: "CreateSectionBody" });
|
||||
|
||||
export const updateSectionBody = z
|
||||
.object({
|
||||
slug: z.string().min(1).optional(),
|
||||
title: z.string().min(1).optional(),
|
||||
description: z.string().optional(),
|
||||
keywords: z.array(z.string()).optional(),
|
||||
content: z.array(z.record(z.string(), z.unknown())).optional(),
|
||||
previewMediaId: z.string().nullish(),
|
||||
})
|
||||
.meta({ id: "UpdateSectionBody" });
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Sections: Response schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export const sectionSchema = z
|
||||
.object({
|
||||
id: z.string(),
|
||||
slug: z.string(),
|
||||
title: z.string(),
|
||||
description: z.string().nullable(),
|
||||
keywords: z.array(z.string()).nullable(),
|
||||
content: z.array(z.record(z.string(), z.unknown())),
|
||||
previewMediaId: z.string().nullable(),
|
||||
source: z.string(),
|
||||
themeId: z.string().nullable(),
|
||||
createdAt: z.string(),
|
||||
updatedAt: z.string(),
|
||||
})
|
||||
.meta({ id: "Section" });
|
||||
|
||||
export const sectionListResponseSchema = z
|
||||
.object({
|
||||
items: z.array(sectionSchema),
|
||||
nextCursor: z.string().optional(),
|
||||
})
|
||||
.meta({ id: "SectionListResponse" });
|
||||
63
packages/core/src/api/schemas/settings.ts
Normal file
63
packages/core/src/api/schemas/settings.ts
Normal file
@@ -0,0 +1,63 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { httpUrl } from "./common.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Settings: Input schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
const mediaReference = z.object({
|
||||
mediaId: z.string(),
|
||||
alt: z.string().optional(),
|
||||
});
|
||||
|
||||
const socialSettings = z.object({
|
||||
twitter: z.string().optional(),
|
||||
github: z.string().optional(),
|
||||
facebook: z.string().optional(),
|
||||
instagram: z.string().optional(),
|
||||
linkedin: z.string().optional(),
|
||||
youtube: z.string().optional(),
|
||||
});
|
||||
|
||||
const seoSettings = z.object({
|
||||
titleSeparator: z.string().max(10).optional(),
|
||||
defaultOgImage: mediaReference.optional(),
|
||||
robotsTxt: z.string().max(5000).optional(),
|
||||
googleVerification: z.string().max(100).optional(),
|
||||
bingVerification: z.string().max(100).optional(),
|
||||
});
|
||||
|
||||
export const settingsUpdateBody = z
|
||||
.object({
|
||||
title: z.string().optional(),
|
||||
tagline: z.string().optional(),
|
||||
logo: mediaReference.optional(),
|
||||
favicon: mediaReference.optional(),
|
||||
url: z.union([httpUrl, z.literal("")]).optional(),
|
||||
postsPerPage: z.number().int().min(1).max(100).optional(),
|
||||
dateFormat: z.string().optional(),
|
||||
timezone: z.string().optional(),
|
||||
social: socialSettings.optional(),
|
||||
seo: seoSettings.optional(),
|
||||
})
|
||||
.meta({ id: "SettingsUpdateBody" });
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Settings: Response schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export const siteSettingsSchema = z
|
||||
.object({
|
||||
title: z.string().optional(),
|
||||
tagline: z.string().optional(),
|
||||
logo: mediaReference.optional(),
|
||||
favicon: mediaReference.optional(),
|
||||
url: z.string().optional(),
|
||||
postsPerPage: z.number().int().optional(),
|
||||
dateFormat: z.string().optional(),
|
||||
timezone: z.string().optional(),
|
||||
social: socialSettings.optional(),
|
||||
seo: seoSettings.optional(),
|
||||
})
|
||||
.meta({ id: "SiteSettings" });
|
||||
37
packages/core/src/api/schemas/setup.ts
Normal file
37
packages/core/src/api/schemas/setup.ts
Normal file
@@ -0,0 +1,37 @@
|
||||
import { z } from "zod";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Setup
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** Registration credential — duplicated reference for setup flow.
|
||||
* The canonical definition lives in auth.ts but setup needs it independently
|
||||
* because setup runs before auth is configured. */
|
||||
const authenticatorTransport = z.enum(["usb", "nfc", "ble", "internal", "hybrid"]);
|
||||
|
||||
const registrationCredential = z.object({
|
||||
id: z.string(),
|
||||
rawId: z.string(),
|
||||
type: z.literal("public-key"),
|
||||
response: z.object({
|
||||
clientDataJSON: z.string(),
|
||||
attestationObject: z.string(),
|
||||
transports: z.array(authenticatorTransport).optional(),
|
||||
}),
|
||||
authenticatorAttachment: z.enum(["platform", "cross-platform"]).optional(),
|
||||
});
|
||||
|
||||
export const setupBody = z.object({
|
||||
title: z.string().min(1),
|
||||
tagline: z.string().optional(),
|
||||
includeContent: z.boolean(),
|
||||
});
|
||||
|
||||
export const setupAdminBody = z.object({
|
||||
email: z.string().email(),
|
||||
name: z.string().optional(),
|
||||
});
|
||||
|
||||
export const setupAdminVerifyBody = z.object({
|
||||
credential: registrationCredential,
|
||||
});
|
||||
113
packages/core/src/api/schemas/taxonomies.ts
Normal file
113
packages/core/src/api/schemas/taxonomies.ts
Normal file
@@ -0,0 +1,113 @@
|
||||
import { z } from "zod";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Taxonomy definitions: Input schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** Collection slug format: lowercase alphanumeric + underscores, starts with letter */
|
||||
const collectionSlugPattern = /^[a-z][a-z0-9_]*$/;
|
||||
|
||||
export const createTaxonomyDefBody = z
|
||||
.object({
|
||||
name: z
|
||||
.string()
|
||||
.min(1)
|
||||
.max(63)
|
||||
.regex(/^[a-z][a-z0-9_]*$/, "Name must be lowercase alphanumeric with underscores"),
|
||||
label: z.string().min(1).max(200),
|
||||
hierarchical: z.boolean().optional().default(false),
|
||||
collections: z
|
||||
.array(
|
||||
z.string().min(1).max(63).regex(collectionSlugPattern, "Invalid collection slug format"),
|
||||
)
|
||||
.max(100)
|
||||
.optional()
|
||||
.default([]),
|
||||
})
|
||||
.meta({ id: "CreateTaxonomyDefBody" });
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Taxonomy terms: Input schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export const createTermBody = z
|
||||
.object({
|
||||
slug: z.string().min(1),
|
||||
label: z.string().min(1),
|
||||
parentId: z.string().nullish(),
|
||||
description: z.string().optional(),
|
||||
})
|
||||
.meta({ id: "CreateTermBody" });
|
||||
|
||||
export const updateTermBody = z
|
||||
.object({
|
||||
slug: z.string().min(1).optional(),
|
||||
label: z.string().min(1).optional(),
|
||||
parentId: z.string().nullish(),
|
||||
description: z.string().optional(),
|
||||
})
|
||||
.meta({ id: "UpdateTermBody" });
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Taxonomies: Response schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export const taxonomyDefSchema = z
|
||||
.object({
|
||||
id: z.string(),
|
||||
name: z.string(),
|
||||
label: z.string(),
|
||||
labelSingular: z.string().optional(),
|
||||
hierarchical: z.boolean(),
|
||||
collections: z.array(z.string()),
|
||||
})
|
||||
.meta({ id: "TaxonomyDef" });
|
||||
|
||||
export const taxonomyListResponseSchema = z
|
||||
.object({ taxonomies: z.array(taxonomyDefSchema) })
|
||||
.meta({ id: "TaxonomyListResponse" });
|
||||
|
||||
export const termSchema = z
|
||||
.object({
|
||||
id: z.string(),
|
||||
name: z.string(),
|
||||
slug: z.string(),
|
||||
label: z.string(),
|
||||
parentId: z.string().nullable(),
|
||||
description: z.string().optional(),
|
||||
})
|
||||
.meta({ id: "Term" });
|
||||
|
||||
export const termWithCountSchema: z.ZodType = z
|
||||
.object({
|
||||
id: z.string(),
|
||||
name: z.string(),
|
||||
slug: z.string(),
|
||||
label: z.string(),
|
||||
parentId: z.string().nullable(),
|
||||
description: z.string().optional(),
|
||||
count: z.number().int(),
|
||||
children: z.array(z.lazy(() => termWithCountSchema)),
|
||||
})
|
||||
.meta({ id: "TermWithCount" });
|
||||
|
||||
export const termListResponseSchema = z
|
||||
.object({ terms: z.array(termWithCountSchema) })
|
||||
.meta({ id: "TermListResponse" });
|
||||
|
||||
export const termResponseSchema = z.object({ term: termSchema }).meta({ id: "TermResponse" });
|
||||
|
||||
export const termGetResponseSchema = z
|
||||
.object({
|
||||
term: termSchema.extend({
|
||||
count: z.number().int(),
|
||||
children: z.array(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
slug: z.string(),
|
||||
label: z.string(),
|
||||
}),
|
||||
),
|
||||
}),
|
||||
})
|
||||
.meta({ id: "TermGetResponse" });
|
||||
96
packages/core/src/api/schemas/users.ts
Normal file
96
packages/core/src/api/schemas/users.ts
Normal file
@@ -0,0 +1,96 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { roleLevel } from "./common.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Admin / Users: Input schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export const usersListQuery = z
|
||||
.object({
|
||||
search: z.string().optional(),
|
||||
role: z.string().optional(),
|
||||
cursor: z.string().optional(),
|
||||
limit: z.coerce.number().int().min(1).max(100).optional().default(50),
|
||||
})
|
||||
.meta({ id: "UsersListQuery" });
|
||||
|
||||
export const userUpdateBody = z
|
||||
.object({
|
||||
name: z.string().optional(),
|
||||
email: z.string().email().optional(),
|
||||
role: roleLevel.optional(),
|
||||
})
|
||||
.meta({ id: "UserUpdateBody" });
|
||||
|
||||
export const allowedDomainCreateBody = z
|
||||
.object({
|
||||
domain: z.string().min(1),
|
||||
defaultRole: roleLevel,
|
||||
})
|
||||
.meta({ id: "AllowedDomainCreateBody" });
|
||||
|
||||
export const allowedDomainUpdateBody = z
|
||||
.object({
|
||||
enabled: z.boolean().optional(),
|
||||
defaultRole: roleLevel.optional(),
|
||||
})
|
||||
.meta({ id: "AllowedDomainUpdateBody" });
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Admin / Users: Response schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export const userSchema = z
|
||||
.object({
|
||||
id: z.string(),
|
||||
email: z.string(),
|
||||
name: z.string().nullable(),
|
||||
avatarUrl: z.string().nullable(),
|
||||
role: z.number().int(),
|
||||
emailVerified: z.boolean(),
|
||||
disabled: z.boolean(),
|
||||
createdAt: z.string(),
|
||||
updatedAt: z.string(),
|
||||
lastLogin: z.string().nullable(),
|
||||
credentialCount: z.number().int().optional(),
|
||||
oauthProviders: z.array(z.string()).optional(),
|
||||
})
|
||||
.meta({ id: "User" });
|
||||
|
||||
export const userListResponseSchema = z
|
||||
.object({
|
||||
items: z.array(userSchema),
|
||||
nextCursor: z.string().optional(),
|
||||
})
|
||||
.meta({ id: "UserListResponse" });
|
||||
|
||||
export const userDetailSchema = z
|
||||
.object({
|
||||
id: z.string(),
|
||||
email: z.string(),
|
||||
name: z.string().nullable(),
|
||||
avatarUrl: z.string().nullable(),
|
||||
role: z.number().int(),
|
||||
emailVerified: z.boolean(),
|
||||
disabled: z.boolean(),
|
||||
createdAt: z.string(),
|
||||
updatedAt: z.string(),
|
||||
lastLogin: z.string().nullable(),
|
||||
credentials: z.array(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
name: z.string().nullable(),
|
||||
deviceType: z.string().nullable(),
|
||||
createdAt: z.string(),
|
||||
lastUsedAt: z.string(),
|
||||
}),
|
||||
),
|
||||
oauthAccounts: z.array(
|
||||
z.object({
|
||||
provider: z.string(),
|
||||
createdAt: z.string(),
|
||||
}),
|
||||
),
|
||||
})
|
||||
.meta({ id: "UserDetail" });
|
||||
80
packages/core/src/api/schemas/widgets.ts
Normal file
80
packages/core/src/api/schemas/widgets.ts
Normal file
@@ -0,0 +1,80 @@
|
||||
import { z } from "zod";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Widgets: Input schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
const widgetType = z.enum(["content", "menu", "component"]);
|
||||
|
||||
export const createWidgetAreaBody = z
|
||||
.object({
|
||||
name: z.string().min(1),
|
||||
label: z.string().min(1),
|
||||
description: z.string().optional(),
|
||||
})
|
||||
.meta({ id: "CreateWidgetAreaBody" });
|
||||
|
||||
export const createWidgetBody = z
|
||||
.object({
|
||||
type: widgetType,
|
||||
title: z.string().optional(),
|
||||
content: z.array(z.record(z.string(), z.unknown())).optional(),
|
||||
menuName: z.string().optional(),
|
||||
componentId: z.string().optional(),
|
||||
componentProps: z.record(z.string(), z.unknown()).optional(),
|
||||
})
|
||||
.meta({ id: "CreateWidgetBody" });
|
||||
|
||||
export const updateWidgetBody = z
|
||||
.object({
|
||||
type: widgetType.optional(),
|
||||
title: z.string().optional(),
|
||||
content: z.array(z.record(z.string(), z.unknown())).optional(),
|
||||
menuName: z.string().optional(),
|
||||
componentId: z.string().optional(),
|
||||
componentProps: z.record(z.string(), z.unknown()).optional(),
|
||||
})
|
||||
.meta({ id: "UpdateWidgetBody" });
|
||||
|
||||
export const reorderWidgetsBody = z
|
||||
.object({
|
||||
widgetIds: z.array(z.string().min(1)),
|
||||
})
|
||||
.meta({ id: "ReorderWidgetsBody" });
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Widgets: Response schemas
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export const widgetAreaSchema = z
|
||||
.object({
|
||||
id: z.string(),
|
||||
name: z.string(),
|
||||
label: z.string(),
|
||||
description: z.string().nullable(),
|
||||
created_at: z.string(),
|
||||
updated_at: z.string(),
|
||||
})
|
||||
.meta({ id: "WidgetArea" });
|
||||
|
||||
export const widgetSchema = z
|
||||
.object({
|
||||
id: z.string(),
|
||||
area_id: z.string(),
|
||||
type: z.string(),
|
||||
title: z.string().nullable(),
|
||||
content: z.string().nullable(),
|
||||
menu_name: z.string().nullable(),
|
||||
component_id: z.string().nullable(),
|
||||
component_props: z.string().nullable(),
|
||||
sort_order: z.number().int(),
|
||||
created_at: z.string(),
|
||||
updated_at: z.string(),
|
||||
})
|
||||
.meta({ id: "Widget" });
|
||||
|
||||
export const widgetAreaWithWidgetsSchema = widgetAreaSchema
|
||||
.extend({
|
||||
widgets: z.array(widgetSchema),
|
||||
})
|
||||
.meta({ id: "WidgetAreaWithWidgets" });
|
||||
25
packages/core/src/api/site-url.ts
Normal file
25
packages/core/src/api/site-url.ts
Normal file
@@ -0,0 +1,25 @@
|
||||
/**
|
||||
* Resolve the canonical site base URL for use in outbound links (emails, etc.).
|
||||
*
|
||||
* Uses the stored `emdash:site_url` (set during setup on the real domain)
|
||||
* so that Host header spoofing in later requests cannot redirect users to
|
||||
* attacker-controlled domains.
|
||||
*
|
||||
* Falls back to the request URL only if no stored value exists (pre-setup).
|
||||
*/
|
||||
|
||||
import type { Kysely } from "kysely";
|
||||
|
||||
import { OptionsRepository } from "../database/repositories/options.js";
|
||||
import type { Database } from "../database/types.js";
|
||||
|
||||
export async function getSiteBaseUrl(db: Kysely<Database>, request: Request): Promise<string> {
|
||||
const options = new OptionsRepository(db);
|
||||
const storedUrl = await options.get<string>("emdash:site_url");
|
||||
if (storedUrl) {
|
||||
return `${storedUrl}/_emdash`;
|
||||
}
|
||||
// Fallback: derive from request (only reached before setup completes)
|
||||
const url = new URL(request.url);
|
||||
return `${url.protocol}//${url.host}/_emdash`;
|
||||
}
|
||||
82
packages/core/src/api/types.ts
Normal file
82
packages/core/src/api/types.ts
Normal file
@@ -0,0 +1,82 @@
|
||||
/**
|
||||
* API types for EmDash REST endpoints
|
||||
*/
|
||||
|
||||
import type { ContentItem } from "../database/repositories/types.js";
|
||||
|
||||
/**
 * List response with cursor pagination.
 */
export interface ListResponse<T> {
  items: T[];
  /** Opaque cursor for fetching the next page; absent on the last page (presumed — confirm with handlers). */
  nextCursor?: string;
}

/**
 * Content API responses.
 */
// Empty interface extension: an intentional alias for ListResponse<ContentItem>
// that keeps a named interface in the public API surface.
export interface ContentListResponse extends ListResponse<ContentItem> {}

export interface ContentResponse {
  item: ContentItem;
  /** Opaque revision token for optimistic concurrency */
  _rev?: string;
}

/**
 * Manifest API response: describes the site's collections and plugins so
 * clients can render admin UI without further schema requests (presumed from
 * shape — confirm against the manifest handler).
 */
export interface ManifestResponse {
  version: string;
  /** Content hash of the manifest (semantics not visible here — see producer). */
  hash: string;
  collections: Record<
    string,
    {
      label: string;
      labelSingular: string;
      supports: string[];
      fields: Record<string, FieldDescriptor>;
    }
  >;
  plugins: Record<
    string,
    {
      adminPages?: Array<{ path: string; component: string }>;
      widgets?: string[];
    }
  >;
}

/** Compact description of a single field inside the manifest. */
export interface FieldDescriptor {
  kind: string;
  label?: string;
  required?: boolean;
  /** Choice options for select-like fields. */
  options?: Array<{ value: string; label: string }>;
}

/**
 * Discriminated union for handler results.
 *
 * Handlers return `ApiResult<T>` -- either `{ success: true, data: T }` or
 * `{ success: false, error: { code, message } }`. The `success` literal
 * enables TypeScript narrowing on `.data`.
 *
 * The generic `E` parameter defaults to `string` and can be narrowed to a
 * specific error-code union (e.g. an OAuth error-code type) for handlers
 * with a closed error set.
 *
 * Use `unwrapResult()` from `error.ts` to convert to an HTTP Response.
 */
export type ApiResult<T, E extends string = string> =
  | { success: true; data: T }
  | {
      success: false;
      error: { code: E; message: string; details?: Record<string, unknown> };
    };

/**
 * API request context: identity attached by auth middleware, when present.
 */
export interface ApiContext {
  userId?: string;
  userRole?: string;
}
|
||||
Reference in New Issue
Block a user