first commit

This commit is contained in:
Matt Kane
2026-04-01 10:44:22 +01:00
commit 43fcb9a131
1789 changed files with 395041 additions and 0 deletions

View File

@@ -0,0 +1,207 @@
/**
* In-Process Adapter for Standard-Format Plugins
*
* Converts a standard plugin definition ({ hooks, routes }) into a
* ResolvedPlugin compatible with HookPipeline. This allows standard-format
* plugins to run in-process when placed in the `plugins: []` config array.
*
* The adapter wraps each hook and route handler so that the PluginContextFactory
* provides the same capability-gated context as the native path.
*
*/
import type { PluginDescriptor } from "../astro/integration/runtime.js";
import { PLUGIN_CAPABILITIES, HOOK_NAMES } from "./manifest-schema.js";
import type {
StandardPluginDefinition,
StandardHookEntry,
StandardHookHandler,
ResolvedPlugin,
ResolvedPluginHooks,
ResolvedHook,
PluginRoute,
PluginCapability,
PluginStorageConfig,
PluginAdminConfig,
} from "./types.js";
/**
 * Defaults applied when a hook entry omits explicit configuration.
 */
const DEFAULT_PRIORITY = 100;
const DEFAULT_TIMEOUT = 5000;
const DEFAULT_ERROR_POLICY = "abort" as const;
/**
 * Type guard: a standard hook entry is the config-object form when it carries
 * a `handler` property, as opposed to being a bare handler function.
 */
function isHookConfig(
  entry: StandardHookEntry,
): entry is Exclude<StandardHookEntry, StandardHookHandler> {
  if (typeof entry !== "object" || entry === null) return false;
  return "handler" in entry;
}
/**
 * Normalize one standard hook entry into a ResolvedHook.
 *
 * Standard-format hooks use the sandbox entry convention of
 * handler(event, ctx) — two args — which is exactly how HookPipeline's
 * dispatch methods invoke handlers, so the handler passes through unchanged;
 * only the surrounding configuration is filled in with defaults.
 */
function resolveStandardHook(
  entry: StandardHookEntry,
  pluginId: string,
): ResolvedHook<StandardHookHandler> {
  if (!isHookConfig(entry)) {
    // Bare function form: everything defaults.
    return {
      priority: DEFAULT_PRIORITY,
      timeout: DEFAULT_TIMEOUT,
      dependencies: [],
      errorPolicy: DEFAULT_ERROR_POLICY,
      exclusive: false,
      handler: entry,
      pluginId,
    };
  }
  // Config-object form: take explicit values, fall back per-field.
  return {
    priority: entry.priority ?? DEFAULT_PRIORITY,
    timeout: entry.timeout ?? DEFAULT_TIMEOUT,
    dependencies: entry.dependencies ?? [],
    errorPolicy: entry.errorPolicy ?? DEFAULT_ERROR_POLICY,
    exclusive: entry.exclusive ?? false,
    handler: entry.handler,
    pluginId,
  };
}
// Precomputed lookup sets so hook-name and capability validation is O(1) per check.
const VALID_CAPABILITIES_SET = new Set<string>(PLUGIN_CAPABILITIES);
const VALID_HOOK_NAMES_SET = new Set<string>(HOOK_NAMES);
/**
 * Adapt a standard-format plugin definition into a ResolvedPlugin.
 *
 * This is the core of the unified plugin format. It takes the `{ hooks, routes }`
 * export from a standard plugin and produces a ResolvedPlugin that can enter the
 * HookPipeline alongside native plugins.
 *
 * Validation performed here (each failure throws):
 * - every declared hook name must be in HOOK_NAMES
 * - every descriptor capability must be in PLUGIN_CAPABILITIES
 *
 * @param definition - The standard plugin definition (from definePlugin() or raw export)
 * @param descriptor - The plugin descriptor with id, version, capabilities, etc.
 * @returns A ResolvedPlugin compatible with HookPipeline
 * @throws Error on an unknown hook name or capability
 */
export function adaptSandboxEntry(
  definition: StandardPluginDefinition,
  descriptor: PluginDescriptor,
): ResolvedPlugin {
  const pluginId = descriptor.id;
  const version = descriptor.version;
  // Resolve hooks: validate each name, then normalize the entry to a ResolvedHook
  const resolvedHooks: ResolvedPluginHooks = {};
  if (definition.hooks) {
    for (const [hookName, entry] of Object.entries(definition.hooks)) {
      if (!VALID_HOOK_NAMES_SET.has(hookName)) {
        throw new Error(
          `Plugin "${pluginId}" declares unknown hook "${hookName}". ` +
            `Valid hooks: ${[...VALID_HOOK_NAMES_SET].join(", ")}`,
        );
      }
      // The resolved hook has the correct handler type for the hook name.
      // We store it as the generic type and let HookPipeline's typed dispatch
      // methods handle the type narrowing at call time.
      // eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion -- bridging untyped map to typed interface
      (resolvedHooks as Record<string, unknown>)[hookName] = resolveStandardHook(entry, pluginId);
    }
  }
  // Resolve routes: standard format uses (routeCtx, pluginCtx) two-arg pattern.
  // Native format uses (ctx: RouteContext) single-arg pattern where RouteContext
  // extends PluginContext with { input, request, requestMeta }.
  // We wrap standard route handlers to split the single ctx back into two args.
  const resolvedRoutes: Record<string, PluginRoute> = {};
  if (definition.routes) {
    for (const [routeName, routeEntry] of Object.entries(definition.routes)) {
      const standardHandler = routeEntry.handler;
      resolvedRoutes[routeName] = {
        // eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion -- StandardRouteEntry.input is intentionally loosely typed; callers validate at runtime
        input: routeEntry.input as PluginRoute["input"],
        public: routeEntry.public,
        handler: async (ctx) => {
          // Build the routeCtx shape that standard handlers expect
          const routeCtx = {
            input: ctx.input,
            request: ctx.request,
            requestMeta: ctx.requestMeta,
          };
          // Pass only the PluginContext portion (without input/request/requestMeta)
          // to match what sandboxed handlers receive.
          const { input: _, request: __, requestMeta: ___, ...pluginCtx } = ctx;
          return standardHandler(routeCtx, pluginCtx);
        },
      };
    }
  }
  // Build capabilities from descriptor.
  // Validate against the known set (same as defineNativePlugin).
  const rawCapabilities = descriptor.capabilities ?? [];
  for (const cap of rawCapabilities) {
    if (!VALID_CAPABILITIES_SET.has(cap)) {
      throw new Error(
        `Invalid capability "${cap}" in plugin "${pluginId}". ` +
          `Valid capabilities: ${[...VALID_CAPABILITIES_SET].join(", ")}`,
      );
    }
  }
  // eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion -- validated against VALID_CAPABILITIES_SET above; descriptor uses string[] for flexibility
  const capabilities = [...rawCapabilities] as PluginCapability[];
  const allowedHosts = descriptor.allowedHosts ?? [];
  // Capability implications: broader capabilities imply narrower ones
  // (mirrors the normalization in define-plugin.ts for native format)
  if (capabilities.includes("write:content") && !capabilities.includes("read:content")) {
    capabilities.push("read:content");
  }
  if (capabilities.includes("write:media") && !capabilities.includes("read:media")) {
    capabilities.push("read:media");
  }
  if (capabilities.includes("network:fetch:any") && !capabilities.includes("network:fetch")) {
    capabilities.push("network:fetch");
  }
  // Build storage config from descriptor.
  // StorageCollectionDeclaration uses optional indexes, but PluginStorageConfig
  // requires them. Ensure every collection has an indexes array.
  const rawStorage = descriptor.storage ?? {};
  const storage: PluginStorageConfig = {};
  for (const [name, config] of Object.entries(rawStorage)) {
    storage[name] = {
      indexes: config.indexes ?? [],
      uniqueIndexes: config.uniqueIndexes,
    };
  }
  // Build admin config from descriptor (pages and widgets are each optional)
  const admin: PluginAdminConfig = {};
  if (descriptor.adminPages) {
    admin.pages = descriptor.adminPages;
  }
  if (descriptor.adminWidgets) {
    admin.widgets = descriptor.adminWidgets;
  }
  return {
    id: pluginId,
    version,
    capabilities,
    allowedHosts,
    storage,
    hooks: resolvedHooks,
    routes: resolvedRoutes,
    admin,
  };
}

View File

@@ -0,0 +1,833 @@
/**
* Plugin Context v2
*
* Creates the unified context object provided to plugins in all hooks and routes.
*
*/
import type { Kysely } from "kysely";
import { ulid } from "ulidx";
import { ContentRepository } from "../database/repositories/content.js";
import { MediaRepository } from "../database/repositories/media.js";
import { OptionsRepository } from "../database/repositories/options.js";
import { PluginStorageRepository } from "../database/repositories/plugin-storage.js";
import { UserRepository } from "../database/repositories/user.js";
import type { Database } from "../database/types.js";
import { validateExternalUrl, SsrfError, stripCredentialHeaders } from "../import/ssrf.js";
import type { Storage } from "../storage/types.js";
import { CronAccessImpl } from "./cron.js";
import type { EmailPipeline } from "./email.js";
import type {
ResolvedPlugin,
PluginContext,
PluginStorageConfig,
StorageCollection,
KVAccess,
CronAccess,
EmailAccess,
ContentAccess,
ContentAccessWithWrite,
MediaAccess,
MediaAccessWithWrite,
HttpAccess,
LogAccess,
SiteInfo,
UserAccess,
UserInfo,
ContentItem,
MediaItem,
PaginatedResult,
QueryOptions,
ContentListOptions,
MediaListOptions,
} from "./types.js";
// =============================================================================
// KV Access
// =============================================================================
/**
 * Create the key-value accessor for a plugin.
 *
 * Every key is transparently namespaced as `plugin:<id>:<key>` in the options
 * table, and the namespace is stripped again from keys returned by list().
 */
export function createKVAccess(optionsRepo: OptionsRepository, pluginId: string): KVAccess {
  const namespace = `plugin:${pluginId}:`;
  const qualify = (key: string): string => `${namespace}${key}`;
  return {
    get: <T>(key: string): Promise<T | null> => optionsRepo.get<T>(qualify(key)),
    async set(key: string, value: unknown): Promise<void> {
      await optionsRepo.set(qualify(key), value);
    },
    delete: (key: string): Promise<boolean> => optionsRepo.delete(qualify(key)),
    async list(keyPrefix = ""): Promise<Array<{ key: string; value: unknown }>> {
      const entries = await optionsRepo.getByPrefix(qualify(keyPrefix));
      // Strip the plugin namespace so callers see their own key names.
      return [...entries].map(([fullKey, value]) => ({
        key: fullKey.slice(namespace.length),
        value,
      }));
    },
  };
}
// =============================================================================
// Storage Access
// =============================================================================
/**
 * Build one storage-collection accessor for a plugin.
 *
 * Thin wrapper over PluginStorageRepository exposing the v2 interface
 * (no async iterators; query() returns a PaginatedResult).
 */
function createStorageCollection<T>(
  db: Kysely<Database>,
  pluginId: string,
  collectionName: string,
  indexes: Array<string | string[]>,
): StorageCollection<T> {
  const repo = new PluginStorageRepository<T>(db, pluginId, collectionName, indexes);
  // query() translated separately: it reshapes the repository result into
  // the v2 PaginatedResult contract.
  const query = async (
    options?: QueryOptions,
  ): Promise<PaginatedResult<{ id: string; data: T }>> => {
    const { items, cursor, hasMore } = await repo.query({
      where: options?.where,
      orderBy: options?.orderBy,
      limit: options?.limit,
      cursor: options?.cursor,
    });
    return { items, cursor, hasMore };
  };
  return {
    get: (id) => repo.get(id),
    put: (id, data) => repo.put(id, data),
    delete: (id) => repo.delete(id),
    exists: (id) => repo.exists(id),
    getMany: (ids) => repo.getMany(ids),
    putMany: (items) => repo.putMany(items),
    deleteMany: (ids) => repo.deleteMany(ids),
    count: (where) => repo.count(where),
    query,
  };
}
/**
 * Create the storage accessor containing every collection the plugin declared.
 * Unique indexes are folded into the plain index list handed to the repository.
 */
export function createStorageAccess<T extends PluginStorageConfig>(
  db: Kysely<Database>,
  pluginId: string,
  storageConfig: T,
): Record<string, StorageCollection> {
  const collections: Record<string, StorageCollection> = {};
  for (const [name, cfg] of Object.entries(storageConfig)) {
    const indexes = [...cfg.indexes, ...(cfg.uniqueIndexes ?? [])];
    collections[name] = createStorageCollection(db, pluginId, name, indexes);
  }
  return collections;
}
// =============================================================================
// Content Access
// =============================================================================
/**
 * Create the read-only content accessor (read:content capability).
 * Repository rows are projected onto the plugin-facing ContentItem shape.
 */
export function createContentAccess(db: Kysely<Database>): ContentAccess {
  const contentRepo = new ContentRepository(db);
  return {
    async get(collection: string, id: string): Promise<ContentItem | null> {
      const found = await contentRepo.findById(collection, id);
      return found
        ? {
            id: found.id,
            type: found.type,
            data: found.data,
            createdAt: found.createdAt,
            updatedAt: found.updatedAt,
          }
        : null;
    },
    async list(
      collection: string,
      options?: ContentListOptions,
    ): Promise<PaginatedResult<ContentItem>> {
      // The repository takes a single { field, direction } pair; only the first
      // entry of the caller's orderBy map is honored.
      const firstOrder = options?.orderBy ? Object.entries(options.orderBy)[0] : undefined;
      const orderBy = firstOrder ? { field: firstOrder[0], direction: firstOrder[1] } : undefined;
      const page = await contentRepo.findMany(collection, {
        limit: options?.limit ?? 50,
        cursor: options?.cursor,
        orderBy,
      });
      return {
        items: page.items.map((row) => ({
          id: row.id,
          type: row.type,
          data: row.data,
          createdAt: row.createdAt,
          updatedAt: row.updatedAt,
        })),
        cursor: page.nextCursor,
        hasMore: Boolean(page.nextCursor),
      };
    },
  };
}
/**
 * Create the full content accessor (write:content capability): the read-only
 * surface plus create/update/delete.
 */
export function createContentAccessWithWrite(db: Kysely<Database>): ContentAccessWithWrite {
  const contentRepo = new ContentRepository(db);
  return {
    ...createContentAccess(db),
    async create(collection: string, data: Record<string, unknown>): Promise<ContentItem> {
      const created = await contentRepo.create({
        type: collection,
        data,
      });
      return {
        id: created.id,
        type: created.type,
        data: created.data,
        createdAt: created.createdAt,
        updatedAt: created.updatedAt,
      };
    },
    async update(
      collection: string,
      id: string,
      data: Record<string, unknown>,
    ): Promise<ContentItem> {
      const updated = await contentRepo.update(collection, id, { data });
      return {
        id: updated.id,
        type: updated.type,
        data: updated.data,
        createdAt: updated.createdAt,
        updatedAt: updated.updatedAt,
      };
    },
    delete: (collection: string, id: string): Promise<boolean> =>
      contentRepo.delete(collection, id),
  };
}
// =============================================================================
// Media Access
// =============================================================================
/**
 * Create the read-only media accessor (read:media capability).
 */
export function createMediaAccess(db: Kysely<Database>): MediaAccess {
  const mediaRepo = new MediaRepository(db);
  // Public URL convention: files are addressed by id plus original filename.
  const publicUrl = (id: string, filename: string): string => `/media/${id}/${filename}`;
  const toMediaItem = (row: {
    id: string;
    filename: string;
    mimeType: string;
    size: number;
    createdAt: string;
  }): MediaItem => ({
    id: row.id,
    filename: row.filename,
    mimeType: row.mimeType,
    size: row.size,
    url: publicUrl(row.id, row.filename),
    createdAt: row.createdAt,
  });
  return {
    async get(id: string): Promise<MediaItem | null> {
      const row = await mediaRepo.findById(id);
      if (!row) return null;
      return toMediaItem(row);
    },
    async list(options?: MediaListOptions): Promise<PaginatedResult<MediaItem>> {
      const page = await mediaRepo.findMany({
        limit: options?.limit ?? 50,
        cursor: options?.cursor,
        mimeType: options?.mimeType,
      });
      return {
        items: page.items.map(toMediaItem),
        cursor: page.nextCursor,
        hasMore: Boolean(page.nextCursor),
      };
    },
  };
}
/**
 * Create full media access with write operations.
 *
 * Composes the read-only accessor with getUploadUrl/upload/delete.
 * If storage is not provided, upload() will throw at call time.
 *
 * @param db - Database handle used by the media repository
 * @param getUploadUrlFn - Generates an upload URL for a filename/content type
 * @param storage - Optional blob storage backend enabling direct upload()
 */
export function createMediaAccessWithWrite(
  db: Kysely<Database>,
  getUploadUrlFn: (
    filename: string,
    contentType: string,
  ) => Promise<{ uploadUrl: string; mediaId: string }>,
  storage?: Storage,
): MediaAccessWithWrite {
  const mediaRepo = new MediaRepository(db);
  const readAccess = createMediaAccess(db);
  return {
    ...readAccess,
    getUploadUrl: getUploadUrlFn,
    async upload(
      filename: string,
      contentType: string,
      bytes: ArrayBuffer,
    ): Promise<{ mediaId: string; storageKey: string; url: string }> {
      if (!storage) {
        throw new Error(
          "Media upload() requires a storage backend. Configure storage in PluginContextFactoryOptions.",
        );
      }
      const mediaId = ulid();
      // Extract extension from basename (ignore path separators)
      const basename = filename.split("/").pop() ?? filename;
      const dotIdx = basename.lastIndexOf(".");
      // dotIdx > 0 (not >= 0) so dotfiles like ".env" get no extension
      const ext = dotIdx > 0 ? basename.slice(dotIdx).toLowerCase() : "";
      const storageKey = `${mediaId}${ext}`;
      // Upload to storage first so a DB row never references a missing blob
      await storage.upload({
        key: storageKey,
        body: new Uint8Array(bytes),
        contentType,
      });
      // Create DB record — clean up storage on failure
      // NOTE(review): the locally generated `mediaId` is returned to the caller,
      // but it is not passed to mediaRepo.create(); if the repository assigns
      // its own id, the returned mediaId will not match the DB row. Confirm the
      // repository derives the row id from storageKey or accepts an explicit id.
      try {
        await mediaRepo.create({
          filename: basename,
          mimeType: contentType,
          size: bytes.byteLength,
          storageKey,
          status: "ready",
        });
      } catch (error) {
        try {
          await storage.delete(storageKey);
        } catch {
          // Best-effort cleanup; the original create() error is rethrown below
        }
        throw error;
      }
      return {
        mediaId,
        storageKey,
        url: `/_emdash/api/media/file/${storageKey}`,
      };
    },
    async delete(id: string): Promise<boolean> {
      return mediaRepo.delete(id);
    },
  };
}
// =============================================================================
// HTTP Access
// =============================================================================
/** Maximum number of redirects to follow in plugin HTTP access */
const MAX_PLUGIN_REDIRECTS = 5;
/**
 * Check whether `host` matches one of the configured allow-list entries.
 *
 * Supported pattern forms:
 * - exact hostname: "api.example.com"
 * - wildcard: "*.example.com" — matches any subdomain AND the bare apex
 *
 * Comparison is case-insensitive: DNS names are case-insensitive, and while
 * URL.hostname is already lowercased, configured patterns may not be.
 *
 * @param host - Hostname extracted from the request URL
 * @param allowedHosts - Patterns from the plugin's allowedHosts declaration
 */
function isHostAllowed(host: string, allowedHosts: string[]): boolean {
  const candidate = host.toLowerCase();
  return allowedHosts.some((rawPattern) => {
    const pattern = rawPattern.toLowerCase();
    if (pattern.startsWith("*.")) {
      // Keep the leading dot in the suffix so "evil-example.com" cannot
      // satisfy "*.example.com"; match the bare apex separately.
      const suffix = pattern.slice(1); // ".example.com"
      return candidate.endsWith(suffix) || candidate === pattern.slice(2);
    }
    return candidate === pattern;
  });
}
/**
 * Create HTTP access with host validation.
 *
 * Uses redirect: "manual" to re-validate each redirect target against
 * the allowedHosts list, preventing redirects to unauthorized hosts.
 * Credential-bearing headers are stripped when a redirect crosses origins.
 */
export function createHttpAccess(pluginId: string, allowedHosts: string[]): HttpAccess {
  return {
    async fetch(url: string, init?: RequestInit): Promise<Response> {
      // Deny by default — plugins must declare allowed hosts
      if (allowedHosts.length === 0) {
        throw new Error(
          `Plugin "${pluginId}" has no allowed hosts configured. ` +
            `Add hosts to the plugin's allowedHosts array to enable HTTP requests.`,
        );
      }
      let currentUrl = url;
      let currentInit = init;
      // One iteration per hop: the initial request plus up to
      // MAX_PLUGIN_REDIRECTS redirects.
      for (let i = 0; i <= MAX_PLUGIN_REDIRECTS; i++) {
        // Every hop (including the first) must target an allowed host
        const hostname = new URL(currentUrl).hostname;
        if (!isHostAllowed(hostname, allowedHosts)) {
          throw new Error(
            `Plugin "${pluginId}" is not allowed to fetch from host "${hostname}". ` +
              `Allowed hosts: ${allowedHosts.join(", ")}`,
          );
        }
        // redirect: "manual" so each hop is validated here rather than
        // followed silently by the platform fetch
        const response = await globalThis.fetch(currentUrl, {
          ...currentInit,
          redirect: "manual",
        });
        // Not a redirect -- return directly
        if (response.status < 300 || response.status >= 400) {
          return response;
        }
        // Extract redirect target; a 3xx without Location is returned as-is
        const location = response.headers.get("Location");
        if (!location) {
          return response;
        }
        // Resolve relative redirects; strip credentials on cross-origin hops
        const previousOrigin = new URL(currentUrl).origin;
        currentUrl = new URL(location, currentUrl).href;
        const nextOrigin = new URL(currentUrl).origin;
        if (previousOrigin !== nextOrigin && currentInit) {
          currentInit = stripCredentialHeaders(currentInit);
        }
      }
      throw new Error(`Plugin "${pluginId}": too many redirects (max ${MAX_PLUGIN_REDIRECTS})`);
    },
  };
}
/**
 * Create unrestricted HTTP access (for plugins with network:fetch:any capability).
 * No host allow-list, but SSRF validation runs on the initial URL and every
 * redirect target to prevent plugins from being tricked into reaching
 * internal services.
 */
export function createUnrestrictedHttpAccess(pluginId: string): HttpAccess {
  return {
    async fetch(url: string, init?: RequestInit): Promise<Response> {
      let currentUrl = url;
      let currentInit = init;
      // One iteration per hop: the initial request plus up to
      // MAX_PLUGIN_REDIRECTS redirects.
      for (let i = 0; i <= MAX_PLUGIN_REDIRECTS; i++) {
        // Validate each URL against SSRF rules (private IPs, metadata endpoints)
        try {
          validateExternalUrl(currentUrl);
        } catch (e) {
          // Wrap so the error names the plugin and the blocked host;
          // the original SsrfError is preserved as the cause.
          const msg = e instanceof SsrfError ? e.message : "SSRF validation failed";
          throw new Error(
            `Plugin "${pluginId}": blocked fetch to "${new URL(currentUrl).hostname}": ${msg}`,
            { cause: e },
          );
        }
        // redirect: "manual" so each hop passes through the SSRF check above
        const response = await globalThis.fetch(currentUrl, {
          ...currentInit,
          redirect: "manual",
        });
        // Not a redirect -- return directly
        if (response.status < 300 || response.status >= 400) {
          return response;
        }
        // Extract redirect target; a 3xx without Location is returned as-is
        const location = response.headers.get("Location");
        if (!location) {
          return response;
        }
        // Resolve relative redirects; strip credentials on cross-origin hops
        const previousOrigin = new URL(currentUrl).origin;
        currentUrl = new URL(location, currentUrl).href;
        const nextOrigin = new URL(currentUrl).origin;
        if (previousOrigin !== nextOrigin && currentInit) {
          currentInit = stripCredentialHeaders(currentInit);
        }
      }
      throw new Error(`Plugin "${pluginId}": too many redirects (max ${MAX_PLUGIN_REDIRECTS})`);
    },
  };
}
/**
 * HTTP accessor handed to plugins that lack the "network:fetch" capability.
 * Every call rejects with an actionable error naming the missing capability.
 */
export function createBlockedHttpAccess(pluginId: string): HttpAccess {
  const denied =
    `Plugin "${pluginId}" does not have the "network:fetch" capability. ` +
    `Add "network:fetch" to the plugin's capabilities to enable HTTP requests.`;
  return {
    fetch(): Promise<never> {
      return Promise.reject(new Error(denied));
    },
  };
}
// =============================================================================
// Log Access
// =============================================================================
/**
 * Create a namespaced logger for a plugin.
 *
 * Every line is prefixed with `[plugin:<id>]` so plugin output is attributable
 * in shared logs. The optional `data` argument is forwarded only when it was
 * provided, so consoles don't render a trailing "undefined".
 *
 * @param pluginId - Plugin identifier used in the log prefix
 * @returns LogAccess with debug/info/warn/error methods
 */
export function createLogAccess(pluginId: string): LogAccess {
  const prefix = `[plugin:${pluginId}]`;
  // Single shared implementation for all four levels. The console method is
  // invoked through a wrapper at call time, so runtime replacement of console
  // methods (e.g. by test harnesses) is respected.
  const emit =
    (write: (...args: unknown[]) => void) =>
    (message: string, data?: unknown): void => {
      if (data !== undefined) {
        write(prefix, message, data);
      } else {
        write(prefix, message);
      }
    };
  return {
    debug: emit((...args) => console.debug(...args)),
    info: emit((...args) => console.info(...args)),
    warn: emit((...args) => console.warn(...args)),
    error: emit((...args) => console.error(...args)),
  };
}
// =============================================================================
// Site Info
// =============================================================================
const TRAILING_SLASH_RE = /\/$/;
/**
 * Options for creating site info
 */
export interface SiteInfoOptions {
  /** Site name from options table */
  siteName?: string;
  /** Site URL from options table or Astro config */
  siteUrl?: string;
  /** Site locale from options table */
  locale?: string;
}
/**
 * Create site info from config and settings.
 *
 * Resolution order for URL:
 * 1. options table (emdash:site_url)
 * 2. Astro `site` config
 * 3. fallback to empty string
 *
 * The URL is normalized without a trailing slash so paths can be appended.
 */
export function createSiteInfo(options: SiteInfoOptions): SiteInfo {
  const rawUrl = options.siteUrl ?? "";
  return {
    name: options.siteName ?? "",
    url: rawUrl.endsWith("/") ? rawUrl.slice(0, -1) : rawUrl,
    locale: options.locale ?? "en",
  };
}
/**
 * Create a URL helper that turns a site-relative path into an absolute URL.
 *
 * Accepts only paths beginning with a single "/": a missing leading slash or
 * a protocol-relative "//host" form throws, so plugins cannot smuggle in
 * off-site URLs.
 */
export function createUrlHelper(siteUrl: string): (path: string) => string {
  // Normalize the base once, without a trailing slash.
  const base = siteUrl.endsWith("/") ? siteUrl.slice(0, -1) : siteUrl;
  return (path: string): string => {
    if (path.startsWith("//")) {
      throw new Error(`URL path must not be protocol-relative, got: "${path}"`);
    }
    if (!path.startsWith("/")) {
      throw new Error(`URL path must start with "/", got: "${path}"`);
    }
    return base + path;
  };
}
// =============================================================================
// User Access
// =============================================================================
/**
 * Project a repository user row onto the plugin-facing UserInfo shape.
 * Only the five whitelisted fields survive; anything else on the row
 * (avatarUrl, emailVerified, data, …) is deliberately dropped.
 */
function toUserInfo(user: {
  id: string;
  email: string;
  name: string | null;
  role: number;
  createdAt: string;
}): UserInfo {
  const { id, email, name, role, createdAt } = user;
  return { id, email, name, role, createdAt };
}
/**
 * Create read-only user access for plugins.
 * Every row is filtered through toUserInfo, so sensitive fields
 * (password hashes, sessions, passkeys, avatar URL, data) never leak out.
 */
export function createUserAccess(db: Kysely<Database>): UserAccess {
  const userRepo = new UserRepository(db);
  return {
    get: async (id: string): Promise<UserInfo | null> => {
      const user = await userRepo.findById(id);
      return user ? toUserInfo(user) : null;
    },
    getByEmail: async (email: string): Promise<UserInfo | null> => {
      const user = await userRepo.findByEmail(email);
      return user ? toUserInfo(user) : null;
    },
    async list(opts?: {
      role?: number;
      limit?: number;
      cursor?: string;
    }): Promise<{ items: UserInfo[]; nextCursor?: string }> {
      const page = await userRepo.findMany({
        role: opts?.role as 10 | 20 | 30 | 40 | 50 | undefined,
        cursor: opts?.cursor,
        limit: opts?.limit,
      });
      return {
        items: page.items.map(toUserInfo),
        nextCursor: page.nextCursor,
      };
    },
  };
}
// =============================================================================
// Plugin Context Factory
// =============================================================================
/**
 * Dependencies for PluginContextFactory. Only `db` is required; each optional
 * field enables an optional context capability (media upload, cron, email).
 */
export interface PluginContextFactoryOptions {
  /** Kysely database handle shared by all repository-backed accessors */
  db: Kysely<Database>;
  /**
   * Storage backend for direct media uploads.
   * If not provided, upload() will throw.
   */
  storage?: Storage;
  /**
   * Function to generate upload URLs for media.
   * If not provided, media write operations will throw.
   */
  getUploadUrl?: (
    filename: string,
    contentType: string,
  ) => Promise<{ uploadUrl: string; mediaId: string }>;
  /**
   * Site information for ctx.site and ctx.url().
   * If not provided, site info will have empty defaults.
   */
  siteInfo?: SiteInfoOptions;
  /**
   * Callback to notify the cron scheduler that the next due time may have changed.
   * If not provided, ctx.cron will not be available.
   */
  cronReschedule?: () => void;
  /**
   * Email pipeline instance for ctx.email.
   * If not provided (or no provider configured), ctx.email will be undefined.
   */
  emailPipeline?: EmailPipeline;
}
/**
 * Factory for creating plugin contexts.
 *
 * Holds the shared runtime dependencies (db, storage, site info, cron, email)
 * and assembles a capability-gated PluginContext per plugin via createContext().
 */
export class PluginContextFactory {
  private optionsRepo: OptionsRepository;
  private db: Kysely<Database>;
  private storage?: Storage;
  private getUploadUrl?: (
    filename: string,
    contentType: string,
  ) => Promise<{ uploadUrl: string; mediaId: string }>;
  private site: SiteInfo;
  private urlHelper: (path: string) => string;
  private cronReschedule?: () => void;
  private emailPipeline?: EmailPipeline;
  constructor(options: PluginContextFactoryOptions) {
    this.db = options.db;
    this.optionsRepo = new OptionsRepository(options.db);
    this.storage = options.storage;
    this.getUploadUrl = options.getUploadUrl;
    // Site info falls back to empty defaults when not configured
    this.site = createSiteInfo(options.siteInfo ?? {});
    this.urlHelper = createUrlHelper(this.site.url);
    this.cronReschedule = options.cronReschedule;
    this.emailPipeline = options.emailPipeline;
  }
  /**
   * Create the unified plugin context.
   *
   * kv, log, and storage are always present; content, media, http, users,
   * cron, and email are included only when the plugin holds the matching
   * capability (and, where applicable, the runtime dependency is configured).
   */
  createContext(plugin: ResolvedPlugin): PluginContext {
    const capabilities = new Set(plugin.capabilities);
    // Always available
    const kv = createKVAccess(this.optionsRepo, plugin.id);
    const log = createLogAccess(plugin.id);
    const storage = createStorageAccess(this.db, plugin.id, plugin.storage);
    // Capability-gated: content — write:content gets the read+write accessor
    let content: ContentAccess | ContentAccessWithWrite | undefined;
    if (capabilities.has("write:content")) {
      content = createContentAccessWithWrite(this.db);
    } else if (capabilities.has("read:content")) {
      content = createContentAccess(this.db);
    }
    // Capability-gated: media — write access additionally requires getUploadUrl;
    // without it, write:media falls through to the read branch
    let media: MediaAccess | MediaAccessWithWrite | undefined;
    if (capabilities.has("write:media") && this.getUploadUrl) {
      media = createMediaAccessWithWrite(this.db, this.getUploadUrl, this.storage);
    } else if (capabilities.has("read:media")) {
      media = createMediaAccess(this.db);
    }
    // Capability-gated: http — network:fetch:any bypasses the host allow-list
    let http: HttpAccess | undefined;
    if (capabilities.has("network:fetch:any")) {
      http = createUnrestrictedHttpAccess(plugin.id);
    } else if (capabilities.has("network:fetch")) {
      http = createHttpAccess(plugin.id, plugin.allowedHosts);
    }
    // Capability-gated: users
    let users: UserAccess | undefined;
    if (capabilities.has("read:users")) {
      users = createUserAccess(this.db);
    }
    // Cron access — always available (scoped to plugin), but only if
    // the runtime provided a reschedule callback (i.e. cron is wired up).
    let cron: CronAccess | undefined;
    if (this.cronReschedule) {
      cron = new CronAccessImpl(this.db, plugin.id, this.cronReschedule);
    }
    // Email access — requires email:send capability AND a configured provider
    let email: EmailAccess | undefined;
    if (capabilities.has("email:send") && this.emailPipeline?.isAvailable()) {
      const pipeline = this.emailPipeline;
      const pluginId = plugin.id;
      email = {
        send: (message) => pipeline.send(message, pluginId),
      };
    }
    return {
      plugin: {
        id: plugin.id,
        version: plugin.version,
      },
      storage,
      kv,
      content,
      media,
      http,
      log,
      site: this.site,
      url: this.urlHelper,
      users,
      cron,
      email,
    };
  }
}
/**
 * Convenience wrapper: build a one-off PluginContext without holding on to
 * the factory instance.
 */
export function createPluginContext(
  options: PluginContextFactoryOptions,
  plugin: ResolvedPlugin,
): PluginContext {
  return new PluginContextFactory(options).createContext(plugin);
}

View File

@@ -0,0 +1,361 @@
/**
* Plugin Cron System
*
* Provides scheduled task execution for plugins:
* - CronExecutor: claims overdue tasks, invokes per-plugin cron hook, updates next run.
* - CronAccessImpl: per-plugin API for schedule/cancel/list.
*
*/
import { Cron } from "croner";
import type { Kysely } from "kysely";
import { sql } from "kysely";
import { ulid } from "ulidx";
import type { Database } from "../database/types.js";
import type { CronAccess, CronEvent, CronTaskInfo } from "./types.js";
/** Stale lock threshold in minutes */
const STALE_LOCK_MINUTES = 10;
/**
* Callback to invoke a plugin's cron hook.
* Provided by PluginManager so CronExecutor stays decoupled from the hook pipeline.
*/
export type InvokeCronHookFn = (pluginId: string, event: CronEvent) => Promise<void>;
/**
* Callback to notify the scheduler that the next due time may have changed.
*/
export type RescheduleFn = () => void;
// ─── CronExecutor ──────────────────────────────────────────────────────────
/**
* Executes overdue cron tasks.
*
* Called by platform-specific schedulers (NodeCronScheduler, EmDashScheduler DO,
* PiggybackScheduler). Stateless — all state lives in the database.
*/
export class CronExecutor {
constructor(
private db: Kysely<Database>,
private invokeCronHook: InvokeCronHookFn,
) {}
/**
* Process all overdue tasks.
*
* 1. Atomically claim tasks whose next_run_at <= now, status = idle, enabled = 1.
* 2. For each claimed task, invoke the plugin's cron hook.
* 3. On success: compute next_run_at and reset to idle, or delete one-shots.
* 4. On failure: reset to idle (retry on next tick).
*/
async tick(): Promise<number> {
  const now = new Date().toISOString();
  let processed = 0;
  // Claim overdue tasks atomically: flipping status to 'running' inside the
  // same UPDATE that selects them means a concurrent ticker cannot claim the
  // same rows. At most 10 tasks are processed per tick.
  const claimed = await sql<{
    id: string;
    plugin_id: string;
    task_name: string;
    schedule: string;
    is_oneshot: number;
    data: string | null;
    next_run_at: string;
  }>`
    UPDATE _emdash_cron_tasks
    SET status = 'running', locked_at = ${now}
    WHERE id IN (
      SELECT id FROM _emdash_cron_tasks
      WHERE next_run_at <= ${now}
      AND status = 'idle'
      AND enabled = 1
      ORDER BY next_run_at ASC
      LIMIT 10
    )
    RETURNING id, plugin_id, task_name, schedule, is_oneshot, data, next_run_at
  `.execute(this.db);
  for (const task of claimed.rows) {
    // Parse task data safely — malformed JSON must not crash the entire batch
    let parsedData: Record<string, unknown> | undefined;
    if (task.data) {
      try {
        parsedData = JSON.parse(task.data) as Record<string, unknown>;
      } catch {
        console.error(
          `[cron] Invalid JSON data for ${task.plugin_id}:${task.task_name}, skipping`,
        );
        // Release the claim so the row isn't stuck in 'running'.
        // NOTE(review): next_run_at is unchanged, so a task with permanently
        // malformed JSON stays overdue and is re-claimed (and re-logged) on
        // every tick — consider disabling it instead; confirm intent.
        await sql`
          UPDATE _emdash_cron_tasks
          SET status = 'idle', locked_at = NULL
          WHERE id = ${task.id}
        `.execute(this.db);
        continue;
      }
    }
    // Event delivered to the plugin's cron hook.
    const event: CronEvent = {
      name: task.task_name,
      data: parsedData,
      scheduledAt: task.next_run_at,
    };
    let hookFailed = false;
    try {
      await this.invokeCronHook(task.plugin_id, event);
    } catch (error) {
      // A failing hook never aborts the batch; the task's fate is decided below.
      hookFailed = true;
      console.error(`[cron] Hook failed for ${task.plugin_id}:${task.task_name}:`, error);
    }
    if (task.is_oneshot) {
      if (hookFailed) {
        // Keep the task for retry — reset to idle with a 1-minute backoff
        const retryAt = new Date(Date.now() + 60_000).toISOString();
        await sql`
          UPDATE _emdash_cron_tasks
          SET status = 'idle', locked_at = NULL, next_run_at = ${retryAt}
          WHERE id = ${task.id}
        `.execute(this.db);
      } else {
        // Success: delete the one-shot task
        await sql`
          DELETE FROM _emdash_cron_tasks WHERE id = ${task.id}
        `.execute(this.db);
      }
    } else {
      // Recurring: compute next run and reset. On failure this still advances
      // next_run_at, i.e. a failed recurring run is retried at its next
      // scheduled time, not immediately.
      const nextRun = nextCronTime(task.schedule);
      await sql`
        UPDATE _emdash_cron_tasks
        SET status = 'idle',
          locked_at = NULL,
          last_run_at = ${now},
          next_run_at = ${nextRun}
        WHERE id = ${task.id}
      `.execute(this.db);
    }
    processed++;
  }
  return processed;
}
/**
 * Recover tasks stuck in 'running' for longer than STALE_LOCK_MINUTES —
 * these most likely crashed mid-execution. Returns the number of tasks
 * returned to the idle pool.
 */
async recoverStaleLocks(): Promise<number> {
  // Anything locked before this instant has been running too long.
  const staleBefore = new Date(Date.now() - STALE_LOCK_MINUTES * 60 * 1000).toISOString();
  const unlocked = await sql`
    UPDATE _emdash_cron_tasks
    SET status = 'idle', locked_at = NULL
    WHERE status = 'running'
    AND locked_at < ${staleBefore}
  `.execute(this.db);
  return Number(unlocked.numAffectedRows ?? 0);
}
/**
 * Earliest upcoming run time across every enabled, idle task.
 * Returns null when nothing is scheduled.
 */
async getNextDueTime(): Promise<string | null> {
  const due = await sql<{ next: string | null }>`
    SELECT MIN(next_run_at) as next
    FROM _emdash_cron_tasks
    WHERE status = 'idle' AND enabled = 1
  `.execute(this.db);
  const [first] = due.rows;
  return first?.next ?? null;
}
}
// ─── CronAccessImpl ────────────────────────────────────────────────────────
/**
 * Per-plugin cron API implementation.
 * Scoped to a single plugin ID — plugins cannot see or modify other plugins' tasks.
 * Every query filters on `this.pluginId`, so cross-plugin access is impossible
 * through this interface.
 */
export class CronAccessImpl implements CronAccess {
  constructor(
    private db: Kysely<Database>,
    private pluginId: string,
    private reschedule: RescheduleFn,
  ) {}
  /**
   * Register (or replace) a named task for this plugin.
   * Validates name and schedule before touching the database.
   */
  async schedule(
    name: string,
    opts: { schedule: string; data?: Record<string, unknown> },
  ): Promise<void> {
    validateTaskName(name);
    validateSchedule(opts.schedule);
    // One-shots store the ISO datetime itself as the next run time;
    // recurring tasks compute it from the cron expression.
    const oneshot = isOneShot(opts.schedule);
    const nextRun = oneshot ? opts.schedule : nextCronTime(opts.schedule);
    const dataJson = opts.data ? JSON.stringify(opts.data) : null;
    const id = ulid();
    // Upsert: if task already exists for this plugin+name, update it.
    // Guard: don't clobber a task that is currently executing — the CASE
    // expressions preserve status/locked_at for 'running' rows.
    // NOTE(review): schedule/data/next_run_at ARE still overwritten while
    // running; only the lock fields are protected — confirm that's intended.
    await sql`
      INSERT INTO _emdash_cron_tasks (id, plugin_id, task_name, schedule, is_oneshot, data, next_run_at, status, enabled)
      VALUES (${id}, ${this.pluginId}, ${name}, ${opts.schedule}, ${oneshot ? 1 : 0}, ${dataJson}, ${nextRun}, 'idle', 1)
      ON CONFLICT (plugin_id, task_name) DO UPDATE SET
      schedule = ${opts.schedule},
      is_oneshot = ${oneshot ? 1 : 0},
      data = ${dataJson},
      next_run_at = ${nextRun},
      status = CASE WHEN _emdash_cron_tasks.status = 'running' THEN 'running' ELSE 'idle' END,
      locked_at = CASE WHEN _emdash_cron_tasks.status = 'running' THEN _emdash_cron_tasks.locked_at ELSE NULL END,
      enabled = 1
    `.execute(this.db);
    // Wake the scheduler so the new next-due time takes effect immediately.
    this.reschedule();
  }
  /** Remove a named task owned by this plugin (no-op if it doesn't exist). */
  async cancel(name: string): Promise<void> {
    await sql`
      DELETE FROM _emdash_cron_tasks
      WHERE plugin_id = ${this.pluginId} AND task_name = ${name}
    `.execute(this.db);
    this.reschedule();
  }
  /** List this plugin's enabled tasks, soonest-due first. */
  async list(): Promise<CronTaskInfo[]> {
    const rows = await sql<{
      task_name: string;
      schedule: string;
      next_run_at: string;
      last_run_at: string | null;
    }>`
      SELECT task_name, schedule, next_run_at, last_run_at
      FROM _emdash_cron_tasks
      WHERE plugin_id = ${this.pluginId} AND enabled = 1
      ORDER BY next_run_at ASC
    `.execute(this.db);
    return rows.rows.map((row) => ({
      name: row.task_name,
      schedule: row.schedule,
      nextRunAt: row.next_run_at,
      lastRunAt: row.last_run_at,
    }));
  }
}
// ─── Cron task lifecycle helpers ────────────────────────────────────────────
/**
 * Flip the `enabled` flag on every cron task owned by a plugin.
 * Called by admin disable/enable endpoints and PluginManager lifecycle.
 * Swallows errors so a missing cron table (pre-migration) is non-fatal.
 */
export async function setCronTasksEnabled(
  db: Kysely<Database>,
  pluginId: string,
  enabled: boolean,
): Promise<void> {
  const flag = enabled ? 1 : 0;
  try {
    await sql`
      UPDATE _emdash_cron_tasks
      SET enabled = ${flag}
      WHERE plugin_id = ${pluginId}
    `.execute(db);
  } catch {
    // Cron table may not exist yet (pre-migration) — deliberately ignored.
  }
}
// ─── Cron utilities ────────────────────────────────────────────────────────
/**
 * Compute the next fire time for a cron expression as an ISO string.
 * Supports standard cron (5-field), extended (6-field with seconds), and
 * aliases like @daily, @weekly, @hourly, @monthly, @yearly.
 * @throws Error when the expression is invalid or has no future run.
 */
export function nextCronTime(expression: string): string {
  const upcoming = new Cron(expression).nextRun();
  if (upcoming === null) {
    throw new Error(`Invalid cron expression or no future run: "${expression}"`);
  }
  return upcoming.toISOString();
}
/**
* Check whether a string is a valid cron expression.
*/
function isCronExpression(schedule: string): boolean {
try {
// Cron constructor validates; we discard the instance immediately.
const _cron = new Cron(schedule);
void _cron;
return true;
} catch {
return false;
}
}
/**
 * Check if a schedule string is a one-shot (ISO 8601 datetime) rather than
 * a recurring cron expression.
 *
 * Cron parsing is attempted first; only when that fails do we fall back to
 * Date.parse. This avoids misclassifying cron range expressions like
 * "1-5 * * * *", which Date.parse happens to accept as valid dates.
 */
export function isOneShot(schedule: string): boolean {
  // Aliases (@daily etc.) and anything croner accepts are recurring.
  if (schedule.startsWith("@") || isCronExpression(schedule)) {
    return false;
  }
  // One-shot iff the remainder parses as a datetime.
  return !Number.isNaN(Date.parse(schedule));
}
/** Max length for a task name */
const MAX_TASK_NAME_LENGTH = 128;
/** Task name pattern: starts with a letter; then letters, digits, dashes, underscores */
const TASK_NAME_RE = /^[a-zA-Z][a-zA-Z0-9_-]*$/;
/**
 * Validate a cron task name.
 * Must be non-empty, ≤128 chars, alphanumeric with dashes/underscores.
 * @throws Error when the name is missing, too long, or malformed.
 */
export function validateTaskName(name: string): void {
  // `?? 0` keeps the error message constructible even if a nullish value
  // slips past the type system (e.g. across a sandbox RPC boundary) —
  // previously `name.length` would throw a TypeError while building it.
  const length = name?.length ?? 0;
  if (!name || length > MAX_TASK_NAME_LENGTH) {
    throw new Error(
      `Invalid task name: must be 1-${MAX_TASK_NAME_LENGTH} characters, got ${length}`,
    );
  }
  if (!TASK_NAME_RE.test(name)) {
    throw new Error(
      `Invalid task name "${name}": must start with a letter and contain only letters, numbers, dashes, or underscores`,
    );
  }
}
/**
 * Validate a schedule string at registration time.
 * Accepts a valid cron expression or a parseable ISO 8601 datetime.
 * @throws Error when the schedule is empty, too long, or unparseable.
 */
export function validateSchedule(schedule: string): void {
  if (!schedule || schedule.length > 256) {
    throw new Error(`Invalid schedule: must be 1-256 characters, got ${schedule.length}`);
  }
  // Cron is checked first so cron-looking strings never fall through to Date.parse.
  if (isCronExpression(schedule)) {
    return;
  }
  if (Number.isNaN(Date.parse(schedule))) {
    throw new Error(
      `Invalid schedule "${schedule}": must be a valid cron expression or ISO 8601 datetime`,
    );
  }
}

View File

@@ -0,0 +1,259 @@
/**
* definePlugin() Helper
*
* Creates a properly typed and normalized plugin definition.
* Supports two formats:
*
* 1. **Native format** -- full PluginDefinition with id, version, capabilities, etc.
* Returns a ResolvedPlugin.
*
* 2. **Standard format** -- just { hooks, routes }. No id/version/capabilities.
* Returns the same object (identity function for type inference).
* Metadata comes from the descriptor at config time.
*
*/
import type {
PluginDefinition,
ResolvedPlugin,
PluginHooks,
ResolvedPluginHooks,
ResolvedHook,
HookConfig,
PluginStorageConfig,
StandardPluginDefinition,
} from "./types.js";
// Plugin ID validation patterns
const SIMPLE_ID = /^[a-z0-9-]+$/;
const SCOPED_ID = /^@[a-z0-9-]+\/[a-z0-9-]+$/;
const SEMVER_PATTERN = /^\d+\.\d+\.\d+/;
/**
 * Define an EmDash plugin.
 *
 * Two formats are accepted:
 *
 * **Standard format** — the canonical format for plugins that work in both
 * trusted and sandboxed modes. No id/version — those come from the descriptor
 * at config time. Returned unchanged (identity, for type inference).
 *
 * @example
 * ```typescript
 * import { definePlugin } from "emdash";
 *
 * export default definePlugin({
 *   hooks: {
 *     "content:afterSave": {
 *       handler: async (event, ctx) => {
 *         await ctx.kv.set("lastSave", Date.now());
 *       },
 *     },
 *   },
 *   routes: {
 *     status: {
 *       handler: async (routeCtx, ctx) => ({ ok: true }),
 *     },
 *   },
 * });
 * ```
 *
 * **Native format** — for plugins needing React admin, direct DB access, or
 * other capabilities unavailable in the sandbox. Validated and normalized
 * into a ResolvedPlugin.
 *
 * @example
 * ```typescript
 * import { definePlugin } from "emdash";
 *
 * export default definePlugin({
 *   id: "my-plugin",
 *   version: "1.0.0",
 *   capabilities: ["read:content"],
 *   hooks: {
 *     "content:beforeSave": async (event, ctx) => {
 *       ctx.log.info("Saving content", { collection: event.collection });
 *       return event.content;
 *     }
 *   },
 *   routes: {
 *     "sync": {
 *       handler: async (ctx) => {
 *         return { status: "ok" };
 *       }
 *     }
 *   }
 * });
 * ```
 */
// Native overload first -- PluginDefinition (with id+version) is more specific
export function definePlugin<TStorage extends PluginStorageConfig>(
  definition: PluginDefinition<TStorage>,
): ResolvedPlugin<TStorage>;
// Standard overload second -- catches { hooks, routes } without id/version
export function definePlugin(definition: StandardPluginDefinition): StandardPluginDefinition;
export function definePlugin<TStorage extends PluginStorageConfig>(
  definition: PluginDefinition<TStorage> | StandardPluginDefinition,
): ResolvedPlugin<TStorage> | StandardPluginDefinition {
  // Native format carries its own identity: validate and normalize it.
  if ("id" in definition && "version" in definition) {
    return defineNativePlugin(definition);
  }
  // Standard format: must declare at least one of hooks/routes.
  if (!("hooks" in definition) && !("routes" in definition)) {
    throw new Error(
      "Standard plugin format requires at least `hooks` or `routes`. " +
        "For native format, provide `id` and `version`.",
    );
  }
  // Identity — returned untouched for type inference; adaptSandboxEntry
  // converts it to a ResolvedPlugin at build time.
  return definition;
}
/**
 * Internal: define a native-format plugin with full validation and
 * normalization. Validates id/version/capabilities, applies capability
 * implications, and normalizes hooks into resolved form.
 */
function defineNativePlugin<TStorage extends PluginStorageConfig>(
  definition: PluginDefinition<TStorage>,
): ResolvedPlugin<TStorage> {
  const {
    id,
    version,
    capabilities = [],
    allowedHosts = [],
    hooks = {},
    routes = {},
    admin = {},
  } = definition;
  // Default to an empty object when no storage is declared. The empty
  // object satisfies PluginStorageConfig (Record<string, ...>), and the
  // cast is structurally safe because an empty record has no keys to conflict.
  const storage = (definition.storage ?? {}) as TStorage;
  // Plugin id must be either simple ("my-plugin") or scoped ("@scope/my-plugin"),
  // lowercase alphanumeric with dashes in both cases.
  const idIsValid = SIMPLE_ID.test(id) || SCOPED_ID.test(id);
  if (!idIsValid) {
    throw new Error(
      `Invalid plugin id "${id}". Must be lowercase alphanumeric with dashes (e.g., "my-plugin" or "@scope/my-plugin").`,
    );
  }
  // Basic semver check on the version string.
  if (!SEMVER_PATTERN.test(version)) {
    throw new Error(`Invalid plugin version "${version}". Must be semver format (e.g., "1.0.0").`);
  }
  // Reject any capability outside the known set.
  const validCapabilities = new Set([
    "network:fetch",
    "network:fetch:any",
    "read:content",
    "write:content",
    "read:media",
    "write:media",
    "read:users",
    "email:send",
    "email:provide",
    "email:intercept",
    "page:inject",
  ]);
  for (const cap of capabilities) {
    if (!validCapabilities.has(cap)) {
      throw new Error(`Invalid capability "${cap}" in plugin "${id}".`);
    }
  }
  // Capability implications: broader capabilities imply narrower ones.
  // Implied capabilities are appended after the declared ones.
  const implied: typeof capabilities = [];
  if (capabilities.includes("write:content") && !capabilities.includes("read:content")) {
    implied.push("read:content");
  }
  if (capabilities.includes("write:media") && !capabilities.includes("read:media")) {
    implied.push("read:media");
  }
  if (capabilities.includes("network:fetch:any") && !capabilities.includes("network:fetch")) {
    implied.push("network:fetch");
  }
  const normalizedCapabilities = [...capabilities, ...implied];
  // Normalize hooks into resolved form with defaults applied.
  const resolvedHooks = resolveHooks(hooks, id);
  return {
    id,
    version,
    capabilities: normalizedCapabilities,
    allowedHosts,
    storage,
    hooks: resolvedHooks,
    routes,
    admin,
  };
}
/**
 * Resolve hooks to normalized format with defaults.
 *
 * PluginHooks and ResolvedPluginHooks share the same keys — each input value
 * is `HookConfig<H> | H` and the output is `ResolvedHook<H>`. TS cannot
 * narrow the handler type through a dynamic key, so the keyed assignment
 * goes through a Record view of the result.
 */
function resolveHooks(hooks: PluginHooks, pluginId: string): ResolvedPluginHooks {
  const resolved: ResolvedPluginHooks = {};
  const writable = resolved as Record<string, unknown>;
  for (const [name, hook] of Object.entries(hooks)) {
    if (hook) {
      writable[name] = resolveHook(hook, pluginId);
    }
  }
  return resolved;
}
/**
 * Type guard: a hook entry supplied as a config object (with a `handler`
 * property) rather than a bare handler function.
 */
function isHookConfig<THandler>(
  hook: HookConfig<THandler> | THandler,
): hook is HookConfig<THandler> {
  if (typeof hook !== "object" || hook === null) {
    return false;
  }
  return "handler" in hook;
}
/**
 * Normalize one hook entry into a ResolvedHook, filling defaults:
 * priority 100, timeout 5000ms, no dependencies, "abort" error policy,
 * non-exclusive.
 */
function resolveHook<THandler>(
  hook: HookConfig<THandler> | THandler,
  pluginId: string,
): ResolvedHook<THandler> {
  // Bare handler function: every setting takes its default.
  if (!isHookConfig(hook)) {
    return {
      priority: 100,
      timeout: 5000,
      dependencies: [],
      errorPolicy: "abort",
      exclusive: false,
      handler: hook,
      pluginId,
    };
  }
  // Config object: validate `exclusive` (only declared validation), then
  // merge explicit settings over the defaults.
  if (hook.exclusive !== undefined && typeof hook.exclusive !== "boolean") {
    throw new Error(
      `Invalid "exclusive" value in hook config for plugin "${pluginId}". Must be boolean.`,
    );
  }
  return {
    priority: hook.priority ?? 100,
    timeout: hook.timeout ?? 5000,
    dependencies: hook.dependencies ?? [],
    errorPolicy: hook.errorPolicy ?? "abort",
    exclusive: hook.exclusive ?? false,
    handler: hook.handler,
    pluginId,
  };
}
export default definePlugin;

View File

@@ -0,0 +1,73 @@
/**
* Dev Console Email Provider
*
* Built-in plugin that registers email:deliver as an exclusive hook.
* Logs emails to console and stores them in memory (capped at 100).
* Auto-activated when import.meta.env.DEV is true and no other provider is selected.
*
*/
import type { EmailDeliverEvent, EmailMessage, PluginContext } from "./types.js";
/** Plugin ID for the dev console email provider */
export const DEV_CONSOLE_EMAIL_PLUGIN_ID = "emdash-console-email";
/** Maximum number of emails to keep in memory */
const MAX_STORED_EMAILS = 100;
/**
 * Stored email record (in-memory only)
 */
export interface StoredEmail {
  message: EmailMessage;
  source: string;
  sentAt: string;
}
/** In-memory buffer of emails captured during dev */
const storedEmails: StoredEmail[] = [];
/**
 * All captured dev emails, most recent first.
 */
export function getDevEmails(): StoredEmail[] {
  return storedEmails.toReversed();
}
/**
 * Drop every captured dev email.
 */
export function clearDevEmails(): void {
  storedEmails.length = 0;
}
/**
 * The email:deliver handler for the dev console provider.
 * Prints a summary to the console and records the message in memory,
 * capped at MAX_STORED_EMAILS (oldest entries evicted first).
 */
export async function devConsoleEmailDeliver(
  event: EmailDeliverEvent,
  _ctx: PluginContext,
): Promise<void> {
  const { message, source } = event;
  console.log(
    `\n📧 [dev-email] Email sent\n` +
      ` From: ${source}\n` +
      ` To: ${message.to}\n` +
      ` Subject: ${message.subject}\n` +
      ` Text: ${message.text.slice(0, 200)}${message.text.length > 200 ? "..." : ""}\n`,
  );
  // Record, then evict oldest entries beyond the cap.
  storedEmails.push({ message, source, sentAt: new Date().toISOString() });
  const excess = storedEmails.length - MAX_STORED_EMAILS;
  if (excess > 0) {
    storedEmails.splice(0, excess);
  }
}

View File

@@ -0,0 +1,209 @@
/**
* Email Pipeline
*
* Orchestrates the three-stage email pipeline:
* 1. email:beforeSend hooks (middleware — transform, validate, cancel)
* 2. email:deliver hook (exclusive — exactly one provider delivers)
* 3. email:afterSend hooks (logging, analytics, fire-and-forget)
*
* Security features:
* - Recursion guard prevents re-entrant sends (e.g. plugin calling ctx.email.send from a hook)
* - System emails (source="system") bypass email:beforeSend and email:afterSend hooks entirely
* to protect auth tokens from exfiltration by plugin hooks
*
*/
import { AsyncLocalStorage } from "node:async_hooks";
import type { HookPipeline } from "./hooks.js";
import type { EmailDeliverEvent, EmailMessage } from "./types.js";
/** Hook name for the exclusive email delivery hook */
const EMAIL_DELIVER_HOOK = "email:deliver";
/** Source value used for auth emails (magic links, invites, password resets) */
const SYSTEM_SOURCE = "system";
/**
 * Thrown by ctx.email.send() when no email:deliver provider is configured.
 */
export class EmailNotConfiguredError extends Error {
  constructor() {
    const message =
      "No email provider is configured. Install and activate an email provider plugin, " +
      "then select it in Settings > Email.";
    super(message);
    this.name = "EmailNotConfiguredError";
  }
}
/**
 * Thrown when a re-entrant email send is detected — a plugin hook tried to
 * send an email from within the email pipeline, which would recurse forever.
 */
export class EmailRecursionError extends Error {
  constructor() {
    const message =
      "Recursive email send detected. A plugin hook attempted to send an email " +
      "from within the email pipeline, which would cause infinite recursion.";
    super(message);
    this.name = "EmailRecursionError";
  }
}
/**
* Recursion guard using AsyncLocalStorage.
*
* EmailPipeline is a singleton (worker-lifetime cached via EmDashRuntime).
* Instance state like `sendDepth` would false-positive under concurrent
* requests because two unrelated sends would increment the same counter.
* ALS scopes the guard to the current async execution context, so concurrent
* requests each get their own independent recursion tracking.
*/
const emailSendALS = new AsyncLocalStorage<{ depth: number }>();
/**
* EmailPipeline orchestrates email delivery through the plugin hook system.
*
* The pipeline runs in three stages:
* 1. email:beforeSend — middleware hooks that can transform or cancel messages
* 2. email:deliver — exclusive hook dispatching to the selected provider
* 3. email:afterSend — fire-and-forget hooks for logging/analytics
*/
export class EmailPipeline {
  // Current hook pipeline; swapped out by setPipeline() when plugins change.
  private pipeline: HookPipeline;
  constructor(pipeline: HookPipeline) {
    this.pipeline = pipeline;
  }
  /**
   * Replace the underlying hook pipeline.
   *
   * Called by the runtime when rebuilding the hook pipeline after a
   * plugin is enabled or disabled, so the email pipeline dispatches
   * to the current set of active hooks.
   */
  setPipeline(pipeline: HookPipeline): void {
    this.pipeline = pipeline;
  }
  /**
   * Send an email through the full pipeline.
   *
   * @param message - The email to send
   * @param source - Where the email originated ("system" for auth, plugin ID for plugins)
   * @throws EmailNotConfiguredError if no provider is selected
   * @throws EmailRecursionError if called re-entrantly from within a hook
   * @throws Error if the provider handler throws
   */
  async send(message: EmailMessage, source: string): Promise<void> {
    // Recursion guard: a plugin with email:send + email:intercept calling
    // ctx.email.send() from an email hook would loop forever.
    // Uses AsyncLocalStorage so concurrent requests don't interfere —
    // each async context tracks its own depth independently.
    const store = emailSendALS.getStore();
    if (store && store.depth > 0) {
      throw new EmailRecursionError();
    }
    const run = () => this.sendInner(message, source);
    if (store) {
      // Already inside an ALS context (e.g. nested call) — increment depth.
      // The finally guarantees the depth is restored even when sendInner throws.
      store.depth++;
      try {
        await run();
      } finally {
        store.depth--;
      }
    } else {
      // First call — create new ALS context; depth starts at 1 so any
      // nested send() inside `run` hits the depth > 0 check above.
      await emailSendALS.run({ depth: 1 }, run);
    }
  }
  /**
   * Inner send implementation, separated from the recursion guard.
   * Runs the three stages: beforeSend middleware, exclusive deliver,
   * then fire-and-forget afterSend.
   */
  private async sendInner(message: EmailMessage, source: string): Promise<void> {
    // Validate message fields at the pipeline boundary. TypeScript enforces
    // this at compile time, but sandboxed plugins cross an RPC boundary
    // where runtime types aren't guaranteed.
    if (!message || typeof message !== "object") {
      throw new Error("Invalid email message: message must be an object");
    }
    if (!message.to || typeof message.to !== "string") {
      throw new Error("Invalid email message: 'to' is required and must be a string");
    }
    if (!message.subject || typeof message.subject !== "string") {
      throw new Error("Invalid email message: 'subject' is required and must be a string");
    }
    if (!message.text || typeof message.text !== "string") {
      throw new Error("Invalid email message: 'text' is required and must be a string");
    }
    const isSystemEmail = source === SYSTEM_SOURCE;
    // System emails (auth tokens, magic links, invites) skip the
    // email:beforeSend pipeline entirely. These contain sensitive tokens
    // that must never be exposed to plugin hooks — a malicious interceptor
    // could rewrite the body/URL to steal auth tokens even if the `to`
    // field is protected.
    let finalMessage: EmailMessage;
    if (isSystemEmail) {
      finalMessage = message;
    } else {
      // Stage 1: email:beforeSend middleware (can transform or cancel)
      const beforeResult = await this.pipeline.runEmailBeforeSend(message, source);
      if (beforeResult.message === false) {
        // Cancelled by middleware — find which plugin cancelled for audit log
        const cancellingResult = beforeResult.results.find((r) => r.value === false);
        const cancelledBy = cancellingResult?.pluginId ?? "unknown";
        console.info(`[email] Email to "${message.to}" cancelled by plugin "${cancelledBy}"`);
        return;
      }
      finalMessage = beforeResult.message;
    }
    // Stage 2: email:deliver (exclusive hook — exactly one provider).
    // A missing result means no provider is selected; a result carrying an
    // error means the selected provider failed, and that error propagates.
    const deliverEvent: EmailDeliverEvent = { message: finalMessage, source };
    const deliverResult = await this.pipeline.invokeExclusiveHook(EMAIL_DELIVER_HOOK, deliverEvent);
    if (!deliverResult) {
      throw new EmailNotConfiguredError();
    }
    if (deliverResult.error) {
      throw deliverResult.error;
    }
    // Stage 3: email:afterSend (fire-and-forget)
    // System emails skip afterSend for the same reason they skip beforeSend:
    // the message contains plaintext auth tokens that must not be exposed to
    // plugin hooks. A logging/analytics hook could exfiltrate magic link URLs.
    // Errors are logged internally by the pipeline, not propagated.
    if (!isSystemEmail) {
      this.pipeline
        .runEmailAfterSend(finalMessage, source)
        .catch((err) =>
          console.error(
            "[email] afterSend pipeline error:",
            err instanceof Error ? err.message : err,
          ),
        );
    }
  }
  /**
   * Check if an email provider is configured and available.
   *
   * Returns true if an email:deliver provider is selected in the exclusive
   * hook system. Plugins and auth code use this to decide whether to show
   * "send invite" vs "copy invite link" UI.
   */
  isAvailable(): boolean {
    return this.pipeline.getExclusiveSelection(EMAIL_DELIVER_HOOK) !== undefined;
  }
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,193 @@
/**
* Plugin System Exports
*
* Unified plugin API with:
* - Single context shape for all hooks and routes
* - Paginated queries (no async iterators)
* - Capability-gated APIs
*
*/
// definePlugin
export { definePlugin } from "./define-plugin.js";
// Standard plugin adapter
export { adaptSandboxEntry } from "./adapt-sandbox-entry.js";
// Manifest validation
export { pluginManifestSchema, PLUGIN_CAPABILITIES, HOOK_NAMES } from "./manifest-schema.js";
export type { ValidatedPluginManifest } from "./manifest-schema.js";
// Request metadata
export { extractRequestMeta, sanitizeHeadersForSandbox } from "./request-meta.js";
// Context factory
export {
PluginContextFactory,
createPluginContext,
createKVAccess,
createStorageAccess,
createContentAccess,
createContentAccessWithWrite,
createMediaAccess,
createMediaAccessWithWrite,
createHttpAccess,
createUnrestrictedHttpAccess,
createBlockedHttpAccess,
createLogAccess,
createUserAccess,
createUrlHelper,
createSiteInfo,
} from "./context.js";
export type { PluginContextFactoryOptions } from "./context.js";
// Hooks
export { HookPipeline, createHookPipeline } from "./hooks.js";
export type { HookResult } from "./hooks.js";
// Email pipeline
export { EmailPipeline, EmailNotConfiguredError, EmailRecursionError } from "./email.js";
export { DEV_CONSOLE_EMAIL_PLUGIN_ID, getDevEmails, clearDevEmails } from "./email-console.js";
export type { StoredEmail } from "./email-console.js";
// Routes
export {
PluginRouteHandler,
PluginRouteRegistry,
PluginRouteError,
createRouteRegistry,
} from "./routes.js";
export type { RouteResult, InvokeRouteOptions } from "./routes.js";
// Manager
export { PluginManager, createPluginManager } from "./manager.js";
export type { PluginManagerOptions, PluginState } from "./manager.js";
// Sandbox
export {
NoopSandboxRunner,
SandboxNotAvailableError,
createNoopSandboxRunner,
} from "./sandbox/index.js";
export type {
SandboxRunner,
SandboxedPlugin,
SandboxRunnerFactory,
SandboxOptions,
SandboxEmailMessage,
SandboxEmailSendCallback,
ResourceLimits,
PluginCodeStorage,
SerializedRequest,
} from "./sandbox/index.js";
// Types
export type {
// Core types
PluginCapability,
PluginStorageConfig,
StorageCollectionConfig,
PaginatedResult,
QueryOptions,
WhereClause,
WhereValue,
RangeFilter,
InFilter,
StartsWithFilter,
// Context APIs
PluginContext,
StorageCollection,
KVAccess,
ContentAccess,
ContentAccessWithWrite,
MediaAccess,
MediaAccessWithWrite,
HttpAccess,
LogAccess,
SiteInfo,
UserInfo,
UserAccess,
ContentItem,
MediaItem,
ContentListOptions,
MediaListOptions,
// Hook types
PluginHooks,
HookConfig,
HookName,
ResolvedHook,
ResolvedPluginHooks,
ContentHookEvent,
ContentDeleteEvent,
MediaUploadEvent,
MediaAfterUploadEvent,
LifecycleEvent,
UninstallEvent,
// Email types
EmailAccess,
EmailMessage,
EmailBeforeSendEvent,
EmailDeliverEvent,
EmailAfterSendEvent,
EmailBeforeSendHandler,
EmailDeliverHandler,
EmailAfterSendHandler,
// Handler types
ContentBeforeSaveHandler,
ContentAfterSaveHandler,
ContentBeforeDeleteHandler,
ContentAfterDeleteHandler,
MediaBeforeUploadHandler,
MediaAfterUploadHandler,
LifecycleHandler,
UninstallHandler,
// Comment types
CommentBeforeCreateEvent,
CommentModerateEvent,
CommentAfterCreateEvent,
CommentAfterModerateEvent,
CommentBeforeCreateHandler,
CommentModerateHandler,
CommentAfterCreateHandler,
CommentAfterModerateHandler,
ModerationDecision,
CollectionCommentSettings,
StoredComment,
// Request metadata types
RequestMeta,
GeoInfo,
// Route types
PluginRoute,
RouteContext,
// Admin types
PluginAdminConfig,
PluginAdminPage,
PluginDashboardWidget,
PluginAdminExports,
FieldWidgetConfig,
PortableTextBlockConfig,
PortableTextBlockField,
SettingField,
SettingFieldType,
// Plugin definition
PluginDefinition,
ResolvedPlugin,
PluginManifest,
// Standard plugin format
StandardPluginDefinition,
StandardHookHandler,
StandardHookEntry,
StandardRouteHandler,
StandardRouteEntry,
} from "./types.js";
export { isStandardPluginDefinition } from "./types.js";

View File

@@ -0,0 +1,595 @@
/**
* Plugin Manager v2
*
* Central orchestrator for the plugin system:
* - Loads and resolves plugins
* - Manages plugin lifecycle (install, activate, deactivate, uninstall)
* - Dispatches hooks across all plugins
* - Routes API requests to plugins
*
*/
import type { Kysely } from "kysely";
import { sql } from "kysely";
import { OptionsRepository } from "../database/repositories/options.js";
import type { Database } from "../database/types.js";
import type { Storage } from "../storage/types.js";
import type { PluginContextFactoryOptions } from "./context.js";
import { setCronTasksEnabled } from "./cron.js";
import { definePlugin } from "./define-plugin.js";
import {
HookPipeline,
type HookResult,
resolveExclusiveHooks as resolveExclusiveHooksShared,
} from "./hooks.js";
import { PluginRouteRegistry, type RouteResult, type InvokeRouteOptions } from "./routes.js";
import type {
PluginDefinition,
ResolvedPlugin,
PluginStorageConfig,
MediaItem,
CronEvent,
} from "./types.js";
/** Options table key prefix for exclusive hook DB reads via PluginManager */
const EXCLUSIVE_HOOK_KEY_PREFIX = "emdash:exclusive_hook:";
/**
* Plugin state in the manager
*/
export type PluginState = "registered" | "installed" | "active" | "inactive";
/**
* Plugin entry in the manager
*/
interface PluginEntry {
plugin: ResolvedPlugin;
state: PluginState;
}
/**
* Plugin manager options
*/
export interface PluginManagerOptions {
/** Database instance */
db: Kysely<Database>;
/** Storage backend for direct media uploads */
storage?: Storage;
/** Function to generate upload URLs for media */
getUploadUrl?: (
filename: string,
contentType: string,
) => Promise<{ uploadUrl: string; mediaId: string }>;
}
/**
* Plugin Manager v2
*
* Manages the full lifecycle of plugins and coordinates hooks/routes.
*/
export class PluginManager {
private plugins: Map<string, PluginEntry> = new Map();
private hookPipeline: HookPipeline | null = null;
private routeRegistry: PluginRouteRegistry | null = null;
private factoryOptions: PluginContextFactoryOptions;
private initialized = false;
constructor(private options: PluginManagerOptions) {
  // The context factory shares the manager's DB/storage/upload wiring.
  const { db, storage, getUploadUrl } = options;
  this.factoryOptions = { db, storage, getUploadUrl };
}
// =========================================================================
// Plugin Registration
// =========================================================================
/**
 * Register a plugin definition.
 * Resolves and validates the definition and records it in the manager,
 * but does not install it.
 * @throws if a plugin with the same id is already registered.
 */
register<TStorage extends PluginStorageConfig>(
  definition: PluginDefinition<TStorage>,
): ResolvedPlugin<TStorage> {
  const plugin = definePlugin(definition);
  if (this.plugins.has(plugin.id)) {
    throw new Error(`Plugin "${plugin.id}" is already registered`);
  }
  this.plugins.set(plugin.id, { plugin, state: "registered" });
  // Hooks/routes must be rebuilt before the next dispatch.
  this.initialized = false;
  return plugin;
}
/**
 * Register several plugin definitions in order.
 */
registerAll(definitions: PluginDefinition[]): void {
  definitions.forEach((definition) => this.register(definition));
}
/**
 * Remove a plugin from the manager.
 * Only inactive or merely-registered plugins may be removed; active
 * plugins must be deactivated first.
 * @returns true when the plugin existed and was removed.
 */
unregister(pluginId: string): boolean {
  const entry = this.plugins.get(pluginId);
  if (!entry) {
    return false;
  }
  if (entry.state === "active") {
    throw new Error(`Cannot unregister active plugin "${pluginId}". Deactivate it first.`);
  }
  this.plugins.delete(pluginId);
  this.initialized = false;
  return true;
}
// =========================================================================
// Plugin Lifecycle
// =========================================================================
/**
 * Install a plugin by running its install hooks.
 * @throws if the plugin is unknown, already installed, or any install hook fails.
 */
async install(pluginId: string): Promise<HookResult<void>[]> {
  const entry = this.plugins.get(pluginId);
  if (!entry) {
    throw new Error(`Plugin "${pluginId}" not found`);
  }
  if (entry.state !== "registered") {
    throw new Error(`Plugin "${pluginId}" is already installed (state: ${entry.state})`);
  }
  this.ensureInitialized();
  const results = await this.hookPipeline!.runPluginInstall(pluginId);
  // First hook failure aborts the install before the state transition.
  const firstFailure = results.find((r) => !r.success);
  if (firstFailure) {
    throw new Error(`Plugin install failed: ${firstFailure.error?.message ?? "Unknown error"}`);
  }
  entry.state = "installed";
  return results;
}
/**
 * Activate a plugin: auto-installs if needed, runs its activate hooks,
 * re-enables its cron tasks, rebuilds the hook pipeline, and re-resolves
 * exclusive hooks so a newly available provider can be auto-selected.
 */
async activate(pluginId: string): Promise<HookResult<void>[]> {
  const entry = this.plugins.get(pluginId);
  if (!entry) {
    throw new Error(`Plugin "${pluginId}" not found`);
  }
  if (entry.state === "active") {
    return []; // nothing to do
  }
  if (entry.state === "registered") {
    await this.install(pluginId); // auto-install on first activation
  }
  this.ensureInitialized();
  const results = await this.hookPipeline!.runPluginActivate(pluginId);
  // Any failed activate hook aborts before the state transition.
  const firstFailure = results.find((r) => !r.success);
  if (firstFailure) {
    throw new Error(`Plugin activation failed: ${firstFailure.error?.message ?? "Unknown error"}`);
  }
  entry.state = "active";
  // Cron tasks were disabled while inactive — switch them back on.
  await setCronTasksEnabled(this.options.db, pluginId, true);
  // Rebuild the pipeline so the plugin's hooks take effect, then resolve
  // exclusive hooks (new provider may need auto-selection).
  this.reinitialize();
  await this.resolveExclusiveHooks();
  return results;
}
/**
 * Deactivate a plugin: runs deactivate hooks, disables its cron tasks,
 * rebuilds the hook pipeline without it, and re-resolves exclusive hooks
 * (a deactivated provider may need clearing).
 */
async deactivate(pluginId: string): Promise<HookResult<void>[]> {
  const entry = this.plugins.get(pluginId);
  if (!entry) {
    throw new Error(`Plugin "${pluginId}" not found`);
  }
  if (entry.state !== "active") {
    return []; // not active — nothing to do
  }
  this.ensureInitialized();
  const results = await this.hookPipeline!.runPluginDeactivate(pluginId);
  // Stop the plugin's scheduled work before flipping state.
  await setCronTasksEnabled(this.options.db, pluginId, false);
  entry.state = "inactive";
  this.reinitialize();
  await this.resolveExclusiveHooks();
  return results;
}
/**
 * Uninstall a plugin (run uninstall hooks, optionally delete data).
 *
 * Deactivates first when active, removes the plugin's cron tasks, then
 * drops it from the manager entirely. `deleteData` is forwarded to the
 * uninstall hooks so plugins can purge their own storage.
 */
async uninstall(pluginId: string, deleteData: boolean = false): Promise<HookResult<void>[]> {
  const entry = this.plugins.get(pluginId);
  if (!entry) {
    throw new Error(`Plugin "${pluginId}" not found`);
  }
  // Deactivate first if active (this also resolves exclusive hooks)
  if (entry.state === "active") {
    await this.deactivate(pluginId);
  }
  this.ensureInitialized();
  // Run uninstall hooks
  const results = await this.hookPipeline!.runPluginUninstall(pluginId, deleteData);
  // Delete all cron tasks for the uninstalled plugin
  await this.deleteCronTasks(pluginId);
  // Remove from manager
  this.plugins.delete(pluginId);
  // Force a pipeline rebuild on next use — the plugin's hooks/routes are gone
  this.initialized = false;
  // Resolve exclusive hooks after removal
  await this.resolveExclusiveHooks();
  return results;
}
// =========================================================================
// Hook Dispatch
// =========================================================================
/**
 * Run content:beforeSave hooks across all active plugins.
 * Returns the (possibly transformed) content plus per-plugin results.
 */
async runContentBeforeSave(
  content: Record<string, unknown>,
  collection: string,
  isNew: boolean,
): Promise<{
  content: Record<string, unknown>;
  results: HookResult<Record<string, unknown>>[];
}> {
  this.ensureInitialized();
  const pipeline = this.hookPipeline!;
  return pipeline.runContentBeforeSave(content, collection, isNew);
}
/**
 * Run content:afterSave hooks across all active plugins.
 */
async runContentAfterSave(
  content: Record<string, unknown>,
  collection: string,
  isNew: boolean,
): Promise<HookResult<void>[]> {
  this.ensureInitialized();
  const pipeline = this.hookPipeline!;
  return pipeline.runContentAfterSave(content, collection, isNew);
}
/**
 * Run content:beforeDelete hooks across all active plugins.
 * `allowed` reflects whether the deletion may proceed.
 */
async runContentBeforeDelete(
  id: string,
  collection: string,
): Promise<{ allowed: boolean; results: HookResult<boolean>[] }> {
  this.ensureInitialized();
  const pipeline = this.hookPipeline!;
  return pipeline.runContentBeforeDelete(id, collection);
}
/**
 * Run content:afterDelete hooks across all active plugins.
 */
async runContentAfterDelete(id: string, collection: string): Promise<HookResult<void>[]> {
  this.ensureInitialized();
  const pipeline = this.hookPipeline!;
  return pipeline.runContentAfterDelete(id, collection);
}
/**
 * Run media:beforeUpload hooks across all active plugins.
 * Returns the (possibly rewritten) file descriptor plus results.
 */
async runMediaBeforeUpload(file: { name: string; type: string; size: number }): Promise<{
  file: { name: string; type: string; size: number };
  results: HookResult<{ name: string; type: string; size: number }>[];
}> {
  this.ensureInitialized();
  const pipeline = this.hookPipeline!;
  return pipeline.runMediaBeforeUpload(file);
}
/**
 * Run media:afterUpload hooks across all active plugins.
 */
async runMediaAfterUpload(media: MediaItem): Promise<HookResult<void>[]> {
  this.ensureInitialized();
  const pipeline = this.hookPipeline!;
  return pipeline.runMediaAfterUpload(media);
}
/**
 * Invoke the cron hook for a specific plugin (per-plugin dispatch).
 * Used as the InvokeCronHookFn callback for CronExecutor.
 * Re-throws the hook's error so the executor can record the failure.
 */
async invokeCronHook(pluginId: string, event: CronEvent): Promise<void> {
  this.ensureInitialized();
  const outcome = await this.hookPipeline!.invokeCronHook(pluginId, event);
  if (outcome.success) return;
  if (outcome.error) {
    throw outcome.error;
  }
}
// =========================================================================
// Route Dispatch
// =========================================================================
/**
 * Invoke a plugin route by name.
 */
async invokeRoute(
  pluginId: string,
  routeName: string,
  options: InvokeRouteOptions,
): Promise<RouteResult> {
  this.ensureInitialized();
  const registry = this.routeRegistry!;
  return registry.invoke(pluginId, routeName, options);
}
/**
 * List the route names registered by a plugin.
 */
getPluginRoutes(pluginId: string): string[] {
  this.ensureInitialized();
  const registry = this.routeRegistry!;
  return registry.getRoutes(pluginId);
}
// =========================================================================
// Query Methods
// =========================================================================
/**
* Get a plugin by ID
*/
getPlugin(pluginId: string): ResolvedPlugin | undefined {
return this.plugins.get(pluginId)?.plugin;
}
/**
* Get plugin state
*/
getPluginState(pluginId: string): PluginState | undefined {
return this.plugins.get(pluginId)?.state;
}
/**
* Get all registered plugins
*/
getAllPlugins(): Array<{ plugin: ResolvedPlugin; state: PluginState }> {
return Array.from(this.plugins.values(), (entry) => ({
plugin: entry.plugin,
state: entry.state,
}));
}
/**
* Get all active plugins
*/
getActivePlugins(): ResolvedPlugin[] {
return [...this.plugins.values()]
.filter((entry) => entry.state === "active")
.map((entry) => entry.plugin);
}
/**
* Check if a plugin exists
*/
hasPlugin(pluginId: string): boolean {
return this.plugins.has(pluginId);
}
/**
* Check if a plugin is active
*/
isActive(pluginId: string): boolean {
return this.plugins.get(pluginId)?.state === "active";
}
// =========================================================================
// Exclusive Hooks
// =========================================================================
/**
 * Get all plugins that registered a handler for an exclusive hook.
 *
 * NOTE(review): `pluginName` is populated from `plugin.id`, not a display
 * name — from here it's unclear whether ResolvedPlugin carries a name
 * field; confirm whether callers expect a human-readable name.
 */
getExclusiveHookProviders(hookName: string): Array<{ pluginId: string; pluginName: string }> {
  this.ensureInitialized();
  return this.hookPipeline!.getExclusiveHookProviders(hookName).map((p) => {
    const plugin = this.plugins.get(p.pluginId);
    return {
      pluginId: p.pluginId,
      pluginName: plugin?.plugin.id ?? p.pluginId,
    };
  });
}
/**
 * Read the selected provider for an exclusive hook from the options table.
 * Returns null when no provider has been selected.
 */
async getExclusiveHookSelection(hookName: string): Promise<string | null> {
  const key = `${EXCLUSIVE_HOOK_KEY_PREFIX}${hookName}`;
  const repo = new OptionsRepository(this.options.db);
  return repo.get<string>(key);
}
/**
 * Persist the selected provider for an exclusive hook in the options
 * table, keeping the in-memory pipeline selection in sync.
 * Pass null to clear the selection.
 *
 * @throws when the target plugin is unknown or not active.
 */
async setExclusiveHookSelection(hookName: string, pluginId: string | null): Promise<void> {
  const repo = new OptionsRepository(this.options.db);
  const key = `${EXCLUSIVE_HOOK_KEY_PREFIX}${hookName}`;
  if (pluginId === null) {
    // Clearing: remove the persisted option, then the in-memory selection
    await repo.delete(key);
    this.hookPipeline?.clearExclusiveSelection(hookName);
    return;
  }
  // Validate plugin exists and is active before persisting anything
  const entry = this.plugins.get(pluginId);
  if (!entry) {
    throw new Error(`Plugin "${pluginId}" not found`);
  }
  if (entry.state !== "active") {
    throw new Error(`Plugin "${pluginId}" is not active`);
  }
  await repo.set(key, pluginId);
  this.hookPipeline?.setExclusiveSelection(hookName, pluginId);
}
/**
 * Resolution algorithm for exclusive hooks.
 *
 * Delegates to the shared resolveExclusiveHooks() function.
 * See hooks.ts for the full algorithm description.
 */
async resolveExclusiveHooks(preferredHints?: Map<string, string[]>): Promise<void> {
  this.ensureInitialized();
  const repo = new OptionsRepository(this.options.db);
  // Adapter wraps delete so the shared resolver sees a void-returning fn
  const deleteOption = async (key: string): Promise<void> => {
    await repo.delete(key);
  };
  await resolveExclusiveHooksShared({
    pipeline: this.hookPipeline!,
    isActive: (pluginId) => this.isActive(pluginId),
    getOption: (key) => repo.get<string>(key),
    setOption: (key, value) => repo.set(key, value),
    deleteOption,
    preferredHints,
  });
}
/**
* Get all exclusive hooks with their providers and current selections.
* Used by the admin API.
*/
async getExclusiveHooksInfo(): Promise<
Array<{
hookName: string;
providers: Array<{ pluginId: string }>;
selectedPluginId: string | null;
}>
> {
this.ensureInitialized();
const exclusiveHookNames = this.hookPipeline!.getRegisteredExclusiveHooks();
const result = [];
for (const hookName of exclusiveHookNames) {
const providers = this.hookPipeline!.getExclusiveHookProviders(hookName);
const selection = await this.getExclusiveHookSelection(hookName);
result.push({
hookName,
providers,
selectedPluginId: selection,
});
}
return result;
}
// =========================================================================
// Internal Methods
// =========================================================================
/**
 * Initialize or reinitialize the hook pipeline and route registry.
 *
 * Idempotent: a no-op while `initialized` is true. The pipeline and
 * registry are built from currently-active plugins only, which is why
 * lifecycle methods flip `initialized` (or call reinitialize()) after
 * changing any plugin's state.
 */
private ensureInitialized(): void {
  if (this.initialized) return;
  // Get all active plugins for hooks
  const activePlugins = this.getActivePlugins();
  // Create hook pipeline with active plugins
  this.hookPipeline = new HookPipeline(activePlugins, this.factoryOptions);
  // Create route registry
  this.routeRegistry = new PluginRouteRegistry(this.factoryOptions);
  // Register routes for active plugins
  for (const plugin of activePlugins) {
    this.routeRegistry.register(plugin);
  }
  this.initialized = true;
}
/**
 * Force reinitialization (useful after plugin state changes).
 */
reinitialize(): void {
  this.initialized = false;
  this.ensureInitialized();
}
/**
 * Delete all cron tasks for a plugin.
 * Used during uninstall.
 */
private async deleteCronTasks(pluginId: string): Promise<void> {
  try {
    await sql`
      DELETE FROM _emdash_cron_tasks
      WHERE plugin_id = ${pluginId}
    `.execute(this.options.db);
  } catch {
    // Cron table may not exist yet (pre-migration). Non-fatal — the
    // swallow here is deliberate best-effort cleanup.
  }
}
}
/**
 * Create a plugin manager backed by the given options.
 */
export function createPluginManager(options: PluginManagerOptions): PluginManager {
  const manager = new PluginManager(options);
  return manager;
}

View File

@@ -0,0 +1,230 @@
/**
* Zod schema for PluginManifest validation
*
* Used to validate manifest.json from plugin bundles at every parse site:
* - Client-side download (marketplace.ts extractBundle)
* - R2 load (api/handlers/marketplace.ts loadBundleFromR2)
* - CLI publish preview (cli/commands/publish.ts readManifestFromTarball)
* - Marketplace ingest extends this with publishing-specific fields
*/
import { z } from "zod";
// ── Enum values (must stay in sync with types.ts) ───────────────
/** Capability strings a plugin may declare; validated as a closed set. */
export const PLUGIN_CAPABILITIES = [
  "network:fetch",
  "network:fetch:any",
  "read:content",
  "write:content",
  "read:media",
  "write:media",
  "read:users",
  "email:send",
  "email:provide",
  "email:intercept",
  "page:inject",
] as const;
/** Must stay in sync with FieldType in schema/types.ts */
const FIELD_TYPES = [
  "string",
  "text",
  "number",
  "integer",
  "boolean",
  "datetime",
  "select",
  "multiSelect",
  "portableText",
  "image",
  "file",
  "reference",
  "json",
  "slug",
] as const;
/** Hook names a plugin may subscribe to (lifecycle, content, media, cron, email, comments, pages). */
export const HOOK_NAMES = [
  "plugin:install",
  "plugin:activate",
  "plugin:deactivate",
  "plugin:uninstall",
  "content:beforeSave",
  "content:afterSave",
  "content:beforeDelete",
  "content:afterDelete",
  "media:beforeUpload",
  "media:afterUpload",
  "cron",
  "email:beforeSend",
  "email:deliver",
  "email:afterSend",
  "comment:beforeCreate",
  "comment:moderate",
  "comment:afterCreate",
  "comment:afterModerate",
  "page:metadata",
  "page:fragments",
] as const;
/**
 * Structured hook entry for manifest — name plus optional metadata.
 * During a transition period, both plain strings and objects are accepted.
 */
const manifestHookEntrySchema = z.object({
  name: z.enum(HOOK_NAMES),
  exclusive: z.boolean().optional(),
  priority: z.number().int().optional(),
  timeout: z.number().int().positive().optional(),
});
/**
 * Structured route entry for manifest — name plus optional metadata.
 * Both plain strings and objects are accepted; strings are normalized
 * to `{ name }` objects via `normalizeManifestRoute()`.
 */
/** Route names must be safe path segments — alphanumeric, hyphens, underscores, forward slashes */
// Note: the first character must be alphanumeric; "/" and "-" are only
// allowed in subsequent positions.
const routeNamePattern = /^[a-zA-Z0-9][a-zA-Z0-9_\-/]*$/;
const manifestRouteEntrySchema = z.object({
  name: z.string().min(1).regex(routeNamePattern, "Route name must be a safe path segment"),
  public: z.boolean().optional(),
});
// ── Sub-schemas ─────────────────────────────────────────────────
/** Index field names must be valid identifiers to prevent SQL injection via JSON path expressions */
const indexFieldName = z.string().regex(/^[a-zA-Z][a-zA-Z0-9_]*$/);
// A storage collection declares its indexes: each entry is either a single
// field name or an array of names (composite index).
const storageCollectionSchema = z.object({
  indexes: z.array(z.union([indexFieldName, z.array(indexFieldName)])),
  uniqueIndexes: z.array(z.union([indexFieldName, z.array(indexFieldName)])).optional(),
});
// Fields shared by every settings-schema entry
const baseSettingFields = {
  label: z.string(),
  description: z.string().optional(),
};
// Settings fields are discriminated on `type`; note the "secret" variant
// intentionally carries no `default`.
const settingFieldSchema = z.discriminatedUnion("type", [
  z.object({
    ...baseSettingFields,
    type: z.literal("string"),
    default: z.string().optional(),
    multiline: z.boolean().optional(),
  }),
  z.object({
    ...baseSettingFields,
    type: z.literal("number"),
    default: z.number().optional(),
    min: z.number().optional(),
    max: z.number().optional(),
  }),
  z.object({ ...baseSettingFields, type: z.literal("boolean"), default: z.boolean().optional() }),
  z.object({
    ...baseSettingFields,
    type: z.literal("select"),
    options: z.array(z.object({ value: z.string(), label: z.string() })),
    default: z.string().optional(),
  }),
  z.object({ ...baseSettingFields, type: z.literal("secret") }),
]);
/** Admin navigation page contributed by a plugin. */
const adminPageSchema = z.object({
  path: z.string(),
  label: z.string(),
  icon: z.string().optional(),
});
/** Dashboard widget contributed by a plugin. */
const dashboardWidgetSchema = z.object({
  id: z.string(),
  size: z.enum(["full", "half", "third"]).optional(),
  title: z.string().optional(),
});
/** Admin-facing configuration: settings schema, pages, widgets, field widgets. */
const pluginAdminConfigSchema = z.object({
  entry: z.string().optional(),
  settingsSchema: z.record(z.string(), settingFieldSchema).optional(),
  pages: z.array(adminPageSchema).optional(),
  widgets: z.array(dashboardWidgetSchema).optional(),
  fieldWidgets: z
    .array(
      z.object({
        name: z.string().min(1),
        label: z.string().min(1),
        fieldTypes: z.array(z.enum(FIELD_TYPES)),
        // Elements are loosely validated (passthrough keeps unknown keys);
        // full Block Kit validation happens at render time.
        elements: z
          .array(
            z
              .object({
                type: z.string(),
                action_id: z.string(),
                label: z.string().optional(),
              })
              .passthrough(),
          )
          .optional(),
      }),
    )
    .optional(),
});
// ── Main schema ─────────────────────────────────────────────────
/**
 * Zod schema matching the PluginManifest interface from types.ts.
 *
 * Every JSON.parse of a manifest.json should validate through this.
 */
export const pluginManifestSchema = z.object({
  id: z.string().min(1),
  version: z.string().min(1),
  // Validated against the closed PLUGIN_CAPABILITIES list
  capabilities: z.array(z.enum(PLUGIN_CAPABILITIES)),
  allowedHosts: z.array(z.string()),
  // Per-collection storage/index declarations
  storage: z.record(z.string(), storageCollectionSchema),
  /**
   * Hook declarations — accepts both plain name strings (legacy) and
   * structured objects with exclusive/priority/timeout metadata.
   * Plain strings are normalized to `{ name }` objects after parsing.
   */
  hooks: z.array(z.union([z.enum(HOOK_NAMES), manifestHookEntrySchema])),
  /**
   * Route declarations — accepts both plain name strings and
   * structured objects with public metadata.
   * Plain strings are normalized to `{ name }` objects after parsing.
   */
  routes: z.array(
    z.union([
      z.string().min(1).regex(routeNamePattern, "Route name must be a safe path segment"),
      manifestRouteEntrySchema,
    ]),
  ),
  // NOTE(review): `admin` is required (no .optional()) — every manifest must
  // carry an admin config object; confirm this is intended.
  admin: pluginAdminConfigSchema,
});
export type ValidatedPluginManifest = z.infer<typeof pluginManifestSchema>;
/**
 * Normalize a manifest hook entry — plain strings become `{ name }`
 * objects; object entries are returned unchanged (same reference).
 */
export function normalizeManifestHook(
  entry: string | { name: string; exclusive?: boolean; priority?: number; timeout?: number },
): { name: string; exclusive?: boolean; priority?: number; timeout?: number } {
  return typeof entry === "string" ? { name: entry } : entry;
}
/**
 * Normalize a manifest route entry — plain strings become `{ name }`
 * objects; object entries are returned unchanged (same reference).
 */
export function normalizeManifestRoute(entry: string | { name: string; public?: boolean }): {
  name: string;
  public?: boolean;
} {
  return typeof entry === "string" ? { name: entry } : entry;
}

View File

@@ -0,0 +1,460 @@
/**
* MarketplaceClient — HTTP client for the EmDash Plugin Marketplace
*
* Used by the install/update/proxy endpoints in EmDash core to communicate
* with the marketplace Worker. The marketplace is a distribution channel,
* not a runtime dependency — bundles are copied to site-local R2 at install time.
*/
import { createGzipDecoder, unpackTar } from "modern-tar";
import { pluginManifestSchema } from "./manifest-schema.js";
import type { PluginManifest } from "./types.js";
// ── Module-level regex patterns ───────────────────────────────────
/** Run of trailing "/" characters — stripped from the configured base URL. */
const TRAILING_SLASHES = /\/+$/;
/** Leading "./" prefix that tar tools commonly add to entry names. */
const LEADING_DOT_SLASH = /^\.\//;
// ── Types ──────────────────────────────────────────────────────────
/** Catalog listing entry returned by marketplace plugin search. */
export interface MarketplacePluginSummary {
  id: string;
  name: string;
  description: string | null;
  author: {
    name: string;
    verified: boolean;
    avatarUrl: string | null;
  };
  capabilities: string[];
  keywords: string[];
  installCount: number;
  hasIcon: boolean;
  iconUrl: string;
  // Optional — presumably absent until a version has been published; confirm against server
  latestVersion?: {
    version: string;
    audit?: {
      verdict: string;
      riskScore: number;
    };
    imageAudit?: {
      verdict: string;
    };
  };
  createdAt: string;
  updatedAt: string;
}
/** One row of a plugin's version history. */
export interface MarketplaceVersionSummary {
  version: string;
  minEmDashVersion: string | null;
  bundleSize: number;
  checksum: string;
  changelog: string | null;
  capabilities: string[];
  status: string;
  auditVerdict: string | null;
  imageAuditVerdict: string | null;
  publishedAt: string;
}
/** Full plugin detail — summary plus repo/license info and a richer latestVersion. */
export interface MarketplacePluginDetail extends MarketplacePluginSummary {
  repositoryUrl: string | null;
  homepageUrl: string | null;
  license: string | null;
  latestVersion?: {
    version: string;
    minEmDashVersion: string | null;
    bundleSize: number;
    checksum: string;
    changelog: string | null;
    readme: string | null;
    hasIcon: boolean;
    screenshotCount: number;
    screenshotUrls: string[];
    capabilities: string[];
    status: string;
    audit?: {
      verdict: string;
      riskScore: number;
    };
    imageAudit?: {
      verdict: string;
    };
    publishedAt: string;
  };
}
/** Optional filters and paging controls for plugin search. */
export interface MarketplaceSearchOpts {
  category?: string;
  capability?: string;
  sort?: "installs" | "updated" | "created" | "name";
  cursor?: string;
  limit?: number;
}
/** One page of plugin search results; `nextCursor` is absent on the last page. */
export interface MarketplaceSearchResult {
  items: MarketplacePluginSummary[];
  nextCursor?: string;
}
// ── Theme types ───────────────────────────────────────────────────
/** Catalog listing entry returned by marketplace theme search. */
export interface MarketplaceThemeSummary {
  id: string;
  name: string;
  description: string | null;
  author: {
    name: string;
    verified: boolean;
    avatarUrl: string | null;
  };
  keywords: string[];
  previewUrl: string;
  demoUrl: string | null;
  hasThumbnail: boolean;
  thumbnailUrl: string | null;
  createdAt: string;
  updatedAt: string;
}
/** Full theme detail — summary plus repo/license info and screenshots. */
export interface MarketplaceThemeDetail extends MarketplaceThemeSummary {
  // Narrows the summary's author shape with an additional `id` field
  author: {
    id: string;
    name: string;
    verified: boolean;
    avatarUrl: string | null;
  };
  repositoryUrl: string | null;
  homepageUrl: string | null;
  license: string | null;
  screenshotCount: number;
  screenshotUrls: string[];
}
/** Optional filters and paging controls for theme search. */
export interface MarketplaceThemeSearchOpts {
  keyword?: string;
  sort?: "name" | "created" | "updated";
  cursor?: string;
  limit?: number;
}
/** One page of theme search results; `nextCursor` is absent on the last page. */
export interface MarketplaceThemeSearchResult {
  items: MarketplaceThemeSummary[];
  nextCursor?: string;
}
/** Extracted contents of a downloaded plugin bundle tarball. */
export interface PluginBundle {
  manifest: PluginManifest;
  backendCode: string;
  // Present only when the bundle ships an admin.js entry
  adminCode?: string;
  // SHA-256 hex digest of the compressed tarball bytes
  checksum: string;
}
// ── Interface ──────────────────────────────────────────────────────
export interface MarketplaceClient {
  /** Search the marketplace catalog */
  search(query?: string, opts?: MarketplaceSearchOpts): Promise<MarketplaceSearchResult>;
  /** Get full plugin detail */
  getPlugin(id: string): Promise<MarketplacePluginDetail>;
  /** Get version history for a plugin */
  getVersions(id: string): Promise<MarketplaceVersionSummary[]>;
  /** Download and extract a plugin bundle */
  downloadBundle(id: string, version: string): Promise<PluginBundle>;
  /** Fire-and-forget install stat (never throws) */
  reportInstall(id: string, version: string): Promise<void>;
  /** Search theme listings */
  searchThemes(
    query?: string,
    opts?: MarketplaceThemeSearchOpts,
  ): Promise<MarketplaceThemeSearchResult>;
  /** Get full theme detail */
  getTheme(id: string): Promise<MarketplaceThemeDetail>;
}
// ── Errors ─────────────────────────────────────────────────────────
/**
 * Error raised for failed marketplace requests. Carries the HTTP status
 * (when one was received) and a machine-readable error code.
 */
export class MarketplaceError extends Error {
  public readonly status?: number;
  public readonly code?: string;

  constructor(message: string, status?: number, code?: string) {
    super(message);
    this.name = "MarketplaceError";
    this.status = status;
    this.code = code;
  }
}
/**
 * Raised when the marketplace cannot be reached at all (network failure).
 * The underlying failure, when provided, is attached as `cause`.
 */
export class MarketplaceUnavailableError extends MarketplaceError {
  constructor(cause?: unknown) {
    super("Plugin marketplace is unavailable", undefined, "MARKETPLACE_UNAVAILABLE");
    if (cause) {
      this.cause = cause;
    }
  }
}
// ── Implementation ─────────────────────────────────────────────────
class MarketplaceClientImpl implements MarketplaceClient {
  private readonly baseUrl: string;
  constructor(baseUrl: string) {
    // Strip trailing slash
    this.baseUrl = baseUrl.replace(TRAILING_SLASHES, "");
  }
  /** GET /api/v1/plugins with optional q/category/capability/sort/cursor/limit. */
  async search(query?: string, opts?: MarketplaceSearchOpts): Promise<MarketplaceSearchResult> {
    const params = new URLSearchParams();
    if (query) params.set("q", query);
    if (opts?.category) params.set("category", opts.category);
    if (opts?.capability) params.set("capability", opts.capability);
    if (opts?.sort) params.set("sort", opts.sort);
    if (opts?.cursor) params.set("cursor", opts.cursor);
    if (opts?.limit) params.set("limit", String(opts.limit));
    const qs = params.toString();
    const url = `${this.baseUrl}/api/v1/plugins${qs ? `?${qs}` : ""}`;
    const data = await this.fetchJson<MarketplaceSearchResult>(url);
    return data;
  }
  /** GET /api/v1/plugins/:id. */
  async getPlugin(id: string): Promise<MarketplacePluginDetail> {
    const url = `${this.baseUrl}/api/v1/plugins/${encodeURIComponent(id)}`;
    return this.fetchJson<MarketplacePluginDetail>(url);
  }
  /** GET /api/v1/plugins/:id/versions — unwraps the { items } envelope. */
  async getVersions(id: string): Promise<MarketplaceVersionSummary[]> {
    const url = `${this.baseUrl}/api/v1/plugins/${encodeURIComponent(id)}/versions`;
    const data = await this.fetchJson<{ items: MarketplaceVersionSummary[] }>(url);
    return data.items;
  }
  /**
   * Download a version's gzipped tarball (following redirects) and extract
   * it into a validated PluginBundle. Extraction failures are wrapped as
   * BUNDLE_EXTRACT_FAILED unless already a MarketplaceError.
   */
  async downloadBundle(id: string, version: string): Promise<PluginBundle> {
    const bundleUrl = `${this.baseUrl}/api/v1/plugins/${encodeURIComponent(id)}/versions/${encodeURIComponent(version)}/bundle`;
    let response: Response;
    try {
      response = await fetch(bundleUrl, {
        redirect: "follow",
      });
    } catch (err) {
      throw new MarketplaceUnavailableError(err);
    }
    if (!response.ok) {
      throw new MarketplaceError(
        `Failed to download bundle: ${response.status} ${response.statusText}`,
        response.status,
        "BUNDLE_DOWNLOAD_FAILED",
      );
    }
    const tarballBytes = new Uint8Array(await response.arrayBuffer());
    try {
      return await extractBundle(tarballBytes);
    } catch (err) {
      if (err instanceof MarketplaceError) throw err;
      throw new MarketplaceError(
        "Failed to extract plugin bundle",
        undefined,
        "BUNDLE_EXTRACT_FAILED",
      );
    }
  }
  /** POST an install stat. Best-effort: all failures are swallowed. */
  async reportInstall(id: string, version: string): Promise<void> {
    // Generate a stable site hash (best-effort, non-identifying)
    const siteHash = await generateSiteHash();
    const url = `${this.baseUrl}/api/v1/plugins/${encodeURIComponent(id)}/installs`;
    try {
      await fetch(url, {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({ siteHash, version }),
      });
    } catch {
      // Fire-and-forget — never throw
    }
  }
  /** GET /api/v1/themes with optional q/keyword/sort/cursor/limit. */
  async searchThemes(
    query?: string,
    opts?: MarketplaceThemeSearchOpts,
  ): Promise<MarketplaceThemeSearchResult> {
    const params = new URLSearchParams();
    if (query) params.set("q", query);
    if (opts?.keyword) params.set("keyword", opts.keyword);
    if (opts?.sort) params.set("sort", opts.sort);
    if (opts?.cursor) params.set("cursor", opts.cursor);
    if (opts?.limit) params.set("limit", String(opts.limit));
    const qs = params.toString();
    const url = `${this.baseUrl}/api/v1/themes${qs ? `?${qs}` : ""}`;
    return this.fetchJson<MarketplaceThemeSearchResult>(url);
  }
  /** GET /api/v1/themes/:id. */
  async getTheme(id: string): Promise<MarketplaceThemeDetail> {
    const url = `${this.baseUrl}/api/v1/themes/${encodeURIComponent(id)}`;
    return this.fetchJson<MarketplaceThemeDetail>(url);
  }
  /**
   * Shared GET-JSON helper. Network failures become
   * MarketplaceUnavailableError; non-2xx responses become MarketplaceError,
   * preferring the server's `{ error }` message when the body parses.
   */
  private async fetchJson<T>(url: string): Promise<T> {
    let response: Response;
    try {
      response = await fetch(url, {
        headers: { Accept: "application/json" },
      });
    } catch (err) {
      throw new MarketplaceUnavailableError(err);
    }
    if (!response.ok) {
      let errorMessage = `Marketplace request failed: ${response.status}`;
      try {
        const body: { error?: string } = await response.json();
        if (body.error) errorMessage = body.error;
      } catch {
        // use default message
      }
      throw new MarketplaceError(errorMessage, response.status);
    }
    const data: T = await response.json();
    return data;
  }
}
// ── Bundle extraction ──────────────────────────────────────────────
/**
 * Extract manifest + code files from a tarball.
 *
 * The tarball is a gzipped tar archive containing:
 * - manifest.json
 * - backend.js
 * - admin.js (optional)
 *
 * We use a minimal tar parser since we only need to read a few small files.
 *
 * @throws MarketplaceError with code INVALID_BUNDLE when required files
 *   are missing or manifest.json is malformed / fails schema validation.
 */
async function extractBundle(tarballBytes: Uint8Array): Promise<PluginBundle> {
  // Decompress fully into memory first, then parse the tar.
  // Passing a pipeThrough() stream directly to unpackTar causes a backpressure
  // deadlock in workerd: the tar decoder's body-stream pull() needs more
  // decompressed data, but the upstream pipe is stalled waiting for the
  // decoder's writable side to drain — a circular dependency.
  const decompressedStream = new ReadableStream<Uint8Array>({
    start(controller) {
      controller.enqueue(tarballBytes);
      controller.close();
    },
  }).pipeThrough(createGzipDecoder());
  // Collect decompressed bytes fully before parsing
  const decompressedBuf = await new Response(decompressedStream).arrayBuffer();
  const decompressedBytes = new Uint8Array(decompressedBuf);
  const decompressed = new ReadableStream<Uint8Array>({
    start(controller) {
      controller.enqueue(decompressedBytes);
      controller.close();
    },
  });
  const entries = await unpackTar(decompressed);
  const decoder = new TextDecoder();
  const files = new Map<string, string>();
  for (const entry of entries) {
    if (entry.data && entry.header.type === "file") {
      // Strip leading ./ prefix that tar tools commonly add
      const name = entry.header.name.replace(LEADING_DOT_SLASH, "");
      files.set(name, decoder.decode(entry.data));
    }
  }
  const manifestJson = files.get("manifest.json");
  const backendCode = files.get("backend.js");
  if (!manifestJson) {
    throw new MarketplaceError(
      "Invalid bundle: missing manifest.json",
      undefined,
      "INVALID_BUNDLE",
    );
  }
  if (!backendCode) {
    throw new MarketplaceError("Invalid bundle: missing backend.js", undefined, "INVALID_BUNDLE");
  }
  let manifest: PluginManifest;
  try {
    const parsed: unknown = JSON.parse(manifestJson);
    const result = pluginManifestSchema.safeParse(parsed);
    if (!result.success) {
      throw new MarketplaceError(
        "Invalid bundle: manifest.json failed validation",
        undefined,
        "INVALID_BUNDLE",
      );
    }
    // Elements are validated as unknown[] by Zod; cast to PluginManifest
    // for the Element[] type (Block Kit validation happens at render time).
    // eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion -- Zod types elements as unknown[]; Element type validated at render time
    manifest = result.data as unknown as PluginManifest;
  } catch (err) {
    if (err instanceof MarketplaceError) throw err;
    throw new MarketplaceError(
      "Invalid bundle: malformed manifest.json",
      undefined,
      "INVALID_BUNDLE",
    );
  }
  // Compute SHA-256 checksum of the (compressed) tarball for verification
  // eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion -- Uint8Array is a valid BufferSource at runtime; TS lib mismatch
  const hashBuffer = await crypto.subtle.digest("SHA-256", tarballBytes as unknown as BufferSource);
  const hashArray = new Uint8Array(hashBuffer);
  const checksum = Array.from(hashArray, (b) => b.toString(16).padStart(2, "0")).join("");
  return {
    manifest,
    backendCode,
    adminCode: files.get("admin.js"),
    checksum,
  };
}
// ── Helpers ────────────────────────────────────────────────────────
/**
 * Generate a best-effort, non-identifying site hash.
 *
 * NOTE(review): the seed includes Date.now(), so the value changes on
 * every call — it is not actually "stable" across invocations despite
 * the caller's comment; confirm whether install dedupe relies on this.
 */
async function generateSiteHash(): Promise<string> {
  // Use a timestamp-based approach since we can't reliably get the origin
  // in all contexts (Workers, Node, etc.)
  const seed = `emdash-${Date.now()}`;
  try {
    const digest = await crypto.subtle.digest("SHA-256", new TextEncoder().encode(seed));
    const leading = new Uint8Array(digest).slice(0, 8);
    let hex = "";
    for (const byte of leading) {
      hex += byte.toString(16).padStart(2, "0");
    }
    return hex;
  } catch {
    // Fallback for environments without crypto.subtle
    return Math.random().toString(36).slice(2, 18);
  }
}
// ── Factory ────────────────────────────────────────────────────────
/**
 * Create a MarketplaceClient for the given marketplace URL.
 *
 * @param baseUrl - The marketplace API base URL (e.g. "https://marketplace.emdashcms.com")
 */
export function createMarketplaceClient(baseUrl: string): MarketplaceClient {
  const client = new MarketplaceClientImpl(baseUrl);
  return client;
}

View File

@@ -0,0 +1,139 @@
/**
* Request Metadata Extraction
*
* Extracts normalized metadata (IP, user agent, referer, geo) from
* incoming requests. Used by plugin route handlers to access request
* context without touching raw headers.
*
*/
import type { GeoInfo, RequestMeta } from "./types.js";
/**
 * Cloudflare Workers `cf` object shape (subset we use).
 * Present on requests when running on Cloudflare Workers.
 */
interface CfProperties {
  country?: string;
  region?: string;
  city?: string;
}
/**
 * Loose validation for IPv4 and IPv6 addresses.
 * Accepts digits, hex chars, dots, and colons — rejects anything else
 * (e.g. HTML tags, scripts, or other non-IP garbage in spoofed headers).
 */
const IP_PATTERN = /^[\da-fA-F.:]+$/;
/**
 * Extract the first IP from an X-Forwarded-For header value.
 * The header may contain a comma-separated list of IPs; the first
 * entry is the original client IP.
 *
 * Returns null if the extracted value doesn't look like an IP address.
 */
function parseFirstForwardedIp(header: string): string | null {
  const [first] = header.split(",");
  if (first === undefined) return null;
  const candidate = first.trim();
  if (candidate.length === 0 || !IP_PATTERN.test(candidate)) return null;
  return candidate;
}
/**
 * Get the Cloudflare `cf` object from the request, if present.
 * Returns undefined when not running on Cloudflare Workers.
 */
function getCfObject(request: Request): CfProperties | undefined {
  const maybeCf = request as unknown as { cf?: CfProperties };
  return maybeCf.cf;
}
/**
 * Extract geographic information from the Cloudflare `cf` object
 * attached to the request. Returns null when not running on CF Workers
 * or when no geo field is populated.
 */
function extractGeo(cf: CfProperties | undefined): GeoInfo | null {
  if (!cf) return null;
  const geo = {
    country: cf.country ?? null,
    region: cf.region ?? null,
    city: cf.city ?? null,
  };
  // Only return geo if at least one field is populated
  const hasAnyField = geo.country !== null || geo.region !== null || geo.city !== null;
  return hasAnyField ? geo : null;
}
/**
 * Extract normalized request metadata from a Request object.
 *
 * IP resolution — BOTH header paths require the Cloudflare `cf` object
 * to be present (proving the request came through Cloudflare's edge,
 * which strips/overwrites client-supplied values):
 * 1. `CF-Connecting-IP` header (when it looks like an IP).
 * 2. `X-Forwarded-For` header, first entry.
 * 3. `null` — including all non-Cloudflare deployments, where these
 *    headers are trivially spoofable.
 */
export function extractRequestMeta(request: Request): RequestMeta {
  const headers = request.headers;
  const cf = getCfObject(request);
  // IP: only trust headers when the cf object confirms we're on Cloudflare.
  // Without a trusted reverse proxy, X-Forwarded-For is trivially spoofable.
  let ip: string | null = null;
  if (cf) {
    const cfIp = headers.get("cf-connecting-ip")?.trim();
    if (cfIp && IP_PATTERN.test(cfIp)) {
      ip = cfIp;
    }
  }
  if (!ip && cf) {
    // Only trust X-Forwarded-For when we're behind Cloudflare (which
    // overwrites the header). In standalone deployments without a trusted
    // proxy, XFF is trivially spoofable.
    const xff = headers.get("x-forwarded-for");
    ip = xff ? parseFirstForwardedIp(xff) : null;
  }
  // Empty-string headers are normalized to null via the `|| null` fallback
  const userAgent = headers.get("user-agent")?.trim() || null;
  const referer = headers.get("referer")?.trim() || null;
  const geo = extractGeo(cf);
  return { ip, userAgent, referer, geo };
}
// =============================================================================
// Header Sanitization for Sandbox
// =============================================================================
/**
 * Headers that must never cross the RPC boundary to sandboxed plugins.
 * Session tokens, auth credentials, and infrastructure headers are stripped
 * to prevent malicious plugins from exfiltrating sensitive data.
 */
const SANDBOX_STRIPPED_HEADERS = new Set([
  "cookie",
  "set-cookie",
  "authorization",
  "proxy-authorization",
  "cf-access-jwt-assertion",
  "cf-access-client-id",
  "cf-access-client-secret",
  "x-emdash-request",
]);
/**
 * Copy request headers into a plain object, stripping sensitive headers
 * that must not be exposed to sandboxed plugin code.
 *
 * Keys arrive already lowercased (Headers normalizes names), so the
 * denylist lookup is case-insensitive in effect.
 */
export function sanitizeHeadersForSandbox(headers: Headers): Record<string, string> {
  const sanitized: Record<string, string> = {};
  headers.forEach((value, key) => {
    if (SANDBOX_STRIPPED_HEADERS.has(key)) return;
    sanitized[key] = value;
  });
  return sanitized;
}

View File

@@ -0,0 +1,302 @@
/**
* Plugin Routes v2
*
* Handles plugin API route invocation with:
* - Input validation via Zod schemas
* - Route context creation
* - Error handling
*
*/
import { PluginContextFactory, type PluginContextFactoryOptions } from "./context.js";
import { extractRequestMeta } from "./request-meta.js";
import type { ResolvedPlugin, RouteContext, PluginRoute } from "./types.js";
/**
 * Route metadata (public flag) without the handler.
 * Used by the catch-all route to decide auth before dispatch.
 */
export interface RouteMeta {
  /** True when the route may be dispatched without authentication (route declared `public: true`). */
  public: boolean;
}
/**
 * Result envelope from a route invocation. Exactly one of `data` (on
 * success) or `error` (on failure) is populated; `status` is the HTTP
 * status to return to the caller.
 */
export interface RouteResult<T = unknown> {
  /** True when the handler completed without error. */
  success: boolean;
  /** Handler return value; present only on success. */
  data?: T;
  /** Structured error info; present only on failure. */
  error?: {
    code: string;
    message: string;
    details?: unknown;
  };
  /** HTTP status code (200, 400, 404, 500, or an error's own status). */
  status: number;
}
/**
 * Route invocation options
 */
export interface InvokeRouteOptions {
  /** The original request */
  request: Request;
  /** Request body (already parsed) */
  body?: unknown;
}
/**
 * Dispatches named route invocations for a single plugin: validates input,
 * builds the capability-gated route context, and normalizes results and
 * errors into RouteResult envelopes. Never throws — all failure modes map
 * to an error result with an HTTP status.
 */
export class PluginRouteHandler {
  private plugin: ResolvedPlugin;
  private contextFactory: PluginContextFactory;

  constructor(plugin: ResolvedPlugin, factoryOptions: PluginContextFactoryOptions) {
    this.plugin = plugin;
    this.contextFactory = new PluginContextFactory(factoryOptions);
  }

  /**
   * Invoke a route by name.
   *
   * Status mapping: 404 for unknown routes, 400 for schema-invalid bodies,
   * 200 on success, a PluginRouteError's own status when the handler throws
   * one, and 500 for any other thrown value.
   */
  async invoke(routeName: string, options: InvokeRouteOptions): Promise<RouteResult> {
    const route = this.plugin.routes[routeName];
    if (!route) {
      return {
        success: false,
        status: 404,
        error: {
          code: "ROUTE_NOT_FOUND",
          message: `Route "${routeName}" not found in plugin "${this.plugin.id}"`,
        },
      };
    }

    // Validate the body against the route's input schema when one is
    // declared; otherwise pass the raw body through untouched.
    let validatedInput: unknown = options.body;
    if (route.input) {
      const parsed = route.input.safeParse(options.body);
      if (!parsed.success) {
        return {
          success: false,
          status: 400,
          error: {
            code: "VALIDATION_ERROR",
            message: "Invalid request body",
            details: parsed.error.format(),
          },
        };
      }
      validatedInput = parsed.data;
    }

    // Capability-gated plugin context, extended with request-scoped data.
    const routeContext: RouteContext = {
      ...this.contextFactory.createContext(this.plugin),
      input: validatedInput,
      request: options.request,
      requestMeta: extractRequestMeta(options.request),
    };

    try {
      const data = await route.handler(routeContext);
      return { success: true, data, status: 200 };
    } catch (error) {
      // Structured plugin errors carry their own code/status/details.
      if (error instanceof PluginRouteError) {
        return {
          success: false,
          status: error.status,
          error: { code: error.code, message: error.message, details: error.details },
        };
      }
      // Anything else is an internal failure.
      const message = error instanceof Error ? error.message : String(error);
      return {
        success: false,
        status: 500,
        error: { code: "INTERNAL_ERROR", message: `Route handler failed: ${message}` },
      };
    }
  }

  /** Names of all routes exposed by this plugin. */
  getRouteNames(): string[] {
    return Object.keys(this.plugin.routes);
  }

  /** Whether the plugin exposes a route with the given name. */
  hasRoute(name: string): boolean {
    return name in this.plugin.routes;
  }

  /**
   * Route metadata without invoking the handler; null for unknown routes.
   * `public` is true only when the route explicitly opts in.
   */
  getRouteMeta(name: string): RouteMeta | null {
    const route: PluginRoute | undefined = this.plugin.routes[name];
    return route ? { public: route.public === true } : null;
  }
}
/**
 * Structured error type for plugin route handlers.
 *
 * Throwing one of these from a handler maps directly onto the HTTP
 * response: `code`, `message`, `status`, and optional `details` are
 * surfaced to the caller by PluginRouteHandler.invoke().
 */
export class PluginRouteError extends Error {
  public code: string;
  public status: number;
  public details?: unknown;

  constructor(code: string, message: string, status: number = 400, details?: unknown) {
    super(message);
    this.name = "PluginRouteError";
    this.code = code;
    this.status = status;
    this.details = details;
  }

  /** 400 — malformed or invalid request. */
  static badRequest(message: string, details?: unknown): PluginRouteError {
    return new PluginRouteError("BAD_REQUEST", message, 400, details);
  }

  /** 401 — missing or invalid authentication. */
  static unauthorized(message: string = "Unauthorized"): PluginRouteError {
    return new PluginRouteError("UNAUTHORIZED", message, 401);
  }

  /** 403 — authenticated but not allowed. */
  static forbidden(message: string = "Forbidden"): PluginRouteError {
    return new PluginRouteError("FORBIDDEN", message, 403);
  }

  /** 404 — resource not found. */
  static notFound(message: string = "Not found"): PluginRouteError {
    return new PluginRouteError("NOT_FOUND", message, 404);
  }

  /** 409 — state conflict. */
  static conflict(message: string, details?: unknown): PluginRouteError {
    return new PluginRouteError("CONFLICT", message, 409, details);
  }

  /** 500 — unexpected internal failure. */
  static internal(message: string = "Internal error"): PluginRouteError {
    return new PluginRouteError("INTERNAL_ERROR", message, 500);
  }
}
/**
 * Registry that owns one PluginRouteHandler per registered plugin and
 * dispatches route invocations to the right one.
 */
export class PluginRouteRegistry {
  private handlers = new Map<string, PluginRouteHandler>();

  constructor(private factoryOptions: PluginContextFactoryOptions) {}

  /** Register (or replace) the route handler for a plugin. */
  register(plugin: ResolvedPlugin): void {
    this.handlers.set(plugin.id, new PluginRouteHandler(plugin, this.factoryOptions));
  }

  /** Remove a plugin's route handler. */
  unregister(pluginId: string): void {
    this.handlers.delete(pluginId);
  }

  /**
   * Invoke a named route on a registered plugin.
   * Returns a 404 PLUGIN_NOT_FOUND result for unknown plugin IDs.
   */
  async invoke(
    pluginId: string,
    routeName: string,
    options: InvokeRouteOptions,
  ): Promise<RouteResult> {
    const handler = this.handlers.get(pluginId);
    if (handler) {
      return handler.invoke(routeName, options);
    }
    return {
      success: false,
      status: 404,
      error: {
        code: "PLUGIN_NOT_FOUND",
        message: `Plugin "${pluginId}" not found`,
      },
    };
  }

  /** IDs of all plugins with registered handlers. */
  getPluginIds(): string[] {
    return Array.from(this.handlers.keys());
  }

  /** Route names for a plugin; empty when the plugin isn't registered. */
  getRoutes(pluginId: string): string[] {
    const handler = this.handlers.get(pluginId);
    return handler ? handler.getRouteNames() : [];
  }

  /**
   * Route metadata for a specific plugin route.
   * Null when the plugin or route doesn't exist.
   */
  getRouteMeta(pluginId: string, routeName: string): RouteMeta | null {
    return this.handlers.get(pluginId)?.getRouteMeta(routeName) ?? null;
  }
}
/**
 * Convenience factory: build a PluginRouteRegistry from context-factory
 * options.
 */
export function createRouteRegistry(
  factoryOptions: PluginContextFactoryOptions,
): PluginRouteRegistry {
  const registry = new PluginRouteRegistry(factoryOptions);
  return registry;
}

View File

@@ -0,0 +1,18 @@
/**
* Plugin Sandbox Exports
*
*/
export { NoopSandboxRunner, SandboxNotAvailableError, createNoopSandboxRunner } from "./noop.js";
export type {
SandboxRunner,
SandboxedPlugin,
SandboxRunnerFactory,
SandboxOptions,
SandboxEmailMessage,
SandboxEmailSendCallback,
ResourceLimits,
PluginCodeStorage,
SerializedRequest,
} from "./types.js";

View File

@@ -0,0 +1,76 @@
/**
* No-op Sandbox Runner
*
* Default implementation that doesn't support sandboxing.
* Used on platforms without Worker Loader (Node.js, Deno, etc.).
*
*/
import type { PluginManifest } from "../types.js";
import type { SandboxRunner, SandboxedPlugin, SandboxOptions } from "./types.js";
/**
 * Error thrown when attempting to use sandboxing on an unsupported platform.
 * Carries a fixed, user-actionable message.
 */
export class SandboxNotAvailableError extends Error {
  constructor() {
    const message = [
      "Plugin sandboxing is not available on this platform.",
      "Sandboxed plugins require Cloudflare Workers with Worker Loader.",
      "Use trusted plugins (from config) instead, or deploy to Cloudflare.",
    ].join(" ");
    super(message);
    this.name = "SandboxNotAvailableError";
  }
}
/**
 * No-op sandbox runner for platforms without isolation support
 * (Node.js, Deno, etc.).
 *
 * - `isAvailable()` always returns false
 * - `load()` always throws SandboxNotAvailableError
 * - `setEmailSend()` / `terminateAll()` do nothing
 *
 * This is the default runner when no platform adapter is configured.
 */
export class NoopSandboxRunner implements SandboxRunner {
  /** Sandboxing is never available with this runner. */
  isAvailable(): boolean {
    return false;
  }

  /** Loading always fails: there is no isolation backend to load into. */
  // eslint-disable-next-line @typescript-eslint/no-unused-vars
  async load(_manifest: PluginManifest, _code: string): Promise<SandboxedPlugin> {
    throw new SandboxNotAvailableError();
  }

  /** No-op: without sandboxing there is nowhere to route email sends. */
  setEmailSend(): void {
    /* intentionally empty */
  }

  /** No-op: there are no isolates to tear down. */
  async terminateAll(): Promise<void> {
    /* intentionally empty */
  }
}
/**
 * Create a no-op sandbox runner. Used as the default when no platform
 * adapter is configured; the options argument is accepted for interface
 * compatibility but ignored.
 */
export function createNoopSandboxRunner(_options?: SandboxOptions): SandboxRunner {
  const runner = new NoopSandboxRunner();
  return runner;
}

View File

@@ -0,0 +1,173 @@
/**
* Plugin Sandbox Types
*
* Defines interfaces for running plugins in sandboxed V8 isolates.
* The SandboxRunner interface is implemented by platform adapters
* (e.g., Cloudflare Worker Loader) to provide isolation.
*
*/
import type { Kysely } from "kysely";
import type { Database } from "../../database/types.js";
import type { PluginManifest, RequestMeta } from "../types.js";
/**
 * Resource limits for sandboxed plugins.
 * Enforced by the sandbox runtime (e.g., Worker Loader).
 * NOTE(review): the stated defaults are applied by the platform adapter,
 * not in this package — confirm against the adapter implementation.
 */
export interface ResourceLimits {
  /** CPU time per invocation in milliseconds (default: 50ms) */
  cpuMs?: number;
  /** Memory limit in MB (default: 128MB) */
  memoryMb?: number;
  /** Maximum subrequests per invocation (default: 10) */
  subrequests?: number;
  /** Wall-clock time limit in milliseconds (default: 30000ms) */
  wallTimeMs?: number;
}
/**
 * Storage interface for loading plugin code.
 * Could be R2, local filesystem, or any other storage backend.
 */
export interface PluginCodeStorage {
  /** Get plugin bundle code by path; resolves null when no bundle exists there. */
  get(path: string): Promise<string | null>;
  /** Check if a bundle exists at the given path. */
  exists(path: string): Promise<boolean>;
}
/**
 * Serialized email message for sandbox RPC transport.
 * Matches the core EmailMessage type but uses only serializable fields.
 */
export interface SandboxEmailMessage {
  /** Recipient address. */
  to: string;
  /** Subject line. */
  subject: string;
  /** Plain-text body. */
  text: string;
  /** Optional HTML body. */
  html?: string;
}
/**
 * Callback for sending email from a sandboxed plugin.
 * The sandbox runner wires this up from the EmailPipeline.
 *
 * @param message - The email message to send
 * @param pluginId - The sending plugin's ID (used as source)
 */
export type SandboxEmailSendCallback = (
  message: SandboxEmailMessage,
  pluginId: string,
) => Promise<void>;
/**
 * Options for creating a sandbox runner.
 */
export interface SandboxOptions {
  /** Storage interface for loading plugin code */
  storage?: PluginCodeStorage;
  /** Database for bridge operations */
  db: Kysely<Database>;
  /** Default resource limits */
  limits?: ResourceLimits;
  /** Site info for plugin context (injected into wrapper at generation time) */
  siteInfo?: { name: string; url: string; locale: string };
  /** Email send callback, wired from the EmailPipeline by the runtime */
  emailSend?: SandboxEmailSendCallback;
}
/**
 * A sandboxed plugin instance.
 * Provides methods to invoke hooks and routes in the isolated environment.
 */
export interface SandboxedPlugin {
  /** Unique identifier: `${manifest.id}:${manifest.version}` */
  readonly id: string;
  /**
   * Invoke a hook in the sandboxed plugin.
   *
   * @param hookName - Name of the hook (e.g., "content:beforeSave")
   * @param event - Event data to pass to the hook
   * @returns Hook result (transformed content, void, etc.)
   */
  invokeHook(hookName: string, event: unknown): Promise<unknown>;
  /**
   * Invoke an API route in the sandboxed plugin.
   *
   * @param routeName - Name of the route
   * @param input - Validated input data
   * @param request - Serialized request info for context
   * @returns Route response data
   */
  invokeRoute(routeName: string, input: unknown, request: SerializedRequest): Promise<unknown>;
  /**
   * Terminate the sandboxed plugin.
   * Releases resources and prevents further invocations.
   */
  terminate(): Promise<void>;
}
/**
 * Serialized request for RPC transport.
 * Worker Loader can't pass Request objects directly.
 *
 * NOTE(review): `headers` is presumably produced by
 * sanitizeHeadersForSandbox (sensitive headers stripped) — confirm at
 * call sites before relying on that.
 */
export interface SerializedRequest {
  url: string;
  method: string;
  headers: Record<string, string>;
  /** Normalized request metadata extracted before RPC serialization */
  meta: RequestMeta;
}
/**
 * Sandbox runner interface.
 * Platform adapters implement this to provide plugin isolation.
 */
export interface SandboxRunner {
  /**
   * Check if sandboxing is available on this platform.
   * Returns false for platforms that don't support isolation.
   */
  isAvailable(): boolean;
  /**
   * Load a sandboxed plugin from code.
   *
   * @param manifest - Plugin manifest with metadata and capabilities
   * @param code - The bundled plugin JavaScript code
   * @returns A sandboxed plugin instance
   * @throws If sandboxing is not available or plugin can't be loaded
   */
  load(manifest: PluginManifest, code: string): Promise<SandboxedPlugin>;
  /**
   * Set the email send callback for sandboxed plugins.
   * Called after the EmailPipeline is created, since the pipeline
   * doesn't exist when the sandbox runner is constructed.
   * Passing null clears the callback.
   */
  setEmailSend(callback: SandboxEmailSendCallback | null): void;
  /**
   * Terminate all loaded sandboxed plugins.
   * Called during shutdown or when reconfiguring.
   */
  terminateAll(): Promise<void>;
}
/**
 * Factory function type for creating sandbox runners.
 * Exported by platform adapters (e.g., @emdashcms/adapter-cloudflare/sandbox).
 *
 * @example
 * ```typescript
 * // In @emdashcms/adapter-cloudflare/sandbox.ts
 * export const createSandboxRunner: SandboxRunnerFactory = (options) => {
 *   return new CloudflareSandboxRunner(options);
 * };
 * ```
 */
export type SandboxRunnerFactory = (options: SandboxOptions) => SandboxRunner;

View File

@@ -0,0 +1,122 @@
/**
* Node.js cron scheduler — setTimeout-based.
*
* Queries the executor for the next due time and sets a timeout. Re-arms
* after each tick and when reschedule() is called (new task scheduled or
* cancelled).
*
* Suitable for single-process deployments (local dev, single-node).
*
*/
import type { CronExecutor } from "../cron.js";
import type { CronScheduler, SystemCleanupFn } from "./types.js";
/** Minimum polling interval (ms) — prevents tight loops if next_run_at is in the past */
const MIN_INTERVAL_MS = 1000;
/** Maximum polling interval (ms) — wake up periodically to check for stale locks */
const MAX_INTERVAL_MS = 5 * 60 * 1000;

/**
 * setTimeout-based cron scheduler for single-process deployments.
 *
 * Queries the executor for the next due time and arms a single timeout,
 * re-arming after each tick and whenever reschedule() is called.
 *
 * Fix over the original: arm() is asynchronous (it awaits
 * getNextDueTime()), so it could race with reschedule() or a tick's
 * re-arm and leave more than one live timer — the older one fired but was
 * no longer tracked in `this.timer`, causing duplicate ticks. arm() now
 * clears any pending timer before scheduling a new one, so at most one
 * wake-up is ever outstanding.
 */
export class NodeCronScheduler implements CronScheduler {
  /** The single pending wake-up timer; null when stopped or between arms. */
  private timer: ReturnType<typeof setTimeout> | null = null;
  /** Guards all timer work; flipped by start()/stop(). */
  private running = false;
  /** Optional cleanup task run alongside each tick. */
  private systemCleanup: SystemCleanupFn | null = null;

  constructor(private executor: CronExecutor) {}

  setSystemCleanup(fn: SystemCleanupFn): void {
    this.systemCleanup = fn;
  }

  /** Start scheduling ticks. */
  start(): void {
    this.running = true;
    this.arm();
  }

  /** Stop and clear any pending wake-up. */
  stop(): void {
    this.running = false;
    this.clearTimer();
  }

  /** Signal that the next due time may have changed; re-arm immediately. */
  reschedule(): void {
    if (!this.running) return;
    this.clearTimer();
    this.arm();
  }

  /** Clear the pending timer, if any. */
  private clearTimer(): void {
    if (this.timer) {
      clearTimeout(this.timer);
      this.timer = null;
    }
  }

  /** Allow the process to exit even while this timer is pending. */
  private unrefTimer(): void {
    if (this.timer && typeof this.timer === "object" && "unref" in this.timer) {
      this.timer.unref();
    }
  }

  /** Query the next due time and schedule a single wake-up for it. */
  private arm(): void {
    if (!this.running) return;
    void this.executor
      .getNextDueTime()
      .then((nextDue) => {
        if (!this.running) return undefined;
        let delayMs: number;
        if (nextDue) {
          const dueAt = new Date(nextDue).getTime();
          // Clamp: never below MIN (avoid tight loops on past-due times),
          // never above MAX (periodic stale-lock recovery).
          delayMs = Math.max(dueAt - Date.now(), MIN_INTERVAL_MS);
          delayMs = Math.min(delayMs, MAX_INTERVAL_MS);
        } else {
          // No tasks scheduled — poll at max interval for stale lock recovery
          delayMs = MAX_INTERVAL_MS;
        }
        // Bug fix: clear any timer armed concurrently (reschedule() racing
        // a tick's re-arm) so at most one wake-up is ever pending.
        this.clearTimer();
        this.timer = setTimeout(() => {
          if (!this.running) return;
          this.executeTick();
        }, delayMs);
        this.unrefTimer();
        return undefined;
      })
      .catch((error: unknown) => {
        console.error("[cron:node] Failed to get next due time:", error);
        // Retry after max interval
        if (this.running) {
          this.clearTimer();
          this.timer = setTimeout(() => this.arm(), MAX_INTERVAL_MS);
          this.unrefTimer();
        }
      });
  }

  /** Run tick + stale lock recovery + system cleanup, then re-arm. */
  private executeTick(): void {
    if (!this.running) return;
    const tasks: Promise<unknown>[] = [this.executor.tick(), this.executor.recoverStaleLocks()];
    if (this.systemCleanup) {
      tasks.push(this.systemCleanup());
    }
    void Promise.allSettled(tasks)
      .then((results) => {
        for (const r of results) {
          if (r.status === "rejected") {
            console.error("[cron:node] Tick task failed:", r.reason);
          }
        }
        return undefined;
      })
      .finally(() => {
        if (this.running) {
          this.arm();
        }
      });
  }
}

View File

@@ -0,0 +1,71 @@
/**
* Piggyback cron scheduler — request-driven fallback.
*
* Checks for overdue tasks on each incoming request, debounced to at most
* once per 60 seconds. Fire-and-forget (does not block the request).
*
* Used on Cloudflare when no Durable Object binding is available, or
* during development when DO bindings aren't configured.
*
*/
import type { CronExecutor } from "../cron.js";
import type { CronScheduler, SystemCleanupFn } from "./types.js";
/** Minimum interval between tick attempts (ms) */
const DEBOUNCE_MS = 60 * 1000;

/**
 * Request-driven cron scheduler fallback.
 *
 * Each incoming request may trigger a tick, throttled to at most one
 * attempt per DEBOUNCE_MS. Work is fire-and-forget so requests are never
 * blocked. Used on Cloudflare when no Durable Object binding is available,
 * or during development when DO bindings aren't configured.
 */
export class PiggybackScheduler implements CronScheduler {
  private lastTickAt = 0;
  private running = false;
  private systemCleanup: SystemCleanupFn | null = null;

  constructor(private executor: CronExecutor) {}

  setSystemCleanup(fn: SystemCleanupFn): void {
    this.systemCleanup = fn;
  }

  start(): void {
    this.running = true;
  }

  stop(): void {
    this.running = false;
  }

  /** No-op for piggyback — the next request performs the check. */
  reschedule(): void {
    // Intentionally empty: ticks are driven by onRequest().
  }

  /**
   * Hook for request middleware, called on every request.
   * Debounced: the tick batch only runs when DEBOUNCE_MS has elapsed
   * since the last attempt.
   */
  onRequest(): void {
    if (!this.running) return;

    const now = Date.now();
    if (now - this.lastTickAt < DEBOUNCE_MS) return;
    this.lastTickAt = now;

    // Fire-and-forget — never block the request on cron work.
    const batch: Promise<unknown>[] = [this.executor.tick(), this.executor.recoverStaleLocks()];
    if (this.systemCleanup) {
      batch.push(this.systemCleanup());
    }
    void Promise.allSettled(batch).then((results) => {
      for (const outcome of results) {
        if (outcome.status === "rejected") {
          console.error("[cron:piggyback] Tick task failed:", outcome.reason);
        }
      }
      return undefined;
    });
  }
}

View File

@@ -0,0 +1,27 @@
/**
* Platform-specific cron scheduler interface.
*
* Schedulers are responsible for calling CronExecutor.tick() at the right
* time. The executor handles all business logic; the scheduler only manages
* timing.
*
* Implementations receive the CronExecutor via constructor.
*
*/
export interface CronScheduler {
  /** Start the scheduler. May return a promise for platforms with async setup. */
  start(): void | Promise<void>;
  /** Stop the scheduler and clean up timers/alarms. */
  stop(): void | Promise<void>;
  /**
   * Signal that the next due time may have changed (task added/cancelled).
   * Synchronous by signature — implementations should not block callers.
   */
  reschedule(): void;
  /** Register a system cleanup function to run alongside each tick. */
  setSystemCleanup(fn: SystemCleanupFn): void;
}
/**
 * System cleanup callback invoked alongside each scheduler tick.
 * Fire-and-forget -- failures are logged internally and never propagate.
 */
export type SystemCleanupFn = () => Promise<void>;

View File

@@ -0,0 +1,208 @@
/**
* Plugin State Repository
*
* Database-backed storage for plugin activation state.
* Used by the admin API to persist plugin enable/disable across restarts.
*/
import type { Kysely, Selectable } from "kysely";
import type { Database } from "../database/types.js";
/** Activation status persisted for a plugin. */
export type PluginStatus = "active" | "inactive";
/** Where a plugin was installed from. */
export type PluginSource = "config" | "marketplace";

/** Coerce a raw DB string to PluginStatus; anything unrecognized is "inactive". */
function toPluginStatus(value: string): PluginStatus {
  return value === "active" ? "active" : "inactive";
}

/** Coerce a raw DB value to PluginSource; anything unrecognized is "config". */
function toPluginSource(value: string | undefined | null): PluginSource {
  return value === "marketplace" ? "marketplace" : "config";
}
/** Persisted activation state for a single plugin (one `_plugin_state` row). */
export interface PluginState {
  /** Plugin identifier. */
  pluginId: string;
  /** Current activation status. */
  status: PluginStatus;
  /** Installed plugin version string. */
  version: string;
  /** When the state row was first created. */
  installedAt: Date;
  /** Timestamp of the last transition to "active", if any. */
  activatedAt: Date | null;
  /** Timestamp of the last transition to "inactive", if any. */
  deactivatedAt: Date | null;
  /** Where the plugin came from: site config or the marketplace. */
  source: PluginSource;
  /** Version reported by the marketplace, when known. */
  marketplaceVersion: string | null;
  /** Human-readable name, when recorded. */
  displayName: string | null;
  /** Human-readable description, when recorded. */
  description: string | null;
}
/**
* Repository for plugin state in the database
*/
export class PluginStateRepository {
constructor(private db: Kysely<Database>) {}
/**
* Get state for a specific plugin
*/
async get(pluginId: string): Promise<PluginState | null> {
const row = await this.db
.selectFrom("_plugin_state")
.selectAll()
.where("plugin_id", "=", pluginId)
.executeTakeFirst();
if (!row) return null;
return {
pluginId: row.plugin_id,
status: toPluginStatus(row.status),
version: row.version,
installedAt: new Date(row.installed_at),
activatedAt: row.activated_at ? new Date(row.activated_at) : null,
deactivatedAt: row.deactivated_at ? new Date(row.deactivated_at) : null,
source: toPluginSource(row.source),
marketplaceVersion: row.marketplace_version ?? null,
displayName: row.display_name ?? null,
description: row.description ?? null,
};
}
/**
* Get all plugin states
*/
async getAll(): Promise<PluginState[]> {
const rows = await this.db.selectFrom("_plugin_state").selectAll().execute();
return rows.map((row) => ({
pluginId: row.plugin_id,
status: toPluginStatus(row.status),
version: row.version,
installedAt: new Date(row.installed_at),
activatedAt: row.activated_at ? new Date(row.activated_at) : null,
deactivatedAt: row.deactivated_at ? new Date(row.deactivated_at) : null,
source: toPluginSource(row.source),
marketplaceVersion: row.marketplace_version ?? null,
displayName: row.display_name ?? null,
description: row.description ?? null,
}));
}
/**
* Get all marketplace-installed plugin states
*/
async getMarketplacePlugins(): Promise<PluginState[]> {
const rows = await this.db
.selectFrom("_plugin_state")
.selectAll()
.where("source", "=", "marketplace")
.execute();
return rows.map((row) => ({
pluginId: row.plugin_id,
status: toPluginStatus(row.status),
version: row.version,
installedAt: new Date(row.installed_at),
activatedAt: row.activated_at ? new Date(row.activated_at) : null,
deactivatedAt: row.deactivated_at ? new Date(row.deactivated_at) : null,
source: toPluginSource(row.source),
marketplaceVersion: row.marketplace_version ?? null,
displayName: row.display_name ?? null,
description: row.description ?? null,
}));
}
/**
* Create or update plugin state
*/
async upsert(
pluginId: string,
version: string,
status: PluginStatus,
opts?: {
source?: PluginSource;
marketplaceVersion?: string;
displayName?: string;
description?: string;
},
): Promise<PluginState> {
const now = new Date().toISOString();
const existing = await this.get(pluginId);
if (existing) {
// Update existing state
const updates: Record<string, string | null> = {
status,
version,
};
if (status === "active" && existing.status !== "active") {
updates.activated_at = now;
} else if (status === "inactive" && existing.status !== "inactive") {
updates.deactivated_at = now;
}
if (opts?.source) updates.source = opts.source;
if (opts?.marketplaceVersion !== undefined) {
updates.marketplace_version = opts.marketplaceVersion;
}
if (opts?.displayName !== undefined) {
updates.display_name = opts.displayName;
}
if (opts?.description !== undefined) {
updates.description = opts.description;
}
await this.db
.updateTable("_plugin_state")
.set(updates)
.where("plugin_id", "=", pluginId)
.execute();
} else {
// Create new state
await this.db
.insertInto("_plugin_state")
.values({
plugin_id: pluginId,
status,
version,
installed_at: now,
activated_at: status === "active" ? now : null,
deactivated_at: null,
data: null,
source: opts?.source ?? "config",
marketplace_version: opts?.marketplaceVersion ?? null,
display_name: opts?.displayName ?? null,
description: opts?.description ?? null,
})
.execute();
}
return (await this.get(pluginId))!;
}
/**
* Enable a plugin
*/
async enable(pluginId: string, version: string): Promise<PluginState> {
return this.upsert(pluginId, version, "active");
}
/**
* Disable a plugin
*/
async disable(pluginId: string, version: string): Promise<PluginState> {
return this.upsert(pluginId, version, "inactive");
}
/**
* Delete plugin state
*/
async delete(pluginId: string): Promise<boolean> {
const result = await this.db
.deleteFrom("_plugin_state")
.where("plugin_id", "=", pluginId)
.executeTakeFirst();
return (result.numDeletedRows ?? 0) > 0;
}
}

View File

@@ -0,0 +1,326 @@
/**
* Plugin Storage Index Management
*
* Manages expression indexes on the _plugin_storage table for efficient queries.
*
* @see PLUGIN-SYSTEM.md § Plugin Storage > Index Management
*/
import type { Kysely, RawBuilder } from "kysely";
import { sql } from "kysely";
import { jsonExtractExpr, isPostgres } from "../database/dialect-helpers.js";
import type { Database } from "../database/types.js";
import {
validateIdentifier,
validateJsonFieldName,
validatePluginIdentifier,
} from "../database/validate.js";
/**
 * Generate a deterministic index name for a plugin collection index.
 * Unique indexes use a `uidx_` prefix to avoid collisions with regular
 * indexes on the same fields. Result is capped at 128 characters.
 */
export function generateIndexName(
  pluginId: string,
  collection: string,
  fields: string[],
  options?: { unique?: boolean },
): string {
  const prefix = options?.unique ? "uidx" : "idx";
  // Note: fields.join("_") may be empty, in which case the name keeps a
  // trailing underscore — callers depend on deterministic names.
  const name = [prefix, "plugin", pluginId, collection, fields.join("_")].join("_");
  // SQLite index names have no length limit, but keep it reasonable
  return name.slice(0, 128);
}
/**
* Generate a Kysely sql expression for creating an expression index.
*
* Validates all identifiers before interpolation to prevent SQL injection.
* Plugin ID and collection values are parameterized in the WHERE clause.
*/
// eslint-disable-next-line @typescript-eslint/no-explicit-any -- accepts any Kysely instance
export function generateCreateIndexSql(
db: Kysely<any>,
pluginId: string,
collection: string,
fields: string[],
options?: { unique?: boolean },
): RawBuilder<unknown> {
// Validate all identifiers
validatePluginIdentifier(pluginId, "plugin ID");
validateIdentifier(collection, "collection name");
for (const field of fields) {
validateJsonFieldName(field, "index field name");
}
const indexName = generateIndexName(pluginId, collection, fields, options);
// Build the indexed expressions
// Fields are validated above, safe to interpolate into json path
const expressions = fields
.map((field) => {
if (isPostgres(db)) {
// Postgres expression indexes need parens around the expression
return `(${jsonExtractExpr(db, "data", field)})`;
}
return jsonExtractExpr(db, "data", field);
})
.join(", ");
// Partial index filtered to this plugin/collection
// SQLite prohibits bound parameters in partial index WHERE clauses,
// so we use sql.lit() for literal string values. Both pluginId and
// collection are validated above, so this is safe.
const createKeyword = options?.unique ? "CREATE UNIQUE INDEX" : "CREATE INDEX";
return sql`${sql.raw(createKeyword)} IF NOT EXISTS ${sql.ref(indexName)}
ON _plugin_storage(${sql.raw(expressions)})
WHERE plugin_id = ${sql.lit(pluginId)} AND collection = ${sql.lit(collection)}
`;
}
/**
 * Generate a Kysely sql expression for dropping an index by name.
 * sql.ref() quotes the identifier safely.
 */
export function generateDropIndexSql(indexName: string): RawBuilder<unknown> {
  const quoted = sql.ref(indexName);
  return sql`DROP INDEX IF EXISTS ${quoted}`;
}
/**
 * Normalize index declarations: a bare field name becomes a single-field
 * index; a field array passes through as a composite index.
 */
export function normalizeIndexes(indexes: Array<string | string[]>): string[][] {
  const normalized: string[][] = [];
  for (const entry of indexes) {
    normalized.push(typeof entry === "string" ? [entry] : entry);
  }
  return normalized;
}
/**
 * Create all declared indexes for a plugin collection.
 *
 * Field sets declared both as regular and unique indexes are created once,
 * as the unique variant. Each created index is recorded in the
 * `_plugin_indexes` tracking table. Failures are collected per index
 * instead of aborting the whole batch.
 */
export async function createStorageIndexes(
  db: Kysely<Database>,
  pluginId: string,
  collection: string,
  indexes: Array<string | string[]>,
  options?: { uniqueIndexes?: Array<string | string[]> },
): Promise<{
  created: string[];
  errors: Array<{ index: string; error: string }>;
}> {
  const regular = normalizeIndexes(indexes);
  const unique = options?.uniqueIndexes ? normalizeIndexes(options.uniqueIndexes) : [];
  const uniqueKeys = new Set(unique.map((fields) => fields.join(",")));

  // Plan the work: unique wins when the same field set appears in both lists.
  const plan: Array<{ fields: string[]; unique: boolean }> = [];
  for (const fields of regular) {
    if (!uniqueKeys.has(fields.join(","))) {
      plan.push({ fields, unique: false });
    }
  }
  for (const fields of unique) {
    plan.push({ fields, unique: true });
  }

  const created: string[] = [];
  const errors: Array<{ index: string; error: string }> = [];

  for (const item of plan) {
    const indexName = generateIndexName(pluginId, collection, item.fields, {
      unique: item.unique,
    });
    try {
      // Create the index itself…
      await generateCreateIndexSql(db, pluginId, collection, item.fields, {
        unique: item.unique,
      }).execute(db);
      // …then record it so orphan cleanup can find it later.
      await db
        .insertInto("_plugin_indexes")
        .values({
          plugin_id: pluginId,
          collection,
          index_name: indexName,
          fields: JSON.stringify(item.fields),
        })
        .onConflict((oc) =>
          oc
            .columns(["plugin_id", "collection", "index_name"])
            .doUpdateSet({ fields: JSON.stringify(item.fields) }),
        )
        .execute();
      created.push(indexName);
    } catch (error) {
      errors.push({
        index: indexName,
        error: error instanceof Error ? error.message : String(error),
      });
    }
  }

  return { created, errors };
}
/**
 * Remove indexes that are no longer declared for a plugin collection.
 *
 * Expected index names are recomputed from the current declarations
 * (unique declarations carry the `uidx_` prefix; a field set present in
 * both lists exists only as the unique variant, mirroring the
 * deduplication in createStorageIndexes). Anything in the tracking table
 * not in that expected set is dropped and untracked. Failures are
 * collected per index.
 */
export async function removeOrphanedIndexes(
  db: Kysely<Database>,
  pluginId: string,
  collection: string,
  currentIndexes: Array<string | string[]>,
  options?: { uniqueIndexes?: Array<string | string[]> },
): Promise<{
  removed: string[];
  errors: Array<{ index: string; error: string }>;
}> {
  const regular = normalizeIndexes(currentIndexes);
  const unique = options?.uniqueIndexes ? normalizeIndexes(options.uniqueIndexes) : [];
  const uniqueKeys = new Set(unique.map((fields) => fields.join(",")));

  // Names we expect to exist after a sync.
  const expected = new Set<string>();
  for (const fields of regular) {
    if (!uniqueKeys.has(fields.join(","))) {
      expected.add(generateIndexName(pluginId, collection, fields));
    }
  }
  for (const fields of unique) {
    expected.add(generateIndexName(pluginId, collection, fields, { unique: true }));
  }

  // Tracked indexes for this plugin/collection.
  const tracked = await db
    .selectFrom("_plugin_indexes")
    .select(["index_name"])
    .where("plugin_id", "=", pluginId)
    .where("collection", "=", collection)
    .execute();

  const removed: string[] = [];
  const errors: Array<{ index: string; error: string }> = [];

  for (const row of tracked) {
    const name = row.index_name;
    if (expected.has(name)) continue;
    try {
      // Drop the index, then forget it in the tracking table.
      await generateDropIndexSql(name).execute(db);
      await db
        .deleteFrom("_plugin_indexes")
        .where("plugin_id", "=", pluginId)
        .where("collection", "=", collection)
        .where("index_name", "=", name)
        .execute();
      removed.push(name);
    } catch (error) {
      errors.push({
        index: name,
        error: error instanceof Error ? error.message : String(error),
      });
    }
  }

  return { removed, errors };
}
/**
 * Sync indexes for a plugin collection: create newly declared ones and drop
 * those no longer declared. The two passes run concurrently; they operate
 * on disjoint index-name sets, so they do not conflict.
 */
export async function syncStorageIndexes(
  db: Kysely<Database>,
  pluginId: string,
  collection: string,
  indexes: Array<string | string[]>,
  options?: { uniqueIndexes?: Array<string | string[]> },
): Promise<{
  created: string[];
  removed: string[];
  errors: Array<{ index: string; error: string }>;
}> {
  const [creation, removal] = await Promise.all([
    createStorageIndexes(db, pluginId, collection, indexes, options),
    removeOrphanedIndexes(db, pluginId, collection, indexes, options),
  ]);
  return {
    created: creation.created,
    removed: removal.removed,
    errors: creation.errors.concat(removal.errors),
  };
}
/**
 * Remove all indexes for a plugin.
 *
 * Each index is dropped individually; failures are collected rather than
 * aborting the whole operation. Only indexes that were actually dropped
 * are removed from the `_plugin_indexes` tracking table — previously the
 * cleanup deleted every tracking row for the plugin, which orphaned any
 * index whose DROP failed (it still existed in the database but was no
 * longer tracked, so a retry could never find it).
 *
 * @param db - Database connection
 * @param pluginId - Plugin whose indexes should be removed
 * @returns Names of dropped indexes and per-index drop errors
 */
export async function removeAllPluginIndexes(
  db: Kysely<Database>,
  pluginId: string,
): Promise<{
  removed: string[];
  errors: Array<{ index: string; error: string }>;
}> {
  const existingIndexes = await db
    .selectFrom("_plugin_indexes")
    .select(["index_name", "collection"])
    .where("plugin_id", "=", pluginId)
    .execute();
  const removed: string[] = [];
  const errors: Array<{ index: string; error: string }> = [];
  for (const { index_name } of existingIndexes) {
    try {
      await generateDropIndexSql(index_name).execute(db);
      removed.push(index_name);
    } catch (error) {
      errors.push({
        index: index_name,
        error: error instanceof Error ? error.message : String(error),
      });
    }
  }
  // Untrack only the indexes that were successfully dropped; rows for
  // failed drops are kept so the indexes remain discoverable for retry.
  if (removed.length > 0) {
    await db
      .deleteFrom("_plugin_indexes")
      .where("plugin_id", "=", pluginId)
      .where("index_name", "in", removed)
      .execute();
  }
  return { removed, errors };
}
/**
 * Get current index status for a plugin.
 *
 * Reads the `_plugin_indexes` tracking table and parses each row's
 * JSON-encoded field list. A corrupted `fields` value on a single row
 * degrades to an empty field list instead of throwing, so one bad row
 * cannot fail the whole status query.
 *
 * @param db - Database connection
 * @param pluginId - Plugin to report on
 * @returns One entry per tracked index with its collection, name, fields,
 *   and creation timestamp
 */
export async function getPluginIndexStatus(
  db: Kysely<Database>,
  pluginId: string,
): Promise<
  Array<{
    collection: string;
    indexName: string;
    fields: string[];
    createdAt: string;
  }>
> {
  const rows = await db
    .selectFrom("_plugin_indexes")
    .select(["collection", "index_name", "fields", "created_at"])
    .where("plugin_id", "=", pluginId)
    .execute();
  return rows.map((row) => {
    // Defensive parse: tracking rows are written by this module, but a
    // corrupted value shouldn't make the entire status call throw.
    let fields: string[] = [];
    try {
      const parsed: unknown = JSON.parse(row.fields);
      if (Array.isArray(parsed)) {
        fields = parsed.filter((f): f is string => typeof f === "string");
      }
    } catch {
      // Malformed JSON — report the index with no field information.
    }
    return {
      collection: row.collection,
      indexName: row.index_name,
      fields,
      createdAt: row.created_at,
    };
  });
}

View File

@@ -0,0 +1,240 @@
/**
* Plugin Storage Query Validation and Building
*
* Validates that queries only use indexed fields and builds SQL WHERE clauses.
*
* @see PLUGIN-SYSTEM.md § Plugin Storage > Query Validation
*/
import type { Kysely } from "kysely";
import { jsonExtractExpr } from "../database/dialect-helpers.js";
import { validateJsonFieldName } from "../database/validate.js";
import type { WhereClause, WhereValue, RangeFilter, InFilter, StartsWithFilter } from "./types.js";
/**
 * Error thrown when a storage query references a field that is not
 * covered by a declared index (see validateWhereClause /
 * validateOrderByClause) or uses an unrecognized filter shape.
 *
 * @param message - Human-readable description of the failure
 * @param field - The offending field name, when one can be identified
 * @param suggestion - Actionable hint for the plugin author (e.g. which
 *   index declaration to add)
 */
export class StorageQueryError extends Error {
  constructor(
    message: string,
    public field?: string,
    public suggestion?: string,
  ) {
    super(message);
    // Explicit name so logs and serialized errors identify the type.
    this.name = "StorageQueryError";
  }
}
/**
 * Type guard: does this where-value describe a range comparison,
 * i.e. an object carrying at least one of gt / gte / lt / lte?
 */
export function isRangeFilter(value: WhereValue): value is RangeFilter {
  if (typeof value !== "object" || value === null) {
    return false;
  }
  return ["gt", "gte", "lt", "lte"].some((bound) => bound in value);
}
/**
 * Type guard: does this where-value describe an IN filter
 * ({ in: [...] } with an array payload)?
 */
export function isInFilter(value: WhereValue): value is InFilter {
  if (typeof value !== "object" || value === null) {
    return false;
  }
  if (!("in" in value)) {
    return false;
  }
  return Array.isArray(value.in);
}
/**
 * Type guard: does this where-value describe a prefix filter
 * ({ startsWith: "..." } with a string payload)?
 */
export function isStartsWithFilter(value: WhereValue): value is StartsWithFilter {
  if (typeof value !== "object" || value === null) {
    return false;
  }
  if (!("startsWith" in value)) {
    return false;
  }
  return typeof value.startsWith === "string";
}
/**
 * Collect the set of individually indexed field names from index
 * declarations. Composite indexes contribute each of their fields.
 */
export function getIndexedFields(indexes: Array<string | string[]>): Set<string> {
  const flattened = indexes.flatMap((index) => (Array.isArray(index) ? index : [index]));
  return new Set(flattened);
}
/**
 * Validate that every field referenced in a where clause is indexed.
 *
 * Throws StorageQueryError (naming the first offending field, in key
 * order) when a non-indexed field is referenced; returns silently when
 * the clause is valid.
 */
export function validateWhereClause(
  where: WhereClause,
  indexedFields: Set<string>,
  pluginId: string,
  collection: string,
): void {
  const offending = Object.keys(where).find((field) => !indexedFields.has(field));
  if (offending !== undefined) {
    throw new StorageQueryError(
      `Cannot query on non-indexed field '${offending}'.`,
      offending,
      `Add '${offending}' to storage.${collection}.indexes in plugin '${pluginId}' to enable this query.`,
    );
  }
}
/**
 * Validate that every orderBy field is indexed.
 *
 * Throws StorageQueryError (naming the first offending field, in key
 * order) when a non-indexed field is used for ordering; returns silently
 * otherwise.
 */
export function validateOrderByClause(
  orderBy: Record<string, "asc" | "desc">,
  indexedFields: Set<string>,
  pluginId: string,
  collection: string,
): void {
  const offending = Object.keys(orderBy).find((field) => !indexedFields.has(field));
  if (offending !== undefined) {
    throw new StorageQueryError(
      `Cannot order by non-indexed field '${offending}'.`,
      offending,
      `Add '${offending}' to storage.${collection}.indexes in plugin '${pluginId}' to enable ordering by this field.`,
    );
  }
}
/**
 * SQL expression for extracting a JSON field from the `data` column.
 *
 * The field name is validated before interpolation to prevent SQL
 * injection via crafted JSON path expressions.
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any -- accepts any Kysely instance
export function jsonExtract(db: Kysely<any>, field: string): string {
  // Throws on field names that are unsafe to embed in a JSON path.
  validateJsonFieldName(field, "query field name");
  const expression = jsonExtractExpr(db, "data", field);
  return expression;
}
/**
 * Build a WHERE clause condition for a single field.
 *
 * Supports equality (string / number / boolean / null), IN lists,
 * prefix matching (startsWith → LIKE) and range filters
 * ({ gt, gte, lt, lte }).
 *
 * @returns The SQL fragment with `?` placeholders and its bound params
 * @throws StorageQueryError for unrecognized filter shapes, or for a
 *   range filter with no defined bounds (which would otherwise produce
 *   an empty fragment that corrupts the joined WHERE clause)
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any -- accepts any Kysely instance
export function buildCondition(
  db: Kysely<any>,
  field: string,
  value: WhereValue,
): { sql: string; params: unknown[] } {
  const extract = jsonExtract(db, field);
  if (value === null) {
    return { sql: `${extract} IS NULL`, params: [] };
  }
  if (typeof value === "string" || typeof value === "number") {
    return { sql: `${extract} = ?`, params: [value] };
  }
  if (typeof value === "boolean") {
    // JSON booleans are stored as true/false strings
    return { sql: `${extract} = ?`, params: [value] };
  }
  if (isInFilter(value)) {
    // An empty IN list (`x IN ()`) is a syntax error on most SQL dialects.
    // Semantically it matches nothing, so emit a constant-false predicate.
    if (value.in.length === 0) {
      return { sql: "1 = 0", params: [] };
    }
    const placeholders = value.in.map(() => "?").join(", ");
    return {
      sql: `${extract} IN (${placeholders})`,
      params: value.in,
    };
  }
  if (isStartsWithFilter(value)) {
    return {
      sql: `${extract} LIKE ?`,
      params: [`${value.startsWith}%`],
    };
  }
  if (isRangeFilter(value)) {
    const conditions: string[] = [];
    const params: unknown[] = [];
    if (value.gt !== undefined) {
      conditions.push(`${extract} > ?`);
      params.push(value.gt);
    }
    if (value.gte !== undefined) {
      conditions.push(`${extract} >= ?`);
      params.push(value.gte);
    }
    if (value.lt !== undefined) {
      conditions.push(`${extract} < ?`);
      params.push(value.lt);
    }
    if (value.lte !== undefined) {
      conditions.push(`${extract} <= ?`);
      params.push(value.lte);
    }
    // A range object whose bounds are all undefined (e.g. { gt: undefined })
    // passes isRangeFilter but yields no conditions; an empty fragment here
    // would produce invalid SQL like `a AND  AND b` after joining.
    if (conditions.length === 0) {
      throw new StorageQueryError(
        `Range filter for field '${field}' must define at least one of gt, gte, lt, lte`,
        field,
      );
    }
    return {
      sql: conditions.join(" AND "),
      params,
    };
  }
  throw new StorageQueryError(`Unknown filter type for field '${field}'`);
}
/**
 * Build a complete WHERE clause from a WhereClause object.
 *
 * Conditions for each field are AND-ed together in key order; an empty
 * clause yields an empty SQL string with no params.
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any -- accepts any Kysely instance
export function buildWhereClause(
  db: Kysely<any>,
  where: WhereClause,
): {
  sql: string;
  params: unknown[];
} {
  const parts = Object.entries(where).map(([field, value]) => buildCondition(db, field, value));
  if (parts.length === 0) {
    return { sql: "", params: [] };
  }
  return {
    sql: parts.map((part) => part.sql).join(" AND "),
    params: parts.flatMap((part) => part.params),
  };
}
/**
 * Build an ORDER BY clause from a field → direction map.
 *
 * Returns an empty string for an empty map; otherwise each field's JSON
 * extraction expression is paired with its uppercased direction.
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any -- accepts any Kysely instance
export function buildOrderByClause(
  db: Kysely<any>,
  orderBy: Record<string, "asc" | "desc">,
): string {
  const clauses = Object.entries(orderBy).map(
    ([field, direction]) => `${jsonExtract(db, field)} ${direction.toUpperCase()}`,
  );
  return clauses.length === 0 ? "" : `ORDER BY ${clauses.join(", ")}`;
}

File diff suppressed because it is too large Load Diff