EmDash source with visual editor image upload fix

Fixes:
1. media.ts: wrap placeholder generation in try-catch
2. toolbar.ts: check r.ok, display error message in popover
This commit is contained in:
2026-05-03 10:44:54 +07:00
parent 78f81bebb6
commit 2d1be52177
2352 changed files with 662964 additions and 0 deletions

View File

@@ -0,0 +1,303 @@
/**
* Cloudflare Access Authentication - RUNTIME MODULE
*
* When EmDash is deployed behind Cloudflare Access, this module handles
* JWT validation and user provisioning from Access identity.
*
* Uses jose for JWT verification - works in all runtimes.
*
* This is loaded at runtime via the auth provider system.
* Do not import at config time.
*/
import type { AuthResult } from "emdash";
import { createRemoteJWKSet, jwtVerify, type JWTPayload } from "jose";
/**
 * Configuration for Cloudflare Access authentication
 *
 * Note: This interface is duplicated in ../index.ts for config-time usage.
 * Keep them in sync.
 */
export interface AccessConfig {
/**
 * Your Cloudflare Access team domain
 * @example "myteam.cloudflareaccess.com"
 */
teamDomain: string;
/**
 * Application Audience (AUD) tag from Access application settings.
 * For Cloudflare Workers, use `audienceEnvVar` instead to read at runtime.
 */
audience?: string;
/**
 * Environment variable name containing the audience tag.
 * Read at runtime from environment.
 * @default "CF_ACCESS_AUDIENCE"
 */
audienceEnvVar?: string;
/**
 * Role level for users not matching any group in roleMapping
 * @default 30 (Editor)
 */
defaultRole?: number;
/**
 * Map IdP group names to EmDash role levels.
 * The first group (in IdP order) with a mapping wins; unmapped users
 * receive `defaultRole`. See resolveRoleFromGroups.
 */
roleMapping?: Record<string, number>;
}
/**
 * Cloudflare Access JWT payload extends standard JWT with email claim
 */
export interface AccessJwtPayload extends JWTPayload {
/** User's email address (Access-specific claim) */
email: string;
}
/**
 * Group from IdP (returned by get-identity endpoint)
 */
export interface AccessGroup {
/** Group identifier assigned by the IdP */
id: string;
/** Group display name — this is what roleMapping keys match against */
name: string;
/** Group email, when the IdP provides one */
email?: string;
}
/**
 * Full identity from Access get-identity endpoint
 */
export interface AccessIdentity {
/** Unique identity ID */
id: string;
/** User's display name (may be undefined if IdP doesn't provide it) */
name?: string;
/** User's email address */
email: string;
/** Groups from IdP */
groups: AccessGroup[];
/** Identity provider info */
idp: {
id: string;
type: string;
};
/** Custom OIDC claims from IdP */
oidc_fields?: Record<string, unknown>;
/** SAML attributes from IdP */
saml_attributes?: Record<string, unknown>;
/** User's country (from geo) */
geo?: {
country: string;
};
}
// Cache for JWKS (jose handles key rotation automatically)
// Keyed by team domain so multiple Access teams can coexist in one isolate.
const jwksCache = new Map<string, ReturnType<typeof createRemoteJWKSet>>();
/** Regex to extract CF_Authorization cookie value */
const CF_AUTHORIZATION_COOKIE_REGEX = /CF_Authorization=([^;]+)/;
/**
 * Fetch a cached JWKS client for a team domain, creating one on first use.
 * jose's remote JWK set refreshes keys on rotation by itself.
 */
function getJwks(teamDomain: string): ReturnType<typeof createRemoteJWKSet> {
  const cached = jwksCache.get(teamDomain);
  if (cached) return cached;
  const created = createRemoteJWKSet(new URL(`https://${teamDomain}/cdn-cgi/access/certs`));
  jwksCache.set(teamDomain, created);
  return created;
}
/** Default environment variable name for Access audience */
const DEFAULT_AUDIENCE_ENV_VAR = "CF_ACCESS_AUDIENCE";
/**
 * Resolve the audience tag used for JWT validation.
 * An explicit `config.audience` wins; otherwise the value is read from
 * the configured (or default) environment variable.
 * @throws When neither a direct value nor a non-empty env var is present
 */
function resolveAudience(config: AccessConfig): string {
  if (config.audience) return config.audience;
  const name = config.audienceEnvVar ?? DEFAULT_AUDIENCE_ENV_VAR;
  const fromEnv = process.env[name];
  if (typeof fromEnv === "string" && fromEnv) return fromEnv;
  throw new Error(
    `Environment variable "${name}" not found or empty. ` +
      `Set it via wrangler secret, .dev.vars, or environment.`,
  );
}
/**
 * Verify a Cloudflare Access JWT's signature and claims.
 *
 * Checks issuer (the team domain), audience, and expiry — with 60 seconds
 * of clock-skew tolerance — against the team's JWKS endpoint.
 *
 * @param jwt JWT string taken from the Access header or cookie
 * @param config Access configuration
 * @returns The decoded, validated payload
 * @throws If the token is invalid, or if the audience cannot be resolved
 */
export async function validateAccessJwt(
  jwt: string,
  config: AccessConfig,
): Promise<AccessJwtPayload> {
  const verifyOptions = {
    issuer: `https://${config.teamDomain}`,
    audience: resolveAudience(config),
    clockTolerance: 60, // allow a minute of clock skew
  };
  const result = await jwtVerify<AccessJwtPayload>(jwt, getJwks(config.teamDomain), verifyOptions);
  return result.payload;
}
/**
 * Pull the Access JWT off a request.
 *
 * The `Cf-Access-Jwt-Assertion` header is checked first (Access sets it on
 * every proxied request); the `CF_Authorization` cookie is the browser
 * fallback.
 *
 * @param request The incoming request
 * @returns The raw JWT, or null when the request carries none
 */
export function extractAccessJwt(request: Request): string | null {
  const fromHeader = request.headers.get("Cf-Access-Jwt-Assertion");
  if (fromHeader) {
    return fromHeader;
  }
  const cookieHeader = request.headers.get("Cookie") || "";
  const cookieMatch = CF_AUTHORIZATION_COOKIE_REGEX.exec(cookieHeader);
  return cookieMatch?.[1] || null;
}
/**
 * Retrieve the full Access identity (groups, IdP info, custom claims).
 *
 * The JWT itself only carries basic claims; groups and other IdP
 * attributes come from the /cdn-cgi/access/get-identity endpoint, which is
 * authenticated by replaying the JWT as the CF_Authorization cookie.
 *
 * @param jwt The (already validated) JWT string
 * @param teamDomain The Access team domain
 * @returns Parsed identity document including groups
 * @throws When the endpoint responds with a non-2xx status
 */
export async function getAccessIdentity(jwt: string, teamDomain: string): Promise<AccessIdentity> {
  const endpoint = `https://${teamDomain}/cdn-cgi/access/get-identity`;
  const res = await fetch(endpoint, {
    headers: { Cookie: `CF_Authorization=${jwt}` },
  });
  if (res.ok) {
    return res.json();
  }
  throw new Error(`Failed to fetch identity: ${res.status}`);
}
/**
 * Map a user's IdP groups to an EmDash role level.
 *
 * Walks the groups in order and returns the first role found in
 * `config.roleMapping`; falls back to `config.defaultRole` (30, Editor)
 * when nothing matches or no mapping is configured.
 *
 * @param groups User's groups from the IdP
 * @param config Access configuration
 * @returns Role level (e.g. 50 for Admin, 30 for Editor)
 */
export function resolveRoleFromGroups(groups: AccessGroup[], config: AccessConfig): number {
  const fallback = config.defaultRole ?? 30; // Editor
  const mapping = config.roleMapping;
  if (!mapping) {
    return fallback;
  }
  // First group (in IdP order) with a mapping wins.
  const matched = groups.map((group) => mapping[group.name]).find((role) => role !== undefined);
  return matched ?? fallback;
}
/**
 * Type guard for the opaque config handed in from the AuthDescriptor.
 * Only `teamDomain` is verified here; all other fields are optional.
 */
function isAccessConfig(value: unknown): value is AccessConfig {
  return (
    value != null &&
    typeof value === "object" &&
    "teamDomain" in value &&
    typeof value.teamDomain === "string"
  );
}
/**
 * Authenticate a request using Cloudflare Access
 *
 * This is the main entry point for Access authentication.
 * It validates the JWT, fetches the full identity, and resolves the role.
 *
 * This function implements the AuthProviderModule.authenticate interface.
 *
 * @param request The incoming request
 * @param config Access configuration (passed from AuthDescriptor)
 * @returns Authentication result with user info and role
 * @throws Error if authentication fails
 */
export async function authenticate(request: Request, config: unknown): Promise<AuthResult> {
  if (!isAccessConfig(config)) {
    throw new Error("Invalid Cloudflare Access config: teamDomain is required");
  }
  const accessConfig = config;
  // Extract JWT from header or cookie
  const jwt = extractAccessJwt(request);
  if (!jwt) {
    throw new Error("No Access JWT present");
  }
  // Validate signature, issuer, audience, expiry
  const payload = await validateAccessJwt(jwt, accessConfig);
  // Fetch full identity (includes groups)
  const identity = await getAccessIdentity(jwt, accessConfig.teamDomain);
  // Resolve role from groups
  const role = resolveRoleFromGroups(identity.groups, accessConfig);
  // Log identity for debugging
  // NOTE(review): this logs user email and group names — confirm that is
  // acceptable under the deployment's log-retention policy.
  console.log(
    "[cf-access] Identity from Access:",
    JSON.stringify({
      email: identity.email,
      name: identity.name,
      groups: identity.groups?.map((g) => g.name),
      idp: identity.idp,
    }),
  );
  return {
    email: identity.email,
    // Fall back to the local part of the email when the IdP sends no name
    name: identity.name ?? identity.email.split("@")[0] ?? "Unknown",
    role,
    subject: payload.sub,
    metadata: {
      groups: identity.groups,
      idp: identity.idp,
    },
  };
}

View File

@@ -0,0 +1,16 @@
/**
 * Cloudflare Access Auth - RUNTIME ENTRY
 *
 * This module is loaded at runtime when authenticating requests.
 * It exports the `authenticate` function required by the auth provider interface.
 *
 * For config-time usage, import { access } from "@emdash-cms/cloudflare" instead.
 */
// Runtime implementation: validates the Access JWT and provisions the user.
export { authenticate } from "./cloudflare-access.js";
// Types re-exported so consumers don't need to deep-import the impl module.
export type {
AccessConfig,
AccessJwtPayload,
AccessGroup,
AccessIdentity,
} from "./cloudflare-access.js";

81
packages/cloudflare/src/cache/config.ts vendored Normal file
View File

@@ -0,0 +1,81 @@
/**
* Cloudflare Cache API route cache provider - CONFIG ENTRY
*
* This is the config-time helper. Import it in your astro.config.mjs:
*
* ```ts
* import { cloudflareCache } from "@emdash-cms/cloudflare";
*
* export default defineConfig({
* experimental: {
* cache: {
* provider: cloudflareCache(),
* },
* },
* });
* ```
*
* This module does NOT import cloudflare:workers and is safe to use at
* config time.
*/
import type { CacheProviderConfig } from "astro";
import type { CloudflareCacheConfig } from "./runtime.js";
export type { CloudflareCacheConfig };
/**
 * Cloudflare Cache API route cache provider.
 *
 * Caches rendered route responses at the edge via the Workers Cache API
 * (`cache.put()`/`cache.match()`); invalidation goes through Cloudflare's
 * purge-by-tag REST API so purges apply across all edge locations. The
 * standard `Cache-Tag` header on stored responses is what the purge API
 * matches against.
 *
 * This is a stopgap until CacheW provides native distributed caching for
 * Workers — Worker responses can't go through the CDN cache today.
 *
 * Tag-based invalidation needs a Zone ID plus an API token with the
 * "Cache Purge" permission, supplied directly in `config` or read at
 * runtime from env vars (default: `CF_ZONE_ID` and `CF_CACHE_PURGE_TOKEN`).
 *
 * @param config Optional configuration.
 * @returns A {@link CacheProviderConfig} to pass to `experimental.cache.provider`.
 *
 * @example Basic usage (reads zone ID and token from env vars)
 * ```ts
 * import { defineConfig } from "astro/config";
 * import cloudflare from "@astrojs/cloudflare";
 * import { cloudflareCache } from "@emdash-cms/cloudflare";
 *
 * export default defineConfig({
 *   adapter: cloudflare(),
 *   experimental: {
 *     cache: { provider: cloudflareCache() },
 *   },
 * });
 * ```
 *
 * @example With explicit config
 * ```ts
 * cloudflareCache({ cacheName: "my-site", zoneId: "abc123...", apiToken: "xyz789..." })
 * ```
 */
export function cloudflareCache(
  config: CloudflareCacheConfig = {},
): CacheProviderConfig<CloudflareCacheConfig> {
  // The entrypoint string is resolved by Vite/Astro at build time and
  // points at the runtime module (the one that imports cloudflare:workers).
  const entrypoint = "@emdash-cms/cloudflare/cache";
  return { entrypoint, config };
}

330
packages/cloudflare/src/cache/runtime.ts vendored Normal file
View File

@@ -0,0 +1,330 @@
/**
* Cloudflare Cache API route cache provider - RUNTIME ENTRY
*
* Implements Astro's CacheProvider interface as a runtime provider using the
* Workers Cache API for storage and the Cloudflare purge-by-tag REST API for
* global invalidation.
*
* This is a temporary solution until CacheW exists. Workers responses can't
* go through the CDN cache, so we use cache.put()/cache.match() directly.
* The standard `Cache-Tag` header (set by Astro's default setHeaders) is
* preserved on cached responses so the purge-by-tag API works globally.
*
* We do NOT implement setHeaders() — Astro's defaultSetHeaders correctly
* emits CDN-Cache-Control and Cache-Tag. Our onRequest() reads those
* headers from the response that next() returns.
*
* Do NOT import this at config time. Use cloudflareCache() from
* "@emdash-cms/cloudflare" or "@emdash-cms/cloudflare/cache/config" instead.
*/
import type { CacheProviderFactory } from "astro";
import { env, waitUntil } from "cloudflare:workers";
/**
 * Internal headers stored on cached responses for freshness tracking.
 * These are removed before returning to the client.
 */
const STORED_AT_HEADER = "X-EmDash-Stored-At";
const MAX_AGE_HEADER = "X-EmDash-Max-Age";
const SWR_HEADER = "X-EmDash-SWR";
/** Cloudflare purge API base */
const CF_API_BASE = "https://api.cloudflare.com/client/v4";
/** Matches max-age in CDN-Cache-Control */
const MAX_AGE_REGEX = /max-age=(\d+)/;
/** Matches stale-while-revalidate in CDN-Cache-Control */
const SWR_REGEX = /stale-while-revalidate=(\d+)/;
/** Internal headers to strip before returning responses to the client */
const INTERNAL_HEADERS = [STORED_AT_HEADER, MAX_AGE_HEADER, SWR_HEADER];
/** Default D1 bookmark cookie name (from @emdash-cms/cloudflare d1 config) */
// Should stay in sync with the d1 adapter's DEFAULT_BOOKMARK_COOKIE.
const DEFAULT_BOOKMARK_COOKIE = "__em_d1_bookmark";
export interface CloudflareCacheConfig {
/**
 * Name of the Cache API cache to use.
 * @default "emdash"
 */
cacheName?: string;
/**
 * D1 bookmark cookie name. Responses whose only Set-Cookie is this
 * bookmark will have it stripped before caching. Responses with any
 * other Set-Cookie headers will not be cached.
 * Should match the cookie name configured for the d1 adapter.
 * @default "__em_d1_bookmark"
 */
bookmarkCookie?: string;
/**
 * Cloudflare Zone ID. Required for tag-based invalidation.
 * If not provided, reads from `zoneIdEnvVar` at runtime.
 */
zoneId?: string;
/**
 * Environment variable name containing the Zone ID.
 * @default "CF_ZONE_ID"
 */
zoneIdEnvVar?: string;
/**
 * Cloudflare API token with Cache Purge permission.
 * If not provided, reads from `apiTokenEnvVar` at runtime.
 */
apiToken?: string;
/**
 * Environment variable name containing the API token.
 * @default "CF_CACHE_PURGE_TOKEN"
 */
apiTokenEnvVar?: string;
}
/**
 * Extract `max-age` and `stale-while-revalidate` (both in seconds) from a
 * CDN-Cache-Control header value. A missing header or directive yields 0.
 */
function parseCdnCacheControl(header: string | null): { maxAge: number; swr: number } {
  const result = { maxAge: 0, swr: 0 };
  if (header) {
    const maxAgeMatch = MAX_AGE_REGEX.exec(header);
    if (maxAgeMatch) result.maxAge = parseInt(maxAgeMatch[1]!, 10) || 0;
    const swrMatch = SWR_REGEX.exec(header);
    if (swrMatch) result.swr = parseInt(swrMatch[1]!, 10) || 0;
  }
  return result;
}
/**
 * Build a stable cache key from a URL: drop well-known tracking/click-ID
 * query parameters and sort the remaining ones, so equivalent URLs map to
 * the same cache entry.
 */
function normalizeCacheKey(url: URL): string {
  const key = new URL(url.toString());
  const tracking = new Set([
    "utm_source",
    "utm_medium",
    "utm_campaign",
    "utm_term",
    "utm_content",
    "fbclid",
    "gclid",
    "gbraid",
    "wbraid",
    "dclid",
    "msclkid",
    "twclid",
    "_ga",
    "_gl",
  ]);
  // Snapshot the names first — deleting while iterating searchParams is unsafe.
  for (const name of [...key.searchParams.keys()]) {
    if (tracking.has(name)) key.searchParams.delete(name);
  }
  key.searchParams.sort();
  return key.toString();
}
/**
 * Resolve a setting: an explicitly configured value wins; otherwise fall
 * back to a string read from the Worker `env` object. Non-string bindings
 * (D1, KV, …) under that name are ignored.
 */
function resolveEnvValue(explicit: string | undefined, envVarName: string): string | undefined {
  if (explicit) {
    return explicit;
  }
  if (envVarName in env) {
    const candidate: unknown = Reflect.get(env, envVarName);
    if (typeof candidate === "string") {
      return candidate;
    }
  }
  return undefined;
}
/**
 * Remove the X-EmDash-* freshness bookkeeping headers from a response
 * before it is returned to the client.
 */
function stripInternalHeaders(response: Response): void {
  INTERNAL_HEADERS.forEach((name) => response.headers.delete(name));
}
/**
 * True when the response carries no Set-Cookie headers other than the D1
 * bookmark cookie — i.e. the cookies can be safely stripped for caching.
 * False means the response sets other cookies (session, auth, …) and must
 * not be cached at all.
 */
function hasOnlyBookmarkCookies(response: Response, bookmarkCookie: string): boolean {
  const prefix = `${bookmarkCookie}=`;
  for (const cookie of response.headers.getSetCookie()) {
    if (!cookie.startsWith(prefix)) return false;
  }
  return true;
}
/**
 * Turn a rendered response into its cache-ready form:
 * - stamps the internal stored-at / max-age / swr tracking headers
 * - drops Set-Cookie (only ever the D1 bookmark cookie at this point)
 *
 * Returns null — meaning "do not cache" — when the response sets any
 * cookie other than the bookmark.
 */
function prepareForCache(
  response: Response,
  maxAge: number,
  swr: number,
  bookmarkCookie: string,
): Response | null {
  if (!hasOnlyBookmarkCookies(response, bookmarkCookie)) return null;
  const cacheable = new Response(response.body, response);
  const stamps: Array<[string, string]> = [
    [STORED_AT_HEADER, String(Date.now())],
    [MAX_AGE_HEADER, String(maxAge)],
    [SWR_HEADER, String(swr)],
  ];
  for (const [name, value] of stamps) cacheable.headers.set(name, value);
  cacheable.headers.delete("Set-Cookie");
  return cacheable;
}
const factory: CacheProviderFactory<CloudflareCacheConfig> = (config) => {
const cacheName = config?.cacheName ?? "emdash";
const bookmarkCookie = config?.bookmarkCookie ?? DEFAULT_BOOKMARK_COOKIE;
const zoneIdEnvVar = config?.zoneIdEnvVar ?? "CF_ZONE_ID";
const apiTokenEnvVar = config?.apiTokenEnvVar ?? "CF_CACHE_PURGE_TOKEN";
// Resolved per call rather than held across requests.
async function getCache(): Promise<Cache> {
return caches.open(cacheName);
}
return {
name: "cloudflare-cache-api",
// No setHeaders() — we use Astro's defaultSetHeaders which correctly
// emits CDN-Cache-Control and Cache-Tag. Our onRequest() reads those.
async onRequest(context, next) {
// Only cache GET requests
if (context.request.method !== "GET") {
return next();
}
// Skip cache for authenticated users. Their responses may differ
// (edit toolbar, admin UI, draft content) and must not be served
// to other visitors. The Astro session cookie indicates a logged-in user.
// NOTE(review): substring match — any cookie whose name merely contains
// "astro-session" also bypasses the cache (a harmless false positive).
const cookieHeader = context.request.headers.get("Cookie") ?? "";
if (cookieHeader.includes("astro-session=")) {
return next();
}
const cacheKey = normalizeCacheKey(context.url);
const cache = await getCache();
const cached = await cache.match(cacheKey);
if (cached) {
// Freshness comes from the internal headers stamped by prepareForCache.
// Missing/garbled stamps parse to 0, so such an entry is treated as
// expired and falls through to the delete-and-rerender path below.
const storedAt = parseInt(cached.headers.get(STORED_AT_HEADER) ?? "0", 10);
const maxAge = parseInt(cached.headers.get(MAX_AGE_HEADER) ?? "0", 10);
const swr = parseInt(cached.headers.get(SWR_HEADER) ?? "0", 10);
const ageSeconds = (Date.now() - storedAt) / 1000;
if (ageSeconds < maxAge) {
// Fresh — serve from cache
const hit = new Response(cached.body, cached);
hit.headers.set("X-Astro-Cache", "HIT");
stripInternalHeaders(hit);
return hit;
}
if (swr > 0 && ageSeconds < maxAge + swr) {
// Stale but within SWR window — serve stale, revalidate in background
const stale = new Response(cached.body, cached);
stale.headers.set("X-Astro-Cache", "STALE");
stripInternalHeaders(stale);
// waitUntil keeps the Worker alive after the response is sent so the
// background re-render can finish and repopulate the cache.
waitUntil(
(async () => {
try {
const fresh = await next();
const cdnCC = fresh.headers.get("CDN-Cache-Control");
const parsed = parseCdnCacheControl(cdnCC);
if (parsed.maxAge > 0 && fresh.ok) {
const toStore = prepareForCache(fresh, parsed.maxAge, parsed.swr, bookmarkCookie);
if (toStore) {
await cache.put(cacheKey, toStore);
}
}
} catch {
// Non-fatal — next request will retry
}
})(),
);
return stale;
}
// Expired and past SWR window — delete and fall through
await cache.delete(cacheKey);
}
// Cache MISS — render
const response = await next();
// Read cache directives from CDN-Cache-Control (set by Astro's defaultSetHeaders)
const cdnCC = response.headers.get("CDN-Cache-Control");
const { maxAge, swr } = parseCdnCacheControl(cdnCC);
if (maxAge > 0 && response.ok) {
// clone() first: the stored copy and the returned copy each need a
// readable body stream.
const toStore = prepareForCache(response.clone(), maxAge, swr, bookmarkCookie);
if (toStore) {
await cache.put(cacheKey, toStore);
}
const miss = new Response(response.body, response);
miss.headers.set("X-Astro-Cache", "MISS");
return miss;
}
// No cache directives — pass through without caching
return response;
},
async invalidate(options) {
if (options.tags) {
const zoneId = resolveEnvValue(config?.zoneId, zoneIdEnvVar);
const apiToken = resolveEnvValue(config?.apiToken, apiTokenEnvVar);
if (!zoneId || !apiToken) {
throw new Error(
`[cloudflare-cache-api] Tag-based invalidation requires a Zone ID and API token. ` +
`Set the ${zoneIdEnvVar} and ${apiTokenEnvVar} environment variables, ` +
`or pass zoneId/apiToken in the cloudflareCache() config.`,
);
}
const tags = Array.isArray(options.tags) ? options.tags : [options.tags];
const response = await fetch(`${CF_API_BASE}/zones/${zoneId}/purge_cache`, {
method: "POST",
headers: {
Authorization: `Bearer ${apiToken}`,
"Content-Type": "application/json",
},
body: JSON.stringify({ tags }),
});
if (!response.ok) {
const body = await response.text().catch(() => "");
throw new Error(
`[cloudflare-cache-api] Cache purge failed (${response.status}): ${body}`,
);
}
}
// NOTE(review): stored keys are normalized full URLs (see normalizeCacheKey),
// so this delete only matches when options.path is the same full URL —
// confirm what callers pass here; a bare "/path" would never match.
if (options.path) {
const cache = await getCache();
await cache.delete(options.path);
}
},
};
};
export default factory;

31
packages/cloudflare/src/cloudflare.d.ts vendored Normal file
View File

@@ -0,0 +1,31 @@
/**
* Type declarations for Cloudflare virtual modules
*
* These are only available at runtime on Cloudflare Workers.
* The types here are minimal - just enough for our usage.
*/
declare module "cloudflare:workers" {
/**
 * Environment bindings object
 * Contains all bindings defined in wrangler.toml (D1, R2, KV, etc.)
 */
export const env: Record<string, unknown>;
/**
 * Exports object for loopback bindings
 */
export const exports: Record<string, unknown>;
/**
 * Base class for Worker Entrypoints
 */
export class WorkerEntrypoint<TEnv = unknown, TProps = unknown> {
env: TEnv;
ctx: ExecutionContext & { props: TProps };
}
}
// Intentionally an empty stub — declared only so the specifier resolves.
declare module "cloudflare:email" {
// Email worker types if needed
}

View File

@@ -0,0 +1,120 @@
/**
* D1-compatible SQLite Introspector
*
* D1 doesn't allow the correlated cross-join pattern that Kysely's default
* SqliteIntrospector uses: `FROM tl, pragma_table_info(tl.name)`
*
* This introspector queries tables individually instead.
*/
import type { DatabaseIntrospector, DatabaseMetadata, SchemaMetadata, TableMetadata } from "kysely";
import { sql } from "kysely";
// Kysely's default migration table names
const DEFAULT_MIGRATION_TABLE = "kysely_migration";
const DEFAULT_MIGRATION_LOCK_TABLE = "kysely_migration_lock";
// Kysely's DatabaseIntrospector.createIntrospector receives Kysely<any>.
// We must use `any` here to match Kysely's own interface contract —
// it needs untyped schema access to query sqlite_master dynamically.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
type AnyKysely = any;
// Regex patterns for parsing CREATE TABLE statements
// Splits the CREATE TABLE body into fragments at parens and commas.
const SPLIT_PARENS_PATTERN = /[(),]/;
// Splits a column-definition fragment into words (first word = column name).
const WHITESPACE_PATTERN = /\s+/;
// Strips identifier quoting (double quotes or backticks) from a column name.
const QUOTES_PATTERN = /["`]/g;
export class D1Introspector implements DatabaseIntrospector {
  readonly #db: AnyKysely;

  constructor(db: AnyKysely) {
    this.#db = db;
  }

  /** SQLite has no schema namespaces, so this is always empty. */
  async getSchemas(): Promise<SchemaMetadata[]> {
    // SQLite doesn't support schemas
    return [];
  }

  /**
   * List user tables and views with their column metadata.
   *
   * Columns are fetched with one `pragma_table_info()` query per table
   * because D1 rejects the correlated cross-join that Kysely's default
   * SqliteIntrospector uses.
   *
   * @param options.withInternalKyselyTables Include Kysely's own migration
   *   bookkeeping tables in the result.
   */
  async getTables(options: { withInternalKyselyTables?: boolean } = {}): Promise<TableMetadata[]> {
    // Get table names from sqlite_master
    let query = this.#db
      .selectFrom("sqlite_master")
      .where("type", "in", ["table", "view"])
      .where("name", "not like", "sqlite_%")
      .where("name", "not like", "_cf_%") // Skip Cloudflare internal tables
      .select(["name", "sql", "type"])
      .orderBy("name");
    if (!options.withInternalKyselyTables) {
      query = query
        .where("name", "!=", DEFAULT_MIGRATION_TABLE)
        .where("name", "!=", DEFAULT_MIGRATION_LOCK_TABLE);
    }
    const tables = await query.execute();
    // Query each table's columns individually (avoiding the problematic cross-join)
    const result: TableMetadata[] = [];
    for (const table of tables) {
      // eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion -- Kysely's DatabaseIntrospector returns untyped results
      const tableName = table.name as string;
      // eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion -- Kysely's DatabaseIntrospector returns untyped results
      const tableType = table.type as string;
      // eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion -- Kysely's DatabaseIntrospector returns untyped results
      const tableSql = table.sql as string | null;
      // D1 doesn't allow parameterized table names in pragma_table_info(),
      // so the name is spliced in with sql.raw(). The name comes from
      // sqlite_master, but escape single quotes anyway so a quoted
      // identifier can never break out of the SQL string literal.
      const safeTableName = tableName.replace(/'/g, "''");
      const columns = await sql<{
        cid: number;
        name: string;
        type: string;
        notnull: number;
        dflt_value: string | null;
        pk: number;
      }>`SELECT * FROM pragma_table_info('${sql.raw(safeTableName)}')`.execute(this.#db);
      // Try to find autoincrement column from CREATE TABLE statement
      let autoIncrementCol = tableSql
        ?.split(SPLIT_PARENS_PATTERN)
        ?.find((it) => it.toLowerCase().includes("autoincrement"))
        ?.trimStart()
        ?.split(WHITESPACE_PATTERN)?.[0]
        ?.replace(QUOTES_PATTERN, "");
      // Otherwise, check for INTEGER PRIMARY KEY (implicit autoincrement)
      if (!autoIncrementCol) {
        const pkCols = columns.rows.filter((r) => r.pk > 0);
        if (pkCols.length === 1 && pkCols[0]!.type.toLowerCase() === "integer") {
          autoIncrementCol = pkCols[0]!.name;
        }
      }
      result.push({
        name: tableName,
        isView: tableType === "view",
        columns: columns.rows.map((col) => ({
          name: col.name,
          dataType: col.type,
          isNullable: !col.notnull,
          isAutoIncrementing: col.name === autoIncrementCol,
          hasDefaultValue: col.dflt_value != null,
          comment: undefined,
        })),
      });
    }
    return result;
  }

  /** Full database metadata — currently just the table list. */
  async getMetadata(options?: { withInternalKyselyTables?: boolean }): Promise<DatabaseMetadata> {
    return {
      tables: await this.getTables(options),
    };
  }
}

View File

@@ -0,0 +1,196 @@
/**
* Cloudflare D1 runtime adapter - RUNTIME ENTRY
*
* Creates a Kysely dialect for D1 and, when read replication is enabled,
* a per-request Kysely bound to a D1 Sessions-API session.
*
* This module imports directly from cloudflare:workers to access the D1 binding.
* Do NOT import this at config time - use { d1 } from "@emdash-cms/cloudflare" instead.
*/
import { env } from "cloudflare:workers";
import { kyselyLogOption } from "emdash/database/instrumentation";
import { type DatabaseIntrospector, type Dialect, Kysely } from "kysely";
import { D1Dialect } from "kysely-d1";
import { D1Introspector } from "./d1-introspector.js";
/**
 * D1 configuration (runtime type — matches the config-time type in index.ts)
 */
interface D1Config {
/** Name of the D1 binding declared in wrangler config */
binding: string;
/** Read-replication session mode; anything but "disabled" enables sessions */
session?: "disabled" | "auto" | "primary-first";
/** Cookie name used to persist the D1 session bookmark */
bookmarkCookie?: string;
}
const DEFAULT_BOOKMARK_COOKIE = "__em_d1_bookmark";
/**
 * D1 bookmarks are opaque, minted by Cloudflare. We don't validate the shape
 * (a tighter regex risks rejecting a format change and silently degrading
 * read-your-writes), but we do cap length and reject control characters so a
 * malicious or corrupt cookie can't smuggle anything weird into `withSession`.
 */
// D1 bookmarks observed in the wild are ~60 chars, but the format is opaque
// and future encodings (e.g. signed envelopes) could be longer. Err on the
// generous side — cookie values max out at ~4 KB anyway.
const MAX_BOOKMARK_LENGTH = 1024;
/**
 * True when the string contains an ASCII control character (C0 range or
 * DEL). Used to sanity-check the bookmark cookie value before handing it
 * to `withSession`.
 */
function hasControlChars(value: string): boolean {
  for (const ch of value) {
    const code = ch.codePointAt(0)!;
    if (code < 0x20 || code === 0x7f) return true;
  }
  return false;
}
/**
 * Custom D1 Dialect that uses our D1-compatible introspector
 *
 * The default kysely-d1 dialect uses SqliteIntrospector which does a
 * cross-join with pragma_table_info() that D1 doesn't allow.
 */
class EmDashD1Dialect extends D1Dialect {
// `Kysely<any>` matches the signature Kysely's Dialect interface requires.
override createIntrospector(db: Kysely<any>): DatabaseIntrospector {
return new D1Introspector(db);
}
}
/**
 * Build the Kysely dialect for the configured D1 binding.
 *
 * Used for the singleton Kysely instance — queries issued through it hit
 * the raw binding, with no Sessions-API session.
 *
 * @throws When the binding is absent from the Worker environment; the
 *         message includes a wrangler.jsonc example for declaring it.
 */
export function createDialect(config: D1Config): Dialect {
  const database = getBinding(config);
  if (database) {
    return new EmDashD1Dialect({ database });
  }
  const example = JSON.stringify(
    {
      d1_databases: [
        {
          binding: config.binding,
          database_name: "your-database-name",
          database_id: "your-database-id",
        },
      ],
    },
    null,
    2,
  );
  throw new Error(
    `D1 binding "${config.binding}" not found in environment. ` +
      `Check your wrangler.jsonc configuration:\n\n${example}`,
  );
}
// =========================================================================
// D1 Read Replica Session Support
//
// createRequestScopedDb is called by the core middleware on each request.
// When sessions are enabled it returns a per-request Kysely bound to a
// D1 Sessions API session, plus a `commit()` callback that persists the
// resulting bookmark as a cookie for authenticated users.
// =========================================================================
/**
 * A cookie interface minimally compatible with Astro's AstroCookies. Declared
 * here (not imported from astro) so this module stays free of astro types.
 */
interface CookieJar {
get(name: string): { value: string } | undefined;
set(name: string, value: string, options: Record<string, unknown>): void;
}
export interface RequestScopedDbOpts {
/** D1 adapter config (binding name, session mode, cookie name) */
config: D1Config;
/** Logged-in user — enables bookmark resume + persistence */
isAuthenticated: boolean;
/** Request performs DB writes — forces routing to the primary */
isWrite: boolean;
/** Cookie jar used to read/write the bookmark cookie */
cookies: CookieJar;
/** Request URL; its protocol decides the cookie's Secure flag */
url: URL;
}
export interface RequestScopedDb {
/** Per-request Kysely instance backed by a D1 Sessions API session. */
db: Kysely<any>;
/**
 * Persist any per-request session state (e.g. the resulting D1 bookmark)
 * as a cookie. Idempotent; safe to call once after next() returns.
 */
commit: () => void;
}
/**
 * Create a per-request session-backed Kysely, or null when D1 sessions are
 * disabled or the binding is missing. Core middleware calls this once per
 * request, stashes `db` in ALS for the duration of next(), then invokes
 * `commit()` on the response path.
 */
export function createRequestScopedDb(opts: RequestScopedDbOpts): RequestScopedDb | null {
  if (!isSessionEnabled(opts.config)) return null;
  const binding = getBinding(opts.config);
  if (!binding || typeof binding.withSession !== "function") return null;
  const cookieName = opts.config.bookmarkCookie ?? DEFAULT_BOOKMARK_COOKIE;
  const configConstraint =
    opts.config.session === "primary-first" ? "first-primary" : "first-unconstrained";
  // Any write — authenticated or not (e.g. an anonymous comment POST) — must
  // hit primary; we don't want a write plus a follow-up read racing across
  // replicas. Authenticated reads resume from a prior bookmark when the client
  // sent a valid one. Everything else (anonymous reads — the whole point of
  // read replicas) uses the config default, typically "first-unconstrained"
  // for nearest-replica routing.
  let constraint: string = configConstraint;
  if (opts.isWrite) {
    constraint = "first-primary";
  } else if (opts.isAuthenticated) {
    const bookmark = opts.cookies.get(cookieName)?.value;
    if (
      bookmark &&
      bookmark.length > 0 &&
      bookmark.length <= MAX_BOOKMARK_LENGTH &&
      !hasControlChars(bookmark)
    ) {
      constraint = bookmark;
    }
  }
  const session = binding.withSession(constraint);
  // kysely-d1 only touches .prepare() and .batch() on the database argument,
  // both of which D1DatabaseSession implements.
  // eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion -- session is structurally compatible with the subset D1Dialect uses
  const sessionAsDatabase = session as unknown as D1Database;
  const db = new Kysely<any>({
    dialect: new EmDashD1Dialect({ database: sessionAsDatabase }),
    log: kyselyLogOption(),
  });
  return {
    db,
    commit() {
      // Anonymous sessions can't resume across requests, so there's no
      // value in persisting a bookmark for them.
      if (!opts.isAuthenticated) return;
      const newBookmark = session.getBookmark?.();
      if (!newBookmark) return;
      opts.cookies.set(cookieName, newBookmark, {
        path: "/",
        httpOnly: true,
        sameSite: "lax",
        secure: opts.url.protocol === "https:",
      });
    },
  };
}
/** Whether D1 Sessions-API routing is turned on for this config. */
function isSessionEnabled(config: D1Config): boolean {
  const mode = config.session;
  return mode !== undefined && mode !== "disabled";
}
/**
 * Look up the configured D1 binding on the Worker `env` object.
 * @returns The binding, or null when it isn't declared in wrangler config.
 */
function getBinding(config: D1Config): D1Database | null {
  // eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion -- Worker binding accessed from untyped env object
  const db = (env as Record<string, unknown>)[config.binding] as D1Database | undefined;
  return db ?? null;
}

View File

@@ -0,0 +1,288 @@
/**
* EmDashPreviewDB — Durable Object for preview databases
*
* Each preview session gets its own DO with isolated SQLite storage.
* The DO is populated from a snapshot of the source EmDash site
* and serves read-only queries until its TTL expires.
*
* Not used in production — preview only.
*/
import { DurableObject } from "cloudflare:workers";
// NOTE(review): not referenced in the portion of this file visible here —
// presumably used by TTL/alarm logic further down; confirm.
/** Default TTL for preview data (1 hour) */
const DEFAULT_TTL_MS = 60 * 60 * 1000;
// NOTE(review): also unused in the visible portion — likely guards
// snapshot-import identifiers below; confirm.
/** Valid identifier pattern for snapshot table/column names */
const SAFE_IDENTIFIER = /^[a-z_][a-z0-9_]*$/;
/** SQL command prefixes that indicate read-only statements */
const READ_PREFIXES = ["SELECT", "PRAGMA", "EXPLAIN", "WITH"];
/** Result shape returned by query() */
export interface QueryResult {
rows: Record<string, unknown>[];
/** Number of rows written. Undefined for read-only queries. */
changes?: number;
}
/** A single statement for batch execution */
export interface BatchStatement {
/** Statement text */
sql: string;
/** Positional bind parameters */
params?: unknown[];
}
/** Snapshot shape received from the source site */
interface Snapshot {
/** Row data keyed by table name */
tables: Record<string, Record<string, unknown>[]>;
/** Optional per-table column/type information */
schema?: Record<
string,
{
columns: string[];
types?: Record<string, string>;
}
>;
/** When the source generated the snapshot (format set by the producer) */
generatedAt: string;
}
export class EmDashPreviewDB extends DurableObject {
  /**
   * Execute a single SQL statement.
   *
   * Called via RPC from the Kysely driver connection.
   *
   * @param sql - Raw SQL text
   * @param params - Optional positional bind parameters
   * @returns All result rows plus, for write statements, the rows-written count
   */
  query(sql: string, params?: unknown[]): QueryResult {
    const cursor = params?.length
      ? this.ctx.storage.sql.exec(sql, ...params)
      : this.ctx.storage.sql.exec(sql);
    const rows: Record<string, unknown>[] = [];
    for (const row of cursor) {
      // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- SqlStorageCursor yields record-like objects
      rows.push(row as Record<string, unknown>);
    }
    // Normalize once instead of re-trimming/upper-casing per prefix inside some().
    const normalizedSql = sql.trimStart().toUpperCase();
    const isRead = READ_PREFIXES.some((p) => normalizedSql.startsWith(p));
    return {
      rows,
      changes: isRead ? undefined : cursor.rowsWritten,
    };
  }
  /**
   * Execute multiple statements in a single synchronous transaction.
   *
   * Used for snapshot import.
   *
   * @param statements - Statements applied in order; any throw rolls back all of them
   */
  batch(statements: BatchStatement[]): void {
    this.ctx.storage.transactionSync(() => {
      for (const stmt of statements) {
        if (stmt.params?.length) {
          this.ctx.storage.sql.exec(stmt.sql, ...stmt.params);
        } else {
          this.ctx.storage.sql.exec(stmt.sql);
        }
      }
    });
  }
  /**
   * Invalidate the cached snapshot so the next populateFromSnapshot call
   * re-fetches from the source site.
   */
  invalidateSnapshot(): void {
    try {
      this.ctx.storage.sql.exec("DELETE FROM _emdash_do_meta WHERE key = 'snapshot_fetched_at'");
    } catch {
      // Table doesn't exist — nothing to invalidate
    }
  }
  /**
   * Get snapshot metadata (generated-at timestamp).
   * Returns null if the DO has no snapshot loaded.
   */
  getSnapshotMeta(): { generatedAt: string } | null {
    try {
      const row = this.ctx.storage.sql
        .exec("SELECT value FROM _emdash_do_meta WHERE key = 'snapshot_generated_at'")
        .one();
      const value = row.value;
      if (typeof value !== "string") return null;
      return { generatedAt: value };
    } catch {
      // .one() throws when the meta table or row is missing
      return null;
    }
  }
  /**
   * Populate from a snapshot (preview mode).
   *
   * Fetches content from a source EmDash site and loads it into
   * this DO's SQLite. Sets a TTL alarm for cleanup.
   *
   * @param sourceUrl - Base URL of the source site serving the snapshot
   * @param signature - Value sent as X-Preview-Signature on the snapshot fetch
   * @param options - drafts: include drafts; ttl: freshness window in seconds
   * @returns The snapshot's generatedAt timestamp
   * @throws On non-OK snapshot responses, fetch timeout, or unexpected SQL errors
   */
  async populateFromSnapshot(
    sourceUrl: string,
    signature: string,
    options?: { drafts?: boolean; ttl?: number },
  ): Promise<{ generatedAt: string }> {
    // options.ttl is in seconds; DEFAULT_TTL_MS is already in milliseconds.
    const ttlMs = (options?.ttl ?? DEFAULT_TTL_MS / 1000) * 1000;
    // Check if already populated and fresh
    try {
      const meta = this.ctx.storage.sql
        .exec("SELECT value FROM _emdash_do_meta WHERE key = 'snapshot_fetched_at'")
        .one();
      const fetchedAt = Number(meta.value);
      if (Date.now() - fetchedAt < ttlMs) {
        // Refresh alarm so active sessions aren't killed
        void this.ctx.storage.setAlarm(Date.now() + ttlMs);
        const gen = this.ctx.storage.sql
          .exec("SELECT value FROM _emdash_do_meta WHERE key = 'snapshot_generated_at'")
          .one();
        // String() already handles string | number — no assertion needed.
        return { generatedAt: String(gen.value) };
      }
    } catch (error) {
      // Only swallow "no such table" — surface all other errors
      if (!(error instanceof Error) || !error.message.includes("no such table")) {
        throw error;
      }
      // _emdash_do_meta doesn't exist yet — first population
    }
    // Fetch snapshot with timeout
    const url = `${sourceUrl}/_emdash/api/snapshot${options?.drafts ? "?drafts=true" : ""}`;
    const response = await fetch(url, {
      headers: { "X-Preview-Signature": signature },
      signal: AbortSignal.timeout(10_000),
    });
    if (!response.ok) {
      const body = await response.text().catch(() => "");
      // Separate the body from the status text with a space; previously the
      // conditional was a no-op and the body ran directly into statusText.
      throw new Error(
        `Snapshot fetch failed: ${response.status} ${response.statusText}${body ? ` ${body}` : ""}`,
      );
    }
    const snapshot: Snapshot = await response.json();
    // Wipe and repopulate in a single transaction so partial applies
    // can't leave the database in an inconsistent state.
    // ctx.storage.deleteAll() only clears KV storage, not SQLite.
    this.ctx.storage.transactionSync(() => {
      this.dropAllTables();
      this.applySnapshot(snapshot);
    });
    // Set cleanup alarm
    void this.ctx.storage.setAlarm(Date.now() + ttlMs);
    return { generatedAt: snapshot.generatedAt };
  }
  /**
   * Set a cleanup alarm after the given number of seconds.
   *
   * Used by the playground middleware to set TTL after initialization
   * is complete (initialization runs on the Worker side via RPC).
   *
   * @param ttlSeconds - Seconds from now until the alarm fires
   */
  setTtlAlarm(ttlSeconds: number): void {
    void this.ctx.storage.setAlarm(Date.now() + ttlSeconds * 1000);
  }
  /**
   * Alarm handler — clean up expired preview/playground data.
   *
   * Drops all user tables to reclaim storage.
   */
  override alarm(): void {
    this.dropAllTables();
  }
  /**
   * Drop all user tables in the DO's SQLite database.
   * Preserves SQLite and Cloudflare internal tables.
   *
   * Disables foreign key enforcement before dropping to avoid cascade
   * errors when tables are dropped in an order that violates FK
   * dependencies (e.g. child dropped first, then parent's implicit
   * CASCADE delete references the already-dropped child table).
   */
  private dropAllTables(): void {
    // Disable FK enforcement so DROP order doesn't matter.
    // Cloudflare DO SQLite enforces foreign keys by default.
    this.ctx.storage.sql.exec("PRAGMA foreign_keys = OFF");
    try {
      const tables = [
        ...this.ctx.storage.sql.exec(
          "SELECT name FROM sqlite_master WHERE type = 'table' AND name NOT LIKE 'sqlite_%' AND name NOT LIKE '_cf_%'",
        ),
      ];
      for (const row of tables) {
        // row.name is always a string for sqlite_master; String() needs no assertion.
        const name = String(row.name);
        if (!SAFE_IDENTIFIER.test(name)) {
          // Skip tables with unsafe names rather than interpolating them
          continue;
        }
        this.ctx.storage.sql.exec(`DROP TABLE IF EXISTS "${name}"`);
      }
    } finally {
      this.ctx.storage.sql.exec("PRAGMA foreign_keys = ON");
    }
  }
  /**
   * Create tables from the snapshot and bulk-insert their rows, then record
   * fetch/generation metadata. All identifiers are validated against
   * SAFE_IDENTIFIER before being interpolated into DDL/DML.
   */
  private applySnapshot(snapshot: Snapshot): void {
    const validateSnapshotIdentifier = (name: string, context: string) => {
      if (!SAFE_IDENTIFIER.test(name)) {
        throw new Error(`Invalid ${context} in snapshot: ${JSON.stringify(name)}`);
      }
    };
    // Create meta table
    this.ctx.storage.sql.exec(`
      CREATE TABLE IF NOT EXISTS _emdash_do_meta (key TEXT PRIMARY KEY, value TEXT)
    `);
    // Create tables and insert data from snapshot
    for (const [tableName, rows] of Object.entries(snapshot.tables)) {
      if (tableName === "_emdash_do_meta") continue;
      if (!rows.length) continue;
      validateSnapshotIdentifier(tableName, "table name");
      const schemaInfo = snapshot.schema?.[tableName];
      const columns = schemaInfo?.columns ?? Object.keys(rows[0]!);
      columns.forEach((c) => validateSnapshotIdentifier(c, `column name in ${tableName}`));
      const colDefs = columns
        .map((c) => {
          // Unknown/unsafe declared types fall back to TEXT.
          const colType = schemaInfo?.types?.[c] ?? "TEXT";
          const safeType = ["TEXT", "INTEGER", "REAL", "BLOB", "JSON"].includes(
            colType.toUpperCase(),
          )
            ? colType.toUpperCase()
            : "TEXT";
          return `"${c}" ${safeType}`;
        })
        .join(", ");
      this.ctx.storage.sql.exec(`CREATE TABLE IF NOT EXISTS "${tableName}" (${colDefs})`);
      // Batch insert
      const placeholders = columns.map(() => "?").join(", ");
      const insertSql = `INSERT INTO "${tableName}" (${columns.map((c) => `"${c}"`).join(", ")}) VALUES (${placeholders})`;
      for (const row of rows) {
        // Missing keys bind as NULL so ragged rows don't throw.
        const values = columns.map((c) => row[c] ?? null);
        this.ctx.storage.sql.exec(insertSql, ...values);
      }
    }
    // Record metadata
    this.ctx.storage.sql.exec(
      `INSERT OR REPLACE INTO _emdash_do_meta VALUES ('snapshot_fetched_at', ?), ('snapshot_generated_at', ?)`,
      String(Date.now()),
      snapshot.generatedAt,
    );
  }
}

View File

@@ -0,0 +1,125 @@
/**
* Kysely dialect for Durable Object preview databases
*
* Proxies all queries to an EmDashPreviewDB DO instance via RPC.
* Preview mode is read-only — no transaction support needed.
*/
import type {
CompiledQuery,
DatabaseConnection,
DatabaseIntrospector,
Dialect,
Driver,
Kysely,
QueryResult,
} from "kysely";
import { SqliteAdapter, SqliteQueryCompiler } from "kysely";
import { D1Introspector } from "./d1-introspector.js";
import type { QueryResult as DOQueryResult } from "./do-class.js";
/**
* Minimal interface for the DO stub's RPC methods.
*
* We define this instead of using DurableObjectStub<EmDashPreviewDB> directly
* because Rpc.Result<T> resolves to `never` when the return type contains
* `unknown` (Record<string, unknown> in QueryResult.rows). This interface
* gives us clean typing without fighting the Rpc type system.
*/
export interface PreviewDBStub {
  /** Execute one SQL statement on the DO; resolves with its rows and write count. */
  query(sql: string, params?: unknown[]): Promise<DOQueryResult>;
}
/** Configuration for {@link PreviewDODialect}. */
export interface PreviewDODialectConfig {
  /**
   * Factory that returns a fresh DO stub on each call.
   *
   * DO stubs are bound to the request context that created them.
   * Since the Kysely instance may be cached across requests, we can't
   * hold a single stub — each connection must get a fresh one via
   * namespace.get(id), which is cheap (no RPC, just a local ref).
   */
  getStub: () => PreviewDBStub;
}
/**
 * Kysely dialect that routes every query to a preview Durable Object.
 * SQLite semantics throughout — the DO's storage engine is SQLite.
 */
export class PreviewDODialect implements Dialect {
  readonly #options: PreviewDODialectConfig;
  constructor(options: PreviewDODialectConfig) {
    this.#options = options;
  }
  /** Standard SQLite adapter. */
  createAdapter(): SqliteAdapter {
    return new SqliteAdapter();
  }
  /** Compile with the stock SQLite query compiler. */
  createQueryCompiler(): SqliteQueryCompiler {
    return new SqliteQueryCompiler();
  }
  /** Driver whose connections forward queries to the DO over RPC. */
  createDriver(): Driver {
    return new PreviewDODriver(this.#options);
  }
  /** Reuse the shared D1 introspector for schema introspection. */
  createIntrospector(db: Kysely<any>): DatabaseIntrospector {
    return new D1Introspector(db);
  }
}
/**
 * Minimal Kysely driver: hands out RPC-backed connections, no pooling,
 * and no real transactions (preview is read-only).
 */
class PreviewDODriver implements Driver {
  readonly #options: PreviewDODialectConfig;
  constructor(options: PreviewDODialectConfig) {
    this.#options = options;
  }
  async init(): Promise<void> {
    // Nothing to initialize.
  }
  async acquireConnection(): Promise<DatabaseConnection> {
    // Fresh stub per connection — see PreviewDODialectConfig.getStub.
    return new PreviewDOConnection(this.#options.getStub());
  }
  async beginTransaction(): Promise<void> {
    // No-op. Preview is read-only.
  }
  async commitTransaction(): Promise<void> {
    // No-op.
  }
  async rollbackTransaction(): Promise<void> {
    // No-op.
  }
  async releaseConnection(): Promise<void> {
    // Connections hold no pooled resources.
  }
  async destroy(): Promise<void> {
    // Nothing to tear down.
  }
}
/** A Kysely connection that forwards compiled queries to the DO via RPC. */
class PreviewDOConnection implements DatabaseConnection {
  readonly #stub: PreviewDBStub;
  constructor(stub: PreviewDBStub) {
    this.#stub = stub;
  }
  async executeQuery<O>(compiledQuery: CompiledQuery): Promise<QueryResult<O>> {
    // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- CompiledQuery.parameters is ReadonlyArray<unknown>, stub expects unknown[]
    const bindings = compiledQuery.parameters as unknown[];
    const result = await this.#stub.query(compiledQuery.sql, bindings);
    const numAffectedRows =
      result.changes === undefined ? undefined : BigInt(result.changes);
    return {
      // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- Kysely generic O is the caller's row type; we trust the DB returned matching rows
      rows: result.rows as O[],
      numAffectedRows,
    };
  }
  // eslint-disable-next-line require-yield -- interface requires AsyncIterableIterator but DO doesn't support streaming
  async *streamQuery<O>(): AsyncIterableIterator<QueryResult<O>> {
    throw new Error("Preview DO dialect does not support streaming");
  }
}

View File

@@ -0,0 +1,65 @@
/**
* Playground mode route gating.
*
* Unlike preview mode (which blocks everything except read-only API routes),
* playground mode allows most routes including the admin UI and write APIs.
* Only auth, setup, and abuse-prone routes are blocked.
*
* Pure function -- no Worker or Cloudflare dependencies.
*/
/**
 * Auth routes that ARE allowed in playground mode.
 * /auth/me is needed by the admin UI to identify the current user.
 */
const AUTH_ALLOWLIST = new Set(["/_emdash/api/auth/me"]);
/**
 * Routes blocked in playground mode.
 *
 * These are either security-sensitive (auth, setup, tokens, OAuth),
 * abuse-prone (media upload, plugin install), or pointless in a
 * temporary playground (snapshot export, user management).
 */
const BLOCKED_PREFIXES = [
  // Auth -- playground has no real auth (except /auth/me for admin UI)
  "/_emdash/api/auth/",
  // Setup -- playground is pre-configured
  "/_emdash/api/setup/",
  // OAuth provider routes
  "/_emdash/api/oauth/",
  // API token management
  "/_emdash/api/tokens/",
  // User management (can't invite/create real users)
  "/_emdash/api/users/invite",
  // Plugin installation (security boundary)
  "/_emdash/api/plugins/install",
  "/_emdash/api/plugins/marketplace",
  // Media uploads (abuse vector -- no storage in playground)
  "/_emdash/api/media/upload",
  // Snapshot export (no point exporting a playground)
  "/_emdash/api/snapshot",
];
/**
 * Check whether a request should be blocked in playground mode.
 *
 * Playground allows most CMS functionality: content CRUD, schema editing,
 * taxonomies, menus, widgets, search, settings, and the full admin UI.
 * Only auth, setup, user management, media uploads, and plugin
 * installation are blocked.
 *
 * @param pathname - URL pathname of the incoming request
 * @returns true if the route must be rejected in playground mode
 */
export function isBlockedInPlayground(pathname: string): boolean {
  // Allowlist wins over prefix blocking -- specific routes that must work
  // despite their parent prefix being blocked (e.g. /auth/me for admin UI).
  if (AUTH_ALLOWLIST.has(pathname)) {
    return false;
  }
  // startsWith(prefix) also covers pathname === prefix exactly.
  return BLOCKED_PREFIXES.some((prefix) => pathname.startsWith(prefix));
}

View File

@@ -0,0 +1,48 @@
/**
* Preview mode route gating.
*
* Pure function — no Worker or Cloudflare dependencies.
* Extracted so it can be tested without mocking cloudflare:workers.
*/
/**
 * API route prefixes allowed in preview mode (read-only).
 * Everything else under /_emdash/ is blocked.
 */
const ALLOWED_API_PREFIXES = [
  "/_emdash/api/content/",
  "/_emdash/api/schema",
  "/_emdash/api/manifest",
  "/_emdash/api/dashboard",
  "/_emdash/api/search",
  "/_emdash/api/media",
  "/_emdash/api/taxonomies",
  "/_emdash/api/menus",
  "/_emdash/api/snapshot",
];
/**
 * Check whether a request should be blocked in preview mode.
 *
 * Preview is read-only with no authenticated user. All /_emdash/
 * routes are blocked by default (admin UI, auth, setup, write APIs);
 * only the read-only API prefixes above are allowlisted.
 *
 * Non-emdash routes (site pages, assets) are always allowed.
 *
 * @param pathname - URL pathname of the incoming request
 * @returns true if the route must be rejected in preview mode
 */
export function isBlockedInPreview(pathname: string): boolean {
  // Site pages and assets are never gated — only /_emdash/ routes are.
  if (!pathname.startsWith("/_emdash/")) {
    return false;
  }
  // Allowlisted read-only API prefixes pass; everything else is blocked.
  const allowed = ALLOWED_API_PREFIXES.some(
    (prefix) => pathname === prefix || pathname.startsWith(prefix),
  );
  return !allowed;
}

View File

@@ -0,0 +1,100 @@
/**
* Preview URL signing utilities.
*
* Pure functions using Web Crypto — no Worker or Cloudflare dependencies.
* Used by the source site to generate signed preview URLs and by the
* preview service to verify them.
*/
/** Matches a lowercase hex string */
const HEX_PATTERN = /^[0-9a-f]+$/;
/**
 * Compute HMAC-SHA256 over a message and return the hex-encoded signature.
 *
 * @param message - Text to authenticate
 * @param secret - Shared secret used as the HMAC key
 * @returns Lowercase hex digest (64 characters)
 */
async function hmacSign(message: string, secret: string): Promise<string> {
  const encoder = new TextEncoder();
  const key = await crypto.subtle.importKey(
    "raw",
    encoder.encode(secret),
    { name: "HMAC", hash: "SHA-256" },
    false,
    ["sign"],
  );
  const mac = new Uint8Array(await crypto.subtle.sign("HMAC", key, encoder.encode(message)));
  let hex = "";
  for (const byte of mac) {
    hex += byte.toString(16).padStart(2, "0");
  }
  return hex;
}
/**
 * Generate a signed preview URL.
 *
 * The source site calls this to create a link that opens the preview service.
 * The preview service validates the signature and populates the DO from a
 * snapshot of the source site.
 *
 * @param previewBase - Base URL of the preview service (e.g. "https://theme-x.preview.emdashcms.com")
 * @param source - URL of the source site providing the snapshot (e.g. "https://mysite.com")
 * @param secret - Shared HMAC secret (same value configured on both sides)
 * @param ttl - Link validity in seconds (default: 3600 = 1 hour)
 * @returns Fully signed preview URL with `source`, `exp`, and `sig` query params
 */
export async function signPreviewUrl(
  previewBase: string,
  source: string,
  secret: string,
  ttl = 3600,
): Promise<string> {
  const expiresAt = Math.floor(Date.now() / 1000) + ttl;
  const signature = await hmacSign(`${source}:${expiresAt}`, secret);
  const signed = new URL(previewBase);
  signed.searchParams.set("source", source);
  signed.searchParams.set("exp", String(expiresAt));
  signed.searchParams.set("sig", signature);
  return signed.toString();
}
/**
 * Verify an HMAC-SHA256 signature on a preview URL.
 *
 * Uses crypto.subtle.verify for constant-time comparison.
 *
 * @param source - `source` query parameter from the preview URL
 * @param exp - `exp` query parameter (unix seconds)
 * @param sig - Hex-encoded signature from the `sig` query parameter
 * @param secret - Shared HMAC secret
 * @returns true if the signature is valid
 */
export async function verifyPreviewSignature(
  source: string,
  exp: number,
  sig: string,
  secret: string,
): Promise<boolean> {
  // A valid signature is exactly 32 bytes = 64 lowercase hex characters.
  if (sig.length !== 64 || !HEX_PATTERN.test(sig)) return false;
  const sigBytes = new Uint8Array(sig.length / 2);
  for (let offset = 0; offset < sig.length; offset += 2) {
    sigBytes[offset / 2] = parseInt(sig.slice(offset, offset + 2), 16);
  }
  const encoder = new TextEncoder();
  const key = await crypto.subtle.importKey(
    "raw",
    encoder.encode(secret),
    { name: "HMAC", hash: "SHA-256" },
    false,
    ["verify"],
  );
  return crypto.subtle.verify("HMAC", key, sigBytes, encoder.encode(`${source}:${exp}`));
}

View File

@@ -0,0 +1,269 @@
/**
* Preview middleware for Durable Object-backed preview databases.
*
* This middleware intercepts requests to a preview service, validates
* signed preview URLs, creates/resolves DO sessions, populates snapshots,
* and overrides the request-context DB so all queries route to the
* isolated DO database.
*
* Designed to be registered as Astro middleware in a preview Worker.
*
* @example
* ```ts
* // src/middleware.ts (in the preview Worker)
* import { createPreviewMiddleware } from "@emdash-cms/cloudflare/db/do";
*
* export const onRequest = createPreviewMiddleware({
* binding: "PREVIEW_DB",
* secret: import.meta.env.PREVIEW_SECRET,
* });
* ```
*/
import type { MiddlewareHandler } from "astro";
import { env } from "cloudflare:workers";
import { runWithContext } from "emdash/request-context";
import { Kysely } from "kysely";
import { ulid } from "ulidx";
import type { EmDashPreviewDB } from "./do-class.js";
import { PreviewDODialect } from "./do-dialect.js";
import type { PreviewDBStub } from "./do-dialect.js";
import { isBlockedInPreview } from "./do-preview-routes.js";
import { verifyPreviewSignature } from "./do-preview-sign.js";
import { renderPreviewToolbar } from "./preview-toolbar.js";
/** Configuration for the preview middleware */
export interface PreviewMiddlewareConfig {
  /** Durable Object binding name (from wrangler.jsonc) */
  binding: string;
  /** HMAC secret for validating signed preview URLs (must match the value used by the source site's signPreviewUrl) */
  secret: string;
  /** TTL for preview data in seconds (default: 3600 = 1 hour); also used as the session cookie max-age */
  ttl?: number;
  /** Cookie name for session token (default: "emdash_preview"); a "<name>_params" companion cookie stores the signed params */
  cookieName?: string;
}
/**
 * Simple loading interstitial HTML.
 * Auto-reloads after a short delay to check if the snapshot is ready.
 *
 * Served with status 503 (plus Retry-After) while the snapshot fetch is
 * failing or still in progress; the `<meta http-equiv="refresh">` tag
 * re-requests the page every 2 seconds.
 *
 * @returns A complete standalone HTML document (inline styles, data-URI favicon, no external assets)
 */
function loadingPage(): string {
  return `<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta http-equiv="refresh" content="2">
<title>Loading preview...</title>
<link rel="icon" href="data:image/svg+xml,<svg width='75' height='75' viewBox='0 0 75 75' fill='none' xmlns='http://www.w3.org/2000/svg'><rect x='3' y='3' width='69' height='69' rx='10.518' stroke='url(%23pb)' stroke-width='6'/><rect x='18' y='34' width='39.366' height='6.561' fill='url(%23pd)'/><defs><linearGradient id='pb' x1='-43' y1='124' x2='92.42' y2='-41.75' gradientUnits='userSpaceOnUse'><stop stop-color='%230F006B'/><stop offset='.08' stop-color='%23281A81'/><stop offset='.17' stop-color='%235D0C83'/><stop offset='.25' stop-color='%23911475'/><stop offset='.33' stop-color='%23CE2F55'/><stop offset='.42' stop-color='%23FF6633'/><stop offset='.5' stop-color='%23F6821F'/><stop offset='.58' stop-color='%23FBAD41'/><stop offset='.67' stop-color='%23FFCD89'/><stop offset='.75' stop-color='%23FFE9CB'/><stop offset='.83' stop-color='%23FFF7EC'/><stop offset='.92' stop-color='%23FFF8EE'/><stop offset='1' stop-color='white'/></linearGradient><linearGradient id='pd' x1='91.5' y1='27.5' x2='28.12' y2='54.18' gradientUnits='userSpaceOnUse'><stop stop-color='white'/><stop offset='.13' stop-color='%23FFF8EE'/><stop offset='.62' stop-color='%23FBAD41'/><stop offset='.85' stop-color='%23F6821F'/><stop offset='1' stop-color='%23FF6633'/></linearGradient></defs></svg>" />
<style>
  body { font-family: system-ui, sans-serif; display: flex; align-items: center; justify-content: center; min-height: 100vh; margin: 0; background: #fafafa; color: #333; }
  .spinner { width: 40px; height: 40px; border: 3px solid #e0e0e0; border-top-color: #333; border-radius: 50%; animation: spin 0.8s linear infinite; margin-right: 16px; }
  @keyframes spin { to { transform: rotate(360deg); } }
</style>
</head>
<body>
<div class="spinner"></div>
<p>Loading preview&hellip;</p>
</body>
</html>`;
}
/**
 * Create an Astro-compatible preview middleware.
 *
 * Returns a middleware function that can be used in `defineMiddleware()`
 * or composed via `sequence()`.
 *
 * Request flow:
 *  1. `/_preview/reload` clears the session cookie and redirects back with
 *     the original signed params (stored in a companion `_params` cookie).
 *  2. Routes blocked by isBlockedInPreview get a 403 JSON error.
 *  3. Without a session cookie, the request must carry valid signed params
 *     (`source`, `exp`, `sig`); a new session token is minted and stored.
 *  4. The session token names a Durable Object holding the snapshot data.
 *  5. Downstream DB queries are routed to that DO via request context, and
 *     the preview toolbar is injected into HTML responses.
 *
 * @param config - Binding name, HMAC secret, TTL, and cookie name
 * @returns An Astro `MiddlewareHandler`
 */
export function createPreviewMiddleware(config: PreviewMiddlewareConfig): MiddlewareHandler {
  const { binding, secret, ttl = 3600, cookieName = "emdash_preview" } = config;
  return async function previewMiddleware(context, next) {
    const { url, cookies } = context;
    // --- 0a. Reload endpoint ---
    // The toolbar POSTs here to clear the httpOnly session cookie and
    // redirect back with the original signed params for a fresh snapshot.
    if (url.pathname === "/_preview/reload") {
      cookies.delete(cookieName, { path: "/" });
      let redirectTo = "/";
      const paramsCookie = cookies.get(`${cookieName}_params`)?.value;
      if (paramsCookie) {
        // Params cookie format is "source\nexp\nsig" (set further below).
        const parts = decodeURIComponent(paramsCookie).split("\n");
        if (parts.length === 3) {
          const reloadUrl = new URL("/", url.origin);
          reloadUrl.searchParams.set("source", parts[0]!);
          reloadUrl.searchParams.set("exp", parts[1]!);
          reloadUrl.searchParams.set("sig", parts[2]!);
          redirectTo = reloadUrl.pathname + reloadUrl.search;
        }
      }
      return context.redirect(redirectTo);
    }
    // --- 0b. Route gating ---
    // Block admin UI, auth, and setup routes. These depend on state
    // (users, sessions, credentials) that doesn't exist in preview snapshots.
    if (isBlockedInPreview(url.pathname)) {
      return Response.json(
        { error: { code: "PREVIEW_MODE", message: "Not available in preview mode" } },
        { status: 403 },
      );
    }
    // --- 1. Resolve session token ---
    let sessionToken: string | undefined = cookies.get(cookieName)?.value;
    let sourceUrl: string | null = null;
    let snapshotSignature: string | null = null;
    if (!sessionToken) {
      // No cookie — must have a signed URL
      const source = url.searchParams.get("source");
      const exp = url.searchParams.get("exp");
      const sig = url.searchParams.get("sig");
      if (!source || !exp || !sig) {
        return new Response("Missing preview parameters", { status: 400 });
      }
      // exp is unix seconds; reject expired links before doing crypto work.
      const expNum = parseInt(exp, 10);
      if (isNaN(expNum) || expNum < Date.now() / 1000) {
        return new Response("Preview link expired", { status: 403 });
      }
      const valid = await verifyPreviewSignature(source, expNum, sig, secret);
      if (!valid) {
        return new Response("Invalid preview signature", { status: 403 });
      }
      // Generate session
      sessionToken = ulid();
      sourceUrl = source;
      // Build the signature header value for snapshot fetch: "source:exp:sig"
      snapshotSignature = `${source}:${exp}:${sig}`;
      cookies.set(cookieName, sessionToken, {
        httpOnly: true,
        sameSite: "lax",
        path: "/",
        maxAge: ttl,
      });
      // Store the signed params so the toolbar can trigger a reload.
      // Not httpOnly — the toolbar script needs to read them.
      cookies.set(`${cookieName}_params`, `${source}\n${exp}\n${sig}`, {
        sameSite: "lax",
        path: "/",
        maxAge: ttl,
      });
    }
    // --- 2. Get DO stub ---
    // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- Worker binding from untyped env
    const ns = (env as Record<string, unknown>)[binding];
    if (!ns) {
      console.error(`Preview binding "${binding}" not found in environment`);
      return new Response("Preview service misconfigured", { status: 500 });
    }
    // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- DO namespace from untyped env
    const namespace = ns as DurableObjectNamespace<EmDashPreviewDB>;
    // The session token names the DO, so each preview session gets its
    // own isolated database instance.
    const doId = namespace.idFromName(sessionToken);
    const stub = namespace.get(doId);
    // --- 3. Populate from snapshot if needed ---
    let snapshotGeneratedAt: string | undefined;
    let snapshotError: string | undefined;
    if (!sourceUrl) {
      // Returning session — get metadata from the DO
      try {
        const meta = await stub.getSnapshotMeta();
        snapshotGeneratedAt = meta?.generatedAt;
      } catch {
        // DO may have expired or been cleaned up
      }
    }
    if (sourceUrl && snapshotSignature) {
      try {
        // Pass the full signature header value (source:exp:sig) so the DO
        // can send it as X-Preview-Signature when fetching the snapshot.
        const result = await stub.populateFromSnapshot(sourceUrl, snapshotSignature, { ttl });
        snapshotGeneratedAt = result.generatedAt;
        // Snapshot loaded — redirect to strip signed params from the URL.
        // Astro's cookie buffer flushes on context.redirect().
        const cleanUrl = new URL(url);
        cleanUrl.searchParams.delete("source");
        cleanUrl.searchParams.delete("exp");
        cleanUrl.searchParams.delete("sig");
        return context.redirect(cleanUrl.pathname + cleanUrl.search);
      } catch (error) {
        const message = error instanceof Error ? error.message : String(error);
        console.error("Failed to populate preview snapshot:", message);
        snapshotError = message;
        // If this is the initial load (no session yet), show a loading page.
        // If we already have a session, continue with stale data and show the error in the toolbar.
        // NOTE(review): cookies.set(cookieName, ...) ran above in this same
        // request; if Astro's cookies.get() reflects buffered set values,
        // this condition is always false on initial load and the loading
        // page is never shown — TODO confirm AstroCookies semantics.
        if (!cookies.get(cookieName)?.value) {
          return new Response(loadingPage(), {
            status: 503,
            headers: {
              "Content-Type": "text/html",
              "Retry-After": "2",
            },
          });
        }
      }
    }
    // --- 4. Create Kysely dialect pointing at the DO ---
    const getStub = (): PreviewDBStub => {
      // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- RPC type limitation
      return stub as unknown as PreviewDBStub;
    };
    const dialect = new PreviewDODialect({ getStub });
    // --- 5. Create Kysely instance and override request-context DB ---
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const previewDb = new Kysely<any>({ dialect });
    return runWithContext(
      {
        editMode: false,
        db: previewDb,
      },
      async () => {
        const response = await next();
        // Inject the preview toolbar into HTML pages so the user can see
        // snapshot age / errors and trigger a reload.
        return injectPreviewToolbar(response, {
          generatedAt: snapshotGeneratedAt,
          source: sourceUrl ?? undefined,
          error: snapshotError,
        });
      },
    );
  };
}
/**
 * Inject preview toolbar HTML into an HTML response.
 * Returns the original response unchanged for non-HTML responses,
 * and the unmodified body when no closing </body> tag is present.
 */
async function injectPreviewToolbar(
  response: Response,
  config: { generatedAt?: string; source?: string; error?: string },
): Promise<Response> {
  const isHtml = response.headers.get("content-type")?.includes("text/html") ?? false;
  if (!isHtml) return response;
  const body = await response.text();
  const closeTag = "</body>";
  if (!body.includes(closeTag)) return new Response(body, response);
  const withToolbar = body.replace(closeTag, `${renderPreviewToolbar(config)}${closeTag}`);
  return new Response(withToolbar, {
    status: response.status,
    headers: response.headers,
  });
}

View File

@@ -0,0 +1,12 @@
/**
* Shared Durable Object config types (preview-only)
*
* Imported by both the config-time entry (index.ts) and the runtime entry (do.ts).
* This module must NOT import from cloudflare:workers so it stays safe at config time.
*/
/** Durable Object preview database configuration (config-time safe — no cloudflare:workers imports required to use this type) */
export interface PreviewDOConfig {
  /** Wrangler binding name for the DO namespace (e.g. "PREVIEW_DB") */
  binding: string;
}

View File

@@ -0,0 +1,62 @@
/**
* Durable Object preview database — RUNTIME ENTRY
*
* Creates a Kysely dialect backed by a preview Durable Object.
* Loaded at runtime via virtual module when preview database queries are needed.
*
* This module imports directly from cloudflare:workers to access the DO binding.
* Do NOT import this at config time.
*/
import { env } from "cloudflare:workers";
import type { Dialect } from "kysely";
import type { EmDashPreviewDB } from "./do-class.js";
import { PreviewDODialect } from "./do-dialect.js";
import type { PreviewDBStub } from "./do-dialect.js";
import type { PreviewDOConfig } from "./do-types.js";
/**
 * Create a preview DO dialect from config.
 *
 * The caller is responsible for resolving the DO name (session token).
 * This is passed as `config.name` by the preview middleware.
 *
 * @throws When the configured DO binding is missing from the Worker env
 */
export function createDialect(config: PreviewDOConfig & { name: string }): Dialect {
  // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- Worker binding accessed from untyped env object
  const rawBinding = (env as Record<string, unknown>)[config.binding];
  if (!rawBinding) {
    throw new Error(
      `Durable Object binding "${config.binding}" not found in environment. ` +
        `Check your wrangler.jsonc configuration:\n\n` +
        `[durable_objects]\n` +
        `bindings = [\n` +
        `  { name = "${config.binding}", class_name = "EmDashPreviewDB" }\n` +
        `]\n\n` +
        `[[migrations]]\n` +
        `tag = "v1"\n` +
        `new_sqlite_classes = ["EmDashPreviewDB"]`,
    );
  }
  // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- DO namespace binding from untyped env object
  const previewNamespace = rawBinding as DurableObjectNamespace<EmDashPreviewDB>;
  const doId = previewNamespace.idFromName(config.name);
  // Factory so each connection gets a fresh stub — stubs are cheap local refs.
  const getStub = (): PreviewDBStub => {
    // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- Rpc type limitation with unknown in return types
    return previewNamespace.get(doId) as unknown as PreviewDBStub;
  };
  return new PreviewDODialect({ getStub });
}
// Re-export the DO class and preview middleware for user convenience
export { EmDashPreviewDB } from "./do-class.js";
export { createPreviewMiddleware } from "./do-preview.js";
export type { PreviewMiddlewareConfig } from "./do-preview.js";
export { isBlockedInPreview } from "./do-preview-routes.js";
export { signPreviewUrl, verifyPreviewSignature } from "./do-preview-sign.js";

View File

@@ -0,0 +1,267 @@
/**
 * Playground Loading Page
 *
 * Rendered when a user first hits /playground. Shows an animated loading state
 * while the client-side JS calls /_playground/init to create the DO, run
 * migrations, and apply the seed. Once init completes, redirects to the admin.
 *
 * No dependencies -- plain HTML with inline styles and a <script> tag.
 * The inline script is kept in ES5 style (var, function expressions) so the
 * loading page itself does not depend on modern JS syntax.
 *
 * @returns the complete HTML document as a single string
 */
export function renderPlaygroundLoadingPage(): string {
  return `<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<title>EmDash Playground</title>
<link rel="icon" href="data:image/svg+xml,<svg width='75' height='75' viewBox='0 0 75 75' fill='none' xmlns='http://www.w3.org/2000/svg'><rect x='3' y='3' width='69' height='69' rx='10.518' stroke='url(%23pb)' stroke-width='6'/><rect x='18' y='34' width='39.366' height='6.561' fill='url(%23pd)'/><defs><linearGradient id='pb' x1='-43' y1='124' x2='92.42' y2='-41.75' gradientUnits='userSpaceOnUse'><stop stop-color='%230F006B'/><stop offset='.08' stop-color='%23281A81'/><stop offset='.17' stop-color='%235D0C83'/><stop offset='.25' stop-color='%23911475'/><stop offset='.33' stop-color='%23CE2F55'/><stop offset='.42' stop-color='%23FF6633'/><stop offset='.5' stop-color='%23F6821F'/><stop offset='.58' stop-color='%23FBAD41'/><stop offset='.67' stop-color='%23FFCD89'/><stop offset='.75' stop-color='%23FFE9CB'/><stop offset='.83' stop-color='%23FFF7EC'/><stop offset='.92' stop-color='%23FFF8EE'/><stop offset='1' stop-color='white'/></linearGradient><linearGradient id='pd' x1='91.5' y1='27.5' x2='28.12' y2='54.18' gradientUnits='userSpaceOnUse'><stop stop-color='white'/><stop offset='.13' stop-color='%23FFF8EE'/><stop offset='.62' stop-color='%23FBAD41'/><stop offset='.85' stop-color='%23F6821F'/><stop offset='1' stop-color='%23FF6633'/></linearGradient></defs></svg>" />
<style>
*, *::before, *::after { box-sizing: border-box; margin: 0; padding: 0; }
body {
min-height: 100dvh;
display: flex;
align-items: center;
justify-content: center;
background: #0a0a0a;
color: #e0e0e0;
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, sans-serif;
-webkit-font-smoothing: antialiased;
}
.pg-loading {
text-align: center;
display: flex;
flex-direction: column;
align-items: center;
gap: 32px;
}
.pg-logo {
display: flex;
align-items: center;
justify-content: center;
gap: 12px;
font-size: 28px;
font-weight: 700;
letter-spacing: -0.02em;
color: #fff;
}
.pg-logo svg {
width: 36px;
height: 36px;
flex-shrink: 0;
}
.pg-spinner-wrap {
position: relative;
width: 48px;
height: 48px;
}
.pg-spinner {
width: 48px;
height: 48px;
border: 3px solid rgba(255, 255, 255, 0.08);
border-top-color: #facc15;
border-radius: 50%;
animation: pg-spin 0.8s linear infinite;
}
@keyframes pg-spin {
to { transform: rotate(360deg); }
}
.pg-message {
font-size: 15px;
color: #888;
line-height: 1.5;
}
.pg-steps {
display: flex;
flex-direction: column;
gap: 8px;
margin-top: 4px;
}
.pg-step {
display: flex;
align-items: center;
gap: 8px;
font-size: 13px;
color: #555;
transition: color 0.3s;
}
.pg-step.active {
color: #ccc;
}
.pg-step.done {
color: #4ade80;
}
.pg-step-dot {
width: 6px;
height: 6px;
border-radius: 50%;
background: #333;
flex-shrink: 0;
transition: background 0.3s;
}
.pg-step.active .pg-step-dot {
background: #facc15;
box-shadow: 0 0 6px rgba(250, 204, 21, 0.4);
}
.pg-step.done .pg-step-dot {
background: #4ade80;
}
.pg-error {
display: none;
flex-direction: column;
align-items: center;
gap: 16px;
}
.pg-error.visible {
display: flex;
}
.pg-error-message {
font-size: 14px;
color: #f87171;
max-width: 360px;
line-height: 1.5;
}
.pg-retry-btn {
display: inline-flex;
align-items: center;
gap: 6px;
padding: 8px 16px;
background: rgba(250, 204, 21, 0.12);
color: #facc15;
border: none;
border-radius: 999px;
font-size: 13px;
font-weight: 500;
cursor: pointer;
font-family: inherit;
transition: background 0.15s;
}
.pg-retry-btn:hover {
background: rgba(250, 204, 21, 0.22);
}
</style>
</head>
<body>
<div class="pg-loading">
<div class="pg-logo"><svg viewBox="0 0 75 75" fill="none" xmlns="http://www.w3.org/2000/svg"><rect x="3" y="3" width="69" height="69" rx="10.518" stroke="url(#pl-b)" stroke-width="6"/><rect x="18" y="34" width="39.366" height="6.561" fill="url(#pl-d)"/><defs><linearGradient id="pl-b" x1="-43" y1="124" x2="92.42" y2="-41.75" gradientUnits="userSpaceOnUse"><stop stop-color="#0F006B"/><stop offset=".08" stop-color="#281A81"/><stop offset=".17" stop-color="#5D0C83"/><stop offset=".25" stop-color="#911475"/><stop offset=".33" stop-color="#CE2F55"/><stop offset=".42" stop-color="#FF6633"/><stop offset=".5" stop-color="#F6821F"/><stop offset=".58" stop-color="#FBAD41"/><stop offset=".67" stop-color="#FFCD89"/><stop offset=".75" stop-color="#FFE9CB"/><stop offset=".83" stop-color="#FFF7EC"/><stop offset=".92" stop-color="#FFF8EE"/><stop offset="1" stop-color="#fff"/></linearGradient><linearGradient id="pl-d" x1="91.5" y1="27.5" x2="28.12" y2="54.18" gradientUnits="userSpaceOnUse"><stop stop-color="#fff"/><stop offset=".13" stop-color="#FFF8EE"/><stop offset=".62" stop-color="#FBAD41"/><stop offset=".85" stop-color="#F6821F"/><stop offset="1" stop-color="#FF6633"/></linearGradient></defs></svg>EmDash</div>
<div class="pg-spinner-wrap">
<div class="pg-spinner" id="pg-spinner"></div>
</div>
<div>
<div class="pg-message" id="pg-message">Creating your playground&hellip;</div>
<div class="pg-steps" id="pg-steps">
<div class="pg-step active" id="step-db">
<span class="pg-step-dot"></span>
Setting up database
</div>
<div class="pg-step" id="step-content">
<span class="pg-step-dot"></span>
Loading demo content
</div>
<div class="pg-step" id="step-ready">
<span class="pg-step-dot"></span>
Almost ready
</div>
</div>
</div>
<div class="pg-error" id="pg-error">
<div class="pg-error-message" id="pg-error-message"></div>
<button class="pg-retry-btn" id="pg-retry">Try again</button>
</div>
</div>
<script>
(function() {
var steps = ["step-db", "step-content", "step-ready"];
var currentStep = 0;
function setStep(index) {
for (var i = 0; i < steps.length; i++) {
var el = document.getElementById(steps[i]);
if (!el) continue;
el.className = "pg-step" + (i < index ? " done" : i === index ? " active" : "");
}
currentStep = index;
}
function showError(message) {
document.getElementById("pg-spinner").style.display = "none";
document.getElementById("pg-message").textContent = "Something went wrong";
document.getElementById("pg-steps").style.display = "none";
var errorEl = document.getElementById("pg-error");
var errorMsg = document.getElementById("pg-error-message");
if (errorEl) errorEl.className = "pg-error visible";
if (errorMsg) errorMsg.textContent = message;
}
function init() {
setStep(0);
document.getElementById("pg-spinner").style.display = "";
document.getElementById("pg-message").textContent = "Creating your playground\\u2026";
document.getElementById("pg-steps").style.display = "";
var errorEl = document.getElementById("pg-error");
if (errorEl) errorEl.className = "pg-error";
// Advance steps on a timer for visual feedback while init runs.
// The actual init is a single server call -- these steps are cosmetic.
var stepTimer = setTimeout(function() { setStep(1); }, 800);
var stepTimer2 = setTimeout(function() { setStep(2); }, 2000);
fetch("/_playground/init", { method: "POST", credentials: "same-origin" })
.then(function(res) {
clearTimeout(stepTimer);
clearTimeout(stepTimer2);
if (!res.ok) {
// Surface the server's JSON error message when present; tolerate a
// non-JSON error body instead of leaking a parse error to the user.
// ES5 property access (no optional chaining) matches the rest of
// this script.
return res.json().catch(function() { return {}; }).then(function(body) {
throw new Error((body.error && body.error.message) || "Initialization failed");
});
}
return res.json();
})
.then(function() {
// Mark all steps done
setStep(steps.length);
document.getElementById("pg-message").textContent = "Ready!";
// Brief pause so the user sees "Ready!" before navigating
setTimeout(function() {
location.replace("/_emdash/admin");
}, 400);
})
.catch(function(err) {
clearTimeout(stepTimer);
clearTimeout(stepTimer2);
showError(err.message || "Failed to create playground. Please try again.");
});
}
document.getElementById("pg-retry").addEventListener("click", init);
init();
})();
</script>
</body>
</html>`;
}

View File

@@ -0,0 +1,380 @@
/**
* Playground middleware — injected by the EmDash integration as order: "pre".
*
* Runs BEFORE the EmDash runtime init middleware. Creates a per-session
* Durable Object database, runs migrations, applies the seed, creates an
* anonymous admin user, and sets the DB in ALS via runWithContext().
*
* By the time the runtime middleware runs, the ALS-scoped DB is ready.
* The runtime's `db` getter checks ALS first, so all init queries
* (migrations, FTS, cron, manifest) operate on the real DO database.
*
* This module is registered via `addMiddleware({ entrypoint: "..." })` in
* the integration, NOT in the user's src/middleware.ts.
*/
import { defineMiddleware } from "astro:middleware";
import { env } from "cloudflare:workers";
import { Kysely, sql } from "kysely";
import { ulid } from "ulidx";
// @ts-ignore - virtual module populated by EmDash integration at build time
import virtualConfig from "virtual:emdash/config";
import type { EmDashPreviewDB } from "./do-class.js";
import { PreviewDODialect } from "./do-dialect.js";
import type { PreviewDBStub } from "./do-dialect.js";
import { isBlockedInPlayground } from "./do-playground-routes.js";
import { renderPlaygroundLoadingPage } from "./playground-loading.js";
import { renderPlaygroundToolbar } from "./playground-toolbar.js";
/** Default TTL for playground data (1 hour) */
const DEFAULT_TTL = 3600;
/** Cookie name for the playground session; its value is a ULID token */
const COOKIE_NAME = "emdash_playground";
/** Fixed identity for the auto-provisioned playground admin user */
const PLAYGROUND_USER_ID = "playground-admin";
const PLAYGROUND_USER_EMAIL = "playground@emdashcms.com";
const PLAYGROUND_USER_NAME = "Playground User";
const PLAYGROUND_USER_ROLE = 50; // Admin
// Assigned to context.locals.user so downstream middleware sees an admin.
const PLAYGROUND_USER = {
  id: PLAYGROUND_USER_ID,
  email: PLAYGROUND_USER_EMAIL,
  name: PLAYGROUND_USER_NAME,
  role: PLAYGROUND_USER_ROLE,
};
/**
 * Sessions whose DO has been initialized during this Worker lifetime.
 * Per-isolate only: cross-lifetime idempotence relies on the persisted
 * setup-complete flag checked inside initializePlayground().
 */
const initializedSessions = new Set<string>();
/**
* Read the DO binding name from the virtual config.
* The database config has the binding in `config.database.config.binding`.
*/
function getBindingName(): string {
  // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- virtual module import
  const cfg = virtualConfig as { database?: { config?: { binding?: string } } } | null;
  const bindingName = cfg?.database?.config?.binding;
  if (bindingName) {
    return bindingName;
  }
  throw new Error(
    "Playground middleware: no database binding found in config. " +
      "Ensure database: playgroundDatabase({ binding: '...' }) is set.",
  );
}
/**
* Get a PreviewDBStub for the given session token.
*/
function getStub(binding: string, token: string): PreviewDBStub {
  // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- Worker binding from untyped env
  const candidate = (env as Record<string, unknown>)[binding];
  if (!candidate) {
    throw new Error(`Playground binding "${binding}" not found in environment`);
  }
  // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- DO namespace from untyped env
  const doNamespace = candidate as DurableObjectNamespace<EmDashPreviewDB>;
  const durableId = doNamespace.idFromName(token);
  // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- RPC type limitation
  return doNamespace.get(durableId) as unknown as PreviewDBStub;
}
/**
* Get the full DO stub for direct RPC calls (e.g. setTtlAlarm).
*/
function getFullStub(binding: string, token: string): DurableObjectStub<EmDashPreviewDB> {
  // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- Worker binding from untyped env
  const candidate = (env as Record<string, unknown>)[binding];
  if (!candidate) {
    throw new Error(`Playground binding "${binding}" not found in environment`);
  }
  // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- DO namespace from untyped env
  const doNamespace = candidate as DurableObjectNamespace<EmDashPreviewDB>;
  return doNamespace.get(doNamespace.idFromName(token));
}
/**
* Derive a created-at timestamp from the ULID session token.
*/
/**
 * Derive a created-at timestamp from the ULID session token.
 *
 * A ULID's first 10 characters encode a millisecond timestamp in Crockford
 * base32. Previously, non-ULID characters made `indexOf` return -1 and
 * silently produced a garbage timestamp (the try/catch never fired, since
 * no exception is thrown). Now any malformed token falls back to "now".
 *
 * @param token - playground session token (expected to be a 26-char ULID)
 * @returns ISO-8601 timestamp decoded from the token, or the current time
 *   when the token is malformed
 */
function getSessionCreatedAt(token: string): string {
  try {
    // Crockford base32 alphabet used by ULID (no I, L, O, U).
    const ENCODING = "0123456789ABCDEFGHJKMNPQRSTVWXYZ";
    const chars = token.toUpperCase().slice(0, 10);
    // Too short to contain a full timestamp -- treat as malformed.
    if (chars.length !== 10) {
      return new Date().toISOString();
    }
    let time = 0;
    for (const char of chars) {
      const value = ENCODING.indexOf(char);
      if (value < 0) {
        // Not a ULID character: fall back instead of decoding garbage.
        return new Date().toISOString();
      }
      time = time * 32 + value;
    }
    return new Date(time).toISOString();
  } catch {
    return new Date().toISOString();
  }
}
/**
 * Initialize a playground DO: run migrations, apply seed, create admin user.
 *
 * Idempotent: a persisted `emdash:setup_complete` row in the DO's options
 * table short-circuits re-initialization, so this is safe to call on every
 * request. Several INSERT steps deliberately swallow errors (best effort)
 * so a partially-initialized DO can recover on a later call.
 *
 * @param db - Kysely instance bound to this session's Durable Object
 * @param token - playground session token, used for logging only
 */
async function initializePlayground(
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  db: Kysely<any>,
  token: string,
): Promise<void> {
  // Check if already initialized (persisted in the DO)
  try {
    const { rows } = await sql<{ value: string }>`
      SELECT value FROM options WHERE name = ${"emdash:setup_complete"}
    `.execute(db);
    if (rows.length > 0) {
      return;
    }
  } catch {
    // Table doesn't exist yet -- first initialization
  }
  console.log(`[playground] Initializing session ${token}`);
  // 1. Run all EmDash migrations.
  // If the DO was previously initialized (persisted state) but somehow the
  // setup_complete flag is missing, migrations may partially fail on tables
  // that already exist. Treat migration errors as non-fatal if there are
  // tables present (i.e. the DO was previously initialized).
  // Dynamic import keeps this module loadable without pulling in emdash/db
  // until a playground session actually initializes.
  const { runMigrations } = await import("emdash/db");
  try {
    const migrations = await runMigrations(db);
    console.log(`[playground] Migrations applied: ${migrations.applied.length}`);
  } catch (migrationError) {
    // Check if this looks like a "tables already exist" error -- the DO
    // was probably initialized in a previous Worker lifetime and the
    // options check above failed for a transient reason.
    const msg = migrationError instanceof Error ? migrationError.message : String(migrationError);
    if (msg.includes("already exists")) {
      console.log(`[playground] Migrations skipped (tables already exist)`);
      // Mark setup complete if it wasn't (recover from partial init)
      try {
        await sql`
          INSERT OR IGNORE INTO options (name, value)
          VALUES (${"emdash:setup_complete"}, ${JSON.stringify(true)})
        `.execute(db);
      } catch {
        // Best effort
      }
      return;
    }
    throw migrationError;
  }
  // 2. Load and apply seed with content (skip media downloads)
  const { loadSeed } = await import("emdash/seed");
  const { applySeed } = await import("emdash");
  const seed = await loadSeed();
  const seedResult = await applySeed(db, seed, {
    includeContent: true,
    onConflict: "skip",
    skipMediaDownload: true,
  });
  console.log(
    `[playground] Seed applied: ${seedResult.collections.created} collections, ${seedResult.content.created} content entries`,
  );
  // 3. Create anonymous admin user
  const now = new Date().toISOString();
  try {
    await sql`
      INSERT INTO users (id, email, name, role, email_verified, created_at, updated_at)
      VALUES (${PLAYGROUND_USER_ID}, ${PLAYGROUND_USER_EMAIL}, ${PLAYGROUND_USER_NAME},
        ${PLAYGROUND_USER_ROLE}, ${1}, ${now}, ${now})
    `.execute(db);
  } catch {
    // User might already exist
  }
  // 4. Mark setup complete
  try {
    await sql`
      INSERT INTO options (name, value)
      VALUES (${"emdash:setup_complete"}, ${JSON.stringify(true)})
    `.execute(db);
  } catch {
    // May already exist
  }
  // 5. Set site title
  try {
    await sql`
      INSERT OR REPLACE INTO options (name, value)
      VALUES (${"emdash:site_title"}, ${JSON.stringify("EmDash Playground")})
    `.execute(db);
  } catch {
    // Non-critical
  }
  console.log(`[playground] Session ${token} initialized`);
}
/**
 * Inject playground toolbar HTML into an HTML response.
 *
 * Non-HTML responses pass through untouched. HTML responses are read fully,
 * the toolbar is spliced in immediately before `</body>`, and a new Response
 * is returned. The Content-Length header is dropped from the copy because
 * the injected body is longer than the original; forwarding the stale value
 * could truncate the response. The status line (status + statusText) is
 * preserved -- the previous version silently dropped statusText.
 *
 * @param response - the downstream response (its body is consumed when HTML)
 * @param config - toolbar render inputs (session age, TTL, edit-mode flag)
 * @returns the original response, or a new one with the toolbar injected
 */
async function injectPlaygroundToolbar(
  response: Response,
  config: { createdAt: string; ttl: number; editMode: boolean },
): Promise<Response> {
  const contentType = response.headers.get("content-type");
  if (!contentType?.includes("text/html")) return response;
  const html = await response.text();
  // No </body> to anchor on: return the body unchanged (Response init is
  // copied from the original).
  if (!html.includes("</body>")) return new Response(html, response);
  const toolbarHtml = renderPlaygroundToolbar(config);
  const injected = html.replace("</body>", `${toolbarHtml}</body>`);
  // Copy headers into a mutable Headers object so the now-stale
  // Content-Length can be removed (the runtime recomputes it).
  const headers = new Headers(response.headers);
  headers.delete("content-length");
  return new Response(injected, {
    status: response.status,
    statusText: response.statusText,
    headers,
  });
}
/**
 * Playground middleware entry point (registered by the integration, not by
 * the user's src/middleware.ts).
 *
 * Request flow:
 *  1. GET  /playground        -> ensure session cookie, serve loading page
 *     (or redirect straight to admin when already initialized).
 *  2. POST /_playground/init  -> initialize the session's DO (migrations,
 *     seed, admin user) and arm its TTL alarm; called by the loading page.
 *  3. GET  /_playground/reset -> clear the cookie and redirect to /playground
 *     so a brand-new DO/session is created.
 *  4. Everything else         -> block gated routes, lazily initialize the
 *     session DB if needed, stash the DB + playground user on locals, render
 *     downstream, and inject the playground toolbar into HTML responses.
 */
export const onRequest = defineMiddleware(async (context, next) => {
  const { url, cookies } = context;
  const ttl = DEFAULT_TTL;
  // Lazy-load binding name from virtual config
  const binding = getBindingName();
  // --- Entry point: /playground ---
  // Show a loading page immediately. The page calls /_playground/init via
  // fetch to do the actual setup, then redirects to admin when ready.
  // If the session is already initialized, skip the loading page.
  if (url.pathname === "/playground") {
    let token = cookies.get(COOKIE_NAME)?.value;
    if (!token) {
      // New session: mint a ULID token; the cookie expires with the TTL.
      token = ulid();
      cookies.set(COOKIE_NAME, token, {
        httpOnly: true,
        sameSite: "lax",
        path: "/",
        maxAge: ttl,
      });
    }
    // Already initialized? Skip the loading page and go straight to admin.
    if (initializedSessions.has(token)) {
      return context.redirect("/_emdash/admin");
    }
    return new Response(renderPlaygroundLoadingPage(), {
      status: 200,
      headers: { "content-type": "text/html; charset=utf-8" },
    });
  }
  // --- Init endpoint: called by the loading page ---
  if (url.pathname === "/_playground/init" && context.request.method === "POST") {
    const token = cookies.get(COOKIE_NAME)?.value;
    if (!token) {
      return Response.json(
        { error: { code: "NO_SESSION", message: "No playground session" } },
        { status: 400 },
      );
    }
    // Idempotent: a second init call for a known session is a no-op.
    if (initializedSessions.has(token)) {
      return Response.json({ ok: true });
    }
    const stub = getStub(binding, token);
    const dialect = new PreviewDODialect({ getStub: () => stub });
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const db = new Kysely<any>({ dialect });
    try {
      await initializePlayground(db, token);
      console.log(`[playground] Session ${token} initialized`);
      initializedSessions.add(token);
      // Arm the DO's self-destruct alarm so abandoned sessions expire.
      const fullStub = getFullStub(binding, token);
      console.log(`[playground] Setting TTL alarm for session ${token} (${ttl} seconds)`);
      await fullStub.setTtlAlarm(ttl);
      console.log(`[playground] TTL alarm set for session ${token}`);
      return Response.json({ ok: true });
    } catch (error) {
      console.error("Playground initialization failed:", error);
      if (error instanceof Error) {
        console.error(error.stack);
      }
      return Response.json(
        { error: { code: "PLAYGROUND_INIT_ERROR", message: "Failed to initialize playground" } },
        { status: 500 },
      );
    }
  }
  // --- Reset endpoint ---
  // Instead of dropping tables on the old DO (which is fragile and races
  // with cached state), just clear the cookie and redirect to /playground.
  // That creates a brand new DO with a fresh session -- clean slate.
  // The old DO expires via its TTL alarm.
  if (url.pathname === "/_playground/reset") {
    cookies.delete(COOKIE_NAME, { path: "/" });
    return context.redirect("/playground");
  }
  // --- Route gating ---
  if (isBlockedInPlayground(url.pathname)) {
    return Response.json(
      { error: { code: "PLAYGROUND_MODE", message: "Not available in playground mode" } },
      { status: 403 },
    );
  }
  // --- Resolve session ---
  const token = cookies.get(COOKIE_NAME)?.value;
  if (!token) {
    // No session -- redirect to /playground to create one
    return context.redirect("/playground");
  }
  // --- Set up DO database and ALS ---
  const stub = getStub(binding, token);
  const dialect = new PreviewDODialect({ getStub: () => stub });
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const db = new Kysely<any>({ dialect });
  // Ensure initialized (covers requests that skipped the /playground entry,
  // e.g. after a Worker restart cleared the in-memory session set).
  if (!initializedSessions.has(token)) {
    try {
      await initializePlayground(db, token);
      initializedSessions.add(token);
      const fullStub = getFullStub(binding, token);
      await fullStub.setTtlAlarm(ttl);
    } catch (error) {
      console.error("Playground initialization failed:", error);
      return Response.json(
        { error: { code: "PLAYGROUND_INIT_ERROR", message: "Failed to initialize playground" } },
        { status: 500 },
      );
    }
  }
  // Stash the DO database and user on locals so downstream middleware
  // (runtime init, request-context) can use them. We can't use ALS directly
  // because this middleware is in @emdash-cms/cloudflare and resolves to a
  // different AsyncLocalStorage instance than the emdash core package
  // (workerd loads dist modules separately from Vite's source modules).
  // The request-context middleware (same module context as the loader)
  // detects locals.__playgroundDb and wraps the render in runWithContext().
  // The __playgroundDb property is declared on App.Locals in emdash's locals.d.ts.
  Object.assign(context.locals, { __playgroundDb: db, user: PLAYGROUND_USER });
  const editMode = cookies.get("emdash-edit-mode")?.value === "true";
  const response = await next();
  return injectPlaygroundToolbar(response, {
    createdAt: getSessionCreatedAt(token),
    ttl,
    editMode,
  });
});

View File

@@ -0,0 +1,356 @@
/**
* Playground Toolbar
*
* A floating pill injected by the playground middleware into HTML responses.
* Shows edit toggle, time remaining, reset button, and deploy CTA.
* No dependencies -- plain HTML string with inline styles and a <script> tag.
*
* The edit toggle sets the emdash-edit-mode cookie, same as the normal
* visual editing toolbar. The data-edit-mode attribute on the toolbar div
* activates the hover outlines on [data-emdash-ref] elements via CSS :has().
*/
/**
 * Inputs for renderPlaygroundToolbar(). The values are serialized onto the
 * toolbar root element as data-* attributes (data-created-at, data-ttl,
 * data-edit-mode) and read back by the toolbar's inline script.
 */
export interface PlaygroundToolbarConfig {
  /** When the playground was created (ISO string); drives the countdown */
  createdAt: string;
  /** TTL in seconds */
  ttl: number;
  /** Whether edit mode is currently active */
  editMode: boolean;
}
// Precompiled global patterns for HTML-attribute escaping, hoisted to
// module scope so they are not rebuilt on every call.
const RE_AMP = /&/g;
const RE_QUOT = /"/g;
const RE_LT = /</g;
const RE_GT = />/g;
/**
 * Render the playground toolbar as an HTML fragment (markup + <style> +
 * inline <script>) intended to be spliced in immediately before </body>.
 *
 * @param config - session metadata; createdAt is HTML-escaped before being
 *   interpolated into an attribute, ttl/editMode are a number and a boolean
 *   and serialize safely as-is
 * @returns the complete toolbar fragment as a string
 */
export function renderPlaygroundToolbar(config: PlaygroundToolbarConfig): string {
  const { createdAt, ttl, editMode } = config;
  return `
<!-- EmDash Playground Toolbar -->
<div id="emdash-playground-toolbar" data-created-at="${escapeAttr(createdAt)}" data-ttl="${ttl}" data-edit-mode="${editMode}">
<div class="ec-pg-inner">
<span class="ec-pg-badge">Playground</span>
<div class="ec-pg-divider"></div>
<label class="ec-pg-toggle" title="Toggle visual editing">
<input type="checkbox" id="ec-pg-edit-toggle" ${editMode ? "checked" : ""} />
<span class="ec-pg-toggle-track">
<span class="ec-pg-toggle-thumb"></span>
</span>
<span class="ec-pg-toggle-label">Edit</span>
</label>
<div class="ec-pg-divider"></div>
<span class="ec-pg-status" id="ec-pg-status"></span>
<div class="ec-pg-divider"></div>
<button class="ec-pg-btn ec-pg-btn--reset" id="ec-pg-reset" title="Reset playground">
<svg class="ec-pg-icon" id="ec-pg-reset-icon" width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"><polyline points="23 4 23 10 17 10"/><path d="M20.49 15a9 9 0 1 1-2.12-9.36L23 10"/></svg>
</button>
<a class="ec-pg-btn ec-pg-btn--deploy" href="https://github.com/emdash-cms/emdash" target="_blank" rel="noopener">
Deploy your own
<svg width="12" height="12" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2.5" stroke-linecap="round" stroke-linejoin="round"><path d="M18 13v6a2 2 0 0 1-2 2H5a2 2 0 0 1-2-2V8a2 2 0 0 1 2-2h6"/><polyline points="15 3 21 3 21 9"/><line x1="10" y1="14" x2="21" y2="3"/></svg>
</a>
<button class="ec-pg-btn ec-pg-close" id="ec-pg-dismiss" title="Dismiss toolbar">
<svg width="12" height="12" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2.5" stroke-linecap="round" stroke-linejoin="round"><line x1="18" y1="6" x2="6" y2="18"/><line x1="6" y1="6" x2="18" y2="18"/></svg>
</button>
</div>
</div>
<style>
#emdash-playground-toolbar {
position: fixed;
bottom: 16px;
left: 50%;
transform: translateX(-50%);
z-index: 999999;
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, sans-serif;
font-size: 13px;
line-height: 1;
-webkit-font-smoothing: antialiased;
}
@media (max-width: 639px) {
#emdash-playground-toolbar {
max-width: calc(100vw - 2rem);
width: 100%;
}
}
#emdash-playground-toolbar.ec-pg-hidden {
display: none;
}
.ec-pg-inner {
display: flex;
align-items: center;
gap: 10px;
padding: 8px 12px 8px 16px;
background: #1a1a1a;
color: #e0e0e0;
border-radius: 999px;
box-shadow: 0 4px 24px rgba(0,0,0,0.3), 0 0 0 1px rgba(255,255,255,0.08);
white-space: nowrap;
user-select: none;
}
@media (max-width: 639px) {
.ec-pg-inner {
flex-wrap: wrap;
justify-content: center;
border-radius: .75rem;
}
}
.ec-pg-badge {
display: inline-flex;
align-items: center;
padding: 3px 8px;
border-radius: 999px;
font-size: 11px;
font-weight: 600;
letter-spacing: 0.02em;
text-transform: uppercase;
background: rgba(234,179,8,0.2);
color: #facc15;
}
.ec-pg-divider {
width: 1px;
height: 16px;
background: rgba(255,255,255,0.15);
}
/* Edit toggle */
.ec-pg-toggle {
display: inline-flex;
align-items: center;
gap: 6px;
cursor: pointer;
}
.ec-pg-toggle input {
position: absolute;
opacity: 0;
width: 0;
height: 0;
}
.ec-pg-toggle-track {
position: relative;
width: 28px;
height: 16px;
border-radius: 999px;
background: rgba(255,255,255,0.15);
transition: background 0.15s;
}
.ec-pg-toggle input:checked + .ec-pg-toggle-track {
background: #3b82f6;
}
.ec-pg-toggle-thumb {
position: absolute;
top: 2px;
left: 2px;
width: 12px;
height: 12px;
border-radius: 50%;
background: #fff;
transition: transform 0.15s;
}
.ec-pg-toggle input:checked + .ec-pg-toggle-track .ec-pg-toggle-thumb {
transform: translateX(12px);
}
.ec-pg-toggle-label {
font-size: 12px;
font-weight: 500;
color: #999;
transition: color 0.15s;
}
.ec-pg-toggle input:checked ~ .ec-pg-toggle-label {
color: #e0e0e0;
}
.ec-pg-status {
display: inline-flex;
align-items: center;
gap: 6px;
font-size: 12px;
color: #999;
}
.ec-pg-status--warning {
color: #fbbf24;
}
.ec-pg-status--expired {
color: #f87171;
}
.ec-pg-btn {
display: inline-flex;
align-items: center;
justify-content: center;
background: none;
border: none;
color: #888;
cursor: pointer;
padding: 4px;
border-radius: 4px;
transition: color 0.15s, background 0.15s;
font-family: inherit;
text-decoration: none;
}
.ec-pg-btn:hover {
color: #fff;
background: rgba(255,255,255,0.08);
}
.ec-pg-btn--deploy {
gap: 5px;
padding: 5px 10px;
font-size: 12px;
font-weight: 500;
color: #facc15;
background: rgba(234,179,8,0.12);
border-radius: 999px;
}
.ec-pg-btn--deploy:hover {
background: rgba(234,179,8,0.22);
color: #fde047;
}
.ec-pg-icon {
transition: transform 0.3s;
}
.ec-pg-btn:disabled {
opacity: 0.4;
cursor: not-allowed;
}
.ec-pg-btn:disabled:hover {
color: #888;
background: none;
}
@keyframes ec-pg-spin {
to { transform: rotate(360deg); }
}
.ec-pg-spinning .ec-pg-icon {
animation: ec-pg-spin 0.8s linear infinite;
}
/* Edit mode: editable hover styles (mirrors the visual editing toolbar CSS) */
body:has(#emdash-playground-toolbar[data-edit-mode="true"]) [data-emdash-ref] {
transition: box-shadow 0.15s, background-color 0.15s;
}
body:has(#emdash-playground-toolbar[data-edit-mode="true"]) [data-emdash-ref]:hover {
box-shadow: 0 0 0 2px rgba(59,130,246,0.5);
border-radius: 4px;
background-color: rgba(59,130,246,0.04);
cursor: text;
}
</style>
<script>
(function() {
var toolbar = document.getElementById("emdash-playground-toolbar");
var statusEl = document.getElementById("ec-pg-status");
var resetBtn = document.getElementById("ec-pg-reset");
var dismissBtn = document.getElementById("ec-pg-dismiss");
var editToggle = document.getElementById("ec-pg-edit-toggle");
if (!toolbar || !statusEl || !resetBtn || !dismissBtn || !editToggle) return;
var createdAt = toolbar.getAttribute("data-created-at");
var ttl = parseInt(toolbar.getAttribute("data-ttl") || "3600", 10);
function getRemaining() {
if (!createdAt) return 0;
var created = new Date(createdAt).getTime();
var expiresAt = created + ttl * 1000;
return Math.max(0, Math.floor((expiresAt - Date.now()) / 1000));
}
function formatRemaining(seconds) {
if (seconds <= 0) return "Expired";
var m = Math.floor(seconds / 60);
if (m >= 60) {
var h = Math.floor(m / 60);
m = m % 60;
return h + "h " + m + "m";
}
return m + "m remaining";
}
function updateStatus() {
var remaining = getRemaining();
statusEl.textContent = formatRemaining(remaining);
if (remaining <= 0) {
statusEl.className = "ec-pg-status ec-pg-status--expired";
} else if (remaining < 300) {
statusEl.className = "ec-pg-status ec-pg-status--warning";
} else {
statusEl.className = "ec-pg-status";
}
}
updateStatus();
// Update every 30s -- no seconds shown so no need for frequent updates
var interval = setInterval(updateStatus, 30000);
// Edit mode toggle -- sets cookie and reloads
editToggle.addEventListener("change", function() {
if (editToggle.checked) {
document.cookie = "emdash-edit-mode=true;path=/;samesite=lax";
toolbar.setAttribute("data-edit-mode", "true");
} else {
document.cookie = "emdash-edit-mode=;path=/;expires=Thu, 01 Jan 1970 00:00:00 GMT";
toolbar.setAttribute("data-edit-mode", "false");
}
if (document.startViewTransition) {
document.startViewTransition(function() { location.replace(location.href); });
} else {
location.replace(location.href);
}
});
resetBtn.addEventListener("click", function() {
resetBtn.disabled = true;
resetBtn.classList.add("ec-pg-spinning");
statusEl.className = "ec-pg-status";
statusEl.textContent = "Resetting\\u2026";
location.href = "/_playground/reset";
});
dismissBtn.addEventListener("click", function() {
toolbar.classList.add("ec-pg-hidden");
clearInterval(interval);
});
})();
</script>
`;
}
/** Escape a string for embedding in a double-quoted HTML attribute. */
function escapeAttr(str: string): string {
  // Ampersand is replaced first so entities produced by later passes are
  // not double-escaped.
  let escaped = str;
  for (const [pattern, entity] of [
    [/&/g, "&amp;"],
    [/"/g, "&quot;"],
    [/</g, "&lt;"],
    [/>/g, "&gt;"],
  ] as const) {
    escaped = escaped.replace(pattern, entity);
  }
  return escaped;
}

View File

@@ -0,0 +1,49 @@
/**
* Durable Object playground database -- RUNTIME ENTRY
*
* Provides a createDialect() that the virtual module system expects,
* plus re-exports the DO class and playground middleware.
*
* In playground mode, the actual DB connection is always set by the
* playground middleware via ALS (runWithContext). The createDialect
* here creates a "dummy" dialect that will be overridden per-request.
* If a query somehow runs without the middleware's ALS override,
* the dialect throws a clear error.
*
* This module imports from cloudflare:workers transitively.
* Do NOT import this at config time.
*/
import type { Dialect } from "kysely";
import { PreviewDODialect } from "./do-dialect.js";
import type { PreviewDBStub } from "./do-dialect.js";
import type { PreviewDOConfig } from "./do-types.js";
/**
* Create a playground DO dialect from config.
*
* Returns a dialect that throws if any query is executed outside of
* the playground middleware's ALS context. In normal operation, the
* middleware overrides this DB via runWithContext() on every request.
*
* This factory exists to satisfy the virtual module system's
* createDialect() contract. The EmDash runtime creates a singleton
* DB from it, but all actual queries go through the ALS-scoped DB.
*/
export function createDialect(_config: PreviewDOConfig): Dialect {
  // Sentinel stub: every query rejects until the playground middleware
  // swaps in a real per-session DB via its ALS context.
  const uninitializedStub: PreviewDBStub = {
    query(): Promise<{ rows: Record<string, unknown>[] }> {
      return Promise.reject(
        new Error(
          "Playground database not initialized. " +
            "Ensure the playground middleware is registered in src/middleware.ts " +
            "and all requests go through it.",
        ),
      );
    },
  };
  return new PreviewDODialect({ getStub: () => uninitializedStub });
}
export { EmDashPreviewDB } from "./do-class.js";
export { isBlockedInPlayground } from "./do-playground-routes.js";

View File

@@ -0,0 +1,220 @@
/**
* Preview Toolbar
*
* A floating pill injected by the preview middleware into HTML responses.
* Shows preview status, snapshot age, reload button, and errors.
* No dependencies — plain HTML string with inline styles and a <script> tag.
*/
/**
 * Inputs for the preview toolbar renderer. Each optional field, when
 * present, is emitted as a data-* attribute on the toolbar root element
 * (data-generated-at, data-source, data-error).
 */
export interface PreviewToolbarConfig {
  /** When the snapshot was generated (ISO string) */
  generatedAt?: string;
  /** Source site URL */
  source?: string;
  /** Error message if snapshot failed */
  error?: string;
}
// Precompiled global patterns for HTML-attribute escaping, hoisted to
// module scope so they are not rebuilt on every call.
const RE_AMP = /&/g;
const RE_QUOT = /"/g;
const RE_LT = /</g;
const RE_GT = />/g;
/**
 * Render the preview toolbar as a self-contained HTML fragment.
 *
 * The returned snippet is injected verbatim into HTML responses by the
 * preview middleware. It bundles its own <style> and <script>, so it has no
 * external dependencies; state (snapshot age, source URL, error text) is
 * handed to the inline script via data-* attributes on the root element.
 *
 * @param config - Snapshot metadata to surface in the toolbar.
 * @returns HTML fragment: toolbar markup + inline styles + behavior script.
 */
export function renderPreviewToolbar(config: PreviewToolbarConfig): string {
  const { generatedAt, source, error } = config;
  // Attribute values are escaped; absent values omit the attribute entirely.
  const generatedAtAttr = generatedAt ? ` data-generated-at="${escapeAttr(generatedAt)}"` : "";
  const sourceAttr = source ? ` data-source="${escapeAttr(source)}"` : "";
  const errorAttr = error ? ` data-error="${escapeAttr(error)}"` : "";
  return `
<!-- EmDash Preview Toolbar -->
<div id="emdash-preview-toolbar"${generatedAtAttr}${sourceAttr}${errorAttr}>
<div class="ec-ptb-inner">
<span class="ec-ptb-badge">Preview</span>
<div class="ec-ptb-divider"></div>
<span class="ec-ptb-status" id="ec-ptb-status"></span>
<button class="ec-ptb-btn" id="ec-ptb-reload" title="Reload snapshot">
<svg class="ec-ptb-icon" id="ec-ptb-reload-icon" width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"><polyline points="23 4 23 10 17 10"/><path d="M20.49 15a9 9 0 1 1-2.12-9.36L23 10"/></svg>
</button>
<button class="ec-ptb-btn ec-ptb-close" id="ec-ptb-dismiss" title="Dismiss toolbar">
<svg width="12" height="12" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2.5" stroke-linecap="round" stroke-linejoin="round"><line x1="18" y1="6" x2="6" y2="18"/><line x1="6" y1="6" x2="18" y2="18"/></svg>
</button>
</div>
</div>
<style>
#emdash-preview-toolbar {
position: fixed;
bottom: 16px;
left: 50%;
transform: translateX(-50%);
z-index: 999999;
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, sans-serif;
font-size: 13px;
line-height: 1;
-webkit-font-smoothing: antialiased;
}
#emdash-preview-toolbar.ec-ptb-hidden {
display: none;
}
.ec-ptb-inner {
display: flex;
align-items: center;
gap: 10px;
padding: 8px 12px 8px 16px;
background: #1a1a1a;
color: #e0e0e0;
border-radius: 999px;
box-shadow: 0 4px 24px rgba(0,0,0,0.3), 0 0 0 1px rgba(255,255,255,0.08);
white-space: nowrap;
user-select: none;
}
.ec-ptb-badge {
display: inline-flex;
align-items: center;
padding: 3px 8px;
border-radius: 999px;
font-size: 11px;
font-weight: 600;
letter-spacing: 0.02em;
text-transform: uppercase;
background: rgba(139,92,246,0.2);
color: #a78bfa;
}
.ec-ptb-divider {
width: 1px;
height: 16px;
background: rgba(255,255,255,0.15);
}
.ec-ptb-status {
display: inline-flex;
align-items: center;
gap: 6px;
font-size: 12px;
color: #999;
}
.ec-ptb-status--error {
color: #f87171;
}
.ec-ptb-btn {
display: inline-flex;
align-items: center;
justify-content: center;
background: none;
border: none;
color: #888;
cursor: pointer;
padding: 4px;
border-radius: 4px;
transition: color 0.15s, background 0.15s;
font-family: inherit;
}
.ec-ptb-btn:hover {
color: #fff;
background: rgba(255,255,255,0.08);
}
.ec-ptb-icon {
transition: transform 0.3s;
}
.ec-ptb-btn:disabled {
opacity: 0.4;
cursor: not-allowed;
}
.ec-ptb-btn:disabled:hover {
color: #888;
background: none;
}
@keyframes ec-ptb-spin {
to { transform: rotate(360deg); }
}
.ec-ptb-spinning .ec-ptb-icon {
animation: ec-ptb-spin 0.8s linear infinite;
}
</style>
<script>
(function() {
var toolbar = document.getElementById("emdash-preview-toolbar");
var statusEl = document.getElementById("ec-ptb-status");
var reloadBtn = document.getElementById("ec-ptb-reload");
var dismissBtn = document.getElementById("ec-ptb-dismiss");
if (!toolbar || !statusEl || !reloadBtn || !dismissBtn) return;
var generatedAt = toolbar.getAttribute("data-generated-at");
var source = toolbar.getAttribute("data-source");
var error = toolbar.getAttribute("data-error");
function formatAge(isoString) {
if (!isoString) return null;
var then = new Date(isoString).getTime();
var now = Date.now();
var seconds = Math.floor((now - then) / 1000);
if (seconds < 60) return "just now";
var minutes = Math.floor(seconds / 60);
if (minutes < 60) return minutes + "m ago";
var hours = Math.floor(minutes / 60);
if (hours < 24) return hours + "h ago";
return Math.floor(hours / 24) + "d ago";
}
function updateStatus() {
if (error) {
statusEl.className = "ec-ptb-status ec-ptb-status--error";
statusEl.textContent = error;
return;
}
var age = formatAge(generatedAt);
statusEl.className = "ec-ptb-status";
statusEl.textContent = age ? "Snapshot " + age : "Preview mode";
}
updateStatus();
// Update age display every 30s
var ageInterval = setInterval(updateStatus, 30000);
// Reload: hit the server endpoint which clears the httpOnly session cookie
// and redirects back with the original signed params for a fresh snapshot.
reloadBtn.addEventListener("click", function() {
reloadBtn.disabled = true;
reloadBtn.classList.add("ec-ptb-spinning");
statusEl.className = "ec-ptb-status";
statusEl.textContent = "Reloading\u2026";
location.href = "/_preview/reload";
});
// Dismiss
dismissBtn.addEventListener("click", function() {
toolbar.classList.add("ec-ptb-hidden");
clearInterval(ageInterval);
});
})();
</script>
`;
}
/**
 * Escape a string for safe embedding inside a double-quoted HTML attribute.
 * The ampersand is replaced first so it does not re-escape later entities.
 */
function escapeAttr(str: string): string {
  let escaped = str.replace(RE_AMP, "&amp;");
  escaped = escaped.replace(RE_QUOT, "&quot;");
  escaped = escaped.replace(RE_LT, "&lt;");
  escaped = escaped.replace(RE_GT, "&gt;");
  return escaped;
}

View File

@@ -0,0 +1,286 @@
/**
* @emdash-cms/cloudflare
*
* Cloudflare adapters for EmDash:
* - D1 database adapter
* - R2 storage adapter
* - Cloudflare Access authentication
* - Worker Loader sandbox for plugins
*
* This is the CONFIG-TIME entry point. It does NOT import cloudflare:workers
* and is safe to use in astro.config.mjs.
*
* For runtime exports (PluginBridge, authenticate), import from the specific
* runtime entrypoints:
* - @emdash-cms/cloudflare/sandbox (PluginBridge, createSandboxRunner)
* - @emdash-cms/cloudflare/auth (authenticate)
*
* @example
* ```ts
* import emdash from "emdash/astro";
* import { d1, r2, access, sandbox } from "@emdash-cms/cloudflare";
*
* export default defineConfig({
* integrations: [
* emdash({
* database: d1({ binding: "DB" }),
* storage: r2({ binding: "MEDIA" }),
* auth: access({ teamDomain: "myteam.cloudflareaccess.com" }),
* sandboxRunner: sandbox(),
* }),
* ],
* });
* ```
*/
import type { AuthDescriptor, DatabaseDescriptor, StorageDescriptor } from "emdash";
import type { PreviewDOConfig } from "./db/do-types.js";
/**
 * D1 configuration
 */
export interface D1Config {
  /**
   * Name of the D1 binding in wrangler.toml
   */
  binding: string;
  /**
   * Read replication session mode.
   *
   * - `"disabled"` — No sessions. All queries go to primary. (default)
   * - `"auto"` — Automatic session management. Anonymous requests use
   *   `"first-unconstrained"` (nearest replica). Authenticated requests
   *   use bookmark cookies for read-your-writes consistency.
   * - `"primary-first"` — Like `"auto"`, but the first query in every
   *   session goes to the primary. Use this if your site has very
   *   frequent writes and you need stronger consistency guarantees
   *   at the cost of higher read latency.
   *
   * Read replication must also be enabled on the D1 database itself
   * (via dashboard or REST API).
   */
  session?: "disabled" | "auto" | "primary-first";
  /**
   * Cookie name for storing the session bookmark.
   * Only used when session is `"auto"` or `"primary-first"`.
   *
   * @default "__em_d1_bookmark"
   */
  bookmarkCookie?: string;
}
/**
 * R2 storage configuration
 */
export interface R2StorageConfig {
  /**
   * Name of the R2 binding in wrangler.toml
   */
  binding: string;
  /**
   * Public URL for accessing files (optional CDN)
   */
  publicUrl?: string;
}
/**
 * Configuration for Cloudflare Access authentication
 *
 * Note: this interface is duplicated in the auth runtime module
 * (@emdash-cms/cloudflare/auth) for runtime usage. Keep them in sync.
 */
export interface AccessConfig {
  /**
   * Your Cloudflare Access team domain
   * @example "myteam.cloudflareaccess.com"
   */
  teamDomain: string;
  /**
   * Application Audience (AUD) tag from Access application settings.
   * For Cloudflare Workers, use `audienceEnvVar` instead to read at runtime.
   */
  audience?: string;
  /**
   * Environment variable name containing the audience tag.
   * Read at runtime from environment.
   * @default "CF_ACCESS_AUDIENCE"
   */
  audienceEnvVar?: string;
  /**
   * Automatically create EmDash users on first login
   * @default true
   */
  autoProvision?: boolean;
  /**
   * Role level for users not matching any group in roleMapping
   * @default 30 (Editor)
   */
  defaultRole?: number;
  /**
   * Update user's role on each login based on current IdP groups
   * When false, role is only set on first provisioning
   * @default false
   */
  syncRoles?: boolean;
  /**
   * Map IdP group names to EmDash role levels
   * First match wins if user is in multiple groups
   *
   * @example
   * ```ts
   * roleMapping: {
   *   "Admins": 50,        // Admin
   *   "Developers": 40,    // Developer
   *   "Content Team": 30,  // Editor
   * }
   * ```
   */
  roleMapping?: Record<string, number>;
}
/**
 * Cloudflare D1 database adapter
 *
 * For Cloudflare Workers with a D1 binding. Migrations run automatically at
 * setup time — no manual SQL files needed — and a custom introspector works
 * around D1's restriction on cross-joins with pragma_table_info().
 *
 * @param config - D1 binding name plus optional session/bookmark settings.
 * @returns Descriptor consumed by the EmDash runtime to load the adapter.
 *
 * @example
 * ```ts
 * database: d1({ binding: "DB" })
 * ```
 */
export function d1(config: D1Config): DatabaseDescriptor {
  const descriptor: DatabaseDescriptor = {
    entrypoint: "@emdash-cms/cloudflare/db/d1",
    config,
    type: "sqlite",
    supportsRequestScope: true,
  };
  return descriptor;
}
export type { PreviewDOConfig } from "./db/do-types.js";
/**
 * Durable Object preview database adapter
 *
 * Every preview session receives an isolated SQLite database inside a DO,
 * populated from a snapshot of the source EmDash site.
 *
 * Not for production use — preview only.
 *
 * @example
 * ```ts
 * database: previewDatabase({ binding: "PREVIEW_DB" })
 * ```
 */
export function previewDatabase(config: PreviewDOConfig): DatabaseDescriptor {
  const descriptor: DatabaseDescriptor = {
    entrypoint: "@emdash-cms/cloudflare/db/do",
    config,
    type: "sqlite",
  };
  return descriptor;
}
/**
 * Durable Object playground database adapter
 *
 * Every playground session receives an isolated SQLite database inside a DO,
 * populated from a seed file with migrations run at init time. Unlike
 * preview, playground is writable and has admin access.
 *
 * Not for production use -- playground/demo only.
 *
 * @example
 * ```ts
 * database: playgroundDatabase({ binding: "PLAYGROUND_DB" })
 * ```
 */
export function playgroundDatabase(config: PreviewDOConfig): DatabaseDescriptor {
  const descriptor: DatabaseDescriptor = {
    entrypoint: "@emdash-cms/cloudflare/db/playground",
    config,
    type: "sqlite",
  };
  return descriptor;
}
/**
 * Cloudflare R2 binding adapter
 *
 * Uses an R2 binding directly when running on Cloudflare Workers.
 * Does NOT support signed upload URLs (use s3() with R2 credentials instead).
 *
 * Requires an R2 binding in wrangler.toml:
 * ```toml
 * [[r2_buckets]]
 * binding = "MEDIA"
 * bucket_name = "my-media-bucket"
 * ```
 *
 * @example
 * ```ts
 * storage: r2({ binding: "MEDIA" })
 * ```
 */
export function r2(config: R2StorageConfig): StorageDescriptor {
  // Copy only the known fields so stray properties never leak into the
  // serialized descriptor config.
  const { binding, publicUrl } = config;
  return {
    entrypoint: "@emdash-cms/cloudflare/storage/r2",
    config: { binding, publicUrl },
  };
}
/**
 * Cloudflare Access authentication adapter
 *
 * Configures EmDash to authenticate via Cloudflare Access. When Access is
 * configured, passkey auth is disabled.
 *
 * @example
 * ```ts
 * auth: access({
 *   teamDomain: "myteam.cloudflareaccess.com",
 *   audience: "abc123...",
 *   roleMapping: {
 *     "Admins": 50,
 *     "Editors": 30,
 *   },
 * })
 * ```
 */
export function access(config: AccessConfig): AuthDescriptor {
  const descriptor: AuthDescriptor = {
    type: "cloudflare-access",
    entrypoint: "@emdash-cms/cloudflare/auth",
    config,
  };
  return descriptor;
}
/**
 * Cloudflare Worker Loader sandbox adapter
 *
 * Returns the module path of the Cloudflare sandbox runner, for use as the
 * `sandboxRunner` config option.
 *
 * @example
 * ```ts
 * sandboxRunner: sandbox()
 * ```
 */
export function sandbox(): string {
  const SANDBOX_ENTRYPOINT = "@emdash-cms/cloudflare/sandbox";
  return SANDBOX_ENTRYPOINT;
}
// Re-export media providers (config-time)
export { cloudflareImages, type CloudflareImagesConfig } from "./media/images.js";
export { cloudflareStream, type CloudflareStreamConfig } from "./media/stream.js";
// Re-export cache provider config helper (config-time)
export { cloudflareCache, type CloudflareCacheConfig } from "./cache/config.js";

View File

@@ -0,0 +1,353 @@
/**
* Cloudflare Images Runtime Module
*
* This module is imported at runtime by the media provider system.
* It contains the actual provider implementation that interacts with the Cloudflare API.
*/
import { env } from "cloudflare:workers";
import type {
MediaProvider,
MediaListOptions,
MediaValue,
EmbedOptions,
EmbedResult,
CreateMediaProviderFn,
} from "emdash/media";
import type { CloudflareImagesConfig } from "./images.js";
/** Safely extract a number from an unknown value; undefined for anything else. */
function toNumber(value: unknown): number | undefined {
  if (typeof value === "number") {
    return value;
  }
  return undefined;
}
/**
 * Resolve a config value, falling back to an environment variable when no
 * direct value was provided.
 *
 * @param directValue - Value supplied directly in config; wins when truthy.
 * @param envVarName - Custom env var name from config, if any.
 * @param defaultEnvVar - Env var name to use when no custom name is given.
 * @param serviceName - Used to label the error when nothing resolves.
 * @throws Error when neither the direct value nor the env var is set.
 */
function resolveEnvValue(
  directValue: string | undefined,
  envVarName: string | undefined,
  defaultEnvVar: string,
  serviceName: string,
): string {
  if (directValue) {
    return directValue;
  }
  const envVar = envVarName || defaultEnvVar;
  // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- Worker binding accessed from untyped env object
  const fromEnv = (env as Record<string, string | undefined>)[envVar];
  if (fromEnv) {
    return fromEnv;
  }
  throw new Error(
    `${serviceName}: Missing ${envVar}. Set it as an environment variable or provide it directly in config.`,
  );
}
/**
 * Runtime implementation for the Cloudflare Images provider.
 *
 * All credential lookups (account id, account hash, API token) happen lazily
 * via resolveEnvValue() inside getter closures, so env vars are read at
 * request time rather than module init time.
 */
export const createMediaProvider: CreateMediaProviderFn<CloudflareImagesConfig> = (config) => {
  const { deliveryDomain, defaultVariant = "public" } = config;
  // Lazy getters - resolve env vars at request time, not module init time
  const getAccountId = () =>
    resolveEnvValue(config.accountId, config.accountIdEnvVar, "CF_ACCOUNT_ID", "Cloudflare Images");
  const getAccountHash = () =>
    resolveEnvValue(
      config.accountHash,
      config.accountHashEnvVar,
      "CF_IMAGES_ACCOUNT_HASH",
      "Cloudflare Images",
    );
  const getApiToken = () =>
    resolveEnvValue(config.apiToken, config.apiTokenEnvVar, "CF_IMAGES_TOKEN", "Cloudflare Images");
  const getApiBase = () =>
    `https://api.cloudflare.com/client/v4/accounts/${getAccountId()}/images/v1`;
  const getHeaders = () => ({ Authorization: `Bearer ${getApiToken()}` });
  const getDeliveryBase = () =>
    deliveryDomain ? `https://${deliveryDomain}` : "https://imagedelivery.net";
  // Build a delivery URL with flexible variant transforms.
  // With no transforms, the configured default named variant is used instead.
  const buildUrl = (imageId: string, transforms?: { w?: number; h?: number; fit?: string }) => {
    const base = `${getDeliveryBase()}/${getAccountHash()}/${imageId}`;
    if (!transforms || Object.keys(transforms).length === 0) {
      return `${base}/${defaultVariant}`;
    }
    const parts: string[] = [];
    if (transforms.w) parts.push(`w=${transforms.w}`);
    if (transforms.h) parts.push(`h=${transforms.h}`);
    if (transforms.fit) parts.push(`fit=${transforms.fit}`);
    return `${base}/${parts.join(",")}`;
  };
  // Fetch image dimensions via the format=json delivery endpoint
  // This is a public endpoint that doesn't require authentication
  // Returns null on any network/HTTP failure (dimensions are best-effort).
  const fetchDimensions = async (
    imageId: string,
  ): Promise<{ width: number; height: number } | null> => {
    const url = `${getDeliveryBase()}/${getAccountHash()}/${imageId}/format=json`;
    try {
      const response = await fetch(url);
      if (!response.ok) return null;
      const data: ImageJsonResponse = await response.json();
      return { width: data.width, height: data.height };
    } catch {
      return null;
    }
  };
  const provider: MediaProvider = {
    async list(options: MediaListOptions) {
      const apiBase = getApiBase();
      const headers = getHeaders();
      const params = new URLSearchParams();
      if (options.cursor) {
        params.set("continuation_token", options.cursor);
      }
      if (options.limit) {
        params.set("per_page", String(options.limit));
      }
      const url = `${apiBase}?${params}`;
      const response = await fetch(url, { headers });
      if (!response.ok) {
        throw new Error(`Cloudflare Images API error: ${response.status}`);
      }
      const data: CloudflareImagesListResponse = await response.json();
      if (!data.success) {
        throw new Error(
          `Cloudflare Images API error: ${data.errors?.[0]?.message || "Unknown error"}`,
        );
      }
      // Filter out images that require signed URLs (not supported yet)
      const publicImages = data.result.images.filter((img) => !img.requireSignedURLs);
      // Fetch dimensions for all images in parallel
      // (one format=json request per listed image, bounded by the page size)
      const dimensionsMap = new Map<string, { width: number; height: number }>();
      const dimensionResults = await Promise.all(
        publicImages.map(async (img) => {
          const dims = await fetchDimensions(img.id);
          return { id: img.id, dims };
        }),
      );
      for (const { id, dims } of dimensionResults) {
        if (dims) dimensionsMap.set(id, dims);
      }
      return {
        items: publicImages.map((img) => {
          const dims = dimensionsMap.get(img.id);
          return {
            id: img.id,
            filename: img.filename || img.id,
            mimeType: "image/jpeg", // CF Images doesn't expose original mime type
            width: dims?.width ?? toNumber(img.meta?.width),
            height: dims?.height ?? toNumber(img.meta?.height),
            // Use 400px wide preview for grid thumbnails (good for 2x retina on ~200px grid)
            previewUrl: buildUrl(img.id, { w: 400, fit: "scale-down" }),
            meta: {
              variants: img.variants,
              uploaded: img.uploaded,
            },
          };
        }),
        nextCursor: data.result.continuation_token || undefined,
      };
    },
    async get(id: string) {
      const apiBase = getApiBase();
      const headers = getHeaders();
      const url = `${apiBase}/${id}`;
      const response = await fetch(url, { headers });
      if (!response.ok) {
        if (response.status === 404) return null;
        throw new Error(`Cloudflare Images API error: ${response.status}`);
      }
      const data: CloudflareImageResponse = await response.json();
      if (!data.success) {
        return null;
      }
      const img = data.result;
      // Don't return images that require signed URLs (not supported yet)
      if (img.requireSignedURLs) {
        return null;
      }
      // Fetch dimensions via format=json endpoint
      const dims = await fetchDimensions(img.id);
      return {
        id: img.id,
        filename: img.filename || img.id,
        // CF Images doesn't expose the original mime type
        mimeType: "image/jpeg",
        width: dims?.width ?? toNumber(img.meta?.width),
        height: dims?.height ?? toNumber(img.meta?.height),
        // Use larger preview for detail view
        previewUrl: buildUrl(img.id, { w: 800, fit: "scale-down" }),
        meta: {
          variants: img.variants,
          uploaded: img.uploaded,
        },
      };
    },
    async upload(input) {
      const apiBase = getApiBase();
      const apiToken = getApiToken();
      const formData = new FormData();
      formData.append("file", input.file, input.filename);
      // Ensure uploaded images are public (don't require signed URLs)
      formData.append("requireSignedURLs", "false");
      // Add metadata if provided
      const metadata: Record<string, string> = {};
      if (input.alt) {
        metadata.alt = input.alt;
      }
      if (Object.keys(metadata).length > 0) {
        formData.append("metadata", JSON.stringify(metadata));
      }
      const response = await fetch(apiBase, {
        method: "POST",
        headers: {
          Authorization: `Bearer ${apiToken}`,
          // Don't set Content-Type - let browser set it with boundary
        },
        body: formData,
      });
      if (!response.ok) {
        const error = await response.text();
        throw new Error(`Cloudflare Images upload failed: ${error}`);
      }
      const data: CloudflareImageResponse = await response.json();
      if (!data.success) {
        throw new Error(
          `Cloudflare Images upload failed: ${data.errors?.[0]?.message || "Unknown error"}`,
        );
      }
      const img = data.result;
      return {
        id: img.id,
        filename: img.filename || input.filename,
        // CF Images doesn't expose the original mime type
        mimeType: "image/jpeg",
        width: toNumber(img.meta?.width),
        height: toNumber(img.meta?.height),
        previewUrl: buildUrl(img.id, { w: 400, fit: "scale-down" }),
        meta: {
          variants: img.variants,
          uploaded: img.uploaded,
        },
      };
    },
    async delete(id: string) {
      const apiBase = getApiBase();
      const headers = getHeaders();
      const response = await fetch(`${apiBase}/${id}`, {
        method: "DELETE",
        headers,
      });
      // 404 is treated as already-deleted, not a failure
      if (!response.ok && response.status !== 404) {
        throw new Error(`Cloudflare Images delete failed: ${response.status}`);
      }
    },
    getEmbed(value: MediaValue, options?: EmbedOptions): EmbedResult {
      const accountHash = getAccountHash();
      const deliveryBase = getDeliveryBase();
      const baseUrl = `${deliveryBase}/${accountHash}/${value.id}`;
      // Helper to build URL with transforms
      const buildSrc = (opts: { width?: number; height?: number; format?: string }) => {
        const t: string[] = [];
        if (opts.width) t.push(`w=${opts.width}`);
        if (opts.height) t.push(`h=${opts.height}`);
        if (opts.format) t.push(`f=${opts.format}`);
        t.push("fit=scale-down");
        return `${baseUrl}/${t.join(",")}`;
      };
      // Build src URL - always include transforms (CF Images requires a variant)
      const width = options?.width ?? value.width ?? 1200;
      const height = options?.height ?? value.height;
      const src = buildSrc({ width, height, format: options?.format });
      return {
        type: "image",
        src,
        width: options?.width ?? value.width,
        height: options?.height ?? value.height,
        alt: value.alt,
        // Provide getSrc for dynamic resizing (e.g., responsive images)
        getSrc: buildSrc,
      };
    },
    getThumbnailUrl(id: string, _mimeType?: string, options?: { width?: number; height?: number }) {
      // For images, return a sized delivery URL
      const width = options?.width || 400;
      const height = options?.height;
      return buildUrl(id, { w: width, h: height, fit: "scale-down" });
    },
  };
  return provider;
};
// Cloudflare API response types

/** Envelope for the paginated list endpoint (GET /images/v1). */
interface CloudflareImagesListResponse {
  success: boolean;
  errors?: Array<{ message: string }>;
  result: {
    images: CloudflareImage[];
    /** Cursor for the next page; absent on the last page. */
    continuation_token?: string;
  };
}
/** Envelope for single-image endpoints (get/upload). */
interface CloudflareImageResponse {
  success: boolean;
  errors?: Array<{ message: string }>;
  result: CloudflareImage;
}
/** Image record as returned by the Cloudflare Images API. */
interface CloudflareImage {
  id: string;
  filename?: string;
  uploaded: string;
  // Signed-URL images are filtered out by this provider (unsupported).
  requireSignedURLs: boolean;
  variants: string[];
  meta?: Record<string, unknown>;
}
// Response from format=json delivery endpoint
interface ImageJsonResponse {
  width: number;
  height: number;
  original: {
    file_size: number;
    width: number;
    height: number;
    format: string;
  };
}

View File

@@ -0,0 +1,114 @@
/**
* Cloudflare Images Media Provider
*
* Provides integration with Cloudflare Images for image hosting and transformation.
*
* Features:
* - Browse uploaded images
* - Upload new images
* - Delete images
* - URL-based image transformations (resize, format conversion, etc.)
*
* @see https://developers.cloudflare.com/images/
*/
import type { MediaProviderDescriptor } from "emdash/media";
/**
 * Cloudflare Images configuration
 *
 * Every credential field may be given directly or resolved from an
 * environment variable at runtime via the corresponding `*EnvVar` field.
 */
export interface CloudflareImagesConfig {
  /**
   * Cloudflare Account ID (for API calls)
   * If not provided, reads from accountIdEnvVar at runtime
   */
  accountId?: string;
  /**
   * Environment variable name containing the Account ID
   * @default "CF_ACCOUNT_ID"
   */
  accountIdEnvVar?: string;
  /**
   * Cloudflare Images Account Hash (for delivery URLs)
   * This is different from the Account ID - find it in the Cloudflare dashboard
   * under Images > Overview > "Account Hash"
   * If not provided, reads from accountHashEnvVar at runtime
   */
  accountHash?: string;
  /**
   * Environment variable name containing the Account Hash
   * @default "CF_IMAGES_ACCOUNT_HASH"
   */
  accountHashEnvVar?: string;
  /**
   * API Token with Images permissions
   * If not provided, reads from apiTokenEnvVar at runtime
   * Should have "Cloudflare Images: Read" and "Cloudflare Images: Edit" permissions
   */
  apiToken?: string;
  /**
   * Environment variable name containing the API token
   * @default "CF_IMAGES_TOKEN"
   */
  apiTokenEnvVar?: string;
  /**
   * Custom delivery domain (optional)
   * If not specified, uses imagedelivery.net
   * @example "images.example.com"
   */
  deliveryDomain?: string;
  /**
   * Default variant to use for display
   * @default "public"
   */
  defaultVariant?: string;
}
// Cloudflare Images icon (inline SVG as data URL)
const IMAGES_ICON = `data:image/svg+xml,${encodeURIComponent('<svg xmlns="http://www.w3.org/2000/svg" width="64" height="64" fill="none" viewBox="0 0 64 64"><path fill="#F63" d="M56 11.92H8l-2 2v39.87l2 2h48l2-2V13.92l-2-2Zm-2 4v18.69l-8-6.55-2.62.08-5.08 4.68-5.43-4-2.47.08-14 11.7-6.4-4.4V15.92h44ZM10 51.79V41.08l5.3 3.7 2.42-.11L31.75 33l5.5 4 2.54-.14 5-4.63L54 39.77v12l-44 .02Z"/><path fill="#F63" d="M19.08 32.16a4 4 0 1 0 0-8 4 4 0 0 0 0 8Z"/></svg>')}`;
/**
 * Cloudflare Images media provider descriptor (config-time).
 *
 * The actual implementation lives in the runtime entrypoint referenced by
 * `entrypoint`; this only declares identity, icon, and capabilities.
 *
 * @example
 * ```ts
 * import { cloudflareImages } from "@emdash-cms/cloudflare";
 *
 * emdash({
 *   mediaProviders: [
 *     // Uses CF_ACCOUNT_ID and CF_IMAGES_TOKEN env vars by default
 *     cloudflareImages({}),
 *
 *     // Or with custom env var names
 *     cloudflareImages({
 *       accountIdEnvVar: "MY_CF_ACCOUNT",
 *       apiTokenEnvVar: "MY_CF_IMAGES_KEY",
 *     }),
 *   ],
 * })
 * ```
 */
export function cloudflareImages(
  config: CloudflareImagesConfig,
): MediaProviderDescriptor<CloudflareImagesConfig> {
  const capabilities = {
    browse: true,
    search: false, // Images API doesn't support search
    upload: true,
    delete: true,
  };
  return {
    id: "cloudflare-images",
    name: "Cloudflare Images",
    icon: IMAGES_ICON,
    entrypoint: "@emdash-cms/cloudflare/media/images-runtime",
    capabilities,
    config,
  };
}

View File

@@ -0,0 +1,392 @@
/**
* Cloudflare Stream Runtime Module
*
* This module is imported at runtime by the media provider system.
* It contains the actual provider implementation that interacts with the Cloudflare API.
*/
import { env } from "cloudflare:workers";
import type {
MediaProvider,
MediaListOptions,
MediaValue,
EmbedOptions,
EmbedResult,
CreateMediaProviderFn,
} from "emdash/media";
import type { CloudflareStreamConfig } from "./stream.js";
/** Safely extract a string from an unknown value; undefined for anything else. */
function toString(value: unknown): string | undefined {
  if (typeof value !== "string") {
    return undefined;
  }
  return value;
}
/** Type guard: true for plain object-like values (not null, not arrays). */
function isRecord(value: unknown): value is Record<string, unknown> {
  if (value == null || Array.isArray(value)) {
    return false;
  }
  return typeof value === "object";
}
/**
 * Resolve a config value, falling back to an environment variable when no
 * direct value was provided.
 *
 * @param directValue - Value supplied directly in config; wins when truthy.
 * @param envVarName - Custom env var name from config, if any.
 * @param defaultEnvVar - Env var name to use when no custom name is given.
 * @param serviceName - Used to label the error when nothing resolves.
 * @throws Error when neither the direct value nor the env var is set.
 */
function resolveEnvValue(
  directValue: string | undefined,
  envVarName: string | undefined,
  defaultEnvVar: string,
  serviceName: string,
): string {
  if (directValue) {
    return directValue;
  }
  const envVar = envVarName || defaultEnvVar;
  // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- Worker binding accessed from untyped env object
  const resolved = (env as Record<string, string | undefined>)[envVar];
  if (resolved) {
    return resolved;
  }
  throw new Error(
    `${serviceName}: Missing ${envVar}. Set it as an environment variable or provide it directly in config.`,
  );
}
/**
 * Runtime implementation for the Cloudflare Stream provider.
 *
 * Unlike the Images provider, credentials are resolved eagerly when the
 * provider is created (resolveEnvValue throws immediately if they are
 * missing), so a misconfigured deployment fails fast.
 */
export const createMediaProvider: CreateMediaProviderFn<CloudflareStreamConfig> = (config) => {
  const { customerSubdomain, controls = true, autoplay = false, loop = false, muted } = config;
  // Resolve credentials from config or env vars
  const accountId = resolveEnvValue(
    config.accountId,
    config.accountIdEnvVar,
    "CF_ACCOUNT_ID",
    "Cloudflare Stream",
  );
  const apiToken = resolveEnvValue(
    config.apiToken,
    config.apiTokenEnvVar,
    "CF_STREAM_TOKEN",
    "Cloudflare Stream",
  );
  const apiBase = `https://api.cloudflare.com/client/v4/accounts/${accountId}/stream`;
  const headers = { Authorization: `Bearer ${apiToken}` };
  // Muted defaults to true if autoplay is enabled (browser requirement)
  const isMuted = muted ?? autoplay;
  const provider: MediaProvider = {
    async list(options: MediaListOptions) {
      const params = new URLSearchParams();
      // Stream uses "after" for cursor-based pagination
      if (options.cursor) {
        params.set("after", options.cursor);
      }
      // Stream uses "asc" boolean, default is newest first
      params.set("asc", "false");
      // Search by name if query provided
      if (options.query) {
        params.set("search", options.query);
      }
      // NOTE(review): options.limit is not forwarded to the API here —
      // confirm whether a page-size parameter should be set.
      const url = `${apiBase}?${params}`;
      const response = await fetch(url, { headers });
      if (!response.ok) {
        throw new Error(`Cloudflare Stream API error: ${response.status}`);
      }
      const data: CloudflareStreamListResponse = await response.json();
      if (!data.success) {
        throw new Error(
          `Cloudflare Stream API error: ${data.errors?.[0]?.message || "Unknown error"}`,
        );
      }
      // Get the last video's UID for cursor-based pagination
      const lastVideo = data.result.at(-1);
      const nextCursor = lastVideo?.uid;
      return {
        items: data.result.map((video) => ({
          id: video.uid,
          filename: toString(video.meta?.name) || video.uid,
          mimeType: "video/mp4",
          width: video.input?.width,
          height: video.input?.height,
          previewUrl: video.thumbnail,
          meta: {
            duration: video.duration,
            playback: video.playback,
            status: video.status,
            created: video.created,
            modified: video.modified,
            size: video.size,
          },
        })),
        nextCursor: data.result.length > 0 ? nextCursor : undefined,
      };
    },
    async get(id: string) {
      const url = `${apiBase}/${id}`;
      const response = await fetch(url, { headers });
      if (!response.ok) {
        if (response.status === 404) return null;
        throw new Error(`Cloudflare Stream API error: ${response.status}`);
      }
      const data: CloudflareStreamResponse = await response.json();
      if (!data.success) {
        return null;
      }
      const video = data.result;
      return {
        id: video.uid,
        filename: toString(video.meta?.name) || video.uid,
        mimeType: "video/mp4",
        width: video.input?.width,
        height: video.input?.height,
        previewUrl: video.thumbnail,
        meta: {
          duration: video.duration,
          playback: video.playback,
          status: video.status,
          created: video.created,
          modified: video.modified,
          size: video.size,
        },
      };
    },
    async upload(input) {
      // Stream supports tus protocol for resumable uploads
      // For simplicity, we'll use direct creator upload which creates an upload URL
      // For large files, this would need to be enhanced with tus
      // First, create a direct upload URL
      const createResponse = await fetch(`${apiBase}/direct_upload`, {
        method: "POST",
        headers: {
          ...headers,
          "Content-Type": "application/json",
        },
        body: JSON.stringify({
          maxDurationSeconds: 3600, // 1 hour max
          meta: {
            name: input.filename,
          },
        }),
      });
      if (!createResponse.ok) {
        const error = await createResponse.text();
        throw new Error(`Failed to create upload URL: ${error}`);
      }
      const createData: CloudflareStreamDirectUploadResponse = await createResponse.json();
      if (!createData.success) {
        throw new Error(
          `Failed to create upload URL: ${createData.errors?.[0]?.message || "Unknown error"}`,
        );
      }
      // Upload the file to the provided URL
      const uploadUrl = createData.result.uploadURL;
      const formData = new FormData();
      formData.append("file", input.file, input.filename);
      const uploadResponse = await fetch(uploadUrl, {
        method: "POST",
        body: formData,
      });
      if (!uploadResponse.ok) {
        const error = await uploadResponse.text();
        throw new Error(`Upload failed: ${error}`);
      }
      // The upload response contains the video details
      // Wait a moment for the video to be processed
      const videoId = createData.result.uid;
      // Poll for the video to be ready (simple implementation)
      // Worst case ~10s (10 polls x 1s) before giving up and returning
      // a pending record below.
      let video: CloudflareStreamVideo | null = null;
      for (let i = 0; i < 10; i++) {
        await new Promise((resolve) => setTimeout(resolve, 1000));
        const checkResponse = await fetch(`${apiBase}/${videoId}`, { headers });
        if (checkResponse.ok) {
          const checkData: CloudflareStreamResponse = await checkResponse.json();
          if (checkData.success && checkData.result.status?.state !== "queued") {
            video = checkData.result;
            break;
          }
        }
      }
      if (!video) {
        // Return with pending status - thumbnail might not be ready yet
        return {
          id: videoId,
          filename: input.filename,
          mimeType: "video/mp4",
          previewUrl: undefined,
          meta: {
            status: { state: "processing" },
          },
        };
      }
      return {
        id: video.uid,
        filename: toString(video.meta?.name) || input.filename,
        mimeType: "video/mp4",
        width: video.input?.width,
        height: video.input?.height,
        previewUrl: video.thumbnail,
        meta: {
          duration: video.duration,
          playback: video.playback,
          status: video.status,
        },
      };
    },
    async delete(id: string) {
      const response = await fetch(`${apiBase}/${id}`, {
        method: "DELETE",
        headers,
      });
      // 404 is treated as already-deleted, not a failure
      if (!response.ok && response.status !== 404) {
        throw new Error(`Cloudflare Stream delete failed: ${response.status}`);
      }
    },
    getEmbed(value: MediaValue, options?: EmbedOptions): EmbedResult {
      const rawPlayback = value.meta?.playback;
      const playback = isRecord(rawPlayback) ? rawPlayback : undefined;
      const hlsSrc = toString(playback?.hls);
      const dashSrc = toString(playback?.dash);
      // Build the Stream player iframe URL or use HLS/DASH directly
      // For video embeds, we can use the HLS stream URL
      if (hlsSrc) {
        return {
          type: "video",
          sources: [
            { src: hlsSrc, type: "application/x-mpegURL" },
            ...(dashSrc ? [{ src: dashSrc, type: "application/dash+xml" }] : []),
          ],
          // NOTE(review): list()/get() store the thumbnail in previewUrl, not
          // meta — this lookup may always be undefined; confirm callers
          // populate meta.thumbnail.
          poster: toString(value.meta?.thumbnail),
          width: options?.width ?? value.width,
          height: options?.height ?? value.height,
          controls,
          autoplay,
          loop,
          muted: isMuted,
          playsinline: true,
          preload: "metadata",
        };
      }
      // Fallback: use the Stream embed player URL
      const baseUrl = customerSubdomain
        ? `https://${customerSubdomain}`
        : `https://customer-${accountId.slice(0, 8)}.cloudflarestream.com`;
      return {
        type: "video",
        src: `${baseUrl}/${value.id}/manifest/video.m3u8`,
        poster: `${baseUrl}/${value.id}/thumbnails/thumbnail.jpg`,
        width: options?.width ?? value.width,
        height: options?.height ?? value.height,
        controls,
        autoplay,
        loop,
        muted: isMuted,
        playsinline: true,
        preload: "metadata",
      };
    },
    getThumbnailUrl(id: string, _mimeType?: string, options?: { width?: number; height?: number }) {
      // For videos, return a thumbnail/poster image
      const baseUrl = customerSubdomain
        ? `https://${customerSubdomain}`
        : `https://customer-${accountId.slice(0, 8)}.cloudflarestream.com`;
      // Stream supports thumbnail customization via URL params
      const width = options?.width || 400;
      const height = options?.height;
      let url = `${baseUrl}/${id}/thumbnails/thumbnail.jpg?width=${width}`;
      if (height) url += `&height=${height}`;
      return url;
    },
  };
  return provider;
};
// Cloudflare Stream API response types.
// All endpoints used here share the { success, errors, result } envelope.
/** Envelope for list endpoints (result is an array of videos). */
interface CloudflareStreamListResponse {
  success: boolean;
  errors?: Array<{ message: string }>;
  result: CloudflareStreamVideo[];
}
/** Envelope for endpoints that return a single video. */
interface CloudflareStreamResponse {
  success: boolean;
  errors?: Array<{ message: string }>;
  result: CloudflareStreamVideo;
}
/** Envelope for the direct-creator-upload endpoint. */
interface CloudflareStreamDirectUploadResponse {
  success: boolean;
  errors?: Array<{ message: string }>;
  result: {
    // URL the client uploads the file to (direct upload flow)
    uploadURL: string;
    // id of the video record created for this upload
    uid: string;
  };
}
/** A single Stream video record. */
interface CloudflareStreamVideo {
  // Stream's unique video id (used as the media id here)
  uid: string;
  // poster/thumbnail image URL
  thumbnail: string;
  thumbnailTimestampPct?: number;
  readyToStream: boolean;
  status: {
    // processing state; the upload poller waits for it to leave "queued" —
    // NOTE(review): full set of state values not visible here, confirm in docs
    state: string;
    pctComplete?: string;
    errorReasonCode?: string;
    errorReasonText?: string;
  };
  // arbitrary metadata; meta.name is used as the filename when present
  meta?: Record<string, unknown>;
  created: string;
  modified: string;
  size: number;
  preview?: string;
  allowedOrigins?: string[];
  requireSignedURLs: boolean;
  uploaded?: string;
  scheduledDeletion?: string;
  // source dimensions, surfaced as the media width/height
  input?: {
    width: number;
    height: number;
  };
  // streaming manifest URLs consumed by getEmbed
  playback?: {
    hls: string;
    dash: string;
  };
  watermark?: unknown;
  // duration in seconds — TODO confirm unit against Stream API docs
  duration: number;
}

View File

@@ -0,0 +1,118 @@
/**
* Cloudflare Stream Media Provider
*
* Provides integration with Cloudflare Stream for video hosting and streaming.
*
* Features:
* - Browse uploaded videos
* - Upload new videos (direct upload)
* - Delete videos
* - HLS/DASH streaming URLs
* - Thumbnail generation
*
* @see https://developers.cloudflare.com/stream/
*/
import type { MediaProviderDescriptor } from "emdash/media";
/**
 * Cloudflare Stream configuration.
 *
 * All credentials may be supplied inline or deferred to environment
 * variables that the runtime module reads on first use.
 */
export interface CloudflareStreamConfig {
  /**
   * Cloudflare Account ID
   * If not provided, reads from accountIdEnvVar at runtime
   */
  accountId?: string;
  /**
   * Environment variable name containing the Account ID
   * @default "CF_ACCOUNT_ID"
   */
  accountIdEnvVar?: string;
  /**
   * API Token with Stream permissions
   * If not provided, reads from apiTokenEnvVar at runtime
   * Should have "Stream: Read" and "Stream: Edit" permissions
   */
  apiToken?: string;
  /**
   * Environment variable name containing the API token
   * @default "CF_STREAM_TOKEN"
   */
  apiTokenEnvVar?: string;
  /**
   * Customer subdomain for Stream delivery (optional)
   * If not provided, uses customer-{hash}.cloudflarestream.com format
   */
  customerSubdomain?: string;
  /**
   * Default player controls setting
   * @default true
   */
  controls?: boolean;
  /**
   * Autoplay videos (muted by default to comply with browser policies)
   * @default false
   */
  autoplay?: boolean;
  /**
   * Loop videos
   * @default false
   */
  loop?: boolean;
  /**
   * Mute videos
   * @default false (true if autoplay is enabled)
   */
  muted?: boolean;
}
// Cloudflare Stream icon (inline SVG as data URL)
const STREAM_ICON = `data:image/svg+xml,${encodeURIComponent('<svg xmlns="http://www.w3.org/2000/svg" width="64" height="64" fill="none" viewBox="0 0 64 64"><g clip-path="url(#a)"><path fill="#F63" d="M59.87 30.176a11.73 11.73 0 0 0-8-2.72 19.3 19.3 0 0 0-37-4.59 13.63 13.63 0 0 0-9.67 3.19 14.599 14.599 0 0 0-5.2 11 14.24 14.24 0 0 0 14.18 14.25h37.88a12 12 0 0 0 7.81-21.13Zm-7.81 17.13H14.19A10.24 10.24 0 0 1 4 37.086a10.58 10.58 0 0 1 3.77-8 9.55 9.55 0 0 1 6.23-2.25c.637 0 1.273.058 1.9.17l1.74.31.51-1.69A15.29 15.29 0 0 1 48 29.686l.1 2.32 2.26-.36a8.239 8.239 0 0 1 6.91 1.62 8.098 8.098 0 0 1 2.73 6.1 8 8 0 0 1-7.94 7.94Z"/><path fill="#F63" fill-rule="evenodd" d="m25.72 24.89 3.02-1.72 15.085 8.936.004 3.44-15.087 8.973L25.72 42.8V24.89Zm4 3.51v10.883l9.168-5.452L29.72 28.4Z" clip-rule="evenodd"/></g><defs><clipPath id="a"><path fill="#fff" d="M0 0h64v64H0z"/></clipPath></defs></svg>')}`;
/**
 * Cloudflare Stream media provider.
 *
 * Builds only the config-time descriptor (serializable metadata, no network
 * or env access); the runtime implementation lives at the `entrypoint`
 * module listed below.
 *
 * @example
 * ```ts
 * import { cloudflareStream } from "@emdash-cms/cloudflare";
 *
 * emdash({
 *   mediaProviders: [
 *     // Uses CF_ACCOUNT_ID and CF_STREAM_TOKEN env vars by default
 *     cloudflareStream({}),
 *
 *     // Or with custom env var names
 *     cloudflareStream({
 *       accountIdEnvVar: "MY_CF_ACCOUNT",
 *       apiTokenEnvVar: "MY_CF_STREAM_KEY",
 *     }),
 *   ],
 * })
 * ```
 */
export function cloudflareStream(
  config: CloudflareStreamConfig,
): MediaProviderDescriptor<CloudflareStreamConfig> {
  const descriptor: MediaProviderDescriptor<CloudflareStreamConfig> = {
    id: "cloudflare-stream",
    name: "Cloudflare Stream",
    icon: STREAM_ICON,
    entrypoint: "@emdash-cms/cloudflare/media/stream-runtime",
    capabilities: {
      browse: true,
      search: true,
      upload: true,
      delete: true,
    },
    config,
  };
  return descriptor;
}

View File

@@ -0,0 +1,7 @@
/**
* Cloudflare Plugins
*
* Optional plugins that enhance EmDash with Cloudflare-specific features.
*/
export { vectorizeSearch, type VectorizeSearchConfig } from "./vectorize-search.js";

View File

@@ -0,0 +1,393 @@
/**
* Vectorize Search Plugin
*
* Semantic search using Cloudflare Vectorize and Workers AI.
* This plugin provides a semantic search endpoint that complements
* the core FTS5-based search.
*
* Usage:
* 1. Add the plugin to your EmDash config
* 2. Configure Vectorize index and AI bindings in wrangler.toml
* 3. Access semantic search via plugin route
*
* @example
* ```typescript
* // astro.config.mjs
* import emdash from "emdash/astro";
* import { vectorizeSearch } from "@emdash-cms/cloudflare/plugins";
*
* export default defineConfig({
* integrations: [
* emdash({
* plugins: [
* vectorizeSearch({
* indexName: "emdash-content",
* model: "@cf/bge-base-en-v1.5",
* }),
* ],
* }),
* ],
* });
* ```
*
* @example
* ```toml
* # wrangler.toml
* [[vectorize]]
* binding = "VECTORIZE"
* index_name = "emdash-content"
*
* [ai]
* binding = "AI"
* ```
*/
import type { PluginDefinition, PluginContext, RouteContext, ContentHookEvent } from "emdash";
import { extractPlainText } from "emdash";
/** Coerce an unknown value to a string; anything non-string becomes "". */
function toString(value: unknown): string {
  if (typeof value === "string") return value;
  return "";
}
/** Type guard: true for object-like values that are neither null nor arrays. */
function isRecord(value: unknown): value is Record<string, unknown> {
  if (value == null) return false;
  return typeof value === "object" && !Array.isArray(value);
}
/**
 * Vectorize Search Plugin Configuration
 */
export interface VectorizeSearchConfig {
  /**
   * Name of the Vectorize index.
   * Should match the `index_name` bound as VECTORIZE in wrangler.toml.
   * @default "emdash-content"
   */
  indexName?: string;
  /**
   * Workers AI embedding model to use (must match the dimensionality the
   * Vectorize index was created with).
   * @default "@cf/bge-base-en-v1.5"
   */
  model?: string;
  /**
   * Collections to index. If not specified, indexes all collections
   * that have search enabled in their config.
   */
  collections?: string[];
}
/**
 * Pull the Cloudflare runtime environment off a request, if present.
 *
 * Astro's Cloudflare adapter stashes its locals on the request object under
 * the well-known `astro.locals` symbol; when not running on Workers the
 * symbol is absent and this returns null.
 */
function getCloudflareEnv(request: Request): CloudflareEnv | null {
  // eslint-disable-next-line @typescript-eslint/no-explicit-any -- Astro locals are reached via an internal symbol; no typed API exists
  const locals = (request as any)[Symbol.for("astro.locals")];
  return locals?.runtime?.env || null;
}
/**
 * Flatten a content entry into a single newline-joined text blob suitable
 * for embedding.
 *
 * The title (when it is a string) is emitted first; id/slug are skipped;
 * every other string field is run through extractPlainText (it may hold
 * serialized Portable Text), and array fields are treated as Portable Text
 * blocks.
 */
function extractSearchableText(content: Record<string, unknown>): string {
  const chunks: string[] = [];
  if (typeof content.title === "string") {
    chunks.push(content.title);
  }
  for (const [field, raw] of Object.entries(content)) {
    // title is already included; id/slug carry no prose worth indexing
    if (field === "title" || field === "id" || field === "slug") continue;
    if (typeof raw === "string") {
      // Could be plain text or JSON Portable Text
      const text = extractPlainText(raw);
      if (text) chunks.push(text);
      continue;
    }
    if (Array.isArray(raw)) {
      // eslint-disable-next-line @typescript-eslint/no-explicit-any -- Portable Text arrays are untyped here; extractPlainText handles validation
      const text = extractPlainText(raw as any);
      if (text) chunks.push(text);
    }
  }
  return chunks.join("\n");
}
/**
 * Create a Vectorize Search plugin definition.
 *
 * Note: This returns a plain plugin definition object, not a resolved plugin.
 * It should be passed to the emdash() integration's plugins array.
 *
 * @param config - embedding model, index name, and optional collection filter
 * @returns a PluginDefinition wiring indexing hooks, query/reindex routes,
 *   and an admin settings page
 */
export function vectorizeSearch(config: VectorizeSearchConfig = {}): PluginDefinition {
  const model = config.model ?? "@cf/bge-base-en-v1.5";
  // NOTE(review): config.indexName is currently unused — the index is chosen
  // by the VECTORIZE wrangler binding, not by name at runtime; confirm intent.
  const targetCollections = config.collections;
  // Store env reference from routes for use in hooks
  // (hooks don't have request context directly).
  // NOTE(review): per-isolate module state — empty until the first route call
  // and reset on cold start, so early afterSave hooks are skipped (see warn).
  let cachedEnv: CloudflareEnv | null = null;
  return {
    id: "vectorize-search",
    version: "1.0.0",
    capabilities: ["content:read"],
    hooks: {
      /**
       * Index content on save.
       *
       * Note: Hooks don't have access to the request directly.
       * We rely on the route handler being called first to cache the env,
       * or the env being available through other means on Cloudflare.
       */
      "content:afterSave": {
        handler: async (event: ContentHookEvent, _ctx: PluginContext): Promise<void> => {
          const { content, collection } = event;
          // Check if this collection should be indexed
          if (targetCollections && !targetCollections.includes(collection)) {
            return;
          }
          // On Cloudflare Workers, we need to get env from the execution context.
          // This is a limitation - hooks don't have request context.
          // The workaround is to use the query route first to cache the env.
          if (!cachedEnv) {
            console.warn(
              "[vectorize-search] Environment not available in hook. " +
                "Call the /query route first to initialize, or reindex manually.",
            );
            return;
          }
          const env = cachedEnv;
          if (!env.AI || !env.VECTORIZE) {
            console.warn(
              "[vectorize-search] AI or VECTORIZE binding not available, skipping indexing",
            );
            return;
          }
          // Indexing is best-effort: failures are logged, never rethrown, so
          // a Vectorize outage cannot fail the content save itself.
          try {
            const text = extractSearchableText(content);
            if (!text.trim()) {
              return;
            }
            // Generate embedding
            const embedResult = await env.AI.run(model, {
              text: [text],
            });
            if (!embedResult?.data?.[0]) {
              console.error("[vectorize-search] Failed to generate embedding");
              return;
            }
            // Upsert to Vectorize, keyed by content id so re-saves overwrite
            const contentId = toString(content.id);
            const contentSlug = toString(content.slug);
            const contentTitle = toString(content.title);
            await env.VECTORIZE.upsert([
              {
                id: contentId,
                values: embedResult.data[0],
                metadata: {
                  collection,
                  slug: contentSlug ?? "",
                  title: contentTitle ?? "",
                },
              },
            ]);
            console.log(`[vectorize-search] Indexed ${collection}/${contentId}`);
          } catch (error) {
            console.error("[vectorize-search] Error indexing content:", error);
          }
        },
      },
      /**
       * Remove from index on delete.
       * Silently a no-op when the env has not been cached yet (see above).
       */
      "content:afterDelete": {
        handler: async (
          event: { id: string; collection: string },
          _ctx: PluginContext,
        ): Promise<void> => {
          const { id, collection } = event;
          // Check if this collection should be indexed
          if (targetCollections && !targetCollections.includes(collection)) {
            return;
          }
          if (!cachedEnv?.VECTORIZE) {
            return;
          }
          try {
            await cachedEnv.VECTORIZE.deleteByIds([id]);
            console.log(`[vectorize-search] Removed ${collection}/${id} from index`);
          } catch (error) {
            console.error("[vectorize-search] Error removing from index:", error);
          }
        },
      },
    },
    routes: {
      /**
       * Semantic search query
       *
       * GET /_emdash/api/plugins/vectorize-search/query?q=hello&limit=10
       *
       * Returns { results } on success or { error, results: [] } on any
       * failure — the route never throws.
       */
      query: {
        handler: async (ctx: RouteContext): Promise<unknown> => {
          const { request } = ctx;
          const input = isRecord(ctx.input) ? ctx.input : undefined;
          // Cache env for hooks (see cachedEnv note above)
          const env = getCloudflareEnv(request);
          if (env) {
            cachedEnv = env;
          }
          if (!env?.AI || !env?.VECTORIZE) {
            return {
              error: "Vectorize or AI binding not available",
              results: [],
            };
          }
          const query = typeof input?.q === "string" ? input.q : undefined;
          if (!query) {
            return {
              error: "Query parameter 'q' is required",
              results: [],
            };
          }
          try {
            // Generate embedding for query (same model as indexing)
            const embedResult = await env.AI.run(model, {
              text: [query],
            });
            if (!embedResult?.data?.[0]) {
              return {
                error: "Failed to generate query embedding",
                results: [],
              };
            }
            // Query Vectorize
            const limit = typeof input?.limit === "number" ? input.limit : 20;
            const queryOptions: VectorizeQueryOptions = {
              topK: limit,
              returnMetadata: "all",
            };
            // Add collection filter if specified
            const collection = typeof input?.collection === "string" ? input.collection : undefined;
            if (collection) {
              queryOptions.filter = {
                collection,
              };
            }
            const results = await env.VECTORIZE.query(embedResult.data[0], queryOptions);
            return {
              results: results.matches.map((match) => ({
                id: match.id,
                score: match.score,
                collection: toString(match.metadata?.collection),
                slug: toString(match.metadata?.slug),
                title: toString(match.metadata?.title),
              })),
            };
          } catch (error) {
            console.error("[vectorize-search] Query error:", error);
            return {
              error: error instanceof Error ? error.message : "Query failed",
              results: [],
            };
          }
        },
      },
      /**
       * Reindex all content
       *
       * POST /_emdash/api/plugins/vectorize-search/reindex
       *
       * Not implemented yet: still useful to call because it caches the env
       * for the save/delete hooks, but it always reports REINDEX_NOT_SUPPORTED.
       */
      reindex: {
        handler: async (ctx: RouteContext): Promise<unknown> => {
          const { request } = ctx;
          // Cache env (side effect is the whole point of this route today)
          const env = getCloudflareEnv(request);
          if (env) {
            cachedEnv = env;
          }
          return { success: false, error: "REINDEX_NOT_SUPPORTED" };
        },
      },
    },
    admin: {
      pages: [
        {
          path: "/settings",
          label: "Vectorize Search",
          icon: "search",
        },
      ],
    },
  };
}
// =============================================================================
// Cloudflare Types (minimal, for the plugin)
// =============================================================================
/**
 * Structural subset of the Workers runtime bindings this plugin touches;
 * intentionally minimal rather than the full workers-types surface.
 */
interface CloudflareEnv {
  AI?: {
    run(model: string, input: { text: string[] }): Promise<{ data: number[][] }>;
  };
  VECTORIZE?: VectorizeIndex;
}
/** Minimal Vectorize index surface: upsert, delete-by-id, and query. */
interface VectorizeIndex {
  upsert(
    vectors: Array<{ id: string; values: number[]; metadata?: Record<string, unknown> }>,
  ): Promise<void>;
  deleteByIds(ids: string[]): Promise<void>;
  query(vector: number[], options: VectorizeQueryOptions): Promise<{ matches: VectorizeMatch[] }>;
}
/** Options accepted by VectorizeIndex.query. */
interface VectorizeQueryOptions {
  // number of nearest neighbours to return
  topK: number;
  returnMetadata?: "all" | "indexed" | "none";
  // metadata equality filter (e.g. { collection })
  filter?: Record<string, unknown>;
}
/** One nearest-neighbour hit from a Vectorize query. */
interface VectorizeMatch {
  id: string;
  // similarity score as reported by Vectorize
  score: number;
  metadata?: Record<string, unknown>;
}
export default vectorizeSearch;

View File

@@ -0,0 +1,291 @@
/**
* HTTP fetch helper for sandboxed plugins, called from the bridge.
*
* The bridge's httpFetch RPC method delegates here so the logic is pure and
* testable without standing up a real WorkerEntrypoint.
*
* Responsibilities:
* - Enforce the `network:request` / `network:request:unrestricted` capability.
* - Enforce the allowedHosts list, including on every redirect hop. The
* native `fetch` follows 3xx responses automatically; without manual
* redirect handling an allowed host that 302s to a disallowed host
* would bypass the allowlist.
* - Strip credential headers (Authorization, Cookie, Proxy-Authorization)
* on cross-origin redirects so tokens don't leak to attacker hosts.
* - For `network:request:unrestricted`, apply a minimal SSRF check on every hop so
* plugins can't be tricked into reaching cloud-metadata endpoints or
* literal private IPs even without an explicit allowlist.
*/
/** Maximum redirect chain length before we give up. */
const MAX_REDIRECTS = 5;
/** Headers that must be stripped when a redirect crosses origins. */
const CREDENTIAL_HEADERS = ["authorization", "cookie", "proxy-authorization"];
/**
 * Known internal hostnames. Matched case-insensitively after stripping any
 * trailing dots (FQDN form) and IPv6 brackets.
 * "metadata.google.internal" is the GCE metadata endpoint; "metadata.google"
 * is presumably a shortened alias — TODO confirm it is resolvable in practice.
 */
const BLOCKED_HOSTNAMES = new Set(["localhost", "metadata.google.internal", "metadata.google"]);
/**
 * Wildcard DNS services commonly used by SSRF tooling to map hostnames to
 * private IPs (e.g. 127.0.0.1.nip.io -> 127.0.0.1). Matched as a suffix.
 */
const BLOCKED_HOSTNAME_SUFFIXES = [
  "nip.io",
  "sslip.io",
  "xip.io",
  "traefik.me",
  "lvh.me",
  "localtest.me",
  // RFC 6761 §6.3 — any subdomain of localhost must resolve to loopback.
  // The apex is already in BLOCKED_HOSTNAMES, this catches *.localhost.
  "localhost",
];
/**
 * RFC1918, loopback, link-local, current-network IPv4 ranges as inclusive
 * [start, end] pairs of unsigned 32-bit integers:
 * 127/8 loopback; 10/8, 172.16/12, 192.168/16 private; 169.254/16
 * link-local (covers 169.254.169.254 cloud metadata); 0/8 current network.
 */
const BLOCKED_IPV4_RANGES: Array<[number, number]> = [
  [ip4(127, 0, 0, 0), ip4(127, 255, 255, 255)],
  [ip4(10, 0, 0, 0), ip4(10, 255, 255, 255)],
  [ip4(172, 16, 0, 0), ip4(172, 31, 255, 255)],
  [ip4(192, 168, 0, 0), ip4(192, 168, 255, 255)],
  [ip4(169, 254, 0, 0), ip4(169, 254, 255, 255)],
  [ip4(0, 0, 0, 0), ip4(0, 255, 255, 255)],
];
/** Pack four octets (0-255) into an unsigned 32-bit IPv4 integer. */
function ip4(a: number, b: number, c: number, d: number): number {
  // Shift each octet in; >>> 0 keeps the accumulator in unsigned range.
  return [a, b, c, d].reduce((acc, octet) => ((acc << 8) | octet) >>> 0, 0);
}
/** Dotted-decimal shape only; octet range (0-255) is enforced in parseIpv4. */
const IPV4_PATTERN = /^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$/;
/** Match IPv6 brackets at start/end for stripping ('g' removes both at once). */
const IPV6_BRACKET_PATTERN = /^\[|\]$/g;
/** Match any number of trailing dots on an FQDN for stripping. */
const TRAILING_DOT_PATTERN = /\.+$/;
/** Match fc00::/7 ULA addresses — first byte 0xfc or 0xfd followed by any byte. */
const IPV6_ULA_FC_PATTERN = /^fc[0-9a-f]{2}:/;
const IPV6_ULA_FD_PATTERN = /^fd[0-9a-f]{2}:/;
/**
 * IPv4-mapped IPv6 in hex form: ::ffff:XXXX:XXXX
 * The WHATWG URL parser normalises dotted-decimal to hex:
 *   [::ffff:127.0.0.1] -> [::ffff:7f00:1]
 *   [::ffff:169.254.169.254] -> [::ffff:a9fe:a9fe]
 * Without converting back, the hex form bypasses the IPv4 range check.
 */
const IPV4_MAPPED_IPV6_HEX_PATTERN = /^::ffff:([0-9a-f]{1,4}):([0-9a-f]{1,4})$/i;
/** URL schemes we allow for outbound plugin fetches (no file:, data:, ftp:). */
const ALLOWED_SCHEMES = new Set(["http:", "https:"]);
/**
 * Parse a dotted-decimal IPv4 string into its unsigned 32-bit value.
 * Returns null unless the input is exactly four in-range (0-255) octets.
 */
function parseIpv4(ip: string): number | null {
  const match = /^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$/.exec(ip);
  if (match === null) return null;
  const octets = match.slice(1, 5).map(Number);
  if (octets.some((n) => Number.isNaN(n) || n < 0 || n > 255)) return null;
  return (((octets[0]! << 24) | (octets[1]! << 16) | (octets[2]! << 8) | octets[3]!) >>> 0);
}
/**
 * Convert a hex-form IPv4-mapped IPv6 address (e.g. "::ffff:7f00:1") back
 * to dotted-decimal IPv4 ("127.0.0.1").
 * Returns null if the input isn't in the hex-mapped form.
 */
function normalizeIPv4MappedIPv6(ip: string): string | null {
  const m = /^::ffff:([0-9a-f]{1,4}):([0-9a-f]{1,4})$/i.exec(ip);
  if (!m) return null;
  const hi = parseInt(m[1]!, 16);
  const lo = parseInt(m[2]!, 16);
  // Each 16-bit group contributes two octets of the IPv4 address.
  return [(hi >> 8) & 0xff, hi & 0xff, (lo >> 8) & 0xff, lo & 0xff].join(".");
}
/**
 * True when `hostname` is a literal address (IPv4, IPv6, or IPv4-mapped
 * IPv6) that points at loopback / private / link-local space.
 *
 * Order matters: strip brackets first, shortcut the two canonical loopback
 * spellings, then unmap hex IPv4-mapped IPv6, then plain dotted IPv4, and
 * finally prefix-match IPv6 link-local/ULA. DNS names are NOT resolved here
 * — only literals are caught (resolution is left to the platform, per the
 * comment below).
 */
function isPrivateLiteral(hostname: string): boolean {
  // Strip IPv6 brackets
  const bare = hostname.replace(IPV6_BRACKET_PATTERN, "").toLowerCase();
  if (bare === "::1" || bare === "::ffff:127.0.0.1") return true;
  // IPv4-mapped IPv6 in hex form (what WHATWG produces for [::ffff:127.0.0.1])
  const mapped = normalizeIPv4MappedIPv6(bare);
  if (mapped !== null) {
    const num = parseIpv4(mapped);
    if (num !== null) {
      return BLOCKED_IPV4_RANGES.some(([start, end]) => num >= start && num <= end);
    }
  }
  // Plain dotted-decimal IPv4 literal
  const num = parseIpv4(bare);
  if (num !== null) {
    return BLOCKED_IPV4_RANGES.some(([start, end]) => num >= start && num <= end);
  }
  // Loose IPv6 link-local / ULA detection. The bridge leaves full DNS
  // resolution to the platform; we only need to catch literal addresses
  // here. Anything containing a colon and matching one of these prefixes
  // is private.
  if (bare.includes(":")) {
    return (
      bare.startsWith("fe80:") || IPV6_ULA_FC_PATTERN.test(bare) || IPV6_ULA_FD_PATTERN.test(bare)
    );
  }
  return false;
}
/** True when the hostname is a known-internal name or a wildcard-DNS suffix. */
function isBlockedHostname(hostname: string): boolean {
  // Normalise before matching: drop IPv6 brackets, trailing FQDN dots, and
  // case — otherwise "localhost." (which the WHATWG URL parser preserves)
  // would slip past the exact-match set.
  const normalised = hostname
    .replace(IPV6_BRACKET_PATTERN, "")
    .replace(TRAILING_DOT_PATTERN, "")
    .toLowerCase();
  if (BLOCKED_HOSTNAMES.has(normalised)) return true;
  // Suffix services (nip.io etc.) are blocked at the apex and any depth below.
  return BLOCKED_HOSTNAME_SUFFIXES.some(
    (suffix) => normalised === suffix || normalised.endsWith(`.${suffix}`),
  );
}
/**
 * Does `host` match any pattern in the plugin's allowlist?
 *
 * Pattern forms: "*" matches everything, "*.example.com" matches any
 * subdomain AND the bare apex, anything else is an exact hostname match.
 *
 * Both host and patterns are normalised (lowercased, trailing dots
 * stripped) so "API.Example.com" in a manifest and "api.example.com." on a
 * request still match.
 */
function isHostAllowed(host: string, allowedHosts: string[]): boolean {
  const normalize = (value: string) => value.replace(/\.+$/, "").toLowerCase();
  const candidate = normalize(host);
  for (const pattern of allowedHosts) {
    const p = normalize(pattern);
    if (p === "*") return true;
    if (p.startsWith("*.")) {
      // subdomains via the ".example.com" suffix, apex via equality
      if (candidate.endsWith(p.slice(1)) || candidate === p.slice(2)) return true;
    } else if (candidate === p) {
      return true;
    }
  }
  return false;
}
/**
 * Return a copy of `init` with Authorization, Cookie, and
 * Proxy-Authorization removed; an init without headers is returned as-is.
 */
function stripCredentialHeaders(init: RequestInit): RequestInit {
  if (!init.headers) return init;
  const sanitized = new Headers(init.headers);
  sanitized.delete("authorization");
  sanitized.delete("cookie");
  sanitized.delete("proxy-authorization");
  return { ...init, headers: sanitized };
}
/** Inputs controlling a sandboxed fetch: granted capabilities + host allowlist. */
export interface SandboxHttpFetchOptions {
  // capability strings granted to the plugin (network:request[:unrestricted])
  capabilities: string[];
  // hostname patterns the plugin may reach; see isHostAllowed for syntax
  allowedHosts: string[];
  /** Injectable fetch for tests. Defaults to globalThis.fetch. */
  fetchImpl?: typeof fetch;
}
/** Plain serializable response shape handed back across the RPC boundary. */
export interface SandboxHttpFetchResult {
  status: number;
  // response headers flattened to a single-value record
  headers: Record<string, string>;
  // full response body read as text
  text: string;
}
/**
 * Fetch a URL on behalf of a sandboxed plugin with manual redirect handling.
 *
 * Redirects are followed by hand (redirect: "manual") so the allowlist and
 * SSRF checks run on EVERY hop, and credential headers are stripped when a
 * hop crosses origins.
 *
 * @param url - initial target URL
 * @param init - optional fetch init forwarded to each hop
 * @param options - capabilities, allowlist, and injectable fetch
 * @throws Error if the capability is missing, a host isn't allowed, or the
 * target resolves to a known-internal address.
 */
export async function sandboxHttpFetch(
  url: string,
  init: RequestInit | undefined,
  options: SandboxHttpFetchOptions,
): Promise<SandboxHttpFetchResult> {
  const { capabilities, allowedHosts } = options;
  const fetchImpl = options.fetchImpl ?? globalThis.fetch;
  // "unrestricted" implies the base fetch capability.
  const hasUnrestricted = capabilities.includes("network:request:unrestricted");
  const hasFetch = capabilities.includes("network:request") || hasUnrestricted;
  if (!hasFetch) {
    throw new Error("Missing capability: network:request");
  }
  if (!hasUnrestricted && allowedHosts.length === 0) {
    throw new Error(
      "Plugin has no allowed hosts configured. Add hosts to allowedHosts to enable HTTP requests.",
    );
  }
  let currentUrl = url;
  let currentInit: RequestInit | undefined = init;
  // i <= MAX_REDIRECTS ⇒ initial request plus up to MAX_REDIRECTS hops.
  for (let i = 0; i <= MAX_REDIRECTS; i++) {
    const parsed = new URL(currentUrl);
    const hostname = parsed.hostname;
    // Only http(s) is allowed. Keeps file:, data:, ftp:, and friends out
    // regardless of what the platform fetch happens to support.
    if (!ALLOWED_SCHEMES.has(parsed.protocol)) {
      throw new Error(`Unsupported scheme: ${parsed.protocol}`);
    }
    // Literal-IP / internal-hostname SSRF check runs on every request,
    // including the restricted (allowedHosts) path. The allowlist scopes
    // which public hosts a plugin may reach — it must not be a way to
    // opt out of SSRF protection (e.g. `allowedHosts: ["*"]` does NOT
    // grant access to 127.0.0.1).
    if (isPrivateLiteral(hostname) || isBlockedHostname(hostname)) {
      throw new Error(`Blocked fetch to internal host: ${hostname}`);
    }
    if (!hasUnrestricted && !isHostAllowed(hostname, allowedHosts)) {
      throw new Error(`Host not allowed: ${hostname}`);
    }
    const response = await fetchImpl(currentUrl, {
      ...currentInit,
      redirect: "manual",
    });
    // Not a redirect — return directly.
    if (response.status < 300 || response.status >= 400) {
      const headers: Record<string, string> = {};
      response.headers.forEach((value, key) => {
        headers[key] = value;
      });
      return {
        status: response.status,
        headers,
        text: await response.text(),
      };
    }
    // 3xx without a Location header (e.g. 304) cannot be followed — return it.
    const location = response.headers.get("Location");
    if (!location) {
      const headers: Record<string, string> = {};
      response.headers.forEach((value, key) => {
        headers[key] = value;
      });
      return {
        status: response.status,
        headers,
        text: await response.text(),
      };
    }
    // Resolve relative redirects; strip credentials on cross-origin hops.
    const previousOrigin = parsed.origin;
    currentUrl = new URL(location, currentUrl).href;
    const nextOrigin = new URL(currentUrl).origin;
    if (previousOrigin !== nextOrigin && currentInit) {
      currentInit = stripCredentialHeaders(currentInit);
    }
  }
  throw new Error(`Too many redirects (max ${MAX_REDIRECTS})`);
}

View File

@@ -0,0 +1,976 @@
/**
* PluginBridge WorkerEntrypoint
*
* Provides controlled access to database operations for sandboxed plugins.
* The sandbox gets a SERVICE BINDING to this entrypoint, not direct DB access.
* All operations are validated and scoped to the plugin.
*
*/
import { WorkerEntrypoint } from "cloudflare:workers";
import type { SandboxEmailSendCallback } from "emdash";
import { ulid } from "emdash";
import { sandboxHttpFetch } from "./bridge-http.js";
/** Regex to validate collection names (prevent SQL injection) */
const COLLECTION_NAME_REGEX = /^[a-z][a-z0-9_]*$/;
/** Regex to validate file extensions (simple alphanumeric, 1-10 chars) */
const FILE_EXT_REGEX = /^\.[a-z0-9]{1,10}$/i;
/**
 * System columns that plugins cannot directly write to.
 * rowToContentItem also uses this set to split a D1 row into system fields
 * versus user data.
 */
const SYSTEM_COLUMNS = new Set([
  "id",
  "slug",
  "status",
  "author_id",
  "created_at",
  "updated_at",
  "published_at",
  "scheduled_at",
  "deleted_at",
  "version",
  "live_revision_id",
  "draft_revision_id",
]);
/**
 * Module-level email send callback.
 *
 * The bridge runs in the host process (same worker), so we can use a
 * module-level callback that the runner sets before creating bridge bindings.
 * This avoids the need to pass non-serializable functions through props.
 *
 * @see runner.ts setEmailSendCallback()
 */
let emailSendCallback: SandboxEmailSendCallback | null = null;
/**
 * Set the email send callback for all bridge instances.
 * Called by the runner when the EmailPipeline is available.
 * Pass null to clear the callback again.
 */
export function setEmailSendCallback(callback: SandboxEmailSendCallback | null): void {
  emailSendCallback = callback;
}
/**
* Serialize a value for D1 storage.
* Mirrors core's serializeValue: objects/arrays → JSON strings,
* booleans → 0/1, null/undefined → null, everything else passthrough.
*/
function serializeValue(value: unknown): unknown {
if (value === null || value === undefined) return null;
if (typeof value === "boolean") return value ? 1 : 0;
if (typeof value === "object") return JSON.stringify(value);
return value;
}
/**
 * Deserialize a D1 row into a ContentItem matching core's plugin API.
 *
 * System columns are extracted; every remaining column goes into `data`,
 * with JSON-looking strings round-tripped back to objects. The canonical
 * shape is { id, type, data, createdAt, updatedAt }.
 */
function rowToContentItem(
  collection: string,
  row: Record<string, unknown>,
): {
  id: string;
  type: string;
  data: Record<string, unknown>;
  createdAt: string;
  updatedAt: string;
} {
  const data: Record<string, unknown> = {};
  for (const [column, raw] of Object.entries(row)) {
    if (SYSTEM_COLUMNS.has(column)) continue;
    if (raw === null) continue; // NULL columns are simply absent from data
    if (typeof raw === "string" && (raw.startsWith("{") || raw.startsWith("["))) {
      // Looks like serialized JSON — try to round-trip it.
      try {
        data[column] = JSON.parse(raw);
        continue;
      } catch {
        // not actually JSON: fall through and keep the raw string
      }
    }
    data[column] = raw;
  }
  const nowIso = () => new Date().toISOString();
  return {
    id: typeof row.id === "string" ? row.id : String(row.id),
    type: collection,
    data,
    createdAt: typeof row.created_at === "string" ? row.created_at : nowIso(),
    updatedAt: typeof row.updated_at === "string" ? row.updated_at : nowIso(),
  };
}
/**
 * Environment bindings required by PluginBridge
 */
export interface PluginBridgeEnv {
  // D1 database holding content tables and _plugin_storage
  DB: D1Database;
  // optional R2 bucket for media (not every deployment binds one)
  MEDIA?: R2Bucket;
}
/**
 * Props passed to the bridge via ctx.props when creating the loopback binding
 */
export interface PluginBridgeProps {
  // scopes every KV/storage row to the calling plugin
  pluginId: string;
  pluginVersion: string;
  // capability strings checked before privileged operations (e.g. httpFetch)
  capabilities: string[];
  // outbound HTTP hostname allowlist (enforced by sandboxHttpFetch)
  allowedHosts: string[];
  // storage collections the plugin's manifest declared; others are rejected
  storageCollections: string[];
}
/**
* PluginBridge WorkerEntrypoint
*
* Provides the context API to sandboxed plugins via RPC.
* All methods validate capabilities and scope operations to the plugin.
*
* Usage:
* 1. Export this class from your worker entrypoint
* 2. Sandboxed plugins get a binding to it via ctx.exports.PluginBridge({...})
* 3. Plugins call bridge methods which validate and proxy to the database
*/
export class PluginBridge extends WorkerEntrypoint<PluginBridgeEnv, PluginBridgeProps> {
// =========================================================================
// KV Operations - scoped to plugin namespace
// =========================================================================
/**
* KV operations use _plugin_storage with a special "__kv" collection.
* This provides consistent storage across sandboxed and non-sandboxed modes.
*/
async kvGet(key: string): Promise<unknown> {
const { pluginId } = this.ctx.props;
const result = await this.env.DB.prepare(
"SELECT data FROM _plugin_storage WHERE plugin_id = ? AND collection = '__kv' AND id = ?",
)
.bind(pluginId, key)
.first<{ data: string }>();
if (!result) return null;
try {
return JSON.parse(result.data);
} catch {
return result.data;
}
}
async kvSet(key: string, value: unknown): Promise<void> {
const { pluginId } = this.ctx.props;
await this.env.DB.prepare(
"INSERT OR REPLACE INTO _plugin_storage (plugin_id, collection, id, data, updated_at) VALUES (?, '__kv', ?, ?, datetime('now'))",
)
.bind(pluginId, key, JSON.stringify(value))
.run();
}
async kvDelete(key: string): Promise<boolean> {
const { pluginId } = this.ctx.props;
const result = await this.env.DB.prepare(
"DELETE FROM _plugin_storage WHERE plugin_id = ? AND collection = '__kv' AND id = ?",
)
.bind(pluginId, key)
.run();
return (result.meta?.changes ?? 0) > 0;
}
async kvList(prefix: string = ""): Promise<Array<{ key: string; value: unknown }>> {
const { pluginId } = this.ctx.props;
const results = await this.env.DB.prepare(
"SELECT id, data FROM _plugin_storage WHERE plugin_id = ? AND collection = '__kv' AND id LIKE ?",
)
.bind(pluginId, prefix + "%")
.all<{ id: string; data: string }>();
return (results.results ?? []).map((row) => ({
key: row.id,
value: JSON.parse(row.data),
}));
}
// =========================================================================
// Storage Operations - scoped to plugin + collection validation
// =========================================================================
async storageGet(collection: string, id: string): Promise<unknown> {
const { pluginId, storageCollections } = this.ctx.props;
if (!storageCollections.includes(collection)) {
throw new Error(`Storage collection not declared: ${collection}`);
}
const result = await this.env.DB.prepare(
"SELECT data FROM _plugin_storage WHERE plugin_id = ? AND collection = ? AND id = ?",
)
.bind(pluginId, collection, id)
.first<{ data: string }>();
if (!result) return null;
return JSON.parse(result.data);
}
async storagePut(collection: string, id: string, data: unknown): Promise<void> {
const { pluginId, storageCollections } = this.ctx.props;
if (!storageCollections.includes(collection)) {
throw new Error(`Storage collection not declared: ${collection}`);
}
await this.env.DB.prepare(
"INSERT OR REPLACE INTO _plugin_storage (plugin_id, collection, id, data, updated_at) VALUES (?, ?, ?, ?, datetime('now'))",
)
.bind(pluginId, collection, id, JSON.stringify(data))
.run();
}
async storageDelete(collection: string, id: string): Promise<boolean> {
const { pluginId, storageCollections } = this.ctx.props;
if (!storageCollections.includes(collection)) {
throw new Error(`Storage collection not declared: ${collection}`);
}
const result = await this.env.DB.prepare(
"DELETE FROM _plugin_storage WHERE plugin_id = ? AND collection = ? AND id = ?",
)
.bind(pluginId, collection, id)
.run();
return (result.meta?.changes ?? 0) > 0;
}
async storageQuery(
collection: string,
opts: { limit?: number; cursor?: string } = {},
): Promise<{
items: Array<{ id: string; data: unknown }>;
hasMore: boolean;
cursor?: string;
}> {
const { pluginId, storageCollections } = this.ctx.props;
if (!storageCollections.includes(collection)) {
throw new Error(`Storage collection not declared: ${collection}`);
}
const limit = Math.min(opts.limit ?? 50, 1000);
const results = await this.env.DB.prepare(
"SELECT id, data FROM _plugin_storage WHERE plugin_id = ? AND collection = ? LIMIT ?",
)
.bind(pluginId, collection, limit + 1)
.all<{ id: string; data: string }>();
const items = (results.results ?? []).slice(0, limit).map((row) => ({
id: row.id,
data: JSON.parse(row.data),
}));
return {
items,
hasMore: (results.results ?? []).length > limit,
cursor: items.length > 0 ? items.at(-1)!.id : undefined,
};
}
async storageCount(collection: string): Promise<number> {
const { pluginId, storageCollections } = this.ctx.props;
if (!storageCollections.includes(collection)) {
throw new Error(`Storage collection not declared: ${collection}`);
}
const result = await this.env.DB.prepare(
"SELECT COUNT(*) as count FROM _plugin_storage WHERE plugin_id = ? AND collection = ?",
)
.bind(pluginId, collection)
.first<{ count: number }>();
return result?.count ?? 0;
}
async storageGetMany(collection: string, ids: string[]): Promise<Map<string, unknown>> {
const { pluginId, storageCollections } = this.ctx.props;
if (!storageCollections.includes(collection)) {
throw new Error(`Storage collection not declared: ${collection}`);
}
if (ids.length === 0) return new Map();
const placeholders = ids.map(() => "?").join(",");
const results = await this.env.DB.prepare(
`SELECT id, data FROM _plugin_storage WHERE plugin_id = ? AND collection = ? AND id IN (${placeholders})`,
)
.bind(pluginId, collection, ...ids)
.all<{ id: string; data: string }>();
const map = new Map<string, unknown>();
for (const row of results.results ?? []) {
map.set(row.id, JSON.parse(row.data));
}
return map;
}
async storagePutMany(
collection: string,
items: Array<{ id: string; data: unknown }>,
): Promise<void> {
const { pluginId, storageCollections } = this.ctx.props;
if (!storageCollections.includes(collection)) {
throw new Error(`Storage collection not declared: ${collection}`);
}
if (items.length === 0) return;
// D1 doesn't support batch in prepare, so we do individual inserts
// In future, we could use batch API
for (const item of items) {
await this.env.DB.prepare(
"INSERT OR REPLACE INTO _plugin_storage (plugin_id, collection, id, data, updated_at) VALUES (?, ?, ?, ?, datetime('now'))",
)
.bind(pluginId, collection, item.id, JSON.stringify(item.data))
.run();
}
}
async storageDeleteMany(collection: string, ids: string[]): Promise<number> {
const { pluginId, storageCollections } = this.ctx.props;
if (!storageCollections.includes(collection)) {
throw new Error(`Storage collection not declared: ${collection}`);
}
if (ids.length === 0) return 0;
let deleted = 0;
for (const id of ids) {
const result = await this.env.DB.prepare(
"DELETE FROM _plugin_storage WHERE plugin_id = ? AND collection = ? AND id = ?",
)
.bind(pluginId, collection, id)
.run();
deleted += result.meta?.changes ?? 0;
}
return deleted;
}
// =========================================================================
// Content Operations - capability-gated
// =========================================================================
async contentGet(
collection: string,
id: string,
): Promise<{
id: string;
type: string;
data: Record<string, unknown>;
createdAt: string;
updatedAt: string;
} | null> {
const { capabilities } = this.ctx.props;
if (!capabilities.includes("content:read")) {
throw new Error("Missing capability: content:read");
}
// Validate collection name to prevent SQL injection
if (!COLLECTION_NAME_REGEX.test(collection)) {
throw new Error(`Invalid collection name: ${collection}`);
}
try {
// Content tables use ec_${collection} naming (no leading underscore)
// Exclude soft-deleted items
const result = await this.env.DB.prepare(
`SELECT * FROM ec_${collection} WHERE id = ? AND deleted_at IS NULL`,
)
.bind(id)
.first();
if (!result) return null;
return rowToContentItem(collection, result);
} catch {
return null;
}
}
  /**
   * List content items with cursor pagination (capability: content:read).
   *
   * @param collection - Content collection (table `ec_${collection}`);
   *   validated against COLLECTION_NAME_REGEX before SQL interpolation.
   * @param opts.limit - Page size, clamped to 100 (default 50).
   * @param opts.cursor - Exclusive upper-bound item id from a previous page.
   * @returns A page of items; `cursor` is set only when more rows remain.
   *   Query errors (e.g. unknown collection table) yield an empty page
   *   rather than throwing.
   */
  async contentList(
    collection: string,
    opts: { limit?: number; cursor?: string } = {},
  ): Promise<{
    items: Array<{
      id: string;
      type: string;
      data: Record<string, unknown>;
      createdAt: string;
      updatedAt: string;
    }>;
    cursor?: string;
    hasMore: boolean;
  }> {
    const { capabilities } = this.ctx.props;
    if (!capabilities.includes("content:read")) {
      throw new Error("Missing capability: content:read");
    }
    // Validate collection name to prevent SQL injection
    if (!COLLECTION_NAME_REGEX.test(collection)) {
      throw new Error(`Invalid collection name: ${collection}`);
    }
    const limit = Math.min(opts.limit ?? 50, 100);
    try {
      // Content tables use ec_${collection} naming (no leading underscore)
      // Exclude soft-deleted items. Ordered by ULID (id DESC) for deterministic
      // cursor pagination. ULIDs are time-sortable so this approximates created_at DESC.
      let sql = `SELECT * FROM ec_${collection} WHERE deleted_at IS NULL`;
      const params: unknown[] = [];
      if (opts.cursor) {
        sql += " AND id < ?";
        params.push(opts.cursor);
      }
      // Fetch one extra row so hasMore can be decided without a COUNT query.
      sql += " ORDER BY id DESC LIMIT ?";
      params.push(limit + 1);
      const results = await this.env.DB.prepare(sql)
        .bind(...params)
        .all();
      const rows = results.results ?? [];
      const pageRows = rows.slice(0, limit);
      const items = pageRows.map((row) => rowToContentItem(collection, row));
      const hasMore = rows.length > limit;
      return {
        items,
        cursor: hasMore && items.length > 0 ? items.at(-1)!.id : undefined,
        hasMore,
      };
    } catch {
      return { items: [], hasMore: false };
    }
  }
  /**
   * Insert a new content item (capability: content:write).
   *
   * System columns (id/slug/status/author_id/created_at/updated_at/version)
   * are populated by the bridge; remaining keys in `data` become user columns
   * when they pass COLLECTION_NAME_REGEX. All identifiers are double-quoted to
   * avoid SQL keyword collisions. The created row is re-read so the caller
   * receives the canonical stored shape.
   *
   * @param collection - Target collection (table `ec_${collection}`).
   * @param data - Field values; may include slug/status/author_id overrides.
   * @returns The stored item as read back from the database.
   */
  async contentCreate(
    collection: string,
    data: Record<string, unknown>,
  ): Promise<{
    id: string;
    type: string;
    data: Record<string, unknown>;
    createdAt: string;
    updatedAt: string;
  }> {
    const { capabilities } = this.ctx.props;
    if (!capabilities.includes("content:write")) {
      throw new Error("Missing capability: content:write");
    }
    if (!COLLECTION_NAME_REGEX.test(collection)) {
      throw new Error(`Invalid collection name: ${collection}`);
    }
    const id = ulid();
    const now = new Date().toISOString();
    // Build columns and values arrays — quote identifiers to avoid SQL keyword collisions
    const columns: string[] = [
      '"id"',
      '"slug"',
      '"status"',
      '"author_id"',
      '"created_at"',
      '"updated_at"',
      '"version"',
    ];
    const values: unknown[] = [
      id,
      typeof data.slug === "string" ? data.slug : null,
      typeof data.status === "string" ? data.status : "draft",
      typeof data.author_id === "string" ? data.author_id : null,
      now,
      now,
      1,
    ];
    // Append user data fields (skip system columns, quote identifiers)
    for (const [key, value] of Object.entries(data)) {
      if (!SYSTEM_COLUMNS.has(key) && COLLECTION_NAME_REGEX.test(key)) {
        columns.push(`"${key}"`);
        values.push(serializeValue(value));
      }
    }
    const placeholders = columns.map(() => "?").join(", ");
    const columnList = columns.join(", ");
    await this.env.DB.prepare(
      `INSERT INTO ec_${collection} (${columnList}) VALUES (${placeholders})`,
    )
      .bind(...values)
      .run();
    // Re-read the created row
    const created = await this.env.DB.prepare(
      `SELECT * FROM ec_${collection} WHERE id = ? AND deleted_at IS NULL`,
    )
      .bind(id)
      .first();
    if (!created) {
      // Fallback: insert succeeded but re-read returned nothing — return a
      // minimal shape rather than failing the create.
      return { id, type: collection, data: {}, createdAt: now, updatedAt: now };
    }
    return rowToContentItem(collection, created);
  }
  /**
   * Update an existing, non-soft-deleted content item (capability: content:write).
   *
   * Always bumps updated_at and version; status/slug are only changed when
   * provided in `data`. Other keys in `data` are written as user columns when
   * they pass COLLECTION_NAME_REGEX. Identifiers are double-quoted throughout.
   *
   * @throws When no live row matched the id (not found or soft-deleted).
   * @returns The updated item as re-read from the database.
   */
  async contentUpdate(
    collection: string,
    id: string,
    data: Record<string, unknown>,
  ): Promise<{
    id: string;
    type: string;
    data: Record<string, unknown>;
    createdAt: string;
    updatedAt: string;
  }> {
    const { capabilities } = this.ctx.props;
    if (!capabilities.includes("content:write")) {
      throw new Error("Missing capability: content:write");
    }
    if (!COLLECTION_NAME_REGEX.test(collection)) {
      throw new Error(`Invalid collection name: ${collection}`);
    }
    const now = new Date().toISOString();
    // Quote identifiers to avoid SQL keyword collisions
    const setClauses: string[] = ['"updated_at" = ?', '"version" = "version" + 1'];
    const values: unknown[] = [now];
    // System field updates (only if provided)
    if (typeof data.status === "string") {
      setClauses.push('"status" = ?');
      values.push(data.status);
    }
    if (data.slug !== undefined) {
      setClauses.push('"slug" = ?');
      values.push(typeof data.slug === "string" ? data.slug : null);
    }
    // User data fields (quote identifiers)
    for (const [key, value] of Object.entries(data)) {
      if (!SYSTEM_COLUMNS.has(key) && COLLECTION_NAME_REGEX.test(key)) {
        setClauses.push(`"${key}" = ?`);
        values.push(serializeValue(value));
      }
    }
    // WHERE clause: match by id and not soft-deleted
    values.push(id);
    const result = await this.env.DB.prepare(
      `UPDATE ec_${collection} SET ${setClauses.join(", ")} WHERE "id" = ? AND "deleted_at" IS NULL`,
    )
      .bind(...values)
      .run();
    if ((result.meta?.changes ?? 0) === 0) {
      throw new Error(`Content not found or deleted: ${collection}/${id}`);
    }
    // Re-read the updated row (with soft-delete guard)
    const updated = await this.env.DB.prepare(
      `SELECT * FROM ec_${collection} WHERE id = ? AND deleted_at IS NULL`,
    )
      .bind(id)
      .first();
    if (!updated) {
      // Row vanished between UPDATE and SELECT (e.g. concurrent delete).
      throw new Error(`Content not found: ${collection}/${id}`);
    }
    return rowToContentItem(collection, updated);
  }
async contentDelete(collection: string, id: string): Promise<boolean> {
const { capabilities } = this.ctx.props;
if (!capabilities.includes("content:write")) {
throw new Error("Missing capability: content:write");
}
if (!COLLECTION_NAME_REGEX.test(collection)) {
throw new Error(`Invalid collection name: ${collection}`);
}
// Soft-delete: set deleted_at timestamp
const now = new Date().toISOString();
const result = await this.env.DB.prepare(
`UPDATE ec_${collection} SET deleted_at = ?, updated_at = ? WHERE id = ? AND deleted_at IS NULL`,
)
.bind(now, now, id)
.run();
return (result.meta?.changes ?? 0) > 0;
}
// =========================================================================
// Media Operations - capability-gated
// =========================================================================
async mediaGet(id: string): Promise<{
id: string;
filename: string;
mimeType: string;
size: number | null;
url: string;
createdAt: string;
} | null> {
const { capabilities } = this.ctx.props;
if (!capabilities.includes("media:read")) {
throw new Error("Missing capability: media:read");
}
const result = await this.env.DB.prepare("SELECT * FROM media WHERE id = ?").bind(id).first<{
id: string;
filename: string;
mime_type: string;
size: number | null;
storage_key: string;
created_at: string;
}>();
if (!result) return null;
return {
id: result.id,
filename: result.filename,
mimeType: result.mime_type,
size: result.size,
url: `/_emdash/api/media/file/${result.storage_key}`,
createdAt: result.created_at,
};
}
  /**
   * List ready media records with cursor pagination (capability: media:read).
   *
   * @param opts.limit - Page size, clamped to 100 (default 50).
   * @param opts.cursor - Exclusive upper-bound media id from a previous page.
   * @param opts.mimeType - Optional MIME prefix filter (e.g. "image/").
   * @returns A page of media items; `cursor` is set only when more remain.
   */
  async mediaList(opts: { limit?: number; cursor?: string; mimeType?: string } = {}): Promise<{
    items: Array<{
      id: string;
      filename: string;
      mimeType: string;
      size: number | null;
      url: string;
      createdAt: string;
    }>;
    cursor?: string;
    hasMore: boolean;
  }> {
    const { capabilities } = this.ctx.props;
    if (!capabilities.includes("media:read")) {
      throw new Error("Missing capability: media:read");
    }
    const limit = Math.min(opts.limit ?? 50, 100);
    // Only return ready items (matching core's MediaRepository.findMany default)
    let sql = "SELECT * FROM media WHERE status = 'ready'";
    const params: unknown[] = [];
    if (opts.mimeType) {
      // Prefix match: "image/" matches image/png, image/jpeg, etc.
      sql += " AND mime_type LIKE ?";
      params.push(opts.mimeType + "%");
    }
    if (opts.cursor) {
      sql += " AND id < ?";
      params.push(opts.cursor);
    }
    // Fetch one extra row so hasMore can be decided without a COUNT query.
    sql += " ORDER BY id DESC LIMIT ?";
    params.push(limit + 1);
    const results = await this.env.DB.prepare(sql)
      .bind(...params)
      .all<{
        id: string;
        filename: string;
        mime_type: string;
        size: number | null;
        storage_key: string;
        created_at: string;
      }>();
    const rows = results.results ?? [];
    const pageRows = rows.slice(0, limit);
    const items = pageRows.map((row) => ({
      id: row.id,
      filename: row.filename,
      mimeType: row.mime_type,
      size: row.size,
      url: `/_emdash/api/media/file/${row.storage_key}`,
      createdAt: row.created_at,
    }));
    const hasMore = rows.length > limit;
    return {
      items,
      cursor: hasMore && items.length > 0 ? items.at(-1)!.id : undefined,
      hasMore,
    };
  }
  /**
   * Create a pending media record and write bytes directly to R2.
   *
   * Unlike the admin UI flow (presigned URL → client PUT → confirm), sandboxed
   * plugins are network-isolated and can't make external requests. The bridge
   * accepts the file bytes directly and writes them to storage.
   *
   * Returns the media ID, storage key, and confirm URL. The plugin should
   * call the confirm endpoint after this to finalize the record.
   *
   * @param filename - Original filename; only used for the DB record and to
   *   derive the storage-key extension (basename only, validated).
   * @param contentType - MIME type; restricted to image/video/audio/PDF.
   * @param bytes - Raw file contents written verbatim to R2.
   * @throws When media:write is missing, the MEDIA (R2) binding is absent,
   *   or the content type is not in the allow-list.
   */
  async mediaUpload(
    filename: string,
    contentType: string,
    bytes: ArrayBuffer,
  ): Promise<{ mediaId: string; storageKey: string; url: string }> {
    const { capabilities } = this.ctx.props;
    if (!capabilities.includes("media:write")) {
      throw new Error("Missing capability: media:write");
    }
    if (!this.env.MEDIA) {
      throw new Error("Media storage (R2) not configured. Add MEDIA binding to wrangler config.");
    }
    // Validate MIME type — only allow image, video, audio, and PDF
    const ALLOWED_MIME_PREFIXES = ["image/", "video/", "audio/", "application/pdf"];
    if (!ALLOWED_MIME_PREFIXES.some((prefix) => contentType.startsWith(prefix))) {
      throw new Error(
        `Unsupported content type: ${contentType}. Allowed: image/*, video/*, audio/*, application/pdf`,
      );
    }
    const mediaId = ulid();
    // Derive extension from basename only, validate it's a simple extension
    const basename = filename.includes("/")
      ? filename.slice(filename.lastIndexOf("/") + 1)
      : filename;
    const rawExt = basename.includes(".") ? basename.slice(basename.lastIndexOf(".")) : "";
    const ext = FILE_EXT_REGEX.test(rawExt) ? rawExt : "";
    // Flat storage key matching core convention: ${ulid}${ext}
    const storageKey = `${mediaId}${ext}`;
    const now = new Date().toISOString();
    // Write bytes to R2 first, then create DB record.
    // If DB insert fails, clean up the R2 object to prevent orphans.
    await this.env.MEDIA.put(storageKey, bytes, {
      httpMetadata: { contentType },
    });
    try {
      // Create confirmed media record with ISO timestamp (matching core)
      await this.env.DB.prepare(
        "INSERT INTO media (id, filename, mime_type, size, storage_key, status, created_at) VALUES (?, ?, ?, ?, ?, 'ready', ?)",
      )
        .bind(mediaId, filename, contentType, bytes.byteLength, storageKey, now)
        .run();
    } catch (error) {
      // Clean up R2 object on DB failure to prevent orphans
      try {
        await this.env.MEDIA.delete(storageKey);
      } catch {
        // Best-effort cleanup — log and continue
        console.warn(`[plugin-bridge] Failed to clean up orphaned R2 object: ${storageKey}`);
      }
      throw error;
    }
    return {
      mediaId,
      storageKey,
      url: `/_emdash/api/media/file/${storageKey}`,
    };
  }
async mediaDelete(id: string): Promise<boolean> {
const { capabilities } = this.ctx.props;
if (!capabilities.includes("media:write")) {
throw new Error("Missing capability: media:write");
}
// Look up the storage key before deleting
const media = await this.env.DB.prepare("SELECT storage_key FROM media WHERE id = ?")
.bind(id)
.first<{ storage_key: string }>();
if (!media) return false;
// Delete the DB row
const result = await this.env.DB.prepare("DELETE FROM media WHERE id = ?").bind(id).run();
// Delete from R2 if the binding is available
if (this.env.MEDIA && media.storage_key) {
try {
await this.env.MEDIA.delete(media.storage_key);
} catch {
// Log but don't fail - the DB row is already deleted
console.warn(`[plugin-bridge] Failed to delete R2 object: ${media.storage_key}`);
}
}
return (result.meta?.changes ?? 0) > 0;
}
// =========================================================================
// Network Operations - capability-gated + host validation
// =========================================================================
  /**
   * Proxy an outbound HTTP request on behalf of the sandboxed plugin.
   *
   * Capability and host allow-list enforcement happen inside sandboxHttpFetch,
   * which receives this plugin's capabilities and allowedHosts verbatim.
   */
  async httpFetch(
    url: string,
    init?: RequestInit,
  ): Promise<{
    status: number;
    headers: Record<string, string>;
    text: string;
  }> {
    const { capabilities, allowedHosts } = this.ctx.props;
    return sandboxHttpFetch(url, init, { capabilities, allowedHosts });
  }
// =========================================================================
// User Operations - capability-gated (users:read)
// =========================================================================
async userGet(id: string): Promise<{
id: string;
email: string;
name: string | null;
role: number;
createdAt: string;
} | null> {
const { capabilities } = this.ctx.props;
if (!capabilities.includes("users:read")) {
throw new Error("Missing capability: users:read");
}
const result = await this.env.DB.prepare(
"SELECT id, email, name, role, created_at FROM users WHERE id = ?",
)
.bind(id)
.first<{
id: string;
email: string;
name: string | null;
role: number;
created_at: string;
}>();
if (!result) return null;
return {
id: result.id,
email: result.email,
name: result.name,
role: result.role,
createdAt: result.created_at,
};
}
async userGetByEmail(email: string): Promise<{
id: string;
email: string;
name: string | null;
role: number;
createdAt: string;
} | null> {
const { capabilities } = this.ctx.props;
if (!capabilities.includes("users:read")) {
throw new Error("Missing capability: users:read");
}
const result = await this.env.DB.prepare(
"SELECT id, email, name, role, created_at FROM users WHERE email = ?",
)
.bind(email.toLowerCase())
.first<{
id: string;
email: string;
name: string | null;
role: number;
created_at: string;
}>();
if (!result) return null;
return {
id: result.id,
email: result.email,
name: result.name,
role: result.role,
createdAt: result.created_at,
};
}
  /**
   * List users with optional role filter and cursor pagination
   * (capability: users:read).
   *
   * @param opts.role - Exact role level to filter by.
   * @param opts.limit - Page size, clamped to [1, 100] (default 50).
   * @param opts.cursor - Exclusive upper-bound user id from a previous page.
   * @returns A page of users; `nextCursor` is set only when more remain.
   */
  async userList(opts?: { role?: number; limit?: number; cursor?: string }): Promise<{
    items: Array<{
      id: string;
      email: string;
      name: string | null;
      role: number;
      createdAt: string;
    }>;
    nextCursor?: string;
  }> {
    const { capabilities } = this.ctx.props;
    if (!capabilities.includes("users:read")) {
      throw new Error("Missing capability: users:read");
    }
    const limit = Math.max(1, Math.min(opts?.limit ?? 50, 100));
    let sql = "SELECT id, email, name, role, created_at FROM users";
    const params: unknown[] = [];
    const conditions: string[] = [];
    if (opts?.role !== undefined) {
      conditions.push("role = ?");
      params.push(opts.role);
    }
    if (opts?.cursor) {
      conditions.push("id < ?");
      params.push(opts.cursor);
    }
    if (conditions.length > 0) {
      sql += ` WHERE ${conditions.join(" AND ")}`;
    }
    // Fetch one extra row so hasMore can be decided without a COUNT query.
    sql += " ORDER BY id DESC LIMIT ?";
    params.push(limit + 1);
    const results = await this.env.DB.prepare(sql)
      .bind(...params)
      .all<{
        id: string;
        email: string;
        name: string | null;
        role: number;
        created_at: string;
      }>();
    const rows = results.results ?? [];
    const pageRows = rows.slice(0, limit);
    const items = pageRows.map((row) => ({
      id: row.id,
      email: row.email,
      name: row.name,
      role: row.role,
      createdAt: row.created_at,
    }));
    const hasMore = rows.length > limit;
    return {
      items,
      nextCursor: hasMore && items.length > 0 ? items.at(-1)!.id : undefined,
    };
  }
// =========================================================================
// Email Operations - capability-gated
// =========================================================================
async emailSend(message: {
to: string;
subject: string;
text: string;
html?: string;
}): Promise<void> {
const { capabilities, pluginId } = this.ctx.props;
if (!capabilities.includes("email:send")) {
throw new Error("Missing capability: email:send");
}
if (!emailSendCallback) {
throw new Error("Email is not configured. No email provider is available.");
}
await emailSendCallback(message, pluginId);
}
// =========================================================================
// Logging
// =========================================================================
log(level: "debug" | "info" | "warn" | "error", msg: string, data?: unknown): void {
const { pluginId } = this.ctx.props;
console[level](`[plugin:${pluginId}]`, msg, data ?? "");
}
}

View File

@@ -0,0 +1,13 @@
/**
* Cloudflare Sandbox Runner - RUNTIME ENTRY
*
* This module is loaded at runtime when plugins need to be sandboxed.
* It imports cloudflare:workers and should NOT be imported at config time.
*
* For config-time usage, import { sandbox } from "@emdash-cms/cloudflare" instead.
*
*/
// Worker Loader runner and its factory (requires the LOADER binding).
export { CloudflareSandboxRunner, createSandboxRunner, type PluginBridgeProps } from "./runner.js";
// Capability-enforcing bridge entrypoint exposed to sandboxed plugins.
export { PluginBridge, setEmailSendCallback, type PluginBridgeEnv } from "./bridge.js";
// Generates the RPC wrapper module loaded as each plugin's main module.
export { generatePluginWrapper } from "./wrapper.js";

View File

@@ -0,0 +1,367 @@
/**
* Cloudflare Sandbox Runner
*
* Uses Worker Loader to run plugins in isolated V8 isolates.
* Plugins communicate with the host via a BRIDGE service binding
* that enforces capabilities and scopes operations.
*
* This module imports directly from cloudflare:workers to access
* the LOADER binding and PluginBridge export. It's only loaded
* when the user configures `sandboxRunner: "@emdash-cms/cloudflare/sandbox"`.
*
*/
import { env, exports } from "cloudflare:workers";
import {
normalizeCapabilities,
type SandboxRunner,
type SandboxedPlugin,
type SandboxEmailSendCallback,
type SandboxOptions,
type SandboxRunnerFactory,
type SerializedRequest,
type PluginManifest,
} from "emdash";
import { setEmailSendCallback } from "./bridge.js";
import type { WorkerLoader, WorkerStub, PluginBridgeBinding, WorkerLoaderLimits } from "./types.js";
import { generatePluginWrapper } from "./wrapper.js";
// Minimal "emdash" module served into the isolate so plugin code can
// `import { definePlugin } from "emdash"` — it's just an identity function.
const EMDASH_SHIM = "export const definePlugin = (d) => d;\n";
/**
 * Default resource limits for sandboxed plugins.
 *
 * cpuMs and subrequests are enforced by Worker Loader at the V8 isolate level.
 * wallTimeMs is enforced by the runner via Promise.race.
 * memoryMb is declared for API compatibility but NOT currently enforced —
 * Worker Loader doesn't expose a memory limit option. V8 isolates have a
 * platform-level memory ceiling (~128MB) but it's not configurable per-worker.
 */
const DEFAULT_LIMITS = {
  cpuMs: 50,
  memoryMb: 128,
  subrequests: 10,
  wallTimeMs: 30_000,
} as const;
/** Props handed to the PluginBridge binding for one plugin instance. */
export interface PluginBridgeProps {
  /** Plugin id from the manifest. */
  pluginId: string;
  /** Plugin version string (falls back to "0.0.0" upstream). */
  pluginVersion: string;
  /** Normalized capability names the plugin was granted. */
  capabilities: string[];
  /** Hosts the plugin may reach via the bridge's httpFetch. */
  allowedHosts: string[];
  /** Storage collection names declared in the manifest. */
  storageCollections: string[];
}
/**
 * Get the Worker Loader binding from env.
 *
 * Fix: the original cast claimed `WorkerLoader | null`, but a missing binding
 * yields `undefined` from the env object — normalize with `?? null` so the
 * declared return type is honest.
 */
function getLoader(): WorkerLoader | null {
  // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- Worker Loader binding accessed from untyped env object
  const loader = (env as Record<string, unknown>).LOADER as WorkerLoader | undefined;
  return loader ?? null;
}
/**
 * Get the PluginBridge from exports (loopback binding).
 *
 * Fix: the original cast claimed `... | null`, but a missing export yields
 * `undefined` — normalize with `?? null` so the declared return type is honest.
 */
function getPluginBridge(): ((opts: { props: PluginBridgeProps }) => PluginBridgeBinding) | null {
  // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- PluginBridge accessed from untyped cloudflare:workers exports
  const bridge = (exports as Record<string, unknown>).PluginBridge as
    | ((opts: { props: PluginBridgeProps }) => PluginBridgeBinding)
    | undefined;
  return bridge ?? null;
}
/**
 * Resolved resource limits with defaults applied.
 * Produced by resolveLimits; every field is concrete (no optionals).
 */
interface ResolvedLimits {
  /** Max CPU milliseconds per invocation (isolate-enforced). */
  cpuMs: number;
  /** Declared memory ceiling in MB — not currently enforced (see DEFAULT_LIMITS). */
  memoryMb: number;
  /** Max subrequests per invocation (isolate-enforced). */
  subrequests: number;
  /** Max wall-clock milliseconds per invocation (runner-enforced). */
  wallTimeMs: number;
}
/**
 * Resolve resource limits by merging user-provided overrides with defaults.
 *
 * @param limits - Optional partial overrides from SandboxOptions.
 * @returns Fully-populated limits; any unset field falls back to DEFAULT_LIMITS.
 */
function resolveLimits(limits?: SandboxOptions["limits"]): ResolvedLimits {
  return {
    cpuMs: limits?.cpuMs ?? DEFAULT_LIMITS.cpuMs,
    memoryMb: limits?.memoryMb ?? DEFAULT_LIMITS.memoryMb,
    subrequests: limits?.subrequests ?? DEFAULT_LIMITS.subrequests,
    wallTimeMs: limits?.wallTimeMs ?? DEFAULT_LIMITS.wallTimeMs,
  };
}
/**
 * Cloudflare sandbox runner using Worker Loader.
 *
 * Caches one CloudflareSandboxedPlugin per "id:version" key; the underlying
 * V8 isolate lifecycle is managed by Worker Loader itself.
 */
export class CloudflareSandboxRunner implements SandboxRunner {
  // Loaded plugins keyed by `${manifest.id}:${manifest.version}`.
  private plugins = new Map<string, CloudflareSandboxedPlugin>();
  private options: SandboxOptions;
  // Limits with defaults applied once at construction.
  private resolvedLimits: ResolvedLimits;
  // Optional site metadata passed through to the plugin wrapper.
  private siteInfo?: { name: string; url: string; locale: string };
  constructor(options: SandboxOptions) {
    this.options = options;
    this.resolvedLimits = resolveLimits(options.limits);
    this.siteInfo = options.siteInfo;
    // Wire email send callback if provided at construction time
    setEmailSendCallback(options.emailSend ?? null);
  }
  /**
   * Set the email send callback for sandboxed plugins.
   * Called after the EmailPipeline is created, since the pipeline
   * doesn't exist when the sandbox runner is constructed.
   */
  setEmailSend(callback: SandboxEmailSendCallback | null): void {
    setEmailSendCallback(callback);
  }
  /**
   * Check if Worker Loader is available.
   */
  isAvailable(): boolean {
    return !!getLoader() && !!getPluginBridge();
  }
  /**
   * Load a sandboxed plugin.
   *
   * @param manifest - Plugin manifest with capabilities and storage declarations
   * @param code - The bundled plugin JavaScript code
   * @throws When the LOADER binding or PluginBridge export is missing.
   */
  async load(manifest: PluginManifest, code: string): Promise<SandboxedPlugin> {
    const pluginId = `${manifest.id}:${manifest.version}`;
    // Return cached plugin if available
    const existing = this.plugins.get(pluginId);
    if (existing) return existing;
    const loader = getLoader();
    const pluginBridge = getPluginBridge();
    if (!loader) {
      throw new Error(
        "Worker Loader not available. Add worker_loaders binding to wrangler config.",
      );
    }
    if (!pluginBridge) {
      throw new Error(
        "PluginBridge not available. Export PluginBridge from your worker entrypoint.",
      );
    }
    const plugin = new CloudflareSandboxedPlugin(
      manifest,
      code,
      loader,
      pluginBridge,
      this.resolvedLimits,
      this.siteInfo,
    );
    this.plugins.set(pluginId, plugin);
    return plugin;
  }
  /**
   * Terminate all loaded plugins.
   */
  async terminateAll(): Promise<void> {
    // Sequential teardown; each terminate() is lightweight (see plugin class).
    for (const plugin of this.plugins.values()) {
      await plugin.terminate();
    }
    this.plugins.clear();
  }
}
/**
 * A plugin running in a Worker Loader isolate.
 *
 * IMPORTANT: Worker stubs and bridge bindings are tied to request context.
 * We must create fresh stubs for each invocation to avoid I/O isolation errors:
 * "Cannot perform I/O on behalf of a different request"
 */
class CloudflareSandboxedPlugin implements SandboxedPlugin {
  // "id:version" — also used as the Worker Loader instance name.
  readonly id: string;
  readonly manifest: PluginManifest;
  private loader: WorkerLoader;
  // Factory for per-request bridge bindings (from the host's exports).
  private createBridge: (opts: { props: PluginBridgeProps }) => PluginBridgeBinding;
  // Bundled plugin JavaScript, loaded as "sandbox-plugin.js".
  private code: string;
  // Lazily-generated wrapper module; cached across invocations.
  private wrapperCode: string | null = null;
  private limits: ResolvedLimits;
  private siteInfo?: { name: string; url: string; locale: string };
  constructor(
    manifest: PluginManifest,
    code: string,
    loader: WorkerLoader,
    createBridge: (opts: { props: PluginBridgeProps }) => PluginBridgeBinding,
    limits: ResolvedLimits,
    siteInfo?: { name: string; url: string; locale: string },
  ) {
    this.id = `${manifest.id}:${manifest.version}`;
    this.manifest = manifest;
    this.code = code;
    this.loader = loader;
    this.createBridge = createBridge;
    this.limits = limits;
    this.siteInfo = siteInfo;
  }
  /**
   * Create a fresh worker stub for the current request.
   *
   * Worker Loader stubs contain bindings (like BRIDGE) that are tied to the
   * request context in which they were created. Reusing stubs across requests
   * causes "Cannot perform I/O on behalf of a different request" errors.
   *
   * The Worker Loader internally caches the V8 isolate, so we only pay the
   * cost of creating the bridge binding and stub wrapper per request.
   */
  private createWorker(): WorkerStub {
    // Cache the wrapper code (CPU-bound, no I/O context issues)
    if (!this.wrapperCode) {
      this.wrapperCode = generatePluginWrapper(this.manifest, {
        site: this.siteInfo,
      });
    }
    // Create fresh bridge binding for THIS request.
    //
    // Capabilities are normalized to canonical names here so the bridge
    // only ever sees the current vocabulary. Manifests installed before
    // the rename (or sites still using the legacy alias layer) keep
    // working — `normalizeCapabilities` rewrites legacy names like
    // `read:content` → `content:read` and `network:fetch` → `network:request`.
    const bridgeBinding = this.createBridge({
      props: {
        pluginId: this.manifest.id,
        pluginVersion: this.manifest.version || "0.0.0",
        capabilities: normalizeCapabilities(this.manifest.capabilities || []),
        allowedHosts: this.manifest.allowedHosts || [],
        storageCollections: Object.keys(this.manifest.storage || {}),
      },
    });
    // Build Worker Loader limits from resolved resource limits
    const loaderLimits: WorkerLoaderLimits = {
      cpuMs: this.limits.cpuMs,
      subRequests: this.limits.subrequests,
    };
    // Get a fresh stub with the new bridge binding.
    // Worker Loader caches the isolate but the stub/bindings are per-call.
    return this.loader.get(this.id, () => ({
      compatibilityDate: "2026-04-01",
      mainModule: "plugin.js",
      modules: {
        "plugin.js": { js: this.wrapperCode! },
        "sandbox-plugin.js": { js: this.code },
        emdash: { js: EMDASH_SHIM },
      },
      // Block direct network access - plugins must use ctx.http via bridge
      globalOutbound: null,
      // Enforce resource limits at the V8 isolate level
      limits: loaderLimits,
      env: {
        // Plugin metadata
        PLUGIN_ID: this.manifest.id,
        PLUGIN_VERSION: this.manifest.version || "0.0.0",
        // Bridge binding for all host operations
        BRIDGE: bridgeBinding,
      },
    }));
  }
  /**
   * Run a function with wall-time enforcement.
   *
   * CPU limits and subrequest limits are enforced by the Worker Loader
   * at the V8 isolate level. Wall-time is enforced here because Worker
   * Loader doesn't expose a wall-time limit — a plugin could stall
   * indefinitely waiting on network I/O.
   */
  private async withWallTimeLimit<T>(operation: string, fn: () => Promise<T>): Promise<T> {
    const wallTimeMs = this.limits.wallTimeMs;
    let timer: ReturnType<typeof setTimeout> | undefined;
    const timeout = new Promise<never>((_, reject) => {
      timer = setTimeout(() => {
        reject(
          new Error(
            `Plugin ${this.manifest.id} exceeded wall-time limit of ${wallTimeMs}ms during ${operation}`,
          ),
        );
      }, wallTimeMs);
    });
    try {
      return await Promise.race([fn(), timeout]);
    } finally {
      // Always clear the timer so the pending rejection can't fire later.
      if (timer !== undefined) clearTimeout(timer);
    }
  }
  /**
   * Invoke a hook in the sandboxed plugin.
   *
   * CPU and subrequest limits are enforced by Worker Loader.
   * Wall-time is enforced here.
   */
  async invokeHook(hookName: string, event: unknown): Promise<unknown> {
    return this.withWallTimeLimit(`hook:${hookName}`, () => {
      const worker = this.createWorker();
      const entrypoint = worker.getEntrypoint<PluginEntrypoint>("default");
      return entrypoint.invokeHook(hookName, event);
    });
  }
  /**
   * Invoke an API route in the sandboxed plugin.
   *
   * CPU and subrequest limits are enforced by Worker Loader.
   * Wall-time is enforced here.
   */
  async invokeRoute(
    routeName: string,
    input: unknown,
    request: SerializedRequest,
  ): Promise<unknown> {
    return this.withWallTimeLimit(`route:${routeName}`, () => {
      const worker = this.createWorker();
      const entrypoint = worker.getEntrypoint<PluginEntrypoint>("default");
      return entrypoint.invokeRoute(routeName, input, request);
    });
  }
  /**
   * Terminate the sandboxed plugin.
   */
  async terminate(): Promise<void> {
    // Worker Loader manages isolate lifecycle - nothing to do here
    this.wrapperCode = null;
  }
}
/**
 * The RPC interface exposed by the plugin wrapper.
 * Implemented by the "default" entrypoint of the dynamically loaded worker.
 */
interface PluginEntrypoint {
  /** Dispatch a lifecycle hook by name with its event payload. */
  invokeHook(hookName: string, event: unknown): Promise<unknown>;
  /** Dispatch a named API route with its input and the serialized request. */
  invokeRoute(routeName: string, input: unknown, request: SerializedRequest): Promise<unknown>;
}
/**
 * Factory function for creating the Cloudflare sandbox runner.
 *
 * Conforms to the SandboxRunnerFactory signature. The LOADER binding and
 * PluginBridge export are resolved internally via cloudflare:workers imports.
 */
export const createSandboxRunner: SandboxRunnerFactory = (options) =>
  new CloudflareSandboxRunner(options);

View File

@@ -0,0 +1,181 @@
/**
* Cloudflare-specific types for sandbox runner
*/
import type { D1Database, R2Bucket } from "@cloudflare/workers-types";
/**
 * Environment bindings required for sandbox runner.
 * These must be configured in wrangler.jsonc.
 */
export interface CloudflareSandboxEnv {
  /** Worker Loader binding for spawning plugin isolates; when absent the runner reports unavailable */
  LOADER?: WorkerLoader;
  /** D1 database for plugin storage and bridge operations */
  DB: D1Database;
  /** R2 bucket for plugin code storage (optional if loading from config) */
  PLUGINS?: R2Bucket;
}
/**
 * Worker Loader binding type.
 * This is the API provided by Cloudflare's Worker Loader feature.
 */
export interface WorkerLoader {
  /**
   * Get or create a dynamic worker instance.
   *
   * The config callback is only invoked when the instance isn't already
   * cached by the loader, so it may be called zero or one time per get().
   *
   * @param name - Unique identifier for this worker instance
   * @param config - Configuration function returning worker setup
   * @returns A stub to interact with the dynamic worker
   */
  get(name: string, config: () => WorkerLoaderConfig | Promise<WorkerLoaderConfig>): WorkerStub;
}
/**
 * Configuration for a dynamically loaded worker.
 */
export interface WorkerLoaderConfig {
  /** Compatibility date for the worker */
  compatibilityDate?: string;
  /** Name of the main module (must be in modules) */
  mainModule: string;
  /** Map of module names to their code — either raw source or `{ js: source }` */
  modules: Record<string, string | { js: string }>;
  /** Environment bindings to pass to the worker */
  env?: Record<string, unknown>;
  /**
   * Outbound fetch handler.
   * Set to null to block all network access.
   * Set to a service binding to intercept/proxy requests.
   */
  globalOutbound?: null | object;
  /**
   * Resource limits enforced at the V8 isolate level.
   * Analogous to Workers for Platforms custom limits.
   */
  limits?: WorkerLoaderLimits;
}
/**
 * Resource limits for a dynamically loaded worker.
 * Enforced by the Worker Loader runtime at the V8 isolate level.
 * Both fields are optional; an omitted limit presumably falls back to the
 * platform default — confirm against Worker Loader docs.
 */
export interface WorkerLoaderLimits {
  /** Maximum CPU time in milliseconds per invocation */
  cpuMs?: number;
  /** Maximum number of subrequests (fetch/service-binding calls) per invocation */
  subRequests?: number;
}
/**
 * Stub returned by Worker Loader for interacting with dynamic workers.
 */
export interface WorkerStub {
  /**
   * Get the default entrypoint (fetch handler).
   */
  fetch(request: Request): Promise<Response>;
  /**
   * Get a named entrypoint class instance for RPC.
   * The sandbox runner passes "default" to target the worker's default export.
   */
  getEntrypoint<T = unknown>(name?: string): T;
}
/**
 * Plugin manifest - loaded from manifest.json in plugin bundle.
 */
export interface LoadedPluginManifest {
  /** Unique plugin identifier */
  id: string;
  /** Plugin version string */
  version: string;
  /** Capability names the plugin requests (e.g. "users:read", "email:send") */
  capabilities: string[];
  /** Hosts the plugin may reach through ctx.http (enforced bridge-side) */
  allowedHosts: string[];
  /** Declared storage collections, keyed by collection name, with their indexes */
  storage: Record<string, { indexes: Array<string | string[]> }>;
  /** Names of hooks the plugin implements */
  hooks: string[];
  /** Names of routes the plugin exposes */
  routes: string[];
}
/**
 * Content item shape returned by bridge content operations.
 * Matches core's ContentItem from plugins/types.ts.
 */
interface BridgeContentItem {
  /** Content item id */
  id: string;
  /** Content type / collection name */
  type: string;
  /** Arbitrary content fields */
  data: Record<string, unknown>;
  /** Creation timestamp string (exact format defined by core — not verified here) */
  createdAt: string;
  /** Last-update timestamp string */
  updatedAt: string;
}
/**
 * Media item shape returned by bridge media operations.
 * Matches core's MediaItem from plugins/types.ts.
 */
interface BridgeMediaItem {
  /** Media item id */
  id: string;
  /** Original file name */
  filename: string;
  /** MIME content type */
  mimeType: string;
  /** Byte size; null when not recorded — TODO confirm against core */
  size: number | null;
  /** URL the media is served from */
  url: string;
  /** Creation timestamp string */
  createdAt: string;
}
/**
 * Type for the PluginBridge binding passed to sandboxed workers.
 * This is the RPC interface exposed by PluginBridge WorkerEntrypoint.
 *
 * Capability and host checks happen on the bridge side; the generated sandbox
 * wrapper simply forwards calls over RPC.
 */
export interface PluginBridgeBinding {
  // KV
  kvGet(key: string): Promise<unknown>;
  kvSet(key: string, value: unknown): Promise<void>;
  kvDelete(key: string): Promise<boolean>;
  kvList(prefix?: string): Promise<Array<{ key: string; value: unknown }>>;
  // Storage
  storageGet(collection: string, id: string): Promise<unknown>;
  storagePut(collection: string, id: string, data: unknown): Promise<void>;
  storageDelete(collection: string, id: string): Promise<boolean>;
  storageQuery(
    collection: string,
    opts?: { limit?: number; cursor?: string },
  ): Promise<{ items: Array<{ id: string; data: unknown }>; hasMore: boolean; cursor?: string }>;
  storageCount(collection: string): Promise<number>;
  storageGetMany(collection: string, ids: string[]): Promise<Map<string, unknown>>;
  storagePutMany(collection: string, items: Array<{ id: string; data: unknown }>): Promise<void>;
  storageDeleteMany(collection: string, ids: string[]): Promise<number>;
  // Content
  contentGet(collection: string, id: string): Promise<BridgeContentItem | null>;
  contentList(
    collection: string,
    opts?: { limit?: number; cursor?: string },
  ): Promise<{ items: BridgeContentItem[]; cursor?: string; hasMore: boolean }>;
  contentCreate(collection: string, data: Record<string, unknown>): Promise<BridgeContentItem>;
  contentUpdate(
    collection: string,
    id: string,
    data: Record<string, unknown>,
  ): Promise<BridgeContentItem>;
  contentDelete(collection: string, id: string): Promise<boolean>;
  // Media
  mediaGet(id: string): Promise<BridgeMediaItem | null>;
  mediaList(opts?: {
    limit?: number;
    cursor?: string;
    mimeType?: string;
  }): Promise<{ items: BridgeMediaItem[]; cursor?: string; hasMore: boolean }>;
  mediaUpload(
    filename: string,
    contentType: string,
    bytes: ArrayBuffer,
  ): Promise<{ mediaId: string; storageKey: string; url: string }>;
  mediaDelete(id: string): Promise<boolean>;
  // Users (requires the "users:read" capability). The generated sandbox
  // wrapper proxies ctx.users.get/getByEmail/list to these methods, so they
  // are part of the bridge's RPC surface. Option shape mirrors the other
  // list calls — TODO confirm against the PluginBridge implementation.
  userGet(id: string): Promise<unknown>;
  userGetByEmail(email: string): Promise<unknown>;
  userList(opts?: { limit?: number; cursor?: string }): Promise<unknown>;
  // Network
  httpFetch(
    url: string,
    init?: RequestInit,
  ): Promise<{ status: number; headers: Record<string, string>; text: string }>;
  // Email
  emailSend(message: { to: string; subject: string; text: string; html?: string }): Promise<void>;
  // Logging
  log(level: "debug" | "info" | "warn" | "error", msg: string, data?: unknown): void;
}

View File

@@ -0,0 +1,241 @@
/**
* Plugin Wrapper Generator
*
* Generates the code that wraps a plugin to run in a Worker Loader isolate.
* The wrapper:
* - Imports plugin hooks and routes from a separate module ("sandbox-plugin.js")
* - Creates plugin context that proxies to BRIDGE service binding
* - Exposes hooks and routes via RPC through WorkerEntrypoint
*
* Plugin code runs in its own module scope, isolated from the wrapper template.
*
*/
import { normalizeCapabilities, type PluginManifest } from "emdash";
// Strips one trailing "/" so base URLs can be joined with "/path" segments.
const TRAILING_SLASH_RE = /\/$/;
// Line terminators stripped from manifest-derived text before it is embedded
// in a generated "//" comment. U+2028/U+2029 are included because ECMAScript
// treats them as line terminators too: left in place, they would end the
// comment line and let a hostile manifest id/version inject code into the
// generated wrapper source.
const NEWLINE_RE = /[\n\r\u2028\u2029]/g;
// Breaks up comment-close sequences ("*" followed by "/") so embedded text
// cannot terminate a block comment.
const COMMENT_CLOSE_RE = /\*\//g;
/**
 * Options for wrapper generation
 *
 * **Known limitation:** `site` info is baked into the generated wrapper code
 * at load time. If site settings change (e.g., admin updates site name/URL),
 * sandboxed plugins will see stale values until the worker restarts.
 * Trusted-mode plugins always read fresh values from the database.
 */
export interface WrapperOptions {
  /** Site info to inject into the context (no RPC needed). Defaults to
   *  { name: "", url: "", locale: "en" } when omitted. */
  site?: { name: string; url: string; locale: string };
}
/**
 * Generate the JavaScript source of the sandbox wrapper for one plugin.
 *
 * The returned string is a complete worker module, loaded into a Worker
 * Loader isolate next to the plugin's own code (imported as
 * "sandbox-plugin.js"). All privileged operations are proxied over RPC to the
 * BRIDGE service binding, which enforces capabilities; `site` values are
 * baked in at generation time (see WrapperOptions).
 *
 * @param manifest - Plugin manifest; id/version are embedded (sanitized) in a
 *   header comment, and its storage/capability declarations shape the ctx.
 * @param options - Optional site info to inject into the generated context.
 * @returns JavaScript module source for the wrapper.
 */
export function generatePluginWrapper(manifest: PluginManifest, options?: WrapperOptions): string {
  const storageCollections = Object.keys(manifest.storage || {});
  const site = options?.site ?? { name: "", url: "", locale: "en" };
  // Normalize so manifests that still declare legacy names (`read:users`)
  // expose the same APIs as canonical names (`users:read`).
  const capabilities = normalizeCapabilities(manifest.capabilities ?? []);
  const hasReadUsers = capabilities.includes("users:read");
  const hasEmailSend = capabilities.includes("email:send");
  // Everything below is emitted verbatim as the wrapper module's source —
  // it is a string literal, not wrapper-side logic.
  return `
// =============================================================================
// Sandboxed Plugin Wrapper
// Generated by @emdash-cms/cloudflare
// Plugin: ${sanitizeComment(manifest.id)}@${sanitizeComment(manifest.version)}
// =============================================================================
import { WorkerEntrypoint } from "cloudflare:workers";
// Plugin code lives in a separate module for scope isolation
import pluginModule from "sandbox-plugin.js";
// Extract hooks and routes from the plugin module
const hooks = pluginModule?.hooks || pluginModule?.default?.hooks || {};
const routes = pluginModule?.routes || pluginModule?.default?.routes || {};
// -----------------------------------------------------------------------------
// Context Factory - creates ctx that proxies to BRIDGE
// -----------------------------------------------------------------------------
function createContext(env) {
  const bridge = env.BRIDGE;
  const storageCollections = ${JSON.stringify(storageCollections)};
  // KV - proxies to bridge.kvGet/Set/Delete/List
  const kv = {
    get: (key) => bridge.kvGet(key),
    set: (key, value) => bridge.kvSet(key, value),
    delete: (key) => bridge.kvDelete(key),
    list: (prefix) => bridge.kvList(prefix)
  };
  // Storage collection factory
  function createStorageCollection(collectionName) {
    return {
      get: (id) => bridge.storageGet(collectionName, id),
      put: (id, data) => bridge.storagePut(collectionName, id, data),
      delete: (id) => bridge.storageDelete(collectionName, id),
      exists: async (id) => (await bridge.storageGet(collectionName, id)) !== null,
      query: (opts) => bridge.storageQuery(collectionName, opts),
      count: (where) => bridge.storageCount(collectionName, where),
      getMany: (ids) => bridge.storageGetMany(collectionName, ids),
      putMany: (items) => bridge.storagePutMany(collectionName, items),
      deleteMany: (ids) => bridge.storageDeleteMany(collectionName, ids)
    };
  }
  // Storage proxy that creates collections on access
  const storage = new Proxy({}, {
    get(_, collectionName) {
      if (typeof collectionName !== "string") return undefined;
      return createStorageCollection(collectionName);
    }
  });
  // Content access - proxies to bridge (capability enforced by bridge)
  const content = {
    get: (collection, id) => bridge.contentGet(collection, id),
    list: (collection, opts) => bridge.contentList(collection, opts),
    create: (collection, data) => bridge.contentCreate(collection, data),
    update: (collection, id, data) => bridge.contentUpdate(collection, id, data),
    delete: (collection, id) => bridge.contentDelete(collection, id)
  };
  // Media access - proxies to bridge (capability enforced by bridge)
  const media = {
    get: (id) => bridge.mediaGet(id),
    list: (opts) => bridge.mediaList(opts),
    upload: (filename, contentType, bytes) => bridge.mediaUpload(filename, contentType, bytes),
    getUploadUrl: () => { throw new Error("getUploadUrl is not available in sandbox mode. Use media.upload(filename, contentType, bytes) instead."); },
    delete: (id) => bridge.mediaDelete(id)
  };
  // HTTP access - proxies to bridge (capability + host enforced by bridge)
  const http = {
    fetch: async (url, init) => {
      const result = await bridge.httpFetch(url, init);
      // Bridge returns serialized response, reconstruct Response-like object
      return {
        status: result.status,
        ok: result.status >= 200 && result.status < 300,
        headers: new Headers(result.headers),
        text: async () => result.text,
        json: async () => JSON.parse(result.text)
      };
    }
  };
  // Logger - proxies to bridge
  const log = {
    debug: (msg, data) => bridge.log("debug", msg, data),
    info: (msg, data) => bridge.log("info", msg, data),
    warn: (msg, data) => bridge.log("warn", msg, data),
    error: (msg, data) => bridge.log("error", msg, data)
  };
  // Site info - injected at wrapper generation time, no RPC needed
  const site = ${JSON.stringify(site)};
  // URL helper - generates absolute URLs from paths
  const siteBaseUrl = ${JSON.stringify(site.url.replace(TRAILING_SLASH_RE, ""))};
  function url(path) {
    if (!path.startsWith("/")) {
      throw new Error('URL path must start with "/", got: "' + path + '"');
    }
    if (path.startsWith("//")) {
      throw new Error('URL path must not be protocol-relative, got: "' + path + '"');
    }
    return siteBaseUrl + path;
  }
  // User access - proxies to bridge (capability enforced by bridge)
  const users = ${hasReadUsers} ? {
    get: (id) => bridge.userGet(id),
    getByEmail: (email) => bridge.userGetByEmail(email),
    list: (opts) => bridge.userList(opts)
  } : undefined;
  // Email access - proxies to bridge (capability enforced by bridge)
  const email = ${hasEmailSend} ? {
    send: (message) => bridge.emailSend(message)
  } : undefined;
  return {
    plugin: {
      id: env.PLUGIN_ID,
      version: env.PLUGIN_VERSION
    },
    storage,
    kv,
    content,
    media,
    http,
    log,
    site,
    url,
    users,
    email
  };
}
// -----------------------------------------------------------------------------
// Worker Entrypoint (RPC interface)
// -----------------------------------------------------------------------------
export default class PluginEntrypoint extends WorkerEntrypoint {
  async invokeHook(hookName, event) {
    const ctx = createContext(this.env);
    // Find the hook handler
    const hookDef = hooks[hookName];
    if (!hookDef) {
      // No handler for this hook - that's ok, return undefined
      return undefined;
    }
    // Get the handler (might be wrapped in config object)
    const handler = typeof hookDef === "function" ? hookDef : hookDef.handler;
    if (typeof handler !== "function") {
      throw new Error(\`Hook \${hookName} handler is not a function\`);
    }
    // Execute the hook
    return handler(event, ctx);
  }
  async invokeRoute(routeName, input, serializedRequest) {
    const ctx = createContext(this.env);
    // Find the route handler
    const route = routes[routeName];
    if (!route) {
      throw new Error(\`Route not found: \${routeName}\`);
    }
    // Get handler (might be direct function or object with handler)
    const handler = typeof route === "function" ? route : route.handler;
    if (typeof handler !== "function") {
      throw new Error(\`Route \${routeName} handler is not a function\`);
    }
    // Execute the route handler with input, request metadata, and context
    return handler({ input, request: serializedRequest, requestMeta: serializedRequest.meta }, ctx);
  }
}
`;
}
/**
 * Make a string safe to embed inside a single-line JavaScript comment.
 *
 * Collapses line breaks to spaces and splits comment-close sequences apart,
 * so a manifest-supplied id/version cannot terminate the generated header
 * comment and smuggle code into the wrapper.
 */
function sanitizeComment(s: string): string {
  const flattened = s.replace(NEWLINE_RE, " ");
  return flattened.replace(COMMENT_CLOSE_RE, "* /");
}

View File

@@ -0,0 +1,192 @@
/**
* Cloudflare R2 Storage Implementation - RUNTIME ENTRY
*
* Uses R2 bindings directly when running on Cloudflare Workers.
* This avoids the AWS SDK overhead and works with the native R2 API.
*
* This module imports directly from cloudflare:workers to access R2 bindings.
* Do NOT import this at config time - use { r2 } from "@emdash-cms/cloudflare" instead.
*
* For Astro 6 / Cloudflare adapter v13+:
* - Bindings are accessed via `import { env } from 'cloudflare:workers'`
*/
import { env } from "cloudflare:workers";
import type {
Storage,
UploadResult,
DownloadResult,
ListResult,
ListOptions,
SignedUploadUrl,
SignedUploadOptions,
} from "emdash";
import { EmDashStorageError } from "emdash";
/** Matches a single trailing "/" so a base URL can be joined with "/key" paths without doubling slashes */
const TRAILING_SLASH_REGEX = /\/$/;
/**
 * R2 Storage implementation using native bindings.
 *
 * Every method normalizes failures into EmDashStorageError with a stable
 * machine-readable code (e.g. "UPLOAD_FAILED", "NOT_FOUND") so callers can
 * branch on `code` rather than parsing message text.
 */
export class R2Storage implements Storage {
  /**
   * @param bucket - R2 bucket binding from the Worker environment.
   * @param publicUrl - Optional public base URL for the bucket; when omitted,
   *   getPublicUrl falls back to a worker-served API path.
   */
  constructor(
    private readonly bucket: R2Bucket,
    private readonly publicUrl?: string,
  ) {}
  /**
   * Upload an object and return its key, public URL, and stored size.
   * @throws EmDashStorageError ("UPLOAD_FAILED") when the put throws or yields no object.
   */
  async upload(options: {
    key: string;
    body: Buffer | Uint8Array | ReadableStream<Uint8Array>;
    contentType: string;
  }): Promise<UploadResult> {
    try {
      const result = await this.bucket.put(options.key, options.body, {
        httpMetadata: {
          contentType: options.contentType,
        },
      });
      // A falsy result from put() means nothing was written — treat as failure.
      if (!result) {
        throw new EmDashStorageError(`Failed to upload file: ${options.key}`, "UPLOAD_FAILED");
      }
      return {
        key: options.key,
        url: this.getPublicUrl(options.key),
        size: result.size,
      };
    } catch (error) {
      // Re-throw our own errors untouched; wrap everything else with context.
      if (error instanceof EmDashStorageError) throw error;
      throw new EmDashStorageError(`Failed to upload file: ${options.key}`, "UPLOAD_FAILED", error);
    }
  }
  /**
   * Download an object's body stream together with content type and size.
   * @throws EmDashStorageError ("NOT_FOUND") when the key does not exist or the result carries no body.
   * @throws EmDashStorageError ("DOWNLOAD_FAILED") for any other failure.
   */
  async download(key: string): Promise<DownloadResult> {
    try {
      const object = await this.bucket.get(key);
      if (!object) {
        throw new EmDashStorageError(`File not found: ${key}`, "NOT_FOUND");
      }
      // R2ObjectBody has the body property — use it as a type guard
      if (!("body" in object) || !object.body) {
        throw new EmDashStorageError(`File not found: ${key}`, "NOT_FOUND");
      }
      return {
        body: object.body,
        contentType: object.httpMetadata?.contentType || "application/octet-stream",
        size: object.size,
      };
    } catch (error) {
      if (error instanceof EmDashStorageError) throw error;
      throw new EmDashStorageError(`Failed to download file: ${key}`, "DOWNLOAD_FAILED", error);
    }
  }
  /**
   * Delete an object. R2 deletes are idempotent, so a missing key resolves
   * successfully; only transport/binding failures surface here.
   * @throws EmDashStorageError ("DELETE_FAILED")
   */
  async delete(key: string): Promise<void> {
    try {
      await this.bucket.delete(key);
    } catch (error) {
      throw new EmDashStorageError(`Failed to delete file: ${key}`, "DELETE_FAILED", error);
    }
  }
  /**
   * Check whether an object exists via a head lookup (no body fetched).
   * @throws EmDashStorageError ("HEAD_FAILED")
   */
  async exists(key: string): Promise<boolean> {
    try {
      const object = await this.bucket.head(key);
      return object !== null;
    } catch (error) {
      throw new EmDashStorageError(`Failed to check file existence: ${key}`, "HEAD_FAILED", error);
    }
  }
  /**
   * List objects, optionally filtered by prefix, with cursor pagination.
   * nextCursor is only returned while R2 reports the listing as truncated.
   * @throws EmDashStorageError ("LIST_FAILED")
   */
  async list(options: ListOptions = {}): Promise<ListResult> {
    try {
      const response = await this.bucket.list({
        prefix: options.prefix,
        limit: options.limit,
        cursor: options.cursor,
      });
      return {
        files: response.objects.map((item) => ({
          key: item.key,
          size: item.size,
          lastModified: item.uploaded,
          etag: item.etag,
        })),
        nextCursor: response.truncated ? response.cursor : undefined,
      };
    } catch (error) {
      throw new EmDashStorageError("Failed to list files", "LIST_FAILED", error);
    }
  }
  /**
   * Not supported with R2 bindings — always throws.
   * R2 bindings cannot mint pre-signed URLs; use the S3-compatible API with
   * R2 credentials, or upload through the Worker. Kept for interface
   * compatibility with the Storage contract.
   * @throws EmDashStorageError ("NOT_SUPPORTED")
   */
  async getSignedUploadUrl(_options: SignedUploadOptions): Promise<SignedUploadUrl> {
    throw new EmDashStorageError(
      "R2 bindings do not support pre-signed upload URLs. " +
        "Use the S3 API with R2 credentials for signed URL support, " +
        "or upload through the Worker.",
      "NOT_SUPPORTED",
    );
  }
  /**
   * Build the public URL for a key: joined onto `publicUrl` when configured,
   * otherwise a relative API path served through the Worker.
   * NOTE(review): the key is not URL-encoded here — assumes keys are limited
   * to URL-safe characters upstream; confirm.
   */
  getPublicUrl(key: string): string {
    if (this.publicUrl) {
      return `${this.publicUrl.replace(TRAILING_SLASH_REGEX, "")}/${key}`;
    }
    // Without a public URL, we can't generate one for R2 bindings —
    // return a relative path that is served through the API.
    return `/_emdash/api/media/file/${key}`;
  }
}
/**
 * Create R2 storage adapter
 * This is the factory function called at runtime
 *
 * Uses cloudflare:workers to access bindings directly.
 *
 * @param config - Storage config: `binding` (required) names the R2 binding
 *   declared in wrangler.jsonc; `publicUrl` (optional) is the bucket's public
 *   base URL.
 * @returns Storage backed by the named R2 bucket binding.
 * @throws EmDashStorageError ("BINDING_NOT_FOUND") when the binding name is
 *   missing from config or absent from the Worker environment.
 */
export function createStorage(config: Record<string, unknown>): Storage {
  // Non-string values are treated as absent rather than coerced.
  const binding = typeof config.binding === "string" ? config.binding : "";
  const publicUrl = typeof config.publicUrl === "string" ? config.publicUrl : undefined;
  if (!binding) {
    throw new EmDashStorageError(
      `R2 binding name is required in storage config.`,
      "BINDING_NOT_FOUND",
    );
  }
  // env from cloudflare:workers doesn't have an index signature, so cast is needed
  // eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion -- R2Bucket binding accessed from untyped env object
  const bucket = (env as Record<string, unknown>)[binding] as R2Bucket | undefined;
  if (!bucket) {
    throw new EmDashStorageError(
      `R2 binding "${binding}" not found. ` +
        `Make sure the binding is defined in wrangler.jsonc and ` +
        `you're running on Cloudflare Workers.\n\n` +
        `Example wrangler.jsonc:\n` +
        `{\n` +
        `  "r2_buckets": [{\n` +
        `    "binding": "${binding}",\n` +
        `    "bucket_name": "my-bucket"\n` +
        `  }]\n` +
        `}`,
      "BINDING_NOT_FOUND",
    );
  }
  return new R2Storage(bucket, publicUrl);
}