first commit

This commit is contained in:
Matt Kane
2026-04-01 10:44:22 +01:00
commit 43fcb9a131
1789 changed files with 395041 additions and 0 deletions

View File

@@ -0,0 +1,100 @@
import { customHeadersInterceptor, resolveCustomHeaders } from "../client/cf-access.js";
import { EmDashClient } from "../client/index.js";
import type { Interceptor } from "../client/transport.js";
import { getCredentials, saveCredentials } from "./credentials.js";
/**
 * Connection-related CLI args accepted by createClientFromArgs.
 * NOTE(review): `connectionArgs` also defines `header` and `json` flags that
 * are not modeled here — confirm whether they should be added to this type.
 */
export interface ClientArgs {
  /** EmDash instance URL; falls back to EMDASH_URL env, then localhost. */
  url?: string;
  /** Auth token; falls back to EMDASH_TOKEN env, then stored credentials. */
  token?: string;
}
/**
 * Shared connection args for all CLI commands that talk to an EmDash instance.
 * Spread into each command's `args` definition.
 */
export const connectionArgs = {
  // Target instance; default matches the local dev server port.
  url: {
    type: "string" as const,
    alias: "u",
    description: "EmDash instance URL",
    default: "http://localhost:4321",
  },
  // Explicit auth token; overrides env vars and stored credentials.
  token: {
    type: "string" as const,
    alias: "t",
    description: "Auth token",
  },
  // Extra HTTP header(s) to send with every request (e.g. CF Access headers).
  header: {
    type: "string" as const,
    alias: "H",
    description: 'Custom header "Name: Value" (repeatable, or use EMDASH_HEADERS env)',
  },
  // Switch human-readable output to machine-readable JSON.
  json: {
    type: "boolean" as const,
    description: "Output as JSON",
  },
};
/**
 * Create an EmDashClient from CLI args, env vars, and stored credentials.
 *
 * Auth resolution order:
 * 1. --token flag
 * 2. EMDASH_TOKEN env var
 * 3. Stored credentials (~/.config/emdash/auth.json)
 * 4. Dev bypass (if URL is localhost)
 *
 * Custom headers are merged from (lowest to highest precedence — later
 * sources override earlier ones):
 * 1. Stored credentials (persisted during `emdash login --header`)
 * 2. Headers from resolveCustomHeaders()
 *
 * NOTE(review): the --header flag declared in `connectionArgs` is never read
 * here; presumably resolveCustomHeaders() picks up EMDASH_HEADERS / argv
 * itself — confirm against its implementation.
 */
export function createClientFromArgs(args: ClientArgs): EmDashClient {
  const baseUrl = args.url || process.env["EMDASH_URL"] || "http://localhost:4321";
  let token = args.token || process.env["EMDASH_TOKEN"];
  // Heuristic: substring match, so e.g. "localhost.example.com" also counts.
  const isLocal = baseUrl.includes("localhost") || baseUrl.includes("127.0.0.1");
  // Stored credentials are only consulted when no explicit token was given.
  const cred = !token ? getCredentials(baseUrl) : null;
  // Merge custom headers: stored credentials are overridden by resolved ones.
  const customHeaders = {
    ...cred?.customHeaders,
    ...resolveCustomHeaders(),
  };
  const extraInterceptors: Interceptor[] = [];
  if (Object.keys(customHeaders).length > 0) {
    extraInterceptors.push(customHeadersInterceptor(customHeaders));
  }
  // Check stored credentials if no explicit token
  if (!token && cred) {
    // Check if access token is expired
    if (new Date(cred.expiresAt) > new Date()) {
      token = cred.accessToken;
    } else {
      // Token expired — use the refresh interceptor in the client.
      // Pass the refresh token so the client can auto-refresh; the (expired)
      // access token is still passed so the client has something to present
      // on the first request before the refresh kicks in.
      return new EmDashClient({
        baseUrl,
        token: cred.accessToken,
        refreshToken: cred.refreshToken,
        // Persist the refreshed token so subsequent CLI invocations reuse it.
        onTokenRefresh: (newAccessToken, expiresIn) => {
          saveCredentials(baseUrl, {
            ...cred,
            accessToken: newAccessToken,
            expiresAt: new Date(Date.now() + expiresIn * 1000).toISOString(),
          });
        },
        interceptors: extraInterceptors,
      });
    }
  }
  // Default path: token (possibly undefined) plus dev bypass on local URLs.
  return new EmDashClient({
    baseUrl,
    token,
    devBypass: !token && isLocal,
    interceptors: extraInterceptors,
  });
}

View File

@@ -0,0 +1,46 @@
/**
* Auth CLI commands
*/
import { defineCommand } from "citty";
import { consola } from "consola";
import pc from "picocolors";
import { encodeBase64url } from "../../utils/base64.js";
/**
 * Generate a cryptographically secure auth secret:
 * 32 random bytes from the Web Crypto CSPRNG, base64url-encoded.
 */
function generateAuthSecret(): string {
  // getRandomValues fills and returns the same typed array.
  const randomBytes = crypto.getRandomValues(new Uint8Array(32));
  return encodeBase64url(randomBytes);
}
/** `auth secret` subcommand: print a freshly generated secret with styling. */
const secretCommand = defineCommand({
  meta: {
    name: "secret",
    description: "Generate a secure auth secret",
  },
  run() {
    const secret = generateAuthSecret();
    // Emit each line in order; blank lines give the output breathing room.
    const lines = [
      "",
      pc.bold("Generated auth secret:"),
      "",
      ` ${pc.cyan("EMDASH_AUTH_SECRET")}=${pc.green(secret)}`,
      "",
      pc.dim("Add this to your environment variables."),
      "",
    ];
    for (const line of lines) {
      consola.log(line);
    }
  },
});
/** `emdash auth` — parent command grouping authentication utilities. */
export const authCommand = defineCommand({
  meta: {
    name: "auth",
    description: "Authentication utilities",
  },
  subCommands: {
    // `emdash auth secret` — prints a freshly generated auth secret.
    secret: secretCommand,
  },
});

View File

@@ -0,0 +1,247 @@
/**
* Bundle utility functions
*
* Shared logic extracted from the bundle command so it can be tested
* without the CLI harness and tsdown dependency.
*/
import { createWriteStream } from "node:fs";
import { readdir, stat, access } from "node:fs/promises";
import { resolve, join } from "node:path";
import { pipeline } from "node:stream/promises";
import { imageSize } from "image-size";
import { packTar } from "modern-tar/fs";
import type {
PluginManifest,
ResolvedPlugin,
HookName,
ManifestHookEntry,
} from "../../plugins/types.js";
// ── Constants ────────────────────────────────────────────────────────────────
/** Maximum total bundle size in bytes (5 MiB). */
export const MAX_BUNDLE_SIZE = 5 * 1024 * 1024;
/** Maximum number of screenshots included in a bundle. */
export const MAX_SCREENSHOTS = 5;
/** Maximum allowed screenshot width in pixels (larger only triggers a warning). */
export const MAX_SCREENSHOT_WIDTH = 1920;
/** Maximum allowed screenshot height in pixels (larger only triggers a warning). */
export const MAX_SCREENSHOT_HEIGHT = 1080;
/** Expected square icon size (width and height) in pixels. */
export const ICON_SIZE = 256;
// ── Regex patterns (module-scope to avoid re-compilation) ────────────────────
/** Matches require("node:xxx") / require("xxx") / import("node:xxx") in bundled output */
const NODE_BUILTIN_IMPORT_RE = /(?:import|require)\s*\(?["'](?:node:)?([a-z_]+)["']\)?/g;
/** Strips a leading "./" from a relative path. */
const LEADING_DOT_SLASH_RE = /^\.\//;
/** Matches a path that starts with "dist/". */
const DIST_PREFIX_RE = /^dist\//;
/** Matches a ".js" or ".mjs" file extension. */
const MJS_EXT_RE = /\.m?js$/;
/** Matches a ".ts" extension (swapped for ".tsx" when probing source files). */
const TS_TO_TSX_RE = /\.ts$/;
/**
 * Node.js built-in modules that shouldn't appear in sandbox code.
 *
 * Keep this in sync with the names `NODE_BUILTIN_IMPORT_RE` can match
 * (lowercase letters and underscores only). Previously missing entries
 * (`async_hooks`, `console`, `constants`, `diagnostics_channel`) are now
 * included so the sandbox scan does not silently pass them.
 */
const NODE_BUILTINS = new Set([
  "assert",
  "async_hooks",
  "buffer",
  "child_process",
  "cluster",
  "console",
  "constants",
  "crypto",
  "dgram",
  "diagnostics_channel",
  "dns",
  "domain",
  "events",
  "fs",
  "http",
  "http2",
  "https",
  "inspector",
  "module",
  "net",
  "os",
  "path",
  "perf_hooks",
  "process",
  "punycode",
  "querystring",
  "readline",
  "repl",
  "stream",
  "string_decoder",
  "sys",
  "timers",
  "tls",
  "trace_events",
  "tty",
  "url",
  "util",
  "v8",
  "vm",
  "wasi",
  "worker_threads",
  "zlib",
]);
// ── File helpers ─────────────────────────────────────────────────────────────
/**
 * Check whether a filesystem path exists (file or directory).
 * Uses fs.access, so any access error is reported as "does not exist".
 */
export async function fileExists(path: string): Promise<boolean> {
  return access(path).then(
    () => true,
    () => false,
  );
}
// ── Image dimension readers ──────────────────────────────────────────────────
/**
 * Read image dimensions from a buffer.
 * Returns [width, height] or null if the format is unrecognized
 * (imageSize throws, or reports a missing width/height).
 */
export function readImageDimensions(buf: Uint8Array): [number, number] | null {
  try {
    const { width, height } = imageSize(buf);
    return width != null && height != null ? [width, height] : null;
  } catch {
    return null;
  }
}
// ── Manifest extraction ──────────────────────────────────────────────────────
/**
 * Extract manifest metadata from a ResolvedPlugin.
 * Strips functions (hooks, route handlers) and keeps only serializable metadata.
 */
export function extractManifest(plugin: ResolvedPlugin): PluginManifest {
  // Hooks with default settings (non-exclusive, priority 100, timeout 5000)
  // are emitted as bare HookName strings; anything else becomes a structured
  // ManifestHookEntry carrying only the non-default fields.
  const hooks: Array<ManifestHookEntry | HookName> = [];
  for (const [name, resolved] of Object.entries(plugin.hooks)) {
    if (!resolved) continue;
    const isDefault =
      !resolved.exclusive && resolved.priority === 100 && resolved.timeout === 5000;
    if (isDefault) {
      hooks.push(name as HookName);
      continue;
    }
    const entry: ManifestHookEntry = { name };
    if (resolved.exclusive) entry.exclusive = true;
    if (resolved.priority !== 100) entry.priority = resolved.priority;
    if (resolved.timeout !== 5000) entry.timeout = resolved.timeout;
    hooks.push(entry);
  }
  return {
    id: plugin.id,
    version: plugin.version,
    capabilities: plugin.capabilities,
    allowedHosts: plugin.allowedHosts,
    storage: plugin.storage,
    hooks,
    routes: Object.keys(plugin.routes),
    admin: {
      // admin.entry is deliberately omitted — it is a host-side module
      // specifier, not meaningful inside a bundle.
      settingsSchema: plugin.admin.settingsSchema,
      pages: plugin.admin.pages,
      widgets: plugin.admin.widgets,
    },
  };
}
// ── Node.js built-in detection ───────────────────────────────────────────────
/**
 * Scan bundled code for Node.js built-in imports.
 * Matches require("node:xxx"), require("xxx"), import("node:xxx") — the patterns
 * that appear in bundled ESM/CJS output (not source-level named imports).
 * Returns deduplicated array of built-in module names found.
 */
export function findNodeBuiltinImports(code: string): string[] {
  // The regex is module-scoped and sticky via /g; reset before iterating.
  NODE_BUILTIN_IMPORT_RE.lastIndex = 0;
  const found = new Set<string>();
  for (const match of code.matchAll(NODE_BUILTIN_IMPORT_RE)) {
    const mod = match[1];
    if (mod !== undefined && NODE_BUILTINS.has(mod)) {
      found.add(mod);
    }
  }
  return [...found];
}
// ── Path resolution ──────────────────────────────────────────────────────────
/**
 * Find a build output file by base name, checking common extensions.
 * tsdown may output .mjs, .js, or .cjs depending on format and config.
 */
export async function findBuildOutput(dir: string, baseName: string): Promise<string | undefined> {
  const extensions = [".mjs", ".js", ".cjs"];
  for (const ext of extensions) {
    const candidate = join(dir, baseName + ext);
    if (await fileExists(candidate)) {
      return candidate;
    }
  }
  return undefined;
}
/**
 * Resolve a dist/built path back to its source .ts/.tsx equivalent.
 * E.g., "./dist/index.mjs" → "src/index.ts"
 */
export async function resolveSourceEntry(
  pluginDir: string,
  distPath: string,
): Promise<string | undefined> {
  const cleaned = distPath.replace(LEADING_DOT_SLASH_RE, "");
  // dist/foo.mjs → src/foo.ts
  const srcPath = cleaned.replace(DIST_PREFIX_RE, "src/").replace(MJS_EXT_RE, ".ts");
  // Probe in order: the path as-is (it may already point at source),
  // the src/*.ts mapping, then the .tsx variant.
  const candidates = [cleaned, srcPath, srcPath.replace(TS_TO_TSX_RE, ".tsx")];
  for (const candidate of candidates) {
    const full = resolve(pluginDir, candidate);
    if (await fileExists(full)) {
      return full;
    }
  }
  return undefined;
}
// ── Directory helpers ────────────────────────────────────────────────────────
/**
 * Recursively calculate the total size of all files in a directory.
 *
 * Stats all entries of a directory level in parallel (Promise.all) instead of
 * awaiting one stat per loop iteration; subdirectories are recursed into.
 * Entries that are neither regular files nor directories (symlinks, sockets,
 * ...) contribute 0 bytes, matching the previous sequential implementation.
 *
 * @param dir directory path (absolute or relative)
 * @returns total size in bytes of all regular files under `dir`
 */
export async function calculateDirectorySize(dir: string): Promise<number> {
  const items = await readdir(dir, { withFileTypes: true });
  const sizes = await Promise.all(
    items.map(async (item) => {
      const fullPath = join(dir, item.name);
      if (item.isFile()) {
        const s = await stat(fullPath);
        return s.size;
      }
      if (item.isDirectory()) {
        return calculateDirectorySize(fullPath);
      }
      return 0;
    }),
  );
  return sizes.reduce((total, size) => total + size, 0);
}
// ── Tarball creation ─────────────────────────────────────────────────────────
/**
 * Create a gzipped tarball from a directory.
 *
 * Streams the tar output through gzip at maximum compression (level 9)
 * into `outputPath`; resolves once the pipeline has fully flushed.
 *
 * @param sourceDir directory whose contents are packed
 * @param outputPath destination .tar.gz file path
 */
export async function createTarball(sourceDir: string, outputPath: string): Promise<void> {
  // node:zlib is imported lazily so merely loading this module stays cheap.
  const { createGzip } = await import("node:zlib");
  const tarStream = packTar(sourceDir);
  const gzip = createGzip({ level: 9 });
  const out = createWriteStream(outputPath);
  await pipeline(tarStream, gzip, out);
}

View File

@@ -0,0 +1,609 @@
/**
* emdash plugin bundle
*
* Produces a publishable plugin tarball from a plugin source directory.
*
* Steps:
* 1. Resolve plugin entrypoint (finds definePlugin() export)
* 2. Bundle backend code with tsdown → backend.js (single ES module, tree-shaken)
* 3. Bundle admin code if present → admin.js
* 4. Extract manifest from definePlugin() → manifest.json
* 5. Collect assets (README.md, icon.png, screenshots/)
* 6. Validate bundle (manifest schema, size limits, no Node.js builtins)
* 7. Create tarball ({id}-{version}.tar.gz)
*/
import { createHash } from "node:crypto";
import { readFile, stat, mkdir, writeFile, rm, copyFile, symlink, readdir } from "node:fs/promises";
import { resolve, join, extname, basename } from "node:path";
import { defineCommand } from "citty";
import consola from "consola";
import type { ResolvedPlugin } from "../../plugins/types.js";
import {
fileExists,
readImageDimensions,
extractManifest,
findNodeBuiltinImports,
findBuildOutput,
resolveSourceEntry,
calculateDirectorySize,
createTarball,
MAX_BUNDLE_SIZE,
MAX_SCREENSHOTS,
MAX_SCREENSHOT_WIDTH,
MAX_SCREENSHOT_HEIGHT,
ICON_SIZE,
} from "./bundle-utils.js";
/** Matches a .ts or .tsx extension (used to derive build-output base names). */
const TS_EXT_RE = /\.tsx?$/;
/** Matches every "/" (for flattening scoped ids into tarball file names). */
const SLASH_RE = /\//g;
/** Matches a leading "@" (stripped from scoped ids in tarball names). */
const LEADING_AT_RE = /^@/;
// NOTE(review): lowercase "emdash_" breaks the CONSTANT_CASE convention used
// by the other module-scope regexes — consider renaming to EMDASH_SCOPE_RE.
/** Matches specifiers in the @emdashcms/ npm scope (kept external in builds). */
const emdash_SCOPE_RE = /^@emdashcms\//;
export const bundleCommand = defineCommand({
  meta: {
    name: "bundle",
    description: "Bundle a plugin for marketplace distribution",
  },
  args: {
    dir: {
      type: "string",
      description: "Plugin directory (default: current directory)",
      default: process.cwd(),
    },
    outDir: {
      type: "string",
      alias: "o",
      description: "Output directory for the tarball (default: ./dist)",
      default: "dist",
    },
    validateOnly: {
      type: "boolean",
      description: "Run validation only, skip tarball creation",
      default: false,
    },
  },
  async run({ args }) {
    const pluginDir = resolve(args.dir);
    // Note: outDir is resolved relative to the plugin directory, not the cwd.
    const outDir = resolve(pluginDir, args.outDir);
    const validateOnly = args.validateOnly;
    consola.start(validateOnly ? "Validating plugin..." : "Bundling plugin...");
    // ── Step 1: Read package.json and resolve entrypoints ──
    const pkgPath = join(pluginDir, "package.json");
    if (!(await fileExists(pkgPath))) {
      consola.error("No package.json found in", pluginDir);
      process.exit(1);
    }
    const pkg = JSON.parse(await readFile(pkgPath, "utf-8")) as {
      name?: string;
      main?: string;
      exports?: Record<string, unknown>;
    };
    // Find the sandbox entrypoint — look for ./sandbox export first, then main
    let backendEntry: string | undefined;
    let adminEntry: string | undefined;
    if (pkg.exports) {
      // Check for explicit sandbox export (string or conditional { import })
      const sandboxExport = pkg.exports["./sandbox"];
      if (typeof sandboxExport === "string") {
        backendEntry = await resolveSourceEntry(pluginDir, sandboxExport);
      } else if (sandboxExport && typeof sandboxExport === "object" && "import" in sandboxExport) {
        backendEntry = await resolveSourceEntry(
          pluginDir,
          (sandboxExport as { import: string }).import,
        );
      }
      // Check for admin export (same string/conditional handling)
      const adminExport = pkg.exports["./admin"];
      if (typeof adminExport === "string") {
        adminEntry = await resolveSourceEntry(pluginDir, adminExport);
      } else if (adminExport && typeof adminExport === "object" && "import" in adminExport) {
        adminEntry = await resolveSourceEntry(
          pluginDir,
          (adminExport as { import: string }).import,
        );
      }
    }
    // If no sandbox export, look for src/sandbox-entry.ts
    if (!backendEntry) {
      const defaultSandbox = join(pluginDir, "src/sandbox-entry.ts");
      if (await fileExists(defaultSandbox)) {
        backendEntry = defaultSandbox;
      }
    }
    // Find the main entry for manifest extraction: exports["."], then
    // pkg.main, then src/index.ts as a last resort.
    let mainEntry: string | undefined;
    if (pkg.exports?.["."] !== undefined) {
      const mainExport = pkg.exports["."];
      if (typeof mainExport === "string") {
        mainEntry = await resolveSourceEntry(pluginDir, mainExport);
      } else if (mainExport && typeof mainExport === "object" && "import" in mainExport) {
        mainEntry = await resolveSourceEntry(pluginDir, (mainExport as { import: string }).import);
      }
    }
    if (!mainEntry && pkg.main) {
      mainEntry = await resolveSourceEntry(pluginDir, pkg.main);
    }
    if (!mainEntry) {
      const defaultMain = join(pluginDir, "src/index.ts");
      if (await fileExists(defaultMain)) {
        mainEntry = defaultMain;
      }
    }
    if (!mainEntry) {
      consola.error(
        "Cannot find plugin entrypoint. Expected src/index.ts or main/exports in package.json",
      );
      process.exit(1);
    }
    consola.info(`Main entry: ${mainEntry}`);
    if (backendEntry) consola.info(`Backend entry: ${backendEntry}`);
    if (adminEntry) consola.info(`Admin entry: ${adminEntry}`);
    // ── Step 2: Extract manifest by importing the plugin ──
    consola.start("Extracting plugin manifest...");
    // Build the main entry first so we can import it.
    // tsdown is imported lazily — it is a heavy dependency.
    const { build } = await import("tsdown");
    const tmpDir = join(pluginDir, ".emdash-bundle-tmp");
    try {
      await mkdir(tmpDir, { recursive: true });
      // Build main entry to extract manifest.
      // Externalize emdash and sibling packages — they'll resolve
      // via the symlinked node_modules below.
      const mainOutDir = join(tmpDir, "main");
      await build({
        config: false,
        entry: [mainEntry],
        format: "esm",
        outDir: mainOutDir,
        dts: false,
        platform: "node",
        external: ["emdash", emdash_SCOPE_RE],
      });
      // Symlink plugin's node_modules so the built module can resolve
      // external dependencies (emdash, @emdashcms/*, etc.)
      const pluginNodeModules = join(pluginDir, "node_modules");
      const tmpNodeModules = join(mainOutDir, "node_modules");
      if (await fileExists(pluginNodeModules)) {
        // "junction" only matters on Windows; elsewhere Node creates a
        // regular symlink and ignores the type argument.
        await symlink(pluginNodeModules, tmpNodeModules, "junction");
      }
      // Import the built module to get the resolved plugin
      const mainBaseName = basename(mainEntry).replace(TS_EXT_RE, "");
      const mainOutputPath = await findBuildOutput(mainOutDir, mainBaseName);
      if (!mainOutputPath) {
        consola.error("Failed to build main entry — no output found in", mainOutDir);
        process.exit(1);
      }
      // Dynamic import of the built plugin (runs its module-level code).
      const pluginModule = (await import(mainOutputPath)) as Record<string, unknown>;
      // Extract manifest from the imported module.
      // Supports three patterns:
      // 1. Native: createPlugin() export -> ResolvedPlugin
      // 2. Native: default export that is/returns a ResolvedPlugin (has id+version)
      // 3. Standard: descriptor factory function (returns { id, version, ... })
      let resolvedPlugin: ResolvedPlugin | undefined;
      if (typeof pluginModule.createPlugin === "function") {
        resolvedPlugin = pluginModule.createPlugin() as ResolvedPlugin;
      } else if (typeof pluginModule.default === "function") {
        resolvedPlugin = pluginModule.default() as ResolvedPlugin;
      } else if (typeof pluginModule.default === "object" && pluginModule.default !== null) {
        const defaultExport = pluginModule.default as Record<string, unknown>;
        if ("id" in defaultExport && "version" in defaultExport) {
          resolvedPlugin = defaultExport as unknown as ResolvedPlugin;
        }
      }
      // Standard format: no createPlugin, no default with id/version.
      // Look for a descriptor factory -- any named export function that
      // returns an object with { id, version }.
      if (!resolvedPlugin) {
        for (const [key, value] of Object.entries(pluginModule)) {
          if (key === "default" || typeof value !== "function") continue;
          try {
            // NOTE(review): calling arbitrary exported functions may run side
            // effects; assumed acceptable for plugin packages — confirm.
            const result = (value as () => unknown)() as Record<string, unknown> | null;
            if (result && typeof result === "object" && "id" in result && "version" in result) {
              resolvedPlugin = {
                id: result.id,
                version: result.version,
                capabilities: result.capabilities ?? [],
                allowedHosts: result.allowedHosts ?? [],
                storage: result.storage ?? {},
                hooks: {},
                routes: {},
                admin: {
                  pages: result.adminPages,
                  widgets: result.adminWidgets,
                },
              } as ResolvedPlugin;
              // If there's a sandbox entry, build and import it
              // to get hook/route names for the manifest.
              if (backendEntry) {
                const backendProbeDir = join(tmpDir, "backend-probe");
                const probeShimDir = join(tmpDir, "probe-shims");
                await mkdir(probeShimDir, { recursive: true });
                // Identity shim: keeps the probe build from pulling in core.
                await writeFile(
                  join(probeShimDir, "emdash.mjs"),
                  "export const definePlugin = (d) => d;\n",
                );
                await build({
                  config: false,
                  entry: [backendEntry],
                  format: "esm",
                  outDir: backendProbeDir,
                  dts: false,
                  platform: "neutral",
                  external: [],
                  alias: { emdash: join(probeShimDir, "emdash.mjs") },
                  treeshake: true,
                });
                const backendBaseName = basename(backendEntry).replace(TS_EXT_RE, "");
                const backendProbePath = await findBuildOutput(backendProbeDir, backendBaseName);
                if (backendProbePath) {
                  const backendModule = (await import(backendProbePath)) as Record<string, unknown>;
                  const standardDef = (backendModule.default ?? {}) as Record<string, unknown>;
                  const hooks = standardDef.hooks as Record<string, unknown> | undefined;
                  const routes = standardDef.routes as Record<string, unknown> | undefined;
                  if (hooks) {
                    for (const hookName of Object.keys(hooks)) {
                      const hookEntry = hooks[hookName];
                      // A hook can be a bare handler or a config object
                      // ({ handler, priority, timeout, ... }).
                      const isConfig =
                        typeof hookEntry === "object" &&
                        hookEntry !== null &&
                        "handler" in hookEntry;
                      const config = isConfig ? (hookEntry as Record<string, unknown>) : {};
                      (resolvedPlugin.hooks as Record<string, unknown>)[hookName] = {
                        handler: isConfig
                          ? (hookEntry as Record<string, unknown>).handler
                          : hookEntry,
                        priority: (config.priority as number) ?? 100,
                        timeout: (config.timeout as number) ?? 5000,
                        dependencies: (config.dependencies as string[]) ?? [],
                        errorPolicy: (config.errorPolicy as string) ?? "abort",
                        exclusive: (config.exclusive as boolean) ?? false,
                        pluginId: result.id,
                      };
                    }
                  }
                  if (routes) {
                    for (const [name, route] of Object.entries(routes)) {
                      const routeObj = route as Record<string, unknown>;
                      (resolvedPlugin.routes as Record<string, unknown>)[name] = {
                        handler: routeObj.handler,
                        public: routeObj.public,
                      };
                    }
                  }
                }
              }
              break;
            }
          } catch {
            // Not a descriptor factory, skip
          }
        }
      }
      if (!resolvedPlugin?.id || !resolvedPlugin?.version) {
        consola.error(
          "Could not extract plugin definition. Expected one of:\n" +
            " - createPlugin() export (native format)\n" +
            " - Descriptor factory function returning { id, version, ... } (standard format)",
        );
        process.exit(1);
      }
      const manifest = extractManifest(resolvedPlugin);
      // Validate format consistency: bundled plugins are for the marketplace
      // (sandboxed), so they must be standard format without trusted-only features.
      if (resolvedPlugin.admin?.entry) {
        consola.error(
          `Plugin declares adminEntry — React admin components require native/trusted mode. ` +
            `Use Block Kit for sandboxed admin pages, or remove adminEntry.`,
        );
        process.exit(1);
      }
      if (
        resolvedPlugin.admin?.portableTextBlocks &&
        resolvedPlugin.admin.portableTextBlocks.length > 0
      ) {
        consola.error(
          `Plugin declares portableTextBlocks — these require native/trusted mode ` +
            `and cannot be bundled for the marketplace.`,
        );
        process.exit(1);
      }
      consola.success(`Plugin: ${manifest.id}@${manifest.version}`);
      consola.info(
        ` Capabilities: ${manifest.capabilities.length > 0 ? manifest.capabilities.join(", ") : "(none)"}`,
      );
      consola.info(
        ` Hooks: ${manifest.hooks.length > 0 ? manifest.hooks.map((h) => (typeof h === "string" ? h : h.name)).join(", ") : "(none)"}`,
      );
      // NOTE(review): manifest.routes as built by extractManifest is string[],
      // so the object branch below appears defensive — confirm.
      consola.info(
        ` Routes: ${manifest.routes.length > 0 ? manifest.routes.map((r) => (typeof r === "string" ? r : r.name)).join(", ") : "(none)"}`,
      );
      // ── Step 3: Bundle backend.js ──
      const bundleDir = join(tmpDir, "bundle");
      await mkdir(bundleDir, { recursive: true });
      if (backendEntry) {
        consola.start("Bundling backend...");
        // Create a shim for emdash so the sandbox entry doesn't pull in the
        // entire core package. definePlugin is an identity function for standard
        // format, and PluginContext is a type-only import that disappears.
        const shimDir = join(tmpDir, "shims");
        await mkdir(shimDir, { recursive: true });
        await writeFile(join(shimDir, "emdash.mjs"), "export const definePlugin = (d) => d;\n");
        await build({
          config: false,
          entry: [backendEntry],
          format: "esm",
          outDir: join(tmpDir, "backend"),
          dts: false,
          platform: "neutral",
          // Bundle everything for a self-contained sandbox file,
          // but alias emdash to our shim so we don't pull in the core.
          external: [],
          alias: { emdash: join(shimDir, "emdash.mjs") },
          minify: true,
          treeshake: true,
        });
        const backendBaseName = basename(backendEntry).replace(TS_EXT_RE, "");
        const backendOutputPath = await findBuildOutput(join(tmpDir, "backend"), backendBaseName);
        if (backendOutputPath) {
          await copyFile(backendOutputPath, join(bundleDir, "backend.js"));
          consola.success("Built backend.js");
        } else {
          consola.error("Backend build produced no output");
          process.exit(1);
        }
      } else {
        consola.warn("No sandbox entry found — bundle will have no backend.js");
        consola.warn(' Add a "sandbox-entry.ts" in src/ or a "./sandbox" export in package.json');
      }
      // ── Step 4: Bundle admin.js ──
      if (adminEntry) {
        consola.start("Bundling admin...");
        await build({
          config: false,
          entry: [adminEntry],
          format: "esm",
          outDir: join(tmpDir, "admin"),
          dts: false,
          platform: "neutral",
          external: [],
          minify: true,
          treeshake: true,
        });
        const adminBaseName = basename(adminEntry).replace(TS_EXT_RE, "");
        const adminOutputPath = await findBuildOutput(join(tmpDir, "admin"), adminBaseName);
        // NOTE(review): unlike the backend build, a missing admin output is
        // silently ignored here — confirm that is intended.
        if (adminOutputPath) {
          await copyFile(adminOutputPath, join(bundleDir, "admin.js"));
          consola.success("Built admin.js");
        }
      }
      // ── Step 5: Write manifest.json ──
      await writeFile(join(bundleDir, "manifest.json"), JSON.stringify(manifest, null, 2));
      // ── Step 6: Collect assets ──
      consola.start("Collecting assets...");
      // README.md
      const readmePath = join(pluginDir, "README.md");
      if (await fileExists(readmePath)) {
        await copyFile(readmePath, join(bundleDir, "README.md"));
        consola.success("Included README.md");
      }
      // icon.png — wrong dimensions only warn; unreadable images are skipped.
      const iconPath = join(pluginDir, "icon.png");
      if (await fileExists(iconPath)) {
        const iconBuf = await readFile(iconPath);
        const dims = readImageDimensions(iconBuf);
        if (!dims) {
          consola.warn("icon.png is not a valid PNG — skipping");
        } else if (dims[0] !== ICON_SIZE || dims[1] !== ICON_SIZE) {
          consola.warn(
            `icon.png is ${dims[0]}x${dims[1]}, expected ${ICON_SIZE}x${ICON_SIZE} — including anyway`,
          );
          await copyFile(iconPath, join(bundleDir, "icon.png"));
        } else {
          await copyFile(iconPath, join(bundleDir, "icon.png"));
          consola.success("Included icon.png");
        }
      }
      // screenshots/ — first MAX_SCREENSHOTS png/jpg files, sorted by name.
      const screenshotsDir = join(pluginDir, "screenshots");
      if (await fileExists(screenshotsDir)) {
        const screenshotFiles = (await readdir(screenshotsDir))
          .filter((f) => {
            const ext = extname(f).toLowerCase();
            return ext === ".png" || ext === ".jpg" || ext === ".jpeg";
          })
          .toSorted()
          .slice(0, MAX_SCREENSHOTS);
        if (screenshotFiles.length > 0) {
          await mkdir(join(bundleDir, "screenshots"), { recursive: true });
          for (const file of screenshotFiles) {
            const filePath = join(screenshotsDir, file);
            const buf = await readFile(filePath);
            const dims = readImageDimensions(buf);
            if (!dims) {
              consola.warn(`screenshots/${file} — cannot read dimensions, skipping`);
              continue;
            }
            if (dims[0] > MAX_SCREENSHOT_WIDTH || dims[1] > MAX_SCREENSHOT_HEIGHT) {
              consola.warn(
                `screenshots/${file} is ${dims[0]}x${dims[1]}, max ${MAX_SCREENSHOT_WIDTH}x${MAX_SCREENSHOT_HEIGHT} — including anyway`,
              );
            }
            await copyFile(filePath, join(bundleDir, "screenshots", file));
          }
          consola.success(`Included ${screenshotFiles.length} screenshot(s)`);
        }
      }
      // ── Step 7: Validation ──
      consola.start("Validating bundle...");
      let hasErrors = false;
      // Check for Node.js builtins in backend.js
      const backendPath = join(bundleDir, "backend.js");
      if (await fileExists(backendPath)) {
        const backendCode = await readFile(backendPath, "utf-8");
        const builtins = findNodeBuiltinImports(backendCode);
        if (builtins.length > 0) {
          consola.error(`backend.js imports Node.js built-in modules: ${builtins.join(", ")}`);
          consola.error("Sandboxed plugins cannot use Node.js APIs");
          hasErrors = true;
        }
      }
      // Check capabilities warnings (non-fatal)
      if (manifest.capabilities.includes("network:fetch:any")) {
        consola.warn(
          "Plugin declares unrestricted network access (network:fetch:any) — it can make requests to any host",
        );
      } else if (
        manifest.capabilities.includes("network:fetch") &&
        manifest.allowedHosts.length === 0
      ) {
        consola.warn(
          "Plugin declares network:fetch capability but no allowedHosts — all fetch requests will be blocked",
        );
      }
      // Check for features that won't work in sandboxed mode (non-fatal)
      if (
        resolvedPlugin.admin?.portableTextBlocks &&
        resolvedPlugin.admin.portableTextBlocks.length > 0
      ) {
        consola.warn(
          "Plugin declares portableTextBlocks — these require trusted mode and will be ignored in sandboxed plugins",
        );
      }
      if (resolvedPlugin.admin?.entry) {
        consola.warn(
          "Plugin declares admin.entry — custom React components require trusted mode. Use Block Kit for sandboxed admin pages",
        );
      }
      // Check for page:fragments hook — trusted-only, not allowed in sandbox
      if (resolvedPlugin.hooks["page:fragments"]) {
        consola.warn(
          "Plugin declares page:fragments hook — this is trusted-only and will not work in sandboxed mode",
        );
      }
      // Check: if plugin declares admin pages or widgets, it must have an "admin" route
      const hasAdminPages = (manifest.admin?.pages?.length ?? 0) > 0;
      const hasAdminWidgets = (manifest.admin?.widgets?.length ?? 0) > 0;
      if (hasAdminPages || hasAdminWidgets) {
        const routeNames = manifest.routes.map((r: string | { name: string }) =>
          typeof r === "string" ? r : r.name,
        );
        if (!routeNames.includes("admin")) {
          consola.error(
            `Plugin declares ${hasAdminPages ? "adminPages" : ""}${hasAdminPages && hasAdminWidgets ? " and " : ""}${hasAdminWidgets ? "adminWidgets" : ""} ` +
              `but the sandbox entry has no "admin" route. ` +
              `Add an admin route handler to serve Block Kit pages.`,
          );
          hasErrors = true;
        }
      }
      // Calculate total bundle size
      const totalSize = await calculateDirectorySize(bundleDir);
      if (totalSize > MAX_BUNDLE_SIZE) {
        const sizeMB = (totalSize / 1024 / 1024).toFixed(2);
        consola.error(`Bundle size ${sizeMB}MB exceeds maximum of 5MB`);
        hasErrors = true;
      } else {
        const sizeKB = (totalSize / 1024).toFixed(1);
        consola.info(`Bundle size: ${sizeKB}KB`);
      }
      if (hasErrors) {
        consola.error("Bundle validation failed");
        process.exit(1);
      }
      consola.success("Validation passed");
      if (validateOnly) {
        return;
      }
      // ── Step 8: Create tarball ──
      await mkdir(outDir, { recursive: true });
      // Flatten scoped ids: "@scope/name" → "scope-name-{version}.tar.gz"
      const tarballName = `${manifest.id.replace(SLASH_RE, "-").replace(LEADING_AT_RE, "")}-${manifest.version}.tar.gz`;
      const tarballPath = join(outDir, tarballName);
      consola.start("Creating tarball...");
      await createTarball(bundleDir, tarballPath);
      const tarballStat = await stat(tarballPath);
      const tarballSizeKB = (tarballStat.size / 1024).toFixed(1);
      // Calculate checksum for publication / integrity verification
      const tarballBuf = await readFile(tarballPath);
      const checksum = createHash("sha256").update(tarballBuf).digest("hex");
      consola.success(`Created ${tarballName} (${tarballSizeKB}KB)`);
      consola.info(` SHA-256: ${checksum}`);
      consola.info(` Path: ${tarballPath}`);
    } finally {
      // Defensive guard: only delete when the path looks like our temp dir.
      if (tmpDir.endsWith(".emdash-bundle-tmp")) {
        await rm(tmpDir, { recursive: true, force: true });
      }
    }
  },
});

View File

@@ -0,0 +1,442 @@
/**
* emdash content
*
* CRUD commands for managing content items via the EmDash REST API.
*/
import { readFile } from "node:fs/promises";
import { defineCommand } from "citty";
import { consola } from "consola";
import { connectionArgs, createClientFromArgs } from "../client-factory.js";
import { output } from "../output.js";
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------
/** Read content data from --data, --file, or --stdin */
async function readInputData(args: {
  data?: string;
  file?: string;
  stdin?: boolean;
}): Promise<Record<string, unknown>> {
  // Sources are checked in fixed precedence: --data, then --file, then --stdin.
  if (args.data) {
    try {
      return JSON.parse(args.data) as Record<string, unknown>;
    } catch {
      throw new Error("Invalid JSON in --data argument");
    }
  }
  if (args.file) {
    // Read errors (e.g. ENOENT) propagate unchanged; only malformed JSON
    // is wrapped in a friendlier error that names the file.
    const content = await readFile(args.file, "utf-8");
    try {
      return JSON.parse(content) as Record<string, unknown>;
    } catch (error) {
      if (error instanceof SyntaxError) {
        throw new Error(`Invalid JSON in file: ${args.file}`, { cause: error });
      }
      throw error;
    }
  }
  if (args.stdin) {
    const chunks: Buffer[] = [];
    for await (const chunk of process.stdin) {
      chunks.push(chunk as Buffer);
    }
    const text = Buffer.concat(chunks).toString("utf-8");
    try {
      return JSON.parse(text) as Record<string, unknown>;
    } catch {
      throw new Error("Invalid JSON from stdin");
    }
  }
  throw new Error("Provide content data via --data, --file, or --stdin");
}
// ---------------------------------------------------------------------------
// Subcommands
// ---------------------------------------------------------------------------
/** `content list` — list items in a collection as a trimmed summary. */
const listCommand = defineCommand({
  meta: { name: "list", description: "List content items" },
  args: {
    collection: {
      type: "positional",
      description: "Collection slug",
      required: true,
    },
    status: { type: "string", description: "Filter by status" },
    locale: { type: "string", description: "Filter by locale" },
    limit: { type: "string", description: "Maximum items to return" },
    cursor: { type: "string", description: "Pagination cursor" },
    ...connectionArgs,
  },
  async run({ args }) {
    try {
      const client = createClientFromArgs(args);
      const parsedLimit = args.limit ? parseInt(args.limit, 10) : undefined;
      const result = await client.list(args.collection, {
        status: args.status,
        locale: args.locale,
        limit: parsedLimit,
        cursor: args.cursor,
      });
      // Summarize items — strip heavy data fields for readable output
      const items = result.items.map((item) => {
        const title = typeof item.data?.title === "string" ? item.data.title : undefined;
        return {
          id: item.id,
          slug: item.slug,
          locale: item.locale,
          status: item.status,
          title,
          updatedAt: item.updatedAt,
        };
      });
      output({ items, nextCursor: result.nextCursor }, args);
    } catch (error) {
      // Friendly message plus non-zero exit so scripts can detect failure.
      consola.error(error instanceof Error ? error.message : "Unknown error");
      process.exit(1);
    }
  },
});
/** `content get` — fetch one item, overlaying pending draft data by default. */
const getCommand = defineCommand({
  meta: { name: "get", description: "Get a single content item" },
  args: {
    collection: {
      type: "positional",
      description: "Collection slug",
      required: true,
    },
    id: {
      type: "positional",
      description: "Content item ID or slug",
      required: true,
    },
    locale: { type: "string", description: "Locale for slug resolution" },
    raw: {
      type: "boolean",
      description: "Return raw Portable Text (skip markdown conversion)",
    },
    published: {
      type: "boolean",
      description: "Return published data only (ignore pending draft)",
    },
    ...connectionArgs,
  },
  async run({ args }) {
    try {
      const client = createClientFromArgs(args);
      const item = await client.get(args.collection, args.id, {
        raw: args.raw,
        locale: args.locale,
      });
      // Unless the caller asked for published data only, show draft changes
      // when a draft revision exists and actually differs.
      const overlayDraft = !args.published && Boolean(item.draftRevisionId);
      if (overlayDraft) {
        const diff = await client.compare(args.collection, args.id);
        if (diff.hasChanges && diff.draft) {
          item.data = diff.draft;
        }
      }
      output(item, args);
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      consola.error(message);
      process.exit(1);
    }
  },
});
/** `content create` — create an item and (by default) publish it right away. */
const createCommand = defineCommand({
  meta: { name: "create", description: "Create a content item" },
  args: {
    collection: {
      type: "positional",
      description: "Collection slug",
      required: true,
    },
    data: { type: "string", description: "Content data as JSON string" },
    file: { type: "string", description: "Read content data from a JSON file" },
    stdin: { type: "boolean", description: "Read content data from stdin" },
    slug: { type: "string", description: "Content slug" },
    locale: { type: "string", description: "Content locale" },
    "translation-of": {
      type: "string",
      description: "ID of content item to link as translation",
    },
    draft: {
      type: "boolean",
      description: "Keep as draft instead of auto-publishing",
    },
    ...connectionArgs,
  },
  async run({ args }) {
    try {
      const data = await readInputData(args);
      const client = createClientFromArgs(args);
      const created = await client.create(args.collection, {
        data,
        slug: args.slug,
        locale: args.locale,
        translationOf: args["translation-of"],
      });
      // Publish immediately unless the caller asked to keep a draft.
      if (!args.draft) {
        await client.publish(args.collection, created.id);
      }
      // Re-fetch so the output reflects the post-publish state.
      const current = await client.get(args.collection, created.id);
      output(current, args);
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      consola.error(message);
      process.exit(1);
    }
  },
});
/** `content update` — update an item with optimistic-concurrency protection. */
const updateCommand = defineCommand({
  meta: { name: "update", description: "Update a content item" },
  args: {
    collection: {
      type: "positional",
      description: "Collection slug",
      required: true,
    },
    id: {
      type: "positional",
      description: "Content item ID or slug",
      required: true,
    },
    data: { type: "string", description: "Content data as JSON string" },
    file: { type: "string", description: "Read content data from a JSON file" },
    // Accept stdin like `create` does — readInputData() already supports it,
    // and its error message advertises --stdin as an input source.
    stdin: { type: "boolean", description: "Read content data from stdin" },
    rev: {
      type: "string",
      description: "Revision token from get (prevents overwriting unseen changes)",
      required: true,
    },
    draft: {
      type: "boolean",
      description: "Keep as draft instead of auto-publishing",
    },
    ...connectionArgs,
  },
  async run({ args }) {
    try {
      const data = await readInputData(args);
      const client = createClientFromArgs(args);
      const updated = await client.update(args.collection, args.id, {
        data,
        _rev: args.rev,
      });
      // Auto-publish unless --draft is set.
      // Only publish if the update created a draft revision (i.e. the
      // collection supports revisions and data went to a draft).
      if (!args.draft && updated.draftRevisionId) {
        await client.publish(args.collection, args.id);
      }
      // Re-fetch to return the current state
      const item = await client.get(args.collection, args.id);
      output(item, args);
    } catch (error) {
      consola.error(error instanceof Error ? error.message : "Unknown error");
      process.exit(1);
    }
  },
});
/** `content delete` — delete (trash) a content item. */
const deleteCommand = defineCommand({
  meta: { name: "delete", description: "Delete a content item" },
  args: {
    collection: {
      type: "positional",
      description: "Collection slug",
      required: true,
    },
    id: {
      type: "positional",
      description: "Content item ID or slug",
      required: true,
    },
    ...connectionArgs,
  },
  async run({ args }) {
    const target = `${args.collection}/${args.id}`;
    try {
      const client = createClientFromArgs(args);
      await client.delete(args.collection, args.id);
      consola.success(`Deleted ${target}`);
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      consola.error(message);
      process.exit(1);
    }
  },
});
/** `content publish` — publish a content item. */
const publishCommand = defineCommand({
  meta: { name: "publish", description: "Publish a content item" },
  args: {
    collection: {
      type: "positional",
      description: "Collection slug",
      required: true,
    },
    id: {
      type: "positional",
      description: "Content item ID or slug",
      required: true,
    },
    ...connectionArgs,
  },
  async run({ args }) {
    const target = `${args.collection}/${args.id}`;
    try {
      const client = createClientFromArgs(args);
      await client.publish(args.collection, args.id);
      consola.success(`Published ${target}`);
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      consola.error(message);
      process.exit(1);
    }
  },
});
/** `content unpublish` — revert a content item to unpublished. */
const unpublishCommand = defineCommand({
  meta: { name: "unpublish", description: "Unpublish a content item" },
  args: {
    collection: {
      type: "positional",
      description: "Collection slug",
      required: true,
    },
    id: {
      type: "positional",
      description: "Content item ID or slug",
      required: true,
    },
    ...connectionArgs,
  },
  async run({ args }) {
    const target = `${args.collection}/${args.id}`;
    try {
      const client = createClientFromArgs(args);
      await client.unpublish(args.collection, args.id);
      consola.success(`Unpublished ${target}`);
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      consola.error(message);
      process.exit(1);
    }
  },
});
/** `content schedule` — queue a content item for future publishing. */
const scheduleCommand = defineCommand({
  meta: { name: "schedule", description: "Schedule content for publishing" },
  args: {
    collection: {
      type: "positional",
      description: "Collection slug",
      required: true,
    },
    id: {
      type: "positional",
      description: "Content item ID or slug",
      required: true,
    },
    at: {
      type: "string",
      description: "ISO 8601 datetime to publish at",
      required: true,
    },
    ...connectionArgs,
  },
  async run({ args }) {
    const target = `${args.collection}/${args.id}`;
    try {
      const client = createClientFromArgs(args);
      await client.schedule(args.collection, args.id, { at: args.at });
      consola.success(`Scheduled ${target} for ${args.at}`);
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      consola.error(message);
      process.exit(1);
    }
  },
});
/** `content restore` — restore a previously trashed content item. */
const restoreCommand = defineCommand({
  meta: { name: "restore", description: "Restore a trashed content item" },
  args: {
    collection: {
      type: "positional",
      description: "Collection slug",
      required: true,
    },
    id: {
      type: "positional",
      description: "Content item ID or slug",
      required: true,
    },
    ...connectionArgs,
  },
  async run({ args }) {
    const target = `${args.collection}/${args.id}`;
    try {
      const client = createClientFromArgs(args);
      await client.restore(args.collection, args.id);
      consola.success(`Restored ${target}`);
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      consola.error(message);
      process.exit(1);
    }
  },
});
/** `content translations` — list an item's linked translations. */
const translationsCommand = defineCommand({
  meta: { name: "translations", description: "List translations for a content item" },
  args: {
    collection: {
      type: "positional",
      description: "Collection slug",
      required: true,
    },
    id: {
      type: "positional",
      description: "Content item ID or slug",
      required: true,
    },
    ...connectionArgs,
  },
  async run({ args }) {
    try {
      const client = createClientFromArgs(args);
      output(await client.translations(args.collection, args.id), args);
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      consola.error(message);
      process.exit(1);
    }
  },
});
// ---------------------------------------------------------------------------
// Export
// ---------------------------------------------------------------------------
/** `emdash content` — parent command grouping all content subcommands. */
export const contentCommand = defineCommand({
  meta: { name: "content", description: "Manage content" },
  subCommands: {
    list: listCommand,
    get: getCommand,
    create: createCommand,
    update: updateCommand,
    delete: deleteCommand,
    publish: publishCommand,
    unpublish: unpublishCommand,
    schedule: scheduleCommand,
    restore: restoreCommand,
    translations: translationsCommand,
  },
});

View File

@@ -0,0 +1,191 @@
/**
* emdash dev
*
* Start development server with optional schema sync from remote
*/
import { spawn } from "node:child_process";
import { readFile, access } from "node:fs/promises";
import { resolve } from "node:path";
import { defineCommand } from "citty";
import consola from "consola";
import { createDatabase } from "../../database/connection.js";
import { runMigrations } from "../../database/migrations/runner.js";
/** Subset of package.json fields the dev command reads. */
interface PackageJson {
  name?: string;
  scripts?: Record<string, string>;
  // Optional EmDash-specific config block.
  emdash?: {
    // Remote instance URL, used by --types generation below.
    url?: string;
    // NOTE(review): not read by devCommand in this file — confirm it is used elsewhere.
    database?: string;
  };
}
/** Parse `<cwd>/package.json`; returns null when missing or unparseable. */
async function readPackageJson(cwd: string): Promise<PackageJson | null> {
  try {
    const raw = await readFile(resolve(cwd, "package.json"), "utf-8");
    return JSON.parse(raw) as PackageJson;
  } catch {
    return null;
  }
}
/** True when `path` exists and is accessible to the current process. */
async function fileExists(path: string): Promise<boolean> {
  return access(path).then(
    () => true,
    () => false,
  );
}
/**
 * `emdash dev` — ensure the local database is migrated, optionally pull
 * types/schema from a remote instance, then hand off to the project's Astro
 * dev server (via pnpm/yarn/npx, picked by lockfile).
 */
export const devCommand = defineCommand({
  meta: {
    name: "dev",
    description: "Start dev server with local database",
  },
  args: {
    database: {
      type: "string",
      alias: "d",
      description: "Database path (default: ./data.db)",
      default: "./data.db",
    },
    types: {
      type: "boolean",
      alias: "t",
      description: "Generate types from remote before starting",
      default: false,
    },
    port: {
      type: "string",
      alias: "p",
      description: "Port for dev server",
      default: "4321",
    },
    cwd: {
      type: "string",
      description: "Working directory",
      default: process.cwd(),
    },
  },
  async run({ args }) {
    const cwd = resolve(args.cwd);
    // A package.json is required — the dev server is launched through the
    // project's package manager below.
    const pkg = await readPackageJson(cwd);
    if (!pkg) {
      consola.error("No package.json found");
      process.exit(1);
    }
    const dbPath = resolve(cwd, args.database);
    // Run migrations if database doesn't exist
    const dbExists = await fileExists(dbPath);
    if (!dbExists) {
      consola.start("Database not found, initializing...");
    }
    // Always run migrations (they're idempotent)
    const db = createDatabase({ url: `file:${dbPath}` });
    try {
      consola.start("Checking database migrations...");
      const { applied } = await runMigrations(db);
      if (applied.length > 0) {
        consola.success(`Applied ${applied.length} migrations`);
      } else {
        consola.info("Database up to date");
      }
    } catch (error) {
      consola.error("Migration failed:", error);
      await db.destroy();
      process.exit(1);
    }
    // Close the CLI's connection before the dev server opens its own.
    await db.destroy();
    // Generate types from remote if requested
    if (args.types) {
      const remoteUrl = pkg.emdash?.url || process.env.EMDASH_URL;
      if (!remoteUrl) {
        consola.warn("No remote URL configured. Set EMDASH_URL or emdash.url in package.json");
      } else {
        try {
          // Lazy imports keep the common (no --types) startup path light.
          const { createClientFromArgs } = await import("../client-factory.js");
          const client = createClientFromArgs({ url: remoteUrl });
          const schema = await client.schemaExport();
          const types = await client.schemaTypes();
          const { writeFile, mkdir } = await import("node:fs/promises");
          const { resolve: resolvePath, dirname } = await import("node:path");
          // Types and the schema snapshot both land in .emdash/.
          const outputPath = resolvePath(cwd, ".emdash/types.ts");
          await mkdir(dirname(outputPath), { recursive: true });
          await writeFile(outputPath, types, "utf-8");
          await writeFile(
            resolvePath(dirname(outputPath), "schema.json"),
            JSON.stringify(schema, null, 2),
            "utf-8",
          );
          consola.success(`Generated types for ${schema.collections.length} collections`);
        } catch (error) {
          // Type generation is best-effort: warn and still start the server.
          consola.warn("Type generation failed:", error instanceof Error ? error.message : error);
        }
      }
    }
    // Start Astro dev server
    consola.start("Starting Astro dev server...");
    const astroArgs = ["astro", "dev", "--port", args.port];
    // Check if using pnpm, npm, or yarn
    const pnpmLockExists = await fileExists(resolve(cwd, "pnpm-lock.yaml"));
    const yarnLockExists = await fileExists(resolve(cwd, "yarn.lock"));
    let cmd: string;
    let cmdArgs: string[];
    if (pnpmLockExists) {
      cmd = "pnpm";
      cmdArgs = astroArgs;
    } else if (yarnLockExists) {
      cmd = "yarn";
      cmdArgs = astroArgs;
    } else {
      // No recognized lockfile — fall back to npx (covers plain npm projects).
      cmd = "npx";
      cmdArgs = astroArgs;
    }
    consola.info(`Running: ${cmd} ${cmdArgs.join(" ")}`);
    const child = spawn(cmd, cmdArgs, {
      cwd,
      stdio: "inherit",
      env: {
        ...process.env,
        // Pass database path to Astro
        EMDASH_DATABASE_URL: `file:${dbPath}`,
      },
    });
    child.on("error", (error) => {
      consola.error("Failed to start dev server:", error);
      process.exit(1);
    });
    child.on("exit", (code) => {
      // Mirror the child's exit code so scripts/CI see the real result.
      process.exit(code ?? 0);
    });
    // Handle termination signals
    const cleanup = () => {
      // Forward termination to the child; its "exit" handler above then
      // terminates this process with the child's code.
      child.kill("SIGTERM");
    };
    process.on("SIGINT", cleanup);
    process.on("SIGTERM", cleanup);
  },
});

View File

@@ -0,0 +1,211 @@
/**
* emdash doctor
*
* Diagnose database health: connection, migrations, schema integrity.
*/
import { access } from "node:fs/promises";
import { resolve } from "node:path";
import { defineCommand } from "citty";
import consola from "consola";
import { createDatabase } from "../../database/connection.js";
import { listTablesLike } from "../../database/dialect-helpers.js";
import { getMigrationStatus } from "../../database/migrations/runner.js";
/** Outcome of a single doctor health check. */
interface CheckResult {
  // Short check identifier shown in output, e.g. "database", "migrations".
  name: string;
  // "warn" is non-critical; "fail" causes a non-zero exit code.
  status: "pass" | "warn" | "fail";
  message: string;
}
/** True when `path` exists and is accessible to the current process. */
async function fileExists(path: string): Promise<boolean> {
  try {
    await access(path);
  } catch {
    return false;
  }
  return true;
}
/** Print one check result on the consola level matching its status. */
function printResult(result: CheckResult): void {
  const printers = {
    pass: consola.success,
    warn: consola.warn,
    fail: consola.error,
  } as const;
  printers[result.status](`${result.name}: ${result.message}`);
}
/**
 * Run all health checks against the SQLite database at `dbPath`.
 *
 * Checks, in order: file presence, pending migrations, collection count,
 * orphaned `ec_*` content tables, and user count. Returns one CheckResult
 * per check; only a missing database file short-circuits the rest.
 */
async function checkDatabase(dbPath: string): Promise<CheckResult[]> {
  const results: CheckResult[] = [];
  // Check database file exists
  if (!(await fileExists(dbPath))) {
    results.push({
      name: "database",
      status: "fail",
      message: `not found at ${dbPath} — run "emdash init"`,
    });
    // No file — every later check would fail for the same reason.
    return results;
  }
  results.push({
    name: "database",
    status: "pass",
    message: dbPath,
  });
  // Connect and check migrations
  let db;
  try {
    db = createDatabase({ url: `file:${dbPath}` });
    const { applied, pending } = await getMigrationStatus(db);
    if (pending.length === 0) {
      results.push({
        name: "migrations",
        status: "pass",
        message: `${applied.length} applied, none pending`,
      });
    } else {
      results.push({
        name: "migrations",
        status: "warn",
        message: `${applied.length} applied, ${pending.length} pending — run "emdash init"`,
      });
    }
    // Raw-SQL tag loaded lazily; queries below hit internal _emdash_* tables.
    const { sql } = await import("kysely");
    // Check collections exist
    try {
      const collectionsResult = await sql<{
        count: number;
      }>`SELECT COUNT(id) as count FROM _emdash_collections`.execute(db);
      const count = collectionsResult.rows[0]?.count ?? 0;
      results.push({
        name: "collections",
        status: count > 0 ? "pass" : "warn",
        message:
          count > 0 ? `${count} collections defined` : "no collections — seed or create via admin",
      });
    } catch {
      // Query failure here most likely means the table itself is missing.
      results.push({
        name: "collections",
        status: "fail",
        message: "could not query collections table — migrations may not have run",
      });
    }
    // Check for orphaned ec_ tables without matching collection records
    try {
      const tableNames = await listTablesLike(db, "ec_%");
      const collectionsResult = await sql<{
        slug: string;
      }>`SELECT slug FROM _emdash_collections`.execute(db);
      // Content tables are named "ec_<collection slug>"; anything else is orphaned.
      const registeredSlugs = new Set(collectionsResult.rows.map((r) => `ec_${r.slug}`));
      const orphaned = tableNames.filter((name) => !registeredSlugs.has(name));
      if (orphaned.length > 0) {
        results.push({
          name: "orphaned tables",
          status: "warn",
          message: `found ${orphaned.length}: ${orphaned.join(", ")}`,
        });
      }
    } catch {
      // Non-critical — tables may not exist on fresh DB
    }
    // Check users exist
    try {
      const usersResult = await sql<{
        count: number;
      }>`SELECT COUNT(id) as count FROM _emdash_users`.execute(db);
      const count = usersResult.rows[0]?.count ?? 0;
      results.push({
        name: "users",
        status: count > 0 ? "pass" : "warn",
        message:
          count > 0 ? `${count} users` : "no users — complete setup wizard at /_emdash/admin",
      });
    } catch {
      results.push({
        name: "users",
        status: "warn",
        message: "could not query users table",
      });
    }
  } catch (error) {
    results.push({
      name: "database connection",
      status: "fail",
      message: error instanceof Error ? error.message : "failed to connect",
    });
  } finally {
    // Always release the connection, even when a check threw.
    if (db) {
      await db.destroy();
    }
  }
  return results;
}
/** `emdash doctor` — run health checks and report pass/warn/fail results. */
export const doctorCommand = defineCommand({
  meta: {
    name: "doctor",
    description: "Check database health and diagnose issues",
  },
  args: {
    database: {
      type: "string",
      alias: "d",
      description: "Database path (default: ./data.db)",
      default: "./data.db",
    },
    cwd: {
      type: "string",
      description: "Working directory",
      default: process.cwd(),
    },
    json: {
      type: "boolean",
      description: "Output results as JSON",
      default: false,
    },
  },
  async run({ args }) {
    const dbPath = resolve(resolve(args.cwd), args.database);
    const results = await checkDatabase(dbPath);
    // Machine-readable mode: dump raw results and skip the summary.
    if (args.json) {
      process.stdout.write(`${JSON.stringify(results, null, 2)}\n`);
      return;
    }
    consola.start("EmDash Doctor\n");
    results.forEach(printResult);
    // Summary line based on the worst status seen.
    const failCount = results.filter((r) => r.status === "fail").length;
    const warnCount = results.filter((r) => r.status === "warn").length;
    consola.log("");
    if (failCount === 0 && warnCount === 0) {
      consola.success("All checks passed");
    } else if (failCount === 0) {
      consola.info(`All critical checks passed (${warnCount} warnings)`);
    } else {
      consola.error(`${failCount} issues found`);
      process.exitCode = 1;
    }
  },
});

View File

@@ -0,0 +1,630 @@
/**
* emdash export-seed
*
* Export current database schema (and optionally content) as a seed file
*/
import { resolve } from "node:path";
import { defineCommand } from "citty";
import consola from "consola";
import type { Kysely } from "kysely";
import { createDatabase } from "../../database/connection.js";
import { runMigrations } from "../../database/migrations/runner.js";
import { ContentRepository } from "../../database/repositories/content.js";
import { MediaRepository } from "../../database/repositories/media.js";
import { OptionsRepository } from "../../database/repositories/options.js";
import { TaxonomyRepository } from "../../database/repositories/taxonomy.js";
import type { Database } from "../../database/types.js";
import { isI18nEnabled } from "../../i18n/config.js";
import { SchemaRegistry } from "../../schema/registry.js";
import type { FieldType } from "../../schema/types.js";
import type {
SeedFile,
SeedCollection,
SeedField,
SeedTaxonomy,
SeedTaxonomyTerm,
SeedMenu,
SeedMenuItem,
SeedWidgetArea,
SeedWidget,
SeedContentEntry,
} from "../../seed/types.js";
// Options-table key prefix; keys starting with this are exported as site settings.
const SETTINGS_PREFIX = "site:";
/** `emdash export-seed` — migrate, export, and write the seed JSON to stdout. */
export const exportSeedCommand = defineCommand({
  meta: {
    name: "export-seed",
    description: "Export database schema and content as a seed file",
  },
  args: {
    database: {
      type: "string",
      alias: "d",
      description: "Database path",
      default: "./data.db",
    },
    cwd: {
      type: "string",
      description: "Working directory",
      default: process.cwd(),
    },
    "with-content": {
      type: "string",
      description: "Include content (all or comma-separated collection names)",
      required: false,
    },
    pretty: {
      type: "boolean",
      description: "Pretty print JSON output",
      default: true,
    },
  },
  async run({ args }) {
    const dbPath = resolve(resolve(args.cwd), args.database);
    consola.info(`Database: ${dbPath}`);
    const db = createDatabase({ url: `file:${dbPath}` });
    // Make sure all tables exist before reading from them.
    try {
      await runMigrations(db);
    } catch (error) {
      consola.error("Migration failed:", error);
      await db.destroy();
      process.exit(1);
    }
    try {
      const seed = await exportSeed(db, args["with-content"]);
      // The seed JSON goes to stdout so it can be redirected to a file;
      // diagnostics above go through consola (stderr-ish channels).
      const serialized = args.pretty ? JSON.stringify(seed, null, "\t") : JSON.stringify(seed);
      console.log(serialized);
    } catch (error) {
      consola.error("Export failed:", error);
      await db.destroy();
      process.exit(1);
    }
    await db.destroy();
  },
});
/**
 * Build the full seed-file structure from the database: settings, schema,
 * taxonomies, menus, widget areas, and (optionally) content.
 *
 * @param withContent undefined = no content; "" or "true" = all collections;
 *   otherwise a comma-separated list of collection slugs to include.
 */
async function exportSeed(db: Kysely<Database>, withContent?: string): Promise<SeedFile> {
  const settings = await exportSettings(db);
  const collections = await exportCollections(db);
  const taxonomies = await exportTaxonomies(db);
  const menus = await exportMenus(db);
  const widgetAreas = await exportWidgetAreas(db);
  const seed: SeedFile = {
    $schema: "https://emdashcms.com/seed.schema.json",
    version: "1",
    meta: {
      name: "Exported Seed",
      description: "Exported from existing EmDash database",
    },
    settings,
    collections,
    taxonomies,
    menus,
    widgetAreas,
  };
  // Content export is opt-in and needs the collection field metadata above.
  if (withContent !== undefined) {
    const include =
      withContent === "" || withContent === "true"
        ? null // all collections
        : withContent
            .split(",")
            .map((s) => s.trim())
            .filter(Boolean);
    seed.content = await exportContent(db, collections || [], include);
  }
  return seed;
}
/**
 * Export site settings from the options table, stripping the "site:" prefix
 * from each key. Returns undefined when no settings exist so the seed file
 * omits the section entirely.
 */
async function exportSettings(db: Kysely<Database>): Promise<SeedFile["settings"]> {
  const repo = new OptionsRepository(db);
  const stored = await repo.getByPrefix(SETTINGS_PREFIX);
  const entries = Array.from(stored, ([key, value]): [string, unknown] => [
    key.replace(SETTINGS_PREFIX, ""),
    value,
  ]);
  return entries.length > 0 ? Object.fromEntries(entries) : undefined;
}
/**
 * Export every collection definition plus its field definitions.
 * Falsy optional attributes are normalized to undefined so they are dropped
 * from the serialized seed.
 */
async function exportCollections(db: Kysely<Database>): Promise<SeedCollection[]> {
  type SupportsFlag = "drafts" | "revisions" | "preview" | "scheduling" | "search";
  const registry = new SchemaRegistry(db);
  const collections = await registry.listCollections();
  const exported: SeedCollection[] = [];
  for (const collection of collections) {
    const fields = await registry.listFields(collection.id);
    const seedFields = fields.map(
      (field): SeedField => ({
        slug: field.slug,
        label: field.label,
        type: field.type,
        required: field.required || undefined,
        unique: field.unique || undefined,
        searchable: field.searchable || undefined,
        defaultValue: field.defaultValue,
        // Shallow-copy validation so later mutation can't leak into the seed.
        validation: field.validation ? { ...field.validation } : undefined,
        widget: field.widget || undefined,
        options: field.options || undefined,
      }),
    );
    exported.push({
      slug: collection.slug,
      label: collection.label,
      labelSingular: collection.labelSingular || undefined,
      description: collection.description || undefined,
      icon: collection.icon || undefined,
      // An empty supports list is omitted rather than serialized as [].
      supports:
        collection.supports.length > 0 ? (collection.supports as SupportsFlag[]) : undefined,
      urlPattern: collection.urlPattern || undefined,
      fields: seedFields,
    });
  }
  return exported;
}
/**
 * Export taxonomy definitions and terms
 *
 * Parent links are converted from DB ids to slugs so the seed stays portable
 * across databases.
 */
async function exportTaxonomies(db: Kysely<Database>): Promise<SeedTaxonomy[]> {
  // Get taxonomy definitions
  const defs = await db.selectFrom("_emdash_taxonomy_defs").selectAll().execute();
  const result: SeedTaxonomy[] = [];
  const termRepo = new TaxonomyRepository(db);
  for (const def of defs) {
    // Get terms for this taxonomy
    const terms = await termRepo.findByName(def.name);
    // Build term tree for hierarchical taxonomies
    const seedTerms: SeedTaxonomyTerm[] = [];
    // First, create a map of id -> slug for parent resolution
    const idToSlug = new Map<string, string>();
    for (const term of terms) {
      idToSlug.set(term.id, term.slug);
    }
    for (const term of terms) {
      const seedTerm: SeedTaxonomyTerm = {
        slug: term.slug,
        label: term.label,
        // Only string descriptions are carried over; other data shapes are dropped.
        description: typeof term.data?.description === "string" ? term.data.description : undefined,
      };
      // Resolve parent slug
      if (term.parentId) {
        seedTerm.parent = idToSlug.get(term.parentId);
      }
      seedTerms.push(seedTerm);
    }
    const taxonomy: SeedTaxonomy = {
      name: def.name,
      label: def.label,
      labelSingular: def.label_singular || undefined,
      // `hierarchical` is stored as an integer flag (1 = true) in the DB row.
      hierarchical: def.hierarchical === 1,
      // NOTE(review): def.collections is JSON-parsed without validation —
      // assumed to be a JSON array of collection slugs; confirm at write site.
      collections: def.collections ? JSON.parse(def.collections) : [],
    };
    if (seedTerms.length > 0) {
      taxonomy.terms = seedTerms;
    }
    result.push(taxonomy);
  }
  return result;
}
/**
 * Export all menus, rebuilding each one's nested item tree from the flat,
 * sort-ordered rows in the menu-items table.
 */
async function exportMenus(db: Kysely<Database>): Promise<SeedMenu[]> {
  const menus = await db.selectFrom("_emdash_menus").selectAll().execute();
  const exported: SeedMenu[] = [];
  for (const menu of menus) {
    // Items come back flat and sorted; buildMenuItemTree nests them.
    const rows = await db
      .selectFrom("_emdash_menu_items")
      .selectAll()
      .where("menu_id", "=", menu.id)
      .orderBy("sort_order", "asc")
      .execute();
    exported.push({
      name: menu.name,
      label: menu.label,
      items: buildMenuItemTree(rows),
    });
  }
  return exported;
}
/** Type guard narrowing an arbitrary DB string to a supported widget type. */
function isWidgetType(t: string): t is SeedWidget["type"] {
  const known: readonly string[] = ["content", "menu", "component"];
  return known.includes(t);
}
/**
 * Build hierarchical menu item tree from flat array
 *
 * Rows are grouped by parent_id, then the tree is assembled recursively
 * starting from the roots (parent_id === null). Input order within each
 * sibling group is preserved.
 */
function buildMenuItemTree(
  items: Array<{
    id: string;
    parent_id: string | null;
    type: string;
    label: string;
    custom_url: string | null;
    reference_collection: string | null;
    reference_id: string | null;
    target: string | null;
    title_attr: string | null;
    css_classes: string | null;
  }>,
): SeedMenuItem[] {
  // Group rows by their parent id for O(1) child lookup.
  const byParent = new Map<string | null, typeof items>();
  for (const row of items) {
    const siblings = byParent.get(row.parent_id);
    if (siblings) {
      siblings.push(row);
    } else {
      byParent.set(row.parent_id, [row]);
    }
  }
  // Convert one row (and, recursively, its children) into a seed item.
  const toSeedItem = (row: (typeof items)[number]): SeedMenuItem => {
    const node: SeedMenuItem = {
      type: row.type,
      label: row.label || undefined,
    };
    if (row.type === "custom") {
      node.url = row.custom_url || undefined;
    } else {
      // Non-custom items point at a content entry.
      node.ref = row.reference_id || undefined;
      node.collection = row.reference_collection || undefined;
    }
    if (row.target === "_blank") {
      node.target = "_blank";
    }
    if (row.title_attr) {
      node.titleAttr = row.title_attr;
    }
    if (row.css_classes) {
      node.cssClasses = row.css_classes;
    }
    const children = (byParent.get(row.id) ?? []).map(toSeedItem);
    if (children.length > 0) {
      node.children = children;
    }
    return node;
  };
  return (byParent.get(null) ?? []).map(toSeedItem);
}
/**
 * Export widget areas with their widgets
 *
 * Widgets with an unrecognized type are skipped. (The original code filtered
 * on isWidgetType and then re-checked it inside the map with a "content"
 * fallback that could never run — dead code; replaced with one narrowing
 * guard.)
 */
async function exportWidgetAreas(db: Kysely<Database>): Promise<SeedWidgetArea[]> {
  // Get all widget areas
  const areas = await db.selectFrom("_emdash_widget_areas").selectAll().execute();
  const result: SeedWidgetArea[] = [];
  for (const area of areas) {
    // Get widgets for this area, in display order
    const widgets = await db
      .selectFrom("_emdash_widgets")
      .selectAll()
      .where("area_id", "=", area.id)
      .orderBy("sort_order", "asc")
      .execute();
    const seedWidgets: SeedWidget[] = [];
    for (const widget of widgets) {
      // Skip rows with unknown types; the guard also narrows widget.type.
      if (!isWidgetType(widget.type)) {
        continue;
      }
      const seedWidget: SeedWidget = { type: widget.type };
      if (widget.title) {
        seedWidget.title = widget.title;
      }
      // Per-type payload columns (content/menu_name/component_*) are JSON or
      // plain strings depending on the widget type.
      if (widget.type === "content" && widget.content) {
        seedWidget.content = JSON.parse(widget.content);
      } else if (widget.type === "menu" && widget.menu_name) {
        seedWidget.menuName = widget.menu_name;
      } else if (widget.type === "component") {
        if (widget.component_id) {
          seedWidget.componentId = widget.component_id;
        }
        if (widget.component_props) {
          seedWidget.props = JSON.parse(widget.component_props);
        }
      }
      seedWidgets.push(seedWidget);
    }
    result.push({
      name: area.name,
      label: area.label,
      description: area.description || undefined,
      widgets: seedWidgets,
    });
  }
  return result;
}
/**
 * Export content from collections
 *
 * Pages through every entry of each requested collection, converts image
 * fields to $media references, attaches taxonomy assignments, and (when i18n
 * is enabled) links translations to their source entry's seed-local ID.
 *
 * @param collections field metadata from exportCollections (drives $media/$ref conversion)
 * @param includeCollections slugs to export, or null for all collections
 */
async function exportContent(
  db: Kysely<Database>,
  collections: SeedCollection[],
  includeCollections: string[] | null,
): Promise<Record<string, SeedContentEntry[]>> {
  const content: Record<string, SeedContentEntry[]> = {};
  const contentRepo = new ContentRepository(db);
  const taxonomyRepo = new TaxonomyRepository(db);
  const mediaRepo = new MediaRepository(db);
  // Build media id -> info map for $media conversion
  const mediaMap = new Map<
    string,
    { url: string; filename: string; alt?: string; caption?: string }
  >();
  try {
    // Page through the whole media library once up front.
    let cursor: string | undefined;
    do {
      const result = await mediaRepo.findMany({
        limit: 100,
        cursor,
        status: "all",
      });
      for (const media of result.items) {
        mediaMap.set(media.id, {
          url: `/_emdash/api/media/file/${media.storageKey}`,
          filename: media.filename,
          alt: media.alt || undefined,
          caption: media.caption || undefined,
        });
      }
      cursor = result.nextCursor;
    } while (cursor);
  } catch {
    // Media table might not exist or be empty
  }
  const i18nEnabled = isI18nEnabled();
  for (const collection of collections) {
    // Skip if not in include list
    if (includeCollections && !includeCollections.includes(collection.slug)) {
      continue;
    }
    const entries: SeedContentEntry[] = [];
    let cursor: string | undefined;
    // When i18n is enabled, track translation_group -> seed ID so that
    // translations can reference the source entry's seed-local ID.
    // Key: EmDash translation_group ULID, Value: seed-local ID of the first entry in that group
    const translationGroupToSeedId = new Map<string, string>();
    // Paginate through all entries
    do {
      const result = await contentRepo.findMany(collection.slug, {
        limit: 100,
        cursor,
      });
      for (const item of result.items) {
        // Generate seed ID from collection:slug:locale for stable references
        const seedId = item.slug
          ? i18nEnabled && item.locale
            ? `${collection.slug}:${item.slug}:${item.locale}`
            : `${collection.slug}:${item.slug}`
          : item.id;
        // Process data fields for $media conversion
        const processedData = processDataForExport(item.data, collection.fields, mediaMap);
        const entry: SeedContentEntry = {
          id: seedId,
          slug: item.slug || item.id,
          // Only published/draft statuses are preserved; others are omitted.
          status: item.status === "published" || item.status === "draft" ? item.status : undefined,
          data: processedData,
        };
        // Add i18n fields when enabled
        if (i18nEnabled && item.locale) {
          entry.locale = item.locale;
          if (item.translationGroup) {
            const sourceSeedId = translationGroupToSeedId.get(item.translationGroup);
            if (sourceSeedId) {
              // This is a translation — reference the source entry
              entry.translationOf = sourceSeedId;
            } else {
              // First entry in this translation group — track it
              translationGroupToSeedId.set(item.translationGroup, seedId);
            }
          }
        }
        // Get taxonomy assignments
        const taxonomies = await getTaxonomyAssignments(taxonomyRepo, collection.slug, item.id);
        if (Object.keys(taxonomies).length > 0) {
          entry.taxonomies = taxonomies;
        }
        entries.push(entry);
      }
      cursor = result.nextCursor;
    } while (cursor);
    if (i18nEnabled && entries.length > 0) {
      // Sort entries so source locale entries appear before their translations.
      // Entries without translationOf come first; entries with translationOf come after.
      entries.sort((a, b) => {
        if (a.translationOf && !b.translationOf) return 1;
        if (!a.translationOf && b.translationOf) return -1;
        return 0;
      });
    }
    // Collections with no entries are omitted from the seed entirely.
    if (entries.length > 0) {
      content[collection.slug] = entries;
    }
  }
  return content;
}
/**
 * Process content data for export, converting image fields to $media syntax
 * and reference fields to $ref syntax.
 *
 * Note: arrays are scanned for string references only; arrays of image
 * objects pass through unchanged (matching prior behavior).
 */
function processDataForExport(
  data: Record<string, unknown>,
  fields: SeedField[],
  mediaMap: Map<string, { url: string; filename: string; alt?: string; caption?: string }>,
): Record<string, unknown> {
  // Field slug -> declared type, for deciding how each value is converted.
  const typeBySlug = new Map<string, FieldType>(
    fields.map((field): [string, FieldType] => [field.slug, field.type]),
  );
  const processed: Record<string, unknown> = {};
  for (const [key, value] of Object.entries(data)) {
    const fieldType = typeBySlug.get(key);
    if (fieldType === "image" && value && typeof value === "object") {
      const image = value as { id?: string; src?: string; alt?: string };
      const info = image.id ? mediaMap.get(image.id) : undefined;
      // Convert to $media when the media record is known; keep the raw
      // value as a fallback otherwise.
      processed[key] = info
        ? {
            $media: {
              url: info.url,
              filename: info.filename,
              // The entry's own alt text wins over the library default.
              alt: image.alt || info.alt,
              caption: info.caption,
            },
          }
        : value;
    } else if (fieldType === "reference" && typeof value === "string") {
      // Convert reference to $ref syntax (assumes same collection for now)
      processed[key] = `$ref:${value}`;
    } else if (Array.isArray(value)) {
      processed[key] = value.map((element) =>
        fieldType === "reference" && typeof element === "string" ? `$ref:${element}` : element,
      );
    } else {
      processed[key] = value;
    }
  }
  return processed;
}
/**
 * Get taxonomy term assignments for a content entry.
 *
 * @param taxonomyRepo - Repository used to load assigned terms.
 * @param collection - Collection slug the entry belongs to.
 * @param entryId - ID of the content entry.
 * @returns Term slugs grouped by taxonomy name, e.g. { tags: ["a", "b"] }.
 */
async function getTaxonomyAssignments(
  taxonomyRepo: TaxonomyRepository,
  collection: string,
  entryId: string,
): Promise<Record<string, string[]>> {
  const assigned = await taxonomyRepo.getTermsForEntry(collection, entryId);
  const grouped: Record<string, string[]> = {};
  for (const { name, slug } of assigned) {
    // Initialize the bucket on first sight, then append.
    (grouped[name] ??= []).push(slug);
  }
  return grouped;
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,192 @@
/**
* emdash init
*
* Initialize database from template config in package.json
*/
import { readFile, access } from "node:fs/promises";
import { resolve } from "node:path";
import { defineCommand } from "citty";
import consola from "consola";
import { createDatabase } from "../../database/connection.js";
import { runMigrations } from "../../database/migrations/runner.js";
/**
 * Template configuration read from the `emdash` key in package.json.
 */
export interface EmDashConfig {
  // Human-readable template name, shown during `emdash init`.
  label?: string;
  // Path to a SQL schema file, resolved relative to the project root.
  schema?: string;
  // Path to a seed file — handled by the `emdash seed` command, not here.
  seed?: string;
}
/** Minimal shape of the package.json fields this command reads. */
interface PackageJson {
  name?: string;
  emdash?: EmDashConfig;
}
/**
 * Check whether a file or directory exists at the given path.
 *
 * @param path - Absolute or relative filesystem path.
 * @returns True if the path is accessible, false otherwise.
 */
async function fileExists(path: string): Promise<boolean> {
  // access() rejects when the path is missing or unreadable.
  return access(path).then(
    () => true,
    () => false,
  );
}
/**
 * Read and parse package.json from the given directory.
 *
 * @param cwd - Directory expected to contain package.json.
 * @returns Parsed contents, or null if the file is missing or invalid JSON.
 */
async function readPackageJson(cwd: string): Promise<PackageJson | null> {
  try {
    const raw = await readFile(resolve(cwd, "package.json"), "utf-8");
    return JSON.parse(raw);
  } catch {
    // Missing file and malformed JSON are treated the same: no config.
    return null;
  }
}
/**
 * Execute a SQL file statement-by-statement against the database.
 *
 * Whole lines starting with "--" are dropped, the remainder is split on
 * semicolons, and each non-empty statement is executed as raw SQL.
 * NOTE(review): this naive splitter does not understand string literals or
 * trailing inline comments, so a semicolon inside a quoted string or a
 * `SQL; -- comment` line would mis-split — acceptable for generated template
 * schema files, but not arbitrary SQL. Confirm against the template files.
 *
 * @param db - Database connection to execute against.
 * @param filePath - Path to the SQL file to run.
 */
async function runSqlFile(db: ReturnType<typeof createDatabase>, filePath: string): Promise<void> {
  const raw = await readFile(filePath, "utf-8");
  // Drop comment-only lines, then split the rest into trimmed statements.
  const statements = raw
    .split("\n")
    .filter((line) => !line.trim().startsWith("--"))
    .join("\n")
    .split(";")
    .map((stmt) => stmt.trim())
    .filter((stmt) => stmt.length > 0);
  for (const stmt of statements) {
    await db.executeQuery({
      sql: stmt,
      parameters: [],
      query: { kind: "RawNode", sqlFragments: [stmt], parameters: [] },
    });
  }
}
/**
 * Check if database has already been initialized with template schema.
 *
 * Counts rows in `_emdash_collections`; any failure (e.g. the table does
 * not exist yet) is treated as "not initialized".
 *
 * @param db - Database connection to inspect.
 * @returns True when at least one collection row exists.
 */
async function isAlreadyInitialized(db: ReturnType<typeof createDatabase>): Promise<boolean> {
  try {
    // Use raw SQL since this runs on an untyped database connection.
    const { sql } = await import("kysely");
    const res = await sql<{
      count: number;
    }>`SELECT COUNT(id) as count FROM _emdash_collections`.execute(db);
    const first = res.rows[0];
    return first !== undefined && first.count > 0;
  } catch {
    // Table doesn't exist yet (or the query failed) — treat as uninitialized.
    return false;
  }
}
/**
 * `emdash init` — initialize the database for a project.
 *
 * Reads the `emdash` template config from package.json, creates/opens the
 * database, runs core migrations, and (unless already initialized) applies
 * the template's schema file. Exits with code 1 on any failure.
 */
export const initCommand = defineCommand({
  meta: {
    name: "init",
    description: "Initialize database from template config",
  },
  args: {
    database: {
      type: "string",
      alias: "d",
      description: "Database path (default: ./data.db)",
      default: "./data.db",
    },
    cwd: {
      type: "string",
      description: "Working directory",
      default: process.cwd(),
    },
    force: {
      type: "boolean",
      alias: "f",
      description: "Force re-initialization",
      default: false,
    },
  },
  async run({ args }) {
    const cwd = resolve(args.cwd);
    consola.start("Initializing EmDash...");
    // 1. Read package.json
    const pkg = await readPackageJson(cwd);
    if (!pkg) {
      consola.error("No package.json found in", cwd);
      process.exit(1);
    }
    const config = pkg.emdash;
    consola.info(`Project: ${pkg.name || "unknown"}`);
    if (config?.label) {
      consola.info(`Template: ${config.label}`);
    }
    // 2. Create/connect to database
    // The database path is resolved relative to --cwd, not the process cwd.
    const dbPath = resolve(cwd, args.database);
    consola.info(`Database: ${dbPath}`);
    const db = createDatabase({ url: `file:${dbPath}` });
    // 3. Run core migrations (always run - they're idempotent)
    consola.start("Running migrations...");
    try {
      const { applied } = await runMigrations(db);
      if (applied.length > 0) {
        consola.success(`Applied ${applied.length} migrations`);
        for (const name of applied) {
          consola.info(` - ${name}`);
        }
      } else {
        consola.info("Migrations already up to date");
      }
    } catch (error) {
      consola.error("Migration failed:", error);
      // Close the connection before exiting so the database flushes cleanly.
      await db.destroy();
      process.exit(1);
    }
    // 4. Check if already initialized (has collections)
    const alreadyInitialized = await isAlreadyInitialized(db);
    if (alreadyInitialized && !args.force) {
      await db.destroy();
      consola.success("Already initialized. Use --force to re-run schema/seed.");
      return;
    }
    if (alreadyInitialized && args.force) {
      consola.warn("Re-initializing (--force)...");
    }
    // 5. Run template schema.sql if present
    if (config?.schema) {
      const schemaPath = resolve(cwd, config.schema);
      if (await fileExists(schemaPath)) {
        consola.start(`Running schema: ${config.schema}`);
        try {
          await runSqlFile(db, schemaPath);
          consola.success("Schema applied");
        } catch (error) {
          consola.error("Schema failed:", error);
          await db.destroy();
          process.exit(1);
        }
      } else {
        // A missing schema file is a warning, not an error — init still succeeds.
        consola.warn(`Schema file not found: ${config.schema}`);
      }
    }
    // 6. JSON seed files are now handled by `emdash seed` command
    // The bootstrap script runs `emdash init && emdash seed`
    // Legacy SQL seed files (seed.sql) could be handled here if needed
    await db.destroy();
    consola.success("EmDash initialized successfully!");
    consola.info("Run `pnpm dev` to start the development server");
  },
});

View File

@@ -0,0 +1,547 @@
/**
* Login/logout/whoami CLI commands
*
* Login uses the OAuth Device Flow (RFC 8628):
* 1. POST /oauth/device/code → get device_code + user_code
* 2. Display URL + code to user
* 3. Poll POST /oauth/device/token until authorized
* 4. Save tokens to ~/.config/emdash/auth.json
*
* Custom headers (--header / EMDASH_HEADERS) are sent with every request
* and persisted to credentials so subsequent commands inherit them.
* This supports sites behind reverse proxies like Cloudflare Access.
*/
import { defineCommand } from "citty";
import { consola } from "consola";
import pc from "picocolors";
import {
createHeaderAwareFetch,
getCachedAccessToken,
isAccessRedirect,
resolveCustomHeaders,
runCloudflaredLogin,
} from "../../client/cf-access.js";
import {
getCredentials,
removeCredentials,
resolveCredentialKey,
saveCredentials,
} from "../credentials.js";
// ---------------------------------------------------------------------------
// Types for discovery + device flow responses
// ---------------------------------------------------------------------------
/** Payload returned by the /_emdash/.well-known/auth discovery endpoint. */
interface DiscoveryResponse {
  instance?: { name?: string };
  auth?: {
    mode?: string;
    methods?: {
      // Present only when the instance supports the OAuth Device Flow.
      device_flow?: {
        device_authorization_endpoint: string;
        token_endpoint: string;
      };
      api_tokens?: boolean;
    };
  };
}
/** Device authorization response (RFC 8628 §3.2). */
interface DeviceCodeResponse {
  device_code: string;
  user_code: string;
  verification_uri: string;
  // Lifetime of the device code, in seconds.
  expires_in: number;
  // Minimum polling interval, in seconds.
  interval: number;
}
/** Successful token response from the token endpoint. */
interface TokenResponse {
  access_token: string;
  refresh_token: string;
  token_type: string;
  // Access token lifetime, in seconds.
  expires_in: number;
  scope: string;
}
// ---------------------------------------------------------------------------
// Device Flow polling
// ---------------------------------------------------------------------------
/**
 * Poll the token endpoint until the device code is authorized (RFC 8628 §3.5).
 *
 * Handles `authorization_pending` (keep polling) and `slow_down` (increase
 * interval). Throws on `expired_token`, `access_denied`, unknown errors, or
 * when the device code's lifetime elapses.
 *
 * @param tokenEndpoint - Absolute URL of the token endpoint.
 * @param deviceCode - The device_code from the authorization request.
 * @param interval - Initial polling interval, in seconds.
 * @param expiresIn - Device code lifetime, in seconds.
 * @param fetchFn - Fetch implementation (may inject custom headers).
 * @returns The token response once the user approves the request.
 * @throws Error when the flow is denied, expires, or fails unexpectedly.
 */
async function pollForToken(
  tokenEndpoint: string,
  deviceCode: string,
  interval: number,
  expiresIn: number,
  fetchFn: typeof fetch,
): Promise<TokenResponse> {
  const deadline = Date.now() + expiresIn * 1000;
  let currentInterval = interval;
  while (Date.now() < deadline) {
    await new Promise((resolve) => setTimeout(resolve, currentInterval * 1000));
    const res = await fetchFn(tokenEndpoint, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({
        device_code: deviceCode,
        grant_type: "urn:ietf:params:oauth:grant-type:device_code",
      }),
    });
    if (res.ok) {
      return (await res.json()) as TokenResponse;
    }
    // Parse the OAuth error body defensively: a proxy or crashed server may
    // return non-JSON, and a bare `await res.json()` would throw a
    // SyntaxError that hides the real HTTP status.
    let body: { error?: string; interval?: number };
    try {
      body = (await res.json()) as { error?: string; interval?: number };
    } catch {
      throw new Error(`Token exchange failed: ${res.status} ${res.statusText}`);
    }
    if (body.error === "authorization_pending") {
      // Keep polling
      continue;
    }
    if (body.error === "slow_down") {
      // Use server-provided interval, or fall back to incrementing by 5s
      currentInterval = body.interval ?? currentInterval + 5;
      continue;
    }
    if (body.error === "expired_token") {
      throw new Error("Device code expired. Please try again.");
    }
    if (body.error === "access_denied") {
      throw new Error("Authorization was denied.");
    }
    // Unknown error
    throw new Error(`Token exchange failed: ${body.error || res.statusText}`);
  }
  throw new Error("Device code expired (timeout). Please try again.");
}
// ---------------------------------------------------------------------------
// Cloudflare Access handling
// ---------------------------------------------------------------------------
/**
 * Handle a Cloudflare Access redirect during login.
 *
 * 1. Try `cloudflared access token` for a cached JWT
 * 2. Try `cloudflared access login` to do the browser flow
 * 3. If cloudflared isn't available, print instructions for service tokens
 *
 * @param baseUrl - The EmDash instance URL protected by Cloudflare Access.
 * @returns The Access JWT, or null if auth couldn't be resolved.
 */
async function handleAccessRedirect(baseUrl: string): Promise<string | null> {
  consola.info("This site is behind Cloudflare Access.");
  // Try cached token first
  const cached = await getCachedAccessToken(baseUrl);
  if (cached) {
    consola.success("Using cached Cloudflare Access token from cloudflared.");
    return cached;
  }
  // Try interactive login via cloudflared
  consola.info("Launching browser for Cloudflare Access login...");
  const loginOk = await runCloudflaredLogin(baseUrl);
  if (loginOk) {
    // Login succeeded; the token should now be in cloudflared's cache.
    const token = await getCachedAccessToken(baseUrl);
    if (token) {
      consola.success("Cloudflare Access authentication successful.");
      return token;
    }
  }
  // cloudflared not available or login failed — guide the user
  console.log();
  consola.info("Could not authenticate with Cloudflare Access automatically.");
  consola.info("You have two options:");
  console.log();
  consola.info(` ${pc.bold("Option 1:")} Install cloudflared and run:`);
  console.log(` ${pc.cyan(`cloudflared access login ${baseUrl}`)}`);
  console.log(` ${pc.cyan(`emdash login --url ${baseUrl}`)}`);
  console.log();
  consola.info(` ${pc.bold("Option 2:")} Use a service token:`);
  console.log(
    ` ${pc.cyan(`emdash login --url ${baseUrl} -H "CF-Access-Client-Id: <id>" -H "CF-Access-Client-Secret: <secret>"`)}`,
  );
  console.log();
  return null;
}
// ---------------------------------------------------------------------------
// Commands
// ---------------------------------------------------------------------------
/**
 * `emdash login` — authenticate against an EmDash instance.
 *
 * Runs the OAuth Device Flow (RFC 8628): fetches the auth discovery
 * document (detecting Cloudflare Access via a manual redirect), requests a
 * device code, shows/opens the verification page, polls for the token, and
 * persists credentials (plus any custom headers) to the credential store.
 * Exits with code 2 on network/auth failures.
 */
export const loginCommand = defineCommand({
  meta: { name: "login", description: "Log in to an EmDash instance" },
  args: {
    url: {
      type: "string",
      alias: "u",
      description: "EmDash instance URL",
      default: "http://localhost:4321",
    },
    header: {
      type: "string",
      alias: "H",
      description: 'Custom header "Name: Value" (repeatable, or use EMDASH_HEADERS env)',
    },
  },
  async run({ args }) {
    const baseUrl = args.url || "http://localhost:4321";
    consola.start(`Connecting to ${baseUrl}...`);
    // Resolve custom headers from --header flags and EMDASH_HEADERS env
    const customHeaders = resolveCustomHeaders();
    let headerFetch = createHeaderAwareFetch(customHeaders);
    try {
      // Step 1: Fetch auth discovery.
      // Use redirect: "manual" to detect Cloudflare Access.
      const discoveryUrl = new URL("/_emdash/.well-known/auth", baseUrl);
      let res = await headerFetch(discoveryUrl, { redirect: "manual" });
      // Handle Cloudflare Access
      if (isAccessRedirect(res)) {
        const accessToken = await handleAccessRedirect(baseUrl);
        if (!accessToken) {
          return; // handleAccessRedirect printed instructions
        }
        // Add the Access token to our custom headers and rebuild the fetch wrapper
        customHeaders["cf-access-token"] = accessToken;
        headerFetch = createHeaderAwareFetch(customHeaders);
        res = await headerFetch(discoveryUrl);
      } else if (res.status === 301 || res.status === 302) {
        // Non-Access redirect — follow it normally
        res = await headerFetch(discoveryUrl);
      }
      if (!res.ok) {
        if (res.status === 404) {
          // No discovery endpoint: for localhost, probe the dev bypass route.
          const isLocal = baseUrl.includes("localhost") || baseUrl.includes("127.0.0.1");
          if (isLocal) {
            consola.info("Auth discovery not available. Trying dev bypass...");
            const bypassRes = await fetch(new URL("/_emdash/api/auth/dev-bypass", baseUrl), {
              redirect: "manual",
            });
            if (bypassRes.status === 302 || bypassRes.ok) {
              consola.success("Dev bypass available. Client will authenticate automatically.");
            } else {
              consola.error("Could not authenticate. Is the dev server running?");
            }
          } else {
            consola.error("Auth discovery endpoint not found. Is this an EmDash instance?");
          }
          return;
        }
        // Exit code 2: connection/authentication failure.
        consola.error(`Discovery failed: ${res.status} ${res.statusText}`);
        process.exit(2);
      }
      const discovery = (await res.json()) as DiscoveryResponse;
      consola.success(`Connected to ${discovery.instance?.name || "EmDash"}`);
      const deviceFlow = discovery.auth?.methods?.device_flow;
      if (!deviceFlow) {
        // No device flow available (external auth mode)
        consola.info("Device Flow is not available for this instance.");
        consola.info("Generate an API token in Settings > API Tokens");
        consola.info(`Then run: ${pc.cyan(`emdash --token <token> --url ${baseUrl}`)}`);
        return;
      }
      // Step 2: Request device code
      const codeUrl = new URL(deviceFlow.device_authorization_endpoint, baseUrl);
      const codeRes = await headerFetch(codeUrl, {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
          "X-EmDash-Request": "1",
        },
        body: JSON.stringify({
          client_id: "emdash-cli",
        }),
      });
      if (!codeRes.ok) {
        consola.error(`Failed to request device code: ${codeRes.status}`);
        process.exit(2);
      }
      const deviceCode = (await codeRes.json()) as DeviceCodeResponse;
      // Step 3: Display instructions
      console.log();
      consola.info(`Open your browser to:`);
      console.log(` ${pc.cyan(pc.bold(deviceCode.verification_uri))}`);
      console.log();
      consola.info(`Enter code: ${pc.yellow(pc.bold(deviceCode.user_code))}`);
      console.log();
      // Try to open browser (best-effort)
      try {
        const { execFile } = await import("node:child_process");
        if (process.platform === "darwin") {
          execFile("open", [deviceCode.verification_uri]);
        } else if (process.platform === "win32") {
          execFile("cmd", ["/c", "start", "", deviceCode.verification_uri]);
        } else {
          execFile("xdg-open", [deviceCode.verification_uri]);
        }
      } catch {
        // Ignore — user can open manually
      }
      // Step 4: Poll for token
      consola.start("Waiting for authorization...");
      const tokenUrl = new URL(deviceFlow.token_endpoint, baseUrl);
      const tokenResult = await pollForToken(
        tokenUrl.toString(),
        deviceCode.device_code,
        deviceCode.interval,
        deviceCode.expires_in,
        headerFetch,
      );
      // Step 5: Fetch user info (best-effort — used only for display/storage)
      let userEmail = "unknown";
      let userRole = "unknown";
      try {
        const meRes = await headerFetch(new URL("/_emdash/api/auth/me", baseUrl), {
          headers: { Authorization: `Bearer ${tokenResult.access_token}` },
        });
        if (meRes.ok) {
          const meJson = (await meRes.json()) as {
            data: { email?: string; role?: number };
          };
          const me = meJson.data;
          userEmail = me.email || "unknown";
          // Map role number to name
          const roleNames: Record<number, string> = {
            10: "subscriber",
            20: "contributor",
            30: "author",
            40: "editor",
            50: "admin",
          };
          userRole = (me.role ? roleNames[me.role] : undefined) || "unknown";
        }
      } catch {
        // Non-critical
      }
      // Step 6: Save credentials (persist custom headers so subsequent commands inherit them)
      const expiresAt = new Date(Date.now() + tokenResult.expires_in * 1000).toISOString();
      const hasCustomHeaders = Object.keys(customHeaders).length > 0;
      saveCredentials(baseUrl, {
        accessToken: tokenResult.access_token,
        refreshToken: tokenResult.refresh_token,
        expiresAt,
        ...(hasCustomHeaders ? { customHeaders } : {}),
        user: { email: userEmail, role: userRole },
      });
      consola.success(`Logged in as ${pc.bold(userEmail)} (${userRole})`);
      consola.info(`Token saved to ${pc.dim(resolveCredentialKey(baseUrl))}`);
    } catch (error) {
      consola.error(error instanceof Error ? error.message : "Login failed");
      process.exit(2);
    }
  },
});
export const logoutCommand = defineCommand({
  meta: { name: "logout", description: "Log out of an EmDash instance" },
  args: {
    url: {
      type: "string",
      alias: "u",
      description: "EmDash instance URL",
      default: "http://localhost:4321",
    },
  },
  /**
   * Revoke the stored refresh token server-side (best effort), then remove
   * the local credential entry for this instance.
   */
  async run({ args }) {
    const instanceUrl = args.url || "http://localhost:4321";
    const stored = getCredentials(instanceUrl);
    if (!stored) {
      consola.info("No stored credentials found for this instance.");
      return;
    }
    const fetchWithHeaders = createHeaderAwareFetch(stored.customHeaders ?? {});
    try {
      // Revoke the refresh token (which also revokes associated access tokens)
      await fetchWithHeaders(new URL("/_emdash/api/oauth/token/revoke", instanceUrl), {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({ token: stored.refreshToken }),
      });
    } catch {
      // Non-critical — the local removal still works
    }
    removeCredentials(instanceUrl);
    consola.success("Logged out successfully.");
  },
});
/**
 * `emdash whoami` — display the authenticated user for an instance.
 *
 * Resolves a token (--token flag > EMDASH_TOKEN > stored credentials,
 * refreshing an expired stored token via the refresh endpoint), then calls
 * /_emdash/api/auth/me and prints the user as text or JSON (--json).
 * Exits with code 2 when no usable token exists, 1 on API failures.
 */
export const whoamiCommand = defineCommand({
  meta: {
    name: "whoami",
    description: "Show current user and auth method",
  },
  args: {
    url: {
      type: "string",
      alias: "u",
      description: "EmDash instance URL",
      default: "http://localhost:4321",
    },
    token: {
      type: "string",
      alias: "t",
      description: "Auth token",
    },
    json: {
      type: "boolean",
      description: "Output as JSON",
    },
  },
  async run({ args }) {
    const baseUrl = args.url || "http://localhost:4321";
    // Resolve token: --token flag > EMDASH_TOKEN env > stored credentials
    let token = args.token || process.env["EMDASH_TOKEN"];
    let authMethod = token ? "token" : "none";
    let storedHeaders: Record<string, string> = {};
    if (!token) {
      const cred = getCredentials(baseUrl);
      if (cred) {
        token = cred.accessToken;
        authMethod = "stored";
        storedHeaders = cred.customHeaders ?? {};
        // Check if expired
        if (new Date(cred.expiresAt) < new Date()) {
          const headerFetch = createHeaderAwareFetch(storedHeaders);
          // Try to refresh
          try {
            const refreshRes = await headerFetch(
              new URL("/_emdash/api/oauth/token/refresh", baseUrl),
              {
                method: "POST",
                headers: { "Content-Type": "application/json" },
                body: JSON.stringify({
                  refresh_token: cred.refreshToken,
                  grant_type: "refresh_token",
                }),
              },
            );
            if (refreshRes.ok) {
              // Persist the refreshed access token and its new expiry.
              const refreshed = (await refreshRes.json()) as TokenResponse;
              token = refreshed.access_token;
              saveCredentials(baseUrl, {
                ...cred,
                accessToken: refreshed.access_token,
                expiresAt: new Date(Date.now() + refreshed.expires_in * 1000).toISOString(),
              });
            } else {
              consola.warn("Stored token expired and refresh failed. Run: emdash login");
              process.exit(2);
            }
          } catch {
            consola.warn("Stored token expired. Run: emdash login");
            process.exit(2);
          }
        }
      }
    }
    if (!token) {
      // Try dev bypass for local
      const isLocal = baseUrl.includes("localhost") || baseUrl.includes("127.0.0.1");
      if (isLocal) {
        authMethod = "dev-bypass";
        consola.info(`Auth method: ${pc.cyan("dev-bypass")}`);
        consola.info("No stored credentials. Client will use dev bypass for localhost.");
        return;
      }
      consola.error("Not logged in. Run: emdash login");
      process.exit(2);
    }
    const headerFetch = createHeaderAwareFetch(storedHeaders);
    try {
      const meRes = await headerFetch(new URL("/_emdash/api/auth/me", baseUrl), {
        headers: { Authorization: `Bearer ${token}` },
      });
      if (!meRes.ok) {
        if (meRes.status === 401) {
          consola.error("Token is invalid or expired. Run: emdash login");
          process.exit(1);
        }
        consola.error(`Failed to fetch user info: ${meRes.status}`);
        process.exit(1);
      }
      const raw = (await meRes.json()) as {
        data: {
          id: string;
          email: string;
          name: string | null;
          role: number;
        };
      };
      const me = raw.data;
      // Map numeric roles to their display names.
      const roleNames: Record<number, string> = {
        10: "subscriber",
        20: "contributor",
        30: "author",
        40: "editor",
        50: "admin",
      };
      if (args.json) {
        console.log(
          JSON.stringify({
            id: me.id,
            email: me.email,
            name: me.name,
            role: roleNames[me.role] || `unknown (${me.role})`,
            authMethod,
            url: baseUrl,
          }),
        );
      } else {
        consola.info(`Email: ${pc.bold(me.email)}`);
        if (me.name) consola.info(`Name: ${me.name}`);
        consola.info(`Role: ${pc.cyan(roleNames[me.role] || `unknown (${me.role})`)}`);
        consola.info(`Auth: ${pc.dim(authMethod)}`);
        consola.info(`URL: ${pc.dim(baseUrl)}`);
      }
    } catch (error) {
      consola.error(error instanceof Error ? error.message : "Unknown error");
      process.exit(1);
    }
  },
});

View File

@@ -0,0 +1,165 @@
/**
* emdash media
*
* Manage media items via the EmDash API
*/
import { readFile } from "node:fs/promises";
import { basename } from "node:path";
import { defineCommand } from "citty";
import { consola } from "consola";
import { connectionArgs, createClientFromArgs } from "../client-factory.js";
import { output } from "../output.js";
const listCommand = defineCommand({
  meta: {
    name: "list",
    description: "List media items",
  },
  args: {
    ...connectionArgs,
    mime: {
      type: "string",
      description: "Filter by MIME type (e.g., image/png)",
    },
    limit: {
      type: "string",
      description: "Number of items to return",
    },
    cursor: {
      type: "string",
      description: "Pagination cursor",
    },
  },
  // Query the media list with optional MIME filter and pagination.
  async run({ args }) {
    const api = createClientFromArgs(args);
    try {
      const page = await api.mediaList({
        mimeType: args.mime,
        limit: args.limit ? Number(args.limit) : undefined,
        cursor: args.cursor,
      });
      output(page, args);
    } catch (error) {
      const reason = error instanceof Error ? error.message : error;
      consola.error("Failed to list media:", reason);
      process.exit(1);
    }
  },
});
const uploadCommand = defineCommand({
  meta: {
    name: "upload",
    description: "Upload a media file",
  },
  args: {
    file: {
      type: "positional",
      description: "Path to the file to upload",
      required: true,
    },
    ...connectionArgs,
    alt: {
      type: "string",
      description: "Alt text for the media item",
    },
    caption: {
      type: "string",
      description: "Caption for the media item",
    },
  },
  /**
   * Read the file from disk and upload it via the API, with optional alt
   * text and caption metadata.
   */
  async run({ args }) {
    const client = createClientFromArgs(args);
    const filename = basename(args.file);
    // Bug fix: these messages previously printed the literal text
    // "$(unknown)" — interpolate the actual filename, which was computed
    // above but never used.
    consola.start(`Uploading ${filename}...`);
    try {
      const buffer = await readFile(args.file);
      const result = await client.mediaUpload(buffer, filename, {
        alt: args.alt,
        caption: args.caption,
      });
      consola.success(`Uploaded ${filename}`);
      output(result, args);
    } catch (error) {
      consola.error("Failed to upload:", error instanceof Error ? error.message : error);
      process.exit(1);
    }
  },
});
const getCommand = defineCommand({
  meta: {
    name: "get",
    description: "Get a media item",
  },
  args: {
    id: {
      type: "positional",
      description: "Media item ID",
      required: true,
    },
    ...connectionArgs,
  },
  // Fetch a single media item by ID and print it (JSON with --json).
  async run({ args }) {
    const api = createClientFromArgs(args);
    try {
      output(await api.mediaGet(args.id), args);
    } catch (error) {
      const reason = error instanceof Error ? error.message : error;
      consola.error("Failed to get media:", reason);
      process.exit(1);
    }
  },
});
const deleteCommand = defineCommand({
  meta: {
    name: "delete",
    description: "Delete a media item",
  },
  args: {
    id: {
      type: "positional",
      description: "Media item ID",
      required: true,
    },
    ...connectionArgs,
  },
  // Delete a media item by ID; confirm via JSON or a success log line.
  async run({ args }) {
    const api = createClientFromArgs(args);
    try {
      await api.mediaDelete(args.id);
      if (args.json) {
        output({ deleted: true }, args);
      } else {
        consola.success(`Deleted media item ${args.id}`);
      }
    } catch (error) {
      const reason = error instanceof Error ? error.message : error;
      consola.error("Failed to delete media:", reason);
      process.exit(1);
    }
  },
});
/**
 * `emdash media` — parent command grouping the media subcommands
 * (list, upload, get, delete).
 */
export const mediaCommand = defineCommand({
  meta: {
    name: "media",
    description: "Manage media items",
  },
  subCommands: {
    list: listCommand,
    upload: uploadCommand,
    get: getCommand,
    delete: deleteCommand,
  },
});

View File

@@ -0,0 +1,67 @@
/**
* emdash menu
*
* Manage menus via the EmDash REST API.
*/
import { defineCommand } from "citty";
import { consola } from "consola";
import { connectionArgs, createClientFromArgs } from "../client-factory.js";
import { output } from "../output.js";
const listCommand = defineCommand({
  meta: {
    name: "list",
    description: "List all menus",
  },
  args: {
    ...connectionArgs,
  },
  // Fetch all menus and print them (JSON with --json).
  async run({ args }) {
    try {
      const api = createClientFromArgs(args);
      output(await api.menus(), args);
    } catch (error) {
      const reason = error instanceof Error ? error.message : "Unknown error";
      consola.error(reason);
      process.exit(1);
    }
  },
});
const getCommand = defineCommand({
  meta: {
    name: "get",
    description: "Get a menu with its items",
  },
  args: {
    name: {
      type: "positional",
      description: "Menu name",
      required: true,
    },
    ...connectionArgs,
  },
  // Look up a single menu by name and print it (JSON with --json).
  async run({ args }) {
    try {
      const api = createClientFromArgs(args);
      output(await api.menu(args.name), args);
    } catch (error) {
      const reason = error instanceof Error ? error.message : "Unknown error";
      consola.error(reason);
      process.exit(1);
    }
  },
});
/**
 * `emdash menu` — parent command grouping the menu subcommands (list, get).
 */
export const menuCommand = defineCommand({
  meta: {
    name: "menu",
    description: "Manage menus",
  },
  subCommands: {
    list: listCommand,
    get: getCommand,
  },
});

View File

@@ -0,0 +1,291 @@
/**
* emdash plugin init
*
* Scaffold a new EmDash plugin. Generates the standard-format boilerplate:
* src/index.ts -- descriptor factory
* src/sandbox-entry.ts -- definePlugin({ hooks, routes })
* package.json
* tsconfig.json
*
* Use --native to generate native-format boilerplate instead (createPlugin + React admin).
*
*/
import { mkdir, writeFile } from "node:fs/promises";
import { resolve, join, basename } from "node:path";
import { defineCommand } from "citty";
import consola from "consola";
import { fileExists } from "./bundle-utils.js";
// Valid plugin slug: lowercase alphanumeric segments separated by single hyphens.
const SLUG_RE = /^[a-z][a-z0-9]*(-[a-z0-9]+)*$/;
// Matches an npm scope prefix like "@org/" so it can be stripped from the slug.
const SCOPE_RE = /^@[^/]+\//;
/**
 * `emdash plugin init` — scaffold a new plugin in a directory.
 *
 * Derives the plugin name from --name or the target directory, validates the
 * (unscoped) slug, refuses to overwrite an existing package.json, then
 * writes either standard-format or (with --native) native-format boilerplate
 * and prints next-step instructions.
 */
export const pluginInitCommand = defineCommand({
  meta: {
    name: "init",
    description: "Scaffold a new plugin",
  },
  args: {
    dir: {
      type: "string",
      description: "Directory to create the plugin in (default: current directory)",
      default: ".",
    },
    name: {
      type: "string",
      description: "Plugin name/id (e.g. my-plugin or @org/my-plugin)",
    },
    native: {
      type: "boolean",
      description: "Generate native-format plugin (createPlugin + React admin)",
      default: false,
    },
  },
  async run({ args }) {
    const targetDir = resolve(args.dir);
    const isNative = args.native;
    // Derive plugin name from --name or directory name
    let pluginName = args.name || basename(targetDir);
    if (!pluginName || pluginName === ".") {
      // Fall back to the resolved cwd's basename when no usable name exists.
      pluginName = basename(resolve("."));
    }
    // Strip scope for the slug
    const slug = pluginName.replace(SCOPE_RE, "");
    if (!SLUG_RE.test(slug)) {
      consola.error(
        `Invalid plugin name "${pluginName}". ` +
          "Use lowercase letters, numbers, and hyphens (e.g. my-plugin).",
      );
      process.exit(1);
    }
    // Check if directory already has files
    const srcDir = join(targetDir, "src");
    const pkgPath = join(targetDir, "package.json");
    if (await fileExists(pkgPath)) {
      consola.error(`package.json already exists in ${targetDir}`);
      process.exit(1);
    }
    consola.start(`Scaffolding ${isNative ? "native" : "standard"} plugin: ${pluginName}`);
    await mkdir(srcDir, { recursive: true });
    if (isNative) {
      await scaffoldNative(targetDir, srcDir, pluginName, slug);
    } else {
      await scaffoldStandard(targetDir, srcDir, pluginName, slug);
    }
    consola.success(`Plugin scaffolded in ${targetDir}`);
    consola.info("Next steps:");
    // Step numbers shift by one when a `cd` step is needed first.
    if (args.dir !== ".") {
      consola.info(`  1. cd ${args.dir}`);
    }
    consola.info(`  ${args.dir !== "." ? "2" : "1"}. pnpm install`);
    if (isNative) {
      consola.info(`  ${args.dir !== "." ? "3" : "2"}. Edit src/index.ts to add hooks and routes`);
    } else {
      consola.info(
        `  ${args.dir !== "." ? "3" : "2"}. Edit src/sandbox-entry.ts to add hooks and routes`,
      );
    }
    consola.info(`  ${args.dir !== "." ? "4" : "3"}. emdash plugin validate --dir .`);
  },
});
// ── Standard format scaffolding ──────────────────────────────────
/**
 * Write standard-format plugin boilerplate: package.json, tsconfig.json,
 * a descriptor factory (src/index.ts), and a sandboxed entrypoint
 * (src/sandbox-entry.ts).
 *
 * @param targetDir - Plugin root directory (receives package.json, tsconfig.json).
 * @param srcDir - The src/ directory (receives index.ts, sandbox-entry.ts).
 * @param pluginName - Full package name, possibly scoped (e.g. @org/my-plugin).
 * @param slug - Unscoped name used to derive the factory function name.
 */
async function scaffoldStandard(
  targetDir: string,
  srcDir: string,
  pluginName: string,
  slug: string,
): Promise<void> {
  // Derive the camelCase function name from slug
  const fnName = slug
    .split("-")
    .map((s, i) => (i === 0 ? s : s[0].toUpperCase() + s.slice(1)))
    .join("");
  // package.json
  await writeFile(
    join(targetDir, "package.json"),
    JSON.stringify(
      {
        name: pluginName,
        version: "0.1.0",
        type: "module",
        exports: {
          ".": "./src/index.ts",
          "./sandbox": "./src/sandbox-entry.ts",
        },
        files: ["src"],
        peerDependencies: {
          emdash: "*",
        },
      },
      null,
      "\t",
    ) + "\n",
  );
  // tsconfig.json
  await writeFile(
    join(targetDir, "tsconfig.json"),
    JSON.stringify(
      {
        compilerOptions: {
          target: "ES2022",
          module: "preserve",
          moduleResolution: "bundler",
          strict: true,
          esModuleInterop: true,
          declaration: true,
          outDir: "./dist",
          rootDir: "./src",
        },
        include: ["src/**/*"],
        exclude: ["node_modules", "dist"],
      },
      null,
      "\t",
    ) + "\n",
  );
  // src/index.ts -- descriptor factory
  await writeFile(
    join(srcDir, "index.ts"),
    `import type { PluginDescriptor } from "emdash";
export function ${fnName}Plugin(): PluginDescriptor {
\treturn {
\t\tid: "${pluginName}",
\t\tversion: "0.1.0",
\t\tformat: "standard",
\t\tentrypoint: "${pluginName}/sandbox",
\t\tcapabilities: [],
\t};
}
`,
  );
  // src/sandbox-entry.ts -- plugin definition
  await writeFile(
    join(srcDir, "sandbox-entry.ts"),
    `import { definePlugin } from "emdash";
import type { PluginContext } from "emdash";
export default definePlugin({
\thooks: {
\t\t"content:afterSave": {
\t\t\thandler: async (event: any, ctx: PluginContext) => {
\t\t\t\tctx.log.info("Content saved", {
\t\t\t\t\tcollection: event.collection,
\t\t\t\t\tid: event.content.id,
\t\t\t\t});
\t\t\t},
\t\t},
\t},
});
`,
  );
}
// ── Native format scaffolding ────────────────────────────────────
/**
 * Write native-format plugin boilerplate: package.json, tsconfig.json, and
 * a single src/index.ts containing both the descriptor factory and the
 * createPlugin() definition.
 *
 * @param targetDir - Plugin root directory (receives package.json, tsconfig.json).
 * @param srcDir - The src/ directory (receives index.ts).
 * @param pluginName - Full package name, possibly scoped (e.g. @org/my-plugin).
 * @param slug - Unscoped name used to derive the factory function name.
 */
async function scaffoldNative(
  targetDir: string,
  srcDir: string,
  pluginName: string,
  slug: string,
): Promise<void> {
  // Derive the camelCase factory function name from the slug.
  const fnName = slug
    .split("-")
    .map((s, i) => (i === 0 ? s : s[0].toUpperCase() + s.slice(1)))
    .join("");
  // package.json
  await writeFile(
    join(targetDir, "package.json"),
    JSON.stringify(
      {
        name: pluginName,
        version: "0.1.0",
        type: "module",
        exports: {
          ".": "./src/index.ts",
        },
        files: ["src"],
        peerDependencies: {
          emdash: "*",
        },
      },
      null,
      "\t",
    ) + "\n",
  );
  // tsconfig.json
  await writeFile(
    join(targetDir, "tsconfig.json"),
    JSON.stringify(
      {
        compilerOptions: {
          target: "ES2022",
          module: "preserve",
          moduleResolution: "bundler",
          strict: true,
          esModuleInterop: true,
          declaration: true,
          outDir: "./dist",
          rootDir: "./src",
        },
        include: ["src/**/*"],
        exclude: ["node_modules", "dist"],
      },
      null,
      "\t",
    ) + "\n",
  );
  // src/index.ts -- descriptor + createPlugin
  await writeFile(
    join(srcDir, "index.ts"),
    `import { definePlugin } from "emdash";
import type { PluginDescriptor } from "emdash";
export function ${fnName}Plugin(): PluginDescriptor {
\treturn {
\t\tid: "${pluginName}",
\t\tversion: "0.1.0",
\t\tformat: "native",
\t\tentrypoint: "${pluginName}",
\t\toptions: {},
\t};
}
export function createPlugin() {
\treturn definePlugin({
\t\tid: "${pluginName}",
\t\tversion: "0.1.0",
\t\thooks: {
\t\t\t"content:afterSave": async (event, ctx) => {
\t\t\t\tctx.log.info("Content saved", {
\t\t\t\t\tcollection: event.collection,
\t\t\t\t\tid: event.content.id,
\t\t\t\t});
\t\t\t},
\t\t},
\t});
}
export default createPlugin;
`,
  );
}

View File

@@ -0,0 +1,31 @@
/**
* emdash plugin validate
*
* Runs bundle validation without producing a tarball.
* Thin wrapper around `emdash plugin bundle --validate-only`.
*
*/
import { defineCommand, runCommand } from "citty";
import { bundleCommand } from "./bundle.js";
/**
 * `emdash plugin validate` — run bundle validation only.
 *
 * Delegates to `emdash plugin bundle` in validate-only mode so both commands
 * share exactly the same checks without producing a tarball.
 */
export const pluginValidateCommand = defineCommand({
  meta: {
    name: "validate",
    description: "Validate a plugin without producing a tarball (same checks as bundle)",
  },
  args: {
    dir: {
      type: "string",
      description: "Plugin directory (default: current directory)",
      default: ".",
    },
  },
  async run({ args }) {
    // Delegate to the bundle command with validateOnly flag
    // NOTE(review): confirm "--validateOnly" matches the flag name declared by
    // the bundle command (kebab- vs camelCase spelling) — not visible here.
    await runCommand(bundleCommand, {
      rawArgs: ["--dir", args.dir, "--validateOnly"],
    });
  },
});

View File

@@ -0,0 +1,33 @@
/**
* emdash plugin
*
* Plugin management commands grouped under a single namespace.
*
* Subcommands:
* - init: Scaffold a new plugin
* - bundle: Bundle a plugin for marketplace distribution
* - validate: Run bundle validation without producing a tarball
* - publish: Publish a plugin to the marketplace
* - login: Log in to the marketplace via GitHub
* - logout: Log out of the marketplace
*
*/
import { defineCommand } from "citty";
import { bundleCommand } from "./bundle.js";
import { pluginInitCommand } from "./plugin-init.js";
import { pluginValidateCommand } from "./plugin-validate.js";
import { publishCommand, marketplaceLoginCommand, marketplaceLogoutCommand } from "./publish.js";
// Umbrella command: groups all plugin tooling under `emdash plugin <subcommand>`.
export const pluginCommand = defineCommand({
  meta: { name: "plugin", description: "Manage plugins" },
  subCommands: {
    init: pluginInitCommand, // scaffold a new plugin
    bundle: bundleCommand, // build a marketplace tarball
    validate: pluginValidateCommand, // bundle checks without producing output
    publish: publishCommand, // upload to the marketplace
    login: marketplaceLoginCommand, // GitHub device-flow login
    logout: marketplaceLogoutCommand, // clear stored marketplace credentials
  },
});

View File

@@ -0,0 +1,699 @@
/**
* emdash plugin publish
*
* Publishes a plugin tarball to the EmDash Marketplace.
*
* Flow:
* 1. Resolve tarball (from --tarball path, or build via `emdash plugin bundle`)
* 2. Read manifest.json from tarball to show summary
* 3. Authenticate (stored credential or GitHub device flow)
* 4. Pre-publish validation (check plugin exists, version not published)
* 5. Upload via multipart POST
* 6. Display audit results
*/
import { readFile, stat } from "node:fs/promises";
import { resolve, basename } from "node:path";
import { defineCommand } from "citty";
import consola from "consola";
import { createGzipDecoder, unpackTar } from "modern-tar";
import pc from "picocolors";
import { pluginManifestSchema } from "../../plugins/manifest-schema.js";
import {
getMarketplaceCredential,
saveMarketplaceCredential,
removeMarketplaceCredential,
} from "../credentials.js";
/** Default marketplace registry; override per-invocation with --registry. */
const DEFAULT_REGISTRY = "https://marketplace.emdashcms.com";
// ── GitHub Device Flow ──────────────────────────────────────────
// The wire-format types below use snake_case because they mirror the GitHub /
// marketplace JSON payloads verbatim.
/** Response from GitHub's device-authorization endpoint. */
interface DeviceCodeResponse {
  device_code: string;
  user_code: string;
  verification_uri: string;
  expires_in: number; // seconds until the device code expires (used for the poll deadline)
  interval: number; // minimum seconds between token polls
}
/** GitHub token endpoint response: either an access_token or an OAuth error code. */
interface GitHubTokenResponse {
  access_token?: string;
  token_type?: string;
  error?: string;
  error_description?: string;
  interval?: number; // replacement poll interval sent alongside "slow_down"
}
/** Marketplace response after exchanging a GitHub token: our JWT plus author identity. */
interface MarketplaceAuthResponse {
  token: string;
  author: {
    id: string;
    name: string;
    avatarUrl: string;
  };
}
/** Endpoints advertised by the marketplace's auth discovery route. */
interface AuthDiscovery {
  github: {
    clientId: string;
    deviceAuthorizationEndpoint: string;
    tokenEndpoint: string;
  };
  marketplace: {
    deviceTokenEndpoint: string;
  };
}
/**
 * Authenticate with the marketplace via GitHub Device Flow.
 * Returns the marketplace JWT and author info.
 *
 * Sequence: discover the GitHub client_id from the marketplace, request a
 * device code from GitHub, show the user the verification URL/code (and try to
 * open a browser), poll GitHub until approved, then exchange the GitHub token
 * for a marketplace JWT. Throws on any unrecoverable step.
 */
async function authenticateViaDeviceFlow(registryUrl: string): Promise<MarketplaceAuthResponse> {
  // Step 1: Fetch auth discovery to get GitHub client_id
  consola.start("Fetching auth configuration...");
  const discoveryRes = await fetch(new URL("/api/v1/auth/discovery", registryUrl));
  if (!discoveryRes.ok) {
    throw new Error(`Marketplace unreachable: ${discoveryRes.status} ${discoveryRes.statusText}`);
  }
  const discovery = (await discoveryRes.json()) as AuthDiscovery;
  // Step 2: Request device code from GitHub
  const deviceRes = await fetch(discovery.github.deviceAuthorizationEndpoint, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Accept: "application/json",
    },
    body: JSON.stringify({
      client_id: discovery.github.clientId,
      scope: "read:user user:email",
    }),
  });
  if (!deviceRes.ok) {
    throw new Error(`GitHub device flow failed: ${deviceRes.status}`);
  }
  const deviceCode = (await deviceRes.json()) as DeviceCodeResponse;
  // Step 3: Display instructions
  console.log();
  consola.info("Open your browser to:");
  console.log(` ${pc.cyan(pc.bold(deviceCode.verification_uri))}`);
  console.log();
  consola.info(`Enter code: ${pc.yellow(pc.bold(deviceCode.user_code))}`);
  console.log();
  // Try to open browser
  // Best effort only — failures fall through and the user opens the URL manually.
  try {
    const { execFile } = await import("node:child_process");
    if (process.platform === "darwin") {
      execFile("open", [deviceCode.verification_uri]);
    } else if (process.platform === "win32") {
      // "start" is a cmd builtin; the empty "" argument is the window title.
      execFile("cmd", ["/c", "start", "", deviceCode.verification_uri]);
    } else {
      execFile("xdg-open", [deviceCode.verification_uri]);
    }
  } catch {
    // User can open manually
  }
  // Step 4: Poll GitHub for access token
  consola.start("Waiting for authorization...");
  const githubToken = await pollGitHubDeviceFlow(
    discovery.github.tokenEndpoint,
    discovery.github.clientId,
    deviceCode.device_code,
    deviceCode.interval,
    deviceCode.expires_in,
  );
  // Step 5: Exchange GitHub token for marketplace JWT
  consola.start("Authenticating with marketplace...");
  // Resolved against registryUrl so discovery may return a path or an absolute URL.
  const deviceTokenUrl = new URL(discovery.marketplace.deviceTokenEndpoint, registryUrl);
  const authRes = await fetch(deviceTokenUrl, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ access_token: githubToken }),
  });
  if (!authRes.ok) {
    const body = (await authRes.json().catch(() => ({}))) as { error?: string };
    throw new Error(`Marketplace auth failed: ${body.error ?? authRes.statusText}`);
  }
  return (await authRes.json()) as MarketplaceAuthResponse;
}
/**
 * Poll GitHub's OAuth token endpoint until the user approves the device code.
 * Honors the server's `slow_down` back-off and fails once the device code's
 * lifetime (expiresIn seconds) has elapsed.
 */
async function pollGitHubDeviceFlow(
  tokenEndpoint: string,
  clientId: string,
  deviceCode: string,
  interval: number,
  expiresIn: number,
): Promise<string> {
  const expiresAt = Date.now() + expiresIn * 1000;
  let delaySeconds = interval;
  while (Date.now() < expiresAt) {
    // Always wait the mandated interval before (re-)polling.
    await new Promise((wake) => setTimeout(wake, delaySeconds * 1000));
    const response = await fetch(tokenEndpoint, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        Accept: "application/json",
      },
      body: JSON.stringify({
        client_id: clientId,
        device_code: deviceCode,
        grant_type: "urn:ietf:params:oauth:grant-type:device_code",
      }),
    });
    const payload = (await response.json()) as GitHubTokenResponse;
    if (payload.access_token) {
      return payload.access_token;
    }
    switch (payload.error) {
      case "authorization_pending":
        continue; // user hasn't approved yet — keep polling
      case "slow_down":
        // GitHub asks us to back off; prefer its suggested interval.
        delaySeconds = payload.interval ?? delaySeconds + 5;
        continue;
      case "expired_token":
        throw new Error("Device code expired. Please try again.");
      case "access_denied":
        throw new Error("Authorization was denied.");
      default:
        throw new Error(`GitHub token exchange failed: ${payload.error ?? "unknown error"}`);
    }
  }
  throw new Error("Device code expired (timeout). Please try again.");
}
// ── Tarball reading ─────────────────────────────────────────────
// Subset of the full plugin manifest that publish needs for display and
// pre-publish checks.
const manifestSummarySchema = pluginManifestSchema.pick({
  id: true,
  version: true,
  capabilities: true,
  allowedHosts: true,
});
// NOTE(review): reaches into zod's internal `_zod` slot for the output type —
// presumably how this project's zod version exposes it; confirm
// `z.output<typeof manifestSummarySchema>` isn't available before changing.
type ManifestSummary = typeof manifestSummarySchema._zod.output;
/**
 * Extract and validate manifest.json from a gzipped plugin tarball,
 * entirely in memory (nothing is written to disk).
 * Throws if the entry is missing or fails schema validation.
 */
async function readManifestFromTarball(tarballPath: string): Promise<ManifestSummary> {
  const bytes = await readFile(tarballPath);
  // Wrap the buffer in a one-shot ReadableStream so it can feed the gzip decoder.
  const byteView = new Uint8Array(bytes.buffer, bytes.byteOffset, bytes.byteLength);
  const stream = new ReadableStream<Uint8Array>({
    start(controller) {
      controller.enqueue(byteView);
      controller.close();
    },
  });
  // Only materialize the single entry we care about.
  const entries = await unpackTar(stream.pipeThrough(createGzipDecoder()), {
    filter: (header) => header.name === "manifest.json",
  });
  const manifestEntry = entries.find((entry) => entry.header.name === "manifest.json");
  if (!manifestEntry?.data) {
    throw new Error("Tarball does not contain manifest.json");
  }
  const parsed: unknown = JSON.parse(new TextDecoder().decode(manifestEntry.data));
  const result = manifestSummarySchema.safeParse(parsed);
  if (!result.success) {
    throw new Error(`Invalid manifest.json: ${result.error.message}`);
  }
  return result.data;
}
// ── Audit polling helpers ───────────────────────────────────────
/** How often to re-check the version status while an audit is pending. */
const POLL_INTERVAL_MS = 3000;
/** Give up waiting for the audit after this long. */
const POLL_TIMEOUT_MS = 120_000; // 2 minutes
/** Marketplace version-status payload (snake_case mirrors the wire format). */
interface VersionStatusResponse {
  version: string;
  status: string; // "pending" while the audit runs; polling stops on any other value
  audit_verdict?: string | null;
  audit_id?: string | null;
  image_audit_verdict?: string | null;
}
/**
 * Poll the marketplace version endpoint until the status leaves "pending"
 * or POLL_TIMEOUT_MS elapses.
 * Network errors and non-2xx responses are retried until the deadline.
 * Returns the final version payload, or null on timeout.
 */
async function pollVersionStatus(
  versionUrl: string,
  token: string,
): Promise<VersionStatusResponse | null> {
  const giveUpAt = Date.now() + POLL_TIMEOUT_MS;
  while (Date.now() < giveUpAt) {
    await new Promise((wake) => setTimeout(wake, POLL_INTERVAL_MS));
    try {
      const response = await fetch(versionUrl, {
        headers: { Authorization: `Bearer ${token}` },
      });
      if (!response.ok) continue;
      const payload = (await response.json()) as VersionStatusResponse;
      if (payload.status !== "pending") {
        return payload;
      }
    } catch {
      // Network hiccup — keep polling until the deadline.
    }
  }
  return null;
}
/**
 * Map a pass/warn/other audit verdict onto its display color
 * (pass=green, warn=yellow, anything else red).
 * Shared by both display helpers below — previously this ternary chain was
 * duplicated three times.
 */
function verdictColorFor(verdict: string): (text: string) => string {
  return verdict === "pass" ? pc.green : verdict === "warn" ? pc.yellow : pc.red;
}
/**
 * Print the final audit outcome fetched from the version-status endpoint
 * (used after the async 202 + poll flow).
 */
function displayAuditResults(version: VersionStatusResponse): void {
  // Publication status has its own palette: published=green, flagged=yellow, else red.
  const statusColor =
    version.status === "published" ? pc.green : version.status === "flagged" ? pc.yellow : pc.red;
  consola.info(` Status: ${statusColor(version.status)}`);
  if (version.audit_verdict) {
    consola.info(` Audit: ${verdictColorFor(version.audit_verdict)(version.audit_verdict)}`);
  }
  if (version.image_audit_verdict) {
    consola.info(
      ` Image audit: ${verdictColorFor(version.image_audit_verdict)(version.image_audit_verdict)}`,
    );
  }
}
/**
 * Print audit results returned inline in a synchronous publish response,
 * including one line per finding.
 */
function displayInlineAuditResults(
  audit: {
    verdict: string;
    riskScore: number;
    summary: string;
    findings: { category: string; severity: string; description: string }[];
  },
  imageAudit: { verdict: string } | null,
): void {
  consola.info(
    ` Audit: ${verdictColorFor(audit.verdict)(audit.verdict)} (risk: ${audit.riskScore}/100)`,
  );
  if (audit.findings.length > 0) {
    for (const finding of audit.findings) {
      // High-severity findings get a red "!", everything else a yellow "~".
      const icon = finding.severity === "high" ? pc.red("!") : pc.yellow("~");
      consola.info(` ${icon} [${finding.category}] ${finding.description}`);
    }
  }
  if (imageAudit) {
    consola.info(` Image audit: ${verdictColorFor(imageAudit.verdict)(imageAudit.verdict)}`);
  }
}
// ── Publish command ─────────────────────────────────────────────
export const publishCommand = defineCommand({
  meta: {
    name: "publish",
    description: "Publish a plugin to the EmDash Marketplace",
  },
  args: {
    tarball: {
      type: "string",
      description: "Path to plugin tarball (default: build first via `emdash plugin bundle`)",
    },
    dir: {
      type: "string",
      description: "Plugin directory (used with --build, default: current directory)",
      default: process.cwd(),
    },
    build: {
      type: "boolean",
      description: "Build the plugin before publishing",
      default: false,
    },
    registry: {
      type: "string",
      description: "Marketplace registry URL",
      default: DEFAULT_REGISTRY,
    },
    "no-wait": {
      type: "boolean",
      description: "Exit immediately after upload without waiting for audit (useful for CI)",
      default: false,
    },
  },
  async run({ args }) {
    const registryUrl = args.registry;
    // ── Step 1: Resolve tarball ──
    // Three sources, in priority order: explicit --tarball path, a fresh
    // --build, or an existing tarball found in <dir>/dist.
    let tarballPath: string;
    if (args.tarball) {
      tarballPath = resolve(args.tarball);
    } else if (args.build) {
      // Build first, then find the output tarball
      consola.start("Building plugin...");
      const pluginDir = resolve(args.dir);
      try {
        // Imported lazily so publish without --build never loads the bundler.
        const { runCommand } = await import("citty");
        const { bundleCommand } = await import("./bundle.js");
        await runCommand(bundleCommand, {
          rawArgs: ["--dir", pluginDir],
        });
      } catch {
        consola.error("Build failed");
        process.exit(1);
      }
      // Find the tarball in dist/
      const { readdir } = await import("node:fs/promises");
      const distDir = resolve(pluginDir, "dist");
      const files = await readdir(distDir);
      const tarball = files.find((f) => f.endsWith(".tar.gz"));
      if (!tarball) {
        consola.error("Build succeeded but no .tar.gz found in dist/");
        process.exit(1);
      }
      tarballPath = resolve(distDir, tarball);
    } else {
      // Look for an existing tarball in dist/
      const pluginDir = resolve(args.dir);
      const { readdir } = await import("node:fs/promises");
      try {
        const distDir = resolve(pluginDir, "dist");
        const files = await readdir(distDir);
        const tarball = files.find((f) => f.endsWith(".tar.gz"));
        if (tarball) {
          tarballPath = resolve(distDir, tarball);
        } else {
          consola.error("No tarball found. Run `emdash plugin bundle` first or use --build.");
          process.exit(1);
        }
      } catch {
        // readdir throws when dist/ itself is missing.
        consola.error(
          "No dist/ directory found. Run `emdash plugin bundle` first or use --build.",
        );
        process.exit(1);
      }
    }
    const tarballStat = await stat(tarballPath);
    const sizeKB = (tarballStat.size / 1024).toFixed(1);
    consola.info(`Tarball: ${pc.dim(tarballPath)} (${sizeKB}KB)`);
    // ── Step 2: Read manifest from tarball ──
    // The tarball is the source of truth for id/version/capabilities.
    const manifest = await readManifestFromTarball(tarballPath);
    console.log();
    consola.info(`Plugin: ${pc.bold(`${manifest.id}@${manifest.version}`)}`);
    if (manifest.capabilities.length > 0) {
      consola.info(`Capabilities: ${manifest.capabilities.join(", ")}`);
    }
    if (manifest.allowedHosts?.length) {
      consola.info(`Allowed hosts: ${manifest.allowedHosts.join(", ")}`);
    }
    console.log();
    // ── Step 3: Authenticate ──
    //
    // Priority: EMDASH_MARKETPLACE_TOKEN env var > stored credential > interactive device flow.
    // The env var enables CI pipelines (including seed token auth) without interactive login.
    let token: string;
    const envToken = process.env.EMDASH_MARKETPLACE_TOKEN;
    const stored = !envToken ? getMarketplaceCredential(registryUrl) : null;
    if (envToken) {
      token = envToken;
      consola.info("Using EMDASH_MARKETPLACE_TOKEN for authentication");
    } else if (stored) {
      token = stored.token;
      consola.info(`Authenticated as ${pc.bold(stored.author?.name ?? "unknown")}`);
    } else {
      consola.info("Not logged in to marketplace. Starting GitHub authentication...");
      const result = await authenticateViaDeviceFlow(registryUrl);
      token = result.token;
      // Save for next time
      saveMarketplaceCredential(registryUrl, {
        token: result.token,
        expiresAt: new Date(Date.now() + 30 * 86400 * 1000).toISOString(), // 30 days
        author: { id: result.author.id, name: result.author.name },
      });
      consola.success(`Authenticated as ${pc.bold(result.author.name)}`);
    }
    // ── Step 4: Pre-publish validation ──
    consola.start("Checking marketplace...");
    // Check if plugin exists
    const pluginRes = await fetch(new URL(`/api/v1/plugins/${manifest.id}`, registryUrl));
    if (pluginRes.status === 404 && !envToken) {
      // Plugin doesn't exist — register it first.
      // When using env token (seed), the server auto-registers on publish.
      consola.info(`Plugin ${pc.bold(manifest.id)} not found in marketplace. Registering...`);
      const createRes = await fetch(new URL("/api/v1/plugins", registryUrl), {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
          Authorization: `Bearer ${token}`,
        },
        body: JSON.stringify({
          id: manifest.id,
          name: manifest.id, // Use ID as name initially
          capabilities: manifest.capabilities,
        }),
      });
      if (!createRes.ok) {
        const body = (await createRes.json().catch(() => ({}))) as { error?: string };
        if (createRes.status === 401) {
          // Token expired — clear and retry
          removeMarketplaceCredential(registryUrl);
          consola.error(
            "Authentication expired. Please run `emdash plugin publish` again to re-authenticate.",
          );
          process.exit(1);
        }
        consola.error(`Failed to register plugin: ${body.error ?? createRes.statusText}`);
        process.exit(1);
      }
      consola.success(`Registered ${pc.bold(manifest.id)}`);
    } else if (pluginRes.status === 404 && envToken) {
      // Using env token — server handles auto-registration on publish
      consola.info(`Plugin ${pc.bold(manifest.id)} will be auto-registered on publish`);
    } else if (!pluginRes.ok) {
      consola.error(`Marketplace error: ${pluginRes.status}`);
      process.exit(1);
    }
    // ── Step 5: Upload ──
    consola.start(`Publishing ${manifest.id}@${manifest.version}...`);
    const tarballData = await readFile(tarballPath);
    const formData = new FormData();
    formData.append(
      "bundle",
      new Blob([tarballData], { type: "application/gzip" }),
      basename(tarballPath),
    );
    const uploadUrl = new URL(`/api/v1/plugins/${manifest.id}/versions`, registryUrl);
    const uploadRes = await fetch(uploadUrl, {
      method: "POST",
      headers: {
        Authorization: `Bearer ${token}`,
      },
      body: formData,
    });
    // 202 is already within `ok`, but it's kept explicit: it marks the
    // async-audit path handled in Step 6.
    if (!uploadRes.ok && uploadRes.status !== 202) {
      const body = (await uploadRes.json().catch(() => ({}))) as {
        error?: string;
        latestVersion?: string;
        audit?: { verdict: string; summary: string; findings: unknown[] };
      };
      if (uploadRes.status === 401) {
        if (envToken) {
          consola.error("EMDASH_MARKETPLACE_TOKEN was rejected by the marketplace.");
        } else {
          removeMarketplaceCredential(registryUrl);
          consola.error("Authentication expired. Please run `emdash plugin publish` again.");
        }
        process.exit(1);
      }
      if (uploadRes.status === 409) {
        // Version conflict — server may report the current latest version.
        if (body.latestVersion) {
          consola.error(`Version ${manifest.version} must be greater than ${body.latestVersion}`);
        } else {
          consola.error(body.error ?? "Version conflict");
        }
        process.exit(1);
      }
      if (uploadRes.status === 422 && body.audit) {
        // Failed security audit
        consola.error("Plugin failed security audit:");
        consola.error(` Verdict: ${pc.red(body.audit.verdict)}`);
        consola.error(` Summary: ${body.audit.summary}`);
        process.exit(1);
      }
      consola.error(`Publish failed: ${body.error ?? uploadRes.statusText}`);
      process.exit(1);
    }
    // ── Step 6: Handle response ──
    const result = (await uploadRes.json()) as {
      version: string;
      bundleSize: number;
      checksum: string;
      publishedAt: string;
      status?: string;
      workflowId?: string;
      audit?: {
        verdict: string;
        riskScore: number;
        summary: string;
        findings: { category: string; severity: string; description: string }[];
      };
      imageAudit?: {
        verdict: string;
      } | null;
    };
    console.log();
    consola.success(`Uploaded ${pc.bold(`${manifest.id}@${result.version}`)}`);
    consola.info(` Checksum: ${pc.dim(result.checksum)}`);
    consola.info(` Size: ${(result.bundleSize / 1024).toFixed(1)}KB`);
    // Async audit flow (202 Accepted)
    if (uploadRes.status === 202) {
      consola.info(` Status: ${pc.yellow("pending")} (audit running in background)`);
      if (args["no-wait"]) {
        consola.info("Skipping audit wait (--no-wait). Check status later.");
        console.log();
        return;
      }
      // Poll version endpoint for audit completion
      consola.start("Waiting for security audit to complete...");
      const versionUrl = new URL(
        `/api/v1/plugins/${manifest.id}/versions/${manifest.version}`,
        registryUrl,
      );
      const finalStatus = await pollVersionStatus(versionUrl.toString(), token);
      if (finalStatus) {
        displayAuditResults(finalStatus);
      } else {
        consola.warn("Audit did not complete within timeout. Check status later with:");
        consola.info(` ${pc.dim(`curl ${versionUrl.toString()}`)}`);
      }
    } else {
      // Synchronous response (201 or legacy)
      if (result.audit) {
        displayInlineAuditResults(result.audit, result.imageAudit ?? null);
      }
      consola.info(` Status: ${pc.green(result.status ?? "published")}`);
    }
    console.log();
  },
});
// ── Marketplace auth subcommands ────────────────────────────────
/**
 * `emdash plugin login` — authenticate against the marketplace via the
 * GitHub device flow and persist the resulting JWT for later publishes.
 */
export const marketplaceLoginCommand = defineCommand({
  meta: {
    name: "login",
    description: "Log in to the EmDash Marketplace via GitHub",
  },
  args: {
    registry: {
      type: "string",
      description: "Marketplace registry URL",
      default: DEFAULT_REGISTRY,
    },
  },
  async run({ args }) {
    const registryUrl = args.registry;
    const current = getMarketplaceCredential(registryUrl);
    if (current) {
      consola.info(`Already logged in as ${pc.bold(current.author?.name ?? "unknown")}`);
      consola.info("Use `emdash plugin logout` to log out first.");
      return;
    }
    const auth = await authenticateViaDeviceFlow(registryUrl);
    // Cache the JWT so subsequent publishes skip the device flow.
    saveMarketplaceCredential(registryUrl, {
      token: auth.token,
      expiresAt: new Date(Date.now() + 30 * 86400 * 1000).toISOString(), // 30 days
      author: { id: auth.author.id, name: auth.author.name },
    });
    consola.success(`Logged in as ${pc.bold(auth.author.name)}`);
  },
});
/** `emdash plugin logout` — drop any stored marketplace credential for the registry. */
export const marketplaceLogoutCommand = defineCommand({
  meta: {
    name: "logout",
    description: "Log out of the EmDash Marketplace",
  },
  args: {
    registry: {
      type: "string",
      description: "Marketplace registry URL",
      default: DEFAULT_REGISTRY,
    },
  },
  async run({ args }) {
    if (removeMarketplaceCredential(args.registry)) {
      consola.success("Logged out of marketplace.");
    } else {
      consola.info("No marketplace credentials found.");
    }
  },
});

View File

@@ -0,0 +1,233 @@
/**
* emdash schema
*
* Manage collections and fields via the remote API
*/
import { defineCommand } from "citty";
import { consola } from "consola";
import { connectionArgs as commonArgs, createClientFromArgs } from "../client-factory.js";
import { output } from "../output.js";
/** `emdash schema list` — print every collection on the instance. */
const listCommand = defineCommand({
  meta: {
    name: "list",
    description: "List all collections",
  },
  args: {
    ...commonArgs,
  },
  async run({ args }) {
    try {
      const api = createClientFromArgs(args);
      output(await api.collections(), args);
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      consola.error(message);
      process.exit(1);
    }
  },
});
/** `emdash schema get <collection>` — fetch one collection including its fields. */
const getCommand = defineCommand({
  meta: {
    name: "get",
    description: "Get collection with fields",
  },
  args: {
    collection: {
      type: "positional",
      description: "Collection slug",
      required: true,
    },
    ...commonArgs,
  },
  async run({ args }) {
    try {
      const api = createClientFromArgs(args);
      output(await api.collection(args.collection), args);
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      consola.error(message);
      process.exit(1);
    }
  },
});
/** `emdash schema create <collection>` — create a new collection on the instance. */
const createCommand = defineCommand({
  meta: {
    name: "create",
    description: "Create a collection",
  },
  args: {
    collection: {
      type: "positional",
      description: "Collection slug",
      required: true,
    },
    label: {
      type: "string",
      description: "Collection label",
      required: true,
    },
    "label-singular": {
      type: "string",
      description: "Singular label (defaults to label)",
    },
    description: {
      type: "string",
      description: "Collection description",
    },
    ...commonArgs,
  },
  async run({ args }) {
    try {
      const api = createClientFromArgs(args);
      // The singular label falls back to the plural label when not given.
      const created = await api.createCollection({
        slug: args.collection,
        label: args.label,
        labelSingular: args["label-singular"] || args.label,
        description: args.description,
      });
      consola.success(`Created collection "${args.collection}"`);
      output(created, args);
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      consola.error(message);
      process.exit(1);
    }
  },
});
/** `emdash schema delete <collection>` — delete a collection (prompts unless --force). */
const deleteCommand = defineCommand({
  meta: {
    name: "delete",
    description: "Delete a collection",
  },
  args: {
    collection: {
      type: "positional",
      description: "Collection slug",
      required: true,
    },
    force: {
      type: "boolean",
      description: "Skip confirmation",
    },
    ...commonArgs,
  },
  async run({ args }) {
    try {
      // Interactive confirmation unless the caller passed --force.
      if (!args.force) {
        const ok = await consola.prompt(`Delete collection "${args.collection}"?`, {
          type: "confirm",
        });
        if (!ok) {
          consola.info("Cancelled");
          return;
        }
      }
      await createClientFromArgs(args).deleteCollection(args.collection);
      consola.success(`Deleted collection "${args.collection}"`);
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      consola.error(message);
      process.exit(1);
    }
  },
});
/** `emdash schema add-field <collection> <field>` — append a field definition. */
const addFieldCommand = defineCommand({
  meta: {
    name: "add-field",
    description: "Add a field to a collection",
  },
  args: {
    collection: {
      type: "positional",
      description: "Collection slug",
      required: true,
    },
    field: {
      type: "positional",
      description: "Field slug",
      required: true,
    },
    type: {
      type: "string",
      description:
        "Field type (string, text, number, integer, boolean, datetime, image, reference, portableText, json)",
      required: true,
    },
    label: {
      type: "string",
      description: "Field label",
    },
    required: {
      type: "boolean",
      description: "Whether the field is required",
    },
    ...commonArgs,
  },
  async run({ args }) {
    try {
      const api = createClientFromArgs(args);
      // Label defaults to the field slug when omitted.
      const created = await api.createField(args.collection, {
        slug: args.field,
        type: args.type,
        label: args.label || args.field,
        required: args.required,
      });
      consola.success(`Added field "${args.field}" to "${args.collection}"`);
      output(created, args);
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      consola.error(message);
      process.exit(1);
    }
  },
});
/** `emdash schema remove-field <collection> <field>` — delete a field definition. */
const removeFieldCommand = defineCommand({
  meta: {
    name: "remove-field",
    description: "Remove a field from a collection",
  },
  args: {
    collection: {
      type: "positional",
      description: "Collection slug",
      required: true,
    },
    field: {
      type: "positional",
      description: "Field slug",
      required: true,
    },
    ...commonArgs,
  },
  async run({ args }) {
    try {
      await createClientFromArgs(args).deleteField(args.collection, args.field);
      consola.success(`Removed field "${args.field}" from "${args.collection}"`);
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      consola.error(message);
      process.exit(1);
    }
  },
});
// Umbrella command: `emdash schema <subcommand>` for collection and field management.
export const schemaCommand = defineCommand({
  meta: {
    name: "schema",
    description: "Manage collections and fields",
  },
  subCommands: {
    list: listCommand,
    get: getCommand,
    create: createCommand,
    delete: deleteCommand,
    "add-field": addFieldCommand,
    "remove-field": removeFieldCommand,
  },
});

View File

@@ -0,0 +1,54 @@
/**
* emdash search
*
* Full-text search across content
*/
import { defineCommand } from "citty";
import { consola } from "consola";
import { connectionArgs, createClientFromArgs } from "../client-factory.js";
import { output } from "../output.js";
/** `emdash search <query>` — full-text search, optionally scoped by collection/locale. */
export const searchCommand = defineCommand({
  meta: {
    name: "search",
    description: "Full-text search across content",
  },
  args: {
    query: {
      type: "positional",
      description: "Search query",
      required: true,
    },
    collection: {
      type: "string",
      alias: "c",
      description: "Filter by collection",
    },
    locale: {
      type: "string",
      description: "Filter by locale",
    },
    limit: {
      type: "string",
      alias: "l",
      description: "Maximum results to return",
    },
    ...connectionArgs,
  },
  async run({ args }) {
    try {
      const api = createClientFromArgs(args);
      // --limit arrives as a string; pass undefined when absent.
      const limit = args.limit ? parseInt(args.limit, 10) : undefined;
      const hits = await api.search(args.query, {
        collection: args.collection,
        locale: args.locale,
        limit,
      });
      output(hits, args);
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      consola.error(message);
      process.exit(1);
    }
  },
});

View File

@@ -0,0 +1,288 @@
/**
* emdash seed
*
* Apply a seed file to the database
*/
import { readFile, access, mkdir } from "node:fs/promises";
import { resolve } from "node:path";
import { defineCommand } from "citty";
import consola from "consola";
import { createDatabase } from "../../database/connection.js";
import { runMigrations } from "../../database/migrations/runner.js";
import { applySeed } from "../../seed/apply.js";
import type { SeedFile, SeedApplyOptions } from "../../seed/types.js";
import { validateSeed } from "../../seed/validate.js";
import { LocalStorage } from "../../storage/local.js";
/** Minimal view of package.json — only the fields the seed command reads. */
interface PackageJson {
  name?: string;
  emdash?: {
    // Path to a seed file, relative to the package root.
    seed?: string;
  };
}
/** True when `path` exists and is accessible; false otherwise (never throws). */
async function fileExists(path: string): Promise<boolean> {
  return access(path).then(
    () => true,
    () => false,
  );
}
/**
 * Read and parse `<cwd>/package.json`.
 * Returns null when the file is missing or contains invalid JSON.
 */
async function readPackageJson(cwd: string): Promise<PackageJson | null> {
  try {
    const raw = await readFile(resolve(cwd, "package.json"), "utf-8");
    return JSON.parse(raw);
  } catch {
    return null;
  }
}
/**
 * Locate the seed file to apply, in priority order:
 *   1. explicit positional path (error message + null when missing)
 *   2. the `.emdash/seed.json` convention
 *   3. the `emdash.seed` entry in package.json (warn + fall through when missing)
 * Returns an absolute path, or null when nothing usable is found.
 */
async function resolveSeedPath(cwd: string, positional?: string): Promise<string | null> {
  // 1. Explicit path from the command line.
  if (positional) {
    const explicit = resolve(cwd, positional);
    if (!(await fileExists(explicit))) {
      consola.error(`Seed file not found: ${positional}`);
      return null;
    }
    return explicit;
  }
  // 2. Conventional location.
  const conventional = resolve(cwd, ".emdash", "seed.json");
  if (await fileExists(conventional)) {
    return conventional;
  }
  // 3. Configured location in package.json.
  const pkg = await readPackageJson(cwd);
  const configured = pkg?.emdash?.seed;
  if (configured) {
    const fromPkg = resolve(cwd, configured);
    if (await fileExists(fromPkg)) {
      return fromPkg;
    }
    consola.warn(`Seed file from package.json not found: ${configured}`);
  }
  return null;
}
// `emdash seed` — resolve, validate, and apply a seed file, running migrations
// first and printing a per-section summary of what was created/skipped/updated.
export const seedCommand = defineCommand({
  meta: {
    name: "seed",
    description: "Apply a seed file to the database",
  },
  args: {
    path: {
      type: "positional",
      description: "Path to seed file (default: .emdash/seed.json)",
      required: false,
    },
    database: {
      type: "string",
      alias: "d",
      description: "Database path",
      default: "./data.db",
    },
    cwd: {
      type: "string",
      description: "Working directory",
      default: process.cwd(),
    },
    validate: {
      type: "boolean",
      description: "Validate only, don't apply",
      default: false,
    },
    "no-content": {
      type: "boolean",
      description: "Skip sample content",
      default: false,
    },
    "on-conflict": {
      type: "string",
      description: "Conflict handling: skip, update, error",
      default: "skip",
    },
    "uploads-dir": {
      type: "string",
      description: "Directory for media uploads",
      default: "./uploads",
    },
    "media-base-url": {
      type: "string",
      description: "Base URL for media files",
      default: "/_emdash/api/media/file",
    },
  },
  async run({ args }) {
    const cwd = resolve(args.cwd);
    consola.start("Loading seed file...");
    // Resolve seed file path
    const seedPath = await resolveSeedPath(cwd, args.path);
    if (!seedPath) {
      consola.error("No seed file found");
      consola.info(
        "Provide a path, create .emdash/seed.json, or set emdash.seed in package.json",
      );
      process.exit(1);
    }
    consola.info(`Seed file: ${seedPath}`);
    // Load and parse seed file
    let seed: SeedFile;
    try {
      const content = await readFile(seedPath, "utf-8");
      seed = JSON.parse(content);
    } catch (error) {
      consola.error("Failed to parse seed file:", error);
      process.exit(1);
    }
    // Validate seed
    // Warnings are non-fatal; errors abort before the database is touched.
    consola.start("Validating seed file...");
    const validation = validateSeed(seed);
    if (validation.warnings.length > 0) {
      for (const warning of validation.warnings) {
        consola.warn(warning);
      }
    }
    if (!validation.valid) {
      consola.error("Seed validation failed:");
      for (const error of validation.errors) {
        consola.error(` - ${error}`);
      }
      process.exit(1);
    }
    consola.success("Seed file is valid");
    // If validate-only mode, exit here
    if (args.validate) {
      consola.success("Validation complete");
      return;
    }
    // Connect to database
    const dbPath = resolve(cwd, args.database);
    consola.info(`Database: ${dbPath}`);
    const db = createDatabase({ url: `file:${dbPath}` });
    // Run migrations
    // The schema must be current before the seed is applied.
    consola.start("Running migrations...");
    try {
      const { applied } = await runMigrations(db);
      if (applied.length > 0) {
        consola.success(`Applied ${applied.length} migrations`);
      } else {
        consola.info("Database up to date");
      }
    } catch (error) {
      consola.error("Migration failed:", error);
      await db.destroy();
      process.exit(1);
    }
    // Set up storage for $media resolution
    const uploadsDir = resolve(cwd, args["uploads-dir"]);
    await mkdir(uploadsDir, { recursive: true });
    const storage = new LocalStorage({
      directory: uploadsDir,
      baseUrl: args["media-base-url"],
    });
    // Prepare apply options
    // Narrow the free-form CLI string to the allowed union before use.
    const onConflictRaw = args["on-conflict"];
    if (onConflictRaw !== "skip" && onConflictRaw !== "update" && onConflictRaw !== "error") {
      consola.error(`Invalid --on-conflict value: ${onConflictRaw}`);
      consola.info("Use: skip, update, or error");
      await db.destroy();
      process.exit(1);
    }
    const options: SeedApplyOptions = {
      includeContent: !args["no-content"],
      onConflict: onConflictRaw,
      storage,
    };
    // Apply seed
    consola.start("Applying seed...");
    try {
      const result = await applySeed(db, seed, options);
      consola.success("Seed applied successfully!");
      consola.log("");
      // Print summary
      // Each section is printed only when it actually did some work.
      if (result.settings.applied > 0) {
        consola.info(`Settings: ${result.settings.applied} applied`);
      }
      if (
        result.collections.created > 0 ||
        result.collections.skipped > 0 ||
        result.collections.updated > 0
      ) {
        consola.info(
          `Collections: ${result.collections.created} created, ${result.collections.skipped} skipped, ${result.collections.updated} updated`,
        );
      }
      if (result.fields.created > 0 || result.fields.skipped > 0 || result.fields.updated > 0) {
        consola.info(
          `Fields: ${result.fields.created} created, ${result.fields.skipped} skipped, ${result.fields.updated} updated`,
        );
      }
      if (result.taxonomies.created > 0 || result.taxonomies.terms > 0) {
        consola.info(
          `Taxonomies: ${result.taxonomies.created} created, ${result.taxonomies.terms} terms`,
        );
      }
      if (result.bylines.created > 0 || result.bylines.skipped > 0 || result.bylines.updated > 0) {
        consola.info(
          `Bylines: ${result.bylines.created} created, ${result.bylines.skipped} skipped, ${result.bylines.updated} updated`,
        );
      }
      if (result.menus.created > 0 || result.menus.items > 0) {
        consola.info(`Menus: ${result.menus.created} created, ${result.menus.items} items`);
      }
      if (result.widgetAreas.created > 0 || result.widgetAreas.widgets > 0) {
        consola.info(
          `Widget Areas: ${result.widgetAreas.created} created, ${result.widgetAreas.widgets} widgets`,
        );
      }
      if (result.content.created > 0 || result.content.skipped > 0 || result.content.updated > 0) {
        consola.info(
          `Content: ${result.content.created} created, ${result.content.skipped} skipped, ${result.content.updated} updated`,
        );
      }
      if (result.media.created > 0 || result.media.skipped > 0) {
        consola.info(`Media: ${result.media.created} created, ${result.media.skipped} skipped`);
      }
    } catch (error) {
      consola.error("Seed failed:", error instanceof Error ? error.message : error);
      await db.destroy();
      process.exit(1);
    }
    await db.destroy();
    consola.success("Done!");
  },
});

View File

@@ -0,0 +1,128 @@
/**
* emdash taxonomy
*
* Manage taxonomies and terms via the EmDash REST API.
*/
import { defineCommand } from "citty";
import { consola } from "consola";
import { connectionArgs, createClientFromArgs } from "../client-factory.js";
import { output } from "../output.js";
/** Pattern to replace whitespace with hyphens for slug generation */
const WHITESPACE_PATTERN = /\s+/g;
/** `emdash taxonomy list` — print every taxonomy on the instance. */
const listCommand = defineCommand({
  meta: {
    name: "list",
    description: "List all taxonomies",
  },
  args: {
    ...connectionArgs,
  },
  async run({ args }) {
    try {
      const client = createClientFromArgs(args);
      output(await client.taxonomies(), args);
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      consola.error(message);
      process.exit(1);
    }
  },
});
/** `emdash taxonomy terms <name>` — page through the terms of one taxonomy. */
const termsCommand = defineCommand({
  meta: {
    name: "terms",
    description: "List terms in a taxonomy",
  },
  args: {
    name: {
      type: "positional",
      description: "Taxonomy name",
      required: true,
    },
    limit: {
      type: "string",
      alias: "l",
      description: "Maximum terms to return",
    },
    cursor: {
      type: "string",
      description: "Pagination cursor",
    },
    ...connectionArgs,
  },
  async run({ args }) {
    try {
      const client = createClientFromArgs(args);
      // Only forward a numeric limit when the flag was actually provided.
      const limit = args.limit ? parseInt(args.limit, 10) : undefined;
      const page = await client.terms(args.name, { limit, cursor: args.cursor });
      output(page, args);
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      consola.error(message);
      process.exit(1);
    }
  },
});
/**
 * `emdash taxonomy add-term <taxonomy>` — create a term in a taxonomy.
 *
 * The slug defaults to a slugified form of the label: trimmed, lowercased,
 * with runs of whitespace collapsed to single hyphens. Trimming first means
 * stray leading/trailing whitespace in `--name` cannot produce slugs like
 * "-my-term-".
 */
const addTermCommand = defineCommand({
  meta: {
    name: "add-term",
    description: "Create a term in a taxonomy",
  },
  args: {
    taxonomy: {
      type: "positional",
      description: "Taxonomy name",
      required: true,
    },
    name: {
      type: "string",
      description: "Term label",
      required: true,
    },
    slug: {
      type: "string",
      description: "Term slug (defaults to slugified name)",
    },
    parent: {
      type: "string",
      description: "Parent term ID",
    },
    ...connectionArgs,
  },
  async run({ args }) {
    try {
      const client = createClientFromArgs(args);
      const label = args.name;
      // Trim before slugifying so "  My Term " becomes "my-term", not "-my-term-".
      const slug = args.slug || label.trim().toLowerCase().replace(WHITESPACE_PATTERN, "-");
      const term = await client.createTerm(args.taxonomy, {
        slug,
        label,
        parentId: args.parent,
      });
      consola.success(`Created term "${label}" in ${args.taxonomy}`);
      output(term, args);
    } catch (error) {
      consola.error(error instanceof Error ? error.message : "Unknown error");
      process.exit(1);
    }
  },
});
/** Parent command grouping the taxonomy subcommands (`emdash taxonomy …`). */
export const taxonomyCommand = defineCommand({
  meta: {
    name: "taxonomy",
    description: "Manage taxonomies and terms",
  },
  subCommands: {
    list: listCommand,
    terms: termsCommand,
    "add-term": addTermCommand,
  },
});

View File

@@ -0,0 +1,68 @@
/**
* emdash types
*
* Fetch schema from an EmDash instance and generate TypeScript types
*/
import { mkdir, writeFile } from "node:fs/promises";
import { dirname, relative, resolve } from "node:path";
import { defineCommand } from "citty";
import consola from "consola";
import { connectionArgs, createClientFromArgs } from "../client-factory.js";
export const typesCommand = defineCommand({
meta: {
name: "types",
description: "Generate TypeScript types from schema",
},
args: {
...connectionArgs,
output: {
type: "string",
alias: "o",
description: "Output path for generated types",
default: ".emdash/types.ts",
},
cwd: {
type: "string",
description: "Working directory",
default: process.cwd(),
},
},
async run({ args }) {
const cwd = resolve(args.cwd);
consola.start("Fetching schema...");
try {
const client = createClientFromArgs(args);
// Fetch JSON schema
const schema = await client.schemaExport();
consola.success(`Found ${schema.collections.length} collections`);
// Fetch TypeScript types
const types = await client.schemaTypes();
// Write types file
const outputPath = resolve(cwd, args.output);
await mkdir(dirname(outputPath), { recursive: true });
await writeFile(outputPath, types, "utf-8");
consola.success(`Generated ${args.output}`);
// Also write a schema.json for reference
const schemaJsonPath = resolve(dirname(outputPath), "schema.json");
await writeFile(schemaJsonPath, JSON.stringify(schema, null, 2), "utf-8");
consola.info(`Schema version: ${schema.version}`);
consola.box({
title: "Types generated",
message: `${schema.collections.length} collections\n\nTypes: ${args.output}\nSchema: .emdash/schema.json`,
});
} catch (error) {
consola.error("Failed to fetch schema:", error instanceof Error ? error.message : error);
process.exit(1);
}
},
});

View File

@@ -0,0 +1,236 @@
/**
* Credential storage for CLI auth tokens.
*
* Stores OAuth tokens in ~/.config/emdash/auth.json.
* Remote URLs are keyed by origin, local dev by project path.
*/
import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs";
import { homedir } from "node:os";
import { join, resolve } from "node:path";
// ---------------------------------------------------------------------------
// Types
// ---------------------------------------------------------------------------
/** Credential for an EmDash instance login (access + refresh token pair). */
export interface StoredCredential {
  // OAuth access token sent on API requests.
  accessToken: string;
  // OAuth refresh token — presumably used to renew accessToken after expiry;
  // the refresh flow lives outside this module.
  refreshToken: string;
  // ISO-8601 timestamp; expiry of the access token (not checked here —
  // compare with getMarketplaceCredential, which does check).
  expiresAt: string;
  url?: string; // For local dev: the localhost URL
  /** Custom headers to send with every request (e.g. CF Access service token) */
  customHeaders?: Record<string, string>;
  // Identity snapshot captured at login time, for display (e.g. `whoami`).
  user?: {
    email: string;
    role: string;
  };
}
/** Credential for marketplace auth (GitHub OAuth JWT, no refresh token) */
export interface MarketplaceCredential {
  token: string;
  expiresAt: string;
  author?: {
    id: string;
    name: string;
  };
}
// A store entry is either kind of credential; readers discriminate by key:
// "accessToken" => StoredCredential, "token" => MarketplaceCredential.
type CredentialStore = Record<string, StoredCredential | MarketplaceCredential>;
// ---------------------------------------------------------------------------
// Paths
// ---------------------------------------------------------------------------
/** Config directory: $XDG_CONFIG_HOME/emdash, falling back to ~/.config/emdash. */
function getConfigDir(): string {
  const base = process.env["XDG_CONFIG_HOME"] || join(homedir(), ".config");
  return join(base, "emdash");
}
/** Full path of the credential file inside the config directory. */
function getCredentialPath(): string {
  const dir = getConfigDir();
  return join(dir, "auth.json");
}
// ---------------------------------------------------------------------------
// Key resolution
// ---------------------------------------------------------------------------
/**
 * Resolve the credential key for a given URL.
 *
 * Remote URLs are keyed by origin (e.g. "https://my-site.pages.dev").
 * Local dev instances are keyed by project path (e.g. "path:/Users/matt/sites/blog"),
 * so different projects served on the same localhost port do not collide.
 * Unparseable URLs fall back to the raw string.
 */
export function resolveCredentialKey(baseUrl: string): string {
  let url: URL;
  try {
    url = new URL(baseUrl);
  } catch {
    // Not a valid URL — use the raw string as the key.
    return baseUrl;
  }
  const localHosts = ["localhost", "127.0.0.1", "[::1]"];
  if (localHosts.includes(url.hostname)) {
    // For local dev, key by project path when a project root is found.
    const projectPath = findProjectRoot(process.cwd());
    if (projectPath) {
      return `path:${projectPath}`;
    }
  }
  // Remote URL, or local dev with no detectable project root.
  return url.origin;
}
/**
 * Walk up from `from` to find the project root: the first ancestor directory
 * (including `from` itself) that contains an astro.config.* file.
 * Returns null when no such directory exists up to the filesystem root.
 */
function findProjectRoot(from: string): string | null {
  const markers = [
    "astro.config.ts",
    "astro.config.mts",
    "astro.config.js",
    "astro.config.mjs",
  ];
  const fsRoot = resolve("/");
  let dir = resolve(from);
  while (dir !== fsRoot) {
    if (markers.some((name) => existsSync(join(dir, name)))) {
      return dir;
    }
    const parent = resolve(dir, "..");
    // Guard against platforms where ".." of the top directory is itself.
    if (parent === dir) break;
    dir = parent;
  }
  return null;
}
// ---------------------------------------------------------------------------
// Read/write
// ---------------------------------------------------------------------------
/** Load the credential store from disk; missing or corrupt files yield {}. */
function readStore(): CredentialStore {
  const path = getCredentialPath();
  try {
    if (!existsSync(path)) return {};
    return JSON.parse(readFileSync(path, "utf-8")) as CredentialStore;
  } catch {
    // Corrupt file — start fresh
    return {};
  }
}
/** Persist the credential store, creating the config directory if needed. */
function writeStore(store: CredentialStore): void {
  mkdirSync(getConfigDir(), { recursive: true });
  const serialized = JSON.stringify(store, null, "\t");
  writeFileSync(getCredentialPath(), serialized, {
    encoding: "utf-8",
    // Owner read/write only — the file holds secrets.
    mode: 0o600,
  });
}
// ---------------------------------------------------------------------------
// Public API
// ---------------------------------------------------------------------------
/**
 * Get stored instance credentials for a URL, or null when absent.
 * Marketplace entries (which lack "accessToken") are never returned here.
 */
export function getCredentials(baseUrl: string): StoredCredential | null {
  const entry = readStore()[resolveCredentialKey(baseUrl)];
  if (entry && "accessToken" in entry) {
    return entry;
  }
  return null;
}
/**
 * Save instance credentials for a URL, overwriting any existing entry.
 */
export function saveCredentials(baseUrl: string, cred: StoredCredential): void {
  const store = readStore();
  store[resolveCredentialKey(baseUrl)] = cred;
  writeStore(store);
}
/**
 * Remove credentials for a URL.
 *
 * @returns true when an entry existed and was deleted, false otherwise.
 */
export function removeCredentials(baseUrl: string): boolean {
  const key = resolveCredentialKey(baseUrl);
  const store = readStore();
  if (!(key in store)) {
    return false;
  }
  delete store[key];
  writeStore(store);
  return true;
}
/**
 * List all stored credential keys (instance and marketplace alike).
 */
export function listCredentialKeys(): string[] {
  return Object.keys(readStore());
}
// ---------------------------------------------------------------------------
// Marketplace credentials
// ---------------------------------------------------------------------------
/** Store key for a marketplace registry: "marketplace:" + origin (or raw URL). */
function marketplaceKey(registryUrl: string): string {
  let id: string;
  try {
    id = new URL(registryUrl).origin;
  } catch {
    // Unparseable — fall back to the raw string.
    id = registryUrl;
  }
  return `marketplace:${id}`;
}
/**
 * Get stored marketplace credential for a registry URL.
 * Expired tokens are treated as absent so callers re-authenticate.
 */
export function getMarketplaceCredential(registryUrl: string): MarketplaceCredential | null {
  const entry = readStore()[marketplaceKey(registryUrl)];
  if (!entry || !("token" in entry)) {
    return null;
  }
  const expired = new Date(entry.expiresAt) < new Date();
  return expired ? null : entry;
}
/**
 * Save marketplace credential for a registry URL, overwriting any existing one.
 */
export function saveMarketplaceCredential(registryUrl: string, cred: MarketplaceCredential): void {
  const store = readStore();
  store[marketplaceKey(registryUrl)] = cred;
  writeStore(store);
}
/**
 * Remove marketplace credential for a registry URL.
 *
 * @returns true when an entry existed and was deleted, false otherwise.
 */
export function removeMarketplaceCredential(registryUrl: string): boolean {
  const key = marketplaceKey(registryUrl);
  const store = readStore();
  if (!(key in store)) {
    return false;
  }
  delete store[key];
  writeStore(store);
  return true;
}

View File

@@ -0,0 +1,70 @@
#!/usr/bin/env node
/**
* EmDash CLI
*
* Built with citty + clack (same stack as Nuxt CLI)
*
* Commands:
* - init: Bootstrap database from template config, or interactive setup
* - types: Generate TypeScript types from schema
* - dev: Run dev server with local D1
* - seed: Apply a seed file to the database
* - export-seed: Export database schema and content as a seed file
* - auth: Authentication utilities (secret generation)
* - login/logout/whoami: Session management
* - content: Create, read, update, delete content
* - schema: Manage collections and fields
* - media: Upload and manage media
* - search: Full-text search
* - taxonomy: Manage taxonomies and terms
* - menu: Manage navigation menus
* - plugin: Plugin management (init, bundle, validate, publish, login, logout)
*/
import { defineCommand, runMain } from "citty";
import { authCommand } from "./commands/auth.js";
import { contentCommand } from "./commands/content.js";
import { devCommand } from "./commands/dev.js";
import { doctorCommand } from "./commands/doctor.js";
import { exportSeedCommand } from "./commands/export-seed.js";
import { initCommand } from "./commands/init.js";
import { loginCommand, logoutCommand, whoamiCommand } from "./commands/login.js";
import { mediaCommand } from "./commands/media.js";
import { menuCommand } from "./commands/menu.js";
import { pluginCommand } from "./commands/plugin.js";
import { schemaCommand } from "./commands/schema.js";
import { searchCommand } from "./commands/search-cmd.js";
import { seedCommand } from "./commands/seed.js";
import { taxonomyCommand } from "./commands/taxonomy.js";
import { typesCommand } from "./commands/types.js";
// Root command: wires every subcommand into the `emdash` binary.
const main = defineCommand({
  meta: {
    name: "emdash",
    version: "0.0.0",
    description: "CLI for EmDash CMS",
  },
  subCommands: {
    init: initCommand,
    types: typesCommand,
    dev: devCommand,
    doctor: doctorCommand,
    seed: seedCommand,
    "export-seed": exportSeedCommand,
    auth: authCommand,
    login: loginCommand,
    logout: logoutCommand,
    whoami: whoamiCommand,
    content: contentCommand,
    schema: schemaCommand,
    media: mediaCommand,
    search: searchCommand,
    taxonomy: taxonomyCommand,
    menu: menuCommand,
    plugin: pluginCommand,
  },
});
// `void` marks the intentionally un-awaited promise; citty handles errors
// and process exit codes itself.
void runMain(main);

View File

@@ -0,0 +1,75 @@
import { consola } from "consola";
interface OutputArgs {
json?: boolean;
}
/**
* Output data as JSON or pretty-printed.
*
* If stdout is not a TTY or --json is set, outputs JSON.
* Otherwise, outputs a formatted representation.
*/
/**
 * Output data as JSON or pretty-printed.
 *
 * Emits JSON when --json is set or stdout is not a TTY (i.e. the output is
 * being piped); otherwise renders a human-friendly view via consola.
 */
export function output(data: unknown, args: OutputArgs): void {
  if (args.json || !process.stdout.isTTY) {
    // Machine-readable output on stdout for piping.
    process.stdout.write(`${JSON.stringify(data, null, 2)}\n`);
    return;
  }
  prettyPrint(data);
}
/**
 * Recursively render a value for human consumption via consola.
 *
 * Top-level array elements are separated with "---"; objects print one
 * "key: value" line per field, recursing into nested objects. Paginated
 * list results ({ items, nextCursor }) are unwrapped automatically.
 */
function prettyPrint(data: unknown, indent: number = 0): void {
  if (data == null) {
    consola.log("(empty)");
    return;
  }
  if (Array.isArray(data)) {
    if (data.length === 0) {
      consola.log("(no items)");
      return;
    }
    for (const entry of data) {
      prettyPrint(entry, indent);
      // Visually separate top-level records.
      if (indent === 0) consola.log("---");
    }
    return;
  }
  if (typeof data === "object") {
    const record = Object(data) as Record<string, unknown>;
    // List results with items: render the items, then the cursor hint.
    if ("items" in record && Array.isArray(record.items)) {
      prettyPrint(record.items, indent);
      if (typeof record.nextCursor === "string") {
        consola.log(`\nNext cursor: ${record.nextCursor}`);
      }
      return;
    }
    const prefix = " ".repeat(indent);
    for (const [key, value] of Object.entries(record)) {
      // Skip empty fields entirely.
      if (value == null) continue;
      if (Array.isArray(value)) {
        consola.log(`${prefix}${key}: [${value.length} items]`);
      } else if (typeof value === "object") {
        consola.log(`${prefix}${key}:`);
        prettyPrint(value, indent + 1);
      } else {
        const str = typeof value === "string" ? value : JSON.stringify(value);
        // Keep output single-line: clip anything longer than 80 chars.
        const display = str.length > 80 ? `${str.slice(0, 77)}...` : str;
        consola.log(`${prefix}${key}: ${display}`);
      }
    }
    return;
  }
  consola.log(typeof data === "string" ? data : JSON.stringify(data));
}

View File

@@ -0,0 +1,969 @@
/**
* WordPress WXR (WordPress eXtended RSS) parser
*
* Uses SAX streaming parser to handle large export files efficiently.
* WXR is an RSS extension containing WordPress content exports.
*
* @see https://developer.wordpress.org/plugins/data-storage/wp-xml-rpc/
*/
import type { Readable } from "node:stream";
import sax from "sax";
// Regex patterns for WXR parsing
const PHP_SERIALIZED_STRING_PATTERN = /s:\d+:"([^"]+)"/g;
const PHP_SERIALIZED_STRING_MATCH_PATTERN = /s:\d+:"([^"]+)"/;
/**
 * Parsed WordPress export data
 */
export interface WxrData {
  /** Site metadata */
  site: WxrSite;
  /** Posts (including custom post types) */
  posts: WxrPost[];
  /** Media attachments */
  attachments: WxrAttachment[];
  /** Categories */
  categories: WxrCategory[];
  /** Tags */
  tags: WxrTag[];
  /** Authors */
  authors: WxrAuthor[];
  /** All taxonomy terms (including custom taxonomies and nav_menu) */
  terms: WxrTerm[];
  /** Parsed navigation menus */
  navMenus: WxrNavMenu[];
}
/** Channel-level site metadata from the WXR header. */
export interface WxrSite {
  title?: string;
  link?: string;
  description?: string;
  language?: string;
  baseSiteUrl?: string;
  baseBlogUrl?: string;
}
/** A single <item> from the export: post, page, or custom post type. */
export interface WxrPost {
  id?: number;
  title?: string;
  link?: string;
  pubDate?: string;
  // dc:creator — the author's login name.
  creator?: string;
  guid?: string;
  description?: string;
  // content:encoded — the raw post body (typically HTML).
  content?: string;
  excerpt?: string;
  postDate?: string;
  postDateGmt?: string;
  postModified?: string;
  postModifiedGmt?: string;
  commentStatus?: string;
  pingStatus?: string;
  status?: string;
  postType?: string;
  // wp:post_name — the URL slug.
  postName?: string;
  postPassword?: string;
  isSticky?: boolean;
  /** Parent post ID for hierarchical content (pages) */
  postParent?: number;
  /** Menu order for sorting */
  menuOrder?: number;
  categories: string[];
  tags: string[];
  /** Custom taxonomy assignments beyond categories/tags */
  customTaxonomies?: Map<string, string[]>;
  // wp:postmeta key/value pairs.
  meta: Map<string, string>;
}
/** A media <item> (wp:post_type "attachment"). */
export interface WxrAttachment {
  id?: number;
  title?: string;
  // wp:attachment_url — where the media file can be downloaded from.
  url?: string;
  postDate?: string;
  meta: Map<string, string>;
}
/** A top-level <wp:category> declaration. */
export interface WxrCategory {
  id?: number;
  nicename?: string;
  name?: string;
  parent?: string;
  description?: string;
}
/** A top-level <wp:tag> declaration. */
export interface WxrTag {
  id?: number;
  slug?: string;
  name?: string;
  description?: string;
}
/**
 * Generic taxonomy term (categories, tags, nav_menu, custom taxonomies)
 */
export interface WxrTerm {
  id: number;
  taxonomy: string; // 'category', 'post_tag', 'nav_menu', 'genre', etc.
  slug: string;
  name: string;
  parent?: string;
  description?: string;
}
/**
 * Navigation menu structure
 */
export interface WxrNavMenu {
  id: number;
  name: string; // Menu slug
  label: string; // Menu name
  items: WxrNavMenuItem[];
}
/**
 * Navigation menu item
 */
export interface WxrNavMenuItem {
  id: number;
  menuId: number;
  parentId?: number;
  sortOrder: number;
  type: "custom" | "post_type" | "taxonomy";
  objectType?: string; // 'page', 'post', 'category'
  objectId?: number;
  url?: string;
  title: string;
  target?: string;
  classes?: string;
}
/** A <wp:author> declaration. */
export interface WxrAuthor {
  id?: number;
  login?: string;
  email?: string;
  displayName?: string;
  firstName?: string;
  lastName?: string;
}
/** Extract string value from a SAX attribute (handles both Tag and QualifiedTag) */
function attrStr(attr: string | { value: string } | undefined): string {
  if (typeof attr === "string") return attr;
  if (!attr) return "";
  // QualifiedTag attributes are objects carrying the text in `value`.
  return "value" in attr ? attr.value : "";
}
/** Type guard for complete WxrTerm (all required fields present) */
function isCompleteWxrTerm(term: Partial<WxrTerm>): term is WxrTerm {
  return (
    term.id !== undefined &&
    term.taxonomy !== undefined &&
    term.name !== undefined &&
    term.slug !== undefined
  );
}
/**
 * Parse a WordPress WXR export file.
 *
 * Streams the XML through a SAX parser so arbitrarily large exports are
 * handled without buffering the whole document. Resolves with the assembled
 * WxrData when the stream ends; rejects on the first XML error.
 *
 * @param stream Readable producing the WXR XML.
 * @returns Promise of the parsed export data.
 */
export function parseWxr(stream: Readable): Promise<WxrData> {
  return new Promise((resolve, reject) => {
    // strict mode; trim: true — the parser trims whitespace in text nodes.
    const parser = sax.createStream(true, { trim: true });
    const data: WxrData = {
      site: {},
      posts: [],
      attachments: [],
      categories: [],
      tags: [],
      authors: [],
      terms: [],
      navMenus: [],
    };
    // Parser state — one "current*" holder per element kind being assembled.
    let currentPath: string[] = [];
    let currentText = "";
    let currentItem: WxrPost | null = null;
    let currentAttachment: WxrAttachment | null = null;
    let currentCategory: WxrCategory | null = null;
    let currentTag: WxrTag | null = null;
    let currentAuthor: WxrAuthor | null = null;
    let currentTerm: Partial<WxrTerm> | null = null;
    let currentMetaKey = "";
    // Track nav_menu_item posts for post-processing
    const navMenuItemPosts: WxrPost[] = [];
    // Track menu term IDs by slug for linking items to menus
    const menuTermsBySlug = new Map<string, number>();
    parser.on("opentag", (node) => {
      const tagName = node.name.toLowerCase();
      currentPath.push(tagName);
      currentText = "";
      // Start new item
      if (tagName === "item") {
        currentItem = {
          categories: [],
          tags: [],
          customTaxonomies: new Map(),
          meta: new Map(),
        };
      } else if (tagName === "wp:category") {
        currentCategory = {};
      } else if (tagName === "wp:tag") {
        currentTag = {};
      } else if (tagName === "wp:author") {
        currentAuthor = {};
      } else if (tagName === "wp:term") {
        currentTerm = {};
      }
      // Handle category/tag/custom taxonomy assignment in items; the
      // taxonomy lives in the element's domain/nicename attributes.
      if (tagName === "category" && currentItem && node.attributes) {
        const domain = attrStr(node.attributes.domain);
        const nicename = attrStr(node.attributes.nicename);
        if (domain === "category" && nicename) {
          currentItem.categories.push(nicename);
        } else if (domain === "post_tag" && nicename) {
          currentItem.tags.push(nicename);
        } else if (domain && nicename && domain !== "category" && domain !== "post_tag") {
          // Custom taxonomy (including nav_menu)
          if (!currentItem.customTaxonomies) {
            currentItem.customTaxonomies = new Map();
          }
          const existing = currentItem.customTaxonomies.get(domain) || [];
          existing.push(nicename);
          currentItem.customTaxonomies.set(domain, existing);
        }
      }
    });
    parser.on("text", (text) => {
      currentText += text;
    });
    parser.on("cdata", (cdata) => {
      currentText += cdata;
    });
    // NOTE: this handler runs before currentPath is popped, so the tag being
    // closed is still the last element of currentPath.
    parser.on("closetag", (tagName) => {
      const tag = tagName.toLowerCase();
      const text = currentText.trim();
      // Site-level metadata (in channel). The "first wins" guards keep
      // later channel-level <title>/<link>/<description> from overwriting.
      if (currentPath.includes("channel") && !currentItem) {
        switch (tag) {
          case "title":
            if (!data.site.title) data.site.title = text;
            break;
          case "link":
            if (!data.site.link) data.site.link = text;
            break;
          case "description":
            if (!data.site.description) data.site.description = text;
            break;
          case "language":
            data.site.language = text;
            break;
          case "wp:base_site_url":
            data.site.baseSiteUrl = text;
            break;
          case "wp:base_blog_url":
            data.site.baseBlogUrl = text;
            break;
        }
      }
      // Item (post/page/attachment) parsing
      if (currentItem) {
        switch (tag) {
          case "title":
            currentItem.title = text;
            break;
          case "link":
            currentItem.link = text;
            break;
          case "pubdate":
            currentItem.pubDate = text;
            break;
          case "dc:creator":
            currentItem.creator = text;
            break;
          case "guid":
            currentItem.guid = text;
            break;
          case "description":
            currentItem.description = text;
            break;
          case "content:encoded":
            currentItem.content = text;
            break;
          case "excerpt:encoded":
            currentItem.excerpt = text;
            break;
          case "wp:post_id":
            currentItem.id = parseInt(text, 10);
            break;
          case "wp:post_date":
            currentItem.postDate = text;
            break;
          case "wp:post_date_gmt":
            currentItem.postDateGmt = text;
            break;
          case "wp:post_modified":
            currentItem.postModified = text;
            break;
          case "wp:post_modified_gmt":
            currentItem.postModifiedGmt = text;
            break;
          case "wp:comment_status":
            currentItem.commentStatus = text;
            break;
          case "wp:ping_status":
            currentItem.pingStatus = text;
            break;
          case "wp:status":
            currentItem.status = text;
            break;
          case "wp:post_type":
            currentItem.postType = text;
            break;
          case "wp:post_name":
            currentItem.postName = text;
            break;
          case "wp:post_parent":
            currentItem.postParent = parseInt(text, 10);
            break;
          case "wp:menu_order":
            currentItem.menuOrder = parseInt(text, 10);
            break;
          case "wp:post_password":
            currentItem.postPassword = text || undefined;
            break;
          case "wp:is_sticky":
            currentItem.isSticky = text === "1";
            break;
          case "wp:meta_key":
            // Remembered until the sibling <wp:meta_value> closes.
            currentMetaKey = text;
            break;
          case "wp:meta_value":
            if (currentMetaKey) {
              currentItem.meta.set(currentMetaKey, text);
              currentMetaKey = "";
            }
            break;
          case "wp:attachment_url":
            if (currentItem.postType === "attachment") {
              // This is actually an attachment. Note: the meta map is
              // shared with the item so later <wp:postmeta> entries land
              // on the attachment too.
              currentAttachment = {
                id: currentItem.id,
                title: currentItem.title,
                url: text,
                postDate: currentItem.postDate,
                meta: currentItem.meta,
              };
            }
            break;
          case "item":
            // End of item - categorize and store
            if (currentAttachment) {
              data.attachments.push(currentAttachment);
              currentAttachment = null;
            } else if (currentItem.postType === "nav_menu_item") {
              // Track nav_menu_item posts for post-processing into menus
              navMenuItemPosts.push(currentItem);
              data.posts.push(currentItem);
            } else if (currentItem.postType !== "attachment") {
              // Store all non-attachment post types (posts, pages, custom post types)
              data.posts.push(currentItem);
            }
            currentItem = null;
            break;
        }
      }
      // Category parsing
      if (currentCategory) {
        switch (tag) {
          case "wp:term_id":
            currentCategory.id = parseInt(text, 10);
            break;
          case "wp:category_nicename":
            currentCategory.nicename = text;
            break;
          case "wp:cat_name":
            currentCategory.name = text;
            break;
          case "wp:category_parent":
            currentCategory.parent = text || undefined;
            break;
          case "wp:category_description":
            currentCategory.description = text || undefined;
            break;
          case "wp:category":
            if (currentCategory.name) {
              data.categories.push(currentCategory);
            }
            currentCategory = null;
            break;
        }
      }
      // Tag parsing
      if (currentTag) {
        switch (tag) {
          case "wp:term_id":
            currentTag.id = parseInt(text, 10);
            break;
          case "wp:tag_slug":
            currentTag.slug = text;
            break;
          case "wp:tag_name":
            currentTag.name = text;
            break;
          case "wp:tag_description":
            currentTag.description = text || undefined;
            break;
          case "wp:tag":
            if (currentTag.name) {
              data.tags.push(currentTag);
            }
            currentTag = null;
            break;
        }
      }
      // Author parsing
      if (currentAuthor) {
        switch (tag) {
          case "wp:author_id":
            currentAuthor.id = parseInt(text, 10);
            break;
          case "wp:author_login":
            currentAuthor.login = text;
            break;
          case "wp:author_email":
            currentAuthor.email = text;
            break;
          case "wp:author_display_name":
            currentAuthor.displayName = text;
            break;
          case "wp:author_first_name":
            currentAuthor.firstName = text;
            break;
          case "wp:author_last_name":
            currentAuthor.lastName = text;
            break;
          case "wp:author":
            if (currentAuthor.login) {
              data.authors.push(currentAuthor);
            }
            currentAuthor = null;
            break;
        }
      }
      // Generic term parsing (wp:term elements - custom taxonomies, nav_menu, etc.)
      if (currentTerm) {
        switch (tag) {
          case "wp:term_id":
            currentTerm.id = parseInt(text, 10);
            break;
          case "wp:term_taxonomy":
            currentTerm.taxonomy = text;
            break;
          case "wp:term_slug":
            currentTerm.slug = text;
            break;
          case "wp:term_name":
            currentTerm.name = text;
            break;
          case "wp:term_parent":
            currentTerm.parent = text || undefined;
            break;
          case "wp:term_description":
            currentTerm.description = text || undefined;
            break;
          case "wp:term":
            if (isCompleteWxrTerm(currentTerm)) {
              data.terms.push(currentTerm);
              // Track nav_menu terms for building menus
              if (currentTerm.taxonomy === "nav_menu") {
                menuTermsBySlug.set(currentTerm.slug, currentTerm.id);
              }
            }
            currentTerm = null;
            break;
        }
      }
      currentPath.pop();
      currentText = "";
    });
    parser.on("error", (err) => {
      reject(new Error(`XML parsing error: ${err.message}`));
    });
    parser.on("end", () => {
      // Post-process nav_menu_item posts into structured menus
      data.navMenus = buildNavMenus(navMenuItemPosts, menuTermsBySlug);
      resolve(data);
    });
    // Pipe the stream through the parser
    stream.pipe(parser);
  });
}
/**
* Parse a WordPress WXR export from a string
*
* Uses the non-streaming SAX parser API for compatibility with
* environments that don't have Node.js streams (e.g., Cloudflare Workers).
*/
export function parseWxrString(xml: string): Promise<WxrData> {
return new Promise((resolve, reject) => {
const parser = sax.parser(true, { trim: false, normalize: false });
const data: WxrData = {
site: {},
posts: [],
attachments: [],
categories: [],
tags: [],
authors: [],
terms: [],
navMenus: [],
};
let currentPath: string[] = [];
let currentText = "";
let currentItem: WxrPost | null = null;
let currentAttachment: WxrAttachment | null = null;
let currentCategory: WxrCategory | null = null;
let currentTag: WxrTag | null = null;
let currentAuthor: WxrAuthor | null = null;
let currentTerm: Partial<WxrTerm> | null = null;
let currentMetaKey = "";
// Track nav_menu_item posts for post-processing
const navMenuItemPosts: WxrPost[] = [];
// Track menu term IDs by slug for linking items to menus
const menuTermsBySlug = new Map<string, number>();
parser.onopentag = (node) => {
const tag = node.name.toLowerCase();
currentPath.push(tag);
currentText = "";
// Start new elements
if (tag === "item") {
currentItem = {
categories: [],
tags: [],
customTaxonomies: new Map(),
meta: new Map(),
};
} else if (tag === "wp:category") {
currentCategory = {};
} else if (tag === "wp:tag") {
currentTag = {};
} else if (tag === "wp:author") {
currentAuthor = {};
} else if (tag === "wp:term") {
currentTerm = {};
}
// Handle category/tag/custom taxonomy assignment in items
if (tag === "category" && currentItem && node.attributes) {
const domain = attrStr(node.attributes.domain);
const nicename = attrStr(node.attributes.nicename);
if (domain === "category" && nicename) {
currentItem.categories.push(nicename);
} else if (domain === "post_tag" && nicename) {
currentItem.tags.push(nicename);
} else if (domain && nicename && domain !== "category" && domain !== "post_tag") {
// Custom taxonomy (including nav_menu)
if (!currentItem.customTaxonomies) {
currentItem.customTaxonomies = new Map();
}
const existing = currentItem.customTaxonomies.get(domain) || [];
existing.push(nicename);
currentItem.customTaxonomies.set(domain, existing);
}
}
};
parser.ontext = (text) => {
currentText += text;
};
parser.oncdata = (cdata) => {
currentText += cdata;
};
parser.onclosetag = (tagName) => {
const tag = tagName.toLowerCase();
const text = currentText.trim();
// Site metadata
if (currentPath.length === 2 && currentPath[0] === "rss") {
switch (tag) {
case "title":
data.site.title = text;
break;
case "link":
data.site.link = text;
break;
case "description":
data.site.description = text;
break;
case "language":
data.site.language = text;
break;
case "wp:base_site_url":
data.site.baseSiteUrl = text;
break;
case "wp:base_blog_url":
data.site.baseBlogUrl = text;
break;
}
}
// Item (post/page/attachment) parsing
if (currentItem) {
switch (tag) {
case "title":
currentItem.title = text;
break;
case "link":
currentItem.link = text;
break;
case "pubdate":
currentItem.pubDate = text;
break;
case "dc:creator":
currentItem.creator = text;
break;
case "guid":
currentItem.guid = text;
break;
case "description":
currentItem.description = text;
break;
case "content:encoded":
currentItem.content = text;
break;
case "excerpt:encoded":
currentItem.excerpt = text;
break;
case "wp:post_id":
currentItem.id = parseInt(text, 10);
break;
case "wp:post_date":
currentItem.postDate = text;
break;
case "wp:post_date_gmt":
currentItem.postDateGmt = text;
break;
case "wp:post_modified":
currentItem.postModified = text;
break;
case "wp:post_modified_gmt":
currentItem.postModifiedGmt = text;
break;
case "wp:comment_status":
currentItem.commentStatus = text;
break;
case "wp:ping_status":
currentItem.pingStatus = text;
break;
case "wp:post_name":
currentItem.postName = text;
break;
case "wp:status":
currentItem.status = text;
break;
case "wp:post_parent":
currentItem.postParent = parseInt(text, 10);
break;
case "wp:menu_order":
currentItem.menuOrder = parseInt(text, 10);
break;
case "wp:post_type":
currentItem.postType = text;
// If it's an attachment, convert to attachment type
if (text === "attachment") {
currentAttachment = {
id: currentItem.id,
title: currentItem.title,
url: currentItem.link,
postDate: currentItem.postDate,
meta: new Map(),
};
}
break;
case "wp:post_password":
currentItem.postPassword = text || undefined;
break;
case "wp:is_sticky":
currentItem.isSticky = text === "1";
break;
case "wp:attachment_url":
if (currentAttachment) {
currentAttachment.url = text;
}
break;
case "wp:meta_key":
currentMetaKey = text;
break;
case "wp:meta_value":
if (currentMetaKey && currentItem.meta) {
currentItem.meta.set(currentMetaKey, text);
}
break;
case "item":
// End of item - categorize and store
if (currentAttachment) {
data.attachments.push(currentAttachment);
currentAttachment = null;
} else if (currentItem.postType === "nav_menu_item") {
// Track nav_menu_item posts for post-processing into menus
navMenuItemPosts.push(currentItem);
data.posts.push(currentItem);
} else if (currentItem.postType !== "attachment") {
data.posts.push(currentItem);
}
currentItem = null;
break;
}
}
// Category parsing
if (currentCategory) {
switch (tag) {
case "wp:term_id":
currentCategory.id = parseInt(text, 10);
break;
case "wp:category_nicename":
currentCategory.nicename = text;
break;
case "wp:cat_name":
currentCategory.name = text;
break;
case "wp:category_parent":
currentCategory.parent = text || undefined;
break;
case "wp:category_description":
currentCategory.description = text || undefined;
break;
case "wp:category":
if (currentCategory.name) {
data.categories.push(currentCategory);
}
currentCategory = null;
break;
}
}
// Tag parsing
if (currentTag) {
switch (tag) {
case "wp:term_id":
currentTag.id = parseInt(text, 10);
break;
case "wp:tag_slug":
currentTag.slug = text;
break;
case "wp:tag_name":
currentTag.name = text;
break;
case "wp:tag_description":
currentTag.description = text || undefined;
break;
case "wp:tag":
if (currentTag.name) {
data.tags.push(currentTag);
}
currentTag = null;
break;
}
}
// Author parsing
if (currentAuthor) {
  switch (tag) {
    case "wp:author_id":
      currentAuthor.id = parseInt(text, 10);
      break;
    case "wp:author_login":
      currentAuthor.login = text;
      break;
    case "wp:author_email":
      currentAuthor.email = text;
      break;
    case "wp:author_display_name":
      currentAuthor.displayName = text;
      break;
    case "wp:author_first_name":
      currentAuthor.firstName = text;
      break;
    case "wp:author_last_name":
      currentAuthor.lastName = text;
      break;
    case "wp:author":
      // Closing tag: keep only authors that captured a login.
      if (currentAuthor.login) {
        data.authors.push(currentAuthor);
      }
      currentAuthor = null;
      break;
  }
}
// Generic term parsing (wp:term elements - custom taxonomies, nav_menu, etc.)
if (currentTerm) {
  switch (tag) {
    case "wp:term_id":
      currentTerm.id = parseInt(text, 10);
      break;
    case "wp:term_taxonomy":
      currentTerm.taxonomy = text;
      break;
    case "wp:term_slug":
      currentTerm.slug = text;
      break;
    case "wp:term_name":
      currentTerm.name = text;
      break;
    case "wp:term_parent":
      // Empty parent means a top-level term.
      currentTerm.parent = text || undefined;
      break;
    case "wp:term_description":
      currentTerm.description = text || undefined;
      break;
    case "wp:term":
      // Closing tag: only store terms that pass the completeness check.
      if (isCompleteWxrTerm(currentTerm)) {
        data.terms.push(currentTerm);
        // Track nav_menu terms for building menus
        if (currentTerm.taxonomy === "nav_menu") {
          menuTermsBySlug.set(currentTerm.slug, currentTerm.id);
        }
      }
      currentTerm = null;
      break;
  }
}
// Leaving this element: unwind the path stack and reset the text buffer.
currentPath.pop();
currentText = "";
};
parser.onerror = (err) => {
  // Surface parser failures through the promise with context.
  reject(new Error(`XML parsing error: ${err.message}`));
};
parser.onend = () => {
  // Post-process nav_menu_item posts into structured menus
  data.navMenus = buildNavMenus(navMenuItemPosts, menuTermsBySlug);
  resolve(data);
};
// Parse the string (non-streaming)
parser.write(xml).close();
});
}
/**
 * Build structured navigation menus from `nav_menu_item` posts.
 *
 * Menu membership comes from each post's `nav_menu` custom-taxonomy
 * assignment; item properties are read from the post's `_menu_item_*`
 * meta entries. Posts with no `nav_menu` assignment are skipped.
 *
 * @param navMenuItemPosts - Posts whose postType is `nav_menu_item`.
 * @param menuTermsBySlug - Map of nav_menu term slug -> term id, used to
 *   resolve each menu's numeric id (falls back to 0 when no term matches).
 * @returns One WxrNavMenu per referenced menu slug, items sorted by menu_order.
 */
function buildNavMenus(
  navMenuItemPosts: WxrPost[],
  menuTermsBySlug: Map<string, number>,
): WxrNavMenu[] {
  // Group menu items by the slug of the menu they belong to.
  const menuItemsByMenu = new Map<string, WxrPost[]>();
  for (const post of navMenuItemPosts) {
    // The nav_menu taxonomy assignment identifies the owning menu.
    const navMenuSlugs = post.customTaxonomies?.get("nav_menu");
    if (!navMenuSlugs || navMenuSlugs.length === 0) continue;
    const menuSlug = navMenuSlugs[0];
    if (!menuSlug) continue;
    const items = menuItemsByMenu.get(menuSlug) || [];
    items.push(post);
    menuItemsByMenu.set(menuSlug, items);
  }
  // Build structured menus
  const menus: WxrNavMenu[] = [];
  for (const [menuSlug, posts] of menuItemsByMenu) {
    const menuId = menuTermsBySlug.get(menuSlug) || 0;
    // Convert posts to menu items
    const items: WxrNavMenuItem[] = posts.map((post) => {
      const meta = post.meta;
      const menuItemTypeRaw = meta.get("_menu_item_type") || "custom";
      // Anything other than the two known structured types falls back to "custom".
      const menuItemType: WxrNavMenuItem["type"] =
        menuItemTypeRaw === "post_type" || menuItemTypeRaw === "taxonomy"
          ? menuItemTypeRaw
          : "custom";
      const objectType = meta.get("_menu_item_object");
      const objectIdStr = meta.get("_menu_item_object_id");
      const url = meta.get("_menu_item_url");
      const parentIdStr = meta.get("_menu_item_menu_item_parent");
      const target = meta.get("_menu_item_target");
      const classesStr = meta.get("_menu_item_classes");
      // Parse classes (stored as serialized PHP array): extract the string
      // values and join them into a space-separated class list.
      let classes: string | undefined;
      if (classesStr) {
        // Simple extraction of class names from serialized PHP
        const matches = classesStr.match(PHP_SERIALIZED_STRING_PATTERN);
        if (matches) {
          classes = matches
            .map((m) => m.match(PHP_SERIALIZED_STRING_MATCH_PATTERN)?.[1])
            .filter(Boolean)
            .join(" ");
        }
      }
      // Normalize numeric meta: a "0" parent means top-level, and an
      // unparsable object id is treated as absent rather than NaN.
      const parentId = parentIdStr ? parseInt(parentIdStr, 10) || undefined : undefined;
      const parsedObjectId = objectIdStr ? parseInt(objectIdStr, 10) : NaN;
      const objectId = Number.isNaN(parsedObjectId) ? undefined : parsedObjectId;
      return {
        id: post.id || 0,
        menuId,
        parentId,
        sortOrder: post.menuOrder || 0,
        type: menuItemType,
        objectType: objectType || undefined,
        objectId,
        url: url || undefined,
        title: post.title || "",
        target: target || undefined,
        classes: classes || undefined,
      };
    });
    // Sort items by menu_order (WordPress renders menus in this order).
    items.sort((a, b) => a.sortOrder - b.sortOrder);
    // Find the menu name from the terms
    // For now, use the slug as both name and label; we could enhance this
    // by looking up the actual term name from data.terms
    menus.push({
      id: menuId,
      name: menuSlug,
      label: menuSlug, // Will be enhanced when we have term data
      items,
    });
  }
  return menus;
}