Emdash source with visual editor image upload fix

Fixes:
1. media.ts: wrap placeholder generation in try-catch
2. toolbar.ts: check r.ok, display error message in popover
This commit is contained in:
2026-05-03 10:44:54 +07:00
parent 78f81bebb6
commit 2d1be52177
2352 changed files with 662964 additions and 0 deletions

View File

@@ -0,0 +1,424 @@
import { execFile, spawn } from "node:child_process";
import { rmSync } from "node:fs";
import { join, resolve } from "node:path";
import { promisify } from "node:util";
import { describe, expect, it } from "vitest";
import { ensureBuilt } from "../server.js";
/** Declarative description of one template/demo site to smoke-test. */
interface SiteCase {
  /** Human-readable label used in test titles (e.g. "templates/blog"). */
  name: string;
  /** Absolute path to the site's directory. */
  dir: string;
  /** Fixed dev-server port — unique per site so suites don't collide. */
  port: number;
  /** How long to wait for `astro dev` to start answering requests. */
  startupTimeoutMs: number;
  /** Path polled during startup; defaults to "/_emdash/admin/". */
  waitPath?: string;
  /** Setup/auth-bypass path hit before other checks; `null` skips it. Defaults to the dev-bypass endpoint. */
  setupPath?: string | null;
  /** Frontend path to verify; defaults to "/". */
  frontendPath?: string;
  /** Accepted frontend status codes; defaults to [200, 302, 307, 308]. */
  frontendStatuses?: number[];
  /** Whether the frontend body must contain `<!DOCTYPE html>`; defaults to true. */
  requireDoctype?: boolean;
}
// Monorepo root: five directory levels up from this test file.
const WORKSPACE_ROOT = resolve(import.meta.dirname, "../../../../..");
// Promise-returning execFile, used for one-shot CLI/build invocations.
const execAsync = promisify(execFile);
// One SiteCase per site under test. Ports are unique per entry; the
// Cloudflare variants use longer startup timeouts than their Node siblings.
const SITE_MATRIX: SiteCase[] = [
  {
    name: "demos/playground",
    dir: resolve(WORKSPACE_ROOT, "demos/playground"),
    port: 4603,
    startupTimeoutMs: 120_000,
    waitPath: "/playground",
    frontendPath: "/playground",
    requireDoctype: false,
  },
  // Templates
  {
    name: "templates/blog",
    dir: resolve(WORKSPACE_ROOT, "templates/blog"),
    port: 4612,
    startupTimeoutMs: 60_000,
  },
  {
    name: "templates/blog-cloudflare",
    dir: resolve(WORKSPACE_ROOT, "templates/blog-cloudflare"),
    port: 4613,
    startupTimeoutMs: 120_000,
  },
  {
    name: "templates/marketing",
    dir: resolve(WORKSPACE_ROOT, "templates/marketing"),
    port: 4614,
    startupTimeoutMs: 90_000,
  },
  {
    name: "templates/marketing-cloudflare",
    dir: resolve(WORKSPACE_ROOT, "templates/marketing-cloudflare"),
    port: 4615,
    startupTimeoutMs: 120_000,
  },
  {
    name: "templates/portfolio",
    dir: resolve(WORKSPACE_ROOT, "templates/portfolio"),
    port: 4616,
    startupTimeoutMs: 90_000,
  },
  {
    name: "templates/portfolio-cloudflare",
    dir: resolve(WORKSPACE_ROOT, "templates/portfolio-cloudflare"),
    port: 4617,
    startupTimeoutMs: 120_000,
  },
  {
    name: "templates/starter-cloudflare",
    dir: resolve(WORKSPACE_ROOT, "templates/starter-cloudflare"),
    port: 4618,
    startupTimeoutMs: 120_000,
  },
];
/**
 * Poll `url` until the server answers with any HTTP status, failing once
 * `timeoutMs` has elapsed. Redirects are not followed and each individual
 * probe is capped at 3 seconds; failed probes are retried every 500ms.
 */
async function waitForServer(url: string, timeoutMs: number): Promise<void> {
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    let responded = false;
    try {
      const probe = await fetch(url, {
        redirect: "manual",
        signal: AbortSignal.timeout(3000),
      });
      responded = probe.status > 0;
    } catch {
      // Server not accepting connections yet — fall through to the sleep.
    }
    if (responded) return;
    await new Promise<void>((wake) => setTimeout(wake, 500));
  }
  throw new Error(`Server at ${url} did not start within ${timeoutMs}ms`);
}
/**
 * GET `url`, retrying on thrown errors and 5xx responses.
 * Performs `retries + 1` attempts with `delayMs` pauses between them; any
 * sub-500 response is returned immediately. Throws the last failure when
 * every attempt is exhausted.
 */
async function fetchWithRetry(url: string, retries = 10, delayMs = 1500): Promise<Response> {
  let lastFailure: unknown;
  let attemptsLeft = retries + 1;
  while (attemptsLeft > 0) {
    attemptsLeft--;
    try {
      const response = await fetch(url, {
        redirect: "manual",
        signal: AbortSignal.timeout(15_000),
      });
      if (response.status < 500) return response;
      lastFailure = new Error(`${url} returned ${response.status}`);
    } catch (error) {
      lastFailure = error;
    }
    if (attemptsLeft > 0) {
      await new Promise<void>((wake) => setTimeout(wake, delayMs));
    }
  }
  throw lastFailure instanceof Error ? lastFailure : new Error(`Request failed for ${url}`);
}
// ---------------------------------------------------------------------------
// Build verification — runs a single recursive `pnpm build` across templates
// and the playground demo in parallel.
// ---------------------------------------------------------------------------
// Runs one recursive `pnpm build` covering all templates plus the playground
// demo; on failure, surfaces the captured build output (truncated to 5000
// chars) so CI logs show the actual compiler error.
describe("Site build verification", () => {
  it("all templates and playground build successfully", { timeout: 300_000 }, async () => {
    await ensureBuilt();
    const pnpmArgs = [
      "run",
      "--recursive",
      "--filter",
      "{./templates/*}",
      "--filter",
      "@emdash-cms/playground",
      "build",
    ];
    try {
      await execAsync("pnpm", pnpmArgs, {
        cwd: WORKSPACE_ROOT,
        timeout: 240_000,
        env: { ...process.env, CI: "true" },
      });
    } catch (error) {
      // execFile errors carry the child's stderr/stdout as extra properties.
      const readStream = (stream: "stderr" | "stdout"): string =>
        error instanceof Error && stream in error
          ? (error as unknown as Record<"stderr" | "stdout", string>)[stream]
          : "";
      const buildOutput = readStream("stderr") || readStream("stdout");
      throw new Error(`Site builds failed:\n\n${buildOutput}`.slice(0, 5000), {
        cause: error,
      });
    }
  });
});
// ---------------------------------------------------------------------------
// Helpers — shared server lifecycle for runtime tests
// ---------------------------------------------------------------------------
/** Handle to a booted dev server plus its accumulated process output. */
interface BootedServer {
  /** e.g. "http://localhost:4612" — no trailing slash. */
  baseUrl: string;
  /** The spawned `astro dev` child process (kill via killServer). */
  process: ReturnType<typeof spawn>;
  /** Combined stdout + stderr captured so far (implemented as a live getter). */
  output: string;
}
/**
 * Boot a site's dev server on its configured port and wait until it answers.
 * Stale SQLite files are removed first so every run starts from a fresh
 * database; stdout/stderr are captured for failure diagnostics.
 */
async function bootSite(site: SiteCase): Promise<BootedServer> {
  await ensureBuilt();
  // Remove stale database files so each run starts fresh.
  const staleDbFiles = ["data.db", "data.db-wal", "data.db-shm"];
  for (const file of staleDbFiles) {
    rmSync(join(site.dir, file), { force: true });
  }
  const baseUrl = `http://localhost:${site.port}`;
  const child = spawn("pnpm", ["exec", "astro", "dev", "--port", String(site.port)], {
    cwd: site.dir,
    env: { ...process.env, CI: "true" },
    stdio: "pipe",
  });
  // Accumulate both streams into one buffer for error reporting.
  let capturedOutput = "";
  const capture = (chunk: Buffer) => {
    capturedOutput += chunk.toString();
  };
  child.stdout?.on("data", capture);
  child.stderr?.on("data", capture);
  await waitForServer(`${baseUrl}${site.waitPath ?? "/_emdash/admin/"}`, site.startupTimeoutMs);
  return {
    baseUrl,
    process: child,
    get output() {
      return capturedOutput;
    },
  };
}
/**
 * Gracefully stop a dev-server child process.
 *
 * Sends SIGTERM, escalates to SIGKILL if the process has not exited within
 * 1.2s, then waits a short grace period so OS resources (port, db handles)
 * are released before the next site boots.
 *
 * Bug fix: the previous check used `serverProcess.killed`, but Node sets
 * `killed` as soon as a signal is successfully *sent* — it does not mean the
 * process exited — so the SIGKILL escalation never fired. Exit is now
 * detected via exitCode/signalCode and the 'exit' event.
 */
function killServer(serverProcess: ReturnType<typeof spawn>): Promise<void> {
  return new Promise((done) => {
    // Short grace period before resolving so the port is actually free.
    const finish = () => setTimeout(done, 500);
    const hasExited = () =>
      serverProcess.exitCode !== null || serverProcess.signalCode !== null;
    if (hasExited()) {
      finish();
      return;
    }
    const forceKillTimer = setTimeout(() => {
      if (!hasExited()) {
        serverProcess.kill("SIGKILL");
      }
      // Resolve even if 'exit' never fires, mirroring the old unconditional wait.
      finish();
    }, 1200);
    serverProcess.once("exit", () => {
      clearTimeout(forceKillTimer);
      finish();
    });
    serverProcess.kill("SIGTERM");
  });
}
// ---------------------------------------------------------------------------
// Runtime verification — boots each site with `astro dev` and checks that
// admin + frontend respond.
// ---------------------------------------------------------------------------
// Boots each site in SITE_MATRIX with `astro dev` (sequentially, so ports and
// db files never overlap) and checks that the setup endpoint, admin UI, and
// frontend all respond. Server output is appended to any failure message.
describe.sequential("Site runtime verification", () => {
  for (const site of SITE_MATRIX) {
    // Resolve per-site defaults once, outside the test body.
    const setupPath = site.setupPath ?? "/_emdash/api/setup/dev-bypass?redirect=/";
    const frontendPath = site.frontendPath ?? "/";
    const acceptedStatuses = site.frontendStatuses ?? [200, 302, 307, 308];
    const mustHaveDoctype = site.requireDoctype ?? true;
    it(
      `${site.name} boots and serves admin + frontend`,
      { timeout: site.startupTimeoutMs + 120_000 },
      async () => {
        const server = await bootSite(site);
        try {
          if (setupPath) {
            const setup = await fetchWithRetry(`${server.baseUrl}${setupPath}`);
            expect(setup.status).toBeLessThan(500);
          }
          const admin = await fetchWithRetry(`${server.baseUrl}/_emdash/admin/`);
          expect(admin.status).toBeLessThan(500);
          const frontend = await fetchWithRetry(`${server.baseUrl}${frontendPath}`);
          expect(acceptedStatuses).toContain(frontend.status);
          const html = await frontend.text();
          if (mustHaveDoctype) {
            expect(html).toContain("<!DOCTYPE html>");
          }
        } catch (error) {
          const reason = error instanceof Error ? error.message : String(error);
          throw new Error(
            `${site.name} smoke failed: ${reason}\n\n` + server.output.slice(-3000),
            { cause: error },
          );
        } finally {
          await killServer(server.process);
        }
      },
    );
  }
});
// ---------------------------------------------------------------------------
// MCP endpoint verification — boots one Node and one Cloudflare site, gets a
// bearer token, and verifies the MCP server responds to tools/list.
// ---------------------------------------------------------------------------
// Representative subset for the MCP runtime check: one Node template (blog)
// and one Cloudflare template (starter-cloudflare), per the section comment
// above — booting every site here would be redundant and slow.
const MCP_SITES: SiteCase[] = SITE_MATRIX.filter(
  (s) => s.name === "templates/blog" || s.name === "templates/starter-cloudflare",
);
// Full MCP handshake over live HTTP: obtain a bearer token, initialize, then
// list tools (stateless mode — each POST carries the whole message sequence).
// Responses arrive as SSE and are decoded via parseSSE below.
describe.sequential("MCP endpoint verification", () => {
  for (const site of MCP_SITES) {
    it(
      `${site.name} MCP tools/list responds with tools`,
      { timeout: site.startupTimeoutMs + 120_000 },
      async () => {
        const server = await bootSite(site);
        try {
          // Run dev-bypass with ?token=1 to get a bearer token
          const setupRes = await fetchWithRetry(
            `${server.baseUrl}/_emdash/api/setup/dev-bypass?token=1`,
          );
          expect(setupRes.status).toBeLessThan(500);
          const setupBody = (await setupRes.json()) as {
            data?: { token?: string };
          };
          const token = setupBody.data?.token;
          expect(token).toBeTruthy();
          // Send MCP initialize
          const initRes = await fetch(`${server.baseUrl}/_emdash/api/mcp`, {
            method: "POST",
            headers: {
              "Content-Type": "application/json",
              Accept: "application/json, text/event-stream",
              Authorization: `Bearer ${token}`,
            },
            body: JSON.stringify({
              jsonrpc: "2.0",
              method: "initialize",
              params: {
                protocolVersion: "2025-03-26",
                capabilities: {},
                clientInfo: { name: "smoke-test", version: "1.0" },
              },
              id: 1,
            }),
          });
          expect(initRes.status).toBe(200);
          // Parse SSE response to extract JSON
          const initText = await initRes.text();
          const initData = parseSSE(initText);
          expect(initData).toHaveProperty("result.serverInfo.name", "emdash");
          // Send initialized notification + tools/list in one request
          // (stateless mode — each request is independent, so we send
          // the full sequence: notifications/initialized then tools/list)
          const listRes = await fetch(`${server.baseUrl}/_emdash/api/mcp`, {
            method: "POST",
            headers: {
              "Content-Type": "application/json",
              Accept: "application/json, text/event-stream",
              Authorization: `Bearer ${token}`,
            },
            body: JSON.stringify([
              {
                jsonrpc: "2.0",
                method: "notifications/initialized",
              },
              {
                jsonrpc: "2.0",
                method: "tools/list",
                params: {},
                id: 2,
              },
            ]),
          });
          expect(listRes.status).toBe(200);
          const listText = await listRes.text();
          const listData = parseSSE(listText);
          expect(listData).toHaveProperty("result.tools");
          const tools = (listData as { result: { tools: unknown[] } }).result.tools;
          expect(tools.length).toBeGreaterThan(0);
          // Verify some expected tools exist
          const toolNames = tools.map((t: unknown) => (t as { name: string }).name);
          expect(toolNames).toContain("content_list");
          expect(toolNames).toContain("schema_list_collections");
          // Send 14 concurrent tools/list calls and verify all succeed —
          // guards against an auth-middleware race observed in production
          // where parallel requests on the same authenticated session
          // occasionally returned spurious 401s. The InMemoryTransport
          // integration test cannot reach this code path; only a live
          // HTTP server exercises the auth middleware that's racy.
          const concurrentResponses = await Promise.all(
            Array.from({ length: 14 }, (_, i) =>
              fetch(`${server.baseUrl}/_emdash/api/mcp`, {
                method: "POST",
                headers: {
                  "Content-Type": "application/json",
                  Accept: "application/json, text/event-stream",
                  Authorization: `Bearer ${token}`,
                },
                body: JSON.stringify([
                  { jsonrpc: "2.0", method: "notifications/initialized" },
                  {
                    jsonrpc: "2.0",
                    method: "tools/list",
                    params: {},
                    // Unique ids so responses are distinguishable in logs.
                    id: 100 + i,
                  },
                ]),
              }),
            ),
          );
          const statusCodes = concurrentResponses.map((r) => r.status);
          const failedStatuses = statusCodes.filter((s) => s !== 200);
          expect(
            failedStatuses,
            `expected all 14 concurrent calls to return 200; got: ${statusCodes.join(",")}`,
          ).toEqual([]);
        } catch (error) {
          throw new Error(
            `${site.name} MCP smoke failed: ${error instanceof Error ? error.message : String(error)}\n\n` +
              server.output.slice(-3000),
            { cause: error },
          );
        } finally {
          await killServer(server.process);
        }
      },
    );
  }
});
/**
* Parse the first JSON-RPC message from an SSE text response.
* MCP stateless mode returns `event: message\ndata: {...}\n\n`.
*/
/**
 * Extract the first JSON-RPC payload from an SSE response body.
 * MCP stateless mode replies with `event: message\ndata: {...}\n\n`;
 * bodies with no `data:` line are parsed as plain JSON instead.
 */
function parseSSE(text: string): unknown {
  const dataLine = text.split("\n").find((line) => line.startsWith("data: "));
  if (dataLine !== undefined) {
    return JSON.parse(dataLine.slice("data: ".length));
  }
  // Fall back to parsing as plain JSON (non-SSE response)
  return JSON.parse(text);
}

View File

@@ -0,0 +1,401 @@
/**
* Smoke tests for template/demo seed fixtures.
*
* Validates that all seed files are well-formed, can be applied
* to a fresh database, and that the resulting database passes
* doctor checks. Does NOT start a dev server — these are fast,
* programmatic tests that exercise the seed/validate/apply/doctor
* pipeline directly.
*
* Also shells out to the CLI binary for seed --validate and doctor
* commands to ensure the CLI interface works correctly.
*/
import { execFile } from "node:child_process";
import { existsSync, readFileSync, readdirSync, mkdtempSync, rmSync, mkdirSync } from "node:fs";
import { tmpdir } from "node:os";
import { join, resolve } from "node:path";
import { promisify } from "node:util";
import { describe, it, expect, beforeAll, afterEach } from "vitest";
import { createDatabase } from "../../../src/database/connection.js";
import { runMigrations } from "../../../src/database/migrations/runner.js";
import { applySeed } from "../../../src/seed/apply.js";
import type { SeedFile } from "../../../src/seed/types.js";
import { validateSeed } from "../../../src/seed/validate.js";
import { LocalStorage } from "../../../src/storage/local.js";
import { ensureBuilt } from "../server.js";
// Promise-returning execFile for shelling out to the CLI binary.
const exec = promisify(execFile);
// Monorepo root: five directory levels up from this test file.
const WORKSPACE_ROOT = resolve(import.meta.dirname, "../../../../..");
// Built CLI entry point (produced by ensureBuilt() in beforeAll).
const CLI_BIN = resolve(import.meta.dirname, "../../../dist/cli/index.mjs");
// Matched (case-insensitively) against CLI output to detect validation failures.
const VALIDATION_FAILED_RE = /validation failed/i;
// ---------------------------------------------------------------------------
// Discover all templates and demos with seed files
// ---------------------------------------------------------------------------
/** A discovered template/demo directory paired with its parsed seed fixture. */
interface SiteFixture {
  /** Human-readable name for test output */
  name: string;
  /** Absolute path to the template/theme directory */
  dir: string;
  /** Absolute path to the seed file */
  seedPath: string;
  /** Parsed seed file contents */
  seed: SeedFile;
}
function discoverFixtures(): SiteFixture[] {
const fixtures: SiteFixture[] = [];
const dirs = [
{ prefix: "templates", path: resolve(WORKSPACE_ROOT, "templates") },
{ prefix: "demos", path: resolve(WORKSPACE_ROOT, "demos") },
];
for (const { prefix, path: parentDir } of dirs) {
if (!existsSync(parentDir)) continue;
for (const entry of readdirSync(parentDir)) {
const dir = join(parentDir, entry);
// Check for seed path in package.json first (emdash.seed config)
let seedPath = join(dir, ".emdash", "seed.json");
const pkgPath = join(dir, "package.json");
if (existsSync(pkgPath)) {
try {
const pkg = JSON.parse(readFileSync(pkgPath, "utf-8"));
if (pkg.emdash?.seed) {
seedPath = join(dir, pkg.emdash.seed);
}
} catch {
// Ignore parse errors
}
}
if (!existsSync(seedPath)) continue;
const raw = readFileSync(seedPath, "utf-8");
const seed = JSON.parse(raw) as SeedFile;
fixtures.push({
name: `${prefix}/${entry}`,
dir,
seedPath,
seed,
});
}
}
return fixtures;
}
const fixtures = discoverFixtures();
// ---------------------------------------------------------------------------
// Tests
// ---------------------------------------------------------------------------
describe("Seed Fixture Smoke Tests", () => {
  // Temp dirs allocated by createTempDir(); removed after each test.
  let tempDirs: string[] = [];
  beforeAll(async () => {
    // Ensure CLI binary is built for CLI-based tests
    await ensureBuilt();
  }, 120_000);
  afterEach(() => {
    // Clean up any temp directories created during tests
    for (const dir of tempDirs) {
      rmSync(dir, { recursive: true, force: true });
    }
    tempDirs = [];
  });
  // Create a fresh temp directory and register it for afterEach cleanup.
  function createTempDir(): string {
    const dir = mkdtempSync(join(tmpdir(), "emdash-smoke-"));
    tempDirs.push(dir);
    return dir;
  }
  // Sanity check: we actually found fixtures to test
  it("discovers at least one template/demo with a seed file", () => {
    expect(fixtures.length).toBeGreaterThanOrEqual(1);
    const names = fixtures.map((f) => f.name);
    // At minimum the blog template should always be present.
    expect(names).toContain("templates/blog");
  });
  // -----------------------------------------------------------------------
  // Per-fixture tests
  // -----------------------------------------------------------------------
  for (const fixture of fixtures) {
    describe(fixture.name, () => {
      // --- Seed file is valid JSON with correct structure ---
      it("has a valid seed.json that parses as JSON", () => {
        expect(fixture.seed).toBeDefined();
        expect(fixture.seed.version).toBe("1");
      });
      // --- Programmatic validation ---
      it("passes programmatic seed validation", () => {
        const result = validateSeed(fixture.seed);
        if (!result.valid) {
          // Include errors in failure message for debuggability
          expect.fail(`Seed validation failed:\n${result.errors.join("\n")}`);
        }
        expect(result.valid).toBe(true);
      });
      // --- CLI --validate ---
      it("passes CLI seed --validate", async () => {
        const { stdout, stderr } = await exec(
          "node",
          [CLI_BIN, "seed", fixture.seedPath, "--validate"],
          {
            cwd: fixture.dir,
            timeout: 15_000,
          },
        );
        // The validate command should succeed (exit 0) — if it throws,
        // the test will fail with the error message
        expect(stdout + stderr).not.toMatch(VALIDATION_FAILED_RE);
      });
      // --- Seed applies to fresh database ---
      it("applies seed to a fresh database without errors", { timeout: 30_000 }, async () => {
        const tempDir = createTempDir();
        const dbPath = join(tempDir, "test.db");
        const uploadsDir = join(tempDir, "uploads");
        mkdirSync(uploadsDir, { recursive: true });
        // Create database and run migrations
        const db = createDatabase({ url: `file:${dbPath}` });
        try {
          const { applied } = await runMigrations(db);
          expect(applied.length).toBeGreaterThan(0);
          // Set up local storage for media resolution
          const storage = new LocalStorage({
            directory: uploadsDir,
            baseUrl: "/_emdash/api/media/file",
          });
          // Apply seed
          const result = await applySeed(db, fixture.seed, {
            includeContent: true,
            onConflict: "skip",
            storage,
            mediaBasePath: join(fixture.dir, ".emdash"),
          });
          // Verify collections were created
          if (fixture.seed.collections && fixture.seed.collections.length > 0) {
            expect(result.collections.created).toBeGreaterThan(0);
          }
          // Verify fields were created
          const totalFields =
            fixture.seed.collections?.reduce((sum, c) => sum + (c.fields?.length ?? 0), 0) ?? 0;
          if (totalFields > 0) {
            expect(result.fields.created).toBeGreaterThan(0);
          }
          // Verify content was created if seed has content
          if (fixture.seed.content) {
            const totalEntries = Object.values(fixture.seed.content).reduce(
              (sum, entries) => sum + (Array.isArray(entries) ? entries.length : 0),
              0,
            );
            if (totalEntries > 0) {
              expect(result.content.created).toBeGreaterThan(0);
            }
          }
          // Verify taxonomy processing completed (some may be pre-seeded by migrations)
          if (fixture.seed.taxonomies && fixture.seed.taxonomies.length > 0) {
            // Taxonomies either created or already existed — just verify no crash
            expect(result.taxonomies.created + result.taxonomies.terms).toBeGreaterThanOrEqual(0);
          }
          // Verify menus if present
          if (fixture.seed.menus && fixture.seed.menus.length > 0) {
            expect(result.menus.created).toBeGreaterThan(0);
          }
        } finally {
          await db.destroy();
        }
      });
      // --- CLI seed apply + doctor ---
      it("passes CLI doctor after seed apply", { timeout: 30_000 }, async () => {
        const tempDir = createTempDir();
        const dbPath = join(tempDir, "test.db");
        // Apply seed via CLI (this also runs migrations)
        await exec("node", [CLI_BIN, "seed", fixture.seedPath, "--database", dbPath], {
          cwd: fixture.dir,
          timeout: 30_000,
        });
        // Run doctor and verify all checks pass
        const { stdout } = await exec("node", [CLI_BIN, "doctor", "--database", dbPath, "--json"], {
          cwd: fixture.dir,
          timeout: 15_000,
        });
        const checks = JSON.parse(stdout) as Array<{
          name: string;
          status: "pass" | "warn" | "fail";
          message: string;
        }>;
        // No failures allowed
        const failures = checks.filter((c) => c.status === "fail");
        if (failures.length > 0) {
          expect.fail(
            `Doctor failures:\n${failures.map((f) => ` ${f.name}: ${f.message}`).join("\n")}`,
          );
        }
        // Database, migrations, and collections should all pass
        const dbCheck = checks.find((c) => c.name === "database");
        expect(dbCheck?.status).toBe("pass");
        const migrationsCheck = checks.find((c) => c.name === "migrations");
        expect(migrationsCheck?.status).toBe("pass");
        const collectionsCheck = checks.find((c) => c.name === "collections");
        expect(collectionsCheck?.status).toBe("pass");
      });
      // --- Idempotent re-apply ---
      it(
        "can re-apply seed with on-conflict=skip without errors",
        { timeout: 30_000 },
        async () => {
          const tempDir = createTempDir();
          const dbPath = join(tempDir, "test.db");
          const uploadsDir = join(tempDir, "uploads");
          mkdirSync(uploadsDir, { recursive: true });
          const db = createDatabase({ url: `file:${dbPath}` });
          try {
            await runMigrations(db);
            const storage = new LocalStorage({
              directory: uploadsDir,
              baseUrl: "/_emdash/api/media/file",
            });
            const seedOpts = {
              includeContent: true,
              onConflict: "skip" as const,
              storage,
              seedDir: join(fixture.dir, ".emdash"),
            };
            // First apply
            await applySeed(db, fixture.seed, seedOpts);
            // Second apply — should not throw
            const result2 = await applySeed(db, fixture.seed, seedOpts);
            // Everything should be skipped on second apply
            expect(result2.collections.created).toBe(0);
          } finally {
            await db.destroy();
          }
        },
      );
      // --- package.json has emdash.seed pointing to seed file ---
      it("has package.json with emdash.seed pointing to the seed file", () => {
        const pkgPath = join(fixture.dir, "package.json");
        if (!existsSync(pkgPath)) return; // blank template has no seed, already filtered
        const pkg = JSON.parse(readFileSync(pkgPath, "utf-8"));
        // Either emdash.seed is set, or we rely on the .emdash/seed.json convention
        const seedRef = pkg.emdash?.seed;
        if (seedRef) {
          const resolvedSeedPath = resolve(fixture.dir, seedRef);
          expect(existsSync(resolvedSeedPath)).toBe(true);
        } else {
          // Convention: .emdash/seed.json exists (which it does since we're iterating fixtures)
          expect(existsSync(fixture.seedPath)).toBe(true);
        }
      });
    });
  }
  // -----------------------------------------------------------------------
  // Cross-cutting: all templates/demos have required files
  // -----------------------------------------------------------------------
  describe("Required files", () => {
    const roots = [
      { prefix: "templates", dir: resolve(WORKSPACE_ROOT, "templates") },
      { prefix: "demos", dir: resolve(WORKSPACE_ROOT, "demos") },
    ].filter((root) => existsSync(root.dir));
    // Every directory under templates/ and demos/ that has a package.json.
    const allDirs = roots
      .flatMap((root) =>
        readdirSync(root.dir).map((entry) => ({
          name: `${root.prefix}/${entry}`,
          dir: join(root.dir, entry),
        })),
      )
      .filter((d) => existsSync(join(d.dir, "package.json")));
    for (const { name, dir } of allDirs) {
      describe(name, () => {
        it("has astro.config.mjs", () => {
          expect(existsSync(join(dir, "astro.config.mjs"))).toBe(true);
        });
        it("has tsconfig.json", () => {
          expect(existsSync(join(dir, "tsconfig.json"))).toBe(true);
        });
        it("has live.config.ts with emdashLoader", () => {
          const liveConfig = join(dir, "src", "live.config.ts");
          expect(existsSync(liveConfig)).toBe(true);
          const content = readFileSync(liveConfig, "utf-8");
          expect(content).toContain("emdashLoader");
          expect(content).toContain("defineLiveCollection");
        });
        it("has typecheck script in package.json", () => {
          const pkg = JSON.parse(readFileSync(join(dir, "package.json"), "utf-8"));
          expect(pkg.scripts?.typecheck || pkg.scripts?.check).toBeDefined();
        });
        it("uses workspace:* for emdash dependency", () => {
          const pkg = JSON.parse(readFileSync(join(dir, "package.json"), "utf-8"));
          expect(pkg.dependencies?.emdash).toBe("workspace:*");
        });
        it("uses catalog: for astro dependency", () => {
          const pkg = JSON.parse(readFileSync(join(dir, "package.json"), "utf-8"));
          const astroVersion = pkg.dependencies?.astro;
          expect(astroVersion).toBe("catalog:");
        });
      });
    }
  });
});