Emdash source with visual editor image upload fix
Fixes: (1) media.ts: wrap placeholder generation in a try-catch; (2) toolbar.ts: check r.ok and display the error message in the popover
This commit is contained in:
349
packages/core/tests/integration/database/dialect-compat.test.ts
Normal file
349
packages/core/tests/integration/database/dialect-compat.test.ts
Normal file
@@ -0,0 +1,349 @@
|
||||
/**
|
||||
* Dialect compatibility tests
|
||||
*
|
||||
* Runs core database operations against every available dialect.
|
||||
* SQLite always runs (in-memory). Postgres runs when EMDASH_TEST_PG is set.
|
||||
*
|
||||
* These tests verify that migrations, schema registry, and content CRUD
|
||||
* work identically across dialects.
|
||||
*/
|
||||
|
||||
import { it, expect, beforeEach, afterEach } from "vitest";
|
||||
|
||||
import {
|
||||
runMigrations,
|
||||
getMigrationStatus,
|
||||
MIGRATION_COUNT,
|
||||
} from "../../../src/database/migrations/runner.js";
|
||||
import { ContentRepository } from "../../../src/database/repositories/content.js";
|
||||
import type { Database } from "../../../src/database/types.js";
|
||||
import { SchemaRegistry } from "../../../src/schema/registry.js";
|
||||
import {
|
||||
createForDialect,
|
||||
describeEachDialect,
|
||||
setupForDialect,
|
||||
setupForDialectWithCollections,
|
||||
teardownForDialect,
|
||||
type DialectTestContext,
|
||||
} from "../../utils/test-db.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Migrations
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Migration behavior must be identical across dialects: this suite runs the
// full migration chain on each available dialect and checks table creation,
// bookkeeping, idempotence, and status reporting.
describeEachDialect("Migrations", (dialect) => {
  let ctx: DialectTestContext;

  beforeEach(async () => {
    // Bare database — no migrations yet. Tests run them explicitly.
    ctx = await createForDialect(dialect);
  });

  afterEach(async () => {
    await teardownForDialect(ctx);
  });

  it("runs all migrations and creates system tables", async () => {
    await runMigrations(ctx.db);

    // Every system table the migration chain is expected to create.
    const tables = [
      "revisions",
      "taxonomies",
      "content_taxonomies",
      "media",
      "users",
      "options",
      "audit_logs",
      "_emdash_migrations",
      "_emdash_collections",
      "_emdash_fields",
      "_plugin_storage",
      "_plugin_state",
      "_plugin_indexes",
      "_emdash_sections",
      "_emdash_bylines",
      "_emdash_content_bylines",
    ];

    for (const table of tables) {
      // A SELECT only succeeds if the table exists; a missing table rejects,
      // failing the test with the per-table message below.
      const result = await ctx.db
        .selectFrom(table as keyof Database)
        .selectAll()
        .execute();
      expect(Array.isArray(result), `table ${table} should exist`).toBe(true);
    }
  });

  it("tracks migrations in _emdash_migrations", async () => {
    await runMigrations(ctx.db);

    const migrations = await ctx.db.selectFrom("_emdash_migrations").selectAll().execute();

    // One bookkeeping row per known migration, starting with the initial one.
    expect(migrations).toHaveLength(MIGRATION_COUNT);
    expect(migrations[0]?.name).toBe("001_initial");
  });

  it("is idempotent", async () => {
    // Running twice must not re-apply migrations or duplicate bookkeeping rows.
    await runMigrations(ctx.db);
    await runMigrations(ctx.db);

    const migrations = await ctx.db.selectFrom("_emdash_migrations").selectAll().execute();

    expect(migrations).toHaveLength(MIGRATION_COUNT);
  });

  it("reports correct migration status", async () => {
    // Before running: everything pending, nothing applied.
    const before = await getMigrationStatus(ctx.db);
    expect(before.pending).toContain("001_initial");
    expect(before.applied).toHaveLength(0);

    await runMigrations(ctx.db);

    // After running: everything applied, nothing pending.
    const after = await getMigrationStatus(ctx.db);
    expect(after.applied).toContain("001_initial");
    expect(after.pending).toHaveLength(0);
  });
});
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Schema registry
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Schema registry operations (collection + field CRUD, dynamic table
// lifecycle) must work identically on every dialect.
describeEachDialect("Schema registry", (dialect) => {
  let ctx: DialectTestContext;
  let registry: SchemaRegistry;

  beforeEach(async () => {
    // Fully migrated database; the registry needs the system tables in place.
    ctx = await setupForDialect(dialect);
    await runMigrations(ctx.db);
    registry = new SchemaRegistry(ctx.db);
  });

  afterEach(async () => {
    await teardownForDialect(ctx);
  });

  it("creates a collection and its dynamic table", async () => {
    await registry.createCollection({
      slug: "article",
      label: "Articles",
      labelSingular: "Article",
    });

    // Dynamic table should exist — creating a collection is expected to
    // create an ec_<slug> table (query rejects if it does not).
    const rows = await ctx.db
      .selectFrom("ec_article" as keyof Database)
      .selectAll()
      .execute();
    expect(Array.isArray(rows)).toBe(true);

    // Registry should have the collection
    const collections = await registry.listCollections();
    expect(collections.map((c) => c.slug)).toContain("article");
  });

  it("adds fields to a collection", async () => {
    await registry.createCollection({
      slug: "post",
      label: "Posts",
      labelSingular: "Post",
    });

    // One field of each representative column kind: text, rich text, integer.
    await registry.createField("post", {
      slug: "title",
      label: "Title",
      type: "string",
    });

    await registry.createField("post", {
      slug: "body",
      label: "Body",
      type: "portableText",
    });

    await registry.createField("post", {
      slug: "views",
      label: "Views",
      type: "integer",
    });

    const coll = await registry.getCollectionWithFields("post");
    expect(coll).not.toBeNull();
    // Non-null asserted above via expect; safe to dereference here.
    const slugs = coll!.fields.map((f) => f.slug);
    expect(slugs).toContain("title");
    expect(slugs).toContain("body");
    expect(slugs).toContain("views");
  });

  it("deletes a collection and drops its table", async () => {
    await registry.createCollection({
      slug: "temp",
      label: "Temp",
      labelSingular: "Temp",
    });

    // Verify it exists
    const before = await registry.listCollections();
    expect(before.map((c) => c.slug)).toContain("temp");

    await registry.deleteCollection("temp");

    const after = await registry.listCollections();
    expect(after.map((c) => c.slug)).not.toContain("temp");
  });
});
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Content CRUD
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Content repository CRUD (create/read/update/delete, pagination, filtering,
// constraints) must behave the same on every dialect.
describeEachDialect("Content CRUD", (dialect) => {
  let ctx: DialectTestContext;
  let repo: ContentRepository;

  beforeEach(async () => {
    // Database pre-populated with test collections (at least "post" and "page").
    ctx = await setupForDialectWithCollections(dialect);
    repo = new ContentRepository(ctx.db);
  });

  afterEach(async () => {
    await teardownForDialect(ctx);
  });

  it("creates and retrieves content", async () => {
    const created = await repo.create({
      type: "post",
      slug: "hello-world",
      data: {
        title: "Hello World",
        content: [{ _type: "block", children: [{ _type: "span", text: "Content" }] }],
      },
      status: "draft",
    });

    expect(created.id).toBeDefined();
    expect(created.slug).toBe("hello-world");

    // Round-trip: findById must return the same data we stored.
    const found = await repo.findById("post", created.id);
    expect(found).not.toBeNull();
    expect(found!.data.title).toBe("Hello World");
    expect(found!.slug).toBe("hello-world");
  });

  it("updates content", async () => {
    const created = await repo.create({
      type: "post",
      slug: "original",
      data: { title: "Original" },
      status: "draft",
    });

    const updated = await repo.update("post", created.id, {
      data: { title: "Updated" },
    });

    expect(updated.data.title).toBe("Updated");
    // Fields not included in the patch (slug) must be preserved.
    expect(updated.slug).toBe("original");
  });

  it("lists content with pagination", async () => {
    // Seed 5 entries so a limit of 3 yields a 3-item page and a 2-item page.
    for (let i = 0; i < 5; i++) {
      await repo.create({
        type: "post",
        slug: `post-${i}`,
        data: { title: `Post ${i}` },
        status: "draft",
      });
    }

    const result = await repo.findMany("post", { limit: 3 });
    expect(result.items).toHaveLength(3);

    // NOTE(review): second page is only checked when a cursor is returned —
    // a repo that never returns nextCursor would silently pass this branch.
    if (result.nextCursor) {
      const page2 = await repo.findMany("post", {
        limit: 3,
        cursor: result.nextCursor,
      });
      expect(page2.items).toHaveLength(2);
    }
  });

  it("soft-deletes content", async () => {
    const created = await repo.create({
      type: "post",
      slug: "to-delete",
      data: { title: "To Delete" },
      status: "draft",
    });

    const deleted = await repo.delete("post", created.id);
    expect(deleted).toBe(true);

    // Deleted entries must no longer be returned by findById.
    const found = await repo.findById("post", created.id);
    expect(found).toBeNull();
  });

  it("filters by status", async () => {
    await repo.create({
      type: "post",
      slug: "draft-post",
      data: { title: "Draft Post" },
      status: "draft",
    });
    await repo.create({
      type: "post",
      slug: "published-post",
      data: { title: "Published Post" },
      status: "published",
    });

    // Each status filter must return exactly its own entry.
    const drafts = await repo.findMany("post", { where: { status: "draft" } });
    expect(drafts.items).toHaveLength(1);
    expect(drafts.items[0]?.data.title).toBe("Draft Post");

    const published = await repo.findMany("post", { where: { status: "published" } });
    expect(published.items).toHaveLength(1);
    expect(published.items[0]?.data.title).toBe("Published Post");
  });

  it("enforces unique slug within a collection", async () => {
    await repo.create({
      type: "post",
      slug: "same-slug",
      data: { title: "First" },
      status: "draft",
    });

    // Second insert with the same slug in the same collection must reject.
    await expect(
      repo.create({
        type: "post",
        slug: "same-slug",
        data: { title: "Second" },
        status: "draft",
      }),
    ).rejects.toThrow();
  });

  it("isolates collections", async () => {
    // Same slug in two different collections is allowed — uniqueness is
    // scoped per collection, and queries must not leak across collections.
    await repo.create({
      type: "post",
      slug: "shared-slug",
      data: { title: "A Post" },
      status: "draft",
    });
    await repo.create({
      type: "page",
      slug: "shared-slug",
      data: { title: "A Page" },
      status: "draft",
    });

    const posts = await repo.findMany("post");
    const pages = await repo.findMany("page");

    expect(posts.items).toHaveLength(1);
    expect(pages.items).toHaveLength(1);
    expect(posts.items[0]?.data.title).toBe("A Post");
    expect(pages.items[0]?.data.title).toBe("A Page");
  });
});
|
||||
119
packages/core/tests/integration/database/migration-race.test.ts
Normal file
119
packages/core/tests/integration/database/migration-race.test.ts
Normal file
@@ -0,0 +1,119 @@
|
||||
import { mkdtempSync, rmSync } from "node:fs";
|
||||
import { tmpdir } from "node:os";
|
||||
import { join } from "node:path";
|
||||
|
||||
import { sql } from "kysely";
|
||||
import { afterEach, beforeEach, describe, expect, it } from "vitest";
|
||||
|
||||
import { createDatabase } from "../../../src/database/connection.js";
|
||||
import { MIGRATION_COUNT, runMigrations } from "../../../src/database/migrations/runner.js";
|
||||
|
||||
/**
|
||||
* Reproduces the issue from #762: when two callers run migrations
|
||||
* concurrently against the same database (e.g. two Cloudflare Workers
|
||||
* isolates handling parallel requests during a fresh deploy), the Kysely
|
||||
* Migrator races on inserting into `_emdash_migrations` and the loser
|
||||
* throws `UNIQUE constraint failed: _emdash_migrations.name`.
|
||||
*
|
||||
* The Kysely SqliteAdapter (which D1 inherits from kysely-d1) has a no-op
|
||||
* `acquireMigrationLock`, so this race is unprotected on D1.
|
||||
*
|
||||
* We simulate the race here by pointing two independent Kysely instances
|
||||
* at the same SQLite file and starting `runMigrations` on both
|
||||
* concurrently. SQLite serializes writes, but both Migrators still race
|
||||
* on the bookkeeping insert.
|
||||
*/
|
||||
// Regression suite for #762 — see the file header comment for the full
// scenario. Uses a real on-disk SQLite file so two independent connections
// genuinely contend on the same database.
describe("Migration race condition (#762)", () => {
  let tmpDir: string;
  let dbPath: string;

  beforeEach(() => {
    // Fresh temp dir per test so each test gets a brand-new database file.
    tmpDir = mkdtempSync(join(tmpdir(), "emdash-migration-race-"));
    dbPath = join(tmpDir, "data.db");
  });

  afterEach(() => {
    rmSync(tmpDir, { recursive: true, force: true });
  });

  it("should not throw when two callers run migrations concurrently", async () => {
    const dbA = createDatabase({ url: `file:${dbPath}` });
    const dbB = createDatabase({ url: `file:${dbPath}` });

    try {
      // Fire both migrators in parallel against the same database file.
      // On D1, this is what happens when two Workers isolates spin up
      // at once on first request after deploy.
      const results = await Promise.allSettled([runMigrations(dbA), runMigrations(dbB)]);

      // allSettled never rejects, so we collect failures manually and
      // surface them all in one message rather than just the first.
      const failures = results.filter((r) => r.status === "rejected");
      if (failures.length > 0) {
        // The status re-check is for TS narrowing only; all entries here
        // are rejected.
        const messages = failures.map((f) =>
          f.status === "rejected" ? String(f.reason?.message ?? f.reason) : "",
        );
        throw new Error(
          `Concurrent runMigrations should not throw, but got ${failures.length} failure(s):\n${messages.join("\n")}`,
        );
      }

      // And the DB must actually be fully migrated — we don't want a
      // fix that just swallows errors and leaves the schema half-built.
      const verifyDb = createDatabase({ url: `file:${dbPath}` });
      try {
        const row = await sql<{ count: number }>`
          SELECT COUNT(*) as count FROM _emdash_migrations
        `.execute(verifyDb);
        expect(Number(row.rows[0]?.count)).toBe(MIGRATION_COUNT);
      } finally {
        await verifyDb.destroy();
      }
    } finally {
      // Always close both connections, even when assertions above throw.
      await dbA.destroy();
      await dbB.destroy();
    }
  });

  it("should fast-path when the migration table has more rows than this build knows about", async () => {
    // Simulates an old isolate observing a database that's already been
    // migrated by a newer build (one extra migration recorded). The
    // fast-path must treat this as "fully migrated" rather than falling
    // through to the Kysely Migrator and risking the race-recovery path.
    const db = createDatabase({ url: `file:${dbPath}` });
    try {
      await runMigrations(db);
      // Insert a phantom future migration row to simulate a newer build.
      await sql`
        INSERT INTO _emdash_migrations (name, timestamp)
        VALUES ('999_future_build', ${new Date().toISOString()})
      `.execute(db);

      // Should be a no-op via the fast-path — no errors, no extra work.
      const result = await runMigrations(db);
      expect(result.applied).toEqual([]);

      // Row count is still MIGRATION_COUNT + 1 (we didn't truncate).
      const row = await sql<{ count: number }>`
        SELECT COUNT(*) as count FROM _emdash_migrations
      `.execute(db);
      expect(Number(row.rows[0]?.count)).toBe(MIGRATION_COUNT + 1);
    } finally {
      await db.destroy();
    }
  });

  it("should still surface unrelated migration errors", async () => {
    // Exercises the non-race error path so a regression that swallows
    // real errors is caught. We migrate once, then delete a single row
    // from `_emdash_migrations` so the migrator tries to re-run that
    // migration and fails with `table ... already exists` — a non-race
    // error that must NOT be swallowed.
    const db = createDatabase({ url: `file:${dbPath}` });
    try {
      await runMigrations(db);
      await sql`DELETE FROM _emdash_migrations WHERE name = '001_initial'`.execute(db);
      await expect(runMigrations(db)).rejects.toThrow(/Migration failed/i);
    } finally {
      await db.destroy();
    }
  });
});
|
||||
435
packages/core/tests/integration/database/migrations.test.ts
Normal file
435
packages/core/tests/integration/database/migrations.test.ts
Normal file
@@ -0,0 +1,435 @@
|
||||
import type { Kysely } from "kysely";
|
||||
import { describe, it, expect, beforeEach, afterEach } from "vitest";
|
||||
|
||||
import { createDatabase } from "../../../src/database/connection.js";
|
||||
import {
|
||||
runMigrations,
|
||||
getMigrationStatus,
|
||||
MIGRATION_COUNT,
|
||||
} from "../../../src/database/migrations/runner.js";
|
||||
import type { Database } from "../../../src/database/types.js";
|
||||
import { setupTestDatabaseWithCollections } from "../../utils/test-db.js";
|
||||
|
||||
// SQLite-only (in-memory) integration suite for the migration runner and the
// schema it produces: table creation, bookkeeping order, idempotence,
// partial re-runs, and per-table constraint smoke tests.
describe("Database Migrations (Integration)", () => {
  let db: Kysely<Database>;

  beforeEach(() => {
    // Create fresh in-memory database for each test
    db = createDatabase({ url: ":memory:" });
  });

  afterEach(async () => {
    // Close the database connection
    await db.destroy();
  });

  it("should create all tables from migrations", async () => {
    await runMigrations(db);

    // Verify all tables exist by querying them
    // Note: No generic "content" table - collections create ec_* tables dynamically
    const tables = [
      "revisions",
      "taxonomies",
      "content_taxonomies",
      "media",
      "users",
      "options",
      "audit_logs",
      "_emdash_migrations",
      "_emdash_collections",
      "_emdash_fields",
      "_plugin_storage",
      "_plugin_state",
      "_plugin_indexes",
      "_emdash_sections",
      "_emdash_bylines",
      "_emdash_content_bylines",
    ];

    for (const table of tables) {
      // Query table to verify it exists
      const result = await db
        .selectFrom(table as keyof Database)
        .selectAll()
        .execute();
      expect(Array.isArray(result)).toBe(true);
    }
  });

  it("should track migration in _emdash_migrations table", async () => {
    await runMigrations(db);

    const migrations = await db.selectFrom("_emdash_migrations").selectAll().execute();

    // Pin the exact name and presence of a timestamp for each bookkeeping
    // row, in application order. Note the sequence skips "010" (index 9 is
    // "011_sections") — the numbering gap is expected here.
    expect(migrations).toHaveLength(MIGRATION_COUNT);
    expect(migrations[0]?.name).toBe("001_initial");
    expect(migrations[0]?.timestamp).toBeDefined();
    expect(migrations[1]?.name).toBe("002_media_status");
    expect(migrations[1]?.timestamp).toBeDefined();
    expect(migrations[2]?.name).toBe("003_schema_registry");
    expect(migrations[2]?.timestamp).toBeDefined();
    expect(migrations[3]?.name).toBe("004_plugins");
    expect(migrations[3]?.timestamp).toBeDefined();
    expect(migrations[4]?.name).toBe("005_menus");
    expect(migrations[4]?.timestamp).toBeDefined();
    expect(migrations[5]?.name).toBe("006_taxonomy_defs");
    expect(migrations[5]?.timestamp).toBeDefined();
    expect(migrations[6]?.name).toBe("007_widgets");
    expect(migrations[6]?.timestamp).toBeDefined();
    expect(migrations[7]?.name).toBe("008_auth");
    expect(migrations[7]?.timestamp).toBeDefined();
    expect(migrations[8]?.name).toBe("009_user_disabled");
    expect(migrations[8]?.timestamp).toBeDefined();
    expect(migrations[9]?.name).toBe("011_sections");
    expect(migrations[9]?.timestamp).toBeDefined();
    expect(migrations[10]?.name).toBe("012_search");
    expect(migrations[10]?.timestamp).toBeDefined();
    expect(migrations[11]?.name).toBe("013_scheduled_publishing");
    expect(migrations[11]?.timestamp).toBeDefined();
    expect(migrations[12]?.name).toBe("014_draft_revisions");
    expect(migrations[12]?.timestamp).toBeDefined();
    expect(migrations[13]?.name).toBe("015_indexes");
    expect(migrations[13]?.timestamp).toBeDefined();
    expect(migrations[14]?.name).toBe("016_api_tokens");
    expect(migrations[14]?.timestamp).toBeDefined();
    expect(migrations[15]?.name).toBe("017_authorization_codes");
    expect(migrations[15]?.timestamp).toBeDefined();
  });

  it("should be idempotent (running twice is safe)", async () => {
    await runMigrations(db);
    await runMigrations(db);

    const migrations = await db.selectFrom("_emdash_migrations").selectAll().execute();

    // Should still only have the same number of migration records
    expect(migrations).toHaveLength(MIGRATION_COUNT);
  });

  it("should re-run migrations 034 and 035 when schema changes were partially applied", async () => {
    // This test needs a fully-populated database, not the bare in-memory one
    // from beforeEach — swap it out (afterEach still destroys the new one).
    await db.destroy();
    db = await setupTestDatabaseWithCollections();

    // Simulate "applied but not recorded": drop only the bookkeeping rows.
    await db
      .deleteFrom("_emdash_migrations")
      .where("name", "in", ["034_published_at_index", "035_bounded_404_log"])
      .execute();

    const { applied } = await runMigrations(db);

    // Both must re-run without error even though their schema changes
    // may already be (partially) present.
    expect(applied).toContain("034_published_at_index");
    expect(applied).toContain("035_bounded_404_log");

    const migrations = await db.selectFrom("_emdash_migrations").selectAll().execute();
    expect(migrations).toHaveLength(MIGRATION_COUNT);
  });

  it("should report correct migration status", async () => {
    const statusBefore = await getMigrationStatus(db);
    expect(statusBefore.pending).toContain("001_initial");
    expect(statusBefore.pending).toContain("002_media_status");
    expect(statusBefore.applied).toHaveLength(0);

    await runMigrations(db);

    const statusAfter = await getMigrationStatus(db);
    expect(statusAfter.applied).toContain("001_initial");
    expect(statusAfter.applied).toContain("002_media_status");
    expect(statusAfter.pending).toHaveLength(0);
  });

  it("should create schema registry tables", async () => {
    await runMigrations(db);

    // Test collections table
    const testId = "test-collection";
    await db
      .insertInto("_emdash_collections")
      .values({
        id: testId,
        slug: "posts",
        label: "Posts",
        label_singular: "Post",
      })
      .execute();

    const collection = await db
      .selectFrom("_emdash_collections")
      .selectAll()
      .where("id", "=", testId)
      .executeTakeFirst();

    expect(collection).toBeDefined();
    expect(collection?.slug).toBe("posts");
    expect(collection?.label).toBe("Posts");
    // created_at must be populated by a column default, not by the insert.
    expect(collection?.created_at).toBeDefined();
  });

  it("should enforce unique constraint on collection slug", async () => {
    await runMigrations(db);

    await db
      .insertInto("_emdash_collections")
      .values({
        id: "id1",
        slug: "posts",
        label: "Posts",
      })
      .execute();

    // Attempting to insert duplicate slug should fail
    await expect(
      db
        .insertInto("_emdash_collections")
        .values({
          id: "id2",
          slug: "posts",
          label: "Posts Again",
        })
        .execute(),
    ).rejects.toThrow();
  });

  it("should create fields table with foreign key to collections", async () => {
    await runMigrations(db);

    // Create collection first
    const collectionId = "collection-1";
    await db
      .insertInto("_emdash_collections")
      .values({
        id: collectionId,
        slug: "posts",
        label: "Posts",
      })
      .execute();

    // Create field
    await db
      .insertInto("_emdash_fields")
      .values({
        id: "field-1",
        collection_id: collectionId,
        slug: "title",
        label: "Title",
        type: "string",
        column_type: "TEXT",
        required: 0,
        unique: 0,
        sort_order: 0,
      })
      .execute();

    const fields = await db
      .selectFrom("_emdash_fields")
      .selectAll()
      .where("collection_id", "=", collectionId)
      .execute();

    expect(fields).toHaveLength(1);
    expect(fields[0]?.slug).toBe("title");
  });

  it("should create revisions table with collection+entry_id", async () => {
    await runMigrations(db);

    // Create revision for a content entry
    await db
      .insertInto("revisions")
      .values({
        id: "rev-1",
        collection: "posts",
        entry_id: "entry-1",
        data: JSON.stringify({ title: "Revised" }),
      })
      .execute();

    const revisions = await db
      .selectFrom("revisions")
      .selectAll()
      .where("collection", "=", "posts")
      .where("entry_id", "=", "entry-1")
      .execute();

    expect(revisions).toHaveLength(1);
    expect(revisions[0]?.collection).toBe("posts");
  });

  it("should create users table with unique email constraint", async () => {
    await runMigrations(db);

    await db
      .insertInto("users")
      .values({
        id: "user-1",
        email: "test@example.com",
        name: "Test User",
        role: 50, // ADMIN
        email_verified: 1,
      })
      .execute();

    // Duplicate email should fail
    await expect(
      db
        .insertInto("users")
        .values({
          id: "user-2",
          email: "test@example.com",
          role: 10, // SUBSCRIBER
          email_verified: 1,
        })
        .execute(),
    ).rejects.toThrow();
  });

  it("should create taxonomies table with hierarchical support", async () => {
    await runMigrations(db);

    // Create parent category
    const parentId = "cat-parent";
    await db
      .insertInto("taxonomies")
      .values({
        id: parentId,
        name: "category",
        slug: "parent",
        label: "Parent Category",
      })
      .execute();

    // Create child category
    await db
      .insertInto("taxonomies")
      .values({
        id: "cat-child",
        name: "category",
        slug: "child",
        label: "Child Category",
        parent_id: parentId,
      })
      .execute();

    const child = await db
      .selectFrom("taxonomies")
      .selectAll()
      .where("id", "=", "cat-child")
      .executeTakeFirst();

    expect(child?.parent_id).toBe(parentId);
  });

  it("should create content_taxonomies junction table", async () => {
    await runMigrations(db);

    const taxonomyId = "tax-1";

    // Create taxonomy
    await db
      .insertInto("taxonomies")
      .values({
        id: taxonomyId,
        name: "category",
        slug: "tech",
        label: "Technology",
      })
      .execute();

    // Assign taxonomy to content entry (collection + entry_id)
    await db
      .insertInto("content_taxonomies")
      .values({
        collection: "posts",
        entry_id: "entry-1",
        taxonomy_id: taxonomyId,
      })
      .execute();

    const assignments = await db
      .selectFrom("content_taxonomies")
      .selectAll()
      .where("collection", "=", "posts")
      .where("entry_id", "=", "entry-1")
      .execute();

    expect(assignments).toHaveLength(1);
    expect(assignments[0]?.taxonomy_id).toBe(taxonomyId);
  });

  it("should create media table", async () => {
    await runMigrations(db);

    await db
      .insertInto("media")
      .values({
        id: "media-1",
        filename: "photo.jpg",
        mime_type: "image/jpeg",
        size: 1024000,
        width: 1920,
        height: 1080,
        alt: "Test photo",
        storage_key: "uploads/photo.jpg",
        status: "ready",
      })
      .execute();

    const media = await db
      .selectFrom("media")
      .selectAll()
      .where("id", "=", "media-1")
      .executeTakeFirst();

    expect(media).toBeDefined();
    expect(media?.width).toBe(1920);
    expect(media?.height).toBe(1080);
  });

  it("should create options table for key-value storage", async () => {
    await runMigrations(db);

    // Values are stored as JSON text; round-trip through JSON.parse below.
    await db
      .insertInto("options")
      .values({
        name: "site_title",
        value: JSON.stringify("My Site"),
      })
      .execute();

    const option = await db
      .selectFrom("options")
      .selectAll()
      .where("name", "=", "site_title")
      .executeTakeFirst();

    expect(option).toBeDefined();
    expect(JSON.parse(option!.value)).toBe("My Site");
  });

  it("should create audit_logs table with indexes", async () => {
    await runMigrations(db);

    await db
      .insertInto("audit_logs")
      .values({
        id: "log-1",
        actor_id: "user-1",
        actor_ip: "192.168.1.1",
        action: "content:create",
        resource_type: "content",
        resource_id: "post-1",
        status: "success",
      })
      .execute();

    const logs = await db
      .selectFrom("audit_logs")
      .selectAll()
      .where("actor_id", "=", "user-1")
      .execute();

    expect(logs).toHaveLength(1);
    expect(logs[0]?.action).toBe("content:create");
  });
});
|
||||
@@ -0,0 +1,72 @@
|
||||
/**
|
||||
* OptionsRepository.setIfAbsent — atomic write-once semantics.
|
||||
*
|
||||
* Used by routes that must never overwrite a stored value once set
|
||||
* (e.g. the setup wizard's emdash:site_url write). Correctness under
|
||||
* concurrent writes is a security property: a non-atomic read-then-write
|
||||
* lets a second caller win the race and poison the value.
|
||||
*/
|
||||
|
||||
import type { Kysely } from "kysely";
|
||||
import { afterEach, beforeEach, describe, expect, it } from "vitest";
|
||||
|
||||
import { OptionsRepository } from "../../../src/database/repositories/options.js";
|
||||
import type { Database } from "../../../src/database/types.js";
|
||||
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
|
||||
|
||||
describe("OptionsRepository.setIfAbsent", () => {
|
||||
let db: Kysely<Database>;
|
||||
let repo: OptionsRepository;
|
||||
|
||||
beforeEach(async () => {
|
||||
db = await setupTestDatabase();
|
||||
repo = new OptionsRepository(db);
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await teardownTestDatabase(db);
|
||||
});
|
||||
|
||||
it("inserts when no row exists and returns true", async () => {
|
||||
const inserted = await repo.setIfAbsent("emdash:site_url", "https://example.com");
|
||||
expect(inserted).toBe(true);
|
||||
expect(await repo.get("emdash:site_url")).toBe("https://example.com");
|
||||
});
|
||||
|
||||
it("does not overwrite an existing value and returns false", async () => {
|
||||
await repo.set("emdash:site_url", "https://real.example");
|
||||
const inserted = await repo.setIfAbsent("emdash:site_url", "https://attacker.example");
|
||||
expect(inserted).toBe(false);
|
||||
expect(await repo.get("emdash:site_url")).toBe("https://real.example");
|
||||
});
|
||||
|
||||
it("treats an empty string as present (does not overwrite)", async () => {
|
||||
await repo.set("emdash:site_url", "");
|
||||
const inserted = await repo.setIfAbsent("emdash:site_url", "https://attacker.example");
|
||||
expect(inserted).toBe(false);
|
||||
expect(await repo.get("emdash:site_url")).toBe("");
|
||||
});
|
||||
|
||||
it("treats a stored null as present (does not overwrite)", async () => {
|
||||
await repo.set("emdash:site_url", null);
|
||||
const inserted = await repo.setIfAbsent("emdash:site_url", "https://attacker.example");
|
||||
expect(inserted).toBe(false);
|
||||
expect(await repo.get("emdash:site_url")).toBeNull();
|
||||
});
|
||||
|
||||
it("is atomic under concurrent callers — only one insert succeeds", async () => {
|
||||
const results = await Promise.all([
|
||||
repo.setIfAbsent("emdash:site_url", "https://a.example"),
|
||||
repo.setIfAbsent("emdash:site_url", "https://b.example"),
|
||||
repo.setIfAbsent("emdash:site_url", "https://c.example"),
|
||||
]);
|
||||
|
||||
// Exactly one caller inserted; the others saw the existing row.
|
||||
expect(results.filter((r) => r === true)).toHaveLength(1);
|
||||
expect(results.filter((r) => r === false)).toHaveLength(2);
|
||||
|
||||
// And whichever value landed first now sticks.
|
||||
const final = await repo.get("emdash:site_url");
|
||||
expect(["https://a.example", "https://b.example", "https://c.example"]).toContain(final);
|
||||
});
|
||||
});
|
||||
Reference in New Issue
Block a user