Emdash source with visual editor image upload fix
Fixes: (1) media.ts — wrap placeholder generation in a try-catch; (2) toolbar.ts — check `r.ok` and display an error message in the popover.
This commit is contained in:
180
packages/core/tests/database/connection.test.ts
Normal file
180
packages/core/tests/database/connection.test.ts
Normal file
@@ -0,0 +1,180 @@
|
||||
import { unlinkSync } from "node:fs";
|
||||
|
||||
import type { Kysely } from "kysely";
|
||||
import { describe, it, expect, afterEach } from "vitest";
|
||||
|
||||
import {
|
||||
createDatabase,
|
||||
EmDashDatabaseError,
|
||||
formatNativeModuleVersionError,
|
||||
} from "../../src/database/connection.js";
|
||||
import type { Database } from "../../src/database/types.js";
|
||||
|
||||
describe("createDatabase", () => {
|
||||
let db: Kysely<Database> | undefined;
|
||||
|
||||
afterEach(async () => {
|
||||
if (db) {
|
||||
await db.destroy();
|
||||
db = undefined;
|
||||
}
|
||||
});
|
||||
|
||||
describe("in-memory SQLite", () => {
|
||||
it("should create in-memory database with :memory: URL", () => {
|
||||
db = createDatabase({ url: ":memory:" });
|
||||
expect(db).toBeDefined();
|
||||
});
|
||||
|
||||
it("should allow queries on in-memory database", async () => {
|
||||
db = createDatabase({ url: ":memory:" });
|
||||
|
||||
// Create a simple table
|
||||
await db.schema
|
||||
.createTable("test")
|
||||
.addColumn("id", "text", (col) => col.primaryKey())
|
||||
.execute();
|
||||
|
||||
// Insert a row
|
||||
await db
|
||||
.insertInto("test" as any)
|
||||
.values({ id: "test-1" })
|
||||
.execute();
|
||||
|
||||
// Query it back
|
||||
const result = await db
|
||||
.selectFrom("test" as any)
|
||||
.selectAll()
|
||||
.execute();
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].id).toBe("test-1");
|
||||
});
|
||||
});
|
||||
|
||||
describe("file-based SQLite", () => {
|
||||
const testDbPath = "./test-db.sqlite";
|
||||
|
||||
afterEach(() => {
|
||||
try {
|
||||
unlinkSync(testDbPath);
|
||||
} catch {
|
||||
// Ignore if file doesn't exist
|
||||
}
|
||||
});
|
||||
|
||||
it("should create file-based database with file: URL", () => {
|
||||
db = createDatabase({ url: `file:${testDbPath}` });
|
||||
expect(db).toBeDefined();
|
||||
});
|
||||
|
||||
it("should persist data to file", async () => {
|
||||
// Create database and insert data
|
||||
db = createDatabase({ url: `file:${testDbPath}` });
|
||||
|
||||
await db.schema
|
||||
.createTable("test")
|
||||
.addColumn("id", "text", (col) => col.primaryKey())
|
||||
.execute();
|
||||
|
||||
await db
|
||||
.insertInto("test" as any)
|
||||
.values({ id: "test-1" })
|
||||
.execute();
|
||||
await db.destroy();
|
||||
db = undefined;
|
||||
|
||||
// Reopen database and verify data persists
|
||||
db = createDatabase({ url: `file:${testDbPath}` });
|
||||
const result = await db
|
||||
.selectFrom("test" as any)
|
||||
.selectAll()
|
||||
.execute();
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].id).toBe("test-1");
|
||||
});
|
||||
});
|
||||
|
||||
describe("libSQL / Turso", () => {
|
||||
it("should throw error for libsql URL without auth token", () => {
|
||||
expect(() => {
|
||||
createDatabase({ url: "libsql://example.turso.io" });
|
||||
}).toThrow(EmDashDatabaseError);
|
||||
|
||||
expect(() => {
|
||||
createDatabase({ url: "libsql://example.turso.io" });
|
||||
}).toThrow("Auth token required");
|
||||
});
|
||||
|
||||
it("should throw not implemented error for libsql URL with token", () => {
|
||||
expect(() => {
|
||||
createDatabase({
|
||||
url: "libsql://example.turso.io",
|
||||
authToken: "test-token",
|
||||
});
|
||||
}).toThrow("LibSQL not yet implemented");
|
||||
});
|
||||
});
|
||||
|
||||
describe("error handling", () => {
|
||||
it("should throw EmDashDatabaseError for invalid URL scheme", () => {
|
||||
expect(() => {
|
||||
createDatabase({ url: "invalid://test" });
|
||||
}).toThrow(EmDashDatabaseError);
|
||||
|
||||
expect(() => {
|
||||
createDatabase({ url: "invalid://test" });
|
||||
}).toThrow("Unsupported database URL scheme");
|
||||
});
|
||||
|
||||
it("should throw EmDashDatabaseError for malformed file path", () => {
|
||||
expect(() => {
|
||||
createDatabase({ url: "file:/nonexistent/path/to/db.sqlite" });
|
||||
}).toThrow(EmDashDatabaseError);
|
||||
});
|
||||
|
||||
it("should wrap underlying errors in EmDashDatabaseError", () => {
|
||||
try {
|
||||
createDatabase({ url: "file:/root/cannot-write-here.db" });
|
||||
} catch (error) {
|
||||
expect(error).toBeInstanceOf(EmDashDatabaseError);
|
||||
expect(error).toHaveProperty("cause");
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe("formatNativeModuleVersionError", () => {
|
||||
it("returns an actionable message for NODE_MODULE_VERSION mismatch", () => {
|
||||
const err = new Error(
|
||||
"The module '/path/better_sqlite3.node' was compiled against a different Node.js version using NODE_MODULE_VERSION 115. This version of Node.js requires NODE_MODULE_VERSION 127.",
|
||||
);
|
||||
const message = formatNativeModuleVersionError(err);
|
||||
expect(message).not.toBeNull();
|
||||
expect(message).toContain("better-sqlite3");
|
||||
expect(message).toMatch(/rebuild/i);
|
||||
});
|
||||
|
||||
it("returns null for unrelated errors", () => {
|
||||
expect(formatNativeModuleVersionError(new Error("disk full"))).toBeNull();
|
||||
expect(formatNativeModuleVersionError("some string")).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe("connection lifecycle", () => {
|
||||
it("should allow closing connection with destroy()", async () => {
|
||||
db = createDatabase({ url: ":memory:" });
|
||||
await expect(db.destroy()).resolves.not.toThrow();
|
||||
db = undefined;
|
||||
});
|
||||
|
||||
it("should return functional Kysely instance", () => {
|
||||
db = createDatabase({ url: ":memory:" });
|
||||
|
||||
// Check for Kysely methods
|
||||
expect(db.selectFrom).toBeInstanceOf(Function);
|
||||
expect(db.insertInto).toBeInstanceOf(Function);
|
||||
expect(db.updateTable).toBeInstanceOf(Function);
|
||||
expect(db.deleteFrom).toBeInstanceOf(Function);
|
||||
expect(db.schema).toBeDefined();
|
||||
});
|
||||
});
|
||||
});
|
||||
293
packages/core/tests/database/migrations.test.ts
Normal file
293
packages/core/tests/database/migrations.test.ts
Normal file
@@ -0,0 +1,293 @@
|
||||
import type { Kysely } from "kysely";
|
||||
import { describe, it, expect, beforeEach, afterEach } from "vitest";
|
||||
|
||||
import { createDatabase } from "../../src/database/connection.js";
|
||||
import {
|
||||
runMigrations,
|
||||
getMigrationStatus,
|
||||
MIGRATION_COUNT,
|
||||
} from "../../src/database/migrations/runner.js";
|
||||
import type { Database } from "../../src/database/types.js";
|
||||
|
||||
describe("Database Migrations", () => {
|
||||
let db: Kysely<Database>;
|
||||
|
||||
beforeEach(() => {
|
||||
// Fresh in-memory database for each test
|
||||
db = createDatabase({ url: ":memory:" });
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await db.destroy();
|
||||
});
|
||||
|
||||
describe("getMigrationStatus", () => {
|
||||
it("should return all migrations as pending for fresh database", async () => {
|
||||
const status = await getMigrationStatus(db);
|
||||
|
||||
expect(status.applied).toEqual([]);
|
||||
expect(status.pending).toContain("001_initial");
|
||||
});
|
||||
|
||||
it("should create migrations tracking table when running migrations", async () => {
|
||||
// Note: getMigrationStatus doesn't create the table, runMigrations does
|
||||
await runMigrations(db);
|
||||
|
||||
// Verify table was created
|
||||
const tables = await db.introspection.getTables();
|
||||
const migrationTable = tables.find((t) => t.name === "_emdash_migrations");
|
||||
expect(migrationTable).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe("runMigrations", () => {
|
||||
it("should run all pending migrations on fresh database", async () => {
|
||||
await runMigrations(db);
|
||||
|
||||
const status = await getMigrationStatus(db);
|
||||
expect(status.pending).toEqual([]);
|
||||
expect(status.applied).toContain("001_initial");
|
||||
});
|
||||
|
||||
it("should create all tables from initial migration", async () => {
|
||||
await runMigrations(db);
|
||||
|
||||
const tables = await db.introspection.getTables();
|
||||
const tableNames = tables.map((t) => t.name);
|
||||
|
||||
// Core system tables (no generic "content" table - collections create ec_* tables)
|
||||
expect(tableNames).toContain("revisions");
|
||||
expect(tableNames).toContain("taxonomies");
|
||||
expect(tableNames).toContain("content_taxonomies");
|
||||
expect(tableNames).toContain("media");
|
||||
expect(tableNames).toContain("users");
|
||||
expect(tableNames).toContain("options");
|
||||
expect(tableNames).toContain("audit_logs");
|
||||
expect(tableNames).toContain("_emdash_migrations");
|
||||
// Schema registry tables
|
||||
expect(tableNames).toContain("_emdash_collections");
|
||||
expect(tableNames).toContain("_emdash_fields");
|
||||
});
|
||||
|
||||
it("should be idempotent - running twice should not error", async () => {
|
||||
await runMigrations(db);
|
||||
await expect(runMigrations(db)).resolves.not.toThrow();
|
||||
|
||||
const status = await getMigrationStatus(db);
|
||||
expect(status.applied).toHaveLength(MIGRATION_COUNT); // derived from MIGRATIONS map in runner.ts
|
||||
});
|
||||
|
||||
it("should record migration in tracking table", async () => {
|
||||
await runMigrations(db);
|
||||
|
||||
const records = await db.selectFrom("_emdash_migrations").selectAll().execute();
|
||||
|
||||
expect(records).toHaveLength(MIGRATION_COUNT);
|
||||
expect(records[0].name).toBe("001_initial");
|
||||
expect(records[0].timestamp).toBeDefined();
|
||||
expect(records[1].name).toBe("002_media_status");
|
||||
expect(records[1].timestamp).toBeDefined();
|
||||
expect(records[2].name).toBe("003_schema_registry");
|
||||
expect(records[2].timestamp).toBeDefined();
|
||||
expect(records[3].name).toBe("004_plugins");
|
||||
expect(records[3].timestamp).toBeDefined();
|
||||
expect(records[4].name).toBe("005_menus");
|
||||
expect(records[4].timestamp).toBeDefined();
|
||||
expect(records[5].name).toBe("006_taxonomy_defs");
|
||||
expect(records[5].timestamp).toBeDefined();
|
||||
expect(records[6].name).toBe("007_widgets");
|
||||
expect(records[6].timestamp).toBeDefined();
|
||||
expect(records[7].name).toBe("008_auth");
|
||||
expect(records[7].timestamp).toBeDefined();
|
||||
expect(records[8].name).toBe("009_user_disabled");
|
||||
expect(records[8].timestamp).toBeDefined();
|
||||
expect(records[9].name).toBe("011_sections");
|
||||
expect(records[9].timestamp).toBeDefined();
|
||||
expect(records[10].name).toBe("012_search");
|
||||
expect(records[10].timestamp).toBeDefined();
|
||||
expect(records[11].name).toBe("013_scheduled_publishing");
|
||||
expect(records[11].timestamp).toBeDefined();
|
||||
expect(records[12].name).toBe("014_draft_revisions");
|
||||
expect(records[12].timestamp).toBeDefined();
|
||||
expect(records[13].name).toBe("015_indexes");
|
||||
expect(records[13].timestamp).toBeDefined();
|
||||
expect(records[14].name).toBe("016_api_tokens");
|
||||
expect(records[14].timestamp).toBeDefined();
|
||||
expect(records[15].name).toBe("017_authorization_codes");
|
||||
expect(records[15].timestamp).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe("schema registry tables", () => {
|
||||
beforeEach(async () => {
|
||||
await runMigrations(db);
|
||||
});
|
||||
|
||||
it("should have _emdash_collections table with correct columns", async () => {
|
||||
const tables = await db.introspection.getTables();
|
||||
const collectionsTable = tables.find((t) => t.name === "_emdash_collections");
|
||||
|
||||
expect(collectionsTable).toBeDefined();
|
||||
const columns = collectionsTable!.columns.map((c) => c.name);
|
||||
|
||||
expect(columns).toContain("id");
|
||||
expect(columns).toContain("slug");
|
||||
expect(columns).toContain("label");
|
||||
expect(columns).toContain("label_singular");
|
||||
expect(columns).toContain("description");
|
||||
expect(columns).toContain("icon");
|
||||
expect(columns).toContain("supports");
|
||||
expect(columns).toContain("source");
|
||||
expect(columns).toContain("created_at");
|
||||
expect(columns).toContain("updated_at");
|
||||
});
|
||||
|
||||
it("should have _emdash_fields table with correct columns", async () => {
|
||||
const tables = await db.introspection.getTables();
|
||||
const fieldsTable = tables.find((t) => t.name === "_emdash_fields");
|
||||
|
||||
expect(fieldsTable).toBeDefined();
|
||||
const columns = fieldsTable!.columns.map((c) => c.name);
|
||||
|
||||
expect(columns).toContain("id");
|
||||
expect(columns).toContain("collection_id");
|
||||
expect(columns).toContain("slug");
|
||||
expect(columns).toContain("label");
|
||||
expect(columns).toContain("type");
|
||||
expect(columns).toContain("column_type");
|
||||
expect(columns).toContain("required");
|
||||
expect(columns).toContain("unique");
|
||||
expect(columns).toContain("default_value");
|
||||
expect(columns).toContain("validation");
|
||||
expect(columns).toContain("widget");
|
||||
expect(columns).toContain("options");
|
||||
expect(columns).toContain("sort_order");
|
||||
expect(columns).toContain("created_at");
|
||||
});
|
||||
|
||||
it("should enforce unique constraint on collection slug", async () => {
|
||||
await db
|
||||
.insertInto("_emdash_collections")
|
||||
.values({
|
||||
id: "1",
|
||||
slug: "posts",
|
||||
label: "Posts",
|
||||
})
|
||||
.execute();
|
||||
|
||||
await expect(
|
||||
db
|
||||
.insertInto("_emdash_collections")
|
||||
.values({
|
||||
id: "2",
|
||||
slug: "posts",
|
||||
label: "Posts Again",
|
||||
})
|
||||
.execute(),
|
||||
).rejects.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe("users table schema", () => {
|
||||
beforeEach(async () => {
|
||||
await runMigrations(db);
|
||||
});
|
||||
|
||||
it("should enforce unique constraint on email", async () => {
|
||||
await db
|
||||
.insertInto("users")
|
||||
.values({
|
||||
id: "1",
|
||||
email: "test@example.com",
|
||||
role: 50, // ADMIN
|
||||
})
|
||||
.execute();
|
||||
|
||||
await expect(
|
||||
db
|
||||
.insertInto("users")
|
||||
.values({
|
||||
id: "2",
|
||||
email: "test@example.com",
|
||||
role: 40, // EDITOR
|
||||
})
|
||||
.execute(),
|
||||
).rejects.toThrow();
|
||||
});
|
||||
|
||||
it("should have auth-related tables", async () => {
|
||||
const tables = await db.introspection.getTables();
|
||||
const tableNames = tables.map((t) => t.name);
|
||||
|
||||
expect(tableNames).toContain("credentials");
|
||||
expect(tableNames).toContain("auth_tokens");
|
||||
expect(tableNames).toContain("oauth_accounts");
|
||||
expect(tableNames).toContain("allowed_domains");
|
||||
});
|
||||
});
|
||||
|
||||
describe("revisions table", () => {
|
||||
beforeEach(async () => {
|
||||
await runMigrations(db);
|
||||
});
|
||||
|
||||
it("should have correct columns for per-collection architecture", async () => {
|
||||
const tables = await db.introspection.getTables();
|
||||
const revisionsTable = tables.find((t) => t.name === "revisions");
|
||||
|
||||
expect(revisionsTable).toBeDefined();
|
||||
const columns = revisionsTable!.columns.map((c) => c.name);
|
||||
|
||||
// Revisions now reference collection + entry_id instead of content_id
|
||||
expect(columns).toContain("id");
|
||||
expect(columns).toContain("collection");
|
||||
expect(columns).toContain("entry_id");
|
||||
expect(columns).toContain("data");
|
||||
expect(columns).toContain("created_at");
|
||||
});
|
||||
|
||||
it("should store revision data", async () => {
|
||||
await db
|
||||
.insertInto("revisions")
|
||||
.values({
|
||||
id: "rev-1",
|
||||
collection: "posts",
|
||||
entry_id: "entry-1",
|
||||
data: JSON.stringify({ title: "Original Title" }),
|
||||
})
|
||||
.execute();
|
||||
|
||||
const revisions = await db
|
||||
.selectFrom("revisions")
|
||||
.where("collection", "=", "posts")
|
||||
.where("entry_id", "=", "entry-1")
|
||||
.selectAll()
|
||||
.execute();
|
||||
|
||||
expect(revisions).toHaveLength(1);
|
||||
expect(JSON.parse(revisions[0].data)).toEqual({
|
||||
title: "Original Title",
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("media table", () => {
|
||||
beforeEach(async () => {
|
||||
await runMigrations(db);
|
||||
});
|
||||
|
||||
it("should have correct columns", async () => {
|
||||
const tables = await db.introspection.getTables();
|
||||
const mediaTable = tables.find((t) => t.name === "media");
|
||||
|
||||
expect(mediaTable).toBeDefined();
|
||||
const columns = mediaTable!.columns.map((c) => c.name);
|
||||
|
||||
expect(columns).toContain("id");
|
||||
expect(columns).toContain("filename");
|
||||
expect(columns).toContain("mime_type");
|
||||
expect(columns).toContain("size");
|
||||
expect(columns).toContain("storage_key");
|
||||
});
|
||||
});
|
||||
});
|
||||
834
packages/core/tests/database/repositories/content.test.ts
Normal file
834
packages/core/tests/database/repositories/content.test.ts
Normal file
@@ -0,0 +1,834 @@
|
||||
import type { Kysely } from "kysely";
|
||||
import { describe, it, expect, beforeEach, afterEach } from "vitest";
|
||||
|
||||
import { createDatabase } from "../../../src/database/connection.js";
|
||||
import { runMigrations } from "../../../src/database/migrations/runner.js";
|
||||
import { ContentRepository } from "../../../src/database/repositories/content.js";
|
||||
import { EmDashValidationError } from "../../../src/database/repositories/types.js";
|
||||
import type { Database } from "../../../src/database/types.js";
|
||||
import { SchemaRegistry } from "../../../src/schema/registry.js";
|
||||
|
||||
describe("ContentRepository", () => {
|
||||
let db: Kysely<Database>;
|
||||
let repo: ContentRepository;
|
||||
let registry: SchemaRegistry;
|
||||
|
||||
beforeEach(async () => {
|
||||
// Fresh in-memory database for each test
|
||||
db = createDatabase({ url: ":memory:" });
|
||||
await runMigrations(db);
|
||||
repo = new ContentRepository(db);
|
||||
registry = new SchemaRegistry(db);
|
||||
|
||||
// Create collections needed for tests (this creates ec_post and ec_page tables)
|
||||
await registry.createCollection({
|
||||
slug: "post",
|
||||
label: "Posts",
|
||||
labelSingular: "Post",
|
||||
});
|
||||
await registry.createCollection({
|
||||
slug: "page",
|
||||
label: "Pages",
|
||||
labelSingular: "Page",
|
||||
});
|
||||
|
||||
// Add fields to both collections
|
||||
await registry.createField("post", {
|
||||
slug: "title",
|
||||
label: "Title",
|
||||
type: "string",
|
||||
});
|
||||
await registry.createField("post", {
|
||||
slug: "content",
|
||||
label: "Content",
|
||||
type: "portableText",
|
||||
});
|
||||
await registry.createField("page", {
|
||||
slug: "title",
|
||||
label: "Title",
|
||||
type: "string",
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await db.destroy();
|
||||
});
|
||||
|
||||
describe("create", () => {
|
||||
it("should create content with minimal data", async () => {
|
||||
const content = await repo.create({
|
||||
type: "post",
|
||||
data: { title: "Test Post" },
|
||||
});
|
||||
|
||||
expect(content.id).toBeDefined();
|
||||
expect(content.type).toBe("post");
|
||||
expect(content.data).toEqual({ title: "Test Post" });
|
||||
expect(content.status).toBe("draft");
|
||||
expect(content.createdAt).toBeDefined();
|
||||
expect(content.updatedAt).toBeDefined();
|
||||
});
|
||||
|
||||
it("should create content with all fields", async () => {
|
||||
const content = await repo.create({
|
||||
type: "post",
|
||||
slug: "test-post",
|
||||
data: { title: "Test Post", content: "Body" },
|
||||
status: "published",
|
||||
authorId: "author-1",
|
||||
});
|
||||
|
||||
expect(content.id).toBeDefined();
|
||||
expect(content.type).toBe("post");
|
||||
expect(content.slug).toBe("test-post");
|
||||
expect(content.data).toEqual({ title: "Test Post", content: "Body" });
|
||||
expect(content.status).toBe("published");
|
||||
expect(content.authorId).toBe("author-1");
|
||||
});
|
||||
|
||||
it("should throw validation error when type is missing", async () => {
|
||||
await expect(
|
||||
repo.create({
|
||||
type: "",
|
||||
data: { title: "Test" },
|
||||
}),
|
||||
).rejects.toThrow(EmDashValidationError);
|
||||
});
|
||||
|
||||
it("should throw error for duplicate type+slug", async () => {
|
||||
await repo.create({
|
||||
type: "post",
|
||||
slug: "duplicate-slug",
|
||||
data: { title: "First" },
|
||||
});
|
||||
|
||||
await expect(
|
||||
repo.create({
|
||||
type: "post",
|
||||
slug: "duplicate-slug",
|
||||
data: { title: "Second" },
|
||||
}),
|
||||
).rejects.toThrow();
|
||||
});
|
||||
|
||||
it("should allow same slug for different types", async () => {
|
||||
await repo.create({
|
||||
type: "post",
|
||||
slug: "same-slug",
|
||||
data: { title: "Post" },
|
||||
});
|
||||
|
||||
await expect(
|
||||
repo.create({
|
||||
type: "page",
|
||||
slug: "same-slug",
|
||||
data: { title: "Page" },
|
||||
}),
|
||||
).resolves.not.toThrow();
|
||||
});
|
||||
|
||||
it("should allow null slug", async () => {
|
||||
const content = await repo.create({
|
||||
type: "post",
|
||||
slug: null,
|
||||
data: { title: "No slug" },
|
||||
});
|
||||
|
||||
expect(content.slug).toBeNull();
|
||||
});
|
||||
|
||||
it("should default status to draft", async () => {
|
||||
const content = await repo.create({
|
||||
type: "post",
|
||||
data: { title: "Test" },
|
||||
});
|
||||
|
||||
expect(content.status).toBe("draft");
|
||||
});
|
||||
|
||||
it("should generate unique ID", async () => {
|
||||
const content1 = await repo.create({
|
||||
type: "post",
|
||||
data: { title: "First" },
|
||||
});
|
||||
|
||||
const content2 = await repo.create({
|
||||
type: "post",
|
||||
data: { title: "Second" },
|
||||
});
|
||||
|
||||
expect(content1.id).not.toBe(content2.id);
|
||||
});
|
||||
|
||||
it("should store complex nested data in JSON columns", async () => {
|
||||
// Portable Text content is stored as JSON
|
||||
const portableTextContent = [
|
||||
{
|
||||
_type: "block",
|
||||
style: "normal",
|
||||
children: [{ _type: "span", text: "Hello world" }],
|
||||
},
|
||||
{
|
||||
_type: "block",
|
||||
style: "h1",
|
||||
children: [{ _type: "span", text: "Heading", marks: ["bold"] }],
|
||||
},
|
||||
];
|
||||
|
||||
const content = await repo.create({
|
||||
type: "post",
|
||||
data: {
|
||||
title: "Complex Post",
|
||||
content: portableTextContent,
|
||||
},
|
||||
});
|
||||
|
||||
expect(content.data.title).toBe("Complex Post");
|
||||
expect(content.data.content).toEqual(portableTextContent);
|
||||
});
|
||||
});
|
||||
|
||||
describe("findById", () => {
|
||||
it("should find content by ID", async () => {
|
||||
const created = await repo.create({
|
||||
type: "post",
|
||||
data: { title: "Test" },
|
||||
});
|
||||
|
||||
const found = await repo.findById("post", created.id);
|
||||
|
||||
expect(found).not.toBeNull();
|
||||
expect(found!.id).toBe(created.id);
|
||||
expect(found!.data).toEqual(created.data);
|
||||
});
|
||||
|
||||
it("should return null for non-existent ID", async () => {
|
||||
const found = await repo.findById("post", "non-existent-id");
|
||||
expect(found).toBeNull();
|
||||
});
|
||||
|
||||
it("should return null when type doesn't match", async () => {
|
||||
const created = await repo.create({
|
||||
type: "post",
|
||||
data: { title: "Test" },
|
||||
});
|
||||
|
||||
const found = await repo.findById("page", created.id);
|
||||
expect(found).toBeNull();
|
||||
});
|
||||
|
||||
it("should not find soft-deleted content", async () => {
|
||||
const created = await repo.create({
|
||||
type: "post",
|
||||
data: { title: "Test" },
|
||||
});
|
||||
|
||||
await repo.delete("post", created.id);
|
||||
|
||||
const found = await repo.findById("post", created.id);
|
||||
expect(found).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe("findBySlug", () => {
|
||||
it("should find content by slug", async () => {
|
||||
await repo.create({
|
||||
type: "post",
|
||||
slug: "test-slug",
|
||||
data: { title: "Test" },
|
||||
});
|
||||
|
||||
const found = await repo.findBySlug("post", "test-slug");
|
||||
|
||||
expect(found).not.toBeNull();
|
||||
expect(found!.slug).toBe("test-slug");
|
||||
});
|
||||
|
||||
it("should return null for non-existent slug", async () => {
|
||||
const found = await repo.findBySlug("post", "non-existent");
|
||||
expect(found).toBeNull();
|
||||
});
|
||||
|
||||
it("should return correct content when same slug exists for different types", async () => {
|
||||
await repo.create({
|
||||
type: "post",
|
||||
slug: "shared-slug",
|
||||
data: { title: "Post" },
|
||||
});
|
||||
|
||||
await repo.create({
|
||||
type: "page",
|
||||
slug: "shared-slug",
|
||||
data: { title: "Page" },
|
||||
});
|
||||
|
||||
const post = await repo.findBySlug("post", "shared-slug");
|
||||
const page = await repo.findBySlug("page", "shared-slug");
|
||||
|
||||
expect(post!.type).toBe("post");
|
||||
expect(post!.data.title).toBe("Post");
|
||||
expect(page!.type).toBe("page");
|
||||
expect(page!.data.title).toBe("Page");
|
||||
});
|
||||
|
||||
it("should not find soft-deleted content", async () => {
|
||||
const created = await repo.create({
|
||||
type: "post",
|
||||
slug: "test-slug",
|
||||
data: { title: "Test" },
|
||||
});
|
||||
|
||||
await repo.delete("post", created.id);
|
||||
|
||||
const found = await repo.findBySlug("post", "test-slug");
|
||||
expect(found).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe("findMany", () => {
|
||||
beforeEach(async () => {
|
||||
// Create test data
|
||||
for (let i = 0; i < 5; i++) {
|
||||
await repo.create({
|
||||
type: "post",
|
||||
slug: `post-${i}`,
|
||||
data: { title: `Post ${i}` },
|
||||
status: i % 2 === 0 ? "published" : "draft",
|
||||
authorId: i < 3 ? "author-1" : "author-2",
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
it("should return all content by default", async () => {
|
||||
const result = await repo.findMany("post");
|
||||
|
||||
expect(result.items).toHaveLength(5);
|
||||
});
|
||||
|
||||
it("should filter by status", async () => {
|
||||
const result = await repo.findMany("post", {
|
||||
where: { status: "published" },
|
||||
});
|
||||
|
||||
expect(result.items).toHaveLength(3);
|
||||
expect(result.items.every((item) => item.status === "published")).toBe(true);
|
||||
});
|
||||
|
||||
it("should filter by authorId", async () => {
|
||||
const result = await repo.findMany("post", {
|
||||
where: { authorId: "author-1" },
|
||||
});
|
||||
|
||||
expect(result.items).toHaveLength(3);
|
||||
expect(result.items.every((item) => item.authorId === "author-1")).toBe(true);
|
||||
});
|
||||
|
||||
it("should filter by both status and authorId", async () => {
|
||||
const result = await repo.findMany("post", {
|
||||
where: {
|
||||
status: "published",
|
||||
authorId: "author-1",
|
||||
},
|
||||
});
|
||||
|
||||
expect(result.items).toHaveLength(2);
|
||||
});
|
||||
|
||||
it("should apply limit", async () => {
|
||||
const result = await repo.findMany("post", { limit: 2 });
|
||||
|
||||
expect(result.items).toHaveLength(2);
|
||||
});
|
||||
|
||||
it("should support cursor pagination", async () => {
|
||||
const page1 = await repo.findMany("post", { limit: 2 });
|
||||
expect(page1.items).toHaveLength(2);
|
||||
expect(page1.nextCursor).toBeDefined();
|
||||
|
||||
const page2 = await repo.findMany("post", {
|
||||
limit: 2,
|
||||
cursor: page1.nextCursor,
|
||||
});
|
||||
expect(page2.items).toHaveLength(2);
|
||||
|
||||
// Items should be different
|
||||
const page1Ids = page1.items.map((i) => i.id);
|
||||
const page2Ids = page2.items.map((i) => i.id);
|
||||
expect(page1Ids).not.toEqual(page2Ids);
|
||||
});
|
||||
|
||||
it("should not include nextCursor when no more items", async () => {
|
||||
const result = await repo.findMany("post", { limit: 10 });
|
||||
|
||||
expect(result.items).toHaveLength(5);
|
||||
expect(result.nextCursor).toBeUndefined();
|
||||
});
|
||||
|
||||
it("should order by createdAt desc by default", async () => {
|
||||
const result = await repo.findMany("post");
|
||||
|
||||
// Items should be in descending order (newest first)
|
||||
for (let i = 1; i < result.items.length; i++) {
|
||||
expect(result.items[i - 1].createdAt >= result.items[i].createdAt).toBe(true);
|
||||
}
|
||||
});
|
||||
|
||||
it("should support custom ordering", async () => {
|
||||
const result = await repo.findMany("post", {
|
||||
orderBy: {
|
||||
field: "createdAt",
|
||||
direction: "asc",
|
||||
},
|
||||
});
|
||||
|
||||
// Items should be in ascending order (oldest first)
|
||||
for (let i = 1; i < result.items.length; i++) {
|
||||
expect(result.items[i - 1].createdAt <= result.items[i].createdAt).toBe(true);
|
||||
}
|
||||
});
|
||||
|
||||
it("should default limit to 50", async () => {
|
||||
// Create more than 50 items
|
||||
for (let i = 0; i < 60; i++) {
|
||||
await repo.create({
|
||||
type: "page",
|
||||
data: { title: `Page ${i}` },
|
||||
});
|
||||
}
|
||||
|
||||
const result = await repo.findMany("page");
|
||||
|
||||
expect(result.items.length).toBeLessThanOrEqual(50);
|
||||
});
|
||||
|
||||
it("should cap limit at 100", async () => {
|
||||
const result = await repo.findMany("post", { limit: 200 });
|
||||
|
||||
// Even with limit: 200, should not return more than 100
|
||||
expect(result.items.length).toBeLessThanOrEqual(100);
|
||||
});
|
||||
|
||||
it("should not include soft-deleted content", async () => {
|
||||
const toDelete = await repo.create({
|
||||
type: "post",
|
||||
data: { title: "To Delete" },
|
||||
});
|
||||
|
||||
await repo.delete("post", toDelete.id);
|
||||
|
||||
const result = await repo.findMany("post");
|
||||
|
||||
expect(result.items.every((item) => item.id !== toDelete.id)).toBe(true);
|
||||
});
|
||||
|
||||
it("should return empty array when no items match", async () => {
|
||||
const result = await repo.findMany("page");
|
||||
|
||||
expect(result.items).toEqual([]);
|
||||
expect(result.nextCursor).toBeUndefined();
|
||||
});
|
||||
|
||||
describe("orderBy", () => {
|
||||
// Regression guard for "table headers aren't sort controls": the
|
||||
// admin now sends orderBy={field,direction} — the repo must accept
|
||||
// the columns the UI wants to expose, not just dates.
|
||||
it("accepts status as an order field", async () => {
|
||||
const result = await repo.findMany("post", {
|
||||
orderBy: { field: "status", direction: "asc" },
|
||||
});
|
||||
|
||||
// alphabetical asc places 'draft' before 'published'
|
||||
expect(result.items[0]!.status).toBe("draft");
|
||||
});
|
||||
|
||||
it("accepts locale as an order field", async () => {
|
||||
await repo.findMany("post", {
|
||||
orderBy: { field: "locale", direction: "desc" },
|
||||
});
|
||||
// no throw = pass
|
||||
});
|
||||
|
||||
it("rejects unknown fields to block column enumeration", async () => {
|
||||
await expect(
|
||||
repo.findMany("post", {
|
||||
orderBy: { field: "password", direction: "asc" },
|
||||
}),
|
||||
).rejects.toThrow(EmDashValidationError);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("update", () => {
|
||||
it("should update content data", async () => {
|
||||
const created = await repo.create({
|
||||
type: "post",
|
||||
data: { title: "Original" },
|
||||
});
|
||||
|
||||
const updated = await repo.update("post", created.id, {
|
||||
data: { title: "Updated" },
|
||||
});
|
||||
|
||||
expect(updated.data.title).toBe("Updated");
|
||||
expect(updated.id).toBe(created.id);
|
||||
});
|
||||
|
||||
it("should update status", async () => {
|
||||
const created = await repo.create({
|
||||
type: "post",
|
||||
data: { title: "Test" },
|
||||
status: "draft",
|
||||
});
|
||||
|
||||
const updated = await repo.update("post", created.id, {
|
||||
status: "published",
|
||||
});
|
||||
|
||||
expect(updated.status).toBe("published");
|
||||
});
|
||||
|
||||
it("should update slug", async () => {
|
||||
const created = await repo.create({
|
||||
type: "post",
|
||||
slug: "old-slug",
|
||||
data: { title: "Test" },
|
||||
});
|
||||
|
||||
const updated = await repo.update("post", created.id, {
|
||||
slug: "new-slug",
|
||||
});
|
||||
|
||||
expect(updated.slug).toBe("new-slug");
|
||||
});
|
||||
|
||||
it("should update publishedAt", async () => {
|
||||
const created = await repo.create({
|
||||
type: "post",
|
||||
data: { title: "Test" },
|
||||
});
|
||||
|
||||
const publishedAt = new Date().toISOString();
|
||||
const updated = await repo.update("post", created.id, {
|
||||
publishedAt,
|
||||
});
|
||||
|
||||
expect(updated.publishedAt).toBe(publishedAt);
|
||||
});
|
||||
|
||||
it("should support partial updates", async () => {
|
||||
const created = await repo.create({
|
||||
type: "post",
|
||||
slug: "test-slug",
|
||||
data: { title: "Test", content: "Original content" },
|
||||
status: "draft",
|
||||
});
|
||||
|
||||
const updated = await repo.update("post", created.id, {
|
||||
status: "published",
|
||||
});
|
||||
|
||||
// Only status should change
|
||||
expect(updated.status).toBe("published");
|
||||
expect(updated.slug).toBe("test-slug");
|
||||
expect(updated.data).toEqual({
|
||||
title: "Test",
|
||||
content: "Original content",
|
||||
});
|
||||
});
|
||||
|
||||
it("should throw error for non-existent content", async () => {
|
||||
await expect(repo.update("post", "non-existent", { status: "published" })).rejects.toThrow(
|
||||
"Content not found",
|
||||
);
|
||||
});
|
||||
|
||||
it("should persist removal of array items in JSON fields (multiSelect)", async () => {
|
||||
// Add a multiSelect (JSON) field to the post collection
|
||||
await registry.createField("post", {
|
||||
slug: "tags",
|
||||
label: "Tags",
|
||||
type: "multiSelect",
|
||||
});
|
||||
|
||||
const created = await repo.create({
|
||||
type: "post",
|
||||
data: { title: "Test", tags: ["news", "sports", "tech"] },
|
||||
});
|
||||
|
||||
expect(created.data.tags).toEqual(["news", "sports", "tech"]);
|
||||
|
||||
// Remove "sports" from the array (simulates unchecking a checkbox)
|
||||
const updated = await repo.update("post", created.id, {
|
||||
data: { title: "Test", tags: ["news", "tech"] },
|
||||
});
|
||||
|
||||
expect(updated.data.tags).toEqual(["news", "tech"]);
|
||||
|
||||
// Verify it persists when re-reading
|
||||
const fetched = await repo.findById("post", updated.id);
|
||||
expect(fetched!.data.tags).toEqual(["news", "tech"]);
|
||||
});
|
||||
|
||||
it("should persist empty array in JSON fields (multiSelect)", async () => {
|
||||
await registry.createField("post", {
|
||||
slug: "categories",
|
||||
label: "Categories",
|
||||
type: "multiSelect",
|
||||
});
|
||||
|
||||
const created = await repo.create({
|
||||
type: "post",
|
||||
data: { title: "Test", categories: ["news"] },
|
||||
});
|
||||
|
||||
// Uncheck all items
|
||||
const updated = await repo.update("post", created.id, {
|
||||
data: { title: "Test", categories: [] },
|
||||
});
|
||||
|
||||
expect(updated.data.categories).toEqual([]);
|
||||
|
||||
const fetched = await repo.findById("post", updated.id);
|
||||
expect(fetched!.data.categories).toEqual([]);
|
||||
});
|
||||
|
||||
it("should not update soft-deleted content", async () => {
|
||||
const created = await repo.create({
|
||||
type: "post",
|
||||
data: { title: "Test" },
|
||||
});
|
||||
|
||||
await repo.delete("post", created.id);
|
||||
|
||||
await expect(repo.update("post", created.id, { status: "published" })).rejects.toThrow(
|
||||
"Content not found",
|
||||
);
|
||||
});
|
||||
|
||||
it("should update updatedAt timestamp", async () => {
|
||||
const created = await repo.create({
|
||||
type: "post",
|
||||
data: { title: "Test" },
|
||||
});
|
||||
|
||||
// Small delay to ensure timestamp difference
|
||||
await new Promise((resolve) => setTimeout(resolve, 10));
|
||||
|
||||
const updated = await repo.update("post", created.id, {
|
||||
data: { title: "Updated" },
|
||||
});
|
||||
|
||||
expect(updated.updatedAt > created.updatedAt).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("delete", () => {
|
||||
it("should soft delete content", async () => {
|
||||
const created = await repo.create({
|
||||
type: "post",
|
||||
data: { title: "Test" },
|
||||
});
|
||||
|
||||
const result = await repo.delete("post", created.id);
|
||||
|
||||
expect(result).toBe(true);
|
||||
|
||||
// Verify content is not found
|
||||
const found = await repo.findById("post", created.id);
|
||||
expect(found).toBeNull();
|
||||
});
|
||||
|
||||
it("should return true for successful deletion", async () => {
|
||||
const created = await repo.create({
|
||||
type: "post",
|
||||
data: { title: "Test" },
|
||||
});
|
||||
|
||||
const result = await repo.delete("post", created.id);
|
||||
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it("should return false for non-existent content", async () => {
|
||||
const result = await repo.delete("post", "non-existent");
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it("should return false for already deleted content", async () => {
|
||||
const created = await repo.create({
|
||||
type: "post",
|
||||
data: { title: "Test" },
|
||||
});
|
||||
|
||||
await repo.delete("post", created.id);
|
||||
const result = await repo.delete("post", created.id);
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it("should set deleted_at timestamp", async () => {
|
||||
const created = await repo.create({
|
||||
type: "post",
|
||||
data: { title: "Test" },
|
||||
});
|
||||
|
||||
await repo.delete("post", created.id);
|
||||
|
||||
// Directly query database to check deleted_at
|
||||
// Use raw SQL since ec_post is a dynamic table
|
||||
const { sql } = await import("kysely");
|
||||
const result = await sql<{ deleted_at: string | null }>`
|
||||
SELECT deleted_at FROM ec_post WHERE id = ${created.id}
|
||||
`.execute(db);
|
||||
|
||||
expect(result.rows[0]?.deleted_at).toBeDefined();
|
||||
expect(result.rows[0]?.deleted_at).not.toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe("count", () => {
|
||||
beforeEach(async () => {
|
||||
// Create test data
|
||||
for (let i = 0; i < 10; i++) {
|
||||
await repo.create({
|
||||
type: "post",
|
||||
data: { title: `Post ${i}` },
|
||||
status: i % 2 === 0 ? "published" : "draft",
|
||||
authorId: i < 5 ? "author-1" : "author-2",
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
it("should count all content of a type", async () => {
|
||||
const count = await repo.count("post");
|
||||
|
||||
expect(count).toBe(10);
|
||||
});
|
||||
|
||||
it("should count by status", async () => {
|
||||
const count = await repo.count("post", { status: "published" });
|
||||
|
||||
expect(count).toBe(5);
|
||||
});
|
||||
|
||||
it("should count by authorId", async () => {
|
||||
const count = await repo.count("post", { authorId: "author-1" });
|
||||
|
||||
expect(count).toBe(5);
|
||||
});
|
||||
|
||||
it("should count by both status and authorId", async () => {
|
||||
const count = await repo.count("post", {
|
||||
status: "published",
|
||||
authorId: "author-1",
|
||||
});
|
||||
|
||||
// Posts 0, 2, 4 are published by author-1
|
||||
expect(count).toBe(3);
|
||||
});
|
||||
|
||||
it("should return 0 when no items match", async () => {
|
||||
const count = await repo.count("page");
|
||||
|
||||
expect(count).toBe(0);
|
||||
});
|
||||
|
||||
it("should not count soft-deleted content", async () => {
|
||||
const created = await repo.create({
|
||||
type: "post",
|
||||
data: { title: "To Delete" },
|
||||
});
|
||||
|
||||
await repo.delete("post", created.id);
|
||||
|
||||
const count = await repo.count("post");
|
||||
|
||||
expect(count).toBe(10); // Not 11
|
||||
});
|
||||
});
|
||||
|
||||
describe("integration scenarios", () => {
|
||||
it("should handle full CRUD lifecycle", async () => {
|
||||
// Create
|
||||
const created = await repo.create({
|
||||
type: "post",
|
||||
slug: "test-post",
|
||||
data: { title: "Test Post", content: "Original content" },
|
||||
status: "draft",
|
||||
});
|
||||
|
||||
expect(created.id).toBeDefined();
|
||||
expect(created.status).toBe("draft");
|
||||
|
||||
// Read
|
||||
const found = await repo.findBySlug("post", "test-post");
|
||||
expect(found!.id).toBe(created.id);
|
||||
|
||||
// Update
|
||||
const updated = await repo.update("post", created.id, {
|
||||
data: { title: "Updated Post", content: "New content" },
|
||||
status: "published",
|
||||
});
|
||||
|
||||
expect(updated.data.title).toBe("Updated Post");
|
||||
expect(updated.status).toBe("published");
|
||||
|
||||
// Delete
|
||||
const deleted = await repo.delete("post", created.id);
|
||||
expect(deleted).toBe(true);
|
||||
|
||||
// Verify not found
|
||||
const notFound = await repo.findById("post", created.id);
|
||||
expect(notFound).toBeNull();
|
||||
});
|
||||
|
||||
it("should handle concurrent operations", async () => {
|
||||
// Create multiple items concurrently
|
||||
const promises = Array.from({ length: 10 }, (_, i) =>
|
||||
repo.create({
|
||||
type: "post",
|
||||
data: { title: `Post ${i}` },
|
||||
}),
|
||||
);
|
||||
|
||||
const created = await Promise.all(promises);
|
||||
|
||||
expect(created).toHaveLength(10);
|
||||
expect(new Set(created.map((c) => c.id)).size).toBe(10); // All unique IDs
|
||||
});
|
||||
|
||||
it("should persist complex nested data structures", async () => {
|
||||
// Use the content field (portableText type) for complex nested data
|
||||
const complexContent = [
|
||||
{
|
||||
_type: "block",
|
||||
style: "h1",
|
||||
children: [{ _type: "span", text: "Title" }],
|
||||
},
|
||||
{
|
||||
_type: "block",
|
||||
style: "normal",
|
||||
children: [
|
||||
{ _type: "span", text: "Bold", marks: ["bold"] },
|
||||
{ _type: "span", text: " and " },
|
||||
{ _type: "span", text: "italic", marks: ["italic"] },
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
const created = await repo.create({
|
||||
type: "post",
|
||||
data: {
|
||||
title: "Complex Post",
|
||||
content: complexContent,
|
||||
},
|
||||
});
|
||||
|
||||
const retrieved = await repo.findById("post", created.id);
|
||||
|
||||
expect(retrieved!.data.title).toBe("Complex Post");
|
||||
expect(retrieved!.data.content).toEqual(complexContent);
|
||||
});
|
||||
});
|
||||
});