Emdash source with visual editor image upload fix
Fixes:
1. media.ts: wrap placeholder generation in try-catch
2. toolbar.ts: check r.ok, display error message in popover
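media.ts and toolbar.ts themselves are not part of the test diff below, so here is a minimal TypeScript sketch of the two fixes described above; the helper names (generatePlaceholder, showPopoverError) and the /api/media endpoint are assumptions, not code from this commit.

// Sketch only: media.ts and toolbar.ts are not shown in this commit view;
// generatePlaceholder, showPopoverError, and /api/media are assumed names.
declare function generatePlaceholder(file: File): Promise<string>;
declare function showPopoverError(message: string): void;

// media.ts fix: wrap placeholder generation in try-catch so a failing
// placeholder no longer aborts the whole image upload.
async function buildPlaceholder(file: File): Promise<string | null> {
  try {
    return await generatePlaceholder(file);
  } catch {
    return null; // proceed with the upload, just without a placeholder
  }
}

// toolbar.ts fix: check r.ok and surface the failure in the popover instead
// of treating every response as a successful upload.
async function uploadImage(file: File): Promise<string | null> {
  const body = new FormData();
  body.append("file", file);
  const r = await fetch("/api/media", { method: "POST", body });
  if (!r.ok) {
    showPopoverError(`Upload failed (${r.status} ${r.statusText})`);
    return null;
  }
  const { url } = (await r.json()) as { url: string };
  return url;
}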
@@ -0,0 +1,74 @@
import type { Kysely } from "kysely";
import { sql } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";

import { createDatabase } from "../../../../src/database/connection.js";
import { down, up } from "../../../../src/database/migrations/031_bylines.js";
import type { Database } from "../../../../src/database/types.js";

describe("031_bylines migration", () => {
  let db: Kysely<Database>;

  beforeEach(async () => {
    db = createDatabase({ url: ":memory:" });

    await db.schema
      .createTable("users")
      .addColumn("id", "text", (col) => col.primaryKey())
      .execute();
    await db.schema
      .createTable("media")
      .addColumn("id", "text", (col) => col.primaryKey())
      .execute();

    await db.schema
      .createTable("ec_posts")
      .addColumn("id", "text", (col) => col.primaryKey())
      .execute();
  });

  afterEach(async () => {
    await db.destroy();
  });

  it("adds byline tables and primary_byline_id to existing content tables", async () => {
    await up(db);

    const tables = await db.introspection.getTables();
    const tableNames = tables.map((t) => t.name);
    expect(tableNames).toContain("_emdash_bylines");
    expect(tableNames).toContain("_emdash_content_bylines");

    const contentTable = tables.find((t) => t.name === "ec_posts");
    expect(contentTable).toBeDefined();
    expect(contentTable?.columns.map((c) => c.name)).toContain("primary_byline_id");

    const idx = await sql<{ name: string }>`
      SELECT name
      FROM sqlite_master
      WHERE type = 'index' AND name = 'idx_ec_posts_primary_byline'
    `.execute(db);
    expect(idx.rows).toHaveLength(1);
  });

  it("reverts added tables, indexes, and columns", async () => {
    await up(db);
    await down(db);

    const tables = await db.introspection.getTables();
    const tableNames = tables.map((t) => t.name);
    expect(tableNames).not.toContain("_emdash_bylines");
    expect(tableNames).not.toContain("_emdash_content_bylines");

    const contentTable = tables.find((t) => t.name === "ec_posts");
    expect(contentTable).toBeDefined();
    expect(contentTable?.columns.map((c) => c.name)).not.toContain("primary_byline_id");

    const idx = await sql<{ name: string }>`
      SELECT name
      FROM sqlite_master
      WHERE type = 'index' AND name = 'idx_ec_posts_primary_byline'
    `.execute(db);
    expect(idx.rows).toHaveLength(0);
  });
});
256
packages/core/tests/unit/database/repositories/byline.test.ts
Normal file
@@ -0,0 +1,256 @@
import type { Kysely } from "kysely";
import { describe, it, expect, beforeEach, afterEach } from "vitest";

import { BylineRepository } from "../../../../src/database/repositories/byline.js";
import { ContentRepository } from "../../../../src/database/repositories/content.js";
import type { Database } from "../../../../src/database/types.js";
import { SQL_BATCH_SIZE } from "../../../../src/utils/chunks.js";
import { setupTestDatabaseWithCollections, teardownTestDatabase } from "../../../utils/test-db.js";

describe("BylineRepository", () => {
  let db: Kysely<Database>;
  let bylineRepo: BylineRepository;
  let contentRepo: ContentRepository;

  beforeEach(async () => {
    db = await setupTestDatabaseWithCollections();
    bylineRepo = new BylineRepository(db);
    contentRepo = new ContentRepository(db);
  });

  afterEach(async () => {
    await teardownTestDatabase(db);
  });

  it("creates and reads bylines", async () => {
    const created = await bylineRepo.create({
      slug: "jane-doe",
      displayName: "Jane Doe",
      isGuest: true,
    });

    expect(created.slug).toBe("jane-doe");
    expect(created.displayName).toBe("Jane Doe");
    expect(created.isGuest).toBe(true);

    const foundById = await bylineRepo.findById(created.id);
    expect(foundById?.id).toBe(created.id);

    const foundBySlug = await bylineRepo.findBySlug("jane-doe");
    expect(foundBySlug?.id).toBe(created.id);

    const foundByUser = await bylineRepo.findByUserId("missing-user");
    expect(foundByUser).toBeNull();
  });

  it("supports updates and paginated listing", async () => {
    const alpha = await bylineRepo.create({
      slug: "alpha",
      displayName: "Alpha Writer",
      isGuest: true,
    });
    await bylineRepo.create({
      slug: "beta",
      displayName: "Beta Writer",
      isGuest: false,
    });

    const updated = await bylineRepo.update(alpha.id, {
      displayName: "Alpha Updated",
      websiteUrl: "https://example.com",
    });
    expect(updated?.displayName).toBe("Alpha Updated");
    expect(updated?.websiteUrl).toBe("https://example.com");

    const searchResult = await bylineRepo.findMany({ search: "Beta" });
    expect(searchResult.items).toHaveLength(1);
    expect(searchResult.items[0]?.slug).toBe("beta");

    const page1 = await bylineRepo.findMany({ limit: 1 });
    expect(page1.items).toHaveLength(1);
    expect(page1.nextCursor).toBeTruthy();

    const page2 = await bylineRepo.findMany({ limit: 1, cursor: page1.nextCursor });
    expect(page2.items).toHaveLength(1);
    expect(page2.items[0]?.id).not.toBe(page1.items[0]?.id);
  });

  it("assigns ordered bylines to content and syncs primary_byline_id", async () => {
    const lead = await bylineRepo.create({
      slug: "lead",
      displayName: "Lead Author",
    });
    const second = await bylineRepo.create({
      slug: "second",
      displayName: "Second Author",
    });

    const content = await contentRepo.create({
      type: "post",
      slug: "bylined-post",
      data: { title: "Bylined Post" },
    });

    const assigned = await bylineRepo.setContentBylines("post", content.id, [
      { bylineId: lead.id },
      { bylineId: second.id, roleLabel: "Editor" },
    ]);

    expect(assigned).toHaveLength(2);
    expect(assigned[0]?.byline.id).toBe(lead.id);
    expect(assigned[0]?.sortOrder).toBe(0);
    expect(assigned[1]?.byline.id).toBe(second.id);
    expect(assigned[1]?.roleLabel).toBe("Editor");

    const refreshed = await contentRepo.findById("post", content.id);
    expect(refreshed?.primaryBylineId).toBe(lead.id);
  });

  it("reorders bylines and updates primary_byline_id", async () => {
    const first = await bylineRepo.create({
      slug: "first",
      displayName: "First",
    });
    const second = await bylineRepo.create({
      slug: "second-reorder",
      displayName: "Second",
    });

    const content = await contentRepo.create({
      type: "post",
      slug: "reordered-post",
      data: { title: "Reordered" },
    });

    await bylineRepo.setContentBylines("post", content.id, [
      { bylineId: first.id },
      { bylineId: second.id },
    ]);

    await bylineRepo.setContentBylines("post", content.id, [
      { bylineId: second.id },
      { bylineId: first.id },
    ]);

    const refreshed = await contentRepo.findById("post", content.id);
    expect(refreshed?.primaryBylineId).toBe(second.id);

    const bylines = await bylineRepo.getContentBylines("post", content.id);
    expect(bylines[0]?.byline.id).toBe(second.id);
    expect(bylines[1]?.byline.id).toBe(first.id);
  });

  it("getContentBylinesMany handles more IDs than SQL_BATCH_SIZE", async () => {
    const byline = await bylineRepo.create({
      slug: "batch-author",
      displayName: "Batch Author",
    });

    // Create a few real content entries with bylines
    const realIds: string[] = [];
    for (let i = 0; i < 3; i++) {
      const content = await contentRepo.create({
        type: "post",
        slug: `batch-post-${i}`,
        data: { title: `Batch Post ${i}` },
      });
      await bylineRepo.setContentBylines("post", content.id, [{ bylineId: byline.id }]);
      realIds.push(content.id);
    }

    // Build an ID list larger than SQL_BATCH_SIZE with the real IDs spread across chunks
    const ids: string[] = [];
    for (let i = 0; i < SQL_BATCH_SIZE + 10; i++) {
      ids.push(`fake-id-${i}`);
    }
    // Place real IDs so they span different chunks
    ids[0] = realIds[0]!;
    ids[SQL_BATCH_SIZE - 1] = realIds[1]!;
    ids[SQL_BATCH_SIZE + 5] = realIds[2]!;

    const result = await bylineRepo.getContentBylinesMany("post", ids);

    // All 3 real entries should have their byline resolved
    expect(result.get(realIds[0]!)).toHaveLength(1);
    expect(result.get(realIds[1]!)).toHaveLength(1);
    expect(result.get(realIds[2]!)).toHaveLength(1);
    expect(result.get(realIds[0]!)![0]!.byline.id).toBe(byline.id);
  });

  it("getContentBylinesMany does not duplicate credits for repeated content IDs", async () => {
    const byline = await bylineRepo.create({
      slug: "duplicate-batch-author",
      displayName: "Duplicate Batch Author",
    });

    const content = await contentRepo.create({
      type: "post",
      slug: "duplicate-batch-post",
      data: { title: "Duplicate Batch Post" },
    });
    await bylineRepo.setContentBylines("post", content.id, [{ bylineId: byline.id }]);

    const ids: string[] = [];
    for (let i = 0; i < SQL_BATCH_SIZE + 10; i++) {
      ids.push(`fake-id-${i}`);
    }
    ids[0] = content.id;
    ids[SQL_BATCH_SIZE + 5] = content.id;

    const result = await bylineRepo.getContentBylinesMany("post", ids);

    expect(result.get(content.id)).toHaveLength(1);
    expect(result.get(content.id)?.[0]?.byline.id).toBe(byline.id);
  });

  it("findByUserIds handles more IDs than SQL_BATCH_SIZE", async () => {
    // Create a real user so the FK constraint is satisfied
    const userId = "user-batch-test";
    await db
      .insertInto("users" as any)
      .values({ id: userId, email: "batch@test.com", name: "Batch", role: 50 })
      .execute();

    const byline = await bylineRepo.create({
      slug: "user-batch",
      displayName: "User Batch",
      userId,
    });

    // Build a user ID list larger than SQL_BATCH_SIZE
    const userIds: string[] = [];
    for (let i = 0; i < SQL_BATCH_SIZE + 10; i++) {
      userIds.push(`user-fake-${i}`);
    }
    userIds[SQL_BATCH_SIZE + 5] = userId;

    const result = await bylineRepo.findByUserIds(userIds);

    expect(result.size).toBe(1);
    expect(result.get(userId)?.id).toBe(byline.id);
  });

  it("deletes byline, removes links, and nulls primary_byline_id", async () => {
    const byline = await bylineRepo.create({
      slug: "delete-me",
      displayName: "Delete Me",
    });

    const content = await contentRepo.create({
      type: "post",
      slug: "delete-byline-post",
      data: { title: "Delete Byline" },
    });

    await bylineRepo.setContentBylines("post", content.id, [{ bylineId: byline.id }]);

    const deleted = await bylineRepo.delete(byline.id);
    expect(deleted).toBe(true);

    const unresolved = await bylineRepo.getContentBylines("post", content.id);
    expect(unresolved).toHaveLength(0);

    const refreshed = await contentRepo.findById("post", content.id);
    expect(refreshed?.primaryBylineId).toBeNull();
  });
});
611
packages/core/tests/unit/database/repositories/content.test.ts
Normal file
@@ -0,0 +1,611 @@
import type { Kysely } from "kysely";
import { describe, it, expect, beforeEach, afterEach } from "vitest";

import { ContentRepository } from "../../../../src/database/repositories/content.js";
import { RevisionRepository } from "../../../../src/database/repositories/revision.js";
import { EmDashValidationError } from "../../../../src/database/repositories/types.js";
import type { Database } from "../../../../src/database/types.js";
import { createPostFixture, createPageFixture } from "../../../utils/fixtures.js";
import { setupTestDatabaseWithCollections, teardownTestDatabase } from "../../../utils/test-db.js";

// Regex patterns for ID validation
const ULID_FORMAT_REGEX = /^[0-9A-Z]+$/i;

describe("ContentRepository", () => {
  let db: Kysely<Database>;
  let repo: ContentRepository;

  beforeEach(async () => {
    db = await setupTestDatabaseWithCollections();
    repo = new ContentRepository(db);
  });

  afterEach(async () => {
    await teardownTestDatabase(db);
  });

  describe("create()", () => {
    it("should create content with valid data", async () => {
      const input = createPostFixture();
      const result = await repo.create(input);

      expect(result).toBeDefined();
      expect(result.id).toBeTruthy();
      expect(result.type).toBe("post");
      expect(result.slug).toBe("hello-world");
      expect(result.status).toBe("draft");
      expect(result.data).toEqual(input.data);
    });

    it("should generate ULID for ID", async () => {
      const input = createPostFixture();
      const result = await repo.create(input);

      // ULID is 26 characters long
      expect(result.id).toHaveLength(26);
      // ULID starts with timestamp (base32) - should be alphanumeric
      expect(result.id).toMatch(ULID_FORMAT_REGEX);
    });

    it("should set default status to draft", async () => {
      const input = createPostFixture();
      delete (input as any).status;

      const result = await repo.create(input);
      expect(result.status).toBe("draft");
    });

    it("should throw validation error when type is missing", async () => {
      const input = createPostFixture();
      delete (input as any).type;

      await expect(repo.create(input)).rejects.toThrow(EmDashValidationError);
    });

    it("should allow creating content without slug", async () => {
      const input = createPostFixture();
      delete (input as any).slug;

      const result = await repo.create(input);
      expect(result.slug).toBeNull();
    });

    it("should set createdAt and updatedAt timestamps", async () => {
      const input = createPostFixture();
      const result = await repo.create(input);

      expect(result.createdAt).toBeTruthy();
      expect(result.updatedAt).toBeTruthy();
    });

    it("should persist primaryBylineId on create", async () => {
      const result = await repo.create(
        createPostFixture({
          slug: "with-primary-byline",
          primaryBylineId: "byline_1",
        }),
      );

      expect(result.primaryBylineId).toBe("byline_1");
    });
  });

  describe("findById()", () => {
    it("should return content by ID", async () => {
      const input = createPostFixture();
      const created = await repo.create(input);

      const found = await repo.findById("post", created.id);

      expect(found).toBeDefined();
      expect(found?.id).toBe(created.id);
      expect(found?.data).toEqual(created.data);
    });

    it("should return null for non-existent ID", async () => {
      const found = await repo.findById("post", "01J9FAKE0000000000000000");

      expect(found).toBeNull();
    });

    it("should exclude soft-deleted content", async () => {
      const input = createPostFixture();
      const created = await repo.create(input);
      await repo.delete("post", created.id);

      const found = await repo.findById("post", created.id);

      expect(found).toBeNull();
    });

    it("should not return content of wrong type", async () => {
      const input = createPostFixture();
      const created = await repo.create(input);

      const found = await repo.findById("page", created.id);

      expect(found).toBeNull();
    });
  });

  describe("findBySlug()", () => {
    it("should return content by slug", async () => {
      const input = createPostFixture({ slug: "test-slug" });
      const created = await repo.create(input);

      const found = await repo.findBySlug("post", "test-slug");

      expect(found).toBeDefined();
      expect(found?.id).toBe(created.id);
      expect(found?.slug).toBe("test-slug");
    });

    it("should return null for non-existent slug", async () => {
      const found = await repo.findBySlug("post", "non-existent");

      expect(found).toBeNull();
    });

    it("should not return content of wrong type", async () => {
      const input = createPostFixture({ slug: "test-slug" });
      await repo.create(input);

      const found = await repo.findBySlug("page", "test-slug");

      expect(found).toBeNull();
    });
  });

  describe("findMany()", () => {
    it("should return all content of specified type", async () => {
      await repo.create(createPostFixture({ slug: "post-1" }));
      await repo.create(createPostFixture({ slug: "post-2" }));
      await repo.create(createPageFixture({ slug: "page-1" }));

      const result = await repo.findMany("post");

      expect(result.items).toHaveLength(2);
      expect(result.items.every((item) => item.type === "post")).toBe(true);
    });

    it("should filter by status", async () => {
      await repo.create(createPostFixture({ slug: "draft", status: "draft" }));
      await repo.create(createPostFixture({ slug: "published", status: "published" }));

      const result = await repo.findMany("post", {
        where: { status: "published" },
      });

      expect(result.items).toHaveLength(1);
      expect(result.items[0].status).toBe("published");
    });

    it("should filter by authorId", async () => {
      await repo.create(createPostFixture({ slug: "author1", authorId: "user1" }));
      await repo.create(createPostFixture({ slug: "author2", authorId: "user2" }));

      const result = await repo.findMany("post", {
        where: { authorId: "user1" },
      });

      expect(result.items).toHaveLength(1);
      expect(result.items[0].authorId).toBe("user1");
    });

    it("should support cursor pagination", async () => {
      // Create multiple posts
      for (let i = 1; i <= 5; i++) {
        await repo.create(createPostFixture({ slug: `post-${i}` }));
      }

      // First page
      const page1 = await repo.findMany("post", { limit: 2 });
      expect(page1.items).toHaveLength(2);
      expect(page1.nextCursor).toBeTruthy();

      // Second page
      const page2 = await repo.findMany("post", {
        limit: 2,
        cursor: page1.nextCursor,
      });
      expect(page2.items).toHaveLength(2);
      expect(page2.nextCursor).toBeTruthy();

      // Verify no overlap
      const page1Ids = page1.items.map((i) => i.id);
      const page2Ids = page2.items.map((i) => i.id);
      expect(page1Ids).not.toContain(page2Ids[0]);
    });

    it("should support ordering", async () => {
      // Create posts with specific dates
      const post1 = await repo.create(createPostFixture({ slug: "old-post" }));
      // Wait a bit to ensure different timestamps
      await new Promise((resolve) => setTimeout(resolve, 10));
      const post2 = await repo.create(createPostFixture({ slug: "new-post" }));

      // Default order (desc by createdAt)
      const resultDesc = await repo.findMany("post", {
        orderBy: { field: "createdAt", direction: "desc" },
      });
      expect(resultDesc.items[0].id).toBe(post2.id);

      // Ascending order
      const resultAsc = await repo.findMany("post", {
        orderBy: { field: "createdAt", direction: "asc" },
      });
      expect(resultAsc.items[0].id).toBe(post1.id);
    });

    it("should respect limit", async () => {
      for (let i = 1; i <= 10; i++) {
        await repo.create(createPostFixture({ slug: `post-${i}` }));
      }

      const result = await repo.findMany("post", { limit: 5 });

      expect(result.items).toHaveLength(5);
    });

    it("should exclude soft-deleted content", async () => {
      const post1 = await repo.create(createPostFixture({ slug: "post-1" }));
      await repo.create(createPostFixture({ slug: "post-2" }));
      await repo.delete("post", post1.id);

      const result = await repo.findMany("post");

      expect(result.items).toHaveLength(1);
      expect(result.items[0].slug).toBe("post-2");
    });
  });

  describe("update()", () => {
    it("should update content data", async () => {
      const input = createPostFixture();
      const created = await repo.create(input);

      const updated = await repo.update("post", created.id, {
        data: { title: "Updated Title", content: [] },
      });

      expect(updated.data).toEqual({ title: "Updated Title", content: [] });
    });

    it("should update status", async () => {
      const input = createPostFixture();
      const created = await repo.create(input);

      const updated = await repo.update("post", created.id, {
        status: "published",
      });

      expect(updated.status).toBe("published");
    });

    it("should update slug", async () => {
      const input = createPostFixture();
      const created = await repo.create(input);

      const updated = await repo.update("post", created.id, {
        slug: "new-slug",
      });

      expect(updated.slug).toBe("new-slug");
    });

    it("should update publishedAt timestamp", async () => {
      const input = createPostFixture();
      const created = await repo.create(input);

      const publishedAt = new Date().toISOString();
      const updated = await repo.update("post", created.id, {
        publishedAt,
      });

      expect(updated.publishedAt).toBe(publishedAt);
    });

    it("should update updatedAt timestamp automatically", async () => {
      const input = createPostFixture();
      const created = await repo.create(input);

      // Wait a bit to ensure different timestamp
      await new Promise((resolve) => setTimeout(resolve, 10));

      const updated = await repo.update("post", created.id, {
        data: { title: "Updated" },
      });

      expect(updated.updatedAt).not.toBe(created.updatedAt);
    });

    it("should throw error for non-existent content", async () => {
      await expect(repo.update("post", "01J9FAKE0000000000000000", { data: {} })).rejects.toThrow(
        "Content not found",
      );
    });

    it("should update primaryBylineId", async () => {
      const created = await repo.create(
        createPostFixture({
          slug: "update-primary-byline",
          primaryBylineId: "byline_old",
        }),
      );

      const updated = await repo.update("post", created.id, {
        primaryBylineId: "byline_new",
      });

      expect(updated.primaryBylineId).toBe("byline_new");
    });
  });

  describe("delete()", () => {
    it("should soft delete content", async () => {
      const input = createPostFixture();
      const created = await repo.create(input);

      const result = await repo.delete("post", created.id);

      expect(result).toBe(true);

      // Verify content is not returned by findById
      const found = await repo.findById("post", created.id);
      expect(found).toBeNull();
    });

    it("should return false for non-existent content", async () => {
      const result = await repo.delete("post", "01J9FAKE0000000000000000");

      expect(result).toBe(false);
    });

    it("should return false when deleting already deleted content", async () => {
      const input = createPostFixture();
      const created = await repo.create(input);
      await repo.delete("post", created.id);

      const result = await repo.delete("post", created.id);

      expect(result).toBe(false);
    });
  });

  describe("count()", () => {
    it("should count all content of specified type", async () => {
      await repo.create(createPostFixture({ slug: "post-1" }));
      await repo.create(createPostFixture({ slug: "post-2" }));
      await repo.create(createPageFixture({ slug: "page-1" }));

      const count = await repo.count("post");

      expect(count).toBe(2);
    });

    it("should count with status filter", async () => {
      await repo.create(createPostFixture({ slug: "draft", status: "draft" }));
      await repo.create(createPostFixture({ slug: "published", status: "published" }));

      const count = await repo.count("post", { status: "published" });

      expect(count).toBe(1);
    });

    it("should count with authorId filter", async () => {
      await repo.create(createPostFixture({ slug: "author1", authorId: "user1" }));
      await repo.create(createPostFixture({ slug: "author2", authorId: "user2" }));

      const count = await repo.count("post", { authorId: "user1" });

      expect(count).toBe(1);
    });

    it("should exclude soft-deleted content", async () => {
      const post1 = await repo.create(createPostFixture({ slug: "post-1" }));
      await repo.create(createPostFixture({ slug: "post-2" }));
      await repo.delete("post", post1.id);

      const count = await repo.count("post");

      expect(count).toBe(1);
    });
  });

  describe("schedule()", () => {
    it("should set status to 'scheduled' for draft posts", async () => {
      const post = await repo.create(createPostFixture());
      const future = new Date(Date.now() + 86_400_000).toISOString();

      const updated = await repo.schedule("post", post.id, future);

      expect(updated.status).toBe("scheduled");
      expect(updated.scheduledAt).toBe(future);
    });

    it("should keep status 'published' for published posts", async () => {
      const post = await repo.create(createPostFixture());
      await repo.publish("post", post.id);
      const future = new Date(Date.now() + 86_400_000).toISOString();

      const updated = await repo.schedule("post", post.id, future);

      expect(updated.status).toBe("published");
      expect(updated.scheduledAt).toBe(future);
    });

    it("should reject dates in the past", async () => {
      const post = await repo.create(createPostFixture());
      const past = new Date(Date.now() - 86_400_000).toISOString();

      await expect(repo.schedule("post", post.id, past)).rejects.toThrow(EmDashValidationError);
    });

    it("should reject invalid date strings", async () => {
      const post = await repo.create(createPostFixture());

      await expect(repo.schedule("post", post.id, "not-a-date")).rejects.toThrow(
        EmDashValidationError,
      );
    });
  });

  describe("unschedule()", () => {
    it("should revert scheduled draft to 'draft'", async () => {
      const post = await repo.create(createPostFixture());
      const future = new Date(Date.now() + 86_400_000).toISOString();
      await repo.schedule("post", post.id, future);

      const updated = await repo.unschedule("post", post.id);

      expect(updated.status).toBe("draft");
      expect(updated.scheduledAt).toBeNull();
    });

    it("should keep published posts as 'published'", async () => {
      const post = await repo.create(createPostFixture());
      await repo.publish("post", post.id);
      const future = new Date(Date.now() + 86_400_000).toISOString();
      await repo.schedule("post", post.id, future);

      const updated = await repo.unschedule("post", post.id);

      expect(updated.status).toBe("published");
      expect(updated.scheduledAt).toBeNull();
    });
  });

  describe("setDraftRevision()", () => {
    it("sets the draft_revision_id so publish() picks it up", async () => {
      const post = await repo.create(createPostFixture());
      const revisionRepo = new RevisionRepository(db);
      const draft = await revisionRepo.create({
        collection: "post",
        entryId: post.id,
        data: { ...post.data, title: "Staged for publish" },
      });

      await repo.setDraftRevision("post", post.id, draft.id);

      const afterStaging = await repo.findById("post", post.id);
      expect(afterStaging?.draftRevisionId).toBe(draft.id);

      const published = await repo.publish("post", post.id);

      expect(published.liveRevisionId).toBe(draft.id);
      expect(published.draftRevisionId).toBeNull();
    });

    it("throws when the content item does not exist", async () => {
      await expect(
        repo.setDraftRevision("post", "01K0000000000000000000000", "01K0000000000000000000001"),
      ).rejects.toThrow(EmDashValidationError);
    });

    it("throws when the revision does not exist", async () => {
      const post = await repo.create(createPostFixture());
      await expect(
        repo.setDraftRevision("post", post.id, "01K0000000000000000000001"),
      ).rejects.toThrow(EmDashValidationError);
    });

    it("throws when the revision belongs to a different content item", async () => {
      const post1 = await repo.create(createPostFixture({ slug: "one" }));
      const post2 = await repo.create(createPostFixture({ slug: "two" }));
      const revisionRepo = new RevisionRepository(db);
      const draft = await revisionRepo.create({
        collection: "post",
        entryId: post2.id,
        data: post2.data,
      });

      await expect(repo.setDraftRevision("post", post1.id, draft.id)).rejects.toThrow(
        EmDashValidationError,
      );
    });
  });

  describe("publish() clears schedule", () => {
    it("should clear scheduled_at when publishing a scheduled draft", async () => {
      const post = await repo.create(createPostFixture());
      const future = new Date(Date.now() + 86_400_000).toISOString();
      await repo.schedule("post", post.id, future);

      const published = await repo.publish("post", post.id);

      expect(published.status).toBe("published");
      expect(published.scheduledAt).toBeNull();
    });

    it("should clear scheduled_at when publishing a published post with scheduled changes", async () => {
      const post = await repo.create(createPostFixture());
      await repo.publish("post", post.id);
      const future = new Date(Date.now() + 86_400_000).toISOString();
      await repo.schedule("post", post.id, future);

      const republished = await repo.publish("post", post.id);

      expect(republished.status).toBe("published");
      expect(republished.scheduledAt).toBeNull();
    });
  });

  describe("findReadyToPublish()", () => {
    it("should find scheduled drafts past their time", async () => {
      const post = await repo.create(createPostFixture());
      // Schedule in the past by directly updating (schedule() rejects past dates)
      const past = new Date(Date.now() - 60_000).toISOString();
      await repo.update("post", post.id, { status: "scheduled", scheduledAt: past });

      const ready = await repo.findReadyToPublish("post");

      expect(ready).toHaveLength(1);
      expect(ready[0]!.id).toBe(post.id);
    });

    it("should find published posts with past scheduled_at", async () => {
      const post = await repo.create(createPostFixture());
      await repo.publish("post", post.id);
      // Set scheduled_at in the past directly
      const past = new Date(Date.now() - 60_000).toISOString();
      await repo.update("post", post.id, { scheduledAt: past });

      const ready = await repo.findReadyToPublish("post");

      expect(ready).toHaveLength(1);
      expect(ready[0]!.id).toBe(post.id);
    });

    it("should not include items with future scheduled_at", async () => {
      const post = await repo.create(createPostFixture());
      const future = new Date(Date.now() + 86_400_000).toISOString();
      await repo.schedule("post", post.id, future);

      const ready = await repo.findReadyToPublish("post");

      expect(ready).toHaveLength(0);
    });
  });

  describe("countScheduled()", () => {
    it("should count both scheduled drafts and published posts with scheduled_at", async () => {
      // Draft with schedule
      const draft = await repo.create(createPostFixture({ slug: "draft-scheduled" }));
      const future1 = new Date(Date.now() + 86_400_000).toISOString();
      await repo.schedule("post", draft.id, future1);

      // Published with schedule
      const pub = await repo.create(createPostFixture({ slug: "pub-scheduled" }));
      await repo.publish("post", pub.id);
      const future2 = new Date(Date.now() + 172_800_000).toISOString();
      await repo.schedule("post", pub.id, future2);

      // Unscheduled draft (should not be counted)
      await repo.create(createPostFixture({ slug: "plain-draft" }));

      const count = await repo.countScheduled("post");

      expect(count).toBe(2);
    });
  });
});
@@ -0,0 +1,60 @@
import { describe, expect, it } from "vitest";

import {
  decodeCursor,
  encodeCursor,
  InvalidCursorError,
} from "../../../../src/database/repositories/types.js";

describe("decodeCursor", () => {
  it("round-trips a valid cursor", () => {
    const cursor = encodeCursor("2024-01-01", "01ABC");
    const decoded = decodeCursor(cursor);
    expect(decoded).toEqual({ orderValue: "2024-01-01", id: "01ABC" });
  });

  it("throws InvalidCursorError on empty string", () => {
    expect(() => decodeCursor("")).toThrow(InvalidCursorError);
  });

  it("throws InvalidCursorError on non-base64 input", () => {
    expect(() => decodeCursor("not-base64-!!!")).toThrow(InvalidCursorError);
  });

  it("throws InvalidCursorError on base64 of malformed JSON", () => {
    const bad = Buffer.from("{not valid json").toString("base64");
    expect(() => decodeCursor(bad)).toThrow(InvalidCursorError);
  });

  it("throws InvalidCursorError on base64 JSON missing required fields", () => {
    const bad = Buffer.from(JSON.stringify({ wrong: "shape" })).toString("base64");
    expect(() => decodeCursor(bad)).toThrow(InvalidCursorError);
  });

  it("throws InvalidCursorError when id is not a string", () => {
    const bad = Buffer.from(JSON.stringify({ orderValue: "x", id: 42 })).toString("base64");
    expect(() => decodeCursor(bad)).toThrow(InvalidCursorError);
  });

  it("rejects oversized cursors before attempting to decode (DoS guard)", () => {
    // MAX_CURSOR_LENGTH is 4096 inside the decoder. The MCP/REST schemas
    // cap earlier (2048), but the decoder is the last line of defense
    // for any caller that bypasses the schemas. A pre-decode rejection
    // avoids allocating O(N) bytes for `decodeBase64` on a hostile
    // input.
    const huge = "A".repeat(5000);
    expect(() => decodeCursor(huge)).toThrow(InvalidCursorError);
  });

  it("error message truncates very long cursors", () => {
    const longish = "A".repeat(200);
    try {
      decodeCursor(longish);
      expect.fail("expected throw");
    } catch (error) {
      expect(error).toBeInstanceOf(InvalidCursorError);
      // The truncation cap is 50; the message itself stays short.
      expect((error as Error).message.length).toBeLessThan(120);
    }
  });
});
114
packages/core/tests/unit/database/repositories/seo.test.ts
Normal file
@@ -0,0 +1,114 @@
import type { Kysely } from "kysely";
import { describe, it, expect, beforeEach, afterEach } from "vitest";

import { ContentRepository } from "../../../../src/database/repositories/content.js";
import { SeoRepository } from "../../../../src/database/repositories/seo.js";
import type { Database } from "../../../../src/database/types.js";
import { SQL_BATCH_SIZE } from "../../../../src/utils/chunks.js";
import { setupTestDatabaseWithCollections, teardownTestDatabase } from "../../../utils/test-db.js";

describe("SeoRepository", () => {
  let db: Kysely<Database>;
  let seoRepo: SeoRepository;
  let contentRepo: ContentRepository;

  beforeEach(async () => {
    db = await setupTestDatabaseWithCollections();
    // Enable SEO on the post collection — createCollection defaults has_seo to 0.
    await db
      .updateTable("_emdash_collections")
      .set({ has_seo: 1 })
      .where("slug", "=", "post")
      .execute();
    seoRepo = new SeoRepository(db);
    contentRepo = new ContentRepository(db);
  });

  afterEach(async () => {
    await teardownTestDatabase(db);
  });

  it("getMany handles more IDs than SQL_BATCH_SIZE", async () => {
    // Create a few real content entries with SEO rows
    const realIds: string[] = [];
    for (let i = 0; i < 3; i++) {
      const content = await contentRepo.create({
        type: "post",
        slug: `seo-batch-post-${i}`,
        data: { title: `SEO Batch Post ${i}` },
      });
      await seoRepo.upsert("post", content.id, {
        title: `SEO Title ${i}`,
        description: `SEO Description ${i}`,
      });
      realIds.push(content.id);
    }

    // Build an ID list larger than SQL_BATCH_SIZE with real IDs spread across chunks
    const ids: string[] = [];
    for (let i = 0; i < SQL_BATCH_SIZE + 10; i++) {
      ids.push(`fake-id-${i}`);
    }
    ids[0] = realIds[0]!;
    ids[SQL_BATCH_SIZE - 1] = realIds[1]!;
    ids[SQL_BATCH_SIZE + 5] = realIds[2]!;

    const result = await seoRepo.getMany("post", ids);

    // All input IDs should be present in the result Map
    expect(result.size).toBe(ids.length);

    // Real IDs should have their SEO data resolved
    expect(result.get(realIds[0]!)?.title).toBe("SEO Title 0");
    expect(result.get(realIds[1]!)?.title).toBe("SEO Title 1");
    expect(result.get(realIds[2]!)?.title).toBe("SEO Title 2");

    // Fake IDs should get default values
    expect(result.get("fake-id-5")?.title).toBeNull();
    expect(result.get("fake-id-5")?.description).toBeNull();
    expect(result.get("fake-id-5")?.noIndex).toBe(false);
  });

  it("getMany returns defaults for every input id when no rows exist", async () => {
    const ids: string[] = [];
    for (let i = 0; i < SQL_BATCH_SIZE + 10; i++) {
      ids.push(`missing-id-${i}`);
    }

    const result = await seoRepo.getMany("post", ids);

    expect(result.size).toBe(ids.length);
    for (const id of ids) {
      const entry = result.get(id);
      expect(entry).toBeDefined();
      expect(entry?.title).toBeNull();
      expect(entry?.description).toBeNull();
      expect(entry?.image).toBeNull();
      expect(entry?.canonical).toBeNull();
      expect(entry?.noIndex).toBe(false);
    }
  });

  it("getMany deduplicates repeated content IDs without duplicate rows", async () => {
    const content = await contentRepo.create({
      type: "post",
      slug: "seo-duplicate-post",
      data: { title: "SEO Duplicate" },
    });
    await seoRepo.upsert("post", content.id, {
      title: "Duplicate SEO",
    });

    const ids: string[] = [];
    for (let i = 0; i < SQL_BATCH_SIZE + 10; i++) {
      ids.push(`fake-id-${i}`);
    }
    ids[0] = content.id;
    ids[SQL_BATCH_SIZE + 5] = content.id;

    const result = await seoRepo.getMany("post", ids);

    // The real entry should resolve to its SEO row regardless of the duplicate input
    expect(result.get(content.id)?.title).toBe("Duplicate SEO");
  });
});