Emdash source with visual editor image upload fix
Fixes: 1. media.ts: wrap placeholder generation in try-catch 2. toolbar.ts: check r.ok, display error message in popover
This commit is contained in:
360
packages/core/tests/unit/cli/bundle-utils.test.ts
Normal file
360
packages/core/tests/unit/cli/bundle-utils.test.ts
Normal file
@@ -0,0 +1,360 @@
|
||||
/**
|
||||
* Tests for bundle utility functions.
|
||||
*
|
||||
* Focuses on the functions where bugs would be non-obvious:
|
||||
* - Tarball round-trip (custom tar implementation)
|
||||
* - Manifest extraction (shape transformation, function stripping)
|
||||
* - Source entry resolution (path mapping logic)
|
||||
* - Node.js built-in detection (regex against bundled output)
|
||||
*/
|
||||
|
||||
import { execSync } from "node:child_process";
|
||||
import { mkdtemp, rm, writeFile, mkdir, readFile } from "node:fs/promises";
|
||||
import { tmpdir } from "node:os";
|
||||
import { join } from "node:path";
|
||||
|
||||
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
|
||||
|
||||
import {
|
||||
extractManifest,
|
||||
createTarball,
|
||||
resolveSourceEntry,
|
||||
findNodeBuiltinImports,
|
||||
findBuildOutput,
|
||||
findSourceExports,
|
||||
} from "../../../src/cli/commands/bundle-utils.js";
|
||||
import type { ResolvedPlugin } from "../../../src/plugins/types.js";
|
||||
|
||||
function mockPlugin(overrides: Partial<ResolvedPlugin> = {}): ResolvedPlugin {
|
||||
return {
|
||||
id: "test-plugin",
|
||||
version: "1.0.0",
|
||||
capabilities: [],
|
||||
allowedHosts: [],
|
||||
storage: {},
|
||||
hooks: {},
|
||||
routes: {},
|
||||
admin: { pages: [], widgets: [] },
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
// extractManifest: transforms a live ResolvedPlugin (whose hooks/routes
// carry handler functions) into a JSON-safe manifest shape for bundling.
// These tests pin the shape transformation and the function stripping.
describe("extractManifest", () => {
  it("converts hooks from handler objects to name array", () => {
    // Two hooks: one with entirely default metadata, one with a
    // non-default priority, to exercise both output forms.
    const plugin = mockPlugin({
      hooks: {
        "content:beforeSave": {
          handler: vi.fn(),
          priority: 100,
          timeout: 5000,
          dependencies: [],
          errorPolicy: "abort",
          pluginId: "test",
          exclusive: false,
        },
        "media:afterUpload": {
          handler: vi.fn(),
          priority: 50,
          timeout: 5000,
          dependencies: [],
          errorPolicy: "abort",
          pluginId: "test",
          exclusive: false,
        },
      },
    });

    const manifest = extractManifest(plugin);
    // content:beforeSave has all defaults → plain string
    // media:afterUpload has non-default priority → structured entry
    expect(manifest.hooks).toEqual([
      "content:beforeSave",
      { name: "media:afterUpload", priority: 50 },
    ]);
  });

  it("converts routes from handler objects to name array", () => {
    const plugin = mockPlugin({
      routes: {
        sync: { handler: vi.fn() },
        webhook: { handler: vi.fn() },
      },
    });

    const manifest = extractManifest(plugin);
    // Route handlers are dropped; only the route names survive.
    expect(manifest.routes).toEqual(["sync", "webhook"]);
  });

  it("strips admin.entry (host-only concern, not in bundles)", () => {
    const plugin = mockPlugin({
      admin: {
        entry: "@test/plugin/admin",
        settingsSchema: { apiKey: { type: "string", label: "Key" } as any },
        pages: [{ id: "settings", title: "Settings" }],
        widgets: [],
      },
    });

    const manifest = extractManifest(plugin);
    // entry is removed, but the rest of the admin config is preserved.
    expect((manifest.admin as any).entry).toBeUndefined();
    expect(manifest.admin.settingsSchema).toBeDefined();
    expect(manifest.admin.pages).toHaveLength(1);
  });

  it("result is JSON-serializable (no functions survive)", () => {
    const plugin = mockPlugin({
      hooks: {
        "content:beforeSave": {
          handler: vi.fn(),
          priority: 100,
          timeout: 5000,
          dependencies: [],
          errorPolicy: "abort",
          pluginId: "test",
          exclusive: false,
        },
      },
      routes: { sync: { handler: vi.fn() } },
    });

    const manifest = extractManifest(plugin);
    // Round-trip through JSON: would throw/lose data if any function
    // leaked into the manifest.
    const json = JSON.stringify(manifest);
    const parsed = JSON.parse(json);

    expect(parsed.hooks).toEqual(["content:beforeSave"]);
    expect(parsed.routes).toEqual(["sync"]);
  });
});
|
||||
|
||||
// createTarball: the project's custom tar+gzip writer. These tests
// cross-check the output against the system `tar` binary, so a format
// bug surfaces as a listing/extract failure instead of silently
// round-tripping through the same (possibly broken) code.
// NOTE(review): requires `tar` on PATH — presumably CI guarantees this.
describe("createTarball", () => {
  let tempDir: string;

  beforeEach(async () => {
    tempDir = await mkdtemp(join(tmpdir(), "emdash-tar-test-"));
  });

  afterEach(async () => {
    await rm(tempDir, { recursive: true, force: true });
  });

  it("produces a tarball that system tar can list", async () => {
    const srcDir = join(tempDir, "src");
    await mkdir(srcDir);
    await writeFile(join(srcDir, "manifest.json"), '{"id":"test"}');
    await writeFile(join(srcDir, "backend.js"), "export default {}");

    const out = join(tempDir, "out.tar.gz");
    await createTarball(srcDir, out);

    // `tar tzf` lists entry names; a malformed header would make it fail.
    const listing = execSync(`tar tzf "${out}"`, { encoding: "utf-8" });
    const files = listing.trim().split("\n").toSorted();
    expect(files).toContain("manifest.json");
    expect(files).toContain("backend.js");
  });

  it("preserves file content through pack/unpack", async () => {
    const srcDir = join(tempDir, "src");
    await mkdir(srcDir);
    const content = JSON.stringify({ id: "round-trip", version: "2.0.0" });
    await writeFile(join(srcDir, "manifest.json"), content);

    const out = join(tempDir, "out.tar.gz");
    await createTarball(srcDir, out);

    // Extract with system tar and compare byte-for-byte.
    const extractDir = join(tempDir, "extract");
    await mkdir(extractDir);
    execSync(`tar xzf "${out}" -C "${extractDir}"`);

    expect(await readFile(join(extractDir, "manifest.json"), "utf-8")).toBe(content);
  });

  it("handles nested directories (screenshots/)", async () => {
    const srcDir = join(tempDir, "src");
    await mkdir(join(srcDir, "screenshots"), { recursive: true });
    await writeFile(join(srcDir, "manifest.json"), "{}");
    await writeFile(join(srcDir, "screenshots", "shot1.png"), "fake");

    const out = join(tempDir, "out.tar.gz");
    await createTarball(srcDir, out);

    // Nested entries must be recorded with their relative path.
    const listing = execSync(`tar tzf "${out}"`, { encoding: "utf-8" });
    expect(listing).toContain("screenshots/shot1.png");
  });

  it("handles binary content without corruption", async () => {
    const srcDir = join(tempDir, "src");
    await mkdir(srcDir);
    // Write bytes that would break text-mode handling
    // (NUL, 0xFF, and bytes matching the PNG signature).
    const binary = Buffer.from([0x00, 0xff, 0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a]);
    await writeFile(join(srcDir, "icon.png"), binary);

    const out = join(tempDir, "out.tar.gz");
    await createTarball(srcDir, out);

    const extractDir = join(tempDir, "extract");
    await mkdir(extractDir);
    execSync(`tar xzf "${out}" -C "${extractDir}"`);

    // Buffer.equals is a byte-wise comparison — any mangling fails here.
    const extracted = await readFile(join(extractDir, "icon.png"));
    expect(extracted.equals(binary)).toBe(true);
  });
});
|
||||
|
||||
describe("resolveSourceEntry", () => {
|
||||
let tempDir: string;
|
||||
|
||||
beforeEach(async () => {
|
||||
tempDir = await mkdtemp(join(tmpdir(), "emdash-resolve-test-"));
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await rm(tempDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it("maps ./dist/index.mjs → src/index.ts", async () => {
|
||||
await mkdir(join(tempDir, "src"), { recursive: true });
|
||||
await writeFile(join(tempDir, "src", "index.ts"), "");
|
||||
|
||||
const result = await resolveSourceEntry(tempDir, "./dist/index.mjs");
|
||||
expect(result).toBe(join(tempDir, "src", "index.ts"));
|
||||
});
|
||||
|
||||
it("maps ./dist/index.js → src/index.ts", async () => {
|
||||
await mkdir(join(tempDir, "src"), { recursive: true });
|
||||
await writeFile(join(tempDir, "src", "index.ts"), "");
|
||||
|
||||
const result = await resolveSourceEntry(tempDir, "./dist/index.js");
|
||||
expect(result).toBe(join(tempDir, "src", "index.ts"));
|
||||
});
|
||||
|
||||
it("falls back to .tsx when .ts doesn't exist", async () => {
|
||||
await mkdir(join(tempDir, "src"), { recursive: true });
|
||||
await writeFile(join(tempDir, "src", "index.tsx"), "");
|
||||
|
||||
const result = await resolveSourceEntry(tempDir, "./dist/index.mjs");
|
||||
expect(result).toBe(join(tempDir, "src", "index.tsx"));
|
||||
});
|
||||
|
||||
it("returns the direct path if it already exists", async () => {
|
||||
await mkdir(join(tempDir, "src"), { recursive: true });
|
||||
await writeFile(join(tempDir, "src", "index.ts"), "");
|
||||
|
||||
const result = await resolveSourceEntry(tempDir, "src/index.ts");
|
||||
expect(result).toBe(join(tempDir, "src", "index.ts"));
|
||||
});
|
||||
|
||||
it("returns undefined when nothing matches", async () => {
|
||||
const result = await resolveSourceEntry(tempDir, "./dist/missing.mjs");
|
||||
expect(result).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe("findBuildOutput", () => {
|
||||
let tempDir: string;
|
||||
|
||||
beforeEach(async () => {
|
||||
tempDir = await mkdtemp(join(tmpdir(), "emdash-build-test-"));
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await rm(tempDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it("prefers .mjs over .js", async () => {
|
||||
await writeFile(join(tempDir, "index.mjs"), "");
|
||||
await writeFile(join(tempDir, "index.js"), "");
|
||||
|
||||
expect(await findBuildOutput(tempDir, "index")).toBe(join(tempDir, "index.mjs"));
|
||||
});
|
||||
|
||||
it("falls back through .js then .cjs", async () => {
|
||||
await writeFile(join(tempDir, "index.cjs"), "");
|
||||
expect(await findBuildOutput(tempDir, "index")).toBe(join(tempDir, "index.cjs"));
|
||||
});
|
||||
|
||||
it("returns undefined when no match", async () => {
|
||||
expect(await findBuildOutput(tempDir, "index")).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe("findNodeBuiltinImports", () => {
|
||||
it("detects require('node:fs') in bundled output", () => {
|
||||
expect(findNodeBuiltinImports(`const fs = require("node:fs");`)).toEqual(["fs"]);
|
||||
});
|
||||
|
||||
it("detects require('fs') without node: prefix", () => {
|
||||
expect(findNodeBuiltinImports(`const fs = require("fs");`)).toEqual(["fs"]);
|
||||
});
|
||||
|
||||
it("detects dynamic import('node:child_process')", () => {
|
||||
expect(findNodeBuiltinImports(`await import("node:child_process")`)).toEqual(["child_process"]);
|
||||
});
|
||||
|
||||
it("returns empty for code with no builtins", () => {
|
||||
expect(findNodeBuiltinImports(`import("emdash"); require("lodash");`)).toEqual([]);
|
||||
});
|
||||
|
||||
it("deduplicates repeated requires", () => {
|
||||
const code = `require("node:fs"); require("node:fs");`;
|
||||
expect(findNodeBuiltinImports(code)).toEqual(["fs"]);
|
||||
});
|
||||
});
|
||||
|
||||
// findSourceExports: lints a package.json `exports` map for entries that
// point at uncompiled TypeScript/JSX sources (.ts/.tsx/.mts/.cts/.jsx)
// instead of built output. Each offender is reported with its export
// path and the resolved file path.
describe("findSourceExports", () => {
  it("flags .ts exports", () => {
    const issues = findSourceExports({ ".": "./src/index.ts" });
    expect(issues).toEqual([{ exportPath: ".", resolvedPath: "./src/index.ts" }]);
  });

  it("flags .tsx exports", () => {
    const issues = findSourceExports({ "./admin": "./src/admin.tsx" });
    expect(issues).toEqual([{ exportPath: "./admin", resolvedPath: "./src/admin.tsx" }]);
  });

  it("flags .mts exports", () => {
    const issues = findSourceExports({ ".": "./src/index.mts" });
    expect(issues).toHaveLength(1);
  });

  it("flags .cts exports", () => {
    const issues = findSourceExports({ ".": "./src/index.cts" });
    expect(issues).toHaveLength(1);
  });

  it("flags .jsx exports", () => {
    const issues = findSourceExports({ ".": "./src/index.jsx" });
    expect(issues).toHaveLength(1);
  });

  it("accepts .mjs exports", () => {
    const issues = findSourceExports({ ".": "./dist/index.mjs" });
    expect(issues).toEqual([]);
  });

  it("accepts .js exports", () => {
    const issues = findSourceExports({ ".": "./dist/index.js" });
    expect(issues).toEqual([]);
  });

  it("handles conditional exports with import field", () => {
    // The `import` condition points at source; `types` (.d.mts) is fine.
    const issues = findSourceExports({
      ".": { import: "./src/index.ts", types: "./dist/index.d.mts" },
    });
    expect(issues).toEqual([{ exportPath: ".", resolvedPath: "./src/index.ts" }]);
  });

  it("accepts conditional exports pointing to built files", () => {
    const issues = findSourceExports({
      ".": { import: "./dist/index.mjs", types: "./dist/index.d.mts" },
    });
    expect(issues).toEqual([]);
  });

  it("flags multiple bad exports", () => {
    const issues = findSourceExports({
      ".": "./src/index.ts",
      "./sandbox": "./src/sandbox-entry.ts",
    });
    expect(issues).toHaveLength(2);
  });
});
|
||||
92
packages/core/tests/unit/cli/secrets-commands.test.ts
Normal file
92
packages/core/tests/unit/cli/secrets-commands.test.ts
Normal file
@@ -0,0 +1,92 @@
|
||||
/**
|
||||
* Tests for the `emdash secrets` CLI surface.
|
||||
*
|
||||
* Focuses on the file-write helper used by `secrets generate --write`,
|
||||
* which is the only piece with non-trivial logic. The command runners
|
||||
* themselves are thin wrappers around `generateEncryptionKey()` and
|
||||
* `fingerprintKey()` (covered by `tests/unit/config/secrets.test.ts`).
|
||||
*/
|
||||
|
||||
import { mkdtemp, readFile, rm, writeFile } from "node:fs/promises";
|
||||
import { tmpdir } from "node:os";
|
||||
import { join } from "node:path";
|
||||
|
||||
import { describe, it, expect, beforeEach, afterEach } from "vitest";
|
||||
|
||||
import { writeEncryptionKeyToFile } from "../../../src/cli/commands/secrets.js";
|
||||
|
||||
// writeEncryptionKeyToFile: writes EMDASH_ENCRYPTION_KEY=<key> into an
// env-style file (.dev.vars), appending, replacing, or skipping based on
// existing content and the `force` flag. Returns "wrote" or "skipped".
describe("secrets CLI: writeEncryptionKeyToFile", () => {
  let dir: string;
  let target: string;
  // Fixture keys in the expected `emdash_enc_v1_` format.
  const sample = "emdash_enc_v1_AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA";
  const sample2 = "emdash_enc_v1_BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB";

  beforeEach(async () => {
    dir = await mkdtemp(join(tmpdir(), "emdash-secrets-cli-"));
    target = join(dir, ".dev.vars");
  });

  afterEach(async () => {
    await rm(dir, { recursive: true, force: true });
  });

  it("creates a new file with a trailing newline", async () => {
    const result = writeEncryptionKeyToFile(target, sample, false);
    expect(result).toBe("wrote");
    const content = await readFile(target, "utf-8");
    expect(content).toBe(`EMDASH_ENCRYPTION_KEY=${sample}\n`);
  });

  it("appends to an existing file without clobbering other vars", async () => {
    await writeFile(target, "OTHER=value\nFOO=bar\n");
    const result = writeEncryptionKeyToFile(target, sample, false);
    expect(result).toBe("wrote");
    const content = await readFile(target, "utf-8");
    expect(content).toBe(`OTHER=value\nFOO=bar\nEMDASH_ENCRYPTION_KEY=${sample}\n`);
  });

  it("appends to a file that lacks a trailing newline", async () => {
    // The helper must insert a newline before appending, not glue the
    // new entry onto the last line.
    await writeFile(target, "OTHER=value");
    const result = writeEncryptionKeyToFile(target, sample, false);
    expect(result).toBe("wrote");
    const content = await readFile(target, "utf-8");
    expect(content).toBe(`OTHER=value\nEMDASH_ENCRYPTION_KEY=${sample}\n`);
  });

  it("refuses to overwrite an existing entry without force", async () => {
    await writeFile(target, `EMDASH_ENCRYPTION_KEY=${sample}\nOTHER=value\n`);
    const result = writeEncryptionKeyToFile(target, sample2, false);
    expect(result).toBe("skipped");
    const content = await readFile(target, "utf-8");
    // Entry untouched.
    expect(content).toBe(`EMDASH_ENCRYPTION_KEY=${sample}\nOTHER=value\n`);
  });

  it("replaces an existing entry in place when force is true", async () => {
    await writeFile(target, `OTHER=value\nEMDASH_ENCRYPTION_KEY=${sample}\nMORE=stuff\n`);
    const result = writeEncryptionKeyToFile(target, sample2, true);
    expect(result).toBe("wrote");
    const content = await readFile(target, "utf-8");
    // Other vars untouched, key replaced inline (no duplication).
    expect(content).toBe(`OTHER=value\nEMDASH_ENCRYPTION_KEY=${sample2}\nMORE=stuff\n`);
  });

  it("treats an empty-value entry as not-set and replaces it without --force", async () => {
    // Operators sometimes leave `EMDASH_ENCRYPTION_KEY=` as a placeholder.
    // A skip in that case would be hostile — they actively want a value.
    await writeFile(target, `OTHER=value\nEMDASH_ENCRYPTION_KEY=\nMORE=stuff\n`);
    const result = writeEncryptionKeyToFile(target, sample, false);
    expect(result).toBe("wrote");
    const content = await readFile(target, "utf-8");
    expect(content).toBe(`OTHER=value\nEMDASH_ENCRYPTION_KEY=${sample}\nMORE=stuff\n`);
  });

  it("always ends with a trailing newline, even when replacing in-place in a file without one", async () => {
    await writeFile(target, `OTHER=value\nEMDASH_ENCRYPTION_KEY=${sample}`);
    const result = writeEncryptionKeyToFile(target, sample2, true);
    expect(result).toBe("wrote");
    const content = await readFile(target, "utf-8");
    expect(content.endsWith("\n")).toBe(true);
    expect(content).toBe(`OTHER=value\nEMDASH_ENCRYPTION_KEY=${sample2}\n`);
  });
});
|
||||
289
packages/core/tests/unit/cli/seed-commands.test.ts
Normal file
289
packages/core/tests/unit/cli/seed-commands.test.ts
Normal file
@@ -0,0 +1,289 @@
|
||||
/**
|
||||
* Tests for CLI seed commands
|
||||
*/
|
||||
|
||||
import { mkdtemp, rm, writeFile, mkdir, readFile } from "node:fs/promises";
|
||||
import { tmpdir } from "node:os";
|
||||
import { join } from "node:path";
|
||||
|
||||
import { describe, it, expect, beforeEach, afterEach } from "vitest";
|
||||
|
||||
import { createDatabase } from "../../../src/database/connection.js";
|
||||
import { runMigrations } from "../../../src/database/migrations/runner.js";
|
||||
import { applySeed } from "../../../src/seed/apply.js";
|
||||
import type { SeedFile } from "../../../src/seed/types.js";
|
||||
import { validateSeed } from "../../../src/seed/validate.js";
|
||||
|
||||
// End-to-end coverage for the `emdash seed` command surface: seed file
// resolution conventions, schema validation, and application against a
// real (file-backed) database with migrations run.
describe("CLI Seed Commands", () => {
  let tempDir: string;

  beforeEach(async () => {
    tempDir = await mkdtemp(join(tmpdir(), "emdash-cli-test-"));
  });

  afterEach(async () => {
    await rm(tempDir, { recursive: true, force: true });
  });

  describe("seed file resolution", () => {
    // NOTE(review): these tests only verify that the seed files can be
    // written and read back at the conventional locations — they do not
    // invoke the CLI's actual resolver. Consider exercising it directly.
    it("should resolve .emdash/seed.json by convention", async () => {
      // Create convention seed file
      const emdashDir = join(tempDir, ".emdash");
      await mkdir(emdashDir);
      const seedPath = join(emdashDir, "seed.json");

      const seed: SeedFile = {
        version: "1",
        settings: { title: "Convention Seed" },
      };
      await writeFile(seedPath, JSON.stringify(seed));

      // Read it back
      const content = await readFile(seedPath, "utf-8");
      const parsed = JSON.parse(content);
      expect(parsed.settings.title).toBe("Convention Seed");
    });

    it("should resolve seed from package.json emdash.seed", async () => {
      // Create seed file in custom location
      const customDir = join(tempDir, "custom");
      await mkdir(customDir);
      const seedPath = join(customDir, "my-seed.json");

      const seed: SeedFile = {
        version: "1",
        settings: { title: "Package.json Seed" },
      };
      await writeFile(seedPath, JSON.stringify(seed));

      // Create package.json referencing it
      const pkg = {
        name: "test-project",
        emdash: {
          seed: "custom/my-seed.json",
        },
      };
      await writeFile(join(tempDir, "package.json"), JSON.stringify(pkg));

      // Verify the referenced path works
      const content = await readFile(seedPath, "utf-8");
      const parsed = JSON.parse(content);
      expect(parsed.settings.title).toBe("Package.json Seed");
    });
  });

  describe("seed validation", () => {
    it("should validate a valid seed file", () => {
      const seed: SeedFile = {
        version: "1",
        settings: { title: "Test Site" },
        collections: [
          {
            slug: "posts",
            label: "Posts",
            fields: [{ slug: "title", label: "Title", type: "string", required: true }],
          },
        ],
      };

      const result = validateSeed(seed);
      expect(result.valid).toBe(true);
      expect(result.errors).toHaveLength(0);
    });

    it("should reject invalid seed version", () => {
      // Deliberately untyped so the bad version literal compiles.
      const seed = {
        version: "999",
        settings: {},
      };

      const result = validateSeed(seed);
      expect(result.valid).toBe(false);
      // Error message must point at the offending field.
      expect(result.errors.some((e) => e.includes("version"))).toBe(true);
    });

    it("should reject seed with invalid collection", () => {
      const seed: SeedFile = {
        version: "1",
        collections: [
          {
            slug: "", // Invalid: empty slug
            label: "Posts",
            fields: [],
          },
        ],
      };

      const result = validateSeed(seed);
      expect(result.valid).toBe(false);
    });
  });

  describe("seed application", () => {
    it("should apply settings from seed", async () => {
      const dbPath = join(tempDir, "test.db");
      const db = createDatabase({ url: `file:${dbPath}` });

      try {
        await runMigrations(db);

        const seed: SeedFile = {
          version: "1",
          settings: {
            title: "My Test Site",
            tagline: "A test site for testing",
          },
        };

        const result = await applySeed(db, seed, {});

        // Two settings keys → two applied rows.
        expect(result.settings.applied).toBe(2);
      } finally {
        // Always close the handle so afterEach can delete the file.
        await db.destroy();
      }
    });

    it("should apply collections from seed", async () => {
      const dbPath = join(tempDir, "test.db");
      const db = createDatabase({ url: `file:${dbPath}` });

      try {
        await runMigrations(db);

        const seed: SeedFile = {
          version: "1",
          collections: [
            {
              slug: "articles",
              label: "Articles",
              labelSingular: "Article",
              fields: [
                {
                  slug: "title",
                  label: "Title",
                  type: "string",
                  required: true,
                },
                { slug: "body", label: "Body", type: "portableText" },
              ],
            },
          ],
        };

        const result = await applySeed(db, seed, {});

        expect(result.collections.created).toBe(1);
        expect(result.fields.created).toBe(2);
      } finally {
        await db.destroy();
      }
    });

    it("should be idempotent (skip existing)", async () => {
      const dbPath = join(tempDir, "test.db");
      const db = createDatabase({ url: `file:${dbPath}` });

      try {
        await runMigrations(db);

        const seed: SeedFile = {
          version: "1",
          collections: [
            {
              slug: "pages",
              label: "Pages",
              fields: [{ slug: "title", label: "Title", type: "string" }],
            },
          ],
        };

        // First apply
        const result1 = await applySeed(db, seed, {});
        expect(result1.collections.created).toBe(1);
        expect(result1.collections.skipped).toBe(0);

        // Second apply - should skip
        const result2 = await applySeed(db, seed, {});
        expect(result2.collections.created).toBe(0);
        expect(result2.collections.skipped).toBe(1);
      } finally {
        await db.destroy();
      }
    });
  });

  describe("export-seed output", () => {
    it("should produce valid seed from exported data", async () => {
      const dbPath = join(tempDir, "test.db");
      const db = createDatabase({ url: `file:${dbPath}` });

      try {
        await runMigrations(db);

        // Apply a seed first
        const inputSeed: SeedFile = {
          version: "1",
          settings: { title: "Export Test" },
          collections: [
            {
              slug: "docs",
              label: "Documentation",
              fields: [
                { slug: "title", label: "Title", type: "string" },
                { slug: "content", label: "Content", type: "portableText" },
              ],
            },
          ],
        };

        await applySeed(db, inputSeed, {});

        // Now export (simulating what export-seed does)
        // For this test, we just verify the input seed validates
        const validation = validateSeed(inputSeed);
        expect(validation.valid).toBe(true);
      } finally {
        await db.destroy();
      }
    });
  });

  describe("content export with $media", () => {
    it("should handle content without media gracefully", async () => {
      const dbPath = join(tempDir, "test.db");
      const db = createDatabase({ url: `file:${dbPath}` });

      try {
        await runMigrations(db);

        const seed: SeedFile = {
          version: "1",
          collections: [
            {
              slug: "posts",
              label: "Posts",
              fields: [{ slug: "title", label: "Title", type: "string" }],
            },
          ],
          content: {
            posts: [
              {
                id: "post-1",
                slug: "hello-world",
                status: "published",
                data: { title: "Hello World" },
              },
            ],
          },
        };

        // includeContent opts in to row-level seeding, not just schema.
        const result = await applySeed(db, seed, { includeContent: true });

        expect(result.collections.created).toBe(1);
        expect(result.content.created).toBe(1);
      } finally {
        await db.destroy();
      }
    });
  });
});
|
||||
427
packages/core/tests/unit/cli/wxr-parser.test.ts
Normal file
427
packages/core/tests/unit/cli/wxr-parser.test.ts
Normal file
@@ -0,0 +1,427 @@
|
||||
/**
|
||||
* Tests for WXR parser
|
||||
*/
|
||||
|
||||
import { Readable } from "node:stream";
|
||||
|
||||
import { describe, it, expect } from "vitest";
|
||||
|
||||
import { parseWxr } from "../../../src/cli/wxr/parser.js";
|
||||
|
||||
function createStream(content: string): Readable {
|
||||
return Readable.from([content]);
|
||||
}
|
||||
|
||||
describe("parseWxr", () => {
|
||||
it("parses basic WXR structure", async () => {
|
||||
const wxr = `<?xml version="1.0" encoding="UTF-8"?>
|
||||
<rss version="2.0"
|
||||
xmlns:excerpt="http://wordpress.org/export/1.2/excerpt/"
|
||||
xmlns:content="http://purl.org/rss/1.0/modules/content/"
|
||||
xmlns:dc="http://purl.org/dc/elements/1.1/"
|
||||
xmlns:wp="http://wordpress.org/export/1.2/">
|
||||
<channel>
|
||||
<title>Test Site</title>
|
||||
<link>https://example.com</link>
|
||||
<description>A test WordPress site</description>
|
||||
<language>en-US</language>
|
||||
<wp:base_site_url>https://example.com</wp:base_site_url>
|
||||
<wp:base_blog_url>https://example.com</wp:base_blog_url>
|
||||
</channel>
|
||||
</rss>`;
|
||||
|
||||
const result = await parseWxr(createStream(wxr));
|
||||
|
||||
expect(result.site.title).toBe("Test Site");
|
||||
expect(result.site.link).toBe("https://example.com");
|
||||
expect(result.site.description).toBe("A test WordPress site");
|
||||
expect(result.site.language).toBe("en-US");
|
||||
});
|
||||
|
||||
it("parses posts", async () => {
|
||||
const wxr = `<?xml version="1.0" encoding="UTF-8"?>
|
||||
<rss version="2.0"
|
||||
xmlns:content="http://purl.org/rss/1.0/modules/content/"
|
||||
xmlns:dc="http://purl.org/dc/elements/1.1/"
|
||||
xmlns:wp="http://wordpress.org/export/1.2/">
|
||||
<channel>
|
||||
<title>Test Site</title>
|
||||
<item>
|
||||
<title>Hello World</title>
|
||||
<link>https://example.com/hello-world/</link>
|
||||
<pubDate>Mon, 01 Jan 2024 12:00:00 +0000</pubDate>
|
||||
<dc:creator>admin</dc:creator>
|
||||
<content:encoded><![CDATA[<!-- wp:paragraph -->
|
||||
<p>Welcome to WordPress!</p>
|
||||
<!-- /wp:paragraph -->]]></content:encoded>
|
||||
<wp:post_id>1</wp:post_id>
|
||||
<wp:post_date>2024-01-01 12:00:00</wp:post_date>
|
||||
<wp:status>publish</wp:status>
|
||||
<wp:post_type>post</wp:post_type>
|
||||
<wp:post_name>hello-world</wp:post_name>
|
||||
</item>
|
||||
</channel>
|
||||
</rss>`;
|
||||
|
||||
const result = await parseWxr(createStream(wxr));
|
||||
|
||||
expect(result.posts).toHaveLength(1);
|
||||
expect(result.posts[0]?.title).toBe("Hello World");
|
||||
expect(result.posts[0]?.id).toBe(1);
|
||||
expect(result.posts[0]?.status).toBe("publish");
|
||||
expect(result.posts[0]?.postType).toBe("post");
|
||||
expect(result.posts[0]?.content).toContain("wp:paragraph");
|
||||
});
|
||||
|
||||
it("parses pages", async () => {
|
||||
const wxr = `<?xml version="1.0" encoding="UTF-8"?>
|
||||
<rss version="2.0"
|
||||
xmlns:content="http://purl.org/rss/1.0/modules/content/"
|
||||
xmlns:wp="http://wordpress.org/export/1.2/">
|
||||
<channel>
|
||||
<item>
|
||||
<title>About Us</title>
|
||||
<content:encoded><![CDATA[<p>About page content</p>]]></content:encoded>
|
||||
<wp:post_id>2</wp:post_id>
|
||||
<wp:status>publish</wp:status>
|
||||
<wp:post_type>page</wp:post_type>
|
||||
</item>
|
||||
</channel>
|
||||
</rss>`;
|
||||
|
||||
const result = await parseWxr(createStream(wxr));
|
||||
|
||||
expect(result.posts).toHaveLength(1);
|
||||
expect(result.posts[0]?.title).toBe("About Us");
|
||||
expect(result.posts[0]?.postType).toBe("page");
|
||||
});
|
||||
|
||||
it("parses attachments", async () => {
|
||||
const wxr = `<?xml version="1.0" encoding="UTF-8"?>
|
||||
<rss version="2.0"
|
||||
xmlns:wp="http://wordpress.org/export/1.2/">
|
||||
<channel>
|
||||
<item>
|
||||
<title>Test Image</title>
|
||||
<wp:post_id>10</wp:post_id>
|
||||
<wp:post_type>attachment</wp:post_type>
|
||||
<wp:attachment_url>https://example.com/wp-content/uploads/2024/01/test.jpg</wp:attachment_url>
|
||||
</item>
|
||||
</channel>
|
||||
</rss>`;
|
||||
|
||||
const result = await parseWxr(createStream(wxr));
|
||||
|
||||
expect(result.posts).toHaveLength(0);
|
||||
expect(result.attachments).toHaveLength(1);
|
||||
expect(result.attachments[0]?.id).toBe(10);
|
||||
expect(result.attachments[0]?.title).toBe("Test Image");
|
||||
expect(result.attachments[0]?.url).toContain("test.jpg");
|
||||
});
|
||||
|
||||
it("parses categories", async () => {
|
||||
const wxr = `<?xml version="1.0" encoding="UTF-8"?>
|
||||
<rss version="2.0"
|
||||
xmlns:wp="http://wordpress.org/export/1.2/">
|
||||
<channel>
|
||||
<wp:category>
|
||||
<wp:term_id>1</wp:term_id>
|
||||
<wp:category_nicename>uncategorized</wp:category_nicename>
|
||||
<wp:cat_name><![CDATA[Uncategorized]]></wp:cat_name>
|
||||
</wp:category>
|
||||
<wp:category>
|
||||
<wp:term_id>2</wp:term_id>
|
||||
<wp:category_nicename>news</wp:category_nicename>
|
||||
<wp:cat_name><![CDATA[News]]></wp:cat_name>
|
||||
<wp:category_parent>uncategorized</wp:category_parent>
|
||||
</wp:category>
|
||||
</channel>
|
||||
</rss>`;
|
||||
|
||||
const result = await parseWxr(createStream(wxr));
|
||||
|
||||
expect(result.categories).toHaveLength(2);
|
||||
expect(result.categories[0]?.nicename).toBe("uncategorized");
|
||||
expect(result.categories[0]?.name).toBe("Uncategorized");
|
||||
expect(result.categories[1]?.parent).toBe("uncategorized");
|
||||
});
|
||||
|
||||
it("parses tags", async () => {
|
||||
const wxr = `<?xml version="1.0" encoding="UTF-8"?>
|
||||
<rss version="2.0"
|
||||
xmlns:wp="http://wordpress.org/export/1.2/">
|
||||
<channel>
|
||||
<wp:tag>
|
||||
<wp:term_id>5</wp:term_id>
|
||||
<wp:tag_slug>javascript</wp:tag_slug>
|
||||
<wp:tag_name><![CDATA[JavaScript]]></wp:tag_name>
|
||||
</wp:tag>
|
||||
</channel>
|
||||
</rss>`;
|
||||
|
||||
const result = await parseWxr(createStream(wxr));
|
||||
|
||||
expect(result.tags).toHaveLength(1);
|
||||
expect(result.tags[0]?.slug).toBe("javascript");
|
||||
expect(result.tags[0]?.name).toBe("JavaScript");
|
||||
});
|
||||
|
||||
it("parses post categories and tags", async () => {
|
||||
const wxr = `<?xml version="1.0" encoding="UTF-8"?>
|
||||
<rss version="2.0"
|
||||
xmlns:wp="http://wordpress.org/export/1.2/">
|
||||
<channel>
|
||||
<item>
|
||||
<title>Tagged Post</title>
|
||||
<category domain="category" nicename="news"><![CDATA[News]]></category>
|
||||
<category domain="post_tag" nicename="javascript"><![CDATA[JavaScript]]></category>
|
||||
<category domain="post_tag" nicename="typescript"><![CDATA[TypeScript]]></category>
|
||||
<wp:post_type>post</wp:post_type>
|
||||
</item>
|
||||
</channel>
|
||||
</rss>`;
|
||||
|
||||
const result = await parseWxr(createStream(wxr));
|
||||
|
||||
expect(result.posts[0]?.categories).toContain("news");
|
||||
expect(result.posts[0]?.tags).toContain("javascript");
|
||||
expect(result.posts[0]?.tags).toContain("typescript");
|
||||
});
|
||||
|
||||
it("parses authors", async () => {
|
||||
const wxr = `<?xml version="1.0" encoding="UTF-8"?>
|
||||
<rss version="2.0"
|
||||
xmlns:wp="http://wordpress.org/export/1.2/">
|
||||
<channel>
|
||||
<wp:author>
|
||||
<wp:author_id>1</wp:author_id>
|
||||
<wp:author_login>admin</wp:author_login>
|
||||
<wp:author_email>admin@example.com</wp:author_email>
|
||||
<wp:author_display_name><![CDATA[Administrator]]></wp:author_display_name>
|
||||
<wp:author_first_name>Admin</wp:author_first_name>
|
||||
<wp:author_last_name>User</wp:author_last_name>
|
||||
</wp:author>
|
||||
</channel>
|
||||
</rss>`;
|
||||
|
||||
const result = await parseWxr(createStream(wxr));
|
||||
|
||||
expect(result.authors).toHaveLength(1);
|
||||
expect(result.authors[0]?.login).toBe("admin");
|
||||
expect(result.authors[0]?.email).toBe("admin@example.com");
|
||||
expect(result.authors[0]?.displayName).toBe("Administrator");
|
||||
});
|
||||
|
||||
it("parses post meta", async () => {
|
||||
const wxr = `<?xml version="1.0" encoding="UTF-8"?>
|
||||
<rss version="2.0"
|
||||
xmlns:wp="http://wordpress.org/export/1.2/">
|
||||
<channel>
|
||||
<item>
|
||||
<title>Post with Meta</title>
|
||||
<wp:post_type>post</wp:post_type>
|
||||
<wp:postmeta>
|
||||
<wp:meta_key>_yoast_wpseo_title</wp:meta_key>
|
||||
<wp:meta_value>SEO Title</wp:meta_value>
|
||||
</wp:postmeta>
|
||||
<wp:postmeta>
|
||||
<wp:meta_key>_yoast_wpseo_metadesc</wp:meta_key>
|
||||
<wp:meta_value>SEO Description</wp:meta_value>
|
||||
</wp:postmeta>
|
||||
</item>
|
||||
</channel>
|
||||
</rss>`;
|
||||
|
||||
const result = await parseWxr(createStream(wxr));
|
||||
|
||||
expect(result.posts[0]?.meta.get("_yoast_wpseo_title")).toBe("SEO Title");
|
||||
expect(result.posts[0]?.meta.get("_yoast_wpseo_metadesc")).toBe("SEO Description");
|
||||
});
|
||||
|
||||
it("handles empty WXR", async () => {
|
||||
const wxr = `<?xml version="1.0" encoding="UTF-8"?>
|
||||
<rss version="2.0">
|
||||
<channel>
|
||||
<title>Empty Site</title>
|
||||
</channel>
|
||||
</rss>`;
|
||||
|
||||
const result = await parseWxr(createStream(wxr));
|
||||
|
||||
expect(result.posts).toHaveLength(0);
|
||||
expect(result.attachments).toHaveLength(0);
|
||||
expect(result.categories).toHaveLength(0);
|
||||
});
|
||||
|
||||
it("parses page hierarchy (post_parent and menu_order)", async () => {
|
||||
const wxr = `<?xml version="1.0" encoding="UTF-8"?>
|
||||
<rss version="2.0"
|
||||
xmlns:wp="http://wordpress.org/export/1.2/">
|
||||
<channel>
|
||||
<item>
|
||||
<title>Parent Page</title>
|
||||
<wp:post_id>10</wp:post_id>
|
||||
<wp:post_type>page</wp:post_type>
|
||||
<wp:post_parent>0</wp:post_parent>
|
||||
<wp:menu_order>1</wp:menu_order>
|
||||
</item>
|
||||
<item>
|
||||
<title>Child Page</title>
|
||||
<wp:post_id>11</wp:post_id>
|
||||
<wp:post_type>page</wp:post_type>
|
||||
<wp:post_parent>10</wp:post_parent>
|
||||
<wp:menu_order>2</wp:menu_order>
|
||||
</item>
|
||||
</channel>
|
||||
</rss>`;
|
||||
|
||||
const result = await parseWxr(createStream(wxr));
|
||||
|
||||
expect(result.posts).toHaveLength(2);
|
||||
expect(result.posts[0]?.postParent).toBe(0);
|
||||
expect(result.posts[0]?.menuOrder).toBe(1);
|
||||
expect(result.posts[1]?.postParent).toBe(10);
|
||||
expect(result.posts[1]?.menuOrder).toBe(2);
|
||||
});
|
||||
|
||||
it("parses generic wp:term elements (custom taxonomies)", async () => {
|
||||
const wxr = `<?xml version="1.0" encoding="UTF-8"?>
|
||||
<rss version="2.0"
|
||||
xmlns:wp="http://wordpress.org/export/1.2/">
|
||||
<channel>
|
||||
<wp:term>
|
||||
<wp:term_id>100</wp:term_id>
|
||||
<wp:term_taxonomy>genre</wp:term_taxonomy>
|
||||
<wp:term_slug>sci-fi</wp:term_slug>
|
||||
<wp:term_name><![CDATA[Science Fiction]]></wp:term_name>
|
||||
<wp:term_description><![CDATA[Science fiction books]]></wp:term_description>
|
||||
</wp:term>
|
||||
<wp:term>
|
||||
<wp:term_id>101</wp:term_id>
|
||||
<wp:term_taxonomy>genre</wp:term_taxonomy>
|
||||
<wp:term_slug>fantasy</wp:term_slug>
|
||||
<wp:term_name><![CDATA[Fantasy]]></wp:term_name>
|
||||
<wp:term_parent>sci-fi</wp:term_parent>
|
||||
</wp:term>
|
||||
</channel>
|
||||
</rss>`;
|
||||
|
||||
const result = await parseWxr(createStream(wxr));
|
||||
|
||||
expect(result.terms).toHaveLength(2);
|
||||
expect(result.terms[0]?.id).toBe(100);
|
||||
expect(result.terms[0]?.taxonomy).toBe("genre");
|
||||
expect(result.terms[0]?.slug).toBe("sci-fi");
|
||||
expect(result.terms[0]?.name).toBe("Science Fiction");
|
||||
expect(result.terms[0]?.description).toBe("Science fiction books");
|
||||
expect(result.terms[1]?.parent).toBe("sci-fi");
|
||||
});
|
||||
|
||||
it("parses nav_menu terms and nav_menu_item posts into structured menus", async () => {
|
||||
const wxr = `<?xml version="1.0" encoding="UTF-8"?>
|
||||
<rss version="2.0"
|
||||
xmlns:wp="http://wordpress.org/export/1.2/">
|
||||
<channel>
|
||||
<wp:term>
|
||||
<wp:term_id>5</wp:term_id>
|
||||
<wp:term_taxonomy>nav_menu</wp:term_taxonomy>
|
||||
<wp:term_slug>main-menu</wp:term_slug>
|
||||
<wp:term_name><![CDATA[Main Menu]]></wp:term_name>
|
||||
</wp:term>
|
||||
<item>
|
||||
<title>Home</title>
|
||||
<wp:post_id>50</wp:post_id>
|
||||
<wp:post_type>nav_menu_item</wp:post_type>
|
||||
<wp:menu_order>1</wp:menu_order>
|
||||
<category domain="nav_menu" nicename="main-menu"><![CDATA[Main Menu]]></category>
|
||||
<wp:postmeta>
|
||||
<wp:meta_key>_menu_item_type</wp:meta_key>
|
||||
<wp:meta_value>custom</wp:meta_value>
|
||||
</wp:postmeta>
|
||||
<wp:postmeta>
|
||||
<wp:meta_key>_menu_item_url</wp:meta_key>
|
||||
<wp:meta_value>https://example.com/</wp:meta_value>
|
||||
</wp:postmeta>
|
||||
<wp:postmeta>
|
||||
<wp:meta_key>_menu_item_menu_item_parent</wp:meta_key>
|
||||
<wp:meta_value>0</wp:meta_value>
|
||||
</wp:postmeta>
|
||||
</item>
|
||||
<item>
|
||||
<title>About</title>
|
||||
<wp:post_id>51</wp:post_id>
|
||||
<wp:post_type>nav_menu_item</wp:post_type>
|
||||
<wp:menu_order>2</wp:menu_order>
|
||||
<category domain="nav_menu" nicename="main-menu"><![CDATA[Main Menu]]></category>
|
||||
<wp:postmeta>
|
||||
<wp:meta_key>_menu_item_type</wp:meta_key>
|
||||
<wp:meta_value>post_type</wp:meta_value>
|
||||
</wp:postmeta>
|
||||
<wp:postmeta>
|
||||
<wp:meta_key>_menu_item_object</wp:meta_key>
|
||||
<wp:meta_value>page</wp:meta_value>
|
||||
</wp:postmeta>
|
||||
<wp:postmeta>
|
||||
<wp:meta_key>_menu_item_object_id</wp:meta_key>
|
||||
<wp:meta_value>10</wp:meta_value>
|
||||
</wp:postmeta>
|
||||
<wp:postmeta>
|
||||
<wp:meta_key>_menu_item_menu_item_parent</wp:meta_key>
|
||||
<wp:meta_value>0</wp:meta_value>
|
||||
</wp:postmeta>
|
||||
</item>
|
||||
</channel>
|
||||
</rss>`;
|
||||
|
||||
const result = await parseWxr(createStream(wxr));
|
||||
|
||||
// Check terms array includes nav_menu term
|
||||
expect(result.terms.some((t) => t.taxonomy === "nav_menu")).toBe(true);
|
||||
|
||||
// Check nav_menu_item posts are in posts array
|
||||
expect(result.posts.filter((p) => p.postType === "nav_menu_item")).toHaveLength(2);
|
||||
|
||||
// Check structured navMenus
|
||||
expect(result.navMenus).toHaveLength(1);
|
||||
expect(result.navMenus[0]?.name).toBe("main-menu");
|
||||
expect(result.navMenus[0]?.id).toBe(5);
|
||||
expect(result.navMenus[0]?.items).toHaveLength(2);
|
||||
|
||||
// Check menu items are sorted by menu_order
|
||||
expect(result.navMenus[0]?.items[0]?.title).toBe("Home");
|
||||
expect(result.navMenus[0]?.items[0]?.type).toBe("custom");
|
||||
expect(result.navMenus[0]?.items[0]?.url).toBe("https://example.com/");
|
||||
expect(result.navMenus[0]?.items[0]?.sortOrder).toBe(1);
|
||||
|
||||
expect(result.navMenus[0]?.items[1]?.title).toBe("About");
|
||||
expect(result.navMenus[0]?.items[1]?.type).toBe("post_type");
|
||||
expect(result.navMenus[0]?.items[1]?.objectType).toBe("page");
|
||||
expect(result.navMenus[0]?.items[1]?.objectId).toBe(10);
|
||||
expect(result.navMenus[0]?.items[1]?.sortOrder).toBe(2);
|
||||
});
|
||||
|
||||
it("parses custom taxonomy assignments on posts", async () => {
|
||||
const wxr = `<?xml version="1.0" encoding="UTF-8"?>
|
||||
<rss version="2.0"
|
||||
xmlns:wp="http://wordpress.org/export/1.2/">
|
||||
<channel>
|
||||
<item>
|
||||
<title>Book Review</title>
|
||||
<wp:post_id>1</wp:post_id>
|
||||
<wp:post_type>post</wp:post_type>
|
||||
<category domain="category" nicename="reviews"><![CDATA[Reviews]]></category>
|
||||
<category domain="genre" nicename="sci-fi"><![CDATA[Science Fiction]]></category>
|
||||
<category domain="genre" nicename="dystopian"><![CDATA[Dystopian]]></category>
|
||||
<category domain="reading_level" nicename="advanced"><![CDATA[Advanced]]></category>
|
||||
</item>
|
||||
</channel>
|
||||
</rss>`;
|
||||
|
||||
const result = await parseWxr(createStream(wxr));
|
||||
|
||||
expect(result.posts[0]?.categories).toContain("reviews");
|
||||
expect(result.posts[0]?.customTaxonomies?.get("genre")).toContain("sci-fi");
|
||||
expect(result.posts[0]?.customTaxonomies?.get("genre")).toContain("dystopian");
|
||||
expect(result.posts[0]?.customTaxonomies?.get("reading_level")).toContain("advanced");
|
||||
});
|
||||
});
|
||||
Reference in New Issue
Block a user