Emdash source with visual editor image upload fix
Fixes: (1) media.ts — wrap placeholder generation in a try-catch; (2) toolbar.ts — check r.ok and display an error message in the popover.
This commit is contained in:
308
packages/core/tests/unit/auth/allowed-domains.test.ts
Normal file
308
packages/core/tests/unit/auth/allowed-domains.test.ts
Normal file
@@ -0,0 +1,308 @@
|
||||
import type { AuthAdapter } from "@emdash-cms/auth";
|
||||
import { Role } from "@emdash-cms/auth";
|
||||
import { createKyselyAdapter } from "@emdash-cms/auth/adapters/kysely";
|
||||
import type { Kysely } from "kysely";
|
||||
import { describe, it, expect, beforeEach, afterEach } from "vitest";
|
||||
|
||||
import type { Database } from "../../../src/database/types.js";
|
||||
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
|
||||
|
||||
// Integration-style unit tests for the auth adapter's allowed-domain CRUD API
// (getAllowedDomains / getAllowedDomain / createAllowedDomain /
// updateAllowedDomain / deleteAllowedDomain), backed by a fresh test database
// per test via the Kysely adapter.
describe("Allowed Domains Management", () => {
  let db: Kysely<Database>; // fresh database per test (see beforeEach)
  let adapter: AuthAdapter; // adapter under test, bound to db

  beforeEach(async () => {
    db = await setupTestDatabase();
    adapter = createKyselyAdapter(db);
  });

  afterEach(async () => {
    await teardownTestDatabase(db);
  });

  describe("getAllowedDomains", () => {
    it("should return empty array when no domains exist", async () => {
      const domains = await adapter.getAllowedDomains();
      expect(domains).toEqual([]);
    });

    it("should return all allowed domains", async () => {
      await adapter.createAllowedDomain("acme.com", Role.AUTHOR);
      await adapter.createAllowedDomain("partner.org", Role.CONTRIBUTOR);
      await adapter.createAllowedDomain("editors.net", Role.EDITOR);

      const domains = await adapter.getAllowedDomains();

      // Order is not asserted — only membership.
      expect(domains).toHaveLength(3);
      const domainNames = domains.map((d) => d.domain);
      expect(domainNames).toContain("acme.com");
      expect(domainNames).toContain("partner.org");
      expect(domainNames).toContain("editors.net");
    });

    it("should include both enabled and disabled domains", async () => {
      await adapter.createAllowedDomain("enabled.com", Role.AUTHOR);
      await adapter.createAllowedDomain("disabled.com", Role.AUTHOR);
      await adapter.updateAllowedDomain("disabled.com", false);

      const domains = await adapter.getAllowedDomains();

      // Listing must NOT filter by enabled status.
      expect(domains).toHaveLength(2);
      const enabled = domains.find((d) => d.domain === "enabled.com");
      const disabled = domains.find((d) => d.domain === "disabled.com");

      expect(enabled?.enabled).toBe(true);
      expect(disabled?.enabled).toBe(false);
    });
  });

  describe("getAllowedDomain", () => {
    it("should return null for non-existent domain", async () => {
      const domain = await adapter.getAllowedDomain("nonexistent.com");
      expect(domain).toBeNull();
    });

    it("should return domain with all properties", async () => {
      await adapter.createAllowedDomain("example.com", Role.EDITOR);

      const domain = await adapter.getAllowedDomain("example.com");

      expect(domain).not.toBeNull();
      expect(domain?.domain).toBe("example.com");
      expect(domain?.defaultRole).toBe(Role.EDITOR);
      expect(domain?.enabled).toBe(true);
      expect(domain?.createdAt).toBeInstanceOf(Date);
    });

    it("should be case-insensitive for domain lookup (normalizes to lowercase)", async () => {
      await adapter.createAllowedDomain("example.com", Role.AUTHOR);

      // Lowercase should work
      const lower = await adapter.getAllowedDomain("example.com");
      expect(lower).not.toBeNull();

      // Uppercase should also work (domains are normalized to lowercase)
      const upper = await adapter.getAllowedDomain("EXAMPLE.COM");
      expect(upper).not.toBeNull();
      expect(upper?.domain).toBe("example.com"); // stored as lowercase
    });
  });

  describe("createAllowedDomain", () => {
    it("should create a new allowed domain", async () => {
      const domain = await adapter.createAllowedDomain("newdomain.com", Role.AUTHOR);

      expect(domain.domain).toBe("newdomain.com");
      expect(domain.defaultRole).toBe(Role.AUTHOR);
      expect(domain.enabled).toBe(true);
      expect(domain.createdAt).toBeInstanceOf(Date);
    });

    it("should create domain with specified role", async () => {
      // One domain per role tier to confirm the role is persisted verbatim.
      await adapter.createAllowedDomain("subscribers.com", Role.SUBSCRIBER);
      await adapter.createAllowedDomain("contributors.com", Role.CONTRIBUTOR);
      await adapter.createAllowedDomain("authors.com", Role.AUTHOR);
      await adapter.createAllowedDomain("editors.com", Role.EDITOR);
      await adapter.createAllowedDomain("admins.com", Role.ADMIN);

      expect((await adapter.getAllowedDomain("subscribers.com"))?.defaultRole).toBe(
        Role.SUBSCRIBER,
      );
      expect((await adapter.getAllowedDomain("contributors.com"))?.defaultRole).toBe(
        Role.CONTRIBUTOR,
      );
      expect((await adapter.getAllowedDomain("authors.com"))?.defaultRole).toBe(Role.AUTHOR);
      expect((await adapter.getAllowedDomain("editors.com"))?.defaultRole).toBe(Role.EDITOR);
      expect((await adapter.getAllowedDomain("admins.com"))?.defaultRole).toBe(Role.ADMIN);
    });

    it("should throw error for duplicate domain", async () => {
      await adapter.createAllowedDomain("duplicate.com", Role.AUTHOR);

      // Re-creating the same domain must reject, even with a different role.
      await expect(adapter.createAllowedDomain("duplicate.com", Role.EDITOR)).rejects.toThrow();
    });

    it("should set enabled to true by default", async () => {
      const domain = await adapter.createAllowedDomain("enabled-default.com", Role.AUTHOR);
      expect(domain.enabled).toBe(true);
    });
  });

  describe("updateAllowedDomain", () => {
    it("should toggle domain enabled status", async () => {
      await adapter.createAllowedDomain("toggle.com", Role.AUTHOR);

      // Disable
      await adapter.updateAllowedDomain("toggle.com", false);
      let domain = await adapter.getAllowedDomain("toggle.com");
      expect(domain?.enabled).toBe(false);

      // Re-enable
      await adapter.updateAllowedDomain("toggle.com", true);
      domain = await adapter.getAllowedDomain("toggle.com");
      expect(domain?.enabled).toBe(true);
    });

    it("should update default role", async () => {
      await adapter.createAllowedDomain("role-change.com", Role.AUTHOR);

      await adapter.updateAllowedDomain("role-change.com", true, Role.EDITOR);

      const domain = await adapter.getAllowedDomain("role-change.com");
      expect(domain?.defaultRole).toBe(Role.EDITOR);
    });

    it("should update both enabled and role at once", async () => {
      await adapter.createAllowedDomain("both.com", Role.AUTHOR);

      await adapter.updateAllowedDomain("both.com", false, Role.CONTRIBUTOR);

      const domain = await adapter.getAllowedDomain("both.com");
      expect(domain?.enabled).toBe(false);
      expect(domain?.defaultRole).toBe(Role.CONTRIBUTOR);
    });

    it("should preserve role when only updating enabled", async () => {
      await adapter.createAllowedDomain("preserve.com", Role.EDITOR);

      // Omitting the role argument must leave the stored role untouched.
      await adapter.updateAllowedDomain("preserve.com", false);

      const domain = await adapter.getAllowedDomain("preserve.com");
      expect(domain?.enabled).toBe(false);
      expect(domain?.defaultRole).toBe(Role.EDITOR);
    });

    it("should preserve createdAt when updating", async () => {
      const created = await adapter.createAllowedDomain("timestamp.com", Role.AUTHOR);
      const originalCreatedAt = created.createdAt;

      // Small delay
      await new Promise((resolve) => setTimeout(resolve, 10));

      await adapter.updateAllowedDomain("timestamp.com", false, Role.EDITOR);

      const updated = await adapter.getAllowedDomain("timestamp.com");
      expect(updated?.createdAt.getTime()).toBe(originalCreatedAt.getTime());
    });
  });

  describe("deleteAllowedDomain", () => {
    it("should delete an existing domain", async () => {
      await adapter.createAllowedDomain("todelete.com", Role.AUTHOR);

      await adapter.deleteAllowedDomain("todelete.com");

      const domain = await adapter.getAllowedDomain("todelete.com");
      expect(domain).toBeNull();
    });

    it("should not affect other domains", async () => {
      await adapter.createAllowedDomain("keep.com", Role.AUTHOR);
      await adapter.createAllowedDomain("delete.com", Role.AUTHOR);

      await adapter.deleteAllowedDomain("delete.com");

      const kept = await adapter.getAllowedDomain("keep.com");
      const deleted = await adapter.getAllowedDomain("delete.com");

      expect(kept).not.toBeNull();
      expect(deleted).toBeNull();
    });

    it("should be idempotent (no error on non-existent)", async () => {
      // Deleting non-existent domain should not throw
      await expect(adapter.deleteAllowedDomain("nonexistent.com")).resolves.not.toThrow();
    });
  });

  describe("Domain Management Flow", () => {
    it("should support full CRUD flow", async () => {
      // Create
      const created = await adapter.createAllowedDomain("company.com", Role.AUTHOR);
      expect(created.domain).toBe("company.com");
      expect(created.enabled).toBe(true);

      // Read
      let domain = await adapter.getAllowedDomain("company.com");
      expect(domain?.domain).toBe("company.com");

      // Update - change role
      await adapter.updateAllowedDomain("company.com", true, Role.EDITOR);
      domain = await adapter.getAllowedDomain("company.com");
      expect(domain?.defaultRole).toBe(Role.EDITOR);

      // Update - disable
      await adapter.updateAllowedDomain("company.com", false);
      domain = await adapter.getAllowedDomain("company.com");
      expect(domain?.enabled).toBe(false);

      // List
      const all = await adapter.getAllowedDomains();
      expect(all).toHaveLength(1);

      // Delete
      await adapter.deleteAllowedDomain("company.com");
      domain = await adapter.getAllowedDomain("company.com");
      expect(domain).toBeNull();

      // List after delete
      const afterDelete = await adapter.getAllowedDomains();
      expect(afterDelete).toHaveLength(0);
    });

    it("should handle multiple domains correctly", async () => {
      // Create multiple domains
      await adapter.createAllowedDomain("first.com", Role.SUBSCRIBER);
      await adapter.createAllowedDomain("second.com", Role.CONTRIBUTOR);
      await adapter.createAllowedDomain("third.com", Role.AUTHOR);

      // Verify all exist
      let domains = await adapter.getAllowedDomains();
      expect(domains).toHaveLength(3);

      // Disable one
      await adapter.updateAllowedDomain("second.com", false);

      // Delete another
      await adapter.deleteAllowedDomain("first.com");

      // Verify state
      domains = await adapter.getAllowedDomains();
      expect(domains).toHaveLength(2);

      const second = domains.find((d) => d.domain === "second.com");
      const third = domains.find((d) => d.domain === "third.com");

      expect(second?.enabled).toBe(false);
      expect(third?.enabled).toBe(true);
    });
  });

  describe("Edge Cases", () => {
    it("should handle subdomains correctly", async () => {
      await adapter.createAllowedDomain("sub.domain.com", Role.AUTHOR);

      const domain = await adapter.getAllowedDomain("sub.domain.com");
      expect(domain).not.toBeNull();

      // Parent domain should not match
      const parent = await adapter.getAllowedDomain("domain.com");
      expect(parent).toBeNull();
    });

    it("should handle domains with hyphens", async () => {
      await adapter.createAllowedDomain("my-company.com", Role.AUTHOR);

      const domain = await adapter.getAllowedDomain("my-company.com");
      expect(domain?.domain).toBe("my-company.com");
    });

    it("should handle long domain names", async () => {
      const longDomain = "very-long-subdomain.another-part.yet-another.example.com";
      await adapter.createAllowedDomain(longDomain, Role.AUTHOR);

      const domain = await adapter.getAllowedDomain(longDomain);
      expect(domain?.domain).toBe(longDomain);
    });
  });
});
|
||||
211
packages/core/tests/unit/auth/allowed-origins.test.ts
Normal file
211
packages/core/tests/unit/auth/allowed-origins.test.ts
Normal file
@@ -0,0 +1,211 @@
|
||||
import { afterEach, beforeEach, describe, expect, it } from "vitest";
|
||||
|
||||
import { _resetEnvCache } from "../../../src/api/public-url.js";
|
||||
import {
|
||||
getConfiguredAllowedOrigins,
|
||||
validateAllowedOrigins,
|
||||
validateOriginShape,
|
||||
type TaggedOrigin,
|
||||
} from "../../../src/auth/allowed-origins.js";
|
||||
|
||||
// Snapshot of the process-level env var so each test can mutate it freely
// and afterEach can restore the original machine state.
const origEnvAllowed = process.env.EMDASH_ALLOWED_ORIGINS;

beforeEach(() => {
  // Reset the module's env cache and start each test with the var unset.
  _resetEnvCache();
  delete process.env.EMDASH_ALLOWED_ORIGINS;
});

afterEach(() => {
  // Restore the pre-suite value (or absence) of the env var.
  _resetEnvCache();
  if (origEnvAllowed === undefined) delete process.env.EMDASH_ALLOWED_ORIGINS;
  else process.env.EMDASH_ALLOWED_ORIGINS = origEnvAllowed;
});

/**
 * Builds a TaggedOrigin for test input; source defaults to
 * "config.allowedOrigins" so most call sites stay one-argument.
 */
function tag(
  origin: string,
  source: TaggedOrigin["source"] = "config.allowedOrigins",
): TaggedOrigin {
  return { origin, source };
}
|
||||
|
||||
// getConfiguredAllowedOrigins merges config.allowedOrigins with the
// EMDASH_ALLOWED_ORIGINS env var, tagging each entry with its source.
describe("getConfiguredAllowedOrigins()", () => {
  it("returns [] when neither config nor env supplies origins", () => {
    expect(getConfiguredAllowedOrigins(undefined)).toEqual([]);
    expect(getConfiguredAllowedOrigins({})).toEqual([]);
  });

  it("tags config entries as config.allowedOrigins", () => {
    const tagged = getConfiguredAllowedOrigins({
      allowedOrigins: ["https://preview.example.com"],
    });
    expect(tagged).toEqual([
      { origin: "https://preview.example.com", source: "config.allowedOrigins" },
    ]);
  });

  it("tags env entries as EMDASH_ALLOWED_ORIGINS", () => {
    process.env.EMDASH_ALLOWED_ORIGINS = "https://preview.example.com";
    const tagged = getConfiguredAllowedOrigins({});
    expect(tagged).toEqual([
      { origin: "https://preview.example.com", source: "EMDASH_ALLOWED_ORIGINS" },
    ]);
  });

  it("merges config first, then env (config wins on dedupe by tag-of-first-occurrence)", () => {
    process.env.EMDASH_ALLOWED_ORIGINS = "https://staging.example.com";
    const tagged = getConfiguredAllowedOrigins({
      allowedOrigins: ["https://preview.example.com"],
    });
    // Config entries precede env entries in the merged list.
    expect(tagged).toEqual([
      { origin: "https://preview.example.com", source: "config.allowedOrigins" },
      { origin: "https://staging.example.com", source: "EMDASH_ALLOWED_ORIGINS" },
    ]);
  });

  it("filters falsy config entries", () => {
    // Empty-string entries are dropped rather than tagged.
    const tagged = getConfiguredAllowedOrigins({
      allowedOrigins: ["", "https://preview.example.com"],
    });
    expect(tagged).toEqual([
      { origin: "https://preview.example.com", source: "config.allowedOrigins" },
    ]);
  });
});
|
||||
|
||||
// validateOriginShape checks each tagged origin is a well-formed http(s)
// URL, normalizes it to URL.origin form, dedupes, and throws with the
// entry's source in the error message on any shape violation.
describe("validateOriginShape()", () => {
  it("returns [] for empty input", () => {
    expect(validateOriginShape([])).toEqual([]);
  });

  it("normalizes to URL.origin form (path/query stripped)", () => {
    expect(validateOriginShape([tag("https://example.com/admin?x=1")])).toEqual([
      "https://example.com",
    ]);
  });

  it("dedupes duplicate origins", () => {
    // Two entries that normalize to the same origin collapse to one.
    expect(validateOriginShape([tag("https://example.com"), tag("https://example.com/x")])).toEqual(
      ["https://example.com"],
    );
  });

  it("rejects unparseable URLs with source attribution", () => {
    expect(() => validateOriginShape([tag("not-a-url")])).toThrow(
      /EmDash config error in config\.allowedOrigins:.*invalid URL/,
    );
  });

  it("rejects non-http(s) protocols", () => {
    // Error message names the offending entry's source and protocol.
    expect(() => validateOriginShape([tag("ftp://example.com", "EMDASH_ALLOWED_ORIGINS")])).toThrow(
      /EmDash config error in EMDASH_ALLOWED_ORIGINS:.*must be http or https.*ftp:/,
    );
  });

  it("rejects hostnames with trailing dots", () => {
    expect(() => validateOriginShape([tag("https://example.com.")])).toThrow(/trailing dot/);
  });

  it("rejects hostnames with empty labels", () => {
    // "foo..example.com" parses with hostname "foo..example.com"
    expect(() => validateOriginShape([tag("https://foo..example.com")])).toThrow(/empty labels/);
  });
});
|
||||
|
||||
// validateAllowedOrigins enforces two rules against siteUrl:
//   Rule A — non-empty allowedOrigins requires a siteUrl.
//   Rule B — every allowed origin's host must equal, or be a true
//            subdomain of, siteUrl's host.
// It also rejects trailing-dot and IP-literal siteUrl hostnames whenever
// allowedOrigins is non-empty.
describe("validateAllowedOrigins() — Rule A and Rule B", () => {
  it("returns [] when input is empty (no Rule A check fires)", () => {
    expect(validateAllowedOrigins(undefined, [])).toEqual([]);
    expect(validateAllowedOrigins("https://example.com", [])).toEqual([]);
  });

  it("throws Rule A when origins are non-empty but siteUrl is missing", () => {
    expect(() => validateAllowedOrigins(undefined, [tag("https://preview.example.com")])).toThrow(
      /allowedOrigins is set.*but siteUrl is not/,
    );
  });

  it("accepts an exact-hostname-match entry (apex listed alongside apex siteUrl)", () => {
    expect(validateAllowedOrigins("https://example.com", [tag("https://example.com")])).toEqual([
      "https://example.com",
    ]);
  });

  it("accepts a true subdomain", () => {
    expect(
      validateAllowedOrigins("https://example.com", [tag("https://preview.example.com")]),
    ).toEqual(["https://preview.example.com"]);
  });

  it("rejects a sibling/unrelated domain", () => {
    expect(() =>
      validateAllowedOrigins("https://example.com", [tag("https://other-site.com")]),
    ).toThrow(/not a subdomain of siteUrl/);
  });

  it("rejects a suffix-attacker (example.com.evil.com)", () => {
    // Hostname comparison must be label-aware, not a string suffix check.
    expect(() =>
      validateAllowedOrigins("https://example.com", [tag("https://example.com.evil.com")]),
    ).toThrow(/not a subdomain of siteUrl/);
  });

  it("rejects a prefix-attacker (fakeexample.com)", () => {
    expect(() =>
      validateAllowedOrigins("https://example.com", [tag("https://fakeexample.com")]),
    ).toThrow(/not a subdomain of siteUrl/);
  });

  it("rejects apex when siteHost is itself a subdomain", () => {
    // rpId would be app.example.com — the browser refuses apex assertions for it
    expect(() =>
      validateAllowedOrigins("https://app.example.com", [tag("https://example.com")]),
    ).toThrow(/not a subdomain of siteUrl/);
  });

  it("rejects siteUrl with a trailing-dot hostname when allowedOrigins is non-empty", () => {
    expect(() =>
      validateAllowedOrigins("https://example.com.", [tag("https://preview.example.com")]),
    ).toThrow(/trailing-dot hostname.*Remove the trailing dot/);
  });

  it("rejects IP-literal siteUrl (IPv4) when allowedOrigins is non-empty", () => {
    // IP-literal check fires before Rule B in the validator, so the entry shape
    // itself doesn't need to relate to the IP — any parseable origin triggers it.
    expect(() =>
      validateAllowedOrigins("http://127.0.0.1:4321", [tag("https://preview.example.com")]),
    ).toThrow(/IP-literal hostname/);
  });

  it("rejects IP-literal siteUrl (IPv6) when allowedOrigins is non-empty", () => {
    expect(() =>
      validateAllowedOrigins("http://[::1]:4321", [tag("http://x.example.com")]),
    ).toThrow(/IP-literal hostname/);
  });

  it("allows IP-literal siteUrl when allowedOrigins is empty (single-origin dev)", () => {
    expect(validateAllowedOrigins("http://127.0.0.1:4321", [])).toEqual([]);
  });

  it("accepts mixed config + env tagged origins", () => {
    const result = validateAllowedOrigins("https://example.com", [
      tag("https://preview.example.com", "config.allowedOrigins"),
      tag("https://staging.example.com", "EMDASH_ALLOWED_ORIGINS"),
    ]);
    expect(result).toEqual(["https://preview.example.com", "https://staging.example.com"]);
  });

  it("attributes Rule B errors to the source of the offending entry", () => {
    expect(() =>
      validateAllowedOrigins("https://example.com", [
        tag("https://preview.example.com", "config.allowedOrigins"),
        tag("https://other-site.com", "EMDASH_ALLOWED_ORIGINS"),
      ]),
    ).toThrow(/EmDash config error in EMDASH_ALLOWED_ORIGINS.*not a subdomain/);
  });

  it("dedupes when config and env list the same origin", () => {
    const result = validateAllowedOrigins("https://example.com", [
      tag("https://preview.example.com", "config.allowedOrigins"),
      tag("https://preview.example.com", "EMDASH_ALLOWED_ORIGINS"),
    ]);
    expect(result).toEqual(["https://preview.example.com"]);
  });
});
|
||||
224
packages/core/tests/unit/auth/api-tokens.test.ts
Normal file
224
packages/core/tests/unit/auth/api-tokens.test.ts
Normal file
@@ -0,0 +1,224 @@
|
||||
/**
|
||||
* Unit tests for API token generation, hashing, and scope utilities.
|
||||
*/
|
||||
|
||||
import { Role, scopesForRole, clampScopes } from "@emdash-cms/auth";
|
||||
import { describe, it, expect } from "vitest";
|
||||
|
||||
import {
|
||||
generatePrefixedToken,
|
||||
hashApiToken,
|
||||
validateScopes,
|
||||
hasScope,
|
||||
TOKEN_PREFIXES,
|
||||
VALID_SCOPES,
|
||||
} from "../../../src/auth/api-tokens.js";
|
||||
|
||||
// Regex patterns for token validation.
// Prefix patterns match the three token families; the base64url patterns
// assert hash output uses the URL-safe alphabet (no '+', '/', '=').
const PAT_PREFIX_REGEX = /^ec_pat_/; // personal access token
const OAUTH_ACCESS_PREFIX_REGEX = /^ec_oat_/; // OAuth access token
const OAUTH_REFRESH_PREFIX_REGEX = /^ec_ort_/; // OAuth refresh token
const BASE64URL_INVALID_CHARS_REGEX = /[+/=]/;
const BASE64URL_VALID_REGEX = /^[A-Za-z0-9_-]+$/;
|
||||
|
||||
// generatePrefixedToken returns { raw, hash, prefix }: a random token
// carrying the requested prefix, its hash, and a short display prefix.
describe("generatePrefixedToken", () => {
  it("generates a PAT with ec_pat_ prefix", () => {
    const { raw, hash, prefix } = generatePrefixedToken(TOKEN_PREFIXES.PAT);

    expect(raw).toMatch(PAT_PREFIX_REGEX);
    expect(raw.length).toBeGreaterThan(20);
    expect(hash).toBeTruthy();
    expect(hash).not.toBe(raw); // hash must not leak the raw token
    expect(prefix).toMatch(PAT_PREFIX_REGEX);
    // Display prefix = token prefix + 4 identifying characters.
    expect(prefix.length).toBe(TOKEN_PREFIXES.PAT.length + 4);
  });

  it("generates an OAuth access token with ec_oat_ prefix", () => {
    const { raw } = generatePrefixedToken(TOKEN_PREFIXES.OAUTH_ACCESS);
    expect(raw).toMatch(OAUTH_ACCESS_PREFIX_REGEX);
  });

  it("generates an OAuth refresh token with ec_ort_ prefix", () => {
    const { raw } = generatePrefixedToken(TOKEN_PREFIXES.OAUTH_REFRESH);
    expect(raw).toMatch(OAUTH_REFRESH_PREFIX_REGEX);
  });

  it("generates unique tokens each time", () => {
    // 50 draws with zero collisions — sanity check on randomness.
    const tokens = new Set<string>();
    for (let i = 0; i < 50; i++) {
      const { raw } = generatePrefixedToken("ec_pat_");
      tokens.add(raw);
    }
    expect(tokens.size).toBe(50);
  });

  it("generates unique hashes for different tokens", () => {
    const { hash: hash1 } = generatePrefixedToken("ec_pat_");
    const { hash: hash2 } = generatePrefixedToken("ec_pat_");
    expect(hash1).not.toBe(hash2);
  });
});
|
||||
|
||||
// hashApiToken must be deterministic (lookup by hash), collision-distinct
// across tokens and prefixes, and emit base64url output.
describe("hashApiToken", () => {
  it("produces a deterministic hash", () => {
    const hash1 = hashApiToken("ec_pat_abc123");
    const hash2 = hashApiToken("ec_pat_abc123");
    expect(hash1).toBe(hash2);
  });

  it("produces different hashes for different tokens", () => {
    const hash1 = hashApiToken("ec_pat_abc123");
    const hash2 = hashApiToken("ec_pat_def456");
    expect(hash1).not.toBe(hash2);
  });

  it("hashes the full prefixed token", () => {
    // Same suffix but different prefix should produce different hashes
    const hash1 = hashApiToken("ec_pat_abc123");
    const hash2 = hashApiToken("ec_oat_abc123");
    expect(hash1).not.toBe(hash2);
  });

  it("produces URL-safe base64 output", () => {
    const hash = hashApiToken("ec_pat_test");
    // Should not contain +, /, or = (standard base64 chars)
    expect(hash).not.toMatch(BASE64URL_INVALID_CHARS_REGEX);
    // Should only contain base64url chars
    expect(hash).toMatch(BASE64URL_VALID_REGEX);
  });
});
|
||||
|
||||
// validateScopes returns the subset of inputs that are NOT valid scopes
// (empty array means everything validated).
describe("validateScopes", () => {
  it("returns empty array for valid scopes", () => {
    const invalid = validateScopes(["content:read", "media:write"]);
    expect(invalid).toEqual([]);
  });

  it("returns invalid scopes", () => {
    // "admin" is itself a valid scope, so only "invalid:scope" comes back.
    const invalid = validateScopes(["content:read", "invalid:scope", "admin"]);
    expect(invalid).toEqual(["invalid:scope"]);
  });

  it("handles empty array", () => {
    expect(validateScopes([])).toEqual([]);
  });

  it("accepts all valid scopes", () => {
    const invalid = validateScopes([...VALID_SCOPES]);
    expect(invalid).toEqual([]);
  });
});
|
||||
|
||||
// hasScope checks membership, with "admin" acting as a wildcard that
// satisfies every scope check.
describe("hasScope", () => {
  it("returns true when scope is present", () => {
    expect(hasScope(["content:read", "media:write"], "content:read")).toBe(true);
  });

  it("returns false when scope is missing", () => {
    expect(hasScope(["content:read"], "content:write")).toBe(false);
  });

  it("admin scope grants access to everything", () => {
    expect(hasScope(["admin"], "content:read")).toBe(true);
    expect(hasScope(["admin"], "schema:write")).toBe(true);
    expect(hasScope(["admin"], "media:write")).toBe(true);
  });

  it("handles empty scopes", () => {
    expect(hasScope([], "content:read")).toBe(false);
  });
});
|
||||
|
||||
// ---------------------------------------------------------------------------
// scopesForRole — maps roles to maximum allowed scopes
// ---------------------------------------------------------------------------

// Each role tier is asserted both for what it gains and what it must NOT
// have, so a regression in either direction fails.
describe("scopesForRole", () => {
  it("SUBSCRIBER gets only read scopes for content and media", () => {
    const scopes = scopesForRole(Role.SUBSCRIBER);
    expect(scopes).toContain("content:read");
    expect(scopes).toContain("media:read");
    expect(scopes).not.toContain("content:write");
    expect(scopes).not.toContain("media:write");
    expect(scopes).not.toContain("schema:read");
    expect(scopes).not.toContain("schema:write");
    expect(scopes).not.toContain("admin");
  });

  it("CONTRIBUTOR gets content and media read/write", () => {
    const scopes = scopesForRole(Role.CONTRIBUTOR);
    expect(scopes).toContain("content:read");
    expect(scopes).toContain("content:write");
    expect(scopes).toContain("media:read");
    expect(scopes).toContain("media:write");
    expect(scopes).not.toContain("schema:read");
    expect(scopes).not.toContain("schema:write");
    expect(scopes).not.toContain("admin");
  });

  it("EDITOR gets content, media, and schema:read", () => {
    const scopes = scopesForRole(Role.EDITOR);
    expect(scopes).toContain("content:read");
    expect(scopes).toContain("content:write");
    expect(scopes).toContain("media:read");
    expect(scopes).toContain("media:write");
    expect(scopes).toContain("schema:read");
    expect(scopes).not.toContain("schema:write");
    expect(scopes).not.toContain("admin");
  });

  it("ADMIN gets all scopes including admin and schema:write", () => {
    const scopes = scopesForRole(Role.ADMIN);
    expect(scopes).toContain("content:read");
    expect(scopes).toContain("content:write");
    expect(scopes).toContain("media:read");
    expect(scopes).toContain("media:write");
    expect(scopes).toContain("schema:read");
    expect(scopes).toContain("schema:write");
    expect(scopes).toContain("admin");
  });
});
|
||||
|
||||
// ---------------------------------------------------------------------------
// clampScopes — intersects requested scopes with role-allowed scopes
// ---------------------------------------------------------------------------

describe("clampScopes", () => {
  it("strips admin scope from non-admin role", () => {
    const result = clampScopes(["content:read", "admin"], Role.CONTRIBUTOR);
    expect(result).toEqual(["content:read"]);
  });

  it("strips schema:write from editor role", () => {
    const result = clampScopes(["schema:read", "schema:write"], Role.EDITOR);
    expect(result).toEqual(["schema:read"]);
  });

  it("preserves all scopes for admin role", () => {
    // Admin's allowed set is the full scope list, so clamping is a no-op
    // and input order is preserved.
    const all = [
      "content:read",
      "content:write",
      "media:read",
      "media:write",
      "schema:read",
      "schema:write",
      "admin",
    ];
    const result = clampScopes(all, Role.ADMIN);
    expect(result).toEqual(all);
  });

  it("returns empty array when no scopes survive clamping", () => {
    const result = clampScopes(["admin", "schema:write"], Role.SUBSCRIBER);
    expect(result).toEqual([]);
  });

  it("handles empty input", () => {
    expect(clampScopes([], Role.ADMIN)).toEqual([]);
  });

  it("strips schema:read from contributor role", () => {
    const result = clampScopes(["content:read", "schema:read"], Role.CONTRIBUTOR);
    expect(result).toEqual(["content:read"]);
  });
});
|
||||
214
packages/core/tests/unit/auth/challenge-store.test.ts
Normal file
214
packages/core/tests/unit/auth/challenge-store.test.ts
Normal file
@@ -0,0 +1,214 @@
|
||||
import type { Kysely } from "kysely";
|
||||
import { describe, it, expect, beforeEach, vi, afterEach } from "vitest";
|
||||
|
||||
import {
|
||||
createChallengeStore,
|
||||
cleanupExpiredChallenges,
|
||||
} from "../../../src/auth/challenge-store.js";
|
||||
import type { Database } from "../../../src/database/types.js";
|
||||
import { setupTestDatabase } from "../../utils/test-db.js";
|
||||
|
||||
describe("ChallengeStore", () => {
|
||||
  let db: Kysely<Database>; // fresh database per test
  let store: ReturnType<typeof createChallengeStore>; // store under test

  beforeEach(async () => {
    db = await setupTestDatabase();
    store = createChallengeStore(db);
  });

  afterEach(async () => {
    // NOTE(review): this suite tears down via db.destroy() rather than the
    // teardownTestDatabase helper used elsewhere — confirm this is intentional.
    await db.destroy();
  });
|
||||
|
||||
  // store.set persists a challenge record keyed by the challenge string;
  // a second set with the same key overwrites the existing record (upsert).
  describe("set()", () => {
    it("stores challenge with expiry", async () => {
      const challenge = "test-challenge-123";
      const expiresAt = Date.now() + 5 * 60 * 1000; // 5 minutes

      await store.set(challenge, {
        type: "registration",
        userId: "user-1",
        expiresAt,
      });

      const result = await store.get(challenge);
      expect(result).not.toBeNull();
      expect(result?.type).toBe("registration");
      expect(result?.userId).toBe("user-1");
      expect(result?.expiresAt).toBe(expiresAt);
    });

    it("stores challenge without userId", async () => {
      // userId is optional — authentication challenges may have no user yet.
      const challenge = "auth-challenge-456";
      const expiresAt = Date.now() + 5 * 60 * 1000;

      await store.set(challenge, {
        type: "authentication",
        expiresAt,
      });

      const result = await store.get(challenge);
      expect(result).not.toBeNull();
      expect(result?.type).toBe("authentication");
      expect(result?.userId).toBeUndefined();
    });

    it("updates existing challenge on conflict", async () => {
      const challenge = "update-test";
      const expiresAt1 = Date.now() + 5 * 60 * 1000;
      const expiresAt2 = Date.now() + 10 * 60 * 1000;

      await store.set(challenge, {
        type: "registration",
        userId: "user-1",
        expiresAt: expiresAt1,
      });

      // Same key, different payload — must replace, not duplicate or throw.
      await store.set(challenge, {
        type: "authentication",
        userId: "user-2",
        expiresAt: expiresAt2,
      });

      const result = await store.get(challenge);
      expect(result?.type).toBe("authentication");
      expect(result?.userId).toBe("user-2");
      expect(result?.expiresAt).toBe(expiresAt2);
    });
  });
|
||||
|
||||
describe("get()", () => {
|
||||
it("returns stored challenge", async () => {
|
||||
const challenge = "get-test";
|
||||
const expiresAt = Date.now() + 5 * 60 * 1000;
|
||||
|
||||
await store.set(challenge, {
|
||||
type: "registration",
|
||||
userId: "user-abc",
|
||||
expiresAt,
|
||||
});
|
||||
|
||||
const result = await store.get(challenge);
|
||||
expect(result).toEqual({
|
||||
type: "registration",
|
||||
userId: "user-abc",
|
||||
expiresAt,
|
||||
});
|
||||
});
|
||||
|
||||
it("returns null for non-existent challenge", async () => {
|
||||
const result = await store.get("does-not-exist");
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("returns null for expired challenges and deletes them", async () => {
|
||||
vi.useFakeTimers();
|
||||
|
||||
const challenge = "expired-test";
|
||||
const expiresAt = Date.now() + 60 * 1000; // 1 minute
|
||||
|
||||
await store.set(challenge, {
|
||||
type: "registration",
|
||||
expiresAt,
|
||||
});
|
||||
|
||||
// Advance time past expiry
|
||||
vi.advanceTimersByTime(61 * 1000);
|
||||
|
||||
const result = await store.get(challenge);
|
||||
expect(result).toBeNull();
|
||||
|
||||
// Verify it was deleted
|
||||
vi.useRealTimers();
|
||||
const afterDelete = await db
|
||||
.selectFrom("auth_challenges")
|
||||
.selectAll()
|
||||
.where("challenge", "=", challenge)
|
||||
.executeTakeFirst();
|
||||
expect(afterDelete).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe("delete()", () => {
|
||||
it("removes challenge", async () => {
|
||||
const challenge = "delete-test";
|
||||
const expiresAt = Date.now() + 5 * 60 * 1000;
|
||||
|
||||
await store.set(challenge, {
|
||||
type: "authentication",
|
||||
expiresAt,
|
||||
});
|
||||
|
||||
// Verify it exists
|
||||
const before = await store.get(challenge);
|
||||
expect(before).not.toBeNull();
|
||||
|
||||
// Delete it
|
||||
await store.delete(challenge);
|
||||
|
||||
// Verify it's gone
|
||||
const after = await store.get(challenge);
|
||||
expect(after).toBeNull();
|
||||
});
|
||||
|
||||
it("does not throw when deleting non-existent challenge", async () => {
|
||||
await expect(store.delete("non-existent")).resolves.not.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe("cleanupExpiredChallenges()", () => {
|
||||
it("removes only expired entries", async () => {
|
||||
vi.useFakeTimers();
|
||||
|
||||
const now = Date.now();
|
||||
|
||||
// Create some challenges with different expiry times
|
||||
await store.set("expired-1", {
|
||||
type: "registration",
|
||||
expiresAt: now + 30 * 1000, // expires in 30s
|
||||
});
|
||||
await store.set("expired-2", {
|
||||
type: "authentication",
|
||||
expiresAt: now + 60 * 1000, // expires in 60s
|
||||
});
|
||||
await store.set("valid-1", {
|
||||
type: "registration",
|
||||
expiresAt: now + 5 * 60 * 1000, // expires in 5 minutes
|
||||
});
|
||||
await store.set("valid-2", {
|
||||
type: "authentication",
|
||||
expiresAt: now + 10 * 60 * 1000, // expires in 10 minutes
|
||||
});
|
||||
|
||||
// Advance time by 90 seconds (past first two, but not last two)
|
||||
vi.advanceTimersByTime(90 * 1000);
|
||||
|
||||
const deleted = await cleanupExpiredChallenges(db);
|
||||
expect(deleted).toBe(2);
|
||||
|
||||
// Verify only valid ones remain
|
||||
vi.useRealTimers();
|
||||
const remaining = await db.selectFrom("auth_challenges").select("challenge").execute();
|
||||
|
||||
expect(remaining.map((r) => r.challenge).toSorted()).toEqual(["valid-1", "valid-2"]);
|
||||
});
|
||||
|
||||
it("returns 0 when no expired challenges", async () => {
|
||||
const expiresAt = Date.now() + 10 * 60 * 1000;
|
||||
|
||||
await store.set("valid", {
|
||||
type: "registration",
|
||||
expiresAt,
|
||||
});
|
||||
|
||||
const deleted = await cleanupExpiredChallenges(db);
|
||||
expect(deleted).toBe(0);
|
||||
});
|
||||
|
||||
it("handles empty table", async () => {
|
||||
const deleted = await cleanupExpiredChallenges(db);
|
||||
expect(deleted).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
124
packages/core/tests/unit/auth/discovery-endpoints.test.ts
Normal file
124
packages/core/tests/unit/auth/discovery-endpoints.test.ts
Normal file
@@ -0,0 +1,124 @@
|
||||
/**
|
||||
* Unit tests for OAuth discovery endpoint response shapes.
|
||||
*
|
||||
* These endpoints are public, unauthenticated, and return JSON metadata
|
||||
* that MCP clients use to discover OAuth endpoints. The response shapes
|
||||
* are contractual — changing them breaks MCP client compatibility.
|
||||
*/
|
||||
|
||||
import { describe, it, expect } from "vitest";
|
||||
|
||||
import { GET as getAuthorizationServer } from "../../../src/astro/routes/api/well-known/oauth-authorization-server.js";
|
||||
// We import the GET handlers directly — they're plain functions that take
|
||||
// an Astro-like context and return a Response.
|
||||
import { GET as getProtectedResource } from "../../../src/astro/routes/api/well-known/oauth-protected-resource.js";
|
||||
import { VALID_SCOPES } from "../../../src/auth/api-tokens.js";
|
||||
|
||||
/** Minimal mock of what the route handlers actually use from the Astro context. */
|
||||
function mockContext(origin = "https://example.com") {
|
||||
return {
|
||||
url: new URL("/.well-known/test", origin),
|
||||
locals: { emdash: undefined },
|
||||
} as unknown as Parameters<typeof getProtectedResource>[0];
|
||||
}
|
||||
|
||||
describe("Protected Resource Metadata (RFC 9728)", () => {
|
||||
it("returns correct resource and authorization_servers", async () => {
|
||||
const response = await getProtectedResource(mockContext());
|
||||
expect(response.status).toBe(200);
|
||||
|
||||
const body = (await response.json()) as Record<string, unknown>;
|
||||
expect(body.resource).toBe("https://example.com/_emdash/api/mcp");
|
||||
expect(body.authorization_servers).toEqual(["https://example.com/_emdash"]);
|
||||
});
|
||||
|
||||
it("includes all valid scopes", async () => {
|
||||
const response = await getProtectedResource(mockContext());
|
||||
const body = (await response.json()) as { scopes_supported: string[] };
|
||||
expect(body.scopes_supported).toEqual([...VALID_SCOPES]);
|
||||
});
|
||||
|
||||
it("advertises header-based bearer method", async () => {
|
||||
const response = await getProtectedResource(mockContext());
|
||||
const body = (await response.json()) as { bearer_methods_supported: string[] };
|
||||
expect(body.bearer_methods_supported).toEqual(["header"]);
|
||||
});
|
||||
|
||||
it("sets CORS and cache headers", async () => {
|
||||
const response = await getProtectedResource(mockContext());
|
||||
expect(response.headers.get("Access-Control-Allow-Origin")).toBe("*");
|
||||
expect(response.headers.get("Cache-Control")).toContain("public");
|
||||
});
|
||||
|
||||
it("uses the request origin for URLs", async () => {
|
||||
const response = await getProtectedResource(mockContext("https://cms.mysite.com"));
|
||||
const body = (await response.json()) as Record<string, unknown>;
|
||||
expect(body.resource).toBe("https://cms.mysite.com/_emdash/api/mcp");
|
||||
expect(body.authorization_servers).toEqual(["https://cms.mysite.com/_emdash"]);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Authorization Server Metadata (RFC 8414)", () => {
|
||||
it("returns correct issuer and endpoints", async () => {
|
||||
const response = await getAuthorizationServer(mockContext());
|
||||
expect(response.status).toBe(200);
|
||||
|
||||
const body = (await response.json()) as Record<string, unknown>;
|
||||
expect(body.issuer).toBe("https://example.com/_emdash");
|
||||
expect(body.authorization_endpoint).toBe("https://example.com/_emdash/oauth/authorize");
|
||||
expect(body.token_endpoint).toBe("https://example.com/_emdash/api/oauth/token");
|
||||
expect(body.device_authorization_endpoint).toBe(
|
||||
"https://example.com/_emdash/api/oauth/device/code",
|
||||
);
|
||||
});
|
||||
|
||||
it("supports authorization_code, refresh_token, and device_code grants", async () => {
|
||||
const response = await getAuthorizationServer(mockContext());
|
||||
const body = (await response.json()) as { grant_types_supported: string[] };
|
||||
expect(body.grant_types_supported).toContain("authorization_code");
|
||||
expect(body.grant_types_supported).toContain("refresh_token");
|
||||
expect(body.grant_types_supported).toContain("urn:ietf:params:oauth:grant-type:device_code");
|
||||
});
|
||||
|
||||
it("requires S256 code challenge method only", async () => {
|
||||
const response = await getAuthorizationServer(mockContext());
|
||||
const body = (await response.json()) as { code_challenge_methods_supported: string[] };
|
||||
expect(body.code_challenge_methods_supported).toEqual(["S256"]);
|
||||
});
|
||||
|
||||
it("only supports code response type", async () => {
|
||||
const response = await getAuthorizationServer(mockContext());
|
||||
const body = (await response.json()) as { response_types_supported: string[] };
|
||||
expect(body.response_types_supported).toEqual(["code"]);
|
||||
});
|
||||
|
||||
it("supports public clients (no auth method)", async () => {
|
||||
const response = await getAuthorizationServer(mockContext());
|
||||
const body = (await response.json()) as { token_endpoint_auth_methods_supported: string[] };
|
||||
expect(body.token_endpoint_auth_methods_supported).toEqual(["none"]);
|
||||
});
|
||||
|
||||
it("advertises dynamic client registration", async () => {
|
||||
const response = await getAuthorizationServer(mockContext());
|
||||
const body = (await response.json()) as { registration_endpoint: string };
|
||||
expect(body.registration_endpoint).toBe("https://example.com/_emdash/api/oauth/register");
|
||||
});
|
||||
|
||||
it("includes all valid scopes", async () => {
|
||||
const response = await getAuthorizationServer(mockContext());
|
||||
const body = (await response.json()) as { scopes_supported: string[] };
|
||||
expect(body.scopes_supported).toEqual([...VALID_SCOPES]);
|
||||
});
|
||||
|
||||
it("sets CORS and cache headers", async () => {
|
||||
const response = await getAuthorizationServer(mockContext());
|
||||
expect(response.headers.get("Access-Control-Allow-Origin")).toBe("*");
|
||||
expect(response.headers.get("Cache-Control")).toContain("public");
|
||||
});
|
||||
|
||||
it("does not advertise unsupported client_id metadata documents", async () => {
|
||||
const response = await getAuthorizationServer(mockContext());
|
||||
const body = (await response.json()) as Record<string, unknown>;
|
||||
expect(body).not.toHaveProperty("client_id_metadata_document_supported");
|
||||
});
|
||||
});
|
||||
322
packages/core/tests/unit/auth/invite.test.ts
Normal file
322
packages/core/tests/unit/auth/invite.test.ts
Normal file
@@ -0,0 +1,322 @@
|
||||
import type { AuthAdapter, EmailSendFn } from "@emdash-cms/auth";
|
||||
import type { EmailMessage } from "@emdash-cms/auth";
|
||||
import {
|
||||
Role,
|
||||
createInvite,
|
||||
createInviteToken,
|
||||
validateInvite,
|
||||
completeInvite,
|
||||
InviteError,
|
||||
escapeHtml,
|
||||
generateToken,
|
||||
} from "@emdash-cms/auth";
|
||||
import { createKyselyAdapter } from "@emdash-cms/auth/adapters/kysely";
|
||||
import type { Kysely } from "kysely";
|
||||
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
|
||||
|
||||
import type { Database } from "../../../src/database/types.js";
|
||||
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
|
||||
|
||||
// Regex patterns for token validation
|
||||
const TOKEN_PARAM_REGEX = /token=/;
|
||||
const TOKEN_EXTRACT_REGEX = /token=([a-zA-Z0-9_-]+)/;
|
||||
|
||||
describe("Invite", () => {
|
||||
let db: Kysely<Database>;
|
||||
let adapter: AuthAdapter;
|
||||
let adminId: string;
|
||||
|
||||
beforeEach(async () => {
|
||||
db = await setupTestDatabase();
|
||||
adapter = createKyselyAdapter(db);
|
||||
|
||||
// Create an admin user (required for the invitedBy FK)
|
||||
const admin = await adapter.createUser({
|
||||
email: "admin@example.com",
|
||||
name: "Admin",
|
||||
role: Role.ADMIN,
|
||||
emailVerified: true,
|
||||
});
|
||||
adminId = admin.id;
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await teardownTestDatabase(db);
|
||||
});
|
||||
|
||||
describe("createInviteToken", () => {
|
||||
it("should create a token and return url + email", async () => {
|
||||
const result = await createInviteToken(
|
||||
{ baseUrl: "https://example.com" },
|
||||
adapter,
|
||||
"new@example.com",
|
||||
Role.AUTHOR,
|
||||
adminId,
|
||||
);
|
||||
|
||||
expect(result.email).toBe("new@example.com");
|
||||
expect(result.url).toContain("https://example.com");
|
||||
expect(result.url).toContain("/admin/invite/accept?token=");
|
||||
expect(result.url).toMatch(TOKEN_PARAM_REGEX);
|
||||
// Should NOT have a token field on the result
|
||||
expect("token" in result).toBe(false);
|
||||
});
|
||||
|
||||
it("should preserve baseUrl path prefix in invite URL", async () => {
|
||||
const result = await createInviteToken(
|
||||
{ baseUrl: "https://example.com/_emdash" },
|
||||
adapter,
|
||||
"path@example.com",
|
||||
Role.AUTHOR,
|
||||
adminId,
|
||||
);
|
||||
|
||||
expect(result.url).toContain("https://example.com/_emdash/admin/invite/accept");
|
||||
});
|
||||
|
||||
it("should throw user_exists if email is already registered", async () => {
|
||||
await adapter.createUser({
|
||||
email: "existing@example.com",
|
||||
name: "Existing",
|
||||
role: Role.AUTHOR,
|
||||
emailVerified: true,
|
||||
});
|
||||
|
||||
await expect(
|
||||
createInviteToken(
|
||||
{ baseUrl: "https://example.com" },
|
||||
adapter,
|
||||
"existing@example.com",
|
||||
Role.AUTHOR,
|
||||
adminId,
|
||||
),
|
||||
).rejects.toThrow(InviteError);
|
||||
|
||||
try {
|
||||
await createInviteToken(
|
||||
{ baseUrl: "https://example.com" },
|
||||
adapter,
|
||||
"existing@example.com",
|
||||
Role.AUTHOR,
|
||||
adminId,
|
||||
);
|
||||
} catch (error) {
|
||||
expect(error).toBeInstanceOf(InviteError);
|
||||
expect((error as InviteError).code).toBe("user_exists");
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe("createInvite", () => {
|
||||
let mockEmailSend: EmailSendFn & ReturnType<typeof vi.fn>;
|
||||
let sentEmails: Array<EmailMessage>;
|
||||
|
||||
beforeEach(() => {
|
||||
sentEmails = [];
|
||||
mockEmailSend = vi.fn(async (email: EmailMessage) => {
|
||||
sentEmails.push(email);
|
||||
});
|
||||
});
|
||||
|
||||
it("should send email when email sender is provided", async () => {
|
||||
const result = await createInvite(
|
||||
{
|
||||
baseUrl: "https://example.com",
|
||||
siteName: "Test Site",
|
||||
email: mockEmailSend,
|
||||
},
|
||||
adapter,
|
||||
"invite@example.com",
|
||||
Role.EDITOR,
|
||||
adminId,
|
||||
);
|
||||
|
||||
expect(mockEmailSend).toHaveBeenCalledOnce();
|
||||
expect(sentEmails).toHaveLength(1);
|
||||
expect(sentEmails[0]!.to).toBe("invite@example.com");
|
||||
expect(sentEmails[0]!.subject).toContain("Test Site");
|
||||
expect(sentEmails[0]!.html).toContain("Accept Invite");
|
||||
expect(sentEmails[0]!.text).toContain(result.url);
|
||||
});
|
||||
|
||||
it("should return url without sending email when no sender", async () => {
|
||||
const result = await createInvite(
|
||||
{
|
||||
baseUrl: "https://example.com",
|
||||
siteName: "Test Site",
|
||||
// No email sender — copy-link fallback
|
||||
},
|
||||
adapter,
|
||||
"noemail@example.com",
|
||||
Role.AUTHOR,
|
||||
adminId,
|
||||
);
|
||||
|
||||
expect(result.url).toContain("https://example.com");
|
||||
expect(result.url).toMatch(TOKEN_PARAM_REGEX);
|
||||
expect(result.email).toBe("noemail@example.com");
|
||||
});
|
||||
|
||||
it("should HTML-escape siteName in email HTML body", async () => {
|
||||
await createInvite(
|
||||
{
|
||||
baseUrl: "https://example.com",
|
||||
siteName: '<script>alert("xss")</script>',
|
||||
email: mockEmailSend,
|
||||
},
|
||||
adapter,
|
||||
"xss@example.com",
|
||||
Role.AUTHOR,
|
||||
adminId,
|
||||
);
|
||||
|
||||
expect(sentEmails).toHaveLength(1);
|
||||
const html = sentEmails[0]!.html!;
|
||||
// HTML body should be escaped
|
||||
expect(html).not.toContain("<script>");
|
||||
expect(html).toContain("<script>");
|
||||
// Plain text subject should NOT be escaped (it's not HTML)
|
||||
expect(sentEmails[0]!.subject).toContain("<script>");
|
||||
});
|
||||
});
|
||||
|
||||
describe("validateInvite", () => {
|
||||
let capturedToken: string | null;
|
||||
|
||||
beforeEach(() => {
|
||||
capturedToken = null;
|
||||
});
|
||||
|
||||
async function createTestInvite(email: string, role: number = Role.AUTHOR): Promise<string> {
|
||||
const mockSend = vi.fn(async (msg: EmailMessage) => {
|
||||
const match = msg.text.match(TOKEN_EXTRACT_REGEX);
|
||||
capturedToken = match ? (match[1] ?? null) : null;
|
||||
});
|
||||
|
||||
await createInvite(
|
||||
{
|
||||
baseUrl: "https://example.com",
|
||||
siteName: "Test",
|
||||
email: mockSend,
|
||||
},
|
||||
adapter,
|
||||
email,
|
||||
role,
|
||||
adminId,
|
||||
);
|
||||
|
||||
if (!capturedToken) throw new Error("Token not captured from email");
|
||||
return capturedToken;
|
||||
}
|
||||
|
||||
it("should validate a valid token and return email + role", async () => {
|
||||
const token = await createTestInvite("valid@example.com", Role.EDITOR);
|
||||
|
||||
const result = await validateInvite(adapter, token);
|
||||
|
||||
expect(result.email).toBe("valid@example.com");
|
||||
expect(result.role).toBe(Role.EDITOR);
|
||||
});
|
||||
|
||||
it("should throw invalid_token for a nonexistent token", async () => {
|
||||
// Use a valid base64url token that doesn't exist in the DB
|
||||
const fakeToken = generateToken();
|
||||
|
||||
await expect(validateInvite(adapter, fakeToken)).rejects.toThrow(InviteError);
|
||||
|
||||
try {
|
||||
await validateInvite(adapter, fakeToken);
|
||||
} catch (error) {
|
||||
expect(error).toBeInstanceOf(InviteError);
|
||||
expect((error as InviteError).code).toBe("invalid_token");
|
||||
}
|
||||
});
|
||||
|
||||
it("should throw invalid_token for an already-used token", async () => {
|
||||
const token = await createTestInvite("used@example.com");
|
||||
|
||||
// Complete the invite (consumes the token)
|
||||
await completeInvite(adapter, token, { name: "Used User" });
|
||||
|
||||
// Token should now be invalid
|
||||
await expect(validateInvite(adapter, token)).rejects.toThrow(InviteError);
|
||||
});
|
||||
});
|
||||
|
||||
describe("completeInvite", () => {
|
||||
async function createTestInvite(email: string, role: number = Role.AUTHOR): Promise<string> {
|
||||
let token: string | null = null;
|
||||
const mockSend = vi.fn(async (msg: EmailMessage) => {
|
||||
const match = msg.text.match(TOKEN_EXTRACT_REGEX);
|
||||
token = match ? (match[1] ?? null) : null;
|
||||
});
|
||||
|
||||
await createInvite(
|
||||
{
|
||||
baseUrl: "https://example.com",
|
||||
siteName: "Test",
|
||||
email: mockSend,
|
||||
},
|
||||
adapter,
|
||||
email,
|
||||
role,
|
||||
adminId,
|
||||
);
|
||||
|
||||
if (!token) throw new Error("Token not captured from email");
|
||||
return token;
|
||||
}
|
||||
|
||||
it("should create user with correct email and role", async () => {
|
||||
const token = await createTestInvite("new@example.com", Role.EDITOR);
|
||||
|
||||
const user = await completeInvite(adapter, token, { name: "New User" });
|
||||
|
||||
expect(user.email).toBe("new@example.com");
|
||||
expect(user.role).toBe(Role.EDITOR);
|
||||
expect(user.name).toBe("New User");
|
||||
expect(user.emailVerified).toBe(true);
|
||||
});
|
||||
|
||||
it("should delete token after use (single-use)", async () => {
|
||||
const token = await createTestInvite("oneuse@example.com");
|
||||
|
||||
await completeInvite(adapter, token, { name: "One Use" });
|
||||
|
||||
// Second use should fail
|
||||
await expect(completeInvite(adapter, token, { name: "Second Use" })).rejects.toThrow(
|
||||
InviteError,
|
||||
);
|
||||
});
|
||||
|
||||
it("should throw invalid_token for nonexistent token", async () => {
|
||||
const fakeToken = generateToken();
|
||||
|
||||
await expect(completeInvite(adapter, fakeToken, { name: "Fake" })).rejects.toThrow(
|
||||
InviteError,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("escapeHtml", () => {
|
||||
it("should escape angle brackets", () => {
|
||||
expect(escapeHtml("<script>")).toBe("<script>");
|
||||
});
|
||||
|
||||
it("should escape ampersands", () => {
|
||||
expect(escapeHtml("a & b")).toBe("a & b");
|
||||
});
|
||||
|
||||
it("should escape double quotes", () => {
|
||||
expect(escapeHtml('"hello"')).toBe(""hello"");
|
||||
});
|
||||
|
||||
it("should handle strings with no special characters", () => {
|
||||
expect(escapeHtml("My Site")).toBe("My Site");
|
||||
});
|
||||
|
||||
it("should handle empty string", () => {
|
||||
expect(escapeHtml("")).toBe("");
|
||||
});
|
||||
});
|
||||
});
|
||||
53
packages/core/tests/unit/auth/magic-link.test.ts
Normal file
53
packages/core/tests/unit/auth/magic-link.test.ts
Normal file
@@ -0,0 +1,53 @@
|
||||
import type { AuthAdapter, EmailSendFn } from "@emdash-cms/auth";
|
||||
import type { EmailMessage } from "@emdash-cms/auth";
|
||||
import { Role, sendMagicLink } from "@emdash-cms/auth";
|
||||
import { createKyselyAdapter } from "@emdash-cms/auth/adapters/kysely";
|
||||
import type { Kysely } from "kysely";
|
||||
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
|
||||
|
||||
import type { Database } from "../../../src/database/types.js";
|
||||
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
|
||||
|
||||
describe("Magic Link", () => {
|
||||
let db: Kysely<Database>;
|
||||
let adapter: AuthAdapter;
|
||||
let mockEmailSend: EmailSendFn & ReturnType<typeof vi.fn>;
|
||||
let sentEmails: Array<EmailMessage>;
|
||||
|
||||
beforeEach(async () => {
|
||||
db = await setupTestDatabase();
|
||||
adapter = createKyselyAdapter(db);
|
||||
sentEmails = [];
|
||||
mockEmailSend = vi.fn(async (email: EmailMessage) => {
|
||||
sentEmails.push(email);
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await teardownTestDatabase(db);
|
||||
});
|
||||
|
||||
it("sends verify links through the injected EmDash auth route", async () => {
|
||||
await adapter.createUser({
|
||||
email: "author@example.com",
|
||||
name: "Author",
|
||||
role: Role.AUTHOR,
|
||||
emailVerified: true,
|
||||
});
|
||||
|
||||
await sendMagicLink(
|
||||
{
|
||||
baseUrl: "https://example.com",
|
||||
siteName: "Test Site",
|
||||
email: mockEmailSend,
|
||||
},
|
||||
adapter,
|
||||
"author@example.com",
|
||||
);
|
||||
|
||||
expect(mockEmailSend).toHaveBeenCalledOnce();
|
||||
expect(sentEmails[0]!.text).toContain(
|
||||
"https://example.com/_emdash/api/auth/magic-link/verify?token=",
|
||||
);
|
||||
});
|
||||
});
|
||||
188
packages/core/tests/unit/auth/mcp-discovery-post.test.ts
Normal file
188
packages/core/tests/unit/auth/mcp-discovery-post.test.ts
Normal file
@@ -0,0 +1,188 @@
|
||||
import { afterEach, beforeAll, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
|
||||
vi.mock("virtual:emdash/auth", () => ({ authenticate: vi.fn() }));
|
||||
vi.mock("virtual:emdash/config", () => ({ default: {} }));
|
||||
vi.mock("astro:middleware", () => ({
|
||||
defineMiddleware: (handler: unknown) => handler,
|
||||
}));
|
||||
vi.mock("@emdash-cms/auth", () => ({
|
||||
TOKEN_PREFIXES: {},
|
||||
generatePrefixedToken: vi.fn(),
|
||||
hashPrefixedToken: vi.fn(),
|
||||
VALID_SCOPES: [],
|
||||
validateScopes: vi.fn(),
|
||||
hasScope: vi.fn(() => false),
|
||||
computeS256Challenge: vi.fn(),
|
||||
Role: { ADMIN: 50 },
|
||||
}));
|
||||
vi.mock("@emdash-cms/auth/adapters/kysely", () => ({
|
||||
createKyselyAdapter: vi.fn(() => ({
|
||||
getUserById: vi.fn(async (id: string) => ({
|
||||
id,
|
||||
email: "admin@test.com",
|
||||
name: "Admin",
|
||||
role: 50,
|
||||
disabled: 0,
|
||||
})),
|
||||
getUserByEmail: vi.fn(),
|
||||
})),
|
||||
}));
|
||||
|
||||
type AuthMiddlewareModule = typeof import("../../../src/astro/middleware/auth.js");
|
||||
|
||||
let onRequest: AuthMiddlewareModule["onRequest"];
|
||||
|
||||
beforeAll(async () => {
|
||||
({ onRequest } = await import("../../../src/astro/middleware/auth.js"));
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
async function runAuthMiddleware(opts: {
|
||||
pathname: string;
|
||||
method?: string;
|
||||
headers?: HeadersInit;
|
||||
sessionUserId?: string | null;
|
||||
siteUrl?: string;
|
||||
}) {
|
||||
const url = new URL(opts.pathname, "https://example.com");
|
||||
const session = {
|
||||
get: vi.fn().mockResolvedValue(opts.sessionUserId ? { id: opts.sessionUserId } : null),
|
||||
set: vi.fn(),
|
||||
destroy: vi.fn(),
|
||||
};
|
||||
const next = vi.fn(async () => new Response("ok"));
|
||||
const response = await onRequest(
|
||||
{
|
||||
url,
|
||||
request: new Request(url, {
|
||||
method: opts.method ?? "POST",
|
||||
headers: opts.headers,
|
||||
body: JSON.stringify({
|
||||
jsonrpc: "2.0",
|
||||
id: 1,
|
||||
method: "initialize",
|
||||
params: {
|
||||
protocolVersion: "2025-03-26",
|
||||
capabilities: {},
|
||||
clientInfo: { name: "debug", version: "1.0" },
|
||||
},
|
||||
}),
|
||||
}),
|
||||
locals: {
|
||||
emdash: {
|
||||
db: {},
|
||||
config: opts.siteUrl ? { siteUrl: opts.siteUrl } : {},
|
||||
},
|
||||
},
|
||||
session,
|
||||
redirect: (location: string) =>
|
||||
new Response(null, {
|
||||
status: 302,
|
||||
headers: { Location: location },
|
||||
}),
|
||||
} as Parameters<AuthMiddlewareModule["onRequest"]>[0],
|
||||
next,
|
||||
);
|
||||
|
||||
return { response, next, session };
|
||||
}
|
||||
|
||||
describe("MCP discovery auth middleware", () => {
|
||||
it("returns 401 with discovery metadata for unauthenticated MCP POST requests", async () => {
|
||||
const { response, next } = await runAuthMiddleware({
|
||||
pathname: "/_emdash/api/mcp",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
});
|
||||
|
||||
expect(next).not.toHaveBeenCalled();
|
||||
expect(response.status).toBe(401);
|
||||
expect(response.headers.get("WWW-Authenticate")).toBe(
|
||||
'Bearer resource_metadata="https://example.com/.well-known/oauth-protected-resource"',
|
||||
);
|
||||
await expect(response.json()).resolves.toEqual({
|
||||
error: { code: "NOT_AUTHENTICATED", message: "Not authenticated" },
|
||||
});
|
||||
});
|
||||
|
||||
it("does not read the session for anonymous MCP POST discovery requests", async () => {
|
||||
const { response, next, session } = await runAuthMiddleware({
|
||||
pathname: "/_emdash/api/mcp",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
});
|
||||
|
||||
expect(next).not.toHaveBeenCalled();
|
||||
expect(response.status).toBe(401);
|
||||
expect(session.get).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("uses the configured public origin for anonymous MCP POST discovery responses", async () => {
|
||||
const { response, next } = await runAuthMiddleware({
|
||||
pathname: "/_emdash/api/mcp",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
siteUrl: "https://public.example.com",
|
||||
});
|
||||
|
||||
expect(next).not.toHaveBeenCalled();
|
||||
expect(response.status).toBe(401);
|
||||
expect(response.headers.get("WWW-Authenticate")).toBe(
|
||||
'Bearer resource_metadata="https://public.example.com/.well-known/oauth-protected-resource"',
|
||||
);
|
||||
});
|
||||
|
||||
it("returns 401 with discovery metadata for invalid bearer tokens on MCP POST", async () => {
|
||||
const { response, next } = await runAuthMiddleware({
|
||||
pathname: "/_emdash/api/mcp",
|
||||
headers: {
|
||||
Authorization: "Bearer invalid",
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
});
|
||||
|
||||
expect(next).not.toHaveBeenCalled();
|
||||
expect(response.status).toBe(401);
|
||||
expect(response.headers.get("WWW-Authenticate")).toBe(
|
||||
'Bearer resource_metadata="https://example.com/.well-known/oauth-protected-resource"',
|
||||
);
|
||||
await expect(response.json()).resolves.toEqual({
|
||||
error: { code: "INVALID_TOKEN", message: "Invalid or expired token" },
|
||||
});
|
||||
});
|
||||
|
||||
it("rejects MCP POST requests that only have session auth", async () => {
|
||||
const { response, next, session } = await runAuthMiddleware({
|
||||
pathname: "/_emdash/api/mcp",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
"X-EmDash-Request": "1",
|
||||
},
|
||||
sessionUserId: "user_1",
|
||||
});
|
||||
|
||||
expect(next).not.toHaveBeenCalled();
|
||||
expect(response.status).toBe(401);
|
||||
expect(session.get).not.toHaveBeenCalled();
|
||||
await expect(response.json()).resolves.toEqual({
|
||||
error: { code: "NOT_AUTHENTICATED", message: "Not authenticated" },
|
||||
});
|
||||
});
|
||||
|
||||
it("still rejects non-MCP API POST requests without the CSRF header", async () => {
|
||||
const { response, next } = await runAuthMiddleware({
|
||||
pathname: "/_emdash/api/content/posts",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
});
|
||||
|
||||
expect(next).not.toHaveBeenCalled();
|
||||
expect(response.status).toBe(403);
|
||||
await expect(response.json()).resolves.toEqual({
|
||||
error: { code: "CSRF_REJECTED", message: "Missing required header" },
|
||||
});
|
||||
});
|
||||
});
|
||||
131
packages/core/tests/unit/auth/oauth-register-route.test.ts
Normal file
131
packages/core/tests/unit/auth/oauth-register-route.test.ts
Normal file
@@ -0,0 +1,131 @@
|
||||
import type { Kysely } from "kysely";
|
||||
import { afterEach, beforeEach, describe, expect, it } from "vitest";
|
||||
|
||||
import { POST as registerClient } from "../../../src/astro/routes/api/oauth/register.js";
|
||||
import type { Database } from "../../../src/database/types.js";
|
||||
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
|
||||
|
||||
describe("oauth register route", () => {
|
||||
let db: Kysely<Database>;
|
||||
|
||||
beforeEach(async () => {
|
||||
db = await setupTestDatabase();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await teardownTestDatabase(db);
|
||||
});
|
||||
|
||||
it("returns RFC 7591-style errors for malformed JSON", async () => {
|
||||
const request = new Request("http://localhost:4321/_emdash/api/oauth/register", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: "{",
|
||||
});
|
||||
|
||||
const response = await registerClient({
|
||||
request,
|
||||
locals: {
|
||||
emdash: {
|
||||
db,
|
||||
config: {},
|
||||
},
|
||||
},
|
||||
} as Parameters<typeof registerClient>[0]);
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
await expect(response.json()).resolves.toEqual({
|
||||
error: "invalid_client_metadata",
|
||||
error_description: "Request body must be valid JSON",
|
||||
});
|
||||
});
|
||||
|
||||
it("rejects unsupported token endpoint auth methods", async () => {
|
||||
const request = new Request("http://localhost:4321/_emdash/api/oauth/register", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({
|
||||
redirect_uris: ["http://127.0.0.1:9999/callback"],
|
||||
token_endpoint_auth_method: "client_secret_basic",
|
||||
}),
|
||||
});
|
||||
|
||||
const response = await registerClient({
|
||||
request,
|
||||
locals: {
|
||||
emdash: {
|
||||
db,
|
||||
config: {},
|
||||
},
|
||||
},
|
||||
} as Parameters<typeof registerClient>[0]);
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
await expect(response.json()).resolves.toEqual({
|
||||
error: "invalid_client_metadata",
|
||||
error_description: "Only token_endpoint_auth_method=none is supported",
|
||||
});
|
||||
});
|
||||
|
||||
it("rejects redirect URIs that the authorize flow would later refuse", async () => {
|
||||
const request = new Request("http://localhost:4321/_emdash/api/oauth/register", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({
|
||||
redirect_uris: ["http://example.com/callback"],
|
||||
}),
|
||||
});
|
||||
|
||||
const response = await registerClient({
|
||||
request,
|
||||
locals: {
|
||||
emdash: {
|
||||
db,
|
||||
config: {},
|
||||
},
|
||||
},
|
||||
} as Parameters<typeof registerClient>[0]);
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
await expect(response.json()).resolves.toEqual({
|
||||
error: "invalid_client_metadata",
|
||||
error_description: "Invalid redirect URI: HTTP redirect URIs are only allowed for localhost",
|
||||
});
|
||||
});
|
||||
|
||||
it("registers public clients with loopback redirect URIs", async () => {
|
||||
const request = new Request("http://localhost:4321/_emdash/api/oauth/register", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({
|
||||
client_name: "Harness Test",
|
||||
redirect_uris: ["http://127.0.0.1:9999/callback"],
|
||||
token_endpoint_auth_method: "none",
|
||||
grant_types: ["authorization_code", "refresh_token"],
|
||||
response_types: ["code"],
|
||||
}),
|
||||
});
|
||||
|
||||
const response = await registerClient({
|
||||
request,
|
||||
locals: {
|
||||
emdash: {
|
||||
db,
|
||||
config: {},
|
||||
},
|
||||
},
|
||||
} as Parameters<typeof registerClient>[0]);
|
||||
|
||||
expect(response.status).toBe(201);
|
||||
expect(response.headers.get("Cache-Control")).toBe("no-store");
|
||||
expect(response.headers.get("Pragma")).toBe("no-cache");
|
||||
|
||||
const body = (await response.json()) as Record<string, unknown>;
|
||||
expect(body.client_name).toBe("Harness Test");
|
||||
expect(body.redirect_uris).toEqual(["http://127.0.0.1:9999/callback"]);
|
||||
expect(body.token_endpoint_auth_method).toBe("none");
|
||||
expect(body.grant_types).toEqual(["authorization_code", "refresh_token"]);
|
||||
expect(body.response_types).toEqual(["code"]);
|
||||
expect(typeof body.client_id).toBe("string");
|
||||
});
|
||||
});
|
||||
225
packages/core/tests/unit/auth/passkey-config.test.ts
Normal file
225
packages/core/tests/unit/auth/passkey-config.test.ts
Normal file
@@ -0,0 +1,225 @@
|
||||
import { describe, it, expect } from "vitest";
|
||||
|
||||
import { getPasskeyConfig } from "../../../src/auth/passkey-config.js";
|
||||
|
||||
/** URL shape from `new URL(request.url)` after trusted proxy + Astro `security.allowedDomains`. */
|
||||
function urlAfterTrustedProxy(path: string, host: string, proto: "http" | "https"): URL {
|
||||
return new URL(path, `${proto}://${host}`);
|
||||
}
|
||||
|
||||
describe("passkey-config", () => {
|
||||
describe("getPasskeyConfig() via emulated reverse proxy URL", () => {
|
||||
const internalDevUrl = "http://127.0.0.1:4321/_emdash/api/auth/passkey/register/options";
|
||||
|
||||
it("loopback URL alone matches Node before rewrite — rpId is not the public host", () => {
|
||||
const url = new URL(internalDevUrl);
|
||||
expect(getPasskeyConfig(url).rpId).toBe("127.0.0.1");
|
||||
});
|
||||
|
||||
it("emits a single-element origins array by default", () => {
|
||||
const url = new URL(internalDevUrl);
|
||||
expect(getPasskeyConfig(url).origins).toEqual(["http://127.0.0.1:4321"]);
|
||||
});
|
||||
|
||||
it("forwarded Host/Proto yield the URL handlers see; rp matches HTTP reverse-proxy edge", () => {
|
||||
const url = urlAfterTrustedProxy(
|
||||
"/_emdash/api/auth/passkey/register/options",
|
||||
"emdash.local:8080",
|
||||
"http",
|
||||
);
|
||||
const config = getPasskeyConfig(url, "My Site");
|
||||
expect(config.rpId).toBe("emdash.local");
|
||||
expect(config.rpName).toBe("My Site");
|
||||
expect(config.origins[0]).toBe("http://emdash.local:8080");
|
||||
});
|
||||
|
||||
it("HTTPS listener on proxy with HTTP upstream: siteUrl aligns origin with browser", () => {
|
||||
const urlAstroSeesFromForwardedHttp = urlAfterTrustedProxy(
|
||||
"/_emdash/api/setup/admin",
|
||||
"emdash.local:8080",
|
||||
"http",
|
||||
);
|
||||
const browserOrigin = "https://emdash.local:8443";
|
||||
const config = getPasskeyConfig(urlAstroSeesFromForwardedHttp, "My Site", browserOrigin);
|
||||
expect(config.rpId).toBe("emdash.local");
|
||||
expect(config.rpName).toBe("My Site");
|
||||
expect(config.origins[0]).toBe(browserOrigin);
|
||||
});
|
||||
});
|
||||
|
||||
describe("getPasskeyConfig()", () => {
|
||||
it("throws when siteUrl is not a valid URL", () => {
|
||||
const url = new URL("http://localhost:4321/admin");
|
||||
expect(() => getPasskeyConfig(url, "Site", "::not-a-url")).toThrow("Invalid siteUrl");
|
||||
});
|
||||
|
||||
it("extracts rpId from localhost URL", () => {
|
||||
const url = new URL("http://localhost:4321/admin");
|
||||
const config = getPasskeyConfig(url);
|
||||
|
||||
expect(config.rpId).toBe("localhost");
|
||||
});
|
||||
|
||||
it("extracts rpId from production URL", () => {
|
||||
const url = new URL("https://example.com/admin");
|
||||
const config = getPasskeyConfig(url);
|
||||
|
||||
expect(config.rpId).toBe("example.com");
|
||||
});
|
||||
|
||||
it("extracts rpId from subdomain URL", () => {
|
||||
const url = new URL("https://admin.example.com/dashboard");
|
||||
const config = getPasskeyConfig(url);
|
||||
|
||||
expect(config.rpId).toBe("admin.example.com");
|
||||
});
|
||||
|
||||
it("returns correct origin for http", () => {
|
||||
const url = new URL("http://localhost:4321/admin");
|
||||
const config = getPasskeyConfig(url);
|
||||
|
||||
expect(config.origins[0]).toBe("http://localhost:4321");
|
||||
});
|
||||
|
||||
it("returns correct origin for https", () => {
|
||||
const url = new URL("https://example.com/admin");
|
||||
const config = getPasskeyConfig(url);
|
||||
|
||||
expect(config.origins[0]).toBe("https://example.com");
|
||||
});
|
||||
|
||||
it("handles port numbers correctly", () => {
|
||||
const url = new URL("http://localhost:3000/setup");
|
||||
const config = getPasskeyConfig(url);
|
||||
|
||||
expect(config.rpId).toBe("localhost");
|
||||
expect(config.origins[0]).toBe("http://localhost:3000");
|
||||
});
|
||||
|
||||
it("handles https with non-standard port", () => {
|
||||
const url = new URL("https://staging.example.com:8443/admin");
|
||||
const config = getPasskeyConfig(url);
|
||||
|
||||
expect(config.rpId).toBe("staging.example.com");
|
||||
expect(config.origins[0]).toBe("https://staging.example.com:8443");
|
||||
});
|
||||
|
||||
it("uses hostname as rpName by default", () => {
|
||||
const url = new URL("https://example.com/admin");
|
||||
const config = getPasskeyConfig(url);
|
||||
|
||||
expect(config.rpName).toBe("example.com");
|
||||
});
|
||||
|
||||
it("uses provided siteName for rpName", () => {
|
||||
const url = new URL("https://example.com/admin");
|
||||
const config = getPasskeyConfig(url, "My Cool Site");
|
||||
|
||||
expect(config.rpName).toBe("My Cool Site");
|
||||
expect(config.rpId).toBe("example.com");
|
||||
});
|
||||
|
||||
it("ignores path and query params for origin", () => {
|
||||
const url = new URL("https://example.com:443/admin/setup?foo=bar#section");
|
||||
const config = getPasskeyConfig(url);
|
||||
|
||||
// Standard https port 443 is omitted from origin
|
||||
expect(config.origins[0]).toBe("https://example.com");
|
||||
expect(config.rpId).toBe("example.com");
|
||||
});
|
||||
|
||||
it("documents HTTPS reverse-proxy dev pitfall: server URL scheme must match the browser", () => {
|
||||
const serverDevUrl = new URL("http://emdash.local:8443/_emdash/api/setup/admin");
|
||||
const browserPageOrigin = new URL("https://emdash.local:8443/_emdash/admin/setup");
|
||||
|
||||
const fromServer = getPasskeyConfig(serverDevUrl);
|
||||
const fromBrowser = getPasskeyConfig(browserPageOrigin);
|
||||
|
||||
expect(fromServer.rpId).toBe(fromBrowser.rpId);
|
||||
expect(fromServer.origins[0]).toBe("http://emdash.local:8443");
|
||||
expect(fromBrowser.origins[0]).toBe("https://emdash.local:8443");
|
||||
// verifyRegistrationResponse requires clientData.origin === config.origins[0] (see @emdash-cms/auth/passkey)
|
||||
expect(fromServer.origins[0]).not.toBe(fromBrowser.origins[0]);
|
||||
});
|
||||
|
||||
it("siteUrl overrides origin and rpId (TLS termination and loopback request URL)", () => {
|
||||
const fromForwardedHttp = getPasskeyConfig(
|
||||
new URL("http://emdash.local:8443/_emdash/api/setup/admin"),
|
||||
"My Site",
|
||||
"https://emdash.local:8443",
|
||||
);
|
||||
expect(fromForwardedHttp.rpName).toBe("My Site");
|
||||
expect(fromForwardedHttp.rpId).toBe("emdash.local");
|
||||
expect(fromForwardedHttp.origins[0]).toBe("https://emdash.local:8443");
|
||||
|
||||
const fromLoopback = getPasskeyConfig(
|
||||
new URL("http://127.0.0.1:4321/_emdash/api/setup/admin"),
|
||||
"My CMS",
|
||||
"https://public.example:8443",
|
||||
);
|
||||
expect(fromLoopback.rpId).toBe("public.example");
|
||||
expect(fromLoopback.rpName).toBe("My CMS");
|
||||
expect(fromLoopback.origins[0]).toBe("https://public.example:8443");
|
||||
|
||||
const hostnameOnly = getPasskeyConfig(
|
||||
new URL("http://127.0.0.1:4321/x"),
|
||||
undefined,
|
||||
"https://public.example:8443",
|
||||
);
|
||||
expect(hostnameOnly.rpName).toBe("public.example");
|
||||
expect(hostnameOnly.rpId).toBe("public.example");
|
||||
});
|
||||
});
|
||||
|
||||
describe("getPasskeyConfig() multi-origin", () => {
|
||||
it("appends allowedOrigins after the canonical origin", () => {
|
||||
const url = new URL("http://localhost:4321/admin");
|
||||
const config = getPasskeyConfig(url, "Site", "https://example.com", [
|
||||
"https://preview.example.com",
|
||||
"https://staging.example.com",
|
||||
]);
|
||||
|
||||
expect(config.rpId).toBe("example.com");
|
||||
expect(config.origins).toEqual([
|
||||
"https://example.com",
|
||||
"https://preview.example.com",
|
||||
"https://staging.example.com",
|
||||
]);
|
||||
});
|
||||
|
||||
it("places the canonical origin first when no siteUrl is set", () => {
|
||||
const url = new URL("https://preview.example.com/admin");
|
||||
const config = getPasskeyConfig(url, undefined, undefined, ["https://example.com"]);
|
||||
|
||||
// rpId is preview.example.com (no siteUrl); allow apex as second origin
|
||||
expect(config.rpId).toBe("preview.example.com");
|
||||
expect(config.origins).toEqual(["https://preview.example.com", "https://example.com"]);
|
||||
});
|
||||
|
||||
it("dedupes if allowedOrigins repeats the canonical origin", () => {
|
||||
const url = new URL("http://localhost:4321/admin");
|
||||
const config = getPasskeyConfig(url, undefined, "https://example.com", [
|
||||
"https://example.com",
|
||||
"https://preview.example.com",
|
||||
]);
|
||||
|
||||
expect(config.origins).toEqual(["https://example.com", "https://preview.example.com"]);
|
||||
});
|
||||
|
||||
it("ignores empty/falsy entries in allowedOrigins", () => {
|
||||
const url = new URL("http://localhost:4321/admin");
|
||||
const config = getPasskeyConfig(url, undefined, "https://example.com", [
|
||||
"",
|
||||
"https://preview.example.com",
|
||||
]);
|
||||
|
||||
expect(config.origins).toEqual(["https://example.com", "https://preview.example.com"]);
|
||||
});
|
||||
|
||||
it("yields a single-element origins array when allowedOrigins is empty", () => {
|
||||
const url = new URL("http://localhost:4321/admin");
|
||||
const config = getPasskeyConfig(url, undefined, "https://example.com", []);
|
||||
expect(config.origins).toEqual(["https://example.com"]);
|
||||
});
|
||||
});
|
||||
});
|
||||
278
packages/core/tests/unit/auth/passkey-management.test.ts
Normal file
278
packages/core/tests/unit/auth/passkey-management.test.ts
Normal file
@@ -0,0 +1,278 @@
|
||||
import type { AuthAdapter, Credential, User } from "@emdash-cms/auth";
|
||||
import { Role } from "@emdash-cms/auth";
|
||||
import { createKyselyAdapter } from "@emdash-cms/auth/adapters/kysely";
|
||||
import type { Kysely } from "kysely";
|
||||
import { describe, it, expect, beforeEach, afterEach } from "vitest";
|
||||
|
||||
import type { Database } from "../../../src/database/types.js";
|
||||
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
|
||||
|
||||
describe("Passkey Management", () => {
|
||||
let db: Kysely<Database>;
|
||||
let adapter: AuthAdapter;
|
||||
let testUser: User;
|
||||
|
||||
beforeEach(async () => {
|
||||
db = await setupTestDatabase();
|
||||
adapter = createKyselyAdapter(db);
|
||||
|
||||
// Create a test user
|
||||
testUser = await adapter.createUser({
|
||||
email: "test@example.com",
|
||||
name: "Test User",
|
||||
role: Role.ADMIN,
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await teardownTestDatabase(db);
|
||||
});
|
||||
|
||||
// Helper to create a test credential
|
||||
async function createTestCredential(userId: string, name?: string): Promise<Credential> {
|
||||
const credentialId = `cred-${Date.now()}-${Math.random().toString(36).slice(2)}`;
|
||||
return adapter.createCredential({
|
||||
id: credentialId,
|
||||
userId,
|
||||
publicKey: new Uint8Array([1, 2, 3, 4]),
|
||||
counter: 0,
|
||||
deviceType: "multiDevice",
|
||||
backedUp: true,
|
||||
transports: ["internal"],
|
||||
name: name ?? null,
|
||||
});
|
||||
}
|
||||
|
||||
describe("getCredentialById", () => {
|
||||
it("should return credential by ID", async () => {
|
||||
const created = await createTestCredential(testUser.id, "My MacBook");
|
||||
|
||||
const credential = await adapter.getCredentialById(created.id);
|
||||
|
||||
expect(credential).not.toBeNull();
|
||||
expect(credential?.id).toBe(created.id);
|
||||
expect(credential?.userId).toBe(testUser.id);
|
||||
expect(credential?.name).toBe("My MacBook");
|
||||
expect(credential?.deviceType).toBe("multiDevice");
|
||||
expect(credential?.backedUp).toBe(true);
|
||||
});
|
||||
|
||||
it("should return null for non-existent credential", async () => {
|
||||
const credential = await adapter.getCredentialById("non-existent");
|
||||
expect(credential).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe("getCredentialsByUserId", () => {
|
||||
it("should return empty array for user with no passkeys", async () => {
|
||||
const credentials = await adapter.getCredentialsByUserId(testUser.id);
|
||||
expect(credentials).toEqual([]);
|
||||
});
|
||||
|
||||
it("should return all passkeys for a user", async () => {
|
||||
await createTestCredential(testUser.id, "MacBook Pro");
|
||||
await createTestCredential(testUser.id, "iPhone");
|
||||
await createTestCredential(testUser.id, null);
|
||||
|
||||
const credentials = await adapter.getCredentialsByUserId(testUser.id);
|
||||
|
||||
expect(credentials).toHaveLength(3);
|
||||
const names = credentials.map((c) => c.name);
|
||||
expect(names).toContain("MacBook Pro");
|
||||
expect(names).toContain("iPhone");
|
||||
expect(names).toContain(null);
|
||||
});
|
||||
|
||||
it("should not return passkeys from other users", async () => {
|
||||
const otherUser = await adapter.createUser({
|
||||
email: "other@example.com",
|
||||
name: "Other User",
|
||||
});
|
||||
|
||||
await createTestCredential(testUser.id, "Test User Passkey");
|
||||
await createTestCredential(otherUser.id, "Other User Passkey");
|
||||
|
||||
const testUserCreds = await adapter.getCredentialsByUserId(testUser.id);
|
||||
const otherUserCreds = await adapter.getCredentialsByUserId(otherUser.id);
|
||||
|
||||
expect(testUserCreds).toHaveLength(1);
|
||||
expect(testUserCreds[0].name).toBe("Test User Passkey");
|
||||
|
||||
expect(otherUserCreds).toHaveLength(1);
|
||||
expect(otherUserCreds[0].name).toBe("Other User Passkey");
|
||||
});
|
||||
});
|
||||
|
||||
describe("updateCredentialName", () => {
|
||||
it("should update the credential name", async () => {
|
||||
const credential = await createTestCredential(testUser.id, "Old Name");
|
||||
|
||||
await adapter.updateCredentialName(credential.id, "New Name");
|
||||
|
||||
const updated = await adapter.getCredentialById(credential.id);
|
||||
expect(updated?.name).toBe("New Name");
|
||||
});
|
||||
|
||||
it("should set name to null when provided null", async () => {
|
||||
const credential = await createTestCredential(testUser.id, "Has Name");
|
||||
|
||||
await adapter.updateCredentialName(credential.id, null);
|
||||
|
||||
const updated = await adapter.getCredentialById(credential.id);
|
||||
expect(updated?.name).toBeNull();
|
||||
});
|
||||
|
||||
it("should handle empty string as name", async () => {
|
||||
const credential = await createTestCredential(testUser.id, "Has Name");
|
||||
|
||||
await adapter.updateCredentialName(credential.id, "");
|
||||
|
||||
const updated = await adapter.getCredentialById(credential.id);
|
||||
expect(updated?.name).toBe("");
|
||||
});
|
||||
});
|
||||
|
||||
describe("countCredentialsByUserId", () => {
|
||||
it("should return 0 for user with no passkeys", async () => {
|
||||
const count = await adapter.countCredentialsByUserId(testUser.id);
|
||||
expect(count).toBe(0);
|
||||
});
|
||||
|
||||
it("should return correct count", async () => {
|
||||
await createTestCredential(testUser.id);
|
||||
await createTestCredential(testUser.id);
|
||||
await createTestCredential(testUser.id);
|
||||
|
||||
const count = await adapter.countCredentialsByUserId(testUser.id);
|
||||
expect(count).toBe(3);
|
||||
});
|
||||
|
||||
it("should only count credentials for the specified user", async () => {
|
||||
const otherUser = await adapter.createUser({
|
||||
email: "other@example.com",
|
||||
});
|
||||
|
||||
await createTestCredential(testUser.id);
|
||||
await createTestCredential(testUser.id);
|
||||
await createTestCredential(otherUser.id);
|
||||
|
||||
const testUserCount = await adapter.countCredentialsByUserId(testUser.id);
|
||||
const otherUserCount = await adapter.countCredentialsByUserId(otherUser.id);
|
||||
|
||||
expect(testUserCount).toBe(2);
|
||||
expect(otherUserCount).toBe(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe("deleteCredential", () => {
|
||||
it("should delete a credential", async () => {
|
||||
const credential = await createTestCredential(testUser.id);
|
||||
|
||||
await adapter.deleteCredential(credential.id);
|
||||
|
||||
const deleted = await adapter.getCredentialById(credential.id);
|
||||
expect(deleted).toBeNull();
|
||||
});
|
||||
|
||||
it("should not affect other credentials", async () => {
|
||||
await createTestCredential(testUser.id, "Keep This");
|
||||
const cred2 = await createTestCredential(testUser.id, "Delete This");
|
||||
|
||||
await adapter.deleteCredential(cred2.id);
|
||||
|
||||
const remaining = await adapter.getCredentialsByUserId(testUser.id);
|
||||
expect(remaining).toHaveLength(1);
|
||||
expect(remaining[0].name).toBe("Keep This");
|
||||
});
|
||||
});
|
||||
|
||||
describe("Passkey Management Flow", () => {
|
||||
it("should support full CRUD flow", async () => {
|
||||
// Create passkeys
|
||||
const passkey1 = await createTestCredential(testUser.id, "MacBook");
|
||||
const passkey2 = await createTestCredential(testUser.id, "iPhone");
|
||||
|
||||
// List passkeys
|
||||
let passkeys = await adapter.getCredentialsByUserId(testUser.id);
|
||||
expect(passkeys).toHaveLength(2);
|
||||
|
||||
// Rename a passkey
|
||||
await adapter.updateCredentialName(passkey1.id, "MacBook Pro M3");
|
||||
const renamed = await adapter.getCredentialById(passkey1.id);
|
||||
expect(renamed?.name).toBe("MacBook Pro M3");
|
||||
|
||||
// Delete a passkey (not the last one)
|
||||
const countBefore = await adapter.countCredentialsByUserId(testUser.id);
|
||||
expect(countBefore).toBe(2);
|
||||
|
||||
await adapter.deleteCredential(passkey2.id);
|
||||
|
||||
const countAfter = await adapter.countCredentialsByUserId(testUser.id);
|
||||
expect(countAfter).toBe(1);
|
||||
|
||||
// Verify only one remains
|
||||
passkeys = await adapter.getCredentialsByUserId(testUser.id);
|
||||
expect(passkeys).toHaveLength(1);
|
||||
expect(passkeys[0].name).toBe("MacBook Pro M3");
|
||||
});
|
||||
|
||||
it("should enforce 'cannot delete last passkey' in application logic", async () => {
|
||||
// Create a single passkey
|
||||
const passkey = await createTestCredential(testUser.id, "Only Passkey");
|
||||
|
||||
// Check count before deletion attempt
|
||||
const count = await adapter.countCredentialsByUserId(testUser.id);
|
||||
expect(count).toBe(1);
|
||||
|
||||
// Application should check count and prevent deletion
|
||||
// The adapter itself doesn't enforce this - it's the API layer's job
|
||||
if (count <= 1) {
|
||||
// Don't delete - this is what the API should do
|
||||
const stillExists = await adapter.getCredentialById(passkey.id);
|
||||
expect(stillExists).not.toBeNull();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe("Credential properties", () => {
|
||||
it("should preserve all credential properties", async () => {
|
||||
await adapter.createCredential({
|
||||
id: "test-cred-123",
|
||||
userId: testUser.id,
|
||||
publicKey: new Uint8Array([10, 20, 30, 40, 50]),
|
||||
counter: 5,
|
||||
deviceType: "singleDevice",
|
||||
backedUp: false,
|
||||
transports: ["usb", "nfc"],
|
||||
name: "YubiKey 5",
|
||||
});
|
||||
|
||||
const retrieved = await adapter.getCredentialById("test-cred-123");
|
||||
|
||||
expect(retrieved).not.toBeNull();
|
||||
expect(retrieved?.id).toBe("test-cred-123");
|
||||
expect(retrieved?.userId).toBe(testUser.id);
|
||||
expect(retrieved?.counter).toBe(5);
|
||||
expect(retrieved?.deviceType).toBe("singleDevice");
|
||||
expect(retrieved?.backedUp).toBe(false);
|
||||
expect(retrieved?.transports).toEqual(["usb", "nfc"]);
|
||||
expect(retrieved?.name).toBe("YubiKey 5");
|
||||
expect(retrieved?.createdAt).toBeInstanceOf(Date);
|
||||
expect(retrieved?.lastUsedAt).toBeInstanceOf(Date);
|
||||
});
|
||||
|
||||
it("should update lastUsedAt when counter is updated", async () => {
|
||||
const credential = await createTestCredential(testUser.id);
|
||||
const originalLastUsed = credential.lastUsedAt;
|
||||
|
||||
// Small delay to ensure time difference
|
||||
await new Promise((resolve) => setTimeout(resolve, 10));
|
||||
|
||||
await adapter.updateCredentialCounter(credential.id, 1);
|
||||
|
||||
const updated = await adapter.getCredentialById(credential.id);
|
||||
expect(updated?.counter).toBe(1);
|
||||
expect(updated?.lastUsedAt.getTime()).toBeGreaterThan(originalLastUsed.getTime());
|
||||
});
|
||||
});
|
||||
});
|
||||
58
packages/core/tests/unit/auth/passkey-verify-route.test.ts
Normal file
58
packages/core/tests/unit/auth/passkey-verify-route.test.ts
Normal file
@@ -0,0 +1,58 @@
|
||||
import type { Kysely } from "kysely";
|
||||
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
|
||||
|
||||
import { POST as verifyPasskey } from "../../../src/astro/routes/api/auth/passkey/verify.js";
|
||||
import type { Database } from "../../../src/database/types.js";
|
||||
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
|
||||
|
||||
describe("passkey verify route", () => {
|
||||
let db: Kysely<Database>;
|
||||
|
||||
beforeEach(async () => {
|
||||
db = await setupTestDatabase();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await teardownTestDatabase(db);
|
||||
});
|
||||
|
||||
it("returns unauthorized instead of internal server error when the credential is not registered", async () => {
|
||||
const request = new Request("http://localhost:4321/_emdash/api/auth/passkey/verify", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({
|
||||
credential: {
|
||||
id: "unregistered-credential",
|
||||
rawId: "unregistered-credential",
|
||||
type: "public-key",
|
||||
response: {
|
||||
clientDataJSON: "AA",
|
||||
authenticatorData: "AA",
|
||||
signature: "AA",
|
||||
},
|
||||
},
|
||||
}),
|
||||
});
|
||||
|
||||
const response = await verifyPasskey({
|
||||
request,
|
||||
locals: {
|
||||
emdash: {
|
||||
db,
|
||||
config: {},
|
||||
},
|
||||
},
|
||||
session: {
|
||||
set: vi.fn(),
|
||||
},
|
||||
} as Parameters<typeof verifyPasskey>[0]);
|
||||
|
||||
expect(response.status).toBe(401);
|
||||
await expect(response.json()).resolves.toEqual({
|
||||
error: {
|
||||
code: "UNAUTHORIZED",
|
||||
message: "Authentication failed",
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
114
packages/core/tests/unit/auth/scopes.test.ts
Normal file
114
packages/core/tests/unit/auth/scopes.test.ts
Normal file
@@ -0,0 +1,114 @@
|
||||
/**
|
||||
* Unit tests for scope enforcement.
|
||||
*
|
||||
* Tests the requireScope() guard that API routes and MCP tools use
|
||||
* to enforce token scope restrictions.
|
||||
*/
|
||||
|
||||
import { describe, it, expect } from "vitest";
|
||||
|
||||
import { requireScope } from "../../../src/auth/scopes.js";
|
||||
|
||||
describe("requireScope", () => {
|
||||
it("allows session auth (no tokenScopes) unconditionally", () => {
|
||||
const result = requireScope({}, "content:write");
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("allows session auth with undefined tokenScopes", () => {
|
||||
const result = requireScope({ tokenScopes: undefined }, "schema:write");
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("allows when token has the required scope", () => {
|
||||
const result = requireScope(
|
||||
{ tokenScopes: ["content:read", "content:write"] },
|
||||
"content:write",
|
||||
);
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("rejects when token lacks the required scope", () => {
|
||||
const result = requireScope({ tokenScopes: ["content:read"] }, "content:write");
|
||||
expect(result).toBeInstanceOf(Response);
|
||||
expect(result!.status).toBe(403);
|
||||
});
|
||||
|
||||
it("returns INSUFFICIENT_SCOPE error body", async () => {
|
||||
const result = requireScope({ tokenScopes: ["media:read"] }, "schema:write");
|
||||
expect(result).not.toBeNull();
|
||||
const body = (await result!.json()) as { error: { code: string; message: string } };
|
||||
expect(body.error.code).toBe("INSUFFICIENT_SCOPE");
|
||||
expect(body.error.message).toContain("schema:write");
|
||||
});
|
||||
|
||||
it("admin scope grants access to everything", () => {
|
||||
expect(requireScope({ tokenScopes: ["admin"] }, "content:read")).toBeNull();
|
||||
expect(requireScope({ tokenScopes: ["admin"] }, "content:write")).toBeNull();
|
||||
expect(requireScope({ tokenScopes: ["admin"] }, "schema:read")).toBeNull();
|
||||
expect(requireScope({ tokenScopes: ["admin"] }, "schema:write")).toBeNull();
|
||||
expect(requireScope({ tokenScopes: ["admin"] }, "media:read")).toBeNull();
|
||||
expect(requireScope({ tokenScopes: ["admin"] }, "media:write")).toBeNull();
|
||||
});
|
||||
|
||||
it("empty scopes array rejects everything", () => {
|
||||
expect(requireScope({ tokenScopes: [] }, "content:read")).toBeInstanceOf(Response);
|
||||
expect(requireScope({ tokenScopes: [] }, "admin")).toBeInstanceOf(Response);
|
||||
});
|
||||
|
||||
it("read scope does not grant write access", () => {
|
||||
expect(requireScope({ tokenScopes: ["content:read"] }, "content:write")).toBeInstanceOf(
|
||||
Response,
|
||||
);
|
||||
expect(requireScope({ tokenScopes: ["media:read"] }, "media:write")).toBeInstanceOf(Response);
|
||||
expect(requireScope({ tokenScopes: ["schema:read"] }, "schema:write")).toBeInstanceOf(Response);
|
||||
});
|
||||
|
||||
describe("backwards compatibility: content:write implicit grants", () => {
|
||||
// Before the menu/taxonomy mutation MCP tools were split out into
|
||||
// `menus:manage` and `taxonomies:manage`, the only scope checked for
|
||||
// those operations was `content:write`. Tokens issued before the
|
||||
// split must continue to work — `content:write` implicitly grants
|
||||
// `menus:manage` and `taxonomies:manage`.
|
||||
|
||||
it("content:write grants menus:manage", () => {
|
||||
expect(requireScope({ tokenScopes: ["content:write"] }, "menus:manage")).toBeNull();
|
||||
});
|
||||
|
||||
it("content:write grants taxonomies:manage", () => {
|
||||
expect(requireScope({ tokenScopes: ["content:write"] }, "taxonomies:manage")).toBeNull();
|
||||
});
|
||||
|
||||
it("content:read does NOT grant menus:manage (read-only doesn't escalate)", () => {
|
||||
expect(requireScope({ tokenScopes: ["content:read"] }, "menus:manage")).toBeInstanceOf(
|
||||
Response,
|
||||
);
|
||||
});
|
||||
|
||||
it("menus:manage alone allows menu operations", () => {
|
||||
expect(requireScope({ tokenScopes: ["menus:manage"] }, "menus:manage")).toBeNull();
|
||||
});
|
||||
|
||||
it("menus:manage does not grant content:write (no reverse implication)", () => {
|
||||
expect(requireScope({ tokenScopes: ["menus:manage"] }, "content:write")).toBeInstanceOf(
|
||||
Response,
|
||||
);
|
||||
});
|
||||
|
||||
it("taxonomies:manage alone allows taxonomy operations", () => {
|
||||
expect(requireScope({ tokenScopes: ["taxonomies:manage"] }, "taxonomies:manage")).toBeNull();
|
||||
});
|
||||
|
||||
it("prototype-chain keys do not crash or grant access", () => {
|
||||
// Defense in depth: the implicit-grants table is a Map, but a
|
||||
// regression to a plain-object lookup would let Object.prototype
|
||||
// keys (`__proto__`, `constructor`, `toString`) walk the chain
|
||||
// and either crash with "x.includes is not a function" or
|
||||
// accidentally satisfy the check. Either is a 500 instead of a
|
||||
// 403. Verify both paths reject cleanly.
|
||||
for (const key of ["__proto__", "constructor", "toString", "hasOwnProperty"]) {
|
||||
expect(requireScope({ tokenScopes: [key] }, "menus:manage")).toBeInstanceOf(Response);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
465
packages/core/tests/unit/auth/signup.test.ts
Normal file
465
packages/core/tests/unit/auth/signup.test.ts
Normal file
@@ -0,0 +1,465 @@
|
||||
import type { AuthAdapter, EmailSendFn } from "@emdash-cms/auth";
|
||||
import type { EmailMessage } from "@emdash-cms/auth";
|
||||
import {
|
||||
Role,
|
||||
canSignup,
|
||||
requestSignup,
|
||||
validateSignupToken,
|
||||
completeSignup,
|
||||
SignupError,
|
||||
} from "@emdash-cms/auth";
|
||||
import { createKyselyAdapter } from "@emdash-cms/auth/adapters/kysely";
|
||||
import type { Kysely } from "kysely";
|
||||
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
|
||||
|
||||
import type { Database } from "../../../src/database/types.js";
|
||||
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
|
||||
|
||||
// Regex patterns for token validation.
// TOKEN_PARAM_REGEX only checks that a `token=` query parameter is present;
// TOKEN_EXTRACT_REGEX captures the base64url token value itself.
const TOKEN_PARAM_REGEX = /token=/;
const TOKEN_EXTRACT_REGEX = /token=([a-zA-Z0-9_-]+)/;

// End-to-end unit tests for the self-signup flow:
//   canSignup            -> domain allow-list check
//   requestSignup        -> sends a verification email with a one-time token
//   validateSignupToken  -> resolves a token back to email + role
//   completeSignup       -> creates the user and consumes the token
// Email delivery is mocked; tokens are captured out of the email body text.
describe("Self-Signup", () => {
  let db: Kysely<Database>;
  let adapter: AuthAdapter;

  // Fresh database + adapter per test so no state leaks between cases.
  beforeEach(async () => {
    db = await setupTestDatabase();
    adapter = createKyselyAdapter(db);
  });

  afterEach(async () => {
    await teardownTestDatabase(db);
  });

  describe("canSignup", () => {
    it("should return null for email with no allowed domain", async () => {
      const result = await canSignup(adapter, "user@notallowed.com");
      expect(result).toBeNull();
    });

    it("should return null for email with disabled domain", async () => {
      // Create a disabled domain: created, then toggled off.
      await adapter.createAllowedDomain("disabled.com", Role.AUTHOR);
      await adapter.updateAllowedDomain("disabled.com", false);

      const result = await canSignup(adapter, "user@disabled.com");
      expect(result).toBeNull();
    });

    it("should return allowed:true and role for email with allowed domain", async () => {
      await adapter.createAllowedDomain("allowed.com", Role.AUTHOR);

      const result = await canSignup(adapter, "user@allowed.com");

      expect(result).not.toBeNull();
      expect(result?.allowed).toBe(true);
      expect(result?.role).toBe(Role.AUTHOR);
    });

    it("should return correct role for each domain", async () => {
      // Each allow-listed domain carries its own default role.
      await adapter.createAllowedDomain("authors.com", Role.AUTHOR);
      await adapter.createAllowedDomain("editors.com", Role.EDITOR);
      await adapter.createAllowedDomain("contributors.com", Role.CONTRIBUTOR);

      const author = await canSignup(adapter, "user@authors.com");
      const editor = await canSignup(adapter, "user@editors.com");
      const contributor = await canSignup(adapter, "user@contributors.com");

      expect(author?.role).toBe(Role.AUTHOR);
      expect(editor?.role).toBe(Role.EDITOR);
      expect(contributor?.role).toBe(Role.CONTRIBUTOR);
    });

    it("should be case-insensitive for email domains", async () => {
      await adapter.createAllowedDomain("example.com", Role.AUTHOR);

      const result = await canSignup(adapter, "User@EXAMPLE.COM");
      expect(result).not.toBeNull();
    });

    it("should return null for invalid email format", async () => {
      const result = await canSignup(adapter, "not-an-email");
      expect(result).toBeNull();
    });
  });

  describe("requestSignup", () => {
    let mockEmailSend: EmailSendFn & ReturnType<typeof vi.fn>;
    // Every message the mock "sends" is recorded here for inspection.
    let sentEmails: Array<EmailMessage>;

    beforeEach(() => {
      sentEmails = [];
      mockEmailSend = vi.fn(async (email: EmailMessage) => {
        sentEmails.push(email);
      });
    });

    it("should send verification email for allowed domain", async () => {
      await adapter.createAllowedDomain("allowed.com", Role.AUTHOR);

      await requestSignup(
        {
          baseUrl: "https://example.com",
          email: mockEmailSend,
          siteName: "Test Site",
        },
        adapter,
        "newuser@allowed.com",
      );

      // Exactly one email, addressed to the requester, branded with the
      // site name, containing the verification URL built from baseUrl.
      expect(mockEmailSend).toHaveBeenCalledTimes(1);
      expect(sentEmails[0]!.to).toBe("newuser@allowed.com");
      expect(sentEmails[0]!.subject).toContain("Test Site");
      expect(sentEmails[0]!.text).toContain(
        "https://example.com/_emdash/api/auth/signup/verify?token=",
      );
      expect(sentEmails[0]!.text).toContain("verify");
    });

    it("should fail silently for disallowed domain (no email sent)", async () => {
      // No allowed domain exists; requestSignup resolves without error and
      // without sending anything (presumably to avoid leaking which
      // domains/accounts exist — confirm against implementation docs).
      await requestSignup(
        {
          baseUrl: "https://example.com",
          email: mockEmailSend,
          siteName: "Test Site",
        },
        adapter,
        "user@notallowed.com",
      );

      expect(mockEmailSend).not.toHaveBeenCalled();
    });

    it("should fail silently if user already exists (no email sent)", async () => {
      await adapter.createAllowedDomain("allowed.com", Role.AUTHOR);

      // Create existing user with the same address before requesting signup.
      await adapter.createUser({
        email: "existing@allowed.com",
        name: "Existing User",
      });

      await requestSignup(
        {
          baseUrl: "https://example.com",
          email: mockEmailSend,
          siteName: "Test Site",
        },
        adapter,
        "existing@allowed.com",
      );

      expect(mockEmailSend).not.toHaveBeenCalled();
    });

    it("should create a token in the database", async () => {
      await adapter.createAllowedDomain("allowed.com", Role.EDITOR);

      await requestSignup(
        {
          baseUrl: "https://example.com",
          email: mockEmailSend,
          siteName: "Test Site",
        },
        adapter,
        "newuser@allowed.com",
      );

      // The email should contain a verification link with a token.
      // NOTE(review): this only proves a token appears in the email text,
      // not that a row was written — the DB assertion is indirect.
      expect(sentEmails[0]!.text).toMatch(TOKEN_PARAM_REGEX);
    });
  });

  describe("validateSignupToken", () => {
    let mockEmailSend: EmailSendFn & ReturnType<typeof vi.fn>;
    // The raw token pulled out of the most recent verification email,
    // or null if no email carried one.
    let capturedToken: string | null;

    beforeEach(() => {
      capturedToken = null;
      mockEmailSend = vi.fn(async (email: EmailMessage) => {
        // Extract token from email text
        const match = email.text.match(TOKEN_EXTRACT_REGEX);
        capturedToken = match ? (match[1] ?? null) : null;
      });
    });

    it("should validate a valid token and return email/role", async () => {
      await adapter.createAllowedDomain("allowed.com", Role.AUTHOR);

      await requestSignup(
        {
          baseUrl: "https://example.com",
          email: mockEmailSend,
          siteName: "Test Site",
        },
        adapter,
        "newuser@allowed.com",
      );

      expect(capturedToken).not.toBeNull();

      const result = await validateSignupToken(adapter, capturedToken!);

      expect(result.email).toBe("newuser@allowed.com");
      expect(result.role).toBe(Role.AUTHOR);
    });

    it("should throw invalid_token for non-existent token", async () => {
      // Use a properly formatted but non-existent token (base64url encoded)
      const fakeToken = "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXo"; // base64url of "abcdefghijklmnopqrstuvwxyz"

      try {
        await validateSignupToken(adapter, fakeToken);
        expect.fail("Should have thrown");
      } catch (error) {
        expect(error).toBeInstanceOf(SignupError);
        expect((error as SignupError).code).toBe("invalid_token");
      }
    });

    it("should throw token_expired for expired token", async () => {
      await adapter.createAllowedDomain("allowed.com", Role.AUTHOR);

      await requestSignup(
        {
          baseUrl: "https://example.com",
          email: mockEmailSend,
          siteName: "Test Site",
        },
        adapter,
        "newuser@allowed.com",
      );

      expect(capturedToken).not.toBeNull();

      // Manually expire the token by updating it in the database
      // We need to find the token hash and update its expiry
      // Since we can't easily do this, we'll test the error path differently
      // by creating a token directly with an expired date

      // First, validate and get the hash
      const result = await validateSignupToken(adapter, capturedToken!);
      expect(result.email).toBe("newuser@allowed.com");

      // For expiry testing, we'd need direct DB access to set expiry in the past
      // This is tested implicitly by the token creation with short expiry
      // NOTE(review): despite its title, this test never asserts the
      // token_expired error path — it only validates a fresh token.
      // Consider renaming it or backdating expires_at via `db` (which IS
      // in scope here) so the expiry branch is actually exercised.
    });
  });

  describe("completeSignup", () => {
    let mockEmailSend: EmailSendFn & ReturnType<typeof vi.fn>;
    // Token captured from the verification email, as above.
    let capturedToken: string | null;

    beforeEach(() => {
      capturedToken = null;
      mockEmailSend = vi.fn(async (email: EmailMessage) => {
        const match = email.text.match(TOKEN_EXTRACT_REGEX);
        capturedToken = match ? (match[1] ?? null) : null;
      });
    });

    it("should create user with correct email and role", async () => {
      await adapter.createAllowedDomain("allowed.com", Role.AUTHOR);

      await requestSignup(
        {
          baseUrl: "https://example.com",
          email: mockEmailSend,
          siteName: "Test Site",
        },
        adapter,
        "newuser@allowed.com",
      );

      const user = await completeSignup(adapter, capturedToken!, {
        name: "New User",
      });

      // Email comes from the token, role from the domain's allow-list
      // entry, and the address is considered verified by construction.
      expect(user.email).toBe("newuser@allowed.com");
      expect(user.name).toBe("New User");
      expect(user.role).toBe(Role.AUTHOR);
      expect(user.emailVerified).toBe(true);
    });

    it("should throw user_exists if user created during signup flow (race condition)", async () => {
      await adapter.createAllowedDomain("allowed.com", Role.AUTHOR);

      await requestSignup(
        {
          baseUrl: "https://example.com",
          email: mockEmailSend,
          siteName: "Test Site",
        },
        adapter,
        "newuser@allowed.com",
      );

      // Simulate race condition - create user before completing signup
      await adapter.createUser({
        email: "newuser@allowed.com",
        name: "Created During Race",
      });

      // Try to complete signup - should fail with user_exists
      try {
        await completeSignup(adapter, capturedToken!, { name: "New User" });
        expect.fail("Should have thrown");
      } catch (error) {
        expect(error).toBeInstanceOf(SignupError);
        expect((error as SignupError).code).toBe("user_exists");
      }
    });

    it("should throw invalid_token for non-existent token", async () => {
      // Use a properly formatted but non-existent token (base64url encoded)
      const fakeToken = "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXo"; // base64url of "abcdefghijklmnopqrstuvwxyz"

      try {
        await completeSignup(adapter, fakeToken, { name: "User" });
        expect.fail("Should have thrown");
      } catch (error) {
        expect(error).toBeInstanceOf(SignupError);
        expect((error as SignupError).code).toBe("invalid_token");
      }
    });

    it("should delete token after successful signup (single-use)", async () => {
      await adapter.createAllowedDomain("allowed.com", Role.AUTHOR);

      await requestSignup(
        {
          baseUrl: "https://example.com",
          email: mockEmailSend,
          siteName: "Test Site",
        },
        adapter,
        "newuser@allowed.com",
      );

      // First completion should succeed
      await completeSignup(adapter, capturedToken!, { name: "New User" });

      // Second attempt should fail - token is deleted
      await expect(
        completeSignup(adapter, capturedToken!, { name: "Another User" }),
      ).rejects.toThrow(SignupError);
    });

    it("should allow optional name and avatarUrl", async () => {
      await adapter.createAllowedDomain("allowed.com", Role.AUTHOR);

      await requestSignup(
        {
          baseUrl: "https://example.com",
          email: mockEmailSend,
          siteName: "Test Site",
        },
        adapter,
        "noname@allowed.com",
      );

      // Empty profile object: name is omitted and stored as null.
      const user = await completeSignup(adapter, capturedToken!, {});

      expect(user.email).toBe("noname@allowed.com");
      expect(user.name).toBeNull();
    });

    it("should set emailVerified to true", async () => {
      await adapter.createAllowedDomain("allowed.com", Role.AUTHOR);

      await requestSignup(
        {
          baseUrl: "https://example.com",
          email: mockEmailSend,
          siteName: "Test Site",
        },
        adapter,
        "verified@allowed.com",
      );

      const user = await completeSignup(adapter, capturedToken!, {
        name: "Verified User",
      });

      expect(user.emailVerified).toBe(true);
    });
  });

  describe("Integration: Full Signup Flow", () => {
    let mockEmailSend: EmailSendFn & ReturnType<typeof vi.fn>;
    let capturedToken: string | null;

    beforeEach(() => {
      capturedToken = null;
      mockEmailSend = vi.fn(async (email: EmailMessage) => {
        const match = email.text.match(TOKEN_EXTRACT_REGEX);
        capturedToken = match ? (match[1] ?? null) : null;
      });
    });

    it("should complete full signup flow for allowed domain", async () => {
      // 1. Admin adds allowed domain
      await adapter.createAllowedDomain("company.com", Role.EDITOR);

      // 2. Check if signup is allowed
      const check = await canSignup(adapter, "employee@company.com");
      expect(check?.allowed).toBe(true);
      expect(check?.role).toBe(Role.EDITOR);

      // 3. Request signup
      await requestSignup(
        {
          baseUrl: "https://example.com",
          email: mockEmailSend,
          siteName: "Company CMS",
        },
        adapter,
        "employee@company.com",
      );
      expect(capturedToken).not.toBeNull();

      // 4. Validate token (simulating email link click)
      const validation = await validateSignupToken(adapter, capturedToken!);
      expect(validation.email).toBe("employee@company.com");
      expect(validation.role).toBe(Role.EDITOR);

      // 5. Complete signup
      const user = await completeSignup(adapter, capturedToken!, {
        name: "New Employee",
      });

      expect(user.email).toBe("employee@company.com");
      expect(user.name).toBe("New Employee");
      expect(user.role).toBe(Role.EDITOR);
      expect(user.emailVerified).toBe(true);

      // 6. Verify user exists in database
      const fetchedUser = await adapter.getUserByEmail("employee@company.com");
      expect(fetchedUser).not.toBeNull();
      expect(fetchedUser?.id).toBe(user.id);
    });

    it("should prevent signup for disabled domain", async () => {
      // Add domain then disable it
      await adapter.createAllowedDomain("company.com", Role.AUTHOR);
      await adapter.updateAllowedDomain("company.com", false);

      // Check - should not be allowed
      const check = await canSignup(adapter, "user@company.com");
      expect(check).toBeNull();

      // Request signup - should fail silently (no email)
      await requestSignup(
        {
          baseUrl: "https://example.com",
          email: mockEmailSend,
          siteName: "Test",
        },
        adapter,
        "user@company.com",
      );
      expect(mockEmailSend).not.toHaveBeenCalled();
    });
  });
});
|
||||
97
packages/core/tests/unit/auth/trusted-proxy.test.ts
Normal file
97
packages/core/tests/unit/auth/trusted-proxy.test.ts
Normal file
@@ -0,0 +1,97 @@
|
||||
/**
|
||||
* Tests for getTrustedProxyHeaders — resolves the list of trusted client-IP
|
||||
* headers from config, falling back to the EMDASH_TRUSTED_PROXY_HEADERS env
|
||||
* var, then to an empty array.
|
||||
*
|
||||
* The helper lets operators declare which headers they trust when running
|
||||
* behind a reverse proxy. On Cloudflare the `cf` object is used instead and
|
||||
* this list is usually empty.
|
||||
*/
|
||||
|
||||
import { afterEach, beforeEach, describe, expect, it } from "vitest";
|
||||
|
||||
import {
|
||||
_resetTrustedProxyHeadersCache,
|
||||
getTrustedProxyHeaders,
|
||||
} from "../../../src/auth/trusted-proxy.js";
|
||||
|
||||
describe("getTrustedProxyHeaders", () => {
|
||||
const ORIGINAL_ENV = process.env.EMDASH_TRUSTED_PROXY_HEADERS;
|
||||
|
||||
beforeEach(() => {
|
||||
_resetTrustedProxyHeadersCache();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
if (ORIGINAL_ENV === undefined) {
|
||||
delete process.env.EMDASH_TRUSTED_PROXY_HEADERS;
|
||||
} else {
|
||||
process.env.EMDASH_TRUSTED_PROXY_HEADERS = ORIGINAL_ENV;
|
||||
}
|
||||
_resetTrustedProxyHeadersCache();
|
||||
});
|
||||
|
||||
it("returns config value when set", () => {
|
||||
expect(getTrustedProxyHeaders({ trustedProxyHeaders: ["x-real-ip"] })).toEqual(["x-real-ip"]);
|
||||
});
|
||||
|
||||
it("prefers config over env", () => {
|
||||
process.env.EMDASH_TRUSTED_PROXY_HEADERS = "fly-client-ip";
|
||||
expect(getTrustedProxyHeaders({ trustedProxyHeaders: ["x-real-ip"] })).toEqual(["x-real-ip"]);
|
||||
});
|
||||
|
||||
it("falls back to env when config is absent", () => {
|
||||
process.env.EMDASH_TRUSTED_PROXY_HEADERS = "x-real-ip,fly-client-ip";
|
||||
expect(getTrustedProxyHeaders(undefined)).toEqual(["x-real-ip", "fly-client-ip"]);
|
||||
});
|
||||
|
||||
it("trims whitespace and drops empty entries from env", () => {
|
||||
process.env.EMDASH_TRUSTED_PROXY_HEADERS = " x-real-ip , , fly-client-ip ";
|
||||
expect(getTrustedProxyHeaders(undefined)).toEqual(["x-real-ip", "fly-client-ip"]);
|
||||
});
|
||||
|
||||
it("lowercases header names for consistent matching", () => {
|
||||
// Header lookups go through Headers.get() which is case-insensitive,
|
||||
// so we normalise the list here to avoid double-normalising elsewhere.
|
||||
expect(getTrustedProxyHeaders({ trustedProxyHeaders: ["X-Real-IP", "Fly-Client-IP"] })).toEqual(
|
||||
["x-real-ip", "fly-client-ip"],
|
||||
);
|
||||
});
|
||||
|
||||
it("returns empty array when neither config nor env is set", () => {
|
||||
delete process.env.EMDASH_TRUSTED_PROXY_HEADERS;
|
||||
expect(getTrustedProxyHeaders(undefined)).toEqual([]);
|
||||
});
|
||||
|
||||
it("returns empty array when config has empty list", () => {
|
||||
process.env.EMDASH_TRUSTED_PROXY_HEADERS = "x-real-ip";
|
||||
// An explicit empty array means "trust nothing" — do not fall through
|
||||
// to the env. Operators use this to override an inherited env value.
|
||||
expect(getTrustedProxyHeaders({ trustedProxyHeaders: [] })).toEqual([]);
|
||||
});
|
||||
|
||||
// Header names must be valid RFC 7230 tokens; passing anything else into
|
||||
// `Headers.get()` throws. Drop invalid entries silently rather than
|
||||
// taking down every rate-limited endpoint with a 500.
|
||||
it("drops invalid header names from config", () => {
|
||||
expect(
|
||||
getTrustedProxyHeaders({
|
||||
trustedProxyHeaders: ["x-real-ip", "", "invalid name", "bad:colon", "ok-name"],
|
||||
}),
|
||||
).toEqual(["x-real-ip", "ok-name"]);
|
||||
});
|
||||
|
||||
it("drops invalid header names from env", () => {
|
||||
process.env.EMDASH_TRUSTED_PROXY_HEADERS = "x-real-ip, x y z , bad:one, ok-name";
|
||||
expect(getTrustedProxyHeaders(undefined)).toEqual(["x-real-ip", "ok-name"]);
|
||||
});
|
||||
|
||||
it("trims whitespace from config entries before matching", () => {
|
||||
// Common typo: `"x-real-ip "` (trailing space). Previously the raw
|
||||
// value was lowercased but not trimmed, so validation silently
|
||||
// dropped it and per-IP bucketing was disabled.
|
||||
expect(
|
||||
getTrustedProxyHeaders({ trustedProxyHeaders: [" x-real-ip ", "fly-client-ip"] }),
|
||||
).toEqual(["x-real-ip", "fly-client-ip"]);
|
||||
});
|
||||
});
|
||||
Reference in New Issue
Block a user