Initial commit: New MoreminiMore website with fresh design
This commit is contained in:
2
node_modules/@astrojs/db/dist/_internal/core/integration/error-map.d.ts
generated
vendored
Normal file
2
node_modules/@astrojs/db/dist/_internal/core/integration/error-map.d.ts
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
import type { $ZodErrorMap } from 'zod/v4/core';
|
||||
export declare const errorMap: $ZodErrorMap;
|
||||
1444
node_modules/@astrojs/db/dist/_internal/core/schemas.d.ts
generated
vendored
Normal file
1444
node_modules/@astrojs/db/dist/_internal/core/schemas.d.ts
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
60
node_modules/@astrojs/db/dist/_internal/core/types.d.ts
generated
vendored
Normal file
60
node_modules/@astrojs/db/dist/_internal/core/types.d.ts
generated
vendored
Normal file
@@ -0,0 +1,60 @@
|
||||
import type * as z from 'zod/v4';
|
||||
import type { booleanColumnSchema, columnSchema, columnsSchema, dateColumnSchema, dbConfigSchema, indexSchema, jsonColumnSchema, MaybeArray, numberColumnOptsSchema, numberColumnSchema, referenceableColumnSchema, resolvedIndexSchema, tableSchema, textColumnOptsSchema, textColumnSchema } from './schemas.js';
|
||||
export type ResolvedIndexes = z.output<typeof dbConfigSchema>['tables'][string]['indexes'];
|
||||
export type BooleanColumn = z.infer<typeof booleanColumnSchema>;
|
||||
export type BooleanColumnInput = z.input<typeof booleanColumnSchema>;
|
||||
export type NumberColumn = z.infer<typeof numberColumnSchema>;
|
||||
export type NumberColumnInput = z.input<typeof numberColumnSchema>;
|
||||
export type TextColumn = z.infer<typeof textColumnSchema>;
|
||||
export type TextColumnInput = z.input<typeof textColumnSchema>;
|
||||
export type DateColumn = z.infer<typeof dateColumnSchema>;
|
||||
export type DateColumnInput = z.input<typeof dateColumnSchema>;
|
||||
export type JsonColumn = z.infer<typeof jsonColumnSchema>;
|
||||
export type JsonColumnInput = z.input<typeof jsonColumnSchema>;
|
||||
export type ColumnType = BooleanColumn['type'] | NumberColumn['type'] | TextColumn['type'] | DateColumn['type'] | JsonColumn['type'];
|
||||
export type DBColumn = z.infer<typeof columnSchema>;
|
||||
export type DBColumnInput = DateColumnInput | BooleanColumnInput | NumberColumnInput | TextColumnInput | JsonColumnInput;
|
||||
export type DBColumns = z.infer<typeof columnsSchema>;
|
||||
export type DBTable = z.infer<typeof tableSchema>;
|
||||
export type DBTables = Record<string, DBTable>;
|
||||
export type ResolvedDBTables = z.output<typeof dbConfigSchema>['tables'];
|
||||
export type ResolvedDBTable = z.output<typeof dbConfigSchema>['tables'][string];
|
||||
export type DBSnapshot = {
|
||||
schema: Record<string, ResolvedDBTable>;
|
||||
version: string;
|
||||
};
|
||||
export type DBConfigInput = z.input<typeof dbConfigSchema>;
|
||||
export type DBConfig = z.infer<typeof dbConfigSchema>;
|
||||
export type ColumnsConfig = z.input<typeof tableSchema>['columns'];
|
||||
export type OutputColumnsConfig = z.output<typeof tableSchema>['columns'];
|
||||
export interface TableConfig<TColumns extends ColumnsConfig = ColumnsConfig> extends Pick<z.input<typeof tableSchema>, 'columns' | 'indexes' | 'foreignKeys'> {
|
||||
columns: TColumns;
|
||||
foreignKeys?: Array<{
|
||||
columns: MaybeArray<Extract<keyof TColumns, string>>;
|
||||
references: () => MaybeArray<z.input<typeof referenceableColumnSchema>>;
|
||||
}>;
|
||||
indexes?: Array<IndexConfig<TColumns>> | Record<string, LegacyIndexConfig<TColumns>>;
|
||||
deprecated?: boolean;
|
||||
}
|
||||
interface IndexConfig<TColumns extends ColumnsConfig> extends z.input<typeof indexSchema> {
|
||||
on: MaybeArray<Extract<keyof TColumns, string>>;
|
||||
}
|
||||
/** @deprecated */
|
||||
interface LegacyIndexConfig<TColumns extends ColumnsConfig> extends z.input<typeof resolvedIndexSchema> {
|
||||
on: MaybeArray<Extract<keyof TColumns, string>>;
|
||||
}
|
||||
export type NumberColumnOpts = z.input<typeof numberColumnOptsSchema>;
|
||||
export type TextColumnOpts = z.input<typeof textColumnOptsSchema>;
|
||||
declare global {
|
||||
namespace Astro {
|
||||
interface IntegrationHooks {
|
||||
'astro:db:setup'?: (options: {
|
||||
extendDb: (options: {
|
||||
configEntrypoint?: URL | string;
|
||||
seedEntrypoint?: URL | string;
|
||||
}) => void;
|
||||
}) => void | Promise<void>;
|
||||
}
|
||||
}
|
||||
}
|
||||
export {};
|
||||
19
node_modules/@astrojs/db/dist/_internal/core/utils.d.ts
generated
vendored
Normal file
19
node_modules/@astrojs/db/dist/_internal/core/utils.d.ts
generated
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
import type { AstroConfig, AstroIntegration } from 'astro';
|
||||
import type { Arguments } from 'yargs-parser';
|
||||
import './types.js';
|
||||
export type VitePlugin = Required<AstroConfig['vite']>['plugins'][number];
|
||||
export declare function getAstroEnv(envMode?: string): Record<`ASTRO_${string}`, string>;
|
||||
export type RemoteDatabaseInfo = {
|
||||
url: string;
|
||||
token: string;
|
||||
};
|
||||
export declare function getRemoteDatabaseInfo(): RemoteDatabaseInfo;
|
||||
export declare function resolveDbAppToken(flags: Arguments, envToken: string): string;
|
||||
export declare function resolveDbAppToken(flags: Arguments, envToken: string | undefined): string | undefined;
|
||||
export declare function getDbDirectoryUrl(root: URL | string): URL;
|
||||
export declare function defineDbIntegration(integration: AstroIntegration): AstroIntegration;
|
||||
/**
|
||||
* Map an object's values to a new set of values
|
||||
* while preserving types.
|
||||
*/
|
||||
export declare function mapObject<T, U = T>(item: Record<string, T>, callback: (key: string, value: T) => U): Record<string, U>;
|
||||
92
node_modules/@astrojs/db/dist/_internal/runtime/types.d.ts
generated
vendored
Normal file
92
node_modules/@astrojs/db/dist/_internal/runtime/types.d.ts
generated
vendored
Normal file
@@ -0,0 +1,92 @@
|
||||
import type { ColumnBaseConfig, ColumnDataType } from 'drizzle-orm';
|
||||
import type { SQLiteColumn, SQLiteTableWithColumns } from 'drizzle-orm/sqlite-core';
|
||||
import type { ColumnsConfig, DBColumn, OutputColumnsConfig } from '../core/types.js';
|
||||
type GeneratedConfig<T extends ColumnDataType = ColumnDataType> = Pick<ColumnBaseConfig<T, string>, 'name' | 'tableName' | 'notNull' | 'hasDefault' | 'hasRuntimeDefault' | 'isPrimaryKey'>;
|
||||
type AstroText<T extends GeneratedConfig<'string'>, E extends readonly [string, ...string[]] | string> = SQLiteColumn<T & {
|
||||
data: E extends readonly (infer U)[] ? U : string;
|
||||
dataType: 'string';
|
||||
columnType: 'SQLiteText';
|
||||
driverParam: string;
|
||||
enumValues: E extends [string, ...string[]] ? E : never;
|
||||
baseColumn: never;
|
||||
isAutoincrement: boolean;
|
||||
identity: undefined;
|
||||
generated: undefined;
|
||||
}>;
|
||||
type AstroDate<T extends GeneratedConfig<'custom'>> = SQLiteColumn<T & {
|
||||
data: Date;
|
||||
dataType: 'custom';
|
||||
columnType: 'SQLiteCustomColumn';
|
||||
driverParam: string;
|
||||
enumValues: never;
|
||||
baseColumn: never;
|
||||
isAutoincrement: boolean;
|
||||
identity: undefined;
|
||||
generated: undefined;
|
||||
}>;
|
||||
type AstroBoolean<T extends GeneratedConfig<'boolean'>> = SQLiteColumn<T & {
|
||||
data: boolean;
|
||||
dataType: 'boolean';
|
||||
columnType: 'SQLiteBoolean';
|
||||
driverParam: number;
|
||||
enumValues: never;
|
||||
baseColumn: never;
|
||||
isAutoincrement: boolean;
|
||||
identity: undefined;
|
||||
generated: undefined;
|
||||
}>;
|
||||
type AstroNumber<T extends GeneratedConfig<'number'>> = SQLiteColumn<T & {
|
||||
data: number;
|
||||
dataType: 'number';
|
||||
columnType: 'SQLiteInteger';
|
||||
driverParam: number;
|
||||
enumValues: never;
|
||||
baseColumn: never;
|
||||
isAutoincrement: boolean;
|
||||
identity: undefined;
|
||||
generated: undefined;
|
||||
}>;
|
||||
type AstroJson<T extends GeneratedConfig<'custom'>> = SQLiteColumn<T & {
|
||||
data: unknown;
|
||||
dataType: 'custom';
|
||||
columnType: 'SQLiteCustomColumn';
|
||||
driverParam: string;
|
||||
enumValues: never;
|
||||
baseColumn: never;
|
||||
isAutoincrement: boolean;
|
||||
identity: undefined;
|
||||
generated: undefined;
|
||||
}>;
|
||||
type Column<T extends DBColumn['type'], E extends readonly [string, ...string[]] | string, S extends GeneratedConfig> = T extends 'boolean' ? AstroBoolean<S> : T extends 'number' ? AstroNumber<S> : T extends 'text' ? AstroText<S, E> : T extends 'date' ? AstroDate<S> : T extends 'json' ? AstroJson<S> : never;
|
||||
export type Table<TTableName extends string, TColumns extends OutputColumnsConfig | ColumnsConfig> = SQLiteTableWithColumns<{
|
||||
name: TTableName;
|
||||
schema: undefined;
|
||||
dialect: 'sqlite';
|
||||
columns: {
|
||||
[K in Extract<keyof TColumns, string>]: Column<TColumns[K]['type'], TColumns[K]['schema'] extends {
|
||||
enum: infer E;
|
||||
} ? E extends readonly [string, ...string[]] ? E : string : string, {
|
||||
tableName: TTableName;
|
||||
name: K;
|
||||
isPrimaryKey: TColumns[K]['schema'] extends {
|
||||
primaryKey: true;
|
||||
} ? true : false;
|
||||
hasDefault: TColumns[K]['schema'] extends {
|
||||
default: NonNullable<unknown>;
|
||||
} ? true : TColumns[K]['schema'] extends {
|
||||
primaryKey: true;
|
||||
} ? true : false;
|
||||
hasRuntimeDefault: TColumns[K]['schema'] extends {
|
||||
default: NonNullable<unknown>;
|
||||
} ? true : false;
|
||||
notNull: TColumns[K]['schema']['optional'] extends true ? false : true;
|
||||
}>;
|
||||
};
|
||||
}>;
|
||||
export declare const SERIALIZED_SQL_KEY = "__serializedSQL";
|
||||
export type SerializedSQL = {
|
||||
[SERIALIZED_SQL_KEY]: true;
|
||||
sql: string;
|
||||
};
|
||||
export declare function isSerializedSQL(value: any): value is SerializedSQL;
|
||||
export {};
|
||||
9
node_modules/@astrojs/db/dist/_internal/runtime/utils.d.ts
generated
vendored
Normal file
9
node_modules/@astrojs/db/dist/_internal/runtime/utils.d.ts
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
import { LibsqlError } from '@libsql/client';
|
||||
import { AstroError } from 'astro/errors';
|
||||
import type { DBColumn } from '../core/types.js';
|
||||
export declare function hasPrimaryKey(column: DBColumn): boolean;
|
||||
export declare class AstroDbError extends AstroError {
|
||||
name: string;
|
||||
}
|
||||
export declare function isDbError(err: unknown): err is LibsqlError;
|
||||
export declare function pathToFileURL(path: string): URL;
|
||||
48
node_modules/@astrojs/db/dist/_internal/runtime/virtual.d.ts
generated
vendored
Normal file
48
node_modules/@astrojs/db/dist/_internal/runtime/virtual.d.ts
generated
vendored
Normal file
@@ -0,0 +1,48 @@
|
||||
import type { BooleanColumnInput, ColumnsConfig, DateColumnInput, DBConfigInput, JsonColumnInput, NumberColumnOpts, TableConfig, TextColumnOpts } from '../core/types.js';
|
||||
export declare const column: {
|
||||
number: <T extends NumberColumnOpts>(opts?: T) => {
|
||||
type: "number";
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
schema: T;
|
||||
};
|
||||
boolean: <T extends BooleanColumnInput["schema"]>(opts?: T) => {
|
||||
type: "boolean";
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
schema: T;
|
||||
};
|
||||
text: <T extends TextColumnOpts, const E extends T["enum"] extends readonly [string, ...string[]] ? Omit<T, "enum"> & T["enum"] : T>(opts?: E) => {
|
||||
type: "text";
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
schema: E;
|
||||
};
|
||||
date<T extends DateColumnInput["schema"]>(opts?: T): {
|
||||
type: "date";
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
schema: T;
|
||||
};
|
||||
json<T extends JsonColumnInput["schema"]>(opts?: T): {
|
||||
type: "json";
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
schema: T;
|
||||
};
|
||||
};
|
||||
export declare function defineTable<TColumns extends ColumnsConfig>(userConfig: TableConfig<TColumns>): TableConfig<TColumns>;
|
||||
export declare function defineDb(userConfig: DBConfigInput): {
|
||||
tables?: unknown;
|
||||
};
|
||||
export declare const NOW: import("drizzle-orm").SQL<unknown>;
|
||||
export declare const TRUE: import("drizzle-orm").SQL<unknown>;
|
||||
export declare const FALSE: import("drizzle-orm").SQL<unknown>;
|
||||
export { and, asc, avg, avgDistinct, between, count, countDistinct, desc, eq, exists, gt, gte, ilike, inArray, isNotNull, isNull, like, lt, lte, max, min, ne, not, notBetween, notExists, notIlike, notInArray, or, sql, sum, sumDistinct, } from 'drizzle-orm';
|
||||
export { alias } from 'drizzle-orm/sqlite-core';
|
||||
export { isDbError } from './utils.js';
|
||||
8
node_modules/@astrojs/db/dist/core/cli/commands/execute/index.d.ts
generated
vendored
Normal file
8
node_modules/@astrojs/db/dist/core/cli/commands/execute/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
import type { AstroConfig } from 'astro';
|
||||
import type { Arguments } from 'yargs-parser';
|
||||
import type { DBConfig } from '../../../types.js';
|
||||
export declare function cmd({ astroConfig, dbConfig, flags, }: {
|
||||
astroConfig: AstroConfig;
|
||||
dbConfig: DBConfig;
|
||||
flags: Arguments;
|
||||
}): Promise<void>;
|
||||
65
node_modules/@astrojs/db/dist/core/cli/commands/execute/index.js
generated
vendored
Normal file
65
node_modules/@astrojs/db/dist/core/cli/commands/execute/index.js
generated
vendored
Normal file
@@ -0,0 +1,65 @@
|
||||
import { existsSync } from "node:fs";
|
||||
import colors from "piccolore";
|
||||
import { isDbError } from "../../../../runtime/utils.js";
|
||||
import {
|
||||
EXEC_DEFAULT_EXPORT_ERROR,
|
||||
EXEC_ERROR,
|
||||
FILE_NOT_FOUND_ERROR,
|
||||
MISSING_EXECUTE_PATH_ERROR
|
||||
} from "../../../errors.js";
|
||||
import {
|
||||
getLocalVirtualModContents,
|
||||
getRemoteVirtualModContents
|
||||
} from "../../../integration/vite-plugin-db.js";
|
||||
import { bundleFile, importBundledFile } from "../../../load-file.js";
|
||||
import { getRemoteDatabaseInfo, resolveDbAppToken } from "../../../utils.js";
|
||||
async function cmd({
|
||||
astroConfig,
|
||||
dbConfig,
|
||||
flags
|
||||
}) {
|
||||
const filePath = flags._[4];
|
||||
if (typeof filePath !== "string") {
|
||||
console.error(MISSING_EXECUTE_PATH_ERROR);
|
||||
process.exit(1);
|
||||
}
|
||||
const fileUrl = new URL(filePath, astroConfig.root);
|
||||
if (!existsSync(fileUrl)) {
|
||||
console.error(FILE_NOT_FOUND_ERROR(filePath));
|
||||
process.exit(1);
|
||||
}
|
||||
let virtualModContents;
|
||||
if (flags.remote) {
|
||||
const dbInfo = getRemoteDatabaseInfo();
|
||||
const appToken = resolveDbAppToken(flags, dbInfo.token);
|
||||
virtualModContents = getRemoteVirtualModContents({
|
||||
tables: dbConfig.tables ?? {},
|
||||
appToken,
|
||||
isBuild: false,
|
||||
output: "server",
|
||||
localExecution: true
|
||||
});
|
||||
} else {
|
||||
virtualModContents = getLocalVirtualModContents({
|
||||
tables: dbConfig.tables ?? {},
|
||||
root: astroConfig.root,
|
||||
localExecution: true
|
||||
});
|
||||
}
|
||||
const { code } = await bundleFile({ virtualModContents, root: astroConfig.root, fileUrl });
|
||||
const mod = await importBundledFile({ code, root: astroConfig.root });
|
||||
if (typeof mod.default !== "function") {
|
||||
console.error(EXEC_DEFAULT_EXPORT_ERROR(filePath));
|
||||
process.exit(1);
|
||||
}
|
||||
try {
|
||||
await mod.default();
|
||||
console.info(`${colors.green("\u2714")} File run successfully.`);
|
||||
} catch (e) {
|
||||
if (isDbError(e)) throw new Error(EXEC_ERROR(e.message));
|
||||
else throw e;
|
||||
}
|
||||
}
|
||||
export {
|
||||
cmd
|
||||
};
|
||||
8
node_modules/@astrojs/db/dist/core/cli/commands/push/index.d.ts
generated
vendored
Normal file
8
node_modules/@astrojs/db/dist/core/cli/commands/push/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
import type { AstroConfig } from 'astro';
|
||||
import type { Arguments } from 'yargs-parser';
|
||||
import type { DBConfig } from '../../../types.js';
|
||||
export declare function cmd({ dbConfig, flags, }: {
|
||||
astroConfig: AstroConfig;
|
||||
dbConfig: DBConfig;
|
||||
flags: Arguments;
|
||||
}): Promise<void>;
|
||||
106
node_modules/@astrojs/db/dist/core/cli/commands/push/index.js
generated
vendored
Normal file
106
node_modules/@astrojs/db/dist/core/cli/commands/push/index.js
generated
vendored
Normal file
@@ -0,0 +1,106 @@
|
||||
import * as clack from "@clack/prompts";
|
||||
import { sql } from "drizzle-orm";
|
||||
import { MIGRATION_VERSION } from "../../../consts.js";
|
||||
import { createClient } from "../../../db-client/libsql-node.js";
|
||||
import {
|
||||
getRemoteDatabaseInfo,
|
||||
resolveDbAppToken
|
||||
} from "../../../utils.js";
|
||||
import {
|
||||
createCurrentSnapshot,
|
||||
createEmptySnapshot,
|
||||
formatDataLossMessage,
|
||||
getMigrationQueries,
|
||||
getProductionCurrentSnapshot
|
||||
} from "../../migration-queries.js";
|
||||
async function cmd({
|
||||
dbConfig,
|
||||
flags
|
||||
}) {
|
||||
const isDryRun = flags.dryRun;
|
||||
const isForceReset = flags.forceReset;
|
||||
const dbInfo = getRemoteDatabaseInfo();
|
||||
const appToken = resolveDbAppToken(flags, dbInfo.token);
|
||||
const productionSnapshot = await getProductionCurrentSnapshot({ ...dbInfo, token: appToken });
|
||||
const currentSnapshot = createCurrentSnapshot(dbConfig);
|
||||
const isFromScratch = !productionSnapshot;
|
||||
const { queries: migrationQueries, confirmations } = await getMigrationQueries({
|
||||
oldSnapshot: isFromScratch ? createEmptySnapshot() : productionSnapshot,
|
||||
newSnapshot: currentSnapshot,
|
||||
reset: isForceReset
|
||||
});
|
||||
if (migrationQueries.length === 0) {
|
||||
console.log("Database schema is up to date.");
|
||||
} else {
|
||||
console.log(`Database schema is out of date.`);
|
||||
}
|
||||
if (isForceReset) {
|
||||
const begin = await clack.confirm({
|
||||
message: `Reset your database? All of your data will be erased and your schema created from scratch.`,
|
||||
initialValue: false,
|
||||
withGuide: false
|
||||
});
|
||||
if (begin !== true) {
|
||||
console.log("Canceled.");
|
||||
process.exit(0);
|
||||
}
|
||||
console.log(`Force-pushing to the database. All existing data will be erased.`);
|
||||
} else if (confirmations.length > 0) {
|
||||
console.log("\n" + formatDataLossMessage(confirmations) + "\n");
|
||||
throw new Error("Exiting.");
|
||||
}
|
||||
if (isDryRun) {
|
||||
console.log("Statements:", JSON.stringify(migrationQueries, void 0, 2));
|
||||
} else {
|
||||
console.log(`Pushing database schema updates...`);
|
||||
await pushSchema({
|
||||
statements: migrationQueries,
|
||||
dbInfo,
|
||||
appToken,
|
||||
isDryRun,
|
||||
currentSnapshot
|
||||
});
|
||||
}
|
||||
console.info("Push complete!");
|
||||
}
|
||||
async function pushSchema({
|
||||
statements,
|
||||
dbInfo,
|
||||
appToken,
|
||||
isDryRun,
|
||||
currentSnapshot
|
||||
}) {
|
||||
const requestBody = {
|
||||
snapshot: currentSnapshot,
|
||||
sql: statements,
|
||||
version: MIGRATION_VERSION
|
||||
};
|
||||
if (isDryRun) {
|
||||
console.info("[DRY RUN] Batch query:", JSON.stringify(requestBody, null, 2));
|
||||
return new Response(null, { status: 200 });
|
||||
}
|
||||
return pushToDb(requestBody, appToken, dbInfo.url);
|
||||
}
|
||||
async function pushToDb(requestBody, appToken, remoteUrl) {
|
||||
const client = createClient({
|
||||
token: appToken,
|
||||
url: remoteUrl
|
||||
});
|
||||
await client.run(sql`create table if not exists _astro_db_snapshot (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
version TEXT,
|
||||
snapshot BLOB
|
||||
);`);
|
||||
await client.transaction(async (tx) => {
|
||||
for (const stmt of requestBody.sql) {
|
||||
await tx.run(sql.raw(stmt));
|
||||
}
|
||||
await tx.run(sql`insert into _astro_db_snapshot (version, snapshot) values (
|
||||
${requestBody.version},
|
||||
${JSON.stringify(requestBody.snapshot)}
|
||||
)`);
|
||||
});
|
||||
}
|
||||
export {
|
||||
cmd
|
||||
};
|
||||
8
node_modules/@astrojs/db/dist/core/cli/commands/shell/index.d.ts
generated
vendored
Normal file
8
node_modules/@astrojs/db/dist/core/cli/commands/shell/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
import type { AstroConfig } from 'astro';
|
||||
import type { Arguments } from 'yargs-parser';
|
||||
import type { DBConfigInput } from '../../../types.js';
|
||||
export declare function cmd({ flags, astroConfig, }: {
|
||||
dbConfig: DBConfigInput;
|
||||
astroConfig: AstroConfig;
|
||||
flags: Arguments;
|
||||
}): Promise<void>;
|
||||
36
node_modules/@astrojs/db/dist/core/cli/commands/shell/index.js
generated
vendored
Normal file
36
node_modules/@astrojs/db/dist/core/cli/commands/shell/index.js
generated
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
import { sql } from "drizzle-orm";
|
||||
import { normalizeDatabaseUrl } from "../../../../runtime/index.js";
|
||||
import { DB_PATH } from "../../../consts.js";
|
||||
import { createClient as createLocalDatabaseClient } from "../../../db-client/libsql-local.js";
|
||||
import { createClient as createRemoteDatabaseClient } from "../../../db-client/libsql-node.js";
|
||||
import { SHELL_QUERY_MISSING_ERROR } from "../../../errors.js";
|
||||
import { getAstroEnv, getRemoteDatabaseInfo, resolveDbAppToken } from "../../../utils.js";
|
||||
async function cmd({
|
||||
flags,
|
||||
astroConfig
|
||||
}) {
|
||||
const query = flags.query;
|
||||
if (!query) {
|
||||
console.error(SHELL_QUERY_MISSING_ERROR);
|
||||
process.exit(1);
|
||||
}
|
||||
const dbInfo = getRemoteDatabaseInfo();
|
||||
if (flags.remote) {
|
||||
const appToken = resolveDbAppToken(flags, dbInfo.token);
|
||||
const db = createRemoteDatabaseClient({ ...dbInfo, token: appToken });
|
||||
const result = await db.run(sql.raw(query));
|
||||
console.log(result);
|
||||
} else {
|
||||
const { ASTRO_DATABASE_FILE } = getAstroEnv();
|
||||
const dbUrl = normalizeDatabaseUrl(
|
||||
ASTRO_DATABASE_FILE,
|
||||
new URL(DB_PATH, astroConfig.root).href
|
||||
);
|
||||
const db = createLocalDatabaseClient({ url: dbUrl });
|
||||
const result = await db.run(sql.raw(query));
|
||||
console.log(result);
|
||||
}
|
||||
}
|
||||
export {
|
||||
cmd
|
||||
};
|
||||
8
node_modules/@astrojs/db/dist/core/cli/commands/verify/index.d.ts
generated
vendored
Normal file
8
node_modules/@astrojs/db/dist/core/cli/commands/verify/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
import type { AstroConfig } from 'astro';
|
||||
import type { Arguments } from 'yargs-parser';
|
||||
import type { DBConfig } from '../../../types.js';
|
||||
export declare function cmd({ dbConfig, flags, }: {
|
||||
astroConfig: AstroConfig;
|
||||
dbConfig: DBConfig;
|
||||
flags: Arguments;
|
||||
}): Promise<void>;
|
||||
46
node_modules/@astrojs/db/dist/core/cli/commands/verify/index.js
generated
vendored
Normal file
46
node_modules/@astrojs/db/dist/core/cli/commands/verify/index.js
generated
vendored
Normal file
@@ -0,0 +1,46 @@
|
||||
import { getRemoteDatabaseInfo, resolveDbAppToken } from "../../../utils.js";
|
||||
import {
|
||||
createCurrentSnapshot,
|
||||
createEmptySnapshot,
|
||||
formatDataLossMessage,
|
||||
getMigrationQueries,
|
||||
getProductionCurrentSnapshot
|
||||
} from "../../migration-queries.js";
|
||||
async function cmd({
|
||||
dbConfig,
|
||||
flags
|
||||
}) {
|
||||
const isJson = flags.json;
|
||||
const dbInfo = getRemoteDatabaseInfo();
|
||||
const appToken = resolveDbAppToken(flags, dbInfo.token);
|
||||
const productionSnapshot = await getProductionCurrentSnapshot({ ...dbInfo, token: appToken });
|
||||
const currentSnapshot = createCurrentSnapshot(dbConfig);
|
||||
const { queries: migrationQueries, confirmations } = await getMigrationQueries({
|
||||
oldSnapshot: productionSnapshot || createEmptySnapshot(),
|
||||
newSnapshot: currentSnapshot
|
||||
});
|
||||
const result = { exitCode: 0, message: "", code: "", data: void 0 };
|
||||
if (migrationQueries.length === 0) {
|
||||
result.code = "MATCH";
|
||||
result.message = `Database schema is up to date.`;
|
||||
} else {
|
||||
result.code = "NO_MATCH";
|
||||
result.message = `Database schema is out of date.
|
||||
Run 'astro db push' to push up your latest changes.`;
|
||||
}
|
||||
if (confirmations.length > 0) {
|
||||
result.code = "DATA_LOSS";
|
||||
result.exitCode = 1;
|
||||
result.data = confirmations;
|
||||
result.message = formatDataLossMessage(confirmations, !isJson);
|
||||
}
|
||||
if (isJson) {
|
||||
console.log(JSON.stringify(result));
|
||||
} else {
|
||||
console.log(result.message);
|
||||
}
|
||||
process.exit(result.exitCode);
|
||||
}
|
||||
export {
|
||||
cmd
|
||||
};
|
||||
6
node_modules/@astrojs/db/dist/core/cli/index.d.ts
generated
vendored
Normal file
6
node_modules/@astrojs/db/dist/core/cli/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
import type { AstroConfig } from 'astro';
|
||||
import type { Arguments } from 'yargs-parser';
|
||||
export declare function cli({ flags, config: astroConfig, }: {
|
||||
flags: Arguments;
|
||||
config: AstroConfig;
|
||||
}): Promise<void>;
|
||||
75
node_modules/@astrojs/db/dist/core/cli/index.js
generated
vendored
Normal file
75
node_modules/@astrojs/db/dist/core/cli/index.js
generated
vendored
Normal file
@@ -0,0 +1,75 @@
|
||||
import { resolveDbConfig } from "../load-file.js";
|
||||
import { printHelp } from "./print-help.js";
|
||||
async function cli({
|
||||
flags,
|
||||
config: astroConfig
|
||||
}) {
|
||||
const args = flags._;
|
||||
const command = args[2] === "db" ? args[3] : args[2];
|
||||
validateDbAppTokenFlag(command, flags);
|
||||
const { dbConfig } = await resolveDbConfig(astroConfig);
|
||||
switch (command) {
|
||||
case "shell": {
|
||||
const { cmd } = await import("./commands/shell/index.js");
|
||||
return await cmd({ astroConfig, dbConfig, flags });
|
||||
}
|
||||
case "gen": {
|
||||
console.log('"astro db gen" is no longer needed! Visit the docs for more information.');
|
||||
return;
|
||||
}
|
||||
case "sync": {
|
||||
console.log('"astro db sync" is no longer needed! Visit the docs for more information.');
|
||||
return;
|
||||
}
|
||||
case "push": {
|
||||
const { cmd } = await import("./commands/push/index.js");
|
||||
return await cmd({ astroConfig, dbConfig, flags });
|
||||
}
|
||||
case "verify": {
|
||||
const { cmd } = await import("./commands/verify/index.js");
|
||||
return await cmd({ astroConfig, dbConfig, flags });
|
||||
}
|
||||
case "execute": {
|
||||
const { cmd } = await import("./commands/execute/index.js");
|
||||
return await cmd({ astroConfig, dbConfig, flags });
|
||||
}
|
||||
default: {
|
||||
if (command != null) {
|
||||
console.error(`Unknown command: ${command}`);
|
||||
}
|
||||
printHelp({
|
||||
commandName: "astro db",
|
||||
usage: "[command] [...flags]",
|
||||
headline: " ",
|
||||
tables: {
|
||||
Commands: [
|
||||
["push", "Push table schema updates to libSQL."],
|
||||
["verify", "Test schema updates with libSQL (good for CI)."],
|
||||
[
|
||||
"astro db execute <file-path>",
|
||||
"Execute a ts/js file using astro:db. Use --remote to connect to libSQL."
|
||||
],
|
||||
[
|
||||
"astro db shell --query <sql-string>",
|
||||
"Execute a SQL string. Use --remote to connect to libSQL."
|
||||
]
|
||||
]
|
||||
}
|
||||
});
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
function validateDbAppTokenFlag(command, flags) {
|
||||
if (command !== "execute" && command !== "push" && command !== "verify" && command !== "shell")
|
||||
return;
|
||||
const dbAppToken = flags.dbAppToken;
|
||||
if (dbAppToken == null) return;
|
||||
if (typeof dbAppToken !== "string") {
|
||||
console.error(`Invalid value for --db-app-token; expected a string.`);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
export {
|
||||
cli
|
||||
};
|
||||
22
node_modules/@astrojs/db/dist/core/cli/migration-queries.d.ts
generated
vendored
Normal file
22
node_modules/@astrojs/db/dist/core/cli/migration-queries.d.ts
generated
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
import type { DBConfig, DBSnapshot, ResolvedDBTable } from '../types.js';
|
||||
import type { RemoteDatabaseInfo } from '../utils.js';
|
||||
export declare function getMigrationQueries({ oldSnapshot, newSnapshot, reset, }: {
|
||||
oldSnapshot: DBSnapshot;
|
||||
newSnapshot: DBSnapshot;
|
||||
reset?: boolean;
|
||||
}): Promise<{
|
||||
queries: string[];
|
||||
confirmations: string[];
|
||||
}>;
|
||||
export declare function getTableChangeQueries({ tableName, oldTable, newTable, }: {
|
||||
tableName: string;
|
||||
oldTable: ResolvedDBTable;
|
||||
newTable: ResolvedDBTable;
|
||||
}): Promise<{
|
||||
queries: string[];
|
||||
confirmations: string[];
|
||||
}>;
|
||||
export declare function getProductionCurrentSnapshot({ url, token, }: RemoteDatabaseInfo): Promise<DBSnapshot | undefined>;
|
||||
export declare function createCurrentSnapshot({ tables }: DBConfig): DBSnapshot;
|
||||
export declare function createEmptySnapshot(): DBSnapshot;
|
||||
export declare function formatDataLossMessage(confirmations: string[], isColor?: boolean): string;
|
||||
373
node_modules/@astrojs/db/dist/core/cli/migration-queries.js
generated
vendored
Normal file
373
node_modules/@astrojs/db/dist/core/cli/migration-queries.js
generated
vendored
Normal file
@@ -0,0 +1,373 @@
|
||||
import { stripVTControlCharacters } from "node:util";
|
||||
import diff from "microdiff";
|
||||
import { sql } from "drizzle-orm";
|
||||
import { SQLiteAsyncDialect } from "drizzle-orm/sqlite-core";
|
||||
import { customAlphabet } from "nanoid";
|
||||
import color from "piccolore";
|
||||
import { isSerializedSQL } from "../../runtime/types.js";
|
||||
import { hasPrimaryKey, isDbError } from "../../runtime/utils.js";
|
||||
import { MIGRATION_VERSION } from "../consts.js";
|
||||
import { createClient } from "../db-client/libsql-node.js";
|
||||
import { RENAME_COLUMN_ERROR, RENAME_TABLE_ERROR } from "../errors.js";
|
||||
import {
|
||||
getCreateIndexQueries,
|
||||
getCreateTableQuery,
|
||||
getDropTableIfExistsQuery,
|
||||
getModifiers,
|
||||
getReferencesConfig,
|
||||
hasDefault,
|
||||
schemaTypeToSqlType
|
||||
} from "../queries.js";
|
||||
import { columnSchema } from "../schemas.js";
|
||||
const sqlite = new SQLiteAsyncDialect();
|
||||
const genTempTableName = customAlphabet("abcdefghijklmnopqrstuvwxyz", 10);
|
||||
/**
 * Computes the SQL statements required to migrate the database from
 * `oldSnapshot` to `newSnapshot`.
 *
 * @returns `{ queries, confirmations }` — `queries` are SQL strings to run in
 *   order; `confirmations` are data-loss warnings the caller must surface.
 * @throws when the diff looks like a table or column rename, which cannot be
 *   applied automatically.
 */
async function getMigrationQueries({
  oldSnapshot,
  newSnapshot,
  reset = false
}) {
  const queries = [];
  const confirmations = [];
  if (reset) {
    // --force-reset: drop every existing table, then migrate as if starting
    // from an empty schema.
    const currentSnapshot = oldSnapshot;
    oldSnapshot = createEmptySnapshot();
    queries.push(...getDropTableQueriesForSnapshot(currentSnapshot));
  }
  const addedTables = getAddedTables(oldSnapshot, newSnapshot);
  const droppedTables = getDroppedTables(oldSnapshot, newSnapshot);
  // Tables flagged `deprecated` are allowed to disappear without tripping the
  // rename heuristic below.
  const notDeprecatedDroppedTables = Object.fromEntries(
    Object.entries(droppedTables).filter(([, table]) => !table.deprecated)
  );
  // Adding and removing tables in the same batch looks like a rename, which
  // we cannot distinguish from an add+drop — refuse and let the user decide.
  if (!isEmpty(addedTables) && !isEmpty(notDeprecatedDroppedTables)) {
    const oldTable = Object.keys(notDeprecatedDroppedTables)[0];
    const newTable = Object.keys(addedTables)[0];
    throw new Error(RENAME_TABLE_ERROR(oldTable, newTable));
  }
  for (const [tableName, table] of Object.entries(addedTables)) {
    queries.push(getCreateTableQuery(tableName, table));
    queries.push(...getCreateIndexQueries(tableName, table));
  }
  for (const [tableName] of Object.entries(droppedTables)) {
    const dropQuery = `DROP TABLE ${sqlite.escapeName(tableName)}`;
    queries.push(dropQuery);
  }
  // Tables present in both snapshots: diff columns and indexes per table.
  for (const [tableName, newTable] of Object.entries(newSnapshot.schema)) {
    const oldTable = oldSnapshot.schema[tableName];
    if (!oldTable) continue;
    const addedColumns = getAdded(oldTable.columns, newTable.columns);
    const droppedColumns = getDropped(oldTable.columns, newTable.columns);
    const notDeprecatedDroppedColumns = Object.fromEntries(
      Object.entries(droppedColumns).filter(([, col]) => !col.schema.deprecated)
    );
    // Same rename guard as above, at column granularity.
    if (!isEmpty(addedColumns) && !isEmpty(notDeprecatedDroppedColumns)) {
      throw new Error(
        RENAME_COLUMN_ERROR(
          `${tableName}.${Object.keys(addedColumns)[0]}`,
          `${tableName}.${Object.keys(notDeprecatedDroppedColumns)[0]}`
        )
      );
    }
    const result = await getTableChangeQueries({
      tableName,
      oldTable,
      newTable
    });
    queries.push(...result.queries);
    confirmations.push(...result.confirmations);
  }
  return { queries, confirmations };
}
|
||||
/**
 * Computes queries to migrate a single table, choosing the cheapest safe
 * strategy: index-only changes, ALTER TABLE add/drop, or full table recreate.
 *
 * @returns `{ queries, confirmations }` — confirmations are data-loss
 *   warnings the caller must present before executing.
 */
async function getTableChangeQueries({
  tableName,
  oldTable,
  newTable
}) {
  const queries = [];
  const confirmations = [];
  const updated = getUpdatedColumns(oldTable.columns, newTable.columns);
  const added = getAdded(oldTable.columns, newTable.columns);
  const dropped = getDropped(oldTable.columns, newTable.columns);
  const hasForeignKeyChanges = diff(oldTable.foreignKeys ?? [], newTable.foreignKeys ?? []).length > 0;
  // Case 1: no column or FK changes — only indexes may differ.
  if (!hasForeignKeyChanges && isEmpty(updated) && isEmpty(added) && isEmpty(dropped)) {
    return {
      queries: getChangeIndexQueries({
        tableName,
        oldIndexes: oldTable.indexes,
        newIndexes: newTable.indexes
      }),
      confirmations
    };
  }
  // Case 2: every change is expressible as ALTER TABLE ADD/DROP COLUMN —
  // no table recreate needed.
  if (!hasForeignKeyChanges && isEmpty(updated) && Object.values(dropped).every(canAlterTableDropColumn) && Object.values(added).every(canAlterTableAddColumn)) {
    queries.push(
      ...getAlterTableQueries(tableName, added, dropped),
      ...getChangeIndexQueries({
        tableName,
        oldIndexes: oldTable.indexes,
        newIndexes: newTable.indexes
      })
    );
    return { queries, confirmations };
  }
  // Case 3: the table must be recreated; warn when rows cannot be carried over.
  const dataLossCheck = canRecreateTableWithoutDataLoss(added, updated);
  if (dataLossCheck.dataLoss) {
    const { reason, columnName } = dataLossCheck;
    const reasonMsgs = {
      "added-required": `You added new required column '${color.bold(
        tableName + "." + columnName
      )}' with no default value.
This cannot be executed on an existing table.`,
      "updated-type": `Updating existing column ${color.bold(
        tableName + "." + columnName
      )} to a new type that cannot be handled automatically.`
    };
    confirmations.push(reasonMsgs[reason]);
  }
  const primaryKeyExists = Object.entries(newTable.columns).find(
    ([, column]) => hasPrimaryKey(column)
  );
  const droppedPrimaryKey = Object.entries(dropped).find(([, column]) => hasPrimaryKey(column));
  const recreateTableQueries = getRecreateTableQueries({
    tableName,
    newTable,
    added,
    hasDataLoss: dataLossCheck.dataLoss,
    // Tables without an explicit primary key presumably carry an implicit
    // `_id` column that must survive the recreate unless a PK column was
    // just dropped — see getRecreateTableQueries. TODO confirm upstream.
    migrateHiddenPrimaryKey: !primaryKeyExists && !droppedPrimaryKey
  });
  queries.push(...recreateTableQueries, ...getCreateIndexQueries(tableName, newTable));
  return { queries, confirmations };
}
|
||||
/**
 * Diffs two index maps and returns the DROP INDEX / CREATE INDEX statements
 * needed to move from `oldIndexes` to `newIndexes`.
 */
function getChangeIndexQueries({
  tableName,
  oldIndexes = {},
  newIndexes = {}
}) {
  const added = getAdded(oldIndexes, newIndexes);
  const dropped = getDropped(oldIndexes, newIndexes);
  const updated = getUpdated(oldIndexes, newIndexes);
  // A changed index is rebuilt: drop the old definition, create the new one.
  Object.assign(dropped, updated);
  Object.assign(added, updated);
  const queries = Object.keys(dropped).map(
    (indexName) => `DROP INDEX ${sqlite.escapeName(indexName)}`
  );
  queries.push(...getCreateIndexQueries(tableName, { indexes: added }));
  return queries;
}
|
||||
/**
 * Returns the tables present in `newTables.schema` but absent from
 * `oldTables.schema`, keyed by table name.
 */
function getAddedTables(oldTables, newTables) {
  const previousSchema = oldTables.schema;
  return Object.fromEntries(
    Object.entries(newTables.schema).filter(([name]) => !(name in previousSchema))
  );
}
|
||||
/**
 * Returns the tables present in `oldTables.schema` but absent from
 * `newTables.schema`, keyed by table name.
 */
function getDroppedTables(oldTables, newTables) {
  const nextSchema = newTables.schema;
  return Object.fromEntries(
    Object.entries(oldTables.schema).filter(([name]) => !(name in nextSchema))
  );
}
|
||||
/**
 * Builds ALTER TABLE statements for additive/subtractive column changes:
 * all ADD COLUMN statements first, then all DROP COLUMN statements.
 */
function getAlterTableQueries(unescTableName, added, dropped) {
  const tableName = sqlite.escapeName(unescTableName);
  const addQueries = Object.entries(added).map(([rawName, column]) => {
    const columnName = sqlite.escapeName(rawName);
    const type = schemaTypeToSqlType(column.type);
    const modifiers = getModifiers(columnName, column);
    return `ALTER TABLE ${tableName} ADD COLUMN ${columnName} ${type}${modifiers}`;
  });
  const dropQueries = Object.keys(dropped).map(
    (rawName) => `ALTER TABLE ${tableName} DROP COLUMN ${sqlite.escapeName(rawName)}`
  );
  return [...addQueries, ...dropQueries];
}
|
||||
/**
 * Builds the statements that recreate `tableName` with the new schema.
 * With data loss: plain DROP + CREATE. Otherwise: create a temp table, copy
 * the surviving columns into it, drop the old table, and rename the temp
 * table into place.
 */
function getRecreateTableQueries({
  tableName: unescTableName,
  newTable,
  added,
  hasDataLoss,
  migrateHiddenPrimaryKey
}) {
  // Random suffix avoids colliding with an existing table name.
  const unescTempName = `${unescTableName}_${genTempTableName()}`;
  const tempName = sqlite.escapeName(unescTempName);
  const tableName = sqlite.escapeName(unescTableName);
  if (hasDataLoss) {
    return [`DROP TABLE ${tableName}`, getCreateTableQuery(unescTableName, newTable)];
  }
  const newColumns = [...Object.keys(newTable.columns)];
  if (migrateHiddenPrimaryKey) {
    // Carry the implicit `_id` primary-key column across the copy.
    newColumns.unshift("_id");
  }
  // Newly added columns have no source data in the old table, so exclude
  // them from both sides of the INSERT…SELECT.
  const escapedColumns = newColumns.filter((i) => !(i in added)).map((c) => sqlite.escapeName(c)).join(", ");
  return [
    getCreateTableQuery(unescTempName, newTable),
    `INSERT INTO ${tempName} (${escapedColumns}) SELECT ${escapedColumns} FROM ${tableName}`,
    `DROP TABLE ${tableName}`,
    `ALTER TABLE ${tempName} RENAME TO ${tableName}`
  ];
}
|
||||
/** True when `obj` has no own enumerable keys. */
function isEmpty(obj) {
  const keyCount = Object.keys(obj).length;
  return keyCount === 0;
}
|
||||
/**
 * Whether this column can be added via ALTER TABLE ADD COLUMN: it must not
 * be UNIQUE, carry a runtime (SQL) default, be a primary key, or reference
 * another table — and a required column must have a static default.
 */
function canAlterTableAddColumn(column) {
  const blocked =
    column.schema.unique ||
    hasRuntimeDefault(column) ||
    (!column.schema.optional && !hasDefault(column)) ||
    hasPrimaryKey(column) ||
    Boolean(getReferencesConfig(column));
  return !blocked;
}
|
||||
/**
 * Whether this column can be removed via ALTER TABLE DROP COLUMN:
 * UNIQUE and primary-key columns cannot be dropped that way.
 */
function canAlterTableDropColumn(column) {
  if (column.schema.unique || hasPrimaryKey(column)) {
    return false;
  }
  return true;
}
|
||||
/**
 * Determines whether recreating the table can preserve existing rows.
 * Returns `{ dataLoss: false }` when safe, otherwise `{ dataLoss: true,
 * columnName, reason }` with reason "added-required" or "updated-type".
 */
function canRecreateTableWithoutDataLoss(added, updated) {
  for (const [columnName, a] of Object.entries(added)) {
    // A new primary key is only safe when numeric or defaulted — presumably
    // because numeric PKs can be auto-populated; TODO confirm upstream intent.
    if (hasPrimaryKey(a) && a.type !== "number" && !hasDefault(a)) {
      return { dataLoss: true, columnName, reason: "added-required" };
    }
    // A required column needs a default to backfill existing rows.
    if (!a.schema.optional && !hasDefault(a)) {
      return { dataLoss: true, columnName, reason: "added-required" };
    }
  }
  for (const [columnName, u] of Object.entries(updated)) {
    // Type changes are safe only for the compatible pairs listed in
    // typeChangesWithoutQuery.
    if (u.old.type !== u.new.type && !canChangeTypeWithoutQuery(u.old, u.new)) {
      return { dataLoss: true, columnName, reason: "updated-type" };
    }
  }
  return { dataLoss: false };
}
|
||||
/** Returns the entries of `newObj` whose keys are absent from `oldObj`. */
function getAdded(oldObj, newObj) {
  return Object.fromEntries(
    Object.entries(newObj).filter(([key]) => !(key in oldObj))
  );
}
|
||||
/** Returns the entries of `oldObj` whose keys are absent from `newObj`. */
function getDropped(oldObj, newObj) {
  return Object.fromEntries(
    Object.entries(oldObj).filter(([key]) => !(key in newObj))
  );
}
|
||||
/**
 * Returns entries present in both maps whose value changed (per `diff`),
 * keyed by name, with the NEW value.
 */
function getUpdated(oldObj, newObj) {
  const updated = {};
  for (const key of Object.keys(newObj)) {
    const previous = oldObj[key];
    if (!previous) continue;
    const changes = diff(previous, newObj[key]);
    if (changes.length > 0) {
      updated[key] = newObj[key];
    }
  }
  return updated;
}
|
||||
/**
 * Pairs each column present in both snapshots whose definition changed, as
 * `{ old, new }`. Compatible type-only changes (see typeChangesWithoutQuery)
 * are normalized first so a bare type swap is not reported as an update.
 */
function getUpdatedColumns(oldColumns, newColumns) {
  const updated = {};
  for (const [key, newColumn] of Object.entries(newColumns)) {
    let oldColumn = oldColumns[key];
    if (!oldColumn) continue;
    if (oldColumn.type !== newColumn.type && canChangeTypeWithoutQuery(oldColumn, newColumn)) {
      // Re-parse the old column as the new type; when that succeeds the two
      // are diffed on equal footing and only real option changes remain.
      const asNewColumn = columnSchema.safeParse({
        type: newColumn.type,
        schema: oldColumn.schema
      });
      if (asNewColumn.success) {
        oldColumn = asNewColumn.data;
      }
    }
    const diffResult = diff(oldColumn, newColumn);
    if (diffResult.length > 0) {
      updated[key] = { old: oldColumn, new: newColumn };
    }
  }
  return updated;
}
|
||||
// Type transitions whose stored representation is compatible, so the column
// type may change without rewriting existing data.
const typeChangesWithoutQuery = [
  { from: "boolean", to: "number" },
  { from: "date", to: "text" },
  { from: "json", to: "text" }
];
/** True when `oldColumn.type` → `newColumn.type` is a compatible transition. */
function canChangeTypeWithoutQuery(oldColumn, newColumn) {
  for (const { from, to } of typeChangesWithoutQuery) {
    if (oldColumn.type === from && newColumn.type === to) {
      return true;
    }
  }
  return false;
}
|
||||
/** True when the column's default is a serialized SQL expression (evaluated
 *  by the database at insert time) rather than a static value. */
function hasRuntimeDefault(column) {
  const defaultValue = column.schema.default;
  return Boolean(defaultValue && isSerializedSQL(defaultValue));
}
|
||||
/**
 * Fetches the current schema snapshot from the production database.
 * Note: getDbCurrentSnapshot takes (token, url) — arguments swap order here.
 */
function getProductionCurrentSnapshot({
  url,
  token
}) {
  return getDbCurrentSnapshot(token, url);
}
|
||||
/**
 * Reads the most recent schema snapshot from the remote database's
 * `_astro_db_snapshot` table. Returns `undefined` when the snapshot table
 * does not exist yet (schema was never pushed).
 */
async function getDbCurrentSnapshot(appToken, remoteUrl) {
  const client = createClient({
    token: appToken,
    url: remoteUrl
  });
  try {
    const res = await client.get(
      // Latest snapshot
      sql`select snapshot from _astro_db_snapshot order by id desc limit 1;`
    );
    return JSON.parse(res.snapshot);
  } catch (error) {
    // The error-message comparison below is intentionally exact: these are
    // the precise strings libSQL produces for a missing table.
    if (isDbError(error) && // If the schema was never pushed to the database yet the table won't exist.
    // Treat a missing snapshot table as an empty table.
    // When connecting to a remote database in that condition
    // the query will fail with the following error code and message.
    (error.code === "SQLITE_UNKNOWN" && error.message === "SQLITE_UNKNOWN: SQLite error: no such table: _astro_db_snapshot" || // When connecting to a local or in-memory database that does not have a snapshot table yet
    // the query will fail with the following error code and message.
    error.code === "SQLITE_ERROR" && error.message === "SQLITE_ERROR: no such table: _astro_db_snapshot")) {
      return;
    }
    throw error;
  }
}
|
||||
/**
 * Builds DROP TABLE IF EXISTS statements for every table in the snapshot,
 * in reverse declaration order (matching the original's unshift semantics).
 */
function getDropTableQueriesForSnapshot(snapshot) {
  return Object.keys(snapshot.schema)
    .map((tableName) => getDropTableIfExistsQuery(tableName))
    .reverse();
}
|
||||
/**
 * Builds a versioned snapshot from the user's table definitions.
 * The JSON round-trip deep-copies them so later mutations of the live
 * config cannot leak into the snapshot.
 */
function createCurrentSnapshot({ tables = {} }) {
  const schema = JSON.parse(JSON.stringify(tables));
  return { version: MIGRATION_VERSION, schema };
}
|
||||
// A snapshot representing a database with no tables at all.
function createEmptySnapshot() {
  return { version: MIGRATION_VERSION, schema: {} };
}
|
||||
/**
 * Formats the collected data-loss confirmations into a user-facing report.
 *
 * @param confirmations warning strings from getTableChangeQueries
 * @param isColor when false, ANSI escape codes are stripped from the output
 */
function formatDataLossMessage(confirmations, isColor = true) {
  const messages = [];
  messages.push(color.red("\u2716 We found some schema changes that cannot be handled automatically:"));
  messages.push(``);
  messages.push(...confirmations.map((m, i) => color.red(` (${i + 1}) `) + m));
  messages.push(``);
  messages.push(`To resolve, revert these changes or update your schema, and re-run the command.`);
  messages.push(
    `You may also run 'astro db push --force-reset' to ignore all warnings and force-push your local database schema to production instead. All data will be lost and the database will be reset.`
  );
  let finalMessage = messages.join("\n");
  if (!isColor) {
    finalMessage = stripVTControlCharacters(finalMessage);
  }
  return finalMessage;
}
|
||||
export {
|
||||
createCurrentSnapshot,
|
||||
createEmptySnapshot,
|
||||
formatDataLossMessage,
|
||||
getMigrationQueries,
|
||||
getProductionCurrentSnapshot,
|
||||
getTableChangeQueries
|
||||
};
|
||||
11
node_modules/@astrojs/db/dist/core/cli/print-help.d.ts
generated
vendored
Normal file
11
node_modules/@astrojs/db/dist/core/cli/print-help.d.ts
generated
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
/**
|
||||
* Uses implementation from Astro core
|
||||
* @see https://github.com/withastro/astro/blob/main/packages/astro/src/core/messages.ts#L303
|
||||
*/
|
||||
export declare function printHelp({ commandName, headline, usage, tables, description, }: {
|
||||
commandName: string;
|
||||
headline?: string;
|
||||
usage?: string;
|
||||
tables?: Record<string, [command: string, help: string][]>;
|
||||
description?: string;
|
||||
}): void;
|
||||
55
node_modules/@astrojs/db/dist/core/cli/print-help.js
generated
vendored
Normal file
55
node_modules/@astrojs/db/dist/core/cli/print-help.js
generated
vendored
Normal file
@@ -0,0 +1,55 @@
|
||||
import colors from "piccolore";
|
||||
/**
 * Renders a CLI help screen: version banner, usage line, command tables,
 * and an optional description footer. Mirrors Astro core's messages.ts.
 */
function printHelp({
  commandName,
  headline,
  usage,
  tables,
  description
}) {
  const linebreak = () => "";
  const title = (label) => ` ${colors.bgWhite(colors.black(` ${label} `))}`;
  // Two-column table renderer; stacks the columns vertically when the
  // terminal is narrower than 60 columns.
  const table = (rows, { padding }) => {
    const split = process.stdout.columns < 60;
    let raw = "";
    for (const row of rows) {
      if (split) {
        raw += ` ${row[0]}
`;
      } else {
        raw += `${`${row[0]}`.padStart(padding)}`;
      }
      raw += " " + colors.dim(row[1]) + "\n";
    }
    // Drop the trailing newline.
    return raw.slice(0, -1);
  };
  let message = [];
  if (headline) {
    message.push(
      linebreak(),
      ` ${colors.bgGreen(colors.black(` ${commandName} `))} ${colors.green(
        `v${"0.20.1"}`
      )} ${headline}`
    );
  }
  if (usage) {
    message.push(linebreak(), ` ${colors.green(commandName)} ${colors.bold(usage)}`);
  }
  if (tables) {
    // Width of the widest first column, plus two spaces of gutter.
    let calculateTablePadding2 = function(rows) {
      return rows.reduce((val, [first]) => Math.max(val, first.length), 0) + 2;
    };
    var calculateTablePadding = calculateTablePadding2;
    const tableEntries = Object.entries(tables);
    // Shared padding so all tables align with each other.
    const padding = Math.max(...tableEntries.map(([, rows]) => calculateTablePadding2(rows)));
    for (const [tableTitle, tableRows] of tableEntries) {
      message.push(linebreak(), title(tableTitle), table(tableRows, { padding }));
    }
  }
  if (description) {
    message.push(linebreak(), `${description}`);
  }
  console.log(message.join("\n") + "\n");
}
|
||||
export {
|
||||
printHelp
|
||||
};
|
||||
12
node_modules/@astrojs/db/dist/core/consts.d.ts
generated
vendored
Normal file
12
node_modules/@astrojs/db/dist/core/consts.d.ts
generated
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
export declare const RUNTIME_IMPORT: string;
|
||||
export declare const RUNTIME_VIRTUAL_IMPORT: string;
|
||||
export declare const VIRTUAL_MODULE_ID = "astro:db";
|
||||
export declare const DB_PATH = ".astro/content.db";
|
||||
export declare const CONFIG_FILE_NAMES: string[];
|
||||
export declare const MIGRATION_VERSION = "2024-03-12";
|
||||
export declare const VIRTUAL_CLIENT_MODULE_ID = "virtual:astro:db-client";
|
||||
export declare const DB_CLIENTS: {
|
||||
node: string;
|
||||
web: string;
|
||||
local: string;
|
||||
};
|
||||
26
node_modules/@astrojs/db/dist/core/consts.js
generated
vendored
Normal file
26
node_modules/@astrojs/db/dist/core/consts.js
generated
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
import { readFileSync } from "node:fs";
|
||||
// Resolved at runtime from package.json so the constants track the
// published package name.
const PACKAGE_NAME = JSON.parse(
  readFileSync(new URL("../../package.json", import.meta.url), "utf8")
).name;
// Pre-stringified import specifiers, ready to interpolate into generated code.
const RUNTIME_IMPORT = JSON.stringify(`${PACKAGE_NAME}/runtime`);
const RUNTIME_VIRTUAL_IMPORT = JSON.stringify(`${PACKAGE_NAME}/dist/runtime/virtual.js`);
const VIRTUAL_MODULE_ID = "astro:db";
// Local SQLite database file, relative to the project root.
const DB_PATH = ".astro/content.db";
const CONFIG_FILE_NAMES = ["config.ts", "config.js", "config.mts", "config.mjs"];
const MIGRATION_VERSION = "2024-03-12";
const VIRTUAL_CLIENT_MODULE_ID = "virtual:astro:db-client";
// Entry points for the three libSQL client flavors.
const DB_CLIENTS = {
  node: `${PACKAGE_NAME}/db-client/libsql-node.js`,
  web: `${PACKAGE_NAME}/db-client/libsql-web.js`,
  local: `${PACKAGE_NAME}/db-client/libsql-local.js`
};
|
||||
export {
|
||||
CONFIG_FILE_NAMES,
|
||||
DB_CLIENTS,
|
||||
DB_PATH,
|
||||
MIGRATION_VERSION,
|
||||
RUNTIME_IMPORT,
|
||||
RUNTIME_VIRTUAL_IMPORT,
|
||||
VIRTUAL_CLIENT_MODULE_ID,
|
||||
VIRTUAL_MODULE_ID
|
||||
};
|
||||
6
node_modules/@astrojs/db/dist/core/db-client/libsql-local.d.ts
generated
vendored
Normal file
6
node_modules/@astrojs/db/dist/core/db-client/libsql-local.d.ts
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
import { type LibSQLDatabase } from 'drizzle-orm/libsql';
|
||||
type LocalDbClientOptions = {
|
||||
url: string;
|
||||
};
|
||||
export declare function createClient(options: LocalDbClientOptions): LibSQLDatabase;
|
||||
export {};
|
||||
12
node_modules/@astrojs/db/dist/core/db-client/libsql-local.js
generated
vendored
Normal file
12
node_modules/@astrojs/db/dist/core/db-client/libsql-local.js
generated
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
import { createClient as createLibsqlClient } from "@libsql/client";
|
||||
import { drizzle as drizzleLibsql } from "drizzle-orm/libsql";
|
||||
// WebContainer environments (e.g. StackBlitz) need a fixed local file path.
const isWebContainer = !!process.versions?.webcontainer;
/**
 * Creates a drizzle client over a local libSQL (SQLite file) database.
 */
function createClient(options) {
  const url = isWebContainer ? "file:content.db" : options.url;
  const client = createLibsqlClient({ url });
  const db = drizzleLibsql(client);
  return db;
}
|
||||
export {
|
||||
createClient
|
||||
};
|
||||
8
node_modules/@astrojs/db/dist/core/db-client/libsql-node.d.ts
generated
vendored
Normal file
8
node_modules/@astrojs/db/dist/core/db-client/libsql-node.d.ts
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
type RemoteDbClientOptions = {
|
||||
token: string;
|
||||
url: string;
|
||||
};
|
||||
export declare function createClient(opts: RemoteDbClientOptions): import("drizzle-orm/libsql").LibSQLDatabase<Record<string, never>> & {
|
||||
$client: import("@libsql/client").Client;
|
||||
};
|
||||
export {};
|
||||
21
node_modules/@astrojs/db/dist/core/db-client/libsql-node.js
generated
vendored
Normal file
21
node_modules/@astrojs/db/dist/core/db-client/libsql-node.js
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
import { createClient as createLibsqlClient } from "@libsql/client";
|
||||
import { drizzle as drizzleLibsql } from "drizzle-orm/libsql";
|
||||
import { parseLibSQLConfig } from "./utils.js";
|
||||
/**
 * Creates a drizzle libSQL client for Node from a URL and auth token.
 * Extra libSQL options are read from the URL's query string and stripped
 * before connecting.
 */
function createClient(opts) {
  const { token, url: rawUrl } = opts;
  let parsedUrl = new URL(rawUrl);
  // Query params carry libSQL options; remove them from the connection URL.
  const options = Object.fromEntries(parsedUrl.searchParams.entries());
  parsedUrl.search = "";
  let url = parsedUrl.toString();
  if (parsedUrl.protocol === "memory:") {
    // `memory:` URLs map to SQLite's in-memory database.
    url = ":memory:";
  } else if (parsedUrl.protocol === "file:" && parsedUrl.pathname.startsWith("/") && !rawUrl.startsWith("file:/")) {
    // Presumably restores a relative `file:foo.db` form that URL parsing
    // absolutized — TODO confirm against URL parsing behavior upstream.
    url = "file:" + parsedUrl.pathname.substring(1);
  }
  const libSQLOptions = parseLibSQLConfig(options);
  const client = createLibsqlClient({ ...libSQLOptions, url, authToken: token });
  return drizzleLibsql(client);
}
|
||||
export {
|
||||
createClient
|
||||
};
|
||||
8
node_modules/@astrojs/db/dist/core/db-client/libsql-web.d.ts
generated
vendored
Normal file
8
node_modules/@astrojs/db/dist/core/db-client/libsql-web.d.ts
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
type RemoteDbClientOptions = {
|
||||
token: string;
|
||||
url: string;
|
||||
};
|
||||
export declare function createClient(opts: RemoteDbClientOptions): import("drizzle-orm/libsql").LibSQLDatabase<Record<string, never>> & {
|
||||
$client: import("@libsql/client/web").Client;
|
||||
};
|
||||
export {};
|
||||
22
node_modules/@astrojs/db/dist/core/db-client/libsql-web.js
generated
vendored
Normal file
22
node_modules/@astrojs/db/dist/core/db-client/libsql-web.js
generated
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
import { createClient as createLibsqlClient } from "@libsql/client/web";
|
||||
import { drizzle as drizzleLibsql } from "drizzle-orm/libsql/web";
|
||||
import { parseLibSQLConfig } from "./utils.js";
|
||||
/**
 * Creates a drizzle libSQL client for web/edge runtimes. Only network
 * protocols are accepted — no file: or memory: databases here.
 */
function createClient(opts) {
  const { token, url: rawUrl } = opts;
  let parsedUrl = new URL(rawUrl);
  // Query params carry libSQL options; remove them from the connection URL.
  const options = Object.fromEntries(parsedUrl.searchParams.entries());
  parsedUrl.search = "";
  let url = parsedUrl.toString();
  const supportedProtocols = ["http:", "https:", "libsql:"];
  if (!supportedProtocols.includes(parsedUrl.protocol)) {
    throw new Error(
      `Unsupported protocol "${parsedUrl.protocol}" for libSQL web client. Supported protocols are: ${supportedProtocols.join(", ")}.`
    );
  }
  const libSQLOptions = parseLibSQLConfig(options);
  const client = createLibsqlClient({ ...libSQLOptions, url, authToken: token });
  return drizzleLibsql(client);
}
|
||||
export {
|
||||
createClient
|
||||
};
|
||||
2
node_modules/@astrojs/db/dist/core/db-client/utils.d.ts
generated
vendored
Normal file
2
node_modules/@astrojs/db/dist/core/db-client/utils.d.ts
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
import type { Config as LibSQLConfig } from '@libsql/client';
|
||||
export declare const parseLibSQLConfig: (config: Record<string, string>) => Partial<LibSQLConfig>;
|
||||
46
node_modules/@astrojs/db/dist/core/db-client/utils.js
generated
vendored
Normal file
46
node_modules/@astrojs/db/dist/core/db-client/utils.js
generated
vendored
Normal file
@@ -0,0 +1,46 @@
|
||||
import * as z from "zod/v4";
|
||||
// Raw query-string options: every key and value arrives as a string.
const rawLibSQLOptions = z.record(z.string(), z.string());
// Numeric option values are coerced via zod.
const parseNumber = (value) => z.coerce.number().parse(value);
const parseBoolean = (value) => z.coerce.boolean().parse(value);
const booleanValues = ["true", "false"];
// Boolean flags: the literal strings "true"/"false" select the corresponding
// boolean; any other value (e.g. a bare `?offline` flag) means true.
const parseOptionalBoolean = (value) => {
  if (booleanValues.includes(value)) {
    // BUG FIX: previously this called parseBoolean(), i.e. z.coerce.boolean(),
    // which applies JavaScript Boolean() coercion — Boolean("false") === true —
    // so `?tls=false` incorrectly enabled the option. Compare the literal
    // string instead so "false" yields false.
    return value === "true";
  }
  return true;
};
|
||||
// Converts string-valued query options into a typed libSQL config,
// silently dropping unrecognized keys.
const libSQLConfigTransformed = rawLibSQLOptions.transform((raw) => {
  const parsed = {};
  for (const [key, value] of Object.entries(raw)) {
    switch (key) {
      // Numeric options.
      case "syncInterval":
      case "concurrency":
        parsed[key] = parseNumber(value);
        break;
      // Boolean flags (a bare flag counts as true).
      case "readYourWrites":
      case "offline":
      case "tls":
        parsed[key] = parseOptionalBoolean(value);
        break;
      // Pass-through string options.
      case "authToken":
      case "encryptionKey":
      case "syncUrl":
        parsed[key] = value;
        break;
    }
  }
  return parsed;
});
|
||||
// Parses query-string config into libSQL options, rewrapping zod validation
// failures in a friendlier Error; unrelated errors propagate unchanged.
const parseLibSQLConfig = (config) => {
  try {
    return libSQLConfigTransformed.parse(config);
  } catch (error) {
    if (error instanceof z.ZodError) {
      throw new Error(`Invalid LibSQL config: ${error.issues.map((e) => e.message).join(", ")}`);
    }
    throw error;
  }
};
|
||||
export {
|
||||
parseLibSQLConfig
|
||||
};
|
||||
8
node_modules/@astrojs/db/dist/core/errors.d.ts
generated
vendored
Normal file
8
node_modules/@astrojs/db/dist/core/errors.d.ts
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
export declare const MISSING_EXECUTE_PATH_ERROR: string;
|
||||
export declare const RENAME_TABLE_ERROR: (oldTable: string, newTable: string) => string;
|
||||
export declare const RENAME_COLUMN_ERROR: (oldSelector: string, newSelector: string) => string;
|
||||
export declare const FILE_NOT_FOUND_ERROR: (path: string) => string;
|
||||
export declare const SHELL_QUERY_MISSING_ERROR: string;
|
||||
export declare const EXEC_ERROR: (error: string) => string;
|
||||
export declare const EXEC_DEFAULT_EXPORT_ERROR: (fileName: string) => string;
|
||||
export declare const INTEGRATION_TABLE_CONFLICT_ERROR: (integrationName: string, tableName: string, isUserConflict: boolean) => string;
|
||||
48
node_modules/@astrojs/db/dist/core/errors.js
generated
vendored
Normal file
48
node_modules/@astrojs/db/dist/core/errors.js
generated
vendored
Normal file
@@ -0,0 +1,48 @@
|
||||
import colors from "piccolore";
|
||||
// Shown when `astro db execute` is run without a file path argument.
const MISSING_EXECUTE_PATH_ERROR = `${colors.red(
  "\u25B6 No file path provided."
)} Provide a path by running ${colors.cyan("astro db execute <path>")}
`;
// Raised when a schema diff adds and removes tables in the same batch,
// which is indistinguishable from a rename.
const RENAME_TABLE_ERROR = (oldTable, newTable) => {
  return colors.red("\u25B6 Potential table rename detected: " + oldTable + " -> " + newTable) + `
You cannot add and remove tables in the same schema update batch.

1. Use "deprecated: true" to deprecate a table before renaming.
2. Use "--force-reset" to ignore this warning and reset the database (deleting all of your data).

Visit https://docs.astro.build/en/guides/astro-db/#renaming-tables to learn more.`;
};
// Raised for the same ambiguity at column granularity.
const RENAME_COLUMN_ERROR = (oldSelector, newSelector) => {
  return colors.red("\u25B6 Potential column rename detected: " + oldSelector + ", " + newSelector) + `
You cannot add and remove columns in the same table.
To resolve, add a 'deprecated: true' flag to '${oldSelector}' instead.`;
};
const FILE_NOT_FOUND_ERROR = (path) => `${colors.red("\u25B6 File not found:")} ${colors.bold(path)}
`;
const SHELL_QUERY_MISSING_ERROR = `${colors.red(
  "\u25B6 Please provide a query to execute using the --query flag."
)}
`;
// Wraps an execution failure from `astro db execute` with context.
const EXEC_ERROR = (error) => {
  return `${colors.red(`Error while executing file:`)}

${error}`;
};
const EXEC_DEFAULT_EXPORT_ERROR = (fileName) => {
  return EXEC_ERROR(`Missing default function export in ${colors.bold(fileName)}`);
};
|
||||
// Raised when an integration registers a table name that already exists.
const INTEGRATION_TABLE_CONFLICT_ERROR = (integrationName, tableName, isUserConflict) => {
  // BUG FIX: `+` binds tighter than `?:`, so the previous
  // `colors.red(...) + isUserConflict ? a : b` evaluated
  // `(colors.red(...) + isUserConflict) ? a : b` — a truthy string — which
  // always returned the user-conflict branch WITHOUT the red header.
  // Parenthesize the conditional so the header is always prefixed and the
  // correct branch is selected.
  return colors.red("\u25B6 Conflicting table name in integration " + colors.bold(integrationName)) + (isUserConflict ? `
A user-defined table named ${colors.bold(tableName)} already exists` : `
Another integration already added a table named ${colors.bold(tableName)}`);
};
|
||||
export {
|
||||
EXEC_DEFAULT_EXPORT_ERROR,
|
||||
EXEC_ERROR,
|
||||
FILE_NOT_FOUND_ERROR,
|
||||
INTEGRATION_TABLE_CONFLICT_ERROR,
|
||||
MISSING_EXECUTE_PATH_ERROR,
|
||||
RENAME_COLUMN_ERROR,
|
||||
RENAME_TABLE_ERROR,
|
||||
SHELL_QUERY_MISSING_ERROR
|
||||
};
|
||||
2
node_modules/@astrojs/db/dist/core/integration/error-map.d.ts
generated
vendored
Normal file
2
node_modules/@astrojs/db/dist/core/integration/error-map.d.ts
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
import type { $ZodErrorMap } from 'zod/v4/core';
|
||||
export declare const errorMap: $ZodErrorMap;
|
||||
101
node_modules/@astrojs/db/dist/core/integration/error-map.js
generated
vendored
Normal file
101
node_modules/@astrojs/db/dist/core/integration/error-map.js
generated
vendored
Normal file
@@ -0,0 +1,101 @@
|
||||
// Custom Zod error map producing readable, path-prefixed messages for schema
// validation failures. Union errors are flattened into a summary line plus
// per-path "expected vs received" details.
const errorMap = (issue) => {
  const baseErrorPath = flattenErrorPath(issue.path ?? []);
  if (issue.code === "invalid_union") {
    // Collect invalid_type errors across all union branches, grouped by
    // path, merging the `expected` types seen at the same path.
    let typeOrLiteralErrByPath = /* @__PURE__ */ new Map();
    for (const unionError of issue.errors.flat()) {
      if (unionError.code === "invalid_type") {
        const flattenedErrorPath = flattenErrorPath(unionError.path);
        if (typeOrLiteralErrByPath.has(flattenedErrorPath)) {
          typeOrLiteralErrByPath.get(flattenedErrorPath).expected.push(unionError.expected);
        } else {
          typeOrLiteralErrByPath.set(flattenedErrorPath, {
            code: unionError.code,
            received: unionError.received,
            expected: [unionError.expected],
            message: unionError.message
          });
        }
      }
    }
    const messages = [prefix(baseErrorPath, "Did not match union.")];
    // Only report paths that failed in *every* branch of the union.
    const details = [...typeOrLiteralErrByPath.entries()].filter(([, error]) => error.expected.length === issue.errors.flat().length).map(
      ([key, error]) => key === baseErrorPath ? (
        // Avoid printing the key again if it's a base error
        `> ${getTypeOrLiteralMsg(error)}`
      ) : `> ${prefix(key, getTypeOrLiteralMsg(error))}`
    );
    if (details.length === 0) {
      // No common invalid_type errors: describe the expected shape of each
      // union branch instead, e.g. `{ key: type; other: type }`.
      const expectedShapes = [];
      for (const unionErrors of issue.errors) {
        const expectedShape = [];
        for (const _issue of unionErrors) {
          if (_issue.code === "invalid_union") {
            // Nested union: recurse and use that message directly.
            return errorMap(_issue);
          }
          const relativePath = flattenErrorPath(_issue.path).replace(baseErrorPath, "").replace(leadingPeriod, "");
          if ("expected" in _issue && typeof _issue.expected === "string") {
            expectedShape.push(
              relativePath ? `${relativePath}: ${_issue.expected}` : _issue.expected
            );
          } else if ("values" in _issue) {
            // Literal/enum issues list their accepted string values.
            expectedShape.push(
              ..._issue.values.filter((v) => typeof v === "string").map((v) => `"${v}"`)
            );
          } else if (relativePath) {
            expectedShape.push(relativePath);
          }
        }
        if (expectedShape.length === 1 && !expectedShape[0]?.includes(":")) {
          // A single bare type (no `key:` part) is printed without braces.
          expectedShapes.push(expectedShape.join(""));
        } else if (expectedShape.length > 0) {
          expectedShapes.push(`{ ${expectedShape.join("; ")} }`);
        }
      }
      if (expectedShapes.length) {
        details.push("> Expected type `" + expectedShapes.join(" | ") + "`");
        details.push("> Received `" + stringify(issue.input) + "`");
      }
    }
    return {
      message: messages.concat(details).join("\n")
    };
  } else if (issue.code === "invalid_type") {
    return {
      message: prefix(
        baseErrorPath,
        getTypeOrLiteralMsg({
          code: issue.code,
          received: typeof issue.input,
          expected: [issue.expected],
          message: issue.message
        })
      )
    };
  } else if (issue.message) {
    return { message: prefix(baseErrorPath, issue.message) };
  }
};
|
||||
// Renders an invalid_type / invalid_literal error as an
// "Expected … received …" message. Returns undefined for any other code
// (callers only pass these two).
const getTypeOrLiteralMsg = (error) => {
  // A missing value renders as a plain "Required" message.
  if (typeof error.received === "undefined" || error.received === "undefined")
    return error.message ?? "Required";
  const expectedDeduped = new Set(error.expected);
  switch (error.code) {
    case "invalid_type":
      return `Expected type \`${unionExpectedVals(expectedDeduped)}\`, received \`${stringify(
        error.received
      )}\``;
    case "invalid_literal":
      return `Expected \`${unionExpectedVals(expectedDeduped)}\`, received \`${stringify(
        error.received
      )}\``;
  }
};
|
||||
// Prepends a bolded path key to the message; an empty key returns the
// message unchanged.
const prefix = (key, msg) => (key.length === 0 ? msg : `**${key}**: ${msg}`);
|
||||
// Joins a set of expected values into a ` | `-separated union string.
const unionExpectedVals = (expectedVals) => {
  const rendered = [];
  for (const expectedVal of expectedVals) {
    rendered.push(stringify(expectedVal));
  }
  return rendered.join(" | ");
};
|
||||
// Flattens a Zod issue path array into a dotted string, e.g. ["a",0] -> "a.0".
const flattenErrorPath = (errorPath) => {
  return errorPath.join(".");
};
|
||||
// Single-line JSON rendering: collapse the newline-plus-indent sequences
// emitted by JSON.stringify(..., 1) into single spaces.
const stringify = (val) => {
  const pretty = JSON.stringify(val, null, 1);
  return pretty.split(newlinePlusWhitespace).join(" ");
};
const newlinePlusWhitespace = /\n\s*/;
const leadingPeriod = /^\./;
|
||||
export {
|
||||
errorMap
|
||||
};
|
||||
2
node_modules/@astrojs/db/dist/core/integration/file-url.d.ts
generated
vendored
Normal file
2
node_modules/@astrojs/db/dist/core/integration/file-url.d.ts
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
import type { AstroIntegration } from 'astro';
|
||||
export declare function fileURLIntegration(): AstroIntegration;
|
||||
84
node_modules/@astrojs/db/dist/core/integration/file-url.js
generated
vendored
Normal file
84
node_modules/@astrojs/db/dist/core/integration/file-url.js
generated
vendored
Normal file
@@ -0,0 +1,84 @@
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import { pathToFileURL } from "node:url";
|
||||
/**
 * Move a file from `fromUrl` to `toUrl`, creating `toDir` first.
 * Despite the name, this removes the source (it is a move, not a copy) —
 * callers use it to relocate build assets from the client to the server dir.
 */
async function copyFile(toDir, fromUrl, toUrl) {
  await fs.promises.mkdir(toDir, { recursive: true });
  try {
    // Fast path: rename within the same filesystem.
    await fs.promises.rename(fromUrl, toUrl);
  } catch (error) {
    // rename() fails with EXDEV when source and destination are on different
    // filesystems/devices (common in containerized or tmpfs builds);
    // fall back to an explicit copy followed by deleting the source.
    if (error && error.code === "EXDEV") {
      await fs.promises.copyFile(fromUrl, toUrl);
      await fs.promises.unlink(fromUrl);
    } else {
      throw error;
    }
  }
}
|
||||
/**
 * Astro integration that handles `?fileurl` imports: during dev it resolves
 * them to file:// URLs; during build it emits the file as a Rollup asset and
 * relocates/cleans the emitted files after the build completes.
 */
function fileURLIntegration() {
  // File names of all assets emitted during the build, collected across
  // Vite plugin instances by generateBundle.
  const fileNames = [];
  function createVitePlugin(command) {
    // Rollup reference ids for assets emitted by this plugin instance.
    let referenceIds = [];
    return {
      name: "@astrojs/db/file-url",
      enforce: "pre",
      load: {
        filter: {
          // Only modules imported with a `?fileurl` query.
          id: /\?fileurl$/
        },
        async handler(id) {
          // Strip the query to get the real file path.
          const filePath = id.slice(0, id.indexOf("?"));
          if (command === "build") {
            const data = await fs.promises.readFile(filePath);
            const name = path.basename(filePath);
            // Emit as a Rollup asset; the final URL is resolved later via
            // import.meta.ROLLUP_FILE_URL_<id>.
            const referenceId = this.emitFile({
              name,
              source: data,
              type: "asset"
            });
            referenceIds.push(referenceId);
            return `export default import.meta.ROLLUP_FILE_URL_${referenceId};`;
          } else {
            // Dev: point straight at the file on disk.
            return `export default new URL(${JSON.stringify(pathToFileURL(filePath).toString())})`;
          }
        }
      },
      generateBundle() {
        // Record final emitted file names for post-build handling, then
        // reset for any subsequent bundle pass.
        for (const referenceId of referenceIds) {
          fileNames.push(this.getFileName(referenceId));
        }
        referenceIds = [];
      }
    };
  }
  // Resolved Astro config, captured in astro:config:done for use at build end.
  let config;
  return {
    name: "@astrojs/db/file-url",
    hooks: {
      "astro:config:setup"({ updateConfig, command }) {
        updateConfig({
          vite: {
            plugins: [createVitePlugin(command)]
          }
        });
      },
      "astro:config:done": ({ config: _config }) => {
        config = _config;
      },
      async "astro:build:done"() {
        if (config.output === "static") {
          // Static output: the emitted assets are not needed in the final
          // site, so delete them and try to remove the (possibly empty)
          // asset directory; rmdir failure is deliberately ignored.
          const unlinks = [];
          for (const fileName of fileNames) {
            const url = new URL(fileName, config.outDir);
            unlinks.push(fs.promises.unlink(url));
          }
          await Promise.all(unlinks);
          const assetDir = new URL(config.build.assets, config.outDir);
          await fs.promises.rmdir(assetDir).catch(() => []);
        } else {
          // Server output: relocate each asset from the client build dir to
          // the server build dir (copyFile moves the file).
          const moves = [];
          for (const fileName of fileNames) {
            const fromUrl = new URL(fileName, config.build.client);
            const toUrl = new URL(fileName, config.build.server);
            const toDir = new URL("./", toUrl);
            moves.push(copyFile(toDir, fromUrl, toUrl));
          }
          await Promise.all(moves);
        }
      }
    }
  };
}
|
||||
export {
|
||||
fileURLIntegration
|
||||
};
|
||||
8
node_modules/@astrojs/db/dist/core/integration/index.d.ts
generated
vendored
Normal file
8
node_modules/@astrojs/db/dist/core/integration/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
import type { AstroIntegration } from 'astro';
|
||||
import * as z from 'zod/v4';
|
||||
declare const astroDBConfigSchema: z.ZodPrefault<z.ZodOptional<z.ZodObject<{
|
||||
mode: z.ZodDefault<z.ZodOptional<z.ZodUnion<readonly [z.ZodLiteral<"node">, z.ZodLiteral<"web">]>>>;
|
||||
}, z.core.$strip>>>;
|
||||
export type AstroDBConfig = z.infer<typeof astroDBConfigSchema>;
|
||||
export declare function integration(options?: AstroDBConfig): AstroIntegration[];
|
||||
export {};
|
||||
215
node_modules/@astrojs/db/dist/core/integration/index.js
generated
vendored
Normal file
215
node_modules/@astrojs/db/dist/core/integration/index.js
generated
vendored
Normal file
@@ -0,0 +1,215 @@
|
||||
import { existsSync } from "node:fs";
|
||||
import { mkdir, writeFile } from "node:fs/promises";
|
||||
import { dirname } from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
import colors from "piccolore";
|
||||
import {
|
||||
createServer,
|
||||
loadEnv,
|
||||
mergeConfig
|
||||
} from "vite";
|
||||
import parseArgs from "yargs-parser";
|
||||
import * as z from "zod/v4";
|
||||
import { AstroDbError, isDbError } from "../../runtime/utils.js";
|
||||
import { CONFIG_FILE_NAMES, DB_PATH, VIRTUAL_MODULE_ID } from "../consts.js";
|
||||
import { EXEC_DEFAULT_EXPORT_ERROR, EXEC_ERROR } from "../errors.js";
|
||||
import { resolveDbConfig } from "../load-file.js";
|
||||
import { SEED_DEV_FILE_NAME } from "../queries.js";
|
||||
import { getDbDirectoryUrl, getRemoteDatabaseInfo } from "../utils.js";
|
||||
import { fileURLIntegration } from "./file-url.js";
|
||||
import { getDtsContent } from "./typegen.js";
|
||||
import {
|
||||
vitePluginDb
|
||||
} from "./vite-plugin-db.js";
|
||||
import { vitePluginDbClient } from "./vite-plugin-db-client.js";
|
||||
// Schema for the options object accepted by the astro:db integration.
// `.optional().prefault({})` means a missing options argument parses to
// the defaults ({ mode: 'node' }).
const astroDBConfigSchema = z.object({
  /**
   * Sets the mode of the underlying `@libsql/client` connection.
   *
   * In most cases, the default 'node' mode is sufficient. On platforms like Cloudflare, or Deno, you may need to set this to 'web'.
   *
   * @default 'node'
   */
  mode: z.union([z.literal("node"), z.literal("web")]).optional().default("node")
}).optional().prefault({});
|
||||
/**
 * The core astro:db integration. Holds mutable state in this closure that is
 * filled in progressively by the Astro hooks below (tables/seed files become
 * available in astro:config:done; the seed executor in server/build setup).
 */
function astroDBIntegration(options) {
  const resolvedConfig = astroDBConfigSchema.parse(options);
  let connectToRemote = false;
  let configFileDependencies = [];
  let root;
  let tempViteServer;
  // Late-bound accessors: the throwing `get` implementations are replaced in
  // astro:config:done once the db config has been resolved. Calling them
  // before that is an internal ordering bug, hence the loud errors.
  let tables = {
    get() {
      throw new Error("[astro:db] INTERNAL Tables not loaded yet");
    }
  };
  let seedFiles = {
    get() {
      throw new Error("[astro:db] INTERNAL Seed files not loaded yet");
    }
  };
  let seedHandler = {
    execute: () => {
      throw new Error("[astro:db] INTERNAL Seed handler not loaded yet");
    },
    inProgress: false
  };
  let command;
  let finalBuildOutput;
  return {
    name: "astro:db",
    hooks: {
      "astro:config:setup": async ({ updateConfig, config, command: _command, logger }) => {
        command = _command;
        root = config.root;
        // `astro preview` serves a finished build; no db plumbing needed.
        if (command === "preview") return;
        let dbPlugin = void 0;
        // slice(3): skip node, the astro binary, and the subcommand.
        const args = parseArgs(process.argv.slice(3));
        connectToRemote = process.env.ASTRO_INTERNAL_TEST_REMOTE || args["remote"];
        const dbClientPlugin = vitePluginDbClient({
          connectToRemote,
          mode: resolvedConfig.mode
        });
        if (connectToRemote) {
          dbPlugin = vitePluginDb({
            connectToRemote,
            appToken: getRemoteDatabaseInfo().token,
            tables,
            root: config.root,
            srcDir: config.srcDir,
            output: config.output,
            seedHandler
          });
        } else {
          // Local variant additionally receives seed files and a logger.
          dbPlugin = vitePluginDb({
            connectToRemote,
            tables,
            seedFiles,
            root: config.root,
            srcDir: config.srcDir,
            output: config.output,
            logger,
            seedHandler
          });
        }
        updateConfig({
          vite: {
            assetsInclude: [DB_PATH],
            plugins: [dbClientPlugin, dbPlugin]
          }
        });
      },
      "astro:config:done": async ({ config, injectTypes, buildOutput }) => {
        if (command === "preview") return;
        finalBuildOutput = buildOutput;
        const { dbConfig, dependencies, integrationSeedPaths } = await resolveDbConfig(config);
        // Flip the late-bound accessors now that config is resolved.
        tables.get = () => dbConfig.tables;
        seedFiles.get = () => integrationSeedPaths;
        configFileDependencies = dependencies;
        const localDbUrl = new URL(DB_PATH, config.root);
        // Ensure the local database file exists before anything opens it.
        if (!connectToRemote && !existsSync(localDbUrl)) {
          await mkdir(dirname(fileURLToPath(localDbUrl)), { recursive: true });
          await writeFile(localDbUrl, "");
        }
        injectTypes({
          filename: "db.d.ts",
          content: getDtsContent(tables.get() ?? {})
        });
      },
      "astro:server:setup": async ({ server, logger }) => {
        const environment = server.environments.ssr;
        seedHandler.execute = async (fileUrl) => {
          await executeSeedFile({ fileUrl, environment });
        };
        // Restart the dev server whenever the db config (or anything it
        // imports) changes.
        const filesToWatch = [
          ...CONFIG_FILE_NAMES.map((c) => new URL(c, getDbDirectoryUrl(root))),
          ...configFileDependencies.map((c) => new URL(c, root))
        ];
        server.watcher.on("all", (_event, relativeEntry) => {
          const entry = new URL(relativeEntry, root);
          if (filesToWatch.some((f) => entry.href === f.href)) {
            server.restart();
          }
        });
        logger.info(
          connectToRemote ? "Connected to remote database." : "New local database created."
        );
        if (connectToRemote) return;
        const localSeedPaths = SEED_DEV_FILE_NAME.map(
          (name) => new URL(name, getDbDirectoryUrl(root))
        );
        // Eagerly importing the virtual module triggers table creation and
        // seeding (see vitePluginDb's load handler); errors are logged, not
        // fatal to the dev server.
        if (seedFiles.get().length || localSeedPaths.find((path) => existsSync(path))) {
          await environment.runner.import(VIRTUAL_MODULE_ID).catch((e) => {
            logger.error(e instanceof Error ? e.message : String(e));
          });
        }
      },
      "astro:build:start": async ({ logger }) => {
        // A server build with neither --remote nor ASTRO_DATABASE_FILE would
        // bake in a dev-only local db path — fail early with guidance.
        if (!connectToRemote && !databaseFileEnvDefined() && finalBuildOutput === "server") {
          const message = `Attempting to build without the --remote flag or the ASTRO_DATABASE_FILE environment variable defined. You probably want to pass --remote to astro build.`;
          const hint = "Learn more connecting to libSQL: https://docs.astro.build/en/guides/astro-db/#connect-a-libsql-database-for-production";
          throw new AstroDbError(message, hint);
        }
        logger.info(
          "database: " + (connectToRemote ? colors.yellow("remote") : colors.blue("local database."))
        );
      },
      "astro:build:setup": async ({ vite }) => {
        // Builds have no dev server, so spin up a throwaway Vite server just
        // to run seed files through its module runner.
        tempViteServer = await getTempViteServer({ viteConfig: vite });
        const environment = tempViteServer.environments.ssr;
        seedHandler.execute = async (fileUrl) => {
          await executeSeedFile({ fileUrl, environment });
        };
      },
      "astro:build:done": async () => {
        await tempViteServer?.close();
      }
    }
  };
}
|
||||
// True when ASTRO_DATABASE_FILE is defined either in a .env file (loaded via
// Vite with no mode prefix) or directly in the process environment.
function databaseFileEnvDefined() {
  const dotenv = loadEnv("", process.cwd());
  const fromDotenv = dotenv.ASTRO_DATABASE_FILE;
  const fromProcess = process.env.ASTRO_DATABASE_FILE;
  return fromDotenv != null || fromProcess != null;
}
|
||||
/**
 * Public entry point: composes the core astro:db integration with the
 * `?fileurl` asset-handling integration.
 */
function integration(options) {
  return [astroDBIntegration(options), fileURLIntegration()];
}
|
||||
/**
 * Run a seed file through a Vite module runner.
 * The file must default-export a function; DbErrors raised while seeding are
 * re-wrapped as AstroDbError with a seed-specific message, everything else
 * is rethrown untouched.
 */
async function executeSeedFile({
  fileUrl,
  environment
}) {
  // The module runner expects a plain (decoded) path, not a file:// URL.
  const pathname = decodeURIComponent(fileUrl.pathname);
  const mod = await environment.runner.import(pathname);
  if (typeof mod.default !== "function") {
    throw new AstroDbError(EXEC_DEFAULT_EXPORT_ERROR(fileURLToPath(fileUrl)));
  }
  try {
    await mod.default();
  } catch (e) {
    if (isDbError(e)) {
      throw new AstroDbError(EXEC_ERROR(e.message));
    }
    throw e;
  }
}
|
||||
/**
 * Create a throwaway Vite dev server (used to execute seed files during
 * `astro build`): middleware mode, no HMR/watcher/websocket, silent logs.
 * Errors that Vite would normally push to the browser overlay are rethrown
 * so the build fails instead of hanging.
 */
async function getTempViteServer({ viteConfig }) {
  const tempViteServer = await createServer(
    mergeConfig(viteConfig, {
      server: { middlewareMode: true, hmr: false, watch: null, ws: false },
      optimizeDeps: { noDiscovery: true },
      ssr: { external: [] },
      logLevel: "silent"
    })
  );
  // Intercept hot-channel "error" payloads (there is no client to show an
  // overlay to) and surface them as thrown errors; forward everything else.
  const hotSend = tempViteServer.environments.client.hot.send;
  tempViteServer.environments.client.hot.send = (payload) => {
    if (payload.type === "error") {
      throw payload.err;
    }
    return hotSend(payload);
  };
  return tempViteServer;
}
|
||||
export {
|
||||
integration
|
||||
};
|
||||
2
node_modules/@astrojs/db/dist/core/integration/typegen.d.ts
generated
vendored
Normal file
2
node_modules/@astrojs/db/dist/core/integration/typegen.d.ts
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
import type { DBTables } from '../types.js';
|
||||
export declare function getDtsContent(tables: DBTables): string;
|
||||
21
node_modules/@astrojs/db/dist/core/integration/typegen.js
generated
vendored
Normal file
21
node_modules/@astrojs/db/dist/core/integration/typegen.js
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
import { RUNTIME_IMPORT } from "../consts.js";
|
||||
/**
 * Generate the `db.d.ts` ambient module declaration for `astro:db`,
 * one typed export per configured table.
 * NOTE(review): the template literal's internal layout is emitted verbatim
 * into the generated file — do not reindent it.
 */
function getDtsContent(tables) {
  const content = `// This file is generated by Astro DB
declare module 'astro:db' {
${Object.entries(tables).map(([name, table]) => generateTableType(name, table)).join("\n")}
}
`;
  return content;
}
|
||||
/**
 * Render the `export const <name>: Table<...>` declaration for one table.
 * Deprecated columns are filtered out so they disappear from the public
 * types while still existing in the database.
 * NOTE(review): whitespace inside the template literal lands verbatim in the
 * generated .d.ts; the exact indentation here was not recoverable from this
 * dump — verify against upstream before relying on byte-exact output.
 */
function generateTableType(name, table) {
  const sanitizedColumnsList = Object.entries(table.columns).filter(([, val]) => !val.schema.deprecated);
  const sanitizedColumns = Object.fromEntries(sanitizedColumnsList);
  let tableType = `  export const ${name}: import(${RUNTIME_IMPORT}).Table<
    ${JSON.stringify(name)},
    ${JSON.stringify(sanitizedColumns)}
  >;`;
  return tableType;
}
|
||||
export {
|
||||
getDtsContent
|
||||
};
|
||||
7
node_modules/@astrojs/db/dist/core/integration/vite-plugin-db-client.d.ts
generated
vendored
Normal file
7
node_modules/@astrojs/db/dist/core/integration/vite-plugin-db-client.d.ts
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
import type { VitePlugin } from '../utils.js';
|
||||
type VitePluginDBClientParams = {
|
||||
connectToRemote: boolean;
|
||||
mode: 'node' | 'web';
|
||||
};
|
||||
export declare function vitePluginDbClient(params: VitePluginDBClientParams): VitePlugin;
|
||||
export {};
|
||||
50
node_modules/@astrojs/db/dist/core/integration/vite-plugin-db-client.js
generated
vendored
Normal file
50
node_modules/@astrojs/db/dist/core/integration/vite-plugin-db-client.js
generated
vendored
Normal file
@@ -0,0 +1,50 @@
|
||||
import { DB_CLIENTS, VIRTUAL_CLIENT_MODULE_ID } from "../consts.js";
|
||||
// Module source for the remote db client: "web" selects the fetch-based
// client package; any other mode (including "node") uses the node client.
function getRemoteClientModule(mode) {
  const clientPackage = mode === "web" ? DB_CLIENTS.web : DB_CLIENTS.node;
  return `export { createClient } from '${clientPackage}';`;
}
|
||||
// Module source for the local db client. Every mode currently maps to the
// same local client package; `mode` is kept for signature parity with
// getRemoteClientModule.
function getLocalClientModule(mode) {
  return `export { createClient } from '${DB_CLIENTS.local}';`;
}
|
||||
// Resolved id for the virtual client module ("\0" prefix marks it as
// plugin-internal per Rollup convention).
const resolved = "\0" + VIRTUAL_CLIENT_MODULE_ID;
/**
 * Vite plugin serving the virtual db-client module: resolves the public
 * virtual id to the internal one, then loads either the remote or local
 * client re-export depending on `connectToRemote`.
 */
function vitePluginDbClient(params) {
  return {
    name: "virtual:astro:db-client",
    enforce: "pre",
    resolveId: {
      filter: {
        id: new RegExp(`^${VIRTUAL_CLIENT_MODULE_ID}$`)
      },
      handler() {
        return resolved;
      }
    },
    load: {
      filter: {
        id: new RegExp(`^${resolved}$`)
      },
      handler() {
        // switch on a boolean: true -> remote client, anything else -> local.
        switch (params.connectToRemote) {
          case true:
            return getRemoteClientModule(params.mode);
          case false:
          default:
            return getLocalClientModule(params.mode);
        }
      }
    }
  };
}
|
||||
export {
|
||||
vitePluginDbClient
|
||||
};
|
||||
60
node_modules/@astrojs/db/dist/core/integration/vite-plugin-db.d.ts
generated
vendored
Normal file
60
node_modules/@astrojs/db/dist/core/integration/vite-plugin-db.d.ts
generated
vendored
Normal file
@@ -0,0 +1,60 @@
|
||||
import type { AstroConfig, AstroIntegrationLogger } from 'astro';
|
||||
import type { DBTables } from '../types.js';
|
||||
import { type VitePlugin } from '../utils.js';
|
||||
export type LateTables = {
|
||||
get: () => DBTables;
|
||||
};
|
||||
export type LateSeedFiles = {
|
||||
get: () => Array<string | URL>;
|
||||
};
|
||||
export type SeedHandler = {
|
||||
inProgress: boolean;
|
||||
execute: (fileUrl: URL) => Promise<void>;
|
||||
};
|
||||
type VitePluginDBParams = {
|
||||
connectToRemote: false;
|
||||
tables: LateTables;
|
||||
seedFiles: LateSeedFiles;
|
||||
srcDir: URL;
|
||||
root: URL;
|
||||
logger?: AstroIntegrationLogger;
|
||||
output: AstroConfig['output'];
|
||||
seedHandler: SeedHandler;
|
||||
} | {
|
||||
connectToRemote: true;
|
||||
tables: LateTables;
|
||||
appToken: string;
|
||||
srcDir: URL;
|
||||
root: URL;
|
||||
output: AstroConfig['output'];
|
||||
seedHandler: SeedHandler;
|
||||
};
|
||||
export declare function vitePluginDb(params: VitePluginDBParams): VitePlugin;
|
||||
export declare function getConfigVirtualModContents(): string;
|
||||
export declare function getLocalVirtualModContents({ tables, root, localExecution, }: {
|
||||
tables: DBTables;
|
||||
root: URL;
|
||||
/**
|
||||
* Used for the execute command to import the client directly.
|
||||
* In other cases, we use the runtime only vite virtual module.
|
||||
*
|
||||
* This is used to ensure that the client is imported correctly
|
||||
* when executing commands like `astro db execute`.
|
||||
*/
|
||||
localExecution: boolean;
|
||||
}): string;
|
||||
export declare function getRemoteVirtualModContents({ tables, appToken, isBuild, output, localExecution, }: {
|
||||
tables: DBTables;
|
||||
appToken: string;
|
||||
isBuild: boolean;
|
||||
output: AstroConfig['output'];
|
||||
/**
|
||||
* Used for the execute command to import the client directly.
|
||||
* In other cases, we use the runtime only vite virtual module.
|
||||
*
|
||||
* This is used to ensure that the client is imported correctly
|
||||
* when executing commands like `astro db execute`.
|
||||
*/
|
||||
localExecution: boolean;
|
||||
}): string;
|
||||
export {};
|
||||
191
node_modules/@astrojs/db/dist/core/integration/vite-plugin-db.js
generated
vendored
Normal file
191
node_modules/@astrojs/db/dist/core/integration/vite-plugin-db.js
generated
vendored
Normal file
@@ -0,0 +1,191 @@
|
||||
import { existsSync } from "node:fs";
|
||||
import { fileURLToPath } from "node:url";
|
||||
import { sql } from "drizzle-orm";
|
||||
import { SQLiteAsyncDialect } from "drizzle-orm/sqlite-core";
|
||||
import { normalizeDatabaseUrl } from "../../runtime/index.js";
|
||||
import {
|
||||
DB_CLIENTS,
|
||||
DB_PATH,
|
||||
RUNTIME_IMPORT,
|
||||
RUNTIME_VIRTUAL_IMPORT,
|
||||
VIRTUAL_CLIENT_MODULE_ID,
|
||||
VIRTUAL_MODULE_ID
|
||||
} from "../consts.js";
|
||||
import { createClient } from "../db-client/libsql-local.js";
|
||||
import { getResolvedFileUrl } from "../load-file.js";
|
||||
import { getCreateIndexQueries, getCreateTableQuery, SEED_DEV_FILE_NAME } from "../queries.js";
|
||||
import {
|
||||
getAstroEnv,
|
||||
getDbDirectoryUrl,
|
||||
getRemoteDatabaseInfo
|
||||
} from "../utils.js";
|
||||
// Internal resolved ids for the astro:db virtual module. The ":seed" variant
// is handed out while a seed file is executing so the seed file gets a
// module that does NOT retrigger table recreation/seeding (avoids recursion).
const resolved = {
  module: "\0" + VIRTUAL_MODULE_ID,
  importedFromSeedFile: "\0" + VIRTUAL_MODULE_ID + ":seed"
};
/**
 * Vite plugin serving the `astro:db` virtual module. For remote connections
 * it emits the remote client module; locally, loading the module also
 * (re)creates tables and runs seed files as a side effect.
 */
function vitePluginDb(params) {
  // Captured from configResolved; "build" until Vite tells us otherwise.
  let command = "build";
  return {
    name: "astro:db",
    enforce: "pre",
    configResolved(resolvedConfig) {
      command = resolvedConfig.command;
    },
    resolveId: {
      filter: {
        id: new RegExp(`^${VIRTUAL_MODULE_ID}$`)
      },
      handler() {
        // While a seed file is running, route its `astro:db` import to the
        // side-effect-free variant.
        if (params.seedHandler.inProgress) {
          return resolved.importedFromSeedFile;
        }
        return resolved.module;
      }
    },
    load: {
      filter: {
        id: new RegExp(`^(${resolved.module}|${resolved.importedFromSeedFile})$`)
      },
      async handler(id) {
        if (params.connectToRemote) {
          return getRemoteVirtualModContents({
            appToken: params.appToken,
            tables: params.tables.get(),
            isBuild: command === "build",
            output: params.output,
            localExecution: false
          });
        }
        // Seed-file import: return module contents without re-seeding.
        if (id === resolved.importedFromSeedFile) {
          return getLocalVirtualModContents({
            root: params.root,
            tables: params.tables.get(),
            localExecution: false
          });
        }
        // Normal local load: drop and recreate all tables, then run every
        // existing seed file (integration seeds first, then local ones).
        await recreateTables(params);
        const seedFiles = getResolvedSeedFiles(params);
        // NOTE(review): `for await` over a plain array — works (arrays are
        // sync-iterable) but a plain `for...of` would behave identically.
        for await (const seedFile of seedFiles) {
          this.addWatchFile(fileURLToPath(seedFile));
          if (existsSync(seedFile)) {
            params.seedHandler.inProgress = true;
            await params.seedHandler.execute(seedFile);
          }
        }
        if (params.seedHandler.inProgress) {
          (params.logger ?? console).info("Seeded database.");
          params.seedHandler.inProgress = false;
        }
        return getLocalVirtualModContents({
          root: params.root,
          tables: params.tables.get(),
          localExecution: false
        });
      }
    }
  };
}
|
||||
// Virtual-module source used when bundling the user's db config file:
// just re-exports the runtime-only surface (no client, no seeding).
function getConfigVirtualModContents() {
  return `export * from ${RUNTIME_VIRTUAL_IMPORT}`;
}
|
||||
// Pick the createClient import line for generated module contents:
// `astro db execute` imports the node client package directly; everything
// else goes through the runtime-only virtual client module.
function getDBModule(localExecution) {
  if (localExecution) {
    return `import { createClient } from '${DB_CLIENTS.node}';`;
  }
  return `import { createClient } from '${VIRTUAL_CLIENT_MODULE_ID}';`;
}
|
||||
/**
 * Build the source of the `astro:db` virtual module for a LOCAL database:
 * a `db` client pointing at the local db file (or ASTRO_DATABASE_FILE
 * override), runtime re-exports, and one drizzle table export per table.
 */
function getLocalVirtualModContents({
  tables,
  root,
  localExecution
}) {
  const { ASTRO_DATABASE_FILE } = getAstroEnv();
  const dbUrl = new URL(DB_PATH, root);
  const clientImport = getDBModule(localExecution);
  // NOTE: JSON.stringify(dbUrl) serializes the URL via its toJSON() to the
  // href string, which normalizeDatabaseUrl consumes at runtime.
  return `
import { asDrizzleTable, normalizeDatabaseUrl } from ${RUNTIME_IMPORT};

${clientImport}

const dbUrl = normalizeDatabaseUrl(${JSON.stringify(ASTRO_DATABASE_FILE)}, ${JSON.stringify(dbUrl)});
export const db = createClient({ url: dbUrl });

export * from ${RUNTIME_VIRTUAL_IMPORT};

${getStringifiedTableExports(tables)}`;
}
|
||||
/**
 * Build the source of the `astro:db` virtual module for a REMOTE database.
 * The generated code reads its url/token differently depending on build mode:
 * - dev: both inlined as literals;
 * - build + server output: token read from process.env only (never inlined,
 *   so the secret does not end up in the server bundle as a fallback);
 * - build + static output: env var with the literal token as fallback.
 */
function getRemoteVirtualModContents({
  tables,
  appToken,
  isBuild,
  output,
  localExecution
}) {
  const dbInfo = getRemoteDatabaseInfo();
  // Expression string for the token argument in the generated module.
  function appTokenArg() {
    if (isBuild) {
      if (output === "server") {
        return `process.env.ASTRO_DB_APP_TOKEN`;
      } else {
        return `process.env.ASTRO_DB_APP_TOKEN ?? ${JSON.stringify(appToken)}`;
      }
    } else {
      return JSON.stringify(appToken);
    }
  }
  // Expression string for the url argument in the generated module.
  function dbUrlArg() {
    const dbStr = JSON.stringify(dbInfo.url);
    if (isBuild) {
      return `import.meta.env.ASTRO_DB_REMOTE_URL ?? ${dbStr}`;
    } else {
      return dbStr;
    }
  }
  const clientImport = getDBModule(localExecution);
  return `
import {asDrizzleTable} from ${RUNTIME_IMPORT};

${clientImport}

export const db = await createClient({
  url: ${dbUrlArg()},
  token: ${appTokenArg()},
});

export * from ${RUNTIME_VIRTUAL_IMPORT};

${getStringifiedTableExports(tables)}
`;
}
|
||||
// One `export const <name> = asDrizzleTable(...)` line per table, joined by
// newlines, for inclusion in generated virtual-module source.
function getStringifiedTableExports(tables) {
  const exportLines = [];
  for (const [name, table] of Object.entries(tables)) {
    exportLines.push(
      `export const ${name} = asDrizzleTable(${JSON.stringify(name)}, ${JSON.stringify(table)}, false)`
    );
  }
  return exportLines.join("\n");
}
|
||||
// Dialect instance used only for identifier escaping in raw SQL below.
const sqlite = new SQLiteAsyncDialect();
/**
 * Drop and recreate every configured table (plus its indexes) in the local
 * database. Destructive by design: the local dev db is rebuilt from the
 * schema on each load, then repopulated by seed files.
 */
async function recreateTables({ tables, root }) {
  const { ASTRO_DATABASE_FILE } = getAstroEnv();
  const dbUrl = normalizeDatabaseUrl(ASTRO_DATABASE_FILE, new URL(DB_PATH, root).href);
  const db = createClient({ url: dbUrl });
  const setupQueries = [];
  for (const [name, table] of Object.entries(tables.get() ?? {})) {
    const dropQuery = sql.raw(`DROP TABLE IF EXISTS ${sqlite.escapeName(name)}`);
    const createQuery = sql.raw(getCreateTableQuery(name, table));
    const indexQueries = getCreateIndexQueries(name, table);
    setupQueries.push(dropQuery, createQuery, ...indexQueries.map((s) => sql.raw(s)));
  }
  // defer_foreign_keys lets the batch drop/recreate tables that reference
  // each other without tripping FK checks mid-batch.
  await db.batch([
    db.run(sql`pragma defer_foreign_keys=true;`),
    ...setupQueries.map((q) => db.run(q))
  ]);
}
|
||||
/**
 * All candidate seed-file URLs: integration-provided seeds first (resolved
 * relative to the project root), then the conventional local dev seed file
 * names in the db directory. Existence is checked later by the caller.
 */
function getResolvedSeedFiles({ root, seedFiles }) {
  const localSeedFiles = SEED_DEV_FILE_NAME.map((name) => new URL(name, getDbDirectoryUrl(root)));
  const integrationSeedFiles = seedFiles.get().map((s) => getResolvedFileUrl(root, s));
  return [...integrationSeedFiles, ...localSeedFiles];
}
|
||||
export {
|
||||
getConfigVirtualModContents,
|
||||
getLocalVirtualModContents,
|
||||
getRemoteVirtualModContents,
|
||||
vitePluginDb
|
||||
};
|
||||
126
node_modules/@astrojs/db/dist/core/load-file.d.ts
generated
vendored
Normal file
126
node_modules/@astrojs/db/dist/core/load-file.d.ts
generated
vendored
Normal file
@@ -0,0 +1,126 @@
|
||||
import type { AstroConfig } from 'astro';
|
||||
import './types.js';
|
||||
/**
|
||||
* Load a user’s `astro:db` configuration file and additional configuration files provided by integrations.
|
||||
*/
|
||||
export declare function resolveDbConfig({ root, integrations, }: Pick<AstroConfig, 'root' | 'integrations'>): Promise<{
|
||||
/** Resolved `astro:db` config, including tables added by integrations. */
|
||||
dbConfig: {
|
||||
tables: Record<string, {
|
||||
indexes: Record<string, {
|
||||
on: string | string[];
|
||||
unique?: boolean | undefined;
|
||||
}>;
|
||||
columns: Record<string, {
|
||||
type: "boolean";
|
||||
schema: {
|
||||
optional: boolean;
|
||||
unique: boolean;
|
||||
deprecated: boolean;
|
||||
label?: string | undefined;
|
||||
name?: string | undefined;
|
||||
collection?: string | undefined;
|
||||
default?: boolean | import("../runtime/types.js").SerializedSQL | undefined;
|
||||
};
|
||||
} | {
|
||||
type: "number";
|
||||
schema: ({
|
||||
unique: boolean;
|
||||
deprecated: boolean;
|
||||
name?: string | undefined;
|
||||
label?: string | undefined;
|
||||
collection?: string | undefined;
|
||||
} & ({
|
||||
primaryKey: false;
|
||||
optional: boolean;
|
||||
default?: number | import("../runtime/types.js").SerializedSQL | undefined;
|
||||
} | {
|
||||
primaryKey: true;
|
||||
optional?: false | undefined;
|
||||
default?: undefined;
|
||||
})) & {
|
||||
references?: import("./types.js").NumberColumn;
|
||||
};
|
||||
} | {
|
||||
type: "text";
|
||||
schema: ({
|
||||
unique: boolean;
|
||||
deprecated: boolean;
|
||||
name?: string | undefined;
|
||||
label?: string | undefined;
|
||||
collection?: string | undefined;
|
||||
default?: string | import("../runtime/types.js").SerializedSQL | undefined;
|
||||
multiline?: boolean | undefined;
|
||||
enum?: [string, ...string[]] | undefined;
|
||||
} & ({
|
||||
primaryKey: false;
|
||||
optional: boolean;
|
||||
} | {
|
||||
primaryKey: true;
|
||||
optional?: false | undefined;
|
||||
})) & {
|
||||
references?: import("./types.js").TextColumn;
|
||||
};
|
||||
} | {
|
||||
type: "date";
|
||||
schema: {
|
||||
optional: boolean;
|
||||
unique: boolean;
|
||||
deprecated: boolean;
|
||||
label?: string | undefined;
|
||||
name?: string | undefined;
|
||||
collection?: string | undefined;
|
||||
default?: string | import("../runtime/types.js").SerializedSQL | undefined;
|
||||
};
|
||||
} | {
|
||||
type: "json";
|
||||
schema: {
|
||||
optional: boolean;
|
||||
unique: boolean;
|
||||
deprecated: boolean;
|
||||
label?: string | undefined;
|
||||
name?: string | undefined;
|
||||
collection?: string | undefined;
|
||||
default?: unknown;
|
||||
};
|
||||
}>;
|
||||
deprecated: boolean;
|
||||
foreignKeys?: (Omit<{
|
||||
columns: import("./schemas.js").MaybeArray<string>;
|
||||
references: () => import("./schemas.js").MaybeArray<Omit<import("zod/v4").input<typeof import("./schemas.js").referenceableColumnSchema>, "references">>;
|
||||
}, "references"> & {
|
||||
references: import("./schemas.js").MaybeArray<Omit<import("zod/v4").infer<typeof import("./schemas.js").referenceableColumnSchema>, "references">>;
|
||||
})[] | undefined;
|
||||
}>;
|
||||
};
|
||||
/** Dependencies imported into the user config file. */
|
||||
dependencies: string[];
|
||||
/** Additional `astro:db` seed file paths provided by integrations. */
|
||||
integrationSeedPaths: (string | URL)[];
|
||||
}>;
|
||||
export declare function getResolvedFileUrl(root: URL, filePathOrUrl: string | URL): URL;
|
||||
/**
|
||||
* Bundle arbitrary `mjs` or `ts` file.
|
||||
* Simplified fork from Vite's `bundleConfigFile` function.
|
||||
*
|
||||
* @see https://github.com/vitejs/vite/blob/main/packages/vite/src/node/config.ts#L961
|
||||
*/
|
||||
export declare function bundleFile({ fileUrl, root, virtualModContents, }: {
|
||||
fileUrl: URL;
|
||||
root: URL;
|
||||
virtualModContents: string;
|
||||
}): Promise<{
|
||||
code: string;
|
||||
dependencies: string[];
|
||||
}>;
|
||||
/**
|
||||
* Forked from Vite config loader, replacing CJS-based path concat with ESM only
|
||||
*
|
||||
* @see https://github.com/vitejs/vite/blob/main/packages/vite/src/node/config.ts#L1074
|
||||
*/
|
||||
export declare function importBundledFile({ code, root, }: {
|
||||
code: string;
|
||||
root: URL;
|
||||
}): Promise<{
|
||||
default?: unknown;
|
||||
}>;
|
||||
170
node_modules/@astrojs/db/dist/core/load-file.js
generated
vendored
Normal file
170
node_modules/@astrojs/db/dist/core/load-file.js
generated
vendored
Normal file
@@ -0,0 +1,170 @@
|
||||
import { existsSync } from "node:fs";
|
||||
import { unlink, writeFile } from "node:fs/promises";
|
||||
import { createRequire } from "node:module";
|
||||
import { fileURLToPath, pathToFileURL } from "node:url";
|
||||
import { build as esbuild } from "esbuild";
|
||||
import { CONFIG_FILE_NAMES, VIRTUAL_MODULE_ID } from "./consts.js";
|
||||
import { INTEGRATION_TABLE_CONFLICT_ERROR } from "./errors.js";
|
||||
import { errorMap } from "./integration/error-map.js";
|
||||
import { getConfigVirtualModContents } from "./integration/vite-plugin-db.js";
|
||||
import { dbConfigSchema } from "./schemas.js";
|
||||
import "./types.js";
|
||||
import { getAstroEnv, getDbDirectoryUrl } from "./utils.js";
|
||||
/**
 * Load and merge the user's `astro:db` config with any config/seed files
 * contributed by integrations via the `astro:db:setup` hook.
 * Throws on table-name conflicts between the user config and an integration,
 * or between two integrations.
 */
async function resolveDbConfig({
  root,
  integrations
}) {
  const { mod, dependencies } = await loadUserConfigFile(root);
  const userDbConfig = dbConfigSchema.parse(mod?.default ?? {}, { error: errorMap });
  const dbConfig = { tables: userDbConfig.tables ?? {} };
  const integrationDbConfigPaths = [];
  const integrationSeedPaths = [];
  // Collect config/seed entrypoints from each integration's astro:db:setup
  // hook. NOTE(review): the hook call is not awaited — an async hook's
  // extendDb calls could land after this loop; presumably hooks are expected
  // to call extendDb synchronously. Confirm against the hook contract.
  for (const integration of integrations) {
    const { name, hooks } = integration;
    if (hooks["astro:db:setup"]) {
      hooks["astro:db:setup"]({
        extendDb({ configEntrypoint, seedEntrypoint }) {
          if (configEntrypoint) {
            integrationDbConfigPaths.push({ name, configEntrypoint });
          }
          if (seedEntrypoint) {
            integrationSeedPaths.push(seedEntrypoint);
          }
        }
      });
    }
  }
  // Merge integration tables in, failing loudly on name collisions.
  for (const { name, configEntrypoint } of integrationDbConfigPaths) {
    const loadedConfig = await loadIntegrationConfigFile(root, configEntrypoint);
    const integrationDbConfig = dbConfigSchema.parse(loadedConfig.mod?.default ?? {}, {
      error: errorMap
    });
    for (const key in integrationDbConfig.tables) {
      if (key in dbConfig.tables) {
        const isUserConflict = key in (userDbConfig.tables ?? {});
        throw new Error(INTEGRATION_TABLE_CONFLICT_ERROR(name, key, isUserConflict));
      } else {
        dbConfig.tables[key] = integrationDbConfig.tables[key];
      }
    }
  }
  return {
    /** Resolved `astro:db` config, including tables added by integrations. */
    dbConfig,
    /** Dependencies imported into the user config file. */
    dependencies,
    /** Additional `astro:db` seed file paths provided by integrations. */
    integrationSeedPaths
  };
}
|
||||
/**
 * Find the user's db config file among the supported file names and load it.
 * When several candidates exist, the LAST matching name in CONFIG_FILE_NAMES
 * wins (preserves the original scan order). When none exist, the loader is
 * still called with an undefined url and returns an empty module.
 */
async function loadUserConfigFile(root) {
  let configFileUrl;
  for (const candidate of CONFIG_FILE_NAMES) {
    const candidateUrl = new URL(candidate, getDbDirectoryUrl(root));
    if (existsSync(candidateUrl)) {
      configFileUrl = candidateUrl;
    }
  }
  return await loadAndBundleDbConfigFile({ root, fileUrl: configFileUrl });
}
|
||||
function getResolvedFileUrl(root, filePathOrUrl) {
|
||||
if (typeof filePathOrUrl === "string") {
|
||||
const { resolve } = createRequire(root);
|
||||
const resolvedFilePath = resolve(filePathOrUrl);
|
||||
return pathToFileURL(resolvedFilePath);
|
||||
}
|
||||
return filePathOrUrl;
|
||||
}
|
||||
async function loadIntegrationConfigFile(root, filePathOrUrl) {
|
||||
const fileUrl = getResolvedFileUrl(root, filePathOrUrl);
|
||||
return await loadAndBundleDbConfigFile({ root, fileUrl });
|
||||
}
|
||||
async function loadAndBundleDbConfigFile({
|
||||
root,
|
||||
fileUrl
|
||||
}) {
|
||||
if (!fileUrl) {
|
||||
return { mod: void 0, dependencies: [] };
|
||||
}
|
||||
const { code, dependencies } = await bundleFile({
|
||||
virtualModContents: getConfigVirtualModContents(),
|
||||
root,
|
||||
fileUrl
|
||||
});
|
||||
return {
|
||||
mod: await importBundledFile({ code, root }),
|
||||
dependencies
|
||||
};
|
||||
}
|
||||
async function bundleFile({
|
||||
fileUrl,
|
||||
root,
|
||||
virtualModContents
|
||||
}) {
|
||||
const { ASTRO_DATABASE_FILE } = getAstroEnv();
|
||||
const result = await esbuild({
|
||||
absWorkingDir: process.cwd(),
|
||||
entryPoints: [fileURLToPath(fileUrl)],
|
||||
outfile: "out.js",
|
||||
packages: "external",
|
||||
write: false,
|
||||
target: ["node16"],
|
||||
platform: "node",
|
||||
bundle: true,
|
||||
format: "esm",
|
||||
sourcemap: "inline",
|
||||
metafile: true,
|
||||
define: {
|
||||
"import.meta.env.ASTRO_DATABASE_FILE": JSON.stringify(ASTRO_DATABASE_FILE ?? "")
|
||||
},
|
||||
plugins: [
|
||||
{
|
||||
name: "resolve-astro-db",
|
||||
setup(build) {
|
||||
build.onResolve({ filter: /^astro:db$/ }, ({ path }) => {
|
||||
return { path, namespace: VIRTUAL_MODULE_ID };
|
||||
});
|
||||
build.onLoad({ namespace: VIRTUAL_MODULE_ID, filter: /.*/ }, () => {
|
||||
return {
|
||||
contents: virtualModContents,
|
||||
// Needed to resolve runtime dependencies
|
||||
resolveDir: fileURLToPath(root)
|
||||
};
|
||||
});
|
||||
}
|
||||
}
|
||||
]
|
||||
});
|
||||
const file = result.outputFiles[0];
|
||||
if (!file) {
|
||||
throw new Error(`Unexpected: no output file`);
|
||||
}
|
||||
return {
|
||||
code: file.text,
|
||||
dependencies: Object.keys(result.metafile.inputs)
|
||||
};
|
||||
}
|
||||
async function importBundledFile({
|
||||
code,
|
||||
root
|
||||
}) {
|
||||
const tmpFileUrl = new URL(`./db.timestamp-${Date.now()}.mjs`, root);
|
||||
await writeFile(tmpFileUrl, code, { encoding: "utf8" });
|
||||
try {
|
||||
return await import(
|
||||
/* @vite-ignore */
|
||||
tmpFileUrl.toString()
|
||||
);
|
||||
} finally {
|
||||
try {
|
||||
await unlink(tmpFileUrl);
|
||||
} catch {
|
||||
}
|
||||
}
|
||||
}
|
||||
export {
|
||||
bundleFile,
|
||||
getResolvedFileUrl,
|
||||
importBundledFile,
|
||||
resolveDbConfig
|
||||
};
|
||||
53
node_modules/@astrojs/db/dist/core/queries.d.ts
generated
vendored
Normal file
53
node_modules/@astrojs/db/dist/core/queries.d.ts
generated
vendored
Normal file
@@ -0,0 +1,53 @@
|
||||
import type { BooleanColumn, ColumnType, DateColumn, DBColumn, DBTable, JsonColumn, NumberColumn, TextColumn } from './types.js';
|
||||
export declare const SEED_DEV_FILE_NAME: string[];
|
||||
export declare function getDropTableIfExistsQuery(tableName: string): string;
|
||||
export declare function getCreateTableQuery(tableName: string, table: DBTable): string;
|
||||
export declare function getCreateIndexQueries(tableName: string, table: Pick<DBTable, 'indexes'>): string[];
|
||||
export declare function schemaTypeToSqlType(type: ColumnType): 'text' | 'integer';
|
||||
export declare function getModifiers(columnName: string, column: DBColumn): string;
|
||||
export declare function getReferencesConfig(column: DBColumn): {
|
||||
type: "number";
|
||||
schema: ({
|
||||
unique: boolean;
|
||||
deprecated: boolean;
|
||||
name?: string | undefined;
|
||||
label?: string | undefined;
|
||||
collection?: string | undefined;
|
||||
} & ({
|
||||
primaryKey: false;
|
||||
optional: boolean;
|
||||
default?: number | import("../runtime/types.js").SerializedSQL | undefined;
|
||||
} | {
|
||||
primaryKey: true;
|
||||
optional?: false | undefined;
|
||||
default?: undefined;
|
||||
})) & {
|
||||
references?: NumberColumn;
|
||||
};
|
||||
} | {
|
||||
type: "text";
|
||||
schema: ({
|
||||
unique: boolean;
|
||||
deprecated: boolean;
|
||||
name?: string | undefined;
|
||||
label?: string | undefined;
|
||||
collection?: string | undefined;
|
||||
default?: string | import("../runtime/types.js").SerializedSQL | undefined;
|
||||
multiline?: boolean | undefined;
|
||||
enum?: [string, ...string[]] | undefined;
|
||||
} & ({
|
||||
primaryKey: false;
|
||||
optional: boolean;
|
||||
} | {
|
||||
primaryKey: true;
|
||||
optional?: false | undefined;
|
||||
})) & {
|
||||
references?: TextColumn;
|
||||
};
|
||||
} | undefined;
|
||||
type WithDefaultDefined<T extends DBColumn> = T & {
|
||||
schema: Required<Pick<T['schema'], 'default'>>;
|
||||
};
|
||||
type DBColumnWithDefault = WithDefaultDefined<TextColumn> | WithDefaultDefined<DateColumn> | WithDefaultDefined<NumberColumn> | WithDefaultDefined<BooleanColumn> | WithDefaultDefined<JsonColumn>;
|
||||
export declare function hasDefault(column: DBColumn): column is DBColumnWithDefault;
|
||||
export {};
|
||||
166
node_modules/@astrojs/db/dist/core/queries.js
generated
vendored
Normal file
166
node_modules/@astrojs/db/dist/core/queries.js
generated
vendored
Normal file
@@ -0,0 +1,166 @@
|
||||
import { SQLiteAsyncDialect } from "drizzle-orm/sqlite-core";
|
||||
import colors from "piccolore";
|
||||
import {
|
||||
FOREIGN_KEY_DNE_ERROR,
|
||||
FOREIGN_KEY_REFERENCES_EMPTY_ERROR,
|
||||
FOREIGN_KEY_REFERENCES_LENGTH_ERROR,
|
||||
REFERENCE_DNE_ERROR
|
||||
} from "../runtime/errors.js";
|
||||
import { isSerializedSQL } from "../runtime/types.js";
|
||||
import { hasPrimaryKey } from "../runtime/utils.js";
|
||||
const sqlite = new SQLiteAsyncDialect();
|
||||
const SEED_DEV_FILE_NAME = ["seed.ts", "seed.js", "seed.mjs", "seed.mts"];
|
||||
function getDropTableIfExistsQuery(tableName) {
|
||||
return `DROP TABLE IF EXISTS ${sqlite.escapeName(tableName)}`;
|
||||
}
|
||||
function getCreateTableQuery(tableName, table) {
|
||||
let query = `CREATE TABLE ${sqlite.escapeName(tableName)} (`;
|
||||
const colQueries = [];
|
||||
const colHasPrimaryKey = Object.entries(table.columns).find(
|
||||
([, column]) => hasPrimaryKey(column)
|
||||
);
|
||||
if (!colHasPrimaryKey) {
|
||||
colQueries.push("_id INTEGER PRIMARY KEY");
|
||||
}
|
||||
for (const [columnName, column] of Object.entries(table.columns)) {
|
||||
const colQuery = `${sqlite.escapeName(columnName)} ${schemaTypeToSqlType(
|
||||
column.type
|
||||
)}${getModifiers(columnName, column)}`;
|
||||
colQueries.push(colQuery);
|
||||
}
|
||||
colQueries.push(...getCreateForeignKeyQueries(tableName, table));
|
||||
query += colQueries.join(", ") + ")";
|
||||
return query;
|
||||
}
|
||||
function getCreateIndexQueries(tableName, table) {
|
||||
let queries = [];
|
||||
for (const [indexName, indexProps] of Object.entries(table.indexes ?? {})) {
|
||||
const onColNames = asArray(indexProps.on);
|
||||
const onCols = onColNames.map((colName) => sqlite.escapeName(colName));
|
||||
const unique = indexProps.unique ? "UNIQUE " : "";
|
||||
const indexQuery = `CREATE ${unique}INDEX ${sqlite.escapeName(
|
||||
indexName
|
||||
)} ON ${sqlite.escapeName(tableName)} (${onCols.join(", ")})`;
|
||||
queries.push(indexQuery);
|
||||
}
|
||||
return queries;
|
||||
}
|
||||
function getCreateForeignKeyQueries(tableName, table) {
|
||||
let queries = [];
|
||||
for (const foreignKey of table.foreignKeys ?? []) {
|
||||
const columns = asArray(foreignKey.columns);
|
||||
const references = asArray(foreignKey.references);
|
||||
if (columns.length !== references.length) {
|
||||
throw new Error(FOREIGN_KEY_REFERENCES_LENGTH_ERROR(tableName));
|
||||
}
|
||||
const firstReference = references[0];
|
||||
if (!firstReference) {
|
||||
throw new Error(FOREIGN_KEY_REFERENCES_EMPTY_ERROR(tableName));
|
||||
}
|
||||
const referencedTable = firstReference.schema.collection;
|
||||
if (!referencedTable) {
|
||||
throw new Error(FOREIGN_KEY_DNE_ERROR(tableName));
|
||||
}
|
||||
const query = `FOREIGN KEY (${columns.map((f) => sqlite.escapeName(f)).join(", ")}) REFERENCES ${sqlite.escapeName(referencedTable)}(${references.map((r) => sqlite.escapeName(r.schema.name)).join(", ")})`;
|
||||
queries.push(query);
|
||||
}
|
||||
return queries;
|
||||
}
|
||||
function asArray(value) {
|
||||
return Array.isArray(value) ? value : [value];
|
||||
}
|
||||
function schemaTypeToSqlType(type) {
|
||||
switch (type) {
|
||||
case "date":
|
||||
case "text":
|
||||
case "json":
|
||||
return "text";
|
||||
case "number":
|
||||
case "boolean":
|
||||
return "integer";
|
||||
}
|
||||
}
|
||||
function getModifiers(columnName, column) {
|
||||
let modifiers = "";
|
||||
if (hasPrimaryKey(column)) {
|
||||
return " PRIMARY KEY";
|
||||
}
|
||||
if (!column.schema.optional) {
|
||||
modifiers += " NOT NULL";
|
||||
}
|
||||
if (column.schema.unique) {
|
||||
modifiers += " UNIQUE";
|
||||
}
|
||||
if (hasDefault(column)) {
|
||||
modifiers += ` DEFAULT ${getDefaultValueSql(columnName, column)}`;
|
||||
}
|
||||
const references = getReferencesConfig(column);
|
||||
if (references) {
|
||||
const { collection: tableName, name } = references.schema;
|
||||
if (!tableName || !name) {
|
||||
throw new Error(REFERENCE_DNE_ERROR(columnName));
|
||||
}
|
||||
modifiers += ` REFERENCES ${sqlite.escapeName(tableName)} (${sqlite.escapeName(name)})`;
|
||||
}
|
||||
return modifiers;
|
||||
}
|
||||
function getReferencesConfig(column) {
|
||||
const canHaveReferences = column.type === "number" || column.type === "text";
|
||||
if (!canHaveReferences) return void 0;
|
||||
return column.schema.references;
|
||||
}
|
||||
function hasDefault(column) {
|
||||
if (column.schema.default !== void 0) {
|
||||
return true;
|
||||
}
|
||||
if (hasPrimaryKey(column) && column.type === "number") {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
function toDefault(def) {
|
||||
const type = typeof def;
|
||||
if (type === "string") {
|
||||
return sqlite.escapeString(def);
|
||||
} else if (type === "boolean") {
|
||||
return def ? "TRUE" : "FALSE";
|
||||
} else {
|
||||
return def + "";
|
||||
}
|
||||
}
|
||||
function getDefaultValueSql(columnName, column) {
|
||||
if (isSerializedSQL(column.schema.default)) {
|
||||
return column.schema.default.sql;
|
||||
}
|
||||
switch (column.type) {
|
||||
case "boolean":
|
||||
case "number":
|
||||
case "text":
|
||||
case "date":
|
||||
return toDefault(column.schema.default);
|
||||
case "json": {
|
||||
let stringified = "";
|
||||
try {
|
||||
stringified = JSON.stringify(column.schema.default);
|
||||
} catch {
|
||||
console.log(
|
||||
`Invalid default value for column ${colors.bold(
|
||||
columnName
|
||||
)}. Defaults must be valid JSON when using the \`json()\` type.`
|
||||
);
|
||||
process.exit(0);
|
||||
}
|
||||
return sqlite.escapeString(stringified);
|
||||
}
|
||||
}
|
||||
}
|
||||
export {
|
||||
SEED_DEV_FILE_NAME,
|
||||
getCreateIndexQueries,
|
||||
getCreateTableQuery,
|
||||
getDropTableIfExistsQuery,
|
||||
getModifiers,
|
||||
getReferencesConfig,
|
||||
hasDefault,
|
||||
schemaTypeToSqlType
|
||||
};
|
||||
1444
node_modules/@astrojs/db/dist/core/schemas.d.ts
generated
vendored
Normal file
1444
node_modules/@astrojs/db/dist/core/schemas.d.ts
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
193
node_modules/@astrojs/db/dist/core/schemas.js
generated
vendored
Normal file
193
node_modules/@astrojs/db/dist/core/schemas.js
generated
vendored
Normal file
@@ -0,0 +1,193 @@
|
||||
import { SQL } from "drizzle-orm";
|
||||
import { SQLiteAsyncDialect } from "drizzle-orm/sqlite-core";
|
||||
import * as z from "zod/v4";
|
||||
import { SERIALIZED_SQL_KEY } from "../runtime/types.js";
|
||||
import { errorMap } from "./integration/error-map.js";
|
||||
import { mapObject } from "./utils.js";
|
||||
const sqlite = new SQLiteAsyncDialect();
|
||||
const sqlSchema = z.instanceof(SQL).transform(
|
||||
(sqlObj) => ({
|
||||
[SERIALIZED_SQL_KEY]: true,
|
||||
sql: sqlite.sqlToQuery(sqlObj).sql
|
||||
})
|
||||
);
|
||||
const baseColumnSchema = z.object({
|
||||
label: z.string().optional(),
|
||||
optional: z.boolean().optional().default(false),
|
||||
unique: z.boolean().optional().default(false),
|
||||
deprecated: z.boolean().optional().default(false),
|
||||
// Defined when `defineDb()` is called to resolve `references`
|
||||
name: z.string().optional(),
|
||||
// TODO: Update to `table`. Will need migration file version change
|
||||
collection: z.string().optional()
|
||||
});
|
||||
const booleanColumnSchema = z.object({
|
||||
type: z.literal("boolean"),
|
||||
schema: baseColumnSchema.extend({
|
||||
default: z.union([z.boolean(), sqlSchema]).optional()
|
||||
})
|
||||
});
|
||||
const numberColumnBaseSchema = baseColumnSchema.omit({ optional: true }).and(
|
||||
z.union([
|
||||
z.object({
|
||||
primaryKey: z.literal(false).optional().default(false),
|
||||
optional: baseColumnSchema.shape.optional,
|
||||
default: z.union([z.number(), sqlSchema]).optional()
|
||||
}),
|
||||
z.object({
|
||||
// `integer primary key` uses ROWID as the default value.
|
||||
// `optional` and `default` do not have an effect,
|
||||
// so disable these config options for primary keys.
|
||||
primaryKey: z.literal(true),
|
||||
optional: z.literal(false).optional(),
|
||||
default: z.literal(void 0).optional()
|
||||
})
|
||||
])
|
||||
);
|
||||
const numberColumnOptsSchema = numberColumnBaseSchema.and(
|
||||
z.object({
|
||||
references: z.function({ output: z.lazy(() => numberColumnSchema) }).optional().transform((fn) => fn?.())
|
||||
})
|
||||
);
|
||||
const numberColumnSchema = z.object({
|
||||
type: z.literal("number"),
|
||||
schema: numberColumnOptsSchema
|
||||
});
|
||||
const textColumnBaseSchema = baseColumnSchema.omit({ optional: true }).extend({
|
||||
default: z.union([z.string(), sqlSchema]).optional(),
|
||||
multiline: z.boolean().optional(),
|
||||
enum: z.tuple([z.string()]).rest(z.string()).optional()
|
||||
// At least one value required,
|
||||
}).and(
|
||||
z.union([
|
||||
z.object({
|
||||
primaryKey: z.literal(false).optional().default(false),
|
||||
optional: baseColumnSchema.shape.optional
|
||||
}),
|
||||
z.object({
|
||||
// text primary key allows NULL values.
|
||||
// NULL values bypass unique checks, which could
|
||||
// lead to duplicate URLs per record.
|
||||
// disable `optional` for primary keys.
|
||||
primaryKey: z.literal(true),
|
||||
optional: z.literal(false).optional()
|
||||
})
|
||||
])
|
||||
);
|
||||
const textColumnOptsSchema = textColumnBaseSchema.and(
|
||||
z.object({
|
||||
references: z.function({ output: z.lazy(() => textColumnSchema) }).optional().transform((fn) => fn?.())
|
||||
})
|
||||
);
|
||||
const textColumnSchema = z.object({
|
||||
type: z.literal("text"),
|
||||
schema: textColumnOptsSchema
|
||||
});
|
||||
const dateColumnSchema = z.object({
|
||||
type: z.literal("date"),
|
||||
schema: baseColumnSchema.extend({
|
||||
default: z.union([
|
||||
sqlSchema,
|
||||
// transform to ISO string for serialization
|
||||
z.date().transform((d) => d.toISOString())
|
||||
]).optional()
|
||||
})
|
||||
});
|
||||
const jsonColumnSchema = z.object({
|
||||
type: z.literal("json"),
|
||||
schema: baseColumnSchema.extend({
|
||||
default: z.unknown().optional()
|
||||
})
|
||||
});
|
||||
const columnSchema = z.discriminatedUnion("type", [
|
||||
booleanColumnSchema,
|
||||
numberColumnSchema,
|
||||
textColumnSchema,
|
||||
dateColumnSchema,
|
||||
jsonColumnSchema
|
||||
]);
|
||||
const referenceableColumnSchema = z.union([textColumnSchema, numberColumnSchema]);
|
||||
const columnsSchema = z.record(z.string(), columnSchema);
|
||||
const foreignKeysSchema = z.object({
|
||||
columns: z.string().or(z.array(z.string())),
|
||||
references: z.function({
|
||||
output: z.lazy(() => referenceableColumnSchema.or(z.array(referenceableColumnSchema)))
|
||||
}).transform((fn) => fn())
|
||||
});
|
||||
const resolvedIndexSchema = z.object({
|
||||
on: z.string().or(z.array(z.string())),
|
||||
unique: z.boolean().optional()
|
||||
});
|
||||
const legacyIndexesSchema = z.record(z.string(), resolvedIndexSchema);
|
||||
const indexSchema = z.object({
|
||||
on: z.string().or(z.array(z.string())),
|
||||
unique: z.boolean().optional(),
|
||||
name: z.string().optional()
|
||||
});
|
||||
const indexesSchema = z.array(indexSchema);
|
||||
const tableSchema = z.object({
|
||||
columns: columnsSchema,
|
||||
indexes: indexesSchema.or(legacyIndexesSchema).optional(),
|
||||
foreignKeys: z.array(foreignKeysSchema).optional(),
|
||||
deprecated: z.boolean().optional().default(false)
|
||||
});
|
||||
const tablesSchema = z.preprocess(
|
||||
(rawTables) => {
|
||||
const tables = z.record(z.string(), z.any()).parse(rawTables, { error: errorMap });
|
||||
for (const [tableName, table] of Object.entries(tables)) {
|
||||
table.getName = () => tableName;
|
||||
const { columns } = z.object({ columns: z.record(z.string(), z.any()) }).parse(table, { error: errorMap });
|
||||
for (const [columnName, column] of Object.entries(columns)) {
|
||||
column.schema.name = columnName;
|
||||
column.schema.collection = tableName;
|
||||
}
|
||||
}
|
||||
return rawTables;
|
||||
},
|
||||
z.record(z.string(), tableSchema)
|
||||
);
|
||||
const dbConfigSchema = z.object({
|
||||
tables: tablesSchema.optional()
|
||||
}).transform(({ tables = {}, ...config }) => {
|
||||
return {
|
||||
...config,
|
||||
tables: mapObject(tables, (tableName, table) => {
|
||||
const { indexes = {} } = table;
|
||||
if (!Array.isArray(indexes)) {
|
||||
return { ...table, indexes };
|
||||
}
|
||||
const resolvedIndexes = {};
|
||||
for (const index of indexes) {
|
||||
if (index.name) {
|
||||
const { name: name2, ...rest } = index;
|
||||
resolvedIndexes[index.name] = rest;
|
||||
continue;
|
||||
}
|
||||
const indexOn = Array.isArray(index.on) ? index.on.sort().join("_") : index.on;
|
||||
const name = tableName + "_" + indexOn + "_idx";
|
||||
resolvedIndexes[name] = index;
|
||||
}
|
||||
return {
|
||||
...table,
|
||||
indexes: resolvedIndexes
|
||||
};
|
||||
})
|
||||
};
|
||||
});
|
||||
export {
|
||||
booleanColumnSchema,
|
||||
columnSchema,
|
||||
columnsSchema,
|
||||
dateColumnSchema,
|
||||
dbConfigSchema,
|
||||
indexSchema,
|
||||
jsonColumnSchema,
|
||||
numberColumnOptsSchema,
|
||||
numberColumnSchema,
|
||||
referenceableColumnSchema,
|
||||
resolvedIndexSchema,
|
||||
tableSchema,
|
||||
tablesSchema,
|
||||
textColumnOptsSchema,
|
||||
textColumnSchema
|
||||
};
|
||||
60
node_modules/@astrojs/db/dist/core/types.d.ts
generated
vendored
Normal file
60
node_modules/@astrojs/db/dist/core/types.d.ts
generated
vendored
Normal file
@@ -0,0 +1,60 @@
|
||||
import type * as z from 'zod/v4';
|
||||
import type { booleanColumnSchema, columnSchema, columnsSchema, dateColumnSchema, dbConfigSchema, indexSchema, jsonColumnSchema, MaybeArray, numberColumnOptsSchema, numberColumnSchema, referenceableColumnSchema, resolvedIndexSchema, tableSchema, textColumnOptsSchema, textColumnSchema } from './schemas.js';
|
||||
export type ResolvedIndexes = z.output<typeof dbConfigSchema>['tables'][string]['indexes'];
|
||||
export type BooleanColumn = z.infer<typeof booleanColumnSchema>;
|
||||
export type BooleanColumnInput = z.input<typeof booleanColumnSchema>;
|
||||
export type NumberColumn = z.infer<typeof numberColumnSchema>;
|
||||
export type NumberColumnInput = z.input<typeof numberColumnSchema>;
|
||||
export type TextColumn = z.infer<typeof textColumnSchema>;
|
||||
export type TextColumnInput = z.input<typeof textColumnSchema>;
|
||||
export type DateColumn = z.infer<typeof dateColumnSchema>;
|
||||
export type DateColumnInput = z.input<typeof dateColumnSchema>;
|
||||
export type JsonColumn = z.infer<typeof jsonColumnSchema>;
|
||||
export type JsonColumnInput = z.input<typeof jsonColumnSchema>;
|
||||
export type ColumnType = BooleanColumn['type'] | NumberColumn['type'] | TextColumn['type'] | DateColumn['type'] | JsonColumn['type'];
|
||||
export type DBColumn = z.infer<typeof columnSchema>;
|
||||
export type DBColumnInput = DateColumnInput | BooleanColumnInput | NumberColumnInput | TextColumnInput | JsonColumnInput;
|
||||
export type DBColumns = z.infer<typeof columnsSchema>;
|
||||
export type DBTable = z.infer<typeof tableSchema>;
|
||||
export type DBTables = Record<string, DBTable>;
|
||||
export type ResolvedDBTables = z.output<typeof dbConfigSchema>['tables'];
|
||||
export type ResolvedDBTable = z.output<typeof dbConfigSchema>['tables'][string];
|
||||
export type DBSnapshot = {
|
||||
schema: Record<string, ResolvedDBTable>;
|
||||
version: string;
|
||||
};
|
||||
export type DBConfigInput = z.input<typeof dbConfigSchema>;
|
||||
export type DBConfig = z.infer<typeof dbConfigSchema>;
|
||||
export type ColumnsConfig = z.input<typeof tableSchema>['columns'];
|
||||
export type OutputColumnsConfig = z.output<typeof tableSchema>['columns'];
|
||||
export interface TableConfig<TColumns extends ColumnsConfig = ColumnsConfig> extends Pick<z.input<typeof tableSchema>, 'columns' | 'indexes' | 'foreignKeys'> {
|
||||
columns: TColumns;
|
||||
foreignKeys?: Array<{
|
||||
columns: MaybeArray<Extract<keyof TColumns, string>>;
|
||||
references: () => MaybeArray<z.input<typeof referenceableColumnSchema>>;
|
||||
}>;
|
||||
indexes?: Array<IndexConfig<TColumns>> | Record<string, LegacyIndexConfig<TColumns>>;
|
||||
deprecated?: boolean;
|
||||
}
|
||||
interface IndexConfig<TColumns extends ColumnsConfig> extends z.input<typeof indexSchema> {
|
||||
on: MaybeArray<Extract<keyof TColumns, string>>;
|
||||
}
|
||||
/** @deprecated */
|
||||
interface LegacyIndexConfig<TColumns extends ColumnsConfig> extends z.input<typeof resolvedIndexSchema> {
|
||||
on: MaybeArray<Extract<keyof TColumns, string>>;
|
||||
}
|
||||
export type NumberColumnOpts = z.input<typeof numberColumnOptsSchema>;
|
||||
export type TextColumnOpts = z.input<typeof textColumnOptsSchema>;
|
||||
declare global {
|
||||
namespace Astro {
|
||||
interface IntegrationHooks {
|
||||
'astro:db:setup'?: (options: {
|
||||
extendDb: (options: {
|
||||
configEntrypoint?: URL | string;
|
||||
seedEntrypoint?: URL | string;
|
||||
}) => void;
|
||||
}) => void | Promise<void>;
|
||||
}
|
||||
}
|
||||
}
|
||||
export {};
|
||||
0
node_modules/@astrojs/db/dist/core/types.js
generated
vendored
Normal file
0
node_modules/@astrojs/db/dist/core/types.js
generated
vendored
Normal file
19
node_modules/@astrojs/db/dist/core/utils.d.ts
generated
vendored
Normal file
19
node_modules/@astrojs/db/dist/core/utils.d.ts
generated
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
import type { AstroConfig, AstroIntegration } from 'astro';
|
||||
import type { Arguments } from 'yargs-parser';
|
||||
import './types.js';
|
||||
export type VitePlugin = Required<AstroConfig['vite']>['plugins'][number];
|
||||
export declare function getAstroEnv(envMode?: string): Record<`ASTRO_${string}`, string>;
|
||||
export type RemoteDatabaseInfo = {
|
||||
url: string;
|
||||
token: string;
|
||||
};
|
||||
export declare function getRemoteDatabaseInfo(): RemoteDatabaseInfo;
|
||||
export declare function resolveDbAppToken(flags: Arguments, envToken: string): string;
|
||||
export declare function resolveDbAppToken(flags: Arguments, envToken: string | undefined): string | undefined;
|
||||
export declare function getDbDirectoryUrl(root: URL | string): URL;
|
||||
export declare function defineDbIntegration(integration: AstroIntegration): AstroIntegration;
|
||||
/**
|
||||
* Map an object's values to a new set of values
|
||||
* while preserving types.
|
||||
*/
|
||||
export declare function mapObject<T, U = T>(item: Record<string, T>, callback: (key: string, value: T) => U): Record<string, U>;
|
||||
37
node_modules/@astrojs/db/dist/core/utils.js
generated
vendored
Normal file
37
node_modules/@astrojs/db/dist/core/utils.js
generated
vendored
Normal file
@@ -0,0 +1,37 @@
|
||||
import { loadEnv } from "vite";
|
||||
import "./types.js";
|
||||
function getAstroEnv(envMode = "") {
|
||||
const env = loadEnv(envMode, process.cwd(), "ASTRO_");
|
||||
return env;
|
||||
}
|
||||
function getRemoteDatabaseInfo() {
|
||||
const astroEnv = getAstroEnv();
|
||||
return {
|
||||
url: astroEnv.ASTRO_DB_REMOTE_URL,
|
||||
token: astroEnv.ASTRO_DB_APP_TOKEN
|
||||
};
|
||||
}
|
||||
function resolveDbAppToken(flags, envToken) {
|
||||
const dbAppToken = flags.dbAppToken;
|
||||
if (typeof dbAppToken === "string") return dbAppToken;
|
||||
return envToken;
|
||||
}
|
||||
function getDbDirectoryUrl(root) {
|
||||
return new URL("db/", root);
|
||||
}
|
||||
function defineDbIntegration(integration) {
|
||||
return integration;
|
||||
}
|
||||
function mapObject(item, callback) {
|
||||
return Object.fromEntries(
|
||||
Object.entries(item).map(([key, value]) => [key, callback(key, value)])
|
||||
);
|
||||
}
|
||||
export {
|
||||
defineDbIntegration,
|
||||
getAstroEnv,
|
||||
getDbDirectoryUrl,
|
||||
getRemoteDatabaseInfo,
|
||||
mapObject,
|
||||
resolveDbAppToken
|
||||
};
|
||||
0
node_modules/@astrojs/db/dist/db-client.d.js
generated
vendored
Normal file
0
node_modules/@astrojs/db/dist/db-client.d.js
generated
vendored
Normal file
3
node_modules/@astrojs/db/dist/index.d.ts
generated
vendored
Normal file
3
node_modules/@astrojs/db/dist/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
export { cli } from './core/cli/index.js';
|
||||
export { type AstroDBConfig, integration as default } from './core/integration/index.js';
|
||||
export type { TableConfig } from './core/types.js';
|
||||
6
node_modules/@astrojs/db/dist/index.js
generated
vendored
Normal file
6
node_modules/@astrojs/db/dist/index.js
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
import { cli } from "./core/cli/index.js";
|
||||
import { integration } from "./core/integration/index.js";
|
||||
export {
|
||||
cli,
|
||||
integration as default
|
||||
};
|
||||
4
node_modules/@astrojs/db/dist/runtime/errors.d.ts
generated
vendored
Normal file
4
node_modules/@astrojs/db/dist/runtime/errors.d.ts
generated
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
export declare const FOREIGN_KEY_DNE_ERROR: (tableName: string) => string;
|
||||
export declare const FOREIGN_KEY_REFERENCES_LENGTH_ERROR: (tableName: string) => string;
|
||||
export declare const FOREIGN_KEY_REFERENCES_EMPTY_ERROR: (tableName: string) => string;
|
||||
export declare const REFERENCE_DNE_ERROR: (columnName: string) => string;
|
||||
27
node_modules/@astrojs/db/dist/runtime/errors.js
generated
vendored
Normal file
27
node_modules/@astrojs/db/dist/runtime/errors.js
generated
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
import colors from "piccolore";
|
||||
const FOREIGN_KEY_DNE_ERROR = (tableName) => {
|
||||
return `Table ${colors.bold(
|
||||
tableName
|
||||
)} references a table that does not exist. Did you apply the referenced table to the \`tables\` object in your db config?`;
|
||||
};
|
||||
const FOREIGN_KEY_REFERENCES_LENGTH_ERROR = (tableName) => {
|
||||
return `Foreign key on ${colors.bold(
|
||||
tableName
|
||||
)} is misconfigured. \`columns\` and \`references\` must be the same length.`;
|
||||
};
|
||||
const FOREIGN_KEY_REFERENCES_EMPTY_ERROR = (tableName) => {
|
||||
return `Foreign key on ${colors.bold(
|
||||
tableName
|
||||
)} is misconfigured. \`references\` array cannot be empty.`;
|
||||
};
|
||||
const REFERENCE_DNE_ERROR = (columnName) => {
|
||||
return `Column ${colors.bold(
|
||||
columnName
|
||||
)} references a table that does not exist. Did you apply the referenced table to the \`tables\` object in your db config?`;
|
||||
};
|
||||
export {
|
||||
FOREIGN_KEY_DNE_ERROR,
|
||||
FOREIGN_KEY_REFERENCES_EMPTY_ERROR,
|
||||
FOREIGN_KEY_REFERENCES_LENGTH_ERROR,
|
||||
REFERENCE_DNE_ERROR
|
||||
};
|
||||
31
node_modules/@astrojs/db/dist/runtime/index.d.ts
generated
vendored
Normal file
31
node_modules/@astrojs/db/dist/runtime/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,31 @@
|
||||
import { type ColumnDataType } from 'drizzle-orm';
|
||||
import type { LibSQLDatabase } from 'drizzle-orm/libsql';
|
||||
import type { DBTable } from '../core/types.js';
|
||||
export type Database = LibSQLDatabase;
|
||||
export type { Table } from './types.js';
|
||||
export { hasPrimaryKey } from './utils.js';
|
||||
export declare function asDrizzleTable(name: string, table: DBTable): import("drizzle-orm/sqlite-core").SQLiteTableWithColumns<{
|
||||
name: string;
|
||||
schema: undefined;
|
||||
columns: {
|
||||
[x: string]: import("drizzle-orm/sqlite-core").SQLiteColumn<{
|
||||
name: string;
|
||||
tableName: string;
|
||||
dataType: ColumnDataType;
|
||||
columnType: string;
|
||||
data: unknown;
|
||||
driverParam: unknown;
|
||||
notNull: false;
|
||||
hasDefault: false;
|
||||
isPrimaryKey: false;
|
||||
isAutoincrement: false;
|
||||
hasRuntimeDefault: false;
|
||||
enumValues: string[] | undefined;
|
||||
baseColumn: never;
|
||||
identity: undefined;
|
||||
generated: undefined;
|
||||
}, {}, {}>;
|
||||
};
|
||||
dialect: "sqlite";
|
||||
}>;
|
||||
export declare function normalizeDatabaseUrl(envDbUrl: string | undefined, defaultDbUrl: string): string;
|
||||
121
node_modules/@astrojs/db/dist/runtime/index.js
generated
vendored
Normal file
121
node_modules/@astrojs/db/dist/runtime/index.js
generated
vendored
Normal file
@@ -0,0 +1,121 @@
|
||||
import { sql } from "drizzle-orm";
|
||||
import {
|
||||
customType,
|
||||
index,
|
||||
integer,
|
||||
sqliteTable,
|
||||
text
|
||||
} from "drizzle-orm/sqlite-core";
|
||||
import { isSerializedSQL } from "./types.js";
|
||||
import { hasPrimaryKey, pathToFileURL } from "./utils.js";
|
||||
import { hasPrimaryKey as hasPrimaryKey2 } from "./utils.js";
|
||||
const isISODateString = (str) => /\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d{3}Z/.test(str);
|
||||
const dateType = customType({
|
||||
dataType() {
|
||||
return "text";
|
||||
},
|
||||
toDriver(value) {
|
||||
return value.toISOString();
|
||||
},
|
||||
fromDriver(value) {
|
||||
if (!isISODateString(value)) {
|
||||
value += "Z";
|
||||
}
|
||||
return new Date(value);
|
||||
}
|
||||
});
|
||||
const jsonType = customType({
|
||||
dataType() {
|
||||
return "text";
|
||||
},
|
||||
toDriver(value) {
|
||||
return JSON.stringify(value);
|
||||
},
|
||||
fromDriver(value) {
|
||||
return JSON.parse(value);
|
||||
}
|
||||
});
|
||||
function asDrizzleTable(name, table) {
|
||||
const columns = {};
|
||||
if (!Object.entries(table.columns).some(([, column]) => hasPrimaryKey(column))) {
|
||||
columns["_id"] = integer("_id").primaryKey();
|
||||
}
|
||||
for (const [columnName, column] of Object.entries(table.columns)) {
|
||||
columns[columnName] = columnMapper(columnName, column);
|
||||
}
|
||||
const drizzleTable = sqliteTable(name, columns, (ormTable) => {
|
||||
const indexes = [];
|
||||
for (const [indexName, indexProps] of Object.entries(table.indexes ?? {})) {
|
||||
const onColNames = Array.isArray(indexProps.on) ? indexProps.on : [indexProps.on];
|
||||
const onCols = onColNames.map((colName) => ormTable[colName]);
|
||||
if (!atLeastOne(onCols)) continue;
|
||||
indexes.push(index(indexName).on(...onCols));
|
||||
}
|
||||
return indexes;
|
||||
});
|
||||
return drizzleTable;
|
||||
}
|
||||
function atLeastOne(arr) {
|
||||
return arr.length > 0;
|
||||
}
|
||||
/**
 * Map a single Astro DB column definition onto a drizzle sqlite column
 * builder, applying default, primary-key, not-null and unique modifiers.
 *
 * @param {string} columnName SQL name of the column.
 * @param {object} column Parsed column config of shape `{ type, schema }`.
 * @returns a drizzle column builder.
 * @throws {Error} when `column.type` is not one of the supported types.
 */
function columnMapper(columnName, column) {
  let c;
  switch (column.type) {
    case "text": {
      c = text(columnName, { enum: column.schema.enum });
      if (column.schema.default !== void 0)
        c = c.default(handleSerializedSQL(column.schema.default));
      if (column.schema.primaryKey === true) c = c.primaryKey();
      break;
    }
    case "number": {
      c = integer(columnName);
      if (column.schema.default !== void 0)
        c = c.default(handleSerializedSQL(column.schema.default));
      if (column.schema.primaryKey === true) c = c.primaryKey();
      break;
    }
    case "boolean": {
      c = integer(columnName, { mode: "boolean" });
      if (column.schema.default !== void 0)
        c = c.default(handleSerializedSQL(column.schema.default));
      break;
    }
    case "json":
      // JSON defaults are plain values stringified by the custom type's
      // driver, so no serialized-SQL unwrapping is applied here.
      c = jsonType(columnName);
      if (column.schema.default !== void 0) c = c.default(column.schema.default);
      break;
    case "date": {
      c = dateType(columnName);
      if (column.schema.default !== void 0) {
        const def = handleSerializedSQL(column.schema.default);
        // A string default (ISO timestamp) must become a Date; a SQL
        // expression default (e.g. NOW) passes through unchanged.
        c = c.default(typeof def === "string" ? new Date(def) : def);
      }
      break;
    }
    default:
      // Fix: an unknown type previously fell through and crashed below
      // with an opaque TypeError on `c.notNull()`; fail fast instead.
      throw new Error(`Unsupported column type for "${columnName}": ${column.type}`);
  }
  if (!column.schema.optional) c = c.notNull();
  if (column.schema.unique) c = c.unique();
  return c;
}
|
||||
// Unwrap a serialized SQL default (produced by helpers like NOW/TRUE/FALSE)
// into a raw drizzle SQL expression; every other value passes through.
function handleSerializedSQL(def) {
  return isSerializedSQL(def) ? sql.raw(def.sql) : def;
}
|
||||
/**
 * Resolve the database URL. Prefers the environment-provided value:
 * absolute `file://` URLs are used as-is, anything else is resolved
 * against the current working directory. Falls back to `defaultDbUrl`
 * when no env value is set.
 */
function normalizeDatabaseUrl(envDbUrl, defaultDbUrl) {
  if (!envDbUrl) return defaultDbUrl;
  if (envDbUrl.startsWith("file://")) return envDbUrl;
  const cwdBase = pathToFileURL(process.cwd()) + "/";
  return new URL(envDbUrl, cwdBase).toString();
}
|
||||
export {
|
||||
asDrizzleTable,
|
||||
hasPrimaryKey2 as hasPrimaryKey,
|
||||
normalizeDatabaseUrl
|
||||
};
|
||||
92
node_modules/@astrojs/db/dist/runtime/types.d.ts
generated
vendored
Normal file
92
node_modules/@astrojs/db/dist/runtime/types.d.ts
generated
vendored
Normal file
@@ -0,0 +1,92 @@
|
||||
import type { ColumnBaseConfig, ColumnDataType } from 'drizzle-orm';
|
||||
import type { SQLiteColumn, SQLiteTableWithColumns } from 'drizzle-orm/sqlite-core';
|
||||
import type { ColumnsConfig, DBColumn, OutputColumnsConfig } from '../core/types.js';
|
||||
/** Subset of drizzle's column base config that the table-derivation machinery fills in per column. */
type GeneratedConfig<T extends ColumnDataType = ColumnDataType> = Pick<ColumnBaseConfig<T, string>, 'name' | 'tableName' | 'notNull' | 'hasDefault' | 'hasRuntimeDefault' | 'isPrimaryKey'>;
/** Drizzle column type for `column.text()`; `E` carries the optional tuple of allowed enum literals. */
type AstroText<T extends GeneratedConfig<'string'>, E extends readonly [string, ...string[]] | string> = SQLiteColumn<T & {
    data: E extends readonly (infer U)[] ? U : string;
    dataType: 'string';
    columnType: 'SQLiteText';
    driverParam: string;
    enumValues: E extends [string, ...string[]] ? E : never;
    baseColumn: never;
    isAutoincrement: boolean;
    identity: undefined;
    generated: undefined;
}>;
/** Drizzle column type for `column.date()`: a custom column surfaced as `Date`, stored as text by the driver. */
type AstroDate<T extends GeneratedConfig<'custom'>> = SQLiteColumn<T & {
    data: Date;
    dataType: 'custom';
    columnType: 'SQLiteCustomColumn';
    driverParam: string;
    enumValues: never;
    baseColumn: never;
    isAutoincrement: boolean;
    identity: undefined;
    generated: undefined;
}>;
/** Drizzle column type for `column.boolean()`: surfaced as `boolean`, stored as an integer by the driver. */
type AstroBoolean<T extends GeneratedConfig<'boolean'>> = SQLiteColumn<T & {
    data: boolean;
    dataType: 'boolean';
    columnType: 'SQLiteBoolean';
    driverParam: number;
    enumValues: never;
    baseColumn: never;
    isAutoincrement: boolean;
    identity: undefined;
    generated: undefined;
}>;
/** Drizzle column type for `column.number()`: backed by SQLiteInteger. */
type AstroNumber<T extends GeneratedConfig<'number'>> = SQLiteColumn<T & {
    data: number;
    dataType: 'number';
    columnType: 'SQLiteInteger';
    driverParam: number;
    enumValues: never;
    baseColumn: never;
    isAutoincrement: boolean;
    identity: undefined;
    generated: undefined;
}>;
/** Drizzle column type for `column.json()`: a custom column surfaced as `unknown`, stored as JSON text by the driver. */
type AstroJson<T extends GeneratedConfig<'custom'>> = SQLiteColumn<T & {
    data: unknown;
    dataType: 'custom';
    columnType: 'SQLiteCustomColumn';
    driverParam: string;
    enumValues: never;
    baseColumn: never;
    isAutoincrement: boolean;
    identity: undefined;
    generated: undefined;
}>;
|
||||
/** Select the concrete Astro column type matching a column-config `type` tag. */
type Column<T extends DBColumn['type'], E extends readonly [string, ...string[]] | string, S extends GeneratedConfig> = T extends 'boolean' ? AstroBoolean<S> : T extends 'number' ? AstroNumber<S> : T extends 'text' ? AstroText<S, E> : T extends 'date' ? AstroDate<S> : T extends 'json' ? AstroJson<S> : never;
/**
 * Typed drizzle sqlite table derived from an Astro DB columns config.
 * Per column it computes: the enum literals (text only), primary-key and
 * default flags from the schema, and not-null from `optional`.
 */
export type Table<TTableName extends string, TColumns extends OutputColumnsConfig | ColumnsConfig> = SQLiteTableWithColumns<{
    name: TTableName;
    schema: undefined;
    dialect: 'sqlite';
    columns: {
        [K in Extract<keyof TColumns, string>]: Column<TColumns[K]['type'], TColumns[K]['schema'] extends {
            enum: infer E;
        } ? E extends readonly [string, ...string[]] ? E : string : string, {
            tableName: TTableName;
            name: K;
            isPrimaryKey: TColumns[K]['schema'] extends {
                primaryKey: true;
            } ? true : false;
            hasDefault: TColumns[K]['schema'] extends {
                default: NonNullable<unknown>;
            } ? true : TColumns[K]['schema'] extends {
                primaryKey: true;
            } ? true : false;
            hasRuntimeDefault: TColumns[K]['schema'] extends {
                default: NonNullable<unknown>;
            } ? true : false;
            notNull: TColumns[K]['schema']['optional'] extends true ? false : true;
        }>;
    };
}>;
|
||||
/** Property name that marks an object as a serialized SQL expression. */
export declare const SERIALIZED_SQL_KEY = "__serializedSQL";
/** JSON-safe representation of a raw SQL default (e.g. the NOW/TRUE/FALSE helpers). */
export type SerializedSQL = {
    [SERIALIZED_SQL_KEY]: true;
    sql: string;
};
/** Type guard: true when `value` is an object carrying the serialized-SQL marker key. */
export declare function isSerializedSQL(value: any): value is SerializedSQL;
export {};
|
||||
8
node_modules/@astrojs/db/dist/runtime/types.js
generated
vendored
Normal file
8
node_modules/@astrojs/db/dist/runtime/types.js
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
// Marker key attached to serialized SQL expression objects.
const SERIALIZED_SQL_KEY = "__serializedSQL";
// True when `value` is a non-null object carrying the serialized-SQL
// marker key.
function isSerializedSQL(value) {
  if (typeof value !== "object" || value === null) {
    return false;
  }
  return SERIALIZED_SQL_KEY in value;
}
|
||||
export {
|
||||
SERIALIZED_SQL_KEY,
|
||||
isSerializedSQL
|
||||
};
|
||||
9
node_modules/@astrojs/db/dist/runtime/utils.d.ts
generated
vendored
Normal file
9
node_modules/@astrojs/db/dist/runtime/utils.d.ts
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
import { LibsqlError } from '@libsql/client';
|
||||
import { AstroError } from 'astro/errors';
|
||||
import type { DBColumn } from '../core/types.js';
|
||||
/** True when the column's schema sets a truthy `primaryKey` flag. */
export declare function hasPrimaryKey(column: DBColumn): boolean;
/** Error subclass giving DB failures a distinct name in Astro's error handling. */
export declare class AstroDbError extends AstroError {
    name: string;
}
/** Type guard for libSQL driver errors (real `LibsqlError` instances or flagged `Error`s). */
export declare function isDbError(err: unknown): err is LibsqlError;
/** Convert a filesystem path into a `file://` URL (Windows-aware). */
export declare function pathToFileURL(path: string): URL;
|
||||
35
node_modules/@astrojs/db/dist/runtime/utils.js
generated
vendored
Normal file
35
node_modules/@astrojs/db/dist/runtime/utils.js
generated
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
import { LibsqlError } from "@libsql/client";
|
||||
import { AstroError } from "astro/errors";
|
||||
// A column counts as a primary key when its schema explicitly carries a
// truthy `primaryKey` flag.
function hasPrimaryKey(column) {
  const { schema } = column;
  return "primaryKey" in schema && Boolean(schema.primaryKey);
}
|
||||
// Cached platform check; optional chaining guards non-Node runtimes where
// `process` may be undefined.
const isWindows = process?.platform === "win32";
|
||||
// Error subclass used for DB-specific failures so they surface with a
// distinct name in Astro's error reporting.
class AstroDbError extends AstroError {
  name = "Astro DB Error";
}
|
||||
// Detect libSQL driver errors: either a real LibsqlError instance, or a
// generic Error the client has flagged with `libsqlError`.
function isDbError(err) {
  if (err instanceof LibsqlError) {
    return true;
  }
  return err instanceof Error && err.libsqlError === true;
}
|
||||
// Convert backslashes to forward slashes. Windows extended-length paths
// ("\\?\...") must keep their exact form, so they pass through untouched.
function slash(path) {
  if (path.startsWith("\\\\?\\")) {
    return path;
  }
  return path.split("\\").join("/");
}
|
||||
// Build a file:// URL from a filesystem path. On Windows the path is
// normalized to forward slashes and given a leading "/" so drive letters
// parse as part of the URL path.
// NOTE(review): unlike node:url's pathToFileURL, this does not
// percent-encode special characters ("#", "?", spaces) — confirm callers
// only pass safe paths.
function pathToFileURL(path) {
  if (!isWindows) {
    return new URL("file://" + path);
  }
  let normalized = slash(path);
  if (!normalized.startsWith("/")) {
    normalized = "/" + normalized;
  }
  return new URL("file://" + normalized);
}
|
||||
export {
|
||||
AstroDbError,
|
||||
hasPrimaryKey,
|
||||
isDbError,
|
||||
pathToFileURL
|
||||
};
|
||||
112
node_modules/@astrojs/db/dist/runtime/virtual.js
generated
vendored
Normal file
112
node_modules/@astrojs/db/dist/runtime/virtual.js
generated
vendored
Normal file
@@ -0,0 +1,112 @@
|
||||
import { sql as _sql } from "drizzle-orm";
|
||||
/**
 * Internal factory shared by every `column.*` helper: pairs a column-type
 * tag with the user-supplied schema options.
 */
function createColumn(type, schema) {
  const config = {
    type,
    /**
     * @internal
     */
    schema
  };
  return config;
}
|
||||
// Public column builders exposed through the astro:db virtual module.
// Each simply tags the user's options object with its column type.
const column = {
  number(opts = {}) {
    return createColumn("number", opts);
  },
  boolean(opts = {}) {
    return createColumn("boolean", opts);
  },
  text(opts = {}) {
    return createColumn("text", opts);
  },
  date(opts = {}) {
    return createColumn("date", opts);
  },
  json(opts = {}) {
    return createColumn("json", opts);
  }
};
|
||||
// Runtime no-op: returns the table config unchanged. Exists purely so
// config files get a typed `defineTable` entry point; validation happens
// in the core schemas.
const defineTable = (userConfig) => userConfig;
// Runtime no-op: returns the db config unchanged, mirroring defineTable.
const defineDb = (userConfig) => userConfig;
|
||||
// SQL default-value helpers exposed to user configs: raw SQL expressions
// for the current timestamp and boolean literals.
const NOW = _sql`CURRENT_TIMESTAMP`;
const TRUE = _sql`TRUE`;
const FALSE = _sql`FALSE`;
|
||||
import {
|
||||
and,
|
||||
asc,
|
||||
avg,
|
||||
avgDistinct,
|
||||
between,
|
||||
count,
|
||||
countDistinct,
|
||||
desc,
|
||||
eq,
|
||||
exists,
|
||||
gt,
|
||||
gte,
|
||||
ilike,
|
||||
inArray,
|
||||
isNotNull,
|
||||
isNull,
|
||||
like,
|
||||
lt,
|
||||
lte,
|
||||
max,
|
||||
min,
|
||||
ne,
|
||||
not,
|
||||
notBetween,
|
||||
notExists,
|
||||
notIlike,
|
||||
notInArray,
|
||||
or,
|
||||
sql,
|
||||
sum,
|
||||
sumDistinct
|
||||
} from "drizzle-orm";
|
||||
import { alias } from "drizzle-orm/sqlite-core";
|
||||
import { isDbError } from "./utils.js";
|
||||
export {
|
||||
FALSE,
|
||||
NOW,
|
||||
TRUE,
|
||||
alias,
|
||||
and,
|
||||
asc,
|
||||
avg,
|
||||
avgDistinct,
|
||||
between,
|
||||
column,
|
||||
count,
|
||||
countDistinct,
|
||||
defineDb,
|
||||
defineTable,
|
||||
desc,
|
||||
eq,
|
||||
exists,
|
||||
gt,
|
||||
gte,
|
||||
ilike,
|
||||
inArray,
|
||||
isDbError,
|
||||
isNotNull,
|
||||
isNull,
|
||||
like,
|
||||
lt,
|
||||
lte,
|
||||
max,
|
||||
min,
|
||||
ne,
|
||||
not,
|
||||
notBetween,
|
||||
notExists,
|
||||
notIlike,
|
||||
notInArray,
|
||||
or,
|
||||
sql,
|
||||
sum,
|
||||
sumDistinct
|
||||
};
|
||||
4
node_modules/@astrojs/db/dist/utils.d.ts
generated
vendored
Normal file
4
node_modules/@astrojs/db/dist/utils.d.ts
generated
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
export { defineDbIntegration } from './core/utils.js';
|
||||
import type { ColumnsConfig, TableConfig } from './core/types.js';
|
||||
import { type Table } from './runtime/index.js';
|
||||
/** Convert an Astro DB table config into a typed drizzle table (the config is schema-validated first). */
export declare function asDrizzleTable<TableName extends string = string, TColumns extends ColumnsConfig = ColumnsConfig>(name: TableName, tableConfig: TableConfig<TColumns>): Table<TableName, TColumns>;
|
||||
10
node_modules/@astrojs/db/dist/utils.js
generated
vendored
Normal file
10
node_modules/@astrojs/db/dist/utils.js
generated
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
import { defineDbIntegration } from "./core/utils.js";
|
||||
import { tableSchema } from "./core/schemas.js";
|
||||
import { asDrizzleTable as internal_asDrizzleTable } from "./runtime/index.js";
|
||||
// Validate the user's table config against the core schema, then hand the
// parsed result to the runtime drizzle-table builder.
function asDrizzleTable(name, tableConfig) {
  const parsed = tableSchema.parse(tableConfig);
  return internal_asDrizzleTable(name, parsed);
}
|
||||
export {
|
||||
asDrizzleTable,
|
||||
defineDbIntegration
|
||||
};
|
||||
Reference in New Issue
Block a user