Complete Astro migration - PDPA compliant website

- Migrated all pages from Next.js to Astro
- Added PDPA-compliant Privacy Policy (Thai)
- Added PDPA-compliant Terms & Conditions (Thai)
- Added Cookie Policy with disclosure (Thai)
- Implemented cookie consent banner (client-side)
- Integrated Umami Analytics placeholder
- Blog system with 3 posts
- Optimized Docker configuration for production
- Static site build (184KB, 11 pages)
- Ready for Easypanel deployment

Backup: /Users/kunthawatgreethong/Gitea/dealplustech-backup-nextjs-20260309.tar.gz
This commit is contained in:
Kunthawat Greethong
2026-03-09 18:28:01 +07:00
parent 668f69048f
commit 6402d885f9
6183 changed files with 463899 additions and 1913 deletions

File diff suppressed because one or more lines are too long

1
dealplustech-astro/node_modules/.bin/astro-consent generated vendored Symbolic link
View File

@@ -0,0 +1 @@
../astro-consent/dist/cli.cjs

File diff suppressed because it is too large Load Diff

View File

@@ -1,25 +1,25 @@
{
"hash": "bd82ba1f",
"hash": "ec7c8616",
"configHash": "06117f6a",
"lockfileHash": "503a0907",
"browserHash": "4ee2f0da",
"lockfileHash": "fa2c2659",
"browserHash": "12cbe3a0",
"optimized": {
"astro > cssesc": {
"src": "../../cssesc/cssesc.js",
"file": "astro___cssesc.js",
"fileHash": "ac027a7e",
"fileHash": "3b44b1ed",
"needsInterop": true
},
"astro > aria-query": {
"src": "../../aria-query/lib/index.js",
"file": "astro___aria-query.js",
"fileHash": "e77ce3d0",
"fileHash": "4b6e1232",
"needsInterop": true
},
"astro > axobject-query": {
"src": "../../axobject-query/lib/index.js",
"file": "astro___axobject-query.js",
"fileHash": "052bb1ed",
"fileHash": "32d0da0b",
"needsInterop": true
}
},

59
dealplustech-astro/node_modules/@astrojs/db/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,59 @@
MIT License
Copyright (c) 2021 Fred K. Schott
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
This license applies to parts of the `packages/create-astro` and `packages/astro` subdirectories originating from the https://github.com/sveltejs/kit repository:
Copyright (c) 2020 [these people](https://github.com/sveltejs/kit/graphs/contributors)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
"""
This license applies to parts of the `packages/create-astro` and `packages/astro` subdirectories originating from the https://github.com/vitejs/vite repository:
MIT License
Copyright (c) 2019-present, Yuxi (Evan) You and Vite contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""

38
dealplustech-astro/node_modules/@astrojs/db/README.md generated vendored Normal file
View File

@@ -0,0 +1,38 @@
# @astrojs/db (experimental) 💿
This **[Astro integration][astro-integration]** enables the usage of [SQLite](https://www.sqlite.org/) in Astro Projects.
## Documentation
Read the [`@astrojs/db` docs][docs]
## Support
- Get help in the [Astro Discord][discord]. Post questions in our `#support` forum, or visit our dedicated `#dev` channel to discuss current development and more!
- Check our [Astro Integration Documentation][astro-integration] for more on integrations.
- Submit bug reports and feature requests as [GitHub issues][issues].
## Contributing
This package is maintained by Astro's Core team. You're welcome to submit an issue or PR! These links will help you get started:
- [Contributor Manual][contributing]
- [Code of Conduct][coc]
- [Community Guide][community]
## License
MIT
Copyright (c) 2023–present [Astro][astro]
[astro]: https://astro.build/
[docs]: https://docs.astro.build/en/guides/integrations-guide/db/
[contributing]: https://github.com/withastro/astro/blob/main/CONTRIBUTING.md
[coc]: https://github.com/withastro/.github/blob/main/CODE_OF_CONDUCT.md
[community]: https://github.com/withastro/.github/blob/main/COMMUNITY_GUIDE.md
[discord]: https://astro.build/chat/
[issues]: https://github.com/withastro/astro/issues
[astro-integration]: https://docs.astro.build/en/guides/integrations-guide/

View File

@@ -0,0 +1,6 @@
/**
* This is a modified version of Astro's error map. source:
* https://github.com/withastro/astro/blob/main/packages/astro/src/content/error-map.ts
*/
import type { z } from 'astro/zod';
export declare const errorMap: z.ZodErrorMap;

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,60 @@
import type { z } from 'zod';
import type { booleanColumnSchema, columnSchema, columnsSchema, dateColumnSchema, dbConfigSchema, indexSchema, jsonColumnSchema, MaybeArray, numberColumnOptsSchema, numberColumnSchema, referenceableColumnSchema, resolvedIndexSchema, tableSchema, textColumnOptsSchema, textColumnSchema } from './schemas.js';
export type ResolvedIndexes = z.output<typeof dbConfigSchema>['tables'][string]['indexes'];
export type BooleanColumn = z.infer<typeof booleanColumnSchema>;
export type BooleanColumnInput = z.input<typeof booleanColumnSchema>;
export type NumberColumn = z.infer<typeof numberColumnSchema>;
export type NumberColumnInput = z.input<typeof numberColumnSchema>;
export type TextColumn = z.infer<typeof textColumnSchema>;
export type TextColumnInput = z.input<typeof textColumnSchema>;
export type DateColumn = z.infer<typeof dateColumnSchema>;
export type DateColumnInput = z.input<typeof dateColumnSchema>;
export type JsonColumn = z.infer<typeof jsonColumnSchema>;
export type JsonColumnInput = z.input<typeof jsonColumnSchema>;
export type ColumnType = BooleanColumn['type'] | NumberColumn['type'] | TextColumn['type'] | DateColumn['type'] | JsonColumn['type'];
export type DBColumn = z.infer<typeof columnSchema>;
export type DBColumnInput = DateColumnInput | BooleanColumnInput | NumberColumnInput | TextColumnInput | JsonColumnInput;
export type DBColumns = z.infer<typeof columnsSchema>;
export type DBTable = z.infer<typeof tableSchema>;
export type DBTables = Record<string, DBTable>;
export type ResolvedDBTables = z.output<typeof dbConfigSchema>['tables'];
export type ResolvedDBTable = z.output<typeof dbConfigSchema>['tables'][string];
export type DBSnapshot = {
schema: Record<string, ResolvedDBTable>;
version: string;
};
export type DBConfigInput = z.input<typeof dbConfigSchema>;
export type DBConfig = z.infer<typeof dbConfigSchema>;
export type ColumnsConfig = z.input<typeof tableSchema>['columns'];
export type OutputColumnsConfig = z.output<typeof tableSchema>['columns'];
export interface TableConfig<TColumns extends ColumnsConfig = ColumnsConfig> extends Pick<z.input<typeof tableSchema>, 'columns' | 'indexes' | 'foreignKeys'> {
columns: TColumns;
foreignKeys?: Array<{
columns: MaybeArray<Extract<keyof TColumns, string>>;
references: () => MaybeArray<z.input<typeof referenceableColumnSchema>>;
}>;
indexes?: Array<IndexConfig<TColumns>> | Record<string, LegacyIndexConfig<TColumns>>;
deprecated?: boolean;
}
interface IndexConfig<TColumns extends ColumnsConfig> extends z.input<typeof indexSchema> {
on: MaybeArray<Extract<keyof TColumns, string>>;
}
/** @deprecated */
interface LegacyIndexConfig<TColumns extends ColumnsConfig> extends z.input<typeof resolvedIndexSchema> {
on: MaybeArray<Extract<keyof TColumns, string>>;
}
export type NumberColumnOpts = z.input<typeof numberColumnOptsSchema>;
export type TextColumnOpts = z.input<typeof textColumnOptsSchema>;
declare global {
namespace Astro {
interface IntegrationHooks {
'astro:db:setup'?: (options: {
extendDb: (options: {
configEntrypoint?: URL | string;
seedEntrypoint?: URL | string;
}) => void;
}) => void | Promise<void>;
}
}
}
export {};

View File

@@ -0,0 +1,19 @@
import type { AstroConfig, AstroIntegration } from 'astro';
import type { Arguments } from 'yargs-parser';
import './types.js';
export type VitePlugin = Required<AstroConfig['vite']>['plugins'][number];
export declare function getAstroEnv(envMode?: string): Record<`ASTRO_${string}`, string>;
export type RemoteDatabaseInfo = {
url: string;
token: string;
};
export declare function getRemoteDatabaseInfo(): RemoteDatabaseInfo;
export declare function resolveDbAppToken(flags: Arguments, envToken: string): string;
export declare function resolveDbAppToken(flags: Arguments, envToken: string | undefined): string | undefined;
export declare function getDbDirectoryUrl(root: URL | string): URL;
export declare function defineDbIntegration(integration: AstroIntegration): AstroIntegration;
/**
* Map an object's values to a new set of values
* while preserving types.
*/
export declare function mapObject<T, U = T>(item: Record<string, T>, callback: (key: string, value: T) => U): Record<string, U>;

View File

@@ -0,0 +1,92 @@
import type { ColumnBaseConfig, ColumnDataType } from 'drizzle-orm';
import type { SQLiteColumn, SQLiteTableWithColumns } from 'drizzle-orm/sqlite-core';
import type { ColumnsConfig, DBColumn, OutputColumnsConfig } from '../core/types.js';
type GeneratedConfig<T extends ColumnDataType = ColumnDataType> = Pick<ColumnBaseConfig<T, string>, 'name' | 'tableName' | 'notNull' | 'hasDefault' | 'hasRuntimeDefault' | 'isPrimaryKey'>;
type AstroText<T extends GeneratedConfig<'string'>, E extends readonly [string, ...string[]] | string> = SQLiteColumn<T & {
data: E extends readonly (infer U)[] ? U : string;
dataType: 'string';
columnType: 'SQLiteText';
driverParam: string;
enumValues: E extends [string, ...string[]] ? E : never;
baseColumn: never;
isAutoincrement: boolean;
identity: undefined;
generated: undefined;
}>;
type AstroDate<T extends GeneratedConfig<'custom'>> = SQLiteColumn<T & {
data: Date;
dataType: 'custom';
columnType: 'SQLiteCustomColumn';
driverParam: string;
enumValues: never;
baseColumn: never;
isAutoincrement: boolean;
identity: undefined;
generated: undefined;
}>;
type AstroBoolean<T extends GeneratedConfig<'boolean'>> = SQLiteColumn<T & {
data: boolean;
dataType: 'boolean';
columnType: 'SQLiteBoolean';
driverParam: number;
enumValues: never;
baseColumn: never;
isAutoincrement: boolean;
identity: undefined;
generated: undefined;
}>;
type AstroNumber<T extends GeneratedConfig<'number'>> = SQLiteColumn<T & {
data: number;
dataType: 'number';
columnType: 'SQLiteInteger';
driverParam: number;
enumValues: never;
baseColumn: never;
isAutoincrement: boolean;
identity: undefined;
generated: undefined;
}>;
type AstroJson<T extends GeneratedConfig<'custom'>> = SQLiteColumn<T & {
data: unknown;
dataType: 'custom';
columnType: 'SQLiteCustomColumn';
driverParam: string;
enumValues: never;
baseColumn: never;
isAutoincrement: boolean;
identity: undefined;
generated: undefined;
}>;
type Column<T extends DBColumn['type'], E extends readonly [string, ...string[]] | string, S extends GeneratedConfig> = T extends 'boolean' ? AstroBoolean<S> : T extends 'number' ? AstroNumber<S> : T extends 'text' ? AstroText<S, E> : T extends 'date' ? AstroDate<S> : T extends 'json' ? AstroJson<S> : never;
export type Table<TTableName extends string, TColumns extends OutputColumnsConfig | ColumnsConfig> = SQLiteTableWithColumns<{
name: TTableName;
schema: undefined;
dialect: 'sqlite';
columns: {
[K in Extract<keyof TColumns, string>]: Column<TColumns[K]['type'], TColumns[K]['schema'] extends {
enum: infer E;
} ? E extends readonly [string, ...string[]] ? E : string : string, {
tableName: TTableName;
name: K;
isPrimaryKey: TColumns[K]['schema'] extends {
primaryKey: true;
} ? true : false;
hasDefault: TColumns[K]['schema'] extends {
default: NonNullable<unknown>;
} ? true : TColumns[K]['schema'] extends {
primaryKey: true;
} ? true : false;
hasRuntimeDefault: TColumns[K]['schema'] extends {
default: NonNullable<unknown>;
} ? true : false;
notNull: TColumns[K]['schema']['optional'] extends true ? false : true;
}>;
};
}>;
export declare const SERIALIZED_SQL_KEY = "__serializedSQL";
export type SerializedSQL = {
[SERIALIZED_SQL_KEY]: true;
sql: string;
};
export declare function isSerializedSQL(value: any): value is SerializedSQL;
export {};

View File

@@ -0,0 +1,9 @@
import { LibsqlError } from '@libsql/client';
import { AstroError } from 'astro/errors';
import type { DBColumn } from '../core/types.js';
/** True when the column's schema declares it as a primary key. */
export declare function hasPrimaryKey(column: DBColumn): boolean;
/** Database-specific error type carrying the standard Astro error shape. */
export declare class AstroDbError extends AstroError {
    name: string;
}
/** Type guard narrowing an unknown error to a libSQL client error. */
export declare function isDbError(err: unknown): err is LibsqlError;
/** Convert a filesystem path string to a `file://` URL. */
export declare function pathToFileURL(path: string): URL;

View File

@@ -0,0 +1,48 @@
import type { BooleanColumnInput, ColumnsConfig, DateColumnInput, DBConfigInput, JsonColumnInput, NumberColumnOpts, TableConfig, TextColumnOpts } from '../core/types.js';
export declare const column: {
number: <T extends NumberColumnOpts>(opts?: T) => {
type: "number";
/**
* @internal
*/
schema: T;
};
boolean: <T extends BooleanColumnInput["schema"]>(opts?: T) => {
type: "boolean";
/**
* @internal
*/
schema: T;
};
text: <T extends TextColumnOpts, const E extends T["enum"] extends readonly [string, ...string[]] ? Omit<T, "enum"> & T["enum"] : T>(opts?: E) => {
type: "text";
/**
* @internal
*/
schema: E;
};
date<T extends DateColumnInput["schema"]>(opts?: T): {
type: "date";
/**
* @internal
*/
schema: T;
};
json<T extends JsonColumnInput["schema"]>(opts?: T): {
type: "json";
/**
* @internal
*/
schema: T;
};
};
export declare function defineTable<TColumns extends ColumnsConfig>(userConfig: TableConfig<TColumns>): TableConfig<TColumns>;
export declare function defineDb(userConfig: DBConfigInput): {
tables?: unknown;
};
export declare const NOW: import("drizzle-orm").SQL<unknown>;
export declare const TRUE: import("drizzle-orm").SQL<unknown>;
export declare const FALSE: import("drizzle-orm").SQL<unknown>;
export { and, asc, avg, avgDistinct, between, count, countDistinct, desc, eq, exists, gt, gte, ilike, inArray, isNotNull, isNull, like, lt, lte, max, min, ne, not, notBetween, notExists, notIlike, notInArray, or, sql, sum, sumDistinct, } from 'drizzle-orm';
export { alias } from 'drizzle-orm/sqlite-core';
export { isDbError } from './utils.js';

View File

@@ -0,0 +1,8 @@
import type { AstroConfig } from 'astro';
import type { Arguments } from 'yargs-parser';
import type { DBConfig } from '../../../types.js';
export declare function cmd({ astroConfig, dbConfig, flags, }: {
astroConfig: AstroConfig;
dbConfig: DBConfig;
flags: Arguments;
}): Promise<void>;

View File

@@ -0,0 +1,65 @@
import { existsSync } from "node:fs";
import colors from "piccolore";
import { isDbError } from "../../../../runtime/utils.js";
import {
EXEC_DEFAULT_EXPORT_ERROR,
EXEC_ERROR,
FILE_NOT_FOUND_ERROR,
MISSING_EXECUTE_PATH_ERROR
} from "../../../errors.js";
import {
getLocalVirtualModContents,
getRemoteVirtualModContents
} from "../../../integration/vite-plugin-db.js";
import { bundleFile, importBundledFile } from "../../../load-file.js";
import { getRemoteDatabaseInfo, resolveDbAppToken } from "../../../utils.js";
// Implements `astro db execute <file-path>`: bundles the given ts/js file
// against a virtual `astro:db` module and runs its default export.
// Exits the process with code 1 on any usage error.
async function cmd({
  astroConfig,
  dbConfig,
  flags
}) {
  // NOTE(review): assumes the positional args look like
  // ["node", "astro", "db", "execute", <file-path>] so the path is at
  // index 4 — confirm against how yargs-parser populates `_` here.
  const filePath = flags._[4];
  if (typeof filePath !== "string") {
    console.error(MISSING_EXECUTE_PATH_ERROR);
    process.exit(1);
  }
  // Resolve relative to the project root and fail fast if the file is missing.
  const fileUrl = new URL(filePath, astroConfig.root);
  if (!existsSync(fileUrl)) {
    console.error(FILE_NOT_FOUND_ERROR(filePath));
    process.exit(1);
  }
  let virtualModContents;
  if (flags.remote) {
    // --remote: generate a virtual module wired to the remote database,
    // authenticated with the resolved app token.
    const dbInfo = getRemoteDatabaseInfo();
    const appToken = resolveDbAppToken(flags, dbInfo.token);
    virtualModContents = getRemoteVirtualModContents({
      tables: dbConfig.tables ?? {},
      appToken,
      isBuild: false,
      output: "server",
      localExecution: true
    });
  } else {
    // Default: run against the local database.
    virtualModContents = getLocalVirtualModContents({
      tables: dbConfig.tables ?? {},
      root: astroConfig.root,
      localExecution: true
    });
  }
  // Bundle the user file with the virtual module inlined, then import the
  // resulting code and invoke its default export.
  const { code } = await bundleFile({ virtualModContents, root: astroConfig.root, fileUrl });
  const mod = await importBundledFile({ code, root: astroConfig.root });
  if (typeof mod.default !== "function") {
    console.error(EXEC_DEFAULT_EXPORT_ERROR(filePath));
    process.exit(1);
  }
  try {
    await mod.default();
    console.info(`${colors.green("\u2714")} File run successfully.`);
  } catch (e) {
    // Wrap database errors in a friendlier message; rethrow everything else.
    if (isDbError(e)) throw new Error(EXEC_ERROR(e.message));
    else throw e;
  }
}
export {
  cmd
};

View File

@@ -0,0 +1,8 @@
import type { AstroConfig } from 'astro';
import type { Arguments } from 'yargs-parser';
import type { DBConfig } from '../../../types.js';
export declare function cmd({ dbConfig, flags, }: {
astroConfig: AstroConfig;
dbConfig: DBConfig;
flags: Arguments;
}): Promise<void>;

View File

@@ -0,0 +1,107 @@
import { sql } from "drizzle-orm";
import prompts from "prompts";
import { MIGRATION_VERSION } from "../../../consts.js";
import { createClient } from "../../../db-client/libsql-node.js";
import {
getRemoteDatabaseInfo,
resolveDbAppToken
} from "../../../utils.js";
import {
createCurrentSnapshot,
createEmptySnapshot,
formatDataLossMessage,
getMigrationQueries,
getProductionCurrentSnapshot
} from "../../migration-queries.js";
// Implements `astro db push`: diffs the remote schema snapshot against the
// local db config and applies the resulting migration statements.
// --dry-run prints the statements instead of executing them;
// --force-reset drops everything and recreates the schema after confirmation.
async function cmd({
  dbConfig,
  flags
}) {
  const isDryRun = flags.dryRun;
  const isForceReset = flags.forceReset;
  const dbInfo = getRemoteDatabaseInfo();
  const appToken = resolveDbAppToken(flags, dbInfo.token);
  // Snapshot currently stored remotely; undefined on a fresh database.
  const productionSnapshot = await getProductionCurrentSnapshot({ ...dbInfo, token: appToken });
  const currentSnapshot = createCurrentSnapshot(dbConfig);
  const isFromScratch = !productionSnapshot;
  const { queries: migrationQueries, confirmations } = await getMigrationQueries({
    oldSnapshot: isFromScratch ? createEmptySnapshot() : productionSnapshot,
    newSnapshot: currentSnapshot,
    reset: isForceReset
  });
  if (migrationQueries.length === 0) {
    console.log("Database schema is up to date.");
  } else {
    console.log(`Database schema is out of date.`);
  }
  if (isForceReset) {
    // Destructive path: require an explicit interactive confirmation
    // before erasing all data.
    const { begin } = await prompts({
      type: "confirm",
      name: "begin",
      message: `Reset your database? All of your data will be erased and your schema created from scratch.`,
      initial: false
    });
    if (!begin) {
      console.log("Canceled.");
      process.exit(0);
    }
    console.log(`Force-pushing to the database. All existing data will be erased.`);
  } else if (confirmations.length > 0) {
    // Pending changes would lose data and --force-reset was not given:
    // print the reasons and abort.
    console.log("\n" + formatDataLossMessage(confirmations) + "\n");
    throw new Error("Exiting.");
  }
  if (isDryRun) {
    console.log("Statements:", JSON.stringify(migrationQueries, void 0, 2));
  } else {
    console.log(`Pushing database schema updates...`);
    // NOTE(review): this push runs even when migrationQueries is empty,
    // which still rewrites the stored snapshot row — presumably intentional
    // (keeps the snapshot current); confirm upstream.
    await pushSchema({
      statements: migrationQueries,
      dbInfo,
      appToken,
      isDryRun,
      currentSnapshot
    });
  }
  console.info("Push complete!");
}
/**
 * Assemble the push payload (statements + snapshot + migration version) and
 * send it to the database. In dry-run mode the payload is only printed and a
 * stub 200 Response is returned instead of contacting the database.
 */
async function pushSchema({
  statements,
  dbInfo,
  appToken,
  isDryRun,
  currentSnapshot
}) {
  const payload = {
    snapshot: currentSnapshot,
    sql: statements,
    version: MIGRATION_VERSION
  };
  if (!isDryRun) {
    return pushToDb(payload, appToken, dbInfo.url);
  }
  console.info("[DRY RUN] Batch query:", JSON.stringify(payload, null, 2));
  return new Response(null, { status: 200 });
}
// Open a libSQL client and apply the migration atomically: ensure the
// snapshot bookkeeping table exists, then run every migration statement and
// record the new snapshot inside a single transaction so a failed statement
// leaves neither partial schema changes nor a stale snapshot.
async function pushToDb(requestBody, appToken, remoteUrl) {
  const client = createClient({
    token: appToken,
    url: remoteUrl
  });
  await client.run(sql`create table if not exists _astro_db_snapshot (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  version TEXT,
  snapshot BLOB
);`);
  await client.transaction(async (tx) => {
    // Migration statements arrive as raw SQL strings from the diffing step.
    for (const stmt of requestBody.sql) {
      await tx.run(sql.raw(stmt));
    }
    // Persist the snapshot so the next push diffs against this state.
    await tx.run(sql`insert into _astro_db_snapshot (version, snapshot) values (
      ${requestBody.version},
      ${JSON.stringify(requestBody.snapshot)}
    )`);
  });
}
export {
  cmd
};

View File

@@ -0,0 +1,8 @@
import type { AstroConfig } from 'astro';
import type { Arguments } from 'yargs-parser';
import type { DBConfigInput } from '../../../types.js';
export declare function cmd({ flags, astroConfig, }: {
dbConfig: DBConfigInput;
astroConfig: AstroConfig;
flags: Arguments;
}): Promise<void>;

View File

@@ -0,0 +1,36 @@
import { sql } from "drizzle-orm";
import { normalizeDatabaseUrl } from "../../../../runtime/index.js";
import { DB_PATH } from "../../../consts.js";
import { createClient as createLocalDatabaseClient } from "../../../db-client/libsql-local.js";
import { createClient as createRemoteDatabaseClient } from "../../../db-client/libsql-node.js";
import { SHELL_QUERY_MISSING_ERROR } from "../../../errors.js";
import { getAstroEnv, getRemoteDatabaseInfo, resolveDbAppToken } from "../../../utils.js";
// Implements `astro db shell --query <sql>`: runs a raw SQL string against
// either the remote database (--remote) or the local database file, and
// prints the driver's result object. Exits with code 1 when --query is
// missing.
async function cmd({
  flags,
  astroConfig
}) {
  const query = flags.query;
  if (!query) {
    console.error(SHELL_QUERY_MISSING_ERROR);
    process.exit(1);
  }
  const dbInfo = getRemoteDatabaseInfo();
  if (flags.remote) {
    const appToken = resolveDbAppToken(flags, dbInfo.token);
    const db = createRemoteDatabaseClient({ ...dbInfo, token: appToken });
    const result = await db.run(sql.raw(query));
    console.log(result);
  } else {
    // The ASTRO_DATABASE_FILE env var may override where the local database
    // lives; otherwise fall back to DB_PATH under the project root.
    const { ASTRO_DATABASE_FILE } = getAstroEnv();
    const dbUrl = normalizeDatabaseUrl(
      ASTRO_DATABASE_FILE,
      new URL(DB_PATH, astroConfig.root).href
    );
    const db = createLocalDatabaseClient({ url: dbUrl });
    const result = await db.run(sql.raw(query));
    console.log(result);
  }
}
export {
  cmd
};

View File

@@ -0,0 +1,8 @@
import type { AstroConfig } from 'astro';
import type { Arguments } from 'yargs-parser';
import type { DBConfig } from '../../../types.js';
export declare function cmd({ dbConfig, flags, }: {
astroConfig: AstroConfig;
dbConfig: DBConfig;
flags: Arguments;
}): Promise<void>;

View File

@@ -0,0 +1,46 @@
import { getRemoteDatabaseInfo, resolveDbAppToken } from "../../../utils.js";
import {
createCurrentSnapshot,
createEmptySnapshot,
formatDataLossMessage,
getMigrationQueries,
getProductionCurrentSnapshot
} from "../../migration-queries.js";
/**
 * Implements `astro db verify`: diff the remote schema snapshot against the
 * local db config and report whether a push is needed, without applying any
 * changes. Prints JSON with --json, otherwise a human-readable message.
 * Exits 0 when the schema matches or can be pushed safely, 1 when the
 * pending changes would lose data.
 */
async function cmd({
  dbConfig,
  flags
}) {
  const wantsJson = flags.json;
  const dbInfo = getRemoteDatabaseInfo();
  const appToken = resolveDbAppToken(flags, dbInfo.token);
  const remoteSnapshot = await getProductionCurrentSnapshot({ ...dbInfo, token: appToken });
  const localSnapshot = createCurrentSnapshot(dbConfig);
  const { queries: migrationQueries, confirmations } = await getMigrationQueries({
    oldSnapshot: remoteSnapshot || createEmptySnapshot(),
    newSnapshot: localSnapshot
  });
  const result = { exitCode: 0, message: "", code: "", data: void 0 };
  if (migrationQueries.length > 0) {
    result.code = "NO_MATCH";
    result.message = `Database schema is out of date.
Run 'astro db push' to push up your latest changes.`;
  } else {
    result.code = "MATCH";
    result.message = `Database schema is up to date.`;
  }
  // Data-loss confirmations take precedence over the match/no-match verdict.
  if (confirmations.length > 0) {
    result.code = "DATA_LOSS";
    result.exitCode = 1;
    result.data = confirmations;
    result.message = formatDataLossMessage(confirmations, !wantsJson);
  }
  console.log(wantsJson ? JSON.stringify(result) : result.message);
  process.exit(result.exitCode);
}
export {
  cmd
};

View File

@@ -0,0 +1,6 @@
import type { AstroConfig } from 'astro';
import type { Arguments } from 'yargs-parser';
/**
 * Entry point for the `astro db` CLI: resolves the db config and dispatches
 * to the requested subcommand (shell, push, verify, execute).
 */
export declare function cli({ flags, config: astroConfig, }: {
    flags: Arguments;
    config: AstroConfig;
}): Promise<void>;

View File

@@ -0,0 +1,75 @@
import { resolveDbConfig } from "../load-file.js";
import { printHelp } from "./print-help.js";
// Entry point for the `astro db` CLI. Resolves the project's db config and
// dispatches to the matching subcommand module, which is imported lazily so
// unused commands carry no startup cost.
async function cli({
  flags,
  config: astroConfig
}) {
  const args = flags._;
  // Supports both `astro db <cmd>` and invocations where the subcommand
  // appears one position earlier in the argv.
  const command = args[2] === "db" ? args[3] : args[2];
  validateDbAppTokenFlag(command, flags);
  const { dbConfig } = await resolveDbConfig(astroConfig);
  switch (command) {
    case "shell": {
      const { cmd } = await import("./commands/shell/index.js");
      return await cmd({ astroConfig, dbConfig, flags });
    }
    case "gen": {
      // Retired subcommand: kept as a friendly no-op for older workflows.
      console.log('"astro db gen" is no longer needed! Visit the docs for more information.');
      return;
    }
    case "sync": {
      // Retired subcommand: kept as a friendly no-op for older workflows.
      console.log('"astro db sync" is no longer needed! Visit the docs for more information.');
      return;
    }
    case "push": {
      const { cmd } = await import("./commands/push/index.js");
      return await cmd({ astroConfig, dbConfig, flags });
    }
    case "verify": {
      const { cmd } = await import("./commands/verify/index.js");
      return await cmd({ astroConfig, dbConfig, flags });
    }
    case "execute": {
      const { cmd } = await import("./commands/execute/index.js");
      return await cmd({ astroConfig, dbConfig, flags });
    }
    default: {
      // Unknown (or missing) command: report it when one was given, then
      // show usage help either way.
      if (command != null) {
        console.error(`Unknown command: ${command}`);
      }
      printHelp({
        commandName: "astro db",
        usage: "[command] [...flags]",
        headline: " ",
        tables: {
          Commands: [
            ["push", "Push table schema updates to libSQL."],
            ["verify", "Test schema updates with libSQL (good for CI)."],
            [
              "astro db execute <file-path>",
              "Execute a ts/js file using astro:db. Use --remote to connect to libSQL."
            ],
            [
              "astro db shell --query <sql-string>",
              "Execute a SQL string. Use --remote to connect to libSQL."
            ]
          ]
        }
      });
      return;
    }
  }
}
/**
 * Validate the optional --db-app-token flag for the subcommands that accept
 * one. Absent/null tokens are fine; a present token must be a string,
 * otherwise an error is printed and the process exits with code 1.
 */
function validateDbAppTokenFlag(command, flags) {
  const tokenCommands = ["execute", "push", "verify", "shell"];
  if (!tokenCommands.includes(command)) return;
  const dbAppToken = flags.dbAppToken;
  if (dbAppToken == null) return;
  if (typeof dbAppToken !== "string") {
    console.error(`Invalid value for --db-app-token; expected a string.`);
    process.exit(1);
  }
}
export {
cli
};

View File

@@ -0,0 +1,22 @@
import type { DBConfig, DBSnapshot, ResolvedDBTable } from '../types.js';
import type { RemoteDatabaseInfo } from '../utils.js';
export declare function getMigrationQueries({ oldSnapshot, newSnapshot, reset, }: {
oldSnapshot: DBSnapshot;
newSnapshot: DBSnapshot;
reset?: boolean;
}): Promise<{
queries: string[];
confirmations: string[];
}>;
export declare function getTableChangeQueries({ tableName, oldTable, newTable, }: {
tableName: string;
oldTable: ResolvedDBTable;
newTable: ResolvedDBTable;
}): Promise<{
queries: string[];
confirmations: string[];
}>;
export declare function getProductionCurrentSnapshot({ url, token, }: RemoteDatabaseInfo): Promise<DBSnapshot | undefined>;
export declare function createCurrentSnapshot({ tables }: DBConfig): DBSnapshot;
export declare function createEmptySnapshot(): DBSnapshot;
export declare function formatDataLossMessage(confirmations: string[], isColor?: boolean): string;

View File

@@ -0,0 +1,373 @@
import { stripVTControlCharacters } from "node:util";
import deepDiff from "deep-diff";
import { sql } from "drizzle-orm";
import { SQLiteAsyncDialect } from "drizzle-orm/sqlite-core";
import { customAlphabet } from "nanoid";
import color from "piccolore";
import { isSerializedSQL } from "../../runtime/types.js";
import { hasPrimaryKey, isDbError } from "../../runtime/utils.js";
import { MIGRATION_VERSION } from "../consts.js";
import { createClient } from "../db-client/libsql-node.js";
import { RENAME_COLUMN_ERROR, RENAME_TABLE_ERROR } from "../errors.js";
import {
getCreateIndexQueries,
getCreateTableQuery,
getDropTableIfExistsQuery,
getModifiers,
getReferencesConfig,
hasDefault,
schemaTypeToSqlType
} from "../queries.js";
import { columnSchema } from "../schemas.js";
const sqlite = new SQLiteAsyncDialect();
const genTempTableName = customAlphabet("abcdefghijklmnopqrstuvwxyz", 10);
// Compute the SQL statements that transform `oldSnapshot` into `newSnapshot`.
// Returns the statement list plus human-readable confirmations for changes
// that could lose data. With `reset`, every old table is dropped first and
// the schema is rebuilt from scratch.
async function getMigrationQueries({
  oldSnapshot,
  newSnapshot,
  reset = false
}) {
  const queries = [];
  const confirmations = [];
  if (reset) {
    // Treat the old state as empty after queueing drops for every existing
    // table, so the rest of the diff emits a full rebuild.
    const currentSnapshot = oldSnapshot;
    oldSnapshot = createEmptySnapshot();
    queries.push(...getDropTableQueriesForSnapshot(currentSnapshot));
  }
  const addedTables = getAddedTables(oldSnapshot, newSnapshot);
  const droppedTables = getDroppedTables(oldSnapshot, newSnapshot);
  // Tables marked `deprecated` may be dropped silently; only undeprecated
  // drops participate in the rename-ambiguity check below.
  const notDeprecatedDroppedTables = Object.fromEntries(
    Object.entries(droppedTables).filter(([, table]) => !table.deprecated)
  );
  // An add and a drop in the same push is ambiguous (it may be a rename),
  // so refuse rather than guess and destroy data.
  if (!isEmpty(addedTables) && !isEmpty(notDeprecatedDroppedTables)) {
    const oldTable = Object.keys(notDeprecatedDroppedTables)[0];
    const newTable = Object.keys(addedTables)[0];
    throw new Error(RENAME_TABLE_ERROR(oldTable, newTable));
  }
  for (const [tableName, table] of Object.entries(addedTables)) {
    queries.push(getCreateTableQuery(tableName, table));
    queries.push(...getCreateIndexQueries(tableName, table));
  }
  for (const [tableName] of Object.entries(droppedTables)) {
    const dropQuery = `DROP TABLE ${sqlite.escapeName(tableName)}`;
    queries.push(dropQuery);
  }
  // Diff tables present in both snapshots, column by column.
  for (const [tableName, newTable] of Object.entries(newSnapshot.schema)) {
    const oldTable = oldSnapshot.schema[tableName];
    if (!oldTable) continue;
    const addedColumns = getAdded(oldTable.columns, newTable.columns);
    const droppedColumns = getDropped(oldTable.columns, newTable.columns);
    const notDeprecatedDroppedColumns = Object.fromEntries(
      Object.entries(droppedColumns).filter(([, col]) => !col.schema.deprecated)
    );
    // Same rename-ambiguity rule as above, at column granularity.
    if (!isEmpty(addedColumns) && !isEmpty(notDeprecatedDroppedColumns)) {
      throw new Error(
        RENAME_COLUMN_ERROR(
          `${tableName}.${Object.keys(addedColumns)[0]}`,
          `${tableName}.${Object.keys(notDeprecatedDroppedColumns)[0]}`
        )
      );
    }
    const result = await getTableChangeQueries({
      tableName,
      oldTable,
      newTable
    });
    queries.push(...result.queries);
    confirmations.push(...result.confirmations);
  }
  return { queries, confirmations };
}
// Compute the statements needed to migrate a single table, choosing the
// cheapest safe strategy in order: index-only changes, ALTER TABLE
// add/drop, or a full table recreation (collecting data-loss confirmations
// when the rebuild cannot preserve existing rows).
async function getTableChangeQueries({
  tableName,
  oldTable,
  newTable
}) {
  const queries = [];
  const confirmations = [];
  const updated = getUpdatedColumns(oldTable.columns, newTable.columns);
  const added = getAdded(oldTable.columns, newTable.columns);
  const dropped = getDropped(oldTable.columns, newTable.columns);
  const hasForeignKeyChanges = Boolean(deepDiff(oldTable.foreignKeys, newTable.foreignKeys));
  // Fast path: no column or foreign-key changes, so only indexes can differ.
  if (!hasForeignKeyChanges && isEmpty(updated) && isEmpty(added) && isEmpty(dropped)) {
    return {
      queries: getChangeIndexQueries({
        tableName,
        oldIndexes: oldTable.indexes,
        newIndexes: newTable.indexes
      }),
      confirmations
    };
  }
  // ALTER TABLE path: every added/dropped column must satisfy the
  // restrictions checked by canAlterTableAddColumn/DropColumn.
  if (!hasForeignKeyChanges && isEmpty(updated) && Object.values(dropped).every(canAlterTableDropColumn) && Object.values(added).every(canAlterTableAddColumn)) {
    queries.push(
      ...getAlterTableQueries(tableName, added, dropped),
      ...getChangeIndexQueries({
        tableName,
        oldIndexes: oldTable.indexes,
        newIndexes: newTable.indexes
      })
    );
    return { queries, confirmations };
  }
  // Recreation path: the table must be rebuilt; surface a confirmation
  // message when the rebuild would lose data.
  const dataLossCheck = canRecreateTableWithoutDataLoss(added, updated);
  if (dataLossCheck.dataLoss) {
    const { reason, columnName } = dataLossCheck;
    const reasonMsgs = {
      "added-required": `You added new required column '${color.bold(
        tableName + "." + columnName
      )}' with no default value.
This cannot be executed on an existing table.`,
      "updated-type": `Updating existing column ${color.bold(
        tableName + "." + columnName
      )} to a new type that cannot be handled automatically.`
    };
    confirmations.push(reasonMsgs[reason]);
  }
  // Migrate the implicit primary key only when no user-defined primary key
  // exists before or after the change — presumably SQLite's hidden rowid
  // key; confirm against getRecreateTableQueries.
  const primaryKeyExists = Object.entries(newTable.columns).find(
    ([, column]) => hasPrimaryKey(column)
  );
  const droppedPrimaryKey = Object.entries(dropped).find(([, column]) => hasPrimaryKey(column));
  const recreateTableQueries = getRecreateTableQueries({
    tableName,
    newTable,
    added,
    hasDataLoss: dataLossCheck.dataLoss,
    migrateHiddenPrimaryKey: !primaryKeyExists && !droppedPrimaryKey
  });
  queries.push(...recreateTableQueries, ...getCreateIndexQueries(tableName, newTable));
  return { queries, confirmations };
}
/**
 * Computes DROP/CREATE INDEX statements that bring `oldIndexes` in line with
 * `newIndexes` for the given table.
 */
function getChangeIndexQueries({
  tableName,
  oldIndexes = {},
  newIndexes = {}
}) {
  const added = getAdded(oldIndexes, newIndexes);
  const dropped = getDropped(oldIndexes, newIndexes);
  const updated = getUpdated(oldIndexes, newIndexes);
  // An index cannot be altered in place: a changed index is treated as both
  // a drop (old definition) and an add (new definition).
  Object.assign(dropped, updated);
  Object.assign(added, updated);
  const queries = Object.keys(dropped).map(
    (indexName) => `DROP INDEX ${sqlite.escapeName(indexName)}`
  );
  queries.push(...getCreateIndexQueries(tableName, { indexes: added }));
  return queries;
}
function getAddedTables(oldTables, newTables) {
const added = {};
for (const [key, newTable] of Object.entries(newTables.schema)) {
if (!(key in oldTables.schema)) added[key] = newTable;
}
return added;
}
/** Tables present in the old snapshot's schema but absent from the new one. */
function getDroppedTables(oldTables, newTables) {
  return Object.fromEntries(
    Object.entries(oldTables.schema).filter(([name]) => !(name in newTables.schema))
  );
}
/**
 * Emits `ALTER TABLE ... ADD COLUMN` / `DROP COLUMN` statements for column
 * changes that can be applied in place (eligibility is decided by
 * canAlterTableAddColumn / canAlterTableDropColumn).
 *
 * @param unescTableName unescaped table name; escaped here before use
 * @param added   column name -> column config, to be added
 * @param dropped column name -> column config, to be removed
 * @returns array of SQL statement strings
 */
function getAlterTableQueries(unescTableName, added, dropped) {
  const queries = [];
  const tableName = sqlite.escapeName(unescTableName);
  for (const [unescColumnName, column] of Object.entries(added)) {
    const columnName = sqlite.escapeName(unescColumnName);
    const type = schemaTypeToSqlType(column.type);
    // getModifiers supplies the column's trailing modifier clause
    // (constraints/defaults — defined elsewhere in this module).
    const q = `ALTER TABLE ${tableName} ADD COLUMN ${columnName} ${type}${getModifiers(
      columnName,
      column
    )}`;
    queries.push(q);
  }
  for (const unescColumnName of Object.keys(dropped)) {
    const columnName = sqlite.escapeName(unescColumnName);
    const q = `ALTER TABLE ${tableName} DROP COLUMN ${columnName}`;
    queries.push(q);
  }
  return queries;
}
/**
 * Produces the statements that rebuild `tableName` to match `newTable`.
 *
 * - With `hasDataLoss`: simply DROP and re-CREATE the table.
 * - Otherwise: create a uniquely named temp table with the new shape, copy
 *   the pre-existing columns' rows across, drop the old table, and rename
 *   the temp table into place.
 *
 * `migrateHiddenPrimaryKey` additionally copies the implicit `_id` column.
 */
function getRecreateTableQueries({
  tableName: unescTableName,
  newTable,
  added,
  hasDataLoss,
  migrateHiddenPrimaryKey
}) {
  const unescTempName = `${unescTableName}_${genTempTableName()}`;
  const tempName = sqlite.escapeName(unescTempName);
  const tableName = sqlite.escapeName(unescTableName);
  if (hasDataLoss) {
    return [`DROP TABLE ${tableName}`, getCreateTableQuery(unescTableName, newTable)];
  }
  const newColumns = [...Object.keys(newTable.columns)];
  if (migrateHiddenPrimaryKey) {
    newColumns.unshift("_id");
  }
  // Only columns that already existed can be copied; freshly added columns
  // are excluded from both the INSERT column list and the SELECT.
  const escapedColumns = newColumns.filter((i) => !(i in added)).map((c) => sqlite.escapeName(c)).join(", ");
  return [
    getCreateTableQuery(unescTempName, newTable),
    `INSERT INTO ${tempName} (${escapedColumns}) SELECT ${escapedColumns} FROM ${tableName}`,
    `DROP TABLE ${tableName}`,
    `ALTER TABLE ${tempName} RENAME TO ${tableName}`
  ];
}
/** True when `obj` has no own enumerable keys. */
function isEmpty(obj) {
  const ownKeys = Object.keys(obj);
  return ownKeys.length === 0;
}
/**
 * Whether a new column can be added with `ALTER TABLE ... ADD COLUMN`
 * instead of recreating the table. This mirrors SQLite's documented
 * ADD COLUMN restrictions: no UNIQUE or PRIMARY KEY constraint, no
 * foreign-key reference, and required columns need a static default.
 */
function canAlterTableAddColumn(column) {
  if (column.schema.unique) return false;
  // Defaults expressed as serialized SQL are rejected (see hasRuntimeDefault).
  if (hasRuntimeDefault(column)) return false;
  // A NOT NULL column without a default cannot be added to existing rows.
  if (!column.schema.optional && !hasDefault(column)) return false;
  if (hasPrimaryKey(column)) return false;
  if (getReferencesConfig(column)) return false;
  return true;
}
/**
 * Whether a column can be removed with `ALTER TABLE ... DROP COLUMN`.
 * SQLite refuses to drop unique and primary-key columns in place; those
 * require a full table recreate instead.
 */
function canAlterTableDropColumn(column) {
  if (hasPrimaryKey(column)) return false;
  if (column.schema.unique) return false;
  return true;
}
/**
 * Determines whether recreating the table (create temp -> copy -> rename)
 * can preserve the existing rows.
 *
 * Data loss occurs when a required column is added with no default value
 * (existing rows would have nothing to put in it), or when a column's type
 * changed with no automatic conversion (see canChangeTypeWithoutQuery).
 *
 * @returns `{ dataLoss: false }` or `{ dataLoss: true, columnName, reason }`
 */
function canRecreateTableWithoutDataLoss(added, updated) {
  for (const [columnName, a] of Object.entries(added)) {
    // Primary keys of type "number" are exempt — presumably they can be
    // auto-populated by the database (rowid); TODO confirm.
    if (hasPrimaryKey(a) && a.type !== "number" && !hasDefault(a)) {
      return { dataLoss: true, columnName, reason: "added-required" };
    }
    if (!a.schema.optional && !hasDefault(a)) {
      return { dataLoss: true, columnName, reason: "added-required" };
    }
  }
  for (const [columnName, u] of Object.entries(updated)) {
    if (u.old.type !== u.new.type && !canChangeTypeWithoutQuery(u.old, u.new)) {
      return { dataLoss: true, columnName, reason: "updated-type" };
    }
  }
  return { dataLoss: false };
}
/** Entries whose key exists only in `newObj`. */
function getAdded(oldObj, newObj) {
  const added = {};
  for (const key of Object.keys(newObj)) {
    if (key in oldObj) continue;
    added[key] = newObj[key];
  }
  return added;
}
/** Entries whose key exists only in `oldObj`. */
function getDropped(oldObj, newObj) {
  const dropped = {};
  for (const key of Object.keys(oldObj)) {
    if (key in newObj) continue;
    dropped[key] = oldObj[key];
  }
  return dropped;
}
/** Entries present (truthy) on both sides whose values differ per deepDiff. */
function getUpdated(oldObj, newObj) {
  const updated = {};
  for (const key of Object.keys(newObj)) {
    const previous = oldObj[key];
    if (!previous) continue;
    if (deepDiff(previous, newObj[key])) {
      updated[key] = newObj[key];
    }
  }
  return updated;
}
/**
 * Collects columns whose definition changed between snapshots, keyed by
 * column name, as `{ old, new }` pairs.
 *
 * When only the type changed and that change needs no data conversion
 * (canChangeTypeWithoutQuery), the old column is re-parsed under the new
 * type first; if the rest of the schema then matches, the column is not
 * reported as updated at all.
 */
function getUpdatedColumns(oldColumns, newColumns) {
  const updated = {};
  for (const [key, newColumn] of Object.entries(newColumns)) {
    let oldColumn = oldColumns[key];
    if (!oldColumn) continue;
    if (oldColumn.type !== newColumn.type && canChangeTypeWithoutQuery(oldColumn, newColumn)) {
      // Re-validate the old column as if it already had the new type.
      const asNewColumn = columnSchema.safeParse({
        type: newColumn.type,
        schema: oldColumn.schema
      });
      if (asNewColumn.success) {
        oldColumn = asNewColumn.data;
      }
    }
    const diff = deepDiff(oldColumn, newColumn);
    if (diff) {
      updated[key] = { old: oldColumn, new: newColumn };
    }
  }
  return updated;
}
// Directed column-type changes the migration treats as safe without running
// a data-conversion query.
const typeChangesWithoutQuery = [
  { from: "boolean", to: "number" },
  { from: "date", to: "text" },
  { from: "json", to: "text" }
];
/** True when `oldColumn.type -> newColumn.type` is in the safe list above. */
function canChangeTypeWithoutQuery(oldColumn, newColumn) {
  for (const rule of typeChangesWithoutQuery) {
    if (rule.from === oldColumn.type && rule.to === newColumn.type) {
      return true;
    }
  }
  return false;
}
/**
 * True when the column's default is serialized SQL — i.e. a default the
 * database evaluates at runtime rather than a plain literal value.
 */
function hasRuntimeDefault(column) {
  const defaultValue = column.schema.default;
  return Boolean(defaultValue && isSerializedSQL(defaultValue));
}
/**
 * Thin adapter: converts the `{ url, token }` option bag into the
 * `(token, url)` positional order expected by getDbCurrentSnapshot.
 */
function getProductionCurrentSnapshot(info) {
  const { url, token } = info;
  return getDbCurrentSnapshot(token, url);
}
/**
 * Fetches the most recent schema snapshot stored in the database's
 * `_astro_db_snapshot` table, or `undefined` when that table does not exist
 * yet (i.e. the schema has never been pushed).
 */
async function getDbCurrentSnapshot(appToken, remoteUrl) {
  const client = createClient({
    token: appToken,
    url: remoteUrl
  });
  try {
    const res = await client.get(
      // Latest snapshot
      sql`select snapshot from _astro_db_snapshot order by id desc limit 1;`
    );
    return JSON.parse(res.snapshot);
  } catch (error) {
    if (isDbError(error) && // If the schema was never pushed to the database yet the table won't exist.
    // Treat a missing snapshot table as an empty table.
    // When connecting to a remote database in that condition
    // the query will fail with the following error code and message.
    (error.code === "SQLITE_UNKNOWN" && error.message === "SQLITE_UNKNOWN: SQLite error: no such table: _astro_db_snapshot" || // When connecting to a local or in-memory database that does not have a snapshot table yet
    // the query will fail with the following error code and message.
    error.code === "SQLITE_ERROR" && error.message === "SQLITE_ERROR: no such table: _astro_db_snapshot")) {
      return;
    }
    throw error;
  }
}
/**
 * DROP TABLE IF EXISTS statements for every table in the snapshot, emitted
 * in reverse key order (matching the original unshift-built list).
 */
function getDropTableQueriesForSnapshot(snapshot) {
  const tableNames = Object.keys(snapshot.schema);
  tableNames.reverse();
  return tableNames.map((tableName) => getDropTableIfExistsQuery(tableName));
}
/**
 * Snapshot of the current config's tables. The JSON round-trip yields a
 * plain deep copy detached from the live config object.
 */
function createCurrentSnapshot({ tables = {} }) {
  const serialized = JSON.stringify(tables);
  return { version: MIGRATION_VERSION, schema: JSON.parse(serialized) };
}
/** Baseline snapshot used when no schema has ever been stored. */
function createEmptySnapshot() {
  const schema = {};
  return { version: MIGRATION_VERSION, schema };
}
/**
 * Formats the data-loss confirmations produced by getTableChangeQueries into
 * a user-facing message: each issue is numbered, followed by instructions
 * and the `--force-reset` escape hatch.
 *
 * @param confirmations human-readable warnings to display
 * @param isColor when false, ANSI escape codes are stripped from the result
 */
function formatDataLossMessage(confirmations, isColor = true) {
  const messages = [];
  messages.push(color.red("\u2716 We found some schema changes that cannot be handled automatically:"));
  messages.push(``);
  messages.push(...confirmations.map((m, i) => color.red(` (${i + 1}) `) + m));
  messages.push(``);
  messages.push(`To resolve, revert these changes or update your schema, and re-run the command.`);
  messages.push(
    `You may also run 'astro db push --force-reset' to ignore all warnings and force-push your local database schema to production instead. All data will be lost and the database will be reset.`
  );
  let finalMessage = messages.join("\n");
  if (!isColor) {
    finalMessage = stripVTControlCharacters(finalMessage);
  }
  return finalMessage;
}
export {
createCurrentSnapshot,
createEmptySnapshot,
formatDataLossMessage,
getMigrationQueries,
getProductionCurrentSnapshot,
getTableChangeQueries
};

View File

@@ -0,0 +1,11 @@
/**
* Uses implementation from Astro core
* @see https://github.com/withastro/astro/blob/main/packages/astro/src/core/messages.ts#L303
*/
export declare function printHelp({ commandName, headline, usage, tables, description, }: {
    /** CLI command name, e.g. "astro db". */
    commandName: string;
    /** Short tagline printed next to the version badge. */
    headline?: string;
    /** Usage string printed after the command name. */
    usage?: string;
    /** Help sections: section title -> [command, help] rows. */
    tables?: Record<string, [command: string, help: string][]>;
    /** Free-form trailing text. */
    description?: string;
}): void;

View File

@@ -0,0 +1,55 @@
import colors from "piccolore";
/**
 * Renders a formatted CLI help screen (adapted from Astro core's printHelp;
 * see the @see link in the .d.ts).
 *
 * @param commandName e.g. "astro db"
 * @param headline    tagline shown next to the version badge
 * @param usage       usage string appended after the command name
 * @param tables      help sections: title -> [command, help] rows
 * @param description free-form trailing text
 */
function printHelp({
  commandName,
  headline,
  usage,
  tables,
  description
}) {
  // Small helpers for assembling the output line by line.
  const linebreak = () => "";
  const title = (label) => ` ${colors.bgWhite(colors.black(` ${label} `))}`;
  const table = (rows, { padding }) => {
    // Narrow terminals get the command and its help on separate lines.
    const split = process.stdout.columns < 60;
    let raw = "";
    for (const row of rows) {
      if (split) {
        raw += ` ${row[0]}
`;
      } else {
        raw += `${`${row[0]}`.padStart(padding)}`;
      }
      raw += " " + colors.dim(row[1]) + "\n";
    }
    return raw.slice(0, -1);
  };
  let message = [];
  if (headline) {
    message.push(
      linebreak(),
      ` ${colors.bgGreen(colors.black(` ${commandName} `))} ${colors.green(
        `v${"0.19.0"}`
      )} ${headline}`
    );
  }
  if (usage) {
    message.push(linebreak(), ` ${colors.green(commandName)} ${colors.bold(usage)}`);
  }
  if (tables) {
    // Transpiler artifact: the inner function declaration was renamed to
    // `calculateTablePadding2` and aliased back through `var`.
    let calculateTablePadding2 = function(rows) {
      // Longest command in the section plus two spaces of breathing room.
      return rows.reduce((val, [first]) => Math.max(val, first.length), 0) + 2;
    };
    var calculateTablePadding = calculateTablePadding2;
    const tableEntries = Object.entries(tables);
    // One shared padding width across all sections keeps columns aligned.
    const padding = Math.max(...tableEntries.map(([, rows]) => calculateTablePadding2(rows)));
    for (const [tableTitle, tableRows] of tableEntries) {
      message.push(linebreak(), title(tableTitle), table(tableRows, { padding }));
    }
  }
  if (description) {
    message.push(linebreak(), `${description}`);
  }
  console.log(message.join("\n") + "\n");
}
export {
printHelp
};

View File

@@ -0,0 +1,12 @@
// Ambient declarations for ../consts.js; see that module for value semantics.
export declare const RUNTIME_IMPORT: string;
export declare const RUNTIME_VIRTUAL_IMPORT: string;
export declare const VIRTUAL_MODULE_ID = "astro:db";
export declare const DB_PATH = ".astro/content.db";
export declare const CONFIG_FILE_NAMES: string[];
export declare const MIGRATION_VERSION = "2024-03-12";
export declare const VIRTUAL_CLIENT_MODULE_ID = "virtual:astro:db-client";
// Entry-point module paths for the node / web / local libSQL clients.
export declare const DB_CLIENTS: {
    node: string;
    web: string;
    local: string;
};

View File

@@ -0,0 +1,26 @@
import { readFileSync } from "node:fs";
// Package name read from package.json so forks/renames stay consistent.
const PACKAGE_NAME = JSON.parse(
  readFileSync(new URL("../../package.json", import.meta.url), "utf8")
).name;
// JSON.stringify yields ready-to-embed, quoted import specifiers.
const RUNTIME_IMPORT = JSON.stringify(`${PACKAGE_NAME}/runtime`);
const RUNTIME_VIRTUAL_IMPORT = JSON.stringify(`${PACKAGE_NAME}/dist/runtime/virtual.js`);
// Module id users import from: `import { db } from 'astro:db'`.
const VIRTUAL_MODULE_ID = "astro:db";
// Local development database location, relative to the project root.
const DB_PATH = ".astro/content.db";
const CONFIG_FILE_NAMES = ["config.ts", "config.js", "config.mts", "config.mjs"];
// Version tag embedded in stored schema snapshots.
const MIGRATION_VERSION = "2024-03-12";
const VIRTUAL_CLIENT_MODULE_ID = "virtual:astro:db-client";
// Entry points for the three libSQL client flavors (see vite-plugin-db-client).
const DB_CLIENTS = {
  node: `${PACKAGE_NAME}/db-client/libsql-node.js`,
  web: `${PACKAGE_NAME}/db-client/libsql-web.js`,
  local: `${PACKAGE_NAME}/db-client/libsql-local.js`
};
export {
CONFIG_FILE_NAMES,
DB_CLIENTS,
DB_PATH,
MIGRATION_VERSION,
RUNTIME_IMPORT,
RUNTIME_VIRTUAL_IMPORT,
VIRTUAL_CLIENT_MODULE_ID,
VIRTUAL_MODULE_ID
};

View File

@@ -0,0 +1,6 @@
import { type LibSQLDatabase } from 'drizzle-orm/libsql';
type LocalDbClientOptions = {
    /** file: URL of the local database file. */
    url: string;
};
/** Creates a drizzle client over a local, file-backed libSQL database. */
export declare function createClient(options: LocalDbClientOptions): LibSQLDatabase;
export {};

View File

@@ -0,0 +1,12 @@
import { createClient as createLibsqlClient } from "@libsql/client";
import { drizzle as drizzleLibsql } from "drizzle-orm/libsql";
// WebContainer hosts expose `process.versions.webcontainer`.
const isWebContainer = Boolean(process.versions?.webcontainer);
/**
 * Creates a drizzle client over a local libSQL database. In WebContainer
 * environments the database is pinned to a fixed relative file; everywhere
 * else the caller-provided URL is used as-is.
 */
function createClient(options) {
  const url = isWebContainer ? "file:content.db" : options.url;
  return drizzleLibsql(createLibsqlClient({ url }));
}
export {
createClient
};

View File

@@ -0,0 +1,8 @@
type RemoteDbClientOptions = {
    /** Auth token forwarded to libSQL as `authToken`. */
    token: string;
    /** Database URL; query-string params become client options. */
    url: string;
};
/** Creates a drizzle client for a libSQL database using the Node driver. */
export declare function createClient(opts: RemoteDbClientOptions): import("drizzle-orm/libsql").LibSQLDatabase<Record<string, never>> & {
    $client: import("@libsql/client").Client;
};
export {};

View File

@@ -0,0 +1,21 @@
import { createClient as createLibsqlClient } from "@libsql/client";
import { drizzle as drizzleLibsql } from "drizzle-orm/libsql";
import { parseLibSQLConfig } from "./utils.js";
/**
 * Creates a drizzle client for a libSQL database (Node driver). Connection
 * options are read from the URL's query string, stripped from the URL, then
 * validated by parseLibSQLConfig.
 */
function createClient(opts) {
  const { token, url: rawUrl } = opts;
  let parsedUrl = new URL(rawUrl);
  const options = Object.fromEntries(parsedUrl.searchParams.entries());
  parsedUrl.search = "";
  let url = parsedUrl.toString();
  if (parsedUrl.protocol === "memory:") {
    // `memory:` URLs map to libSQL's in-memory database identifier.
    url = ":memory:";
  } else if (parsedUrl.protocol === "file:" && parsedUrl.pathname.startsWith("/") && !rawUrl.startsWith("file:/")) {
    // The WHATWG URL parser absolutizes `file:relative.db`; when the caller
    // wrote a relative file URL, strip the injected leading slash back off.
    url = "file:" + parsedUrl.pathname.substring(1);
  }
  const libSQLOptions = parseLibSQLConfig(options);
  const client = createLibsqlClient({ ...libSQLOptions, url, authToken: token });
  return drizzleLibsql(client);
}
export {
createClient
};

View File

@@ -0,0 +1,8 @@
type RemoteDbClientOptions = {
    /** Auth token forwarded to libSQL as `authToken`. */
    token: string;
    /** Database URL (http:, https: or libsql: only in web mode). */
    url: string;
};
/** Creates a drizzle client using the fetch-based libSQL web driver. */
export declare function createClient(opts: RemoteDbClientOptions): import("drizzle-orm/libsql").LibSQLDatabase<Record<string, never>> & {
    $client: import("@libsql/client/web").Client;
};
export {};

View File

@@ -0,0 +1,22 @@
import { createClient as createLibsqlClient } from "@libsql/client/web";
import { drizzle as drizzleLibsql } from "drizzle-orm/libsql/web";
import { parseLibSQLConfig } from "./utils.js";
/**
 * Creates a drizzle client using the fetch-based libSQL web driver.
 * Options are read from the URL's query string; only network protocols are
 * accepted here (no file:/memory: — those need the Node driver).
 */
function createClient(opts) {
  const { token, url: rawUrl } = opts;
  let parsedUrl = new URL(rawUrl);
  const options = Object.fromEntries(parsedUrl.searchParams.entries());
  parsedUrl.search = "";
  let url = parsedUrl.toString();
  const supportedProtocols = ["http:", "https:", "libsql:"];
  if (!supportedProtocols.includes(parsedUrl.protocol)) {
    throw new Error(
      `Unsupported protocol "${parsedUrl.protocol}" for libSQL web client. Supported protocols are: ${supportedProtocols.join(", ")}.`
    );
  }
  const libSQLOptions = parseLibSQLConfig(options);
  const client = createLibsqlClient({ ...libSQLOptions, url, authToken: token });
  return drizzleLibsql(client);
}
export {
createClient
};

View File

@@ -0,0 +1,2 @@
import type { Config as LibSQLConfig } from '@libsql/client';
/** Parses/coerces URL query-string options into a typed libSQL client config. */
export declare const parseLibSQLConfig: (config: Record<string, string>) => Partial<LibSQLConfig>;

View File

@@ -0,0 +1,46 @@
import z from "zod";
const rawLibSQLOptions = z.record(z.string());
const parseNumber = (value) => z.coerce.number().parse(value);
// BUG FIX: this previously used z.coerce.boolean(), which wraps Boolean(),
// and Boolean("false") === true — so flags like `tls=false` could never be
// disabled. Callers only pass values from `booleanValues`, so a strict
// string comparison is both correct and sufficient.
const parseBoolean = (value) => value === "true";
const booleanValues = ["true", "false"];
// Boolean query-string flags are lenient: an explicit "true"/"false" is
// honored; any other value (including the empty string from a value-less
// param) enables the flag.
const parseOptionalBoolean = (value) => {
  if (booleanValues.includes(value)) {
    return parseBoolean(value);
  }
  return true;
};
// Transforms raw query-string options (all strings) into typed libSQL config
// values. Only the keys listed below are recognized; anything else is dropped.
const libSQLConfigTransformed = rawLibSQLOptions.transform((raw) => {
  const parsed = {};
  for (const [key, value] of Object.entries(raw)) {
    switch (key) {
      // Numeric options
      case "syncInterval":
      case "concurrency":
        parsed[key] = parseNumber(value);
        break;
      // Boolean flags, parsed leniently (see parseOptionalBoolean)
      case "readYourWrites":
      case "offline":
      case "tls":
        parsed[key] = parseOptionalBoolean(value);
        break;
      // Pass-through string options
      case "authToken":
      case "encryptionKey":
      case "syncUrl":
        parsed[key] = value;
        break;
    }
  }
  return parsed;
});
/**
 * Validates raw string options against the libSQL config schema. Zod
 * validation failures are re-thrown as a plain Error with a readable
 * message; any other failure propagates untouched.
 */
const parseLibSQLConfig = (config) => {
  try {
    return libSQLConfigTransformed.parse(config);
  } catch (error) {
    if (!(error instanceof z.ZodError)) {
      throw error;
    }
    const details = error.errors.map((issue) => issue.message).join(", ");
    throw new Error(`Invalid LibSQL config: ${details}`);
  }
};
export {
parseLibSQLConfig
};

View File

@@ -0,0 +1,8 @@
// CLI-facing error message strings and factories for `astro db` commands.
export declare const MISSING_EXECUTE_PATH_ERROR: string;
export declare const RENAME_TABLE_ERROR: (oldTable: string, newTable: string) => string;
export declare const RENAME_COLUMN_ERROR: (oldSelector: string, newSelector: string) => string;
export declare const FILE_NOT_FOUND_ERROR: (path: string) => string;
export declare const SHELL_QUERY_MISSING_ERROR: string;
export declare const EXEC_ERROR: (error: string) => string;
export declare const EXEC_DEFAULT_EXPORT_ERROR: (fileName: string) => string;
export declare const INTEGRATION_TABLE_CONFLICT_ERROR: (integrationName: string, tableName: string, isUserConflict: boolean) => string;

View File

@@ -0,0 +1,48 @@
import colors from "piccolore";
// `astro db execute` was run without a file argument.
const MISSING_EXECUTE_PATH_ERROR = `${colors.red(
  "\u25B6 No file path provided."
)} Provide a path by running ${colors.cyan("astro db execute <path>")}
`;
// A table was added and another removed in the same push — likely a rename,
// which cannot be detected automatically.
const RENAME_TABLE_ERROR = (oldTable, newTable) => {
  return colors.red("\u25B6 Potential table rename detected: " + oldTable + " -> " + newTable) + `
You cannot add and remove tables in the same schema update batch.
1. Use "deprecated: true" to deprecate a table before renaming.
2. Use "--force-reset" to ignore this warning and reset the database (deleting all of your data).
Visit https://docs.astro.build/en/guides/astro-db/#renaming-tables to learn more.`;
};
// Same situation as above, but for columns within a single table.
const RENAME_COLUMN_ERROR = (oldSelector, newSelector) => {
  return colors.red("\u25B6 Potential column rename detected: " + oldSelector + ", " + newSelector) + `
You cannot add and remove columns in the same table.
To resolve, add a 'deprecated: true' flag to '${oldSelector}' instead.`;
};
const FILE_NOT_FOUND_ERROR = (path) => `${colors.red("\u25B6 File not found:")} ${colors.bold(path)}
`;
// `astro db shell` was run without --query.
const SHELL_QUERY_MISSING_ERROR = `${colors.red(
  "\u25B6 Please provide a query to execute using the --query flag."
)}
`;
// Generic wrapper for failures raised while running a user-provided file.
const EXEC_ERROR = (error) => {
  return `${colors.red(`Error while executing file:`)}
${error}`;
};
const EXEC_DEFAULT_EXPORT_ERROR = (fileName) => {
  return EXEC_ERROR(`Missing default function export in ${colors.bold(fileName)}`);
};
// Raised when two schema sources declare the same table name.
const INTEGRATION_TABLE_CONFLICT_ERROR = (integrationName, tableName, isUserConflict) => {
  // BUG FIX: `a + cond ? x : y` parses as `(a + cond) ? x : y`. Because a
  // non-empty string concatenated with a boolean is always truthy, the red
  // heading was discarded and the user-conflict branch was always returned.
  // Concatenating the (parenthesized) conditional restores both the heading
  // and the correct detail line.
  const heading = colors.red("\u25B6 Conflicting table name in integration " + colors.bold(integrationName));
  const detail = isUserConflict ? `
A user-defined table named ${colors.bold(tableName)} already exists` : `
Another integration already added a table named ${colors.bold(tableName)}`;
  return heading + detail;
};
export {
EXEC_DEFAULT_EXPORT_ERROR,
EXEC_ERROR,
FILE_NOT_FOUND_ERROR,
INTEGRATION_TABLE_CONFLICT_ERROR,
MISSING_EXECUTE_PATH_ERROR,
RENAME_COLUMN_ERROR,
RENAME_TABLE_ERROR,
SHELL_QUERY_MISSING_ERROR
};

View File

@@ -0,0 +1,6 @@
/**
* This is a modified version of Astro's error map. source:
* https://github.com/withastro/astro/blob/main/packages/astro/src/content/error-map.ts
*/
import type { z } from 'astro/zod';
/** Zod error map producing `**key.path**: message` formatted errors. */
export declare const errorMap: z.ZodErrorMap;

View File

@@ -0,0 +1,77 @@
/**
 * Custom Zod error map that formats validation failures with bolded key
 * paths, e.g. `**tables.users**: message`.
 *
 * Union errors get special handling: invalid-type/invalid-literal issues are
 * grouped by key path and merged into one "expected a | b" style line, and
 * an issue is only surfaced when it failed in every branch of the union.
 */
const errorMap = (baseError, ctx) => {
  const baseErrorPath = flattenErrorPath(baseError.path);
  if (baseError.code === "invalid_union") {
    // Bucket type/literal issues from all union branches by their key path,
    // accumulating every branch's expected value per path.
    const typeOrLiteralErrByPath = /* @__PURE__ */ new Map();
    for (const unionError of baseError.unionErrors.flatMap((e) => e.errors)) {
      if (unionError.code === "invalid_type" || unionError.code === "invalid_literal") {
        const flattenedErrorPath = flattenErrorPath(unionError.path);
        const typeOrLiteralErr = typeOrLiteralErrByPath.get(flattenedErrorPath);
        if (typeOrLiteralErr) {
          typeOrLiteralErr.expected.push(unionError.expected);
        } else {
          typeOrLiteralErrByPath.set(flattenedErrorPath, {
            code: unionError.code,
            received: unionError.received,
            expected: [unionError.expected]
          });
        }
      }
    }
    const messages = [
      prefix(
        baseErrorPath,
        typeOrLiteralErrByPath.size ? "Did not match union:" : "Did not match union."
      )
    ];
    return {
      // Only show issues that failed in every branch of the union.
      message: messages.concat(
        [...typeOrLiteralErrByPath.entries()].filter(([, error]) => error.expected.length === baseError.unionErrors.length).map(
          ([key, error]) => (
            // Avoid printing the key again if it's a base error
            key === baseErrorPath ? `> ${getTypeOrLiteralMsg(error)}` : `> ${prefix(key, getTypeOrLiteralMsg(error))}`
          )
        )
      ).join("\n")
    };
  }
  if (baseError.code === "invalid_literal" || baseError.code === "invalid_type") {
    return {
      message: prefix(
        baseErrorPath,
        getTypeOrLiteralMsg({
          code: baseError.code,
          received: baseError.received,
          expected: [baseError.expected]
        })
      )
    };
  } else if (baseError.message) {
    return { message: prefix(baseErrorPath, baseError.message) };
  } else {
    // Fall back to Zod's default message for this issue.
    return { message: prefix(baseErrorPath, ctx.defaultError) };
  }
};
/** Renders one invalid-type/invalid-literal issue as a human-readable line. */
const getTypeOrLiteralMsg = (error) => {
  // A received value of "undefined" means the key was missing entirely.
  if (error.received === "undefined") return "Required";
  const expectedDeduped = new Set(error.expected);
  const expectedList = unionExpectedVals(expectedDeduped);
  const receivedJson = JSON.stringify(error.received);
  if (error.code === "invalid_type") {
    return `Expected type \`${expectedList}\`, received ${receivedJson}`;
  }
  if (error.code === "invalid_literal") {
    return `Expected \`${expectedList}\`, received ${receivedJson}`;
  }
};
/** Bolds the dotted key path; root-level errors carry no prefix at all. */
const prefix = (key, msg) => {
  if (key.length === 0) return msg;
  return `**${key}**: ${msg}`;
};
/** Renders a set of expected values as `"a" | "b" | "c"`. */
const unionExpectedVals = (expectedVals) => {
  const parts = [];
  for (const expectedVal of expectedVals) {
    const rendered = JSON.stringify(expectedVal);
    parts.push(parts.length === 0 ? rendered : ` | ${rendered}`);
  }
  return parts.join("");
};
/** Zod reports paths as arrays of keys; join them into a dotted string. */
const flattenErrorPath = (errorPath) => {
  return errorPath.join(".");
};
export {
errorMap
};

View File

@@ -0,0 +1,2 @@
import type { AstroIntegration } from 'astro';
/** Integration backing `?fileurl` imports (dev file URL / build-emitted asset). */
export declare function fileURLIntegration(): AstroIntegration;

View File

@@ -0,0 +1,81 @@
import fs from "node:fs";
import path from "node:path";
import { pathToFileURL } from "node:url";
/**
 * Ensures the destination directory exists, then MOVES (renames, despite the
 * name) the file from `fromUrl` to `toUrl`.
 */
async function copyFile(toDir, fromUrl, toUrl) {
  const { mkdir, rename } = fs.promises;
  await mkdir(toDir, { recursive: true });
  await rename(fromUrl, toUrl);
}
/**
 * Integration backing `?fileurl` imports. In dev, the import resolves to a
 * plain file:// URL on disk; in builds, the file is emitted as a Rollup
 * asset and the import resolves to the emitted asset's URL.
 */
function fileURLIntegration() {
  // Output file names of every emitted asset, collected across bundles.
  const fileNames = [];
  function createVitePlugin(command) {
    let referenceIds = [];
    return {
      name: "@astrojs/db/file-url",
      enforce: "pre",
      async load(id) {
        if (id.endsWith("?fileurl")) {
          const filePath = id.slice(0, id.indexOf("?"));
          if (command === "build") {
            const data = await fs.promises.readFile(filePath);
            const name = path.basename(filePath);
            const referenceId = this.emitFile({
              name,
              source: data,
              type: "asset"
            });
            referenceIds.push(referenceId);
            // Rollup substitutes the final asset URL for this placeholder.
            return `export default import.meta.ROLLUP_FILE_URL_${referenceId};`;
          } else {
            return `export default new URL(${JSON.stringify(pathToFileURL(filePath).toString())})`;
          }
        }
      },
      generateBundle() {
        // Record the resolved output names, then reset for the next bundle.
        for (const referenceId of referenceIds) {
          fileNames.push(this.getFileName(referenceId));
        }
        referenceIds = [];
      }
    };
  }
  let config;
  return {
    name: "@astrojs/db/file-url",
    hooks: {
      "astro:config:setup"({ updateConfig, command }) {
        updateConfig({
          vite: {
            plugins: [createVitePlugin(command)]
          }
        });
      },
      "astro:config:done": ({ config: _config }) => {
        config = _config;
      },
      async "astro:build:done"() {
        if (config.output === "static") {
          // Static output: delete the emitted db assets from the final site
          // (NOTE(review): presumably unneeded at runtime — confirm).
          const unlinks = [];
          for (const fileName of fileNames) {
            const url = new URL(fileName, config.outDir);
            unlinks.push(fs.promises.unlink(url));
          }
          await Promise.all(unlinks);
          // rmdir only succeeds once the assets dir is empty; failures are
          // deliberately swallowed.
          const assetDir = new URL(config.build.assets, config.outDir);
          await fs.promises.rmdir(assetDir).catch(() => []);
        } else {
          // Server output: move the assets from the client build directory
          // into the server build directory.
          const moves = [];
          for (const fileName of fileNames) {
            const fromUrl = new URL(fileName, config.build.client);
            const toUrl = new URL(fileName, config.build.server);
            const toDir = new URL("./", toUrl);
            moves.push(copyFile(toDir, fromUrl, toUrl));
          }
          await Promise.all(moves);
        }
      }
    }
  };
}
export {
fileURLIntegration
};

View File

@@ -0,0 +1,19 @@
import type { AstroIntegration } from 'astro';
import { z } from 'zod';
// Zod schema for the options accepted by the astro:db integration.
declare const astroDBConfigSchema: z.ZodDefault<z.ZodOptional<z.ZodObject<{
    /**
     * Sets the mode of the underlying `@libsql/client` connection.
     *
     * In most cases, the default 'node' mode is sufficient. On platforms like Cloudflare, or Deno, you may need to set this to 'web'.
     *
     * @default 'node'
     */
    mode: z.ZodDefault<z.ZodOptional<z.ZodUnion<[z.ZodLiteral<"node">, z.ZodLiteral<"web">]>>>;
}, "strip", z.ZodTypeAny, {
    mode: "node" | "web";
}, {
    mode?: "node" | "web" | undefined;
}>>>;
export type AstroDBConfig = z.infer<typeof astroDBConfigSchema>;
/** Returns the astro:db integration plus its companion file-url integration. */
export declare function integration(options?: AstroDBConfig): AstroIntegration[];
export {};

View File

@@ -0,0 +1,215 @@
import { existsSync } from "node:fs";
import { mkdir, writeFile } from "node:fs/promises";
import { dirname } from "node:path";
import { fileURLToPath } from "node:url";
import colors from "piccolore";
import {
createServer,
loadEnv,
mergeConfig
} from "vite";
import parseArgs from "yargs-parser";
import { z } from "zod";
import { AstroDbError, isDbError } from "../../runtime/utils.js";
import { CONFIG_FILE_NAMES, DB_PATH, VIRTUAL_MODULE_ID } from "../consts.js";
import { EXEC_DEFAULT_EXPORT_ERROR, EXEC_ERROR } from "../errors.js";
import { resolveDbConfig } from "../load-file.js";
import { SEED_DEV_FILE_NAME } from "../queries.js";
import { getDbDirectoryUrl, getRemoteDatabaseInfo } from "../utils.js";
import { fileURLIntegration } from "./file-url.js";
import { getDtsContent } from "./typegen.js";
import {
vitePluginDb
} from "./vite-plugin-db.js";
import { vitePluginDbClient } from "./vite-plugin-db-client.js";
// User-facing options for the astro:db integration, validated at setup time.
const astroDBConfigSchema = z.object({
  /**
   * Sets the mode of the underlying `@libsql/client` connection.
   *
   * In most cases, the default 'node' mode is sufficient. On platforms like Cloudflare, or Deno, you may need to set this to 'web'.
   *
   * @default 'node'
   */
  mode: z.union([z.literal("node"), z.literal("web")]).optional().default("node")
}).optional().default({});
/**
 * The core `astro:db` integration: registers the db Vite plugins, creates
 * the local database file (or connects to a remote one with --remote),
 * injects generated `db.d.ts` types, restarts the dev server when db config
 * changes, and wires seed-file execution for both dev and build.
 */
function astroDBIntegration(options) {
  const resolvedConfig = astroDBConfigSchema.parse(options);
  let connectToRemote = false;
  let configFileDependencies = [];
  let root;
  let tempViteServer;
  // tables/seedFiles/seedHandler are late-bound: the throwing defaults below
  // are replaced once real values exist (astro:config:done and
  // astro:server:setup), so any premature access fails loudly.
  let tables = {
    get() {
      throw new Error("[astro:db] INTERNAL Tables not loaded yet");
    }
  };
  let seedFiles = {
    get() {
      throw new Error("[astro:db] INTERNAL Seed files not loaded yet");
    }
  };
  let seedHandler = {
    execute: () => {
      throw new Error("[astro:db] INTERNAL Seed handler not loaded yet");
    },
    inProgress: false
  };
  let command;
  let finalBuildOutput;
  return {
    name: "astro:db",
    hooks: {
      "astro:config:setup": async ({ updateConfig, config, command: _command, logger }) => {
        command = _command;
        root = config.root;
        // `astro preview` serves prebuilt output; nothing to wire up.
        if (command === "preview") return;
        let dbPlugin = void 0;
        // slice(3) skips the first three argv entries before the CLI flags
        // (runtime, bin and subcommand — presumably; verify against the CLI).
        const args = parseArgs(process.argv.slice(3));
        connectToRemote = process.env.ASTRO_INTERNAL_TEST_REMOTE || args["remote"];
        const dbClientPlugin = vitePluginDbClient({
          connectToRemote,
          mode: resolvedConfig.mode
        });
        // Remote mode passes an app token; local mode passes seed files and
        // a logger instead.
        if (connectToRemote) {
          dbPlugin = vitePluginDb({
            connectToRemote,
            appToken: getRemoteDatabaseInfo().token,
            tables,
            root: config.root,
            srcDir: config.srcDir,
            output: config.output,
            seedHandler
          });
        } else {
          dbPlugin = vitePluginDb({
            connectToRemote,
            tables,
            seedFiles,
            root: config.root,
            srcDir: config.srcDir,
            output: config.output,
            logger,
            seedHandler
          });
        }
        updateConfig({
          vite: {
            assetsInclude: [DB_PATH],
            plugins: [dbClientPlugin, dbPlugin]
          }
        });
      },
      "astro:config:done": async ({ config, injectTypes, buildOutput }) => {
        if (command === "preview") return;
        finalBuildOutput = buildOutput;
        // Resolve the user's db config plus any integration-provided tables
        // and seed paths, then flip the late-bound getters to real values.
        const { dbConfig, dependencies, integrationSeedPaths } = await resolveDbConfig(config);
        tables.get = () => dbConfig.tables;
        seedFiles.get = () => integrationSeedPaths;
        configFileDependencies = dependencies;
        // Make sure an (empty) local database file exists before first use.
        const localDbUrl = new URL(DB_PATH, config.root);
        if (!connectToRemote && !existsSync(localDbUrl)) {
          await mkdir(dirname(fileURLToPath(localDbUrl)), { recursive: true });
          await writeFile(localDbUrl, "");
        }
        injectTypes({
          filename: "db.d.ts",
          content: getDtsContent(tables.get() ?? {})
        });
      },
      "astro:server:setup": async ({ server, logger }) => {
        // In dev, seeds run through the live dev server.
        seedHandler.execute = async (fileUrl) => {
          await executeSeedFile({ fileUrl, viteServer: server });
        };
        // Restart the dev server whenever the db config (or anything it
        // imports) changes on disk.
        const filesToWatch = [
          ...CONFIG_FILE_NAMES.map((c) => new URL(c, getDbDirectoryUrl(root))),
          ...configFileDependencies.map((c) => new URL(c, root))
        ];
        server.watcher.on("all", (_event, relativeEntry) => {
          const entry = new URL(relativeEntry, root);
          if (filesToWatch.some((f) => entry.href === f.href)) {
            server.restart();
          }
        });
        // Deferred 100ms — presumably so the message lands after Astro's own
        // startup output. Loading the virtual module eagerly triggers the
        // seed logic when seed files exist.
        setTimeout(() => {
          logger.info(
            connectToRemote ? "Connected to remote database." : "New local database created."
          );
          if (connectToRemote) return;
          const localSeedPaths = SEED_DEV_FILE_NAME.map(
            (name) => new URL(name, getDbDirectoryUrl(root))
          );
          if (seedFiles.get().length || localSeedPaths.find((path) => existsSync(path))) {
            server.ssrLoadModule(VIRTUAL_MODULE_ID).catch((e) => {
              logger.error(e instanceof Error ? e.message : String(e));
            });
          }
        }, 100);
      },
      "astro:build:start": async ({ logger }) => {
        // A server build without any real database target is almost always a
        // mistake; fail early with a documentation hint.
        if (!connectToRemote && !databaseFileEnvDefined() && finalBuildOutput === "server") {
          const message = `Attempting to build without the --remote flag or the ASTRO_DATABASE_FILE environment variable defined. You probably want to pass --remote to astro build.`;
          const hint = "Learn more connecting to libSQL: https://docs.astro.build/en/guides/astro-db/#connect-a-libsql-database-for-production";
          throw new AstroDbError(message, hint);
        }
        logger.info(
          "database: " + (connectToRemote ? colors.yellow("remote") : colors.blue("local database."))
        );
      },
      "astro:build:setup": async ({ vite }) => {
        // Builds have no dev server; spin up a throwaway Vite server so seed
        // files can still be executed.
        tempViteServer = await getTempViteServer({ viteConfig: vite });
        seedHandler.execute = async (fileUrl) => {
          await executeSeedFile({ fileUrl, viteServer: tempViteServer });
        };
      },
      "astro:build:done": async ({}) => {
        await tempViteServer?.close();
      }
    }
  };
}
/**
 * Whether ASTRO_DATABASE_FILE is set, either in .env files (via Vite's
 * loadEnv) or directly in the process environment.
 */
function databaseFileEnvDefined() {
  const env = loadEnv("", process.cwd());
  const inDotenv = env.ASTRO_DATABASE_FILE != null;
  const inProcess = process.env.ASTRO_DATABASE_FILE != null;
  return inDotenv || inProcess;
}
/** Public entry point: the db integration always ships with the file-url helper. */
function integration(options) {
  const dbIntegration = astroDBIntegration(options);
  const fileUrlIntegration = fileURLIntegration();
  return [dbIntegration, fileUrlIntegration];
}
/**
 * Imports a seed file through the given Vite server and runs its default
 * export. Database errors are re-wrapped as AstroDbError; a missing default
 * export is reported explicitly.
 */
async function executeSeedFile({
  fileUrl,
  viteServer
}) {
  const pathname = decodeURIComponent(fileUrl.pathname);
  const mod = await viteServer.ssrLoadModule(pathname);
  if (typeof mod.default !== "function") {
    throw new AstroDbError(EXEC_DEFAULT_EXPORT_ERROR(fileURLToPath(fileUrl)));
  }
  try {
    await mod.default();
  } catch (e) {
    if (isDbError(e)) {
      throw new AstroDbError(EXEC_ERROR(e.message));
    }
    throw e;
  }
}
/**
 * Creates a minimal middleware-mode Vite server used to execute seed files
 * during `astro build` (HMR, websockets, file watching and dependency
 * discovery all disabled). HMR "error" payloads are converted into thrown
 * exceptions so seed failures abort the build instead of being swallowed.
 */
async function getTempViteServer({ viteConfig }) {
  const tempViteServer = await createServer(
    mergeConfig(viteConfig, {
      server: { middlewareMode: true, hmr: false, watch: null, ws: false },
      optimizeDeps: { noDiscovery: true },
      ssr: { external: [] },
      logLevel: "silent"
    })
  );
  // NOTE(review): hotSend is invoked unbound; if Vite's implementation of
  // `send` relies on `this`, it should be bound — confirm against Vite.
  const hotSend = tempViteServer.hot.send;
  tempViteServer.hot.send = (payload) => {
    if (payload.type === "error") {
      throw payload.err;
    }
    return hotSend(payload);
  };
  return tempViteServer;
}
export {
integration
};

View File

@@ -0,0 +1,2 @@
import type { DBTables } from '../types.js';
/** Renders the `astro:db` ambient module declaration for the given tables. */
export declare function getDtsContent(tables: DBTables): string;

View File

@@ -0,0 +1,21 @@
import { RUNTIME_IMPORT } from "../consts.js";
/**
 * Renders the ambient `declare module 'astro:db'` block injected as db.d.ts,
 * with one typed `export const <Table>` entry per configured table.
 */
function getDtsContent(tables) {
  const content = `// This file is generated by Astro DB
declare module 'astro:db' {
${Object.entries(tables).map(([name, table]) => generateTableType(name, table)).join("\n")}
}
`;
  return content;
}
/**
 * Renders the typed declaration for one table. Deprecated columns are
 * filtered out so they disappear from editor type hints.
 */
function generateTableType(name, table) {
  const sanitizedColumnsList = Object.entries(table.columns).filter(([, val]) => !val.schema.deprecated);
  const sanitizedColumns = Object.fromEntries(sanitizedColumnsList);
  let tableType = ` export const ${name}: import(${RUNTIME_IMPORT}).Table<
${JSON.stringify(name)},
${JSON.stringify(sanitizedColumns)}
>;`;
  return tableType;
}
export {
getDtsContent
};

View File

@@ -0,0 +1,7 @@
import type { VitePlugin } from '../utils.js';
type VitePluginDBClientParams = {
connectToRemote: boolean;
mode: 'node' | 'web';
};
export declare function vitePluginDbClient(params: VitePluginDBClientParams): VitePlugin;
export {};

View File

@@ -0,0 +1,42 @@
import { DB_CLIENTS, VIRTUAL_CLIENT_MODULE_ID } from "../consts.js";
/**
 * Generated module source for the remote DB client.
 * "web" selects the fetch-based client; any other mode (including "node")
 * falls back to the node client.
 */
function getRemoteClientModule(mode) {
  const clientSource = mode === "web" ? DB_CLIENTS.web : DB_CLIENTS.node;
  return `export { createClient } from '${clientSource}';`;
}
/**
 * Generated module source for the local DB client.
 * Every mode resolves to the same local client; the parameter is kept for
 * signature symmetry with getRemoteClientModule.
 */
function getLocalClientModule(mode) {
  return `export { createClient } from '${DB_CLIENTS.local}';`;
}
// Null-byte prefix marks the id as plugin-resolved per Vite convention.
const resolved = "\0" + VIRTUAL_CLIENT_MODULE_ID;
/**
 * Vite plugin serving the `astro:db` client virtual module.
 * Emits the remote client when connected to a remote DB, otherwise the
 * local client.
 */
function vitePluginDbClient(params) {
  return {
    name: "virtual:astro:db-client",
    enforce: "pre",
    async resolveId(id) {
      return id === VIRTUAL_CLIENT_MODULE_ID ? resolved : void 0;
    },
    async load(id) {
      if (id !== resolved) return;
      return params.connectToRemote === true
        ? getRemoteClientModule(params.mode)
        : getLocalClientModule(params.mode);
    }
  };
}
export {
vitePluginDbClient
};

View File

@@ -0,0 +1,60 @@
import type { AstroConfig, AstroIntegrationLogger } from 'astro';
import type { DBTables } from '../types.js';
import { type VitePlugin } from '../utils.js';
export type LateTables = {
get: () => DBTables;
};
export type LateSeedFiles = {
get: () => Array<string | URL>;
};
export type SeedHandler = {
inProgress: boolean;
execute: (fileUrl: URL) => Promise<void>;
};
type VitePluginDBParams = {
connectToRemote: false;
tables: LateTables;
seedFiles: LateSeedFiles;
srcDir: URL;
root: URL;
logger?: AstroIntegrationLogger;
output: AstroConfig['output'];
seedHandler: SeedHandler;
} | {
connectToRemote: true;
tables: LateTables;
appToken: string;
srcDir: URL;
root: URL;
output: AstroConfig['output'];
seedHandler: SeedHandler;
};
export declare function vitePluginDb(params: VitePluginDBParams): VitePlugin;
export declare function getConfigVirtualModContents(): string;
export declare function getLocalVirtualModContents({ tables, root, localExecution, }: {
tables: DBTables;
root: URL;
/**
* Used for the execute command to import the client directly.
* In other cases, we use the runtime only vite virtual module.
*
* This is used to ensure that the client is imported correctly
* when executing commands like `astro db execute`.
*/
localExecution: boolean;
}): string;
export declare function getRemoteVirtualModContents({ tables, appToken, isBuild, output, localExecution, }: {
tables: DBTables;
appToken: string;
isBuild: boolean;
output: AstroConfig['output'];
/**
* Used for the execute command to import the client directly.
* In other cases, we use the runtime only vite virtual module.
*
* This is used to ensure that the client is imported correctly
* when executing commands like `astro db execute`.
*/
localExecution: boolean;
}): string;
export {};

View File

@@ -0,0 +1,183 @@
import { existsSync } from "node:fs";
import { fileURLToPath } from "node:url";
import { sql } from "drizzle-orm";
import { SQLiteAsyncDialect } from "drizzle-orm/sqlite-core";
import { normalizeDatabaseUrl } from "../../runtime/index.js";
import {
DB_CLIENTS,
DB_PATH,
RUNTIME_IMPORT,
RUNTIME_VIRTUAL_IMPORT,
VIRTUAL_CLIENT_MODULE_ID,
VIRTUAL_MODULE_ID
} from "../consts.js";
import { createClient } from "../db-client/libsql-local.js";
import { getResolvedFileUrl } from "../load-file.js";
import { getCreateIndexQueries, getCreateTableQuery, SEED_DEV_FILE_NAME } from "../queries.js";
import {
getAstroEnv,
getDbDirectoryUrl,
getRemoteDatabaseInfo
} from "../utils.js";
// Two resolved ids: the normal virtual module, and a variant used while a
// seed file is executing so the seed's own `astro:db` import does not
// re-trigger table recreation/seeding (which would recurse).
const resolved = {
  module: "\0" + VIRTUAL_MODULE_ID,
  importedFromSeedFile: "\0" + VIRTUAL_MODULE_ID + ":seed"
};
// Vite plugin providing the `astro:db` virtual module. For local databases
// it also recreates tables and runs seed files on each (re)load.
function vitePluginDb(params) {
  let command = "build";
  return {
    name: "astro:db",
    enforce: "pre",
    configResolved(resolvedConfig) {
      // Captured so `load` can tell dev from build when generating remote contents.
      command = resolvedConfig.command;
    },
    async resolveId(id) {
      if (id !== VIRTUAL_MODULE_ID) return;
      if (params.seedHandler.inProgress) {
        return resolved.importedFromSeedFile;
      }
      return resolved.module;
    },
    async load(id) {
      if (id !== resolved.module && id !== resolved.importedFromSeedFile) return;
      if (params.connectToRemote) {
        return getRemoteVirtualModContents({
          appToken: params.appToken,
          tables: params.tables.get(),
          isBuild: command === "build",
          output: params.output,
          localExecution: false
        });
      }
      // Seed-file import path: serve module contents without re-seeding.
      if (id === resolved.importedFromSeedFile) {
        return getLocalVirtualModContents({
          root: params.root,
          tables: params.tables.get(),
          localExecution: false
        });
      }
      // Local dev path: rebuild the schema from scratch, then run seeds.
      await recreateTables(params);
      const seedFiles = getResolvedSeedFiles(params);
      // NOTE(review): `for await` over an array of URLs behaves like `for...of`
      // here since entries are not promises — presumably intentional.
      for await (const seedFile of seedFiles) {
        // Watch even missing seed files so creating one triggers a reload.
        this.addWatchFile(fileURLToPath(seedFile));
        if (existsSync(seedFile)) {
          params.seedHandler.inProgress = true;
          await params.seedHandler.execute(seedFile);
        }
      }
      if (params.seedHandler.inProgress) {
        (params.logger ?? console).info("Seeded database.");
        params.seedHandler.inProgress = false;
      }
      return getLocalVirtualModContents({
        root: params.root,
        tables: params.tables.get(),
        localExecution: false
      });
    }
  };
}
/**
 * Module contents used while bundling the user's db config file:
 * just re-export everything from the runtime virtual module.
 */
function getConfigVirtualModContents() {
  const reExport = `export * from ${RUNTIME_VIRTUAL_IMPORT}`;
  return reExport;
}
/**
 * Import statement for `createClient` in generated module code.
 * `astro db execute` (localExecution) imports the node client directly;
 * everything else goes through the Vite client virtual module.
 */
function getDBModule(localExecution) {
  const source = localExecution ? DB_CLIENTS.node : VIRTUAL_CLIENT_MODULE_ID;
  return `import { createClient } from '${source}';`;
}
// Generate the `astro:db` module source for a local (file-backed) database.
// The emitted code normalizes the DB url at runtime (honoring an
// ASTRO_DATABASE_FILE override) and exports one drizzle table per config entry.
function getLocalVirtualModContents({
  tables,
  root,
  localExecution
}) {
  const { ASTRO_DATABASE_FILE } = getAstroEnv();
  // Default on-disk location, relative to the project root.
  const dbUrl = new URL(DB_PATH, root);
  const clientImport = getDBModule(localExecution);
  return `
import { asDrizzleTable, normalizeDatabaseUrl } from ${RUNTIME_IMPORT};
${clientImport}
const dbUrl = normalizeDatabaseUrl(${JSON.stringify(ASTRO_DATABASE_FILE)}, ${JSON.stringify(dbUrl)});
export const db = createClient({ url: dbUrl });
export * from ${RUNTIME_VIRTUAL_IMPORT};
${getStringifiedTableExports(tables)}`;
}
// Generate the `astro:db` module source for a remote database connection.
// Token/url handling differs by context:
//  - server-output builds read the token from process.env only (so secrets
//    are not baked into the bundle);
//  - static builds inline the token as a fallback;
//  - dev inlines both token and url directly.
function getRemoteVirtualModContents({
  tables,
  appToken,
  isBuild,
  output,
  localExecution
}) {
  const dbInfo = getRemoteDatabaseInfo();
  function appTokenArg() {
    if (isBuild) {
      if (output === "server") {
        return `process.env.ASTRO_DB_APP_TOKEN`;
      } else {
        return `process.env.ASTRO_DB_APP_TOKEN ?? ${JSON.stringify(appToken)}`;
      }
    } else {
      return JSON.stringify(appToken);
    }
  }
  function dbUrlArg() {
    const dbStr = JSON.stringify(dbInfo.url);
    if (isBuild) {
      // Allow a build-time env override of the remote url.
      return `import.meta.env.ASTRO_DB_REMOTE_URL ?? ${dbStr}`;
    } else {
      return dbStr;
    }
  }
  const clientImport = getDBModule(localExecution);
  return `
import {asDrizzleTable} from ${RUNTIME_IMPORT};
${clientImport}
export const db = await createClient({
  url: ${dbUrlArg()},
  token: ${appTokenArg()},
});
export * from ${RUNTIME_VIRTUAL_IMPORT};
${getStringifiedTableExports(tables)}
`;
}
/**
 * Emit one `export const <name> = asDrizzleTable(...)` line per table,
 * newline-joined, for inclusion in the generated virtual module.
 */
function getStringifiedTableExports(tables) {
  const exportLines = [];
  for (const [name, table] of Object.entries(tables)) {
    exportLines.push(
      `export const ${name} = asDrizzleTable(${JSON.stringify(name)}, ${JSON.stringify(table)}, false)`
    );
  }
  return exportLines.join("\n");
}
const sqlite = new SQLiteAsyncDialect();
// Drop and recreate every configured table (plus indexes) in the local DB.
// Destructive by design: local dev state is rebuilt from config + seeds.
async function recreateTables({ tables, root }) {
  const { ASTRO_DATABASE_FILE } = getAstroEnv();
  const dbUrl = normalizeDatabaseUrl(ASTRO_DATABASE_FILE, new URL(DB_PATH, root).href);
  const db = createClient({ url: dbUrl });
  const setupQueries = [];
  for (const [name, table] of Object.entries(tables.get() ?? {})) {
    const dropQuery = sql.raw(`DROP TABLE IF EXISTS ${sqlite.escapeName(name)}`);
    const createQuery = sql.raw(getCreateTableQuery(name, table));
    const indexQueries = getCreateIndexQueries(name, table);
    setupQueries.push(dropQuery, createQuery, ...indexQueries.map((s) => sql.raw(s)));
  }
  // Single batch with deferred FK checks so drop order between referencing
  // tables does not matter.
  await db.batch([
    db.run(sql`pragma defer_foreign_keys=true;`),
    ...setupQueries.map((q) => db.run(q))
  ]);
}
/**
 * Collect every candidate seed file URL: integration-provided seeds first,
 * then the project's own db/seed.* files.
 */
function getResolvedSeedFiles({ root, seedFiles }) {
  const projectSeeds = SEED_DEV_FILE_NAME.map(
    (name) => new URL(name, getDbDirectoryUrl(root))
  );
  const integrationSeeds = seedFiles.get().map((s) => getResolvedFileUrl(root, s));
  return [...integrationSeeds, ...projectSeeds];
}
export {
getConfigVirtualModContents,
getLocalVirtualModContents,
getRemoteVirtualModContents,
vitePluginDb
};

View File

@@ -0,0 +1,126 @@
import type { AstroConfig } from 'astro';
import './types.js';
/**
* Load a users `astro:db` configuration file and additional configuration files provided by integrations.
*/
export declare function resolveDbConfig({ root, integrations, }: Pick<AstroConfig, 'root' | 'integrations'>): Promise<{
/** Resolved `astro:db` config, including tables added by integrations. */
dbConfig: {
tables: Record<string, {
indexes: Record<string, {
on: string | string[];
unique?: boolean | undefined;
}>;
deprecated: boolean;
columns: Record<string, {
type: "boolean";
schema: {
optional: boolean;
unique: boolean;
deprecated: boolean;
default?: boolean | import("../runtime/types.js").SerializedSQL | undefined;
name?: string | undefined;
label?: string | undefined;
collection?: string | undefined;
};
} | {
type: "number";
schema: ({
unique: boolean;
deprecated: boolean;
name?: string | undefined;
label?: string | undefined;
collection?: string | undefined;
} & ({
optional: boolean;
primaryKey: false;
default?: number | import("../runtime/types.js").SerializedSQL | undefined;
} | {
primaryKey: true;
default?: undefined;
optional?: false | undefined;
})) & {
references?: import("./types.js").NumberColumn;
};
} | {
type: "text";
schema: ({
unique: boolean;
deprecated: boolean;
default?: string | import("../runtime/types.js").SerializedSQL | undefined;
name?: string | undefined;
label?: string | undefined;
collection?: string | undefined;
multiline?: boolean | undefined;
enum?: [string, ...string[]] | undefined;
} & ({
optional: boolean;
primaryKey: false;
} | {
primaryKey: true;
optional?: false | undefined;
})) & {
references?: import("./types.js").TextColumn;
};
} | {
type: "date";
schema: {
optional: boolean;
unique: boolean;
deprecated: boolean;
default?: string | import("../runtime/types.js").SerializedSQL | undefined;
name?: string | undefined;
label?: string | undefined;
collection?: string | undefined;
};
} | {
type: "json";
schema: {
optional: boolean;
unique: boolean;
deprecated: boolean;
default?: unknown;
name?: string | undefined;
label?: string | undefined;
collection?: string | undefined;
};
}>;
foreignKeys?: (Omit<{
columns: import("./schemas.js").MaybeArray<string>;
references: () => import("./schemas.js").MaybeArray<Omit<import("zod").input<typeof import("./schemas.js").referenceableColumnSchema>, "references">>;
}, "references"> & {
references: import("./schemas.js").MaybeArray<Omit<import("zod").output<typeof import("./schemas.js").referenceableColumnSchema>, "references">>;
})[] | undefined;
}>;
};
/** Dependencies imported into the user config file. */
dependencies: string[];
/** Additional `astro:db` seed file paths provided by integrations. */
integrationSeedPaths: (string | URL)[];
}>;
export declare function getResolvedFileUrl(root: URL, filePathOrUrl: string | URL): URL;
/**
* Bundle arbitrary `mjs` or `ts` file.
* Simplified fork from Vite's `bundleConfigFile` function.
*
* @see https://github.com/vitejs/vite/blob/main/packages/vite/src/node/config.ts#L961
*/
export declare function bundleFile({ fileUrl, root, virtualModContents, }: {
fileUrl: URL;
root: URL;
virtualModContents: string;
}): Promise<{
code: string;
dependencies: string[];
}>;
/**
* Forked from Vite config loader, replacing CJS-based path concat with ESM only
*
* @see https://github.com/vitejs/vite/blob/main/packages/vite/src/node/config.ts#L1074
*/
export declare function importBundledFile({ code, root, }: {
code: string;
root: URL;
}): Promise<{
default?: unknown;
}>;

View File

@@ -0,0 +1,170 @@
import { existsSync } from "node:fs";
import { unlink, writeFile } from "node:fs/promises";
import { createRequire } from "node:module";
import { fileURLToPath, pathToFileURL } from "node:url";
import { build as esbuild } from "esbuild";
import { CONFIG_FILE_NAMES, VIRTUAL_MODULE_ID } from "./consts.js";
import { INTEGRATION_TABLE_CONFLICT_ERROR } from "./errors.js";
import { errorMap } from "./integration/error-map.js";
import { getConfigVirtualModContents } from "./integration/vite-plugin-db.js";
import { dbConfigSchema } from "./schemas.js";
import "./types.js";
import { getAstroEnv, getDbDirectoryUrl } from "./utils.js";
// Load and merge the user's `astro:db` config with any configs contributed
// by integrations via the `astro:db:setup` hook. Integration tables may not
// collide with user tables or with each other.
async function resolveDbConfig({
  root,
  integrations
}) {
  const { mod, dependencies } = await loadUserConfigFile(root);
  const userDbConfig = dbConfigSchema.parse(mod?.default ?? {}, { errorMap });
  const dbConfig = { tables: userDbConfig.tables ?? {} };
  const integrationDbConfigPaths = [];
  const integrationSeedPaths = [];
  for (const integration of integrations) {
    const { name, hooks } = integration;
    if (hooks["astro:db:setup"]) {
      // NOTE(review): the hook's return value is not awaited, so an async
      // hook must call extendDb synchronously for its entries to register.
      hooks["astro:db:setup"]({
        extendDb({ configEntrypoint, seedEntrypoint }) {
          if (configEntrypoint) {
            integrationDbConfigPaths.push({ name, configEntrypoint });
          }
          if (seedEntrypoint) {
            integrationSeedPaths.push(seedEntrypoint);
          }
        }
      });
    }
  }
  for (const { name, configEntrypoint } of integrationDbConfigPaths) {
    const loadedConfig = await loadIntegrationConfigFile(root, configEntrypoint);
    const integrationDbConfig = dbConfigSchema.parse(loadedConfig.mod?.default ?? {}, {
      errorMap
    });
    for (const key in integrationDbConfig.tables) {
      if (key in dbConfig.tables) {
        // Distinguish user-vs-integration conflicts for a clearer error message.
        const isUserConflict = key in (userDbConfig.tables ?? {});
        throw new Error(INTEGRATION_TABLE_CONFLICT_ERROR(name, key, isUserConflict));
      } else {
        dbConfig.tables[key] = integrationDbConfig.tables[key];
      }
    }
  }
  return {
    /** Resolved `astro:db` config, including tables added by integrations. */
    dbConfig,
    /** Dependencies imported into the user config file. */
    dependencies,
    /** Additional `astro:db` seed file paths provided by integrations. */
    integrationSeedPaths
  };
}
/**
 * Locate and bundle the user's db config file under the project's db/
 * directory. When multiple CONFIG_FILE_NAMES exist on disk, the last match
 * in that list wins (the loop intentionally keeps overwriting).
 */
async function loadUserConfigFile(root) {
  let configFileUrl;
  for (const fileName of CONFIG_FILE_NAMES) {
    const candidate = new URL(fileName, getDbDirectoryUrl(root));
    if (existsSync(candidate)) {
      configFileUrl = candidate;
    }
  }
  return await loadAndBundleDbConfigFile({ root, fileUrl: configFileUrl });
}
/**
 * Resolve a seed/config entrypoint to a file URL.
 * URL inputs pass through untouched; string inputs are resolved like a
 * require() relative to the project root (supporting bare package names).
 */
function getResolvedFileUrl(root, filePathOrUrl) {
  if (typeof filePathOrUrl !== "string") {
    return filePathOrUrl;
  }
  const requireFromRoot = createRequire(root);
  return pathToFileURL(requireFromRoot.resolve(filePathOrUrl));
}
/**
 * Bundle an integration-provided db config entrypoint (string or URL)
 * after resolving it to a concrete file URL.
 */
async function loadIntegrationConfigFile(root, filePathOrUrl) {
  const fileUrl = getResolvedFileUrl(root, filePathOrUrl);
  const loaded = await loadAndBundleDbConfigFile({ root, fileUrl });
  return loaded;
}
/**
 * Bundle a db config file with esbuild, then import the bundled code.
 * A missing fileUrl yields an empty config with no dependencies.
 */
async function loadAndBundleDbConfigFile({ root, fileUrl }) {
  if (!fileUrl) {
    return { mod: void 0, dependencies: [] };
  }
  const { code, dependencies } = await bundleFile({
    virtualModContents: getConfigVirtualModContents(),
    root,
    fileUrl
  });
  const mod = await importBundledFile({ code, root });
  return { mod, dependencies };
}
// Bundle an arbitrary mjs/ts file with esbuild, stubbing `astro:db` with the
// provided virtual module contents. Returns the bundled code plus the list
// of files it imported (from esbuild's metafile) for invalidation tracking.
async function bundleFile({
  fileUrl,
  root,
  virtualModContents
}) {
  const { ASTRO_DATABASE_FILE } = getAstroEnv();
  const result = await esbuild({
    absWorkingDir: process.cwd(),
    entryPoints: [fileURLToPath(fileUrl)],
    outfile: "out.js",
    // Keep node_modules external — only the user's own files are bundled.
    packages: "external",
    write: false,
    target: ["node16"],
    platform: "node",
    bundle: true,
    format: "esm",
    sourcemap: "inline",
    metafile: true,
    define: {
      // Statically inline the env override so the bundled config sees it.
      "import.meta.env.ASTRO_DATABASE_FILE": JSON.stringify(ASTRO_DATABASE_FILE ?? "")
    },
    plugins: [
      {
        name: "resolve-astro-db",
        setup(build) {
          build.onResolve({ filter: /^astro:db$/ }, ({ path }) => {
            return { path, namespace: VIRTUAL_MODULE_ID };
          });
          build.onLoad({ namespace: VIRTUAL_MODULE_ID, filter: /.*/ }, () => {
            return {
              contents: virtualModContents,
              // Needed to resolve runtime dependencies
              resolveDir: fileURLToPath(root)
            };
          });
        }
      }
    ]
  });
  const file = result.outputFiles[0];
  if (!file) {
    throw new Error(`Unexpected: no output file`);
  }
  return {
    code: file.text,
    dependencies: Object.keys(result.metafile.inputs)
  };
}
/**
 * Import bundled code by writing it to a timestamped temp .mjs file next to
 * the project root (so relative imports resolve against project files),
 * dynamically importing it, then removing the temp file.
 */
async function importBundledFile({ code, root }) {
  const tempModuleUrl = new URL(`./db.timestamp-${Date.now()}.mjs`, root);
  await writeFile(tempModuleUrl, code, { encoding: "utf8" });
  try {
    return await import(
      /* @vite-ignore */
      tempModuleUrl.toString()
    );
  } finally {
    // Best-effort cleanup; a failed unlink must not mask the import result.
    try {
      await unlink(tempModuleUrl);
    } catch {
    }
  }
}
export {
bundleFile,
getResolvedFileUrl,
importBundledFile,
resolveDbConfig
};

View File

@@ -0,0 +1,53 @@
import type { BooleanColumn, ColumnType, DateColumn, DBColumn, DBTable, JsonColumn, NumberColumn, TextColumn } from './types.js';
export declare const SEED_DEV_FILE_NAME: string[];
export declare function getDropTableIfExistsQuery(tableName: string): string;
export declare function getCreateTableQuery(tableName: string, table: DBTable): string;
export declare function getCreateIndexQueries(tableName: string, table: Pick<DBTable, 'indexes'>): string[];
export declare function schemaTypeToSqlType(type: ColumnType): 'text' | 'integer';
export declare function getModifiers(columnName: string, column: DBColumn): string;
export declare function getReferencesConfig(column: DBColumn): {
type: "number";
schema: ({
unique: boolean;
deprecated: boolean;
name?: string | undefined;
label?: string | undefined;
collection?: string | undefined;
} & ({
optional: boolean;
primaryKey: false;
default?: number | import("../runtime/types.js").SerializedSQL | undefined;
} | {
primaryKey: true;
default?: undefined;
optional?: false | undefined;
})) & {
references?: NumberColumn;
};
} | {
type: "text";
schema: ({
unique: boolean;
deprecated: boolean;
default?: string | import("../runtime/types.js").SerializedSQL | undefined;
name?: string | undefined;
label?: string | undefined;
collection?: string | undefined;
multiline?: boolean | undefined;
enum?: [string, ...string[]] | undefined;
} & ({
optional: boolean;
primaryKey: false;
} | {
primaryKey: true;
optional?: false | undefined;
})) & {
references?: TextColumn;
};
} | undefined;
type WithDefaultDefined<T extends DBColumn> = T & {
schema: Required<Pick<T['schema'], 'default'>>;
};
type DBColumnWithDefault = WithDefaultDefined<TextColumn> | WithDefaultDefined<DateColumn> | WithDefaultDefined<NumberColumn> | WithDefaultDefined<BooleanColumn> | WithDefaultDefined<JsonColumn>;
export declare function hasDefault(column: DBColumn): column is DBColumnWithDefault;
export {};

View File

@@ -0,0 +1,166 @@
import { SQLiteAsyncDialect } from "drizzle-orm/sqlite-core";
import colors from "piccolore";
import {
FOREIGN_KEY_DNE_ERROR,
FOREIGN_KEY_REFERENCES_EMPTY_ERROR,
FOREIGN_KEY_REFERENCES_LENGTH_ERROR,
REFERENCE_DNE_ERROR
} from "../runtime/errors.js";
import { isSerializedSQL } from "../runtime/types.js";
import { hasPrimaryKey } from "../runtime/utils.js";
// Shared dialect instance used for identifier/string escaping throughout
// this module.
const sqlite = new SQLiteAsyncDialect();
// Seed file names probed under the project's db/ directory, in lookup order.
const SEED_DEV_FILE_NAME = ["seed.ts", "seed.js", "seed.mjs", "seed.mts"];
// Build a `DROP TABLE IF EXISTS` statement with a safely escaped table name.
function getDropTableIfExistsQuery(tableName) {
  return `DROP TABLE IF EXISTS ${sqlite.escapeName(tableName)}`;
}
// Build the full `CREATE TABLE` statement for a table config: column
// definitions, an implicit `_id` primary key when none is declared, and
// any foreign key clauses.
function getCreateTableQuery(tableName, table) {
  let query = `CREATE TABLE ${sqlite.escapeName(tableName)} (`;
  const colQueries = [];
  const colHasPrimaryKey = Object.entries(table.columns).find(
    ([, column]) => hasPrimaryKey(column)
  );
  if (!colHasPrimaryKey) {
    // SQLite rowid alias: gives every table a stable integer key.
    colQueries.push("_id INTEGER PRIMARY KEY");
  }
  for (const [columnName, column] of Object.entries(table.columns)) {
    const colQuery = `${sqlite.escapeName(columnName)} ${schemaTypeToSqlType(
      column.type
    )}${getModifiers(columnName, column)}`;
    colQueries.push(colQuery);
  }
  colQueries.push(...getCreateForeignKeyQueries(tableName, table));
  query += colQueries.join(", ") + ")";
  return query;
}
/**
 * Build one `CREATE [UNIQUE] INDEX` statement per configured index.
 * Index names, table name, and every indexed column are escaped.
 */
function getCreateIndexQueries(tableName, table) {
  const statements = [];
  for (const [indexName, indexProps] of Object.entries(table.indexes ?? {})) {
    const escapedCols = asArray(indexProps.on).map((colName) => sqlite.escapeName(colName));
    const uniqueKeyword = indexProps.unique ? "UNIQUE " : "";
    statements.push(
      `CREATE ${uniqueKeyword}INDEX ${sqlite.escapeName(
        indexName
      )} ON ${sqlite.escapeName(tableName)} (${escapedCols.join(", ")})`
    );
  }
  return statements;
}
// Build `FOREIGN KEY (...) REFERENCES ...` clauses for a table config.
// Validates that local columns and referenced columns line up 1:1, and that
// every reference resolves to a known table.
function getCreateForeignKeyQueries(tableName, table) {
  let queries = [];
  for (const foreignKey of table.foreignKeys ?? []) {
    const columns = asArray(foreignKey.columns);
    const references = asArray(foreignKey.references);
    if (columns.length !== references.length) {
      throw new Error(FOREIGN_KEY_REFERENCES_LENGTH_ERROR(tableName));
    }
    const firstReference = references[0];
    if (!firstReference) {
      throw new Error(FOREIGN_KEY_REFERENCES_EMPTY_ERROR(tableName));
    }
    // All references are assumed to target the same table; the first entry's
    // `collection` (populated by defineDb) names it.
    const referencedTable = firstReference.schema.collection;
    if (!referencedTable) {
      throw new Error(FOREIGN_KEY_DNE_ERROR(tableName));
    }
    const query = `FOREIGN KEY (${columns.map((f) => sqlite.escapeName(f)).join(", ")}) REFERENCES ${sqlite.escapeName(referencedTable)}(${references.map((r) => sqlite.escapeName(r.schema.name)).join(", ")})`;
    queries.push(query);
  }
  return queries;
}
/** Normalize a scalar-or-array config value to an array (arrays pass through). */
function asArray(value) {
  if (Array.isArray(value)) {
    return value;
  }
  return [value];
}
/**
 * Map an Astro DB column type to its SQLite storage class:
 * textual types (date/text/json) store as TEXT, number/boolean as INTEGER.
 * Unknown types yield undefined, matching exhaustive-switch semantics.
 */
function schemaTypeToSqlType(type) {
  if (type === "number" || type === "boolean") {
    return "integer";
  }
  if (type === "date" || type === "text" || type === "json") {
    return "text";
  }
}
// Build the SQL column modifier suffix (NOT NULL / UNIQUE / DEFAULT /
// REFERENCES) for one column. Primary keys short-circuit to ` PRIMARY KEY`
// with no other modifiers.
function getModifiers(columnName, column) {
  let modifiers = "";
  if (hasPrimaryKey(column)) {
    return " PRIMARY KEY";
  }
  if (!column.schema.optional) {
    modifiers += " NOT NULL";
  }
  if (column.schema.unique) {
    modifiers += " UNIQUE";
  }
  if (hasDefault(column)) {
    modifiers += ` DEFAULT ${getDefaultValueSql(columnName, column)}`;
  }
  const references = getReferencesConfig(column);
  if (references) {
    // `collection`/`name` are filled in by defineDb; missing values mean the
    // referenced column was never registered.
    const { collection: tableName, name } = references.schema;
    if (!tableName || !name) {
      throw new Error(REFERENCE_DNE_ERROR(columnName));
    }
    modifiers += ` REFERENCES ${sqlite.escapeName(tableName)} (${sqlite.escapeName(name)})`;
  }
  return modifiers;
}
/**
 * Foreign-key reference config for a column, if any.
 * Only number and text columns may declare references; other types
 * return undefined.
 */
function getReferencesConfig(column) {
  if (column.type === "number" || column.type === "text") {
    return column.schema.references;
  }
  return void 0;
}
/**
 * Whether a column carries a default value: either an explicit `default`
 * in its schema, or the implicit ROWID default of an integer primary key.
 */
function hasDefault(column) {
  const hasExplicitDefault = column.schema.default !== void 0;
  const hasImplicitRowid = column.type === "number" && hasPrimaryKey(column);
  return hasExplicitDefault || hasImplicitRowid;
}
/**
 * Render a JS default value as a SQL literal: strings are escaped,
 * booleans become TRUE/FALSE, everything else is stringified as-is.
 */
function toDefault(def) {
  switch (typeof def) {
    case "string":
      return sqlite.escapeString(def);
    case "boolean":
      return def ? "TRUE" : "FALSE";
    default:
      return def + "";
  }
}
// Render a column's default as SQL. Serialized SQL defaults (e.g. NOW) are
// inlined raw; JSON defaults are stringified then escaped as text.
function getDefaultValueSql(columnName, column) {
  if (isSerializedSQL(column.schema.default)) {
    return column.schema.default.sql;
  }
  switch (column.type) {
    case "boolean":
    case "number":
    case "text":
    case "date":
      return toDefault(column.schema.default);
    case "json": {
      let stringified = "";
      try {
        stringified = JSON.stringify(column.schema.default);
      } catch {
        console.log(
          `Invalid default value for column ${colors.bold(
            columnName
          )}. Defaults must be valid JSON when using the \`json()\` type.`
        );
        // NOTE(review): exits with code 0 even though this is a user error —
        // a nonzero code seems more appropriate; confirm before changing.
        process.exit(0);
      }
      return sqlite.escapeString(stringified);
    }
  }
}
export {
SEED_DEV_FILE_NAME,
getCreateIndexQueries,
getCreateTableQuery,
getDropTableIfExistsQuery,
getModifiers,
getReferencesConfig,
hasDefault,
schemaTypeToSqlType
};

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,188 @@
import { SQL } from "drizzle-orm";
import { SQLiteAsyncDialect } from "drizzle-orm/sqlite-core";
import { z } from "zod";
import { SERIALIZED_SQL_KEY } from "../runtime/types.js";
import { errorMap } from "./integration/error-map.js";
import { mapObject } from "./utils.js";
// Dialect instance used to serialize drizzle SQL objects into plain strings.
const sqlite = new SQLiteAsyncDialect();
// Accepts a drizzle `SQL` object and serializes it to a tagged plain object
// so defaults survive JSON round-trips.
const sqlSchema = z.instanceof(SQL).transform(
  (sqlObj) => ({
    [SERIALIZED_SQL_KEY]: true,
    sql: sqlite.sqlToQuery(sqlObj).sql
  })
);
// Options shared by every column type.
const baseColumnSchema = z.object({
  label: z.string().optional(),
  optional: z.boolean().optional().default(false),
  unique: z.boolean().optional().default(false),
  deprecated: z.boolean().optional().default(false),
  // Defined when `defineDb()` is called to resolve `references`
  name: z.string().optional(),
  // TODO: Update to `table`. Will need migration file version change
  collection: z.string().optional()
});
const booleanColumnSchema = z.object({
  type: z.literal("boolean"),
  schema: baseColumnSchema.extend({
    default: z.union([z.boolean(), sqlSchema]).optional()
  })
});
// Number columns: the primaryKey=true branch forbids optional/default.
const numberColumnBaseSchema = baseColumnSchema.omit({ optional: true }).and(
  z.union([
    z.object({
      primaryKey: z.literal(false).optional().default(false),
      optional: baseColumnSchema.shape.optional,
      default: z.union([z.number(), sqlSchema]).optional()
    }),
    z.object({
      // `integer primary key` uses ROWID as the default value.
      // `optional` and `default` do not have an effect,
      // so disable these config options for primary keys.
      primaryKey: z.literal(true),
      optional: z.literal(false).optional(),
      default: z.literal(void 0).optional()
    })
  ])
);
// `references` is declared as a thunk and eagerly invoked here so circular
// table references can be expressed in user config.
const numberColumnOptsSchema = numberColumnBaseSchema.and(
  z.object({
    references: z.function().returns(z.lazy(() => numberColumnSchema)).optional().transform((fn) => fn?.())
  })
);
const numberColumnSchema = z.object({
  type: z.literal("number"),
  schema: numberColumnOptsSchema
});
const textColumnBaseSchema = baseColumnSchema.omit({ optional: true }).extend({
  default: z.union([z.string(), sqlSchema]).optional(),
  multiline: z.boolean().optional(),
  enum: z.tuple([z.string()]).rest(z.string()).optional()
  // At least one value required,
}).and(
  z.union([
    z.object({
      primaryKey: z.literal(false).optional().default(false),
      optional: baseColumnSchema.shape.optional
    }),
    z.object({
      // text primary key allows NULL values.
      // NULL values bypass unique checks, which could
      // lead to duplicate URLs per record.
      // disable `optional` for primary keys.
      primaryKey: z.literal(true),
      optional: z.literal(false).optional()
    })
  ])
);
const textColumnOptsSchema = textColumnBaseSchema.and(
  z.object({
    references: z.function().returns(z.lazy(() => textColumnSchema)).optional().transform((fn) => fn?.())
  })
);
const textColumnSchema = z.object({
  type: z.literal("text"),
  schema: textColumnOptsSchema
});
const dateColumnSchema = z.object({
  type: z.literal("date"),
  schema: baseColumnSchema.extend({
    default: z.union([
      sqlSchema,
      // transform to ISO string for serialization
      z.date().transform((d) => d.toISOString())
    ]).optional()
  })
});
const jsonColumnSchema = z.object({
  type: z.literal("json"),
  schema: baseColumnSchema.extend({
    default: z.unknown().optional()
  })
});
// All supported column shapes, discriminated on `type`.
const columnSchema = z.discriminatedUnion("type", [
  booleanColumnSchema,
  numberColumnSchema,
  textColumnSchema,
  dateColumnSchema,
  jsonColumnSchema
]);
// Only text and number columns may be the target of a foreign key.
const referenceableColumnSchema = z.union([textColumnSchema, numberColumnSchema]);
const columnsSchema = z.record(columnSchema);
const foreignKeysSchema = z.object({
  columns: z.string().or(z.array(z.string())),
  references: z.function().returns(z.lazy(() => referenceableColumnSchema.or(z.array(referenceableColumnSchema)))).transform((fn) => fn())
});
// Legacy index form: a record keyed by index name.
const resolvedIndexSchema = z.object({
  on: z.string().or(z.array(z.string())),
  unique: z.boolean().optional()
});
const legacyIndexesSchema = z.record(resolvedIndexSchema);
// Current index form: an array with an optional explicit name.
const indexSchema = z.object({
  on: z.string().or(z.array(z.string())),
  unique: z.boolean().optional(),
  name: z.string().optional()
});
const indexesSchema = z.array(indexSchema);
const tableSchema = z.object({
  columns: columnsSchema,
  indexes: indexesSchema.or(legacyIndexesSchema).optional(),
  foreignKeys: z.array(foreignKeysSchema).optional(),
  deprecated: z.boolean().optional().default(false)
});
// Before validation, stamp each table with a getName() helper and write the
// column/table names back into each column schema (used later to resolve
// `references`). Mutates the raw input by design.
const tablesSchema = z.preprocess((rawTables) => {
  const tables = z.record(z.any()).parse(rawTables, { errorMap });
  for (const [tableName, table] of Object.entries(tables)) {
    table.getName = () => tableName;
    const { columns } = z.object({ columns: z.record(z.any()) }).parse(table, { errorMap });
    for (const [columnName, column] of Object.entries(columns)) {
      column.schema.name = columnName;
      column.schema.collection = tableName;
    }
  }
  return rawTables;
}, z.record(tableSchema));
// Top-level config: normalizes array-form indexes into the legacy
// record-keyed form, generating `<table>_<cols>_idx` names when absent.
const dbConfigSchema = z.object({
  tables: tablesSchema.optional()
}).transform(({ tables = {}, ...config }) => {
  return {
    ...config,
    tables: mapObject(tables, (tableName, table) => {
      const { indexes = {} } = table;
      if (!Array.isArray(indexes)) {
        // Already in the legacy record form; pass through unchanged.
        return { ...table, indexes };
      }
      const resolvedIndexes = {};
      for (const index of indexes) {
        if (index.name) {
          const { name: name2, ...rest } = index;
          resolvedIndexes[index.name] = rest;
          continue;
        }
        // Sort multi-column names so the generated name is order-independent.
        const indexOn = Array.isArray(index.on) ? index.on.sort().join("_") : index.on;
        const name = tableName + "_" + indexOn + "_idx";
        resolvedIndexes[name] = index;
      }
      return {
        ...table,
        indexes: resolvedIndexes
      };
    })
  };
});
export {
booleanColumnSchema,
columnSchema,
columnsSchema,
dateColumnSchema,
dbConfigSchema,
indexSchema,
jsonColumnSchema,
numberColumnOptsSchema,
numberColumnSchema,
referenceableColumnSchema,
resolvedIndexSchema,
tableSchema,
tablesSchema,
textColumnOptsSchema,
textColumnSchema
};

View File

@@ -0,0 +1,60 @@
import type { z } from 'zod';
import type { booleanColumnSchema, columnSchema, columnsSchema, dateColumnSchema, dbConfigSchema, indexSchema, jsonColumnSchema, MaybeArray, numberColumnOptsSchema, numberColumnSchema, referenceableColumnSchema, resolvedIndexSchema, tableSchema, textColumnOptsSchema, textColumnSchema } from './schemas.js';
export type ResolvedIndexes = z.output<typeof dbConfigSchema>['tables'][string]['indexes'];
export type BooleanColumn = z.infer<typeof booleanColumnSchema>;
export type BooleanColumnInput = z.input<typeof booleanColumnSchema>;
export type NumberColumn = z.infer<typeof numberColumnSchema>;
export type NumberColumnInput = z.input<typeof numberColumnSchema>;
export type TextColumn = z.infer<typeof textColumnSchema>;
export type TextColumnInput = z.input<typeof textColumnSchema>;
export type DateColumn = z.infer<typeof dateColumnSchema>;
export type DateColumnInput = z.input<typeof dateColumnSchema>;
export type JsonColumn = z.infer<typeof jsonColumnSchema>;
export type JsonColumnInput = z.input<typeof jsonColumnSchema>;
/** Union of the supported column type tags: 'boolean' | 'number' | 'text' | 'date' | 'json'. */
export type ColumnType = BooleanColumn['type'] | NumberColumn['type'] | TextColumn['type'] | DateColumn['type'] | JsonColumn['type'];
/** A single column definition after zod parsing. */
export type DBColumn = z.infer<typeof columnSchema>;
/** A column definition as authored by the user, before zod parsing. */
export type DBColumnInput = DateColumnInput | BooleanColumnInput | NumberColumnInput | TextColumnInput | JsonColumnInput;
/** Map of column name -> parsed column definition. */
export type DBColumns = z.infer<typeof columnsSchema>;
/** A parsed table definition (columns, indexes, foreign keys). */
export type DBTable = z.infer<typeof tableSchema>;
/** Map of table name -> parsed table definition. */
export type DBTables = Record<string, DBTable>;
/** Tables after the full db config schema has been resolved (defaults applied). */
export type ResolvedDBTables = z.output<typeof dbConfigSchema>['tables'];
/** A single resolved table from the db config. */
export type ResolvedDBTable = z.output<typeof dbConfigSchema>['tables'][string];
/** Snapshot of the whole schema plus a format version, used for diffing/migrations. */
export type DBSnapshot = {
    schema: Record<string, ResolvedDBTable>;
    version: string;
};
/** The db config as authored by the user. */
export type DBConfigInput = z.input<typeof dbConfigSchema>;
/** The db config after zod parsing. */
export type DBConfig = z.infer<typeof dbConfigSchema>;
/** The `columns` option accepted by `defineTable` (pre-parse). */
export type ColumnsConfig = z.input<typeof tableSchema>['columns'];
/** The `columns` option after zod parsing. */
export type OutputColumnsConfig = z.output<typeof tableSchema>['columns'];
/**
 * Typed table configuration accepted by `defineTable`.
 * Narrows `foreignKeys` and `indexes` so the `columns`/`on` entries must
 * name keys that actually exist on this table's `columns` object.
 */
export interface TableConfig<TColumns extends ColumnsConfig = ColumnsConfig> extends Pick<z.input<typeof tableSchema>, 'columns' | 'indexes' | 'foreignKeys'> {
    columns: TColumns;
    foreignKeys?: Array<{
        columns: MaybeArray<Extract<keyof TColumns, string>>;
        references: () => MaybeArray<z.input<typeof referenceableColumnSchema>>;
    }>;
    indexes?: Array<IndexConfig<TColumns>> | Record<string, LegacyIndexConfig<TColumns>>;
    deprecated?: boolean;
}
/** Index definition whose `on` entries must be column names of this table. */
interface IndexConfig<TColumns extends ColumnsConfig> extends z.input<typeof indexSchema> {
    on: MaybeArray<Extract<keyof TColumns, string>>;
}
/** @deprecated Legacy record-keyed index shape; prefer the array form above. */
interface LegacyIndexConfig<TColumns extends ColumnsConfig> extends z.input<typeof resolvedIndexSchema> {
    on: MaybeArray<Extract<keyof TColumns, string>>;
}
/** Options bag accepted by `column.number(...)`. */
export type NumberColumnOpts = z.input<typeof numberColumnOptsSchema>;
/** Options bag accepted by `column.text(...)`. */
export type TextColumnOpts = z.input<typeof textColumnOptsSchema>;
declare global {
    namespace Astro {
        interface IntegrationHooks {
            // Hook other integrations can implement to contribute extra
            // db config/seed files via `extendDb`.
            'astro:db:setup'?: (options: {
                extendDb: (options: {
                    configEntrypoint?: URL | string;
                    seedEntrypoint?: URL | string;
                }) => void;
            }) => void | Promise<void>;
        }
    }
}
export {};

View File

View File

@@ -0,0 +1,19 @@
import type { AstroConfig, AstroIntegration } from 'astro';
import type { Arguments } from 'yargs-parser';
import './types.js';
/** A single Vite plugin entry, as accepted by Astro's `vite.plugins` config. */
export type VitePlugin = Required<AstroConfig['vite']>['plugins'][number];
/** Load all `ASTRO_`-prefixed environment variables for the given mode. */
export declare function getAstroEnv(envMode?: string): Record<`ASTRO_${string}`, string>;
/** Connection details for the remote libSQL database. */
export type RemoteDatabaseInfo = {
    url: string;
    token: string;
};
/** Read the remote db URL and app token from the environment. */
export declare function getRemoteDatabaseInfo(): RemoteDatabaseInfo;
/** Resolve the app token, preferring the `--db-app-token` CLI flag over the env value. */
export declare function resolveDbAppToken(flags: Arguments, envToken: string): string;
export declare function resolveDbAppToken(flags: Arguments, envToken: string | undefined): string | undefined;
/** URL of the project's `db/` directory, relative to the project root. */
export declare function getDbDirectoryUrl(root: URL | string): URL;
/** Identity helper that brands an integration for editor/type support. */
export declare function defineDbIntegration(integration: AstroIntegration): AstroIntegration;
/**
 * Map an object's values to a new set of values
 * while preserving types.
 */
export declare function mapObject<T, U = T>(item: Record<string, T>, callback: (key: string, value: T) => U): Record<string, U>;

View File

@@ -0,0 +1,37 @@
import { loadEnv } from "vite";
import "./types.js";
/**
 * Load every `ASTRO_`-prefixed variable from the environment files for the
 * given Vite mode, resolved against the current working directory.
 */
function getAstroEnv(envMode = "") {
  return loadEnv(envMode, process.cwd(), "ASTRO_");
}
/**
 * Read the remote libSQL connection details from the `ASTRO_DB_REMOTE_URL`
 * and `ASTRO_DB_APP_TOKEN` environment variables.
 */
function getRemoteDatabaseInfo() {
  const { ASTRO_DB_REMOTE_URL: url, ASTRO_DB_APP_TOKEN: token } = getAstroEnv();
  return { url, token };
}
/**
 * Resolve the db app token: a string `--db-app-token` CLI flag wins,
 * otherwise fall back to the token taken from the environment.
 */
function resolveDbAppToken(flags, envToken) {
  return typeof flags.dbAppToken === "string" ? flags.dbAppToken : envToken;
}
/** URL of the project's `db/` directory, resolved against the project root. */
const getDbDirectoryUrl = (root) => new URL("db/", root);
/** Identity helper: exists purely so integrations get editor/type support. */
const defineDbIntegration = (integration) => integration;
/**
 * Rebuild a record by passing each (key, value) pair through `callback`,
 * keeping the original keys.
 */
function mapObject(item, callback) {
  const mapped = {};
  for (const [key, value] of Object.entries(item)) {
    mapped[key] = callback(key, value);
  }
  return mapped;
}
export {
defineDbIntegration,
getAstroEnv,
getDbDirectoryUrl,
getRemoteDatabaseInfo,
mapObject,
resolveDbAppToken
};

View File

View File

@@ -0,0 +1,3 @@
export { cli } from './core/cli/index.js';
export { type AstroDBConfig, integration as default } from './core/integration/index.js';
export type { TableConfig } from './core/types.js';

View File

@@ -0,0 +1,6 @@
import { cli } from "./core/cli/index.js";
import { integration } from "./core/integration/index.js";
export {
cli,
integration as default
};

View File

@@ -0,0 +1,4 @@
/** Message: foreign key points at a table missing from the `tables` config. */
export declare const FOREIGN_KEY_DNE_ERROR: (tableName: string) => string;
/** Message: foreign key `columns` and `references` arrays differ in length. */
export declare const FOREIGN_KEY_REFERENCES_LENGTH_ERROR: (tableName: string) => string;
/** Message: foreign key `references` array is empty. */
export declare const FOREIGN_KEY_REFERENCES_EMPTY_ERROR: (tableName: string) => string;
/** Message: a column reference points at a table missing from the config. */
export declare const REFERENCE_DNE_ERROR: (columnName: string) => string;

View File

@@ -0,0 +1,27 @@
import colors from "piccolore";
/**
 * User-facing error message builders for misconfigured foreign keys and
 * references. Each bolds the offending table/column name via piccolore.
 */
function FOREIGN_KEY_DNE_ERROR(tableName) {
  return `Table ${colors.bold(tableName)} references a table that does not exist. Did you apply the referenced table to the \`tables\` object in your db config?`;
}
function FOREIGN_KEY_REFERENCES_LENGTH_ERROR(tableName) {
  return `Foreign key on ${colors.bold(tableName)} is misconfigured. \`columns\` and \`references\` must be the same length.`;
}
function FOREIGN_KEY_REFERENCES_EMPTY_ERROR(tableName) {
  return `Foreign key on ${colors.bold(tableName)} is misconfigured. \`references\` array cannot be empty.`;
}
function REFERENCE_DNE_ERROR(columnName) {
  return `Column ${colors.bold(columnName)} references a table that does not exist. Did you apply the referenced table to the \`tables\` object in your db config?`;
}
export {
FOREIGN_KEY_DNE_ERROR,
FOREIGN_KEY_REFERENCES_EMPTY_ERROR,
FOREIGN_KEY_REFERENCES_LENGTH_ERROR,
REFERENCE_DNE_ERROR
};

View File

@@ -0,0 +1,31 @@
import { type ColumnDataType } from 'drizzle-orm';
import type { LibSQLDatabase } from 'drizzle-orm/libsql';
import type { DBTable } from '../core/types.js';
/** The drizzle database handle type used throughout astro:db. */
export type Database = LibSQLDatabase;
export type { Table } from './types.js';
export { hasPrimaryKey } from './utils.js';
/**
 * Convert a parsed astro:db table definition into a drizzle SQLite table
 * object usable with the query builder.
 */
export declare function asDrizzleTable(name: string, table: DBTable): import("drizzle-orm/sqlite-core").SQLiteTableWithColumns<{
    name: string;
    schema: undefined;
    columns: {
        [x: string]: import("drizzle-orm/sqlite-core").SQLiteColumn<{
            name: string;
            tableName: string;
            dataType: ColumnDataType;
            columnType: string;
            data: unknown;
            driverParam: unknown;
            notNull: false;
            hasDefault: false;
            isPrimaryKey: false;
            isAutoincrement: false;
            hasRuntimeDefault: false;
            enumValues: string[] | undefined;
            baseColumn: never;
            identity: undefined;
            generated: undefined;
        }, {}, {}>;
    };
    dialect: "sqlite";
}>;
/**
 * Normalize a db URL from the environment: `file://` URLs pass through,
 * other values resolve relative to the current working directory, and a
 * missing env value falls back to `defaultDbUrl`.
 */
export declare function normalizeDatabaseUrl(envDbUrl: string | undefined, defaultDbUrl: string): string;

View File

@@ -0,0 +1,121 @@
import { sql } from "drizzle-orm";
import {
customType,
index,
integer,
sqliteTable,
text
} from "drizzle-orm/sqlite-core";
import { isSerializedSQL } from "./types.js";
import { hasPrimaryKey, pathToFileURL } from "./utils.js";
import { hasPrimaryKey as hasPrimaryKey2 } from "./utils.js";
// Matches exactly the `YYYY-MM-DDTHH:MM:SS.mmmZ` shape produced by
// Date#toISOString(). Fixes two regex defects in the original:
// the `.` before the milliseconds was unescaped (matched ANY character,
// so e.g. "2024-01-02T03:04:05x678Z" passed), and the pattern was
// unanchored (a valid timestamp embedded in a longer string passed).
const isISODateString = (str) => /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/.test(str);
// Custom drizzle column type storing JS Dates as ISO-8601 TEXT.
const dateType = customType({
  dataType() {
    return "text";
  },
  toDriver(value) {
    // Always write full ISO strings so round-trips are unambiguous.
    return value.toISOString();
  },
  fromDriver(value) {
    // Legacy rows may lack the trailing "Z"; append it so `new Date`
    // parses the value as UTC rather than local time.
    if (!isISODateString(value)) {
      value += "Z";
    }
    return new Date(value);
  }
});
// Custom drizzle column type storing arbitrary JSON as TEXT.
const jsonType = customType({
  dataType() {
    return "text";
  },
  toDriver(value) {
    return JSON.stringify(value);
  },
  fromDriver(value) {
    return JSON.parse(value);
  }
});
// Convert a parsed astro:db table definition into a drizzle sqliteTable.
// Tables without an explicit primary key get a hidden auto `_id` column.
function asDrizzleTable(name, table) {
  const columns = {};
  if (!Object.entries(table.columns).some(([, column]) => hasPrimaryKey(column))) {
    columns["_id"] = integer("_id").primaryKey();
  }
  for (const [columnName, column] of Object.entries(table.columns)) {
    columns[columnName] = columnMapper(columnName, column);
  }
  const drizzleTable = sqliteTable(name, columns, (ormTable) => {
    const indexes = [];
    for (const [indexName, indexProps] of Object.entries(table.indexes ?? {})) {
      // `on` accepts a single column name or an array; normalize to array.
      const onColNames = Array.isArray(indexProps.on) ? indexProps.on : [indexProps.on];
      const onCols = onColNames.map((colName) => ormTable[colName]);
      // drizzle's `.on(...)` requires at least one column; skip empty indexes.
      if (!atLeastOne(onCols)) continue;
      indexes.push(index(indexName).on(...onCols));
    }
    return indexes;
  });
  return drizzleTable;
}
/** Non-empty check (type-guard in TS) used to satisfy drizzle's variadic `.on(...)`. */
const atLeastOne = (arr) => arr.length > 0;
// Build the drizzle column builder for one astro:db column definition.
// The chaining order (default -> primaryKey -> notNull -> unique) mirrors
// how the schema options are parsed; do not reorder without checking drizzle.
function columnMapper(columnName, column) {
  let c;
  switch (column.type) {
    case "text": {
      // `enum` narrows the allowed text values at the type level.
      c = text(columnName, { enum: column.schema.enum });
      if (column.schema.default !== void 0)
        c = c.default(handleSerializedSQL(column.schema.default));
      if (column.schema.primaryKey === true) c = c.primaryKey();
      break;
    }
    case "number": {
      c = integer(columnName);
      if (column.schema.default !== void 0)
        c = c.default(handleSerializedSQL(column.schema.default));
      if (column.schema.primaryKey === true) c = c.primaryKey();
      break;
    }
    case "boolean": {
      // SQLite has no boolean type; stored as 0/1 integers.
      c = integer(columnName, { mode: "boolean" });
      if (column.schema.default !== void 0)
        c = c.default(handleSerializedSQL(column.schema.default));
      break;
    }
    case "json":
      // JSON defaults are stored as-is (serialized by the custom type).
      c = jsonType(columnName);
      if (column.schema.default !== void 0) c = c.default(column.schema.default);
      break;
    case "date": {
      c = dateType(columnName);
      if (column.schema.default !== void 0) {
        const def = handleSerializedSQL(column.schema.default);
        // String defaults (ISO timestamps) are promoted to Date objects;
        // SQL expressions (e.g. NOW) pass through unchanged.
        c = c.default(typeof def === "string" ? new Date(def) : def);
      }
      break;
    }
  }
  // Columns are NOT NULL unless explicitly marked optional.
  if (!column.schema.optional) c = c.notNull();
  if (column.schema.unique) c = c.unique();
  return c;
}
/** Unwrap a serialized SQL default back into a raw drizzle `sql` expression; pass other values through. */
const handleSerializedSQL = (def) => (isSerializedSQL(def) ? sql.raw(def.sql) : def);
/**
 * Normalize the database URL taken from the environment.
 * - no env value -> use `defaultDbUrl`
 * - `file://` URL -> use as-is
 * - anything else -> resolve as a path relative to the current working directory
 */
function normalizeDatabaseUrl(envDbUrl, defaultDbUrl) {
  if (!envDbUrl) {
    return defaultDbUrl;
  }
  if (envDbUrl.startsWith("file://")) {
    return envDbUrl;
  }
  return new URL(envDbUrl, `${pathToFileURL(process.cwd())}/`).toString();
}
export {
asDrizzleTable,
hasPrimaryKey2 as hasPrimaryKey,
normalizeDatabaseUrl
};

View File

@@ -0,0 +1,92 @@
import type { ColumnBaseConfig, ColumnDataType } from 'drizzle-orm';
import type { SQLiteColumn, SQLiteTableWithColumns } from 'drizzle-orm/sqlite-core';
import type { ColumnsConfig, DBColumn, OutputColumnsConfig } from '../core/types.js';
type GeneratedConfig<T extends ColumnDataType = ColumnDataType> = Pick<ColumnBaseConfig<T, string>, 'name' | 'tableName' | 'notNull' | 'hasDefault' | 'hasRuntimeDefault' | 'isPrimaryKey'>;
type AstroText<T extends GeneratedConfig<'string'>, E extends readonly [string, ...string[]] | string> = SQLiteColumn<T & {
data: E extends readonly (infer U)[] ? U : string;
dataType: 'string';
columnType: 'SQLiteText';
driverParam: string;
enumValues: E extends [string, ...string[]] ? E : never;
baseColumn: never;
isAutoincrement: boolean;
identity: undefined;
generated: undefined;
}>;
type AstroDate<T extends GeneratedConfig<'custom'>> = SQLiteColumn<T & {
data: Date;
dataType: 'custom';
columnType: 'SQLiteCustomColumn';
driverParam: string;
enumValues: never;
baseColumn: never;
isAutoincrement: boolean;
identity: undefined;
generated: undefined;
}>;
type AstroBoolean<T extends GeneratedConfig<'boolean'>> = SQLiteColumn<T & {
data: boolean;
dataType: 'boolean';
columnType: 'SQLiteBoolean';
driverParam: number;
enumValues: never;
baseColumn: never;
isAutoincrement: boolean;
identity: undefined;
generated: undefined;
}>;
type AstroNumber<T extends GeneratedConfig<'number'>> = SQLiteColumn<T & {
data: number;
dataType: 'number';
columnType: 'SQLiteInteger';
driverParam: number;
enumValues: never;
baseColumn: never;
isAutoincrement: boolean;
identity: undefined;
generated: undefined;
}>;
type AstroJson<T extends GeneratedConfig<'custom'>> = SQLiteColumn<T & {
data: unknown;
dataType: 'custom';
columnType: 'SQLiteCustomColumn';
driverParam: string;
enumValues: never;
baseColumn: never;
isAutoincrement: boolean;
identity: undefined;
generated: undefined;
}>;
type Column<T extends DBColumn['type'], E extends readonly [string, ...string[]] | string, S extends GeneratedConfig> = T extends 'boolean' ? AstroBoolean<S> : T extends 'number' ? AstroNumber<S> : T extends 'text' ? AstroText<S, E> : T extends 'date' ? AstroDate<S> : T extends 'json' ? AstroJson<S> : never;
export type Table<TTableName extends string, TColumns extends OutputColumnsConfig | ColumnsConfig> = SQLiteTableWithColumns<{
name: TTableName;
schema: undefined;
dialect: 'sqlite';
columns: {
[K in Extract<keyof TColumns, string>]: Column<TColumns[K]['type'], TColumns[K]['schema'] extends {
enum: infer E;
} ? E extends readonly [string, ...string[]] ? E : string : string, {
tableName: TTableName;
name: K;
isPrimaryKey: TColumns[K]['schema'] extends {
primaryKey: true;
} ? true : false;
hasDefault: TColumns[K]['schema'] extends {
default: NonNullable<unknown>;
} ? true : TColumns[K]['schema'] extends {
primaryKey: true;
} ? true : false;
hasRuntimeDefault: TColumns[K]['schema'] extends {
default: NonNullable<unknown>;
} ? true : false;
notNull: TColumns[K]['schema']['optional'] extends true ? false : true;
}>;
};
}>;
export declare const SERIALIZED_SQL_KEY = "__serializedSQL";
export type SerializedSQL = {
[SERIALIZED_SQL_KEY]: true;
sql: string;
};
export declare function isSerializedSQL(value: any): value is SerializedSQL;
export {};

View File

@@ -0,0 +1,8 @@
// Marker key stamped onto serialized drizzle `sql` expressions.
const SERIALIZED_SQL_KEY = "__serializedSQL";
/** True when `value` is a non-null object carrying the serialized-SQL marker key. */
function isSerializedSQL(value) {
  if (value === null || typeof value !== "object") {
    return false;
  }
  return SERIALIZED_SQL_KEY in value;
}
export {
SERIALIZED_SQL_KEY,
isSerializedSQL
};

View File

@@ -0,0 +1,9 @@
import { LibsqlError } from '@libsql/client';
import { AstroError } from 'astro/errors';
import type { DBColumn } from '../core/types.js';
/** True when a column definition declares a truthy `schema.primaryKey`. */
export declare function hasPrimaryKey(column: DBColumn): boolean;
/** AstroError subclass used to brand database failures. */
export declare class AstroDbError extends AstroError {
    name: string;
}
/** Narrow an unknown error to a libSQL driver error. */
export declare function isDbError(err: unknown): err is LibsqlError;
/** Convert a filesystem path to a `file://` URL (with Windows handling). */
export declare function pathToFileURL(path: string): URL;

View File

@@ -0,0 +1,35 @@
import { LibsqlError } from "@libsql/client";
import { AstroError } from "astro/errors";
/** A column counts as a primary key only when `schema.primaryKey` is present and truthy. */
function hasPrimaryKey(column) {
  const { primaryKey } = column.schema;
  return Boolean(primaryKey);
}
// Windows paths need drive-letter/backslash handling in pathToFileURL below.
const isWindows = process?.platform === "win32";
// Branded AstroError subclass so db failures are identifiable in error output.
class AstroDbError extends AstroError {
  name = "Astro DB Error";
}
/**
 * True for real LibsqlError instances, and for plain Errors flagged with
 * `libsqlError === true` (errors serialized across a process/module boundary).
 */
function isDbError(err) {
  if (err instanceof LibsqlError) {
    return true;
  }
  return err instanceof Error && err.libsqlError === true;
}
/**
 * Convert backslashes to forward slashes, leaving Windows extended-length
 * paths (`\\?\...`) untouched since they must keep their backslashes.
 */
function slash(path) {
  if (path.startsWith("\\\\?\\")) {
    return path;
  }
  return path.replaceAll("\\", "/");
}
// Build a file:// URL from an absolute filesystem path. On Windows,
// backslashes are normalized and a leading "/" is ensured before the path.
// NOTE(review): unlike node:url's pathToFileURL, this does not percent-encode
// characters such as spaces, `#`, or `?` — confirm callers only pass plain paths.
function pathToFileURL(path) {
  if (isWindows) {
    let slashed = slash(path);
    if (!slashed.startsWith("/")) {
      slashed = "/" + slashed;
    }
    return new URL("file://" + slashed);
  }
  return new URL("file://" + path);
}
export {
AstroDbError,
hasPrimaryKey,
isDbError,
pathToFileURL
};

View File

@@ -0,0 +1,112 @@
import { sql as _sql } from "drizzle-orm";
/**
 * Internal factory pairing a column type tag with its schema options.
 * The schema is marked @internal: users interact with it via `defineTable`.
 */
function createColumn(type, schema) {
  return {
    type,
    /**
     * @internal
     */
    schema
  };
}
/** Public column builders exposed from astro:db (`column.text()`, etc.). */
const column = {
  number: (opts = {}) => createColumn("number", opts),
  boolean: (opts = {}) => createColumn("boolean", opts),
  text: (opts = {}) => createColumn("text", opts),
  date: (opts = {}) => createColumn("date", opts),
  json: (opts = {}) => createColumn("json", opts)
};
/** Identity helpers: they only exist to give user configs their types in editors. */
const defineTable = (userConfig) => userConfig;
const defineDb = (userConfig) => userConfig;
// SQL literal expressions usable as column `default` values.
const NOW = _sql`CURRENT_TIMESTAMP`;
const TRUE = _sql`TRUE`;
const FALSE = _sql`FALSE`;
import {
and,
asc,
avg,
avgDistinct,
between,
count,
countDistinct,
desc,
eq,
exists,
gt,
gte,
ilike,
inArray,
isNotNull,
isNull,
like,
lt,
lte,
max,
min,
ne,
not,
notBetween,
notExists,
notIlike,
notInArray,
or,
sql,
sum,
sumDistinct
} from "drizzle-orm";
import { alias } from "drizzle-orm/sqlite-core";
import { isDbError } from "./utils.js";
export {
FALSE,
NOW,
TRUE,
alias,
and,
asc,
avg,
avgDistinct,
between,
column,
count,
countDistinct,
defineDb,
defineTable,
desc,
eq,
exists,
gt,
gte,
ilike,
inArray,
isDbError,
isNotNull,
isNull,
like,
lt,
lte,
max,
min,
ne,
not,
notBetween,
notExists,
notIlike,
notInArray,
or,
sql,
sum,
sumDistinct
};

View File

@@ -0,0 +1,4 @@
export { defineDbIntegration } from './core/utils.js';
import type { ColumnsConfig, TableConfig } from './core/types.js';
import { type Table } from './runtime/index.js';
export declare function asDrizzleTable<TableName extends string = string, TColumns extends ColumnsConfig = ColumnsConfig>(name: TableName, tableConfig: TableConfig<TColumns>): Table<TableName, TColumns>;

View File

@@ -0,0 +1,10 @@
import { defineDbIntegration } from "./core/utils.js";
import { tableSchema } from "./core/schemas.js";
import { asDrizzleTable as internal_asDrizzleTable } from "./runtime/index.js";
// Public wrapper: validate the user's table config with zod, then hand the
// parsed definition to the runtime converter.
function asDrizzleTable(name, tableConfig) {
  return internal_asDrizzleTable(name, tableSchema.parse(tableConfig));
}
export {
asDrizzleTable,
defineDbIntegration
};

View File

@@ -0,0 +1,3 @@
import './virtual.js';
export { default, cli } from './dist/index.js';

View File

@@ -0,0 +1 @@
../nanoid/bin/nanoid.js

View File

@@ -0,0 +1,44 @@
<div align="center">
<img src="./misc/readme/logo-github-sq-dark.svg#gh-dark-mode-only" />
<img src="./misc/readme/logo-github-sq-light.svg#gh-light-mode-only" />
</div>
<br/>
<div align="center">
<h3>Headless ORM for NodeJS, TypeScript and JavaScript 🚀</h3>
<a href="https://orm.drizzle.team">Website</a> •
<a href="https://orm.drizzle.team/docs/overview">Documentation</a> •
<a href="https://x.com/drizzleorm">Twitter</a> •
<a href="https://driz.link/discord">Discord</a>
</div>
<br/>
<br/>
### What's Drizzle?
Drizzle is a modern TypeScript ORM developers [wanna use in their next project](https://stateofdb.com/tools/drizzle).
It is [lightweight](https://bundlephobia.com/package/drizzle-orm) at only ~7.4kb minified+gzipped, and it's tree shakeable with exactly 0 dependencies.
**Drizzle supports every PostgreSQL, MySQL and SQLite database**, including serverless ones like [Turso](https://orm.drizzle.team/docs/get-started-sqlite#turso), [Neon](https://orm.drizzle.team/docs/get-started-postgresql#neon), [Xata](https://xata.io), [PlanetScale](https://orm.drizzle.team/docs/get-started-mysql#planetscale), [Cloudflare D1](https://orm.drizzle.team/docs/get-started-sqlite#cloudflare-d1), [FlyIO LiteFS](https://fly.io/docs/litefs/), [Vercel Postgres](https://orm.drizzle.team/docs/get-started-postgresql#vercel-postgres), [Supabase](https://orm.drizzle.team/docs/get-started-postgresql#supabase) and [AWS Data API](https://orm.drizzle.team/docs/get-started-postgresql#aws-data-api). No bells and whistles, no Rust binaries, no serverless adapters, everything just works out of the box.
**Drizzle is serverless-ready by design**. It works in every major JavaScript runtime like NodeJS, Bun, Deno, Cloudflare Workers, Supabase functions, any Edge runtime, and even in browsers.
With Drizzle you can be [**fast out of the box**](https://orm.drizzle.team/benchmarks) and save time and costs while never introducing any data proxies into your infrastructure.
While you can use Drizzle as a JavaScript library, it shines with TypeScript. It lets you [**declare SQL schemas**](https://orm.drizzle.team/docs/sql-schema-declaration) and build both [**relational**](https://orm.drizzle.team/docs/rqb) and [**SQL-like queries**](https://orm.drizzle.team/docs/select), while keeping the balance between type-safety and extensibility for toolmakers to build on top.
### Ecosystem
While Drizzle ORM remains a thin typed layer on top of SQL, we made a set of tools for people to have best possible developer experience.
Drizzle comes with a powerful [**Drizzle Kit**](https://orm.drizzle.team/kit-docs/overview) CLI companion for you to have hassle-free migrations. It can generate SQL migration files for you or apply schema changes directly to the database.
We also have [**Drizzle Studio**](https://orm.drizzle.team/drizzle-studio/overview) for you to effortlessly browse and manipulate data in your database of choice.
### Documentation
Check out the full documentation on [the website](https://orm.drizzle.team/docs/overview).
### Our sponsors ❤️
<p align="center">
<a href="https://drizzle.team" target="_blank">
<img src='https://api.drizzle.team/v2/sponsors/svg'/>
</a>
</p>

View File

@@ -0,0 +1,144 @@
"use strict";
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __export = (target, all) => {
for (var name in all)
__defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
if (from && typeof from === "object" || typeof from === "function") {
for (let key of __getOwnPropNames(from))
if (!__hasOwnProp.call(to, key) && key !== except)
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
}
return to;
};
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
var alias_exports = {};
__export(alias_exports, {
ColumnAliasProxyHandler: () => ColumnAliasProxyHandler,
RelationTableAliasProxyHandler: () => RelationTableAliasProxyHandler,
TableAliasProxyHandler: () => TableAliasProxyHandler,
aliasedRelation: () => aliasedRelation,
aliasedTable: () => aliasedTable,
aliasedTableColumn: () => aliasedTableColumn,
mapColumnsInAliasedSQLToAlias: () => mapColumnsInAliasedSQLToAlias,
mapColumnsInSQLToAlias: () => mapColumnsInSQLToAlias
});
module.exports = __toCommonJS(alias_exports);
var import_column = require("./column.cjs");
var import_entity = require("./entity.cjs");
var import_sql = require("./sql/sql.cjs");
var import_table = require("./table.cjs");
var import_view_common = require("./view-common.cjs");
// Proxy handler that rebinds a column's `table` property to an aliased table,
// so SQL generation emits the alias name instead of the original table name.
class ColumnAliasProxyHandler {
  constructor(table) {
    this.table = table;
  }
  static [import_entity.entityKind] = "ColumnAliasProxyHandler";
  get(columnObj, prop) {
    if (prop === "table") {
      return this.table;
    }
    return columnObj[prop];
  }
}
// Proxy handler that presents a Table (or View) under an alias name.
// Intercepts drizzle's Table symbols so name lookups yield the alias, and
// wraps each column in a ColumnAliasProxyHandler pointing back at the
// aliased table proxy.
class TableAliasProxyHandler {
  constructor(alias, replaceOriginalName) {
    this.alias = alias;
    this.replaceOriginalName = replaceOriginalName;
  }
  static [import_entity.entityKind] = "TableAliasProxyHandler";
  get(target, prop) {
    if (prop === import_table.Table.Symbol.IsAlias) {
      return true;
    }
    if (prop === import_table.Table.Symbol.Name) {
      return this.alias;
    }
    // Optionally mask the original name too (used by relational queries).
    if (this.replaceOriginalName && prop === import_table.Table.Symbol.OriginalName) {
      return this.alias;
    }
    if (prop === import_view_common.ViewBaseConfig) {
      return {
        ...target[import_view_common.ViewBaseConfig],
        name: this.alias,
        isAlias: true
      };
    }
    if (prop === import_table.Table.Symbol.Columns) {
      const columns = target[import_table.Table.Symbol.Columns];
      if (!columns) {
        return columns;
      }
      const proxiedColumns = {};
      // forEach, not map: the loop is executed purely for its side effects.
      Object.keys(columns).forEach((key) => {
        proxiedColumns[key] = new Proxy(
          columns[key],
          new ColumnAliasProxyHandler(new Proxy(target, this))
        );
      });
      return proxiedColumns;
    }
    const value = target[prop];
    if ((0, import_entity.is)(value, import_column.Column)) {
      return new Proxy(value, new ColumnAliasProxyHandler(new Proxy(target, this)));
    }
    return value;
  }
}
// Proxy handler that rewrites a Relation's `sourceTable` to an aliased table.
class RelationTableAliasProxyHandler {
  constructor(alias) {
    this.alias = alias;
  }
  static [import_entity.entityKind] = "RelationTableAliasProxyHandler";
  get(target, prop) {
    if (prop === "sourceTable") {
      return aliasedTable(target.sourceTable, this.alias);
    }
    return target[prop];
  }
}
// Wrap a table in an alias proxy (original name preserved).
function aliasedTable(table, tableAlias) {
  return new Proxy(table, new TableAliasProxyHandler(tableAlias, false));
}
// Wrap a relation so its source table is aliased.
function aliasedRelation(relation, tableAlias) {
  return new Proxy(relation, new RelationTableAliasProxyHandler(tableAlias));
}
// Wrap a single column so it reports an aliased parent table.
function aliasedTableColumn(column, tableAlias) {
  return new Proxy(
    column,
    new ColumnAliasProxyHandler(new Proxy(column.table, new TableAliasProxyHandler(tableAlias, false)))
  );
}
// Rewrite every column inside an aliased SQL fragment to use the table alias.
function mapColumnsInAliasedSQLToAlias(query, alias) {
  return new import_sql.SQL.Aliased(mapColumnsInSQLToAlias(query.sql, alias), query.fieldAlias);
}
// Recursively walk a SQL fragment's chunks, aliasing columns and nested SQL.
function mapColumnsInSQLToAlias(query, alias) {
  return import_sql.sql.join(query.queryChunks.map((c) => {
    if ((0, import_entity.is)(c, import_column.Column)) {
      return aliasedTableColumn(c, alias);
    }
    if ((0, import_entity.is)(c, import_sql.SQL)) {
      return mapColumnsInSQLToAlias(c, alias);
    }
    if ((0, import_entity.is)(c, import_sql.SQL.Aliased)) {
      return mapColumnsInAliasedSQLToAlias(c, alias);
    }
    return c;
  }));
}
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
ColumnAliasProxyHandler,
RelationTableAliasProxyHandler,
TableAliasProxyHandler,
aliasedRelation,
aliasedTable,
aliasedTableColumn,
mapColumnsInAliasedSQLToAlias,
mapColumnsInSQLToAlias
});
//# sourceMappingURL=alias.cjs.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,31 @@
import type { AnyColumn } from "./column.cjs";
import { Column } from "./column.cjs";
import { entityKind } from "./entity.cjs";
import type { Relation } from "./relations.cjs";
import type { View } from "./sql/sql.cjs";
import { SQL } from "./sql/sql.cjs";
import { Table } from "./table.cjs";
/** Proxy handler that rebinds a column's parent `table` to an aliased table. */
export declare class ColumnAliasProxyHandler<TColumn extends Column> implements ProxyHandler<TColumn> {
    private table;
    static readonly [entityKind]: string;
    constructor(table: Table | View);
    get(columnObj: TColumn, prop: string | symbol): any;
}
/** Proxy handler presenting a Table/View under an alias name. */
export declare class TableAliasProxyHandler<T extends Table | View> implements ProxyHandler<T> {
    private alias;
    private replaceOriginalName;
    static readonly [entityKind]: string;
    constructor(alias: string, replaceOriginalName: boolean);
    get(target: T, prop: string | symbol): any;
}
/** Proxy handler that aliases a Relation's source table. */
export declare class RelationTableAliasProxyHandler<T extends Relation> implements ProxyHandler<T> {
    private alias;
    static readonly [entityKind]: string;
    constructor(alias: string);
    get(target: T, prop: string | symbol): any;
}
/** Wrap a table/view in an alias proxy. */
export declare function aliasedTable<T extends Table | View>(table: T, tableAlias: string): T;
/** Wrap a relation so its source table is aliased. */
export declare function aliasedRelation<T extends Relation>(relation: T, tableAlias: string): T;
/** Wrap a column so it reports an aliased parent table. */
export declare function aliasedTableColumn<T extends AnyColumn>(column: T, tableAlias: string): T;
/** Alias all columns inside an aliased SQL fragment. */
export declare function mapColumnsInAliasedSQLToAlias(query: SQL.Aliased, alias: string): SQL.Aliased;
/** Recursively alias columns inside a SQL fragment. */
export declare function mapColumnsInSQLToAlias(query: SQL, alias: string): SQL;

View File

@@ -0,0 +1,31 @@
import type { AnyColumn } from "./column.js";
import { Column } from "./column.js";
import { entityKind } from "./entity.js";
import type { Relation } from "./relations.js";
import type { View } from "./sql/sql.js";
import { SQL } from "./sql/sql.js";
import { Table } from "./table.js";
/** Proxy handler that rebinds a column's parent `table` to an aliased table. */
export declare class ColumnAliasProxyHandler<TColumn extends Column> implements ProxyHandler<TColumn> {
    private table;
    static readonly [entityKind]: string;
    constructor(table: Table | View);
    get(columnObj: TColumn, prop: string | symbol): any;
}
/** Proxy handler presenting a Table/View under an alias name. */
export declare class TableAliasProxyHandler<T extends Table | View> implements ProxyHandler<T> {
    private alias;
    private replaceOriginalName;
    static readonly [entityKind]: string;
    constructor(alias: string, replaceOriginalName: boolean);
    get(target: T, prop: string | symbol): any;
}
/** Proxy handler that aliases a Relation's source table. */
export declare class RelationTableAliasProxyHandler<T extends Relation> implements ProxyHandler<T> {
    private alias;
    static readonly [entityKind]: string;
    constructor(alias: string);
    get(target: T, prop: string | symbol): any;
}
/** Wrap a table/view in an alias proxy. */
export declare function aliasedTable<T extends Table | View>(table: T, tableAlias: string): T;
/** Wrap a relation so its source table is aliased. */
export declare function aliasedRelation<T extends Relation>(relation: T, tableAlias: string): T;
/** Wrap a column so it reports an aliased parent table. */
export declare function aliasedTableColumn<T extends AnyColumn>(column: T, tableAlias: string): T;
/** Alias all columns inside an aliased SQL fragment. */
export declare function mapColumnsInAliasedSQLToAlias(query: SQL.Aliased, alias: string): SQL.Aliased;
/** Recursively alias columns inside a SQL fragment. */
export declare function mapColumnsInSQLToAlias(query: SQL, alias: string): SQL;

View File

@@ -0,0 +1,113 @@
import { Column } from "./column.js";
import { entityKind, is } from "./entity.js";
import { SQL, sql } from "./sql/sql.js";
import { Table } from "./table.js";
import { ViewBaseConfig } from "./view-common.js";
// Proxy handler that rebinds a column's `table` property to an aliased table,
// so SQL generation emits the alias name instead of the original table name.
class ColumnAliasProxyHandler {
  constructor(table) {
    this.table = table;
  }
  static [entityKind] = "ColumnAliasProxyHandler";
  get(columnObj, prop) {
    if (prop === "table") {
      return this.table;
    }
    return columnObj[prop];
  }
}
// Proxy handler that presents a Table (or View) under an alias name.
// Intercepts drizzle's Table symbols so name lookups yield the alias, and
// wraps each column in a ColumnAliasProxyHandler pointing back at the
// aliased table proxy.
class TableAliasProxyHandler {
  constructor(alias, replaceOriginalName) {
    this.alias = alias;
    this.replaceOriginalName = replaceOriginalName;
  }
  static [entityKind] = "TableAliasProxyHandler";
  get(target, prop) {
    if (prop === Table.Symbol.IsAlias) {
      return true;
    }
    if (prop === Table.Symbol.Name) {
      return this.alias;
    }
    // Optionally mask the original name too (used by relational queries).
    if (this.replaceOriginalName && prop === Table.Symbol.OriginalName) {
      return this.alias;
    }
    if (prop === ViewBaseConfig) {
      return {
        ...target[ViewBaseConfig],
        name: this.alias,
        isAlias: true
      };
    }
    if (prop === Table.Symbol.Columns) {
      const columns = target[Table.Symbol.Columns];
      if (!columns) {
        return columns;
      }
      const proxiedColumns = {};
      // forEach, not map: the loop is executed purely for its side effects.
      Object.keys(columns).forEach((key) => {
        proxiedColumns[key] = new Proxy(
          columns[key],
          new ColumnAliasProxyHandler(new Proxy(target, this))
        );
      });
      return proxiedColumns;
    }
    const value = target[prop];
    if (is(value, Column)) {
      return new Proxy(value, new ColumnAliasProxyHandler(new Proxy(target, this)));
    }
    return value;
  }
}
// Proxy handler that rewrites a Relation's `sourceTable` to an aliased table.
class RelationTableAliasProxyHandler {
  constructor(alias) {
    this.alias = alias;
  }
  static [entityKind] = "RelationTableAliasProxyHandler";
  get(target, prop) {
    if (prop === "sourceTable") {
      return aliasedTable(target.sourceTable, this.alias);
    }
    return target[prop];
  }
}
// Wrap a table in an alias proxy (original name preserved).
function aliasedTable(table, tableAlias) {
  return new Proxy(table, new TableAliasProxyHandler(tableAlias, false));
}
// Wrap a relation so its source table is aliased.
function aliasedRelation(relation, tableAlias) {
  return new Proxy(relation, new RelationTableAliasProxyHandler(tableAlias));
}
// Wrap a single column so it reports an aliased parent table.
function aliasedTableColumn(column, tableAlias) {
  return new Proxy(
    column,
    new ColumnAliasProxyHandler(new Proxy(column.table, new TableAliasProxyHandler(tableAlias, false)))
  );
}
// Rewrite every column inside an aliased SQL fragment to use the table alias.
function mapColumnsInAliasedSQLToAlias(query, alias) {
  return new SQL.Aliased(mapColumnsInSQLToAlias(query.sql, alias), query.fieldAlias);
}
// Recursively walk a SQL fragment's chunks, aliasing columns and nested SQL.
function mapColumnsInSQLToAlias(query, alias) {
  return sql.join(query.queryChunks.map((c) => {
    if (is(c, Column)) {
      return aliasedTableColumn(c, alias);
    }
    if (is(c, SQL)) {
      return mapColumnsInSQLToAlias(c, alias);
    }
    if (is(c, SQL.Aliased)) {
      return mapColumnsInAliasedSQLToAlias(c, alias);
    }
    return c;
  }));
}
export {
ColumnAliasProxyHandler,
RelationTableAliasProxyHandler,
TableAliasProxyHandler,
aliasedRelation,
aliasedTable,
aliasedTableColumn,
mapColumnsInAliasedSQLToAlias,
mapColumnsInSQLToAlias
};
//# sourceMappingURL=alias.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,119 @@
"use strict";
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __export = (target, all) => {
for (var name in all)
__defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
if (from && typeof from === "object" || typeof from === "function") {
for (let key of __getOwnPropNames(from))
if (!__hasOwnProp.call(to, key) && key !== except)
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
}
return to;
};
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
var common_exports = {};
__export(common_exports, {
getValueFromDataApi: () => getValueFromDataApi,
toValueParam: () => toValueParam,
typingsToAwsTypeHint: () => typingsToAwsTypeHint
});
module.exports = __toCommonJS(common_exports);
var import_client_rds_data = require("@aws-sdk/client-rds-data");
/**
 * Extract the concrete JS value from an AWS RDS Data API `Field` union.
 * Exactly one member of the union is expected to be set; the first set
 * member (in the order below) wins, matching the original check order.
 * Throws for unrecognized field/array shapes.
 */
function getValueFromDataApi(field) {
  if (field.stringValue !== undefined) return field.stringValue;
  if (field.booleanValue !== undefined) return field.booleanValue;
  if (field.doubleValue !== undefined) return field.doubleValue;
  if (field.isNull !== undefined) return null;
  if (field.longValue !== undefined) return field.longValue;
  if (field.blobValue !== undefined) return field.blobValue;
  if (field.arrayValue !== undefined) {
    const arr = field.arrayValue;
    if (arr.stringValues !== undefined) return arr.stringValues;
    if (arr.longValues !== undefined) return arr.longValues;
    if (arr.doubleValues !== undefined) return arr.doubleValues;
    if (arr.booleanValues !== undefined) return arr.booleanValues;
    if (arr.arrayValues !== undefined) return arr.arrayValues;
    throw new Error("Unknown array type");
  }
  throw new Error("Unknown type");
}
/**
 * Translate a drizzle query-typing tag into the matching AWS RDS Data API
 * TypeHint, or undefined when no hint applies.
 */
function typingsToAwsTypeHint(typings) {
  switch (typings) {
    case "date":
      return import_client_rds_data.TypeHint.DATE;
    case "decimal":
      return import_client_rds_data.TypeHint.DECIMAL;
    case "json":
      return import_client_rds_data.TypeHint.JSON;
    case "time":
      return import_client_rds_data.TypeHint.TIME;
    case "timestamp":
      return import_client_rds_data.TypeHint.TIMESTAMP;
    case "uuid":
      return import_client_rds_data.TypeHint.UUID;
    default:
      return void 0;
  }
}
// Convert a JS value (plus optional typing hint) into the RDS Data API's
// { value, typeHint } parameter shape. Throws for unsupported value types.
function toValueParam(value, typings) {
  const response = {
    value: {},
    typeHint: typingsToAwsTypeHint(typings)
  };
  if (value === null) {
    response.value = { isNull: true };
  } else if (typeof value === "string") {
    switch (response.typeHint) {
      case import_client_rds_data.TypeHint.DATE: {
        // DATE hint: keep only the calendar-date portion of an ISO string.
        response.value = { stringValue: value.split("T")[0] };
        break;
      }
      case import_client_rds_data.TypeHint.TIMESTAMP: {
        // TIMESTAMP hint: RDS expects "YYYY-MM-DD HH:MM:SS" without T/Z.
        response.value = { stringValue: value.replace("T", " ").replace("Z", "") };
        break;
      }
      default: {
        response.value = { stringValue: value };
        break;
      }
    }
  } else if (typeof value === "number" && Number.isInteger(value)) {
    response.value = { longValue: value };
  } else if (typeof value === "number" && !Number.isInteger(value)) {
    response.value = { doubleValue: value };
  } else if (typeof value === "boolean") {
    response.value = { booleanValue: value };
  } else if (value instanceof Date) {
    // Dates are serialized to the same "YYYY-MM-DD HH:MM:SS.mmm" form.
    response.value = { stringValue: value.toISOString().replace("T", " ").replace("Z", "") };
  } else {
    throw new Error(`Unknown type for ${value}`);
  }
  return response;
}
// Annotate the CommonJS export names for ESM import in node:
// (intentionally dead code — `0 && ...` never executes, but Node's
// cjs-module-lexer parses it statically so named ESM imports resolve)
0 && (module.exports = {
  getValueFromDataApi,
  toValueParam,
  typingsToAwsTypeHint
});
//# sourceMappingURL=index.cjs.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,9 @@
import type { Field } from '@aws-sdk/client-rds-data';
import { TypeHint } from '@aws-sdk/client-rds-data';
import type { QueryTypingsValue } from "../../sql/sql.cjs";
/** Unwraps an AWS Data API `Field` union into the plain JS value it carries. */
export declare function getValueFromDataApi(field: Field): string | number | boolean | string[] | number[] | Uint8Array | boolean[] | import("@aws-sdk/client-rds-data").ArrayValue[] | null;
/** Maps a drizzle query typings tag to the matching AWS `TypeHint`, if one applies. */
export declare function typingsToAwsTypeHint(typings?: QueryTypingsValue): TypeHint | undefined;
/** Builds the `value`/`typeHint` pair for an AWS Data API SqlParameter. */
export declare function toValueParam(value: any, typings?: QueryTypingsValue): {
    value: Field;
    typeHint?: TypeHint;
};

View File

@@ -0,0 +1,9 @@
import type { Field } from '@aws-sdk/client-rds-data';
import { TypeHint } from '@aws-sdk/client-rds-data';
import type { QueryTypingsValue } from "../../sql/sql.js";
/** Unwraps an AWS Data API `Field` union into the plain JS value it carries. */
export declare function getValueFromDataApi(field: Field): string | number | boolean | string[] | number[] | Uint8Array | boolean[] | import("@aws-sdk/client-rds-data").ArrayValue[] | null;
/** Maps a drizzle query typings tag to the matching AWS `TypeHint`, if one applies. */
export declare function typingsToAwsTypeHint(typings?: QueryTypingsValue): TypeHint | undefined;
/** Builds the `value`/`typeHint` pair for an AWS Data API SqlParameter. */
export declare function toValueParam(value: any, typings?: QueryTypingsValue): {
    value: Field;
    typeHint?: TypeHint;
};

View File

@@ -0,0 +1,93 @@
import { TypeHint } from "@aws-sdk/client-rds-data";
/**
 * Unwraps an AWS Data API `Field` union into the plain JS value it carries.
 * Exactly one member of the union is expected to be set; scalar members are
 * probed first, then the typed-array variants. Throws when no known member
 * is present.
 */
function getValueFromDataApi(field) {
  if (field.stringValue !== void 0) return field.stringValue;
  if (field.booleanValue !== void 0) return field.booleanValue;
  if (field.doubleValue !== void 0) return field.doubleValue;
  // isNull being present (regardless of its boolean value) means SQL NULL.
  if (field.isNull !== void 0) return null;
  if (field.longValue !== void 0) return field.longValue;
  if (field.blobValue !== void 0) return field.blobValue;
  if (field.arrayValue !== void 0) {
    const arr = field.arrayValue;
    // Probe each typed-array member in the same precedence order as the
    // original implementation.
    for (const key of ["stringValues", "longValues", "doubleValues", "booleanValues", "arrayValues"]) {
      if (arr[key] !== void 0) return arr[key];
    }
    throw new Error("Unknown array type");
  }
  throw new Error("Unknown type");
}
/**
 * Maps a drizzle query typings tag to the matching AWS Data API TypeHint,
 * or undefined when no hint applies (plain strings/numbers need none).
 */
function typingsToAwsTypeHint(typings) {
  switch (typings) {
    case "date":
      return TypeHint.DATE;
    case "decimal":
      return TypeHint.DECIMAL;
    case "json":
      return TypeHint.JSON;
    case "time":
      return TypeHint.TIME;
    case "timestamp":
      return TypeHint.TIMESTAMP;
    case "uuid":
      return TypeHint.UUID;
    default:
      return void 0;
  }
}
/**
 * Converts a JS value into the `value`/`typeHint` pair expected by an AWS
 * Data API SqlParameter. Strings are reformatted for DATE/TIMESTAMP hints;
 * integers map to longValue, other numbers to doubleValue. Throws for values
 * with no Data API representation.
 */
function toValueParam(value, typings) {
  const typeHint = typingsToAwsTypeHint(typings);
  const response = { value: {}, typeHint };
  if (value === null) {
    response.value = { isNull: true };
  } else if (typeof value === "string") {
    if (typeHint === TypeHint.DATE) {
      // DATE keeps only the calendar part of an ISO string.
      response.value = { stringValue: value.split("T")[0] };
    } else if (typeHint === TypeHint.TIMESTAMP) {
      // TIMESTAMP is sent as "YYYY-MM-DD hh:mm:ss.sss" (no T/Z markers).
      response.value = { stringValue: value.replace("T", " ").replace("Z", "") };
    } else {
      response.value = { stringValue: value };
    }
  } else if (typeof value === "number") {
    response.value = Number.isInteger(value) ? { longValue: value } : { doubleValue: value };
  } else if (typeof value === "boolean") {
    response.value = { booleanValue: value };
  } else if (value instanceof Date) {
    response.value = { stringValue: value.toISOString().replace("T", " ").replace("Z", "") };
  } else {
    throw new Error(`Unknown type for ${value}`);
  }
  return response;
}
export {
getValueFromDataApi,
toValueParam,
typingsToAwsTypeHint
};
//# sourceMappingURL=index.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,122 @@
"use strict";
// esbuild-generated CommonJS interop helpers (do not hand-edit semantics).
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Defines each entry of `all` on `target` as an enumerable live getter.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copies enumerable own properties of `from` onto `to` as live getters,
// skipping `except` and any key `to` already defines.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Marks the exports object as an ES-module namespace for interop consumers.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
var driver_exports = {};
// Public surface of this module.
__export(driver_exports, {
  AwsDataApiPgDatabase: () => AwsDataApiPgDatabase,
  AwsPgDialect: () => AwsPgDialect,
  drizzle: () => drizzle
});
module.exports = __toCommonJS(driver_exports);
var import_client_rds_data = require("@aws-sdk/client-rds-data");
var import_entity = require("../../entity.cjs");
var import_logger = require("../../logger.cjs");
var import_db = require("../../pg-core/db.cjs");
var import_dialect = require("../../pg-core/dialect.cjs");
var import_pg_core = require("../../pg-core/index.cjs");
var import_relations = require("../../relations.cjs");
var import_sql = require("../../sql/sql.cjs");
var import_table = require("../../table.cjs");
var import_session = require("./session.cjs");
/**
 * drizzle database entry point backed by the AWS RDS Data API.
 * Thin specialization of PgDatabase; `execute` is re-declared only so the
 * declaration files can narrow the result type — runtime behavior is the
 * parent's (see the matching .d.cts signature).
 */
class AwsDataApiPgDatabase extends import_db.PgDatabase {
  static [import_entity.entityKind] = "AwsDataApiPgDatabase";
  execute(query) {
    const result = super.execute(query);
    return result;
  }
}
/**
 * Postgres dialect tweaks for the AWS Data API transport: parameters are
 * named (`:1`, `:2`, ...) instead of `$n`, and array-valued parameters are
 * wrapped in an explicit `cast(... as <type>)` because the Data API cannot
 * infer Postgres array types on its own.
 */
class AwsPgDialect extends import_dialect.PgDialect {
  static [import_entity.entityKind] = "AwsPgDialect";
  /** Data API placeholders are named and 1-based. */
  escapeParam(num) {
    return `:${num + 1}`;
  }
  buildInsertQuery({ table, values, onConflict, returning, select, withList }) {
    const cols = table[import_table.Table.Symbol.Columns];
    if (!select) {
      for (const row of values) {
        for (const name of Object.keys(cols)) {
          const bound = row[name];
          // Only bound Params holding a real JS array for a PgArray column
          // need the explicit cast.
          const needsCast = import_entity.is(bound, import_sql.Param) && bound.value !== void 0 && import_entity.is(bound.encoder, import_pg_core.PgArray) && Array.isArray(bound.value);
          if (needsCast) {
            row[name] = import_sql.sql`cast(${bound} as ${import_sql.sql.raw(bound.encoder.getSQLType())})`;
          }
        }
      }
    }
    return super.buildInsertQuery({ table, values, onConflict, returning, withList });
  }
  /** Same array-cast treatment for `UPDATE ... SET` values. */
  buildUpdateSet(table, set) {
    const cols = table[import_table.Table.Symbol.Columns];
    for (const [name, bound] of Object.entries(set)) {
      const needsCast = cols[name] && import_entity.is(bound, import_sql.Param) && bound.value !== void 0 && import_entity.is(bound.encoder, import_pg_core.PgArray) && Array.isArray(bound.value);
      if (needsCast) {
        set[name] = import_sql.sql`cast(${bound} as ${import_sql.sql.raw(bound.encoder.getSQLType())})`;
      }
    }
    return super.buildUpdateSet(table, set);
  }
}
/**
 * Shared constructor: wires a Data API client into a drizzle database —
 * dialect, optional logger, optional relational schema, then the
 * session/database pair.
 */
function construct(client, config) {
  const dialect = new AwsPgDialect({ casing: config.casing });
  // `logger: true` selects the default console logger; `false`/absent
  // disables logging; anything else is used as the logger itself.
  const logger = config.logger === true ? new import_logger.DefaultLogger() : config.logger === false ? void 0 : config.logger;
  let schema;
  if (config.schema) {
    const relational = import_relations.extractTablesRelationalConfig(
      config.schema,
      import_relations.createTableRelationsHelpers
    );
    schema = {
      fullSchema: config.schema,
      schema: relational.tables,
      tableNamesMap: relational.tableNamesMap
    };
  }
  const session = new import_session.AwsDataApiSession(client, dialect, schema, { ...config, logger }, void 0);
  const db = new AwsDataApiPgDatabase(dialect, session, schema);
  db.$client = client;
  return db;
}
/**
 * Public factory. Accepts either the positional `(client, config)` form or
 * a single config object carrying `client` or raw `connection` settings.
 */
function drizzle(...params) {
  const [first, second] = params;
  // Positional form: an RDSDataClient (or any non-plain-object client).
  if (first instanceof import_client_rds_data.RDSDataClient || first.constructor.name !== "Object") {
    return construct(first, second);
  }
  // Config form with a pre-built client.
  if (first.client) {
    const { client, ...rest } = first;
    return construct(client, rest);
  }
  // Config form with raw connection settings: build the client ourselves.
  const { connection, ...rest } = first;
  const { resourceArn, database, secretArn, ...clientConfig } = connection;
  const instance = new import_client_rds_data.RDSDataClient(clientConfig);
  return construct(instance, { resourceArn, database, secretArn, ...rest });
}
// Namespace merge: attaches `drizzle.mock()`, which builds a database with
// a stub ({}) client for schema-only / offline use.
((ns) => {
  function mock(config) {
    return construct({}, config);
  }
  ns.mock = mock;
})(drizzle || (drizzle = {}));
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
AwsDataApiPgDatabase,
AwsPgDialect,
drizzle
});
//# sourceMappingURL=driver.cjs.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,48 @@
import { RDSDataClient, type RDSDataClientConfig } from '@aws-sdk/client-rds-data';
import { entityKind } from "../../entity.cjs";
import type { Logger } from "../../logger.cjs";
import { PgDatabase } from "../../pg-core/db.cjs";
import { PgDialect } from "../../pg-core/dialect.cjs";
import type { PgInsertConfig, PgTable, TableConfig } from "../../pg-core/index.cjs";
import type { PgRaw } from "../../pg-core/query-builders/raw.cjs";
import { type SQL, type SQLWrapper } from "../../sql/sql.cjs";
import type { DrizzleConfig, UpdateSet } from "../../utils.cjs";
import type { AwsDataApiClient, AwsDataApiPgQueryResult, AwsDataApiPgQueryResultHKT } from "./session.cjs";
/** Connection/driver settings for the AWS Data API transport. */
export interface PgDriverOptions {
    logger?: Logger;
    database: string;
    resourceArn: string;
    secretArn: string;
}
/** drizzle config extended with the mandatory Data API connection triple. */
export interface DrizzleAwsDataApiPgConfig<TSchema extends Record<string, unknown> = Record<string, never>> extends DrizzleConfig<TSchema> {
    database: string;
    resourceArn: string;
    secretArn: string;
}
/** drizzle database backed by the AWS RDS Data API. */
export declare class AwsDataApiPgDatabase<TSchema extends Record<string, unknown> = Record<string, never>> extends PgDatabase<AwsDataApiPgQueryResultHKT, TSchema> {
    static readonly [entityKind]: string;
    execute<TRow extends Record<string, unknown> = Record<string, unknown>>(query: SQLWrapper | string): PgRaw<AwsDataApiPgQueryResult<TRow>>;
}
/** Pg dialect variant with named (`:n`) params and explicit array casts. */
export declare class AwsPgDialect extends PgDialect {
    static readonly [entityKind]: string;
    escapeParam(num: number): string;
    buildInsertQuery({ table, values, onConflict, returning, select, withList }: PgInsertConfig<PgTable<TableConfig>>): SQL<unknown>;
    buildUpdateSet(table: PgTable<TableConfig>, set: UpdateSet): SQL<unknown>;
}
/** Factory: `(client, config)` or a single config with `client`/`connection`. */
export declare function drizzle<TSchema extends Record<string, unknown> = Record<string, never>, TClient extends AwsDataApiClient = RDSDataClient>(...params: [
    TClient,
    DrizzleAwsDataApiPgConfig<TSchema>
] | [
    ((DrizzleConfig<TSchema> & {
        connection: RDSDataClientConfig & Omit<DrizzleAwsDataApiPgConfig, keyof DrizzleConfig>;
    }) | (DrizzleAwsDataApiPgConfig<TSchema> & {
        client: TClient;
    }))
]): AwsDataApiPgDatabase<TSchema> & {
    $client: TClient;
};
export declare namespace drizzle {
    /** Builds a database with a stub client for schema-only / offline use. */
    function mock<TSchema extends Record<string, unknown> = Record<string, never>>(config: DrizzleAwsDataApiPgConfig<TSchema>): AwsDataApiPgDatabase<TSchema> & {
        $client: '$client is not available on drizzle.mock()';
    };
}

View File

@@ -0,0 +1,48 @@
import { RDSDataClient, type RDSDataClientConfig } from '@aws-sdk/client-rds-data';
import { entityKind } from "../../entity.js";
import type { Logger } from "../../logger.js";
import { PgDatabase } from "../../pg-core/db.js";
import { PgDialect } from "../../pg-core/dialect.js";
import type { PgInsertConfig, PgTable, TableConfig } from "../../pg-core/index.js";
import type { PgRaw } from "../../pg-core/query-builders/raw.js";
import { type SQL, type SQLWrapper } from "../../sql/sql.js";
import type { DrizzleConfig, UpdateSet } from "../../utils.js";
import type { AwsDataApiClient, AwsDataApiPgQueryResult, AwsDataApiPgQueryResultHKT } from "./session.js";
/** Connection/driver settings for the AWS Data API transport. */
export interface PgDriverOptions {
    logger?: Logger;
    database: string;
    resourceArn: string;
    secretArn: string;
}
/** drizzle config extended with the mandatory Data API connection triple. */
export interface DrizzleAwsDataApiPgConfig<TSchema extends Record<string, unknown> = Record<string, never>> extends DrizzleConfig<TSchema> {
    database: string;
    resourceArn: string;
    secretArn: string;
}
/** drizzle database backed by the AWS RDS Data API. */
export declare class AwsDataApiPgDatabase<TSchema extends Record<string, unknown> = Record<string, never>> extends PgDatabase<AwsDataApiPgQueryResultHKT, TSchema> {
    static readonly [entityKind]: string;
    execute<TRow extends Record<string, unknown> = Record<string, unknown>>(query: SQLWrapper | string): PgRaw<AwsDataApiPgQueryResult<TRow>>;
}
/** Pg dialect variant with named (`:n`) params and explicit array casts. */
export declare class AwsPgDialect extends PgDialect {
    static readonly [entityKind]: string;
    escapeParam(num: number): string;
    buildInsertQuery({ table, values, onConflict, returning, select, withList }: PgInsertConfig<PgTable<TableConfig>>): SQL<unknown>;
    buildUpdateSet(table: PgTable<TableConfig>, set: UpdateSet): SQL<unknown>;
}
/** Factory: `(client, config)` or a single config with `client`/`connection`. */
export declare function drizzle<TSchema extends Record<string, unknown> = Record<string, never>, TClient extends AwsDataApiClient = RDSDataClient>(...params: [
    TClient,
    DrizzleAwsDataApiPgConfig<TSchema>
] | [
    ((DrizzleConfig<TSchema> & {
        connection: RDSDataClientConfig & Omit<DrizzleAwsDataApiPgConfig, keyof DrizzleConfig>;
    }) | (DrizzleAwsDataApiPgConfig<TSchema> & {
        client: TClient;
    }))
]): AwsDataApiPgDatabase<TSchema> & {
    $client: TClient;
};
export declare namespace drizzle {
    /** Builds a database with a stub client for schema-only / offline use. */
    function mock<TSchema extends Record<string, unknown> = Record<string, never>>(config: DrizzleAwsDataApiPgConfig<TSchema>): AwsDataApiPgDatabase<TSchema> & {
        $client: '$client is not available on drizzle.mock()';
    };
}

View File

@@ -0,0 +1,99 @@
import { RDSDataClient } from "@aws-sdk/client-rds-data";
import { entityKind, is } from "../../entity.js";
import { DefaultLogger } from "../../logger.js";
import { PgDatabase } from "../../pg-core/db.js";
import { PgDialect } from "../../pg-core/dialect.js";
import { PgArray } from "../../pg-core/index.js";
import {
createTableRelationsHelpers,
extractTablesRelationalConfig
} from "../../relations.js";
import { Param, sql } from "../../sql/sql.js";
import { Table } from "../../table.js";
import { AwsDataApiSession } from "./session.js";
/**
 * drizzle database entry point backed by the AWS RDS Data API.
 * Thin specialization of PgDatabase; `execute` is re-declared only so the
 * declaration files can narrow the result type — runtime behavior is the
 * parent's (see the matching .d.ts signature).
 */
class AwsDataApiPgDatabase extends PgDatabase {
  static [entityKind] = "AwsDataApiPgDatabase";
  execute(query) {
    const result = super.execute(query);
    return result;
  }
}
/**
 * Postgres dialect tweaks for the AWS Data API transport: parameters are
 * named (`:1`, `:2`, ...) instead of `$n`, and array-valued parameters are
 * wrapped in an explicit `cast(... as <type>)` because the Data API cannot
 * infer Postgres array types on its own.
 */
class AwsPgDialect extends PgDialect {
  static [entityKind] = "AwsPgDialect";
  /** Data API placeholders are named and 1-based. */
  escapeParam(num) {
    return `:${num + 1}`;
  }
  buildInsertQuery({ table, values, onConflict, returning, select, withList }) {
    const cols = table[Table.Symbol.Columns];
    if (!select) {
      for (const row of values) {
        for (const name of Object.keys(cols)) {
          const bound = row[name];
          // Only bound Params holding a real JS array for a PgArray column
          // need the explicit cast.
          const needsCast = is(bound, Param) && bound.value !== void 0 && is(bound.encoder, PgArray) && Array.isArray(bound.value);
          if (needsCast) {
            row[name] = sql`cast(${bound} as ${sql.raw(bound.encoder.getSQLType())})`;
          }
        }
      }
    }
    return super.buildInsertQuery({ table, values, onConflict, returning, withList });
  }
  /** Same array-cast treatment for `UPDATE ... SET` values. */
  buildUpdateSet(table, set) {
    const cols = table[Table.Symbol.Columns];
    for (const [name, bound] of Object.entries(set)) {
      const needsCast = cols[name] && is(bound, Param) && bound.value !== void 0 && is(bound.encoder, PgArray) && Array.isArray(bound.value);
      if (needsCast) {
        set[name] = sql`cast(${bound} as ${sql.raw(bound.encoder.getSQLType())})`;
      }
    }
    return super.buildUpdateSet(table, set);
  }
}
/**
 * Shared constructor: wires a Data API client into a drizzle database —
 * dialect, optional logger, optional relational schema, then the
 * session/database pair.
 */
function construct(client, config) {
  const dialect = new AwsPgDialect({ casing: config.casing });
  // `logger: true` selects the default console logger; `false`/absent
  // disables logging; anything else is used as the logger itself.
  const logger = config.logger === true ? new DefaultLogger() : config.logger === false ? void 0 : config.logger;
  let schema;
  if (config.schema) {
    const relational = extractTablesRelationalConfig(
      config.schema,
      createTableRelationsHelpers
    );
    schema = {
      fullSchema: config.schema,
      schema: relational.tables,
      tableNamesMap: relational.tableNamesMap
    };
  }
  const session = new AwsDataApiSession(client, dialect, schema, { ...config, logger }, void 0);
  const db = new AwsDataApiPgDatabase(dialect, session, schema);
  db.$client = client;
  return db;
}
/**
 * Public factory. Accepts either the positional `(client, config)` form or
 * a single config object carrying `client` or raw `connection` settings.
 */
function drizzle(...params) {
  const [first, second] = params;
  // Positional form: an RDSDataClient (or any non-plain-object client).
  if (first instanceof RDSDataClient || first.constructor.name !== "Object") {
    return construct(first, second);
  }
  // Config form with a pre-built client.
  if (first.client) {
    const { client, ...rest } = first;
    return construct(client, rest);
  }
  // Config form with raw connection settings: build the client ourselves.
  const { connection, ...rest } = first;
  const { resourceArn, database, secretArn, ...clientConfig } = connection;
  const instance = new RDSDataClient(clientConfig);
  return construct(instance, { resourceArn, database, secretArn, ...rest });
}
// Namespace merge: attaches `drizzle.mock()`, which builds a database with
// a stub ({}) client for schema-only / offline use.
((ns) => {
  function mock(config) {
    return construct({}, config);
  }
  ns.mock = mock;
})(drizzle || (drizzle = {}));
export {
AwsDataApiPgDatabase,
AwsPgDialect,
drizzle
};
//# sourceMappingURL=driver.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,25 @@
"use strict";
// esbuild-generated CommonJS interop helpers for this barrel module.
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Copies enumerable own properties of `from` onto `to` as live getters,
// skipping `except` and any key `to` already defines.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Re-exports `mod` onto `target` (and optionally `secondTarget`), minus `default`.
var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default"));
// Marks the exports object as an ES-module namespace for interop consumers.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
var pg_exports = {};
module.exports = __toCommonJS(pg_exports);
// Barrel: re-export everything from driver.cjs and session.cjs.
__reExport(pg_exports, require("./driver.cjs"), module.exports);
__reExport(pg_exports, require("./session.cjs"), module.exports);
// Annotate the CommonJS export names for ESM import in node:
// (intentionally dead code, parsed statically by cjs-module-lexer)
0 && (module.exports = {
  ...require("./driver.cjs"),
  ...require("./session.cjs")
});
//# sourceMappingURL=index.cjs.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/aws-data-api/pg/index.ts"],"sourcesContent":["export * from './driver.ts';\nexport * from './session.ts';\n"],"mappings":";;;;;;;;;;;;;;;AAAA;AAAA;AAAA,uBAAc,wBAAd;AACA,uBAAc,yBADd;","names":[]}

View File

@@ -0,0 +1,2 @@
export * from "./driver.cjs";
export * from "./session.cjs";

View File

@@ -0,0 +1,2 @@
export * from "./driver.js";
export * from "./session.js";

Some files were not shown because too many files have changed in this diff Show More