Initial commit: New MoreminiMore website with fresh design

This commit is contained in:
MoreminiMore
2026-04-22 01:59:05 +07:00
commit 76409638cc
14010 changed files with 2052041 additions and 0 deletions

59
node_modules/@astrojs/db/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,59 @@
MIT License
Copyright (c) 2021 Fred K. Schott
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
This license applies to parts of the `packages/create-astro` and `packages/astro` subdirectories originating from the https://github.com/sveltejs/kit repository:
Copyright (c) 2020 [these people](https://github.com/sveltejs/kit/graphs/contributors)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
"""
This license applies to parts of the `packages/create-astro` and `packages/astro` subdirectories originating from the https://github.com/vitejs/vite repository:
MIT License
Copyright (c) 2019-present, Yuxi (Evan) You and Vite contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""

38
node_modules/@astrojs/db/README.md generated vendored Normal file
View File

@@ -0,0 +1,38 @@
# @astrojs/db (experimental) 💿
This **[Astro integration][astro-integration]** enables the usage of [SQLite](https://www.sqlite.org/) in Astro Projects.
## Documentation
Read the [`@astrojs/db` docs][docs]
## Support
- Get help in the [Astro Discord][discord]. Post questions in our `#support` forum, or visit our dedicated `#dev` channel to discuss current development and more!
- Check our [Astro Integration Documentation][astro-integration] for more on integrations.
- Submit bug reports and feature requests as [GitHub issues][issues].
## Contributing
This package is maintained by Astro's Core team. You're welcome to submit an issue or PR! These links will help you get started:
- [Contributor Manual][contributing]
- [Code of Conduct][coc]
- [Community Guide][community]
## License
MIT
Copyright (c) 2023–present [Astro][astro]
[astro]: https://astro.build/
[docs]: https://docs.astro.build/en/guides/integrations-guide/db/
[contributing]: https://github.com/withastro/astro/blob/main/CONTRIBUTING.md
[coc]: https://github.com/withastro/.github/blob/main/CODE_OF_CONDUCT.md
[community]: https://github.com/withastro/.github/blob/main/COMMUNITY_GUIDE.md
[discord]: https://astro.build/chat/
[issues]: https://github.com/withastro/astro/issues
[astro-integration]: https://docs.astro.build/en/guides/integrations-guide/

View File

@@ -0,0 +1,2 @@
import type { $ZodErrorMap } from 'zod/v4/core';
/** Custom Zod v4 error map — presumably applied when parsing DB config schemas; implementation not visible here. */
export declare const errorMap: $ZodErrorMap;

1444
node_modules/@astrojs/db/dist/_internal/core/schemas.d.ts generated vendored Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,60 @@
/**
 * Type aliases derived from the zod schemas in `./schemas.js`.
 * `z.input<...>` describes what users may write in config;
 * `z.infer<...>` / `z.output<...>` describe the parsed, resolved result.
 */
import type * as z from 'zod/v4';
import type { booleanColumnSchema, columnSchema, columnsSchema, dateColumnSchema, dbConfigSchema, indexSchema, jsonColumnSchema, MaybeArray, numberColumnOptsSchema, numberColumnSchema, referenceableColumnSchema, resolvedIndexSchema, tableSchema, textColumnOptsSchema, textColumnSchema } from './schemas.js';
/** Resolved index map of a single table after config parsing. */
export type ResolvedIndexes = z.output<typeof dbConfigSchema>['tables'][string]['indexes'];
// Input/output pairs for each supported column type.
export type BooleanColumn = z.infer<typeof booleanColumnSchema>;
export type BooleanColumnInput = z.input<typeof booleanColumnSchema>;
export type NumberColumn = z.infer<typeof numberColumnSchema>;
export type NumberColumnInput = z.input<typeof numberColumnSchema>;
export type TextColumn = z.infer<typeof textColumnSchema>;
export type TextColumnInput = z.input<typeof textColumnSchema>;
export type DateColumn = z.infer<typeof dateColumnSchema>;
export type DateColumnInput = z.input<typeof dateColumnSchema>;
export type JsonColumn = z.infer<typeof jsonColumnSchema>;
export type JsonColumnInput = z.input<typeof jsonColumnSchema>;
/** Union of every column's `type` discriminant tag. */
export type ColumnType = BooleanColumn['type'] | NumberColumn['type'] | TextColumn['type'] | DateColumn['type'] | JsonColumn['type'];
export type DBColumn = z.infer<typeof columnSchema>;
export type DBColumnInput = DateColumnInput | BooleanColumnInput | NumberColumnInput | TextColumnInput | JsonColumnInput;
export type DBColumns = z.infer<typeof columnsSchema>;
export type DBTable = z.infer<typeof tableSchema>;
export type DBTables = Record<string, DBTable>;
export type ResolvedDBTables = z.output<typeof dbConfigSchema>['tables'];
export type ResolvedDBTable = z.output<typeof dbConfigSchema>['tables'][string];
/** A point-in-time record of a database schema plus its migration version. */
export type DBSnapshot = {
    schema: Record<string, ResolvedDBTable>;
    version: string;
};
export type DBConfigInput = z.input<typeof dbConfigSchema>;
export type DBConfig = z.infer<typeof dbConfigSchema>;
export type ColumnsConfig = z.input<typeof tableSchema>['columns'];
export type OutputColumnsConfig = z.output<typeof tableSchema>['columns'];
/**
 * User-facing table definition: columns plus optional indexes and
 * foreign keys, with key names constrained to the declared columns.
 */
export interface TableConfig<TColumns extends ColumnsConfig = ColumnsConfig> extends Pick<z.input<typeof tableSchema>, 'columns' | 'indexes' | 'foreignKeys'> {
    columns: TColumns;
    foreignKeys?: Array<{
        columns: MaybeArray<Extract<keyof TColumns, string>>;
        references: () => MaybeArray<z.input<typeof referenceableColumnSchema>>;
    }>;
    indexes?: Array<IndexConfig<TColumns>> | Record<string, LegacyIndexConfig<TColumns>>;
    deprecated?: boolean;
}
interface IndexConfig<TColumns extends ColumnsConfig> extends z.input<typeof indexSchema> {
    on: MaybeArray<Extract<keyof TColumns, string>>;
}
/** @deprecated */
interface LegacyIndexConfig<TColumns extends ColumnsConfig> extends z.input<typeof resolvedIndexSchema> {
    on: MaybeArray<Extract<keyof TColumns, string>>;
}
export type NumberColumnOpts = z.input<typeof numberColumnOptsSchema>;
export type TextColumnOpts = z.input<typeof textColumnOptsSchema>;
// Registers the 'astro:db:setup' hook on Astro's integration hook map so
// other integrations can contribute extra config/seed entrypoints.
declare global {
    namespace Astro {
        interface IntegrationHooks {
            'astro:db:setup'?: (options: {
                extendDb: (options: {
                    configEntrypoint?: URL | string;
                    seedEntrypoint?: URL | string;
                }) => void;
            }) => void | Promise<void>;
        }
    }
}
export {};

View File

@@ -0,0 +1,19 @@
import type { AstroConfig, AstroIntegration } from 'astro';
import type { Arguments } from 'yargs-parser';
import './types.js';
/** One entry of Astro's `vite.plugins` config array. */
export type VitePlugin = Required<AstroConfig['vite']>['plugins'][number];
/** Returns `ASTRO_`-prefixed variables (optionally for a specific mode). */
export declare function getAstroEnv(envMode?: string): Record<`ASTRO_${string}`, string>;
/** Connection details for the remote libSQL database. */
export type RemoteDatabaseInfo = {
    url: string;
    token: string;
};
export declare function getRemoteDatabaseInfo(): RemoteDatabaseInfo;
/**
 * Resolves the effective app token — presumably preferring the
 * `--db-app-token` flag over `envToken` (implementation not shown).
 * Overloaded: returns a definite string when `envToken` is a string.
 */
export declare function resolveDbAppToken(flags: Arguments, envToken: string): string;
export declare function resolveDbAppToken(flags: Arguments, envToken: string | undefined): string | undefined;
/** URL of the project's DB directory, resolved against the Astro root. */
export declare function getDbDirectoryUrl(root: URL | string): URL;
/** Wraps/marks an integration for Astro DB use — see implementation. */
export declare function defineDbIntegration(integration: AstroIntegration): AstroIntegration;
/**
 * Map an object's values to a new set of values
 * while preserving types.
 */
export declare function mapObject<T, U = T>(item: Record<string, T>, callback: (key: string, value: T) => U): Record<string, U>;

View File

@@ -0,0 +1,92 @@
import type { ColumnBaseConfig, ColumnDataType } from 'drizzle-orm';
import type { SQLiteColumn, SQLiteTableWithColumns } from 'drizzle-orm/sqlite-core';
import type { ColumnsConfig, DBColumn, OutputColumnsConfig } from '../core/types.js';
// Subset of drizzle's per-column config that Astro DB computes itself.
type GeneratedConfig<T extends ColumnDataType = ColumnDataType> = Pick<ColumnBaseConfig<T, string>, 'name' | 'tableName' | 'notNull' | 'hasDefault' | 'hasRuntimeDefault' | 'isPrimaryKey'>;
// drizzle SQLite column shape for a text column. `E` carries the column's
// `enum` literal tuple when one was declared, narrowing `data` to its union.
type AstroText<T extends GeneratedConfig<'string'>, E extends readonly [string, ...string[]] | string> = SQLiteColumn<T & {
    data: E extends readonly (infer U)[] ? U : string;
    dataType: 'string';
    columnType: 'SQLiteText';
    driverParam: string;
    enumValues: E extends [string, ...string[]] ? E : never;
    baseColumn: never;
    isAutoincrement: boolean;
    identity: undefined;
    generated: undefined;
}>;
// Date columns are stored via a drizzle custom column, surfaced as `Date`.
type AstroDate<T extends GeneratedConfig<'custom'>> = SQLiteColumn<T & {
    data: Date;
    dataType: 'custom';
    columnType: 'SQLiteCustomColumn';
    driverParam: string;
    enumValues: never;
    baseColumn: never;
    isAutoincrement: boolean;
    identity: undefined;
    generated: undefined;
}>;
// Boolean columns travel to the driver as numbers (SQLite has no bool type).
type AstroBoolean<T extends GeneratedConfig<'boolean'>> = SQLiteColumn<T & {
    data: boolean;
    dataType: 'boolean';
    columnType: 'SQLiteBoolean';
    driverParam: number;
    enumValues: never;
    baseColumn: never;
    isAutoincrement: boolean;
    identity: undefined;
    generated: undefined;
}>;
// Number columns map onto SQLite integers.
type AstroNumber<T extends GeneratedConfig<'number'>> = SQLiteColumn<T & {
    data: number;
    dataType: 'number';
    columnType: 'SQLiteInteger';
    driverParam: number;
    enumValues: never;
    baseColumn: never;
    isAutoincrement: boolean;
    identity: undefined;
    generated: undefined;
}>;
// JSON columns are a custom column whose decoded value is `unknown`.
type AstroJson<T extends GeneratedConfig<'custom'>> = SQLiteColumn<T & {
    data: unknown;
    dataType: 'custom';
    columnType: 'SQLiteCustomColumn';
    driverParam: string;
    enumValues: never;
    baseColumn: never;
    isAutoincrement: boolean;
    identity: undefined;
    generated: undefined;
}>;
// Maps an Astro DB column `type` tag to its drizzle column shape above.
type Column<T extends DBColumn['type'], E extends readonly [string, ...string[]] | string, S extends GeneratedConfig> = T extends 'boolean' ? AstroBoolean<S> : T extends 'number' ? AstroNumber<S> : T extends 'text' ? AstroText<S, E> : T extends 'date' ? AstroDate<S> : T extends 'json' ? AstroJson<S> : never;
/**
 * The drizzle table type exposed for a defined Astro DB table. For each
 * declared column it derives the primary-key / default / not-null flags
 * from that column's `schema` options (a primary key implies a default).
 */
export type Table<TTableName extends string, TColumns extends OutputColumnsConfig | ColumnsConfig> = SQLiteTableWithColumns<{
    name: TTableName;
    schema: undefined;
    dialect: 'sqlite';
    columns: {
        [K in Extract<keyof TColumns, string>]: Column<TColumns[K]['type'], TColumns[K]['schema'] extends {
            enum: infer E;
        } ? E extends readonly [string, ...string[]] ? E : string : string, {
            tableName: TTableName;
            name: K;
            isPrimaryKey: TColumns[K]['schema'] extends {
                primaryKey: true;
            } ? true : false;
            hasDefault: TColumns[K]['schema'] extends {
                default: NonNullable<unknown>;
            } ? true : TColumns[K]['schema'] extends {
                primaryKey: true;
            } ? true : false;
            hasRuntimeDefault: TColumns[K]['schema'] extends {
                default: NonNullable<unknown>;
            } ? true : false;
            notNull: TColumns[K]['schema']['optional'] extends true ? false : true;
        }>;
    };
}>;
/** Marker key identifying a serialized drizzle SQL fragment. */
export declare const SERIALIZED_SQL_KEY = "__serializedSQL";
/** JSON-safe representation of a drizzle `SQL` value. */
export type SerializedSQL = {
    [SERIALIZED_SQL_KEY]: true;
    sql: string;
};
/** Type guard for {@link SerializedSQL} values. */
export declare function isSerializedSQL(value: any): value is SerializedSQL;
export {};

View File

@@ -0,0 +1,9 @@
import { LibsqlError } from '@libsql/client';
import { AstroError } from 'astro/errors';
import type { DBColumn } from '../core/types.js';
/** Whether the column is declared as a primary key. */
export declare function hasPrimaryKey(column: DBColumn): boolean;
/** Astro-flavored error class for database failures. */
export declare class AstroDbError extends AstroError {
    name: string;
}
/** Type guard: narrows an unknown error to a libSQL driver error. */
export declare function isDbError(err: unknown): err is LibsqlError;
/** Converts a path string to a URL — named after Node's `pathToFileURL`; implementation not shown here. */
export declare function pathToFileURL(path: string): URL;

View File

@@ -0,0 +1,48 @@
import type { BooleanColumnInput, ColumnsConfig, DateColumnInput, DBConfigInput, JsonColumnInput, NumberColumnOpts, TableConfig, TextColumnOpts } from '../core/types.js';
/**
 * Column factories used inside `defineTable({ columns: { ... } })`.
 * Each factory preserves its options type parameter so declared defaults,
 * enums and primary keys flow into the generated table types.
 */
export declare const column: {
    number: <T extends NumberColumnOpts>(opts?: T) => {
        type: "number";
        /**
         * @internal
         */
        schema: T;
    };
    boolean: <T extends BooleanColumnInput["schema"]>(opts?: T) => {
        type: "boolean";
        /**
         * @internal
         */
        schema: T;
    };
    text: <T extends TextColumnOpts, const E extends T["enum"] extends readonly [string, ...string[]] ? Omit<T, "enum"> & T["enum"] : T>(opts?: E) => {
        type: "text";
        /**
         * @internal
         */
        schema: E;
    };
    date<T extends DateColumnInput["schema"]>(opts?: T): {
        type: "date";
        /**
         * @internal
         */
        schema: T;
    };
    json<T extends JsonColumnInput["schema"]>(opts?: T): {
        type: "json";
        /**
         * @internal
         */
        schema: T;
    };
};
/** Declares a table's columns plus optional indexes and foreign keys. */
export declare function defineTable<TColumns extends ColumnsConfig>(userConfig: TableConfig<TColumns>): TableConfig<TColumns>;
/** Entry point for the project's DB config file. */
export declare function defineDb(userConfig: DBConfigInput): {
    tables?: unknown;
};
// SQL fragments usable as column defaults (drizzle `SQL` values).
export declare const NOW: import("drizzle-orm").SQL<unknown>;
export declare const TRUE: import("drizzle-orm").SQL<unknown>;
export declare const FALSE: import("drizzle-orm").SQL<unknown>;
// Re-exported drizzle query-builder operators.
export { and, asc, avg, avgDistinct, between, count, countDistinct, desc, eq, exists, gt, gte, ilike, inArray, isNotNull, isNull, like, lt, lte, max, min, ne, not, notBetween, notExists, notIlike, notInArray, or, sql, sum, sumDistinct, } from 'drizzle-orm';
export { alias } from 'drizzle-orm/sqlite-core';
export { isDbError } from './utils.js';

View File

@@ -0,0 +1,8 @@
import type { AstroConfig } from 'astro';
import type { Arguments } from 'yargs-parser';
import type { DBConfig } from '../../../types.js';
/**
 * Entry point for one `astro db` subcommand; receives the resolved Astro
 * config, the project's DB config, and the parsed CLI flags.
 */
export declare function cmd({ astroConfig, dbConfig, flags, }: {
    astroConfig: AstroConfig;
    dbConfig: DBConfig;
    flags: Arguments;
}): Promise<void>;

View File

@@ -0,0 +1,65 @@
import { existsSync } from "node:fs";
import colors from "piccolore";
import { isDbError } from "../../../../runtime/utils.js";
import {
EXEC_DEFAULT_EXPORT_ERROR,
EXEC_ERROR,
FILE_NOT_FOUND_ERROR,
MISSING_EXECUTE_PATH_ERROR
} from "../../../errors.js";
import {
getLocalVirtualModContents,
getRemoteVirtualModContents
} from "../../../integration/vite-plugin-db.js";
import { bundleFile, importBundledFile } from "../../../load-file.js";
import { getRemoteDatabaseInfo, resolveDbAppToken } from "../../../utils.js";
/**
 * `astro db execute <file>` — bundle a user script against the astro:db
 * virtual module and run its default export.
 *
 * Exits the process with code 1 when the file path argument is missing,
 * the file does not exist, or the module's default export is not a
 * function. Database errors are re-thrown as plain Errors with a
 * formatted message; all other errors propagate untouched.
 */
async function cmd({ astroConfig, dbConfig, flags }) {
  const filePath = flags._[4];
  if (typeof filePath !== "string") {
    console.error(MISSING_EXECUTE_PATH_ERROR);
    process.exit(1);
  }
  const scriptUrl = new URL(filePath, astroConfig.root);
  if (!existsSync(scriptUrl)) {
    console.error(FILE_NOT_FOUND_ERROR(filePath));
    process.exit(1);
  }
  // Pick the astro:db virtual module flavor: remote libSQL when --remote
  // is set, otherwise the local database.
  let virtualModContents;
  if (flags.remote) {
    const dbInfo = getRemoteDatabaseInfo();
    const appToken = resolveDbAppToken(flags, dbInfo.token);
    virtualModContents = getRemoteVirtualModContents({
      tables: dbConfig.tables ?? {},
      appToken,
      isBuild: false,
      output: "server",
      localExecution: true
    });
  } else {
    virtualModContents = getLocalVirtualModContents({
      tables: dbConfig.tables ?? {},
      root: astroConfig.root,
      localExecution: true
    });
  }
  const { code } = await bundleFile({ virtualModContents, root: astroConfig.root, fileUrl: scriptUrl });
  const mod = await importBundledFile({ code, root: astroConfig.root });
  if (typeof mod.default !== "function") {
    console.error(EXEC_DEFAULT_EXPORT_ERROR(filePath));
    process.exit(1);
  }
  try {
    await mod.default();
    console.info(`${colors.green("\u2714")} File run successfully.`);
  } catch (e) {
    if (!isDbError(e)) throw e;
    throw new Error(EXEC_ERROR(e.message));
  }
}
export { cmd };

View File

@@ -0,0 +1,8 @@
import type { AstroConfig } from 'astro';
import type { Arguments } from 'yargs-parser';
import type { DBConfig } from '../../../types.js';
export declare function cmd({ dbConfig, flags, }: {
astroConfig: AstroConfig;
dbConfig: DBConfig;
flags: Arguments;
}): Promise<void>;

View File

@@ -0,0 +1,106 @@
import * as clack from "@clack/prompts";
import { sql } from "drizzle-orm";
import { MIGRATION_VERSION } from "../../../consts.js";
import { createClient } from "../../../db-client/libsql-node.js";
import {
getRemoteDatabaseInfo,
resolveDbAppToken
} from "../../../utils.js";
import {
createCurrentSnapshot,
createEmptySnapshot,
formatDataLossMessage,
getMigrationQueries,
getProductionCurrentSnapshot
} from "../../migration-queries.js";
/**
 * `astro db push` — push local schema changes to the remote libSQL database.
 *
 * Flags: `--dry-run` prints the statements instead of executing them;
 * `--force-reset` drops and recreates every table after an interactive
 * confirmation (declining exits 0). Throws when the diff requires
 * data-loss confirmations and `--force-reset` was not given.
 */
async function cmd({
  dbConfig,
  flags
}) {
  const isDryRun = flags.dryRun;
  const isForceReset = flags.forceReset;
  const dbInfo = getRemoteDatabaseInfo();
  const appToken = resolveDbAppToken(flags, dbInfo.token);
  // The remote snapshot is absent on a fresh database; diff against an
  // empty snapshot in that case.
  const productionSnapshot = await getProductionCurrentSnapshot({ ...dbInfo, token: appToken });
  const currentSnapshot = createCurrentSnapshot(dbConfig);
  const isFromScratch = !productionSnapshot;
  const { queries: migrationQueries, confirmations } = await getMigrationQueries({
    oldSnapshot: isFromScratch ? createEmptySnapshot() : productionSnapshot,
    newSnapshot: currentSnapshot,
    reset: isForceReset
  });
  if (migrationQueries.length === 0) {
    console.log("Database schema is up to date.");
  } else {
    console.log(`Database schema is out of date.`);
  }
  if (isForceReset) {
    // Interactive guard before a destructive reset.
    const begin = await clack.confirm({
      message: `Reset your database? All of your data will be erased and your schema created from scratch.`,
      initialValue: false,
      // NOTE(review): 'withGuide' is not a documented @clack/prompts confirm
      // option — confirm it is supported by the prompts version in use.
      withGuide: false
    });
    if (begin !== true) {
      console.log("Canceled.");
      process.exit(0);
    }
    console.log(`Force-pushing to the database. All existing data will be erased.`);
  } else if (confirmations.length > 0) {
    // Potentially lossy changes require --force-reset; abort otherwise.
    console.log("\n" + formatDataLossMessage(confirmations) + "\n");
    throw new Error("Exiting.");
  }
  if (isDryRun) {
    console.log("Statements:", JSON.stringify(migrationQueries, void 0, 2));
  } else {
    console.log(`Pushing database schema updates...`);
    // isDryRun is always false on this branch, so pushSchema's own
    // dry-run short-circuit is unreachable from this call site.
    await pushSchema({
      statements: migrationQueries,
      dbInfo,
      appToken,
      isDryRun,
      currentSnapshot
    });
  }
  // Note: printed for dry runs as well.
  console.info("Push complete!");
}
/**
 * Sends the migration statements plus the new snapshot to the remote
 * database. In dry-run mode nothing is sent: the would-be request body is
 * logged and a synthetic 200 Response is returned instead.
 */
async function pushSchema({
  statements,
  dbInfo,
  appToken,
  isDryRun,
  currentSnapshot
}) {
  const payload = {
    snapshot: currentSnapshot,
    sql: statements,
    version: MIGRATION_VERSION
  };
  if (!isDryRun) {
    return pushToDb(payload, appToken, dbInfo.url);
  }
  console.info("[DRY RUN] Batch query:", JSON.stringify(payload, null, 2));
  return new Response(null, { status: 200 });
}
async function pushToDb(requestBody, appToken, remoteUrl) {
const client = createClient({
token: appToken,
url: remoteUrl
});
await client.run(sql`create table if not exists _astro_db_snapshot (
id INTEGER PRIMARY KEY AUTOINCREMENT,
version TEXT,
snapshot BLOB
);`);
await client.transaction(async (tx) => {
for (const stmt of requestBody.sql) {
await tx.run(sql.raw(stmt));
}
await tx.run(sql`insert into _astro_db_snapshot (version, snapshot) values (
${requestBody.version},
${JSON.stringify(requestBody.snapshot)}
)`);
});
}
export {
cmd
};

View File

@@ -0,0 +1,8 @@
import type { AstroConfig } from 'astro';
import type { Arguments } from 'yargs-parser';
import type { DBConfigInput } from '../../../types.js';
export declare function cmd({ flags, astroConfig, }: {
dbConfig: DBConfigInput;
astroConfig: AstroConfig;
flags: Arguments;
}): Promise<void>;

View File

@@ -0,0 +1,36 @@
import { sql } from "drizzle-orm";
import { normalizeDatabaseUrl } from "../../../../runtime/index.js";
import { DB_PATH } from "../../../consts.js";
import { createClient as createLocalDatabaseClient } from "../../../db-client/libsql-local.js";
import { createClient as createRemoteDatabaseClient } from "../../../db-client/libsql-node.js";
import { SHELL_QUERY_MISSING_ERROR } from "../../../errors.js";
import { getAstroEnv, getRemoteDatabaseInfo, resolveDbAppToken } from "../../../utils.js";
/**
 * `astro db shell --query <sql>` — run a raw SQL string against either the
 * local database file or (with --remote) the remote libSQL database, and
 * print the driver's result. Exits with code 1 when --query is missing.
 */
async function cmd({ flags, astroConfig }) {
  const query = flags.query;
  if (!query) {
    console.error(SHELL_QUERY_MISSING_ERROR);
    process.exit(1);
  }
  const dbInfo = getRemoteDatabaseInfo();
  if (!flags.remote) {
    // Local: resolve the db file (overridable via ASTRO_DATABASE_FILE).
    const { ASTRO_DATABASE_FILE } = getAstroEnv();
    const localUrl = normalizeDatabaseUrl(
      ASTRO_DATABASE_FILE,
      new URL(DB_PATH, astroConfig.root).href
    );
    const client = createLocalDatabaseClient({ url: localUrl });
    console.log(await client.run(sql.raw(query)));
    return;
  }
  // Remote: authenticate with the resolved app token.
  const appToken = resolveDbAppToken(flags, dbInfo.token);
  const client = createRemoteDatabaseClient({ ...dbInfo, token: appToken });
  console.log(await client.run(sql.raw(query)));
}
export { cmd };

View File

@@ -0,0 +1,8 @@
import type { AstroConfig } from 'astro';
import type { Arguments } from 'yargs-parser';
import type { DBConfig } from '../../../types.js';
export declare function cmd({ dbConfig, flags, }: {
astroConfig: AstroConfig;
dbConfig: DBConfig;
flags: Arguments;
}): Promise<void>;

View File

@@ -0,0 +1,46 @@
import { getRemoteDatabaseInfo, resolveDbAppToken } from "../../../utils.js";
import {
createCurrentSnapshot,
createEmptySnapshot,
formatDataLossMessage,
getMigrationQueries,
getProductionCurrentSnapshot
} from "../../migration-queries.js";
/**
 * `astro db verify` — compare the remote schema snapshot with the local
 * config and report MATCH / NO_MATCH / DATA_LOSS. With --json the full
 * report object is printed; otherwise only its message. Always terminates
 * the process, exiting 1 only when applying the diff would lose data.
 */
async function cmd({ dbConfig, flags }) {
  const wantsJson = flags.json;
  const dbInfo = getRemoteDatabaseInfo();
  const appToken = resolveDbAppToken(flags, dbInfo.token);
  const productionSnapshot = await getProductionCurrentSnapshot({ ...dbInfo, token: appToken });
  const currentSnapshot = createCurrentSnapshot(dbConfig);
  const { queries: pendingQueries, confirmations } = await getMigrationQueries({
    oldSnapshot: productionSnapshot || createEmptySnapshot(),
    newSnapshot: currentSnapshot
  });
  const report = { exitCode: 0, message: "", code: "", data: void 0 };
  if (pendingQueries.length === 0) {
    report.code = "MATCH";
    report.message = `Database schema is up to date.`;
  } else {
    report.code = "NO_MATCH";
    report.message = `Database schema is out of date.
Run 'astro db push' to push up your latest changes.`;
  }
  // Data-loss warnings take precedence over the plain out-of-date state.
  if (confirmations.length > 0) {
    report.code = "DATA_LOSS";
    report.exitCode = 1;
    report.data = confirmations;
    report.message = formatDataLossMessage(confirmations, !wantsJson);
  }
  console.log(wantsJson ? JSON.stringify(report) : report.message);
  process.exit(report.exitCode);
}
export { cmd };

6
node_modules/@astrojs/db/dist/core/cli/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,6 @@
import type { AstroConfig } from 'astro';
import type { Arguments } from 'yargs-parser';
/**
 * Top-level `astro db` CLI dispatcher: resolves the subcommand from the
 * positional args and delegates to its implementation module.
 */
export declare function cli({ flags, config: astroConfig, }: {
    flags: Arguments;
    config: AstroConfig;
}): Promise<void>;

75
node_modules/@astrojs/db/dist/core/cli/index.js generated vendored Normal file
View File

@@ -0,0 +1,75 @@
import { resolveDbConfig } from "../load-file.js";
import { printHelp } from "./print-help.js";
/**
 * Dispatches `astro db <command>` to the matching subcommand module.
 * Command modules are imported lazily so unused subcommands are never
 * loaded. Unknown (or missing) commands print usage help; an unknown
 * non-empty command additionally logs an error first.
 */
async function cli({ flags, config: astroConfig }) {
  const positionals = flags._;
  // Support both `astro db <cmd>` and a bare `<cmd>` positional layout.
  const command = positionals[2] === "db" ? positionals[3] : positionals[2];
  validateDbAppTokenFlag(command, flags);
  const { dbConfig } = await resolveDbConfig(astroConfig);
  if (command === "shell") {
    const { cmd } = await import("./commands/shell/index.js");
    return await cmd({ astroConfig, dbConfig, flags });
  }
  if (command === "gen") {
    console.log('"astro db gen" is no longer needed! Visit the docs for more information.');
    return;
  }
  if (command === "sync") {
    console.log('"astro db sync" is no longer needed! Visit the docs for more information.');
    return;
  }
  if (command === "push") {
    const { cmd } = await import("./commands/push/index.js");
    return await cmd({ astroConfig, dbConfig, flags });
  }
  if (command === "verify") {
    const { cmd } = await import("./commands/verify/index.js");
    return await cmd({ astroConfig, dbConfig, flags });
  }
  if (command === "execute") {
    const { cmd } = await import("./commands/execute/index.js");
    return await cmd({ astroConfig, dbConfig, flags });
  }
  if (command != null) {
    console.error(`Unknown command: ${command}`);
  }
  printHelp({
    commandName: "astro db",
    usage: "[command] [...flags]",
    headline: " ",
    tables: {
      Commands: [
        ["push", "Push table schema updates to libSQL."],
        ["verify", "Test schema updates with libSQL (good for CI)."],
        [
          "astro db execute <file-path>",
          "Execute a ts/js file using astro:db. Use --remote to connect to libSQL."
        ],
        [
          "astro db shell --query <sql-string>",
          "Execute a SQL string. Use --remote to connect to libSQL."
        ]
      ]
    }
  });
  return;
}
/**
 * For the subcommands that accept --db-app-token, exits(1) with an error
 * when the flag is present but not a string. A no-op for every other
 * command and when the flag is absent.
 */
function validateDbAppTokenFlag(command, flags) {
  const tokenAwareCommands = ["execute", "push", "verify", "shell"];
  if (!tokenAwareCommands.includes(command)) return;
  const token = flags.dbAppToken;
  if (token == null) return;
  if (typeof token !== "string") {
    console.error(`Invalid value for --db-app-token; expected a string.`);
    process.exit(1);
  }
}
export {
cli
};

View File

@@ -0,0 +1,22 @@
import type { DBConfig, DBSnapshot, ResolvedDBTable } from '../types.js';
import type { RemoteDatabaseInfo } from '../utils.js';
/**
 * Computes the SQL statements that migrate `oldSnapshot` to `newSnapshot`.
 * `confirmations` lists human-readable warnings for changes that may lose data.
 */
export declare function getMigrationQueries({ oldSnapshot, newSnapshot, reset, }: {
    oldSnapshot: DBSnapshot;
    newSnapshot: DBSnapshot;
    reset?: boolean;
}): Promise<{
    queries: string[];
    confirmations: string[];
}>;
/** Same as {@link getMigrationQueries}, scoped to a single table. */
export declare function getTableChangeQueries({ tableName, oldTable, newTable, }: {
    tableName: string;
    oldTable: ResolvedDBTable;
    newTable: ResolvedDBTable;
}): Promise<{
    queries: string[];
    confirmations: string[];
}>;
/** Fetches the snapshot currently recorded remotely, if any. */
export declare function getProductionCurrentSnapshot({ url, token, }: RemoteDatabaseInfo): Promise<DBSnapshot | undefined>;
/** Builds a snapshot from the project's DB config. */
export declare function createCurrentSnapshot({ tables }: DBConfig): DBSnapshot;
/** Returns a snapshot with no tables. */
export declare function createEmptySnapshot(): DBSnapshot;
/** Formats data-loss confirmations; `isColor` toggles terminal styling. */
export declare function formatDataLossMessage(confirmations: string[], isColor?: boolean): string;

View File

@@ -0,0 +1,373 @@
import { stripVTControlCharacters } from "node:util";
import diff from "microdiff";
import { sql } from "drizzle-orm";
import { SQLiteAsyncDialect } from "drizzle-orm/sqlite-core";
import { customAlphabet } from "nanoid";
import color from "piccolore";
import { isSerializedSQL } from "../../runtime/types.js";
import { hasPrimaryKey, isDbError } from "../../runtime/utils.js";
import { MIGRATION_VERSION } from "../consts.js";
import { createClient } from "../db-client/libsql-node.js";
import { RENAME_COLUMN_ERROR, RENAME_TABLE_ERROR } from "../errors.js";
import {
getCreateIndexQueries,
getCreateTableQuery,
getDropTableIfExistsQuery,
getModifiers,
getReferencesConfig,
hasDefault,
schemaTypeToSqlType
} from "../queries.js";
import { columnSchema } from "../schemas.js";
const sqlite = new SQLiteAsyncDialect();
const genTempTableName = customAlphabet("abcdefghijklmnopqrstuvwxyz", 10);
/**
 * Diffs two schema snapshots and returns the SQL statements that migrate
 * `oldSnapshot` into `newSnapshot`, plus human-readable `confirmations`
 * for changes that cannot be applied without data loss.
 *
 * Throws when tables (or, per table, columns) are simultaneously added
 * and dropped — indistinguishable from a rename — unless the dropped one
 * is marked `deprecated`.
 */
async function getMigrationQueries({
  oldSnapshot,
  newSnapshot,
  reset = false
}) {
  const queries = [];
  const confirmations = [];
  // `reset` rebuilds from scratch: drop everything in the current
  // snapshot, then diff the new snapshot against an empty one.
  if (reset) {
    const currentSnapshot = oldSnapshot;
    oldSnapshot = createEmptySnapshot();
    queries.push(...getDropTableQueriesForSnapshot(currentSnapshot));
  }
  const addedTables = getAddedTables(oldSnapshot, newSnapshot);
  const droppedTables = getDroppedTables(oldSnapshot, newSnapshot);
  // Dropped tables flagged `deprecated` may coexist with added tables.
  const notDeprecatedDroppedTables = Object.fromEntries(
    Object.entries(droppedTables).filter(([, table]) => !table.deprecated)
  );
  if (!isEmpty(addedTables) && !isEmpty(notDeprecatedDroppedTables)) {
    const oldTable = Object.keys(notDeprecatedDroppedTables)[0];
    const newTable = Object.keys(addedTables)[0];
    throw new Error(RENAME_TABLE_ERROR(oldTable, newTable));
  }
  for (const [tableName, table] of Object.entries(addedTables)) {
    queries.push(getCreateTableQuery(tableName, table));
    queries.push(...getCreateIndexQueries(tableName, table));
  }
  for (const [tableName] of Object.entries(droppedTables)) {
    const dropQuery = `DROP TABLE ${sqlite.escapeName(tableName)}`;
    queries.push(dropQuery);
  }
  // Tables present in both snapshots: diff their columns.
  for (const [tableName, newTable] of Object.entries(newSnapshot.schema)) {
    const oldTable = oldSnapshot.schema[tableName];
    if (!oldTable) continue;
    const addedColumns = getAdded(oldTable.columns, newTable.columns);
    const droppedColumns = getDropped(oldTable.columns, newTable.columns);
    const notDeprecatedDroppedColumns = Object.fromEntries(
      Object.entries(droppedColumns).filter(([, col]) => !col.schema.deprecated)
    );
    // A simultaneous column add + drop looks like a rename — refuse it.
    if (!isEmpty(addedColumns) && !isEmpty(notDeprecatedDroppedColumns)) {
      throw new Error(
        RENAME_COLUMN_ERROR(
          `${tableName}.${Object.keys(addedColumns)[0]}`,
          `${tableName}.${Object.keys(notDeprecatedDroppedColumns)[0]}`
        )
      );
    }
    const result = await getTableChangeQueries({
      tableName,
      oldTable,
      newTable
    });
    queries.push(...result.queries);
    confirmations.push(...result.confirmations);
  }
  return { queries, confirmations };
}
/**
 * Builds the queries that reconcile one table's columns, indexes and
 * foreign keys. Strategy, cheapest first:
 *   1. Only indexes differ            -> DROP/CREATE INDEX statements.
 *   2. Only adds/drops that SQLite's ALTER TABLE supports -> ALTER TABLE.
 *   3. Anything else                  -> recreate the table, appending a
 *      data-loss confirmation when rows cannot be carried across.
 */
async function getTableChangeQueries({
  tableName,
  oldTable,
  newTable
}) {
  const queries = [];
  const confirmations = [];
  const updated = getUpdatedColumns(oldTable.columns, newTable.columns);
  const added = getAdded(oldTable.columns, newTable.columns);
  const dropped = getDropped(oldTable.columns, newTable.columns);
  const hasForeignKeyChanges = diff(oldTable.foreignKeys ?? [], newTable.foreignKeys ?? []).length > 0;
  // Case 1: column definitions unchanged — only indexes may differ.
  if (!hasForeignKeyChanges && isEmpty(updated) && isEmpty(added) && isEmpty(dropped)) {
    return {
      queries: getChangeIndexQueries({
        tableName,
        oldIndexes: oldTable.indexes,
        newIndexes: newTable.indexes
      }),
      confirmations
    };
  }
  // Case 2: no in-place updates, and every add/drop passes the
  // canAlterTable* checks, so plain ALTER TABLE statements suffice.
  if (!hasForeignKeyChanges && isEmpty(updated) && Object.values(dropped).every(canAlterTableDropColumn) && Object.values(added).every(canAlterTableAddColumn)) {
    queries.push(
      ...getAlterTableQueries(tableName, added, dropped),
      ...getChangeIndexQueries({
        tableName,
        oldIndexes: oldTable.indexes,
        newIndexes: newTable.indexes
      })
    );
    return { queries, confirmations };
  }
  // Case 3: the table must be recreated. Record a confirmation when the
  // change cannot preserve existing data.
  const dataLossCheck = canRecreateTableWithoutDataLoss(added, updated);
  if (dataLossCheck.dataLoss) {
    const { reason, columnName } = dataLossCheck;
    const reasonMsgs = {
      "added-required": `You added new required column '${color.bold(
        tableName + "." + columnName
      )}' with no default value.
This cannot be executed on an existing table.`,
      "updated-type": `Updating existing column ${color.bold(
        tableName + "." + columnName
      )} to a new type that cannot be handled automatically.`
    };
    confirmations.push(reasonMsgs[reason]);
  }
  // presumably handles SQLite's hidden rowid for tables without an explicit
  // primary key — see getRecreateTableQueries for the actual behavior.
  const primaryKeyExists = Object.entries(newTable.columns).find(
    ([, column]) => hasPrimaryKey(column)
  );
  const droppedPrimaryKey = Object.entries(dropped).find(([, column]) => hasPrimaryKey(column));
  const recreateTableQueries = getRecreateTableQueries({
    tableName,
    newTable,
    added,
    hasDataLoss: dataLossCheck.dataLoss,
    migrateHiddenPrimaryKey: !primaryKeyExists && !droppedPrimaryKey
  });
  queries.push(...recreateTableQueries, ...getCreateIndexQueries(tableName, newTable));
  return { queries, confirmations };
}
function getChangeIndexQueries({
  tableName,
  oldIndexes = {},
  newIndexes = {}
}) {
  // Build the DROP/CREATE INDEX statements needed to move from `oldIndexes`
  // to `newIndexes`. An updated index is treated as a drop followed by a
  // re-create, so updates are merged into both the dropped and added sets.
  const addedIndexes = getAdded(oldIndexes, newIndexes);
  const droppedIndexes = getDropped(oldIndexes, newIndexes);
  const updatedIndexes = getUpdated(oldIndexes, newIndexes);
  Object.assign(droppedIndexes, updatedIndexes);
  Object.assign(addedIndexes, updatedIndexes);
  const queries = Object.keys(droppedIndexes).map(
    (indexName) => `DROP INDEX ${sqlite.escapeName(indexName)}`
  );
  queries.push(...getCreateIndexQueries(tableName, { indexes: addedIndexes }));
  return queries;
}
function getAddedTables(oldTables, newTables) {
  // Tables present in the new snapshot's schema but absent from the old one.
  const result = {};
  for (const name of Object.keys(newTables.schema)) {
    if (!(name in oldTables.schema)) result[name] = newTables.schema[name];
  }
  return result;
}
function getDroppedTables(oldTables, newTables) {
  // Tables present in the old snapshot's schema but missing from the new one.
  const result = {};
  for (const name of Object.keys(oldTables.schema)) {
    if (!(name in newTables.schema)) result[name] = oldTables.schema[name];
  }
  return result;
}
function getAlterTableQueries(unescTableName, added, dropped) {
  // Emit one ALTER TABLE statement per added column, followed by one per
  // dropped column. Only columns that pass canAlterTableAddColumn /
  // canAlterTableDropColumn should reach this path; anything else requires a
  // full table recreate instead.
  const tableName = sqlite.escapeName(unescTableName);
  const addQueries = Object.entries(added).map(([rawName, column]) => {
    const columnName = sqlite.escapeName(rawName);
    const type = schemaTypeToSqlType(column.type);
    const modifiers = getModifiers(columnName, column);
    return `ALTER TABLE ${tableName} ADD COLUMN ${columnName} ${type}${modifiers}`;
  });
  const dropQueries = Object.keys(dropped).map(
    (rawName) => `ALTER TABLE ${tableName} DROP COLUMN ${sqlite.escapeName(rawName)}`
  );
  return [...addQueries, ...dropQueries];
}
function getRecreateTableQueries({
  tableName: unescTableName,
  newTable,
  added,
  hasDataLoss,
  migrateHiddenPrimaryKey
}) {
  // SQLite cannot express many column changes via ALTER TABLE, so the table
  // is rebuilt instead: either dropped and recreated (when data loss was
  // already confirmed by the caller), or recreated under a temp name,
  // backfilled from the old table, and swapped in via RENAME.
  const unescTempName = `${unescTableName}_${genTempTableName()}`;
  const tempName = sqlite.escapeName(unescTempName);
  const tableName = sqlite.escapeName(unescTableName);
  if (hasDataLoss) {
    return [`DROP TABLE ${tableName}`, getCreateTableQuery(unescTableName, newTable)];
  }
  const newColumns = [...Object.keys(newTable.columns)];
  if (migrateHiddenPrimaryKey) {
    // No explicit primary key on either side: carry the implicit `_id`
    // column over so row identity survives the rebuild.
    newColumns.unshift("_id");
  }
  // Newly added columns have no source data in the old table; exclude them
  // from both sides of the INSERT ... SELECT copy.
  const escapedColumns = newColumns.filter((i) => !(i in added)).map((c) => sqlite.escapeName(c)).join(", ");
  return [
    getCreateTableQuery(unescTempName, newTable),
    `INSERT INTO ${tempName} (${escapedColumns}) SELECT ${escapedColumns} FROM ${tableName}`,
    `DROP TABLE ${tableName}`,
    `ALTER TABLE ${tempName} RENAME TO ${tableName}`
  ];
}
function isEmpty(obj) {
  // True when the object owns no enumerable entries.
  return Object.entries(obj).length === 0;
}
function canAlterTableAddColumn(column) {
  // SQLite's `ALTER TABLE ... ADD COLUMN` cannot express any of these
  // constraints, so their presence forces a full table recreate. Checks are
  // evaluated in the same order as before via short-circuiting.
  const blocked =
    column.schema.unique ||
    hasRuntimeDefault(column) ||
    (!column.schema.optional && !hasDefault(column)) ||
    hasPrimaryKey(column) ||
    Boolean(getReferencesConfig(column));
  return !blocked;
}
function canAlterTableDropColumn(column) {
  // Unique and primary-key columns cannot be removed with a plain
  // `ALTER TABLE ... DROP COLUMN`; those require a table recreate.
  return !column.schema.unique && !hasPrimaryKey(column);
}
function canRecreateTableWithoutDataLoss(added, updated) {
  // A table recreate loses data when a new column cannot be backfilled for
  // existing rows, or when a type change has no automatic conversion. The
  // first offending column (in iteration order) is reported.
  for (const [columnName, column] of Object.entries(added)) {
    const requiredPrimaryKey =
      hasPrimaryKey(column) && column.type !== "number" && !hasDefault(column);
    const requiredPlain = !column.schema.optional && !hasDefault(column);
    if (requiredPrimaryKey || requiredPlain) {
      return { dataLoss: true, columnName, reason: "added-required" };
    }
  }
  for (const [columnName, change] of Object.entries(updated)) {
    if (change.old.type !== change.new.type && !canChangeTypeWithoutQuery(change.old, change.new)) {
      return { dataLoss: true, columnName, reason: "updated-type" };
    }
  }
  return { dataLoss: false };
}
function getAdded(oldObj, newObj) {
  // Entries present in `newObj` but not in `oldObj`.
  return Object.fromEntries(
    Object.entries(newObj).filter(([key]) => !(key in oldObj))
  );
}
function getDropped(oldObj, newObj) {
  // Entries present in `oldObj` but missing from `newObj`.
  return Object.fromEntries(
    Object.entries(oldObj).filter(([key]) => !(key in newObj))
  );
}
function getUpdated(oldObj, newObj) {
  // Keys present in both objects whose values differ according to microdiff.
  // Keys whose old value is falsy are treated as additions, not updates.
  const updated = {};
  for (const [key, newValue] of Object.entries(newObj)) {
    const oldValue = oldObj[key];
    if (oldValue && diff(oldValue, newValue).length > 0) {
      updated[key] = newValue;
    }
  }
  return updated;
}
function getUpdatedColumns(oldColumns, newColumns) {
  // Collect columns whose definition changed, keyed by column name, as
  // { old, new } pairs. Renames are not detected here — a renamed column
  // shows up as one drop plus one add elsewhere.
  const updated = {};
  for (const [key, newColumn] of Object.entries(newColumns)) {
    let oldColumn = oldColumns[key];
    if (!oldColumn) continue;
    if (oldColumn.type !== newColumn.type && canChangeTypeWithoutQuery(oldColumn, newColumn)) {
      // For "safe" type changes (e.g. boolean -> number), re-validate the old
      // column's config under the new type. When that succeeds, diff against
      // the coerced column so a pure type switch with no other changes is not
      // reported as an update.
      const asNewColumn = columnSchema.safeParse({
        type: newColumn.type,
        schema: oldColumn.schema
      });
      if (asNewColumn.success) {
        oldColumn = asNewColumn.data;
      }
    }
    const diffResult = diff(oldColumn, newColumn);
    if (diffResult.length > 0) {
      updated[key] = { old: oldColumn, new: newColumn };
    }
  }
  return updated;
}
// Type migrations whose on-disk representation is already compatible, so no
// data-conversion query is needed when a column switches between them.
const typeChangesWithoutQuery = [
  { from: "boolean", to: "number" },
  { from: "date", to: "text" },
  { from: "json", to: "text" }
];
function canChangeTypeWithoutQuery(oldColumn, newColumn) {
  // True when the old -> new type pair is one of the known safe migrations.
  return typeChangesWithoutQuery.some(
    (change) => change.from === oldColumn.type && change.to === newColumn.type
  );
}
function hasRuntimeDefault(column) {
  // A default expressed as serialized SQL must execute on the database at
  // insert time, which `ALTER TABLE ... ADD COLUMN` cannot reproduce.
  const defaultValue = column.schema.default;
  return Boolean(defaultValue && isSerializedSQL(defaultValue));
}
function getProductionCurrentSnapshot(options) {
  // Convenience wrapper: fetch the latest stored schema snapshot from the
  // remote database described by `options` ({ url, token }).
  return getDbCurrentSnapshot(options.token, options.url);
}
async function getDbCurrentSnapshot(appToken, remoteUrl) {
  // Fetch the most recent schema snapshot stored in the database's
  // `_astro_db_snapshot` table. Returns undefined when that table does not
  // exist yet (i.e. the schema has never been pushed).
  const client = createClient({
    token: appToken,
    url: remoteUrl
  });
  try {
    const res = await client.get(
      // Latest snapshot
      sql`select snapshot from _astro_db_snapshot order by id desc limit 1;`
    );
    return JSON.parse(res.snapshot);
  } catch (error) {
    if (isDbError(error) && // If the schema was never pushed to the database yet the table won't exist.
    // Treat a missing snapshot table as an empty table.
    // When connecting to a remote database in that condition
    // the query will fail with the following error code and message.
    (error.code === "SQLITE_UNKNOWN" && error.message === "SQLITE_UNKNOWN: SQLite error: no such table: _astro_db_snapshot" || // When connecting to a local or in-memory database that does not have a snapshot table yet
    // the query will fail with the following error code and message.
    error.code === "SQLITE_ERROR" && error.message === "SQLITE_ERROR: no such table: _astro_db_snapshot")) {
      return;
    }
    throw error;
  }
}
function getDropTableQueriesForSnapshot(snapshot) {
  // Drop every table in the snapshot. Queries are emitted in reverse
  // declaration order (the original accumulated via unshift), preserved here
  // with an explicit reverse().
  return Object.keys(snapshot.schema)
    .map((tableName) => getDropTableIfExistsQuery(tableName))
    .reverse();
}
function createCurrentSnapshot({ tables = {} }) {
  // Deep-clone the table config via a JSON round-trip so later mutations to
  // the live config object cannot leak into the stored snapshot.
  return {
    version: MIGRATION_VERSION,
    schema: JSON.parse(JSON.stringify(tables))
  };
}
function createEmptySnapshot() {
  // Snapshot representing a database with no tables at all.
  const schema = {};
  return { version: MIGRATION_VERSION, schema };
}
function formatDataLossMessage(confirmations, isColor = true) {
  // Assemble the multi-line "cannot migrate automatically" message, numbering
  // each confirmation, and optionally strip ANSI color codes for plain output.
  const lines = [
    color.red("\u2716 We found some schema changes that cannot be handled automatically:"),
    ``,
    ...confirmations.map((message, index) => color.red(`  (${index + 1}) `) + message),
    ``,
    `To resolve, revert these changes or update your schema, and re-run the command.`,
    `You may also run 'astro db push --force-reset' to ignore all warnings and force-push your local database schema to production instead. All data will be lost and the database will be reset.`
  ];
  const joined = lines.join("\n");
  return isColor ? joined : stripVTControlCharacters(joined);
}
export {
createCurrentSnapshot,
createEmptySnapshot,
formatDataLossMessage,
getMigrationQueries,
getProductionCurrentSnapshot,
getTableChangeQueries
};

11
node_modules/@astrojs/db/dist/core/cli/print-help.d.ts generated vendored Normal file
View File

@@ -0,0 +1,11 @@
/**
 * Uses implementation from Astro core
 * @see https://github.com/withastro/astro/blob/main/packages/astro/src/core/messages.ts#L303
 *
 * Renders a formatted CLI help screen (banner, usage line, command tables,
 * trailing description) to stdout. Each table maps a section title to rows
 * of [command, help] pairs.
 */
export declare function printHelp({ commandName, headline, usage, tables, description, }: {
    commandName: string;
    headline?: string;
    usage?: string;
    tables?: Record<string, [command: string, help: string][]>;
    description?: string;
}): void;

55
node_modules/@astrojs/db/dist/core/cli/print-help.js generated vendored Normal file
View File

@@ -0,0 +1,55 @@
import colors from "piccolore";
function printHelp({
  commandName,
  headline,
  usage,
  tables,
  description
}) {
  // Render a CLI help screen to stdout: optional banner, usage line, one
  // padded table per section, and a trailing description.
  const linebreak = () => "";
  const title = (label) => `  ${colors.bgWhite(colors.black(` ${label} `))}`;
  const table = (rows, { padding }) => {
    // On narrow terminals, stack each command above its (dimmed) help text.
    const split = process.stdout.columns < 60;
    let raw = "";
    for (const row of rows) {
      if (split) {
        raw += `  ${row[0]}
`;
      } else {
        raw += `${`${row[0]}`.padStart(padding)}`;
      }
      raw += "  " + colors.dim(row[1]) + "\n";
    }
    return raw.slice(0, -1);
  };
  const message = [];
  if (headline) {
    message.push(
      linebreak(),
      `  ${colors.bgGreen(colors.black(` ${commandName} `))} ${colors.green(
        `v${"0.20.1"}`
      )} ${headline}`
    );
  }
  if (usage) {
    message.push(linebreak(), `  ${colors.green(commandName)} ${colors.bold(usage)}`);
  }
  if (tables) {
    // Fix: removed the dead hoisted `var calculateTablePadding = ...` alias
    // (a transpiler artifact that was assigned but never read).
    // Padding is the width of the widest command across ALL tables so the
    // help columns line up between sections.
    const calculateTablePadding = (rows) => {
      return rows.reduce((val, [first]) => Math.max(val, first.length), 0) + 2;
    };
    const tableEntries = Object.entries(tables);
    const padding = Math.max(...tableEntries.map(([, rows]) => calculateTablePadding(rows)));
    for (const [tableTitle, tableRows] of tableEntries) {
      message.push(linebreak(), title(tableTitle), table(tableRows, { padding }));
    }
  }
  if (description) {
    message.push(linebreak(), `${description}`);
  }
  console.log(message.join("\n") + "\n");
}
export {
printHelp
};

12
node_modules/@astrojs/db/dist/core/consts.d.ts generated vendored Normal file
View File

@@ -0,0 +1,12 @@
/** Pre-stringified import specifier for `@astrojs/db/runtime` (ready for codegen). */
export declare const RUNTIME_IMPORT: string;
/** Pre-stringified import specifier for the runtime virtual-module helpers. */
export declare const RUNTIME_VIRTUAL_IMPORT: string;
/** Virtual module id users import from. */
export declare const VIRTUAL_MODULE_ID = "astro:db";
/** Location of the local development database, relative to the project root. */
export declare const DB_PATH = ".astro/content.db";
/** Recognized db config file names inside the db directory. */
export declare const CONFIG_FILE_NAMES: string[];
/** Version tag stored inside schema snapshots. */
export declare const MIGRATION_VERSION = "2024-03-12";
/** Virtual module id for the generated db client. */
export declare const VIRTUAL_CLIENT_MODULE_ID = "virtual:astro:db-client";
/** Entry points for the mode-specific libSQL client wrappers. */
export declare const DB_CLIENTS: {
    node: string;
    web: string;
    local: string;
};

26
node_modules/@astrojs/db/dist/core/consts.js generated vendored Normal file
View File

@@ -0,0 +1,26 @@
import { readFileSync } from "node:fs";
// Resolve the package name from package.json at runtime so generated import
// specifiers stay correct even if the package is published under another name.
const PACKAGE_NAME = JSON.parse(
  readFileSync(new URL("../../package.json", import.meta.url), "utf8")
).name;
// Import specifiers, pre-stringified for direct interpolation into codegen.
const RUNTIME_IMPORT = JSON.stringify(`${PACKAGE_NAME}/runtime`);
const RUNTIME_VIRTUAL_IMPORT = JSON.stringify(`${PACKAGE_NAME}/dist/runtime/virtual.js`);
// Virtual module id users import from.
const VIRTUAL_MODULE_ID = "astro:db";
// Local development database file, relative to the project root.
const DB_PATH = ".astro/content.db";
const CONFIG_FILE_NAMES = ["config.ts", "config.js", "config.mts", "config.mjs"];
// Version tag stored inside schema snapshots.
const MIGRATION_VERSION = "2024-03-12";
const VIRTUAL_CLIENT_MODULE_ID = "virtual:astro:db-client";
// Entry points for the mode-specific libSQL client wrappers.
const DB_CLIENTS = {
  node: `${PACKAGE_NAME}/db-client/libsql-node.js`,
  web: `${PACKAGE_NAME}/db-client/libsql-web.js`,
  local: `${PACKAGE_NAME}/db-client/libsql-local.js`
};
export {
CONFIG_FILE_NAMES,
DB_CLIENTS,
DB_PATH,
MIGRATION_VERSION,
RUNTIME_IMPORT,
RUNTIME_VIRTUAL_IMPORT,
VIRTUAL_CLIENT_MODULE_ID,
VIRTUAL_MODULE_ID
};

View File

@@ -0,0 +1,6 @@
import { type LibSQLDatabase } from 'drizzle-orm/libsql';
/** Options for opening a local database file. */
type LocalDbClientOptions = {
    url: string;
};
/** Creates a drizzle client over a local libSQL database file. */
export declare function createClient(options: LocalDbClientOptions): LibSQLDatabase;
export {};

View File

@@ -0,0 +1,12 @@
import { createClient as createLibsqlClient } from "@libsql/client";
import { drizzle as drizzleLibsql } from "drizzle-orm/libsql";
// NOTE(review): web containers (process.versions.webcontainer) appear unable
// to use the configured absolute file URL, so a relative database file is
// used instead — confirm against the container runtime docs.
const isWebContainer = !!process.versions?.webcontainer;
function createClient(options) {
  // Open the local libSQL database file and wrap it in a drizzle client.
  const url = isWebContainer ? "file:content.db" : options.url;
  return drizzleLibsql(createLibsqlClient({ url }));
}
export {
createClient
};

View File

@@ -0,0 +1,8 @@
/** Connection details for a remote libSQL database. */
type RemoteDbClientOptions = {
    token: string;
    url: string;
};
/** Creates a drizzle client over `@libsql/client` for remote/file/memory URLs. */
export declare function createClient(opts: RemoteDbClientOptions): import("drizzle-orm/libsql").LibSQLDatabase<Record<string, never>> & {
    $client: import("@libsql/client").Client;
};
export {};

View File

@@ -0,0 +1,21 @@
import { createClient as createLibsqlClient } from "@libsql/client";
import { drizzle as drizzleLibsql } from "drizzle-orm/libsql";
import { parseLibSQLConfig } from "./utils.js";
function createClient(opts) {
  // Build a libSQL client for a remote (or file/in-memory) database URL.
  // Connection options arrive as query-string params on the URL and are
  // stripped out before the URL is handed to @libsql/client.
  const { token, url: rawUrl } = opts;
  let parsedUrl = new URL(rawUrl);
  const options = Object.fromEntries(parsedUrl.searchParams.entries());
  parsedUrl.search = "";
  let url = parsedUrl.toString();
  if (parsedUrl.protocol === "memory:") {
    // `memory:` URLs map to libSQL's in-memory database.
    url = ":memory:";
  } else if (parsedUrl.protocol === "file:" && parsedUrl.pathname.startsWith("/") && !rawUrl.startsWith("file:/")) {
    // A relative `file:` URL (e.g. `file:content.db`) is normalized by the
    // URL parser into an absolute path; strip the leading "/" to restore the
    // original relative path.
    url = "file:" + parsedUrl.pathname.substring(1);
  }
  const libSQLOptions = parseLibSQLConfig(options);
  const client = createLibsqlClient({ ...libSQLOptions, url, authToken: token });
  return drizzleLibsql(client);
}
export {
createClient
};

View File

@@ -0,0 +1,8 @@
/** Connection details for a remote libSQL database (web/network client). */
type RemoteDbClientOptions = {
    token: string;
    url: string;
};
/** Creates a drizzle client over `@libsql/client/web`; network protocols only. */
export declare function createClient(opts: RemoteDbClientOptions): import("drizzle-orm/libsql").LibSQLDatabase<Record<string, never>> & {
    $client: import("@libsql/client/web").Client;
};
export {};

View File

@@ -0,0 +1,22 @@
import { createClient as createLibsqlClient } from "@libsql/client/web";
import { drizzle as drizzleLibsql } from "drizzle-orm/libsql/web";
import { parseLibSQLConfig } from "./utils.js";
function createClient(opts) {
  // Web variant of the remote client: only network protocols are accepted,
  // since the web client cannot open local files or in-memory databases.
  // Connection options are carried as query-string params and stripped off
  // before the URL reaches @libsql/client.
  const { token, url: rawUrl } = opts;
  const parsedUrl = new URL(rawUrl);
  const options = Object.fromEntries(parsedUrl.searchParams.entries());
  parsedUrl.search = "";
  const url = parsedUrl.toString();
  const supportedProtocols = ["http:", "https:", "libsql:"];
  if (!supportedProtocols.includes(parsedUrl.protocol)) {
    throw new Error(
      `Unsupported protocol "${parsedUrl.protocol}" for libSQL web client. Supported protocols are: ${supportedProtocols.join(", ")}.`
    );
  }
  const libSQLOptions = parseLibSQLConfig(options);
  return drizzleLibsql(createLibsqlClient({ ...libSQLOptions, url, authToken: token }));
}
export {
createClient
};

View File

@@ -0,0 +1,2 @@
import type { Config as LibSQLConfig } from '@libsql/client';
/** Parses raw query-string options into a partial `@libsql/client` Config. */
export declare const parseLibSQLConfig: (config: Record<string, string>) => Partial<LibSQLConfig>;

46
node_modules/@astrojs/db/dist/core/db-client/utils.js generated vendored Normal file
View File

@@ -0,0 +1,46 @@
import * as z from "zod/v4";
// Raw query-string options: every key and value arrives as a string.
const rawLibSQLOptions = z.record(z.string(), z.string());
// Coerce a numeric option string (e.g. "5") to a number.
const parseNumber = (value) => z.coerce.number().parse(value);
// NOTE(review): z.coerce.boolean() applies Boolean(value), so ANY non-empty
// string — including "false" — coerces to true. Callers must not rely on it
// to interpret "true"/"false" literals.
const parseBoolean = (value) => z.coerce.boolean().parse(value);
const booleanValues = ["true", "false"];
const parseOptionalBoolean = (value) => {
  // Boolean connection options may appear as a bare flag (`?tls`, value "")
  // meaning "enabled", or with an explicit "true"/"false" value.
  //
  // Bug fix: the previous implementation funneled "true"/"false" through
  // z.coerce.boolean(), which applies Boolean(value) — so the string "false"
  // coerced to `true` and e.g. `?tls=false` silently ENABLED the option.
  // Compare against the literal strings instead.
  if (value === "false") return false;
  return true;
};
// Convert string-valued query params into typed libSQL config fields.
// Unknown keys are silently dropped rather than rejected.
const libSQLConfigTransformed = rawLibSQLOptions.transform((raw) => {
  const parsed = {};
  for (const [key, value] of Object.entries(raw)) {
    switch (key) {
      case "syncInterval":
      case "concurrency":
        // Numeric options.
        parsed[key] = parseNumber(value);
        break;
      case "readYourWrites":
      case "offline":
      case "tls":
        // Boolean options; a bare flag (empty value) counts as true.
        parsed[key] = parseOptionalBoolean(value);
        break;
      case "authToken":
      case "encryptionKey":
      case "syncUrl":
        // Passed through as-is.
        parsed[key] = value;
        break;
    }
  }
  return parsed;
});
const parseLibSQLConfig = (config) => {
  // Parse raw query-string options into a typed libSQL config, rewriting zod
  // validation failures into a single readable error. Non-zod errors are
  // re-thrown untouched.
  try {
    return libSQLConfigTransformed.parse(config);
  } catch (error) {
    if (error instanceof z.ZodError) {
      const details = error.issues.map((issue) => issue.message).join(", ");
      throw new Error(`Invalid LibSQL config: ${details}`);
    }
    throw error;
  }
};
export {
parseLibSQLConfig
};

8
node_modules/@astrojs/db/dist/core/errors.d.ts generated vendored Normal file
View File

@@ -0,0 +1,8 @@
/** Shown when `astro db execute` is run without a file path argument. */
export declare const MISSING_EXECUTE_PATH_ERROR: string;
/** A table was added and another removed in the same push (suspected rename). */
export declare const RENAME_TABLE_ERROR: (oldTable: string, newTable: string) => string;
/** A column was added and another removed in the same table (suspected rename). */
export declare const RENAME_COLUMN_ERROR: (oldSelector: string, newSelector: string) => string;
/** The file passed to `astro db execute` does not exist. */
export declare const FILE_NOT_FOUND_ERROR: (path: string) => string;
/** Shown when `astro db shell` is run without `--query`. */
export declare const SHELL_QUERY_MISSING_ERROR: string;
/** Wraps an arbitrary error raised while executing a user file. */
export declare const EXEC_ERROR: (error: string) => string;
/** The executed file is missing a default function export. */
export declare const EXEC_DEFAULT_EXPORT_ERROR: (fileName: string) => string;
/** Two integrations (or an integration and the user) declare the same table. */
export declare const INTEGRATION_TABLE_CONFLICT_ERROR: (integrationName: string, tableName: string, isUserConflict: boolean) => string;

48
node_modules/@astrojs/db/dist/core/errors.js generated vendored Normal file
View File

@@ -0,0 +1,48 @@
import colors from "piccolore";
// Shown when `astro db execute` is run without a file path argument.
const MISSING_EXECUTE_PATH_ERROR = `${colors.red(
  "\u25B6 No file path provided."
)} Provide a path by running ${colors.cyan("astro db execute <path>")}
`;
// A table was added and another removed in the same push, which looks like a
// rename; renames must go through deprecation or a forced reset instead.
const RENAME_TABLE_ERROR = (oldTable, newTable) => {
  return colors.red("\u25B6 Potential table rename detected: " + oldTable + " -> " + newTable) + `
You cannot add and remove tables in the same schema update batch.
  1. Use "deprecated: true" to deprecate a table before renaming.
  2. Use "--force-reset" to ignore this warning and reset the database (deleting all of your data).
Visit https://docs.astro.build/en/guides/astro-db/#renaming-tables to learn more.`;
};
// Same idea as RENAME_TABLE_ERROR but for a suspected column rename.
const RENAME_COLUMN_ERROR = (oldSelector, newSelector) => {
  return colors.red("\u25B6 Potential column rename detected: " + oldSelector + ", " + newSelector) + `
You cannot add and remove columns in the same table.
To resolve, add a 'deprecated: true' flag to '${oldSelector}' instead.`;
};
const FILE_NOT_FOUND_ERROR = (path) => `${colors.red("\u25B6 File not found:")} ${colors.bold(path)}
`;
// Shown when `astro db shell` is run without --query.
const SHELL_QUERY_MISSING_ERROR = `${colors.red(
  "\u25B6 Please provide a query to execute using the --query flag."
)}
`;
// Wraps an arbitrary error raised while executing a user-provided file.
const EXEC_ERROR = (error) => {
  return `${colors.red(`Error while executing file:`)}
${error}`;
};
const EXEC_DEFAULT_EXPORT_ERROR = (fileName) => {
  return EXEC_ERROR(`Missing default function export in ${colors.bold(fileName)}`);
};
const INTEGRATION_TABLE_CONFLICT_ERROR = (integrationName, tableName, isUserConflict) => {
  // Bug fix: `+` binds tighter than `?:`, so the previous expression used
  // `redHeading + isUserConflict` (an always-truthy string) as the ternary
  // condition — the red heading was dropped from the output and the
  // user-conflict branch was returned unconditionally. Parenthesize the
  // branches and concatenate explicitly.
  const heading = colors.red(
    "\u25B6 Conflicting table name in integration " + colors.bold(integrationName)
  );
  const detail = isUserConflict ? `
A user-defined table named ${colors.bold(tableName)} already exists` : `
Another integration already added a table named ${colors.bold(tableName)}`;
  return heading + detail;
};
export {
EXEC_DEFAULT_EXPORT_ERROR,
EXEC_ERROR,
FILE_NOT_FOUND_ERROR,
INTEGRATION_TABLE_CONFLICT_ERROR,
MISSING_EXECUTE_PATH_ERROR,
RENAME_COLUMN_ERROR,
RENAME_TABLE_ERROR,
SHELL_QUERY_MISSING_ERROR
};

View File

@@ -0,0 +1,2 @@
import type { $ZodErrorMap } from 'zod/v4/core';
/** Custom zod error map producing readable, markdown-flavored messages. */
export declare const errorMap: $ZodErrorMap;

View File

@@ -0,0 +1,101 @@
const errorMap = (issue) => {
  // Custom zod error map: formats validation issues into readable,
  // markdown-flavored messages (bolded key paths, `>`-prefixed details).
  const baseErrorPath = flattenErrorPath(issue.path ?? []);
  if (issue.code === "invalid_union") {
    // Group invalid_type errors from all union branches by error path,
    // accumulating the expected types so they can print as one union.
    let typeOrLiteralErrByPath = /* @__PURE__ */ new Map();
    for (const unionError of issue.errors.flat()) {
      if (unionError.code === "invalid_type") {
        const flattenedErrorPath = flattenErrorPath(unionError.path);
        if (typeOrLiteralErrByPath.has(flattenedErrorPath)) {
          typeOrLiteralErrByPath.get(flattenedErrorPath).expected.push(unionError.expected);
        } else {
          typeOrLiteralErrByPath.set(flattenedErrorPath, {
            code: unionError.code,
            received: unionError.received,
            expected: [unionError.expected],
            message: unionError.message
          });
        }
      }
    }
    const messages = [prefix(baseErrorPath, "Did not match union.")];
    // Only report paths that failed in EVERY branch of the union.
    const details = [...typeOrLiteralErrByPath.entries()].filter(([, error]) => error.expected.length === issue.errors.flat().length).map(
      ([key, error]) => key === baseErrorPath ? (
        // Avoid printing the key again if it's a base error
        `> ${getTypeOrLiteralMsg(error)}`
      ) : `> ${prefix(key, getTypeOrLiteralMsg(error))}`
    );
    if (details.length === 0) {
      // No common invalid_type failures: reconstruct the expected object
      // shape(s) from each branch's issues instead.
      const expectedShapes = [];
      for (const unionErrors of issue.errors) {
        const expectedShape = [];
        for (const _issue of unionErrors) {
          if (_issue.code === "invalid_union") {
            // Nested union: recurse and use its formatted message directly.
            return errorMap(_issue);
          }
          const relativePath = flattenErrorPath(_issue.path).replace(baseErrorPath, "").replace(leadingPeriod, "");
          if ("expected" in _issue && typeof _issue.expected === "string") {
            expectedShape.push(
              relativePath ? `${relativePath}: ${_issue.expected}` : _issue.expected
            );
          } else if ("values" in _issue) {
            expectedShape.push(
              ..._issue.values.filter((v) => typeof v === "string").map((v) => `"${v}"`)
            );
          } else if (relativePath) {
            expectedShape.push(relativePath);
          }
        }
        if (expectedShape.length === 1 && !expectedShape[0]?.includes(":")) {
          // A single bare type (no `key: type` entries) prints without braces.
          expectedShapes.push(expectedShape.join(""));
        } else if (expectedShape.length > 0) {
          expectedShapes.push(`{ ${expectedShape.join("; ")} }`);
        }
      }
      if (expectedShapes.length) {
        details.push("> Expected type `" + expectedShapes.join(" | ") + "`");
        details.push("> Received `" + stringify(issue.input) + "`");
      }
    }
    return {
      message: messages.concat(details).join("\n")
    };
  } else if (issue.code === "invalid_type") {
    return {
      message: prefix(
        baseErrorPath,
        getTypeOrLiteralMsg({
          code: issue.code,
          received: typeof issue.input,
          expected: [issue.expected],
          message: issue.message
        })
      )
    };
  } else if (issue.message) {
    return { message: prefix(baseErrorPath, issue.message) };
  }
};
const getTypeOrLiteralMsg = (error) => {
  // Missing values render as a plain "Required" (or the issue's own message).
  if (typeof error.received === "undefined" || error.received === "undefined") {
    return error.message ?? "Required";
  }
  const dedupedExpected = new Set(error.expected);
  switch (error.code) {
    case "invalid_type": {
      const expected = unionExpectedVals(dedupedExpected);
      return `Expected type \`${expected}\`, received \`${stringify(error.received)}\``;
    }
    case "invalid_literal": {
      const expected = unionExpectedVals(dedupedExpected);
      return `Expected \`${expected}\`, received \`${stringify(error.received)}\``;
    }
  }
};
// Prepend the bolded error-path key to a message; bare message when no key.
const prefix = (key, msg) => (key.length === 0 ? msg : `**${key}**: ${msg}`);
// Render a set of expected values as a ` | `-separated union of literals.
const unionExpectedVals = (expectedVals) => {
  const parts = [];
  for (const expectedVal of expectedVals) {
    parts.push(stringify(expectedVal));
  }
  return parts.join(" | ");
};
// Join an error path (array of keys/indices) into a dotted selector string.
const flattenErrorPath = (errorPath) => errorPath.join(".");
// Matches the newline + indentation JSON.stringify inserts when pretty-printing.
const newlinePlusWhitespace = /\n\s*/;
// Compact pretty-printed JSON onto one line, separating tokens with spaces.
const stringify = (val) => JSON.stringify(val, null, 1).split(newlinePlusWhitespace).join(" ");
// Strips the leading "." left after removing a base-path prefix.
const leadingPeriod = /^\./;
export {
errorMap
};

View File

@@ -0,0 +1,2 @@
import type { AstroIntegration } from 'astro';
/** Integration handling `?fileurl` imports: emits assets at build, relocates or removes them afterwards. */
export declare function fileURLIntegration(): AstroIntegration;

View File

@@ -0,0 +1,84 @@
import fs from "node:fs";
import path from "node:path";
import { pathToFileURL } from "node:url";
async function copyFile(toDir, fromUrl, toUrl) {
  // Despite the name, this MOVES the file (original behavior used rename).
  // Ensures the destination directory exists first.
  await fs.promises.mkdir(toDir, { recursive: true });
  try {
    await fs.promises.rename(fromUrl, toUrl);
  } catch (error) {
    // rename() cannot cross filesystem boundaries (EXDEV), e.g. when the
    // output dir is on another mount; fall back to copy + delete.
    if (error && error.code === "EXDEV") {
      await fs.promises.copyFile(fromUrl, toUrl);
      await fs.promises.unlink(fromUrl);
    } else {
      throw error;
    }
  }
}
function fileURLIntegration() {
  // Astro integration backing `?fileurl` imports: during a build, referenced
  // files are emitted as Rollup assets; after the build they are moved into
  // the server output (SSR) or deleted (static output, where no server code
  // could ever read them).
  const fileNames = [];
  function createVitePlugin(command) {
    let referenceIds = [];
    return {
      name: "@astrojs/db/file-url",
      enforce: "pre",
      load: {
        filter: {
          id: /\?fileurl$/
        },
        async handler(id) {
          const filePath = id.slice(0, id.indexOf("?"));
          if (command === "build") {
            // Emit the file as an asset; the module resolves to its final URL.
            const data = await fs.promises.readFile(filePath);
            const name = path.basename(filePath);
            const referenceId = this.emitFile({
              name,
              source: data,
              type: "asset"
            });
            referenceIds.push(referenceId);
            return `export default import.meta.ROLLUP_FILE_URL_${referenceId};`;
          } else {
            // In dev, just hand back the on-disk file URL.
            return `export default new URL(${JSON.stringify(pathToFileURL(filePath).toString())})`;
          }
        }
      },
      generateBundle() {
        // Record final asset file names for post-build processing, then reset
        // so a subsequent bundle pass starts clean.
        for (const referenceId of referenceIds) {
          fileNames.push(this.getFileName(referenceId));
        }
        referenceIds = [];
      }
    };
  }
  let config;
  return {
    name: "@astrojs/db/file-url",
    hooks: {
      "astro:config:setup"({ updateConfig, command }) {
        updateConfig({
          vite: {
            plugins: [createVitePlugin(command)]
          }
        });
      },
      "astro:config:done": ({ config: _config }) => {
        // Keep the final config for use in astro:build:done.
        config = _config;
      },
      async "astro:build:done"() {
        if (config.output === "static") {
          // Static output: remove the emitted assets, then try to remove the
          // asset dir (ignoring failure when it is not empty).
          const unlinks = [];
          for (const fileName of fileNames) {
            const url = new URL(fileName, config.outDir);
            unlinks.push(fs.promises.unlink(url));
          }
          await Promise.all(unlinks);
          const assetDir = new URL(config.build.assets, config.outDir);
          await fs.promises.rmdir(assetDir).catch(() => []);
        } else {
          // SSR: move assets from the client build into the server build so
          // server code can read them at runtime.
          const moves = [];
          for (const fileName of fileNames) {
            const fromUrl = new URL(fileName, config.build.client);
            const toUrl = new URL(fileName, config.build.server);
            const toDir = new URL("./", toUrl);
            moves.push(copyFile(toDir, fromUrl, toUrl));
          }
          await Promise.all(moves);
        }
      }
    }
  };
}
export {
fileURLIntegration
};

View File

@@ -0,0 +1,8 @@
import type { AstroIntegration } from 'astro';
import * as z from 'zod/v4';
/** Schema for user-facing `astro:db` options; `prefault` applies defaults when omitted. */
declare const astroDBConfigSchema: z.ZodPrefault<z.ZodOptional<z.ZodObject<{
    mode: z.ZodDefault<z.ZodOptional<z.ZodUnion<readonly [z.ZodLiteral<"node">, z.ZodLiteral<"web">]>>>;
}, z.core.$strip>>>;
export type AstroDBConfig = z.infer<typeof astroDBConfigSchema>;
/** Returns the astro:db integration plus its file-URL asset helper. */
export declare function integration(options?: AstroDBConfig): AstroIntegration[];
export {};

215
node_modules/@astrojs/db/dist/core/integration/index.js generated vendored Normal file
View File

@@ -0,0 +1,215 @@
import { existsSync } from "node:fs";
import { mkdir, writeFile } from "node:fs/promises";
import { dirname } from "node:path";
import { fileURLToPath } from "node:url";
import colors from "piccolore";
import {
createServer,
loadEnv,
mergeConfig
} from "vite";
import parseArgs from "yargs-parser";
import * as z from "zod/v4";
import { AstroDbError, isDbError } from "../../runtime/utils.js";
import { CONFIG_FILE_NAMES, DB_PATH, VIRTUAL_MODULE_ID } from "../consts.js";
import { EXEC_DEFAULT_EXPORT_ERROR, EXEC_ERROR } from "../errors.js";
import { resolveDbConfig } from "../load-file.js";
import { SEED_DEV_FILE_NAME } from "../queries.js";
import { getDbDirectoryUrl, getRemoteDatabaseInfo } from "../utils.js";
import { fileURLIntegration } from "./file-url.js";
import { getDtsContent } from "./typegen.js";
import {
vitePluginDb
} from "./vite-plugin-db.js";
import { vitePluginDbClient } from "./vite-plugin-db-client.js";
// User-facing options for the astro:db integration. `prefault({})` lets the
// whole options object be omitted while still applying per-field defaults.
const astroDBConfigSchema = z.object({
  /**
   * Sets the mode of the underlying `@libsql/client` connection.
   *
   * In most cases, the default 'node' mode is sufficient. On platforms like Cloudflare, or Deno, you may need to set this to 'web'.
   *
   * @default 'node'
   */
  mode: z.union([z.literal("node"), z.literal("web")]).optional().default("node")
}).optional().prefault({});
function astroDBIntegration(options) {
  // Core integration: wires the astro:db virtual module, local database file
  // creation, seed-file execution, and remote-vs-local client selection into
  // Astro's dev/build lifecycle hooks.
  const resolvedConfig = astroDBConfigSchema.parse(options);
  let connectToRemote = false;
  let configFileDependencies = [];
  let root;
  let tempViteServer;
  // `tables`/`seedFiles`/`seedHandler` are late-bound holders: the Vite
  // plugins capture these objects in astro:config:setup, and their
  // `get`/`execute` members are swapped in once the config is resolved.
  let tables = {
    get() {
      throw new Error("[astro:db] INTERNAL Tables not loaded yet");
    }
  };
  let seedFiles = {
    get() {
      throw new Error("[astro:db] INTERNAL Seed files not loaded yet");
    }
  };
  let seedHandler = {
    execute: () => {
      throw new Error("[astro:db] INTERNAL Seed handler not loaded yet");
    },
    inProgress: false
  };
  let command;
  let finalBuildOutput;
  return {
    name: "astro:db",
    hooks: {
      "astro:config:setup": async ({ updateConfig, config, command: _command, logger }) => {
        command = _command;
        root = config.root;
        if (command === "preview") return;
        let dbPlugin = void 0;
        // The `--remote` CLI flag (or a test-only env override) switches to
        // the remote database.
        const args = parseArgs(process.argv.slice(3));
        connectToRemote = process.env.ASTRO_INTERNAL_TEST_REMOTE || args["remote"];
        const dbClientPlugin = vitePluginDbClient({
          connectToRemote,
          mode: resolvedConfig.mode
        });
        if (connectToRemote) {
          dbPlugin = vitePluginDb({
            connectToRemote,
            appToken: getRemoteDatabaseInfo().token,
            tables,
            root: config.root,
            srcDir: config.srcDir,
            output: config.output,
            seedHandler
          });
        } else {
          dbPlugin = vitePluginDb({
            connectToRemote,
            tables,
            seedFiles,
            root: config.root,
            srcDir: config.srcDir,
            output: config.output,
            logger,
            seedHandler
          });
        }
        updateConfig({
          vite: {
            assetsInclude: [DB_PATH],
            plugins: [dbClientPlugin, dbPlugin]
          }
        });
      },
      "astro:config:done": async ({ config, injectTypes, buildOutput }) => {
        if (command === "preview") return;
        finalBuildOutput = buildOutput;
        // Resolve the user's db config and late-bind the holders above.
        const { dbConfig, dependencies, integrationSeedPaths } = await resolveDbConfig(config);
        tables.get = () => dbConfig.tables;
        seedFiles.get = () => integrationSeedPaths;
        configFileDependencies = dependencies;
        // Ensure an (empty) local database file exists before dev/build.
        const localDbUrl = new URL(DB_PATH, config.root);
        if (!connectToRemote && !existsSync(localDbUrl)) {
          await mkdir(dirname(fileURLToPath(localDbUrl)), { recursive: true });
          await writeFile(localDbUrl, "");
        }
        injectTypes({
          filename: "db.d.ts",
          content: getDtsContent(tables.get() ?? {})
        });
      },
      "astro:server:setup": async ({ server, logger }) => {
        const environment = server.environments.ssr;
        seedHandler.execute = async (fileUrl) => {
          await executeSeedFile({ fileUrl, environment });
        };
        // Restart the dev server whenever the db config or its dependencies
        // change on disk.
        const filesToWatch = [
          ...CONFIG_FILE_NAMES.map((c) => new URL(c, getDbDirectoryUrl(root))),
          ...configFileDependencies.map((c) => new URL(c, root))
        ];
        server.watcher.on("all", (_event, relativeEntry) => {
          const entry = new URL(relativeEntry, root);
          if (filesToWatch.some((f) => entry.href === f.href)) {
            server.restart();
          }
        });
        logger.info(
          connectToRemote ? "Connected to remote database." : "New local database created."
        );
        if (connectToRemote) return;
        // Importing the virtual module triggers local seeding when seed files
        // exist (either integration-provided or local dev seed files).
        const localSeedPaths = SEED_DEV_FILE_NAME.map(
          (name) => new URL(name, getDbDirectoryUrl(root))
        );
        if (seedFiles.get().length || localSeedPaths.find((path) => existsSync(path))) {
          await environment.runner.import(VIRTUAL_MODULE_ID).catch((e) => {
            logger.error(e instanceof Error ? e.message : String(e));
          });
        }
      },
      "astro:build:start": async ({ logger }) => {
        // A server build needs a real database: either --remote or a local
        // libSQL file configured via ASTRO_DATABASE_FILE.
        if (!connectToRemote && !databaseFileEnvDefined() && finalBuildOutput === "server") {
          const message = `Attempting to build without the --remote flag or the ASTRO_DATABASE_FILE environment variable defined. You probably want to pass --remote to astro build.`;
          const hint = "Learn more connecting to libSQL: https://docs.astro.build/en/guides/astro-db/#connect-a-libsql-database-for-production";
          throw new AstroDbError(message, hint);
        }
        logger.info(
          "database: " + (connectToRemote ? colors.yellow("remote") : colors.blue("local database."))
        );
      },
      "astro:build:setup": async ({ vite }) => {
        // Seeding during a build runs through a throwaway Vite server.
        tempViteServer = await getTempViteServer({ viteConfig: vite });
        const environment = tempViteServer.environments.ssr;
        seedHandler.execute = async (fileUrl) => {
          await executeSeedFile({ fileUrl, environment });
        };
      },
      "astro:build:done": async () => {
        await tempViteServer?.close();
      }
    }
  };
}
function databaseFileEnvDefined() {
  // True when ASTRO_DATABASE_FILE is set either via .env files (loaded with
  // Vite's loadEnv) or directly in the live process environment.
  const fileEnv = loadEnv("", process.cwd());
  const candidates = [fileEnv.ASTRO_DATABASE_FILE, process.env.ASTRO_DATABASE_FILE];
  return candidates.some((value) => value != null);
}
function integration(options) {
  // Public entry point: the core DB integration plus the file-URL helper.
  const dbIntegration = astroDBIntegration(options);
  return [dbIntegration, fileURLIntegration()];
}
async function executeSeedFile({
  fileUrl,
  environment
}) {
  // Load the seed module through the Vite module runner and invoke its
  // default export, wrapping database errors with CLI-friendly context.
  const pathname = decodeURIComponent(fileUrl.pathname);
  const mod = await environment.runner.import(pathname);
  if (typeof mod.default !== "function") {
    throw new AstroDbError(EXEC_DEFAULT_EXPORT_ERROR(fileURLToPath(fileUrl)));
  }
  try {
    await mod.default();
  } catch (error) {
    // Database errors get rewrapped; everything else propagates unchanged.
    if (isDbError(error)) {
      throw new AstroDbError(EXEC_ERROR(error.message));
    }
    throw error;
  }
}
async function getTempViteServer({ viteConfig }) {
  // Spin up a minimal middleware-mode Vite server used only to execute seed
  // files during a build: no HMR, no websockets, no file watching, no
  // dependency discovery, silent logging.
  const tempViteServer = await createServer(
    mergeConfig(viteConfig, {
      server: { middlewareMode: true, hmr: false, watch: null, ws: false },
      optimizeDeps: { noDiscovery: true },
      ssr: { external: [] },
      logLevel: "silent"
    })
  );
  // Surface module errors directly instead of sending them to a (nonexistent)
  // HMR client; all other payloads pass through to the original sender.
  const hotSend = tempViteServer.environments.client.hot.send;
  tempViteServer.environments.client.hot.send = (payload) => {
    if (payload.type === "error") {
      throw payload.err;
    }
    return hotSend(payload);
  };
  return tempViteServer;
}
export {
integration
};

View File

@@ -0,0 +1,2 @@
import type { DBTables } from '../types.js';
/** Generates the ambient `astro:db` module declaration for the given tables. */
export declare function getDtsContent(tables: DBTables): string;

View File

@@ -0,0 +1,21 @@
import { RUNTIME_IMPORT } from "../consts.js";
function getDtsContent(tables) {
  // Build the ambient `astro:db` module declaration injected into the user's
  // project, with one typed `export const <Table>` per configured table.
  const content = `// This file is generated by Astro DB
declare module 'astro:db' {
${Object.entries(tables).map(([name, table]) => generateTableType(name, table)).join("\n")}
}
`;
  return content;
}
function generateTableType(name, table) {
  // Columns flagged `deprecated` are omitted from the generated types so they
  // no longer surface in editor completions (they remain queryable at runtime).
  const sanitizedColumnsList = Object.entries(table.columns).filter(([, val]) => !val.schema.deprecated);
  const sanitizedColumns = Object.fromEntries(sanitizedColumnsList);
  let tableType = `	export const ${name}: import(${RUNTIME_IMPORT}).Table<
	${JSON.stringify(name)},
	${JSON.stringify(sanitizedColumns)}
	>;`;
  return tableType;
}
export {
getDtsContent
};

View File

@@ -0,0 +1,7 @@
import type { VitePlugin } from '../utils.js';
/** Options for {@link vitePluginDbClient}. */
type VitePluginDBClientParams = {
    /** When true, the virtual module re-exports the remote client; otherwise the local one. */
    connectToRemote: boolean;
    /** Which remote client build to serve: Node or web runtime. */
    mode: 'node' | 'web';
};
/** Vite plugin providing the virtual module that exports `createClient`. */
export declare function vitePluginDbClient(params: VitePluginDBClientParams): VitePlugin;
export {};

View File

@@ -0,0 +1,50 @@
import { DB_CLIENTS, VIRTUAL_CLIENT_MODULE_ID } from "../consts.js";
function getRemoteClientModule(mode) {
  // Pick the remote client build for the requested runtime.
  // Any mode other than "web" (including unknown values) gets the Node build.
  if (mode === "web") {
    return `export { createClient } from '${DB_CLIENTS.web}';`;
  }
  return `export { createClient } from '${DB_CLIENTS.node}';`;
}
function getLocalClientModule(_mode) {
  // Local development always uses the local client build; the mode argument
  // is accepted for signature symmetry with getRemoteClientModule but has
  // no effect on the result.
  return `export { createClient } from '${DB_CLIENTS.local}';`;
}
const resolved = "\0" + VIRTUAL_CLIENT_MODULE_ID;
function vitePluginDbClient(params) {
  // Vite plugin backing the db-client virtual module: `resolveId` maps the
  // public virtual id to its "\0"-prefixed resolved form, and `load` serves
  // a tiny module re-exporting the appropriate `createClient`.
  return {
    name: "virtual:astro:db-client",
    enforce: "pre",
    resolveId: {
      filter: {
        id: new RegExp(`^${VIRTUAL_CLIENT_MODULE_ID}$`)
      },
      handler() {
        return resolved;
      }
    },
    load: {
      filter: {
        id: new RegExp(`^${resolved}$`)
      },
      handler() {
        // Strict comparison mirrors the original boolean switch: only an
        // exact `true` selects the remote client.
        return params.connectToRemote === true
          ? getRemoteClientModule(params.mode)
          : getLocalClientModule(params.mode);
      }
    }
  };
}
export {
vitePluginDbClient
};

View File

@@ -0,0 +1,60 @@
import type { AstroConfig, AstroIntegrationLogger } from 'astro';
import type { DBTables } from '../types.js';
import { type VitePlugin } from '../utils.js';
export type LateTables = {
get: () => DBTables;
};
export type LateSeedFiles = {
get: () => Array<string | URL>;
};
export type SeedHandler = {
inProgress: boolean;
execute: (fileUrl: URL) => Promise<void>;
};
type VitePluginDBParams = {
connectToRemote: false;
tables: LateTables;
seedFiles: LateSeedFiles;
srcDir: URL;
root: URL;
logger?: AstroIntegrationLogger;
output: AstroConfig['output'];
seedHandler: SeedHandler;
} | {
connectToRemote: true;
tables: LateTables;
appToken: string;
srcDir: URL;
root: URL;
output: AstroConfig['output'];
seedHandler: SeedHandler;
};
export declare function vitePluginDb(params: VitePluginDBParams): VitePlugin;
export declare function getConfigVirtualModContents(): string;
export declare function getLocalVirtualModContents({ tables, root, localExecution, }: {
tables: DBTables;
root: URL;
/**
* Used for the execute command to import the client directly.
* In other cases, we use the runtime only vite virtual module.
*
* This is used to ensure that the client is imported correctly
* when executing commands like `astro db execute`.
*/
localExecution: boolean;
}): string;
export declare function getRemoteVirtualModContents({ tables, appToken, isBuild, output, localExecution, }: {
tables: DBTables;
appToken: string;
isBuild: boolean;
output: AstroConfig['output'];
/**
* Used for the execute command to import the client directly.
* In other cases, we use the runtime only vite virtual module.
*
* This is used to ensure that the client is imported correctly
* when executing commands like `astro db execute`.
*/
localExecution: boolean;
}): string;
export {};

View File

@@ -0,0 +1,191 @@
import { existsSync } from "node:fs";
import { fileURLToPath } from "node:url";
import { sql } from "drizzle-orm";
import { SQLiteAsyncDialect } from "drizzle-orm/sqlite-core";
import { normalizeDatabaseUrl } from "../../runtime/index.js";
import {
DB_CLIENTS,
DB_PATH,
RUNTIME_IMPORT,
RUNTIME_VIRTUAL_IMPORT,
VIRTUAL_CLIENT_MODULE_ID,
VIRTUAL_MODULE_ID
} from "../consts.js";
import { createClient } from "../db-client/libsql-local.js";
import { getResolvedFileUrl } from "../load-file.js";
import { getCreateIndexQueries, getCreateTableQuery, SEED_DEV_FILE_NAME } from "../queries.js";
import {
getAstroEnv,
getDbDirectoryUrl,
getRemoteDatabaseInfo
} from "../utils.js";
const resolved = {
module: "\0" + VIRTUAL_MODULE_ID,
importedFromSeedFile: "\0" + VIRTUAL_MODULE_ID + ":seed"
};
function vitePluginDb(params) {
  // Vite plugin serving the `astro:db` virtual module.
  // Tracks the current Vite command so build-time codegen can differ
  // (see getRemoteVirtualModContents' env fallbacks).
  let command = "build";
  return {
    name: "astro:db",
    enforce: "pre",
    configResolved(resolvedConfig) {
      command = resolvedConfig.command;
    },
    resolveId: {
      filter: {
        id: new RegExp(`^${VIRTUAL_MODULE_ID}$`)
      },
      handler() {
        // While a seed file is executing, resolve `astro:db` to a distinct
        // id so the load hook can skip table recreation and seeding —
        // seed files import `astro:db` themselves, which would otherwise
        // recurse into another seed run.
        if (params.seedHandler.inProgress) {
          return resolved.importedFromSeedFile;
        }
        return resolved.module;
      }
    },
    load: {
      filter: {
        id: new RegExp(`^(${resolved.module}|${resolved.importedFromSeedFile})$`)
      },
      async handler(id) {
        // Remote mode: emit a client for the remote database; no local
        // table setup or seeding happens in-process.
        if (params.connectToRemote) {
          return getRemoteVirtualModContents({
            appToken: params.appToken,
            tables: params.tables.get(),
            isBuild: command === "build",
            output: params.output,
            localExecution: false
          });
        }
        // Import issued from inside a running seed file: serve module
        // contents only, without re-running setup (see resolveId above).
        if (id === resolved.importedFromSeedFile) {
          return getLocalVirtualModContents({
            root: params.root,
            tables: params.tables.get(),
            localExecution: false
          });
        }
        // Local mode: drop & recreate all tables, then run any seed files.
        await recreateTables(params);
        const seedFiles = getResolvedSeedFiles(params);
        // `seedFiles` is a plain array of URLs — a synchronous `for...of`
        // is correct here (the original `for await` had no async iterable
        // or thenables to unwrap).
        for (const seedFile of seedFiles) {
          // Watch candidates even when absent so creating one triggers a
          // reload and a fresh seed.
          this.addWatchFile(fileURLToPath(seedFile));
          if (existsSync(seedFile)) {
            params.seedHandler.inProgress = true;
            await params.seedHandler.execute(seedFile);
          }
        }
        if (params.seedHandler.inProgress) {
          (params.logger ?? console).info("Seeded database.");
          params.seedHandler.inProgress = false;
        }
        return getLocalVirtualModContents({
          root: params.root,
          tables: params.tables.get(),
          localExecution: false
        });
      }
    }
  };
}
function getConfigVirtualModContents() {
  // Virtual `astro:db` contents used while loading the config file itself:
  // only the runtime re-exports, no database client is created.
  return `export * from ${RUNTIME_VIRTUAL_IMPORT}`;
}
function getDBModule(localExecution) {
  // For direct local execution (e.g. `astro db execute`, bundled outside
  // Vite) import the Node client package directly; otherwise go through the
  // Vite virtual client module.
  if (localExecution) {
    return `import { createClient } from '${DB_CLIENTS.node}';`;
  }
  return `import { createClient } from '${VIRTUAL_CLIENT_MODULE_ID}';`;
}
function getLocalVirtualModContents({
  tables,
  root,
  localExecution
}) {
  // Generate the local `astro:db` module: a client pointed at the project's
  // database file, the runtime re-exports, and one typed table export per
  // configured table. The template output is code — keep it byte-exact.
  const { ASTRO_DATABASE_FILE } = getAstroEnv();
  // Default DB location under the project root; ASTRO_DATABASE_FILE can
  // override it via normalizeDatabaseUrl in the emitted code.
  const dbUrl = new URL(DB_PATH, root);
  const clientImport = getDBModule(localExecution);
  return `
import { asDrizzleTable, normalizeDatabaseUrl } from ${RUNTIME_IMPORT};
${clientImport}
const dbUrl = normalizeDatabaseUrl(${JSON.stringify(ASTRO_DATABASE_FILE)}, ${JSON.stringify(dbUrl)});
export const db = createClient({ url: dbUrl });
export * from ${RUNTIME_VIRTUAL_IMPORT};
${getStringifiedTableExports(tables)}`;
}
function getRemoteVirtualModContents({
  tables,
  appToken,
  isBuild,
  output,
  localExecution
}) {
  const dbInfo = getRemoteDatabaseInfo();
  // How the emitted module obtains the app token:
  // - server-output builds read process.env at runtime (token not inlined);
  // - other builds read process.env with the build-time token inlined as a
  //   fallback;
  // - dev inlines the token directly.
  function appTokenArg() {
    if (isBuild) {
      if (output === "server") {
        return `process.env.ASTRO_DB_APP_TOKEN`;
      } else {
        return `process.env.ASTRO_DB_APP_TOKEN ?? ${JSON.stringify(appToken)}`;
      }
    } else {
      return JSON.stringify(appToken);
    }
  }
  // Remote DB URL: builds allow an env override; dev inlines the known URL.
  function dbUrlArg() {
    const dbStr = JSON.stringify(dbInfo.url);
    if (isBuild) {
      return `import.meta.env.ASTRO_DB_REMOTE_URL ?? ${dbStr}`;
    } else {
      return dbStr;
    }
  }
  const clientImport = getDBModule(localExecution);
  return `
import {asDrizzleTable} from ${RUNTIME_IMPORT};
${clientImport}
export const db = await createClient({
  url: ${dbUrlArg()},
  token: ${appTokenArg()},
});
export * from ${RUNTIME_VIRTUAL_IMPORT};
${getStringifiedTableExports(tables)}
`;
}
function getStringifiedTableExports(tables) {
  // Emit one `export const <name> = asDrizzleTable(...)` line per table,
  // serializing the table definition into the generated module.
  const exportLines = [];
  for (const [name, table] of Object.entries(tables)) {
    const serializedTable = JSON.stringify(table);
    exportLines.push(
      `export const ${name} = asDrizzleTable(${JSON.stringify(name)}, ${serializedTable}, false)`
    );
  }
  return exportLines.join("\n");
}
const sqlite = new SQLiteAsyncDialect();
async function recreateTables({ tables, root }) {
  // Drop and recreate every configured table (and its indexes) in the local
  // database, so the local DB always mirrors the current config.
  const { ASTRO_DATABASE_FILE } = getAstroEnv();
  const dbUrl = normalizeDatabaseUrl(ASTRO_DATABASE_FILE, new URL(DB_PATH, root).href);
  const db = createClient({ url: dbUrl });
  const setupQueries = [];
  for (const [name, table] of Object.entries(tables.get() ?? {})) {
    const dropQuery = sql.raw(`DROP TABLE IF EXISTS ${sqlite.escapeName(name)}`);
    const createQuery = sql.raw(getCreateTableQuery(name, table));
    const indexQueries = getCreateIndexQueries(name, table);
    setupQueries.push(dropQuery, createQuery, ...indexQueries.map((s) => sql.raw(s)));
  }
  // defer_foreign_keys allows dropping/creating tables in any order within
  // the batch without tripping FK constraints mid-way.
  await db.batch([
    db.run(sql`pragma defer_foreign_keys=true;`),
    ...setupQueries.map((q) => db.run(q))
  ]);
}
function getResolvedSeedFiles({ root, seedFiles }) {
  // Collect seed file candidates: integration-provided entrypoints first,
  // then the conventional seed.{ts,js,mjs,mts} files in the db directory.
  const dbDir = getDbDirectoryUrl(root);
  const conventionFiles = SEED_DEV_FILE_NAME.map((name) => new URL(name, dbDir));
  const integrationFiles = seedFiles.get().map((entry) => getResolvedFileUrl(root, entry));
  return [...integrationFiles, ...conventionFiles];
}
export {
getConfigVirtualModContents,
getLocalVirtualModContents,
getRemoteVirtualModContents,
vitePluginDb
};

126
node_modules/@astrojs/db/dist/core/load-file.d.ts generated vendored Normal file
View File

@@ -0,0 +1,126 @@
import type { AstroConfig } from 'astro';
import './types.js';
/**
* Load a users `astro:db` configuration file and additional configuration files provided by integrations.
*/
export declare function resolveDbConfig({ root, integrations, }: Pick<AstroConfig, 'root' | 'integrations'>): Promise<{
/** Resolved `astro:db` config, including tables added by integrations. */
dbConfig: {
tables: Record<string, {
indexes: Record<string, {
on: string | string[];
unique?: boolean | undefined;
}>;
columns: Record<string, {
type: "boolean";
schema: {
optional: boolean;
unique: boolean;
deprecated: boolean;
label?: string | undefined;
name?: string | undefined;
collection?: string | undefined;
default?: boolean | import("../runtime/types.js").SerializedSQL | undefined;
};
} | {
type: "number";
schema: ({
unique: boolean;
deprecated: boolean;
name?: string | undefined;
label?: string | undefined;
collection?: string | undefined;
} & ({
primaryKey: false;
optional: boolean;
default?: number | import("../runtime/types.js").SerializedSQL | undefined;
} | {
primaryKey: true;
optional?: false | undefined;
default?: undefined;
})) & {
references?: import("./types.js").NumberColumn;
};
} | {
type: "text";
schema: ({
unique: boolean;
deprecated: boolean;
name?: string | undefined;
label?: string | undefined;
collection?: string | undefined;
default?: string | import("../runtime/types.js").SerializedSQL | undefined;
multiline?: boolean | undefined;
enum?: [string, ...string[]] | undefined;
} & ({
primaryKey: false;
optional: boolean;
} | {
primaryKey: true;
optional?: false | undefined;
})) & {
references?: import("./types.js").TextColumn;
};
} | {
type: "date";
schema: {
optional: boolean;
unique: boolean;
deprecated: boolean;
label?: string | undefined;
name?: string | undefined;
collection?: string | undefined;
default?: string | import("../runtime/types.js").SerializedSQL | undefined;
};
} | {
type: "json";
schema: {
optional: boolean;
unique: boolean;
deprecated: boolean;
label?: string | undefined;
name?: string | undefined;
collection?: string | undefined;
default?: unknown;
};
}>;
deprecated: boolean;
foreignKeys?: (Omit<{
columns: import("./schemas.js").MaybeArray<string>;
references: () => import("./schemas.js").MaybeArray<Omit<import("zod/v4").input<typeof import("./schemas.js").referenceableColumnSchema>, "references">>;
}, "references"> & {
references: import("./schemas.js").MaybeArray<Omit<import("zod/v4").infer<typeof import("./schemas.js").referenceableColumnSchema>, "references">>;
})[] | undefined;
}>;
};
/** Dependencies imported into the user config file. */
dependencies: string[];
/** Additional `astro:db` seed file paths provided by integrations. */
integrationSeedPaths: (string | URL)[];
}>;
export declare function getResolvedFileUrl(root: URL, filePathOrUrl: string | URL): URL;
/**
* Bundle arbitrary `mjs` or `ts` file.
* Simplified fork from Vite's `bundleConfigFile` function.
*
* @see https://github.com/vitejs/vite/blob/main/packages/vite/src/node/config.ts#L961
*/
export declare function bundleFile({ fileUrl, root, virtualModContents, }: {
fileUrl: URL;
root: URL;
virtualModContents: string;
}): Promise<{
code: string;
dependencies: string[];
}>;
/**
* Forked from Vite config loader, replacing CJS-based path concat with ESM only
*
* @see https://github.com/vitejs/vite/blob/main/packages/vite/src/node/config.ts#L1074
*/
export declare function importBundledFile({ code, root, }: {
code: string;
root: URL;
}): Promise<{
default?: unknown;
}>;

170
node_modules/@astrojs/db/dist/core/load-file.js generated vendored Normal file
View File

@@ -0,0 +1,170 @@
import { existsSync } from "node:fs";
import { unlink, writeFile } from "node:fs/promises";
import { createRequire } from "node:module";
import { fileURLToPath, pathToFileURL } from "node:url";
import { build as esbuild } from "esbuild";
import { CONFIG_FILE_NAMES, VIRTUAL_MODULE_ID } from "./consts.js";
import { INTEGRATION_TABLE_CONFLICT_ERROR } from "./errors.js";
import { errorMap } from "./integration/error-map.js";
import { getConfigVirtualModContents } from "./integration/vite-plugin-db.js";
import { dbConfigSchema } from "./schemas.js";
import "./types.js";
import { getAstroEnv, getDbDirectoryUrl } from "./utils.js";
async function resolveDbConfig({
  root,
  integrations
}) {
  // Load and validate the user's db config, then merge in tables and seed
  // paths contributed by integrations via the `astro:db:setup` hook.
  const { mod, dependencies } = await loadUserConfigFile(root);
  const userDbConfig = dbConfigSchema.parse(mod?.default ?? {}, { error: errorMap });
  const dbConfig = { tables: userDbConfig.tables ?? {} };
  const integrationDbConfigPaths = [];
  const integrationSeedPaths = [];
  for (const integration of integrations) {
    const { name, hooks } = integration;
    if (hooks["astro:db:setup"]) {
      // NOTE(review): the hook's declared type allows a Promise return, but
      // it is not awaited here — extendDb callbacks must run synchronously.
      hooks["astro:db:setup"]({
        extendDb({ configEntrypoint, seedEntrypoint }) {
          if (configEntrypoint) {
            integrationDbConfigPaths.push({ name, configEntrypoint });
          }
          if (seedEntrypoint) {
            integrationSeedPaths.push(seedEntrypoint);
          }
        }
      });
    }
  }
  for (const { name, configEntrypoint } of integrationDbConfigPaths) {
    const loadedConfig = await loadIntegrationConfigFile(root, configEntrypoint);
    const integrationDbConfig = dbConfigSchema.parse(loadedConfig.mod?.default ?? {}, {
      error: errorMap
    });
    for (const key in integrationDbConfig.tables) {
      if (key in dbConfig.tables) {
        // A table name collision: with the user's tables, or with a table
        // added by a previously-processed integration.
        const isUserConflict = key in (userDbConfig.tables ?? {});
        throw new Error(INTEGRATION_TABLE_CONFLICT_ERROR(name, key, isUserConflict));
      } else {
        dbConfig.tables[key] = integrationDbConfig.tables[key];
      }
    }
  }
  return {
    /** Resolved `astro:db` config, including tables added by integrations. */
    dbConfig,
    /** Dependencies imported into the user config file. */
    dependencies,
    /** Additional `astro:db` seed file paths provided by integrations. */
    integrationSeedPaths
  };
}
async function loadUserConfigFile(root) {
  // Locate the user's config file inside the db directory. The loop does not
  // break on the first hit, so when several candidate names exist the LAST
  // entry of CONFIG_FILE_NAMES wins.
  let configFileUrl;
  for (const fileName of CONFIG_FILE_NAMES) {
    const fileUrl = new URL(fileName, getDbDirectoryUrl(root));
    if (existsSync(fileUrl)) {
      configFileUrl = fileUrl;
    }
  }
  // configFileUrl may be undefined; loadAndBundleDbConfigFile handles that
  // by returning an empty module.
  return await loadAndBundleDbConfigFile({ root, fileUrl: configFileUrl });
}
function getResolvedFileUrl(root, filePathOrUrl) {
  // URL instances are already absolute — return them untouched.
  if (typeof filePathOrUrl !== "string") {
    return filePathOrUrl;
  }
  // Strings (bare specifiers or relative paths) go through Node's module
  // resolution anchored at the project root.
  const projectRequire = createRequire(root);
  const resolvedPath = projectRequire.resolve(filePathOrUrl);
  return pathToFileURL(resolvedPath);
}
async function loadIntegrationConfigFile(root, filePathOrUrl) {
  // Resolve an integration's config entrypoint (bare specifier, path, or
  // URL) to a file URL, then load it the same way as the user config.
  const fileUrl = getResolvedFileUrl(root, filePathOrUrl);
  return await loadAndBundleDbConfigFile({ root, fileUrl });
}
async function loadAndBundleDbConfigFile({
  root,
  fileUrl
}) {
  // No config file: behave like an empty config with no dependencies.
  if (!fileUrl) {
    return { mod: void 0, dependencies: [] };
  }
  // Bundle the config file (stubbing `astro:db` with runtime-only contents)
  // then import the bundle to get its default export and tracked deps.
  const { code, dependencies } = await bundleFile({
    virtualModContents: getConfigVirtualModContents(),
    root,
    fileUrl
  });
  return {
    mod: await importBundledFile({ code, root }),
    dependencies
  };
}
async function bundleFile({
  fileUrl,
  root,
  virtualModContents
}) {
  // Bundle a config/seed file with esbuild into an in-memory ESM bundle.
  // `astro:db` imports are redirected to the provided virtual module
  // contents; all other packages stay external.
  const { ASTRO_DATABASE_FILE } = getAstroEnv();
  const result = await esbuild({
    absWorkingDir: process.cwd(),
    entryPoints: [fileURLToPath(fileUrl)],
    outfile: "out.js",
    packages: "external",
    write: false,
    target: ["node16"],
    platform: "node",
    bundle: true,
    format: "esm",
    sourcemap: "inline",
    // metafile records every input so callers can watch dependencies.
    metafile: true,
    define: {
      // Inline the env var so the bundled code sees the same DB file path.
      "import.meta.env.ASTRO_DATABASE_FILE": JSON.stringify(ASTRO_DATABASE_FILE ?? "")
    },
    plugins: [
      {
        name: "resolve-astro-db",
        setup(build) {
          build.onResolve({ filter: /^astro:db$/ }, ({ path }) => {
            return { path, namespace: VIRTUAL_MODULE_ID };
          });
          build.onLoad({ namespace: VIRTUAL_MODULE_ID, filter: /.*/ }, () => {
            return {
              contents: virtualModContents,
              // Needed to resolve runtime dependencies
              resolveDir: fileURLToPath(root)
            };
          });
        }
      }
    ]
  });
  const file = result.outputFiles[0];
  if (!file) {
    throw new Error(`Unexpected: no output file`);
  }
  return {
    code: file.text,
    dependencies: Object.keys(result.metafile.inputs)
  };
}
async function importBundledFile({
  code,
  root
}) {
  // Write the bundled code to a timestamped temp file next to the project
  // root (so relative imports resolve), import it as a real ES module, then
  // delete the temp file.
  const tmpFileUrl = new URL(`./db.timestamp-${Date.now()}.mjs`, root);
  await writeFile(tmpFileUrl, code, { encoding: "utf8" });
  try {
    return await import(
      /* @vite-ignore */
      tmpFileUrl.toString()
    );
  } finally {
    try {
      await unlink(tmpFileUrl);
    } catch {
      // Best-effort cleanup; a leftover temp file is harmless.
    }
  }
}
export {
bundleFile,
getResolvedFileUrl,
importBundledFile,
resolveDbConfig
};

53
node_modules/@astrojs/db/dist/core/queries.d.ts generated vendored Normal file
View File

@@ -0,0 +1,53 @@
import type { BooleanColumn, ColumnType, DateColumn, DBColumn, DBTable, JsonColumn, NumberColumn, TextColumn } from './types.js';
export declare const SEED_DEV_FILE_NAME: string[];
export declare function getDropTableIfExistsQuery(tableName: string): string;
export declare function getCreateTableQuery(tableName: string, table: DBTable): string;
export declare function getCreateIndexQueries(tableName: string, table: Pick<DBTable, 'indexes'>): string[];
export declare function schemaTypeToSqlType(type: ColumnType): 'text' | 'integer';
export declare function getModifiers(columnName: string, column: DBColumn): string;
export declare function getReferencesConfig(column: DBColumn): {
type: "number";
schema: ({
unique: boolean;
deprecated: boolean;
name?: string | undefined;
label?: string | undefined;
collection?: string | undefined;
} & ({
primaryKey: false;
optional: boolean;
default?: number | import("../runtime/types.js").SerializedSQL | undefined;
} | {
primaryKey: true;
optional?: false | undefined;
default?: undefined;
})) & {
references?: NumberColumn;
};
} | {
type: "text";
schema: ({
unique: boolean;
deprecated: boolean;
name?: string | undefined;
label?: string | undefined;
collection?: string | undefined;
default?: string | import("../runtime/types.js").SerializedSQL | undefined;
multiline?: boolean | undefined;
enum?: [string, ...string[]] | undefined;
} & ({
primaryKey: false;
optional: boolean;
} | {
primaryKey: true;
optional?: false | undefined;
})) & {
references?: TextColumn;
};
} | undefined;
type WithDefaultDefined<T extends DBColumn> = T & {
schema: Required<Pick<T['schema'], 'default'>>;
};
type DBColumnWithDefault = WithDefaultDefined<TextColumn> | WithDefaultDefined<DateColumn> | WithDefaultDefined<NumberColumn> | WithDefaultDefined<BooleanColumn> | WithDefaultDefined<JsonColumn>;
export declare function hasDefault(column: DBColumn): column is DBColumnWithDefault;
export {};

166
node_modules/@astrojs/db/dist/core/queries.js generated vendored Normal file
View File

@@ -0,0 +1,166 @@
import { SQLiteAsyncDialect } from "drizzle-orm/sqlite-core";
import colors from "piccolore";
import {
FOREIGN_KEY_DNE_ERROR,
FOREIGN_KEY_REFERENCES_EMPTY_ERROR,
FOREIGN_KEY_REFERENCES_LENGTH_ERROR,
REFERENCE_DNE_ERROR
} from "../runtime/errors.js";
import { isSerializedSQL } from "../runtime/types.js";
import { hasPrimaryKey } from "../runtime/utils.js";
const sqlite = new SQLiteAsyncDialect();
const SEED_DEV_FILE_NAME = ["seed.ts", "seed.js", "seed.mjs", "seed.mts"];
function getDropTableIfExistsQuery(tableName) {
  // Escape the table name so reserved words and odd characters are safe.
  const escapedName = sqlite.escapeName(tableName);
  return `DROP TABLE IF EXISTS ${escapedName}`;
}
function getCreateTableQuery(tableName, table) {
  // Build the full CREATE TABLE statement: a synthesized `_id` primary key
  // when no column declares one, then one definition per column, then any
  // FOREIGN KEY table constraints.
  let query = `CREATE TABLE ${sqlite.escapeName(tableName)} (`;
  const colQueries = [];
  const colHasPrimaryKey = Object.entries(table.columns).find(
    ([, column]) => hasPrimaryKey(column)
  );
  if (!colHasPrimaryKey) {
    colQueries.push("_id INTEGER PRIMARY KEY");
  }
  for (const [columnName, column] of Object.entries(table.columns)) {
    // `<escaped name> <sql type><modifiers>` — modifiers include NOT NULL,
    // UNIQUE, DEFAULT and column-level REFERENCES (see getModifiers).
    const colQuery = `${sqlite.escapeName(columnName)} ${schemaTypeToSqlType(
      column.type
    )}${getModifiers(columnName, column)}`;
    colQueries.push(colQuery);
  }
  colQueries.push(...getCreateForeignKeyQueries(tableName, table));
  query += colQueries.join(", ") + ")";
  return query;
}
function getCreateIndexQueries(tableName, table) {
  // One CREATE [UNIQUE] INDEX statement per configured index.
  const escapedTable = sqlite.escapeName(tableName);
  return Object.entries(table.indexes ?? {}).map(([indexName, indexProps]) => {
    const escapedCols = asArray(indexProps.on).map((colName) => sqlite.escapeName(colName));
    const uniquePrefix = indexProps.unique ? "UNIQUE " : "";
    return `CREATE ${uniquePrefix}INDEX ${sqlite.escapeName(
      indexName
    )} ON ${escapedTable} (${escapedCols.join(", ")})`;
  });
}
function getCreateForeignKeyQueries(tableName, table) {
  // Emit one table-level FOREIGN KEY clause per configured foreign key.
  // Validates that local columns and referenced columns line up 1:1 and that
  // the referenced columns carry their owning table name.
  let queries = [];
  for (const foreignKey of table.foreignKeys ?? []) {
    const columns = asArray(foreignKey.columns);
    const references = asArray(foreignKey.references);
    if (columns.length !== references.length) {
      throw new Error(FOREIGN_KEY_REFERENCES_LENGTH_ERROR(tableName));
    }
    const firstReference = references[0];
    if (!firstReference) {
      throw new Error(FOREIGN_KEY_REFERENCES_EMPTY_ERROR(tableName));
    }
    // `collection` is the referenced table's name, stamped onto columns
    // during schema preprocessing (see tablesSchema in schemas.js).
    const referencedTable = firstReference.schema.collection;
    if (!referencedTable) {
      throw new Error(FOREIGN_KEY_DNE_ERROR(tableName));
    }
    const query = `FOREIGN KEY (${columns.map((f) => sqlite.escapeName(f)).join(", ")}) REFERENCES ${sqlite.escapeName(referencedTable)}(${references.map((r) => sqlite.escapeName(r.schema.name)).join(", ")})`;
    queries.push(query);
  }
  return queries;
}
function asArray(value) {
  // Normalize a maybe-array value: arrays pass through unchanged (same
  // reference), scalars are wrapped in a one-element array.
  if (Array.isArray(value)) {
    return value;
  }
  return [value];
}
function schemaTypeToSqlType(type) {
  // Map a column type to its SQLite column type: date/text/json are stored
  // as TEXT, number/boolean as INTEGER. Unknown types yield undefined,
  // matching the original switch with no default branch.
  if (type === "date" || type === "text" || type === "json") {
    return "text";
  }
  if (type === "number" || type === "boolean") {
    return "integer";
  }
}
function getModifiers(columnName, column) {
  // Build the column-definition suffix: PRIMARY KEY short-circuits and gets
  // no other modifiers; otherwise NOT NULL / UNIQUE / DEFAULT / REFERENCES
  // are appended as configured.
  let modifiers = "";
  if (hasPrimaryKey(column)) {
    return " PRIMARY KEY";
  }
  if (!column.schema.optional) {
    modifiers += " NOT NULL";
  }
  if (column.schema.unique) {
    modifiers += " UNIQUE";
  }
  if (hasDefault(column)) {
    modifiers += ` DEFAULT ${getDefaultValueSql(columnName, column)}`;
  }
  const references = getReferencesConfig(column);
  if (references) {
    // The referenced column must know both its table (`collection`) and its
    // own name — these are stamped on during schema preprocessing.
    const { collection: tableName, name } = references.schema;
    if (!tableName || !name) {
      throw new Error(REFERENCE_DNE_ERROR(columnName));
    }
    modifiers += ` REFERENCES ${sqlite.escapeName(tableName)} (${sqlite.escapeName(name)})`;
  }
  return modifiers;
}
function getReferencesConfig(column) {
  // Only number and text columns may declare a foreign-key reference;
  // all other column types resolve to undefined.
  switch (column.type) {
    case "number":
    case "text":
      return column.schema.references;
    default:
      return void 0;
  }
}
function hasDefault(column) {
  // A column has a default when one is set explicitly, or implicitly when
  // it is a number primary key (integer primary keys default to ROWID —
  // see the note in schemas.js).
  if (column.schema.default !== void 0) {
    return true;
  }
  return hasPrimaryKey(column) && column.type === "number";
}
function toDefault(def) {
  // Render a primitive default value as SQL text: strings are escaped and
  // quoted, booleans become TRUE/FALSE keywords, everything else is
  // stringified as-is.
  switch (typeof def) {
    case "string":
      return sqlite.escapeString(def);
    case "boolean":
      return def ? "TRUE" : "FALSE";
    default:
      return def + "";
  }
}
function getDefaultValueSql(columnName, column) {
  // Raw SQL defaults (e.g. NOW()) are emitted verbatim.
  if (isSerializedSQL(column.schema.default)) {
    return column.schema.default.sql;
  }
  switch (column.type) {
    case "boolean":
    case "number":
    case "text":
    case "date":
      return toDefault(column.schema.default);
    case "json": {
      // JSON defaults are stored as an escaped JSON string.
      let stringified = "";
      try {
        stringified = JSON.stringify(column.schema.default);
      } catch {
        console.log(
          `Invalid default value for column ${colors.bold(
            columnName
          )}. Defaults must be valid JSON when using the \`json()\` type.`
        );
        // NOTE(review): this error path exits with status 0 (success) —
        // confirm whether a non-zero exit code was intended.
        process.exit(0);
      }
      return sqlite.escapeString(stringified);
    }
  }
}
export {
SEED_DEV_FILE_NAME,
getCreateIndexQueries,
getCreateTableQuery,
getDropTableIfExistsQuery,
getModifiers,
getReferencesConfig,
hasDefault,
schemaTypeToSqlType
};

1444
node_modules/@astrojs/db/dist/core/schemas.d.ts generated vendored Normal file

File diff suppressed because it is too large Load Diff

193
node_modules/@astrojs/db/dist/core/schemas.js generated vendored Normal file
View File

@@ -0,0 +1,193 @@
import { SQL } from "drizzle-orm";
import { SQLiteAsyncDialect } from "drizzle-orm/sqlite-core";
import * as z from "zod/v4";
import { SERIALIZED_SQL_KEY } from "../runtime/types.js";
import { errorMap } from "./integration/error-map.js";
import { mapObject } from "./utils.js";
const sqlite = new SQLiteAsyncDialect();
const sqlSchema = z.instanceof(SQL).transform(
(sqlObj) => ({
[SERIALIZED_SQL_KEY]: true,
sql: sqlite.sqlToQuery(sqlObj).sql
})
);
const baseColumnSchema = z.object({
label: z.string().optional(),
optional: z.boolean().optional().default(false),
unique: z.boolean().optional().default(false),
deprecated: z.boolean().optional().default(false),
// Defined when `defineDb()` is called to resolve `references`
name: z.string().optional(),
// TODO: Update to `table`. Will need migration file version change
collection: z.string().optional()
});
const booleanColumnSchema = z.object({
type: z.literal("boolean"),
schema: baseColumnSchema.extend({
default: z.union([z.boolean(), sqlSchema]).optional()
})
});
const numberColumnBaseSchema = baseColumnSchema.omit({ optional: true }).and(
z.union([
z.object({
primaryKey: z.literal(false).optional().default(false),
optional: baseColumnSchema.shape.optional,
default: z.union([z.number(), sqlSchema]).optional()
}),
z.object({
// `integer primary key` uses ROWID as the default value.
// `optional` and `default` do not have an effect,
// so disable these config options for primary keys.
primaryKey: z.literal(true),
optional: z.literal(false).optional(),
default: z.literal(void 0).optional()
})
])
);
const numberColumnOptsSchema = numberColumnBaseSchema.and(
z.object({
references: z.function({ output: z.lazy(() => numberColumnSchema) }).optional().transform((fn) => fn?.())
})
);
const numberColumnSchema = z.object({
type: z.literal("number"),
schema: numberColumnOptsSchema
});
const textColumnBaseSchema = baseColumnSchema.omit({ optional: true }).extend({
default: z.union([z.string(), sqlSchema]).optional(),
multiline: z.boolean().optional(),
enum: z.tuple([z.string()]).rest(z.string()).optional()
// At least one value required,
}).and(
z.union([
z.object({
primaryKey: z.literal(false).optional().default(false),
optional: baseColumnSchema.shape.optional
}),
z.object({
// text primary key allows NULL values.
// NULL values bypass unique checks, which could
// lead to duplicate URLs per record.
// disable `optional` for primary keys.
primaryKey: z.literal(true),
optional: z.literal(false).optional()
})
])
);
const textColumnOptsSchema = textColumnBaseSchema.and(
z.object({
references: z.function({ output: z.lazy(() => textColumnSchema) }).optional().transform((fn) => fn?.())
})
);
const textColumnSchema = z.object({
type: z.literal("text"),
schema: textColumnOptsSchema
});
const dateColumnSchema = z.object({
type: z.literal("date"),
schema: baseColumnSchema.extend({
default: z.union([
sqlSchema,
// transform to ISO string for serialization
z.date().transform((d) => d.toISOString())
]).optional()
})
});
const jsonColumnSchema = z.object({
type: z.literal("json"),
schema: baseColumnSchema.extend({
default: z.unknown().optional()
})
});
const columnSchema = z.discriminatedUnion("type", [
booleanColumnSchema,
numberColumnSchema,
textColumnSchema,
dateColumnSchema,
jsonColumnSchema
]);
const referenceableColumnSchema = z.union([textColumnSchema, numberColumnSchema]);
const columnsSchema = z.record(z.string(), columnSchema);
const foreignKeysSchema = z.object({
columns: z.string().or(z.array(z.string())),
references: z.function({
output: z.lazy(() => referenceableColumnSchema.or(z.array(referenceableColumnSchema)))
}).transform((fn) => fn())
});
const resolvedIndexSchema = z.object({
on: z.string().or(z.array(z.string())),
unique: z.boolean().optional()
});
const legacyIndexesSchema = z.record(z.string(), resolvedIndexSchema);
const indexSchema = z.object({
on: z.string().or(z.array(z.string())),
unique: z.boolean().optional(),
name: z.string().optional()
});
const indexesSchema = z.array(indexSchema);
const tableSchema = z.object({
columns: columnsSchema,
indexes: indexesSchema.or(legacyIndexesSchema).optional(),
foreignKeys: z.array(foreignKeysSchema).optional(),
deprecated: z.boolean().optional().default(false)
});
const tablesSchema = z.preprocess(
  (rawTables) => {
    // Pre-pass before validation: mutate the raw input in place, giving each
    // table a getName() helper and stamping each column with its own name
    // and its owning table (`collection`). The raw value is then returned
    // unchanged for the normal record validation below.
    const tables = z.record(z.string(), z.any()).parse(rawTables, { error: errorMap });
    for (const [tableName, table] of Object.entries(tables)) {
      table.getName = () => tableName;
      const { columns } = z.object({ columns: z.record(z.string(), z.any()) }).parse(table, { error: errorMap });
      for (const [columnName, column] of Object.entries(columns)) {
        column.schema.name = columnName;
        column.schema.collection = tableName;
      }
    }
    return rawTables;
  },
  z.record(z.string(), tableSchema)
);
const dbConfigSchema = z.object({
  tables: tablesSchema.optional()
}).transform(({ tables = {}, ...config }) => {
  // Normalize indexes to the record (legacy) form. Array-style indexes get
  // a generated name `<table>_<cols>_idx` unless an explicit `name` is set.
  return {
    ...config,
    tables: mapObject(tables, (tableName, table) => {
      const { indexes = {} } = table;
      if (!Array.isArray(indexes)) {
        // Already a record — pass through unchanged.
        return { ...table, indexes };
      }
      const resolvedIndexes = {};
      for (const index of indexes) {
        if (index.name) {
          // Drop the `name` key from the stored props; it becomes the key.
          const { name: name2, ...rest } = index;
          resolvedIndexes[index.name] = rest;
          continue;
        }
        // NOTE(review): `.sort()` mutates `index.on` in place — consider
        // `[...index.on].sort()` to avoid reordering the caller's array.
        const indexOn = Array.isArray(index.on) ? index.on.sort().join("_") : index.on;
        const name = tableName + "_" + indexOn + "_idx";
        resolvedIndexes[name] = index;
      }
      return {
        ...table,
        indexes: resolvedIndexes
      };
    })
  };
});
export {
booleanColumnSchema,
columnSchema,
columnsSchema,
dateColumnSchema,
dbConfigSchema,
indexSchema,
jsonColumnSchema,
numberColumnOptsSchema,
numberColumnSchema,
referenceableColumnSchema,
resolvedIndexSchema,
tableSchema,
tablesSchema,
textColumnOptsSchema,
textColumnSchema
};

60
node_modules/@astrojs/db/dist/core/types.d.ts generated vendored Normal file
View File

@@ -0,0 +1,60 @@
import type * as z from 'zod/v4';
import type { booleanColumnSchema, columnSchema, columnsSchema, dateColumnSchema, dbConfigSchema, indexSchema, jsonColumnSchema, MaybeArray, numberColumnOptsSchema, numberColumnSchema, referenceableColumnSchema, resolvedIndexSchema, tableSchema, textColumnOptsSchema, textColumnSchema } from './schemas.js';
export type ResolvedIndexes = z.output<typeof dbConfigSchema>['tables'][string]['indexes'];
// Parsed ("infer"/output) and accepted ("input") forms of each column schema.
export type BooleanColumn = z.infer<typeof booleanColumnSchema>;
export type BooleanColumnInput = z.input<typeof booleanColumnSchema>;
export type NumberColumn = z.infer<typeof numberColumnSchema>;
export type NumberColumnInput = z.input<typeof numberColumnSchema>;
export type TextColumn = z.infer<typeof textColumnSchema>;
export type TextColumnInput = z.input<typeof textColumnSchema>;
export type DateColumn = z.infer<typeof dateColumnSchema>;
export type DateColumnInput = z.input<typeof dateColumnSchema>;
export type JsonColumn = z.infer<typeof jsonColumnSchema>;
export type JsonColumnInput = z.input<typeof jsonColumnSchema>;
// Union of the literal `type` tags carried by every column variant.
export type ColumnType = BooleanColumn['type'] | NumberColumn['type'] | TextColumn['type'] | DateColumn['type'] | JsonColumn['type'];
export type DBColumn = z.infer<typeof columnSchema>;
export type DBColumnInput = DateColumnInput | BooleanColumnInput | NumberColumnInput | TextColumnInput | JsonColumnInput;
export type DBColumns = z.infer<typeof columnsSchema>;
export type DBTable = z.infer<typeof tableSchema>;
export type DBTables = Record<string, DBTable>;
// Fully-resolved table shapes, as produced by parsing a whole db config.
export type ResolvedDBTables = z.output<typeof dbConfigSchema>['tables'];
export type ResolvedDBTable = z.output<typeof dbConfigSchema>['tables'][string];
// A point-in-time capture of the resolved schema plus a format version string.
export type DBSnapshot = {
    schema: Record<string, ResolvedDBTable>;
    version: string;
};
export type DBConfigInput = z.input<typeof dbConfigSchema>;
export type DBConfig = z.infer<typeof dbConfigSchema>;
export type ColumnsConfig = z.input<typeof tableSchema>['columns'];
export type OutputColumnsConfig = z.output<typeof tableSchema>['columns'];
/**
 * User-facing table configuration. `TColumns` is captured as a type parameter
 * so foreign keys and indexes can only name columns that actually exist.
 */
export interface TableConfig<TColumns extends ColumnsConfig = ColumnsConfig> extends Pick<z.input<typeof tableSchema>, 'columns' | 'indexes' | 'foreignKeys'> {
    columns: TColumns;
    foreignKeys?: Array<{
        columns: MaybeArray<Extract<keyof TColumns, string>>;
        references: () => MaybeArray<z.input<typeof referenceableColumnSchema>>;
    }>;
    // Either the array form (IndexConfig) or the legacy name-keyed record form.
    indexes?: Array<IndexConfig<TColumns>> | Record<string, LegacyIndexConfig<TColumns>>;
    deprecated?: boolean;
}
interface IndexConfig<TColumns extends ColumnsConfig> extends z.input<typeof indexSchema> {
    on: MaybeArray<Extract<keyof TColumns, string>>;
}
/** @deprecated */
interface LegacyIndexConfig<TColumns extends ColumnsConfig> extends z.input<typeof resolvedIndexSchema> {
    on: MaybeArray<Extract<keyof TColumns, string>>;
}
export type NumberColumnOpts = z.input<typeof numberColumnOptsSchema>;
export type TextColumnOpts = z.input<typeof textColumnOptsSchema>;
// Augments Astro's integration hook map with `astro:db:setup`, which lets
// other integrations register additional config/seed entrypoints.
declare global {
    namespace Astro {
        interface IntegrationHooks {
            'astro:db:setup'?: (options: {
                extendDb: (options: {
                    configEntrypoint?: URL | string;
                    seedEntrypoint?: URL | string;
                }) => void;
            }) => void | Promise<void>;
        }
    }
}
// Keep this file a module so the `declare global` augmentation takes effect.
export {};

0
node_modules/@astrojs/db/dist/core/types.js generated vendored Normal file
View File

19
node_modules/@astrojs/db/dist/core/utils.d.ts generated vendored Normal file
View File

@@ -0,0 +1,19 @@
import type { AstroConfig, AstroIntegration } from 'astro';
import type { Arguments } from 'yargs-parser';
import './types.js';
// A single Vite plugin entry, as accepted by Astro's `vite.plugins` config.
export type VitePlugin = Required<AstroConfig['vite']>['plugins'][number];
// Loads all ASTRO_-prefixed environment variables for the given mode.
export declare function getAstroEnv(envMode?: string): Record<`ASTRO_${string}`, string>;
// Connection details for the remote libSQL database.
export type RemoteDatabaseInfo = {
    url: string;
    token: string;
};
export declare function getRemoteDatabaseInfo(): RemoteDatabaseInfo;
// Overloads: result is only guaranteed defined when `envToken` is defined
// (a string CLI flag, when present, always wins over the env token).
export declare function resolveDbAppToken(flags: Arguments, envToken: string): string;
export declare function resolveDbAppToken(flags: Arguments, envToken: string | undefined): string | undefined;
// URL of the `db/` directory directly under the project root.
export declare function getDbDirectoryUrl(root: URL | string): URL;
// Runtime identity helper; exists to provide type inference for integrations.
export declare function defineDbIntegration(integration: AstroIntegration): AstroIntegration;
/**
 * Map an object's values to a new set of values
 * while preserving types.
 */
export declare function mapObject<T, U = T>(item: Record<string, T>, callback: (key: string, value: T) => U): Record<string, U>;

37
node_modules/@astrojs/db/dist/core/utils.js generated vendored Normal file
View File

@@ -0,0 +1,37 @@
import { loadEnv } from "vite";
import "./types.js";
function getAstroEnv(envMode = "") {
  // Collect every ASTRO_-prefixed variable for the given mode via Vite's env loader.
  return loadEnv(envMode, process.cwd(), "ASTRO_");
}
function getRemoteDatabaseInfo() {
  // Read the remote libSQL connection details from the ASTRO_ environment.
  const { ASTRO_DB_REMOTE_URL, ASTRO_DB_APP_TOKEN } = getAstroEnv();
  return {
    url: ASTRO_DB_REMOTE_URL,
    token: ASTRO_DB_APP_TOKEN
  };
}
function resolveDbAppToken(flags, envToken) {
  // A string --db-app-token CLI flag wins over the environment-provided token.
  const { dbAppToken } = flags;
  return typeof dbAppToken === "string" ? dbAppToken : envToken;
}
function getDbDirectoryUrl(root) {
  // The db/ directory always lives directly under the project root.
  const dbDir = "db/";
  return new URL(dbDir, root);
}
function defineDbIntegration(integration) {
  // Runtime identity: exists purely to give integration authors type inference.
  return integration;
}
function mapObject(item, callback) {
  // Rebuild the record with every value transformed by `callback`; keys are preserved.
  const mappedEntries = Object.entries(item).map(
    ([key, value]) => [key, callback(key, value)]
  );
  return Object.fromEntries(mappedEntries);
}
export {
defineDbIntegration,
getAstroEnv,
getDbDirectoryUrl,
getRemoteDatabaseInfo,
mapObject,
resolveDbAppToken
};

0
node_modules/@astrojs/db/dist/db-client.d.js generated vendored Normal file
View File

3
node_modules/@astrojs/db/dist/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,3 @@
export { cli } from './core/cli/index.js';
export { type AstroDBConfig, integration as default } from './core/integration/index.js';
export type { TableConfig } from './core/types.js';

6
node_modules/@astrojs/db/dist/index.js generated vendored Normal file
View File

@@ -0,0 +1,6 @@
import { cli } from "./core/cli/index.js";
import { integration } from "./core/integration/index.js";
export {
cli,
integration as default
};

4
node_modules/@astrojs/db/dist/runtime/errors.d.ts generated vendored Normal file
View File

@@ -0,0 +1,4 @@
// Builders for user-facing messages raised while validating the `tables`
// section of a db config.
// A foreign key's `references()` points at a table missing from the config.
export declare const FOREIGN_KEY_DNE_ERROR: (tableName: string) => string;
// A foreign key's `columns` and `references` arrays differ in length.
export declare const FOREIGN_KEY_REFERENCES_LENGTH_ERROR: (tableName: string) => string;
// A foreign key's `references` array is empty.
export declare const FOREIGN_KEY_REFERENCES_EMPTY_ERROR: (tableName: string) => string;
// A column-level `references()` points at a table missing from the config.
export declare const REFERENCE_DNE_ERROR: (columnName: string) => string;

27
node_modules/@astrojs/db/dist/runtime/errors.js generated vendored Normal file
View File

@@ -0,0 +1,27 @@
import colors from "piccolore";
// Message builders for misconfigured foreign keys / references; the offending
// table or column name is bolded for terminal output.
const FOREIGN_KEY_DNE_ERROR = (tableName) => `Table ${colors.bold(tableName)} references a table that does not exist. Did you apply the referenced table to the \`tables\` object in your db config?`;
const FOREIGN_KEY_REFERENCES_LENGTH_ERROR = (tableName) => `Foreign key on ${colors.bold(tableName)} is misconfigured. \`columns\` and \`references\` must be the same length.`;
const FOREIGN_KEY_REFERENCES_EMPTY_ERROR = (tableName) => `Foreign key on ${colors.bold(tableName)} is misconfigured. \`references\` array cannot be empty.`;
const REFERENCE_DNE_ERROR = (columnName) => `Column ${colors.bold(columnName)} references a table that does not exist. Did you apply the referenced table to the \`tables\` object in your db config?`;
export {
FOREIGN_KEY_DNE_ERROR,
FOREIGN_KEY_REFERENCES_EMPTY_ERROR,
FOREIGN_KEY_REFERENCES_LENGTH_ERROR,
REFERENCE_DNE_ERROR
};

31
node_modules/@astrojs/db/dist/runtime/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,31 @@
import { type ColumnDataType } from 'drizzle-orm';
import type { LibSQLDatabase } from 'drizzle-orm/libsql';
import type { DBTable } from '../core/types.js';
// The runtime database handle is a plain drizzle libSQL client.
export type Database = LibSQLDatabase;
export type { Table } from './types.js';
export { hasPrimaryKey } from './utils.js';
/**
 * Build a runtime drizzle table from a parsed DBTable definition. Column
 * typing here is intentionally broad (`unknown` data, string-indexed map)
 * because column shapes are only known at runtime.
 */
export declare function asDrizzleTable(name: string, table: DBTable): import("drizzle-orm/sqlite-core").SQLiteTableWithColumns<{
    name: string;
    schema: undefined;
    columns: {
        [x: string]: import("drizzle-orm/sqlite-core").SQLiteColumn<{
            name: string;
            tableName: string;
            dataType: ColumnDataType;
            columnType: string;
            data: unknown;
            driverParam: unknown;
            notNull: false;
            hasDefault: false;
            isPrimaryKey: false;
            isAutoincrement: false;
            hasRuntimeDefault: false;
            enumValues: string[] | undefined;
            baseColumn: never;
            identity: undefined;
            generated: undefined;
        }, {}, {}>;
    };
    dialect: "sqlite";
}>;
// Resolve an env-provided DB URL to an absolute file URL, or fall back to the default.
export declare function normalizeDatabaseUrl(envDbUrl: string | undefined, defaultDbUrl: string): string;

121
node_modules/@astrojs/db/dist/runtime/index.js generated vendored Normal file
View File

@@ -0,0 +1,121 @@
import { sql } from "drizzle-orm";
import {
customType,
index,
integer,
sqliteTable,
text
} from "drizzle-orm/sqlite-core";
import { isSerializedSQL } from "./types.js";
import { hasPrimaryKey, pathToFileURL } from "./utils.js";
import { hasPrimaryKey as hasPrimaryKey2 } from "./utils.js";
// Matches the ISO-8601 UTC shape produced by Date#toISOString (e.g.
// "2024-01-02T03:04:05.678Z"). The millisecond separator dot is escaped —
// the previous pattern's bare `.` accepted any character in that position.
const isISODateString = (str) => /\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z/.test(str);
// Custom drizzle column: dates are persisted as ISO-8601 text in SQLite.
const dateType = customType({
  dataType: () => "text",
  toDriver: (value) => value.toISOString(),
  fromDriver(value) {
    // Stored strings missing the ISO millisecond/Z shape get a "Z" appended
    // so they are parsed as UTC rather than local time.
    const normalized = isISODateString(value) ? value : value + "Z";
    return new Date(normalized);
  }
});
// Custom drizzle column: JSON values are serialized to text on write and
// revived with JSON.parse on read.
const jsonType = customType({
  dataType: () => "text",
  toDriver: (value) => JSON.stringify(value),
  fromDriver: (value) => JSON.parse(value)
});
function asDrizzleTable(name, table) {
  // Build the drizzle column map, injecting a hidden `_id` integer primary key
  // when the user's table does not declare a primary key of its own.
  const columnEntries = Object.entries(table.columns);
  const columns = {};
  const hasUserPrimaryKey = columnEntries.some(([, column]) => hasPrimaryKey(column));
  if (!hasUserPrimaryKey) {
    columns["_id"] = integer("_id").primaryKey();
  }
  for (const [columnName, column] of columnEntries) {
    columns[columnName] = columnMapper(columnName, column);
  }
  // Translate configured indexes; entries whose columns cannot be resolved to
  // a non-empty list are skipped.
  return sqliteTable(name, columns, (ormTable) => {
    const indexes = [];
    for (const [indexName, indexProps] of Object.entries(table.indexes ?? {})) {
      const onColNames = Array.isArray(indexProps.on) ? indexProps.on : [indexProps.on];
      const onCols = onColNames.map((colName) => ormTable[colName]);
      if (atLeastOne(onCols)) {
        indexes.push(index(indexName).on(...onCols));
      }
    }
    return indexes;
  });
}
function atLeastOne(arr) {
  // True when the array is non-empty; used to narrow spread arguments.
  return arr.length !== 0;
}
// Translate one parsed column definition into a drizzle column builder.
// Note the asymmetry mirrored from the schema: only text and number columns
// may become primary keys here; boolean/json/date never call primaryKey().
function columnMapper(columnName, column) {
  let builder;
  switch (column.type) {
    case "text": {
      builder = text(columnName, { enum: column.schema.enum });
      if (column.schema.default !== undefined) {
        builder = builder.default(handleSerializedSQL(column.schema.default));
      }
      if (column.schema.primaryKey === true) {
        builder = builder.primaryKey();
      }
      break;
    }
    case "number": {
      builder = integer(columnName);
      if (column.schema.default !== undefined) {
        builder = builder.default(handleSerializedSQL(column.schema.default));
      }
      if (column.schema.primaryKey === true) {
        builder = builder.primaryKey();
      }
      break;
    }
    case "boolean": {
      builder = integer(columnName, { mode: "boolean" });
      if (column.schema.default !== undefined) {
        builder = builder.default(handleSerializedSQL(column.schema.default));
      }
      break;
    }
    case "json": {
      builder = jsonType(columnName);
      if (column.schema.default !== undefined) {
        builder = builder.default(column.schema.default);
      }
      break;
    }
    case "date": {
      builder = dateType(columnName);
      if (column.schema.default !== undefined) {
        // String defaults are promoted to Date objects; serialized SQL
        // expressions (e.g. NOW) pass through as raw SQL.
        const def = handleSerializedSQL(column.schema.default);
        builder = builder.default(typeof def === "string" ? new Date(def) : def);
      }
      break;
    }
  }
  // Modifiers shared by every column type.
  if (!column.schema.optional) builder = builder.notNull();
  if (column.schema.unique) builder = builder.unique();
  return builder;
}
function handleSerializedSQL(def) {
  // Serialized SQL defaults (e.g. CURRENT_TIMESTAMP) become raw SQL
  // fragments; every other default passes through unchanged.
  return isSerializedSQL(def) ? sql.raw(def.sql) : def;
}
function normalizeDatabaseUrl(envDbUrl, defaultDbUrl) {
  // No override from the environment: use the provided default as-is.
  if (!envDbUrl) {
    return defaultDbUrl;
  }
  // Already an absolute file URL: nothing to normalize.
  if (envDbUrl.startsWith("file://")) {
    return envDbUrl;
  }
  // Otherwise treat it as a path relative to the current working directory.
  return new URL(envDbUrl, pathToFileURL(process.cwd()) + "/").toString();
}
export {
asDrizzleTable,
hasPrimaryKey2 as hasPrimaryKey,
normalizeDatabaseUrl
};

92
node_modules/@astrojs/db/dist/runtime/types.d.ts generated vendored Normal file
View File

@@ -0,0 +1,92 @@
import type { ColumnBaseConfig, ColumnDataType } from 'drizzle-orm';
import type { SQLiteColumn, SQLiteTableWithColumns } from 'drizzle-orm/sqlite-core';
import type { ColumnsConfig, DBColumn, OutputColumnsConfig } from '../core/types.js';
// The per-column flags that `Table` (below) computes from the user's config
// and injects into each Astro* column type.
type GeneratedConfig<T extends ColumnDataType = ColumnDataType> = Pick<ColumnBaseConfig<T, string>, 'name' | 'tableName' | 'notNull' | 'hasDefault' | 'hasRuntimeDefault' | 'isPrimaryKey'>;
// Text column: `data` narrows to the enum members when an enum is configured.
type AstroText<T extends GeneratedConfig<'string'>, E extends readonly [string, ...string[]] | string> = SQLiteColumn<T & {
    data: E extends readonly (infer U)[] ? U : string;
    dataType: 'string';
    columnType: 'SQLiteText';
    driverParam: string;
    enumValues: E extends [string, ...string[]] ? E : never;
    baseColumn: never;
    isAutoincrement: boolean;
    identity: undefined;
    generated: undefined;
}>;
// Date column: stored via a custom column type, surfaced as Date.
type AstroDate<T extends GeneratedConfig<'custom'>> = SQLiteColumn<T & {
    data: Date;
    dataType: 'custom';
    columnType: 'SQLiteCustomColumn';
    driverParam: string;
    enumValues: never;
    baseColumn: never;
    isAutoincrement: boolean;
    identity: undefined;
    generated: undefined;
}>;
// Boolean column: persisted as an integer at the driver level.
type AstroBoolean<T extends GeneratedConfig<'boolean'>> = SQLiteColumn<T & {
    data: boolean;
    dataType: 'boolean';
    columnType: 'SQLiteBoolean';
    driverParam: number;
    enumValues: never;
    baseColumn: never;
    isAutoincrement: boolean;
    identity: undefined;
    generated: undefined;
}>;
type AstroNumber<T extends GeneratedConfig<'number'>> = SQLiteColumn<T & {
    data: number;
    dataType: 'number';
    columnType: 'SQLiteInteger';
    driverParam: number;
    enumValues: never;
    baseColumn: never;
    isAutoincrement: boolean;
    identity: undefined;
    generated: undefined;
}>;
// JSON column: serialized to a string at the driver level, typed `unknown`.
type AstroJson<T extends GeneratedConfig<'custom'>> = SQLiteColumn<T & {
    data: unknown;
    dataType: 'custom';
    columnType: 'SQLiteCustomColumn';
    driverParam: string;
    enumValues: never;
    baseColumn: never;
    isAutoincrement: boolean;
    identity: undefined;
    generated: undefined;
}>;
// Dispatch from an Astro column `type` tag to the matching drizzle column type.
type Column<T extends DBColumn['type'], E extends readonly [string, ...string[]] | string, S extends GeneratedConfig> = T extends 'boolean' ? AstroBoolean<S> : T extends 'number' ? AstroNumber<S> : T extends 'text' ? AstroText<S, E> : T extends 'date' ? AstroDate<S> : T extends 'json' ? AstroJson<S> : never;
/**
 * Strongly-typed drizzle table derived from a columns config. Computes each
 * column's primaryKey / hasDefault / notNull flags from the config shape
 * (note: a primary key also counts as having a default).
 */
export type Table<TTableName extends string, TColumns extends OutputColumnsConfig | ColumnsConfig> = SQLiteTableWithColumns<{
    name: TTableName;
    schema: undefined;
    dialect: 'sqlite';
    columns: {
        [K in Extract<keyof TColumns, string>]: Column<TColumns[K]['type'], TColumns[K]['schema'] extends {
            enum: infer E;
        } ? E extends readonly [string, ...string[]] ? E : string : string, {
            tableName: TTableName;
            name: K;
            isPrimaryKey: TColumns[K]['schema'] extends {
                primaryKey: true;
            } ? true : false;
            hasDefault: TColumns[K]['schema'] extends {
                default: NonNullable<unknown>;
            } ? true : TColumns[K]['schema'] extends {
                primaryKey: true;
            } ? true : false;
            hasRuntimeDefault: TColumns[K]['schema'] extends {
                default: NonNullable<unknown>;
            } ? true : false;
            notNull: TColumns[K]['schema']['optional'] extends true ? false : true;
        }>;
    };
}>;
// Marker key/type for SQL expressions serialized across the config/runtime
// boundary; paired with the isSerializedSQL guard in types.js.
export declare const SERIALIZED_SQL_KEY = "__serializedSQL";
export type SerializedSQL = {
    [SERIALIZED_SQL_KEY]: true;
    sql: string;
};
export declare function isSerializedSQL(value: any): value is SerializedSQL;
export {};

8
node_modules/@astrojs/db/dist/runtime/types.js generated vendored Normal file
View File

@@ -0,0 +1,8 @@
// Marker key identifying a serialized drizzle SQL expression.
const SERIALIZED_SQL_KEY = "__serializedSQL";
// Guard: true only for non-null objects carrying the serialized-SQL marker key.
function isSerializedSQL(value) {
  if (value === null || typeof value !== "object") {
    return false;
  }
  return SERIALIZED_SQL_KEY in value;
}
export {
  SERIALIZED_SQL_KEY,
  isSerializedSQL
};

9
node_modules/@astrojs/db/dist/runtime/utils.d.ts generated vendored Normal file
View File

@@ -0,0 +1,9 @@
import { LibsqlError } from '@libsql/client';
import { AstroError } from 'astro/errors';
import type { DBColumn } from '../core/types.js';
// True when the column's schema declares a truthy `primaryKey`.
export declare function hasPrimaryKey(column: DBColumn): boolean;
// AstroError subclass so DB failures are labeled distinctly in Astro's output.
export declare class AstroDbError extends AstroError {
    name: string;
}
// Narrows unknown errors to libSQL client errors.
export declare function isDbError(err: unknown): err is LibsqlError;
// Minimal cross-platform path -> file URL conversion (handles Windows paths).
export declare function pathToFileURL(path: string): URL;

35
node_modules/@astrojs/db/dist/runtime/utils.js generated vendored Normal file
View File

@@ -0,0 +1,35 @@
import { LibsqlError } from "@libsql/client";
import { AstroError } from "astro/errors";
function hasPrimaryKey(column) {
  // A column is a primary key exactly when its schema carries a truthy `primaryKey`.
  return Boolean(column.schema.primaryKey);
}
// Cached once at module load; pathToFileURL below branches on it.
const isWindows = process?.platform === "win32";
// AstroError subclass so DB failures carry a distinct name in error output.
class AstroDbError extends AstroError {
  name = "Astro DB Error";
}
function isDbError(err) {
  // Either a real LibsqlError instance, or a plain Error flagged by the client.
  if (err instanceof LibsqlError) {
    return true;
  }
  return err instanceof Error && err.libsqlError === true;
}
function slash(path) {
  // Extended-length Windows paths (\\?\...) must keep their backslashes verbatim.
  if (path.startsWith("\\\\?\\")) {
    return path;
  }
  // Normalize every other path to forward slashes.
  return path.split("\\").join("/");
}
function pathToFileURL(path) {
  // POSIX paths are already valid URL paths; Windows paths need backslashes
  // normalized and a leading "/" so "C:/..." parses as a URL path.
  if (!isWindows) {
    return new URL("file://" + path);
  }
  let normalized = slash(path);
  if (!normalized.startsWith("/")) {
    normalized = "/" + normalized;
  }
  return new URL("file://" + normalized);
}
export {
AstroDbError,
hasPrimaryKey,
isDbError,
pathToFileURL
};

112
node_modules/@astrojs/db/dist/runtime/virtual.js generated vendored Normal file
View File

@@ -0,0 +1,112 @@
import { sql as _sql } from "drizzle-orm";
function createColumn(type, schema) {
  // Pair the column kind tag with its (internal) schema options.
  return { type, schema };
}
// Public column factories. Each forwards its options to createColumn with the
// matching type tag; options default to an empty object.
const column = {
  number: (opts = {}) => createColumn("number", opts),
  boolean: (opts = {}) => createColumn("boolean", opts),
  text: (opts = {}) => createColumn("text", opts),
  date: (opts = {}) => createColumn("date", opts),
  json: (opts = {}) => createColumn("json", opts)
};
function defineTable(userConfig) {
  // Runtime identity; exists to drive type inference for table configs.
  return userConfig;
}
function defineDb(userConfig) {
  // Runtime identity; exists to drive type inference for the db config.
  return userConfig;
}
// Commonly used SQL default expressions, exposed as drizzle `sql` fragments.
const NOW = _sql`CURRENT_TIMESTAMP`;
const TRUE = _sql`TRUE`;
const FALSE = _sql`FALSE`;
import {
and,
asc,
avg,
avgDistinct,
between,
count,
countDistinct,
desc,
eq,
exists,
gt,
gte,
ilike,
inArray,
isNotNull,
isNull,
like,
lt,
lte,
max,
min,
ne,
not,
notBetween,
notExists,
notIlike,
notInArray,
or,
sql,
sum,
sumDistinct
} from "drizzle-orm";
import { alias } from "drizzle-orm/sqlite-core";
import { isDbError } from "./utils.js";
export {
FALSE,
NOW,
TRUE,
alias,
and,
asc,
avg,
avgDistinct,
between,
column,
count,
countDistinct,
defineDb,
defineTable,
desc,
eq,
exists,
gt,
gte,
ilike,
inArray,
isDbError,
isNotNull,
isNull,
like,
lt,
lte,
max,
min,
ne,
not,
notBetween,
notExists,
notIlike,
notInArray,
or,
sql,
sum,
sumDistinct
};

4
node_modules/@astrojs/db/dist/utils.d.ts generated vendored Normal file
View File

@@ -0,0 +1,4 @@
export { defineDbIntegration } from './core/utils.js';
import type { ColumnsConfig, TableConfig } from './core/types.js';
import { type Table } from './runtime/index.js';
/**
 * Convert a user-level table config into a typed drizzle table (the config is
 * validated first), for querying outside Astro's generated table objects.
 */
export declare function asDrizzleTable<TableName extends string = string, TColumns extends ColumnsConfig = ColumnsConfig>(name: TableName, tableConfig: TableConfig<TColumns>): Table<TableName, TColumns>;

10
node_modules/@astrojs/db/dist/utils.js generated vendored Normal file
View File

@@ -0,0 +1,10 @@
import { defineDbIntegration } from "./core/utils.js";
import { tableSchema } from "./core/schemas.js";
import { asDrizzleTable as internal_asDrizzleTable } from "./runtime/index.js";
function asDrizzleTable(name, tableConfig) {
  // Validate/normalize the user config through the zod schema before handing
  // it to the runtime table builder.
  const parsedConfig = tableSchema.parse(tableConfig);
  return internal_asDrizzleTable(name, parsedConfig);
}
export {
asDrizzleTable,
defineDbIntegration
};

3
node_modules/@astrojs/db/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,3 @@
import './virtual.js';
export { default, cli } from './dist/index.js';

View File

@@ -0,0 +1,128 @@
<p align="center">
<a href="https://tur.so/turso-ts">
<picture>
<img src="/.github/cover.png" alt="libSQL TypeScript" />
</picture>
</a>
<h1 align="center">libSQL TypeScript</h1>
</p>
<p align="center">
Databases for all TypeScript and JS multi-tenant apps.
</p>
<p align="center">
<a href="https://tur.so/turso-ts"><strong>Turso</strong></a> ·
<a href="https://docs.turso.tech"><strong>Docs</strong></a> ·
<a href="https://docs.turso.tech/sdk/ts/quickstart"><strong>Quickstart</strong></a> ·
<a href="https://docs.turso.tech/sdk/ts/reference"><strong>SDK Reference</strong></a> ·
<a href="https://turso.tech/blog"><strong>Blog &amp; Tutorials</strong></a>
</p>
<p align="center">
<a href="LICENSE">
<picture>
<img src="https://img.shields.io/github/license/tursodatabase/libsql-client-ts?color=0F624B" alt="MIT License" />
</picture>
</a>
<a href="https://tur.so/discord-ts">
<picture>
<img src="https://img.shields.io/discord/933071162680958986?color=0F624B" alt="Discord" />
</picture>
</a>
<a href="#contributors">
<picture>
<img src="https://img.shields.io/github/contributors/tursodatabase/libsql-client-ts?color=0F624B" alt="Contributors" />
</picture>
</a>
<a href="https://www.npmjs.com/package/@libsql/client">
<picture>
<img src="https://img.shields.io/npm/dw/%40libsql%2Fclient?color=0F624B" alt="Weekly downloads" />
</picture>
</a>
<a href="/examples">
<picture>
<img src="https://img.shields.io/badge/browse-examples-0F624B" alt="Examples" />
</picture>
</a>
</p>
> **Looking for the Turso serverless package?** Check out [`@tursodatabase/serverless`](https://www.npmjs.com/package/@tursodatabase/serverless) — the lightest option with zero native dependencies, and the driver that will later support concurrent writes. Use `@libsql/client` if you need a battle-tested driver with ORM integration today.
## Features
- 🔌 Works offline with [Embedded Replicas](https://docs.turso.tech/features/embedded-replicas/introduction)
- 🌎 Works with remote Turso databases
- ✨ Works with Turso [AI & Vector Search](https://docs.turso.tech/features/ai-and-embeddings)
- 🔐 Supports [encryption at rest](https://docs.turso.tech/libsql#encryption-at-rest)
## Install
```bash
npm install @libsql/client
```
## Quickstart
The example below uses Embedded Replicas and syncs every minute from Turso.
```ts
import { createClient } from "@libsql/client";
export const turso = createClient({
url: "file:local.db",
syncUrl: process.env.TURSO_DATABASE_URL,
authToken: process.env.TURSO_AUTH_TOKEN,
syncInterval: 60000,
});
await turso.batch(
[
"CREATE TABLE IF NOT EXISTS users (id INTEGER PRIMARY KEY AUTOINCREMENT, name TEXT)",
{
sql: "INSERT INTO users(name) VALUES (?)",
args: ["Iku"],
},
],
"write",
);
await turso.execute({
sql: "SELECT * FROM users WHERE id = ?",
args: [1],
});
```
## Examples
| Example | Description |
| ------------------------------------- | --------------------------------------------------------------------------------------- |
| [local](examples/local) | Uses libsql with a local SQLite file. Creates database, inserts data, and queries. |
| [remote](examples/remote) | Connects to a remote database. Requires environment variables for URL and auth token. |
| [sync](examples/sync) | Demonstrates synchronization between local and remote databases. |
| [batch](examples/batch) | Executes multiple SQL statements in a single batch operation. |
| [transactions](examples/transactions) | Shows transaction usage: starting, performing operations, and committing/rolling back. |
| [memory](examples/memory) | Uses an in-memory SQLite database for temporary storage or fast access. |
| [vector](examples/vector) | Works with vector embeddings, storing and querying for similarity search. |
| [encryption](examples/encryption) | Creates and uses an encrypted SQLite database, demonstrating setup and data operations. |
| [ollama](examples/ollama) | Similarity search with Ollama and Mistral. |
## Documentation
Visit our [official documentation](https://docs.turso.tech/sdk/ts).
## Support
Join us [on Discord](https://tur.so/discord-ts) to get help using this SDK. Report security issues [via email](mailto:security@turso.tech).
## Contributors
See the [contributing guide](CONTRIBUTING.md) to learn how to get involved.
![Contributors](https://contrib.nn.ci/api?repo=tursodatabase/libsql-client-ts)
<a href="https://github.com/tursodatabase/libsql-client-ts/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22">
<picture>
<img src="https://img.shields.io/github/issues-search/tursodatabase/libsql-client-ts?label=good%20first%20issue&query=label%3A%22good%20first%20issue%22%20&color=0F624B" alt="good first issue" />
</picture>
</a>

View File

@@ -0,0 +1,372 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.mapHranaError = exports.resultSetFromHrana = exports.stmtToHrana = exports.executeHranaBatch = exports.HranaTransaction = void 0;
const hrana = __importStar(require("@libsql/hrana-client"));
const api_1 = require("@libsql/core/api");
const util_1 = require("@libsql/core/util");
/**
 * Shared implementation of interactive transactions over the Hrana protocol.
 * Subclasses supply the underlying stream via `_getStream()` and an SQL text
 * cache via `_getSqlCache()`. On Hrana version >= 3, batch steps are guarded
 * with `isAutocommit` conditions so statements never run outside the
 * transaction they belong to.
 */
class HranaTransaction {
    // BEGIN mode ("write", "read", "deferred") for this transaction.
    #mode;
    // Negotiated Hrana protocol version (>= 3 enables batch conditions).
    #version;
    // Promise that is resolved when the BEGIN statement completes, or `undefined` if we haven't executed the
    // BEGIN statement yet.
    #started;
    /** @private */
    constructor(mode, version) {
        this.#mode = mode;
        this.#version = version;
        this.#started = undefined;
    }
    // A single statement is just a one-element batch.
    execute(stmt) {
        return this.batch([stmt]).then((results) => results[0]);
    }
    async batch(stmts) {
        const stream = this._getStream();
        if (stream.closed) {
            throw new api_1.LibsqlError("Cannot execute statements because the transaction is closed", "TRANSACTION_CLOSED");
        }
        try {
            const hranaStmts = stmts.map(stmtToHrana);
            let rowsPromises;
            if (this.#started === undefined) {
                // The transaction hasn't started yet, so we need to send the BEGIN statement in a batch with
                // `hranaStmts`.
                this._getSqlCache().apply(hranaStmts);
                const batch = stream.batch(this.#version >= 3);
                const beginStep = batch.step();
                const beginPromise = beginStep.run((0, util_1.transactionModeToBegin)(this.#mode));
                // Execute the `hranaStmts` only if the BEGIN succeeded, to make sure that we don't execute it
                // outside of a transaction.
                let lastStep = beginStep;
                rowsPromises = hranaStmts.map((hranaStmt) => {
                    // Chain each statement on the success of the previous step.
                    const stmtStep = batch
                        .step()
                        .condition(hrana.BatchCond.ok(lastStep));
                    if (this.#version >= 3) {
                        // If the Hrana version supports it, make sure that we are still in a transaction
                        stmtStep.condition(hrana.BatchCond.not(hrana.BatchCond.isAutocommit(batch)));
                    }
                    const rowsPromise = stmtStep.query(hranaStmt);
                    rowsPromise.catch(() => undefined); // silence Node warning
                    lastStep = stmtStep;
                    return rowsPromise;
                });
                // `this.#started` is resolved successfully only if the batch and the BEGIN statement inside
                // of the batch are both successful.
                this.#started = batch
                    .execute()
                    .then(() => beginPromise)
                    .then(() => undefined);
                try {
                    await this.#started;
                }
                catch (e) {
                    // If the BEGIN failed, the transaction is unusable and we must close it. However, if the
                    // BEGIN succeeds and `hranaStmts` fail, the transaction is _not_ closed.
                    this.close();
                    throw e;
                }
            }
            else {
                if (this.#version < 3) {
                    // The transaction has started, so we must wait until the BEGIN statement completed to make
                    // sure that we don't execute `hranaStmts` outside of a transaction.
                    await this.#started;
                }
                else {
                    // The transaction has started, but we will use `hrana.BatchCond.isAutocommit()` to make
                    // sure that we don't execute `hranaStmts` outside of a transaction, so we don't have to
                    // wait for `this.#started`
                }
                this._getSqlCache().apply(hranaStmts);
                const batch = stream.batch(this.#version >= 3);
                let lastStep = undefined;
                rowsPromises = hranaStmts.map((hranaStmt) => {
                    const stmtStep = batch.step();
                    if (lastStep !== undefined) {
                        // Chain on the previous statement's success (no BEGIN step here).
                        stmtStep.condition(hrana.BatchCond.ok(lastStep));
                    }
                    if (this.#version >= 3) {
                        stmtStep.condition(hrana.BatchCond.not(hrana.BatchCond.isAutocommit(batch)));
                    }
                    const rowsPromise = stmtStep.query(hranaStmt);
                    rowsPromise.catch(() => undefined); // silence Node warning
                    lastStep = stmtStep;
                    return rowsPromise;
                });
                await batch.execute();
            }
            // Collect results in statement order; a skipped step yields `undefined`.
            const resultSets = [];
            for (let i = 0; i < rowsPromises.length; i++) {
                try {
                    const rows = await rowsPromises[i];
                    if (rows === undefined) {
                        throw new api_1.LibsqlBatchError("Statement in a transaction was not executed, " +
                            "probably because the transaction has been rolled back", i, "TRANSACTION_CLOSED");
                    }
                    resultSets.push(resultSetFromHrana(rows));
                }
                catch (e) {
                    if (e instanceof api_1.LibsqlBatchError) {
                        throw e;
                    }
                    // Map hrana errors to LibsqlError first, then wrap in LibsqlBatchError
                    const mappedError = mapHranaError(e);
                    if (mappedError instanceof api_1.LibsqlError) {
                        throw new api_1.LibsqlBatchError(mappedError.message, i, mappedError.code, mappedError.extendedCode, mappedError.rawCode, mappedError.cause instanceof Error
                            ? mappedError.cause
                            : undefined);
                    }
                    throw mappedError;
                }
            }
            return resultSets;
        }
        catch (e) {
            throw mapHranaError(e);
        }
    }
    // Runs a sequence of raw SQL statements (no results) inside the transaction.
    async executeMultiple(sql) {
        const stream = this._getStream();
        if (stream.closed) {
            throw new api_1.LibsqlError("Cannot execute statements because the transaction is closed", "TRANSACTION_CLOSED");
        }
        try {
            if (this.#started === undefined) {
                // If the transaction hasn't started yet, start it now
                this.#started = stream
                    .run((0, util_1.transactionModeToBegin)(this.#mode))
                    .then(() => undefined);
                try {
                    await this.#started;
                }
                catch (e) {
                    this.close();
                    throw e;
                }
            }
            else {
                // Wait until the transaction has started
                await this.#started;
            }
            await stream.sequence(sql);
        }
        catch (e) {
            throw mapHranaError(e);
        }
    }
    async rollback() {
        try {
            const stream = this._getStream();
            if (stream.closed) {
                return;
            }
            if (this.#started !== undefined) {
                // We don't have to wait for the BEGIN statement to complete. If the BEGIN fails, we will
                // execute a ROLLBACK outside of an active transaction, which should be harmless.
            }
            else {
                // We did nothing in the transaction, so there is nothing to rollback.
                return;
            }
            // Pipeline the ROLLBACK statement and the stream close.
            const promise = stream.run("ROLLBACK").catch((e) => {
                throw mapHranaError(e);
            });
            stream.closeGracefully();
            await promise;
        }
        catch (e) {
            throw mapHranaError(e);
        }
        finally {
            // `this.close()` may close the `hrana.Client`, which aborts all pending stream requests, so we
            // must call it _after_ we receive the ROLLBACK response.
            // Also note that the current stream should already be closed, but we need to call `this.close()`
            // anyway, because it may need to do more cleanup.
            this.close();
        }
    }
    async commit() {
        // (this method is analogous to `rollback()`)
        try {
            const stream = this._getStream();
            if (stream.closed) {
                throw new api_1.LibsqlError("Cannot commit the transaction because it is already closed", "TRANSACTION_CLOSED");
            }
            if (this.#started !== undefined) {
                // Make sure to execute the COMMIT only if the BEGIN was successful.
                await this.#started;
            }
            else {
                return;
            }
            const promise = stream.run("COMMIT").catch((e) => {
                throw mapHranaError(e);
            });
            stream.closeGracefully();
            await promise;
        }
        catch (e) {
            throw mapHranaError(e);
        }
        finally {
            this.close();
        }
    }
}
exports.HranaTransaction = HranaTransaction;
/**
 * Executes `hranaStmts` inside one BEGIN…COMMIT pipelined batch. Each step is
 * conditioned on the previous step's success (and, on Hrana >= 3, on still
 * being inside the transaction); a trailing conditional ROLLBACK runs only if
 * the COMMIT step did not succeed. When `disableForeignKeys` is set, the whole
 * batch is wrapped in `PRAGMA foreign_keys=off` / `=on` steps.
 */
async function executeHranaBatch(mode, version, batch, hranaStmts, disableForeignKeys = false) {
    if (disableForeignKeys) {
        batch.step().run("PRAGMA foreign_keys=off");
    }
    const beginStep = batch.step();
    const beginPromise = beginStep.run((0, util_1.transactionModeToBegin)(mode));
    // Chain every statement on the success of the previous step.
    let lastStep = beginStep;
    const stmtPromises = hranaStmts.map((hranaStmt) => {
        const stmtStep = batch.step().condition(hrana.BatchCond.ok(lastStep));
        if (version >= 3) {
            stmtStep.condition(hrana.BatchCond.not(hrana.BatchCond.isAutocommit(batch)));
        }
        const stmtPromise = stmtStep.query(hranaStmt);
        lastStep = stmtStep;
        return stmtPromise;
    });
    // COMMIT only when the last statement succeeded (and we are still in the
    // transaction on Hrana >= 3); otherwise the ROLLBACK step below fires.
    const commitStep = batch.step().condition(hrana.BatchCond.ok(lastStep));
    if (version >= 3) {
        commitStep.condition(hrana.BatchCond.not(hrana.BatchCond.isAutocommit(batch)));
    }
    const commitPromise = commitStep.run("COMMIT");
    const rollbackStep = batch
        .step()
        .condition(hrana.BatchCond.not(hrana.BatchCond.ok(commitStep)));
    rollbackStep.run("ROLLBACK").catch((_) => undefined);
    if (disableForeignKeys) {
        batch.step().run("PRAGMA foreign_keys=on");
    }
    await batch.execute();
    // Collect per-statement results; a skipped step resolves to `undefined`.
    const resultSets = [];
    await beginPromise;
    for (let i = 0; i < stmtPromises.length; i++) {
        try {
            const hranaRows = await stmtPromises[i];
            if (hranaRows === undefined) {
                throw new api_1.LibsqlBatchError("Statement in a batch was not executed, probably because the transaction has been rolled back", i, "TRANSACTION_CLOSED");
            }
            resultSets.push(resultSetFromHrana(hranaRows));
        }
        catch (e) {
            if (e instanceof api_1.LibsqlBatchError) {
                throw e;
            }
            // Map hrana errors to LibsqlError first, then wrap in LibsqlBatchError
            const mappedError = mapHranaError(e);
            if (mappedError instanceof api_1.LibsqlError) {
                throw new api_1.LibsqlBatchError(mappedError.message, i, mappedError.code, mappedError.extendedCode, mappedError.rawCode, mappedError.cause instanceof Error
                    ? mappedError.cause
                    : undefined);
            }
            throw mappedError;
        }
    }
    await commitPromise;
    return resultSets;
}
exports.executeHranaBatch = executeHranaBatch;
function stmtToHrana(stmt) {
    // Accept the three public statement shapes: a bare SQL string, a
    // [sql, args] tuple, or a { sql, args } object.
    let sql;
    let args;
    if (typeof stmt === "string") {
        sql = stmt;
    }
    else if (Array.isArray(stmt)) {
        [sql, args] = stmt;
    }
    else {
        sql = stmt.sql;
        args = stmt.args;
    }
    const hranaStmt = new hrana.Stmt(sql);
    if (args) {
        // Positional arrays bind by index; plain objects bind by name.
        if (Array.isArray(args)) {
            hranaStmt.bindIndexes(args);
        }
        else {
            for (const [name, value] of Object.entries(args)) {
                hranaStmt.bindName(name, value);
            }
        }
    }
    return hranaStmt;
}
exports.stmtToHrana = stmtToHrana;
function resultSetFromHrana(hranaRows) {
    // Adapt a hrana rows result to the public ResultSet shape; missing column
    // names/decltypes are replaced with empty strings.
    const columns = hranaRows.columnNames.map((c) => c ?? "");
    const columnTypes = hranaRows.columnDecltypes.map((c) => c ?? "");
    const rows = hranaRows.rows;
    const rowsAffected = hranaRows.affectedRowCount;
    // NOTE(review): the previous `x !== undefined ? x : undefined` ternary was
    // a no-op; pass the value straight through.
    const lastInsertRowid = hranaRows.lastInsertRowid;
    return new util_1.ResultSetImpl(columns, columnTypes, rows, rowsAffected, lastInsertRowid);
}
exports.resultSetFromHrana = resultSetFromHrana;
/**
 * Translates a `hrana.ClientError` into a `LibsqlError`; any other value is
 * returned unchanged so callers can rethrow it as-is.
 */
function mapHranaError(e) {
    if (!(e instanceof hrana.ClientError)) {
        return e;
    }
    // TODO: Parse extendedCode once the SQL over HTTP protocol supports it
    return new api_1.LibsqlError(e.message, mapHranaErrorCode(e), undefined, undefined, e);
}
exports.mapHranaError = mapHranaError;
/** Maps a hrana client error to a `LibsqlError` code string. */
function mapHranaErrorCode(e) {
    if (e instanceof hrana.ResponseError && e.code !== undefined) {
        return e.code;
    }
    if (e instanceof hrana.ProtoError) {
        return "HRANA_PROTO_ERROR";
    }
    if (e instanceof hrana.ClosedError) {
        // If the stream was closed because of another client error, propagate
        // that error's code instead of the generic "closed" code.
        return e.cause instanceof hrana.ClientError
            ? mapHranaErrorCode(e.cause)
            : "HRANA_CLOSED_ERROR";
    }
    if (e instanceof hrana.WebSocketError) {
        return "HRANA_WEBSOCKET_ERROR";
    }
    if (e instanceof hrana.HttpServerError) {
        return "SERVER_ERROR";
    }
    if (e instanceof hrana.ProtocolVersionError) {
        return "PROTOCOL_VERSION_ERROR";
    }
    if (e instanceof hrana.InternalError) {
        return "INTERNAL_ERROR";
    }
    return "UNKNOWN";
}

View File

@@ -0,0 +1,268 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __exportStar = (this && this.__exportStar) || function(m, exports) {
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.HttpTransaction = exports.HttpClient = exports._createClient = exports.createClient = void 0;
const hrana = __importStar(require("@libsql/hrana-client"));
const api_1 = require("@libsql/core/api");
const config_1 = require("@libsql/core/config");
const hrana_js_1 = require("./hrana.js");
const sql_cache_js_1 = require("./sql_cache.js");
const uri_1 = require("@libsql/core/uri");
const util_1 = require("@libsql/core/util");
const promise_limit_1 = __importDefault(require("promise-limit"));
__exportStar(require("@libsql/core/api"), exports);
// Creates an HTTP-mode libsql Client from a user-supplied Config; the config
// is expanded/validated before being handed to the private constructor path.
function createClient(config) {
    return _createClient((0, config_1.expandConfig)(config, true));
}
exports.createClient = createClient;
/** @private */
// Validates the expanded config for HTTP mode (scheme must be http/https, the
// ?tls flag must agree with the scheme, local-only options are rejected) and
// constructs an HttpClient.
function _createClient(config) {
    if (config.scheme !== "https" && config.scheme !== "http") {
        throw new api_1.LibsqlError('The HTTP client supports only "libsql:", "https:" and "http:" URLs, ' +
            `got ${JSON.stringify(config.scheme + ":")}. For more information, please read ${util_1.supportedUrlLink}`, "URL_SCHEME_NOT_SUPPORTED");
    }
    // Local-db encryption is only meaningful for file-backed clients.
    if (config.encryptionKey !== undefined) {
        throw new api_1.LibsqlError("Encryption key is not supported by the remote client.", "ENCRYPTION_KEY_NOT_SUPPORTED");
    }
    if (config.scheme === "http" && config.tls) {
        throw new api_1.LibsqlError(`A "http:" URL cannot opt into TLS by using ?tls=1`, "URL_INVALID");
    }
    else if (config.scheme === "https" && !config.tls) {
        throw new api_1.LibsqlError(`A "https:" URL cannot opt out of TLS by using ?tls=0`, "URL_INVALID");
    }
    const url = (0, uri_1.encodeBaseUrl)(config.scheme, config.authority, config.path);
    return new HttpClient(url, config.authToken, config.intMode, config.fetch, config.concurrency, config.remoteEncryptionKey);
}
exports._createClient = _createClient;
// Maximum number of SQL texts cached per stream in HTTP mode.
const sqlCacheCapacity = 30;
/**
 * Hrana-over-HTTP implementation of the libsql client.
 *
 * Each operation opens a fresh hrana stream, schedules all of its requests
 * synchronously (so they are pipelined into a single HTTP round trip) and then
 * closes the stream. Overall concurrency is bounded by a promise-limit gate.
 */
class HttpClient {
    // Underlying hrana HTTP client (recreated by reconnect()).
    #client;
    protocol;
    #url;
    #intMode;
    #customFetch;
    #concurrency;
    #authToken;
    #remoteEncryptionKey;
    // Gate that limits the number of concurrently running operations.
    #promiseLimitFunction;
    /** @private */
    constructor(url, authToken, intMode, customFetch, concurrency, remoteEncryptionKey) {
        this.#url = url;
        this.#authToken = authToken;
        this.#intMode = intMode;
        this.#customFetch = customFetch;
        this.#concurrency = concurrency;
        this.#remoteEncryptionKey = remoteEncryptionKey;
        this.#client = hrana.openHttp(this.#url, this.#authToken, this.#customFetch, remoteEncryptionKey);
        this.#client.intMode = this.#intMode;
        this.protocol = "http";
        this.#promiseLimitFunction = (0, promise_limit_1.default)(this.#concurrency);
    }
    // Runs `fn` under the concurrency limiter.
    async limit(fn) {
        return this.#promiseLimitFunction(fn);
    }
    // Executes a single statement; accepts either (sql, args) or a statement object.
    async execute(stmtOrSql, args) {
        let stmt;
        if (typeof stmtOrSql === "string") {
            stmt = {
                sql: stmtOrSql,
                args: args || [],
            };
        }
        else {
            stmt = stmtOrSql;
        }
        return this.limit(async () => {
            try {
                const hranaStmt = (0, hrana_js_1.stmtToHrana)(stmt);
                // Pipeline all operations, so `hrana.HttpClient` can open the stream, execute the statement and
                // close the stream in a single HTTP request.
                let rowsPromise;
                const stream = this.#client.openStream();
                try {
                    rowsPromise = stream.query(hranaStmt);
                }
                finally {
                    // Close is scheduled before awaiting so it joins the same pipeline.
                    stream.closeGracefully();
                }
                const rowsResult = await rowsPromise;
                return (0, hrana_js_1.resultSetFromHrana)(rowsResult);
            }
            catch (e) {
                throw (0, hrana_js_1.mapHranaError)(e);
            }
        });
    }
    // Executes statements as a batch inside a single transaction of the given mode.
    async batch(stmts, mode = "deferred") {
        return this.limit(async () => {
            try {
                // Normalize [sql, args] tuples into statement objects.
                const normalizedStmts = stmts.map((stmt) => {
                    if (Array.isArray(stmt)) {
                        return {
                            sql: stmt[0],
                            args: stmt[1] || [],
                        };
                    }
                    return stmt;
                });
                const hranaStmts = normalizedStmts.map(hrana_js_1.stmtToHrana);
                const version = await this.#client.getVersion();
                // Pipeline all operations, so `hrana.HttpClient` can open the stream, execute the batch and
                // close the stream in a single HTTP request.
                let resultsPromise;
                const stream = this.#client.openStream();
                try {
                    // It makes sense to use a SQL cache even for a single batch, because it may contain the same
                    // statement repeated multiple times.
                    const sqlCache = new sql_cache_js_1.SqlCache(stream, sqlCacheCapacity);
                    sqlCache.apply(hranaStmts);
                    // TODO: we do not use a cursor here, because it would cause three roundtrips:
                    // 1. pipeline request to store SQL texts
                    // 2. cursor request
                    // 3. pipeline request to close the stream
                    const batch = stream.batch(false);
                    resultsPromise = (0, hrana_js_1.executeHranaBatch)(mode, version, batch, hranaStmts);
                }
                finally {
                    stream.closeGracefully();
                }
                const results = await resultsPromise;
                return results;
            }
            catch (e) {
                throw (0, hrana_js_1.mapHranaError)(e);
            }
        });
    }
    // Runs migration statements as a deferred batch with foreign keys disabled
    // (the `true` flag passed to executeHranaBatch).
    async migrate(stmts) {
        return this.limit(async () => {
            try {
                const hranaStmts = stmts.map(hrana_js_1.stmtToHrana);
                const version = await this.#client.getVersion();
                // Pipeline all operations, so `hrana.HttpClient` can open the stream, execute the batch and
                // close the stream in a single HTTP request.
                let resultsPromise;
                const stream = this.#client.openStream();
                try {
                    const batch = stream.batch(false);
                    resultsPromise = (0, hrana_js_1.executeHranaBatch)("deferred", version, batch, hranaStmts, true);
                }
                finally {
                    stream.closeGracefully();
                }
                const results = await resultsPromise;
                return results;
            }
            catch (e) {
                throw (0, hrana_js_1.mapHranaError)(e);
            }
        });
    }
    // Opens an interactive transaction on a dedicated stream.
    async transaction(mode = "write") {
        return this.limit(async () => {
            try {
                const version = await this.#client.getVersion();
                return new HttpTransaction(this.#client.openStream(), mode, version);
            }
            catch (e) {
                throw (0, hrana_js_1.mapHranaError)(e);
            }
        });
    }
    // Executes a sequence of SQL statements separated by semicolons; no results returned.
    async executeMultiple(sql) {
        return this.limit(async () => {
            try {
                // Pipeline all operations, so `hrana.HttpClient` can open the stream, execute the sequence and
                // close the stream in a single HTTP request.
                let promise;
                const stream = this.#client.openStream();
                try {
                    promise = stream.sequence(sql);
                }
                finally {
                    stream.closeGracefully();
                }
                await promise;
            }
            catch (e) {
                throw (0, hrana_js_1.mapHranaError)(e);
            }
        });
    }
    // Embedded-replica sync is only meaningful for file-backed clients.
    sync() {
        throw new api_1.LibsqlError("sync not supported in http mode", "SYNC_NOT_SUPPORTED");
    }
    close() {
        this.#client.close();
    }
    // Tears down the current hrana client and builds a fresh one with the same settings.
    async reconnect() {
        try {
            if (!this.closed) {
                // Abort in-flight ops and free resources
                this.#client.close();
            }
        }
        finally {
            // Recreate the underlying hrana client
            this.#client = hrana.openHttp(this.#url, this.#authToken, this.#customFetch, this.#remoteEncryptionKey);
            this.#client.intMode = this.#intMode;
        }
    }
    get closed() {
        return this.#client.closed;
    }
}
exports.HttpClient = HttpClient;
/**
 * Interactive transaction over a dedicated hrana HTTP stream. The BEGIN /
 * COMMIT / ROLLBACK sequencing lives in the shared `HranaTransaction` base
 * class; this subclass only supplies the stream and its SQL cache.
 */
class HttpTransaction extends hrana_js_1.HranaTransaction {
    #stream;
    #sqlCache;
    /** @private */
    constructor(stream, mode, version) {
        super(mode, version);
        this.#stream = stream;
        this.#sqlCache = new sql_cache_js_1.SqlCache(stream, sqlCacheCapacity);
    }
    /** @private */
    _getStream() {
        return this.#stream;
    }
    /** @private */
    _getSqlCache() {
        return this.#sqlCache;
    }
    close() {
        this.#stream.close();
    }
    get closed() {
        return this.#stream.closed;
    }
}
exports.HttpTransaction = HttpTransaction;

View File

@@ -0,0 +1,41 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __exportStar = (this && this.__exportStar) || function(m, exports) {
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.createClient = void 0;
const config_1 = require("@libsql/core/config");
const sqlite3_js_1 = require("./sqlite3.js");
const ws_js_1 = require("./ws.js");
const http_js_1 = require("./http.js");
__exportStar(require("@libsql/core/api"), exports);
/** Creates a {@link Client} object.
 *
 * You must pass at least an `url` in the {@link Config} object.
 */
function createClient(config) {
    // Expand/validate the config, then dispatch on the URL scheme.
    return _createClient((0, config_1.expandConfig)(config, true));
}
exports.createClient = createClient;
/**
 * Dispatches to the WebSocket, HTTP, or local sqlite3 client implementation
 * based on the scheme of the already-expanded config.
 */
function _createClient(config) {
    switch (config.scheme) {
        case "wss":
        case "ws":
            return (0, ws_js_1._createClient)(config);
        case "https":
        case "http":
            return (0, http_js_1._createClient)(config);
        default:
            // Anything else (e.g. "file:") is handled by the local client.
            return (0, sqlite3_js_1._createClient)(config);
    }
}

View File

@@ -0,0 +1,3 @@
{
"type": "commonjs"
}

View File

@@ -0,0 +1,91 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.SqlCache = void 0;
/**
 * LRU cache of `hrana.Sql` objects (SQL texts stored on the server), keyed by
 * SQL text and scoped to one `owner` (a hrana stream or client).
 */
class SqlCache {
    // The hrana object on which SQL texts are stored via `storeSql`.
    #owner;
    // Lru mapping SQL text -> stored `hrana.Sql` object.
    #sqls;
    // Maximum number of cached SQL texts; <= 0 disables the cache.
    capacity;
    constructor(owner, capacity) {
        this.#owner = owner;
        this.#sqls = new Lru();
        this.capacity = capacity;
    }
    // Replaces SQL strings with cached `hrana.Sql` objects in the statements in `hranaStmts`. After this
    // function returns, we guarantee that all `hranaStmts` refer to valid (not closed) `hrana.Sql` objects,
    // but _we may invalidate any other `hrana.Sql` objects_ (by closing them, thus removing them from the
    // server).
    //
    // In practice, this means that after calling this function, you can use the statements only up to the
    // first `await`, because concurrent code may also use the cache and invalidate those statements.
    apply(hranaStmts) {
        if (this.capacity <= 0) {
            return;
        }
        // SQL objects handed out during this call; they must not be evicted below.
        const usedSqlObjs = new Set();
        for (const hranaStmt of hranaStmts) {
            // Skip statements whose SQL is already a stored `hrana.Sql` object.
            if (typeof hranaStmt.sql !== "string") {
                continue;
            }
            const sqlText = hranaStmt.sql;
            // Stored SQL cannot exceed 5kb.
            // https://github.com/tursodatabase/libsql/blob/e9d637e051685f92b0da43849507b5ef4232fbeb/libsql-server/src/hrana/http/request.rs#L10
            if (sqlText.length >= 5000) {
                continue;
            }
            let sqlObj = this.#sqls.get(sqlText);
            if (sqlObj === undefined) {
                // Evict least-recently-used entries until there is room for one more.
                while (this.#sqls.size + 1 > this.capacity) {
                    const [evictSqlText, evictSqlObj] = this.#sqls.peekLru();
                    if (usedSqlObjs.has(evictSqlObj)) {
                        // The SQL object that we are trying to evict is already in use in this batch, so we
                        // must not evict and close it.
                        break;
                    }
                    evictSqlObj.close();
                    this.#sqls.delete(evictSqlText);
                }
                // Only store if eviction actually freed a slot (it may not have,
                // if all entries are in use by this batch).
                if (this.#sqls.size + 1 <= this.capacity) {
                    sqlObj = this.#owner.storeSql(sqlText);
                    this.#sqls.set(sqlText, sqlObj);
                }
            }
            if (sqlObj !== undefined) {
                hranaStmt.sql = sqlObj;
                usedSqlObjs.add(sqlObj);
            }
        }
    }
}
exports.SqlCache = SqlCache;
/**
 * Minimal LRU map built on the insertion-order guarantee of `Map`: the
 * least-recently-used entry is always first in iteration order, and `get`
 * refreshes an entry by re-inserting it at the end.
 */
class Lru {
    // key -> value, ordered from least to most recently used.
    #entries;
    constructor() {
        this.#entries = new Map();
    }
    /** Returns the cached value (or undefined) and marks it most recently used. */
    get(key) {
        const value = this.#entries.get(key);
        if (value === undefined) {
            return undefined;
        }
        // Re-insert to move the entry to the most-recently-used position.
        this.#entries.delete(key);
        this.#entries.set(key, value);
        return value;
    }
    set(key, value) {
        this.#entries.set(key, value);
    }
    /** Returns the least-recently-used [key, value] pair without removing it. */
    peekLru() {
        const first = this.#entries.entries().next();
        return first.done ? undefined : first.value;
    }
    delete(key) {
        this.#entries.delete(key);
    }
    get size() {
        return this.#entries.size;
    }
}

View File

@@ -0,0 +1,500 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __exportStar = (this && this.__exportStar) || function(m, exports) {
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.Sqlite3Transaction = exports.Sqlite3Client = exports._createClient = exports.createClient = void 0;
const libsql_1 = __importDefault(require("libsql"));
const node_buffer_1 = require("node:buffer");
const api_1 = require("@libsql/core/api");
const config_1 = require("@libsql/core/config");
const util_1 = require("@libsql/core/util");
__exportStar(require("@libsql/core/api"), exports);
// Creates a file-backed libsql Client from a user-supplied Config; the config
// is expanded/validated before being handed to the private constructor path.
function createClient(config) {
    return _createClient((0, config_1.expandConfig)(config, true));
}
exports.createClient = createClient;
/** @private */
// Validates the expanded config for the local sqlite3 client ("file:" scheme,
// no host/port/userinfo in the URL), opens the native database and performs a
// smoke-test query to surface open errors eagerly.
function _createClient(config) {
    if (config.scheme !== "file") {
        throw new api_1.LibsqlError(`URL scheme ${JSON.stringify(config.scheme + ":")} is not supported by the local sqlite3 client. ` +
            `For more information, please read ${util_1.supportedUrlLink}`, "URL_SCHEME_NOT_SUPPORTED");
    }
    const authority = config.authority;
    if (authority !== undefined) {
        const host = authority.host.toLowerCase();
        if (host !== "" && host !== "localhost") {
            throw new api_1.LibsqlError(`Invalid host in file URL: ${JSON.stringify(authority.host)}. ` +
                'A "file:" URL with an absolute path should start with one slash ("file:/absolute/path.db") ' +
                'or with three slashes ("file:///absolute/path.db"). ' +
                `For more information, please read ${util_1.supportedUrlLink}`, "URL_INVALID");
        }
        if (authority.port !== undefined) {
            throw new api_1.LibsqlError("File URL cannot have a port", "URL_INVALID");
        }
        if (authority.userinfo !== undefined) {
            throw new api_1.LibsqlError("File URL cannot have username and password", "URL_INVALID");
        }
    }
    let isInMemory = (0, config_1.isInMemoryConfig)(config);
    // An embedded replica needs a real file to sync into.
    if (isInMemory && config.syncUrl) {
        throw new api_1.LibsqlError(`Embedded replica must use file for local db but URI with in-memory mode were provided instead: ${config.path}`, "URL_INVALID");
    }
    let path = config.path;
    if (isInMemory) {
        // note: we should prepend file scheme in order for SQLite3 to recognize :memory: connection query parameters
        path = `${config.scheme}:${config.path}`;
    }
    const options = {
        authToken: config.authToken,
        encryptionKey: config.encryptionKey,
        remoteEncryptionKey: config.remoteEncryptionKey,
        syncUrl: config.syncUrl,
        syncPeriod: config.syncInterval,
        readYourWrites: config.readYourWrites,
        offline: config.offline,
    };
    const db = new libsql_1.default(path, options);
    // Fail fast if the database cannot actually be opened.
    executeStmt(db, "SELECT 1 AS checkThatTheDatabaseCanBeOpened", config.intMode);
    return new Sqlite3Client(path, options, db, config.intMode);
}
exports._createClient = _createClient;
/**
 * libsql client backed by the local native `libsql` driver. The database
 * connection is created lazily via #getDb() and recreated after close() or
 * reconnect().
 */
class Sqlite3Client {
    #path;
    #options;
    // Native database handle, or null if a fresh one must be opened lazily.
    #db;
    #intMode;
    closed;
    protocol;
    /** @private */
    constructor(path, options, db, intMode) {
        this.#path = path;
        this.#options = options;
        this.#db = db;
        this.#intMode = intMode;
        this.closed = false;
        this.protocol = "file";
    }
    // Executes a single statement; accepts either (sql, args) or a statement object.
    async execute(stmtOrSql, args) {
        let stmt;
        if (typeof stmtOrSql === "string") {
            stmt = {
                sql: stmtOrSql,
                args: args || [],
            };
        }
        else {
            stmt = stmtOrSql;
        }
        this.#checkNotClosed();
        return executeStmt(this.#getDb(), stmt, this.#intMode);
    }
    // Runs `stmts` inside a single transaction of the given mode; the finally
    // block rolls back if COMMIT was never reached.
    async batch(stmts, mode = "deferred") {
        this.#checkNotClosed();
        const db = this.#getDb();
        try {
            executeStmt(db, (0, util_1.transactionModeToBegin)(mode), this.#intMode);
            const resultSets = [];
            for (let i = 0; i < stmts.length; i++) {
                try {
                    // A prior statement may have rolled the transaction back (e.g. ON CONFLICT ROLLBACK).
                    if (!db.inTransaction) {
                        throw new api_1.LibsqlBatchError("The transaction has been rolled back", i, "TRANSACTION_CLOSED");
                    }
                    const stmt = stmts[i];
                    const normalizedStmt = Array.isArray(stmt)
                        ? { sql: stmt[0], args: stmt[1] || [] }
                        : stmt;
                    resultSets.push(executeStmt(db, normalizedStmt, this.#intMode));
                }
                catch (e) {
                    if (e instanceof api_1.LibsqlBatchError) {
                        throw e;
                    }
                    // Wrap driver errors with the index of the failing statement.
                    if (e instanceof api_1.LibsqlError) {
                        throw new api_1.LibsqlBatchError(e.message, i, e.code, e.extendedCode, e.rawCode, e.cause instanceof Error ? e.cause : undefined);
                    }
                    throw e;
                }
            }
            executeStmt(db, "COMMIT", this.#intMode);
            return resultSets;
        }
        finally {
            if (db.inTransaction) {
                executeStmt(db, "ROLLBACK", this.#intMode);
            }
        }
    }
    // Like batch(), but deferred mode with foreign keys disabled for the
    // duration — intended for schema migrations. Foreign keys are re-enabled
    // even if the migration fails.
    async migrate(stmts) {
        this.#checkNotClosed();
        const db = this.#getDb();
        try {
            executeStmt(db, "PRAGMA foreign_keys=off", this.#intMode);
            executeStmt(db, (0, util_1.transactionModeToBegin)("deferred"), this.#intMode);
            const resultSets = [];
            for (let i = 0; i < stmts.length; i++) {
                try {
                    if (!db.inTransaction) {
                        throw new api_1.LibsqlBatchError("The transaction has been rolled back", i, "TRANSACTION_CLOSED");
                    }
                    resultSets.push(executeStmt(db, stmts[i], this.#intMode));
                }
                catch (e) {
                    if (e instanceof api_1.LibsqlBatchError) {
                        throw e;
                    }
                    if (e instanceof api_1.LibsqlError) {
                        throw new api_1.LibsqlBatchError(e.message, i, e.code, e.extendedCode, e.rawCode, e.cause instanceof Error ? e.cause : undefined);
                    }
                    throw e;
                }
            }
            executeStmt(db, "COMMIT", this.#intMode);
            return resultSets;
        }
        finally {
            if (db.inTransaction) {
                executeStmt(db, "ROLLBACK", this.#intMode);
            }
            executeStmt(db, "PRAGMA foreign_keys=on", this.#intMode);
        }
    }
    // Opens an interactive transaction; the current connection is handed over
    // to the transaction and the client will lazily open a new one.
    async transaction(mode = "write") {
        const db = this.#getDb();
        executeStmt(db, (0, util_1.transactionModeToBegin)(mode), this.#intMode);
        this.#db = null; // A new connection will be lazily created on next use
        return new Sqlite3Transaction(db, this.#intMode);
    }
    // Executes a string of semicolon-separated statements; rolls back any
    // transaction the script left open.
    async executeMultiple(sql) {
        this.#checkNotClosed();
        const db = this.#getDb();
        try {
            return executeMultiple(db, sql);
        }
        finally {
            if (db.inTransaction) {
                executeStmt(db, "ROLLBACK", this.#intMode);
            }
        }
    }
    // Syncs an embedded replica with its remote; returns frame statistics.
    async sync() {
        this.#checkNotClosed();
        const rep = await this.#getDb().sync();
        return {
            frames_synced: rep.frames_synced,
            frame_no: rep.frame_no,
        };
    }
    // Closes the current native handle (if any) and opens a fresh one.
    async reconnect() {
        try {
            if (!this.closed && this.#db !== null) {
                this.#db.close();
            }
        }
        finally {
            this.#db = new libsql_1.default(this.#path, this.#options);
            this.closed = false;
        }
    }
    close() {
        this.closed = true;
        if (this.#db !== null) {
            this.#db.close();
            this.#db = null;
        }
    }
    #checkNotClosed() {
        if (this.closed) {
            throw new api_1.LibsqlError("The client is closed", "CLIENT_CLOSED");
        }
    }
    // Lazily creates the database connection and returns it
    #getDb() {
        if (this.#db === null) {
            this.#db = new libsql_1.default(this.#path, this.#options);
        }
        return this.#db;
    }
}
exports.Sqlite3Client = Sqlite3Client;
/**
 * Interactive transaction over a dedicated native database connection. The
 * transaction is considered closed once the connection is no longer inside a
 * SQLite transaction (after COMMIT, ROLLBACK, or an implicit rollback).
 */
class Sqlite3Transaction {
    // Native connection owned by this transaction (handed over by the client).
    #database;
    #intMode;
    /** @private */
    constructor(database, intMode) {
        this.#database = database;
        this.#intMode = intMode;
    }
    // Executes a single statement inside the transaction.
    async execute(stmtOrSql, args) {
        let stmt;
        if (typeof stmtOrSql === "string") {
            stmt = {
                sql: stmtOrSql,
                args: args || [],
            };
        }
        else {
            stmt = stmtOrSql;
        }
        this.#checkNotClosed();
        return executeStmt(this.#database, stmt, this.#intMode);
    }
    // Executes multiple statements sequentially inside the transaction;
    // errors are wrapped with the index of the failing statement.
    async batch(stmts) {
        const resultSets = [];
        for (let i = 0; i < stmts.length; i++) {
            try {
                this.#checkNotClosed();
                const stmt = stmts[i];
                const normalizedStmt = Array.isArray(stmt)
                    ? { sql: stmt[0], args: stmt[1] || [] }
                    : stmt;
                resultSets.push(executeStmt(this.#database, normalizedStmt, this.#intMode));
            }
            catch (e) {
                if (e instanceof api_1.LibsqlBatchError) {
                    throw e;
                }
                if (e instanceof api_1.LibsqlError) {
                    throw new api_1.LibsqlBatchError(e.message, i, e.code, e.extendedCode, e.rawCode, e.cause instanceof Error ? e.cause : undefined);
                }
                throw e;
            }
        }
        return resultSets;
    }
    async executeMultiple(sql) {
        this.#checkNotClosed();
        return executeMultiple(this.#database, sql);
    }
    // Rolls back the transaction; a no-op if the connection is already gone.
    async rollback() {
        if (!this.#database.open) {
            return;
        }
        this.#checkNotClosed();
        executeStmt(this.#database, "ROLLBACK", this.#intMode);
    }
    async commit() {
        this.#checkNotClosed();
        executeStmt(this.#database, "COMMIT", this.#intMode);
    }
    // Closing an open transaction rolls it back.
    close() {
        if (this.#database.inTransaction) {
            executeStmt(this.#database, "ROLLBACK", this.#intMode);
        }
    }
    get closed() {
        return !this.#database.inTransaction;
    }
    #checkNotClosed() {
        if (this.closed) {
            throw new api_1.LibsqlError("The transaction is closed", "TRANSACTION_CLOSED");
        }
    }
}
exports.Sqlite3Transaction = Sqlite3Transaction;
/**
 * Prepares and executes one statement on the native driver, converting
 * arguments with valueToSql and results with rowFromSql. Named-argument
 * prefixes ("@", "$", ":") are stripped before binding.
 */
function executeStmt(db, stmt, intMode) {
    let sql;
    let args;
    if (typeof stmt === "string") {
        sql = stmt;
        args = [];
    }
    else {
        sql = stmt.sql;
        if (Array.isArray(stmt.args)) {
            args = stmt.args.map((value) => valueToSql(value, intMode));
        }
        else {
            args = {};
            for (const name in stmt.args) {
                // The driver expects bare names, without the SQL prefix character.
                const argName = name[0] === "@" || name[0] === "$" || name[0] === ":"
                    ? name.substring(1)
                    : name;
                args[argName] = valueToSql(stmt.args[name], intMode);
            }
        }
    }
    try {
        const sqlStmt = db.prepare(sql);
        // Return 64-bit integers as BigInt so intMode conversion is lossless.
        sqlStmt.safeIntegers(true);
        let returnsData = true;
        try {
            sqlStmt.raw(true);
        }
        catch {
            // raw() throws an exception if the statement does not return data
            returnsData = false;
        }
        if (returnsData) {
            const columns = Array.from(sqlStmt.columns().map((col) => col.name));
            const columnTypes = Array.from(sqlStmt.columns().map((col) => col.type ?? ""));
            const rows = sqlStmt.all(args).map((sqlRow) => {
                return rowFromSql(sqlRow, columns, intMode);
            });
            // TODO: can we get this info from better-sqlite3?
            const rowsAffected = 0;
            const lastInsertRowid = undefined;
            return new util_1.ResultSetImpl(columns, columnTypes, rows, rowsAffected, lastInsertRowid);
        }
        else {
            const info = sqlStmt.run(args);
            const rowsAffected = info.changes;
            const lastInsertRowid = BigInt(info.lastInsertRowid);
            return new util_1.ResultSetImpl([], [], [], rowsAffected, lastInsertRowid);
        }
    }
    catch (e) {
        throw mapSqliteError(e);
    }
}
/**
 * Builds a Row object from a raw positional SQL row: each value is exposed
 * both by numeric index and by column name. The "length" and index properties
 * are non-enumerable so that only named columns appear during enumeration;
 * the first column wins when two columns share a name.
 */
function rowFromSql(sqlRow, columns, intMode) {
    const row = {};
    // make sure that the "length" property is not enumerable
    Object.defineProperty(row, "length", { value: sqlRow.length });
    for (let i = 0; i < sqlRow.length; ++i) {
        const value = valueFromSql(sqlRow[i], intMode);
        Object.defineProperty(row, i, { value });
        const column = columns[i];
        if (!Object.hasOwn(row, column)) {
            Object.defineProperty(row, column, {
                value,
                enumerable: true,
                configurable: true,
                writable: true,
            });
        }
    }
    return row;
}
/**
 * Converts a value coming from the native driver into the client-facing
 * representation: BIGINTs are converted according to `intMode`, and BLOBs
 * (Buffers) are exposed as ArrayBuffers.
 *
 * @throws {RangeError} if `intMode` is "number" and the integer cannot be
 * represented exactly as a JavaScript number.
 */
function valueFromSql(sqlValue, intMode) {
    if (typeof sqlValue === "bigint") {
        if (intMode === "number") {
            if (sqlValue < minSafeBigint || sqlValue > maxSafeBigint) {
                throw new RangeError("Received integer which cannot be safely represented as a JavaScript number");
            }
            return Number(sqlValue);
        }
        else if (intMode === "bigint") {
            return sqlValue;
        }
        else if (intMode === "string") {
            return "" + sqlValue;
        }
        else {
            throw new Error("Invalid value for IntMode");
        }
    }
    else if (sqlValue instanceof node_buffer_1.Buffer) {
        // Fix: `buf.buffer` is the underlying (often pooled) ArrayBuffer, which
        // can be larger than the Buffer's view and contain unrelated bytes.
        // Slice out exactly the bytes belonging to this BLOB instead.
        return sqlValue.buffer.slice(sqlValue.byteOffset, sqlValue.byteOffset + sqlValue.byteLength);
    }
    return sqlValue;
}
// Bounds of integers exactly representable as a JavaScript number.
const minSafeBigint = -9007199254740991n;
const maxSafeBigint = 9007199254740991n;
/**
 * Converts a user-supplied argument into a value accepted by the native
 * driver. Booleans are encoded as integers (shaped by `intMode`), Dates as
 * epoch milliseconds, and ArrayBuffers as Buffers.
 *
 * @throws {RangeError} for non-finite numbers or bigints outside 64-bit range
 * @throws {TypeError} if the value is `undefined`
 */
function valueToSql(value, intMode) {
    switch (typeof value) {
        case "number":
            if (!Number.isFinite(value)) {
                throw new RangeError("Only finite numbers (not Infinity or NaN) can be passed as arguments");
            }
            return value;
        case "bigint":
            if (value < minInteger || value > maxInteger) {
                throw new RangeError("bigint is too large to be represented as a 64-bit integer and passed as argument");
            }
            return value;
        case "boolean":
            // Encode to the same representation that intMode produces for integers.
            switch (intMode) {
                case "bigint":
                    return value ? 1n : 0n;
                case "string":
                    return value ? "1" : "0";
                default:
                    return value ? 1 : 0;
            }
        case "undefined":
            throw new TypeError("undefined cannot be passed as argument to the database");
        default:
            if (value instanceof ArrayBuffer) {
                return node_buffer_1.Buffer.from(value);
            }
            if (value instanceof Date) {
                return value.valueOf();
            }
            return value;
    }
}
// 64-bit signed integer bounds accepted by SQLite.
const minInteger = -9223372036854775808n;
const maxInteger = 9223372036854775807n;
/**
 * Executes a string of one or more SQL statements via the driver's `exec`,
 * translating driver errors into LibsqlErrors via mapSqliteError.
 */
function executeMultiple(db, sql) {
    try {
        db.exec(sql);
    }
    catch (e) {
        throw mapSqliteError(e);
    }
}
/**
 * Wraps a native `SqliteError` in a LibsqlError, deriving the base error code
 * string from the raw numeric code; any other value passes through unchanged.
 */
function mapSqliteError(e) {
    if (e instanceof libsql_1.default.SqliteError) {
        const extendedCode = e.code;
        const code = mapToBaseCode(e.rawCode);
        return new api_1.LibsqlError(e.message, code, extendedCode, e.rawCode, e);
    }
    return e;
}
// Map SQLite raw error code to base error code string.
// Extended error codes are (base | (extended << 8)), so base = rawCode & 0xFF
function mapToBaseCode(rawCode) {
    if (rawCode === undefined) {
        return "SQLITE_UNKNOWN";
    }
    const baseCode = rawCode & 0xff;
    const name = sqliteErrorCodes[baseCode];
    return name !== undefined ? name : `SQLITE_UNKNOWN_${baseCode.toString()}`;
}
// Names of the primary (base) SQLite result codes, indexed by numeric code.
const sqliteErrorCodes = {
    1: "SQLITE_ERROR",
    2: "SQLITE_INTERNAL",
    3: "SQLITE_PERM",
    4: "SQLITE_ABORT",
    5: "SQLITE_BUSY",
    6: "SQLITE_LOCKED",
    7: "SQLITE_NOMEM",
    8: "SQLITE_READONLY",
    9: "SQLITE_INTERRUPT",
    10: "SQLITE_IOERR",
    11: "SQLITE_CORRUPT",
    12: "SQLITE_NOTFOUND",
    13: "SQLITE_FULL",
    14: "SQLITE_CANTOPEN",
    15: "SQLITE_PROTOCOL",
    16: "SQLITE_EMPTY",
    17: "SQLITE_SCHEMA",
    18: "SQLITE_TOOBIG",
    19: "SQLITE_CONSTRAINT",
    20: "SQLITE_MISMATCH",
    21: "SQLITE_MISUSE",
    22: "SQLITE_NOLFS",
    23: "SQLITE_AUTH",
    24: "SQLITE_FORMAT",
    25: "SQLITE_RANGE",
    26: "SQLITE_NOTADB",
    27: "SQLITE_NOTICE",
    28: "SQLITE_WARNING",
};

View File

@@ -0,0 +1,41 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __exportStar = (this && this.__exportStar) || function(m, exports) {
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
Object.defineProperty(exports, "__esModule", { value: true });
exports._createClient = exports.createClient = void 0;
const api_1 = require("@libsql/core/api");
const config_1 = require("@libsql/core/config");
const util_1 = require("@libsql/core/util");
const ws_js_1 = require("./ws.js");
const http_js_1 = require("./http.js");
__exportStar(require("@libsql/core/api"), exports);
// Creates a libsql Client for Web-standard environments from a user-supplied
// Config; the config is expanded/validated before dispatching on its scheme.
function createClient(config) {
    return _createClient((0, config_1.expandConfig)(config, true));
}
exports.createClient = createClient;
/** @private */
// Dispatches to the WebSocket or HTTP client; "file:" is rejected because the
// Web build has no local sqlite3 backend.
function _createClient(config) {
    if (config.scheme === "ws" || config.scheme === "wss") {
        return (0, ws_js_1._createClient)(config);
    }
    else if (config.scheme === "http" || config.scheme === "https") {
        return (0, http_js_1._createClient)(config);
    }
    else {
        throw new api_1.LibsqlError('The client that uses Web standard APIs supports only "libsql:", "wss:", "ws:", "https:" and "http:" URLs, ' +
            `got ${JSON.stringify(config.scheme + ":")}. For more information, please read ${util_1.supportedUrlLink}`, "URL_SCHEME_NOT_SUPPORTED");
    }
}
exports._createClient = _createClient;

View File

@@ -0,0 +1,395 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __exportStar = (this && this.__exportStar) || function(m, exports) {
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.WsTransaction = exports.WsClient = exports._createClient = exports.createClient = void 0;
const hrana = __importStar(require("@libsql/hrana-client"));
const api_1 = require("@libsql/core/api");
const config_1 = require("@libsql/core/config");
const hrana_js_1 = require("./hrana.js");
const sql_cache_js_1 = require("./sql_cache.js");
const uri_1 = require("@libsql/core/uri");
const util_1 = require("@libsql/core/util");
const promise_limit_1 = __importDefault(require("promise-limit"));
__exportStar(require("@libsql/core/api"), exports);
// Creates a WebSocket-mode libsql Client from a user-supplied Config; note
// that unlike the other transports, the config is expanded with
// preferHttp=false so "libsql:" URLs resolve to WebSockets.
function createClient(config) {
    return _createClient((0, config_1.expandConfig)(config, false));
}
exports.createClient = createClient;
/** @private */
// Validates the expanded config for WebSocket mode (scheme must be ws/wss,
// the ?tls flag must agree with the scheme), opens the hrana WebSocket client
// and wraps it in a WsClient. If the runtime has no WebSocket support, a
// helpful error pointing at the equivalent HTTP URL is thrown instead.
function _createClient(config) {
    if (config.scheme !== "wss" && config.scheme !== "ws") {
        throw new api_1.LibsqlError('The WebSocket client supports only "libsql:", "wss:" and "ws:" URLs, ' +
            `got ${JSON.stringify(config.scheme + ":")}. For more information, please read ${util_1.supportedUrlLink}`, "URL_SCHEME_NOT_SUPPORTED");
    }
    // Local-db encryption is only meaningful for file-backed clients.
    if (config.encryptionKey !== undefined) {
        throw new api_1.LibsqlError("Encryption key is not supported by the remote client.", "ENCRYPTION_KEY_NOT_SUPPORTED");
    }
    if (config.scheme === "ws" && config.tls) {
        throw new api_1.LibsqlError(`A "ws:" URL cannot opt into TLS by using ?tls=1`, "URL_INVALID");
    }
    else if (config.scheme === "wss" && !config.tls) {
        throw new api_1.LibsqlError(`A "wss:" URL cannot opt out of TLS by using ?tls=0`, "URL_INVALID");
    }
    const url = (0, uri_1.encodeBaseUrl)(config.scheme, config.authority, config.path);
    let client;
    try {
        client = hrana.openWs(url, config.authToken);
    }
    catch (e) {
        if (e instanceof hrana.WebSocketUnsupportedError) {
            // Suggest the HTTP transport with the corresponding scheme.
            const suggestedScheme = config.scheme === "wss" ? "https" : "http";
            const suggestedUrl = (0, uri_1.encodeBaseUrl)(suggestedScheme, config.authority, config.path);
            throw new api_1.LibsqlError("This environment does not support WebSockets, please switch to the HTTP client by using " +
                `a "${suggestedScheme}:" URL (${JSON.stringify(suggestedUrl)}). ` +
                `For more information, please read ${util_1.supportedUrlLink}`, "WEBSOCKETS_NOT_SUPPORTED");
        }
        throw (0, hrana_js_1.mapHranaError)(e);
    }
    return new WsClient(client, url, config.authToken, config.intMode, config.concurrency);
}
exports._createClient = _createClient;
// Connections older than this are proactively replaced with a fresh one
// (see `WsClient.#openStream()`).
const maxConnAgeMillis = 60 * 1000;
// Maximum number of SQL texts cached on the server per WebSocket connection.
const sqlCacheCapacity = 100;
/**
 * {@link Client} implementation that communicates with the server over a
 * WebSocket using the Hrana protocol.
 *
 * The client keeps one active connection (`#connState`) and transparently
 * replaces it when it grows older than `maxConnAgeMillis` or fails;
 * statements run on short-lived streams opened on the current connection,
 * with overall concurrency bounded by the `concurrency` limit.
 *
 * Fix applied: `close()` previously assigned `this.closed = true` twice;
 * the redundant second assignment was removed (no behavior change).
 */
class WsClient {
    #url;
    #authToken;
    #intMode;
    // State of the current connection. The `hrana.WsClient` inside may be closed at any moment due to an
    // asynchronous error.
    #connState;
    // If defined, this is a connection that will be used in the future, once it is ready.
    #futureConnState;
    // True after `close()`; reset to false by a successful `reconnect()`.
    closed;
    protocol;
    // NOTE(review): this private field appears unused within this class — candidate for removal.
    #isSchemaDatabase;
    // Limits the number of concurrently running operations (see `limit()`).
    #promiseLimitFunction;
    /** @private */
    constructor(client, url, authToken, intMode, concurrency) {
        this.#url = url;
        this.#authToken = authToken;
        this.#intMode = intMode;
        this.#connState = this.#openConn(client);
        this.#futureConnState = undefined;
        this.closed = false;
        this.protocol = "ws";
        this.#promiseLimitFunction = (0, promise_limit_1.default)(concurrency);
    }
    // Runs `fn` under the client-wide concurrency limiter.
    async limit(fn) {
        return this.#promiseLimitFunction(fn);
    }
    /** Executes a single statement; accepts a SQL string (+ optional args) or an `InStatement`. */
    async execute(stmtOrSql, args) {
        let stmt;
        if (typeof stmtOrSql === "string") {
            stmt = {
                sql: stmtOrSql,
                args: args || [],
            };
        }
        else {
            stmt = stmtOrSql;
        }
        return this.limit(async () => {
            const streamState = await this.#openStream();
            try {
                const hranaStmt = (0, hrana_js_1.stmtToHrana)(stmt);
                // Schedule all operations synchronously, so they will be pipelined and executed in a single
                // network roundtrip.
                streamState.conn.sqlCache.apply([hranaStmt]);
                const hranaRowsPromise = streamState.stream.query(hranaStmt);
                streamState.stream.closeGracefully();
                const hranaRowsResult = await hranaRowsPromise;
                return (0, hrana_js_1.resultSetFromHrana)(hranaRowsResult);
            }
            catch (e) {
                throw (0, hrana_js_1.mapHranaError)(e);
            }
            finally {
                this._closeStream(streamState);
            }
        });
    }
    /** Executes `stmts` as a single transactional batch in the given `mode`. */
    async batch(stmts, mode = "deferred") {
        return this.limit(async () => {
            const streamState = await this.#openStream();
            try {
                const normalizedStmts = stmts.map((stmt) => {
                    if (Array.isArray(stmt)) {
                        return {
                            sql: stmt[0],
                            args: stmt[1] || [],
                        };
                    }
                    return stmt;
                });
                const hranaStmts = normalizedStmts.map(hrana_js_1.stmtToHrana);
                const version = await streamState.conn.client.getVersion();
                // Schedule all operations synchronously, so they will be pipelined and executed in a single
                // network roundtrip.
                streamState.conn.sqlCache.apply(hranaStmts);
                const batch = streamState.stream.batch(version >= 3);
                const resultsPromise = (0, hrana_js_1.executeHranaBatch)(mode, version, batch, hranaStmts);
                const results = await resultsPromise;
                return results;
            }
            catch (e) {
                throw (0, hrana_js_1.mapHranaError)(e);
            }
            finally {
                this._closeStream(streamState);
            }
        });
    }
    /** Runs migration statements as a "deferred" batch with foreign keys disabled. */
    async migrate(stmts) {
        return this.limit(async () => {
            const streamState = await this.#openStream();
            try {
                const hranaStmts = stmts.map(hrana_js_1.stmtToHrana);
                const version = await streamState.conn.client.getVersion();
                // Schedule all operations synchronously, so they will be pipelined and executed in a single
                // network roundtrip.
                const batch = streamState.stream.batch(version >= 3);
                const resultsPromise = (0, hrana_js_1.executeHranaBatch)("deferred", version, batch, hranaStmts, true);
                const results = await resultsPromise;
                return results;
            }
            catch (e) {
                throw (0, hrana_js_1.mapHranaError)(e);
            }
            finally {
                this._closeStream(streamState);
            }
        });
    }
    /** Opens an interactive transaction; BEGIN is deferred to the first statement batch. */
    async transaction(mode = "write") {
        return this.limit(async () => {
            const streamState = await this.#openStream();
            try {
                const version = await streamState.conn.client.getVersion();
                // the BEGIN statement will be batched with the first statement on the transaction to save a
                // network roundtrip
                return new WsTransaction(this, streamState, mode, version);
            }
            catch (e) {
                this._closeStream(streamState);
                throw (0, hrana_js_1.mapHranaError)(e);
            }
        });
    }
    /** Executes a sequence of semicolon-separated SQL statements. */
    async executeMultiple(sql) {
        return this.limit(async () => {
            const streamState = await this.#openStream();
            try {
                // Schedule all operations synchronously, so they will be pipelined and executed in a single
                // network roundtrip.
                const promise = streamState.stream.sequence(sql);
                streamState.stream.closeGracefully();
                await promise;
            }
            catch (e) {
                throw (0, hrana_js_1.mapHranaError)(e);
            }
            finally {
                this._closeStream(streamState);
            }
        });
    }
    // Sync only makes sense for embedded replicas, not for a remote WebSocket client.
    sync() {
        throw new api_1.LibsqlError("sync not supported in ws mode", "SYNC_NOT_SUPPORTED");
    }
    // Opens a stream on the current connection, transparently rotating the
    // connection when it is too old and reconnecting when it has failed.
    async #openStream() {
        if (this.closed) {
            throw new api_1.LibsqlError("The client is closed", "CLIENT_CLOSED");
        }
        const now = new Date();
        const ageMillis = now.valueOf() - this.#connState.openTime.valueOf();
        if (ageMillis > maxConnAgeMillis &&
            this.#futureConnState === undefined) {
            // The existing connection is too old, let's open a new one.
            const futureConnState = this.#openConn();
            this.#futureConnState = futureConnState;
            // However, if we used `futureConnState` immediately, we would introduce additional latency,
            // because we would have to wait for the WebSocket handshake to complete, even though we may
            // have a perfectly good existing connection in `this.#connState`!
            //
            // So we wait until the `hrana.Client.getVersion()` operation completes (which happens when the
            // WebSocket handshake completes), and only then we replace `this.#connState` with
            // `futureConnState`, which is stored in `this.#futureConnState` in the meantime.
            futureConnState.client.getVersion().then((_version) => {
                if (this.#connState !== futureConnState) {
                    // We need to close `this.#connState` before we replace it. However, it is possible
                    // that `this.#connState` has already been replaced: see the code below.
                    if (this.#connState.streamStates.size === 0) {
                        this.#connState.client.close();
                    }
                    else {
                        // If there are existing streams on the connection, we must not close it, because
                        // these streams would be broken. The last stream to be closed will also close the
                        // connection in `_closeStream()`.
                    }
                }
                this.#connState = futureConnState;
                this.#futureConnState = undefined;
            }, (_e) => {
                // If the new connection could not be established, let's just ignore the error and keep
                // using the existing connection.
                this.#futureConnState = undefined;
            });
        }
        if (this.#connState.client.closed) {
            // An error happened on this connection and it has been closed. Let's try to seamlessly reconnect.
            try {
                if (this.#futureConnState !== undefined) {
                    // We are already in the process of opening a new connection, so let's just use it
                    // immediately.
                    this.#connState = this.#futureConnState;
                }
                else {
                    this.#connState = this.#openConn();
                }
            }
            catch (e) {
                throw (0, hrana_js_1.mapHranaError)(e);
            }
        }
        const connState = this.#connState;
        try {
            // Now we wait for the WebSocket handshake to complete (if it hasn't completed yet). Note that
            // this does not increase latency, because any messages that we would send on the WebSocket before
            // the handshake would be queued until the handshake is completed anyway.
            if (connState.useSqlCache === undefined) {
                connState.useSqlCache =
                    (await connState.client.getVersion()) >= 2;
                if (connState.useSqlCache) {
                    connState.sqlCache.capacity = sqlCacheCapacity;
                }
            }
            const stream = connState.client.openStream();
            stream.intMode = this.#intMode;
            const streamState = { conn: connState, stream };
            connState.streamStates.add(streamState);
            return streamState;
        }
        catch (e) {
            throw (0, hrana_js_1.mapHranaError)(e);
        }
    }
    // Wraps `client` (or a freshly opened WebSocket) in a new connection state.
    #openConn(client) {
        try {
            client ??= hrana.openWs(this.#url, this.#authToken);
            return {
                client,
                useSqlCache: undefined,
                sqlCache: new sql_cache_js_1.SqlCache(client, 0),
                openTime: new Date(),
                streamStates: new Set(),
            };
        }
        catch (e) {
            throw (0, hrana_js_1.mapHranaError)(e);
        }
    }
    /** Tears down all existing connections/streams and establishes a fresh connection; reopens a closed client. */
    async reconnect() {
        try {
            for (const st of Array.from(this.#connState.streamStates)) {
                try {
                    st.stream.close();
                }
                catch { }
            }
            this.#connState.client.close();
        }
        catch { }
        if (this.#futureConnState) {
            try {
                this.#futureConnState.client.close();
            }
            catch { }
            this.#futureConnState = undefined;
        }
        const next = this.#openConn();
        const version = await next.client.getVersion();
        next.useSqlCache = version >= 2;
        if (next.useSqlCache) {
            next.sqlCache.capacity = sqlCacheCapacity;
        }
        this.#connState = next;
        this.closed = false;
    }
    // Closes `streamState` and, if it was the last stream on an already-replaced
    // connection, closes that connection too.
    _closeStream(streamState) {
        streamState.stream.close();
        const connState = streamState.conn;
        connState.streamStates.delete(streamState);
        if (connState.streamStates.size === 0 &&
            connState !== this.#connState) {
            // We are not using this connection anymore and this is the last stream that was using it, so we
            // must close it now.
            connState.client.close();
        }
    }
    /** Closes the client, the current connection, and any pending future connection. */
    close() {
        this.#connState.client.close();
        this.closed = true;
        if (this.#futureConnState) {
            try {
                this.#futureConnState.client.close();
            }
            catch { }
            this.#futureConnState = undefined;
        }
    }
}
exports.WsClient = WsClient;
/**
 * Transaction running on a dedicated stream of a {@link WsClient}. The heavy
 * lifting (BEGIN/COMMIT/ROLLBACK, batching) lives in `HranaTransaction`;
 * this subclass only wires in the stream and the per-connection SQL cache.
 */
class WsTransaction extends hrana_js_1.HranaTransaction {
    #owner;
    #state;
    /** @private */
    constructor(client, state, mode, version) {
        super(mode, version);
        this.#owner = client;
        this.#state = state;
    }
    /** @private */
    _getStream() {
        return this.#state.stream;
    }
    /** @private */
    _getSqlCache() {
        return this.#state.conn.sqlCache;
    }
    close() {
        // Let the owning client release the stream (and possibly the connection).
        this.#owner._closeStream(this.#state);
    }
    get closed() {
        const { stream } = this.#state;
        return stream.closed;
    }
}
exports.WsTransaction = WsTransaction;

View File

@@ -0,0 +1,23 @@
import * as hrana from "@libsql/hrana-client";
import type { InStatement, ResultSet, Transaction, TransactionMode, InArgs } from "@libsql/core/api";
import type { SqlCache } from "./sql_cache.js";
/**
 * Abstract base for transactions driven over the Hrana protocol; concrete
 * subclasses provide the underlying stream and the SQL text cache.
 */
export declare abstract class HranaTransaction implements Transaction {
    #private;
    /** @private */
    constructor(mode: TransactionMode, version: hrana.ProtocolVersion);
    /** @private Returns the Hrana stream that carries this transaction's requests. */
    abstract _getStream(): hrana.Stream;
    /** @private Returns the SQL text cache associated with the stream. */
    abstract _getSqlCache(): SqlCache;
    /** Closes the transaction and releases its stream. */
    abstract close(): void;
    /** True once the underlying stream has been closed. */
    abstract get closed(): boolean;
    execute(stmt: InStatement): Promise<ResultSet>;
    batch(stmts: Array<InStatement>): Promise<Array<ResultSet>>;
    executeMultiple(sql: string): Promise<void>;
    rollback(): Promise<void>;
    commit(): Promise<void>;
}
/** Runs `hranaStmts` wrapped in BEGIN/COMMIT (ROLLBACK on failure) as one Hrana batch. */
export declare function executeHranaBatch(mode: TransactionMode, version: hrana.ProtocolVersion, batch: hrana.Batch, hranaStmts: Array<hrana.Stmt>, disableForeignKeys?: boolean): Promise<Array<ResultSet>>;
/** Converts an {@link InStatement} (string, tuple, or object form) into a `hrana.Stmt`. */
export declare function stmtToHrana(stmt: InStatement | [string, InArgs?]): hrana.Stmt;
/** Converts a Hrana rows result into a libsql {@link ResultSet}. */
export declare function resultSetFromHrana(hranaRows: hrana.RowsResult): ResultSet;
/** Maps `hrana.ClientError` values to `LibsqlError`; passes other values through unchanged. */
export declare function mapHranaError(e: unknown): unknown;

View File

@@ -0,0 +1,341 @@
import * as hrana from "@libsql/hrana-client";
import { LibsqlError, LibsqlBatchError } from "@libsql/core/api";
import { transactionModeToBegin, ResultSetImpl } from "@libsql/core/util";
/**
 * Shared {@link Transaction} implementation for the Hrana-based (WebSocket and
 * HTTP) clients. Subclasses supply the stream and SQL cache via `_getStream()`
 * and `_getSqlCache()`.
 *
 * The BEGIN statement is sent lazily: it is batched with the first statements
 * executed on the transaction to save a network roundtrip.
 */
export class HranaTransaction {
    // Transaction mode, used to pick the BEGIN variant (see `transactionModeToBegin`).
    #mode;
    // Negotiated Hrana protocol version; >= 3 enables batch conditions like `isAutocommit`.
    #version;
    // Promise that is resolved when the BEGIN statement completes, or `undefined` if we haven't executed the
    // BEGIN statement yet.
    #started;
    /** @private */
    constructor(mode, version) {
        this.#mode = mode;
        this.#version = version;
        this.#started = undefined;
    }
    /** Executes a single statement inside the transaction. */
    execute(stmt) {
        return this.batch([stmt]).then((results) => results[0]);
    }
    /**
     * Executes `stmts` sequentially inside the transaction, pipelined as one
     * Hrana batch. If the transaction hasn't started yet, a BEGIN step is
     * prepended to the batch; each statement runs only if the previous step
     * succeeded (and, on protocol >= 3, only while still inside a transaction).
     */
    async batch(stmts) {
        const stream = this._getStream();
        if (stream.closed) {
            throw new LibsqlError("Cannot execute statements because the transaction is closed", "TRANSACTION_CLOSED");
        }
        try {
            const hranaStmts = stmts.map(stmtToHrana);
            let rowsPromises;
            if (this.#started === undefined) {
                // The transaction hasn't started yet, so we need to send the BEGIN statement in a batch with
                // `hranaStmts`.
                this._getSqlCache().apply(hranaStmts);
                const batch = stream.batch(this.#version >= 3);
                const beginStep = batch.step();
                const beginPromise = beginStep.run(transactionModeToBegin(this.#mode));
                // Execute the `hranaStmts` only if the BEGIN succeeded, to make sure that we don't execute it
                // outside of a transaction.
                let lastStep = beginStep;
                rowsPromises = hranaStmts.map((hranaStmt) => {
                    const stmtStep = batch
                        .step()
                        .condition(hrana.BatchCond.ok(lastStep));
                    if (this.#version >= 3) {
                        // If the Hrana version supports it, make sure that we are still in a transaction
                        stmtStep.condition(hrana.BatchCond.not(hrana.BatchCond.isAutocommit(batch)));
                    }
                    const rowsPromise = stmtStep.query(hranaStmt);
                    rowsPromise.catch(() => undefined); // silence Node warning
                    lastStep = stmtStep;
                    return rowsPromise;
                });
                // `this.#started` is resolved successfully only if the batch and the BEGIN statement inside
                // of the batch are both successful.
                this.#started = batch
                    .execute()
                    .then(() => beginPromise)
                    .then(() => undefined);
                try {
                    await this.#started;
                }
                catch (e) {
                    // If the BEGIN failed, the transaction is unusable and we must close it. However, if the
                    // BEGIN suceeds and `hranaStmts` fail, the transaction is _not_ closed.
                    this.close();
                    throw e;
                }
            }
            else {
                if (this.#version < 3) {
                    // The transaction has started, so we must wait until the BEGIN statement completed to make
                    // sure that we don't execute `hranaStmts` outside of a transaction.
                    await this.#started;
                }
                else {
                    // The transaction has started, but we will use `hrana.BatchCond.isAutocommit()` to make
                    // sure that we don't execute `hranaStmts` outside of a transaction, so we don't have to
                    // wait for `this.#started`
                }
                this._getSqlCache().apply(hranaStmts);
                const batch = stream.batch(this.#version >= 3);
                let lastStep = undefined;
                rowsPromises = hranaStmts.map((hranaStmt) => {
                    const stmtStep = batch.step();
                    if (lastStep !== undefined) {
                        stmtStep.condition(hrana.BatchCond.ok(lastStep));
                    }
                    if (this.#version >= 3) {
                        stmtStep.condition(hrana.BatchCond.not(hrana.BatchCond.isAutocommit(batch)));
                    }
                    const rowsPromise = stmtStep.query(hranaStmt);
                    rowsPromise.catch(() => undefined); // silence Node warning
                    lastStep = stmtStep;
                    return rowsPromise;
                });
                await batch.execute();
            }
            // Collect the per-statement results; a skipped step (undefined rows)
            // means the step's condition failed, i.e. the transaction was rolled back.
            const resultSets = [];
            for (let i = 0; i < rowsPromises.length; i++) {
                try {
                    const rows = await rowsPromises[i];
                    if (rows === undefined) {
                        throw new LibsqlBatchError("Statement in a transaction was not executed, " +
                            "probably because the transaction has been rolled back", i, "TRANSACTION_CLOSED");
                    }
                    resultSets.push(resultSetFromHrana(rows));
                }
                catch (e) {
                    if (e instanceof LibsqlBatchError) {
                        throw e;
                    }
                    // Map hrana errors to LibsqlError first, then wrap in LibsqlBatchError
                    const mappedError = mapHranaError(e);
                    if (mappedError instanceof LibsqlError) {
                        throw new LibsqlBatchError(mappedError.message, i, mappedError.code, mappedError.extendedCode, mappedError.rawCode, mappedError.cause instanceof Error
                            ? mappedError.cause
                            : undefined);
                    }
                    throw mappedError;
                }
            }
            return resultSets;
        }
        catch (e) {
            throw mapHranaError(e);
        }
    }
    /** Executes a sequence of semicolon-separated SQL statements inside the transaction. */
    async executeMultiple(sql) {
        const stream = this._getStream();
        if (stream.closed) {
            throw new LibsqlError("Cannot execute statements because the transaction is closed", "TRANSACTION_CLOSED");
        }
        try {
            if (this.#started === undefined) {
                // If the transaction hasn't started yet, start it now
                this.#started = stream
                    .run(transactionModeToBegin(this.#mode))
                    .then(() => undefined);
                try {
                    await this.#started;
                }
                catch (e) {
                    this.close();
                    throw e;
                }
            }
            else {
                // Wait until the transaction has started
                await this.#started;
            }
            await stream.sequence(sql);
        }
        catch (e) {
            throw mapHranaError(e);
        }
    }
    /** Rolls the transaction back (no-op if it never started) and closes it. */
    async rollback() {
        try {
            const stream = this._getStream();
            if (stream.closed) {
                return;
            }
            if (this.#started !== undefined) {
                // We don't have to wait for the BEGIN statement to complete. If the BEGIN fails, we will
                // execute a ROLLBACK outside of an active transaction, which should be harmless.
            }
            else {
                // We did nothing in the transaction, so there is nothing to rollback.
                return;
            }
            // Pipeline the ROLLBACK statement and the stream close.
            const promise = stream.run("ROLLBACK").catch((e) => {
                throw mapHranaError(e);
            });
            stream.closeGracefully();
            await promise;
        }
        catch (e) {
            throw mapHranaError(e);
        }
        finally {
            // `this.close()` may close the `hrana.Client`, which aborts all pending stream requests, so we
            // must call it _after_ we receive the ROLLBACK response.
            // Also note that the current stream should already be closed, but we need to call `this.close()`
            // anyway, because it may need to do more cleanup.
            this.close();
        }
    }
    /** Commits the transaction; throws TRANSACTION_CLOSED if it is already closed. */
    async commit() {
        // (this method is analogous to `rollback()`)
        try {
            const stream = this._getStream();
            if (stream.closed) {
                throw new LibsqlError("Cannot commit the transaction because it is already closed", "TRANSACTION_CLOSED");
            }
            if (this.#started !== undefined) {
                // Make sure to execute the COMMIT only if the BEGIN was successful.
                await this.#started;
            }
            else {
                return;
            }
            // Pipeline the COMMIT statement and the stream close.
            const promise = stream.run("COMMIT").catch((e) => {
                throw mapHranaError(e);
            });
            stream.closeGracefully();
            await promise;
        }
        catch (e) {
            throw mapHranaError(e);
        }
        finally {
            this.close();
        }
    }
}
/**
 * Executes `hranaStmts` as a single Hrana batch wrapped in BEGIN/COMMIT, with
 * a conditional ROLLBACK step that fires if the COMMIT step did not run.
 * Each statement step runs only if the previous step succeeded (and, on
 * protocol version >= 3, only while still inside a transaction). When
 * `disableForeignKeys` is true, the batch is bracketed by
 * `PRAGMA foreign_keys=off/on` (used by `migrate()`).
 *
 * Returns one {@link ResultSet} per statement; throws `LibsqlBatchError`
 * (carrying the failing statement's index) if any statement failed or was
 * skipped because the transaction was rolled back.
 */
export async function executeHranaBatch(mode, version, batch, hranaStmts, disableForeignKeys = false) {
    if (disableForeignKeys) {
        batch.step().run("PRAGMA foreign_keys=off");
    }
    const beginStep = batch.step();
    const beginPromise = beginStep.run(transactionModeToBegin(mode));
    // Chain each statement onto the success of the previous step.
    let lastStep = beginStep;
    const stmtPromises = hranaStmts.map((hranaStmt) => {
        const stmtStep = batch.step().condition(hrana.BatchCond.ok(lastStep));
        if (version >= 3) {
            stmtStep.condition(hrana.BatchCond.not(hrana.BatchCond.isAutocommit(batch)));
        }
        const stmtPromise = stmtStep.query(hranaStmt);
        lastStep = stmtStep;
        return stmtPromise;
    });
    const commitStep = batch.step().condition(hrana.BatchCond.ok(lastStep));
    if (version >= 3) {
        commitStep.condition(hrana.BatchCond.not(hrana.BatchCond.isAutocommit(batch)));
    }
    const commitPromise = commitStep.run("COMMIT");
    // If the COMMIT step did not run (some earlier step failed), roll back.
    const rollbackStep = batch
        .step()
        .condition(hrana.BatchCond.not(hrana.BatchCond.ok(commitStep)));
    rollbackStep.run("ROLLBACK").catch((_) => undefined);
    if (disableForeignKeys) {
        batch.step().run("PRAGMA foreign_keys=on");
    }
    await batch.execute();
    const resultSets = [];
    await beginPromise;
    for (let i = 0; i < stmtPromises.length; i++) {
        try {
            const hranaRows = await stmtPromises[i];
            // `undefined` means the step's condition failed, i.e. it was skipped.
            if (hranaRows === undefined) {
                throw new LibsqlBatchError("Statement in a batch was not executed, probably because the transaction has been rolled back", i, "TRANSACTION_CLOSED");
            }
            resultSets.push(resultSetFromHrana(hranaRows));
        }
        catch (e) {
            if (e instanceof LibsqlBatchError) {
                throw e;
            }
            // Map hrana errors to LibsqlError first, then wrap in LibsqlBatchError
            const mappedError = mapHranaError(e);
            if (mappedError instanceof LibsqlError) {
                throw new LibsqlBatchError(mappedError.message, i, mappedError.code, mappedError.extendedCode, mappedError.rawCode, mappedError.cause instanceof Error
                    ? mappedError.cause
                    : undefined);
            }
            throw mappedError;
        }
    }
    await commitPromise;
    return resultSets;
}
/**
 * Converts an input statement — a plain SQL string, a `[sql, args]` tuple, or
 * an `{sql, args}` object — into a `hrana.Stmt` with its arguments bound.
 * Array args bind positionally; object args bind by name.
 */
export function stmtToHrana(stmt) {
    let sqlText;
    let boundArgs;
    if (typeof stmt === "string") {
        sqlText = stmt;
    }
    else if (Array.isArray(stmt)) {
        sqlText = stmt[0];
        boundArgs = stmt[1];
    }
    else {
        sqlText = stmt.sql;
        boundArgs = stmt.args;
    }
    const result = new hrana.Stmt(sqlText);
    if (boundArgs) {
        if (Array.isArray(boundArgs)) {
            result.bindIndexes(boundArgs);
        }
        else {
            Object.entries(boundArgs).forEach(([name, value]) => {
                result.bindName(name, value);
            });
        }
    }
    return result;
}
/**
 * Converts a Hrana rows result into the libsql {@link ResultSet} shape.
 * Column names/decltypes may be null in the protocol; they are normalized
 * to empty strings.
 */
export function resultSetFromHrana(hranaRows) {
    const columns = hranaRows.columnNames.map((c) => c ?? "");
    const columnTypes = hranaRows.columnDecltypes.map((c) => c ?? "");
    // Fix: the original `x !== undefined ? x : undefined` ternary was a no-op;
    // read the value directly.
    return new ResultSetImpl(columns, columnTypes, hranaRows.rows, hranaRows.affectedRowCount, hranaRows.lastInsertRowid);
}
/**
 * Translates `hrana.ClientError` instances into `LibsqlError` with a mapped
 * error code; any other value is returned unchanged.
 */
export function mapHranaError(e) {
    if (!(e instanceof hrana.ClientError)) {
        return e;
    }
    // TODO: Parse extendedCode once the SQL over HTTP protocol supports it
    return new LibsqlError(e.message, mapHranaErrorCode(e), undefined, undefined, e);
}
// Derives a libsql error code string from the concrete hrana error type.
// `ClosedError`s are unwrapped recursively so the root cause's code wins.
function mapHranaErrorCode(e) {
    if (e instanceof hrana.ResponseError && e.code !== undefined) {
        return e.code;
    }
    if (e instanceof hrana.ProtoError) {
        return "HRANA_PROTO_ERROR";
    }
    if (e instanceof hrana.ClosedError) {
        const inner = e.cause;
        return inner instanceof hrana.ClientError
            ? mapHranaErrorCode(inner)
            : "HRANA_CLOSED_ERROR";
    }
    if (e instanceof hrana.WebSocketError) {
        return "HRANA_WEBSOCKET_ERROR";
    }
    if (e instanceof hrana.HttpServerError) {
        return "SERVER_ERROR";
    }
    if (e instanceof hrana.ProtocolVersionError) {
        return "PROTOCOL_VERSION_ERROR";
    }
    if (e instanceof hrana.InternalError) {
        return "INTERNAL_ERROR";
    }
    return "UNKNOWN";
}

View File

@@ -0,0 +1,38 @@
import * as hrana from "@libsql/hrana-client";
import type { Config, Client } from "@libsql/core/api";
import type { InStatement, ResultSet, Transaction, IntMode, InArgs, Replicated } from "@libsql/core/api";
import { TransactionMode } from "@libsql/core/api";
import type { ExpandedConfig } from "@libsql/core/config";
import { HranaTransaction } from "./hrana.js";
import { SqlCache } from "./sql_cache.js";
export * from "@libsql/core/api";
/** Creates an HTTP-backed {@link Client} from a user-supplied {@link Config}. */
export declare function createClient(config: Config): Client;
/** @private */
export declare function _createClient(config: ExpandedConfig): Client;
/**
 * {@link Client} that talks to the server over HTTP using the Hrana protocol;
 * operations are pipelined to minimize the number of HTTP requests.
 */
export declare class HttpClient implements Client {
    #private;
    protocol: "http";
    /** @private */
    constructor(url: URL, authToken: string | undefined, intMode: IntMode, customFetch: Function | undefined, concurrency: number, remoteEncryptionKey: string | undefined);
    private limit;
    execute(stmtOrSql: InStatement | string, args?: InArgs): Promise<ResultSet>;
    batch(stmts: Array<InStatement | [string, InArgs?]>, mode?: TransactionMode): Promise<Array<ResultSet>>;
    migrate(stmts: Array<InStatement>): Promise<Array<ResultSet>>;
    transaction(mode?: TransactionMode): Promise<HttpTransaction>;
    executeMultiple(sql: string): Promise<void>;
    /** Not supported over HTTP; throws a SYNC_NOT_SUPPORTED LibsqlError. */
    sync(): Promise<Replicated>;
    close(): void;
    reconnect(): Promise<void>;
    get closed(): boolean;
}
/** Transaction running on a dedicated Hrana HTTP stream. */
export declare class HttpTransaction extends HranaTransaction implements Transaction {
    #private;
    /** @private */
    constructor(stream: hrana.HttpStream, mode: TransactionMode, version: hrana.ProtocolVersion);
    /** @private */
    _getStream(): hrana.Stream;
    /** @private */
    _getSqlCache(): SqlCache;
    close(): void;
    get closed(): boolean;
}

View File

@@ -0,0 +1,232 @@
import * as hrana from "@libsql/hrana-client";
import { LibsqlError } from "@libsql/core/api";
import { expandConfig } from "@libsql/core/config";
import { HranaTransaction, executeHranaBatch, stmtToHrana, resultSetFromHrana, mapHranaError, } from "./hrana.js";
import { SqlCache } from "./sql_cache.js";
import { encodeBaseUrl } from "@libsql/core/uri";
import { supportedUrlLink } from "@libsql/core/util";
import promiseLimit from "promise-limit";
export * from "@libsql/core/api";
/** Creates an HTTP-backed {@link Client} from the user-facing {@link Config}. */
export function createClient(config) {
    const expanded = expandConfig(config, true);
    return _createClient(expanded);
}
/**
 * @private
 * Validates the expanded config for the HTTP transport and constructs an
 * {@link HttpClient}. Throws `LibsqlError` on unsupported schemes, encryption
 * keys, or a TLS flag that contradicts the URL scheme.
 */
export function _createClient(config) {
    const { scheme } = config;
    if (scheme !== "https" && scheme !== "http") {
        throw new LibsqlError('The HTTP client supports only "libsql:", "https:" and "http:" URLs, ' +
            `got ${JSON.stringify(scheme + ":")}. For more information, please read ${supportedUrlLink}`, "URL_SCHEME_NOT_SUPPORTED");
    }
    if (config.encryptionKey !== undefined) {
        throw new LibsqlError("Encryption key is not supported by the remote client.", "ENCRYPTION_KEY_NOT_SUPPORTED");
    }
    // After the first check, `scheme` is either "http" or "https"; the TLS
    // flag must agree with the scheme.
    if (scheme === "http") {
        if (config.tls) {
            throw new LibsqlError(`A "http:" URL cannot opt into TLS by using ?tls=1`, "URL_INVALID");
        }
    }
    else if (!config.tls) {
        throw new LibsqlError(`A "https:" URL cannot opt out of TLS by using ?tls=0`, "URL_INVALID");
    }
    const baseUrl = encodeBaseUrl(scheme, config.authority, config.path);
    return new HttpClient(baseUrl, config.authToken, config.intMode, config.fetch, config.concurrency, config.remoteEncryptionKey);
}
const sqlCacheCapacity = 30;
export class HttpClient {
#client;
protocol;
#url;
#intMode;
#customFetch;
#concurrency;
#authToken;
#remoteEncryptionKey;
#promiseLimitFunction;
/** @private */
constructor(url, authToken, intMode, customFetch, concurrency, remoteEncryptionKey) {
this.#url = url;
this.#authToken = authToken;
this.#intMode = intMode;
this.#customFetch = customFetch;
this.#concurrency = concurrency;
this.#remoteEncryptionKey = remoteEncryptionKey;
this.#client = hrana.openHttp(this.#url, this.#authToken, this.#customFetch, remoteEncryptionKey);
this.#client.intMode = this.#intMode;
this.protocol = "http";
this.#promiseLimitFunction = promiseLimit(this.#concurrency);
}
async limit(fn) {
return this.#promiseLimitFunction(fn);
}
async execute(stmtOrSql, args) {
let stmt;
if (typeof stmtOrSql === "string") {
stmt = {
sql: stmtOrSql,
args: args || [],
};
}
else {
stmt = stmtOrSql;
}
return this.limit(async () => {
try {
const hranaStmt = stmtToHrana(stmt);
// Pipeline all operations, so `hrana.HttpClient` can open the stream, execute the statement and
// close the stream in a single HTTP request.
let rowsPromise;
const stream = this.#client.openStream();
try {
rowsPromise = stream.query(hranaStmt);
}
finally {
stream.closeGracefully();
}
const rowsResult = await rowsPromise;
return resultSetFromHrana(rowsResult);
}
catch (e) {
throw mapHranaError(e);
}
});
}
async batch(stmts, mode = "deferred") {
return this.limit(async () => {
try {
const normalizedStmts = stmts.map((stmt) => {
if (Array.isArray(stmt)) {
return {
sql: stmt[0],
args: stmt[1] || [],
};
}
return stmt;
});
const hranaStmts = normalizedStmts.map(stmtToHrana);
const version = await this.#client.getVersion();
// Pipeline all operations, so `hrana.HttpClient` can open the stream, execute the batch and
// close the stream in a single HTTP request.
let resultsPromise;
const stream = this.#client.openStream();
try {
// It makes sense to use a SQL cache even for a single batch, because it may contain the same
// statement repeated multiple times.
const sqlCache = new SqlCache(stream, sqlCacheCapacity);
sqlCache.apply(hranaStmts);
// TODO: we do not use a cursor here, because it would cause three roundtrips:
// 1. pipeline request to store SQL texts
// 2. cursor request
// 3. pipeline request to close the stream
const batch = stream.batch(false);
resultsPromise = executeHranaBatch(mode, version, batch, hranaStmts);
}
finally {
stream.closeGracefully();
}
const results = await resultsPromise;
return results;
}
catch (e) {
throw mapHranaError(e);
}
});
}
async migrate(stmts) {
return this.limit(async () => {
try {
const hranaStmts = stmts.map(stmtToHrana);
const version = await this.#client.getVersion();
// Pipeline all operations, so `hrana.HttpClient` can open the stream, execute the batch and
// close the stream in a single HTTP request.
let resultsPromise;
const stream = this.#client.openStream();
try {
const batch = stream.batch(false);
resultsPromise = executeHranaBatch("deferred", version, batch, hranaStmts, true);
}
finally {
stream.closeGracefully();
}
const results = await resultsPromise;
return results;
}
catch (e) {
throw mapHranaError(e);
}
});
}
async transaction(mode = "write") {
return this.limit(async () => {
try {
const version = await this.#client.getVersion();
return new HttpTransaction(this.#client.openStream(), mode, version);
}
catch (e) {
throw mapHranaError(e);
}
});
}
async executeMultiple(sql) {
return this.limit(async () => {
try {
// Pipeline all operations, so `hrana.HttpClient` can open the stream, execute the sequence and
// close the stream in a single HTTP request.
let promise;
const stream = this.#client.openStream();
try {
promise = stream.sequence(sql);
}
finally {
stream.closeGracefully();
}
await promise;
}
catch (e) {
throw mapHranaError(e);
}
});
}
sync() {
throw new LibsqlError("sync not supported in http mode", "SYNC_NOT_SUPPORTED");
}
close() {
this.#client.close();
}
async reconnect() {
try {
if (!this.closed) {
// Abort in-flight ops and free resources
this.#client.close();
}
}
finally {
// Recreate the underlying hrana client
this.#client = hrana.openHttp(this.#url, this.#authToken, this.#customFetch, this.#remoteEncryptionKey);
this.#client.intMode = this.#intMode;
}
}
get closed() {
return this.#client.closed;
}
}
/**
 * Transaction bound to a dedicated Hrana HTTP stream, with its own SQL text
 * cache. All transaction mechanics are inherited from `HranaTransaction`.
 */
export class HttpTransaction extends HranaTransaction {
    #httpStream;
    #cache;
    /** @private */
    constructor(stream, mode, version) {
        super(mode, version);
        this.#httpStream = stream;
        this.#cache = new SqlCache(stream, sqlCacheCapacity);
    }
    /** @private */
    _getStream() {
        return this.#httpStream;
    }
    /** @private */
    _getSqlCache() {
        return this.#cache;
    }
    close() {
        this.#httpStream.close();
    }
    get closed() {
        const stream = this.#httpStream;
        return stream.closed;
    }
}

View File

@@ -0,0 +1,7 @@
import type { Config, Client } from "@libsql/core/api";
export * from "@libsql/core/api";
/** Creates a {@link Client} object.
 *
 * You must pass at least an `url` in the {@link Config} object.
 *
 * @param config - Client configuration; the transport is chosen from the URL scheme.
 * @returns A connected {@link Client}.
 */
export declare function createClient(config: Config): Client;

View File

@@ -0,0 +1,23 @@
import { expandConfig } from "@libsql/core/config";
import { _createClient as _createSqlite3Client } from "./sqlite3.js";
import { _createClient as _createWsClient } from "./ws.js";
import { _createClient as _createHttpClient } from "./http.js";
export * from "@libsql/core/api";
/** Creates a {@link Client} object.
 *
 * You must pass at least an `url` in the {@link Config} object. The URL
 * scheme selects the transport (WebSocket, HTTP, or local SQLite).
 */
export function createClient(config) {
    const expanded = expandConfig(config, true);
    return _createClient(expanded);
}
// Dispatches to the transport-specific factory based on the URL scheme;
// anything that is not ws(s)/http(s) falls through to the local sqlite3 client.
function _createClient(config) {
    switch (config.scheme) {
        case "wss":
        case "ws":
            return _createWsClient(config);
        case "https":
        case "http":
            return _createHttpClient(config);
        default:
            return _createSqlite3Client(config);
    }
}

View File

@@ -0,0 +1,7 @@
import type * as hrana from "@libsql/hrana-client";
/**
 * LRU cache of SQL texts stored on the server as `hrana.Sql` objects, used to
 * avoid re-sending the same SQL string on every request.
 */
export declare class SqlCache {
    #private;
    /** Maximum number of cached SQL objects; a value <= 0 disables the cache. */
    capacity: number;
    constructor(owner: hrana.SqlOwner, capacity: number);
    /** Replaces string SQL in `hranaStmts` with cached server-side `hrana.Sql` objects. */
    apply(hranaStmts: Array<hrana.Stmt>): void;
}

View File

@@ -0,0 +1,87 @@
/**
 * LRU cache of SQL texts stored on the server (`hrana.Sql` objects). A cache
 * hit lets a statement reference a server-side SQL id instead of re-sending
 * the full SQL string.
 */
export class SqlCache {
    #owner;
    #sqls;
    capacity;
    constructor(owner, capacity) {
        this.#owner = owner;
        this.#sqls = new Lru();
        this.capacity = capacity;
    }
    // Replaces SQL strings with cached `hrana.Sql` objects in the statements in `hranaStmts`. After this
    // function returns, all `hranaStmts` are guaranteed to refer to valid (not closed) `hrana.Sql`
    // objects, but any _other_ `hrana.Sql` object may be invalidated (closed and removed from the
    // server). In practice this means the statements are only safe to use up to the first `await`,
    // because concurrent code may also use the cache and invalidate them.
    apply(hranaStmts) {
        if (this.capacity <= 0) {
            return;
        }
        // SQL objects handed out during this call must not be evicted within it.
        const inUse = new Set();
        for (const stmt of hranaStmts) {
            const text = stmt.sql;
            if (typeof text !== "string") {
                continue;
            }
            // Stored SQL cannot exceed 5kb.
            // https://github.com/tursodatabase/libsql/blob/e9d637e051685f92b0da43849507b5ef4232fbeb/libsql-server/src/hrana/http/request.rs#L10
            if (text.length >= 5000) {
                continue;
            }
            let cached = this.#sqls.get(text);
            if (cached === undefined) {
                // Evict least-recently-used entries until there is room for one more.
                while (this.#sqls.size + 1 > this.capacity) {
                    const [lruText, lruObj] = this.#sqls.peekLru();
                    if (inUse.has(lruObj)) {
                        // The candidate for eviction is already used by this batch,
                        // so it must not be closed.
                        break;
                    }
                    lruObj.close();
                    this.#sqls.delete(lruText);
                }
                if (this.#sqls.size + 1 <= this.capacity) {
                    cached = this.#owner.storeSql(text);
                    this.#sqls.set(text, cached);
                }
            }
            if (cached !== undefined) {
                stmt.sql = cached;
                inUse.add(cached);
            }
        }
    }
}
// Minimal LRU map. The backing `Map` keeps entries ordered from least to most
// recently used: `Map` preserves insertion order and `get` re-inserts the hit.
class Lru {
    #cache;
    constructor() {
        this.#cache = new Map();
    }
    get(key) {
        const hit = this.#cache.get(key);
        if (hit === undefined) {
            return undefined;
        }
        // Re-insert so this key becomes the most recently used.
        this.#cache.delete(key);
        this.#cache.set(key, hit);
        return hit;
    }
    set(key, value) {
        this.#cache.set(key, value);
    }
    // Returns the least-recently-used `[key, value]` entry, or `undefined` when empty.
    peekLru() {
        const first = this.#cache.entries().next();
        return first.done ? undefined : first.value;
    }
    delete(key) {
        this.#cache.delete(key);
    }
    get size() {
        return this.#cache.size;
    }
}

View File

@@ -0,0 +1,35 @@
import Database from "libsql";
import type { Config, IntMode, Client, Transaction, TransactionMode, ResultSet, InStatement, InArgs, Replicated } from "@libsql/core/api";
import type { ExpandedConfig } from "@libsql/core/config";
export * from "@libsql/core/api";
export declare function createClient(config: Config): Client;
/** @private */
export declare function _createClient(config: ExpandedConfig): Client;
/** Client backed by a local SQLite database file (the `libsql` native binding). */
export declare class Sqlite3Client implements Client {
    #private;
    // true once close() has been called; further operations throw CLIENT_CLOSED
    closed: boolean;
    protocol: "file";
    /** @private */
    constructor(path: string, options: Database.Options, db: Database.Database, intMode: IntMode);
    execute(stmtOrSql: InStatement | string, args?: InArgs): Promise<ResultSet>;
    batch(stmts: Array<InStatement | [string, InArgs?]>, mode?: TransactionMode): Promise<Array<ResultSet>>;
    migrate(stmts: Array<InStatement>): Promise<Array<ResultSet>>;
    transaction(mode?: TransactionMode): Promise<Transaction>;
    executeMultiple(sql: string): Promise<void>;
    sync(): Promise<Replicated>;
    reconnect(): Promise<void>;
    close(): void;
}
/** Interactive transaction over a dedicated local SQLite connection. */
export declare class Sqlite3Transaction implements Transaction {
    #private;
    /** @private */
    constructor(database: Database.Database, intMode: IntMode);
    execute(stmt: InStatement): Promise<ResultSet>;
    execute(sql: string, args?: InArgs): Promise<ResultSet>;
    batch(stmts: Array<InStatement | [string, InArgs?]>): Promise<Array<ResultSet>>;
    executeMultiple(sql: string): Promise<void>;
    rollback(): Promise<void>;
    commit(): Promise<void>;
    close(): void;
    // derived from the connection state, not stored explicitly
    get closed(): boolean;
}

View File

@@ -0,0 +1,476 @@
import Database from "libsql";
import { Buffer } from "node:buffer";
import { LibsqlError, LibsqlBatchError } from "@libsql/core/api";
import { expandConfig, isInMemoryConfig } from "@libsql/core/config";
import { supportedUrlLink, transactionModeToBegin, ResultSetImpl, } from "@libsql/core/util";
export * from "@libsql/core/api";
export function createClient(config) {
    // Normalize the user-facing config (preferHttp = true) before dispatching.
    const expanded = expandConfig(config, true);
    return _createClient(expanded);
}
/** @private */
export function _createClient(config) {
    // The local sqlite3 backend can only serve "file:" URLs.
    if (config.scheme !== "file") {
        throw new LibsqlError(`URL scheme ${JSON.stringify(config.scheme + ":")} is not supported by the local sqlite3 client. ` +
            `For more information, please read ${supportedUrlLink}`, "URL_SCHEME_NOT_SUPPORTED");
    }
    const { authority } = config;
    if (authority !== undefined) {
        // Only an empty host or "localhost" is acceptable in a file URL.
        const hostLower = authority.host.toLowerCase();
        if (hostLower !== "" && hostLower !== "localhost") {
            throw new LibsqlError(`Invalid host in file URL: ${JSON.stringify(authority.host)}. ` +
                'A "file:" URL with an absolute path should start with one slash ("file:/absolute/path.db") ' +
                'or with three slashes ("file:///absolute/path.db"). ' +
                `For more information, please read ${supportedUrlLink}`, "URL_INVALID");
        }
        if (authority.port !== undefined) {
            throw new LibsqlError("File URL cannot have a port", "URL_INVALID");
        }
        if (authority.userinfo !== undefined) {
            throw new LibsqlError("File URL cannot have username and password", "URL_INVALID");
        }
    }
    const inMemory = isInMemoryConfig(config);
    // Embedded replicas need a real file; in-memory databases cannot sync.
    if (inMemory && config.syncUrl) {
        throw new LibsqlError(`Embedded replica must use file for local db but URI with in-memory mode were provided instead: ${config.path}`, "URL_INVALID");
    }
    // note: we should prepend file scheme in order for SQLite3 to recognize :memory: connection query parameters
    const dbPath = inMemory ? `${config.scheme}:${config.path}` : config.path;
    const options = {
        authToken: config.authToken,
        encryptionKey: config.encryptionKey,
        remoteEncryptionKey: config.remoteEncryptionKey,
        syncUrl: config.syncUrl,
        syncPeriod: config.syncInterval,
        readYourWrites: config.readYourWrites,
        offline: config.offline,
    };
    const db = new Database(dbPath, options);
    // Fail fast if the database file cannot actually be opened.
    executeStmt(db, "SELECT 1 AS checkThatTheDatabaseCanBeOpened", config.intMode);
    return new Sqlite3Client(dbPath, options, db, config.intMode);
}
/**
 * Client backed by a local SQLite database file (the `libsql` native binding).
 *
 * The underlying connection may be handed off to a transaction (see
 * `transaction()`), in which case a fresh connection is lazily re-created by
 * `#getDb()` on the next operation.
 */
export class Sqlite3Client {
    #path;
    #options;
    #db;
    #intMode;
    closed;
    protocol;
    /** @private */
    constructor(path, options, db, intMode) {
        this.#path = path;
        this.#options = options;
        this.#db = db;
        this.#intMode = intMode;
        this.closed = false;
        this.protocol = "file";
    }
    /**
     * Executes a single statement.
     * Accepts either an InStatement object or SQL text plus optional args.
     */
    async execute(stmtOrSql, args) {
        let stmt;
        if (typeof stmtOrSql === "string") {
            stmt = {
                sql: stmtOrSql,
                args: args || [],
            };
        }
        else {
            stmt = stmtOrSql;
        }
        this.#checkNotClosed();
        return executeStmt(this.#getDb(), stmt, this.#intMode);
    }
    /**
     * Executes a list of statements inside one transaction.
     * On failure the transaction is rolled back and a LibsqlBatchError carrying
     * the index of the failing statement is thrown.
     */
    async batch(stmts, mode = "deferred") {
        this.#checkNotClosed();
        const db = this.#getDb();
        try {
            executeStmt(db, transactionModeToBegin(mode), this.#intMode);
            const resultSets = [];
            for (let i = 0; i < stmts.length; i++) {
                try {
                    // A prior statement may have implicitly rolled the transaction back.
                    if (!db.inTransaction) {
                        throw new LibsqlBatchError("The transaction has been rolled back", i, "TRANSACTION_CLOSED");
                    }
                    const stmt = stmts[i];
                    const normalizedStmt = Array.isArray(stmt)
                        ? { sql: stmt[0], args: stmt[1] || [] }
                        : stmt;
                    resultSets.push(executeStmt(db, normalizedStmt, this.#intMode));
                }
                catch (e) {
                    if (e instanceof LibsqlBatchError) {
                        throw e;
                    }
                    // Wrap plain LibsqlErrors so callers learn which statement failed.
                    if (e instanceof LibsqlError) {
                        throw new LibsqlBatchError(e.message, i, e.code, e.extendedCode, e.rawCode, e.cause instanceof Error ? e.cause : undefined);
                    }
                    throw e;
                }
            }
            executeStmt(db, "COMMIT", this.#intMode);
            return resultSets;
        }
        finally {
            // Roll back anything left open (error paths).
            if (db.inTransaction) {
                executeStmt(db, "ROLLBACK", this.#intMode);
            }
        }
    }
    /**
     * Runs migration statements in one deferred transaction with foreign key
     * enforcement temporarily disabled; enforcement is restored afterwards.
     */
    async migrate(stmts) {
        this.#checkNotClosed();
        const db = this.#getDb();
        try {
            executeStmt(db, "PRAGMA foreign_keys=off", this.#intMode);
            executeStmt(db, transactionModeToBegin("deferred"), this.#intMode);
            const resultSets = [];
            for (let i = 0; i < stmts.length; i++) {
                try {
                    if (!db.inTransaction) {
                        throw new LibsqlBatchError("The transaction has been rolled back", i, "TRANSACTION_CLOSED");
                    }
                    resultSets.push(executeStmt(db, stmts[i], this.#intMode));
                }
                catch (e) {
                    if (e instanceof LibsqlBatchError) {
                        throw e;
                    }
                    if (e instanceof LibsqlError) {
                        throw new LibsqlBatchError(e.message, i, e.code, e.extendedCode, e.rawCode, e.cause instanceof Error ? e.cause : undefined);
                    }
                    throw e;
                }
            }
            executeStmt(db, "COMMIT", this.#intMode);
            return resultSets;
        }
        finally {
            if (db.inTransaction) {
                executeStmt(db, "ROLLBACK", this.#intMode);
            }
            executeStmt(db, "PRAGMA foreign_keys=on", this.#intMode);
        }
    }
    /**
     * Starts an interactive transaction. The current connection is handed over
     * to the transaction; this client lazily opens a new one on next use.
     */
    async transaction(mode = "write") {
        // Fix: every other public method rejects a closed client; without this
        // check a closed client could silently open a new connection here.
        this.#checkNotClosed();
        const db = this.#getDb();
        executeStmt(db, transactionModeToBegin(mode), this.#intMode);
        this.#db = null; // A new connection will be lazily created on next use
        return new Sqlite3Transaction(db, this.#intMode);
    }
    /** Executes multiple semicolon-separated statements as one script. */
    async executeMultiple(sql) {
        this.#checkNotClosed();
        const db = this.#getDb();
        try {
            return executeMultiple(db, sql);
        }
        finally {
            // The script may have left a transaction open; clean it up.
            if (db.inTransaction) {
                executeStmt(db, "ROLLBACK", this.#intMode);
            }
        }
    }
    /** Syncs an embedded replica with its remote; returns replication progress. */
    async sync() {
        this.#checkNotClosed();
        const rep = await this.#getDb().sync();
        return {
            frames_synced: rep.frames_synced,
            frame_no: rep.frame_no,
        };
    }
    /** Drops the current connection (if any) and opens a fresh one. */
    async reconnect() {
        try {
            if (!this.closed && this.#db !== null) {
                this.#db.close();
            }
        }
        finally {
            this.#db = new Database(this.#path, this.#options);
            this.closed = false;
        }
    }
    close() {
        this.closed = true;
        if (this.#db !== null) {
            this.#db.close();
            this.#db = null;
        }
    }
    #checkNotClosed() {
        if (this.closed) {
            throw new LibsqlError("The client is closed", "CLIENT_CLOSED");
        }
    }
    // Lazily creates the database connection and returns it
    #getDb() {
        if (this.#db === null) {
            this.#db = new Database(this.#path, this.#options);
        }
        return this.#db;
    }
}
/**
 * Interactive transaction that owns a dedicated SQLite connection.
 * "Closed" is derived from the connection's transaction state rather than
 * being tracked explicitly.
 */
export class Sqlite3Transaction {
    #database;
    #intMode;
    /** @private */
    constructor(database, intMode) {
        this.#database = database;
        this.#intMode = intMode;
    }
    // Runs one statement inside this transaction.
    async execute(stmtOrSql, args) {
        const stmt = typeof stmtOrSql === "string"
            ? { sql: stmtOrSql, args: args || [] }
            : stmtOrSql;
        this.#checkNotClosed();
        return executeStmt(this.#database, stmt, this.#intMode);
    }
    // Runs several statements sequentially; a failing statement is reported
    // with its index via LibsqlBatchError.
    async batch(stmts) {
        const collected = [];
        for (let index = 0; index < stmts.length; index++) {
            try {
                this.#checkNotClosed();
                const entry = stmts[index];
                const stmt = Array.isArray(entry)
                    ? { sql: entry[0], args: entry[1] || [] }
                    : entry;
                collected.push(executeStmt(this.#database, stmt, this.#intMode));
            }
            catch (e) {
                if (e instanceof LibsqlBatchError) {
                    throw e;
                }
                if (e instanceof LibsqlError) {
                    throw new LibsqlBatchError(e.message, index, e.code, e.extendedCode, e.rawCode, e.cause instanceof Error ? e.cause : undefined);
                }
                throw e;
            }
        }
        return collected;
    }
    async executeMultiple(sql) {
        this.#checkNotClosed();
        return executeMultiple(this.#database, sql);
    }
    async rollback() {
        // Rolling back on an already-closed database handle is a no-op.
        if (!this.#database.open) {
            return;
        }
        this.#checkNotClosed();
        executeStmt(this.#database, "ROLLBACK", this.#intMode);
    }
    async commit() {
        this.#checkNotClosed();
        executeStmt(this.#database, "COMMIT", this.#intMode);
    }
    close() {
        // Abandon any pending work; committing must be done explicitly.
        if (this.#database.inTransaction) {
            executeStmt(this.#database, "ROLLBACK", this.#intMode);
        }
    }
    get closed() {
        return !this.#database.inTransaction;
    }
    #checkNotClosed() {
        if (this.closed) {
            throw new LibsqlError("The transaction is closed", "TRANSACTION_CLOSED");
        }
    }
}
// Prepares and runs one statement on a better-sqlite3-style connection,
// converting arguments and result values between libsql and sqlite types.
// Throws LibsqlError (via mapSqliteError) on sqlite failures.
function executeStmt(db, stmt, intMode) {
    let sql;
    let args;
    if (typeof stmt === "string") {
        // Bare SQL text: no arguments.
        sql = stmt;
        args = [];
    }
    else {
        sql = stmt.sql;
        if (Array.isArray(stmt.args)) {
            // Positional arguments.
            args = stmt.args.map((value) => valueToSql(value, intMode));
        }
        else {
            // Named arguments: strip a leading "@", "$" or ":" sigil, since the
            // driver expects bare parameter names.
            args = {};
            for (const name in stmt.args) {
                const argName = name[0] === "@" || name[0] === "$" || name[0] === ":"
                    ? name.substring(1)
                    : name;
                args[argName] = valueToSql(stmt.args[name], intMode);
            }
        }
    }
    try {
        const sqlStmt = db.prepare(sql);
        // Have the driver return integers as bigint so no precision is lost.
        sqlStmt.safeIntegers(true);
        let returnsData = true;
        try {
            sqlStmt.raw(true);
        }
        catch {
            // raw() throws an exception if the statement does not return data
            returnsData = false;
        }
        if (returnsData) {
            const columns = Array.from(sqlStmt.columns().map((col) => col.name));
            const columnTypes = Array.from(sqlStmt.columns().map((col) => col.type ?? ""));
            const rows = sqlStmt.all(args).map((sqlRow) => {
                return rowFromSql(sqlRow, columns, intMode);
            });
            // TODO: can we get this info from better-sqlite3?
            const rowsAffected = 0;
            const lastInsertRowid = undefined;
            return new ResultSetImpl(columns, columnTypes, rows, rowsAffected, lastInsertRowid);
        }
        else {
            // Non-query statement: report change count and last rowid instead.
            const info = sqlStmt.run(args);
            const rowsAffected = info.changes;
            const lastInsertRowid = BigInt(info.lastInsertRowid);
            return new ResultSetImpl([], [], [], rowsAffected, lastInsertRowid);
        }
    }
    catch (e) {
        throw mapSqliteError(e);
    }
}
// Builds a Row object that is addressable both by numeric index and by column
// name, from one raw result array.
function rowFromSql(sqlRow, columns, intMode) {
    const row = {};
    // Expose "length" without making it show up during enumeration.
    Object.defineProperty(row, "length", { value: sqlRow.length });
    for (let idx = 0; idx < sqlRow.length; ++idx) {
        const converted = valueFromSql(sqlRow[idx], intMode);
        // Numeric index access, non-enumerable like the length property.
        Object.defineProperty(row, idx, { value: converted });
        const columnName = columns[idx];
        // With duplicate column names the first occurrence wins; later ones
        // remain reachable by index only.
        if (!Object.hasOwn(row, columnName)) {
            Object.defineProperty(row, columnName, {
                value: converted,
                enumerable: true,
                configurable: true,
                writable: true,
            });
        }
    }
    return row;
}
// Bounds of the bigint range that a JS number can represent exactly
// (Number.MIN_SAFE_INTEGER / Number.MAX_SAFE_INTEGER).
const minSafeBigint = -9007199254740991n;
const maxSafeBigint = 9007199254740991n;
// Converts one raw sqlite value to the libsql representation, honoring the
// configured integer mode.
function valueFromSql(sqlValue, intMode) {
    if (typeof sqlValue === "bigint") {
        switch (intMode) {
            case "number":
                if (sqlValue < minSafeBigint || sqlValue > maxSafeBigint) {
                    throw new RangeError("Received integer which cannot be safely represented as a JavaScript number");
                }
                return Number(sqlValue);
            case "bigint":
                return sqlValue;
            case "string":
                return "" + sqlValue;
            default:
                throw new Error("Invalid value for IntMode");
        }
    }
    if (sqlValue instanceof Buffer) {
        // Blobs are surfaced as the Buffer's backing ArrayBuffer.
        return sqlValue.buffer;
    }
    return sqlValue;
}
// Signed 64-bit integer bounds accepted by sqlite.
const minInteger = -9223372036854775808n;
const maxInteger = 9223372036854775807n;
// Converts one user-supplied argument into a value the sqlite driver accepts.
function valueToSql(value, intMode) {
    if (typeof value === "number") {
        if (!Number.isFinite(value)) {
            throw new RangeError("Only finite numbers (not Infinity or NaN) can be passed as arguments");
        }
        return value;
    }
    if (typeof value === "bigint") {
        if (value < minInteger || value > maxInteger) {
            throw new RangeError("bigint is too large to be represented as a 64-bit integer and passed as argument");
        }
        return value;
    }
    if (typeof value === "boolean") {
        // Booleans are stored as 0/1; the representation follows intMode.
        if (intMode === "bigint") {
            return value ? 1n : 0n;
        }
        if (intMode === "string") {
            return value ? "1" : "0";
        }
        return value ? 1 : 0;
    }
    if (value instanceof ArrayBuffer) {
        return Buffer.from(value);
    }
    if (value instanceof Date) {
        // Dates become milliseconds since the Unix epoch.
        return value.valueOf();
    }
    if (value === undefined) {
        throw new TypeError("undefined cannot be passed as argument to the database");
    }
    return value;
}
// Runs a multi-statement SQL script, translating driver errors into the
// public LibsqlError shape.
function executeMultiple(db, sql) {
    try {
        db.exec(sql);
    }
    catch (e) {
        throw mapSqliteError(e);
    }
}
// Wraps sqlite driver errors in LibsqlError; anything else passes through.
function mapSqliteError(e) {
    if (!(e instanceof Database.SqliteError)) {
        return e;
    }
    const baseCode = mapToBaseCode(e.rawCode);
    // e.code carries the extended code string; e.rawCode the numeric code.
    return new LibsqlError(e.message, baseCode, e.code, e.rawCode, e);
}
// Map SQLite raw error code to base error code string.
// Extended error codes are (base | (extended << 8)), so base = rawCode & 0xFF
function mapToBaseCode(rawCode) {
    if (rawCode === undefined) {
        return "SQLITE_UNKNOWN";
    }
    const base = rawCode & 0xff;
    const known = sqliteErrorCodes[base];
    return known !== undefined ? known : `SQLITE_UNKNOWN_${base.toString()}`;
}
// Primary (non-extended) sqlite result codes.
const sqliteErrorCodes = {
    1: "SQLITE_ERROR",
    2: "SQLITE_INTERNAL",
    3: "SQLITE_PERM",
    4: "SQLITE_ABORT",
    5: "SQLITE_BUSY",
    6: "SQLITE_LOCKED",
    7: "SQLITE_NOMEM",
    8: "SQLITE_READONLY",
    9: "SQLITE_INTERRUPT",
    10: "SQLITE_IOERR",
    11: "SQLITE_CORRUPT",
    12: "SQLITE_NOTFOUND",
    13: "SQLITE_FULL",
    14: "SQLITE_CANTOPEN",
    15: "SQLITE_PROTOCOL",
    16: "SQLITE_EMPTY",
    17: "SQLITE_SCHEMA",
    18: "SQLITE_TOOBIG",
    19: "SQLITE_CONSTRAINT",
    20: "SQLITE_MISMATCH",
    21: "SQLITE_MISUSE",
    22: "SQLITE_NOLFS",
    23: "SQLITE_AUTH",
    24: "SQLITE_FORMAT",
    25: "SQLITE_RANGE",
    26: "SQLITE_NOTADB",
    27: "SQLITE_NOTICE",
    28: "SQLITE_WARNING",
};

View File

@@ -0,0 +1,6 @@
import type { Config, Client } from "@libsql/core/api";
import type { ExpandedConfig } from "@libsql/core/config";
export * from "@libsql/core/api";
/** Creates a client built on Web standard APIs, dispatching to the HTTP or WebSocket implementation based on the URL scheme. */
export declare function createClient(config: Config): Client;
/** @private */
export declare function _createClient(config: ExpandedConfig): Client;

View File

@@ -0,0 +1,22 @@
import { LibsqlError } from "@libsql/core/api";
import { expandConfig } from "@libsql/core/config";
import { supportedUrlLink } from "@libsql/core/util";
import { _createClient as _createWsClient } from "./ws.js";
import { _createClient as _createHttpClient } from "./http.js";
export * from "@libsql/core/api";
export function createClient(config) {
    // Normalize the user-facing config (preferHttp = true) before dispatching.
    const expanded = expandConfig(config, true);
    return _createClient(expanded);
}
/** @private */
export function _createClient(config) {
    // Dispatch on the (already expanded) URL scheme.
    switch (config.scheme) {
        case "ws":
        case "wss":
            return _createWsClient(config);
        case "http":
        case "https":
            return _createHttpClient(config);
        default:
            throw new LibsqlError('The client that uses Web standard APIs supports only "libsql:", "wss:", "ws:", "https:" and "http:" URLs, ' +
                `got ${JSON.stringify(config.scheme + ":")}. For more information, please read ${supportedUrlLink}`, "URL_SCHEME_NOT_SUPPORTED");
    }
}

View File

@@ -0,0 +1,49 @@
import * as hrana from "@libsql/hrana-client";
import type { Config, IntMode, Client, Transaction, ResultSet, InStatement, InArgs, Replicated } from "@libsql/core/api";
import { TransactionMode } from "@libsql/core/api";
import type { ExpandedConfig } from "@libsql/core/config";
import { HranaTransaction } from "./hrana.js";
import { SqlCache } from "./sql_cache.js";
export * from "@libsql/core/api";
/** Creates a client that talks to the server over a WebSocket using the hrana protocol. */
export declare function createClient(config: Config): WsClient;
/** @private */
export declare function _createClient(config: ExpandedConfig): WsClient;
/** State shared by all streams multiplexed over one WebSocket connection. */
interface ConnState {
    client: hrana.WsClient;
    // undefined until the protocol version handshake determines whether
    // server-side SQL text caching is available
    useSqlCache: boolean | undefined;
    sqlCache: SqlCache;
    // when the connection was opened; used to rotate aged connections
    openTime: Date;
    // open streams on this connection; the last one closed also closes a
    // superseded connection
    streamStates: Set<StreamState>;
}
/** One open stream together with the connection that owns it. */
interface StreamState {
    conn: ConnState;
    stream: hrana.WsStream;
}
/** Client that communicates over a WebSocket using the hrana protocol. */
export declare class WsClient implements Client {
    #private;
    // true once close() has been called
    closed: boolean;
    protocol: "ws";
    /** @private */
    constructor(client: hrana.WsClient, url: URL, authToken: string | undefined, intMode: IntMode, concurrency: number | undefined);
    // runs a task under the client-wide concurrency limit
    private limit;
    execute(stmtOrSql: InStatement | string, args?: InArgs): Promise<ResultSet>;
    batch(stmts: Array<InStatement | [string, InArgs?]>, mode?: TransactionMode): Promise<Array<ResultSet>>;
    migrate(stmts: Array<InStatement>): Promise<Array<ResultSet>>;
    transaction(mode?: TransactionMode): Promise<WsTransaction>;
    executeMultiple(sql: string): Promise<void>;
    // NOTE(review): the ws implementation throws SYNC_NOT_SUPPORTED here
    sync(): Promise<Replicated>;
    reconnect(): Promise<void>;
    _closeStream(streamState: StreamState): void;
    close(): void;
}
/** Interactive transaction running on a dedicated WebSocket stream. */
export declare class WsTransaction extends HranaTransaction implements Transaction {
    #private;
    /** @private */
    constructor(client: WsClient, state: StreamState, mode: TransactionMode, version: hrana.ProtocolVersion);
    /** @private */
    _getStream(): hrana.Stream;
    /** @private */
    _getSqlCache(): SqlCache;
    close(): void;
    get closed(): boolean;
}

View File

@@ -0,0 +1,359 @@
import * as hrana from "@libsql/hrana-client";
import { LibsqlError } from "@libsql/core/api";
import { expandConfig } from "@libsql/core/config";
import { HranaTransaction, executeHranaBatch, stmtToHrana, resultSetFromHrana, mapHranaError, } from "./hrana.js";
import { SqlCache } from "./sql_cache.js";
import { encodeBaseUrl } from "@libsql/core/uri";
import { supportedUrlLink } from "@libsql/core/util";
import promiseLimit from "promise-limit";
export * from "@libsql/core/api";
export function createClient(config) {
    // Normalize the user-facing config (preferHttp = false) before dispatching.
    const expanded = expandConfig(config, false);
    return _createClient(expanded);
}
/** @private */
export function _createClient(config) {
    // Only WebSocket schemes are handled here.
    if (config.scheme !== "wss" && config.scheme !== "ws") {
        throw new LibsqlError('The WebSocket client supports only "libsql:", "wss:" and "ws:" URLs, ' +
            `got ${JSON.stringify(config.scheme + ":")}. For more information, please read ${supportedUrlLink}`, "URL_SCHEME_NOT_SUPPORTED");
    }
    if (config.encryptionKey !== undefined) {
        throw new LibsqlError("Encryption key is not supported by the remote client.", "ENCRYPTION_KEY_NOT_SUPPORTED");
    }
    // The ?tls= query parameter must agree with the URL scheme.
    if (config.scheme === "ws" && config.tls) {
        throw new LibsqlError(`A "ws:" URL cannot opt into TLS by using ?tls=1`, "URL_INVALID");
    }
    if (config.scheme === "wss" && !config.tls) {
        throw new LibsqlError(`A "wss:" URL cannot opt out of TLS by using ?tls=0`, "URL_INVALID");
    }
    const url = encodeBaseUrl(config.scheme, config.authority, config.path);
    let wsClient;
    try {
        wsClient = hrana.openWs(url, config.authToken);
    }
    catch (e) {
        if (e instanceof hrana.WebSocketUnsupportedError) {
            // Point the user at the HTTP client, which works without WebSockets.
            const suggestedScheme = config.scheme === "wss" ? "https" : "http";
            const suggestedUrl = encodeBaseUrl(suggestedScheme, config.authority, config.path);
            throw new LibsqlError("This environment does not support WebSockets, please switch to the HTTP client by using " +
                `a "${suggestedScheme}:" URL (${JSON.stringify(suggestedUrl)}). ` +
                `For more information, please read ${supportedUrlLink}`, "WEBSOCKETS_NOT_SUPPORTED");
        }
        throw mapHranaError(e);
    }
    return new WsClient(wsClient, url, config.authToken, config.intMode, config.concurrency);
}
// A connection older than this is replaced by a fresh one (see #openStream()).
const maxConnAgeMillis = 60 * 1000;
// SQL cache capacity enabled once the server is known to support stored SQL.
const sqlCacheCapacity = 100;
/**
 * Client that communicates with the server over a WebSocket using the hrana
 * protocol. Connections are rotated when they get too old, and a failed
 * connection is transparently replaced on the next operation.
 */
export class WsClient {
    #url;
    #authToken;
    #intMode;
    // State of the current connection. The `hrana.WsClient` inside may be closed at any moment due to an
    // asynchronous error.
    #connState;
    // If defined, this is a connection that will be used in the future, once it is ready.
    #futureConnState;
    closed;
    protocol;
    // NOTE(review): declared but never assigned anywhere in this file.
    #isSchemaDatabase;
    #promiseLimitFunction;
    /** @private */
    constructor(client, url, authToken, intMode, concurrency) {
        this.#url = url;
        this.#authToken = authToken;
        this.#intMode = intMode;
        this.#connState = this.#openConn(client);
        this.#futureConnState = undefined;
        this.closed = false;
        this.protocol = "ws";
        this.#promiseLimitFunction = promiseLimit(concurrency);
    }
    // Runs `fn` under the client-wide concurrency limit.
    async limit(fn) {
        return this.#promiseLimitFunction(fn);
    }
    /** Executes a single statement on a fresh stream. */
    async execute(stmtOrSql, args) {
        let stmt;
        if (typeof stmtOrSql === "string") {
            stmt = {
                sql: stmtOrSql,
                args: args || [],
            };
        }
        else {
            stmt = stmtOrSql;
        }
        return this.limit(async () => {
            const streamState = await this.#openStream();
            try {
                const hranaStmt = stmtToHrana(stmt);
                // Schedule all operations synchronously, so they will be pipelined and executed in a single
                // network roundtrip.
                streamState.conn.sqlCache.apply([hranaStmt]);
                const hranaRowsPromise = streamState.stream.query(hranaStmt);
                streamState.stream.closeGracefully();
                const hranaRowsResult = await hranaRowsPromise;
                return resultSetFromHrana(hranaRowsResult);
            }
            catch (e) {
                throw mapHranaError(e);
            }
            finally {
                this._closeStream(streamState);
            }
        });
    }
    /** Executes a list of statements as one hrana batch in a transaction. */
    async batch(stmts, mode = "deferred") {
        return this.limit(async () => {
            const streamState = await this.#openStream();
            try {
                const normalizedStmts = stmts.map((stmt) => {
                    if (Array.isArray(stmt)) {
                        return {
                            sql: stmt[0],
                            args: stmt[1] || [],
                        };
                    }
                    return stmt;
                });
                const hranaStmts = normalizedStmts.map(stmtToHrana);
                const version = await streamState.conn.client.getVersion();
                // Schedule all operations synchronously, so they will be pipelined and executed in a single
                // network roundtrip.
                streamState.conn.sqlCache.apply(hranaStmts);
                const batch = streamState.stream.batch(version >= 3);
                const resultsPromise = executeHranaBatch(mode, version, batch, hranaStmts);
                const results = await resultsPromise;
                return results;
            }
            catch (e) {
                throw mapHranaError(e);
            }
            finally {
                this._closeStream(streamState);
            }
        });
    }
    /** Runs migration statements as a deferred hrana batch. */
    async migrate(stmts) {
        return this.limit(async () => {
            const streamState = await this.#openStream();
            try {
                const hranaStmts = stmts.map(stmtToHrana);
                const version = await streamState.conn.client.getVersion();
                // Schedule all operations synchronously, so they will be pipelined and executed in a single
                // network roundtrip.
                const batch = streamState.stream.batch(version >= 3);
                const resultsPromise = executeHranaBatch("deferred", version, batch, hranaStmts, true);
                const results = await resultsPromise;
                return results;
            }
            catch (e) {
                throw mapHranaError(e);
            }
            finally {
                this._closeStream(streamState);
            }
        });
    }
    /** Starts an interactive transaction on a dedicated stream. */
    async transaction(mode = "write") {
        return this.limit(async () => {
            const streamState = await this.#openStream();
            try {
                const version = await streamState.conn.client.getVersion();
                // the BEGIN statement will be batched with the first statement on the transaction to save a
                // network roundtrip
                return new WsTransaction(this, streamState, mode, version);
            }
            catch (e) {
                this._closeStream(streamState);
                throw mapHranaError(e);
            }
        });
    }
    /** Executes a multi-statement SQL script on a fresh stream. */
    async executeMultiple(sql) {
        return this.limit(async () => {
            const streamState = await this.#openStream();
            try {
                // Schedule all operations synchronously, so they will be pipelined and executed in a single
                // network roundtrip.
                const promise = streamState.stream.sequence(sql);
                streamState.stream.closeGracefully();
                await promise;
            }
            catch (e) {
                throw mapHranaError(e);
            }
            finally {
                this._closeStream(streamState);
            }
        });
    }
    /** Embedded-replica sync is not available over WebSockets. */
    sync() {
        throw new LibsqlError("sync not supported in ws mode", "SYNC_NOT_SUPPORTED");
    }
    // Returns a stream on a healthy connection, rotating aged connections and
    // transparently replacing connections that died asynchronously.
    async #openStream() {
        if (this.closed) {
            throw new LibsqlError("The client is closed", "CLIENT_CLOSED");
        }
        const now = new Date();
        const ageMillis = now.valueOf() - this.#connState.openTime.valueOf();
        if (ageMillis > maxConnAgeMillis &&
            this.#futureConnState === undefined) {
            // The existing connection is too old, let's open a new one.
            const futureConnState = this.#openConn();
            this.#futureConnState = futureConnState;
            // However, if we used `futureConnState` immediately, we would introduce additional latency,
            // because we would have to wait for the WebSocket handshake to complete, even though we may
            // have a perfectly good existing connection in `this.#connState`!
            //
            // So we wait until the `hrana.Client.getVersion()` operation completes (which happens when the
            // WebSocket handshake completes), and only then we replace `this.#connState` with
            // `futureConnState`, which is stored in `this.#futureConnState` in the meantime.
            futureConnState.client.getVersion().then((_version) => {
                if (this.#connState !== futureConnState) {
                    // We need to close `this.#connState` before we replace it. However, it is possible
                    // that `this.#connState` has already been replaced: see the code below.
                    if (this.#connState.streamStates.size === 0) {
                        this.#connState.client.close();
                    }
                    else {
                        // If there are existing streams on the connection, we must not close it, because
                        // these streams would be broken. The last stream to be closed will also close the
                        // connection in `_closeStream()`.
                    }
                }
                this.#connState = futureConnState;
                this.#futureConnState = undefined;
            }, (_e) => {
                // If the new connection could not be established, let's just ignore the error and keep
                // using the existing connection.
                this.#futureConnState = undefined;
            });
        }
        if (this.#connState.client.closed) {
            // An error happened on this connection and it has been closed. Let's try to seamlessly reconnect.
            try {
                if (this.#futureConnState !== undefined) {
                    // We are already in the process of opening a new connection, so let's just use it
                    // immediately.
                    this.#connState = this.#futureConnState;
                }
                else {
                    this.#connState = this.#openConn();
                }
            }
            catch (e) {
                throw mapHranaError(e);
            }
        }
        const connState = this.#connState;
        try {
            // Now we wait for the WebSocket handshake to complete (if it hasn't completed yet). Note that
            // this does not increase latency, because any messages that we would send on the WebSocket before
            // the handshake would be queued until the handshake is completed anyway.
            if (connState.useSqlCache === undefined) {
                connState.useSqlCache =
                    (await connState.client.getVersion()) >= 2;
                if (connState.useSqlCache) {
                    connState.sqlCache.capacity = sqlCacheCapacity;
                }
            }
            const stream = connState.client.openStream();
            stream.intMode = this.#intMode;
            const streamState = { conn: connState, stream };
            connState.streamStates.add(streamState);
            return streamState;
        }
        catch (e) {
            throw mapHranaError(e);
        }
    }
    // Opens a new connection, or wraps the given pre-opened hrana client.
    #openConn(client) {
        try {
            client ??= hrana.openWs(this.#url, this.#authToken);
            return {
                client,
                useSqlCache: undefined,
                sqlCache: new SqlCache(client, 0),
                openTime: new Date(),
                streamStates: new Set(),
            };
        }
        catch (e) {
            throw mapHranaError(e);
        }
    }
    /** Best-effort teardown of current/future connections followed by a fresh connect. */
    async reconnect() {
        try {
            for (const st of Array.from(this.#connState.streamStates)) {
                try {
                    st.stream.close();
                }
                catch { }
            }
            this.#connState.client.close();
        }
        catch { }
        if (this.#futureConnState) {
            try {
                this.#futureConnState.client.close();
            }
            catch { }
            this.#futureConnState = undefined;
        }
        const next = this.#openConn();
        const version = await next.client.getVersion();
        next.useSqlCache = version >= 2;
        if (next.useSqlCache) {
            next.sqlCache.capacity = sqlCacheCapacity;
        }
        this.#connState = next;
        this.closed = false;
    }
    // Returns a stream to its connection; closes a superseded connection once
    // its last stream is gone.
    _closeStream(streamState) {
        streamState.stream.close();
        const connState = streamState.conn;
        connState.streamStates.delete(streamState);
        if (connState.streamStates.size === 0 &&
            connState !== this.#connState) {
            // We are not using this connection anymore and this is the last stream that was using it, so we
            // must close it now.
            connState.client.close();
        }
    }
    close() {
        this.#connState.client.close();
        // Fix: `this.closed = true` was assigned twice in this method; the
        // redundant duplicate at the end has been removed.
        this.closed = true;
        if (this.#futureConnState) {
            try {
                this.#futureConnState.client.close();
            }
            catch { }
            this.#futureConnState = undefined;
        }
    }
}
/** Interactive transaction running on a dedicated WebSocket stream. */
export class WsTransaction extends HranaTransaction {
    #client;
    #streamState;
    /** @private */
    constructor(client, state, mode, version) {
        super(mode, version);
        this.#client = client;
        this.#streamState = state;
    }
    /** @private */
    _getStream() {
        return this.#streamState.stream;
    }
    /** @private */
    _getSqlCache() {
        return this.#streamState.conn.sqlCache;
    }
    // Closing the transaction hands its stream back to the owning client.
    close() {
        this.#client._closeStream(this.#streamState);
    }
    get closed() {
        return this.#streamState.stream.closed;
    }
}

View File

@@ -0,0 +1,123 @@
{
"name": "@libsql/client",
"version": "0.17.2",
"keywords": [
"libsql",
"database",
"sqlite",
"serverless",
"vercel",
"netlify",
"lambda"
],
"description": "libSQL driver for TypeScript and JavaScript",
"repository": {
"type": "git",
"url": "git+https://github.com/tursodatabase/libsql-client-ts",
"directory": "packages/libsql-client"
},
"authors": [
"Jan Špaček <honza@chiselstrike.com>",
"Pekka Enberg <penberg@chiselstrike.com>",
"Jan Plhak <jp@chiselstrike.com>"
],
"license": "MIT",
"type": "module",
"main": "lib-cjs/node.js",
"types": "lib-esm/node.d.ts",
"exports": {
".": {
"types": "./lib-esm/node.d.ts",
"import": {
"workerd": "./lib-esm/web.js",
"deno": "./lib-esm/node.js",
"edge-light": "./lib-esm/web.js",
"netlify": "./lib-esm/web.js",
"node": "./lib-esm/node.js",
"browser": "./lib-esm/web.js",
"default": "./lib-esm/node.js"
},
"require": "./lib-cjs/node.js"
},
"./node": {
"types": "./lib-esm/node.d.ts",
"import": "./lib-esm/node.js",
"require": "./lib-cjs/node.js"
},
"./http": {
"types": "./lib-esm/http.d.ts",
"import": "./lib-esm/http.js",
"require": "./lib-cjs/http.js"
},
"./ws": {
"types": "./lib-esm/ws.d.ts",
"import": "./lib-esm/ws.js",
"require": "./lib-cjs/ws.js"
},
"./sqlite3": {
"types": "./lib-esm/sqlite3.d.ts",
"import": "./lib-esm/sqlite3.js",
"require": "./lib-cjs/sqlite3.js"
},
"./web": {
"types": "./lib-esm/web.d.ts",
"import": "./lib-esm/web.js",
"require": "./lib-cjs/web.js"
}
},
"typesVersions": {
"*": {
".": [
"./lib-esm/node.d.ts"
],
"http": [
"./lib-esm/http.d.ts"
],
"hrana": [
"./lib-esm/hrana.d.ts"
],
"sqlite3": [
"./lib-esm/sqlite3.d.ts"
],
"web": [
"./lib-esm/web.d.ts"
]
}
},
"files": [
"lib-cjs/**",
"lib-esm/**",
"README.md"
],
"scripts": {
"prepublishOnly": "npm run build",
"prebuild": "rm -rf ./lib-cjs ./lib-esm",
"build": "npm run build:cjs && npm run build:esm",
"build:cjs": "tsc -p tsconfig.build-cjs.json",
"build:esm": "tsc -p tsconfig.build-esm.json",
"format:check": "prettier --check .",
"postbuild": "cp package-cjs.json ./lib-cjs/package.json",
"test": "jest --runInBand",
"typecheck": "tsc --noEmit",
"typedoc": "rm -rf ./docs && typedoc",
"lint-staged": "lint-staged"
},
"dependencies": {
"@libsql/core": "^0.17.2",
"@libsql/hrana-client": "^0.9.0",
"js-base64": "^3.7.5",
"libsql": "^0.5.28",
"promise-limit": "^2.7.0"
},
"devDependencies": {
"@types/jest": "^29.2.5",
"@types/node": "^18.15.5",
"jest": "^29.3.1",
"lint-staged": "^15.2.2",
"msw": "^2.3.0",
"prettier": "3.2.5",
"ts-jest": "^29.0.5",
"typedoc": "^0.23.28",
"typescript": "^4.9.4"
}
}

View File

@@ -0,0 +1,34 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.LibsqlBatchError = exports.LibsqlError = void 0;
/** Error thrown by the client. */
class LibsqlError extends Error {
    /** Machine-readable error code. */
    code;
    /** Extended error code with more specific information (e.g., SQLITE_CONSTRAINT_PRIMARYKEY). */
    extendedCode;
    /** Raw numeric error code */
    rawCode;
    constructor(message, code, extendedCode, rawCode, cause) {
        // Prefix the human-readable message with the machine-readable code.
        const fullMessage = code === undefined ? message : `${code}: ${message}`;
        super(fullMessage, { cause });
        this.code = code;
        this.extendedCode = extendedCode;
        this.rawCode = rawCode;
        this.name = "LibsqlError";
    }
}
exports.LibsqlError = LibsqlError;
/** Error thrown by the client during batch operations. */
class LibsqlBatchError extends LibsqlError {
    /** The zero-based index of the statement that failed in the batch. */
    statementIndex;
    constructor(message, statementIndex, code, extendedCode, rawCode, cause) {
        super(message, code, extendedCode, rawCode, cause);
        this.statementIndex = statementIndex;
        // Override the name assigned by the LibsqlError constructor.
        this.name = "LibsqlBatchError";
    }
}
exports.LibsqlBatchError = LibsqlBatchError;

View File

@@ -0,0 +1,143 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.expandConfig = exports.isInMemoryConfig = void 0;
const api_js_1 = require("./api.js");
const uri_js_1 = require("./uri.js");
const util_js_1 = require("./util.js");
const inMemoryMode = ":memory:";
// An in-memory database is a "file:" URL whose path is ":memory:", optionally
// followed by connection query parameters (":memory:?...").
function isInMemoryConfig(config) {
    if (config.scheme !== "file") {
        return false;
    }
    return (config.path === inMemoryMode ||
        config.path.startsWith(`${inMemoryMode}?`));
}
exports.isInMemoryConfig = isInMemoryConfig;
/**
 * Normalizes a user-supplied client configuration into a fully-expanded form.
 *
 * Parses `config.url`, merges the recognized URL query parameters into the
 * config (query parameters override the corresponding config fields),
 * resolves the final scheme and tls defaults, and validates the result.
 * `preferHttp` selects http(s) over ws(s) when expanding "libsql:" URLs.
 * Throws a LibsqlError with a "URL_*" code on invalid URLs, or a TypeError
 * on invalid config/intMode values.
 */
function expandConfig(config, preferHttp) {
    if (typeof config !== "object") {
        // produce a reasonable error message in the common case where users type
        // `createClient("libsql://...")` instead of `createClient({url: "libsql://..."})`
        throw new TypeError(`Expected client configuration as object, got ${typeof config}`);
    }
    let { url, authToken, tls, intMode, concurrency } = config;
    // fill simple defaults right here
    concurrency = Math.max(0, concurrency || 20);
    intMode ??= "number";
    let connectionQueryParams = []; // recognized query parameters which we sanitize through white list of valid key-value pairs
    // convert plain :memory: url to URI format to make logic more uniform
    if (url === inMemoryMode) {
        url = "file::memory:";
    }
    // parse url parameters first and override config with update values
    const uri = (0, uri_js_1.parseUri)(url);
    const originalUriScheme = uri.scheme.toLowerCase();
    const isInMemoryMode = originalUriScheme === "file" &&
        uri.path === inMemoryMode &&
        uri.authority === undefined;
    let queryParamsDef;
    if (isInMemoryMode) {
        // in-memory databases only understand "cache", which is forwarded
        // verbatim on the connection string built below
        queryParamsDef = {
            cache: {
                values: ["shared", "private"],
                update: (key, value) => connectionQueryParams.push(`${key}=${value}`),
            },
        };
    }
    else {
        // remote databases accept "tls" and "authToken", which override the
        // corresponding fields destructured from the config object above
        queryParamsDef = {
            tls: {
                values: ["0", "1"],
                update: (_, value) => (tls = value === "1"),
            },
            authToken: {
                update: (_, value) => (authToken = value),
            },
        };
    }
    // reject unknown query parameters, and invalid values for known ones
    for (const { key, value } of uri.query?.pairs ?? []) {
        if (!Object.hasOwn(queryParamsDef, key)) {
            throw new api_js_1.LibsqlError(`Unsupported URL query parameter ${JSON.stringify(key)}`, "URL_PARAM_NOT_SUPPORTED");
        }
        const queryParamDef = queryParamsDef[key];
        if (queryParamDef.values !== undefined &&
            !queryParamDef.values.includes(value)) {
            throw new api_js_1.LibsqlError(`Unknown value for the "${key}" query argument: ${JSON.stringify(value)}. Supported values are: [${queryParamDef.values.map((x) => '"' + x + '"').join(", ")}]`, "URL_INVALID");
        }
        if (queryParamDef.update !== undefined) {
            queryParamDef?.update(key, value);
        }
    }
    // fill complex defaults & validate config
    const connectionQueryParamsString = connectionQueryParams.length === 0
        ? ""
        : `?${connectionQueryParams.join("&")}`;
    const path = uri.path + connectionQueryParamsString;
    let scheme;
    if (originalUriScheme === "libsql") {
        // "libsql:" is an alias that maps onto ws(s) or http(s) depending on
        // preferHttp; ?tls=0 downgrades to the plaintext variant
        if (tls === false) {
            if (uri.authority?.port === undefined) {
                throw new api_js_1.LibsqlError('A "libsql:" URL with ?tls=0 must specify an explicit port', "URL_INVALID");
            }
            scheme = preferHttp ? "http" : "ws";
        }
        else {
            scheme = preferHttp ? "https" : "wss";
        }
    }
    else {
        scheme = originalUriScheme;
    }
    // tls defaults to off for plaintext schemes and on for everything else
    if (scheme === "http" || scheme === "ws") {
        tls ??= false;
    }
    else {
        tls ??= true;
    }
    if (scheme !== "http" &&
        scheme !== "ws" &&
        scheme !== "https" &&
        scheme !== "wss" &&
        scheme !== "file") {
        throw new api_js_1.LibsqlError('The client supports only "libsql:", "wss:", "ws:", "https:", "http:" and "file:" URLs, ' +
            `got ${JSON.stringify(uri.scheme + ":")}. ` +
            `For more information, please read ${util_js_1.supportedUrlLink}`, "URL_SCHEME_NOT_SUPPORTED");
    }
    if (intMode !== "number" && intMode !== "bigint" && intMode !== "string") {
        throw new TypeError(`Invalid value for intMode, expected "number", "bigint" or "string", got ${JSON.stringify(intMode)}`);
    }
    if (uri.fragment !== undefined) {
        throw new api_js_1.LibsqlError(`URL fragments are not supported: ${JSON.stringify("#" + uri.fragment)}`, "URL_INVALID");
    }
    if (isInMemoryMode) {
        // in-memory databases have no authority or credentials; only the
        // connection-string parameters collected above are carried over
        return {
            scheme: "file",
            tls: false,
            path,
            intMode,
            concurrency,
            syncUrl: config.syncUrl,
            syncInterval: config.syncInterval,
            readYourWrites: config.readYourWrites,
            offline: config.offline,
            fetch: config.fetch,
            authToken: undefined,
            encryptionKey: undefined,
            remoteEncryptionKey: undefined,
            authority: undefined,
        };
    }
    return {
        scheme,
        tls,
        authority: uri.authority,
        path,
        authToken,
        intMode,
        concurrency,
        encryptionKey: config.encryptionKey,
        remoteEncryptionKey: config.remoteEncryptionKey,
        syncUrl: config.syncUrl,
        syncInterval: config.syncInterval,
        readYourWrites: config.readYourWrites,
        offline: config.offline,
        fetch: config.fetch,
    };
}
exports.expandConfig = expandConfig;

View File

@@ -0,0 +1,3 @@
{
"type": "commonjs"
}

View File

@@ -0,0 +1,125 @@
"use strict";
// URI parser based on RFC 3986
// We can't use the standard `URL` object, because we want to support relative `file:` URLs like
// `file:relative/path/database.db`, which are not correct according to RFC 8089, which standardizes the
// `file` scheme.
Object.defineProperty(exports, "__esModule", { value: true });
exports.encodeBaseUrl = exports.parseUri = void 0;
const api_js_1 = require("./api.js");
function parseUri(text) {
    // Match against the RFC 3986-style grammar in URI_RE; anything that does
    // not even have the basic "scheme:..." shape is rejected outright.
    const match = URI_RE.exec(text);
    if (match === null) {
        throw new api_js_1.LibsqlError(`The URL '${text}' is not in a valid format`, "URL_INVALID");
    }
    const groups = match.groups;
    const rawAuthority = groups["authority"];
    const rawQuery = groups["query"];
    const rawFragment = groups["fragment"];
    // The path is always present (possibly empty); authority, query and
    // fragment are optional and stay undefined when absent.
    return {
        scheme: groups["scheme"],
        authority: rawAuthority !== undefined ? parseAuthority(rawAuthority) : undefined,
        path: percentDecode(groups["path"]),
        query: rawQuery !== undefined ? parseQuery(rawQuery) : undefined,
        fragment: rawFragment !== undefined ? percentDecode(rawFragment) : undefined,
    };
}
exports.parseUri = parseUri;
const URI_RE = (() => {
    // Scheme per RFC 3986, section 3.1: ALPHA *( ALPHA / DIGIT / "+" / "-" / "." ).
    // The previous charset omitted digits, rejecting valid schemes like "s3:".
    const SCHEME = "(?<scheme>[A-Za-z][A-Za-z0-9.+-]*)";
    // Authority (after "//") runs until the first "/", "?" or "#".
    const AUTHORITY = "(?<authority>[^/?#]*)";
    // Path runs until the query ("?") or fragment ("#") delimiter.
    const PATH = "(?<path>[^?#]*)";
    const QUERY = "(?<query>[^#]*)";
    const FRAGMENT = "(?<fragment>.*)";
    return new RegExp(`^${SCHEME}:(//${AUTHORITY})?${PATH}(\\?${QUERY})?(#${FRAGMENT})?$`, "su");
})();
function parseAuthority(text) {
    // Split "userinfo@host:port" into its components. The host may also be
    // a bracketed IPv6 literal, captured by the "host_br" group.
    const match = AUTHORITY_RE.exec(text);
    if (match === null) {
        throw new api_js_1.LibsqlError("The authority part of the URL is not in a valid format", "URL_INVALID");
    }
    const groups = match.groups;
    const rawPort = groups["port"];
    const rawUsername = groups["username"];
    let userinfo = undefined;
    if (rawUsername !== undefined) {
        const rawPassword = groups["password"];
        userinfo = {
            username: percentDecode(rawUsername),
            password: rawPassword !== undefined ? percentDecode(rawPassword) : undefined,
        };
    }
    return {
        host: percentDecode(groups["host_br"] ?? groups["host"]),
        // An empty port string (e.g. "host:") is treated the same as no port.
        port: rawPort ? parseInt(rawPort, 10) : undefined,
        userinfo,
    };
}
const AUTHORITY_RE = (() => {
    // Optional userinfo: "user" or "user:password", terminated by "@".
    const USERINFO = "((?<username>[^:]*)(:(?<password>.*))?@)?";
    // Host: either a plain host (no ":" or brackets) or a bracketed IPv6 literal.
    const HOST = "((?<host>[^:\\[\\]]*)|(\\[(?<host_br>[^\\[\\]]*)\\]))";
    // Optional numeric port after ":".
    const PORT = "(:(?<port>[0-9]*))?";
    return new RegExp(`^${USERINFO}${HOST}${PORT}$`, "su");
})();
// Query string is parsed as application/x-www-form-urlencoded according to the Web URL standard:
// https://url.spec.whatwg.org/#urlencoded-parsing
function parseQuery(text) {
    const pairs = [];
    for (const sequence of text.split("&")) {
        // Empty sequences (e.g. "a=1&&b=2" or a trailing "&") contribute nothing.
        if (sequence === "") {
            continue;
        }
        // Everything before the first "=" is the key; a missing "=" means an
        // empty value. "+" denotes a space in this encoding.
        const splitIdx = sequence.indexOf("=");
        const rawKey = splitIdx < 0 ? sequence : sequence.substring(0, splitIdx);
        const rawValue = splitIdx < 0 ? "" : sequence.substring(splitIdx + 1);
        pairs.push({
            key: percentDecode(rawKey.replaceAll("+", " ")),
            value: percentDecode(rawValue.replaceAll("+", " ")),
        });
    }
    return { pairs };
}
function percentDecode(text) {
    // decodeURIComponent() throws a URIError on malformed percent escapes;
    // translate that into a LibsqlError so callers see a uniform error type.
    try {
        return decodeURIComponent(text);
    }
    catch (e) {
        if (!(e instanceof URIError)) {
            throw e;
        }
        throw new api_js_1.LibsqlError(`URL component has invalid percent encoding: ${e}`, "URL_INVALID", undefined, undefined, e);
    }
}
function encodeBaseUrl(scheme, authority, path) {
    // Every scheme handled here requires a host, so an absent authority is an error.
    if (authority === undefined) {
        throw new api_js_1.LibsqlError(`URL with scheme ${JSON.stringify(scheme + ":")} requires authority (the "//" part)`, "URL_INVALID");
    }
    // Percent-encode each path segment individually so the "/" separators
    // survive, and make a non-empty path absolute.
    let pathText = path.split("/").map(encodeURIComponent).join("/");
    if (pathText !== "" && !pathText.startsWith("/")) {
        pathText = "/" + pathText;
    }
    const authorityText = `//${encodeUserinfo(authority.userinfo)}${encodeHost(authority.host)}${encodePort(authority.port)}`;
    return new URL(`${scheme}:${authorityText}${pathText}`);
}
exports.encodeBaseUrl = encodeBaseUrl;
function encodeHost(host) {
    // A host containing ":" is an IPv6 literal and must be bracketed.
    if (host.includes(":")) {
        return `[${encodeURI(host)}]`;
    }
    return encodeURI(host);
}
function encodePort(port) {
    // No port → empty string, so the authority omits the ":" separator.
    return port === undefined ? "" : `:${port}`;
}
function encodeUserinfo(userinfo) {
    if (userinfo === undefined) {
        return "";
    }
    // Produce "user@" or "user:password@", with both parts percent-encoded.
    const parts = [encodeURIComponent(userinfo.username)];
    if (userinfo.password !== undefined) {
        parts.push(":", encodeURIComponent(userinfo.password));
    }
    parts.push("@");
    return parts.join("");
}

Some files were not shown because too many files have changed in this diff Show More