Initial commit: New MoreminiMore website with fresh design

This commit is contained in:
MoreminiMore
2026-04-22 01:59:05 +07:00
commit 76409638cc
14010 changed files with 2052041 additions and 0 deletions

53
node_modules/@astrojs/compiler/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,53 @@
MIT License
Copyright (c) 2021 [Astro contributors](https://github.com/withastro/compiler/graphs/contributors)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
This license applies to parts of the `internal/` subdirectory originating from
the https://cs.opensource.google/go/x/net/+/master:html/ repository:
Copyright (c) 2009 The Go Authors. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

72
node_modules/@astrojs/compiler/README.md generated vendored Normal file
View File

@@ -0,0 +1,72 @@
# Astro Compiler
Astro's [Go](https://golang.org/) + WASM compiler.
## Install
```
npm install @astrojs/compiler
```
## Usage
#### Transform `.astro` to valid TypeScript
The Astro compiler can convert `.astro` syntax to a TypeScript Module whose default export generates HTML.
**Some notes**...
- TypeScript is valid `.astro` syntax! The output code may need an additional post-processing step to generate valid JavaScript.
- `.astro` files rely on a server implementation exposed as `astro/runtime/server/index.js` in the Node ecosystem. Other runtimes currently need to bring their own rendering implementation and reference it via `internalURL`. This is a pain point we're looking into fixing.
```js
import { transform, type TransformResult } from "@astrojs/compiler";
const result = await transform(source, {
filename: "/Users/astro/Code/project/src/pages/index.astro",
sourcemap: "both",
internalURL: "astro/runtime/server/index.js",
});
```
#### Parse `.astro` and return an AST
The Astro compiler can emit an AST using the `parse` method.
**Some notes**...
- Position data is currently incomplete and in some cases incorrect. We're working on it!
- A `TextNode` can represent both HTML `text` and JavaScript/TypeScript source code.
- The `@astrojs/compiler/utils` entrypoint exposes `walk` and `walkAsync` functions that can be used to traverse the AST. It also exposes the `is` helper which can be used as guards to derive the proper types for each `node`.
```js
import { parse } from "@astrojs/compiler";
import { walk, walkAsync, is } from "@astrojs/compiler/utils";
const result = await parse(source, {
position: false, // defaults to `true`
});
walk(result.ast, (node) => {
// `tag` nodes are `element` | `custom-element` | `component`
if (is.tag(node)) {
console.log(node.name);
}
});
await walkAsync(result.ast, async (node) => {
if (is.tag(node)) {
node.value = await expensiveCalculation(node)
}
});
```
## Develop
### VSCode / CodeSpaces
A `devcontainer` configuration is available for use with VSCode's [Remote Development extension pack](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.vscode-remote-extensionpack) and GitHub CodeSpaces.
## Contributing
[CONTRIBUTING.md](/CONTRIBUTING.md)

BIN
node_modules/@astrojs/compiler/dist/astro.wasm generated vendored Normal file

Binary file not shown.

File diff suppressed because one or more lines are too long

11
node_modules/@astrojs/compiler/dist/browser/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,11 @@
// Type declarations for the browser entry of @astrojs/compiler.
// The browser build re-exports the canonical signatures declared in
// ../shared/types.js; the bare imports below are type-only side-effect
// imports that pull the AST and diagnostic declarations into the program.
// Generated, vendored file — do not hand-edit.
import { transform as transform$1, parse as parse$1, convertToTSX as convertToTSX$1, teardown as teardown$1, initialize as initialize$1 } from '../shared/types.js';
import '../shared/ast.js';
import '../shared/diagnostics.js';
declare const transform: typeof transform$1;
declare const parse: typeof parse$1;
declare const convertToTSX: typeof convertToTSX$1;
declare const teardown: typeof teardown$1;
// Browser-only: callers must await initialize({ wasmURL }) before use.
declare const initialize: typeof initialize$1;
export { convertToTSX, initialize, parse, teardown, transform };

1
node_modules/@astrojs/compiler/dist/browser/index.js generated vendored Normal file
View File

@@ -0,0 +1 @@
import{a as f}from"../chunk-QR6QDSEV.js";var u=(t,e)=>p().transform(t,e),S=(t,e)=>p().parse(t,e),v=(t,e)=>p().convertToTSX(t,e),a,i,h=()=>{a=void 0,i=void 0,globalThis["@astrojs/compiler"]=void 0},T=async t=>{let e=t.wasmURL;if(!e)throw new Error('Must provide the "wasmURL" option');e+="",a||(a=m(e).catch(n=>{throw a=void 0,n})),i=i||await a},p=()=>{if(!a)throw new Error('You need to call "initialize" before calling this');if(!i)throw new Error('You need to wait for the promise returned from "initialize" to be resolved before calling this');return i},y=async(t,e)=>{let n;return WebAssembly.instantiateStreaming?n=await WebAssembly.instantiateStreaming(fetch(t),e):n=await(async()=>{let s=await fetch(t).then(o=>o.arrayBuffer());return WebAssembly.instantiate(s,e)})(),n},m=async t=>{let e=new f,n=await y(t,e.importObject);e.run(n.instance);let c=globalThis["@astrojs/compiler"];return{transform:(s,o)=>new Promise(r=>r(c.transform(s,o||{}))),convertToTSX:(s,o)=>new Promise(r=>r(c.convertToTSX(s,o||{}))).then(r=>({...r,map:JSON.parse(r.map)})),parse:(s,o)=>new Promise(r=>r(c.parse(s,o||{}))).then(r=>({...r,ast:JSON.parse(r.ast)}))}};export{v as convertToTSX,T as initialize,S as parse,h as teardown,u as transform};

View File

@@ -0,0 +1,3 @@
"use strict";var c=Object.defineProperty;var d=Object.getOwnPropertyDescriptor;var p=Object.getOwnPropertyNames;var N=Object.prototype.hasOwnProperty;var u=(o,e)=>{for(var t in e)c(o,t,{get:e[t],enumerable:!0})},f=(o,e,t,a)=>{if(e&&typeof e=="object"||typeof e=="function")for(let r of p(e))!N.call(o,r)&&r!==t&&c(o,r,{get:()=>e[r],enumerable:!(a=d(e,r))||a.enumerable});return o};var y=o=>f(c({},"__esModule",{value:!0}),o);var v={};u(v,{is:()=>s,serialize:()=>k,walk:()=>h,walkAsync:()=>x});module.exports=y(v);function n(o){return e=>e.type===o}var s={parent(o){return Array.isArray(o.children)},literal(o){return typeof o.value=="string"},tag(o){return o.type==="element"||o.type==="custom-element"||o.type==="component"||o.type==="fragment"},whitespace(o){return o.type==="text"&&o.value.trim().length===0},root:n("root"),element:n("element"),customElement:n("custom-element"),component:n("component"),fragment:n("fragment"),expression:n("expression"),text:n("text"),doctype:n("doctype"),comment:n("comment"),frontmatter:n("frontmatter")},l=class{constructor(e){this.callback=e}async visit(e,t,a){if(await this.callback(e,t,a),s.parent(e)){let r=[];for(let i=0;i<e.children.length;i++){let m=e.children[i];r.push(this.callback(m,e,i))}await Promise.all(r)}}};function h(o,e){new l(e).visit(o)}function x(o,e){return new l(e).visit(o)}function g(o){let e="";for(let t of o.attributes)switch(e+=" ",t.kind){case"empty":{e+=`${t.name}`;break}case"expression":{e+=`${t.name}={${t.value}}`;break}case"quoted":{e+=`${t.name}=${t.raw}`;break}case"template-literal":{e+=`${t.name}=\`${t.value}\``;break}case"shorthand":{e+=`{${t.name}}`;break}case"spread":{e+=`{...${t.value}}`;break}}return e}function k(o,e={selfClose:!0}){let t="";function a(r){if(s.root(r))for(let i of r.children)a(i);else if(s.frontmatter(r))t+=`---${r.value}---
`;else if(s.comment(r))t+=`<!--${r.value}-->`;else if(s.expression(r)){t+="{";for(let i of r.children)a(i);t+="}"}else if(s.literal(r))t+=r.value;else if(s.tag(r))if(t+=`<${r.name}`,t+=g(r),r.children.length===0&&e.selfClose)t+=" />";else{t+=">";for(let i of r.children)a(i);t+=`</${r.name}>`}}return a(o),t}0&&(module.exports={is,serialize,walk,walkAsync});

29
node_modules/@astrojs/compiler/dist/browser/utils.d.ts generated vendored Normal file
View File

@@ -0,0 +1,29 @@
// Type declarations for the compiler's AST utilities (browser build).
// Generated, vendored file — do not hand-edit.
import { Node, ParentNode, LiteralNode, TagLikeNode, TextNode, RootNode, ElementNode, CustomElementNode, ComponentNode, FragmentNode, ExpressionNode, DoctypeNode, CommentNode, FrontmatterNode } from '../shared/ast.js';
// Visitor invoked for every node; may be async (see walkAsync).
type Visitor = (node: Node, parent?: ParentNode, index?: number) => void | Promise<void>;
// Collection of type-guard predicates for narrowing AST nodes.
declare const is: {
parent(node: Node): node is ParentNode;
literal(node: Node): node is LiteralNode;
// true for element | custom-element | component | fragment nodes
tag(node: Node): node is TagLikeNode;
// true for text nodes whose value is only whitespace
whitespace(node: Node): node is TextNode;
root: (node: Node) => node is RootNode;
element: (node: Node) => node is ElementNode;
customElement: (node: Node) => node is CustomElementNode;
component: (node: Node) => node is ComponentNode;
fragment: (node: Node) => node is FragmentNode;
expression: (node: Node) => node is ExpressionNode;
text: (node: Node) => node is TextNode;
doctype: (node: Node) => node is DoctypeNode;
comment: (node: Node) => node is CommentNode;
frontmatter: (node: Node) => node is FrontmatterNode;
};
// Depth-first traversal; walkAsync resolves when all visits settle.
declare function walk(node: ParentNode, callback: Visitor): void;
declare function walkAsync(node: ParentNode, callback: Visitor): Promise<void>;
interface SerializeOptions {
// When true, childless tags emit as self-closing ("<br />").
selfClose: boolean;
}
/** @deprecated Please use `SerializeOptions` */
type SerializeOtions = SerializeOptions;
// Renders an AST subtree back to .astro source text.
declare function serialize(root: Node, opts?: SerializeOptions): string;
export { SerializeOptions, SerializeOtions, Visitor, is, serialize, walk, walkAsync };

3
node_modules/@astrojs/compiler/dist/browser/utils.js generated vendored Normal file
View File

@@ -0,0 +1,3 @@
// Generated (esbuild-minified) ESM build of the AST utilities
// (`is` guards, `walk`/`walkAsync`, `serialize`); same logic as the
// CommonJS build above, without the CJS interop wrapper.
// NOTE(review): the `spread` case here serializes `e.value`, while the
// node builds serialize the attribute name — presumably a version skew
// between bundles; confirm against current upstream before relying on it.
// Vendored build output — do not hand-edit.
function n(o){return t=>t.type===o}var a={parent(o){return Array.isArray(o.children)},literal(o){return typeof o.value=="string"},tag(o){return o.type==="element"||o.type==="custom-element"||o.type==="component"||o.type==="fragment"},whitespace(o){return o.type==="text"&&o.value.trim().length===0},root:n("root"),element:n("element"),customElement:n("custom-element"),component:n("component"),fragment:n("fragment"),expression:n("expression"),text:n("text"),doctype:n("doctype"),comment:n("comment"),frontmatter:n("frontmatter")},l=class{constructor(t){this.callback=t}async visit(t,e,s){if(await this.callback(t,e,s),a.parent(t)){let r=[];for(let i=0;i<t.children.length;i++){let c=t.children[i];r.push(this.callback(c,t,i))}await Promise.all(r)}}};function N(o,t){new l(t).visit(o)}function u(o,t){return new l(t).visit(o)}function m(o){let t="";for(let e of o.attributes)switch(t+=" ",e.kind){case"empty":{t+=`${e.name}`;break}case"expression":{t+=`${e.name}={${e.value}}`;break}case"quoted":{t+=`${e.name}=${e.raw}`;break}case"template-literal":{t+=`${e.name}=\`${e.value}\``;break}case"shorthand":{t+=`{${e.name}}`;break}case"spread":{t+=`{...${e.value}}`;break}}return t}function f(o,t={selfClose:!0}){let e="";function s(r){if(a.root(r))for(let i of r.children)s(i);else if(a.frontmatter(r))e+=`---${r.value}---
`;else if(a.comment(r))e+=`<!--${r.value}-->`;else if(a.expression(r)){e+="{";for(let i of r.children)s(i);e+="}"}else if(a.literal(r))e+=r.value;else if(a.tag(r))if(e+=`<${r.name}`,e+=m(r),r.children.length===0&&t.selfClose)e+=" />";else{e+=">";for(let i of r.children)s(i);e+=`</${r.name}>`}}return s(o),e}export{a as is,f as serialize,N as walk,u as walkAsync};

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,37 @@
// Type declarations for Go's WebAssembly runtime shim (wasm_exec.js),
// bundled so the compiler's astro.wasm binary can be instantiated.
// NOTE(review): the diff listing omits this file's name; presumably it is
// dist/browser/wasm_exec.d.ts — verify against the repository tree.
// Generated, vendored file — do not hand-edit.
declare class Go {
// Import object passed to WebAssembly.instantiate. Each `gojs` entry is a
// host function the Go runtime invokes with a stack pointer (sp).
importObject: {
gojs: {
'runtime.wasmExit': (sp: any) => void;
'runtime.wasmWrite': (sp: any) => void;
'runtime.resetMemoryDataView': (sp: any) => void;
'runtime.nanotime1': (sp: any) => void;
'runtime.walltime': (sp: any) => void;
'runtime.scheduleTimeoutEvent': (sp: any) => void;
'runtime.clearTimeoutEvent': (sp: any) => void;
'runtime.getRandomData': (sp: any) => void;
'syscall/js.finalizeRef': (sp: any) => void;
'syscall/js.stringVal': (sp: any) => void;
'syscall/js.valueGet': (sp: any) => void;
'syscall/js.valueSet': (sp: any) => void;
'syscall/js.valueDelete': (sp: any) => void;
'syscall/js.valueIndex': (sp: any) => void;
'syscall/js.valueSetIndex': (sp: any) => void;
'syscall/js.valueCall': (sp: any) => void;
'syscall/js.valueInvoke': (sp: any) => void;
'syscall/js.valueNew': (sp: any) => void;
'syscall/js.valueLength': (sp: any) => void;
'syscall/js.valuePrepareString': (sp: any) => void;
'syscall/js.valueLoadString': (sp: any) => void;
'syscall/js.valueInstanceOf': (sp: any) => void;
'syscall/js.copyBytesToGo': (sp: any) => void;
'syscall/js.copyBytesToJS': (sp: any) => void;
debug: (value: any) => void;
};
};
constructor();
// Starts the Go program inside the given WebAssembly instance.
run(instance: any): Promise<void>;
private _resume;
private _makeFuncWrapper;
}
export { Go as default };

View File

@@ -0,0 +1 @@
import{a}from"../chunk-QR6QDSEV.js";export{a as default};

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

1
node_modules/@astrojs/compiler/dist/node/index.cjs generated vendored Normal file

File diff suppressed because one or more lines are too long

12
node_modules/@astrojs/compiler/dist/node/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,12 @@
// Type declarations for the Node entry of @astrojs/compiler. Unlike the
// browser build, no `initialize` is exported (the WASM binary is loaded
// from disk on first use) and a `compile` helper is added.
// Generated, vendored file — do not hand-edit.
import { transform as transform$1, parse as parse$1, convertToTSX as convertToTSX$1, teardown as teardown$1 } from '../shared/types.js';
export { HoistedScript, ParseOptions, ParseResult, PreprocessorResult, TransformOptions, TransformResult } from '../shared/types.js';
import '../shared/ast.js';
import '../shared/diagnostics.js';
declare const transform: typeof transform$1;
declare const parse: typeof parse$1;
declare const convertToTSX: typeof convertToTSX$1;
// Evaluates transformed module source and resolves with its default export.
declare const compile: (template: string) => Promise<string>;
declare const teardown: typeof teardown$1;
export { compile, convertToTSX, parse, teardown, transform };

1
node_modules/@astrojs/compiler/dist/node/index.js generated vendored Normal file
View File

@@ -0,0 +1 @@
import{a as c}from"../chunk-W5DTLHV4.js";import{promises as m}from"fs";import{fileURLToPath as f}from"url";var w=async(t,s)=>i().then(r=>r.transform(t,s)),l=async(t,s)=>i().then(r=>r.parse(t,s)),b=async(t,s)=>i().then(r=>r.convertToTSX(t,s)),P=async t=>{let{default:s}=await import(`data:text/javascript;charset=utf-8;base64,${Buffer.from(t).toString("base64")}`);return s},n,g=()=>{n=void 0,globalThis["@astrojs/compiler"]=void 0},i=()=>(n||(n=d().catch(t=>{throw n=void 0,t})),n),y=async(t,s)=>{let r;return r=await(async()=>{let o=await m.readFile(t).then(e=>e.buffer);return WebAssembly.instantiate(new Uint8Array(o),s)})(),r},d=async()=>{let t=new c,s=await y(f(new URL("../astro.wasm",import.meta.url)),t.importObject);t.run(s.instance);let r=globalThis["@astrojs/compiler"];return{transform:(a,o)=>new Promise(e=>{try{e(r.transform(a,o||{}))}catch(p){throw n=void 0,p}}),parse:(a,o)=>new Promise(e=>e(r.parse(a,o||{}))).catch(e=>{throw n=void 0,e}).then(e=>({...e,ast:JSON.parse(e.ast)})),convertToTSX:(a,o)=>new Promise(e=>e(r.convertToTSX(a,o||{}))).catch(e=>{throw n=void 0,e}).then(e=>({...e,map:JSON.parse(e.map)}))}};export{P as compile,b as convertToTSX,l as parse,g as teardown,w as transform};

1
node_modules/@astrojs/compiler/dist/node/sync.cjs generated vendored Normal file

File diff suppressed because one or more lines are too long

16
node_modules/@astrojs/compiler/dist/node/sync.d.ts generated vendored Normal file
View File

@@ -0,0 +1,16 @@
// Type declarations for the synchronous Node API: same operations as the
// async entry, with Promise return types unwrapped.
// Generated, vendored file — do not hand-edit.
import { TransformOptions, TransformResult, ParseOptions, ParseResult, ConvertToTSXOptions, TSXResult, transform as transform$1, parse as parse$1, convertToTSX as convertToTSX$1 } from '../shared/types.js';
import '../shared/ast.js';
import '../shared/diagnostics.js';
// Converts an async function type into its synchronous counterpart.
type UnwrappedPromise<T> = T extends (...params: any) => Promise<infer Return> ? (...params: Parameters<T>) => Return : T;
interface Service {
transform: UnwrappedPromise<typeof transform$1>;
parse: UnwrappedPromise<typeof parse$1>;
convertToTSX: UnwrappedPromise<typeof convertToTSX$1>;
}
declare const transform: (input: string, options: TransformOptions | undefined) => TransformResult;
declare const parse: (input: string, options: ParseOptions | undefined) => ParseResult;
declare const convertToTSX: (input: string, options: ConvertToTSXOptions | undefined) => TSXResult;
// Instantiates the WASM compiler synchronously and returns the service.
declare function startRunningService(): Service;
export { convertToTSX, parse, startRunningService, transform };

1
node_modules/@astrojs/compiler/dist/node/sync.js generated vendored Normal file
View File

@@ -0,0 +1 @@
import{a as c}from"../chunk-W5DTLHV4.js";import{readFileSync as p}from"fs";import{fileURLToPath as m}from"url";function i(){return s||(s=f()),s}var s,l=(e,t)=>i().transform(e,t),w=(e,t)=>i().parse(e,t),h=(e,t)=>i().convertToTSX(e,t);function f(){let e=new c,t=v(m(new URL("../astro.wasm",import.meta.url)),e.importObject);e.run(t);let o=globalThis["@astrojs/compiler"];return{transform:(n,a)=>{try{return o.transform(n,a||{})}catch(r){throw s=void 0,r}},parse:(n,a)=>{try{let r=o.parse(n,a||{});return{...r,ast:JSON.parse(r.ast)}}catch(r){throw s=void 0,r}},convertToTSX:(n,a)=>{try{let r=o.convertToTSX(n,a||{});return{...r,map:JSON.parse(r.map)}}catch(r){throw s=void 0,r}}}}function v(e,t){let o=p(e);return new WebAssembly.Instance(new WebAssembly.Module(o),t)}export{h as convertToTSX,w as parse,f as startRunningService,l as transform};

3
node_modules/@astrojs/compiler/dist/node/utils.cjs generated vendored Normal file
View File

@@ -0,0 +1,3 @@
"use strict";var m=Object.defineProperty;var d=Object.getOwnPropertyDescriptor;var p=Object.getOwnPropertyNames;var N=Object.prototype.hasOwnProperty;var u=(o,e)=>{for(var t in e)m(o,t,{get:e[t],enumerable:!0})},f=(o,e,t,a)=>{if(e&&typeof e=="object"||typeof e=="function")for(let r of p(e))!N.call(o,r)&&r!==t&&m(o,r,{get:()=>e[r],enumerable:!(a=d(e,r))||a.enumerable});return o};var y=o=>f(m({},"__esModule",{value:!0}),o);var v={};u(v,{is:()=>s,serialize:()=>k,walk:()=>h,walkAsync:()=>x});module.exports=y(v);function n(o){return e=>e.type===o}var s={parent(o){return Array.isArray(o.children)},literal(o){return typeof o.value=="string"},tag(o){return o.type==="element"||o.type==="custom-element"||o.type==="component"||o.type==="fragment"},whitespace(o){return o.type==="text"&&o.value.trim().length===0},root:n("root"),element:n("element"),customElement:n("custom-element"),component:n("component"),fragment:n("fragment"),expression:n("expression"),text:n("text"),doctype:n("doctype"),comment:n("comment"),frontmatter:n("frontmatter")},l=class{constructor(e){this.callback=e}async visit(e,t,a){if(await this.callback(e,t,a),s.parent(e)){let r=[];for(let i=0;i<e.children.length;i++){let c=e.children[i];r.push(this.callback(c,e,i))}await Promise.all(r)}}};function h(o,e){new l(e).visit(o)}function x(o,e){return new l(e).visit(o)}function g(o){let e="";for(let t of o.attributes)switch(e+=" ",t.kind){case"empty":{e+=`${t.name}`;break}case"expression":{e+=`${t.name}={${t.value}}`;break}case"quoted":{e+=`${t.name}=${t.raw}`;break}case"template-literal":{e+=`${t.name}=\`${t.value}\``;break}case"shorthand":{e+=`{${t.name}}`;break}case"spread":{e+=`{...${t.name}}`;break}}return e}function k(o,e={selfClose:!0}){let t="";function a(r){if(s.root(r))for(let i of r.children)a(i);else if(s.frontmatter(r))t+=`---${r.value}---
`;else if(s.comment(r))t+=`<!--${r.value}-->`;else if(s.expression(r)){t+="{";for(let i of r.children)a(i);t+="}"}else if(s.literal(r))t+=r.value;else if(s.tag(r))if(t+=`<${r.name}`,t+=g(r),r.children.length===0&&e.selfClose)t+=" />";else{t+=">";for(let i of r.children)a(i);t+=`</${r.name}>`}}return a(o),t}0&&(module.exports={is,serialize,walk,walkAsync});

29
node_modules/@astrojs/compiler/dist/node/utils.d.ts generated vendored Normal file
View File

@@ -0,0 +1,29 @@
// Type declarations for the AST utilities (Node build); identical surface
// to the browser build's utils.d.ts.
// Generated, vendored file — do not hand-edit.
import { Node, ParentNode, LiteralNode, TagLikeNode, TextNode, RootNode, ElementNode, CustomElementNode, ComponentNode, FragmentNode, ExpressionNode, DoctypeNode, CommentNode, FrontmatterNode } from '../shared/ast.js';
// Visitor invoked for every node; may be async (see walkAsync).
type Visitor = (node: Node, parent?: ParentNode, index?: number) => void | Promise<void>;
// Collection of type-guard predicates for narrowing AST nodes.
declare const is: {
parent(node: Node): node is ParentNode;
literal(node: Node): node is LiteralNode;
// true for element | custom-element | component | fragment nodes
tag(node: Node): node is TagLikeNode;
// true for text nodes whose value is only whitespace
whitespace(node: Node): node is TextNode;
root: (node: Node) => node is RootNode;
element: (node: Node) => node is ElementNode;
customElement: (node: Node) => node is CustomElementNode;
component: (node: Node) => node is ComponentNode;
fragment: (node: Node) => node is FragmentNode;
expression: (node: Node) => node is ExpressionNode;
text: (node: Node) => node is TextNode;
doctype: (node: Node) => node is DoctypeNode;
comment: (node: Node) => node is CommentNode;
frontmatter: (node: Node) => node is FrontmatterNode;
};
// Depth-first traversal; walkAsync resolves when all visits settle.
declare function walk(node: ParentNode, callback: Visitor): void;
declare function walkAsync(node: ParentNode, callback: Visitor): Promise<void>;
interface SerializeOptions {
// When true, childless tags emit as self-closing ("<br />").
selfClose: boolean;
}
/** @deprecated Please use `SerializeOptions` */
type SerializeOtions = SerializeOptions;
// Renders an AST subtree back to .astro source text.
declare function serialize(root: Node, opts?: SerializeOptions): string;
export { SerializeOptions, SerializeOtions, Visitor, is, serialize, walk, walkAsync };

3
node_modules/@astrojs/compiler/dist/node/utils.js generated vendored Normal file
View File

@@ -0,0 +1,3 @@
// Generated (esbuild-minified) ESM build of the AST utilities for Node.
// NOTE(review): like node/utils.cjs, the `spread` case serializes `e.name`,
// diverging from the browser builds which use the attribute value —
// presumably version skew between bundles; confirm against upstream.
// Vendored build output — do not hand-edit.
function n(o){return t=>t.type===o}var a={parent(o){return Array.isArray(o.children)},literal(o){return typeof o.value=="string"},tag(o){return o.type==="element"||o.type==="custom-element"||o.type==="component"||o.type==="fragment"},whitespace(o){return o.type==="text"&&o.value.trim().length===0},root:n("root"),element:n("element"),customElement:n("custom-element"),component:n("component"),fragment:n("fragment"),expression:n("expression"),text:n("text"),doctype:n("doctype"),comment:n("comment"),frontmatter:n("frontmatter")},l=class{constructor(t){this.callback=t}async visit(t,e,s){if(await this.callback(t,e,s),a.parent(t)){let r=[];for(let i=0;i<t.children.length;i++){let m=t.children[i];r.push(this.callback(m,t,i))}await Promise.all(r)}}};function N(o,t){new l(t).visit(o)}function u(o,t){return new l(t).visit(o)}function c(o){let t="";for(let e of o.attributes)switch(t+=" ",e.kind){case"empty":{t+=`${e.name}`;break}case"expression":{t+=`${e.name}={${e.value}}`;break}case"quoted":{t+=`${e.name}=${e.raw}`;break}case"template-literal":{t+=`${e.name}=\`${e.value}\``;break}case"shorthand":{t+=`{${e.name}}`;break}case"spread":{t+=`{...${e.name}}`;break}}return t}function f(o,t={selfClose:!0}){let e="";function s(r){if(a.root(r))for(let i of r.children)s(i);else if(a.frontmatter(r))e+=`---${r.value}---
`;else if(a.comment(r))e+=`<!--${r.value}-->`;else if(a.expression(r)){e+="{";for(let i of r.children)s(i);e+="}"}else if(a.literal(r))e+=r.value;else if(a.tag(r))if(e+=`<${r.name}`,e+=c(r),r.children.length===0&&t.selfClose)e+=" />";else{e+=">";for(let i of r.children)s(i);e+=`</${r.name}>`}}return s(o),e}export{a as is,f as serialize,N as walk,u as walkAsync};

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,37 @@
// Type declarations for Go's WebAssembly runtime shim (wasm_exec.js), Node
// copy; identical surface to the browser declaration earlier in this diff.
// NOTE(review): the diff listing omits this file's name; presumably it is
// dist/node/wasm_exec.d.ts — verify against the repository tree.
// Generated, vendored file — do not hand-edit.
declare class Go {
// Import object passed to WebAssembly.instantiate. Each `gojs` entry is a
// host function the Go runtime invokes with a stack pointer (sp).
importObject: {
gojs: {
'runtime.wasmExit': (sp: any) => void;
'runtime.wasmWrite': (sp: any) => void;
'runtime.resetMemoryDataView': (sp: any) => void;
'runtime.nanotime1': (sp: any) => void;
'runtime.walltime': (sp: any) => void;
'runtime.scheduleTimeoutEvent': (sp: any) => void;
'runtime.clearTimeoutEvent': (sp: any) => void;
'runtime.getRandomData': (sp: any) => void;
'syscall/js.finalizeRef': (sp: any) => void;
'syscall/js.stringVal': (sp: any) => void;
'syscall/js.valueGet': (sp: any) => void;
'syscall/js.valueSet': (sp: any) => void;
'syscall/js.valueDelete': (sp: any) => void;
'syscall/js.valueIndex': (sp: any) => void;
'syscall/js.valueSetIndex': (sp: any) => void;
'syscall/js.valueCall': (sp: any) => void;
'syscall/js.valueInvoke': (sp: any) => void;
'syscall/js.valueNew': (sp: any) => void;
'syscall/js.valueLength': (sp: any) => void;
'syscall/js.valuePrepareString': (sp: any) => void;
'syscall/js.valueLoadString': (sp: any) => void;
'syscall/js.valueInstanceOf': (sp: any) => void;
'syscall/js.copyBytesToGo': (sp: any) => void;
'syscall/js.copyBytesToJS': (sp: any) => void;
debug: (value: any) => void;
};
};
constructor();
// Starts the Go program inside the given WebAssembly instance.
run(instance: any): Promise<void>;
private _resume;
private _makeFuncWrapper;
}
export { Go as default };

View File

@@ -0,0 +1 @@
import{a}from"../chunk-W5DTLHV4.js";export{a as default};

1
node_modules/@astrojs/compiler/dist/shared/ast.cjs generated vendored Normal file
View File

@@ -0,0 +1 @@
"use strict";var r=Object.defineProperty;var a=Object.getOwnPropertyDescriptor;var i=Object.getOwnPropertyNames;var N=Object.prototype.hasOwnProperty;var p=(t,e,d,n)=>{if(e&&typeof e=="object"||typeof e=="function")for(let o of i(e))!N.call(t,o)&&o!==d&&r(t,o,{get:()=>e[o],enumerable:!(n=a(e,o))||n.enumerable});return t};var s=t=>p(r({},"__esModule",{value:!0}),t);var m={};module.exports=s(m);

74
node_modules/@astrojs/compiler/dist/shared/ast.d.ts generated vendored Normal file
View File

@@ -0,0 +1,74 @@
// Shared AST type declarations for @astrojs/compiler's parse output.
// Generated, vendored file — do not hand-edit.
// Nodes that can contain children.
type ParentNode = RootNode | ElementNode | ComponentNode | CustomElementNode | FragmentNode | ExpressionNode;
// Leaf nodes carrying a raw string value.
type LiteralNode = TextNode | DoctypeNode | CommentNode | FrontmatterNode;
// Union of every node kind in the AST.
type Node = RootNode | ElementNode | ComponentNode | CustomElementNode | FragmentNode | ExpressionNode | TextNode | FrontmatterNode | DoctypeNode | CommentNode;
// Source span; `end` may be absent when position data is incomplete.
interface Position {
start: Point;
end?: Point;
}
interface Point {
/** 1-based line number */
line: number;
/** 1-based column number, per-line */
column: number;
/** 0-based byte offset */
offset: number;
}
// Common shape of every node: a `type` discriminant plus optional position.
interface BaseNode {
type: string;
position?: Position;
}
interface ParentLikeNode extends BaseNode {
type: 'element' | 'component' | 'custom-element' | 'fragment' | 'expression' | 'root';
children: Node[];
}
interface ValueNode extends BaseNode {
value: string;
}
interface RootNode extends ParentLikeNode {
type: 'root';
}
// An attribute on a tag-like node. `kind` discriminates the syntax form
// (name="x", name={expr}, {...spread}, {shorthand}, name=`tpl`, or bare).
interface AttributeNode extends BaseNode {
type: 'attribute';
kind: 'quoted' | 'empty' | 'expression' | 'spread' | 'shorthand' | 'template-literal';
name: string;
value: string;
// raw source text, including quotes, for quoted attributes
raw?: string;
}
interface TextNode extends ValueNode {
type: 'text';
}
interface ElementNode extends ParentLikeNode {
type: 'element';
name: string;
attributes: AttributeNode[];
}
interface FragmentNode extends ParentLikeNode {
type: 'fragment';
name: string;
attributes: AttributeNode[];
}
interface ComponentNode extends ParentLikeNode {
type: 'component';
name: string;
attributes: AttributeNode[];
}
interface CustomElementNode extends ParentLikeNode {
type: 'custom-element';
name: string;
attributes: AttributeNode[];
}
// Any named, attribute-bearing node.
type TagLikeNode = ElementNode | FragmentNode | ComponentNode | CustomElementNode;
interface DoctypeNode extends ValueNode {
type: 'doctype';
}
interface CommentNode extends ValueNode {
type: 'comment';
}
// The `--- ... ---` block at the top of a .astro file.
interface FrontmatterNode extends ValueNode {
type: 'frontmatter';
}
// A `{ ... }` expression; children mix text (code) and nested tags.
interface ExpressionNode extends ParentLikeNode {
type: 'expression';
}
export { AttributeNode, BaseNode, CommentNode, ComponentNode, CustomElementNode, DoctypeNode, ElementNode, ExpressionNode, FragmentNode, FrontmatterNode, LiteralNode, Node, ParentLikeNode, ParentNode, Point, Position, RootNode, TagLikeNode, TextNode, ValueNode };

0
node_modules/@astrojs/compiler/dist/shared/ast.js generated vendored Normal file
View File

View File

@@ -0,0 +1 @@
"use strict";var I=Object.defineProperty;var M=Object.getOwnPropertyDescriptor;var G=Object.getOwnPropertyNames;var S=Object.prototype.hasOwnProperty;var U=(E,N)=>{for(var _ in N)I(E,_,{get:N[_],enumerable:!0})},H=(E,N,_,A)=>{if(N&&typeof N=="object"||typeof N=="function")for(let T of G(N))!S.call(E,T)&&T!==_&&I(E,T,{get:()=>N[T],enumerable:!(A=M(N,T))||A.enumerable});return E};var W=E=>H(I({},"__esModule",{value:!0}),E);var P={};U(P,{DiagnosticCode:()=>O});module.exports=W(P);var O=(R=>(R[R.ERROR=1e3]="ERROR",R[R.ERROR_UNTERMINATED_JS_COMMENT=1001]="ERROR_UNTERMINATED_JS_COMMENT",R[R.ERROR_FRAGMENT_SHORTHAND_ATTRS=1002]="ERROR_FRAGMENT_SHORTHAND_ATTRS",R[R.ERROR_UNMATCHED_IMPORT=1003]="ERROR_UNMATCHED_IMPORT",R[R.ERROR_UNSUPPORTED_SLOT_ATTRIBUTE=1004]="ERROR_UNSUPPORTED_SLOT_ATTRIBUTE",R[R.WARNING=2e3]="WARNING",R[R.WARNING_UNTERMINATED_HTML_COMMENT=2001]="WARNING_UNTERMINATED_HTML_COMMENT",R[R.WARNING_UNCLOSED_HTML_TAG=2002]="WARNING_UNCLOSED_HTML_TAG",R[R.WARNING_DEPRECATED_DIRECTIVE=2003]="WARNING_DEPRECATED_DIRECTIVE",R[R.WARNING_IGNORED_DIRECTIVE=2004]="WARNING_IGNORED_DIRECTIVE",R[R.WARNING_UNSUPPORTED_EXPRESSION=2005]="WARNING_UNSUPPORTED_EXPRESSION",R[R.WARNING_SET_WITH_CHILDREN=2006]="WARNING_SET_WITH_CHILDREN",R[R.INFO=3e3]="INFO",R[R.HINT=4e3]="HINT",R))(O||{});0&&(module.exports={DiagnosticCode});

View File

@@ -0,0 +1,18 @@
// Diagnostic codes emitted by the compiler, grouped by severity band:
// 1000-1999 errors, 2000-2999 warnings, 3000 info, 4000 hint.
// Generated, vendored file — do not hand-edit.
declare enum DiagnosticCode {
ERROR = 1000,
ERROR_UNTERMINATED_JS_COMMENT = 1001,
ERROR_FRAGMENT_SHORTHAND_ATTRS = 1002,
ERROR_UNMATCHED_IMPORT = 1003,
ERROR_UNSUPPORTED_SLOT_ATTRIBUTE = 1004,
WARNING = 2000,
WARNING_UNTERMINATED_HTML_COMMENT = 2001,
WARNING_UNCLOSED_HTML_TAG = 2002,
WARNING_DEPRECATED_DIRECTIVE = 2003,
WARNING_IGNORED_DIRECTIVE = 2004,
WARNING_UNSUPPORTED_EXPRESSION = 2005,
WARNING_SET_WITH_CHILDREN = 2006,
INFO = 3000,
HINT = 4000
}
export { DiagnosticCode };

View File

@@ -0,0 +1 @@
var N=(R=>(R[R.ERROR=1e3]="ERROR",R[R.ERROR_UNTERMINATED_JS_COMMENT=1001]="ERROR_UNTERMINATED_JS_COMMENT",R[R.ERROR_FRAGMENT_SHORTHAND_ATTRS=1002]="ERROR_FRAGMENT_SHORTHAND_ATTRS",R[R.ERROR_UNMATCHED_IMPORT=1003]="ERROR_UNMATCHED_IMPORT",R[R.ERROR_UNSUPPORTED_SLOT_ATTRIBUTE=1004]="ERROR_UNSUPPORTED_SLOT_ATTRIBUTE",R[R.WARNING=2e3]="WARNING",R[R.WARNING_UNTERMINATED_HTML_COMMENT=2001]="WARNING_UNTERMINATED_HTML_COMMENT",R[R.WARNING_UNCLOSED_HTML_TAG=2002]="WARNING_UNCLOSED_HTML_TAG",R[R.WARNING_DEPRECATED_DIRECTIVE=2003]="WARNING_DEPRECATED_DIRECTIVE",R[R.WARNING_IGNORED_DIRECTIVE=2004]="WARNING_IGNORED_DIRECTIVE",R[R.WARNING_UNSUPPORTED_EXPRESSION=2005]="WARNING_UNSUPPORTED_EXPRESSION",R[R.WARNING_SET_WITH_CHILDREN=2006]="WARNING_SET_WITH_CHILDREN",R[R.INFO=3e3]="INFO",R[R.HINT=4e3]="HINT",R))(N||{});export{N as DiagnosticCode};

1
node_modules/@astrojs/compiler/dist/shared/types.cjs generated vendored Normal file
View File

@@ -0,0 +1 @@
"use strict";var o=Object.defineProperty;var a=Object.getOwnPropertyDescriptor;var p=Object.getOwnPropertyNames;var c=Object.prototype.hasOwnProperty;var l=(r,e)=>{for(var n in e)o(r,n,{get:e[n],enumerable:!0})},g=(r,e,n,s)=>{if(e&&typeof e=="object"||typeof e=="function")for(let t of p(e))!c.call(r,t)&&t!==n&&o(r,t,{get:()=>e[t],enumerable:!(s=a(e,t))||s.enumerable});return r};var m=r=>g(o({},"__esModule",{value:!0}),r);var d={};l(d,{DiagnosticSeverity:()=>i});module.exports=m(d);var i=(t=>(t[t.Error=1]="Error",t[t.Warning=2]="Warning",t[t.Information=3]="Information",t[t.Hint=4]="Hint",t))(i||{});0&&(module.exports={DiagnosticSeverity});

153
node_modules/@astrojs/compiler/dist/shared/types.d.ts generated vendored Normal file
View File

@@ -0,0 +1,153 @@
// Public type surface of @astrojs/compiler: options and results for the
// transform / parse / convertToTSX APIs, plus diagnostic shapes.
import { RootNode } from './ast.js';
export { AttributeNode, BaseNode, CommentNode, ComponentNode, CustomElementNode, DoctypeNode, ElementNode, ExpressionNode, FragmentNode, FrontmatterNode, LiteralNode, Node, ParentLikeNode, ParentNode, Point, Position, TagLikeNode, TextNode, ValueNode } from './ast.js';
import { DiagnosticCode } from './diagnostics.js';
/** Successful result returned by a user-supplied style preprocessor. */
interface PreprocessorResult {
    code: string;
    map?: string;
}
/** Failure result returned by a user-supplied style preprocessor. */
interface PreprocessorError {
    error: string;
}
/** Options accepted by `parse()`. */
interface ParseOptions {
    position?: boolean;
}
/** Severity of a diagnostic; a lower numeric value is more severe. */
declare enum DiagnosticSeverity {
    Error = 1,
    Warning = 2,
    Information = 3,
    Hint = 4
}
/** A single diagnostic produced while compiling an Astro file. */
interface DiagnosticMessage {
    severity: DiagnosticSeverity;
    code: DiagnosticCode;
    location: DiagnosticLocation;
    hint?: string;
    text: string;
}
/** 1-based source position (line/column) plus the span length of a diagnostic. */
interface DiagnosticLocation {
    file: string;
    line: number;
    column: number;
    length: number;
}
/** Options accepted by `transform()`. */
interface TransformOptions {
    internalURL?: string;
    filename?: string;
    normalizedFilename?: string;
    sourcemap?: boolean | 'inline' | 'external' | 'both';
    astroGlobalArgs?: string;
    compact?: boolean;
    resultScopedSlot?: boolean;
    scopedStyleStrategy?: 'where' | 'class' | 'attribute';
    /**
     * @deprecated "as" has been removed and no longer has any effect!
     */
    as?: 'document' | 'fragment';
    transitionsAnimationURL?: string;
    resolvePath?: (specifier: string) => Promise<string> | string;
    preprocessStyle?: (content: string, attrs: Record<string, string>) => null | Promise<PreprocessorResult | PreprocessorError>;
    annotateSourceFile?: boolean;
}
type ConvertToTSXOptions = Pick<TransformOptions, 'filename' | 'normalizedFilename' | 'sourcemap'> & {
    /** If set to true, script tags content will be included in the generated TSX
     * Scripts will be wrapped in an arrow function to be compatible with JSX's spec
     */
    includeScripts?: boolean;
    /** If set to true, style tags content will be included in the generated TSX
     * Styles will be wrapped in a template literal to be compatible with JSX's spec
     */
    includeStyles?: boolean;
};
/** A hoisted `<script>` — either a reference to an external file or inline code with its map. */
type HoistedScript = {
    type: string;
} & ({
    type: 'external';
    src: string;
} | {
    type: 'inline';
    code: string;
    map: string;
});
/** A component that requires client-side hydration, with its import metadata. */
interface HydratedComponent {
    exportName: string;
    localName: string;
    specifier: string;
    resolvedPath: string;
}
/** Result of `transform()`: generated code, source map, and extracted assets/metadata. */
interface TransformResult {
    code: string;
    map: string;
    scope: string;
    styleError: string[];
    diagnostics: DiagnosticMessage[];
    css: string[];
    scripts: HoistedScript[];
    hydratedComponents: HydratedComponent[];
    clientOnlyComponents: HydratedComponent[];
    serverComponents: HydratedComponent[];
    containsHead: boolean;
    propagation: boolean;
}
/** Standard source map (v3) object shape. */
interface SourceMap {
    file: string;
    mappings: string;
    names: string[];
    sources: string[];
    sourcesContent: string[];
    version: number;
}
/**
 * Represents a location in a TSX file.
 * Both the `start` and `end` properties are 0-based, and are based off utf-16 code units. (i.e. JavaScript's `String.prototype.length`)
 */
interface TSXLocation {
    start: number;
    end: number;
}
/** A tag's raw content and its location within the generated TSX. */
interface TSXExtractedTag {
    position: TSXLocation;
    content: string;
}
interface TSXExtractedScript extends TSXExtractedTag {
    type: 'processed-module' | 'module' | 'inline' | 'event-attribute' | 'json' | 'raw' | 'unknown';
}
interface TSXExtractedStyle extends TSXExtractedTag {
    type: 'tag' | 'style-attribute';
    lang: 'css' | 'scss' | 'sass' | 'less' | 'stylus' | 'styl' | 'postcss' | 'pcss' | 'unknown' | (string & {});
}
/** Result of `convertToTSX()`: TSX code, map, diagnostics, and notable ranges. */
interface TSXResult {
    code: string;
    map: SourceMap;
    diagnostics: DiagnosticMessage[];
    metaRanges: {
        frontmatter: TSXLocation;
        body: TSXLocation;
        scripts?: TSXExtractedScript[];
        styles?: TSXExtractedStyle[];
    };
}
/** Result of `parse()`: the AST root plus any diagnostics. */
interface ParseResult {
    ast: RootNode;
    diagnostics: DiagnosticMessage[];
}
declare function transform(input: string, options?: TransformOptions): Promise<TransformResult>;
declare function parse(input: string, options?: ParseOptions): Promise<ParseResult>;
declare function convertToTSX(input: string, options?: ConvertToTSXOptions): Promise<TSXResult>;
declare function initialize(options: InitializeOptions): Promise<void>;
/**
 * When calling the core compiler APIs, e.g. `transform`, `parse`, etc, they
 * would automatically instantiate a WASM instance to process the input. When
 * done, you can call this to manually teardown the WASM instance.
 *
 * If the APIs are called again, they will automatically instantiate a new WASM
 * instance. In browsers, you have to call `initialize()` again before using the APIs.
 *
 * Note: Calling teardown is optional and exists mostly as an optimization only.
 */
declare function teardown(): void;
/** Options for `initialize()`; `wasmURL` overrides where the compiler WASM binary is fetched from. */
interface InitializeOptions {
    wasmURL?: string;
}
export { ConvertToTSXOptions, DiagnosticLocation, DiagnosticMessage, DiagnosticSeverity, HoistedScript, HydratedComponent, InitializeOptions, ParseOptions, ParseResult, PreprocessorError, PreprocessorResult, RootNode, SourceMap, TSXExtractedScript, TSXExtractedStyle, TSXExtractedTag, TSXLocation, TSXResult, TransformOptions, TransformResult, convertToTSX, initialize, parse, teardown, transform };

1
node_modules/@astrojs/compiler/dist/shared/types.js generated vendored Normal file
View File

@@ -0,0 +1 @@
// Generated (minified) ESM bundle of shared/types — exports the DiagnosticSeverity enum
// (Error=1, Warning=2, Information=3, Hint=4). Do not hand-edit.
var e=(t=>(t[t.Error=1]="Error",t[t.Warning=2]="Warning",t[t.Information=3]="Information",t[t.Hint=4]="Hint",t))(e||{});export{e as DiagnosticSeverity};

58
node_modules/@astrojs/compiler/package.json generated vendored Normal file
View File

@@ -0,0 +1,58 @@
{
"name": "@astrojs/compiler",
"author": "withastro",
"license": "MIT",
"type": "module",
"bugs": "https://github.com/withastro/compiler/issues",
"homepage": "https://astro.build",
"version": "3.0.1",
"main": "./dist/node/index.js",
"types": "./dist/shared/types.d.ts",
"repository": {
"type": "git",
"url": "https://github.com/withastro/compiler.git"
},
"files": [
"dist",
"types.d.ts",
"utils.d.ts",
"sync.d.ts"
],
"exports": {
".": {
"types": "./dist/node/index.d.ts",
"browser": "./dist/browser/index.js",
"import": "./dist/node/index.js",
"require": "./dist/node/index.cjs",
"default": "./dist/browser/index.js"
},
"./sync": {
"types": "./dist/node/sync.d.ts",
"import": "./dist/node/sync.js",
"require": "./dist/node/sync.cjs",
"default": "./dist/node/sync.js"
},
"./utils": {
"types": "./dist/node/utils.d.ts",
"browser": "./dist/browser/utils.js",
"import": "./dist/node/utils.js",
"require": "./dist/node/utils.cjs",
"default": "./dist/browser/utils.js"
},
"./astro.wasm": "./dist/astro.wasm",
"./types": "./dist/shared/types.d.ts",
"./package.json": "./package.json"
},
"devDependencies": {
"@jridgewell/trace-mapping": "^0.3.16",
"@types/node": "^18.15.11",
"@types/sass": "^1.43.1",
"acorn": "^8.8.1",
"esbuild": "^0.17.17",
"tsup": "^6.7.0",
"typescript": "~5.0.2"
},
"scripts": {
"build": "tsup"
}
}

1
node_modules/@astrojs/compiler/sync.d.ts generated vendored Normal file
View File

@@ -0,0 +1 @@
// Root-level shim so `@astrojs/compiler/sync` resolves types from the built dist output.
export * from './dist/node/sync.js';

1
node_modules/@astrojs/compiler/types.d.ts generated vendored Normal file
View File

@@ -0,0 +1 @@
// Root-level shim re-exporting only the *types* of `@astrojs/compiler/types` from dist.
export type * from './dist/shared/types.js';

1
node_modules/@astrojs/compiler/utils.d.ts generated vendored Normal file
View File

@@ -0,0 +1 @@
// Root-level shim so `@astrojs/compiler/utils` resolves types from the built dist output.
export * from './dist/node/utils.js';

59
node_modules/@astrojs/db/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,59 @@
MIT License
Copyright (c) 2021 Fred K. Schott
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
This license applies to parts of the `packages/create-astro` and `packages/astro` subdirectories originating from the https://github.com/sveltejs/kit repository:
Copyright (c) 2020 [these people](https://github.com/sveltejs/kit/graphs/contributors)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
"""
This license applies to parts of the `packages/create-astro` and `packages/astro` subdirectories originating from the https://github.com/vitejs/vite repository:
MIT License
Copyright (c) 2019-present, Yuxi (Evan) You and Vite contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""

38
node_modules/@astrojs/db/README.md generated vendored Normal file
View File

@@ -0,0 +1,38 @@
# @astrojs/db (experimental) 💿
This **[Astro integration][astro-integration]** enables the usage of [SQLite](https://www.sqlite.org/) in Astro Projects.
## Documentation
Read the [`@astrojs/db` docs][docs]
## Support
- Get help in the [Astro Discord][discord]. Post questions in our `#support` forum, or visit our dedicated `#dev` channel to discuss current development and more!
- Check our [Astro Integration Documentation][astro-integration] for more on integrations.
- Submit bug reports and feature requests as [GitHub issues][issues].
## Contributing
This package is maintained by Astro's Core team. You're welcome to submit an issue or PR! These links will help you get started:
- [Contributor Manual][contributing]
- [Code of Conduct][coc]
- [Community Guide][community]
## License
MIT
Copyright (c) 2023–present [Astro][astro]
[astro]: https://astro.build/
[docs]: https://docs.astro.build/en/guides/integrations-guide/db/
[contributing]: https://github.com/withastro/astro/blob/main/CONTRIBUTING.md
[coc]: https://github.com/withastro/.github/blob/main/CODE_OF_CONDUCT.md
[community]: https://github.com/withastro/.github/blob/main/COMMUNITY_GUIDE.md
[discord]: https://astro.build/chat/
[issues]: https://github.com/withastro/astro/issues
[astro-integration]: https://docs.astro.build/en/guides/integrations-guide/

View File

@@ -0,0 +1,2 @@
// Shared Zod v4 error map exported for Astro DB schema validation.
// NOTE(review): exact usage site not visible here — presumably wired into the
// config schemas in core/schemas; confirm against callers.
import type { $ZodErrorMap } from 'zod/v4/core';
export declare const errorMap: $ZodErrorMap;

1444
node_modules/@astrojs/db/dist/_internal/core/schemas.d.ts generated vendored Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,60 @@
// Core type aliases for Astro DB: column/table/config shapes derived from the
// Zod schemas in ./schemas.js (z.input = author-facing, z.output = resolved).
import type * as z from 'zod/v4';
import type { booleanColumnSchema, columnSchema, columnsSchema, dateColumnSchema, dbConfigSchema, indexSchema, jsonColumnSchema, MaybeArray, numberColumnOptsSchema, numberColumnSchema, referenceableColumnSchema, resolvedIndexSchema, tableSchema, textColumnOptsSchema, textColumnSchema } from './schemas.js';
export type ResolvedIndexes = z.output<typeof dbConfigSchema>['tables'][string]['indexes'];
export type BooleanColumn = z.infer<typeof booleanColumnSchema>;
export type BooleanColumnInput = z.input<typeof booleanColumnSchema>;
export type NumberColumn = z.infer<typeof numberColumnSchema>;
export type NumberColumnInput = z.input<typeof numberColumnSchema>;
export type TextColumn = z.infer<typeof textColumnSchema>;
export type TextColumnInput = z.input<typeof textColumnSchema>;
export type DateColumn = z.infer<typeof dateColumnSchema>;
export type DateColumnInput = z.input<typeof dateColumnSchema>;
export type JsonColumn = z.infer<typeof jsonColumnSchema>;
export type JsonColumnInput = z.input<typeof jsonColumnSchema>;
export type ColumnType = BooleanColumn['type'] | NumberColumn['type'] | TextColumn['type'] | DateColumn['type'] | JsonColumn['type'];
export type DBColumn = z.infer<typeof columnSchema>;
export type DBColumnInput = DateColumnInput | BooleanColumnInput | NumberColumnInput | TextColumnInput | JsonColumnInput;
export type DBColumns = z.infer<typeof columnsSchema>;
export type DBTable = z.infer<typeof tableSchema>;
export type DBTables = Record<string, DBTable>;
export type ResolvedDBTables = z.output<typeof dbConfigSchema>['tables'];
export type ResolvedDBTable = z.output<typeof dbConfigSchema>['tables'][string];
/** A versioned snapshot of the full resolved table schema (used for migration diffs). */
export type DBSnapshot = {
    schema: Record<string, ResolvedDBTable>;
    version: string;
};
export type DBConfigInput = z.input<typeof dbConfigSchema>;
export type DBConfig = z.infer<typeof dbConfigSchema>;
export type ColumnsConfig = z.input<typeof tableSchema>['columns'];
export type OutputColumnsConfig = z.output<typeof tableSchema>['columns'];
/** Author-facing table definition passed to `defineTable()`. */
export interface TableConfig<TColumns extends ColumnsConfig = ColumnsConfig> extends Pick<z.input<typeof tableSchema>, 'columns' | 'indexes' | 'foreignKeys'> {
    columns: TColumns;
    foreignKeys?: Array<{
        columns: MaybeArray<Extract<keyof TColumns, string>>;
        references: () => MaybeArray<z.input<typeof referenceableColumnSchema>>;
    }>;
    indexes?: Array<IndexConfig<TColumns>> | Record<string, LegacyIndexConfig<TColumns>>;
    deprecated?: boolean;
}
/** Index definition whose `on` keys are constrained to the table's own column names. */
interface IndexConfig<TColumns extends ColumnsConfig> extends z.input<typeof indexSchema> {
    on: MaybeArray<Extract<keyof TColumns, string>>;
}
/** @deprecated */
interface LegacyIndexConfig<TColumns extends ColumnsConfig> extends z.input<typeof resolvedIndexSchema> {
    on: MaybeArray<Extract<keyof TColumns, string>>;
}
export type NumberColumnOpts = z.input<typeof numberColumnOptsSchema>;
export type TextColumnOpts = z.input<typeof textColumnOptsSchema>;
declare global {
    namespace Astro {
        interface IntegrationHooks {
            'astro:db:setup'?: (options: {
                extendDb: (options: {
                    configEntrypoint?: URL | string;
                    seedEntrypoint?: URL | string;
                }) => void;
            }) => void | Promise<void>;
        }
    }
}
export {};

View File

@@ -0,0 +1,19 @@
// Declarations for core CLI/integration helper utilities.
import type { AstroConfig, AstroIntegration } from 'astro';
import type { Arguments } from 'yargs-parser';
import './types.js';
export type VitePlugin = Required<AstroConfig['vite']>['plugins'][number];
export declare function getAstroEnv(envMode?: string): Record<`ASTRO_${string}`, string>;
/** Connection info for the remote libSQL database (URL + auth token). */
export type RemoteDatabaseInfo = {
    url: string;
    token: string;
};
export declare function getRemoteDatabaseInfo(): RemoteDatabaseInfo;
// Overloads: the return is only `string | undefined` when the env token itself may be undefined.
export declare function resolveDbAppToken(flags: Arguments, envToken: string): string;
export declare function resolveDbAppToken(flags: Arguments, envToken: string | undefined): string | undefined;
export declare function getDbDirectoryUrl(root: URL | string): URL;
export declare function defineDbIntegration(integration: AstroIntegration): AstroIntegration;
/**
 * Map an object's values to a new set of values
 * while preserving types.
 */
export declare function mapObject<T, U = T>(item: Record<string, T>, callback: (key: string, value: T) => U): Record<string, U>;

View File

@@ -0,0 +1,92 @@
// Runtime typings: model Astro DB column definitions as Drizzle SQLite columns so that
// `db.select()` etc. infer precise row types from the user's table config.
import type { ColumnBaseConfig, ColumnDataType } from 'drizzle-orm';
import type { SQLiteColumn, SQLiteTableWithColumns } from 'drizzle-orm/sqlite-core';
import type { ColumnsConfig, DBColumn, OutputColumnsConfig } from '../core/types.js';
/** Per-column metadata computed by `Table` below and injected into each Astro* column type. */
type GeneratedConfig<T extends ColumnDataType = ColumnDataType> = Pick<ColumnBaseConfig<T, string>, 'name' | 'tableName' | 'notNull' | 'hasDefault' | 'hasRuntimeDefault' | 'isPrimaryKey'>;
/** `column.text()` as a Drizzle column; `E` carries enum literals when declared. */
type AstroText<T extends GeneratedConfig<'string'>, E extends readonly [string, ...string[]] | string> = SQLiteColumn<T & {
    data: E extends readonly (infer U)[] ? U : string;
    dataType: 'string';
    columnType: 'SQLiteText';
    driverParam: string;
    enumValues: E extends [string, ...string[]] ? E : never;
    baseColumn: never;
    isAutoincrement: boolean;
    identity: undefined;
    generated: undefined;
}>;
/** `column.date()` as a Drizzle custom column with `Date` values. */
type AstroDate<T extends GeneratedConfig<'custom'>> = SQLiteColumn<T & {
    data: Date;
    dataType: 'custom';
    columnType: 'SQLiteCustomColumn';
    driverParam: string;
    enumValues: never;
    baseColumn: never;
    isAutoincrement: boolean;
    identity: undefined;
    generated: undefined;
}>;
/** `column.boolean()` as a Drizzle boolean column (stored as a number at the driver level). */
type AstroBoolean<T extends GeneratedConfig<'boolean'>> = SQLiteColumn<T & {
    data: boolean;
    dataType: 'boolean';
    columnType: 'SQLiteBoolean';
    driverParam: number;
    enumValues: never;
    baseColumn: never;
    isAutoincrement: boolean;
    identity: undefined;
    generated: undefined;
}>;
/** `column.number()` as a Drizzle integer column. */
type AstroNumber<T extends GeneratedConfig<'number'>> = SQLiteColumn<T & {
    data: number;
    dataType: 'number';
    columnType: 'SQLiteInteger';
    driverParam: number;
    enumValues: never;
    baseColumn: never;
    isAutoincrement: boolean;
    identity: undefined;
    generated: undefined;
}>;
/** `column.json()` as a Drizzle custom column with `unknown` values. */
type AstroJson<T extends GeneratedConfig<'custom'>> = SQLiteColumn<T & {
    data: unknown;
    dataType: 'custom';
    columnType: 'SQLiteCustomColumn';
    driverParam: string;
    enumValues: never;
    baseColumn: never;
    isAutoincrement: boolean;
    identity: undefined;
    generated: undefined;
}>;
/** Dispatch an Astro DB column `type` tag to its concrete Drizzle column type. */
type Column<T extends DBColumn['type'], E extends readonly [string, ...string[]] | string, S extends GeneratedConfig> = T extends 'boolean' ? AstroBoolean<S> : T extends 'number' ? AstroNumber<S> : T extends 'text' ? AstroText<S, E> : T extends 'date' ? AstroDate<S> : T extends 'json' ? AstroJson<S> : never;
/** Full typed Drizzle table derived from a table name and its columns config. */
export type Table<TTableName extends string, TColumns extends OutputColumnsConfig | ColumnsConfig> = SQLiteTableWithColumns<{
    name: TTableName;
    schema: undefined;
    dialect: 'sqlite';
    columns: {
        [K in Extract<keyof TColumns, string>]: Column<TColumns[K]['type'], TColumns[K]['schema'] extends {
            enum: infer E;
        } ? E extends readonly [string, ...string[]] ? E : string : string, {
            tableName: TTableName;
            name: K;
            isPrimaryKey: TColumns[K]['schema'] extends {
                primaryKey: true;
            } ? true : false;
            hasDefault: TColumns[K]['schema'] extends {
                default: NonNullable<unknown>;
            } ? true : TColumns[K]['schema'] extends {
                primaryKey: true;
            } ? true : false;
            hasRuntimeDefault: TColumns[K]['schema'] extends {
                default: NonNullable<unknown>;
            } ? true : false;
            notNull: TColumns[K]['schema']['optional'] extends true ? false : true;
        }>;
    };
}>;
export declare const SERIALIZED_SQL_KEY = "__serializedSQL";
/** Marker wrapper for SQL that has been serialized to a plain string. */
export type SerializedSQL = {
    [SERIALIZED_SQL_KEY]: true;
    sql: string;
};
export declare function isSerializedSQL(value: any): value is SerializedSQL;
export {};

View File

@@ -0,0 +1,9 @@
// Runtime error/utility declarations shared by the CLI and the integration.
import { LibsqlError } from '@libsql/client';
import { AstroError } from 'astro/errors';
import type { DBColumn } from '../core/types.js';
export declare function hasPrimaryKey(column: DBColumn): boolean;
/** Astro-branded error class for database failures. */
export declare class AstroDbError extends AstroError {
    name: string;
}
export declare function isDbError(err: unknown): err is LibsqlError;
export declare function pathToFileURL(path: string): URL;

View File

@@ -0,0 +1,48 @@
// Author-facing config API: `column.*` builders plus defineTable/defineDb and
// re-exported drizzle-orm query operators.
import type { BooleanColumnInput, ColumnsConfig, DateColumnInput, DBConfigInput, JsonColumnInput, NumberColumnOpts, TableConfig, TextColumnOpts } from '../core/types.js';
/** Builders for each supported column type; `schema` carries the author's options through the type system. */
export declare const column: {
    number: <T extends NumberColumnOpts>(opts?: T) => {
        type: "number";
        /**
         * @internal
         */
        schema: T;
    };
    boolean: <T extends BooleanColumnInput["schema"]>(opts?: T) => {
        type: "boolean";
        /**
         * @internal
         */
        schema: T;
    };
    text: <T extends TextColumnOpts, const E extends T["enum"] extends readonly [string, ...string[]] ? Omit<T, "enum"> & T["enum"] : T>(opts?: E) => {
        type: "text";
        /**
         * @internal
         */
        schema: E;
    };
    date<T extends DateColumnInput["schema"]>(opts?: T): {
        type: "date";
        /**
         * @internal
         */
        schema: T;
    };
    json<T extends JsonColumnInput["schema"]>(opts?: T): {
        type: "json";
        /**
         * @internal
         */
        schema: T;
    };
};
export declare function defineTable<TColumns extends ColumnsConfig>(userConfig: TableConfig<TColumns>): TableConfig<TColumns>;
export declare function defineDb(userConfig: DBConfigInput): {
    tables?: unknown;
};
// SQL constants usable as column defaults.
export declare const NOW: import("drizzle-orm").SQL<unknown>;
export declare const TRUE: import("drizzle-orm").SQL<unknown>;
export declare const FALSE: import("drizzle-orm").SQL<unknown>;
export { and, asc, avg, avgDistinct, between, count, countDistinct, desc, eq, exists, gt, gte, ilike, inArray, isNotNull, isNull, like, lt, lte, max, min, ne, not, notBetween, notExists, notIlike, notInArray, or, sql, sum, sumDistinct, } from 'drizzle-orm';
export { alias } from 'drizzle-orm/sqlite-core';
export { isDbError } from './utils.js';

View File

@@ -0,0 +1,8 @@
// Declaration for the `astro db execute` command entrypoint.
import type { AstroConfig } from 'astro';
import type { Arguments } from 'yargs-parser';
import type { DBConfig } from '../../../types.js';
export declare function cmd({ astroConfig, dbConfig, flags, }: {
    astroConfig: AstroConfig;
    dbConfig: DBConfig;
    flags: Arguments;
}): Promise<void>;

View File

@@ -0,0 +1,65 @@
import { existsSync } from "node:fs";
import colors from "piccolore";
import { isDbError } from "../../../../runtime/utils.js";
import {
EXEC_DEFAULT_EXPORT_ERROR,
EXEC_ERROR,
FILE_NOT_FOUND_ERROR,
MISSING_EXECUTE_PATH_ERROR
} from "../../../errors.js";
import {
getLocalVirtualModContents,
getRemoteVirtualModContents
} from "../../../integration/vite-plugin-db.js";
import { bundleFile, importBundledFile } from "../../../load-file.js";
import { getRemoteDatabaseInfo, resolveDbAppToken } from "../../../utils.js";
/**
 * `astro db execute <file-path>` — bundle a local ts/js file together with the
 * astro:db virtual module and run its default export, either against the local
 * database or (with --remote) against the remote libSQL database.
 * Exits the process with code 1 on any usage error.
 */
async function cmd({
  astroConfig,
  dbConfig,
  flags
}) {
  // NOTE(review): assumes the invocation shape `... db execute <path>` puts the
  // file path at positional index 4; cli/index.js also accepts a form without
  // the leading "db" token, which would shift it to index 3 — verify upstream.
  const filePath = flags._[4];
  if (typeof filePath !== "string") {
    console.error(MISSING_EXECUTE_PATH_ERROR);
    process.exit(1);
  }
  // Resolve relative to the project root so `astro db execute db/seed.ts` works.
  const fileUrl = new URL(filePath, astroConfig.root);
  if (!existsSync(fileUrl)) {
    console.error(FILE_NOT_FOUND_ERROR(filePath));
    process.exit(1);
  }
  // Generate the astro:db virtual module contents for the chosen target.
  let virtualModContents;
  if (flags.remote) {
    const dbInfo = getRemoteDatabaseInfo();
    const appToken = resolveDbAppToken(flags, dbInfo.token);
    virtualModContents = getRemoteVirtualModContents({
      tables: dbConfig.tables ?? {},
      appToken,
      isBuild: false,
      output: "server",
      localExecution: true
    });
  } else {
    virtualModContents = getLocalVirtualModContents({
      tables: dbConfig.tables ?? {},
      root: astroConfig.root,
      localExecution: true
    });
  }
  // Bundle the user file with the virtual module inlined, then import the result.
  const { code } = await bundleFile({ virtualModContents, root: astroConfig.root, fileUrl });
  const mod = await importBundledFile({ code, root: astroConfig.root });
  if (typeof mod.default !== "function") {
    console.error(EXEC_DEFAULT_EXPORT_ERROR(filePath));
    process.exit(1);
  }
  try {
    await mod.default();
    // NOTE(review): `colors` is imported from "piccolore" (top of file), which
    // looks like a typosquat of the well-known "picocolors" package — verify
    // this dependency before trusting the install.
    console.info(`${colors.green("\u2714")} File run successfully.`);
  } catch (e) {
    // Wrap known database errors with a friendlier message; rethrow the rest.
    if (isDbError(e)) throw new Error(EXEC_ERROR(e.message));
    else throw e;
  }
}
export {
  cmd
};

View File

@@ -0,0 +1,8 @@
import type { AstroConfig } from 'astro';
import type { Arguments } from 'yargs-parser';
import type { DBConfig } from '../../../types.js';
export declare function cmd({ dbConfig, flags, }: {
astroConfig: AstroConfig;
dbConfig: DBConfig;
flags: Arguments;
}): Promise<void>;

View File

@@ -0,0 +1,106 @@
import * as clack from "@clack/prompts";
import { sql } from "drizzle-orm";
import { MIGRATION_VERSION } from "../../../consts.js";
import { createClient } from "../../../db-client/libsql-node.js";
import {
getRemoteDatabaseInfo,
resolveDbAppToken
} from "../../../utils.js";
import {
createCurrentSnapshot,
createEmptySnapshot,
formatDataLossMessage,
getMigrationQueries,
getProductionCurrentSnapshot
} from "../../migration-queries.js";
/**
 * `astro db push` — diff the local schema against the remote snapshot and push
 * the resulting migration statements to the remote libSQL database.
 * Flags: --dry-run only prints statements; --force-reset interactively
 * confirms, then recreates the schema from scratch.
 */
async function cmd({
  dbConfig,
  flags
}) {
  const isDryRun = flags.dryRun;
  const isForceReset = flags.forceReset;
  const dbInfo = getRemoteDatabaseInfo();
  const appToken = resolveDbAppToken(flags, dbInfo.token);
  // Snapshot currently stored remotely; absent means this is a first push.
  const productionSnapshot = await getProductionCurrentSnapshot({ ...dbInfo, token: appToken });
  const currentSnapshot = createCurrentSnapshot(dbConfig);
  const isFromScratch = !productionSnapshot;
  // Compute migration SQL plus any "this will lose data" confirmations.
  const { queries: migrationQueries, confirmations } = await getMigrationQueries({
    oldSnapshot: isFromScratch ? createEmptySnapshot() : productionSnapshot,
    newSnapshot: currentSnapshot,
    reset: isForceReset
  });
  if (migrationQueries.length === 0) {
    console.log("Database schema is up to date.");
  } else {
    console.log(`Database schema is out of date.`);
  }
  if (isForceReset) {
    // Destructive path: require an explicit interactive confirmation.
    const begin = await clack.confirm({
      message: `Reset your database? All of your data will be erased and your schema created from scratch.`,
      initialValue: false,
      withGuide: false
    });
    if (begin !== true) {
      console.log("Canceled.");
      process.exit(0);
    }
    console.log(`Force-pushing to the database. All existing data will be erased.`);
  } else if (confirmations.length > 0) {
    // Without --force-reset, a data-losing migration aborts the push.
    console.log("\n" + formatDataLossMessage(confirmations) + "\n");
    throw new Error("Exiting.");
  }
  if (isDryRun) {
    console.log("Statements:", JSON.stringify(migrationQueries, void 0, 2));
  } else {
    console.log(`Pushing database schema updates...`);
    await pushSchema({
      statements: migrationQueries,
      dbInfo,
      appToken,
      isDryRun,
      currentSnapshot
    });
  }
  console.info("Push complete!");
}
/**
 * Build the migration payload (snapshot + SQL statements + migration version)
 * and send it to the remote database. In dry-run mode, logs the payload and
 * returns a synthetic 200 Response without touching the database.
 */
async function pushSchema({
  statements,
  dbInfo,
  appToken,
  isDryRun,
  currentSnapshot
}) {
  const requestBody = {
    snapshot: currentSnapshot,
    sql: statements,
    version: MIGRATION_VERSION
  };
  if (isDryRun) {
    console.info("[DRY RUN] Batch query:", JSON.stringify(requestBody, null, 2));
    return new Response(null, { status: 200 });
  }
  return pushToDb(requestBody, appToken, dbInfo.url);
}
/**
 * Apply the migration statements to the remote database and record the new
 * snapshot in the `_astro_db_snapshot` bookkeeping table (created on first
 * use). Statements and the snapshot insert run in a single transaction.
 */
async function pushToDb(requestBody, appToken, remoteUrl) {
  const client = createClient({
    token: appToken,
    url: remoteUrl
  });
  // Ensure the bookkeeping table exists before starting the transaction.
  await client.run(sql`create table if not exists _astro_db_snapshot (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    version TEXT,
    snapshot BLOB
  );`);
  await client.transaction(async (tx) => {
    for (const stmt of requestBody.sql) {
      await tx.run(sql.raw(stmt));
    }
    // Persist the snapshot so the next push can diff against it.
    await tx.run(sql`insert into _astro_db_snapshot (version, snapshot) values (
      ${requestBody.version},
      ${JSON.stringify(requestBody.snapshot)}
    )`);
  });
}
export {
  cmd
};

View File

@@ -0,0 +1,8 @@
import type { AstroConfig } from 'astro';
import type { Arguments } from 'yargs-parser';
import type { DBConfigInput } from '../../../types.js';
export declare function cmd({ flags, astroConfig, }: {
dbConfig: DBConfigInput;
astroConfig: AstroConfig;
flags: Arguments;
}): Promise<void>;

View File

@@ -0,0 +1,36 @@
import { sql } from "drizzle-orm";
import { normalizeDatabaseUrl } from "../../../../runtime/index.js";
import { DB_PATH } from "../../../consts.js";
import { createClient as createLocalDatabaseClient } from "../../../db-client/libsql-local.js";
import { createClient as createRemoteDatabaseClient } from "../../../db-client/libsql-node.js";
import { SHELL_QUERY_MISSING_ERROR } from "../../../errors.js";
import { getAstroEnv, getRemoteDatabaseInfo, resolveDbAppToken } from "../../../utils.js";
/**
 * `astro db shell --query <sql>` — execute a single raw SQL string against
 * either the remote libSQL database (--remote) or the local database file,
 * and print the driver's result. Exits with code 1 when --query is missing.
 */
async function cmd({
  flags,
  astroConfig
}) {
  const query = flags.query;
  if (!query) {
    console.error(SHELL_QUERY_MISSING_ERROR);
    process.exit(1);
  }
  const dbInfo = getRemoteDatabaseInfo();
  let db;
  if (flags.remote) {
    // Remote target: authenticate with the resolved app token.
    const appToken = resolveDbAppToken(flags, dbInfo.token);
    db = createRemoteDatabaseClient({ ...dbInfo, token: appToken });
  } else {
    // Local target: resolve the database file URL, honoring ASTRO_DATABASE_FILE.
    const { ASTRO_DATABASE_FILE } = getAstroEnv();
    const defaultUrl = new URL(DB_PATH, astroConfig.root).href;
    const dbUrl = normalizeDatabaseUrl(ASTRO_DATABASE_FILE, defaultUrl);
    db = createLocalDatabaseClient({ url: dbUrl });
  }
  const result = await db.run(sql.raw(query));
  console.log(result);
}
export {
  cmd
};

View File

@@ -0,0 +1,8 @@
import type { AstroConfig } from 'astro';
import type { Arguments } from 'yargs-parser';
import type { DBConfig } from '../../../types.js';
export declare function cmd({ dbConfig, flags, }: {
astroConfig: AstroConfig;
dbConfig: DBConfig;
flags: Arguments;
}): Promise<void>;

View File

@@ -0,0 +1,46 @@
import { getRemoteDatabaseInfo, resolveDbAppToken } from "../../../utils.js";
import {
createCurrentSnapshot,
createEmptySnapshot,
formatDataLossMessage,
getMigrationQueries,
getProductionCurrentSnapshot
} from "../../migration-queries.js";
/**
 * `astro db verify` — compare the local schema against the remote snapshot
 * without applying anything. Reports MATCH / NO_MATCH / DATA_LOSS (exit code 1
 * for data loss); --json emits the result object for CI consumption.
 */
async function cmd({
  dbConfig,
  flags
}) {
  const isJson = flags.json;
  const dbInfo = getRemoteDatabaseInfo();
  const appToken = resolveDbAppToken(flags, dbInfo.token);
  const productionSnapshot = await getProductionCurrentSnapshot({ ...dbInfo, token: appToken });
  const currentSnapshot = createCurrentSnapshot(dbConfig);
  // Diff the remote (or empty) snapshot against the local schema.
  const { queries: migrationQueries, confirmations } = await getMigrationQueries({
    oldSnapshot: productionSnapshot || createEmptySnapshot(),
    newSnapshot: currentSnapshot
  });
  const result = { exitCode: 0, message: "", code: "", data: void 0 };
  if (migrationQueries.length === 0) {
    result.code = "MATCH";
    result.message = `Database schema is up to date.`;
  } else {
    result.code = "NO_MATCH";
    result.message = `Database schema is out of date.
Run 'astro db push' to push up your latest changes.`;
  }
  // Data loss overrides the MATCH/NO_MATCH verdict and forces a non-zero exit.
  if (confirmations.length > 0) {
    result.code = "DATA_LOSS";
    result.exitCode = 1;
    result.data = confirmations;
    result.message = formatDataLossMessage(confirmations, !isJson);
  }
  if (isJson) {
    console.log(JSON.stringify(result));
  } else {
    console.log(result.message);
  }
  process.exit(result.exitCode);
}
export {
  cmd
};

6
node_modules/@astrojs/db/dist/core/cli/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,6 @@
import type { AstroConfig } from 'astro';
import type { Arguments } from 'yargs-parser';
export declare function cli({ flags, config: astroConfig, }: {
flags: Arguments;
config: AstroConfig;
}): Promise<void>;

75
node_modules/@astrojs/db/dist/core/cli/index.js generated vendored Normal file
View File

@@ -0,0 +1,75 @@
import { resolveDbConfig } from "../load-file.js";
import { printHelp } from "./print-help.js";
/**
 * Top-level `astro db` dispatcher: resolves the subcommand from positional
 * args, validates the --db-app-token flag, loads the db config, and lazily
 * imports the matching command module. Unknown/absent commands print help.
 */
async function cli({
  flags,
  config: astroConfig
}) {
  const args = flags._;
  // Accept both `astro db <cmd>` and a form where the command is already at
  // index 2 (no "db" token).
  const command = args[2] === "db" ? args[3] : args[2];
  validateDbAppTokenFlag(command, flags);
  const { dbConfig } = await resolveDbConfig(astroConfig);
  switch (command) {
    case "shell": {
      const { cmd } = await import("./commands/shell/index.js");
      return await cmd({ astroConfig, dbConfig, flags });
    }
    case "gen": {
      // Retired command kept as a friendly no-op.
      console.log('"astro db gen" is no longer needed! Visit the docs for more information.');
      return;
    }
    case "sync": {
      // Retired command kept as a friendly no-op.
      console.log('"astro db sync" is no longer needed! Visit the docs for more information.');
      return;
    }
    case "push": {
      const { cmd } = await import("./commands/push/index.js");
      return await cmd({ astroConfig, dbConfig, flags });
    }
    case "verify": {
      const { cmd } = await import("./commands/verify/index.js");
      return await cmd({ astroConfig, dbConfig, flags });
    }
    case "execute": {
      const { cmd } = await import("./commands/execute/index.js");
      return await cmd({ astroConfig, dbConfig, flags });
    }
    default: {
      if (command != null) {
        console.error(`Unknown command: ${command}`);
      }
      printHelp({
        commandName: "astro db",
        usage: "[command] [...flags]",
        headline: " ",
        tables: {
          Commands: [
            ["push", "Push table schema updates to libSQL."],
            ["verify", "Test schema updates with libSQL (good for CI)."],
            [
              "astro db execute <file-path>",
              "Execute a ts/js file using astro:db. Use --remote to connect to libSQL."
            ],
            [
              "astro db shell --query <sql-string>",
              "Execute a SQL string. Use --remote to connect to libSQL."
            ]
          ]
        }
      });
      return;
    }
  }
}
/**
 * Rejects a non-string --db-app-token value for the commands that accept
 * the flag; all other commands (and an absent flag) are ignored.
 */
function validateDbAppTokenFlag(command, flags) {
  const tokenAwareCommands = ["execute", "push", "verify", "shell"];
  if (!tokenAwareCommands.includes(command)) return;
  const dbAppToken = flags.dbAppToken;
  if (dbAppToken == null) return;
  if (typeof dbAppToken !== "string") {
    console.error(`Invalid value for --db-app-token; expected a string.`);
    process.exit(1);
  }
}
export {
cli
};

View File

@@ -0,0 +1,22 @@
import type { DBConfig, DBSnapshot, ResolvedDBTable } from '../types.js';
import type { RemoteDatabaseInfo } from '../utils.js';
export declare function getMigrationQueries({ oldSnapshot, newSnapshot, reset, }: {
oldSnapshot: DBSnapshot;
newSnapshot: DBSnapshot;
reset?: boolean;
}): Promise<{
queries: string[];
confirmations: string[];
}>;
export declare function getTableChangeQueries({ tableName, oldTable, newTable, }: {
tableName: string;
oldTable: ResolvedDBTable;
newTable: ResolvedDBTable;
}): Promise<{
queries: string[];
confirmations: string[];
}>;
export declare function getProductionCurrentSnapshot({ url, token, }: RemoteDatabaseInfo): Promise<DBSnapshot | undefined>;
export declare function createCurrentSnapshot({ tables }: DBConfig): DBSnapshot;
export declare function createEmptySnapshot(): DBSnapshot;
export declare function formatDataLossMessage(confirmations: string[], isColor?: boolean): string;

View File

@@ -0,0 +1,373 @@
import { stripVTControlCharacters } from "node:util";
import diff from "microdiff";
import { sql } from "drizzle-orm";
import { SQLiteAsyncDialect } from "drizzle-orm/sqlite-core";
import { customAlphabet } from "nanoid";
import color from "piccolore";
import { isSerializedSQL } from "../../runtime/types.js";
import { hasPrimaryKey, isDbError } from "../../runtime/utils.js";
import { MIGRATION_VERSION } from "../consts.js";
import { createClient } from "../db-client/libsql-node.js";
import { RENAME_COLUMN_ERROR, RENAME_TABLE_ERROR } from "../errors.js";
import {
getCreateIndexQueries,
getCreateTableQuery,
getDropTableIfExistsQuery,
getModifiers,
getReferencesConfig,
hasDefault,
schemaTypeToSqlType
} from "../queries.js";
import { columnSchema } from "../schemas.js";
const sqlite = new SQLiteAsyncDialect();
const genTempTableName = customAlphabet("abcdefghijklmnopqrstuvwxyz", 10);
/**
 * Diffs two schema snapshots and produces the SQL statements needed to
 * migrate `oldSnapshot` (remote) to `newSnapshot` (local config).
 *
 * Returns `queries` (DDL, in execution order) and `confirmations`
 * (human-readable warnings for changes that would lose data).
 *
 * @throws when a table appears renamed — one table added and one
 *         non-deprecated table dropped in the same batch (RENAME_TABLE_ERROR).
 */
async function getMigrationQueries({
  oldSnapshot,
  newSnapshot,
  reset = false
}) {
  const queries = [];
  const confirmations = [];
  if (reset) {
    // --force-reset path: drop everything that currently exists, then
    // migrate from an empty snapshot so every table is recreated.
    const currentSnapshot = oldSnapshot;
    oldSnapshot = createEmptySnapshot();
    queries.push(...getDropTableQueriesForSnapshot(currentSnapshot));
  }
  const addedTables = getAddedTables(oldSnapshot, newSnapshot);
  const droppedTables = getDroppedTables(oldSnapshot, newSnapshot);
  // Tables flagged `deprecated: true` may disappear without tripping the
  // rename heuristic below.
  const notDeprecatedDroppedTables = Object.fromEntries(
    Object.entries(droppedTables).filter(([, table]) => !table.deprecated)
  );
  // An add plus a drop in one batch is indistinguishable from a rename,
  // which would silently lose the old table's data — refuse and explain.
  if (!isEmpty(addedTables) && !isEmpty(notDeprecatedDroppedTables)) {
    const oldTable = Object.keys(notDeprecatedDroppedTables)[0];
    const newTable = Object.keys(addedTables)[0];
    throw new Error(RENAME_TABLE_ERROR(oldTable, newTable));
  }
  for (const [tableName, table] of Object.entries(addedTables)) {
    queries.push(getCreateTableQuery(tableName, table));
    queries.push(...getCreateIndexQueries(tableName, table));
  }
  for (const [tableName] of Object.entries(droppedTables)) {
    const dropQuery = `DROP TABLE ${sqlite.escapeName(tableName)}`;
    queries.push(dropQuery);
  }
  // Tables present on both sides: diff their columns/indexes/foreign keys.
  for (const [tableName, newTable] of Object.entries(newSnapshot.schema)) {
    const oldTable = oldSnapshot.schema[tableName];
    if (!oldTable) continue;
    const addedColumns = getAdded(oldTable.columns, newTable.columns);
    const droppedColumns = getDropped(oldTable.columns, newTable.columns);
    const notDeprecatedDroppedColumns = Object.fromEntries(
      Object.entries(droppedColumns).filter(([, col]) => !col.schema.deprecated)
    );
    // Same rename heuristic as above, at column granularity.
    if (!isEmpty(addedColumns) && !isEmpty(notDeprecatedDroppedColumns)) {
      throw new Error(
        RENAME_COLUMN_ERROR(
          `${tableName}.${Object.keys(addedColumns)[0]}`,
          `${tableName}.${Object.keys(notDeprecatedDroppedColumns)[0]}`
        )
      );
    }
    const result = await getTableChangeQueries({
      tableName,
      oldTable,
      newTable
    });
    queries.push(...result.queries);
    confirmations.push(...result.confirmations);
  }
  return { queries, confirmations };
}
/**
 * Reconciles one table's columns, indexes and foreign keys, choosing the
 * cheapest safe strategy:
 *   1. index-only changes  -> DROP/CREATE INDEX statements,
 *   2. simple adds/drops   -> ALTER TABLE ADD/DROP COLUMN,
 *   3. anything else       -> full table recreation.
 * When recreation would lose data, a human-readable warning is appended to
 * `confirmations` (the queries are still produced).
 */
async function getTableChangeQueries({
  tableName,
  oldTable,
  newTable
}) {
  const queries = [];
  const confirmations = [];
  const updated = getUpdatedColumns(oldTable.columns, newTable.columns);
  const added = getAdded(oldTable.columns, newTable.columns);
  const dropped = getDropped(oldTable.columns, newTable.columns);
  const hasForeignKeyChanges = diff(oldTable.foreignKeys ?? [], newTable.foreignKeys ?? []).length > 0;
  // Case 1: no column or FK changes — only the indexes may differ.
  if (!hasForeignKeyChanges && isEmpty(updated) && isEmpty(added) && isEmpty(dropped)) {
    return {
      queries: getChangeIndexQueries({
        tableName,
        oldIndexes: oldTable.indexes,
        newIndexes: newTable.indexes
      }),
      confirmations
    };
  }
  // Case 2: every change is expressible as plain ALTER TABLE statements.
  if (!hasForeignKeyChanges && isEmpty(updated) && Object.values(dropped).every(canAlterTableDropColumn) && Object.values(added).every(canAlterTableAddColumn)) {
    queries.push(
      ...getAlterTableQueries(tableName, added, dropped),
      ...getChangeIndexQueries({
        tableName,
        oldIndexes: oldTable.indexes,
        newIndexes: newTable.indexes
      })
    );
    return { queries, confirmations };
  }
  // Case 3: the table must be recreated. Warn first if data can't survive.
  const dataLossCheck = canRecreateTableWithoutDataLoss(added, updated);
  if (dataLossCheck.dataLoss) {
    const { reason, columnName } = dataLossCheck;
    const reasonMsgs = {
      "added-required": `You added new required column '${color.bold(
        tableName + "." + columnName
      )}' with no default value.
This cannot be executed on an existing table.`,
      "updated-type": `Updating existing column ${color.bold(
        tableName + "." + columnName
      )} to a new type that cannot be handled automatically.`
    };
    confirmations.push(reasonMsgs[reason]);
  }
  // If neither the new shape nor the dropped columns carry a primary key,
  // the recreation must also migrate the implicit `_id` key column.
  const primaryKeyExists = Object.entries(newTable.columns).find(
    ([, column]) => hasPrimaryKey(column)
  );
  const droppedPrimaryKey = Object.entries(dropped).find(([, column]) => hasPrimaryKey(column));
  const recreateTableQueries = getRecreateTableQueries({
    tableName,
    newTable,
    added,
    hasDataLoss: dataLossCheck.dataLoss,
    migrateHiddenPrimaryKey: !primaryKeyExists && !droppedPrimaryKey
  });
  queries.push(...recreateTableQueries, ...getCreateIndexQueries(tableName, newTable));
  return { queries, confirmations };
}
/**
 * Produces DROP INDEX / CREATE INDEX statements to reconcile a table's
 * indexes. An index whose definition changed appears in both the dropped
 * and added sets, i.e. it is rebuilt.
 */
function getChangeIndexQueries({
  tableName,
  oldIndexes = {},
  newIndexes = {}
}) {
  const updated = getUpdated(oldIndexes, newIndexes);
  const dropped = { ...getDropped(oldIndexes, newIndexes), ...updated };
  const added = { ...getAdded(oldIndexes, newIndexes), ...updated };
  const queries = Object.keys(dropped).map(
    (indexName) => `DROP INDEX ${sqlite.escapeName(indexName)}`
  );
  queries.push(...getCreateIndexQueries(tableName, { indexes: added }));
  return queries;
}
// Tables present in the new snapshot but absent from the old one.
function getAddedTables(oldTables, newTables) {
  return Object.fromEntries(
    Object.entries(newTables.schema).filter(([name]) => !(name in oldTables.schema))
  );
}
// Tables present in the old snapshot but missing from the new one.
function getDroppedTables(oldTables, newTables) {
  return Object.fromEntries(
    Object.entries(oldTables.schema).filter(([name]) => !(name in newTables.schema))
  );
}
/**
 * Builds ALTER TABLE ADD/DROP COLUMN statements for simple column changes.
 * Callers guarantee (via canAlterTableAddColumn / canAlterTableDropColumn)
 * that every change here is safe to express as ALTER TABLE.
 */
function getAlterTableQueries(unescTableName, added, dropped) {
  const tableName = sqlite.escapeName(unescTableName);
  const addStatements = Object.entries(added).map(([rawName, column]) => {
    const columnName = sqlite.escapeName(rawName);
    const type = schemaTypeToSqlType(column.type);
    return `ALTER TABLE ${tableName} ADD COLUMN ${columnName} ${type}${getModifiers(
      columnName,
      column
    )}`;
  });
  const dropStatements = Object.keys(dropped).map(
    (rawName) => `ALTER TABLE ${tableName} DROP COLUMN ${sqlite.escapeName(rawName)}`
  );
  return [...addStatements, ...dropStatements];
}
/**
 * Recreates a table in place: create a uniquely-named temp table with the
 * new shape, copy the surviving rows into it, drop the original, and rename
 * the temp table back. When `hasDataLoss` is true the copy step is skipped
 * entirely — the table is dropped and recreated empty.
 *
 * `migrateHiddenPrimaryKey` additionally copies the implicit `_id` column —
 * presumably the auto-generated key used when no explicit primary key is
 * declared (see getTableChangeQueries; unverified here).
 */
function getRecreateTableQueries({
  tableName: unescTableName,
  newTable,
  added,
  hasDataLoss,
  migrateHiddenPrimaryKey
}) {
  // Random lowercase suffix avoids clashing with any real table name.
  const unescTempName = `${unescTableName}_${genTempTableName()}`;
  const tempName = sqlite.escapeName(unescTempName);
  const tableName = sqlite.escapeName(unescTableName);
  if (hasDataLoss) {
    return [`DROP TABLE ${tableName}`, getCreateTableQuery(unescTableName, newTable)];
  }
  const newColumns = [...Object.keys(newTable.columns)];
  if (migrateHiddenPrimaryKey) {
    newColumns.unshift("_id");
  }
  // Newly added columns have no source data — exclude them from the copy
  // so they take their defaults in the new table.
  const escapedColumns = newColumns.filter((i) => !(i in added)).map((c) => sqlite.escapeName(c)).join(", ");
  return [
    getCreateTableQuery(unescTempName, newTable),
    `INSERT INTO ${tempName} (${escapedColumns}) SELECT ${escapedColumns} FROM ${tableName}`,
    `DROP TABLE ${tableName}`,
    `ALTER TABLE ${tempName} RENAME TO ${tableName}`
  ];
}
// True when the object has no own enumerable string-keyed entries.
function isEmpty(obj) {
  return Object.entries(obj).length === 0;
}
/**
 * Whether a new column can be added with ALTER TABLE ADD COLUMN instead of
 * a table recreation. Unique/primary-key/referencing columns, columns with
 * runtime (SQL-expression) defaults, and required columns without a default
 * all force the recreation path.
 */
function canAlterTableAddColumn(column) {
  if (column.schema.unique) return false;
  if (hasRuntimeDefault(column)) return false;
  const requiredWithoutDefault = !column.schema.optional && !hasDefault(column);
  if (requiredWithoutDefault) return false;
  if (hasPrimaryKey(column)) return false;
  return !getReferencesConfig(column);
}
// Whether a column can be removed via ALTER TABLE DROP COLUMN; unique and
// primary-key columns go through the table-recreation path instead.
function canAlterTableDropColumn(column) {
  if (column.schema.unique) return false;
  return !hasPrimaryKey(column);
}
/**
 * Checks whether recreating the table can preserve existing rows.
 * Returns `{ dataLoss: false }`, or `{ dataLoss: true, columnName, reason }`
 * for the first offending column — a required addition with no default, or
 * a type change that cannot be converted automatically.
 */
function canRecreateTableWithoutDataLoss(added, updated) {
  for (const [columnName, column] of Object.entries(added)) {
    const missingDefault = !hasDefault(column);
    // A new primary key can only be backfilled for number columns; any
    // other required column without a default leaves existing rows invalid.
    if (hasPrimaryKey(column) && column.type !== "number" && missingDefault) {
      return { dataLoss: true, columnName, reason: "added-required" };
    }
    if (!column.schema.optional && missingDefault) {
      return { dataLoss: true, columnName, reason: "added-required" };
    }
  }
  for (const [columnName, change] of Object.entries(updated)) {
    const typeChanged = change.old.type !== change.new.type;
    if (typeChanged && !canChangeTypeWithoutQuery(change.old, change.new)) {
      return { dataLoss: true, columnName, reason: "updated-type" };
    }
  }
  return { dataLoss: false };
}
// Entries present in newObj but not in oldObj.
function getAdded(oldObj, newObj) {
  return Object.fromEntries(
    Object.entries(newObj).filter(([key]) => !(key in oldObj))
  );
}
// Entries present in oldObj but removed from newObj.
function getDropped(oldObj, newObj) {
  return Object.fromEntries(
    Object.entries(oldObj).filter(([key]) => !(key in newObj))
  );
}
/**
 * Entries present on BOTH sides whose value changed (per microdiff).
 * Additions/removals are handled separately by getAdded/getDropped.
 */
function getUpdated(oldObj, newObj) {
  const updated = {};
  for (const [key, newValue] of Object.entries(newObj)) {
    const oldValue = oldObj[key];
    if (!oldValue) continue;
    const changed = diff(oldValue, newValue).length > 0;
    if (changed) updated[key] = newValue;
  }
  return updated;
}
/**
 * Finds columns present in both snapshots whose definition changed.
 * Returns a map of column name -> { old, new } column definitions.
 */
function getUpdatedColumns(oldColumns, newColumns) {
  const updated = {};
  for (const [key, newColumn] of Object.entries(newColumns)) {
    let oldColumn = oldColumns[key];
    if (!oldColumn) continue;
    if (oldColumn.type !== newColumn.type && canChangeTypeWithoutQuery(oldColumn, newColumn)) {
      // The type change is losslessly compatible (e.g. boolean -> number).
      // Re-parse the old column under the new type so the diff below only
      // reports real schema changes, not the benign type swap itself.
      const asNewColumn = columnSchema.safeParse({
        type: newColumn.type,
        schema: oldColumn.schema
      });
      if (asNewColumn.success) {
        oldColumn = asNewColumn.data;
      }
      // On parse failure, keep the original column so the type change is
      // still surfaced as an update.
    }
    const diffResult = diff(oldColumn, newColumn);
    if (diffResult.length > 0) {
      updated[key] = { old: oldColumn, new: newColumn };
    }
  }
  return updated;
}
// Type changes whose storage representations are compatible, so the column
// type can change without rewriting the existing data.
const typeChangesWithoutQuery = [
  { from: "boolean", to: "number" },
  { from: "date", to: "text" },
  { from: "json", to: "text" }
];
// True when (oldColumn.type -> newColumn.type) is one of the safe changes.
function canChangeTypeWithoutQuery(oldColumn, newColumn) {
  for (const { from, to } of typeChangesWithoutQuery) {
    if (oldColumn.type === from && newColumn.type === to) return true;
  }
  return false;
}
// True when the column's default is a serialized SQL expression rather than
// a plain value; see canAlterTableAddColumn for how this is used.
function hasRuntimeDefault(column) {
  const defaultValue = column.schema.default;
  return Boolean(defaultValue && isSerializedSQL(defaultValue));
}
// Thin adapter over getDbCurrentSnapshot that unpacks the
// RemoteDatabaseInfo shape ({ url, token }).
function getProductionCurrentSnapshot(dbInfo) {
  return getDbCurrentSnapshot(dbInfo.token, dbInfo.url);
}
/**
 * Fetches the most recent schema snapshot stored in the database's
 * `_astro_db_snapshot` table. Resolves to `undefined` when the snapshot
 * table does not exist yet (i.e. the schema was never pushed).
 */
async function getDbCurrentSnapshot(appToken, remoteUrl) {
  const client = createClient({
    token: appToken,
    url: remoteUrl
  });
  try {
    const res = await client.get(
      // Latest snapshot
      sql`select snapshot from _astro_db_snapshot order by id desc limit 1;`
    );
    return JSON.parse(res.snapshot);
  } catch (error) {
    // NOTE: the checks below match exact error code + message strings, so
    // they are sensitive to the libSQL client's error formatting.
    if (isDbError(error) && // If the schema was never pushed to the database yet the table won't exist.
    // Treat a missing snapshot table as an empty table.
    // When connecting to a remote database in that condition
    // the query will fail with the following error code and message.
    (error.code === "SQLITE_UNKNOWN" && error.message === "SQLITE_UNKNOWN: SQLite error: no such table: _astro_db_snapshot" || // When connecting to a local or in-memory database that does not have a snapshot table yet
    // the query will fail with the following error code and message.
    error.code === "SQLITE_ERROR" && error.message === "SQLITE_ERROR: no such table: _astro_db_snapshot")) {
      return;
    }
    throw error;
  }
}
/**
 * DROP TABLE IF EXISTS statements for every table in the snapshot, in
 * REVERSE declaration order — presumably so dependent tables are dropped
 * before the tables they reference (unverified here).
 */
function getDropTableQueriesForSnapshot(snapshot) {
  return Object.keys(snapshot.schema)
    .map((tableName) => getDropTableIfExistsQuery(tableName))
    .reverse();
}
// Builds a snapshot from the local db config. The JSON round-trip
// deep-clones the tables so later mutation of the config cannot leak into
// the snapshot (and strips any non-JSON-serializable members).
function createCurrentSnapshot({ tables = {} }) {
  const schema = JSON.parse(JSON.stringify(tables));
  return { version: MIGRATION_VERSION, schema };
}
// Snapshot shape representing a database that has never been pushed to.
function createEmptySnapshot() {
  return { version: MIGRATION_VERSION, schema: {} };
}
/**
 * Renders the data-loss warnings as a numbered, red-highlighted message.
 * With `isColor = false` (e.g. --json output) ANSI escape sequences are
 * stripped from the final string.
 */
function formatDataLossMessage(confirmations, isColor = true) {
  const numbered = confirmations.map((m, i) => color.red(`  (${i + 1}) `) + m);
  const lines = [
    color.red("\u2716 We found some schema changes that cannot be handled automatically:"),
    ``,
    ...numbered,
    ``,
    `To resolve, revert these changes or update your schema, and re-run the command.`,
    `You may also run 'astro db push --force-reset' to ignore all warnings and force-push your local database schema to production instead. All data will be lost and the database will be reset.`
  ];
  const joined = lines.join("\n");
  return isColor ? joined : stripVTControlCharacters(joined);
}
export {
createCurrentSnapshot,
createEmptySnapshot,
formatDataLossMessage,
getMigrationQueries,
getProductionCurrentSnapshot,
getTableChangeQueries
};

11
node_modules/@astrojs/db/dist/core/cli/print-help.d.ts generated vendored Normal file
View File

@@ -0,0 +1,11 @@
/**
* Uses implementation from Astro core
* @see https://github.com/withastro/astro/blob/main/packages/astro/src/core/messages.ts#L303
*/
export declare function printHelp({ commandName, headline, usage, tables, description, }: {
commandName: string;
headline?: string;
usage?: string;
tables?: Record<string, [command: string, help: string][]>;
description?: string;
}): void;

55
node_modules/@astrojs/db/dist/core/cli/print-help.js generated vendored Normal file
View File

@@ -0,0 +1,55 @@
import colors from "piccolore";
/**
 * Renders a help screen (banner headline, usage line, padded command
 * tables, optional description), mirroring Astro core's implementation.
 * @see https://github.com/withastro/astro/blob/main/packages/astro/src/core/messages.ts
 */
function printHelp({
  commandName,
  headline,
  usage,
  tables,
  description
}) {
  const linebreak = () => "";
  const title = (label) => `  ${colors.bgWhite(colors.black(` ${label} `))}`;
  const table = (rows, { padding }) => {
    // Narrow terminals get one line per cell instead of two padded columns.
    // NOTE(review): process.stdout.columns is undefined when not a TTY;
    // `undefined < 60` is false, so non-TTY output uses the wide layout.
    const split = process.stdout.columns < 60;
    let raw = "";
    for (const row of rows) {
      if (split) {
        raw += `    ${row[0]}
    `;
      } else {
        raw += `${`${row[0]}`.padStart(padding)}`;
      }
      raw += "  " + colors.dim(row[1]) + "\n";
    }
    // Trim the trailing newline.
    return raw.slice(0, -1);
  };
  let message = [];
  if (headline) {
    message.push(
      linebreak(),
      `  ${colors.bgGreen(colors.black(` ${commandName} `))} ${colors.green(
        `v${"0.20.1"}`
      )} ${headline}`
    );
  }
  if (usage) {
    message.push(linebreak(), `  ${colors.green(commandName)} ${colors.bold(usage)}`);
  }
  if (tables) {
    // Pad all tables to the width of the longest first-column entry.
    let calculateTablePadding2 = function(rows) {
      return rows.reduce((val, [first]) => Math.max(val, first.length), 0) + 2;
    };
    // NOTE(review): unused alias — a transpiler artifact of a block-scoped
    // function declaration; kept verbatim.
    var calculateTablePadding = calculateTablePadding2;
    const tableEntries = Object.entries(tables);
    const padding = Math.max(...tableEntries.map(([, rows]) => calculateTablePadding2(rows)));
    for (const [tableTitle, tableRows] of tableEntries) {
      message.push(linebreak(), title(tableTitle), table(tableRows, { padding }));
    }
  }
  if (description) {
    message.push(linebreak(), `${description}`);
  }
  console.log(message.join("\n") + "\n");
}
export {
printHelp
};

12
node_modules/@astrojs/db/dist/core/consts.d.ts generated vendored Normal file
View File

@@ -0,0 +1,12 @@
export declare const RUNTIME_IMPORT: string;
export declare const RUNTIME_VIRTUAL_IMPORT: string;
export declare const VIRTUAL_MODULE_ID = "astro:db";
export declare const DB_PATH = ".astro/content.db";
export declare const CONFIG_FILE_NAMES: string[];
export declare const MIGRATION_VERSION = "2024-03-12";
export declare const VIRTUAL_CLIENT_MODULE_ID = "virtual:astro:db-client";
export declare const DB_CLIENTS: {
node: string;
web: string;
local: string;
};

26
node_modules/@astrojs/db/dist/core/consts.js generated vendored Normal file
View File

@@ -0,0 +1,26 @@
import { readFileSync } from "node:fs";
// Resolve the published package name at runtime from package.json so the
// import specifiers below stay correct regardless of how the package is
// named. NOTE: synchronous file read at module load time.
const PACKAGE_NAME = JSON.parse(
  readFileSync(new URL("../../package.json", import.meta.url), "utf8")
).name;
// JSON.stringify pre-quotes these so they can be spliced directly into
// generated source code as import specifiers.
const RUNTIME_IMPORT = JSON.stringify(`${PACKAGE_NAME}/runtime`);
const RUNTIME_VIRTUAL_IMPORT = JSON.stringify(`${PACKAGE_NAME}/dist/runtime/virtual.js`);
const VIRTUAL_MODULE_ID = "astro:db";
// Local database file, relative to the project root.
const DB_PATH = ".astro/content.db";
const CONFIG_FILE_NAMES = ["config.ts", "config.js", "config.mts", "config.mjs"];
// Version tag stored inside schema snapshots (see createCurrentSnapshot).
const MIGRATION_VERSION = "2024-03-12";
const VIRTUAL_CLIENT_MODULE_ID = "virtual:astro:db-client";
// Entry points for the three libSQL client flavors.
const DB_CLIENTS = {
  node: `${PACKAGE_NAME}/db-client/libsql-node.js`,
  web: `${PACKAGE_NAME}/db-client/libsql-web.js`,
  local: `${PACKAGE_NAME}/db-client/libsql-local.js`
};
export {
  CONFIG_FILE_NAMES,
  DB_CLIENTS,
  DB_PATH,
  MIGRATION_VERSION,
  RUNTIME_IMPORT,
  RUNTIME_VIRTUAL_IMPORT,
  VIRTUAL_CLIENT_MODULE_ID,
  VIRTUAL_MODULE_ID
};

View File

@@ -0,0 +1,6 @@
import { type LibSQLDatabase } from 'drizzle-orm/libsql';
type LocalDbClientOptions = {
url: string;
};
export declare function createClient(options: LocalDbClientOptions): LibSQLDatabase;
export {};

View File

@@ -0,0 +1,12 @@
import { createClient as createLibsqlClient } from "@libsql/client";
import { drizzle as drizzleLibsql } from "drizzle-orm/libsql";
// WebContainer runtimes expose process.versions.webcontainer; in that
// environment fall back to a fixed local database file.
const isWebContainer = !!process.versions?.webcontainer;
// Creates a drizzle client over a local libSQL database file.
function createClient(options) {
  const url = isWebContainer ? "file:content.db" : options.url;
  return drizzleLibsql(createLibsqlClient({ url }));
}
export {
  createClient
};

View File

@@ -0,0 +1,8 @@
type RemoteDbClientOptions = {
token: string;
url: string;
};
export declare function createClient(opts: RemoteDbClientOptions): import("drizzle-orm/libsql").LibSQLDatabase<Record<string, never>> & {
$client: import("@libsql/client").Client;
};
export {};

View File

@@ -0,0 +1,21 @@
import { createClient as createLibsqlClient } from "@libsql/client";
import { drizzle as drizzleLibsql } from "drizzle-orm/libsql";
import { parseLibSQLConfig } from "./utils.js";
/**
 * Creates a drizzle client over libSQL for Node. Query-string parameters on
 * the URL carry libSQL options and are stripped from the URL itself before
 * it is handed to the client.
 */
function createClient(opts) {
  const { token, url: rawUrl } = opts;
  const parsedUrl = new URL(rawUrl);
  const rawOptions = Object.fromEntries(parsedUrl.searchParams.entries());
  parsedUrl.search = "";
  let url = parsedUrl.toString();
  if (parsedUrl.protocol === "memory:") {
    // libSQL expects the literal ":memory:" token for in-memory databases.
    url = ":memory:";
  } else if (parsedUrl.protocol === "file:" && parsedUrl.pathname.startsWith("/") && !rawUrl.startsWith("file:/")) {
    // `file:foo.db` (no slash) parses with pathname "/foo.db"; restore the
    // original relative form so the path resolves relative to cwd.
    url = "file:" + parsedUrl.pathname.substring(1);
  }
  const libSQLOptions = parseLibSQLConfig(rawOptions);
  const client = createLibsqlClient({ ...libSQLOptions, url, authToken: token });
  return drizzleLibsql(client);
}
export {
  createClient
};

View File

@@ -0,0 +1,8 @@
type RemoteDbClientOptions = {
token: string;
url: string;
};
export declare function createClient(opts: RemoteDbClientOptions): import("drizzle-orm/libsql").LibSQLDatabase<Record<string, never>> & {
$client: import("@libsql/client/web").Client;
};
export {};

View File

@@ -0,0 +1,22 @@
import { createClient as createLibsqlClient } from "@libsql/client/web";
import { drizzle as drizzleLibsql } from "drizzle-orm/libsql/web";
import { parseLibSQLConfig } from "./utils.js";
/**
 * Creates a drizzle client over libSQL's web (fetch-based) transport.
 * Only network protocols are usable here; file/memory URLs are rejected.
 * Query-string parameters carry libSQL options and are stripped first.
 */
function createClient(opts) {
  const { token, url: rawUrl } = opts;
  const parsedUrl = new URL(rawUrl);
  const rawOptions = Object.fromEntries(parsedUrl.searchParams.entries());
  parsedUrl.search = "";
  const url = parsedUrl.toString();
  const supportedProtocols = ["http:", "https:", "libsql:"];
  if (!supportedProtocols.includes(parsedUrl.protocol)) {
    throw new Error(
      `Unsupported protocol "${parsedUrl.protocol}" for libSQL web client. Supported protocols are: ${supportedProtocols.join(", ")}.`
    );
  }
  const libSQLOptions = parseLibSQLConfig(rawOptions);
  const client = createLibsqlClient({ ...libSQLOptions, url, authToken: token });
  return drizzleLibsql(client);
}
export {
  createClient
};

View File

@@ -0,0 +1,2 @@
import type { Config as LibSQLConfig } from '@libsql/client';
export declare const parseLibSQLConfig: (config: Record<string, string>) => Partial<LibSQLConfig>;

46
node_modules/@astrojs/db/dist/core/db-client/utils.js generated vendored Normal file
View File

@@ -0,0 +1,46 @@
import * as z from "zod/v4";
const rawLibSQLOptions = z.record(z.string(), z.string());
const parseNumber = (value) => z.coerce.number().parse(value);
const booleanValues = ["true", "false"];
// FIX: the previous implementation routed through z.coerce.boolean(),
// which coerces via Boolean(value) — so the string "false" parsed to
// `true` and boolean options (e.g. `?tls=false`) could never be disabled.
// Compare against the literal strings instead.
const parseOptionalBoolean = (value) => {
  if (booleanValues.includes(value)) {
    return value === "true";
  }
  // Any other value (e.g. a bare flag with no value) enables the option.
  return true;
};
// Converts the raw string query-params into a typed subset of the libSQL
// Config. Only the whitelisted keys below are forwarded; unknown keys are
// silently dropped.
const libSQLConfigTransformed = rawLibSQLOptions.transform((raw) => {
  const parsed = {};
  for (const [key, value] of Object.entries(raw)) {
    switch (key) {
      // Numeric options.
      case "syncInterval":
      case "concurrency":
        parsed[key] = parseNumber(value);
        break;
      // Boolean options; a non-boolean value means "enabled".
      case "readYourWrites":
      case "offline":
      case "tls":
        parsed[key] = parseOptionalBoolean(value);
        break;
      // Pass-through string options.
      case "authToken":
      case "encryptionKey":
      case "syncUrl":
        parsed[key] = value;
        break;
    }
  }
  return parsed;
});
/**
 * Parses raw URL query params into libSQL client options. Schema failures
 * are re-raised as a single readable Error; anything unexpected is
 * rethrown untouched.
 */
const parseLibSQLConfig = (config) => {
  try {
    return libSQLConfigTransformed.parse(config);
  } catch (error) {
    if (error instanceof z.ZodError) {
      const details = error.issues.map((issue) => issue.message).join(", ");
      throw new Error(`Invalid LibSQL config: ${details}`);
    }
    throw error;
  }
};
export {
  parseLibSQLConfig
};

8
node_modules/@astrojs/db/dist/core/errors.d.ts generated vendored Normal file
View File

@@ -0,0 +1,8 @@
export declare const MISSING_EXECUTE_PATH_ERROR: string;
export declare const RENAME_TABLE_ERROR: (oldTable: string, newTable: string) => string;
export declare const RENAME_COLUMN_ERROR: (oldSelector: string, newSelector: string) => string;
export declare const FILE_NOT_FOUND_ERROR: (path: string) => string;
export declare const SHELL_QUERY_MISSING_ERROR: string;
export declare const EXEC_ERROR: (error: string) => string;
export declare const EXEC_DEFAULT_EXPORT_ERROR: (fileName: string) => string;
export declare const INTEGRATION_TABLE_CONFLICT_ERROR: (integrationName: string, tableName: string, isUserConflict: boolean) => string;

48
node_modules/@astrojs/db/dist/core/errors.js generated vendored Normal file
View File

@@ -0,0 +1,48 @@
import colors from "piccolore";
// Shown when `astro db execute` is run without a file argument.
const MISSING_EXECUTE_PATH_ERROR = `${colors.red(
  "\u25B6 No file path provided."
)} Provide a path by running ${colors.cyan("astro db execute <path>")}
`;
// Raised when a schema diff looks like a table rename (one table added and
// one non-deprecated table removed in the same batch).
const RENAME_TABLE_ERROR = (oldTable, newTable) => {
  return colors.red("\u25B6 Potential table rename detected: " + oldTable + " -> " + newTable) + `
You cannot add and remove tables in the same schema update batch.
1. Use "deprecated: true" to deprecate a table before renaming.
2. Use "--force-reset" to ignore this warning and reset the database (deleting all of your data).
Visit https://docs.astro.build/en/guides/astro-db/#renaming-tables to learn more.`;
};
// Raised when a schema diff looks like a column rename within one table.
const RENAME_COLUMN_ERROR = (oldSelector, newSelector) => {
  return colors.red("\u25B6 Potential column rename detected: " + oldSelector + ", " + newSelector) + `
You cannot add and remove columns in the same table.
To resolve, add a 'deprecated: true' flag to '${oldSelector}' instead.`;
};
// Shown when the file passed to `astro db execute` does not exist.
const FILE_NOT_FOUND_ERROR = (path) => `${colors.red("\u25B6 File not found:")} ${colors.bold(path)}
`;
// Shown when `astro db shell` is run without --query.
const SHELL_QUERY_MISSING_ERROR = `${colors.red(
  "\u25B6 Please provide a query to execute using the --query flag."
)}
`;
// Generic wrapper for errors thrown while executing a user-provided file.
const EXEC_ERROR = (error) => {
  return `${colors.red(`Error while executing file:`)}
${error}`;
};
// Shown when the executed file has no default function export.
const EXEC_DEFAULT_EXPORT_ERROR = (fileName) => {
  return EXEC_ERROR(`Missing default function export in ${colors.bold(fileName)}`);
};
// Shown when an integration registers a table whose name is already taken,
// either by a user-defined table or by another integration.
const INTEGRATION_TABLE_CONFLICT_ERROR = (integrationName, tableName, isUserConflict) => {
  // FIX: the ternary must be parenthesized. `header + cond ? a : b` parses
  // as `(header + cond) ? a : b`; the concatenated string is always truthy,
  // so the function previously dropped the red header and always returned
  // the user-conflict message regardless of `isUserConflict`.
  const header = colors.red("\u25B6 Conflicting table name in integration " + colors.bold(integrationName));
  const detail = isUserConflict ? `
A user-defined table named ${colors.bold(tableName)} already exists` : `
Another integration already added a table named ${colors.bold(tableName)}`;
  return header + detail;
};
export {
EXEC_DEFAULT_EXPORT_ERROR,
EXEC_ERROR,
FILE_NOT_FOUND_ERROR,
INTEGRATION_TABLE_CONFLICT_ERROR,
MISSING_EXECUTE_PATH_ERROR,
RENAME_COLUMN_ERROR,
RENAME_TABLE_ERROR,
SHELL_QUERY_MISSING_ERROR
};

View File

@@ -0,0 +1,2 @@
import type { $ZodErrorMap } from 'zod/v4/core';
export declare const errorMap: $ZodErrorMap;

View File

@@ -0,0 +1,101 @@
/**
 * Custom Zod error map that renders config errors as readable messages:
 * error paths in **bold**, expected/received types in backticks, and union
 * failures collapsed into a single summary instead of one error per branch.
 * Returning undefined (no matching case) defers to Zod's default message.
 */
const errorMap = (issue) => {
  const baseErrorPath = flattenErrorPath(issue.path ?? []);
  if (issue.code === "invalid_union") {
    // Group invalid_type errors from all union branches by their path so
    // "expected string | number" prints once rather than per branch.
    let typeOrLiteralErrByPath = /* @__PURE__ */ new Map();
    for (const unionError of issue.errors.flat()) {
      if (unionError.code === "invalid_type") {
        const flattenedErrorPath = flattenErrorPath(unionError.path);
        if (typeOrLiteralErrByPath.has(flattenedErrorPath)) {
          typeOrLiteralErrByPath.get(flattenedErrorPath).expected.push(unionError.expected);
        } else {
          typeOrLiteralErrByPath.set(flattenedErrorPath, {
            code: unionError.code,
            received: unionError.received,
            expected: [unionError.expected],
            message: unionError.message
          });
        }
      }
    }
    const messages = [prefix(baseErrorPath, "Did not match union.")];
    // Only report paths that failed in EVERY branch of the union.
    const details = [...typeOrLiteralErrByPath.entries()].filter(([, error]) => error.expected.length === issue.errors.flat().length).map(
      ([key, error]) => key === baseErrorPath ? (
        // Avoid printing the key again if it's a base error
        `> ${getTypeOrLiteralMsg(error)}`
      ) : `> ${prefix(key, getTypeOrLiteralMsg(error))}`
    );
    if (details.length === 0) {
      // No shared invalid_type errors: reconstruct each branch's expected
      // object shape from its issues and print them as a type union.
      const expectedShapes = [];
      for (const unionErrors of issue.errors) {
        const expectedShape = [];
        for (const _issue of unionErrors) {
          if (_issue.code === "invalid_union") {
            // Nested unions recurse through the same formatter.
            return errorMap(_issue);
          }
          const relativePath = flattenErrorPath(_issue.path).replace(baseErrorPath, "").replace(leadingPeriod, "");
          if ("expected" in _issue && typeof _issue.expected === "string") {
            expectedShape.push(
              relativePath ? `${relativePath}: ${_issue.expected}` : _issue.expected
            );
          } else if ("values" in _issue) {
            expectedShape.push(
              ..._issue.values.filter((v) => typeof v === "string").map((v) => `"${v}"`)
            );
          } else if (relativePath) {
            expectedShape.push(relativePath);
          }
        }
        // A single bare type prints as-is; multiple fields print as a
        // `{ field: type; ... }` shape literal.
        if (expectedShape.length === 1 && !expectedShape[0]?.includes(":")) {
          expectedShapes.push(expectedShape.join(""));
        } else if (expectedShape.length > 0) {
          expectedShapes.push(`{ ${expectedShape.join("; ")} }`);
        }
      }
      if (expectedShapes.length) {
        details.push("> Expected type `" + expectedShapes.join(" | ") + "`");
        details.push("> Received `" + stringify(issue.input) + "`");
      }
    }
    return {
      message: messages.concat(details).join("\n")
    };
  } else if (issue.code === "invalid_type") {
    return {
      message: prefix(
        baseErrorPath,
        getTypeOrLiteralMsg({
          code: issue.code,
          received: typeof issue.input,
          expected: [issue.expected],
          message: issue.message
        })
      )
    };
  } else if (issue.message) {
    return { message: prefix(baseErrorPath, issue.message) };
  }
};
/**
 * Formats a grouped type/literal error as "Expected ... , received ...".
 * Missing values read better as a plain "Required". Unknown codes yield
 * undefined, deferring to the caller's fallback.
 */
const getTypeOrLiteralMsg = (error) => {
  const isMissing = typeof error.received === "undefined" || error.received === "undefined";
  if (isMissing) return error.message ?? "Required";
  const expectedDeduped = new Set(error.expected);
  if (error.code === "invalid_type") {
    return `Expected type \`${unionExpectedVals(expectedDeduped)}\`, received \`${stringify(
      error.received
    )}\``;
  }
  if (error.code === "invalid_literal") {
    return `Expected \`${unionExpectedVals(expectedDeduped)}\`, received \`${stringify(
      error.received
    )}\``;
  }
};
// Collapses a newline plus its indentation into a single space (used by
// `stringify` to compact pretty-printed JSON onto one line).
const newlinePlusWhitespace = /\n\s*/;
const leadingPeriod = /^\./;
// Prefix a message with its (bold) error path, unless the path is empty.
const prefix = (key, msg) => (key.length ? `**${key}**: ${msg}` : msg);
// Render a set of expected values as a type-union string: `a | b | c`.
const unionExpectedVals = (expectedVals) =>
  [...expectedVals].map((expectedVal) => stringify(expectedVal)).join(" | ");
const flattenErrorPath = (errorPath) => errorPath.join(".");
// Single-line JSON rendering of any value.
const stringify = (val) => JSON.stringify(val, null, 1).split(newlinePlusWhitespace).join(" ");
export {
errorMap
};

View File

@@ -0,0 +1,2 @@
import type { AstroIntegration } from 'astro';
export declare function fileURLIntegration(): AstroIntegration;

View File

@@ -0,0 +1,84 @@
import fs from "node:fs";
import path from "node:path";
import { pathToFileURL } from "node:url";
/**
 * Moves a file (despite the name): ensures the destination directory
 * exists, then renames the file into place.
 *
 * FIX: `fs.promises.rename` fails with EXDEV when source and destination
 * are on different filesystems/devices (e.g. tmpfs build dirs); fall back
 * to copy-then-delete in that case.
 */
async function copyFile(toDir, fromUrl, toUrl) {
  await fs.promises.mkdir(toDir, { recursive: true });
  try {
    await fs.promises.rename(fromUrl, toUrl);
  } catch (error) {
    const code = error && typeof error === "object" && "code" in error ? error.code : void 0;
    if (code !== "EXDEV") throw error;
    await fs.promises.copyFile(fromUrl, toUrl);
    await fs.promises.unlink(fromUrl);
  }
}
/**
 * Astro integration backing `?fileurl` imports: importing a file with the
 * `?fileurl` suffix resolves to a URL for that file.
 *  - dev: a file:// URL pointing straight at the source file on disk;
 *  - build: the file is emitted as a Rollup asset and the import resolves
 *    to the emitted asset's URL.
 * After a server build, emitted assets are moved from the client output
 * directory into the server one; after a static build they are deleted.
 */
function fileURLIntegration() {
  // Final asset file names, collected by generateBundle across builds.
  const fileNames = [];
  function createVitePlugin(command) {
    let referenceIds = [];
    return {
      name: "@astrojs/db/file-url",
      enforce: "pre",
      load: {
        filter: {
          id: /\?fileurl$/
        },
        async handler(id) {
          const filePath = id.slice(0, id.indexOf("?"));
          if (command === "build") {
            const data = await fs.promises.readFile(filePath);
            const name = path.basename(filePath);
            const referenceId = this.emitFile({
              name,
              source: data,
              type: "asset"
            });
            referenceIds.push(referenceId);
            // Rollup substitutes the placeholder with the asset's final URL.
            return `export default import.meta.ROLLUP_FILE_URL_${referenceId};`;
          } else {
            // Dev server: no asset pipeline, point at the file directly.
            return `export default new URL(${JSON.stringify(pathToFileURL(filePath).toString())})`;
          }
        }
      },
      generateBundle() {
        // Reference ids only resolve to file names once the bundle exists.
        for (const referenceId of referenceIds) {
          fileNames.push(this.getFileName(referenceId));
        }
        referenceIds = [];
      }
    };
  }
  let config;
  return {
    name: "@astrojs/db/file-url",
    hooks: {
      "astro:config:setup"({ updateConfig, command }) {
        updateConfig({
          vite: {
            plugins: [createVitePlugin(command)]
          }
        });
      },
      "astro:config:done": ({ config: _config }) => {
        // Stash the resolved config for use in astro:build:done below.
        config = _config;
      },
      async "astro:build:done"() {
        if (config.output === "static") {
          // Static output: the URLs were resolved at build time, so the
          // emitted asset files themselves are no longer needed — delete them.
          const unlinks = [];
          for (const fileName of fileNames) {
            const url = new URL(fileName, config.outDir);
            unlinks.push(fs.promises.unlink(url));
          }
          await Promise.all(unlinks);
          // Best-effort removal of the asset dir. NOTE(review): rmdir only
          // removes empty directories; failures are intentionally swallowed.
          const assetDir = new URL(config.build.assets, config.outDir);
          await fs.promises.rmdir(assetDir).catch(() => []);
        } else {
          // Server output: move each asset from the client build dir into
          // the server build dir so it is reachable at runtime.
          const moves = [];
          for (const fileName of fileNames) {
            const fromUrl = new URL(fileName, config.build.client);
            const toUrl = new URL(fileName, config.build.server);
            const toDir = new URL("./", toUrl);
            moves.push(copyFile(toDir, fromUrl, toUrl));
          }
          await Promise.all(moves);
        }
      }
    }
  };
}
export {
fileURLIntegration
};

View File

@@ -0,0 +1,8 @@
import type { AstroIntegration } from 'astro';
import * as z from 'zod/v4';
/** Schema for the optional `astro:db` integration options (currently only `mode`). */
declare const astroDBConfigSchema: z.ZodPrefault<z.ZodOptional<z.ZodObject<{
    mode: z.ZodDefault<z.ZodOptional<z.ZodUnion<readonly [z.ZodLiteral<"node">, z.ZodLiteral<"web">]>>>;
}, z.core.$strip>>>;
export type AstroDBConfig = z.infer<typeof astroDBConfigSchema>;
export declare function integration(options?: AstroDBConfig): AstroIntegration[];
export {};

215
node_modules/@astrojs/db/dist/core/integration/index.js generated vendored Normal file
View File

@@ -0,0 +1,215 @@
import { existsSync } from "node:fs";
import { mkdir, writeFile } from "node:fs/promises";
import { dirname } from "node:path";
import { fileURLToPath } from "node:url";
import colors from "piccolore";
import {
createServer,
loadEnv,
mergeConfig
} from "vite";
import parseArgs from "yargs-parser";
import * as z from "zod/v4";
import { AstroDbError, isDbError } from "../../runtime/utils.js";
import { CONFIG_FILE_NAMES, DB_PATH, VIRTUAL_MODULE_ID } from "../consts.js";
import { EXEC_DEFAULT_EXPORT_ERROR, EXEC_ERROR } from "../errors.js";
import { resolveDbConfig } from "../load-file.js";
import { SEED_DEV_FILE_NAME } from "../queries.js";
import { getDbDirectoryUrl, getRemoteDatabaseInfo } from "../utils.js";
import { fileURLIntegration } from "./file-url.js";
import { getDtsContent } from "./typegen.js";
import {
vitePluginDb
} from "./vite-plugin-db.js";
import { vitePluginDbClient } from "./vite-plugin-db-client.js";
// Validates the options object passed to the `astro:db` integration.
// `.optional().prefault({})` makes the whole argument optional: parsing
// `undefined` yields `{}`, which in turn resolves `mode` to its 'node' default.
const astroDBConfigSchema = z.object({
  /**
   * Sets the mode of the underlying `@libsql/client` connection.
   *
   * In most cases, the default 'node' mode is sufficient. On platforms like Cloudflare, or Deno, you may need to set this to 'web'.
   *
   * @default 'node'
   */
  mode: z.union([z.literal("node"), z.literal("web")]).optional().default("node")
}).optional().prefault({});
// Core `astro:db` Astro integration. Wires the db vite plugins into the user
// config, resolves table/seed configuration, ensures a local database file
// exists in dev, and guards remote-less server builds.
function astroDBIntegration(options) {
  const resolvedConfig = astroDBConfigSchema.parse(options);
  let connectToRemote = false;
  let configFileDependencies = [];
  let root;
  let tempViteServer;
  // Late-bound accessors: the vite plugins are constructed before the db config
  // is loaded, so these throw until `astro:config:done` swaps in real getters.
  let tables = {
    get() {
      throw new Error("[astro:db] INTERNAL Tables not loaded yet");
    }
  };
  let seedFiles = {
    get() {
      throw new Error("[astro:db] INTERNAL Seed files not loaded yet");
    }
  };
  // Same late-binding trick for the seed executor; `inProgress` doubles as a
  // flag the db vite plugin reads to serve seed-specific module contents.
  let seedHandler = {
    execute: () => {
      throw new Error("[astro:db] INTERNAL Seed handler not loaded yet");
    },
    inProgress: false
  };
  let command;
  let finalBuildOutput;
  return {
    name: "astro:db",
    hooks: {
      "astro:config:setup": async ({ updateConfig, config, command: _command, logger }) => {
        command = _command;
        root = config.root;
        if (command === "preview") return;
        let dbPlugin = void 0;
        // `--remote` on the CLI (or the internal test env var) selects the
        // remote database code path. NOTE(review): this assigns a truthy
        // string/undefined rather than a strict boolean — downstream checks
        // are truthiness-based, so behavior is unaffected.
        const args = parseArgs(process.argv.slice(3));
        connectToRemote = process.env.ASTRO_INTERNAL_TEST_REMOTE || args["remote"];
        const dbClientPlugin = vitePluginDbClient({
          connectToRemote,
          mode: resolvedConfig.mode
        });
        if (connectToRemote) {
          dbPlugin = vitePluginDb({
            connectToRemote,
            appToken: getRemoteDatabaseInfo().token,
            tables,
            root: config.root,
            srcDir: config.srcDir,
            output: config.output,
            seedHandler
          });
        } else {
          dbPlugin = vitePluginDb({
            connectToRemote,
            tables,
            seedFiles,
            root: config.root,
            srcDir: config.srcDir,
            output: config.output,
            logger,
            seedHandler
          });
        }
        updateConfig({
          vite: {
            assetsInclude: [DB_PATH],
            plugins: [dbClientPlugin, dbPlugin]
          }
        });
      },
      "astro:config:done": async ({ config, injectTypes, buildOutput }) => {
        if (command === "preview") return;
        finalBuildOutput = buildOutput;
        // Resolve user + integration db config, then replace the throwing
        // placeholders declared above with real getters.
        const { dbConfig, dependencies, integrationSeedPaths } = await resolveDbConfig(config);
        tables.get = () => dbConfig.tables;
        seedFiles.get = () => integrationSeedPaths;
        configFileDependencies = dependencies;
        // For local mode, make sure the on-disk database file exists.
        const localDbUrl = new URL(DB_PATH, config.root);
        if (!connectToRemote && !existsSync(localDbUrl)) {
          await mkdir(dirname(fileURLToPath(localDbUrl)), { recursive: true });
          await writeFile(localDbUrl, "");
        }
        injectTypes({
          filename: "db.d.ts",
          content: getDtsContent(tables.get() ?? {})
        });
      },
      "astro:server:setup": async ({ server, logger }) => {
        const environment = server.environments.ssr;
        seedHandler.execute = async (fileUrl) => {
          await executeSeedFile({ fileUrl, environment });
        };
        // Restart the dev server whenever a db config file (or anything it
        // imports) changes.
        const filesToWatch = [
          ...CONFIG_FILE_NAMES.map((c) => new URL(c, getDbDirectoryUrl(root))),
          ...configFileDependencies.map((c) => new URL(c, root))
        ];
        server.watcher.on("all", (_event, relativeEntry) => {
          const entry = new URL(relativeEntry, root);
          if (filesToWatch.some((f) => entry.href === f.href)) {
            server.restart();
          }
        });
        logger.info(
          connectToRemote ? "Connected to remote database." : "New local database created."
        );
        if (connectToRemote) return;
        // Eagerly import the virtual module when seed files exist so seeding
        // runs at server start instead of on first request.
        const localSeedPaths = SEED_DEV_FILE_NAME.map(
          (name) => new URL(name, getDbDirectoryUrl(root))
        );
        if (seedFiles.get().length || localSeedPaths.find((path) => existsSync(path))) {
          await environment.runner.import(VIRTUAL_MODULE_ID).catch((e) => {
            logger.error(e instanceof Error ? e.message : String(e));
          });
        }
      },
      "astro:build:start": async ({ logger }) => {
        // A server-output build against the local file db is almost always a
        // mistake; fail fast with a hint.
        if (!connectToRemote && !databaseFileEnvDefined() && finalBuildOutput === "server") {
          const message = `Attempting to build without the --remote flag or the ASTRO_DATABASE_FILE environment variable defined. You probably want to pass --remote to astro build.`;
          const hint = "Learn more connecting to libSQL: https://docs.astro.build/en/guides/astro-db/#connect-a-libsql-database-for-production";
          throw new AstroDbError(message, hint);
        }
        logger.info(
          "database: " + (connectToRemote ? colors.yellow("remote") : colors.blue("local database."))
        );
      },
      "astro:build:setup": async ({ vite }) => {
        // Builds have no dev server, so spin up a throwaway vite server to
        // execute seed files through.
        tempViteServer = await getTempViteServer({ viteConfig: vite });
        const environment = tempViteServer.environments.ssr;
        seedHandler.execute = async (fileUrl) => {
          await executeSeedFile({ fileUrl, environment });
        };
      },
      "astro:build:done": async () => {
        await tempViteServer?.close();
      }
    }
  };
}
/**
 * True when `ASTRO_DATABASE_FILE` is defined, either in a Vite-loaded `.env`
 * file for the current working directory or directly on the process env.
 */
function databaseFileEnvDefined() {
  const dotEnv = loadEnv("", process.cwd());
  const definedInDotEnv = dotEnv.ASTRO_DATABASE_FILE != null;
  const definedInProcess = process.env.ASTRO_DATABASE_FILE != null;
  return definedInDotEnv || definedInProcess;
}
/**
 * Public entry point: composes the core db integration with the file-url
 * helper integration. Returns both so Astro registers them in order.
 */
function integration(options) {
  const core = astroDBIntegration(options);
  const fileUrls = fileURLIntegration();
  return [core, fileUrls];
}
// Import a seed file through the given vite environment and run its default
// export. Non-function default exports and known db errors are re-wrapped as
// AstroDbError with friendlier messages; anything else propagates unchanged.
async function executeSeedFile({
  fileUrl,
  environment
}) {
  // The runner expects a decoded pathname, not a file:// URL string.
  const pathname = decodeURIComponent(fileUrl.pathname);
  const mod = await environment.runner.import(pathname);
  if (typeof mod.default !== "function") {
    throw new AstroDbError(EXEC_DEFAULT_EXPORT_ERROR(fileURLToPath(fileUrl)));
  }
  try {
    await mod.default();
  } catch (e) {
    if (isDbError(e)) {
      throw new AstroDbError(EXEC_ERROR(e.message));
    }
    throw e;
  }
}
// Spin up a throwaway middleware-mode vite server used to execute seed files
// during `astro build`. HMR, file watching and websockets are disabled. The
// client hot channel's "error" payloads are rethrown so seed failures fail the
// build instead of being swallowed by the silent logger.
async function getTempViteServer({ viteConfig }) {
  const tempViteServer = await createServer(
    mergeConfig(viteConfig, {
      server: { middlewareMode: true, hmr: false, watch: null, ws: false },
      optimizeDeps: { noDiscovery: true },
      ssr: { external: [] },
      logLevel: "silent"
    })
  );
  const hotSend = tempViteServer.environments.client.hot.send;
  tempViteServer.environments.client.hot.send = (payload) => {
    if (payload.type === "error") {
      throw payload.err;
    }
    return hotSend(payload);
  };
  return tempViteServer;
}
export {
integration
};

View File

@@ -0,0 +1,2 @@
import type { DBTables } from '../types.js';
/** Renders the `astro:db` ambient module declaration (`db.d.ts` contents) for the given tables. */
export declare function getDtsContent(tables: DBTables): string;

View File

@@ -0,0 +1,21 @@
import { RUNTIME_IMPORT } from "../consts.js";
/**
 * Render the `astro:db` ambient module declaration for the given tables.
 * Each table becomes one typed `export const` entry inside the module block.
 */
function getDtsContent(tables) {
  const tableDeclarations = Object.entries(tables)
    .map(([name, table]) => generateTableType(name, table))
    .join("\n");
  const content = `// This file is generated by Astro DB
declare module 'astro:db' {
${tableDeclarations}
}
`;
  return content;
}
/**
 * Build the typed `export const <name>` declaration for a single table.
 * Columns flagged `deprecated` are filtered out so they disappear from the
 * generated typings while remaining in the runtime table definition.
 */
function generateTableType(name, table) {
  const activeColumns = Object.entries(table.columns).filter(
    ([, column]) => !column.schema.deprecated
  );
  const sanitizedColumns = Object.fromEntries(activeColumns);
  let tableType = `	export const ${name}: import(${RUNTIME_IMPORT}).Table<
		${JSON.stringify(name)},
		${JSON.stringify(sanitizedColumns)}
	>;`;
  return tableType;
}
export {
getDtsContent
};

View File

@@ -0,0 +1,7 @@
import type { VitePlugin } from '../utils.js';
/** Options for the db-client virtual module plugin. */
type VitePluginDBClientParams = {
    connectToRemote: boolean;
    mode: 'node' | 'web';
};
/** Vite plugin that serves the virtual libSQL client module (remote or local). */
export declare function vitePluginDbClient(params: VitePluginDBClientParams): VitePlugin;
export {};

View File

@@ -0,0 +1,50 @@
import { DB_CLIENTS, VIRTUAL_CLIENT_MODULE_ID } from "../consts.js";
/**
 * Module source for the virtual client when targeting a remote libSQL
 * database: 'web' selects the web client, any other mode the node client.
 */
function getRemoteClientModule(mode) {
  const clientSource = mode === "web" ? DB_CLIENTS.web : DB_CLIENTS.node;
  return `export { createClient } from '${clientSource}';`;
}
/**
 * Module source for the virtual client when targeting the local database file.
 * Every mode currently maps to the same local client (the parameter is kept
 * for symmetry with `getRemoteClientModule`).
 */
function getLocalClientModule(mode) {
  return `export { createClient } from '${DB_CLIENTS.local}';`;
}
const resolved = "\0" + VIRTUAL_CLIENT_MODULE_ID;
/**
 * Vite plugin exposing the virtual db client module. `resolveId` maps the
 * public virtual id to its \0-prefixed internal id; `load` emits a re-export
 * of the appropriate libSQL client factory (remote vs. local).
 */
function vitePluginDbClient(params) {
  const resolveIdHook = {
    filter: {
      id: new RegExp(`^${VIRTUAL_CLIENT_MODULE_ID}$`)
    },
    handler() {
      return resolved;
    }
  };
  const loadHook = {
    filter: {
      id: new RegExp(`^${resolved}$`)
    },
    handler() {
      if (params.connectToRemote === true) {
        return getRemoteClientModule(params.mode);
      }
      return getLocalClientModule(params.mode);
    }
  };
  return {
    name: "virtual:astro:db-client",
    enforce: "pre",
    resolveId: resolveIdHook,
    load: loadHook
  };
}
export {
vitePluginDbClient
};

View File

@@ -0,0 +1,60 @@
import type { AstroConfig, AstroIntegrationLogger } from 'astro';
import type { DBTables } from '../types.js';
import { type VitePlugin } from '../utils.js';
export type LateTables = {
get: () => DBTables;
};
export type LateSeedFiles = {
get: () => Array<string | URL>;
};
export type SeedHandler = {
inProgress: boolean;
execute: (fileUrl: URL) => Promise<void>;
};
type VitePluginDBParams = {
connectToRemote: false;
tables: LateTables;
seedFiles: LateSeedFiles;
srcDir: URL;
root: URL;
logger?: AstroIntegrationLogger;
output: AstroConfig['output'];
seedHandler: SeedHandler;
} | {
connectToRemote: true;
tables: LateTables;
appToken: string;
srcDir: URL;
root: URL;
output: AstroConfig['output'];
seedHandler: SeedHandler;
};
export declare function vitePluginDb(params: VitePluginDBParams): VitePlugin;
export declare function getConfigVirtualModContents(): string;
export declare function getLocalVirtualModContents({ tables, root, localExecution, }: {
tables: DBTables;
root: URL;
/**
* Used for the execute command to import the client directly.
* In other cases, we use the runtime only vite virtual module.
*
* This is used to ensure that the client is imported correctly
* when executing commands like `astro db execute`.
*/
localExecution: boolean;
}): string;
export declare function getRemoteVirtualModContents({ tables, appToken, isBuild, output, localExecution, }: {
tables: DBTables;
appToken: string;
isBuild: boolean;
output: AstroConfig['output'];
/**
* Used for the execute command to import the client directly.
* In other cases, we use the runtime only vite virtual module.
*
* This is used to ensure that the client is imported correctly
* when executing commands like `astro db execute`.
*/
localExecution: boolean;
}): string;
export {};

View File

@@ -0,0 +1,191 @@
import { existsSync } from "node:fs";
import { fileURLToPath } from "node:url";
import { sql } from "drizzle-orm";
import { SQLiteAsyncDialect } from "drizzle-orm/sqlite-core";
import { normalizeDatabaseUrl } from "../../runtime/index.js";
import {
DB_CLIENTS,
DB_PATH,
RUNTIME_IMPORT,
RUNTIME_VIRTUAL_IMPORT,
VIRTUAL_CLIENT_MODULE_ID,
VIRTUAL_MODULE_ID
} from "../consts.js";
import { createClient } from "../db-client/libsql-local.js";
import { getResolvedFileUrl } from "../load-file.js";
import { getCreateIndexQueries, getCreateTableQuery, SEED_DEV_FILE_NAME } from "../queries.js";
import {
getAstroEnv,
getDbDirectoryUrl,
getRemoteDatabaseInfo
} from "../utils.js";
// Internal (\0-prefixed) ids for the virtual `astro:db` module. A distinct id
// is used while a seed file is executing so the seed's own `astro:db` import
// resolves to a separate module instance (see resolveId/load below).
const resolved = {
  module: "\0" + VIRTUAL_MODULE_ID,
  importedFromSeedFile: "\0" + VIRTUAL_MODULE_ID + ":seed"
};
/**
 * Vite plugin serving the virtual `astro:db` module.
 *
 * Remote mode returns remote-client module contents. Local mode recreates the
 * local tables, runs any seed files, then returns local-client contents. A
 * separate resolved id is handed out while a seed file is running so the seed
 * import gets its own module instance.
 */
function vitePluginDb(params) {
  let command = "build";
  return {
    name: "astro:db",
    enforce: "pre",
    configResolved(resolvedConfig) {
      // Captured so load() can distinguish dev from build when generating
      // remote module contents.
      command = resolvedConfig.command;
    },
    resolveId: {
      filter: {
        id: new RegExp(`^${VIRTUAL_MODULE_ID}$`)
      },
      handler() {
        if (params.seedHandler.inProgress) {
          return resolved.importedFromSeedFile;
        }
        return resolved.module;
      }
    },
    load: {
      filter: {
        id: new RegExp(`^(${resolved.module}|${resolved.importedFromSeedFile})$`)
      },
      async handler(id) {
        if (params.connectToRemote) {
          return getRemoteVirtualModContents({
            appToken: params.appToken,
            tables: params.tables.get(),
            isBuild: command === "build",
            output: params.output,
            localExecution: false
          });
        }
        // A seed file importing `astro:db` gets module contents directly,
        // without re-running table recreation/seeding (which is in progress).
        if (id === resolved.importedFromSeedFile) {
          return getLocalVirtualModContents({
            root: params.root,
            tables: params.tables.get(),
            localExecution: false
          });
        }
        await recreateTables(params);
        const seedFiles = getResolvedSeedFiles(params);
        // Fix: `getResolvedSeedFiles` returns a plain array of URLs, so a
        // regular `for...of` is correct; the previous `for await` over a
        // synchronous iterable of non-promises was misleading.
        for (const seedFile of seedFiles) {
          this.addWatchFile(fileURLToPath(seedFile));
          if (existsSync(seedFile)) {
            params.seedHandler.inProgress = true;
            await params.seedHandler.execute(seedFile);
          }
        }
        if (params.seedHandler.inProgress) {
          (params.logger ?? console).info("Seeded database.");
          params.seedHandler.inProgress = false;
        }
        return getLocalVirtualModContents({
          root: params.root,
          tables: params.tables.get(),
          localExecution: false
        });
      }
    }
  };
}
/**
 * Module contents used when bundling a db config file: only the runtime
 * re-exports, with no client or table bindings.
 */
function getConfigVirtualModContents() {
  const runtimeReExport = `export * from ${RUNTIME_VIRTUAL_IMPORT}`;
  return runtimeReExport;
}
/**
 * Import statement for the client factory: the node client directly when
 * running `astro db execute` (localExecution), otherwise the runtime-only
 * virtual client module.
 */
function getDBModule(localExecution) {
  if (localExecution) {
    return `import { createClient } from '${DB_CLIENTS.node}';`;
  }
  return `import { createClient } from '${VIRTUAL_CLIENT_MODULE_ID}';`;
}
// Source of the virtual `astro:db` module for the local database: resolves the
// db URL (honoring ASTRO_DATABASE_FILE), creates a client against it, and
// re-exports the runtime plus one drizzle table binding per configured table.
function getLocalVirtualModContents({
  tables,
  root,
  localExecution
}) {
  const { ASTRO_DATABASE_FILE } = getAstroEnv();
  const dbUrl = new URL(DB_PATH, root);
  const clientImport = getDBModule(localExecution);
  return `
import { asDrizzleTable, normalizeDatabaseUrl } from ${RUNTIME_IMPORT};
${clientImport}
const dbUrl = normalizeDatabaseUrl(${JSON.stringify(ASTRO_DATABASE_FILE)}, ${JSON.stringify(dbUrl)});
export const db = createClient({ url: dbUrl });
export * from ${RUNTIME_VIRTUAL_IMPORT};
${getStringifiedTableExports(tables)}`;
}
// Source of the virtual `astro:db` module for a remote database. Token and URL
// expressions differ between dev and build so secrets are read from the
// environment at runtime in server builds rather than baked into the bundle.
function getRemoteVirtualModContents({
  tables,
  appToken,
  isBuild,
  output,
  localExecution
}) {
  const dbInfo = getRemoteDatabaseInfo();
  // Token expression: server builds defer entirely to the runtime env var;
  // static builds fall back to the inlined token; dev always inlines it.
  function appTokenArg() {
    if (isBuild) {
      if (output === "server") {
        return `process.env.ASTRO_DB_APP_TOKEN`;
      } else {
        return `process.env.ASTRO_DB_APP_TOKEN ?? ${JSON.stringify(appToken)}`;
      }
    } else {
      return JSON.stringify(appToken);
    }
  }
  // URL expression: builds allow an env override, dev uses the resolved URL.
  function dbUrlArg() {
    const dbStr = JSON.stringify(dbInfo.url);
    if (isBuild) {
      return `import.meta.env.ASTRO_DB_REMOTE_URL ?? ${dbStr}`;
    } else {
      return dbStr;
    }
  }
  const clientImport = getDBModule(localExecution);
  return `
import {asDrizzleTable} from ${RUNTIME_IMPORT};
${clientImport}
export const db = await createClient({
  url: ${dbUrlArg()},
  token: ${appTokenArg()},
});
export * from ${RUNTIME_VIRTUAL_IMPORT};
${getStringifiedTableExports(tables)}
`;
}
/**
 * One `export const <name> = asDrizzleTable(...)` line per table, joined by
 * newlines, for splicing into the generated virtual module source.
 */
function getStringifiedTableExports(tables) {
  const exportLines = [];
  for (const [name, table] of Object.entries(tables)) {
    const serializedTable = JSON.stringify(table);
    exportLines.push(
      `export const ${name} = asDrizzleTable(${JSON.stringify(name)}, ${serializedTable}, false)`
    );
  }
  return exportLines.join("\n");
}
// Dialect instance used below for identifier escaping.
const sqlite = new SQLiteAsyncDialect();
// Drop and recreate every configured table (plus its indexes) in the local db.
// All statements run in a single batch with deferred foreign keys so
// inter-table references don't fail mid-recreation.
async function recreateTables({ tables, root }) {
  const { ASTRO_DATABASE_FILE } = getAstroEnv();
  const dbUrl = normalizeDatabaseUrl(ASTRO_DATABASE_FILE, new URL(DB_PATH, root).href);
  const db = createClient({ url: dbUrl });
  const setupQueries = [];
  for (const [name, table] of Object.entries(tables.get() ?? {})) {
    const dropQuery = sql.raw(`DROP TABLE IF EXISTS ${sqlite.escapeName(name)}`);
    const createQuery = sql.raw(getCreateTableQuery(name, table));
    const indexQueries = getCreateIndexQueries(name, table);
    setupQueries.push(dropQuery, createQuery, ...indexQueries.map((s) => sql.raw(s)));
  }
  await db.batch([
    db.run(sql`pragma defer_foreign_keys=true;`),
    ...setupQueries.map((q) => db.run(q))
  ]);
}
/**
 * Seed files to run: integration-provided entrypoints first, then the default
 * dev seed file candidates under the project's db directory.
 */
function getResolvedSeedFiles({ root, seedFiles }) {
  const dbDir = getDbDirectoryUrl(root);
  const localSeedFiles = SEED_DEV_FILE_NAME.map((name) => new URL(name, dbDir));
  const integrationSeedFiles = seedFiles.get().map((s) => getResolvedFileUrl(root, s));
  return integrationSeedFiles.concat(localSeedFiles);
}
export {
getConfigVirtualModContents,
getLocalVirtualModContents,
getRemoteVirtualModContents,
vitePluginDb
};

126
node_modules/@astrojs/db/dist/core/load-file.d.ts generated vendored Normal file
View File

@@ -0,0 +1,126 @@
import type { AstroConfig } from 'astro';
import './types.js';
/**
* Load a users `astro:db` configuration file and additional configuration files provided by integrations.
*/
export declare function resolveDbConfig({ root, integrations, }: Pick<AstroConfig, 'root' | 'integrations'>): Promise<{
/** Resolved `astro:db` config, including tables added by integrations. */
dbConfig: {
tables: Record<string, {
indexes: Record<string, {
on: string | string[];
unique?: boolean | undefined;
}>;
columns: Record<string, {
type: "boolean";
schema: {
optional: boolean;
unique: boolean;
deprecated: boolean;
label?: string | undefined;
name?: string | undefined;
collection?: string | undefined;
default?: boolean | import("../runtime/types.js").SerializedSQL | undefined;
};
} | {
type: "number";
schema: ({
unique: boolean;
deprecated: boolean;
name?: string | undefined;
label?: string | undefined;
collection?: string | undefined;
} & ({
primaryKey: false;
optional: boolean;
default?: number | import("../runtime/types.js").SerializedSQL | undefined;
} | {
primaryKey: true;
optional?: false | undefined;
default?: undefined;
})) & {
references?: import("./types.js").NumberColumn;
};
} | {
type: "text";
schema: ({
unique: boolean;
deprecated: boolean;
name?: string | undefined;
label?: string | undefined;
collection?: string | undefined;
default?: string | import("../runtime/types.js").SerializedSQL | undefined;
multiline?: boolean | undefined;
enum?: [string, ...string[]] | undefined;
} & ({
primaryKey: false;
optional: boolean;
} | {
primaryKey: true;
optional?: false | undefined;
})) & {
references?: import("./types.js").TextColumn;
};
} | {
type: "date";
schema: {
optional: boolean;
unique: boolean;
deprecated: boolean;
label?: string | undefined;
name?: string | undefined;
collection?: string | undefined;
default?: string | import("../runtime/types.js").SerializedSQL | undefined;
};
} | {
type: "json";
schema: {
optional: boolean;
unique: boolean;
deprecated: boolean;
label?: string | undefined;
name?: string | undefined;
collection?: string | undefined;
default?: unknown;
};
}>;
deprecated: boolean;
foreignKeys?: (Omit<{
columns: import("./schemas.js").MaybeArray<string>;
references: () => import("./schemas.js").MaybeArray<Omit<import("zod/v4").input<typeof import("./schemas.js").referenceableColumnSchema>, "references">>;
}, "references"> & {
references: import("./schemas.js").MaybeArray<Omit<import("zod/v4").infer<typeof import("./schemas.js").referenceableColumnSchema>, "references">>;
})[] | undefined;
}>;
};
/** Dependencies imported into the user config file. */
dependencies: string[];
/** Additional `astro:db` seed file paths provided by integrations. */
integrationSeedPaths: (string | URL)[];
}>;
export declare function getResolvedFileUrl(root: URL, filePathOrUrl: string | URL): URL;
/**
* Bundle arbitrary `mjs` or `ts` file.
* Simplified fork from Vite's `bundleConfigFile` function.
*
* @see https://github.com/vitejs/vite/blob/main/packages/vite/src/node/config.ts#L961
*/
export declare function bundleFile({ fileUrl, root, virtualModContents, }: {
fileUrl: URL;
root: URL;
virtualModContents: string;
}): Promise<{
code: string;
dependencies: string[];
}>;
/**
* Forked from Vite config loader, replacing CJS-based path concat with ESM only
*
* @see https://github.com/vitejs/vite/blob/main/packages/vite/src/node/config.ts#L1074
*/
export declare function importBundledFile({ code, root, }: {
code: string;
root: URL;
}): Promise<{
default?: unknown;
}>;

170
node_modules/@astrojs/db/dist/core/load-file.js generated vendored Normal file
View File

@@ -0,0 +1,170 @@
import { existsSync } from "node:fs";
import { unlink, writeFile } from "node:fs/promises";
import { createRequire } from "node:module";
import { fileURLToPath, pathToFileURL } from "node:url";
import { build as esbuild } from "esbuild";
import { CONFIG_FILE_NAMES, VIRTUAL_MODULE_ID } from "./consts.js";
import { INTEGRATION_TABLE_CONFLICT_ERROR } from "./errors.js";
import { errorMap } from "./integration/error-map.js";
import { getConfigVirtualModContents } from "./integration/vite-plugin-db.js";
import { dbConfigSchema } from "./schemas.js";
import "./types.js";
import { getAstroEnv, getDbDirectoryUrl } from "./utils.js";
// Load and validate the user's db config, then let each integration with an
// `astro:db:setup` hook contribute config/seed entrypoints. Integration tables
// are merged in; a name collision (with the user's tables or another
// integration's) is a hard error.
async function resolveDbConfig({
  root,
  integrations
}) {
  const { mod, dependencies } = await loadUserConfigFile(root);
  const userDbConfig = dbConfigSchema.parse(mod?.default ?? {}, { error: errorMap });
  const dbConfig = { tables: userDbConfig.tables ?? {} };
  const integrationDbConfigPaths = [];
  const integrationSeedPaths = [];
  // Collect entrypoints synchronously; the hook is invoked with a collector,
  // not awaited — NOTE(review): async `astro:db:setup` hooks would not be
  // awaited here.
  for (const integration of integrations) {
    const { name, hooks } = integration;
    if (hooks["astro:db:setup"]) {
      hooks["astro:db:setup"]({
        extendDb({ configEntrypoint, seedEntrypoint }) {
          if (configEntrypoint) {
            integrationDbConfigPaths.push({ name, configEntrypoint });
          }
          if (seedEntrypoint) {
            integrationSeedPaths.push(seedEntrypoint);
          }
        }
      });
    }
  }
  for (const { name, configEntrypoint } of integrationDbConfigPaths) {
    const loadedConfig = await loadIntegrationConfigFile(root, configEntrypoint);
    const integrationDbConfig = dbConfigSchema.parse(loadedConfig.mod?.default ?? {}, {
      error: errorMap
    });
    for (const key in integrationDbConfig.tables) {
      if (key in dbConfig.tables) {
        const isUserConflict = key in (userDbConfig.tables ?? {});
        throw new Error(INTEGRATION_TABLE_CONFLICT_ERROR(name, key, isUserConflict));
      } else {
        dbConfig.tables[key] = integrationDbConfig.tables[key];
      }
    }
  }
  return {
    /** Resolved `astro:db` config, including tables added by integrations. */
    dbConfig,
    /** Dependencies imported into the user config file. */
    dependencies,
    /** Additional `astro:db` seed file paths provided by integrations. */
    integrationSeedPaths
  };
}
/**
 * Locate and bundle the user's db config file, if any. When several supported
 * file names exist, the one listed last in CONFIG_FILE_NAMES wins — the loop
 * intentionally does not break early (preserved behavior).
 */
async function loadUserConfigFile(root) {
  let configFileUrl;
  const dbDir = getDbDirectoryUrl(root);
  for (const fileName of CONFIG_FILE_NAMES) {
    const candidate = new URL(fileName, dbDir);
    if (existsSync(candidate)) {
      configFileUrl = candidate;
    }
  }
  return await loadAndBundleDbConfigFile({ root, fileUrl: configFileUrl });
}
/**
 * Normalize a seed/config entrypoint to a file URL. Strings go through Node's
 * CommonJS resolution relative to the project root (so bare package specifiers
 * work); URL instances pass through unchanged.
 */
function getResolvedFileUrl(root, filePathOrUrl) {
  if (typeof filePathOrUrl !== "string") {
    return filePathOrUrl;
  }
  const projectRequire = createRequire(root);
  const resolvedFilePath = projectRequire.resolve(filePathOrUrl);
  return pathToFileURL(resolvedFilePath);
}
/** Resolve an integration-provided db config entrypoint and bundle it. */
async function loadIntegrationConfigFile(root, filePathOrUrl) {
  const fileUrl = getResolvedFileUrl(root, filePathOrUrl);
  return await loadAndBundleDbConfigFile({ root, fileUrl });
}
/**
 * Bundle a db config file with esbuild and import the result.
 * Returns `{ mod: undefined, dependencies: [] }` when no file was provided.
 */
async function loadAndBundleDbConfigFile({
  root,
  fileUrl
}) {
  if (!fileUrl) {
    return { mod: void 0, dependencies: [] };
  }
  const bundled = await bundleFile({
    virtualModContents: getConfigVirtualModContents(),
    root,
    fileUrl
  });
  const mod = await importBundledFile({ code: bundled.code, root });
  return { mod, dependencies: bundled.dependencies };
}
// Bundle a config/seed file to ESM with esbuild, in memory (`write: false`).
// `astro:db` imports inside the file are redirected to the provided virtual
// module contents. The metafile's inputs double as the dependency list used
// for dev-server restarts.
async function bundleFile({
  fileUrl,
  root,
  virtualModContents
}) {
  const { ASTRO_DATABASE_FILE } = getAstroEnv();
  const result = await esbuild({
    absWorkingDir: process.cwd(),
    entryPoints: [fileURLToPath(fileUrl)],
    outfile: "out.js",
    // Keep node_modules packages external; only the user's own files bundle.
    packages: "external",
    write: false,
    target: ["node16"],
    platform: "node",
    bundle: true,
    format: "esm",
    sourcemap: "inline",
    metafile: true,
    define: {
      "import.meta.env.ASTRO_DATABASE_FILE": JSON.stringify(ASTRO_DATABASE_FILE ?? "")
    },
    plugins: [
      {
        name: "resolve-astro-db",
        setup(build) {
          // Intercept `astro:db` and serve the virtual module contents.
          build.onResolve({ filter: /^astro:db$/ }, ({ path }) => {
            return { path, namespace: VIRTUAL_MODULE_ID };
          });
          build.onLoad({ namespace: VIRTUAL_MODULE_ID, filter: /.*/ }, () => {
            return {
              contents: virtualModContents,
              // Needed to resolve runtime dependencies
              resolveDir: fileURLToPath(root)
            };
          });
        }
      }
    ]
  });
  const file = result.outputFiles[0];
  if (!file) {
    throw new Error(`Unexpected: no output file`);
  }
  return {
    code: file.text,
    dependencies: Object.keys(result.metafile.inputs)
  };
}
/**
 * Write bundled code to a timestamped temp `.mjs` file under the project root,
 * dynamically import it, then best-effort delete the temp file.
 */
async function importBundledFile({
  code,
  root
}) {
  const tmpFileUrl = new URL(`./db.timestamp-${Date.now()}.mjs`, root);
  await writeFile(tmpFileUrl, code, { encoding: "utf8" });
  try {
    return await import(
      /* @vite-ignore */
      tmpFileUrl.toString()
    );
  } finally {
    // Cleanup failures (e.g. on Windows file locking) are ignored.
    await unlink(tmpFileUrl).catch(() => {});
  }
}
export {
bundleFile,
getResolvedFileUrl,
importBundledFile,
resolveDbConfig
};

53
node_modules/@astrojs/db/dist/core/queries.d.ts generated vendored Normal file
View File

@@ -0,0 +1,53 @@
import type { BooleanColumn, ColumnType, DateColumn, DBColumn, DBTable, JsonColumn, NumberColumn, TextColumn } from './types.js';
export declare const SEED_DEV_FILE_NAME: string[];
export declare function getDropTableIfExistsQuery(tableName: string): string;
export declare function getCreateTableQuery(tableName: string, table: DBTable): string;
export declare function getCreateIndexQueries(tableName: string, table: Pick<DBTable, 'indexes'>): string[];
export declare function schemaTypeToSqlType(type: ColumnType): 'text' | 'integer';
export declare function getModifiers(columnName: string, column: DBColumn): string;
export declare function getReferencesConfig(column: DBColumn): {
type: "number";
schema: ({
unique: boolean;
deprecated: boolean;
name?: string | undefined;
label?: string | undefined;
collection?: string | undefined;
} & ({
primaryKey: false;
optional: boolean;
default?: number | import("../runtime/types.js").SerializedSQL | undefined;
} | {
primaryKey: true;
optional?: false | undefined;
default?: undefined;
})) & {
references?: NumberColumn;
};
} | {
type: "text";
schema: ({
unique: boolean;
deprecated: boolean;
name?: string | undefined;
label?: string | undefined;
collection?: string | undefined;
default?: string | import("../runtime/types.js").SerializedSQL | undefined;
multiline?: boolean | undefined;
enum?: [string, ...string[]] | undefined;
} & ({
primaryKey: false;
optional: boolean;
} | {
primaryKey: true;
optional?: false | undefined;
})) & {
references?: TextColumn;
};
} | undefined;
type WithDefaultDefined<T extends DBColumn> = T & {
schema: Required<Pick<T['schema'], 'default'>>;
};
type DBColumnWithDefault = WithDefaultDefined<TextColumn> | WithDefaultDefined<DateColumn> | WithDefaultDefined<NumberColumn> | WithDefaultDefined<BooleanColumn> | WithDefaultDefined<JsonColumn>;
export declare function hasDefault(column: DBColumn): column is DBColumnWithDefault;
export {};

166
node_modules/@astrojs/db/dist/core/queries.js generated vendored Normal file
View File

@@ -0,0 +1,166 @@
import { SQLiteAsyncDialect } from "drizzle-orm/sqlite-core";
import colors from "piccolore";
import {
FOREIGN_KEY_DNE_ERROR,
FOREIGN_KEY_REFERENCES_EMPTY_ERROR,
FOREIGN_KEY_REFERENCES_LENGTH_ERROR,
REFERENCE_DNE_ERROR
} from "../runtime/errors.js";
import { isSerializedSQL } from "../runtime/types.js";
import { hasPrimaryKey } from "../runtime/utils.js";
// Shared dialect instance for identifier/string escaping in the query builders.
const sqlite = new SQLiteAsyncDialect();
// Supported dev seed file names looked up inside the project's db directory.
const SEED_DEV_FILE_NAME = ["seed.ts", "seed.js", "seed.mjs", "seed.mts"];
/** SQL to drop `tableName` if it exists, with the identifier safely escaped. */
function getDropTableIfExistsQuery(tableName) {
  const escapedName = sqlite.escapeName(tableName);
  return `DROP TABLE IF EXISTS ${escapedName}`;
}
/**
 * Build the CREATE TABLE statement for a table definition. When no column is
 * a primary key, a surrogate `_id INTEGER PRIMARY KEY` column is prepended.
 * Foreign key clauses follow the column definitions.
 */
function getCreateTableQuery(tableName, table) {
  const definitionParts = [];
  const hasExplicitPrimaryKey = Object.entries(table.columns).some(
    ([, column]) => hasPrimaryKey(column)
  );
  if (!hasExplicitPrimaryKey) {
    definitionParts.push("_id INTEGER PRIMARY KEY");
  }
  for (const [columnName, column] of Object.entries(table.columns)) {
    const sqlType = schemaTypeToSqlType(column.type);
    const modifiers = getModifiers(columnName, column);
    definitionParts.push(`${sqlite.escapeName(columnName)} ${sqlType}${modifiers}`);
  }
  definitionParts.push(...getCreateForeignKeyQueries(tableName, table));
  return `CREATE TABLE ${sqlite.escapeName(tableName)} (${definitionParts.join(", ")})`;
}
/** One CREATE [UNIQUE] INDEX statement per entry in `table.indexes`. */
function getCreateIndexQueries(tableName, table) {
  const queries = [];
  for (const [indexName, indexProps] of Object.entries(table.indexes ?? {})) {
    const escapedColumns = asArray(indexProps.on)
      .map((colName) => sqlite.escapeName(colName))
      .join(", ");
    const uniqueKeyword = indexProps.unique ? "UNIQUE " : "";
    queries.push(
      `CREATE ${uniqueKeyword}INDEX ${sqlite.escapeName(indexName)} ON ${sqlite.escapeName(tableName)} (${escapedColumns})`
    );
  }
  return queries;
}
// Build the FOREIGN KEY clauses for a CREATE TABLE statement. Validates that
// local columns and referenced columns pair up, and derives the referenced
// table name from the first referenced column's `collection` metadata.
function getCreateForeignKeyQueries(tableName, table) {
  let queries = [];
  for (const foreignKey of table.foreignKeys ?? []) {
    const columns = asArray(foreignKey.columns);
    const references = asArray(foreignKey.references);
    if (columns.length !== references.length) {
      throw new Error(FOREIGN_KEY_REFERENCES_LENGTH_ERROR(tableName));
    }
    const firstReference = references[0];
    if (!firstReference) {
      throw new Error(FOREIGN_KEY_REFERENCES_EMPTY_ERROR(tableName));
    }
    // All references are assumed to point at the same table; only the first
    // one's `collection` is consulted.
    const referencedTable = firstReference.schema.collection;
    if (!referencedTable) {
      throw new Error(FOREIGN_KEY_DNE_ERROR(tableName));
    }
    const query = `FOREIGN KEY (${columns.map((f) => sqlite.escapeName(f)).join(", ")}) REFERENCES ${sqlite.escapeName(referencedTable)}(${references.map((r) => sqlite.escapeName(r.schema.name)).join(", ")})`;
    queries.push(query);
  }
  return queries;
}
/** Wrap a scalar in an array; arrays pass through unchanged. */
function asArray(value) {
  if (Array.isArray(value)) {
    return value;
  }
  return [value];
}
/**
 * Map a column type to its SQLite storage class: dates and JSON are stored as
 * text, booleans as integers. Unknown types yield undefined, matching the
 * original exhaustive switch with no default branch.
 */
function schemaTypeToSqlType(type) {
  const storageClassByType = {
    date: "text",
    text: "text",
    json: "text",
    number: "integer",
    boolean: "integer"
  };
  return storageClassByType[type];
}
// Build the column modifier suffix (NOT NULL / UNIQUE / DEFAULT / REFERENCES)
// for a column definition. Primary-key columns short-circuit to just
// " PRIMARY KEY" — no other modifiers are emitted for them.
function getModifiers(columnName, column) {
  let modifiers = "";
  if (hasPrimaryKey(column)) {
    return " PRIMARY KEY";
  }
  if (!column.schema.optional) {
    modifiers += " NOT NULL";
  }
  if (column.schema.unique) {
    modifiers += " UNIQUE";
  }
  if (hasDefault(column)) {
    modifiers += ` DEFAULT ${getDefaultValueSql(columnName, column)}`;
  }
  const references = getReferencesConfig(column);
  if (references) {
    // A reference must carry both its table (`collection`) and column name,
    // which are filled in when `defineDb()` resolves the config.
    const { collection: tableName, name } = references.schema;
    if (!tableName || !name) {
      throw new Error(REFERENCE_DNE_ERROR(columnName));
    }
    modifiers += ` REFERENCES ${sqlite.escapeName(tableName)} (${sqlite.escapeName(name)})`;
  }
  return modifiers;
}
/**
 * The `references` config for a column when its type supports foreign keys
 * (number/text); undefined for every other column type.
 */
function getReferencesConfig(column) {
  if (column.type === "number" || column.type === "text") {
    return column.schema.references;
  }
  return void 0;
}
/**
 * Whether a column has an effective default: an explicit `default` value, or
 * an integer primary key (which implicitly defaults to ROWID).
 */
function hasDefault(column) {
  if (column.schema.default !== void 0) {
    return true;
  }
  return hasPrimaryKey(column) && column.type === "number";
}
/**
 * Render a JS default as a SQL literal: strings are escaped via the SQLite
 * dialect, booleans become TRUE/FALSE, everything else is coerced to string.
 */
function toDefault(def) {
  if (typeof def === "string") {
    return sqlite.escapeString(def);
  }
  if (typeof def === "boolean") {
    return def ? "TRUE" : "FALSE";
  }
  return def + "";
}
/**
 * Render a column's default value as SQL. Serialized `sql` templates pass
 * through raw; scalar defaults go through `toDefault`; JSON defaults are
 * stringified and escaped as text. Un-stringifiable JSON defaults abort the
 * process with an error message.
 */
function getDefaultValueSql(columnName, column) {
  if (isSerializedSQL(column.schema.default)) {
    return column.schema.default.sql;
  }
  switch (column.type) {
    case "boolean":
    case "number":
    case "text":
    case "date":
      return toDefault(column.schema.default);
    case "json": {
      let stringified = "";
      try {
        stringified = JSON.stringify(column.schema.default);
      } catch {
        console.log(
          `Invalid default value for column ${colors.bold(
            columnName
          )}. Defaults must be valid JSON when using the \`json()\` type.`
        );
        // Fix: exit non-zero — this is an error path, and exiting with 0
        // previously reported success to shells/CI.
        process.exit(1);
      }
      return sqlite.escapeString(stringified);
    }
  }
}
export {
SEED_DEV_FILE_NAME,
getCreateIndexQueries,
getCreateTableQuery,
getDropTableIfExistsQuery,
getModifiers,
getReferencesConfig,
hasDefault,
schemaTypeToSqlType
};

1444
node_modules/@astrojs/db/dist/core/schemas.d.ts generated vendored Normal file

File diff suppressed because it is too large Load Diff

193
node_modules/@astrojs/db/dist/core/schemas.js generated vendored Normal file
View File

@@ -0,0 +1,193 @@
import { SQL } from "drizzle-orm";
import { SQLiteAsyncDialect } from "drizzle-orm/sqlite-core";
import * as z from "zod/v4";
import { SERIALIZED_SQL_KEY } from "../runtime/types.js";
import { errorMap } from "./integration/error-map.js";
import { mapObject } from "./utils.js";
const sqlite = new SQLiteAsyncDialect();
const sqlSchema = z.instanceof(SQL).transform(
(sqlObj) => ({
[SERIALIZED_SQL_KEY]: true,
sql: sqlite.sqlToQuery(sqlObj).sql
})
);
const baseColumnSchema = z.object({
label: z.string().optional(),
optional: z.boolean().optional().default(false),
unique: z.boolean().optional().default(false),
deprecated: z.boolean().optional().default(false),
// Defined when `defineDb()` is called to resolve `references`
name: z.string().optional(),
// TODO: Update to `table`. Will need migration file version change
collection: z.string().optional()
});
// `boolean` column: default may be a literal boolean or a sql`` expression.
const booleanColumnSchema = z.object({
  type: z.literal("boolean"),
  schema: baseColumnSchema.extend({
    default: z.union([z.boolean(), sqlSchema]).optional()
  })
});
// `number` column options. A union is used so that `primaryKey: true`
// forbids `optional` and `default` (see inline notes below).
const numberColumnBaseSchema = baseColumnSchema.omit({ optional: true }).and(
  z.union([
    z.object({
      primaryKey: z.literal(false).optional().default(false),
      optional: baseColumnSchema.shape.optional,
      default: z.union([z.number(), sqlSchema]).optional()
    }),
    z.object({
      // `integer primary key` uses ROWID as the default value.
      // `optional` and `default` do not have an effect,
      // so disable these config options for primary keys.
      primaryKey: z.literal(true),
      optional: z.literal(false).optional(),
      default: z.literal(void 0).optional()
    })
  ])
);
// Adds `references`: the user supplies a callback, invoked here (lazily,
// since the referenced column schema is self-referential).
const numberColumnOptsSchema = numberColumnBaseSchema.and(
  z.object({
    references: z.function({ output: z.lazy(() => numberColumnSchema) }).optional().transform((fn) => fn?.())
  })
);
const numberColumnSchema = z.object({
  type: z.literal("number"),
  schema: numberColumnOptsSchema
});
// `text` column options: string or sql`` default, optional `enum` of allowed
// values, and the same primary-key restrictions modeled as a union.
const textColumnBaseSchema = baseColumnSchema.omit({ optional: true }).extend({
  default: z.union([z.string(), sqlSchema]).optional(),
  multiline: z.boolean().optional(),
  enum: z.tuple([z.string()]).rest(z.string()).optional()
  // At least one value required,
}).and(
  z.union([
    z.object({
      primaryKey: z.literal(false).optional().default(false),
      optional: baseColumnSchema.shape.optional
    }),
    z.object({
      // text primary key allows NULL values.
      // NULL values bypass unique checks, which could
      // lead to duplicate URLs per record.
      // disable `optional` for primary keys.
      primaryKey: z.literal(true),
      optional: z.literal(false).optional()
    })
  ])
);
// Adds `references`, resolved by invoking the user's callback (lazy because
// the referenced column schema is self-referential).
const textColumnOptsSchema = textColumnBaseSchema.and(
  z.object({
    references: z.function({ output: z.lazy(() => textColumnSchema) }).optional().transform((fn) => fn?.())
  })
);
const textColumnSchema = z.object({
  type: z.literal("text"),
  schema: textColumnOptsSchema
});
// `date` column: Date defaults are normalized to ISO strings.
const dateColumnSchema = z.object({
  type: z.literal("date"),
  schema: baseColumnSchema.extend({
    default: z.union([
      sqlSchema,
      // transform to ISO string for serialization
      z.date().transform((d) => d.toISOString())
    ]).optional()
  })
});
// `json` column: default may be any value; JSON-serializability is checked
// later when the default is rendered to SQL.
const jsonColumnSchema = z.object({
  type: z.literal("json"),
  schema: baseColumnSchema.extend({
    default: z.unknown().optional()
  })
});
// Every column shape, discriminated by `type`.
const columnSchema = z.discriminatedUnion("type", [
  booleanColumnSchema,
  numberColumnSchema,
  textColumnSchema,
  dateColumnSchema,
  jsonColumnSchema
]);
// Only text and number columns may be the target of a reference.
const referenceableColumnSchema = z.union([textColumnSchema, numberColumnSchema]);
// Column name -> column definition.
const columnsSchema = z.record(z.string(), columnSchema);
// Foreign key config: `references` is a user callback, invoked here to
// resolve the referenced column(s).
const foreignKeysSchema = z.object({
  columns: z.string().or(z.array(z.string())),
  references: z.function({
    output: z.lazy(() => referenceableColumnSchema.or(z.array(referenceableColumnSchema)))
  }).transform((fn) => fn())
});
// Index value in the resolved/legacy record form (the index name is the key).
const resolvedIndexSchema = z.object({
  on: z.string().or(z.array(z.string())),
  unique: z.boolean().optional()
});
const legacyIndexesSchema = z.record(z.string(), resolvedIndexSchema);
// Index entry in the array form, with an optional explicit `name`.
const indexSchema = z.object({
  on: z.string().or(z.array(z.string())),
  unique: z.boolean().optional(),
  name: z.string().optional()
});
const indexesSchema = z.array(indexSchema);
// A table definition: columns plus optional indexes and foreign keys.
const tableSchema = z.object({
  columns: columnsSchema,
  indexes: indexesSchema.or(legacyIndexesSchema).optional(),
  foreignKeys: z.array(foreignKeysSchema).optional(),
  deprecated: z.boolean().optional().default(false)
});
// Before validation, annotate each raw table and column with its own name:
// attaches a `getName()` accessor to every table object and writes `name` /
// `collection` onto each column's schema so `references` can be resolved
// later. NOTE: this deliberately mutates the user-provided objects and then
// returns `rawTables` so the mutations are seen by the real `tableSchema`
// validation that follows.
const tablesSchema = z.preprocess(
  (rawTables) => {
    const tables = z.record(z.string(), z.any()).parse(rawTables, { error: errorMap });
    for (const [tableName, table] of Object.entries(tables)) {
      table.getName = () => tableName;
      const { columns } = z.object({ columns: z.record(z.string(), z.any()) }).parse(table, { error: errorMap });
      for (const [columnName, column] of Object.entries(columns)) {
        column.schema.name = columnName;
        column.schema.collection = tableName;
      }
    }
    return rawTables;
  },
  z.record(z.string(), tableSchema)
);
// Validates the top-level db config and normalizes each table's `indexes`
// from the array form (optionally carrying an explicit `name`) into the
// record form keyed by index name.
const dbConfigSchema = z.object({
  tables: tablesSchema.optional()
}).transform(({ tables = {}, ...config }) => {
  return {
    ...config,
    tables: mapObject(tables, (tableName, table) => {
      const { indexes = {} } = table;
      // Legacy config already uses the record form; pass it through.
      if (!Array.isArray(indexes)) {
        return { ...table, indexes };
      }
      const resolvedIndexes = {};
      for (const index of indexes) {
        // An explicit name wins; strip it from the stored value.
        if (index.name) {
          const { name: name2, ...rest } = index;
          resolvedIndexes[index.name] = rest;
          continue;
        }
        // Derive a deterministic name from the sorted column list. Sort a
        // copy: `Array#sort` mutates in place, and sorting `index.on`
        // itself would silently reorder the columns of the stored index.
        const indexOn = Array.isArray(index.on) ? [...index.on].sort().join("_") : index.on;
        const name = tableName + "_" + indexOn + "_idx";
        resolvedIndexes[name] = index;
      }
      return {
        ...table,
        indexes: resolvedIndexes
      };
    })
  };
});
export {
booleanColumnSchema,
columnSchema,
columnsSchema,
dateColumnSchema,
dbConfigSchema,
indexSchema,
jsonColumnSchema,
numberColumnOptsSchema,
numberColumnSchema,
referenceableColumnSchema,
resolvedIndexSchema,
tableSchema,
tablesSchema,
textColumnOptsSchema,
textColumnSchema
};

60
node_modules/@astrojs/db/dist/core/types.d.ts generated vendored Normal file
View File

@@ -0,0 +1,60 @@
import type * as z from 'zod/v4';
import type { booleanColumnSchema, columnSchema, columnsSchema, dateColumnSchema, dbConfigSchema, indexSchema, jsonColumnSchema, MaybeArray, numberColumnOptsSchema, numberColumnSchema, referenceableColumnSchema, resolvedIndexSchema, tableSchema, textColumnOptsSchema, textColumnSchema } from './schemas.js';
/** Normalized `indexes` record for a table after config resolution. */
export type ResolvedIndexes = z.output<typeof dbConfigSchema>['tables'][string]['indexes'];
/** Parsed (output) and raw (input) shapes for each column type. */
export type BooleanColumn = z.infer<typeof booleanColumnSchema>;
export type BooleanColumnInput = z.input<typeof booleanColumnSchema>;
export type NumberColumn = z.infer<typeof numberColumnSchema>;
export type NumberColumnInput = z.input<typeof numberColumnSchema>;
export type TextColumn = z.infer<typeof textColumnSchema>;
export type TextColumnInput = z.input<typeof textColumnSchema>;
export type DateColumn = z.infer<typeof dateColumnSchema>;
export type DateColumnInput = z.input<typeof dateColumnSchema>;
export type JsonColumn = z.infer<typeof jsonColumnSchema>;
export type JsonColumnInput = z.input<typeof jsonColumnSchema>;
/** Discriminant literal shared by all column variants. */
export type ColumnType = BooleanColumn['type'] | NumberColumn['type'] | TextColumn['type'] | DateColumn['type'] | JsonColumn['type'];
export type DBColumn = z.infer<typeof columnSchema>;
export type DBColumnInput = DateColumnInput | BooleanColumnInput | NumberColumnInput | TextColumnInput | JsonColumnInput;
export type DBColumns = z.infer<typeof columnsSchema>;
export type DBTable = z.infer<typeof tableSchema>;
export type DBTables = Record<string, DBTable>;
/** Table shapes after `dbConfigSchema` normalization (indexes as a record). */
export type ResolvedDBTables = z.output<typeof dbConfigSchema>['tables'];
export type ResolvedDBTable = z.output<typeof dbConfigSchema>['tables'][string];
/** A versioned snapshot of the resolved table schemas. */
export type DBSnapshot = {
    schema: Record<string, ResolvedDBTable>;
    version: string;
};
export type DBConfigInput = z.input<typeof dbConfigSchema>;
export type DBConfig = z.infer<typeof dbConfigSchema>;
export type ColumnsConfig = z.input<typeof tableSchema>['columns'];
export type OutputColumnsConfig = z.output<typeof tableSchema>['columns'];
/**
 * User-facing table definition. The generic ties `foreignKeys[*].columns`
 * and `indexes[*].on` to the keys of this table's `columns`.
 */
export interface TableConfig<TColumns extends ColumnsConfig = ColumnsConfig> extends Pick<z.input<typeof tableSchema>, 'columns' | 'indexes' | 'foreignKeys'> {
    columns: TColumns;
    foreignKeys?: Array<{
        columns: MaybeArray<Extract<keyof TColumns, string>>;
        references: () => MaybeArray<z.input<typeof referenceableColumnSchema>>;
    }>;
    indexes?: Array<IndexConfig<TColumns>> | Record<string, LegacyIndexConfig<TColumns>>;
    deprecated?: boolean;
}
/** Array-form index entry whose `on` is constrained to this table's columns. */
interface IndexConfig<TColumns extends ColumnsConfig> extends z.input<typeof indexSchema> {
    on: MaybeArray<Extract<keyof TColumns, string>>;
}
/** @deprecated */
interface LegacyIndexConfig<TColumns extends ColumnsConfig> extends z.input<typeof resolvedIndexSchema> {
    on: MaybeArray<Extract<keyof TColumns, string>>;
}
export type NumberColumnOpts = z.input<typeof numberColumnOptsSchema>;
export type TextColumnOpts = z.input<typeof textColumnOptsSchema>;
declare global {
    namespace Astro {
        interface IntegrationHooks {
            /**
             * Hook exposing `extendDb`, which lets an integration register
             * additional db config and seed file entrypoints.
             */
            'astro:db:setup'?: (options: {
                extendDb: (options: {
                    configEntrypoint?: URL | string;
                    seedEntrypoint?: URL | string;
                }) => void;
            }) => void | Promise<void>;
        }
    }
}
export {};

0
node_modules/@astrojs/db/dist/core/types.js generated vendored Normal file
View File

19
node_modules/@astrojs/db/dist/core/utils.d.ts generated vendored Normal file
View File

@@ -0,0 +1,19 @@
import type { AstroConfig, AstroIntegration } from 'astro';
import type { Arguments } from 'yargs-parser';
import './types.js';
/** A single Vite plugin entry as accepted by Astro's `vite.plugins` config. */
export type VitePlugin = Required<AstroConfig['vite']>['plugins'][number];
/** Load `ASTRO_`-prefixed environment variables for the given mode. */
export declare function getAstroEnv(envMode?: string): Record<`ASTRO_${string}`, string>;
/** Remote database connection details sourced from the environment. */
export type RemoteDatabaseInfo = {
    url: string;
    token: string;
};
export declare function getRemoteDatabaseInfo(): RemoteDatabaseInfo;
/**
 * Resolve the app token, preferring the `--db-app-token` CLI flag over the
 * environment value. The overloads preserve definedness of `envToken`.
 */
export declare function resolveDbAppToken(flags: Arguments, envToken: string): string;
export declare function resolveDbAppToken(flags: Arguments, envToken: string | undefined): string | undefined;
/** URL of the `db/` directory resolved against the project root. */
export declare function getDbDirectoryUrl(root: URL | string): URL;
/** Identity helper for defining an Astro integration that uses Astro DB. */
export declare function defineDbIntegration(integration: AstroIntegration): AstroIntegration;
/**
 * Map an object's values to a new set of values
 * while preserving types.
 */
export declare function mapObject<T, U = T>(item: Record<string, T>, callback: (key: string, value: T) => U): Record<string, U>;

37
node_modules/@astrojs/db/dist/core/utils.js generated vendored Normal file
View File

@@ -0,0 +1,37 @@
import { loadEnv } from "vite";
import "./types.js";
// Load every ASTRO_-prefixed variable from the env files for the given mode,
// resolved relative to the current working directory.
function getAstroEnv(envMode = "") {
  return loadEnv(envMode, process.cwd(), "ASTRO_");
}
// Read the remote database URL and app token from the Astro environment.
function getRemoteDatabaseInfo() {
  const { ASTRO_DB_REMOTE_URL: url, ASTRO_DB_APP_TOKEN: token } = getAstroEnv();
  return { url, token };
}
// Prefer an app token supplied via the CLI flag; otherwise fall back to the
// environment-sourced token (which may be undefined).
function resolveDbAppToken(flags, envToken) {
  return typeof flags.dbAppToken === "string" ? flags.dbAppToken : envToken;
}
// Resolve the project's `db/` directory against the project root.
// `root` needs a trailing slash for relative URL resolution to land inside it.
function getDbDirectoryUrl(root) {
  const DB_DIR = "db/";
  return new URL(DB_DIR, root);
}
// Identity function: returns the given integration object unchanged.
const defineDbIntegration = (integration) => integration;
/**
 * Map an object's values through `callback(key, value)`, producing a new
 * object with the same keys. The input object is not modified.
 */
function mapObject(item, callback) {
  const result = {};
  for (const [key, value] of Object.entries(item)) {
    result[key] = callback(key, value);
  }
  return result;
}
export {
defineDbIntegration,
getAstroEnv,
getDbDirectoryUrl,
getRemoteDatabaseInfo,
mapObject,
resolveDbAppToken
};

0
node_modules/@astrojs/db/dist/db-client.d.js generated vendored Normal file
View File

3
node_modules/@astrojs/db/dist/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,3 @@
export { cli } from './core/cli/index.js';
export { type AstroDBConfig, integration as default } from './core/integration/index.js';
export type { TableConfig } from './core/types.js';

6
node_modules/@astrojs/db/dist/index.js generated vendored Normal file
View File

@@ -0,0 +1,6 @@
import { cli } from "./core/cli/index.js";
import { integration } from "./core/integration/index.js";
export {
cli,
integration as default
};

4
node_modules/@astrojs/db/dist/runtime/errors.d.ts generated vendored Normal file
View File

@@ -0,0 +1,4 @@
// Factories for user-facing error message strings raised while resolving
// foreign key and column reference configuration. The message text lives in
// runtime/errors.js; each factory interpolates the offending table/column name.
export declare const FOREIGN_KEY_DNE_ERROR: (tableName: string) => string;
export declare const FOREIGN_KEY_REFERENCES_LENGTH_ERROR: (tableName: string) => string;
export declare const FOREIGN_KEY_REFERENCES_EMPTY_ERROR: (tableName: string) => string;
export declare const REFERENCE_DNE_ERROR: (columnName: string) => string;

Some files were not shown because too many files have changed in this diff Show More