Initial commit: New MoreminiMore website with fresh design

This commit is contained in:
MoreminiMore
2026-04-22 01:59:05 +07:00
commit 76409638cc
14010 changed files with 2052041 additions and 0 deletions

View File

@@ -0,0 +1,128 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.HttpClient = exports.checkEndpoints = void 0;
const isomorphic_fetch_1 = require("@libsql/isomorphic-fetch");
const client_js_1 = require("../client.js");
const errors_js_1 = require("../errors.js");
const stream_js_1 = require("./stream.js");
/** Endpoints probed by `findEndpoint`, in order of preference. Each entry
 * describes the URL paths and wire encoding of one Hrana-over-HTTP variant. */
exports.checkEndpoints = [
    {
        versionPath: "v3-protobuf",
        pipelinePath: "v3-protobuf/pipeline",
        cursorPath: "v3-protobuf/cursor",
        version: 3,
        encoding: "protobuf",
    },
    /*
    {
        versionPath: "v3",
        pipelinePath: "v3/pipeline",
        cursorPath: "v3/cursor",
        version: 3,
        encoding: "json",
    },
    */
];
// Endpoint used when no probed endpoint is available or when the caller asked
// for protocol version 2 explicitly. Version 2 has no cursor endpoint.
const fallbackEndpoint = {
    versionPath: "v2",
    pipelinePath: "v2/pipeline",
    cursorPath: undefined,
    version: 2,
    encoding: "json",
};
/** A client for the Hrana protocol over HTTP. */
class HttpClient extends client_js_1.Client {
    #url;
    #jwt;
    #fetch;
    // Error the client was closed with, or undefined while the client is open.
    #closed;
    // All open streams, so they can be closed together with the client.
    #streams;
    /** @private */
    _endpointPromise;
    /** @private */
    _endpoint;
    /** @private */
    constructor(url, jwt, customFetch, protocolVersion = 2) {
        super();
        this.#url = url;
        this.#jwt = jwt;
        this.#fetch = customFetch ?? isomorphic_fetch_1.fetch;
        this.#closed = undefined;
        this.#streams = new Set();
        // For protocol version 3 the best endpoint is negotiated with the
        // server; version 2 uses the fallback endpoint without probing.
        this._endpointPromise = protocolVersion === 3
            ? findEndpoint(this.#fetch, this.#url)
            : Promise.resolve(fallbackEndpoint);
        // Cache the endpoint once known; close the client if negotiation fails.
        this._endpointPromise.then((endpoint) => this._endpoint = endpoint, (error) => this.#setClosed(error));
    }
    /** Get the protocol version supported by the server. */
    async getVersion() {
        if (this._endpoint !== undefined) {
            return this._endpoint.version;
        }
        return (await this._endpointPromise).version;
    }
    // Make sure that the negotiated version is at least `minVersion`.
    /** @private */
    _ensureVersion(minVersion, feature) {
        if (minVersion <= fallbackEndpoint.version) {
            // Every endpoint supports at least the fallback version.
            return;
        }
        else if (this._endpoint === undefined) {
            throw new errors_js_1.ProtocolVersionError(`${feature} is supported only on protocol version ${minVersion} and higher, ` +
                "but the version supported by the HTTP server is not yet known. " +
                "Use Client.getVersion() to wait until the version is available.");
        }
        else if (this._endpoint.version < minVersion) {
            throw new errors_js_1.ProtocolVersionError(`${feature} is supported only on protocol version ${minVersion} and higher, ` +
                `but the HTTP server only supports version ${this._endpoint.version}.`);
        }
    }
    /** Open a {@link HttpStream}, a stream for executing SQL statements. */
    openStream() {
        if (this.#closed !== undefined) {
            throw new errors_js_1.ClosedError("Client is closed", this.#closed);
        }
        const stream = new stream_js_1.HttpStream(this, this.#url, this.#jwt, this.#fetch);
        this.#streams.add(stream);
        return stream;
    }
    /** @private */
    _streamClosed(stream) {
        this.#streams.delete(stream);
    }
    /** Close the client and all its streams. */
    close() {
        this.#setClosed(new errors_js_1.ClientError("Client was manually closed"));
    }
    /** True if the client is closed. */
    get closed() {
        return this.#closed !== undefined;
    }
    // Record the close error (first one wins) and propagate it to every stream.
    #setClosed(error) {
        if (this.#closed !== undefined) {
            return;
        }
        this.#closed = error;
        // Copy the set first: stream._setClosed() calls back into
        // _streamClosed(), which mutates #streams while we iterate.
        for (const stream of Array.from(this.#streams)) {
            stream._setClosed(new errors_js_1.ClosedError("Client was closed", error));
        }
    }
}
exports.HttpClient = HttpClient;
/** Probe the server for the most capable supported endpoint.
 *
 * Tries each entry of `checkEndpoints` in order with a GET request and returns
 * the first one that the server answers with a 2xx status; falls back to the
 * protocol v2 endpoint when none succeeds. The response body is always drained
 * so the underlying connection can be reused.
 */
async function findEndpoint(customFetch, clientUrl) {
    for (const endpoint of exports.checkEndpoints) {
        const url = new URL(endpoint.versionPath, clientUrl);
        const request = new isomorphic_fetch_1.Request(url.toString(), { method: "GET" });
        // NOTE: the original aliased `customFetch` to a local `fetch`, which
        // pointlessly shadowed the global; call the parameter directly.
        const response = await customFetch(request);
        await response.arrayBuffer();
        if (response.ok) {
            return endpoint;
        }
    }
    return fallbackEndpoint;
}

View File

@@ -0,0 +1,162 @@
"use strict";
// --- TypeScript CommonJS interop helpers (compiler-generated) ---
// __createBinding re-exports a module property via a live getter;
// __setModuleDefault attaches the module as the `default` property;
// __importStar builds a namespace object from a CommonJS module
// (backs the `import * as ns` syntax below).
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.HttpCursor = void 0;
const byte_queue_js_1 = require("../byte_queue.js");
const cursor_js_1 = require("../cursor.js");
const jsond = __importStar(require("../encoding/json/decode.js"));
const protobufd = __importStar(require("../encoding/protobuf/decode.js"));
const errors_js_1 = require("../errors.js");
const util_js_1 = require("../util.js");
const json_decode_js_1 = require("./json_decode.js");
const protobuf_decode_js_1 = require("./protobuf_decode.js");
const json_decode_js_2 = require("../shared/json_decode.js");
const protobuf_decode_js_2 = require("../shared/protobuf_decode.js");
/** A cursor over the results of a batch executed via the HTTP cursor endpoint.
 * Entries arrive as a delimited stream in the HTTP response body and are
 * parsed incrementally from an internal byte queue.
 */
class HttpCursor extends cursor_js_1.Cursor {
    #stream;
    #encoding;
    #reader;
    #queue;
    #closed;
    #done;
    /** @private */
    constructor(stream, encoding) {
        super();
        this.#stream = stream;
        this.#encoding = encoding;
        this.#reader = undefined;
        // Buffer for response bytes that have arrived but are not yet parsed.
        this.#queue = new byte_queue_js_1.ByteQueue(16 * 1024);
        this.#closed = undefined;
        this.#done = false;
    }
    // Read the first item of the response stream, which describes the cursor
    // itself (see CursorRespBody); must be called before next().
    async open(response) {
        if (response.body === null) {
            throw new errors_js_1.ProtoError("No response body for cursor request");
        }
        this.#reader = response.body.getReader();
        const respBody = await this.#nextItem(json_decode_js_1.CursorRespBody, protobuf_decode_js_1.CursorRespBody);
        if (respBody === undefined) {
            throw new errors_js_1.ProtoError("Empty response to cursor request");
        }
        return respBody;
    }
    /** Fetch the next entry from the cursor. */
    next() {
        return this.#nextItem(json_decode_js_2.CursorEntry, protobuf_decode_js_2.CursorEntry);
    }
    /** Close the cursor. */
    close() {
        this._setClosed(new errors_js_1.ClientError("Cursor was manually closed"));
    }
    /** @private */
    _setClosed(error) {
        if (this.#closed !== undefined) {
            // Already closed; the first close error wins.
            return;
        }
        this.#closed = error;
        // Detach from the owning stream so it can resume flushing its queue.
        this.#stream._cursorClosed(this);
        if (this.#reader !== undefined) {
            // Abort the in-flight HTTP response body.
            this.#reader.cancel();
        }
    }
    /** True if the cursor is closed. */
    get closed() {
        return this.#closed !== undefined;
    }
    // Parse the next item of the given type from the byte queue, pulling more
    // chunks from the response body as needed. Resolves to undefined at the
    // clean end of the stream.
    async #nextItem(jsonFun, protobufDef) {
        for (;;) {
            if (this.#done) {
                return undefined;
            }
            else if (this.#closed !== undefined) {
                throw new errors_js_1.ClosedError("Cursor is closed", this.#closed);
            }
            // First try to parse a complete item from already-buffered bytes.
            if (this.#encoding === "json") {
                const jsonData = this.#parseItemJson();
                if (jsonData !== undefined) {
                    const jsonText = new TextDecoder().decode(jsonData);
                    const jsonValue = JSON.parse(jsonText);
                    return jsond.readJsonObject(jsonValue, jsonFun);
                }
            }
            else if (this.#encoding === "protobuf") {
                const protobufData = this.#parseItemProtobuf();
                if (protobufData !== undefined) {
                    return protobufd.readProtobufMessage(protobufData, protobufDef);
                }
            }
            else {
                throw (0, util_js_1.impossible)(this.#encoding, "Impossible encoding");
            }
            if (this.#reader === undefined) {
                throw new errors_js_1.InternalError("Attempted to read from HTTP cursor before it was opened");
            }
            // Not enough buffered data for a full item: read another chunk.
            const { value, done } = await this.#reader.read();
            if (done && this.#queue.length === 0) {
                // Clean end of stream between items.
                this.#done = true;
            }
            else if (done) {
                // Stream ended in the middle of an item.
                throw new errors_js_1.ProtoError("Unexpected end of cursor stream");
            }
            else {
                this.#queue.push(value);
            }
        }
    }
    // JSON items are newline-delimited: return the bytes of one complete line
    // (without the newline), or undefined if no full line is buffered yet.
    #parseItemJson() {
        const data = this.#queue.data();
        const newlineByte = 10;
        const newlinePos = data.indexOf(newlineByte);
        if (newlinePos < 0) {
            return undefined;
        }
        const jsonData = data.slice(0, newlinePos);
        this.#queue.shift(newlinePos + 1);
        return jsonData;
    }
    // Protobuf items are length-prefixed with a base-128 varint: decode the
    // length, then return that many bytes, or undefined if the full item is
    // not yet buffered.
    #parseItemProtobuf() {
        const data = this.#queue.data();
        let varintValue = 0;
        let varintLength = 0;
        for (;;) {
            if (varintLength >= data.byteLength) {
                return undefined;
            }
            const byte = data[varintLength];
            // NOTE(review): 32-bit `|=` caps decodable lengths at ~2 GiB;
            // presumably fine for protocol messages — confirm server limits.
            varintValue |= (byte & 0x7f) << (7 * varintLength);
            varintLength += 1;
            if (!(byte & 0x80)) {
                break;
            }
        }
        if (data.byteLength < varintLength + varintValue) {
            return undefined;
        }
        const protobufData = data.slice(varintLength, varintLength + varintValue);
        this.#queue.shift(varintLength + varintValue);
        return protobufData;
    }
}
exports.HttpCursor = HttpCursor;

View File

@@ -0,0 +1,90 @@
"use strict";
// --- TypeScript CommonJS interop helpers (compiler-generated) ---
// __createBinding re-exports a module property via a live getter;
// __setModuleDefault attaches the module as the `default` property;
// __importStar builds a namespace object from a CommonJS module
// (backs the `import * as ns` syntax below).
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.CursorRespBody = exports.PipelineRespBody = void 0;
const errors_js_1 = require("../errors.js");
const d = __importStar(require("../encoding/json/decode.js"));
const json_decode_js_1 = require("../shared/json_decode.js");
/** Decode the JSON body of a pipeline response. */
function PipelineRespBody(obj) {
    return {
        baton: d.stringOpt(obj["baton"]),
        baseUrl: d.stringOpt(obj["base_url"]),
        results: d.arrayObjectsMap(obj["results"], StreamResult),
    };
}
exports.PipelineRespBody = PipelineRespBody;
/** Decode one result of a pipeline: either "ok" with a response, or "error". */
function StreamResult(obj) {
    const type = d.string(obj["type"]);
    switch (type) {
        case "ok":
            return { type: "ok", response: StreamResponse(d.object(obj["response"])) };
        case "error":
            return { type: "error", error: (0, json_decode_js_1.Error)(d.object(obj["error"])) };
        default:
            throw new errors_js_1.ProtoError("Unexpected type of StreamResult");
    }
}
/** Decode the response to a single stream request, dispatching on its type. */
function StreamResponse(obj) {
    switch (d.string(obj["type"])) {
        case "close":
            return { type: "close" };
        case "execute":
            return { type: "execute", result: (0, json_decode_js_1.StmtResult)(d.object(obj["result"])) };
        case "batch":
            return { type: "batch", result: (0, json_decode_js_1.BatchResult)(d.object(obj["result"])) };
        case "sequence":
            return { type: "sequence" };
        case "describe":
            return { type: "describe", result: (0, json_decode_js_1.DescribeResult)(d.object(obj["result"])) };
        case "store_sql":
            return { type: "store_sql" };
        case "close_sql":
            return { type: "close_sql" };
        case "get_autocommit":
            return { type: "get_autocommit", isAutocommit: d.boolean(obj["is_autocommit"]) };
        default:
            throw new errors_js_1.ProtoError("Unexpected type of StreamResponse");
    }
}
/** Decode the JSON body of a cursor response. */
function CursorRespBody(obj) {
    return {
        baton: d.stringOpt(obj["baton"]),
        baseUrl: d.stringOpt(obj["base_url"]),
    };
}
exports.CursorRespBody = CursorRespBody;

View File

@@ -0,0 +1,60 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CursorReqBody = exports.PipelineReqBody = void 0;
const json_encode_js_1 = require("../shared/json_encode.js");
const util_js_1 = require("../util.js");
/** Encode the JSON body of a pipeline request. */
function PipelineReqBody(w, msg) {
    const { baton, requests } = msg;
    if (baton !== undefined) {
        w.string("baton", baton);
    }
    w.arrayObjects("requests", requests, StreamRequest);
}
exports.PipelineReqBody = PipelineReqBody;
/** Encode one stream request as JSON; the "type" tag is always written first,
 * followed by the fields specific to that request type. */
function StreamRequest(w, msg) {
    w.stringRaw("type", msg.type);
    switch (msg.type) {
        case "close":
        case "get_autocommit":
            // These requests carry no extra fields.
            break;
        case "execute":
            w.object("stmt", msg.stmt, json_encode_js_1.Stmt);
            break;
        case "batch":
            w.object("batch", msg.batch, json_encode_js_1.Batch);
            break;
        case "sequence":
        case "describe":
            // Exactly one of `sql` / `sqlId` is expected to be set.
            if (msg.sql !== undefined) {
                w.string("sql", msg.sql);
            }
            if (msg.sqlId !== undefined) {
                w.number("sql_id", msg.sqlId);
            }
            break;
        case "store_sql":
            w.number("sql_id", msg.sqlId);
            w.string("sql", msg.sql);
            break;
        case "close_sql":
            w.number("sql_id", msg.sqlId);
            break;
        default:
            throw (0, util_js_1.impossible)(msg, "Impossible type of StreamRequest");
    }
}
/** Encode the JSON body of a cursor request. */
function CursorReqBody(w, msg) {
    const { baton, batch } = msg;
    if (baton !== undefined) {
        w.string("baton", baton);
    }
    w.object("batch", batch, json_encode_js_1.Batch);
}
exports.CursorReqBody = CursorReqBody;

View File

@@ -0,0 +1,18 @@
"use strict";
// Types for the structures specific to Hrana over HTTP.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __exportStar = (this && this.__exportStar) || function(m, exports) {
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
Object.defineProperty(exports, "__esModule", { value: true });
__exportStar(require("../shared/proto.js"), exports);

View File

@@ -0,0 +1,47 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CursorRespBody = exports.PipelineRespBody = void 0;
const protobuf_decode_js_1 = require("../shared/protobuf_decode.js");
// Protobuf decoders for Hrana-over-HTTP response messages. Each decoder maps
// a protobuf field number to a reader callback; `default()` builds the message
// value used before (or without) any fields being read.
exports.PipelineRespBody = {
    default() { return { baton: undefined, baseUrl: undefined, results: [] }; },
    1(r, msg) { msg.baton = r.string(); },
    2(r, msg) { msg.baseUrl = r.string(); },
    3(r, msg) { msg.results.push(r.message(StreamResult)); },
};
// Result of one pipeline request: field 1 = ok, field 2 = error.
const StreamResult = {
    default() { return { type: "none" }; },
    1(r) { return { type: "ok", response: r.message(StreamResponse) }; },
    2(r) { return { type: "error", error: r.message(protobuf_decode_js_1.Error) }; },
};
// Oneof over the response variants; field numbers mirror the request encoding.
const StreamResponse = {
    default() { return { type: "none" }; },
    1(r) { return { type: "close" }; },
    2(r) { return r.message(ExecuteStreamResp); },
    3(r) { return r.message(BatchStreamResp); },
    4(r) { return { type: "sequence" }; },
    5(r) { return r.message(DescribeStreamResp); },
    6(r) { return { type: "store_sql" }; },
    7(r) { return { type: "close_sql" }; },
    8(r) { return r.message(GetAutocommitStreamResp); },
};
const ExecuteStreamResp = {
    default() { return { type: "execute", result: protobuf_decode_js_1.StmtResult.default() }; },
    1(r, msg) { msg.result = r.message(protobuf_decode_js_1.StmtResult); },
};
const BatchStreamResp = {
    default() { return { type: "batch", result: protobuf_decode_js_1.BatchResult.default() }; },
    1(r, msg) { msg.result = r.message(protobuf_decode_js_1.BatchResult); },
};
const DescribeStreamResp = {
    default() { return { type: "describe", result: protobuf_decode_js_1.DescribeResult.default() }; },
    1(r, msg) { msg.result = r.message(protobuf_decode_js_1.DescribeResult); },
};
const GetAutocommitStreamResp = {
    default() { return { type: "get_autocommit", isAutocommit: false }; },
    1(r, msg) { msg.isAutocommit = r.bool(); },
};
exports.CursorRespBody = {
    default() { return { baton: undefined, baseUrl: undefined }; },
    1(r, msg) { msg.baton = r.string(); },
    2(r, msg) { msg.baseUrl = r.string(); },
};

View File

@@ -0,0 +1,83 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CursorReqBody = exports.PipelineReqBody = void 0;
const protobuf_encode_js_1 = require("../shared/protobuf_encode.js");
const util_js_1 = require("../util.js");
/** Encode the protobuf body of a pipeline request. */
function PipelineReqBody(w, msg) {
    const { baton, requests } = msg;
    if (baton !== undefined) {
        w.string(1, baton);
    }
    requests.forEach((req) => w.message(2, req, StreamRequest));
}
exports.PipelineReqBody = PipelineReqBody;
/** Encode one stream request as a protobuf oneof, dispatching on `type`;
 * the field number selects the variant on the wire. */
function StreamRequest(w, msg) {
    switch (msg.type) {
        case "close":
            w.message(1, msg, CloseStreamReq);
            break;
        case "execute":
            w.message(2, msg, ExecuteStreamReq);
            break;
        case "batch":
            w.message(3, msg, BatchStreamReq);
            break;
        case "sequence":
            w.message(4, msg, SequenceStreamReq);
            break;
        case "describe":
            w.message(5, msg, DescribeStreamReq);
            break;
        case "store_sql":
            w.message(6, msg, StoreSqlStreamReq);
            break;
        case "close_sql":
            w.message(7, msg, CloseSqlStreamReq);
            break;
        case "get_autocommit":
            w.message(8, msg, GetAutocommitStreamReq);
            break;
        default:
            throw (0, util_js_1.impossible)(msg, "Impossible type of StreamRequest");
    }
}
// Per-variant payload encoders. "close" and "get_autocommit" carry no fields.
function CloseStreamReq(_w, _msg) {
}
function ExecuteStreamReq(w, msg) {
    w.message(1, msg.stmt, protobuf_encode_js_1.Stmt);
}
function BatchStreamReq(w, msg) {
    w.message(1, msg.batch, protobuf_encode_js_1.Batch);
}
function SequenceStreamReq(w, msg) {
    const { sql, sqlId } = msg;
    if (sql !== undefined) {
        w.string(1, sql);
    }
    if (sqlId !== undefined) {
        w.int32(2, sqlId);
    }
}
function DescribeStreamReq(w, msg) {
    const { sql, sqlId } = msg;
    if (sql !== undefined) {
        w.string(1, sql);
    }
    if (sqlId !== undefined) {
        w.int32(2, sqlId);
    }
}
function StoreSqlStreamReq(w, msg) {
    w.int32(1, msg.sqlId);
    w.string(2, msg.sql);
}
function CloseSqlStreamReq(w, msg) {
    w.int32(1, msg.sqlId);
}
function GetAutocommitStreamReq(_w, _msg) {
}
/** Encode the protobuf body of a cursor request. */
function CursorReqBody(w, msg) {
    const { baton, batch } = msg;
    if (baton !== undefined) {
        w.string(1, baton);
    }
    w.message(2, batch, protobuf_encode_js_1.Batch);
}
exports.CursorReqBody = CursorReqBody;

View File

@@ -0,0 +1,367 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.HttpStream = void 0;
const isomorphic_fetch_1 = require("@libsql/isomorphic-fetch");
const errors_js_1 = require("../errors.js");
const index_js_1 = require("../encoding/index.js");
const id_alloc_js_1 = require("../id_alloc.js");
const queue_js_1 = require("../queue.js");
const queue_microtask_js_1 = require("../queue_microtask.js");
const result_js_1 = require("../result.js");
const sql_js_1 = require("../sql.js");
const stream_js_1 = require("../stream.js");
const util_js_1 = require("../util.js");
const cursor_js_1 = require("./cursor.js");
const json_encode_js_1 = require("./json_encode.js");
const protobuf_encode_js_1 = require("./protobuf_encode.js");
const json_encode_js_2 = require("./json_encode.js");
const protobuf_encode_js_2 = require("./protobuf_encode.js");
const json_decode_js_1 = require("./json_decode.js");
const protobuf_decode_js_1 = require("./protobuf_decode.js");
/** A stream for executing SQL statements over the Hrana-over-HTTP protocol.
 *
 * Requests are buffered in a queue and flushed to the server in batches
 * ("pipelines"), one HTTP request at a time; the server correlates the HTTP
 * requests belonging to one stream with a `baton` token that is refreshed in
 * every response.
 */
class HttpStream extends stream_js_1.Stream {
    #client;
    #baseUrl;
    #jwt;
    #fetch;
    // Server-issued token identifying this stream; refreshed on every flush.
    #baton;
    // Queued "pipeline" and "cursor" entries not yet sent to the server.
    #queue;
    // True while an HTTP request is in flight (only one flush at a time).
    #flushing;
    // The open cursor, if any; pipeline flushing pauses until it is closed.
    #cursor;
    // True after closeGracefully(): accept no new work, drain, then close.
    #closing;
    // True once a "close" request has been queued, so it is queued only once.
    #closeQueued;
    // Error the stream was closed with, or undefined while open.
    #closed;
    // Allocator for server-side SQL text ids used by storeSql().
    #sqlIdAlloc;
    /** @private */
    constructor(client, baseUrl, jwt, customFetch) {
        super(client.intMode);
        this.#client = client;
        this.#baseUrl = baseUrl.toString();
        this.#jwt = jwt;
        this.#fetch = customFetch;
        this.#baton = undefined;
        this.#queue = new queue_js_1.Queue();
        this.#flushing = false;
        this.#closing = false;
        this.#closeQueued = false;
        this.#closed = undefined;
        this.#sqlIdAlloc = new id_alloc_js_1.IdAlloc();
    }
    /** Get the {@link HttpClient} object that this stream belongs to. */
    client() {
        return this.#client;
    }
    /** @private */
    _sqlOwner() {
        // Stored SQL texts belong to this stream, not to the whole client.
        return this;
    }
    /** Cache a SQL text on the server. */
    storeSql(sql) {
        const sqlId = this.#sqlIdAlloc.alloc();
        // Fire-and-forget: a failure to store closes the whole stream.
        this.#sendStreamRequest({ type: "store_sql", sqlId, sql }).then(() => undefined, (error) => this._setClosed(error));
        return new sql_js_1.Sql(this, sqlId);
    }
    /** @private */
    _closeSql(sqlId) {
        if (this.#closed !== undefined) {
            return;
        }
        // Free the id only after the server has dropped the SQL text.
        this.#sendStreamRequest({ type: "close_sql", sqlId }).then(() => this.#sqlIdAlloc.free(sqlId), (error) => this._setClosed(error));
    }
    /** @private */
    _execute(stmt) {
        return this.#sendStreamRequest({ type: "execute", stmt }).then((response) => {
            return response.result;
        });
    }
    /** @private */
    _batch(batch) {
        return this.#sendStreamRequest({ type: "batch", batch }).then((response) => {
            return response.result;
        });
    }
    /** @private */
    _describe(protoSql) {
        return this.#sendStreamRequest({
            type: "describe",
            sql: protoSql.sql,
            sqlId: protoSql.sqlId
        }).then((response) => {
            return response.result;
        });
    }
    /** @private */
    _sequence(protoSql) {
        return this.#sendStreamRequest({
            type: "sequence",
            sql: protoSql.sql,
            sqlId: protoSql.sqlId,
        }).then((_response) => {
            return undefined;
        });
    }
    /** Check whether the SQL connection underlying this stream is in autocommit state (i.e., outside of an
     * explicit transaction). This requires protocol version 3 or higher.
     */
    getAutocommit() {
        this.#client._ensureVersion(3, "getAutocommit()");
        return this.#sendStreamRequest({
            type: "get_autocommit",
        }).then((response) => {
            return response.isAutocommit;
        });
    }
    // Enqueue a single stream request; resolves with its response or rejects
    // with the error the server returned for it.
    #sendStreamRequest(request) {
        return new Promise((responseCallback, errorCallback) => {
            this.#pushToQueue({ type: "pipeline", request, responseCallback, errorCallback });
        });
    }
    /** @private */
    _openCursor(batch) {
        return new Promise((cursorCallback, errorCallback) => {
            this.#pushToQueue({ type: "cursor", batch, cursorCallback, errorCallback });
        });
    }
    /** @private */
    _cursorClosed(cursor) {
        if (cursor !== this.#cursor) {
            throw new errors_js_1.InternalError("Cursor was closed, but it was not associated with the stream");
        }
        this.#cursor = undefined;
        // The cursor was blocking the queue; resume flushing asynchronously.
        (0, queue_microtask_js_1.queueMicrotask)(() => this.#flushQueue());
    }
    /** Immediately close the stream. */
    close() {
        this._setClosed(new errors_js_1.ClientError("Stream was manually closed"));
    }
    /** Gracefully close the stream. */
    closeGracefully() {
        this.#closing = true;
        (0, queue_microtask_js_1.queueMicrotask)(() => this.#flushQueue());
    }
    /** True if the stream is closed. */
    get closed() {
        return this.#closed !== undefined || this.#closing;
    }
    /** @private */
    _setClosed(error) {
        if (this.#closed !== undefined) {
            return;
        }
        this.#closed = error;
        if (this.#cursor !== undefined) {
            this.#cursor._setClosed(error);
        }
        this.#client._streamClosed(this);
        // Fail every queued entry with the close error.
        for (;;) {
            const entry = this.#queue.shift();
            if (entry !== undefined) {
                entry.errorCallback(error);
            }
            else {
                break;
            }
        }
        // If the server may still hold state for this stream (we have a baton
        // or a request is in flight), tell it to close the stream, once.
        if ((this.#baton !== undefined || this.#flushing) && !this.#closeQueued) {
            this.#queue.push({
                type: "pipeline",
                request: { type: "close" },
                responseCallback: () => undefined,
                errorCallback: () => undefined,
            });
            this.#closeQueued = true;
            (0, queue_microtask_js_1.queueMicrotask)(() => this.#flushQueue());
        }
    }
    // Add an entry to the queue and schedule a flush; rejects new work when
    // the stream is closed or closing.
    #pushToQueue(entry) {
        if (this.#closed !== undefined) {
            throw new errors_js_1.ClosedError("Stream is closed", this.#closed);
        }
        else if (this.#closing) {
            throw new errors_js_1.ClosedError("Stream is closing", undefined);
        }
        else {
            this.#queue.push(entry);
            (0, queue_microtask_js_1.queueMicrotask)(() => this.#flushQueue());
        }
    }
    // Send as much queued work as possible in a single HTTP request.
    #flushQueue() {
        // At most one flush at a time; an open cursor also blocks the queue.
        if (this.#flushing || this.#cursor !== undefined) {
            return;
        }
        if (this.#closing && this.#queue.length === 0) {
            this._setClosed(new errors_js_1.ClientError("Stream was gracefully closed"));
            return;
        }
        // The endpoint may still be under negotiation; retry when it is known.
        const endpoint = this.#client._endpoint;
        if (endpoint === undefined) {
            this.#client._endpointPromise.then(() => this.#flushQueue(), (error) => this._setClosed(error));
            return;
        }
        const firstEntry = this.#queue.shift();
        if (firstEntry === undefined) {
            return;
        }
        else if (firstEntry.type === "pipeline") {
            // Batch all consecutive pipeline entries into one HTTP request.
            const pipeline = [firstEntry];
            for (;;) {
                const entry = this.#queue.first();
                if (entry !== undefined && entry.type === "pipeline") {
                    pipeline.push(entry);
                    this.#queue.shift();
                }
                else if (entry === undefined && this.#closing && !this.#closeQueued) {
                    // Queue drained while closing gracefully: piggyback the
                    // final "close" request onto this pipeline.
                    pipeline.push({
                        type: "pipeline",
                        request: { type: "close" },
                        responseCallback: () => undefined,
                        errorCallback: () => undefined,
                    });
                    this.#closeQueued = true;
                    break;
                }
                else {
                    break;
                }
            }
            this.#flushPipeline(endpoint, pipeline);
        }
        else if (firstEntry.type === "cursor") {
            // Cursors get an HTTP request of their own.
            this.#flushCursor(endpoint, firstEntry);
        }
        else {
            throw (0, util_js_1.impossible)(firstEntry, "Impossible type of QueueEntry");
        }
    }
    // Send a batch of pipeline entries, dispatching results to their callbacks.
    #flushPipeline(endpoint, pipeline) {
        this.#flush(() => this.#createPipelineRequest(pipeline, endpoint), (resp) => decodePipelineResponse(resp, endpoint.encoding), (respBody) => respBody.baton, (respBody) => respBody.baseUrl, (respBody) => handlePipelineResponse(pipeline, respBody), (error) => pipeline.forEach((entry) => entry.errorCallback(error)));
    }
    // Send a cursor request and hand the opened cursor to the caller.
    #flushCursor(endpoint, entry) {
        const cursor = new cursor_js_1.HttpCursor(this, endpoint.encoding);
        this.#cursor = cursor;
        this.#flush(() => this.#createCursorRequest(entry, endpoint), (resp) => cursor.open(resp), (respBody) => respBody.baton, (respBody) => respBody.baseUrl, (_respBody) => entry.cursorCallback(cursor), (error) => entry.errorCallback(error));
    }
    // Shared HTTP round-trip: build the request, send it, decode the response,
    // refresh baton/base URL, then hand the result (or error) to the caller.
    // Any error also closes the stream.
    #flush(createRequest, decodeResponse, getBaton, getBaseUrl, handleResponse, handleError) {
        let promise;
        try {
            const request = createRequest();
            const fetch = this.#fetch;
            promise = fetch(request);
        }
        catch (error) {
            // Treat synchronous failures like rejected fetches.
            promise = Promise.reject(error);
        }
        this.#flushing = true;
        promise.then((resp) => {
            if (!resp.ok) {
                return errorFromResponse(resp).then((error) => {
                    throw error;
                });
            }
            return decodeResponse(resp);
        }).then((r) => {
            // The server may rotate the baton and redirect to a new base URL.
            this.#baton = getBaton(r);
            this.#baseUrl = getBaseUrl(r) ?? this.#baseUrl;
            handleResponse(r);
        }).catch((error) => {
            this._setClosed(error);
            handleError(error);
        }).finally(() => {
            this.#flushing = false;
            this.#flushQueue();
        });
    }
    // Build the POST request for a batch of pipeline entries.
    #createPipelineRequest(pipeline, endpoint) {
        return this.#createRequest(new URL(endpoint.pipelinePath, this.#baseUrl), {
            baton: this.#baton,
            requests: pipeline.map((entry) => entry.request),
        }, endpoint.encoding, json_encode_js_1.PipelineReqBody, protobuf_encode_js_1.PipelineReqBody);
    }
    // Build the POST request for a cursor entry; cursors need protocol v3.
    #createCursorRequest(entry, endpoint) {
        if (endpoint.cursorPath === undefined) {
            throw new errors_js_1.ProtocolVersionError("Cursors are supported only on protocol version 3 and higher, " +
                `but the HTTP server only supports version ${endpoint.version}.`);
        }
        return this.#createRequest(new URL(endpoint.cursorPath, this.#baseUrl), {
            baton: this.#baton,
            batch: entry.batch,
        }, endpoint.encoding, json_encode_js_2.CursorReqBody, protobuf_encode_js_2.CursorReqBody);
    }
    // Serialize a request body in the endpoint's encoding and wrap it in a
    // POST request with content-type and optional JWT authorization headers.
    #createRequest(url, reqBody, encoding, jsonFun, protobufFun) {
        let bodyData;
        let contentType;
        if (encoding === "json") {
            bodyData = (0, index_js_1.writeJsonObject)(reqBody, jsonFun);
            contentType = "application/json";
        }
        else if (encoding === "protobuf") {
            bodyData = (0, index_js_1.writeProtobufMessage)(reqBody, protobufFun);
            contentType = "application/x-protobuf";
        }
        else {
            throw (0, util_js_1.impossible)(encoding, "Impossible encoding");
        }
        const headers = new isomorphic_fetch_1.Headers();
        headers.set("content-type", contentType);
        if (this.#jwt !== undefined) {
            headers.set("authorization", `Bearer ${this.#jwt}`);
        }
        return new isomorphic_fetch_1.Request(url.toString(), { method: "POST", headers, body: bodyData });
    }
}
exports.HttpStream = HttpStream;
/** Dispatch the results of a pipeline response to the callbacks of the queued
 * entries. The server must return exactly one result per request, in order. */
function handlePipelineResponse(pipeline, respBody) {
    if (respBody.results.length !== pipeline.length) {
        throw new errors_js_1.ProtoError("Server returned unexpected number of pipeline results");
    }
    respBody.results.forEach((result, i) => {
        const entry = pipeline[i];
        switch (result.type) {
            case "ok":
                if (result.response.type !== entry.request.type) {
                    throw new errors_js_1.ProtoError("Received unexpected type of response");
                }
                entry.responseCallback(result.response);
                break;
            case "error":
                entry.errorCallback((0, result_js_1.errorFromProto)(result.error));
                break;
            case "none":
                throw new errors_js_1.ProtoError("Received unrecognized type of StreamResult");
            default:
                throw (0, util_js_1.impossible)(result, "Received impossible type of StreamResult");
        }
    });
}
/** Decode a pipeline response body according to the negotiated encoding. */
async function decodePipelineResponse(resp, encoding) {
    switch (encoding) {
        case "json": {
            const respJson = await resp.json();
            return (0, index_js_1.readJsonObject)(respJson, json_decode_js_1.PipelineRespBody);
        }
        case "protobuf": {
            const respData = await resp.arrayBuffer();
            return (0, index_js_1.readProtobufMessage)(new Uint8Array(respData), protobuf_decode_js_1.PipelineRespBody);
        }
        default:
            // Release the body before failing on an unknown encoding.
            await resp.body?.cancel();
            throw (0, util_js_1.impossible)(encoding, "Impossible encoding");
    }
}
/** Convert a non-2xx HTTP response into the error to throw.
 *
 * JSON bodies that look like Hrana error protos are converted with
 * `errorFromProto`; plain-text bodies are appended to the generic message;
 * anything else yields a bare HttpServerError. The response body is always
 * consumed or cancelled so the connection can be reused.
 */
async function errorFromResponse(resp) {
    const contentType = resp.headers.get("content-type") ?? "text/plain";
    // Compare only the media type: servers commonly append parameters such as
    // "; charset=utf-8", which the previous exact match failed to recognize.
    const respType = contentType.split(";")[0].trim().toLowerCase();
    let message = `Server returned HTTP status ${resp.status}`;
    if (respType === "application/json") {
        const respBody = await resp.json();
        // Guard against non-object JSON (null, number, …) before using `in`.
        if (typeof respBody === "object" && respBody !== null && "message" in respBody) {
            return (0, result_js_1.errorFromProto)(respBody);
        }
        return new errors_js_1.HttpServerError(message, resp.status);
    }
    if (respType === "text/plain") {
        const respBody = (await resp.text()).trim();
        if (respBody !== "") {
            message += `: ${respBody}`;
        }
        return new errors_js_1.HttpServerError(message, resp.status);
    }
    await resp.body?.cancel();
    return new errors_js_1.HttpServerError(message, resp.status);
}