brass-runtime 1.12.1 → 1.13.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +55 -228
- package/dist/agent/cli/main.js +2021 -15
- package/dist/agent/cli/main.mjs +2022 -0
- package/dist/agent/index.d.mts +1 -1
- package/dist/agent/index.d.ts +1 -1
- package/dist/agent/index.js +153 -1
- package/dist/agent/index.mjs +153 -0
- package/dist/chunk-3IF374MG.js +407 -0
- package/dist/chunk-6ECUD4N3.mjs +2879 -0
- package/dist/chunk-HRVX2IYW.js +2879 -0
- package/dist/chunk-QRPYH5J7.mjs +407 -0
- package/dist/chunk-T5XJDGTQ.mjs +2556 -0
- package/dist/chunk-TGOMLZ65.js +2556 -0
- package/dist/effect-ISvXPLgc.d.mts +797 -0
- package/dist/effect-ISvXPLgc.d.ts +797 -0
- package/dist/http/index.d.mts +2 -2
- package/dist/http/index.d.ts +2 -2
- package/dist/http/index.js +453 -1
- package/dist/http/index.mjs +453 -0
- package/dist/index.d.mts +159 -41
- package/dist/index.d.ts +159 -41
- package/dist/index.js +855 -1
- package/dist/index.mjs +855 -0
- package/dist/{stream-DNTGNv-G.d.ts → stream-BvukHxCv.d.ts} +1 -1
- package/dist/{stream-FwtnWmgX.d.mts → stream-C0-LWnUP.d.mts} +1 -1
- package/package.json +25 -16
- package/dist/agent/cli/main.cjs +0 -16
- package/dist/agent/index.cjs +0 -1
- package/dist/chunk-63MXGA7P.js +0 -1
- package/dist/chunk-63ODH5W4.cjs +0 -25
- package/dist/chunk-7PHP7KQB.cjs +0 -1
- package/dist/chunk-MAR4TNUH.js +0 -1
- package/dist/chunk-P4IND5C3.js +0 -25
- package/dist/chunk-T3QEEHK6.cjs +0 -1
- package/dist/effect-NSaHksNl.d.mts +0 -367
- package/dist/effect-NSaHksNl.d.ts +0 -367
- package/dist/http/index.cjs +0 -1
- package/dist/index.cjs +0 -1
|
@@ -0,0 +1,453 @@
|
|
|
1
|
+
import {
|
|
2
|
+
streamFromReadableStream
|
|
3
|
+
} from "../chunk-QRPYH5J7.mjs";
|
|
4
|
+
import {
|
|
5
|
+
asyncFail,
|
|
6
|
+
asyncFlatMap,
|
|
7
|
+
asyncFold,
|
|
8
|
+
asyncSucceed,
|
|
9
|
+
fromPromiseAbortable,
|
|
10
|
+
mapTryAsync,
|
|
11
|
+
toPromise,
|
|
12
|
+
withAsyncPromise
|
|
13
|
+
} from "../chunk-T5XJDGTQ.mjs";
|
|
14
|
+
|
|
15
|
+
// src/http/optics/lens.ts
// Minimal functional lens: a getter plus a curried setter over an
// immutable structure. `over` rewrites the focused value in place.
var Lens = {
  // Pair a getter with a curried setter.
  make: (get, set) => ({ get, set }),
  // Modify the focus with f and write the result back into s.
  over: (ln, f) => (s) => {
    const next = f(ln.get(s));
    return ln.set(next)(s);
  },
  // Sequence two lenses: ab focuses inside whatever sa focuses.
  compose: (ab, sa) =>
    Lens.make(
      (s) => ab.get(sa.get(s)),
      (b) => (s) => {
        const inner = ab.set(b)(sa.get(s));
        return sa.set(inner)(s);
      }
    )
};

// src/http/optics/request.ts
// Lens over the (possibly absent) headers record of a request.
var Request = {
  headers: Lens.make(
    (req) => req.headers ?? {},
    (headers) => (req) => ({ ...req, headers })
  )
};
// Overlay `extra` on top of the current headers (extra wins on conflict).
var mergeHeaders = (extra) => (req) =>
  Lens.over(Request.headers, (h) => ({ ...h, ...extra }))(req);
// Slide `under` beneath the current headers (existing entries win).
var mergeHeadersUnder = (under) => (req) =>
  Lens.over(Request.headers, (h) => ({ ...under, ...h }))(req);
// Set header k to v only when it is currently absent/falsy.
var setHeaderIfMissing = (k, v) => (req) =>
  Lens.over(Request.headers, (h) => (h[k] ? h : { ...h, [k]: v }))(req);
|
|
41
|
+
|
|
42
|
+
// src/http/sleep.ts
// Anything carrying a `_tag` discriminant is already a tagged HttpError.
var isHttpError = (e) => typeof e === "object" && e !== null && "_tag" in e;
// Coerce an arbitrary thrown value into the tagged HttpError union:
// pass tagged values through, map DOM-style AbortError objects to Abort,
// and stringify everything else as a FetchError.
var normalizeHttpError = (e) => {
  if (isHttpError(e)) return e;
  const looksLikeAbort = typeof e === "object" && e !== null && e.name === "AbortError";
  return looksLikeAbort ? { _tag: "Abort" } : { _tag: "FetchError", message: String(e) };
};
|
|
51
|
+
// Abortable delay: resolves after `ms`, rejects with { _tag: "Abort" } if the
// signal fires first. Rejections are funneled through normalizeHttpError.
// Fix: detach the abort handler once the timer resolves normally — with a
// long-lived signal shared across many sleeps (e.g. one controller driving a
// whole retry loop) the previous code accumulated dead "abort" listeners.
var sleepMs = (ms) => fromPromiseAbortable(
  (signal) => new Promise((resolve, reject) => {
    if (signal.aborted) return reject({ _tag: "Abort" });
    const onAbort = () => {
      clearTimeout(id);
      reject({ _tag: "Abort" });
    };
    const id = setTimeout(() => {
      // Timer won the race: remove the listener so the signal doesn't leak it.
      signal.removeEventListener("abort", onAbort);
      resolve(undefined);
    }, ms);
    signal.addEventListener("abort", onAbort, { once: true });
  }),
  normalizeHttpError
);
|
|
63
|
+
|
|
64
|
+
// src/http/client.ts
// Apply a middleware to a client function and re-wrap the result so the
// `.with` chain keeps working on the decorated client.
var withMiddleware = (mw) => (c) => decorate(mw(c));
// Make a raw request function directly callable while exposing a chainable
// `.with(mw)` that stacks further middlewares.
var decorate = (run) => {
  const call = (req) => run(req);
  return Object.assign(call, { with: (mw) => decorate(mw(run)) });
};
// Coerce unknown failures from fetch into tagged HttpErrors: DOM aborts
// become Abort, tagged values pass through, the rest become FetchError.
var normalizeHttpError2 = (e) => {
  if (e instanceof DOMException && e.name === "AbortError") return { _tag: "Abort" };
  const alreadyTagged = typeof e === "object" && e && "_tag" in e;
  return alreadyTagged ? e : { _tag: "FetchError", message: String(e) };
};
|
|
74
|
+
// Flatten any HeadersInit variant (Headers instance, [key, value] array, or
// plain record) into a plain object; returns undefined for anything unusable.
var normalizeHeadersInit = (h) => {
  if (!h) return void 0;
  const isHeadersInstance = typeof Headers !== "undefined" && h instanceof Headers;
  if (isHeadersInstance) {
    const out = {};
    h.forEach((v, k) => {
      out[k] = v;
    });
    return out;
  }
  if (Array.isArray(h)) return Object.fromEntries(h);
  return typeof h === "object" ? { ...h } : void 0;
};
|
|
85
|
+
// Layer configured default headers beneath a request's own headers, then
// also layer any headers found in req.init beneath them — in both cases the
// request's explicit headers win on conflict.
var normalizeRequest = (defaultHeaders) => (req0) => {
  const hasDefaults = Object.keys(defaultHeaders).length > 0;
  let req = hasDefaults ? mergeHeadersUnder(defaultHeaders)(req0) : req0;
  const fromInit = normalizeHeadersInit(req0.init?.headers);
  if (fromInit && Object.keys(fromInit).length > 0) {
    req = mergeHeadersUnder(fromInit)(req);
  }
  return req;
};
|
|
93
|
+
/**
 * Build the streaming wire: given a request, returns an abortable effect
 * resolving to { status, statusText, headers, body, ms } where `body` wraps
 * the response ReadableStream as a stream. All failures are normalized to
 * tagged HttpErrors via normalizeHttpError2.
 */
function makeHttpStream(cfg = {}) {
  const baseUrl = cfg.baseUrl ?? "";
  const defaultHeaders = cfg.headers ?? {};
  // Layers cfg.headers and req.init.headers beneath the request's own headers.
  const normalize = normalizeRequest(defaultHeaders);
  return (req0) => fromPromiseAbortable(
    async (signal) => {
      const req = normalize(req0);
      let url;
      try {
        url = new URL(req.url, baseUrl);
      } catch {
        throw { _tag: "BadUrl", message: `URL inv\xE1lida: ${req.url}` };
      }
      const started = performance.now();
      // req.init spreads first so explicit method/headers/body/signal win.
      const res = await fetch(url, {
        ...req.init ?? {},
        method: req.method,
        headers: Request.headers.get(req),
        // 👈 optics: headers already normalized
        body: req.body,
        signal
      });
      // Flatten response headers into a plain record.
      const headers = {};
      res.headers.forEach((v, k) => headers[k] = v);
      // NOTE(review): res.body can be null (e.g. 204 responses) — confirm
      // streamFromReadableStream tolerates a null source.
      const body = streamFromReadableStream(res.body, normalizeHttpError2);
      return {
        status: res.status,
        statusText: res.statusText,
        headers,
        body,
        ms: Math.round(performance.now() - started)
      };
    },
    normalizeHttpError2
  );
}
|
|
129
|
+
/**
 * Build the buffered wire: performs fetch, reads the entire body as text, and
 * reports wall-clock timing. Failures are normalized to tagged HttpErrors.
 * The result is `decorate`d, so it is directly callable and supports
 * `.with(mw)` middleware chaining.
 */
function makeHttp(cfg = {}) {
  const baseUrl = cfg.baseUrl ?? "";
  const defaultHeaders = cfg.headers ?? {};
  // Layers cfg.headers and req.init.headers beneath the request's own headers.
  const normalize = normalizeRequest(defaultHeaders);
  const run = (req0) => fromPromiseAbortable(
    async (signal) => {
      const req = normalize(req0);
      let url;
      try {
        url = new URL(req.url, baseUrl);
      } catch {
        throw { _tag: "BadUrl", message: `URL inv\xE1lida: ${req.url}` };
      }
      const started = performance.now();
      // req.init spreads first so explicit method/headers/body/signal win.
      const res = await fetch(url, {
        ...req.init ?? {},
        method: req.method,
        headers: Request.headers.get(req),
        // 👈 optics
        body: req.body,
        signal
      });
      // Buffer the whole body before returning; ms includes the read time.
      const bodyText = await res.text();
      const headers = {};
      res.headers.forEach((v, k) => headers[k] = v);
      return {
        status: res.status,
        statusText: res.statusText,
        headers,
        bodyText,
        ms: Math.round(performance.now() - started)
      };
    },
    normalizeHttpError2
  );
  return decorate(run);
}
|
|
166
|
+
// Clamp n into [min, max].
var clamp = (n, min, max) => Math.max(min, Math.min(max, n));
// By default only transport-level failures are worth retrying.
var defaultRetryOnError = (e) => e._tag === "FetchError";
// Transient statuses: request timeout, throttling, and gateway/server hiccups.
var defaultRetryOnStatus = (s) => s === 408 || s === 429 || s === 500 || s === 502 || s === 503 || s === 504;
// Full-jitter exponential backoff: uniform in [0, min(base * 2^attempt, cap)).
// Fix: negative base/cap are treated as 0, matching backoffDelayMs2 in
// src/http/retry/retry.ts — previously a misconfigured (negative) policy
// could produce a negative delay fed into setTimeout.
var backoffDelayMs = (attempt, base, cap) => {
  const exp = Math.max(0, base) * Math.pow(2, attempt);
  const lim = clamp(exp, 0, Math.max(0, cap));
  return Math.floor(Math.random() * lim);
};
// Parse a Retry-After header (case-insensitive key lookup): either
// delta-seconds or an HTTP-date. Returns a non-negative millisecond delay,
// or undefined when absent/unparseable.
var retryAfterMs = (headers) => {
  const key = Object.keys(headers).find((k) => k.toLowerCase() === "retry-after");
  if (!key) return void 0;
  const v = headers[key]?.trim();
  if (!v) return void 0;
  const secs = Number(v);
  if (Number.isFinite(secs)) return Math.max(0, Math.floor(secs * 1e3));
  const t = Date.parse(v);
  if (Number.isFinite(t)) return Math.max(0, t - Date.now());
  return void 0;
};
|
|
185
|
+
// Retry middleware for the streaming client. Re-issues the whole request on
// retryable errors or retryable statuses, with full-jitter backoff; a
// server-provided Retry-After overrides the computed backoff. Abort and
// BadUrl are terminal and never retried.
// NOTE(review): responses of retried attempts have streaming bodies that are
// not consumed here — confirm the runtime cancels/releases them.
var withRetryStream = (p) => (next) => ((req) => {
  const loop = (attempt) => asyncFold(
    next(req),
    // Error channel: retry transient errors until p.maxRetries is exhausted.
    (e) => {
      if (e._tag === "Abort" || e._tag === "BadUrl") return asyncFail(e);
      const canRetry = attempt < p.maxRetries && (p.retryOnError ?? defaultRetryOnError)(e);
      if (!canRetry) return asyncFail(e);
      const d = backoffDelayMs(attempt, p.baseDelayMs, p.maxDelayMs);
      return asyncFlatMap(sleepMs(d), () => loop(attempt + 1));
    },
    // Success channel: a delivered response may still be retryable by status.
    (w) => {
      const canRetry = attempt < p.maxRetries && (p.retryOnStatus ?? defaultRetryOnStatus)(w.status);
      if (!canRetry) return asyncSucceed(w);
      // Retry-After from the server wins over computed backoff.
      const ra = retryAfterMs(w.headers);
      const d = ra ?? backoffDelayMs(attempt, p.baseDelayMs, p.maxDelayMs);
      return asyncFlatMap(sleepMs(d), () => loop(attempt + 1));
    }
  );
  return loop(0);
});
|
|
205
|
+
|
|
206
|
+
// src/http/retry/retry.ts
// Only idempotent methods are retried unless the caller opts in to more.
var defaultRetryableMethods = ["GET", "HEAD", "OPTIONS"];
// Transient statuses: request timeout, throttling, gateway/server hiccups.
var defaultRetryOnStatus2 = (s) => s === 408 || s === 429 || s === 500 || s === 502 || s === 503 || s === 504;
// Only transport-level failures are retryable by default.
var defaultRetryOnError2 = (e) => e._tag === "FetchError";
// Clamp n into [min, max].
var clamp2 = (n, min, max) => Math.max(min, Math.min(max, n));
// Full-jitter exponential backoff bounded by cap; negative inputs count as 0.
var backoffDelayMs2 = (attempt, base, cap) => {
  const ceiling = clamp2(Math.max(0, base) * Math.pow(2, attempt), 0, Math.max(0, cap));
  return Math.floor(Math.random() * ceiling);
};
// Case-insensitive header lookup over a plain record.
var headerCI = (h, name) => {
  const wanted = name.toLowerCase();
  const key = Object.keys(h).find((x) => x.toLowerCase() === wanted);
  return key ? h[key] : void 0;
};
// Retry-After: delta-seconds or an HTTP-date, as a non-negative ms delay.
var retryAfterMs2 = (headers) => {
  const raw = headerCI(headers, "retry-after")?.trim();
  if (!raw) return void 0;
  const asSeconds = Number(raw);
  if (Number.isFinite(asSeconds)) return Math.max(0, Math.floor(asSeconds * 1e3));
  const asDate = Date.parse(raw);
  return Number.isFinite(asDate) ? Math.max(0, asDate - Date.now()) : void 0;
};
|
|
231
|
+
// Retry middleware for the buffered client. Unlike withRetryStream, this one
// also gates retries on the HTTP method: only methods in retryOnMethods
// (idempotent by default) are ever retried; everything else gets exactly one
// attempt. Abort and BadUrl are terminal; Retry-After overrides backoff.
var withRetry = (p) => (next) => {
  const retryOnMethods = p.retryOnMethods ?? defaultRetryableMethods;
  const retryOnStatus = p.retryOnStatus ?? defaultRetryOnStatus2;
  const retryOnError = p.retryOnError ?? defaultRetryOnError2;
  const isMethodRetryable = (req) => retryOnMethods.includes(req.method);
  const loop = (req, attempt) => {
    // Non-retryable methods bypass the fold entirely: single attempt.
    if (!isMethodRetryable(req)) return next(req);
    return asyncFold(
      next(req),
      // Error channel: never retry Abort/BadUrl; otherwise ask retryOnError.
      (e) => {
        if (e._tag === "Abort" || e._tag === "BadUrl") return asyncFail(e);
        const canRetry = attempt < p.maxRetries && retryOnError(e);
        if (!canRetry) return asyncFail(e);
        const d = backoffDelayMs2(attempt, p.baseDelayMs, p.maxDelayMs);
        return asyncFlatMap(sleepMs(d), () => loop(req, attempt + 1));
      },
      // Status channel: retry transient statuses, honoring Retry-After.
      (w) => {
        const canRetry = attempt < p.maxRetries && retryOnStatus(w.status);
        if (!canRetry) return asyncSucceed(w);
        const ra = retryAfterMs2(w.headers);
        const d = ra ?? backoffDelayMs2(attempt, p.baseDelayMs, p.maxDelayMs);
        return asyncFlatMap(sleepMs(d), () => loop(req, attempt + 1));
      }
    );
  };
  return (req) => loop(req, 0);
};
|
|
258
|
+
|
|
259
|
+
// src/http/httpClient.ts
// Best-effort absolute URL for response metadata/logging: proper resolution
// against baseUrl when possible, plain string concatenation as a fallback
// (e.g. when both parts are relative and URL construction throws).
var resolveFinalUrl = (baseUrl, url) => {
  const base = baseUrl ?? "";
  try {
    const resolved = new URL(url, base);
    return resolved.toString();
  } catch {
    return base + url;
  }
};
|
|
267
|
+
// Shared plumbing for the buffered clients: owns the wire (makeHttp), the
// effect-to-Promise bridge, and the request/response shaping helpers that
// httpClient and httpClientWithMeta both build on.
var createHttpCore = (cfg = {}) => {
  const wire = makeHttp(cfg);
  // Wrap an effect so callers can also consume it as a Promise.
  const withPromise = (eff) => withAsyncPromise((e, env) => toPromise(e, env))(eff);
  const requestRaw = (req) => wire(req);
  // Split a fetch-style init into normalized headers + the remaining options.
  const splitInit = (init) => {
    const { headers, ...rest } = init ?? {};
    return {
      headers: normalizeHeadersInit(headers),
      init: rest
    };
  };
  // Overlay init-provided headers on top of the request's headers (they win).
  const applyInitHeaders = (headers) => (req) => headers ? mergeHeaders(headers)(req) : req;
  // Assemble a wire request; the body field is attached only when non-empty.
  const buildReq = (method, url, init, body) => {
    const s = splitInit(init);
    const req = {
      method,
      url,
      ...body && body.length > 0 ? { body } : {},
      init: s.init
    };
    return applyInitHeaders(s.headers)(req);
  };
  // Project a wire result into the public response shape with a typed body.
  const toResponse = (w, body) => ({
    status: w.status,
    statusText: w.statusText,
    headers: w.headers,
    body
  });
  return {
    cfg,
    wire,
    withPromise,
    requestRaw,
    splitInit,
    applyInitHeaders,
    buildReq,
    toResponse
  };
};
|
|
306
|
+
// Buffered HTTP client: every verb resolves to a fully-read response.
// `with`/`withRetry` rebuild the whole client over a decorated wire, so a
// middleware applies uniformly to every verb.
function httpClient(cfg = {}) {
  const core = createHttpCore(cfg);
  const make = (wire) => {
    const requestRaw = (req) => wire(req);
    const request = (req) => core.withPromise(requestRaw(req));
    const get = (url, init) => request(core.buildReq("GET", url, init));
    const post = (url, body, init) => request(core.buildReq("POST", url, init, body));
    // Text body: reuse the already-buffered bodyText from the wire result.
    const getText = (url, init) => {
      const req = core.buildReq("GET", url, init);
      return core.withPromise(mapTryAsync(requestRaw(req), (w) => core.toResponse(w, w.bodyText)));
    };
    // JSON GET: sets Accept only when the caller did not provide one.
    // NOTE(review): JSON.parse runs inside mapTryAsync — presumably parse
    // failures surface on the error channel; confirm mapTryAsync's contract.
    const getJson = (url, init) => {
      const base = core.buildReq("GET", url, init);
      const req = setHeaderIfMissing("accept", "application/json")(base);
      return core.withPromise(mapTryAsync(requestRaw(req), (w) => core.toResponse(w, JSON.parse(w.bodyText))));
    };
    // JSON round-trip: serializes the payload and sets both JSON headers
    // via optics unless the caller already provided them.
    const postJson = (url, bodyObj, init) => {
      const base = core.buildReq("POST", url, init, JSON.stringify(bodyObj ?? {}));
      const req = setHeaderIfMissing("content-type", "application/json")(
        setHeaderIfMissing("accept", "application/json")(base)
      );
      return core.withPromise(
        mapTryAsync(requestRaw(req), (w) => core.toResponse(w, JSON.parse(w.bodyText)))
      );
    };
    return {
      request,
      get,
      post,
      getText,
      getJson,
      postJson,
      with: (mw) => make(wire.with(mw)),
      withRetry: (p) => make(wire.with(withRetry(p))),
      wire
    };
  };
  return make(core.wire);
}
|
|
345
|
+
// Buffered client variant whose results carry request metadata (final URL,
// start timestamp, wire-measured duration) alongside the wire response.
function httpClientWithMeta(cfg = {}) {
  const core = createHttpCore(cfg);
  // Metadata attached to every result; durationMs comes from wire timing.
  const mkMeta = (req, w, startedAt) => ({
    request: req,
    urlFinal: resolveFinalUrl(core.cfg.baseUrl, req.url),
    startedAt,
    durationMs: w.ms
  });
  const request = (req) => {
    const startedAt = Date.now();
    return core.withPromise(
      mapTryAsync(core.requestRaw(req), (w) => ({
        wire: w,
        meta: mkMeta(req, w, startedAt)
      }))
    );
  };
  const get = (url, init) => {
    const req = core.buildReq("GET", url, init);
    return request(req);
  };
  const post = (url, body, init) => {
    const req = core.buildReq("POST", url, init, body);
    return request(req);
  };
  // JSON POST: sets accept/content-type via optics when missing, and also
  // parses the response body as JSON into `response`.
  const postJson = (url, bodyObj, init) => {
    const base = core.buildReq("POST", url, init, JSON.stringify(bodyObj ?? {}));
    const req = setHeaderIfMissing("content-type", "application/json")(
      setHeaderIfMissing("accept", "application/json")(base)
    );
    const startedAt = Date.now();
    return core.withPromise(
      mapTryAsync(core.requestRaw(req), (w) => ({
        wire: w,
        response: core.toResponse(w, JSON.parse(w.bodyText)),
        meta: mkMeta(req, w, startedAt)
      }))
    );
  };
  // Text GET: response carries the buffered bodyText verbatim.
  const getText = (url, init) => {
    const req = core.buildReq("GET", url, init);
    const startedAt = Date.now();
    return core.withPromise(
      mapTryAsync(core.requestRaw(req), (w) => ({
        wire: w,
        response: core.toResponse(w, w.bodyText),
        meta: mkMeta(req, w, startedAt)
      }))
    );
  };
  // JSON GET: sets Accept when missing and parses the body.
  const getJson = (url, init) => {
    const base = core.buildReq("GET", url, init);
    const req = setHeaderIfMissing("accept", "application/json")(base);
    const startedAt = Date.now();
    return core.withPromise(
      mapTryAsync(core.requestRaw(req), (w) => ({
        wire: w,
        response: core.toResponse(w, JSON.parse(w.bodyText)),
        meta: mkMeta(req, w, startedAt)
      }))
    );
  };
  return {
    request,
    // => { wire, meta }
    get,
    // => { wire, meta }
    getText,
    // => { wire, response(text), meta }
    getJson,
    // => { wire, response(json), meta }
    post,
    // => { wire, meta }
    postJson
    // => { wire, response(json), meta } (also sets JSON headers via optics)
  };
}
|
|
422
|
+
// Streaming HTTP client over makeHttpStream: responses expose the body as a
// stream rather than buffered text. `with`/`withRetry` rebuild the client
// around a wrapped wire so middleware applies to every entry point.
function httpClientStream(cfg = {}) {
  const wire = makeHttpStream(cfg);
  const make = (w) => {
    // Bridge the wire effect so callers may also await it as a Promise.
    const withPromise = (eff) => withAsyncPromise((e, env) => toPromise(e, env))(eff);
    const request = (req) => withPromise(w(req));
    // GET with a permissive Accept header unless the caller set one.
    const getStream = (url, init) => {
      const base = { method: "GET", url, init };
      const req = setHeaderIfMissing("accept", "*/*")(base);
      return request(req);
    };
    return {
      request,
      getStream,
      get: getStream,
      with: (mw) => make(mw(w)),
      withRetry: (p) => make(withRetryStream(p)(w)),
      wire: w
    };
  };
  return make(wire);
}
|
|
443
|
+
export {
|
|
444
|
+
decorate,
|
|
445
|
+
httpClient,
|
|
446
|
+
httpClientStream,
|
|
447
|
+
httpClientWithMeta,
|
|
448
|
+
makeHttp,
|
|
449
|
+
makeHttpStream,
|
|
450
|
+
normalizeHeadersInit,
|
|
451
|
+
withMiddleware,
|
|
452
|
+
withRetryStream
|
|
453
|
+
};
|
package/dist/index.d.mts
CHANGED
|
@@ -1,46 +1,123 @@
|
|
|
1
|
-
import { A as Async,
|
|
2
|
-
export { a as AsyncWithPromise, C as CancelToken,
|
|
3
|
-
import { Z as ZStream } from './stream-
|
|
4
|
-
export { C as Concat, E as Emit, a as Empty, F as Flatten, b as FromPull, M as Managed, c as Merge, N as Normalize, S as Scoped, d as assertNever, e as collectStream, f as concatStream, g as emitStream, h as emptyStream, i as flattenStream, j as foreachStream, k as fromArray, l as fromPull, m as managedStream, n as mapStream, o as merge, p as mergeStream, r as rangeStream, s as streamFromReadableStream, u as uncons, q as unwrapScoped, w as widenOpt, z as zip } from './stream-
|
|
1
|
+
import { F as FiberEngine, W as WasmEngineRuntime, A as Async, R as RuntimeFiber, b as FiberEngineStats, c as Fiber, d as FiberId, e as FiberStatus, E as Exit, f as RuntimeEvent, g as WasmBridge, h as OpcodeProgram, i as FiberId$1, j as EngineEvent, k as RefId, N as NodeId, l as OpcodeNode, S as Scope, m as RingBufferOptions, n as EngineStats, O as Option } from './effect-ISvXPLgc.mjs';
|
|
2
|
+
export { o as AsyncRegisterRef, a as AsyncWithPromise, p as BoundedRingBuffer, C as CancelToken, q as Canceler, r as Cause, s as CustomHostAction, D as DbHostAction, t as DecodeRef, u as DefaultHostExecutor, v as EngineKind, w as EngineSelection, x as EngineSelectionMode, y as FiberEngineKind, z as FlatMapRef, B as FoldFailureRef, G as FoldSuccessRef, H as HostAction, I as HostActionKind, J as HostActionResult, K as HostExecutionContext, L as HostExecutor, M as HostRegistry, P as HttpHostAction, Q as Interrupted, T as Joiner, U as None, V as NoopHooks, X as ProgramBuilder, Y as ProgramPatch, _ as PushStatus, $ as QueueHostAction, a0 as RingBuffer, a1 as RingBufferEngine, a2 as RingBufferStatsData, a3 as Runtime, a4 as RuntimeCapabilities, a5 as RuntimeEngineMode, a6 as RuntimeOptions, a7 as Scheduler, a8 as SchedulerEngine, a9 as SchedulerOptions, aa as SchedulerStats, ab as SchedulerStatsData, ac as ScopeId, ad as Some, ae as SyncRef, af as Task, ag as WasmFiberEngine, ah as WasmFiberEngineOptions, Z as ZIO, ai as acquireRelease, aj as async, ak as asyncCatchAll, al as asyncFail, am as asyncFlatMap, an as asyncFold, ao as asyncInterruptible, ap as asyncMap, aq as asyncMapError, ar as asyncSucceed, as as asyncSync, at as asyncTotal, au as catchAll, av as end, aw as engineStats, ax as fail, ay as flatMap, az as fork, aA as fromPromiseAbortable, aB as getBenchmarkBudget, aC as getCurrentFiber, aD as globalScheduler, aE as linkAbortController, aF as makeBoundedRingBuffer, aG as makeCancelToken, aH as map, aI as mapAsync, aJ as mapError, aK as mapTryAsync, aL as none, aM as orElseOptional, aN as runtimeCapabilities, aO as selectedEngineStats, aP as setBenchmarkBudget, aQ as some, aR as succeed, aS as sync, aT as toPromise, aU as unit, aV as unsafeGetCurrentRuntime, aW as unsafeRunAsync, aX as unsafeRunFoldWithEnv, aY as withAsyncPromise, aZ as withCurrentFiber, a_ as withScope, a$ as withScopeAsync } from './effect-ISvXPLgc.mjs';
|
|
3
|
+
import { Z as ZStream } from './stream-C0-LWnUP.mjs';
|
|
4
|
+
export { C as Concat, E as Emit, a as Empty, F as Flatten, b as FromPull, M as Managed, c as Merge, N as Normalize, S as Scoped, d as assertNever, e as collectStream, f as concatStream, g as emitStream, h as emptyStream, i as flattenStream, j as foreachStream, k as fromArray, l as fromPull, m as managedStream, n as mapStream, o as merge, p as mergeStream, r as rangeStream, s as streamFromReadableStream, u as uncons, q as unwrapScoped, w as widenOpt, z as zip } from './stream-C0-LWnUP.mjs';
|
|
5
5
|
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
private head;
|
|
14
|
-
private tail;
|
|
15
|
-
private len;
|
|
16
|
-
get length(): number;
|
|
17
|
-
isEmpty(): boolean;
|
|
18
|
-
push(value: T): Node<T>;
|
|
19
|
-
shift(): T | undefined;
|
|
20
|
-
remove(node: Node<T>): void;
|
|
21
|
-
private unlink;
|
|
6
|
+
declare class JsFiberEngine<R> implements FiberEngine<R> {
|
|
7
|
+
private readonly runtime;
|
|
8
|
+
readonly kind: "js";
|
|
9
|
+
private startedFibers;
|
|
10
|
+
constructor(runtime: WasmEngineRuntime<R> & any);
|
|
11
|
+
fork<E, A>(effect: Async<R, E, A>, scopeId?: number): RuntimeFiber<R, E, A>;
|
|
12
|
+
stats(): FiberEngineStats;
|
|
22
13
|
}
|
|
23
14
|
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
15
|
+
type InternalFiberStatus = "queued" | "running" | "suspended" | "done" | "failed" | "interrupted";
|
|
16
|
+
declare class EngineFiberHandle<R, E, A> implements Fiber<E, A> {
|
|
17
|
+
private readonly onScheduledStep;
|
|
18
|
+
private readonly onInterrupt;
|
|
19
|
+
private readonly onJoiner?;
|
|
20
|
+
private readonly onQueued?;
|
|
21
|
+
readonly id: FiberId;
|
|
22
|
+
readonly runtime: WasmEngineRuntime<R> & any;
|
|
23
|
+
fiberContext: any;
|
|
24
|
+
name?: string;
|
|
25
|
+
scopeId?: number;
|
|
26
|
+
parentFiberId?: number;
|
|
27
|
+
private result;
|
|
28
|
+
private readonly joiners;
|
|
29
|
+
private readonly finalizers;
|
|
30
|
+
private finalizersDrained;
|
|
31
|
+
private internalStatus;
|
|
32
|
+
private queued;
|
|
33
|
+
constructor(id: FiberId, runtime: WasmEngineRuntime<R> & any, onScheduledStep: (fiberId: FiberId) => void, onInterrupt: (fiberId: FiberId, reason: unknown) => void, onJoiner?: ((fiberId: FiberId) => void) | undefined, onQueued?: ((fiberId: FiberId) => void) | undefined);
|
|
34
|
+
status(): FiberStatus;
|
|
35
|
+
engineStatus(): InternalFiberStatus;
|
|
36
|
+
setEngineStatus(status: InternalFiberStatus): void;
|
|
37
|
+
join(cb: (exit: Exit<E, A>) => void): void;
|
|
38
|
+
interrupt(): void;
|
|
39
|
+
addFinalizer(f: (exit: Exit<E, A>) => void): void;
|
|
40
|
+
schedule(tag?: string): void;
|
|
41
|
+
emit(ev: RuntimeEvent): void;
|
|
42
|
+
succeed(value: A): void;
|
|
43
|
+
fail(error: E): void;
|
|
44
|
+
die(defect: unknown): void;
|
|
45
|
+
interrupted(): void;
|
|
46
|
+
complete(exit: Exit<E, A>): void;
|
|
47
|
+
private runFinalizersOnce;
|
|
28
48
|
}
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
private
|
|
33
|
-
private
|
|
34
|
-
private
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
49
|
+
|
|
50
|
+
declare class ReferenceWasmBridge implements WasmBridge {
|
|
51
|
+
readonly kind: "wasm-reference";
|
|
52
|
+
private nextFiberId;
|
|
53
|
+
private readonly fibers;
|
|
54
|
+
private started;
|
|
55
|
+
private completed;
|
|
56
|
+
private failed;
|
|
57
|
+
private interrupted;
|
|
58
|
+
createFiber(program: OpcodeProgram): FiberId$1;
|
|
59
|
+
poll(fiberId: FiberId$1): EngineEvent;
|
|
60
|
+
provideValue(fiberId: FiberId$1, valueRef: RefId): EngineEvent;
|
|
61
|
+
provideError(fiberId: FiberId$1, errorRef: RefId): EngineEvent;
|
|
62
|
+
provideEffect(fiberId: FiberId$1, root: NodeId, nodes: OpcodeNode[]): EngineEvent;
|
|
63
|
+
interrupt(fiberId: FiberId$1, reasonRef: RefId): EngineEvent;
|
|
64
|
+
dropFiber(fiberId: FiberId$1): void;
|
|
65
|
+
stats(): unknown;
|
|
66
|
+
private mustFiber;
|
|
67
|
+
private step;
|
|
68
|
+
private success;
|
|
69
|
+
private failure;
|
|
70
|
+
private suspend;
|
|
71
|
+
private markDone;
|
|
72
|
+
private markFailed;
|
|
73
|
+
}
|
|
74
|
+
|
|
75
|
+
declare class WasmPackFiberBridge implements WasmBridge {
|
|
76
|
+
readonly kind: "wasm";
|
|
77
|
+
private readonly vm;
|
|
78
|
+
constructor(modulePath?: string);
|
|
79
|
+
createFiber(program: OpcodeProgram): FiberId$1;
|
|
80
|
+
poll(fiberId: FiberId$1): EngineEvent;
|
|
81
|
+
provideValue(fiberId: FiberId$1, valueRef: RefId): EngineEvent;
|
|
82
|
+
provideError(fiberId: FiberId$1, errorRef: RefId): EngineEvent;
|
|
83
|
+
provideEffect(fiberId: FiberId$1, root: NodeId, nodes: OpcodeNode[]): EngineEvent;
|
|
84
|
+
interrupt(fiberId: FiberId$1, reasonRef: RefId): EngineEvent;
|
|
85
|
+
dropFiber(fiberId: FiberId$1): void;
|
|
86
|
+
stats(): unknown;
|
|
87
|
+
}
|
|
88
|
+
|
|
89
|
+
type WasmFiberRegistryStats = {
|
|
90
|
+
readonly live: number;
|
|
91
|
+
readonly queued: number;
|
|
92
|
+
readonly running: number;
|
|
93
|
+
readonly suspended: number;
|
|
94
|
+
readonly done: number;
|
|
95
|
+
readonly failed: number;
|
|
96
|
+
readonly interrupted: number;
|
|
97
|
+
readonly wakeQueueLen: number;
|
|
98
|
+
readonly registered: number;
|
|
99
|
+
readonly completed: number;
|
|
100
|
+
readonly wakeups: number;
|
|
101
|
+
readonly duplicateWakeups: number;
|
|
102
|
+
readonly joins: number;
|
|
103
|
+
};
|
|
104
|
+
type FiberRegistryStatus = "queued" | "running" | "suspended" | "done" | "failed" | "interrupted";
|
|
105
|
+
declare class WasmFiberRegistryBridge {
|
|
106
|
+
private readonly registry;
|
|
107
|
+
constructor();
|
|
108
|
+
registerFiber(fiberId: FiberId$1, parentId?: number, scopeId?: number): void;
|
|
109
|
+
markQueued(fiberId: FiberId$1): void;
|
|
110
|
+
markRunning(fiberId: FiberId$1): void;
|
|
111
|
+
markSuspended(fiberId: FiberId$1): void;
|
|
112
|
+
markDone(fiberId: FiberId$1, status: Exclude<FiberRegistryStatus, "queued" | "running" | "suspended">): number;
|
|
113
|
+
dropFiber(fiberId: FiberId$1): void;
|
|
114
|
+
addJoiner(fiberId: FiberId$1): void;
|
|
115
|
+
wake(fiberId: FiberId$1): boolean;
|
|
116
|
+
drainWakeup(): FiberId$1 | undefined;
|
|
117
|
+
drainWakeups(): FiberId$1[];
|
|
118
|
+
wakeQueueLength(): number;
|
|
119
|
+
stateOf(fiberId: FiberId$1): FiberRegistryStatus | "missing";
|
|
120
|
+
stats(): WasmFiberRegistryStats;
|
|
44
121
|
}
|
|
45
122
|
|
|
46
123
|
declare function buffer<R, E, A>(stream: ZStream<{} & R, E, A>, capacity: number, strategy?: "backpressure" | "dropping" | "sliding"): ZStream<{} & R, E, A>;
|
|
@@ -78,7 +155,8 @@ type Queue<A> = {
|
|
|
78
155
|
size: () => number;
|
|
79
156
|
shutdown: () => void;
|
|
80
157
|
};
|
|
81
|
-
|
|
158
|
+
type QueueOptions = RingBufferOptions;
|
|
159
|
+
declare function bounded<A>(capacity: number, strategy?: Strategy, options?: QueueOptions): Async<unknown, unknown, Queue<A>>;
|
|
82
160
|
|
|
83
161
|
type HubStrategy = "BackPressure" | "Dropping" | "Sliding";
|
|
84
162
|
type HubClosed = {
|
|
@@ -98,6 +176,42 @@ declare const broadcast: typeof makeHub;
|
|
|
98
176
|
declare function broadcastToHub<R, E, A>(stream: ZStream<R, E, A>, hub: Hub<A>): Async<R, E, void>;
|
|
99
177
|
declare function fromHub<A>(hub: Hub<A>): ZStream<unknown, HubClosed, A>;
|
|
100
178
|
|
|
179
|
+
type StreamChunkEngine = "auto" | "js" | "wasm";
|
|
180
|
+
type StreamChunkOptions = {
|
|
181
|
+
/**
|
|
182
|
+
* auto: use WASM when wasm/pkg is available, otherwise JS.
|
|
183
|
+
* js: always use the JS array chunker.
|
|
184
|
+
* wasm: require BrassWasmChunkBuffer from wasm/pkg.
|
|
185
|
+
*/
|
|
186
|
+
engine?: StreamChunkEngine;
|
|
187
|
+
};
|
|
188
|
+
type StreamChunkStats = {
|
|
189
|
+
len: number;
|
|
190
|
+
maxChunkSize: number;
|
|
191
|
+
emittedChunks: number;
|
|
192
|
+
emittedItems: number;
|
|
193
|
+
flushes: number;
|
|
194
|
+
};
|
|
195
|
+
type Chunker<A> = {
|
|
196
|
+
readonly length: number;
|
|
197
|
+
readonly maxChunkSize: number;
|
|
198
|
+
push(value: A): boolean;
|
|
199
|
+
isFull(): boolean;
|
|
200
|
+
isEmpty(): boolean;
|
|
201
|
+
takeChunk(): readonly A[];
|
|
202
|
+
clear(): void;
|
|
203
|
+
stats(): EngineStats<StreamChunkStats>;
|
|
204
|
+
};
|
|
205
|
+
declare function makeStreamChunker<A>(chunkSize: number, options?: StreamChunkOptions): Chunker<A>;
|
|
206
|
+
/**
|
|
207
|
+
* Re-chunk a stream so downstream operators receive arrays instead of single
|
|
208
|
+
* items. This is the intended WASM boundary: pay the JS↔WASM crossing while
|
|
209
|
+
* assembling chunks, then process bigger batches downstream.
|
|
210
|
+
*/
|
|
211
|
+
declare function chunks<R, E, A>(input: ZStream<R, E, A>, chunkSize: number, options?: StreamChunkOptions): ZStream<R, E, readonly A[]>;
|
|
212
|
+
declare function mapChunks<R, E, A, B>(input: ZStream<R, E, A>, chunkSize: number, f: (chunk: readonly A[]) => readonly B[], options?: StreamChunkOptions): ZStream<R, E, B>;
|
|
213
|
+
declare function mapChunksEffect<Rp, Ep, A, B>(chunkSize: number, f: (chunk: readonly A[]) => Async<Rp, Ep, readonly B[]>, options?: StreamChunkOptions): <R, E>(input: ZStream<R, E, A>) => ZStream<R & Rp, E | Ep, B>;
|
|
214
|
+
|
|
101
215
|
/**
|
|
102
216
|
* ZPipeline-style transformer.
|
|
103
217
|
*
|
|
@@ -129,6 +243,10 @@ declare function dropP<A>(n: number): ZPipeline<unknown, never, A, A>;
|
|
|
129
243
|
declare function mapEffectP<Rp, Ep, A, B>(f: (a: A) => Async<Rp, Ep, B>): ZPipeline<Rp, Ep, A, B>;
|
|
130
244
|
/** Tap each element with an effect, preserving the element. */
|
|
131
245
|
declare function tapEffectP<Rp, Ep, A>(f: (a: A) => Async<Rp, Ep, any>): ZPipeline<Rp, Ep, A, A>;
|
|
246
|
+
/** Re-chunk a stream into arrays of up to `chunkSize` elements. */
|
|
247
|
+
declare function chunksP<A>(chunkSize: number, options?: StreamChunkOptions): ZPipeline<unknown, never, A, readonly A[]>;
|
|
248
|
+
/** Apply one effect per chunk and flatten the returned chunk back to elements. */
|
|
249
|
+
declare function mapChunksEffectP<Rp, Ep, A, B>(chunkSize: number, f: (chunk: readonly A[]) => Async<Rp, Ep, readonly B[]>, options?: StreamChunkOptions): ZPipeline<Rp, Ep, A, B>;
|
|
132
250
|
/** Buffer upstream using your existing queue-based buffer implementation. */
|
|
133
251
|
declare function bufferP<A>(capacity: number, strategy?: "backpressure" | "dropping" | "sliding"): ZPipeline<unknown, never, A, A>;
|
|
134
252
|
/**
|
|
@@ -137,4 +255,4 @@ declare function bufferP<A>(capacity: number, strategy?: "backpressure" | "dropp
|
|
|
137
255
|
*/
|
|
138
256
|
declare function groupedP<A>(n: number): ZPipeline<unknown, never, A, A[]>;
|
|
139
257
|
|
|
140
|
-
export { Async, Exit, Fiber, type Hub, type HubClosed, type HubStrategy,
|
|
258
|
+
export { Async, EngineEvent, EngineFiberHandle, EngineStats, Exit, Fiber, FiberEngine, FiberEngineStats, FiberId, FiberStatus, type Hub, type HubClosed, type HubStrategy, type InternalFiberStatus, JsFiberEngine, NodeId, OpcodeNode, OpcodeProgram, Option, type Queue, type QueueClosed, type QueueOptions, RefId, ReferenceWasmBridge, RingBufferOptions, RuntimeFiber, Scope, type Strategy, type StreamChunkEngine, type StreamChunkOptions, type StreamChunkStats, type Subscription, WasmBridge, WasmEngineRuntime, WasmFiberRegistryBridge, type WasmFiberRegistryStats, WasmPackFiberBridge, type ZPipeline, ZStream, andThen, bounded, broadcast, broadcastToHub, buffer, bufferP, chunks, chunksP, collectAllPar, compose, dropP, filterMapP, filterP, fromHub, groupedP, identity, makeHub, makeStreamChunker, mapChunks, mapChunksEffect, mapChunksEffectP, mapEffectP, mapP, race, raceWith, takeP, tapEffectP, via, zipPar };
|