@langchain/google-common 0.2.17 → 1.0.0
This diff compares the contents of publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
- package/CHANGELOG.md +17 -0
- package/LICENSE +6 -6
- package/dist/_virtual/rolldown_runtime.cjs +25 -0
- package/dist/auth.cjs +82 -116
- package/dist/auth.cjs.map +1 -0
- package/dist/auth.d.cts +46 -0
- package/dist/auth.d.cts.map +1 -0
- package/dist/auth.d.ts +41 -36
- package/dist/auth.d.ts.map +1 -0
- package/dist/auth.js +80 -110
- package/dist/auth.js.map +1 -0
- package/dist/chat_models.cjs +251 -466
- package/dist/chat_models.cjs.map +1 -0
- package/dist/chat_models.d.cts +98 -0
- package/dist/chat_models.d.cts.map +1 -0
- package/dist/chat_models.d.ts +87 -73
- package/dist/chat_models.d.ts.map +1 -0
- package/dist/chat_models.js +245 -457
- package/dist/chat_models.js.map +1 -0
- package/dist/connection.cjs +321 -466
- package/dist/connection.cjs.map +1 -0
- package/dist/connection.d.cts +109 -0
- package/dist/connection.d.cts.map +1 -0
- package/dist/connection.d.ts +98 -91
- package/dist/connection.d.ts.map +1 -0
- package/dist/connection.js +317 -459
- package/dist/connection.js.map +1 -0
- package/dist/embeddings.cjs +135 -186
- package/dist/embeddings.cjs.map +1 -0
- package/dist/embeddings.d.cts +44 -0
- package/dist/embeddings.d.cts.map +1 -0
- package/dist/embeddings.d.ts +38 -32
- package/dist/embeddings.d.ts.map +1 -0
- package/dist/embeddings.js +133 -181
- package/dist/embeddings.js.map +1 -0
- package/dist/experimental/media.cjs +380 -482
- package/dist/experimental/media.cjs.map +1 -0
- package/dist/experimental/media.d.cts +198 -0
- package/dist/experimental/media.d.cts.map +1 -0
- package/dist/experimental/media.d.ts +190 -202
- package/dist/experimental/media.d.ts.map +1 -0
- package/dist/experimental/media.js +369 -468
- package/dist/experimental/media.js.map +1 -0
- package/dist/experimental/utils/media_core.cjs +403 -517
- package/dist/experimental/utils/media_core.cjs.map +1 -0
- package/dist/experimental/utils/media_core.d.cts +215 -0
- package/dist/experimental/utils/media_core.d.cts.map +1 -0
- package/dist/experimental/utils/media_core.d.ts +171 -165
- package/dist/experimental/utils/media_core.d.ts.map +1 -0
- package/dist/experimental/utils/media_core.js +395 -506
- package/dist/experimental/utils/media_core.js.map +1 -0
- package/dist/index.cjs +58 -27
- package/dist/index.d.cts +13 -0
- package/dist/index.d.ts +13 -11
- package/dist/index.js +13 -11
- package/dist/llms.cjs +157 -244
- package/dist/llms.cjs.map +1 -0
- package/dist/llms.d.cts +72 -0
- package/dist/llms.d.cts.map +1 -0
- package/dist/llms.d.ts +64 -54
- package/dist/llms.d.ts.map +1 -0
- package/dist/llms.js +154 -238
- package/dist/llms.js.map +1 -0
- package/dist/output_parsers.cjs +148 -173
- package/dist/output_parsers.cjs.map +1 -0
- package/dist/output_parsers.d.cts +53 -0
- package/dist/output_parsers.d.cts.map +1 -0
- package/dist/output_parsers.d.ts +46 -42
- package/dist/output_parsers.d.ts.map +1 -0
- package/dist/output_parsers.js +146 -168
- package/dist/output_parsers.js.map +1 -0
- package/dist/types-anthropic.d.cts +229 -0
- package/dist/types-anthropic.d.cts.map +1 -0
- package/dist/types-anthropic.d.ts +221 -215
- package/dist/types-anthropic.d.ts.map +1 -0
- package/dist/types.cjs +51 -62
- package/dist/types.cjs.map +1 -0
- package/dist/types.d.cts +748 -0
- package/dist/types.d.cts.map +1 -0
- package/dist/types.d.ts +669 -656
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +46 -45
- package/dist/types.js.map +1 -0
- package/dist/utils/anthropic.cjs +598 -821
- package/dist/utils/anthropic.cjs.map +1 -0
- package/dist/utils/anthropic.js +597 -818
- package/dist/utils/anthropic.js.map +1 -0
- package/dist/utils/common.cjs +130 -211
- package/dist/utils/common.cjs.map +1 -0
- package/dist/utils/common.d.cts +13 -0
- package/dist/utils/common.d.cts.map +1 -0
- package/dist/utils/common.d.ts +12 -7
- package/dist/utils/common.d.ts.map +1 -0
- package/dist/utils/common.js +128 -207
- package/dist/utils/common.js.map +1 -0
- package/dist/utils/failed_handler.cjs +28 -30
- package/dist/utils/failed_handler.cjs.map +1 -0
- package/dist/utils/failed_handler.d.cts +9 -0
- package/dist/utils/failed_handler.d.cts.map +1 -0
- package/dist/utils/failed_handler.d.ts +8 -2
- package/dist/utils/failed_handler.d.ts.map +1 -0
- package/dist/utils/failed_handler.js +28 -28
- package/dist/utils/failed_handler.js.map +1 -0
- package/dist/utils/gemini.cjs +1020 -1488
- package/dist/utils/gemini.cjs.map +1 -0
- package/dist/utils/gemini.d.cts +51 -0
- package/dist/utils/gemini.d.cts.map +1 -0
- package/dist/utils/gemini.d.ts +51 -48
- package/dist/utils/gemini.d.ts.map +1 -0
- package/dist/utils/gemini.js +1015 -1479
- package/dist/utils/gemini.js.map +1 -0
- package/dist/utils/index.cjs +38 -23
- package/dist/utils/index.d.cts +8 -0
- package/dist/utils/index.d.ts +8 -7
- package/dist/utils/index.js +8 -7
- package/dist/utils/palm.d.cts +11 -0
- package/dist/utils/palm.d.cts.map +1 -0
- package/dist/utils/palm.d.ts +9 -4
- package/dist/utils/palm.d.ts.map +1 -0
- package/dist/utils/safety.cjs +13 -22
- package/dist/utils/safety.cjs.map +1 -0
- package/dist/utils/safety.d.cts +12 -0
- package/dist/utils/safety.d.cts.map +1 -0
- package/dist/utils/safety.d.ts +10 -4
- package/dist/utils/safety.d.ts.map +1 -0
- package/dist/utils/safety.js +13 -19
- package/dist/utils/safety.js.map +1 -0
- package/dist/utils/stream.cjs +296 -475
- package/dist/utils/stream.cjs.map +1 -0
- package/dist/utils/stream.d.cts +165 -0
- package/dist/utils/stream.d.cts.map +1 -0
- package/dist/utils/stream.d.ts +156 -131
- package/dist/utils/stream.d.ts.map +1 -0
- package/dist/utils/stream.js +293 -469
- package/dist/utils/stream.js.map +1 -0
- package/dist/utils/zod_to_gemini_parameters.cjs +43 -81
- package/dist/utils/zod_to_gemini_parameters.cjs.map +1 -0
- package/dist/utils/zod_to_gemini_parameters.d.cts +22 -0
- package/dist/utils/zod_to_gemini_parameters.d.cts.map +1 -0
- package/dist/utils/zod_to_gemini_parameters.d.ts +21 -6
- package/dist/utils/zod_to_gemini_parameters.d.ts.map +1 -0
- package/dist/utils/zod_to_gemini_parameters.js +40 -76
- package/dist/utils/zod_to_gemini_parameters.js.map +1 -0
- package/package.json +69 -85
- package/dist/types-anthropic.cjs +0 -2
- package/dist/types-anthropic.js +0 -1
- package/dist/utils/anthropic.d.ts +0 -4
- package/dist/utils/palm.cjs +0 -2
- package/dist/utils/palm.js +0 -1
- package/experimental/media.cjs +0 -1
- package/experimental/media.d.cts +0 -1
- package/experimental/media.d.ts +0 -1
- package/experimental/media.js +0 -1
- package/experimental/utils/media_core.cjs +0 -1
- package/experimental/utils/media_core.d.cts +0 -1
- package/experimental/utils/media_core.d.ts +0 -1
- package/experimental/utils/media_core.js +0 -1
- package/index.cjs +0 -1
- package/index.d.cts +0 -1
- package/index.d.ts +0 -1
- package/index.js +0 -1
- package/types.cjs +0 -1
- package/types.d.cts +0 -1
- package/types.d.ts +0 -1
- package/types.js +0 -1
- package/utils.cjs +0 -1
- package/utils.d.cts +0 -1
- package/utils.d.ts +0 -1
- package/utils.js +0 -1
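The list above shows the shape of the 1.0.0 build change: every module gains `.d.cts` declarations and `.cjs.map`/`.js.map` source maps, a shared `dist/_virtual/rolldown_runtime.cjs` helper appears, and the old root-level stub files (`package/index.cjs`, `package/utils.js`, and so on) are removed. Below is a minimal sketch of the two value helpers whose rebundled form appears in the `stream.cjs` diff that follows. It assumes they remain re-exported through a `utils` subpath resolved by the 1.0.0 `package.json` "exports" map (the map itself is not shown in this diff), so treat the import specifier as illustrative.

// Sketch, not package documentation. Assumption: complexValue/simpleValue are still
// exposed through the "utils" entry point after the root-level stubs were removed.
const { complexValue, simpleValue } = require("@langchain/google-common/utils");

// complexValue() wraps plain JSON into proto-style struct_val/list_val/int_val/
// float_val/string_val containers; simpleValue() unwraps the camelCase structVal/
// listVal/stringVal/boolVal forms coming back (see the implementations in the diff below).
const wrapped = complexValue({ temperature: 0.5, stop: ["\n"] });
// => { struct_val: { temperature: { float_val: 0.5 },
//                    stop: { list_val: [{ string_val: ["\n"] }] } } }

const plain = simpleValue({ structVal: { candidates: { stringVal: ["hello"] } } });
// => { candidates: "hello" }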
package/dist/utils/stream.cjs
CHANGED
@@ -1,484 +1,305 @@
-
-
- exports.ReadableSseJsonStream = exports.SseJsonStream = exports.ReadableSseStream = exports.SseStream = exports.ReadableJsonStream = exports.ReadableAbstractStream = exports.ComplexJsonStream = exports.JsonStream = void 0;
- exports.complexValue = complexValue;
- exports.simpleValue = simpleValue;
+
+ //#region src/utils/stream.ts
  function complexValue(value) {
-
-
-
-
-
-
-
-
-
-
-
-
-
- const v = value;
- Object.keys(v).forEach((key) => {
- ret[key] = complexValue(v[key]);
- });
- return { struct_val: ret };
- }
- }
- else if (typeof value === "number") {
- if (Number.isInteger(value)) {
- return { int_val: value };
- }
- else {
- return { float_val: value };
- }
- }
- else {
- return {
- string_val: [value],
- };
- }
+ if (value === null || typeof value === "undefined") return void 0;
+ else if (typeof value === "object") if (Array.isArray(value)) return { list_val: value.map((avalue) => complexValue(avalue)) };
+ else {
+ const ret = {};
+ const v = value;
+ Object.keys(v).forEach((key) => {
+ ret[key] = complexValue(v[key]);
+ });
+ return { struct_val: ret };
+ }
+ else if (typeof value === "number") if (Number.isInteger(value)) return { int_val: value };
+ else return { float_val: value };
+ else return { string_val: [value] };
  }
  function simpleValue(val) {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- }
- else {
- const ret = {};
- const struct = val;
- Object.keys(struct).forEach((key) => {
- ret[key] = simpleValue(struct[key]);
- });
- return ret;
- }
- }
- else if (Array.isArray(val)) {
- return val.map((aval) => simpleValue(aval));
- }
- else {
- return val;
- }
- }
- class JsonStream {
- constructor() {
- Object.defineProperty(this, "_buffer", {
- enumerable: true,
- configurable: true,
- writable: true,
- value: ""
- });
- Object.defineProperty(this, "_bufferOpen", {
- enumerable: true,
- configurable: true,
- writable: true,
- value: true
- });
- Object.defineProperty(this, "_firstRun", {
- enumerable: true,
- configurable: true,
- writable: true,
- value: true
- });
- // Set up a potential Promise that the handler can resolve.
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- Object.defineProperty(this, "_chunkResolution", {
- enumerable: true,
- configurable: true,
- writable: true,
- value: void 0
- });
- // If there is no Promise (it is null), the handler must add it to the queue
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- Object.defineProperty(this, "_chunkPending", {
- enumerable: true,
- configurable: true,
- writable: true,
- value: null
- });
- // A queue that will collect chunks while there is no Promise
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- Object.defineProperty(this, "_chunkQueue", {
- enumerable: true,
- configurable: true,
- writable: true,
- value: []
- });
- }
- /**
- * Add data to the buffer. This may cause chunks to be generated, if available.
- * @param data
- */
- appendBuffer(data) {
- this._buffer += data;
- // Our first time, skip to the opening of the array
- if (this._firstRun) {
- this._skipTo("[");
- this._firstRun = false;
- }
- this._parseBuffer();
- }
- /**
- * Indicate there is no more data that will be added to the text buffer.
- * This should be called when all the data has been read and added to indicate
- * that we should process everything remaining in the buffer.
- */
- closeBuffer() {
- this._bufferOpen = false;
- this._parseBuffer();
- }
- /**
- * Skip characters in the buffer till we get to the start of an object.
- * Then attempt to read a full object.
- * If we do read a full object, turn it into a chunk and send it to the chunk handler.
- * Repeat this for as much as we can.
- */
- _parseBuffer() {
- let obj = null;
- do {
- this._skipTo("{");
- obj = this._getFullObject();
- if (obj !== null) {
- const chunk = this._simplifyObject(obj);
- this._handleChunk(chunk);
- }
- } while (obj !== null);
- if (!this._bufferOpen) {
- // No more data will be added, and we have parsed everything we could,
- // so everything else is garbage.
- this._handleChunk(null);
- this._buffer = "";
- }
- }
- /**
- * If the string is present, move the start of the buffer to the first occurrence
- * of that string. This is useful for skipping over elements or parts that we're not
- * really interested in parsing. (ie - the opening characters, comma separators, etc.)
- * @param start The string to start the buffer with
- */
- _skipTo(start) {
- const index = this._buffer.indexOf(start);
- if (index > 0) {
- this._buffer = this._buffer.slice(index);
- }
- }
- /**
- * Given what is in the buffer, parse a single object out of it.
- * If a complete object isn't available, return null.
- * Assumes that we are at the start of an object to parse.
- */
- _getFullObject() {
- let ret = null;
- // Loop while we don't have something to return AND we have something in the buffer
- let index = 0;
- while (ret === null && this._buffer.length > index) {
- // Advance to the next close bracket after our current index
- index = this._buffer.indexOf("}", index + 1);
- // If we don't find one, exit with null
- if (index === -1) {
- return null;
- }
- // If we have one, try to turn it into an object to return
- try {
- const objStr = this._buffer.substring(0, index + 1);
- ret = JSON.parse(objStr);
- // We only get here if it parsed it ok
- // If we did turn it into an object, remove it from the buffer
- this._buffer = this._buffer.slice(index + 1);
- }
- catch (xx) {
- // It didn't parse it correctly, so we swallow the exception and continue
- }
- }
- return ret;
- }
- _simplifyObject(obj) {
- return obj;
- }
- /**
- * Register that we have another chunk available for consumption.
- * If we are waiting for a chunk, resolve the promise waiting for it immediately.
- * If not, then add it to the queue.
- * @param chunk
- */
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- _handleChunk(chunk) {
- if (this._chunkPending) {
- this._chunkResolution(chunk);
- this._chunkPending = null;
- }
- else {
- this._chunkQueue.push(chunk);
- }
- }
- /**
- * Get the next chunk that is coming from the stream.
- * This chunk may be null, usually indicating the last chunk in the stream.
- */
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- async nextChunk() {
- if (this._chunkQueue.length > 0) {
- // If there is data in the queue, return the next queue chunk
- return this._chunkQueue.shift();
- }
- else {
- // Otherwise, set up a promise that handleChunk will cause to be resolved
- this._chunkPending = new Promise((resolve) => {
- this._chunkResolution = resolve;
- });
- return this._chunkPending;
- }
- }
- /**
- * Is the stream done?
- * A stream is only done if all of the following are true:
- * - There is no more data to be added to the text buffer
- * - There is no more data in the text buffer
- * - There are no chunks that are waiting to be consumed
- */
- get streamDone() {
- return (!this._bufferOpen &&
- this._buffer.length === 0 &&
- this._chunkQueue.length === 0 &&
- this._chunkPending === null);
- }
- }
- exports.JsonStream = JsonStream;
- class ComplexJsonStream extends JsonStream {
- _simplifyObject(obj) {
- return simpleValue(obj);
- }
+ if (val && typeof val === "object" && !Array.isArray(val)) if (val.hasOwnProperty("stringVal")) return val.stringVal[0];
+ else if (val.hasOwnProperty("boolVal")) return val.boolVal[0];
+ else if (val.hasOwnProperty("listVal")) {
+ const { listVal } = val;
+ return listVal.map((aval) => simpleValue(aval));
+ } else if (val.hasOwnProperty("structVal")) {
+ const ret = {};
+ const struct = val.structVal;
+ Object.keys(struct).forEach((key) => {
+ ret[key] = simpleValue(struct[key]);
+ });
+ return ret;
+ } else {
+ const ret = {};
+ const struct = val;
+ Object.keys(struct).forEach((key) => {
+ ret[key] = simpleValue(struct[key]);
+ });
+ return ret;
+ }
+ else if (Array.isArray(val)) return val.map((aval) => simpleValue(aval));
+ else return val;
  }
+ var JsonStream = class {
+ _buffer = "";
+ _bufferOpen = true;
+ _firstRun = true;
+ /**
+ * Add data to the buffer. This may cause chunks to be generated, if available.
+ * @param data
+ */
+ appendBuffer(data) {
+ this._buffer += data;
+ if (this._firstRun) {
+ this._skipTo("[");
+ this._firstRun = false;
+ }
+ this._parseBuffer();
+ }
+ /**
+ * Indicate there is no more data that will be added to the text buffer.
+ * This should be called when all the data has been read and added to indicate
+ * that we should process everything remaining in the buffer.
+ */
+ closeBuffer() {
+ this._bufferOpen = false;
+ this._parseBuffer();
+ }
+ /**
+ * Skip characters in the buffer till we get to the start of an object.
+ * Then attempt to read a full object.
+ * If we do read a full object, turn it into a chunk and send it to the chunk handler.
+ * Repeat this for as much as we can.
+ */
+ _parseBuffer() {
+ let obj = null;
+ do {
+ this._skipTo("{");
+ obj = this._getFullObject();
+ if (obj !== null) {
+ const chunk = this._simplifyObject(obj);
+ this._handleChunk(chunk);
+ }
+ } while (obj !== null);
+ if (!this._bufferOpen) {
+ this._handleChunk(null);
+ this._buffer = "";
+ }
+ }
+ /**
+ * If the string is present, move the start of the buffer to the first occurrence
+ * of that string. This is useful for skipping over elements or parts that we're not
+ * really interested in parsing. (ie - the opening characters, comma separators, etc.)
+ * @param start The string to start the buffer with
+ */
+ _skipTo(start) {
+ const index = this._buffer.indexOf(start);
+ if (index > 0) this._buffer = this._buffer.slice(index);
+ }
+ /**
+ * Given what is in the buffer, parse a single object out of it.
+ * If a complete object isn't available, return null.
+ * Assumes that we are at the start of an object to parse.
+ */
+ _getFullObject() {
+ let ret = null;
+ let index = 0;
+ while (ret === null && this._buffer.length > index) {
+ index = this._buffer.indexOf("}", index + 1);
+ if (index === -1) return null;
+ try {
+ const objStr = this._buffer.substring(0, index + 1);
+ ret = JSON.parse(objStr);
+ this._buffer = this._buffer.slice(index + 1);
+ } catch {}
+ }
+ return ret;
+ }
+ _simplifyObject(obj) {
+ return obj;
+ }
+ _chunkResolution;
+ _chunkPending = null;
+ _chunkQueue = [];
+ /**
+ * Register that we have another chunk available for consumption.
+ * If we are waiting for a chunk, resolve the promise waiting for it immediately.
+ * If not, then add it to the queue.
+ * @param chunk
+ */
+ _handleChunk(chunk) {
+ if (this._chunkPending) {
+ this._chunkResolution(chunk);
+ this._chunkPending = null;
+ } else this._chunkQueue.push(chunk);
+ }
+ /**
+ * Get the next chunk that is coming from the stream.
+ * This chunk may be null, usually indicating the last chunk in the stream.
+ */
+ async nextChunk() {
+ if (this._chunkQueue.length > 0) return this._chunkQueue.shift();
+ else {
+ this._chunkPending = new Promise((resolve) => {
+ this._chunkResolution = resolve;
+ });
+ return this._chunkPending;
+ }
+ }
+ /**
+ * Is the stream done?
+ * A stream is only done if all of the following are true:
+ * - There is no more data to be added to the text buffer
+ * - There is no more data in the text buffer
+ * - There are no chunks that are waiting to be consumed
+ */
+ get streamDone() {
+ return !this._bufferOpen && this._buffer.length === 0 && this._chunkQueue.length === 0 && this._chunkPending === null;
+ }
+ };
+ var ComplexJsonStream = class extends JsonStream {
+ _simplifyObject(obj) {
+ return simpleValue(obj);
+ }
+ };
+ var ReadableAbstractStream = class {
+ baseStream;
+ decoder;
+ constructor(baseStream, body) {
+ this.baseStream = baseStream;
+ this.decoder = new TextDecoder("utf-8");
+ if (body) this.run(body);
+ else console.error("Unexpected empty body while streaming");
+ }
+ appendBuffer(data) {
+ return this.baseStream.appendBuffer(data);
+ }
+ closeBuffer() {
+ return this.baseStream.closeBuffer();
+ }
+ nextChunk() {
+ return this.baseStream.nextChunk();
+ }
+ get streamDone() {
+ return this.baseStream.streamDone;
+ }
+ async run(body) {
+ if (typeof body[Symbol.asyncIterator] === "function") {
+ for await (const value of body) {
+ const svalue = this.decoder.decode(value, { stream: true });
+ this.appendBuffer(svalue);
+ }
+ this.closeBuffer();
+ } else throw Error("Stream must implement async iterator.");
+ }
+ };
+ var ReadableJsonStream = class extends ReadableAbstractStream {
+ constructor(body) {
+ super(new JsonStream(), body);
+ }
+ };
+ var SseStream = class {
+ _buffer = "";
+ _bufferOpen = true;
+ appendBuffer(data) {
+ this._buffer += data;
+ this._parseBuffer();
+ }
+ closeBuffer() {
+ this._bufferOpen = false;
+ this._parseBuffer();
+ }
+ /**
+ * Attempt to load an entire event.
+ * For each entire event we load,
+ * send them to be handled.
+ */
+ _parseBuffer() {
+ const events = this._buffer.split(/\n\n/);
+ this._buffer = events.pop() ?? "";
+ events.forEach((event) => this._handleEvent(event.trim()));
+ if (!this._bufferOpen) {
+ this._handleEvent(null);
+ this._buffer = "";
+ }
+ }
+ /**
+ * Given an event string, get all the fields
+ * in the event. It is assumed there is one field
+ * per line, but that field names can be duplicated,
+ * indicating to append the new value to the previous value
+ * @param event
+ */
+ _parseEvent(event) {
+ if (!event || event.trim() === "") return null;
+ const ret = {};
+ const lines = event.split(/\n/);
+ lines.forEach((line) => {
+ const match = line.match(/^([^:]+): \s*(.+)\n*$/);
+ if (match && match.length === 3) {
+ const key = match[1];
+ const val = match[2];
+ const cur = ret[key] ?? "";
+ ret[key] = `${cur}${val}`;
+ }
+ });
+ return ret;
+ }
+ _chunkResolution;
+ _chunkPending = null;
+ _chunkQueue = [];
+ _handleEvent(event) {
+ const chunk = this._parseEvent(event);
+ if (this._chunkPending) {
+ this._chunkResolution(chunk);
+ this._chunkPending = null;
+ } else this._chunkQueue.push(chunk);
+ }
+ async nextChunk() {
+ if (this._chunkQueue.length > 0) return this._chunkQueue.shift();
+ else {
+ this._chunkPending = new Promise((resolve) => {
+ this._chunkResolution = resolve;
+ });
+ return this._chunkPending;
+ }
+ }
+ get streamDone() {
+ return !this._bufferOpen && this._buffer.length === 0 && this._chunkQueue.length === 0 && this._chunkPending === null;
+ }
+ };
+ var ReadableSseStream = class extends ReadableAbstractStream {
+ constructor(body) {
+ super(new SseStream(), body);
+ }
+ };
+ var SseJsonStream = class extends SseStream {
+ _jsonAttribute = "data";
+ constructor(jsonAttribute) {
+ super();
+ this._jsonAttribute = jsonAttribute ?? this._jsonAttribute;
+ }
+ async nextChunk() {
+ const eventRecord = await super.nextChunk();
+ const json = eventRecord?.[this._jsonAttribute];
+ if (!json) return null;
+ else return JSON.parse(json);
+ }
+ };
+ var ReadableSseJsonStream = class extends ReadableAbstractStream {
+ constructor(body) {
+ super(new SseJsonStream(), body);
+ }
+ };
+
+ //#endregion
  exports.ComplexJsonStream = ComplexJsonStream;
-
- constructor(baseStream, body) {
- Object.defineProperty(this, "baseStream", {
- enumerable: true,
- configurable: true,
- writable: true,
- value: void 0
- });
- Object.defineProperty(this, "decoder", {
- enumerable: true,
- configurable: true,
- writable: true,
- value: void 0
- });
- this.baseStream = baseStream;
- this.decoder = new TextDecoder("utf-8");
- if (body) {
- void this.run(body);
- }
- else {
- console.error("Unexpected empty body while streaming");
- }
- }
- appendBuffer(data) {
- return this.baseStream.appendBuffer(data);
- }
- closeBuffer() {
- return this.baseStream.closeBuffer();
- }
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- nextChunk() {
- return this.baseStream.nextChunk();
- }
- get streamDone() {
- return this.baseStream.streamDone;
- }
- // Should be a ReadableStream, but the Gaxios Readable stream isn't.
- // But both should support async iterators, so make sure of that.
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- async run(body) {
- if (typeof body[Symbol.asyncIterator] === "function") {
- for await (const value of body) {
- const svalue = this.decoder.decode(value, { stream: true });
- this.appendBuffer(svalue);
- }
- this.closeBuffer();
- }
- else {
- throw Error("Stream must implement async iterator.");
- }
- }
- }
+ exports.JsonStream = JsonStream;
  exports.ReadableAbstractStream = ReadableAbstractStream;
- class ReadableJsonStream extends ReadableAbstractStream {
- constructor(body) {
- super(new JsonStream(), body);
- }
- }
  exports.ReadableJsonStream = ReadableJsonStream;
-
- constructor() {
- Object.defineProperty(this, "_buffer", {
- enumerable: true,
- configurable: true,
- writable: true,
- value: ""
- });
- Object.defineProperty(this, "_bufferOpen", {
- enumerable: true,
- configurable: true,
- writable: true,
- value: true
- });
- // Set up a potential Promise that the handler can resolve.
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- Object.defineProperty(this, "_chunkResolution", {
- enumerable: true,
- configurable: true,
- writable: true,
- value: void 0
- });
- // If there is no Promise (it is null), the handler must add it to the queue
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- Object.defineProperty(this, "_chunkPending", {
- enumerable: true,
- configurable: true,
- writable: true,
- value: null
- });
- // A queue that will collect chunks while there is no Promise
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- Object.defineProperty(this, "_chunkQueue", {
- enumerable: true,
- configurable: true,
- writable: true,
- value: []
- });
- }
- appendBuffer(data) {
- this._buffer += data;
- this._parseBuffer();
- }
- closeBuffer() {
- this._bufferOpen = false;
- this._parseBuffer();
- }
- /**
- * Attempt to load an entire event.
- * For each entire event we load,
- * send them to be handled.
- */
- _parseBuffer() {
- const events = this._buffer.split(/\n\n/);
- this._buffer = events.pop() ?? "";
- events.forEach((event) => this._handleEvent(event.trim()));
- if (!this._bufferOpen) {
- // No more data will be added, and we have parsed
- // everything. So dump the rest.
- this._handleEvent(null);
- this._buffer = "";
- }
- }
- /**
- * Given an event string, get all the fields
- * in the event. It is assumed there is one field
- * per line, but that field names can be duplicated,
- * indicating to append the new value to the previous value
- * @param event
- */
- _parseEvent(event) {
- if (!event || event.trim() === "") {
- return null;
- }
- const ret = {};
- const lines = event.split(/\n/);
- lines.forEach((line) => {
- const match = line.match(/^([^:]+): \s*(.+)\n*$/);
- if (match && match.length === 3) {
- const key = match[1];
- const val = match[2];
- const cur = ret[key] ?? "";
- ret[key] = `${cur}${val}`;
- }
- });
- return ret;
- }
- _handleEvent(event) {
- const chunk = this._parseEvent(event);
- if (this._chunkPending) {
- this._chunkResolution(chunk);
- this._chunkPending = null;
- }
- else {
- this._chunkQueue.push(chunk);
- }
- }
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- async nextChunk() {
- if (this._chunkQueue.length > 0) {
- // If there is data in the queue, return the next queue chunk
- return this._chunkQueue.shift();
- }
- else {
- // Otherwise, set up a promise that handleChunk will cause to be resolved
- this._chunkPending = new Promise((resolve) => {
- this._chunkResolution = resolve;
- });
- return this._chunkPending;
- }
- }
- get streamDone() {
- return (!this._bufferOpen &&
- this._buffer.length === 0 &&
- this._chunkQueue.length === 0 &&
- this._chunkPending === null);
- }
- }
- exports.SseStream = SseStream;
- class ReadableSseStream extends ReadableAbstractStream {
- constructor(body) {
- super(new SseStream(), body);
- }
- }
+ exports.ReadableSseJsonStream = ReadableSseJsonStream;
  exports.ReadableSseStream = ReadableSseStream;
- class SseJsonStream extends SseStream {
- constructor(jsonAttribute) {
- super();
- Object.defineProperty(this, "_jsonAttribute", {
- enumerable: true,
- configurable: true,
- writable: true,
- value: "data"
- });
- this._jsonAttribute = jsonAttribute ?? this._jsonAttribute;
- }
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- async nextChunk() {
- const eventRecord = (await super.nextChunk());
- const json = eventRecord?.[this._jsonAttribute];
- if (!json) {
- return null;
- }
- else {
- return JSON.parse(json);
- }
- }
- }
  exports.SseJsonStream = SseJsonStream;
-
-
-
-
- }
- exports.ReadableSseJsonStream = ReadableSseJsonStream;
+ exports.SseStream = SseStream;
+ exports.complexValue = complexValue;
+ exports.simpleValue = simpleValue;
+ //# sourceMappingURL=stream.cjs.map
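Taken together, the classes in this file form a small streaming toolkit: `ReadableAbstractStream` pumps any async-iterable response body through a `TextDecoder` into a base stream, `JsonStream`/`ComplexJsonStream` pull whole JSON objects out of a streamed array, and `SseJsonStream`/`ReadableSseJsonStream` layer SSE event parsing plus JSON decoding on top. A minimal consumption sketch follows; the `@langchain/google-common/utils` import path is the same assumption as above, and `body` is only required to expose an async iterator of byte chunks, which is all `run()` checks for at runtime.

// Minimal usage sketch, not from the package docs.
const { ReadableSseJsonStream } = require("@langchain/google-common/utils");

async function readSseJson(body) {
  // The constructor wires an SseJsonStream into ReadableAbstractStream and starts
  // consuming `body` immediately via run().
  const stream = new ReadableSseJsonStream(body);
  // nextChunk() resolves with the JSON-parsed "data" field of each SSE event,
  // or null once the buffer is closed and drained.
  let chunk = await stream.nextChunk();
  while (chunk !== null) {
    console.log(chunk);
    chunk = await stream.nextChunk();
  }
  // True only when the buffer is closed and empty, with nothing queued or pending.
  console.log("stream done:", stream.streamDone);
}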