drizzle-cube 0.4.19 → 0.4.21
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/adapters/anthropic-BTkjgFpT.cjs +1 -0
- package/dist/adapters/anthropic-CTu9E801.js +126 -0
- package/dist/adapters/express/index.cjs +6 -6
- package/dist/adapters/express/index.js +73 -69
- package/dist/adapters/fastify/index.cjs +6 -6
- package/dist/adapters/fastify/index.js +133 -129
- package/dist/adapters/google-BAK9pnQf.cjs +2 -0
- package/dist/adapters/google-DficVAsJ.js +146 -0
- package/dist/adapters/{handler-BV4JuWNW.js → handler-9Rdn7zM2.js} +537 -457
- package/dist/adapters/handler-B-tEntiU.cjs +39 -0
- package/dist/adapters/hono/index.cjs +6 -6
- package/dist/adapters/hono/index.js +199 -195
- package/dist/adapters/index-BIMhF5KZ.cjs +23 -0
- package/dist/adapters/index-BgCeQBuN.cjs +2 -0
- package/dist/adapters/index-C45_meK_.js +719 -0
- package/dist/adapters/index-CFEJ62GJ.js +5337 -0
- package/dist/adapters/nextjs/index.cjs +5 -5
- package/dist/adapters/nextjs/index.js +215 -211
- package/dist/adapters/openai-CUSRuKTk.js +131 -0
- package/dist/adapters/openai-mLo2MCat.cjs +1 -0
- package/dist/client/components/AgenticNotebook/AgentChatPanel.d.ts +3 -0
- package/dist/client/components/AgenticNotebook/index.d.ts +6 -0
- package/dist/client/hooks/useAgentChat.d.ts +6 -0
- package/dist/client/index.js +730 -697
- package/dist/client/index.js.map +1 -1
- package/dist/client/styles.css +1 -1
- package/dist/client-bundle-stats.html +1 -1
- package/dist/server/anthropic-BTkjgFpT.cjs +1 -0
- package/dist/server/anthropic-CTu9E801.js +126 -0
- package/dist/server/google-BAK9pnQf.cjs +2 -0
- package/dist/server/google-DficVAsJ.js +146 -0
- package/dist/server/index-BIMhF5KZ.cjs +23 -0
- package/dist/server/index-BgCeQBuN.cjs +2 -0
- package/dist/server/index-C45_meK_.js +719 -0
- package/dist/server/index-CFEJ62GJ.js +5337 -0
- package/dist/server/index.cjs +51 -45
- package/dist/server/index.d.ts +49 -10
- package/dist/server/index.js +1978 -1898
- package/dist/server/openai-CUSRuKTk.js +131 -0
- package/dist/server/openai-mLo2MCat.cjs +1 -0
- package/package.json +12 -2
- package/dist/adapters/handler-D4MVKkVy.cjs +0 -33
|
@@ -0,0 +1,719 @@
|
|
|
1
|
+
// Enum-style constant maps used throughout the client. Each replaces the
// original `var X; (function(t){...})(X || (X = {}))` IIFE with a plain
// object literal carrying exactly the same keys and string values.

/** JSON-schema value types accepted in function declarations. */
const T = {
  STRING: "string",
  NUMBER: "number",
  INTEGER: "integer",
  BOOLEAN: "boolean",
  ARRAY: "array",
  OBJECT: "object",
};

/** Languages supported for executable code parts. */
const m = {
  LANGUAGE_UNSPECIFIED: "language_unspecified",
  PYTHON: "python",
};

/** Possible outcomes of server-side code execution. */
const w = {
  OUTCOME_UNSPECIFIED: "outcome_unspecified",
  OUTCOME_OK: "outcome_ok",
  OUTCOME_FAILED: "outcome_failed",
  OUTCOME_DEADLINE_EXCEEDED: "outcome_deadline_exceeded",
};

/** Roles allowed on a chat Content entry. */
const b = ["user", "model", "function", "system"];

/** Harm categories used in safety settings and ratings. */
const M = {
  HARM_CATEGORY_UNSPECIFIED: "HARM_CATEGORY_UNSPECIFIED",
  HARM_CATEGORY_HATE_SPEECH: "HARM_CATEGORY_HATE_SPEECH",
  HARM_CATEGORY_SEXUALLY_EXPLICIT: "HARM_CATEGORY_SEXUALLY_EXPLICIT",
  HARM_CATEGORY_HARASSMENT: "HARM_CATEGORY_HARASSMENT",
  HARM_CATEGORY_DANGEROUS_CONTENT: "HARM_CATEGORY_DANGEROUS_CONTENT",
  HARM_CATEGORY_CIVIC_INTEGRITY: "HARM_CATEGORY_CIVIC_INTEGRITY",
};

/** Thresholds at which a harm category blocks a response. */
const L = {
  HARM_BLOCK_THRESHOLD_UNSPECIFIED: "HARM_BLOCK_THRESHOLD_UNSPECIFIED",
  BLOCK_LOW_AND_ABOVE: "BLOCK_LOW_AND_ABOVE",
  BLOCK_MEDIUM_AND_ABOVE: "BLOCK_MEDIUM_AND_ABOVE",
  BLOCK_ONLY_HIGH: "BLOCK_ONLY_HIGH",
  BLOCK_NONE: "BLOCK_NONE",
};

/** Probability levels reported on safety ratings. */
const D = {
  HARM_PROBABILITY_UNSPECIFIED: "HARM_PROBABILITY_UNSPECIFIED",
  NEGLIGIBLE: "NEGLIGIBLE",
  LOW: "LOW",
  MEDIUM: "MEDIUM",
  HIGH: "HIGH",
};

/** Reasons an entire prompt can be blocked. */
const G = {
  BLOCKED_REASON_UNSPECIFIED: "BLOCKED_REASON_UNSPECIFIED",
  SAFETY: "SAFETY",
  OTHER: "OTHER",
};

/** Reasons generation stopped for a candidate. */
const R = {
  FINISH_REASON_UNSPECIFIED: "FINISH_REASON_UNSPECIFIED",
  STOP: "STOP",
  MAX_TOKENS: "MAX_TOKENS",
  SAFETY: "SAFETY",
  RECITATION: "RECITATION",
  LANGUAGE: "LANGUAGE",
  BLOCKLIST: "BLOCKLIST",
  PROHIBITED_CONTENT: "PROHIBITED_CONTENT",
  SPII: "SPII",
  MALFORMED_FUNCTION_CALL: "MALFORMED_FUNCTION_CALL",
  OTHER: "OTHER",
};

/** Task types for embedding requests. */
const x = {
  TASK_TYPE_UNSPECIFIED: "TASK_TYPE_UNSPECIFIED",
  RETRIEVAL_QUERY: "RETRIEVAL_QUERY",
  RETRIEVAL_DOCUMENT: "RETRIEVAL_DOCUMENT",
  SEMANTIC_SIMILARITY: "SEMANTIC_SIMILARITY",
  CLASSIFICATION: "CLASSIFICATION",
  CLUSTERING: "CLUSTERING",
};

/** Function-calling modes. */
const U = {
  MODE_UNSPECIFIED: "MODE_UNSPECIFIED",
  AUTO: "AUTO",
  ANY: "ANY",
  NONE: "NONE",
};

/** Dynamic-retrieval modes. */
const H = {
  MODE_UNSPECIFIED: "MODE_UNSPECIFIED",
  MODE_DYNAMIC: "MODE_DYNAMIC",
};
|
|
46
|
+
/** Base error for all client failures; prefixes messages for easy grepping. */
class u extends Error {
  constructor(e) {
    super(`[GoogleGenerativeAI Error]: ${e}`);
  }
}

/** Error that carries the model response which caused it (e.g. blocked). */
class I extends u {
  constructor(e, n) {
    super(e);
    this.response = n;
  }
}

/** Error for non-OK HTTP responses; keeps status details for callers. */
class q extends u {
  constructor(e, n, s, o) {
    super(e);
    this.status = n;
    this.statusText = s;
    this.errorDetails = o;
  }
}

/** Error for invalid request arguments supplied by the caller. */
class E extends u {
}

/** Error for requests aborted via signal or timeout. */
class P extends u {
}
|
|
65
|
+
// Networking constants: API host, default API version, and the SDK
// version/label reported in the x-goog-api-client header.
const X = "https://generativelanguage.googleapis.com";
const Q = "v1beta";
const z = "0.24.1";
const Z = "genai-js";

/** RPC task names appended to the model path in request URLs. */
const C = {
  GENERATE_CONTENT: "generateContent",
  STREAM_GENERATE_CONTENT: "streamGenerateContent",
  COUNT_TOKENS: "countTokens",
  EMBED_CONTENT: "embedContent",
  BATCH_EMBED_CONTENTS: "batchEmbedContents",
};

/**
 * Value object describing one API request; toString() renders the final
 * URL, honoring baseUrl/apiVersion overrides from request options.
 */
class tt {
  constructor(e, n, s, o, i) {
    this.model = e;
    this.task = n;
    this.apiKey = s;
    this.stream = o;
    this.requestOptions = i;
  }

  toString() {
    const apiVersion = this.requestOptions?.apiVersion || Q;
    const baseUrl = this.requestOptions?.baseUrl || X;
    let url = `${baseUrl}/${apiVersion}/${this.model}:${this.task}`;
    if (this.stream) {
      url += "?alt=sse"; // server-sent events for streaming responses
    }
    return url;
  }
}

/**
 * Builds the x-goog-api-client header value: an optional caller-supplied
 * apiClient token followed by "genai-js/<version>".
 */
function et(t) {
  const tokens = [];
  if (t?.apiClient) {
    tokens.push(t.apiClient);
  }
  tokens.push(`${Z}/${z}`);
  return tokens.join(" ");
}
|
|
85
|
+
/**
 * Assembles request headers: JSON content type, the SDK api-client tag, the
 * API key, plus any caller-supplied custom headers. Throws E when
 * customHeaders cannot be converted to Headers or uses a reserved name.
 */
async function nt(t) {
  const headers = new Headers();
  headers.append("Content-Type", "application/json");
  headers.append("x-goog-api-client", et(t.requestOptions));
  headers.append("x-goog-api-key", t.apiKey);
  let custom = t.requestOptions?.customHeaders;
  if (custom) {
    if (!(custom instanceof Headers)) {
      try {
        custom = new Headers(custom);
      } catch (o) {
        throw new E(`unable to convert customHeaders value ${JSON.stringify(custom)} to Headers: ${o.message}`);
      }
    }
    for (const [name, value] of custom.entries()) {
      // These two headers are managed by the SDK itself.
      if (name === "x-goog-api-key") {
        throw new E(`Cannot set reserved header name ${name}`);
      }
      if (name === "x-goog-api-client") {
        throw new E(`Header name ${name} can only be set using the apiClient field`);
      }
      headers.append(name, value);
    }
  }
  return headers;
}
|
|
107
|
+
/**
 * Builds the url + fetch options for a model RPC.
 * @param t model  @param e task  @param n apiKey  @param s stream flag
 * @param o serialized JSON body  @param i request options
 */
async function st(t, e, n, s, o, i) {
  const requestUrl = new tt(t, e, n, s, i);
  const fetchOptions = {
    ...rt(i), // abort signal / timeout handling
    method: "POST",
    headers: await nt(requestUrl),
    body: o,
  };
  return { url: requestUrl.toString(), fetchOptions };
}

/**
 * Issues a model RPC and returns the raw, status-checked Response. A custom
 * fetch implementation may be injected via `a` (defaults to global fetch).
 */
async function y(t, e, n, s, o, i = {}, a = fetch) {
  const { url: r, fetchOptions: d } = await st(t, e, n, s, o, i);
  return ot(r, d, a);
}
|
|
118
|
+
/**
 * Runs the fetch and normalizes failures: network-level errors are wrapped
 * by it() (which always throws), non-OK statuses are turned into q errors
 * by at().
 */
async function ot(t, e, n = fetch) {
  let response;
  try {
    response = await n(t, e);
  } catch (o) {
    it(o, t); // always throws
  }
  if (!response.ok) {
    await at(response, t);
  }
  return response;
}

/** Wraps a low-level fetch error, preserving the original stack trace. */
function it(t, e) {
  let wrapped = t;
  if (wrapped.name === "AbortError") {
    wrapped = new P(`Request aborted when fetching ${e.toString()}: ${t.message}`);
    wrapped.stack = t.stack;
  } else if (!(t instanceof q) && !(t instanceof E)) {
    wrapped = new u(`Error fetching from ${e.toString()}: ${t.message}`);
    wrapped.stack = t.stack;
  }
  throw wrapped;
}

/** Reads the error payload (best effort) and throws a q with status info. */
async function at(t, e) {
  let message = "";
  let details;
  try {
    const body = await t.json();
    message = body.error.message;
    if (body.error.details) {
      message += ` ${JSON.stringify(body.error.details)}`;
      details = body.error.details;
    }
  } catch {
    // Body was not JSON; fall through with an empty message.
  }
  throw new q(`Error fetching from ${e.toString()}: [${t.status} ${t.statusText}] ${message}`, t.status, t.statusText, details);
}
|
|
140
|
+
/**
 * Derives fetch options implementing timeout/cancellation. When either an
 * external signal or a non-negative timeout is configured, a fresh
 * AbortController is created that fires on whichever triggers first.
 */
function rt(t) {
  const options = {};
  const hasTimeout = t?.timeout >= 0;
  if (t?.signal !== void 0 || hasTimeout) {
    const controller = new AbortController();
    if (hasTimeout) {
      setTimeout(() => controller.abort(), t.timeout);
    }
    if (t?.signal) {
      t.signal.addEventListener("abort", () => {
        controller.abort();
      });
    }
    options.signal = controller.signal;
  }
  return options;
}
|
|
150
|
+
/**
 * Decorates a raw GenerateContentResponse with convenience accessors
 * text(), functionCall() (deprecated) and functionCalls(). Each accessor
 * throws I when the first candidate was blocked, and surfaces prompt-level
 * blocking via promptFeedback. Returns the same (mutated) object.
 */
function N(t) {
  // Shared guard used by all three accessors. Returns true when at least
  // one usable candidate exists; throws I for blocked candidates/prompts.
  const hasUsableCandidate = (what, unavailable) => {
    if (t.candidates && t.candidates.length > 0) {
      if (t.candidates.length > 1) {
        console.warn(`This response had ${t.candidates.length} candidates. Returning ${what} from the first candidate only. Access response.candidates directly to use the other candidates.`);
      }
      if (S(t.candidates[0])) {
        throw new I(`${g(t)}`, t);
      }
      return true;
    }
    if (t.promptFeedback) {
      throw new I(`${unavailable} not available. ${g(t)}`, t);
    }
    return false;
  };
  t.text = () => (hasUsableCandidate("text", "Text") ? ct(t) : "");
  t.functionCall = () => {
    if (hasUsableCandidate("function calls", "Function call")) {
      console.warn("response.functionCall() is deprecated. Use response.functionCalls() instead.");
      return F(t)[0];
    }
    return undefined;
  };
  t.functionCalls = () => (hasUsableCandidate("function calls", "Function call") ? F(t) : undefined);
  return t;
}
|
|
175
|
+
/**
 * Concatenates the text-bearing parts of the first candidate: plain text,
 * executable code (fenced with its language) and code-execution output
 * (fenced, no language). Returns "" when there is nothing to render.
 */
function ct(t) {
  const pieces = [];
  const parts = t.candidates?.[0].content?.parts;
  if (parts) {
    for (const part of parts) {
      if (part.text) {
        pieces.push(part.text);
      }
      if (part.executableCode) {
        pieces.push("\n```" + part.executableCode.language + "\n" + part.executableCode.code + "\n```\n");
      }
      if (part.codeExecutionResult) {
        pieces.push("\n```\n" + part.codeExecutionResult.output + "\n```\n");
      }
    }
  }
  return pieces.length > 0 ? pieces.join("") : "";
}

/**
 * Collects all functionCall parts from the first candidate.
 * Returns undefined (not []) when no function calls are present.
 */
function F(t) {
  const calls = [];
  const parts = t.candidates?.[0].content?.parts;
  if (parts) {
    for (const part of parts) {
      if (part.functionCall) {
        calls.push(part.functionCall);
      }
    }
  }
  if (calls.length > 0) {
    return calls;
  }
  // Implicitly returns undefined when no function calls were found.
}
|
|
193
|
+
// Finish reasons that make a candidate unusable for text extraction.
const dt = [R.RECITATION, R.SAFETY, R.LANGUAGE];

/** True when the candidate stopped for one of the "bad" finish reasons. */
function S(t) {
  return !!t.finishReason && dt.includes(t.finishReason);
}

/**
 * Produces a human-readable explanation of why a response is unusable,
 * covering both prompt-level blocking (promptFeedback) and candidate-level
 * blocking (finishReason). Returns "" when nothing was blocked.
 */
function g(t) {
  let message = "";
  if ((!t.candidates || t.candidates.length === 0) && t.promptFeedback) {
    message += "Response was blocked";
    if (t.promptFeedback?.blockReason) {
      message += ` due to ${t.promptFeedback.blockReason}`;
    }
    if (t.promptFeedback?.blockReasonMessage) {
      message += `: ${t.promptFeedback.blockReasonMessage}`;
    }
  } else if (t.candidates?.[0]) {
    const candidate = t.candidates[0];
    if (S(candidate)) {
      message += `Candidate was blocked due to ${candidate.finishReason}`;
      if (candidate.finishMessage) {
        message += `: ${candidate.finishMessage}`;
      }
    }
  }
  return message;
}
|
|
212
|
+
// tslib-style helper (__await): wraps a value so the async-generator driver
// below knows to await it before resuming the inner generator.
function p(t) {
  return this instanceof p ? (this.v = t, this) : new p(t);
}
// tslib-style helper (__asyncGenerator): drives a synchronous generator
// (which yields p(...) wrappers for awaits) as an async generator.
// `s` is the inner generator; `i` is a FIFO queue of pending
// [method, argument, resolve, reject] records so only one step runs at a time.
function lt(t, e, n) {
  if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
  var s = n.apply(t, e || []), o, i = [];
  return o = {}, a("next"), a("throw"), a("return"), o[Symbol.asyncIterator] = function() {
    return this;
  }, o;
  // Expose generator method `l` on the async iterator `o`, serialized
  // through the queue; only the first queued request starts work directly.
  function a(l) {
    s[l] && (o[l] = function(c) {
      return new Promise(function(f, O) {
        i.push([l, c, f, O]) > 1 || r(l, c);
      });
    });
  }
  // Advance the inner generator with method `l` / argument `c`; failures
  // reject the oldest queued request.
  function r(l, c) {
    try {
      d(s[l](c));
    } catch (f) {
      v(i[0][3], f);
    }
  }
  // A p-wrapped value means "await it, then resume"; any other value
  // settles the oldest queued request with the iterator result.
  function d(l) {
    l.value instanceof p ? Promise.resolve(l.value.v).then(h, _) : v(i[0][2], l);
  }
  function h(l) {
    r("next", l);
  }
  function _(l) {
    r("throw", l);
  }
  // Settle the head of the queue, then start the next queued step, if any.
  function v(l, c) {
    l(c), i.shift(), i.length && r(i[0][0], i[0][1]);
  }
}
|
|
248
|
+
// Matches one complete SSE "data: ..." event terminated by a blank line
// (any of the three newline conventions).
const $ = /^data\: (.*)(?:\n\n|\r\r|\r\n\r\n)/;
// Splits a streaming fetch Response into (a) an async iterable of parsed
// chunks and (b) a promise for the final aggregated response. The decoded
// stream is tee'd so both consumers see every chunk.
function ut(t) {
  const e = t.body.pipeThrough(new TextDecoderStream("utf8", { fatal: !0 })), n = gt(e), [s, o] = n.tee();
  return {
    stream: ht(s),
    response: ft(o)
  };
}
// Drains the chunk stream, then aggregates all chunks (Et) and attaches the
// response helper methods (N).
async function ft(t) {
  const e = [], n = t.getReader();
  for (; ; ) {
    const { done: s, value: o } = await n.read();
    if (s)
      return N(Et(e));
    e.push(o);
  }
}
// Async generator (via the lt/p driver above) yielding each chunk decorated
// with the response helper methods.
function ht(t) {
  return lt(this, arguments, function* () {
    const n = t.getReader();
    for (; ; ) {
      const { value: s, done: o } = yield p(n.read());
      if (o)
        break;
      yield yield p(N(s));
    }
  });
}
// Re-chunks the decoded text stream into parsed JSON objects, one per SSE
// event. Errors the stream on trailing non-whitespace at EOF or on
// unparseable JSON inside an event.
function gt(t) {
  const e = t.getReader();
  return new ReadableStream({
    start(s) {
      let o = "";
      return i();
      function i() {
        return e.read().then(({ value: a, done: r }) => {
          if (r) {
            if (o.trim()) {
              s.error(new u("Failed to parse stream"));
              return;
            }
            s.close();
            return;
          }
          o += a;
          let d = o.match($), h;
          for (; d; ) {
            try {
              h = JSON.parse(d[1]);
            } catch {
              s.error(new u(`Error parsing JSON response: "${d[1]}"`));
              return;
            }
            s.enqueue(h), o = o.substring(d[0].length), d = o.match($);
          }
          return i();
        }).catch((a) => {
          let r = a;
          throw r.stack = a.stack, r.name === "AbortError" ? r = new P("Request aborted when reading from the stream") : r = new u("Error reading from the stream"), r;
        });
      }
    }
  });
}
|
|
312
|
+
/**
 * Aggregates an array of streamed response chunks into one
 * GenerateContentResponse: content parts are concatenated per candidate,
 * while per-candidate metadata (finishReason, safetyRatings, ...) takes the
 * value from the most recent chunk carrying that candidate. promptFeedback
 * and usageMetadata come from the last chunk that provided them.
 */
function Et(t) {
  const lastChunk = t[t.length - 1];
  const merged = {
    promptFeedback: lastChunk?.promptFeedback
  };
  for (const chunk of t) {
    if (chunk.candidates) {
      let idx = 0;
      for (const candidate of chunk.candidates) {
        if (!merged.candidates) {
          merged.candidates = [];
        }
        if (!merged.candidates[idx]) {
          merged.candidates[idx] = { index: idx };
        }
        const target = merged.candidates[idx];
        // Latest chunk wins for per-candidate metadata (may reset fields
        // to undefined, mirroring the original behavior).
        target.citationMetadata = candidate.citationMetadata;
        target.groundingMetadata = candidate.groundingMetadata;
        target.finishReason = candidate.finishReason;
        target.finishMessage = candidate.finishMessage;
        target.safetyRatings = candidate.safetyRatings;
        if (candidate.content && candidate.content.parts) {
          if (!target.content) {
            target.content = {
              role: candidate.content.role || "user",
              parts: []
            };
          }
          // NOTE(review): a single accumulator object is reused and pushed
          // once per source part, so all parts contributed by one chunk
          // share the same object reference — this mirrors the original
          // code exactly; confirm upstream whether it is intended.
          const mergedPart = {};
          for (const part of candidate.content.parts) {
            if (part.text) mergedPart.text = part.text;
            if (part.functionCall) mergedPart.functionCall = part.functionCall;
            if (part.executableCode) mergedPart.executableCode = part.executableCode;
            if (part.codeExecutionResult) mergedPart.codeExecutionResult = part.codeExecutionResult;
            if (Object.keys(mergedPart).length === 0) mergedPart.text = "";
            target.content.parts.push(mergedPart);
          }
        }
        idx++;
      }
    }
    if (chunk.usageMetadata) {
      merged.usageMetadata = chunk.usageMetadata;
    }
  }
  return merged;
}
|
|
337
|
+
/**
 * Issues a streaming generateContent RPC.
 * @param t apiKey  @param e model  @param n request  @param s options
 * @returns the { stream, response } pair produced by ut().
 */
async function V(t, e, n, s) {
  const res = await y(
    e,
    C.STREAM_GENERATE_CONTENT,
    t,
    /* stream */
    !0,
    JSON.stringify(n),
    s
  );
  return ut(res);
}

/**
 * Issues a non-streaming generateContent RPC and wraps the parsed JSON
 * body with the response helper methods.
 */
async function J(t, e, n, s) {
  const res = await y(
    e,
    C.GENERATE_CONTENT,
    t,
    /* stream */
    !1,
    JSON.stringify(n),
    s
  );
  const body = await res.json();
  return {
    response: N(body)
  };
}
|
|
363
|
+
/**
 * Normalizes a systemInstruction input (string | Part | Content) into a
 * Content with role "system". Returns undefined for null/undefined or
 * unrecognized shapes, matching the original fall-through behavior.
 */
function W(t) {
  if (t == null) {
    return undefined;
  }
  if (typeof t == "string") {
    return { role: "system", parts: [{ text: t }] };
  }
  if (t.text) {
    return { role: "system", parts: [t] };
  }
  if (t.parts) {
    return t.role ? t : { role: "system", parts: t.parts };
  }
}
|
|
373
|
+
/**
 * Converts user message input (string | Array<string | Part>) into a single
 * Content, assigning the role based on the kinds of parts present (Ct).
 */
function A(t) {
  let parts = [];
  if (typeof t == "string") {
    parts = [{ text: t }];
  } else {
    for (const item of t) {
      parts.push(typeof item == "string" ? { text: item } : item);
    }
  }
  return Ct(parts);
}

/**
 * Assigns a role to a list of parts: functionResponse parts make a
 * "function" message, everything else a "user" message. Mixing the two in
 * one message, or providing no parts, throws u.
 */
function Ct(t) {
  const userContent = { role: "user", parts: [] };
  const functionContent = { role: "function", parts: [] };
  let hasUserPart = false;
  let hasFunctionPart = false;
  for (const part of t) {
    if ("functionResponse" in part) {
      functionContent.parts.push(part);
      hasFunctionPart = true;
    } else {
      userContent.parts.push(part);
      hasUserPart = true;
    }
  }
  if (hasUserPart && hasFunctionPart) {
    throw new u("Within a single message, FunctionResponse cannot be mixed with other type of part in the request for sending chat message.");
  }
  if (!hasUserPart && !hasFunctionPart) {
    throw new u("No content is provided for sending chat message.");
  }
  return hasUserPart ? userContent : functionContent;
}
|
|
393
|
+
/**
 * Normalizes countTokens() input into a { generateContentRequest } wrapper,
 * carrying over the model's configured generation settings from `e`.
 * Exactly one of `contents`, `generateContentRequest`, or bare content may
 * be supplied; supplying both contents and generateContentRequest throws E.
 */
function _t(t, e) {
  let request = {
    model: e?.model,
    generationConfig: e?.generationConfig,
    safetySettings: e?.safetySettings,
    tools: e?.tools,
    toolConfig: e?.toolConfig,
    systemInstruction: e?.systemInstruction,
    cachedContent: e?.cachedContent?.name,
    contents: []
  };
  const hasGenerateRequest = t.generateContentRequest != null;
  if (t.contents) {
    if (hasGenerateRequest) {
      throw new E("CountTokensRequest must have one of contents or generateContentRequest, not both.");
    }
    request.contents = t.contents;
  } else if (hasGenerateRequest) {
    request = { ...request, ...t.generateContentRequest };
  } else {
    request.contents = [A(t)];
  }
  return { generateContentRequest: request };
}
|
|
418
|
+
/**
 * Normalizes generateContent() input: a full request (with contents) passes
 * through unchanged; bare content is wrapped via A. Any systemInstruction
 * is normalized with W.
 */
function j(t) {
  let request;
  if (t.contents) {
    request = t;
  } else {
    request = { contents: [A(t)] };
  }
  if (t.systemInstruction) {
    request.systemInstruction = W(t.systemInstruction);
  }
  return request;
}

/** Normalizes embedContent() input: bare text/parts become { content }. */
function vt(t) {
  if (typeof t == "string" || Array.isArray(t)) {
    return { content: A(t) };
  }
  return t;
}
|
|
425
|
+
// Every part field a Content may carry, used when counting part kinds
// during history validation.
const K = [
  "text",
  "inlineData",
  "functionCall",
  "functionResponse",
  "executableCode",
  "codeExecutionResult",
];

// Which part kinds each chat role is allowed to use.
const Ot = {
  user: ["text", "inlineData"],
  function: ["functionResponse"],
  model: ["text", "functionCall", "executableCode", "codeExecutionResult"],
  // System instructions shouldn't be in history anyway.
  system: ["text"],
};
|
|
439
|
+
/**
 * Validates a chat history array: the first message must come from "user",
 * every message must have a known role (b) and a non-empty parts array, and
 * each role may only carry the part kinds listed in Ot. Throws u on any
 * violation; returns nothing on success.
 */
function It(t) {
  let seenFirst = false;
  for (const content of t) {
    const { role, parts } = content;
    if (!seenFirst && role !== "user") {
      throw new u(`First content should be with role 'user', got ${role}`);
    }
    if (!b.includes(role)) {
      throw new u(`Each item should include role field. Got ${role} but valid roles are: ${JSON.stringify(b)}`);
    }
    if (!Array.isArray(parts)) {
      throw new u("Content should have 'parts' property with an array of Parts");
    }
    if (parts.length === 0) {
      throw new u("Each Content should have at least one part");
    }
    // Tally how many parts of each kind this message carries.
    const counts = {
      text: 0,
      inlineData: 0,
      functionCall: 0,
      functionResponse: 0,
      fileData: 0,
      executableCode: 0,
      codeExecutionResult: 0
    };
    for (const part of parts) {
      for (const kind of K) {
        if (kind in part) {
          counts[kind] += 1;
        }
      }
    }
    const allowed = Ot[role];
    for (const kind of K) {
      if (!allowed.includes(kind) && counts[kind] > 0) {
        throw new u(`Content with role '${role}' can't contain '${kind}' part`);
      }
    }
    seenFirst = true;
  }
}
|
|
470
|
+
/**
 * Whether a response is usable enough to append to chat history: it must
 * have a first candidate whose content contains at least one part, and no
 * part may be empty ({}) or carry an empty text string.
 */
function Y(t) {
  if (t.candidates === void 0 || t.candidates.length === 0) {
    return false;
  }
  const content = t.candidates[0]?.content;
  if (content === void 0 || content.parts === void 0 || content.parts.length === 0) {
    return false;
  }
  for (const part of content.parts) {
    if (part === void 0 || Object.keys(part).length === 0) {
      return false;
    }
    if (part.text !== void 0 && part.text === "") {
      return false;
    }
  }
  return true;
}
|
|
482
|
+
// Sentinel message used inside sendMessageStream(): the user-facing stream
// promise already rejects with the real error, so the internal
// history-updating chain rethrows this marker to avoid double reporting.
const B = "SILENT_ERROR";
// ChatSession: multi-turn chat state. `_sendPromise` serializes sends so
// history is appended strictly in request order; `_history` holds the
// validated Content turns.
class Rt {
  // e: apiKey, n: model name, s: StartChatParams (may carry initial
  // history, validated via It), o: request options.
  constructor(e, n, s, o = {}) {
    this.model = n, this.params = s, this._requestOptions = o, this._history = [], this._sendPromise = Promise.resolve(), this._apiKey = e, s?.history && (It(s.history), this._history = s.history);
  }
  /**
   * Gets the chat history so far. Blocked prompts are not added to history.
   * Blocked candidates are not added to history, nor are the prompts that
   * generated them.
   */
  async getHistory() {
    // Wait for any in-flight send so history is up to date before returning.
    return await this._sendPromise, this._history;
  }
  /**
   * Sends a chat message and receives a non-streaming
   * {@link GenerateContentResult}.
   *
   * Fields set in the optional {@link SingleRequestOptions} parameter will
   * take precedence over the {@link RequestOptions} values provided to
   * {@link GoogleGenerativeAI.getGenerativeModel }.
   */
  async sendMessage(e, n = {}) {
    var s, o, i, a, r, d;
    await this._sendPromise;
    // Build a full request from the session's configured params plus the
    // accumulated history and the new user content `h`.
    const h = A(e), _ = {
      safetySettings: (s = this.params) === null || s === void 0 ? void 0 : s.safetySettings,
      generationConfig: (o = this.params) === null || o === void 0 ? void 0 : o.generationConfig,
      tools: (i = this.params) === null || i === void 0 ? void 0 : i.tools,
      toolConfig: (a = this.params) === null || a === void 0 ? void 0 : a.toolConfig,
      systemInstruction: (r = this.params) === null || r === void 0 ? void 0 : r.systemInstruction,
      cachedContent: (d = this.params) === null || d === void 0 ? void 0 : d.cachedContent,
      contents: [...this._history, h]
    }, v = Object.assign(Object.assign({}, this._requestOptions), n);
    let l;
    // Chain onto _sendPromise so concurrent sendMessage calls run in order;
    // only valid responses (Y) are appended to history.
    return this._sendPromise = this._sendPromise.then(() => J(this._apiKey, this.model, _, v)).then((c) => {
      var f;
      if (Y(c.response)) {
        this._history.push(h);
        const O = Object.assign({
          parts: [],
          // Response seems to come back without a role set.
          role: "model"
        }, (f = c.response.candidates) === null || f === void 0 ? void 0 : f[0].content);
        this._history.push(O);
      } else {
        const O = g(c.response);
        O && console.warn(`sendMessage() was unsuccessful. ${O}. Inspect response object for details.`);
      }
      l = c;
    }).catch((c) => {
      // Reset the chain so a failed send doesn't wedge future sends.
      throw this._sendPromise = Promise.resolve(), c;
    }), await this._sendPromise, l;
  }
  /**
   * Sends a chat message and receives the response as a
   * {@link GenerateContentStreamResult} containing an iterable stream
   * and a response promise.
   *
   * Fields set in the optional {@link SingleRequestOptions} parameter will
   * take precedence over the {@link RequestOptions} values provided to
   * {@link GoogleGenerativeAI.getGenerativeModel }.
   */
  async sendMessageStream(e, n = {}) {
    var s, o, i, a, r, d;
    await this._sendPromise;
    const h = A(e), _ = {
      safetySettings: (s = this.params) === null || s === void 0 ? void 0 : s.safetySettings,
      generationConfig: (o = this.params) === null || o === void 0 ? void 0 : o.generationConfig,
      tools: (i = this.params) === null || i === void 0 ? void 0 : i.tools,
      toolConfig: (a = this.params) === null || a === void 0 ? void 0 : a.toolConfig,
      systemInstruction: (r = this.params) === null || r === void 0 ? void 0 : r.systemInstruction,
      cachedContent: (d = this.params) === null || d === void 0 ? void 0 : d.cachedContent,
      contents: [...this._history, h]
    }, v = Object.assign(Object.assign({}, this._requestOptions), n), l = V(this._apiKey, this.model, _, v);
    // The stream result `l` is returned immediately; the internal chain
    // only updates history once the aggregated response resolves.
    // NOTE(review): the first catch replaces the original error with the
    // SILENT_ERROR marker (the caller sees the real error via `l`); the
    // final catch suppresses the marker and logs anything else.
    return this._sendPromise = this._sendPromise.then(() => l).catch((c) => {
      throw new Error(B);
    }).then((c) => c.response).then((c) => {
      if (Y(c)) {
        this._history.push(h);
        const f = Object.assign({}, c.candidates[0].content);
        f.role || (f.role = "model"), this._history.push(f);
      } else {
        const f = g(c);
        f && console.warn(`sendMessageStream() was unsuccessful. ${f}. Inspect response object for details.`);
      }
    }).catch((c) => {
      c.message !== B && console.error(c);
    }), l;
  }
}
|
|
572
|
+
/** countTokens RPC: POSTs the request and returns the parsed JSON body. */
async function pt(t, e, n, s) {
  const res = await y(e, C.COUNT_TOKENS, t, !1, JSON.stringify(n), s);
  return res.json();
}

/** embedContent RPC: POSTs the request and returns the parsed JSON body. */
async function At(t, e, n, s) {
  const res = await y(e, C.EMBED_CONTENT, t, !1, JSON.stringify(n), s);
  return res.json();
}

/**
 * batchEmbedContents RPC: stamps the model name onto every sub-request
 * before POSTing, then returns the parsed JSON body.
 */
async function yt(t, e, n, s) {
  const requests = n.requests.map((req) => ({ ...req, model: e }));
  const res = await y(e, C.BATCH_EMBED_CONTENTS, t, !1, JSON.stringify({ requests }), s);
  return res.json();
}
|
|
582
|
+
class k {
|
|
583
|
+
constructor(e, n, s = {}) {
|
|
584
|
+
this.apiKey = e, this._requestOptions = s, n.model.includes("/") ? this.model = n.model : this.model = `models/${n.model}`, this.generationConfig = n.generationConfig || {}, this.safetySettings = n.safetySettings || [], this.tools = n.tools, this.toolConfig = n.toolConfig, this.systemInstruction = W(n.systemInstruction), this.cachedContent = n.cachedContent;
|
|
585
|
+
}
|
|
586
|
+
/**
|
|
587
|
+
* Makes a single non-streaming call to the model
|
|
588
|
+
* and returns an object containing a single {@link GenerateContentResponse}.
|
|
589
|
+
*
|
|
590
|
+
* Fields set in the optional {@link SingleRequestOptions} parameter will
|
|
591
|
+
* take precedence over the {@link RequestOptions} values provided to
|
|
592
|
+
* {@link GoogleGenerativeAI.getGenerativeModel }.
|
|
593
|
+
*/
|
|
594
|
+
async generateContent(e, n = {}) {
|
|
595
|
+
var s;
|
|
596
|
+
const o = j(e), i = Object.assign(Object.assign({}, this._requestOptions), n);
|
|
597
|
+
return J(this.apiKey, this.model, Object.assign({ generationConfig: this.generationConfig, safetySettings: this.safetySettings, tools: this.tools, toolConfig: this.toolConfig, systemInstruction: this.systemInstruction, cachedContent: (s = this.cachedContent) === null || s === void 0 ? void 0 : s.name }, o), i);
|
|
598
|
+
}
|
|
599
|
+
/**
|
|
600
|
+
* Makes a single streaming call to the model and returns an object
|
|
601
|
+
* containing an iterable stream that iterates over all chunks in the
|
|
602
|
+
* streaming response as well as a promise that returns the final
|
|
603
|
+
* aggregated response.
|
|
604
|
+
*
|
|
605
|
+
* Fields set in the optional {@link SingleRequestOptions} parameter will
|
|
606
|
+
* take precedence over the {@link RequestOptions} values provided to
|
|
607
|
+
* {@link GoogleGenerativeAI.getGenerativeModel }.
|
|
608
|
+
*/
|
|
609
|
+
async generateContentStream(e, n = {}) {
|
|
610
|
+
var s;
|
|
611
|
+
const o = j(e), i = Object.assign(Object.assign({}, this._requestOptions), n);
|
|
612
|
+
return V(this.apiKey, this.model, Object.assign({ generationConfig: this.generationConfig, safetySettings: this.safetySettings, tools: this.tools, toolConfig: this.toolConfig, systemInstruction: this.systemInstruction, cachedContent: (s = this.cachedContent) === null || s === void 0 ? void 0 : s.name }, o), i);
|
|
613
|
+
}
|
|
614
|
+
/**
|
|
615
|
+
* Gets a new {@link ChatSession} instance which can be used for
|
|
616
|
+
* multi-turn chats.
|
|
617
|
+
*/
|
|
618
|
+
startChat(e) {
|
|
619
|
+
var n;
|
|
620
|
+
return new Rt(this.apiKey, this.model, Object.assign({ generationConfig: this.generationConfig, safetySettings: this.safetySettings, tools: this.tools, toolConfig: this.toolConfig, systemInstruction: this.systemInstruction, cachedContent: (n = this.cachedContent) === null || n === void 0 ? void 0 : n.name }, e), this._requestOptions);
|
|
621
|
+
}
|
|
622
|
+
/**
|
|
623
|
+
* Counts the tokens in the provided request.
|
|
624
|
+
*
|
|
625
|
+
* Fields set in the optional {@link SingleRequestOptions} parameter will
|
|
626
|
+
* take precedence over the {@link RequestOptions} values provided to
|
|
627
|
+
* {@link GoogleGenerativeAI.getGenerativeModel }.
|
|
628
|
+
*/
|
|
629
|
+
async countTokens(e, n = {}) {
|
|
630
|
+
const s = _t(e, {
|
|
631
|
+
model: this.model,
|
|
632
|
+
generationConfig: this.generationConfig,
|
|
633
|
+
safetySettings: this.safetySettings,
|
|
634
|
+
tools: this.tools,
|
|
635
|
+
toolConfig: this.toolConfig,
|
|
636
|
+
systemInstruction: this.systemInstruction,
|
|
637
|
+
cachedContent: this.cachedContent
|
|
638
|
+
}), o = Object.assign(Object.assign({}, this._requestOptions), n);
|
|
639
|
+
return pt(this.apiKey, this.model, s, o);
|
|
640
|
+
}
|
|
641
|
+
/**
|
|
642
|
+
* Embeds the provided content.
|
|
643
|
+
*
|
|
644
|
+
* Fields set in the optional {@link SingleRequestOptions} parameter will
|
|
645
|
+
* take precedence over the {@link RequestOptions} values provided to
|
|
646
|
+
* {@link GoogleGenerativeAI.getGenerativeModel }.
|
|
647
|
+
*/
|
|
648
|
+
async embedContent(e, n = {}) {
|
|
649
|
+
const s = vt(e), o = Object.assign(Object.assign({}, this._requestOptions), n);
|
|
650
|
+
return At(this.apiKey, this.model, s, o);
|
|
651
|
+
}
|
|
652
|
+
/**
|
|
653
|
+
* Embeds an array of {@link EmbedContentRequest}s.
|
|
654
|
+
*
|
|
655
|
+
* Fields set in the optional {@link SingleRequestOptions} parameter will
|
|
656
|
+
* take precedence over the {@link RequestOptions} values provided to
|
|
657
|
+
* {@link GoogleGenerativeAI.getGenerativeModel }.
|
|
658
|
+
*/
|
|
659
|
+
async batchEmbedContents(e, n = {}) {
|
|
660
|
+
const s = Object.assign(Object.assign({}, this._requestOptions), n);
|
|
661
|
+
return yt(this.apiKey, this.model, e, s);
|
|
662
|
+
}
|
|
663
|
+
}
|
|
664
|
+
class St {
  // Top-level entry point (exported as GoogleGenerativeAI): holds the API
  // key and hands out GenerativeModel instances.
  constructor(e) {
    this.apiKey = e;
  }
  /**
   * Gets a {@link GenerativeModel} instance for the provided model name.
   */
  getGenerativeModel(e, n) {
    if (!e.model)
      throw new u("Must provide a model name. Example: genai.getGenerativeModel({ model: 'my-model-name' })");
    return new k(this.apiKey, e, n);
  }
  /**
   * Creates a {@link GenerativeModel} instance from provided content cache.
   */
  getGenerativeModelFromCachedContent(e, n, s) {
    if (!e.name)
      throw new E("Cached content must contain a `name` field.");
    if (!e.model)
      throw new E("Cached content must contain a `model` field.");
    // Reject modelParams fields that disagree with the cached content.
    for (const field of ["model", "systemInstruction"]) {
      const requested = n?.[field];
      const cached = e[field];
      if (!requested || !cached || requested === cached)
        continue;
      if (field === "model") {
        // Model names may or may not carry a "models/" prefix; compare the
        // bare names before declaring a mismatch.
        const bare = (name) => name.startsWith("models/") ? name.replace("models/", "") : name;
        if (bare(n.model) === bare(e.model))
          continue;
      }
      throw new E(`Different value for "${field}" specified in modelParams (${n[field]}) and cachedContent (${e[field]})`);
    }
    // Cached-content fields override whatever was passed in modelParams.
    const mergedParams = {
      ...n,
      model: e.model,
      tools: e.tools,
      toolConfig: e.toolConfig,
      systemInstruction: e.systemInstruction,
      cachedContent: e
    };
    return new k(this.apiKey, mergedParams, s);
  }
}
|
|
698
|
+
export {
|
|
699
|
+
G as BlockReason,
|
|
700
|
+
Rt as ChatSession,
|
|
701
|
+
H as DynamicRetrievalMode,
|
|
702
|
+
m as ExecutableCodeLanguage,
|
|
703
|
+
R as FinishReason,
|
|
704
|
+
U as FunctionCallingMode,
|
|
705
|
+
k as GenerativeModel,
|
|
706
|
+
St as GoogleGenerativeAI,
|
|
707
|
+
P as GoogleGenerativeAIAbortError,
|
|
708
|
+
u as GoogleGenerativeAIError,
|
|
709
|
+
q as GoogleGenerativeAIFetchError,
|
|
710
|
+
E as GoogleGenerativeAIRequestInputError,
|
|
711
|
+
I as GoogleGenerativeAIResponseError,
|
|
712
|
+
L as HarmBlockThreshold,
|
|
713
|
+
M as HarmCategory,
|
|
714
|
+
D as HarmProbability,
|
|
715
|
+
w as Outcome,
|
|
716
|
+
b as POSSIBLE_ROLES,
|
|
717
|
+
T as SchemaType,
|
|
718
|
+
x as TaskType
|
|
719
|
+
};
|