prompt-api-polyfill 0.1.0 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +143 -66
- package/json-schema-converter.js +3 -1
- package/multimodal-converter.js +138 -12
- package/package.json +19 -4
- package/prompt-api-polyfill.js +478 -444
package/prompt-api-polyfill.js
CHANGED
```diff
@@ -1,548 +1,582 @@
 /**
  * Polyfill for the Prompt API (`LanguageModel`)
- *
+ * Backends:
+ * - Firebase AI Logic (via `firebase/ai`)
+ * - Google Gemini API (via `@google/generative-ai`)
+ * - OpenAI API (via `openai`)
+ *
  * Spec: https://github.com/webmachinelearning/prompt-api/blob/main/README.md
  *
- *
+ * Instructions:
  * 1. Include this script in your HTML type="module".
- * 2.
+ * 2. Configure the backend:
+ *    - For Firebase: Define `window.FIREBASE_CONFIG`.
+ *    - For Gemini: Define `window.GEMINI_CONFIG`.
+ *    - For OpenAI: Define `window.OPENAI_CONFIG`.
  */

-import
-import {
-  getAI,
-  getGenerativeModel,
-  GoogleAIBackend,
-  InferenceMode,
-} from 'https://esm.run/firebase/ai';
-
-import './async-iterator-polyfill.js'; // Still needed for Safari 26.2.
+import './async-iterator-polyfill.js';
 import MultimodalConverter from './multimodal-converter.js';
 import { convertJsonSchemaToVertexSchema } from './json-schema-converter.js';

-
-
-
```
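0.2.0 drops the hard-wired Firebase import in favor of backend modules selected through global config objects. A minimal setup sketch — the only field the polyfill itself requires is `apiKey` (see `#getBackendInfo` below), so anything else about the config shape is backend-specific:

```js
// Configure exactly one backend before importing the polyfill; the first
// config object that exposes an `apiKey` wins, checked in order:
// FIREBASE_CONFIG, GEMINI_CONFIG, OPENAI_CONFIG.
window.GEMINI_CONFIG = { apiKey: 'YOUR_GEMINI_API_KEY' };
await import('./prompt-api-polyfill.js');
```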
```diff
+// --- Helper to convert initial History ---
+async function convertToHistory(prompts) {
+  const history = [];
+  for (const p of prompts) {
+    const role = p.role === 'assistant' ? 'model' : 'user';
+    let parts = [];
+
+    if (Array.isArray(p.content)) {
+      // Mixed content
+      for (const item of p.content) {
+        if (item.type === 'text') {
+          parts.push({ text: item.value || item.text || '' });
+        } else {
+          const part = await MultimodalConverter.convert(item.type, item.value);
+          parts.push(part);
+        }
+      }
+    } else {
+      // Simple string
+      parts.push({ text: p.content });
+    }
+    history.push({ role, parts });
   }
+  return history;
+}

-
-
-
-
-
-
```
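For orientation, the new backend-neutral `convertToHistory` (successor to 0.1.0's Firebase-specific `convertToFirebaseHistory`) maps spec-style prompts onto `{ role, parts }` records. Inside the module it behaves roughly like this:

```js
const history = await convertToHistory([
  { role: 'user', content: 'Hello' },
  { role: 'assistant', content: 'Hi! How can I help?' },
]);
// history:
// [
//   { role: 'user',  parts: [{ text: 'Hello' }] },
//   { role: 'model', parts: [{ text: 'Hi! How can I help?' }] }, // assistant → model
// ]
```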
```diff
+/**
+ * Main LanguageModel Class
+ */
+export class LanguageModel extends EventTarget {
+  #backend;
+  #model;
+  #history;
+  #options;
+  #inCloudParams;
+  #destroyed;
+  #inputUsage;
+  #topK;
+  #temperature;
+  #onquotaoverflow;
+
+  constructor(backend, model, initialHistory, options = {}, inCloudParams) {
+    super();
+    this.#backend = backend;
+    this.#model = model;
+    this.#history = initialHistory || [];
+    this.#options = options;
+    this.#inCloudParams = inCloudParams;
+    this.#destroyed = false;
+    this.#inputUsage = 0;
+
+    this.#topK = options.topK;
+    this.#temperature = options.temperature;
   }

-
-
-
-
-
-
-
-
-
-
-
-
-      if (Array.isArray(p.content)) {
-        // Mixed content
-        for (const item of p.content) {
-          if (item.type === 'text') {
-            parts.push({ text: item.value || item.text || '' });
-          } else {
-            const part = await MultimodalConverter.convert(
-              item.type,
-              item.value
-            );
-            parts.push(part);
-          }
-        }
-      } else {
-        // Simple string
-        parts.push({ text: p.content });
-      }
-      history.push({ role, parts });
-    }
-    return history;
+  get inputUsage() {
+    return this.#inputUsage;
+  }
+  get inputQuota() {
+    return 1000000;
+  }
+  get topK() {
+    return this.#topK;
+  }
+  get temperature() {
+    return this.#temperature;
   }

-
-
-
-class LanguageModel extends EventTarget {
-  #model;
-  #history;
-  #options;
-  #inCloudParams;
-  #destroyed;
-  #inputUsage;
-  #topK;
-  #temperature;
-  #onquotaoverflow;
-
-  constructor(model, initialHistory, options = {}, inCloudParams) {
-    super();
-    this.#model = model;
-    this.#history = initialHistory || [];
-    this.#options = options;
-    this.#inCloudParams = inCloudParams;
-    this.#destroyed = false;
-    this.#inputUsage = 0;
-
-    this.#topK = options.topK;
-    this.#temperature = options.temperature;
-  }
+  get onquotaoverflow() {
+    return this.#onquotaoverflow;
+  }

-
-
+  set onquotaoverflow(handler) {
+    if (this.#onquotaoverflow) {
+      this.removeEventListener('quotaoverflow', this.#onquotaoverflow);
     }
-
-
+    this.#onquotaoverflow = handler;
+    if (typeof handler === 'function') {
+      this.addEventListener('quotaoverflow', handler);
     }
-
-
+  }
+
+  static async availability(options = {}) {
+    await LanguageModel.#validateOptions(options);
+    const backendClass = await LanguageModel.#getBackendClass();
+    return backendClass.availability(options);
+  }
+
+  static #backends = [
+    {
+      config: 'FIREBASE_CONFIG',
+      path: './backends/firebase.js',
+    },
+    {
+      config: 'GEMINI_CONFIG',
+      path: './backends/gemini.js',
+    },
+    {
+      config: 'OPENAI_CONFIG',
+      path: './backends/openai.js',
+    },
+  ];
+
+  static #getBackendInfo() {
+    for (const b of LanguageModel.#backends) {
+      const config = window[b.config];
+      if (config && config.apiKey) {
+        return { ...b, configValue: config };
+      }
+    }
+    throw new DOMException(
+      'Prompt API Polyfill: No backend configuration found. Please set window.FIREBASE_CONFIG, window.GEMINI_CONFIG, or window.OPENAI_CONFIG.',
+      'NotSupportedError'
+    );
+  }
+
+  static async #getBackendClass() {
+    const info = LanguageModel.#getBackendInfo();
+    return (await import(/* @vite-ignore */ info.path)).default;
+  }
+
```
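The `./backends/*.js` modules are dynamically imported and are not part of this file's diff. Pieced together from the call sites in this class, each module's default export plausibly follows the contract sketched below — the class name and method bodies are illustrative stand-ins, not the package's actual backend code:

```js
// Hypothetical backend module illustrating the contract this class relies on.
export default class ExampleBackend {
  // Used by LanguageModel.availability().
  static async availability(options) {
    return 'available'; // or 'unavailable' | 'downloadable' | 'downloading'
  }
  constructor(config) {
    this.config = config; // the window.*_CONFIG object, e.g. { apiKey }
  }
  get modelName() {
    return 'example-model'; // consumed as inCloudParams.model in create()
  }
  createSession(options, inCloudParams) {
    return this; // opaque handle stored as #model
  }
  async countTokens(contents) {
    // Rough stand-in: character count over all text parts.
    return contents
      .flatMap((c) => c.parts)
      .reduce((n, p) => n + (p.text?.length ?? 0), 0);
  }
  async generateContent(contents) {
    return { text: 'stub response', usage: undefined };
  }
  async *generateContentStream(contents) {
    // Chunks must expose text() and may expose usageMetadata.totalTokenCount.
    yield { text: () => 'stub ', usageMetadata: { totalTokenCount: 2 } };
    yield { text: () => 'response' };
  }
}
```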
```diff
+  static async #validateOptions(options = {}) {
+    const { maxTemperature, maxTopK } = await LanguageModel.params();
+
+    const hasTemperature = Object.prototype.hasOwnProperty.call(
+      options,
+      'temperature'
+    );
+    const hasTopK = Object.prototype.hasOwnProperty.call(options, 'topK');
+
+    if (hasTemperature !== hasTopK) {
+      throw new DOMException(
+        'Initializing a new session must either specify both topK and temperature, or neither of them.',
+        'NotSupportedError'
+      );
     }
-
-
+
+    // If neither temperature nor topK are provided, nothing to validate.
+    if (!hasTemperature && !hasTopK) {
+      return;
     }
 
-
-
+    const { temperature, topK } = options;
+
+    if (
+      typeof temperature !== 'number' ||
+      Number.isNaN(temperature) ||
+      typeof topK !== 'number' ||
+      Number.isNaN(topK)
+    ) {
+      throw new DOMException(
+        'The provided temperature and topK must be numbers.',
+        'NotSupportedError'
+      );
     }
 
-
-
-
-
-      this.addEventListener('quotaoverflow', handler);
+    if (temperature < 0 || temperature > maxTemperature || topK > maxTopK) {
+      throw new DOMException(
+        'The provided temperature or topK is outside the supported range.',
+        'NotSupportedError'
+      );
     }
+  }
 
-
-
-
+  static async params() {
+    return {
+      // Values from https://docs.cloud.google.com/vertex-ai/generative-ai/docs/models/gemini/2-5-flash-lite#:~:text=%2C%20audio/webm-,Parameter%20defaults,-tune.
+      defaultTemperature: 1.0,
+      defaultTopK: 64,
+      maxTemperature: 2.0,
+      maxTopK: 64, // Fixed
+    };
+  }
+
```
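`#validateOptions` enforces the spec's both-or-neither rule for sampling parameters against the fixed limits from `params()`. Assuming a backend is configured, the behavior looks like:

```js
const { maxTemperature, maxTopK } = await LanguageModel.params(); // 2.0, 64

await LanguageModel.create();                               // OK: neither given
await LanguageModel.create({ temperature: 0.8, topK: 40 }); // OK: both given
await LanguageModel.create({ temperature: 0.8 });           // NotSupportedError: topK missing
await LanguageModel.create({ temperature: 2.5, topK: 40 }); // NotSupportedError: > maxTemperature
```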
```diff
+  static async create(options = {}) {
+    const availability = await LanguageModel.availability(options);
+    if (availability === 'downloadable' || availability === 'downloading') {
+      throw new DOMException(
+        'Requires a user gesture when availability is "downloading" or "downloadable".',
+        'NotAllowedError'
+      );
     }
 
-
-
+    // --- Backend Selection Logic ---
+    const info = LanguageModel.#getBackendInfo();
+
+    const BackendClass = await LanguageModel.#getBackendClass();
+    const backend = new BackendClass(info.configValue);
+
+    const defaults = {
+      temperature: 1.0,
+      topK: 3,
+    };
 
-
-
-
+    const resolvedOptions = { ...defaults, ...options };
+
+    const inCloudParams = {
+      model: backend.modelName,
+      generationConfig: {
+        temperature: resolvedOptions.temperature,
+        topK: resolvedOptions.topK,
+      },
+    };
+
+    let initialHistory = [];
+
+    if (
+      resolvedOptions.initialPrompts &&
+      Array.isArray(resolvedOptions.initialPrompts)
+    ) {
+      const systemPrompts = resolvedOptions.initialPrompts.filter(
+        (p) => p.role === 'system'
+      );
+      const conversationPrompts = resolvedOptions.initialPrompts.filter(
+        (p) => p.role !== 'system'
       );
-    const hasTopK = Object.prototype.hasOwnProperty.call(options, 'topK');
 
-    if (
-
-
-      '
-    );
+      if (systemPrompts.length > 0) {
+        inCloudParams.systemInstruction = systemPrompts
+          .map((p) => p.content)
+          .join('\n');
       }
+      // Await the conversion of history items (in case of images in history)
+      initialHistory = await convertToHistory(conversationPrompts);
+    }
 
-
-
-
-
+    const model = backend.createSession(resolvedOptions, inCloudParams);
+
+    // If a monitor callback is provided, simulate simple downloadprogress events
+    if (typeof resolvedOptions.monitor === 'function') {
+      const monitorTarget = new EventTarget();
 
-
-
-
-
-      Number.isNaN(temperature) ||
-      typeof topK !== 'number' ||
-      Number.isNaN(topK)
-    ) {
-      throw new DOMException(
-        'The provided temperature and topK must be numbers.',
-        'NotSupportedError'
-      );
+      try {
+        resolvedOptions.monitor(monitorTarget);
+      } catch (e) {
+        console.error('Error in monitor callback:', e);
       }
 
-
-
-
-
-      );
+      try {
+        const startEvent = new ProgressEvent('downloadprogress', {
+          loaded: 0,
+          total: 1,
+        });
+        const endEvent = new ProgressEvent('downloadprogress', {
+          loaded: 1,
+          total: 1,
+        });
+        monitorTarget.dispatchEvent(startEvent);
+        monitorTarget.dispatchEvent(endEvent);
+      } catch (e) {
+        console.error('Error dispatching downloadprogress events:', e);
      }
    }
 
-
-
-
-
-
-
-
-
+    return new LanguageModel(
+      backend,
+      model,
+      initialHistory,
+      resolvedOptions,
+      inCloudParams
+    );
+  }
+
+  // Instance Methods
+
```
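Putting `create()` together: `system` prompts are joined into `systemInstruction`, the remaining prompts seed the history, and a `monitor` callback receives the two synthetic `downloadprogress` events. For example:

```js
const session = await LanguageModel.create({
  initialPrompts: [
    { role: 'system', content: 'You are a concise assistant.' }, // → systemInstruction
    { role: 'user', content: 'Hi' },                             // → seed history
    { role: 'assistant', content: 'Hello! How can I help?' },    // → seed history
  ],
  monitor(m) {
    // The polyfill fires loaded: 0 and then loaded: 1, both with total: 1.
    m.addEventListener('downloadprogress', (e) => console.log(e.loaded, e.total));
  },
});
```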
```diff
+  async clone(options = {}) {
+    if (this.#destroyed) {
+      throw new DOMException('Session is destroyed', 'InvalidStateError');
     }
 
-
-
-
-    // model that needs downloading and simulates the Prompt API's behavior.
-    if (availability === 'downloadable' || availability === 'downloading') {
-      throw new DOMException(
-        'Requires a user gesture when availability is "downloading" or "downloadable".',
-        'NotAllowedError'
-      );
-    }
-    const defaults = {
-      temperature: 1.0,
-      topK: 3,
-    };
-
-    const resolvedOptions = { ...defaults, ...options };
-
-    const inCloudParams = {
-      model: MODEL_NAME,
-      generationConfig: {
-        temperature: resolvedOptions.temperature,
-        topK: resolvedOptions.topK,
-      },
-    };
-
-    let initialHistory = [];
-    let systemInstruction = undefined;
-
-    if (
-      resolvedOptions.initialPrompts &&
-      Array.isArray(resolvedOptions.initialPrompts)
-    ) {
-      const systemPrompts = resolvedOptions.initialPrompts.filter(
-        (p) => p.role === 'system'
-      );
-      const conversationPrompts = resolvedOptions.initialPrompts.filter(
-        (p) => p.role !== 'system'
-      );
-
-      if (systemPrompts.length > 0) {
-        inCloudParams.systemInstruction = systemPrompts
-          .map((p) => p.content)
-          .join('\n');
-      }
-      // Await the conversion of history items (in case of images in history)
-      initialHistory = await convertToFirebaseHistory(conversationPrompts);
-    }
+    const historyCopy = JSON.parse(JSON.stringify(this.#history));
+    const mergedOptions = { ...this.#options, ...options };
+    const mergedInCloudParams = { ...this.#inCloudParams };
 
-
-
-
-
+    if (options.temperature !== undefined) {
+      mergedInCloudParams.generationConfig.temperature = options.temperature;
+    }
+    if (options.topK !== undefined) {
+      mergedInCloudParams.generationConfig.topK = options.topK;
+    }
 
-
-
-
+    // Re-create the backend for the clone since it now holds state (#model)
+    const BackendClass = await LanguageModel.#getBackendClass();
+    const info = LanguageModel.#getBackendInfo();
+    const newBackend = new BackendClass(info.configValue);
+    const newModel = newBackend.createSession(
+      mergedOptions,
+      mergedInCloudParams
+    );
 
-
-
-
-
-
-
+    return new LanguageModel(
+      newBackend,
+      newModel,
+      historyCopy,
+      mergedOptions,
+      mergedInCloudParams
+    );
+  }
 
-
-
-
-
-          total: 1,
-        });
-        const endEvent = new ProgressEvent('downloadprogress', {
-          loaded: 1,
-          total: 1,
-        });
-        // The `ProgressEvent`'s `currentTarget`, `srcElement` and `target`
-        // properties are `EventTarget`, not `CreateMonitor`, when using the
-        // polyfill. Hopefully developers won't rely on these properties.
-        monitorTarget.dispatchEvent(startEvent);
-        monitorTarget.dispatchEvent(endEvent);
-      } catch (e) {
-        console.error('Error dispatching downloadprogress events:', e);
-      }
-    }
+  destroy() {
+    this.#destroyed = true;
+    this.#history = null;
+  }
 
-
-
-
-
-
-    );
```
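`clone()` now rebuilds the backend and session rather than sharing `#model` with the original. Typical use:

```js
const branch = await session.clone();                               // same options, copied history
const hotter = await session.clone({ temperature: 1.8, topK: 64 }); // overrides generationConfig
session.destroy();
// session.prompt(...) now throws InvalidStateError; `branch` keeps working
// because the clone re-created its own backend and session.
```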
```diff
+  async prompt(input, options = {}) {
+    if (this.#destroyed) {
+      throw new DOMException('Session is destroyed', 'InvalidStateError');
+    }
+    if (options.signal?.aborted) {
+      throw new DOMException('Aborted', 'AbortError');
     }
 
-
-
-
-
-      throw new DOMException('Session is destroyed', 'InvalidStateError');
-    // Clone private history
-    const historyCopy = JSON.parse(JSON.stringify(this.#history));
-    return new LanguageModel(
-      this.#model,
-      historyCopy,
-      {
-        ...this.#options,
-        ...options,
-      },
-      this.#inCloudParams
+    if (options.responseConstraint) {
+      // Update Schema
+      const schema = convertJsonSchemaToVertexSchema(
+        options.responseConstraint
       );
-
+      this.#inCloudParams.generationConfig.responseMimeType =
+        'application/json';
+      this.#inCloudParams.generationConfig.responseSchema = schema;
 
-
-    this.#
-
+      // Re-create model with new config/schema (stored in backend)
+      this.#model = this.#backend.createSession(
+        this.#options,
+        this.#inCloudParams
+      );
     }
 
-
-
-
-    if (options.signal?.aborted)
-      throw new DOMException('Aborted', 'AbortError');
-
-    if (options.responseConstraint) {
-      const vertexSchema = convertJsonSchemaToVertexSchema(
-        options.responseConstraint
-      );
-      this.#inCloudParams.generationConfig.responseMimeType =
-        'application/json';
-      this.#inCloudParams.generationConfig.responseSchema = vertexSchema;
-      this.#model = getGenerativeModel(ai, {
-        mode: InferenceMode.ONLY_IN_CLOUD,
-        inCloudParams: this.#inCloudParams,
-      });
-    }
+    // Process Input (Async conversion of Blob/Canvas/AudioBuffer)
+    const parts = await this.#processInput(input);
+    const userContent = { role: 'user', parts: parts };
 
-
-
-    const
+    try {
+      // Estimate usage
+      const totalTokens = await this.#backend.countTokens([
+        { role: 'user', parts },
+      ]);
 
-
-
-
-        contents: [{ role: 'user', parts }],
-      });
-      if (this.#inputUsage + totalTokens > this.inputQuota)
-        this.dispatchEvent(new Event('quotaoverflow'));
+      if (this.#inputUsage + totalTokens > this.inputQuota) {
+        this.dispatchEvent(new Event('quotaoverflow'));
+      }
 
-
+      const requestContents = [...this.#history, userContent];
 
-
-
-      });
+      const { text, usage } =
+        await this.#backend.generateContent(requestContents);
 
-
-
-
-      }
+      if (usage) {
+        this.#inputUsage = usage;
+      }
 
-
+      this.#history.push(userContent);
+      this.#history.push({ role: 'model', parts: [{ text }] });
 
-
-
+      return text;
+    } catch (error) {
+      console.error('Prompt API Polyfill Error:', error);
+      throw error;
+    }
+  }
 
-
-
-
-
-
```
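In `prompt()`, a `responseConstraint` JSON Schema is run through `convertJsonSchemaToVertexSchema` and the response is forced to `application/json`, so the returned string parses directly:

```js
const raw = await session.prompt('Extract the city and country: "I live in Lyon."', {
  responseConstraint: {
    type: 'object',
    properties: {
      city: { type: 'string' },
      country: { type: 'string' },
    },
    required: ['city', 'country'],
  },
});
const { city, country } = JSON.parse(raw);
```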
```diff
+  promptStreaming(input, options = {}) {
+    if (this.#destroyed) {
+      throw new DOMException('Session is destroyed', 'InvalidStateError');
+    }
+    if (options.signal?.aborted) {
+      throw new DOMException('Aborted', 'AbortError');
     }
 
-
-    if (this.#destroyed)
-      throw new DOMException('Session is destroyed', 'InvalidStateError');
-    if (options.signal?.aborted)
-      throw new DOMException('Aborted', 'AbortError');
-
-    const _this = this; // Capture 'this' to access private fields in callback
-
-    if (options.responseConstraint) {
-      const vertexSchema = convertJsonSchemaToVertexSchema(
-        options.responseConstraint
-      );
-      this.#inCloudParams.generationConfig.responseMimeType =
-        'application/json';
-      this.#inCloudParams.generationConfig.responseSchema = vertexSchema;
-      this.#model = getGenerativeModel(ai, {
-        mode: InferenceMode.ONLY_IN_CLOUD,
-        inCloudParams: this.#inCloudParams,
-      });
-    }
+    const _this = this; // Capture 'this' to access private fields in callback
 
-
+    const signal = options.signal;
 
-
-
-
+    return new ReadableStream({
+      async start(controller) {
+        const abortError = new DOMException('Aborted', 'AbortError');
 
-
-
+        if (signal?.aborted) {
+          controller.error(abortError);
+          return;
+        }
+
+        let aborted = false;
+        const onAbort = () => {
+          aborted = true;
+          try {
             controller.error(abortError);
-
+          } catch {
+            // Ignore
           }
+        };
 
-
-
-
-          try {
-            controller.error(abortError);
-          } catch {
-            // Controller might already be closed/errored; ignore.
-          }
-        };
+        if (signal) {
+          signal.addEventListener('abort', onAbort);
+        }
 
-
-
+        try {
+          if (options.responseConstraint) {
+            const schema = convertJsonSchemaToVertexSchema(
+              options.responseConstraint
+            );
+            _this.#inCloudParams.generationConfig.responseMimeType =
+              'application/json';
+            _this.#inCloudParams.generationConfig.responseSchema = schema;
+            _this.#model = _this.#backend.createSession(
+              _this.#options,
+              _this.#inCloudParams
+            );
          }
 
-
-
-          const parts = await _this.#processInput(input);
-          const userContent = { role: 'user', parts: parts };
+          const parts = await _this.#processInput(input);
+          const userContent = { role: 'user', parts: parts };
 
-
-
-
-          });
-          if (_this.#inputUsage + totalTokens > this.inputQuota)
-            this.dispatchEvent(new Event('quotaoverflow'));
+          const totalTokens = await _this.#backend.countTokens([
+            { role: 'user', parts },
+          ]);
 
-
+          if (_this.#inputUsage + totalTokens > _this.inputQuota) {
+            _this.dispatchEvent(new Event('quotaoverflow'));
+          }
 
-
-            contents: requestContents,
-          });
+          const requestContents = [..._this.#history, userContent];
 
-
-
-
-
-
-
-
-
-
-
-              }
-            }
-            return;
-          }
-          if (chunk.usageMetadata?.totalTokenCount) {
-            _this.#inputUsage += chunk.usageMetadata.totalTokenCount;
+          const stream =
+            await _this.#backend.generateContentStream(requestContents);
+
+          let fullResponseText = '';
+
+          for await (const chunk of stream) {
+            if (aborted) {
+              // Try to cancel if supported
+              if (typeof stream.return === 'function') {
+                await stream.return();
              }
-
-            fullResponseText += chunkText;
-            controller.enqueue(chunkText);
+              return;
            }
 
-
-
-          _this.#history.push({
-            role: 'model',
-            parts: [{ text: fullResponseText }],
-          });
+            const chunkText = chunk.text();
+            fullResponseText += chunkText;
 
-
-
-        } catch (error) {
-          // If we aborted, we've already signaled an AbortError; otherwise surface the error.
-          if (!aborted) {
-            controller.error(error);
-          }
-        } finally {
-          if (signal) {
-            signal.removeEventListener('abort', onAbort);
+            if (chunk.usageMetadata?.totalTokenCount) {
+              _this.#inputUsage = chunk.usageMetadata.totalTokenCount;
            }
+
+            controller.enqueue(chunkText);
          }
-        },
-      });
-    }
 
-
-
-
-
-
+          if (!aborted) {
+            _this.#history.push(userContent);
+            _this.#history.push({
+              role: 'model',
+              parts: [{ text: fullResponseText }],
+            });
 
-
-
+            controller.close();
+          }
+        } catch (error) {
+          if (!aborted) {
+            controller.error(error);
+          }
+        } finally {
+          if (signal) {
+            signal.removeEventListener('abort', onAbort);
+          }
+        }
+      },
+    });
+  }
 
-
-
-
-
-
-
-
-
```
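`promptStreaming()` returns a WHATWG `ReadableStream` of plain-string chunks; the `async-iterator-polyfill.js` import keeps `for await` over streams working in browsers that lack it. Consuming the stream with cancellation:

```js
const aborter = new AbortController();
const stream = session.promptStreaming('Tell me a long story.', {
  signal: aborter.signal,
});

try {
  for await (const chunk of stream) {
    document.body.append(chunk); // each chunk is a plain string
  }
} catch (err) {
  if (err.name !== 'AbortError') throw err; // cancellation surfaces as AbortError
}
```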
```diff
+  async append(input, options = {}) {
+    if (this.#destroyed) {
+      throw new DOMException('Session is destroyed', 'InvalidStateError');
+    }
+    if (options.signal?.aborted) {
+      throw new DOMException('Aborted', 'AbortError');
+    }
+
+    const parts = await this.#processInput(input);
+    const content = { role: 'user', parts: parts };
+
+    try {
+      const totalTokens = await this.#backend.countTokens([
+        ...this.#history,
+        content,
+      ]);
+      this.#inputUsage = totalTokens;
+    } catch {
+      // Do nothing.
+    }
 
-
+    this.#history.push(content);
 
-
-
-    }
+    if (this.#inputUsage > this.inputQuota) {
+      this.dispatchEvent(new Event('quotaoverflow'));
     }
+  }
 
-
-
-
```
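`append()` pushes user content into the history without generating a response, re-counting tokens as it goes. Combined with the quota surface (`inputUsage`, the fixed 1,000,000-token `inputQuota`, and the `quotaoverflow` event):

```js
session.onquotaoverflow = () => console.warn('Input exceeds the 1,000,000-token quota');

await session.append('Background for later questions: …');
console.log(`${session.inputUsage} / ${session.inputQuota} tokens used`);
```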
```diff
+  async measureInputUsage(input) {
+    if (this.#destroyed) {
+      throw new DOMException('Session is destroyed', 'InvalidStateError');
+    }
 
-
-
-
-
-
-
-
-
-
-
-
-      return 0;
-    }
+    try {
+      const parts = await this.#processInput(input);
+      const totalTokens = await this.#backend.countTokens([
+        { role: 'user', parts },
+      ]);
+      return totalTokens || 0;
+    } catch (e) {
+      console.warn(
+        'The underlying API call failed, quota usage (0) is not reported accurately.'
+      );
+      return 0;
     }
+  }
 
-
-
-
-
-
```
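`measureInputUsage()` exposes the same token count on its own, so callers can check the cost of an input before committing to it:

```js
const draft = 'Summarize the release notes above in three bullet points.';
const cost = await session.measureInputUsage(draft); // 0 if the backend call fails
if (session.inputUsage + cost <= session.inputQuota) {
  await session.prompt(draft);
}
```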
```diff
+  // Private Helper to process diverse input types
+  async #processInput(input) {
+    if (typeof input === 'string') {
+      return [{ text: input }];
+    }
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            );
-            combinedParts.push(part);
-          }
+    if (Array.isArray(input)) {
+      if (input.length > 0 && input[0].role) {
+        let combinedParts = [];
+        for (const msg of input) {
+          if (typeof msg.content === 'string') {
+            combinedParts.push({ text: msg.content });
+            if (msg.prefix) {
+              console.warn(
+                "The `prefix` flag isn't supported and was ignored."
+              );
+            }
+          } else if (Array.isArray(msg.content)) {
+            for (const c of msg.content) {
+              if (c.type === 'text') {
+                combinedParts.push({ text: c.value });
+              } else {
+                const part = await MultimodalConverter.convert(c.type, c.value);
+                combinedParts.push(part);
               }
             }
           }
-        return combinedParts;
         }
-        return
+        return combinedParts;
       }
-
-    return [{ text: JSON.stringify(input) }];
+      return input.map((s) => ({ text: String(s) }));
     }
+
+    return [{ text: JSON.stringify(input) }];
   }
+}
 
```
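`#processInput` is what lets `prompt()`, `promptStreaming()`, and `append()` take several input shapes: a bare string, an array of `{ role, content }` messages (string or typed-part content, with non-text parts routed through `MultimodalConverter`), an array of plain values, or any other value, which is JSON-stringified:

```js
// 1. Bare string
await session.prompt('Just a string');

// 2. Messages with typed parts (non-text parts go through MultimodalConverter)
const imageBlob = await (await fetch('photo.jpg')).blob(); // any image Blob
await session.prompt([
  {
    role: 'user',
    content: [
      { type: 'text', value: 'Describe this image.' },
      { type: 'image', value: imageBlob },
    ],
  },
]);
```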
```diff
+if (!('LanguageModel' in window) || window.__FORCE_PROMPT_API_POLYFILL__) {
   // Attach to window
   window.LanguageModel = LanguageModel;
+  LanguageModel.__isPolyfill = true;
   console.log(
-    'Polyfill: window.LanguageModel is now backed by
+    'Polyfill: window.LanguageModel is now backed by the Prompt API polyfill.'
   );
-}
+}
```
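Two small additions close out 0.2.0: `window.__FORCE_PROMPT_API_POLYFILL__` shadows a native `LanguageModel` implementation, and `LanguageModel.__isPolyfill` lets callers detect which implementation they got:

```js
window.__FORCE_PROMPT_API_POLYFILL__ = true; // set before the polyfill module loads
await import('./prompt-api-polyfill.js');

if (window.LanguageModel.__isPolyfill) {
  console.log('Using the polyfill rather than the built-in Prompt API.');
}
```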
|