@posthog/ai 3.1.1 → 3.2.1
This diff covers publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
- package/CHANGELOG.md +8 -0
- package/lib/index.cjs.js +21 -35
- package/lib/index.cjs.js.map +1 -1
- package/lib/index.esm.js +22 -36
- package/lib/index.esm.js.map +1 -1
- package/package.json +1 -1
- package/src/anthropic/index.ts +6 -8
- package/src/openai/azure.ts +6 -8
- package/src/openai/index.ts +12 -18
- package/src/vercel/middleware.ts +1 -1
package/CHANGELOG.md
CHANGED
@@ -1,3 +1,11 @@
+# 3.2.1 - 2025-02-11
+
+- fix: add experimental_wrapLanguageModel to vercel middleware supporting older versions of ai
+
+# 3.2.0 - 2025-02-11
+
+- feat: change how we handle streaming support for openai and anthropic
+
 # 3.1.1 - 2025-02-07
 
 - fix: bump ai to 4.1.0
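The 3.2.0 entry above is the change that dominates the bundle diffs below: instead of copying chunks into a Node PassThrough stream, the wrappers now call tee() on the SDK's own stream, drain one branch in the background for analytics, and hand the other branch back to the caller. A minimal sketch of that pattern against the openai Node SDK; the helper name and the analytics placeholder are illustrative, not part of this package:

import OpenAI from 'openai';

const client = new OpenAI();

// Illustrative helper: split the streaming response with tee(), total up
// the text deltas on one branch, and return the other branch untouched.
async function createWithCapture(params: OpenAI.Chat.ChatCompletionCreateParamsStreaming) {
  const stream = await client.chat.completions.create(params);
  const [analyticsBranch, callerBranch] = stream.tee();
  void (async () => {
    let accumulated = '';
    for await (const chunk of analyticsBranch) {
      accumulated += chunk.choices?.[0]?.delta?.content ?? '';
    }
    // send `accumulated` plus usage/latency to an analytics sink here
  })();
  return callerBranch; // the caller iterates this branch as usual
}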
package/lib/index.cjs.js
CHANGED
@@ -4,7 +4,6 @@ Object.defineProperty(exports, '__esModule', { value: true });
 
 var OpenAIOrignal = require('openai');
 var uuid = require('uuid');
-var stream = require('stream');
 var ai = require('ai');
 var AnthropicOriginal = require('@anthropic-ai/sdk');
 
@@ -186,19 +185,16 @@ class WrappedCompletions$1 extends OpenAIOrignal__default["default"].Chat.Comple
     const parentPromise = super.create(openAIParams, options);
     if (openAIParams.stream) {
       return parentPromise.then(value => {
-        const passThroughStream = new stream.PassThrough({
-          objectMode: true
-        });
-        let accumulatedContent = '';
-        let usage = {
-          inputTokens: 0,
-          outputTokens: 0
-        };
         if ('tee' in value) {
-          const
+          const [stream1, stream2] = value.tee();
           (async () => {
             try {
-
+              let accumulatedContent = '';
+              let usage = {
+                inputTokens: 0,
+                outputTokens: 0
+              };
+              for await (const chunk of stream1) {
                 const delta = chunk?.choices?.[0]?.delta?.content ?? '';
                 accumulatedContent += delta;
                 if (chunk.usage) {
@@ -207,7 +203,6 @@ class WrappedCompletions$1 extends OpenAIOrignal__default["default"].Chat.Comple
                     outputTokens: chunk.usage.completion_tokens ?? 0
                   };
                 }
-                passThroughStream.write(chunk);
               }
               const latency = (Date.now() - startTime) / 1000;
               sendEventToPosthog({
@@ -227,9 +222,7 @@ class WrappedCompletions$1 extends OpenAIOrignal__default["default"].Chat.Comple
                 httpStatus: 200,
                 usage
               });
-              passThroughStream.end();
             } catch (error) {
-              // error handling
               sendEventToPosthog({
                 client: this.phClient,
                 distinctId: posthogDistinctId ?? traceId,
@@ -249,11 +242,12 @@ class WrappedCompletions$1 extends OpenAIOrignal__default["default"].Chat.Comple
                 isError: true,
                 error: JSON.stringify(error)
               });
-              passThroughStream.emit('error', error);
             }
           })();
+          // Return the other stream to the user
+          return stream2;
         }
-        return
+        return value;
       });
     } else {
       const wrappedPromise = parentPromise.then(result => {
@@ -343,9 +337,6 @@ class WrappedCompletions extends OpenAIOrignal.AzureOpenAI.Chat.Completions {
     const parentPromise = super.create(openAIParams, options);
     if (openAIParams.stream) {
       return parentPromise.then(value => {
-        const passThroughStream = new stream.PassThrough({
-          objectMode: true
-        });
         let accumulatedContent = '';
         let usage = {
           inputTokens: 0,
@@ -353,10 +344,10 @@ class WrappedCompletions extends OpenAIOrignal.AzureOpenAI.Chat.Completions {
         };
         let model = openAIParams.model;
         if ('tee' in value) {
-          const
+          const [stream1, stream2] = value.tee();
           (async () => {
             try {
-              for await (const chunk of
+              for await (const chunk of stream1) {
                 const delta = chunk?.choices?.[0]?.delta?.content ?? '';
                 accumulatedContent += delta;
                 if (chunk.usage) {
@@ -368,7 +359,6 @@ class WrappedCompletions extends OpenAIOrignal.AzureOpenAI.Chat.Completions {
                     outputTokens: chunk.usage.completion_tokens ?? 0
                   };
                 }
-                passThroughStream.write(chunk);
               }
               const latency = (Date.now() - startTime) / 1000;
               sendEventToPosthog({
@@ -388,7 +378,6 @@ class WrappedCompletions extends OpenAIOrignal.AzureOpenAI.Chat.Completions {
                 httpStatus: 200,
                 usage
               });
-              passThroughStream.end();
             } catch (error) {
               // error handling
               sendEventToPosthog({
@@ -410,11 +399,12 @@ class WrappedCompletions extends OpenAIOrignal.AzureOpenAI.Chat.Completions {
                 isError: true,
                 error: JSON.stringify(error)
               });
-              passThroughStream.emit('error', error);
             }
           })();
+          // Return the other stream to the user
+          return stream2;
         }
-        return
+        return value;
       });
     } else {
       const wrappedPromise = parentPromise.then(result => {
@@ -663,7 +653,7 @@ const wrapVercelLanguageModel = (model, phClient, options) => {
     posthogTraceId: traceId,
     posthogDistinctId: options.posthogDistinctId ?? traceId
   });
-  const wrappedModel = ai.
+  const wrappedModel = ai.experimental_wrapLanguageModel({
     model,
     middleware
   });
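For context on the one-line change above: experimental_wrapLanguageModel is the name older releases of the ai package export for its middleware wrapper (newer releases also expose it as wrapLanguageModel), which is what the 3.2.1 fix targets. A hedged sketch of wrapping and using a model this way; the logging middleware is illustrative, not this package's actual middleware:

import { experimental_wrapLanguageModel, streamText } from 'ai';
import { openai } from '@ai-sdk/openai';

// Wrap a model with middleware; wrapStream runs around every streaming call.
const model = experimental_wrapLanguageModel({
  model: openai('gpt-4o-mini'),
  middleware: {
    wrapStream: async ({ doStream }) => {
      const start = Date.now();
      const result = await doStream();
      console.log(`stream opened after ${Date.now() - start}ms`); // analytics hook
      return result;
    },
  },
});

// The wrapped model drops into the usual ai SDK calls unchanged.
const { textStream } = streamText({ model, prompt: 'Hello!' });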
@@ -701,19 +691,16 @@ class WrappedMessages extends AnthropicOriginal__default["default"].Messages {
     const parentPromise = super.create(anthropicParams, options);
     if (anthropicParams.stream) {
       return parentPromise.then(value => {
-        const passThroughStream = new stream.PassThrough({
-          objectMode: true
-        });
         let accumulatedContent = '';
         const usage = {
           inputTokens: 0,
           outputTokens: 0
         };
         if ('tee' in value) {
-          const
+          const [stream1, stream2] = value.tee();
           (async () => {
             try {
-              for await (const chunk of
+              for await (const chunk of stream1) {
                 if ('delta' in chunk) {
                   if ('text' in chunk.delta) {
                     const delta = chunk?.delta?.text ?? '';
@@ -726,7 +713,6 @@ class WrappedMessages extends AnthropicOriginal__default["default"].Messages {
                 if ('usage' in chunk) {
                   usage.outputTokens = chunk.usage.output_tokens ?? 0;
                 }
-                passThroughStream.write(chunk);
               }
               const latency = (Date.now() - startTime) / 1000;
               sendEventToPosthog({
@@ -746,7 +732,6 @@ class WrappedMessages extends AnthropicOriginal__default["default"].Messages {
                 httpStatus: 200,
                 usage
               });
-              passThroughStream.end();
             } catch (error) {
               // error handling
               sendEventToPosthog({
@@ -768,11 +753,12 @@ class WrappedMessages extends AnthropicOriginal__default["default"].Messages {
                 isError: true,
                 error: JSON.stringify(error)
               });
-              passThroughStream.emit('error', error);
             }
           })();
+          // Return the other stream to the user
+          return stream2;
         }
-        return
+        return value;
       });
     } else {
       const wrappedPromise = parentPromise.then(result => {
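The Anthropic wrapper above gets the same tee()-based treatment as the OpenAI ones; the @anthropic-ai/sdk stream exposes a compatible tee(), which is what the 'tee' in value guard detects. A minimal standalone sketch under that assumption; the helper name and accumulation logic are illustrative, not this package's code:

import Anthropic from '@anthropic-ai/sdk';

const client = new Anthropic();

// Illustrative helper mirroring the wrapper above: one tee() branch feeds
// analytics, the other goes back to the caller.
async function messagesWithCapture(params: Anthropic.MessageCreateParamsStreaming) {
  const stream = await client.messages.create(params);
  const [analyticsBranch, callerBranch] = stream.tee();
  void (async () => {
    let accumulated = '';
    for await (const event of analyticsBranch) {
      if (event.type === 'content_block_delta' && event.delta.type === 'text_delta') {
        accumulated += event.delta.text;
      }
    }
    // report `accumulated` (plus usage from message_start/message_delta events) here
  })();
  return callerBranch;
}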