@posthog/ai 7.0.0 → 7.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/anthropic/index.cjs +2 -2
- package/dist/anthropic/index.cjs.map +1 -1
- package/dist/anthropic/index.mjs +2 -2
- package/dist/anthropic/index.mjs.map +1 -1
- package/dist/gemini/index.cjs +2 -2
- package/dist/gemini/index.cjs.map +1 -1
- package/dist/gemini/index.mjs +2 -2
- package/dist/gemini/index.mjs.map +1 -1
- package/dist/index.cjs +138 -2
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +19 -0
- package/dist/index.mjs +138 -2
- package/dist/index.mjs.map +1 -1
- package/dist/langchain/index.cjs +2 -2
- package/dist/langchain/index.cjs.map +1 -1
- package/dist/langchain/index.mjs +2 -2
- package/dist/langchain/index.mjs.map +1 -1
- package/dist/openai/index.cjs +155 -2
- package/dist/openai/index.cjs.map +1 -1
- package/dist/openai/index.d.ts +20 -1
- package/dist/openai/index.mjs +154 -3
- package/dist/openai/index.mjs.map +1 -1
- package/dist/vercel/index.cjs +2 -2
- package/dist/vercel/index.cjs.map +1 -1
- package/dist/vercel/index.mjs +2 -2
- package/dist/vercel/index.mjs.map +1 -1
- package/package.json +3 -3
package/dist/index.d.ts
CHANGED
|
@@ -39,6 +39,8 @@ declare const Chat: typeof OpenAI.Chat;
|
|
|
39
39
|
declare const Completions: typeof OpenAI.Chat.Completions;
|
|
40
40
|
declare const Responses: typeof OpenAI.Responses;
|
|
41
41
|
declare const Embeddings: typeof OpenAI.Embeddings;
|
|
42
|
+
declare const Audio: typeof OpenAI.Audio;
|
|
43
|
+
declare const Transcriptions: typeof OpenAI.Audio.Transcriptions;
|
|
42
44
|
type ChatCompletion$1 = OpenAI.ChatCompletion;
|
|
43
45
|
type ChatCompletionChunk$1 = OpenAI.ChatCompletionChunk;
|
|
44
46
|
type ChatCompletionCreateParamsBase$1 = OpenAI.Chat.Completions.ChatCompletionCreateParams;
|
|
@@ -60,6 +62,7 @@ declare class PostHogOpenAI extends OpenAI {
|
|
|
60
62
|
chat: WrappedChat$1;
|
|
61
63
|
responses: WrappedResponses;
|
|
62
64
|
embeddings: WrappedEmbeddings$1;
|
|
65
|
+
audio: WrappedAudio;
|
|
63
66
|
constructor(config: MonitoringOpenAIConfig$1);
|
|
64
67
|
}
|
|
65
68
|
declare class WrappedChat$1 extends Chat {
|
|
@@ -89,6 +92,22 @@ declare class WrappedEmbeddings$1 extends Embeddings {
|
|
|
89
92
|
constructor(client: OpenAI, phClient: PostHog);
|
|
90
93
|
create(body: EmbeddingCreateParams$1 & MonitoringParams, options?: RequestOptions$2): APIPromise<CreateEmbeddingResponse$1>;
|
|
91
94
|
}
|
|
95
|
+
declare class WrappedAudio extends Audio {
|
|
96
|
+
constructor(parentClient: PostHogOpenAI, phClient: PostHog);
|
|
97
|
+
transcriptions: WrappedTranscriptions;
|
|
98
|
+
}
|
|
99
|
+
declare class WrappedTranscriptions extends Transcriptions {
|
|
100
|
+
private readonly phClient;
|
|
101
|
+
private readonly baseURL;
|
|
102
|
+
constructor(client: OpenAI, phClient: PostHog);
|
|
103
|
+
create(body: OpenAI.Audio.Transcriptions.TranscriptionCreateParamsNonStreaming<'json' | undefined> & MonitoringParams, options?: RequestOptions$2): APIPromise<OpenAI.Audio.Transcriptions.Transcription>;
|
|
104
|
+
create(body: OpenAI.Audio.Transcriptions.TranscriptionCreateParamsNonStreaming<'verbose_json'> & MonitoringParams, options?: RequestOptions$2): APIPromise<OpenAI.Audio.Transcriptions.TranscriptionVerbose>;
|
|
105
|
+
create(body: OpenAI.Audio.Transcriptions.TranscriptionCreateParamsNonStreaming<'srt' | 'vtt' | 'text'> & MonitoringParams, options?: RequestOptions$2): APIPromise<string>;
|
|
106
|
+
create(body: OpenAI.Audio.Transcriptions.TranscriptionCreateParamsNonStreaming, options?: RequestOptions$2): APIPromise<OpenAI.Audio.Transcriptions.Transcription>;
|
|
107
|
+
create(body: OpenAI.Audio.Transcriptions.TranscriptionCreateParamsStreaming & MonitoringParams, options?: RequestOptions$2): APIPromise<Stream<OpenAI.Audio.Transcriptions.TranscriptionStreamEvent>>;
|
|
108
|
+
create(body: OpenAI.Audio.Transcriptions.TranscriptionCreateParamsStreaming & MonitoringParams, options?: RequestOptions$2): APIPromise<OpenAI.Audio.Transcriptions.TranscriptionCreateResponse | string | Stream<OpenAI.Audio.Transcriptions.TranscriptionStreamEvent>>;
|
|
109
|
+
create(body: OpenAI.Audio.Transcriptions.TranscriptionCreateParams & MonitoringParams, options?: RequestOptions$2): APIPromise<OpenAI.Audio.Transcriptions.TranscriptionCreateResponse | string | Stream<OpenAI.Audio.Transcriptions.TranscriptionStreamEvent>>;
|
|
110
|
+
}
|
|
92
111
|
|
|
93
112
|
type ChatCompletion = OpenAIOrignal.ChatCompletion;
|
|
94
113
|
type ChatCompletionChunk = OpenAIOrignal.ChatCompletionChunk;
|
package/dist/index.mjs
CHANGED
|
@@ -6,7 +6,7 @@ import { wrapLanguageModel } from 'ai';
|
|
|
6
6
|
import AnthropicOriginal from '@anthropic-ai/sdk';
|
|
7
7
|
import { GoogleGenAI } from '@google/genai';
|
|
8
8
|
|
|
9
|
-
var version = "7.0.0";
|
|
9
|
+
var version = "7.1.0";
|
|
10
10
|
|
|
11
11
|
// Type guards for safer type checking
|
|
12
12
|
const isString = value => {
|
|
@@ -49,7 +49,7 @@ const getModelParams = params => {
|
|
|
49
49
|
return {};
|
|
50
50
|
}
|
|
51
51
|
const modelParams = {};
|
|
52
|
-
const paramKeys = ['temperature', 'max_tokens', 'max_completion_tokens', 'top_p', 'frequency_penalty', 'presence_penalty', 'n', 'stop', 'stream', 'streaming'];
|
|
52
|
+
const paramKeys = ['temperature', 'max_tokens', 'max_completion_tokens', 'top_p', 'frequency_penalty', 'presence_penalty', 'n', 'stop', 'stream', 'streaming', 'language', 'response_format', 'timestamp_granularities'];
|
|
53
53
|
for (const key of paramKeys) {
|
|
54
54
|
if (key in params && params[key] !== undefined) {
|
|
55
55
|
modelParams[key] = params[key];
|
|
@@ -776,6 +776,8 @@ const Chat = OpenAI.Chat;
|
|
|
776
776
|
const Completions = Chat.Completions;
|
|
777
777
|
const Responses = OpenAI.Responses;
|
|
778
778
|
const Embeddings = OpenAI.Embeddings;
|
|
779
|
+
const Audio = OpenAI.Audio;
|
|
780
|
+
const Transcriptions = OpenAI.Audio.Transcriptions;
|
|
779
781
|
class PostHogOpenAI extends OpenAI {
|
|
780
782
|
constructor(config) {
|
|
781
783
|
const {
|
|
@@ -787,6 +789,7 @@ class PostHogOpenAI extends OpenAI {
|
|
|
787
789
|
this.chat = new WrappedChat$1(this, this.phClient);
|
|
788
790
|
this.responses = new WrappedResponses$1(this, this.phClient);
|
|
789
791
|
this.embeddings = new WrappedEmbeddings$1(this, this.phClient);
|
|
792
|
+
this.audio = new WrappedAudio(this, this.phClient);
|
|
790
793
|
}
|
|
791
794
|
}
|
|
792
795
|
let WrappedChat$1 = class WrappedChat extends Chat {
|
|
@@ -1283,6 +1286,139 @@ let WrappedEmbeddings$1 = class WrappedEmbeddings extends Embeddings {
|
|
|
1283
1286
|
return wrappedPromise;
|
|
1284
1287
|
}
|
|
1285
1288
|
};
|
|
1289
|
+
class WrappedAudio extends Audio {
|
|
1290
|
+
constructor(parentClient, phClient) {
|
|
1291
|
+
super(parentClient);
|
|
1292
|
+
this.transcriptions = new WrappedTranscriptions(parentClient, phClient);
|
|
1293
|
+
}
|
|
1294
|
+
}
|
|
1295
|
+
class WrappedTranscriptions extends Transcriptions {
|
|
1296
|
+
constructor(client, phClient) {
|
|
1297
|
+
super(client);
|
|
1298
|
+
this.phClient = phClient;
|
|
1299
|
+
this.baseURL = client.baseURL;
|
|
1300
|
+
}
|
|
1301
|
+
// --- Implementation Signature
|
|
1302
|
+
create(body, options) {
|
|
1303
|
+
const {
|
|
1304
|
+
providerParams: openAIParams,
|
|
1305
|
+
posthogParams
|
|
1306
|
+
} = extractPosthogParams(body);
|
|
1307
|
+
const startTime = Date.now();
|
|
1308
|
+
const parentPromise = openAIParams.stream ? super.create(openAIParams, options) : super.create(openAIParams, options);
|
|
1309
|
+
if (openAIParams.stream) {
|
|
1310
|
+
return parentPromise.then(value => {
|
|
1311
|
+
if ('tee' in value && typeof value.tee === 'function') {
|
|
1312
|
+
const [stream1, stream2] = value.tee();
|
|
1313
|
+
(async () => {
|
|
1314
|
+
try {
|
|
1315
|
+
let finalContent = '';
|
|
1316
|
+
let usage = {
|
|
1317
|
+
inputTokens: 0,
|
|
1318
|
+
outputTokens: 0
|
|
1319
|
+
};
|
|
1320
|
+
const doneEvent = 'transcript.text.done';
|
|
1321
|
+
for await (const chunk of stream1) {
|
|
1322
|
+
if (chunk.type === doneEvent && 'text' in chunk && chunk.text && chunk.text.length > 0) {
|
|
1323
|
+
finalContent = chunk.text;
|
|
1324
|
+
}
|
|
1325
|
+
if ('usage' in chunk && chunk.usage) {
|
|
1326
|
+
usage = {
|
|
1327
|
+
inputTokens: chunk.usage?.type === 'tokens' ? chunk.usage.input_tokens ?? 0 : 0,
|
|
1328
|
+
outputTokens: chunk.usage?.type === 'tokens' ? chunk.usage.output_tokens ?? 0 : 0
|
|
1329
|
+
};
|
|
1330
|
+
}
|
|
1331
|
+
}
|
|
1332
|
+
const latency = (Date.now() - startTime) / 1000;
|
|
1333
|
+
const availableTools = extractAvailableToolCalls('openai', openAIParams);
|
|
1334
|
+
await sendEventToPosthog({
|
|
1335
|
+
client: this.phClient,
|
|
1336
|
+
...posthogParams,
|
|
1337
|
+
model: openAIParams.model,
|
|
1338
|
+
provider: 'openai',
|
|
1339
|
+
input: openAIParams.prompt,
|
|
1340
|
+
output: finalContent,
|
|
1341
|
+
latency,
|
|
1342
|
+
baseURL: this.baseURL,
|
|
1343
|
+
params: body,
|
|
1344
|
+
httpStatus: 200,
|
|
1345
|
+
usage,
|
|
1346
|
+
tools: availableTools
|
|
1347
|
+
});
|
|
1348
|
+
} catch (error) {
|
|
1349
|
+
const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
|
|
1350
|
+
await sendEventToPosthog({
|
|
1351
|
+
client: this.phClient,
|
|
1352
|
+
...posthogParams,
|
|
1353
|
+
model: openAIParams.model,
|
|
1354
|
+
provider: 'openai',
|
|
1355
|
+
input: openAIParams.prompt,
|
|
1356
|
+
output: [],
|
|
1357
|
+
latency: 0,
|
|
1358
|
+
baseURL: this.baseURL,
|
|
1359
|
+
params: body,
|
|
1360
|
+
httpStatus,
|
|
1361
|
+
usage: {
|
|
1362
|
+
inputTokens: 0,
|
|
1363
|
+
outputTokens: 0
|
|
1364
|
+
},
|
|
1365
|
+
isError: true,
|
|
1366
|
+
error: JSON.stringify(error)
|
|
1367
|
+
});
|
|
1368
|
+
}
|
|
1369
|
+
})();
|
|
1370
|
+
return stream2;
|
|
1371
|
+
}
|
|
1372
|
+
return value;
|
|
1373
|
+
});
|
|
1374
|
+
} else {
|
|
1375
|
+
const wrappedPromise = parentPromise.then(async result => {
|
|
1376
|
+
if ('text' in result) {
|
|
1377
|
+
const latency = (Date.now() - startTime) / 1000;
|
|
1378
|
+
await sendEventToPosthog({
|
|
1379
|
+
client: this.phClient,
|
|
1380
|
+
...posthogParams,
|
|
1381
|
+
model: String(openAIParams.model ?? ''),
|
|
1382
|
+
provider: 'openai',
|
|
1383
|
+
input: openAIParams.prompt,
|
|
1384
|
+
output: result.text,
|
|
1385
|
+
latency,
|
|
1386
|
+
baseURL: this.baseURL,
|
|
1387
|
+
params: body,
|
|
1388
|
+
httpStatus: 200,
|
|
1389
|
+
usage: {
|
|
1390
|
+
inputTokens: result.usage?.type === 'tokens' ? result.usage.input_tokens ?? 0 : 0,
|
|
1391
|
+
outputTokens: result.usage?.type === 'tokens' ? result.usage.output_tokens ?? 0 : 0
|
|
1392
|
+
}
|
|
1393
|
+
});
|
|
1394
|
+
return result;
|
|
1395
|
+
}
|
|
1396
|
+
}, async error => {
|
|
1397
|
+
const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
|
|
1398
|
+
await sendEventToPosthog({
|
|
1399
|
+
client: this.phClient,
|
|
1400
|
+
...posthogParams,
|
|
1401
|
+
model: String(openAIParams.model ?? ''),
|
|
1402
|
+
provider: 'openai',
|
|
1403
|
+
input: openAIParams.prompt,
|
|
1404
|
+
output: [],
|
|
1405
|
+
latency: 0,
|
|
1406
|
+
baseURL: this.baseURL,
|
|
1407
|
+
params: body,
|
|
1408
|
+
httpStatus,
|
|
1409
|
+
usage: {
|
|
1410
|
+
inputTokens: 0,
|
|
1411
|
+
outputTokens: 0
|
|
1412
|
+
},
|
|
1413
|
+
isError: true,
|
|
1414
|
+
error: JSON.stringify(error)
|
|
1415
|
+
});
|
|
1416
|
+
throw error;
|
|
1417
|
+
});
|
|
1418
|
+
return wrappedPromise;
|
|
1419
|
+
}
|
|
1420
|
+
}
|
|
1421
|
+
}
|
|
1286
1422
|
|
|
1287
1423
|
class PostHogAzureOpenAI extends AzureOpenAI {
|
|
1288
1424
|
constructor(config) {
|