@posthog/ai 7.0.0 → 7.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -6,7 +6,7 @@ var openai = require('openai');
 var buffer = require('buffer');
 var uuid = require('uuid');
 
-var version = "7.0.0";
+var version = "7.1.1";
 
 // Type guards for safer type checking
 
@@ -49,7 +49,7 @@ const getModelParams = params => {
     return {};
   }
   const modelParams = {};
-  const paramKeys = ['temperature', 'max_tokens', 'max_completion_tokens', 'top_p', 'frequency_penalty', 'presence_penalty', 'n', 'stop', 'stream', 'streaming'];
+  const paramKeys = ['temperature', 'max_tokens', 'max_completion_tokens', 'top_p', 'frequency_penalty', 'presence_penalty', 'n', 'stop', 'stream', 'streaming', 'language', 'response_format', 'timestamp_granularities'];
   for (const key of paramKeys) {
     if (key in params && params[key] !== undefined) {
       modelParams[key] = params[key];
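
The three keys added here ('language', 'response_format', 'timestamp_granularities') correspond to OpenAI's audio transcription request options, so transcription calls now get their model parameters captured the same way chat and embedding calls do. Illustrative sketch only, not part of the diff; the guard clause is assumed from the context lines above and the sample request values are hypothetical:

// Sketch of the capture behaviour this hunk changes; not copied verbatim from the package.
const paramKeys = ['temperature', 'max_tokens', 'max_completion_tokens', 'top_p',
  'frequency_penalty', 'presence_penalty', 'n', 'stop', 'stream', 'streaming',
  'language', 'response_format', 'timestamp_granularities'];

const getModelParams = params => {
  if (!params) {
    return {}; // assumed guard, matching the "return {};" context line above
  }
  const modelParams = {};
  for (const key of paramKeys) {
    if (key in params && params[key] !== undefined) {
      modelParams[key] = params[key];
    }
  }
  return modelParams;
};

// A transcription-style request (hypothetical values) now surfaces the new keys:
getModelParams({
  model: 'whisper-1',
  language: 'en',
  response_format: 'verbose_json',
  timestamp_granularities: ['word'],
  temperature: 0
});
// => { temperature: 0, language: 'en', response_format: 'verbose_json', timestamp_granularities: ['word'] }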
@@ -567,6 +567,8 @@ const Chat = openai.OpenAI.Chat;
 const Completions = Chat.Completions;
 const Responses = openai.OpenAI.Responses;
 const Embeddings = openai.OpenAI.Embeddings;
+const Audio = openai.OpenAI.Audio;
+const Transcriptions = openai.OpenAI.Audio.Transcriptions;
 class PostHogOpenAI extends openai.OpenAI {
   constructor(config) {
     const {
@@ -578,6 +580,7 @@ class PostHogOpenAI extends openai.OpenAI {
     this.chat = new WrappedChat(this, this.phClient);
     this.responses = new WrappedResponses(this, this.phClient);
     this.embeddings = new WrappedEmbeddings(this, this.phClient);
+    this.audio = new WrappedAudio(this, this.phClient);
   }
 }
 class WrappedChat extends Chat {
@@ -1096,12 +1099,162 @@ class WrappedEmbeddings extends Embeddings {
     return wrappedPromise;
   }
 }
+class WrappedAudio extends Audio {
+  constructor(parentClient, phClient) {
+    super(parentClient);
+    this.transcriptions = new WrappedTranscriptions(parentClient, phClient);
+  }
+}
+class WrappedTranscriptions extends Transcriptions {
+  constructor(client, phClient) {
+    super(client);
+    this.phClient = phClient;
+    this.baseURL = client.baseURL;
+  }
+
+  // --- Overload #1: Non-streaming
+
+  // --- Overload #2: Non-streaming
+
+  // --- Overload #3: Non-streaming
+
+  // --- Overload #4: Non-streaming
+
+  // --- Overload #5: Streaming
+
+  // --- Overload #6: Streaming
+
+  // --- Overload #7: Generic base
+
+  // --- Implementation Signature
+  create(body, options) {
+    const {
+      providerParams: openAIParams,
+      posthogParams
+    } = extractPosthogParams(body);
+    const startTime = Date.now();
+    const parentPromise = openAIParams.stream ? super.create(openAIParams, options) : super.create(openAIParams, options);
+    if (openAIParams.stream) {
+      return parentPromise.then(value => {
+        if ('tee' in value && typeof value.tee === 'function') {
+          const [stream1, stream2] = value.tee();
+          (async () => {
+            try {
+              let finalContent = '';
+              let usage = {
+                inputTokens: 0,
+                outputTokens: 0
+              };
+              const doneEvent = 'transcript.text.done';
+              for await (const chunk of stream1) {
+                if (chunk.type === doneEvent && 'text' in chunk && chunk.text && chunk.text.length > 0) {
+                  finalContent = chunk.text;
+                }
+                if ('usage' in chunk && chunk.usage) {
+                  usage = {
+                    inputTokens: chunk.usage?.type === 'tokens' ? chunk.usage.input_tokens ?? 0 : 0,
+                    outputTokens: chunk.usage?.type === 'tokens' ? chunk.usage.output_tokens ?? 0 : 0
+                  };
+                }
+              }
+              const latency = (Date.now() - startTime) / 1000;
+              const availableTools = extractAvailableToolCalls('openai', openAIParams);
+              await sendEventToPosthog({
+                client: this.phClient,
+                ...posthogParams,
+                model: openAIParams.model,
+                provider: 'openai',
+                input: openAIParams.prompt,
+                output: finalContent,
+                latency,
+                baseURL: this.baseURL,
+                params: body,
+                httpStatus: 200,
+                usage,
+                tools: availableTools
+              });
+            } catch (error) {
+              const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
+              await sendEventToPosthog({
+                client: this.phClient,
+                ...posthogParams,
+                model: openAIParams.model,
+                provider: 'openai',
+                input: openAIParams.prompt,
+                output: [],
+                latency: 0,
+                baseURL: this.baseURL,
+                params: body,
+                httpStatus,
+                usage: {
+                  inputTokens: 0,
+                  outputTokens: 0
+                },
+                isError: true,
+                error: JSON.stringify(error)
+              });
+            }
+          })();
+          return stream2;
+        }
+        return value;
+      });
+    } else {
+      const wrappedPromise = parentPromise.then(async result => {
+        if ('text' in result) {
+          const latency = (Date.now() - startTime) / 1000;
+          await sendEventToPosthog({
+            client: this.phClient,
+            ...posthogParams,
+            model: String(openAIParams.model ?? ''),
+            provider: 'openai',
+            input: openAIParams.prompt,
+            output: result.text,
+            latency,
+            baseURL: this.baseURL,
+            params: body,
+            httpStatus: 200,
+            usage: {
+              inputTokens: result.usage?.type === 'tokens' ? result.usage.input_tokens ?? 0 : 0,
+              outputTokens: result.usage?.type === 'tokens' ? result.usage.output_tokens ?? 0 : 0
+            }
+          });
+          return result;
+        }
+      }, async error => {
+        const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
+        await sendEventToPosthog({
+          client: this.phClient,
+          ...posthogParams,
+          model: String(openAIParams.model ?? ''),
+          provider: 'openai',
+          input: openAIParams.prompt,
+          output: [],
+          latency: 0,
+          baseURL: this.baseURL,
+          params: body,
+          httpStatus,
+          usage: {
+            inputTokens: 0,
+            outputTokens: 0
+          },
+          isError: true,
+          error: JSON.stringify(error)
+        });
+        throw error;
+      });
+      return wrappedPromise;
+    }
+  }
+}
 
 exports.OpenAI = PostHogOpenAI;
 exports.PostHogOpenAI = PostHogOpenAI;
+exports.WrappedAudio = WrappedAudio;
 exports.WrappedChat = WrappedChat;
 exports.WrappedCompletions = WrappedCompletions;
 exports.WrappedEmbeddings = WrappedEmbeddings;
 exports.WrappedResponses = WrappedResponses;
+exports.WrappedTranscriptions = WrappedTranscriptions;
 exports.default = PostHogOpenAI;
 //# sourceMappingURL=index.cjs.map
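
Net effect of the new hunks: the wrapped client now instruments audio transcriptions. WrappedAudio is attached as this.audio, and WrappedTranscriptions.create forwards to the OpenAI SDK while sendEventToPosthog records model, input, output text, latency, token usage, and errors, for both streaming responses (tee'd and scanned for 'transcript.text.done' events) and non-streaming results. A hedged usage sketch follows; it assumes this index.cjs is the entry reached by require('@posthog/ai'), that the constructor takes the OpenAI SDK config plus a posthog client, and that the posthogDistinctId / posthogTraceId pass-through fields match the other wrapped endpoints, none of which is visible in this diff:

const fs = require('fs');
const { PostHog } = require('posthog-node');
const { PostHogOpenAI } = require('@posthog/ai');

const posthog = new PostHog('<ph_project_api_key>', { host: 'https://us.i.posthog.com' });

// Assumed constructor shape: the OpenAI SDK config plus a PostHog client.
const client = new PostHogOpenAI({ apiKey: process.env.OPENAI_API_KEY, posthog });

async function transcribe() {
  // Non-streaming path: the wrapper captures result.text and token usage,
  // then returns the OpenAI result unchanged.
  const result = await client.audio.transcriptions.create({
    file: fs.createReadStream('meeting.mp3'),
    model: 'gpt-4o-transcribe',
    language: 'en',                 // captured via the extended paramKeys
    posthogDistinctId: 'user_123',  // assumed pass-through, stripped by extractPosthogParams
    posthogTraceId: 'trace_456',    // assumed pass-through
  });
  console.log(result.text);

  // Streaming path (stream: true) returns one half of a tee'd stream; the wrapper
  // consumes the other half and reports the final 'transcript.text.done' text.

  await posthog.shutdown();
}

transcribe().catch(console.error);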