@providerprotocol/ai 0.0.31 → 0.0.32

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -13,11 +13,12 @@ import {
  createStreamResult,
  messageStart,
  messageStop,
+ objectDelta,
  textDelta,
  toolCallDelta,
  toolExecutionEnd,
  toolExecutionStart
- } from "./chunk-73IIE3QT.js";
+ } from "./chunk-6S222DHN.js";
  import {
  AssistantMessage,
  Message,
@@ -114,10 +115,104 @@ function isBinaryBlock(block) {
  return block.type === ContentBlockType.Binary;
  }

+ // src/middleware/runner.ts
+ async function runHook(middlewares, hook, ctx, reverse = false) {
+ const ordered = reverse ? [...middlewares].reverse() : middlewares;
+ for (const mw of ordered) {
+ const fn = mw[hook];
+ if (fn) {
+ await fn.call(mw, ctx);
+ }
+ }
+ }
+ async function runErrorHook(middlewares, error, ctx) {
+ for (const mw of middlewares) {
+ if (mw.onError) {
+ try {
+ await mw.onError(error, ctx);
+ } catch (hookError) {
+ console.error(`[${mw.name}] Error in onError hook:`, hookError);
+ }
+ }
+ }
+ }
+ async function runToolHook(middlewares, hook, tool, data, ctx) {
+ for (const mw of middlewares) {
+ const fn = mw[hook];
+ if (fn) {
+ await fn.call(mw, tool, data, ctx);
+ }
+ }
+ }
+ function createStreamTransformer(middlewares, ctx) {
+ const streamMiddlewares = middlewares.filter((mw) => mw.onStreamEvent);
+ if (streamMiddlewares.length === 0) {
+ return (event) => event;
+ }
+ return (event) => {
+ let current = event;
+ for (const mw of streamMiddlewares) {
+ if (current === null) {
+ return null;
+ }
+ if (Array.isArray(current)) {
+ const results = [];
+ for (const e of current) {
+ const result = mw.onStreamEvent(e, ctx);
+ if (result === null) {
+ continue;
+ }
+ if (Array.isArray(result)) {
+ results.push(...result);
+ } else {
+ results.push(result);
+ }
+ }
+ current = results.length > 0 ? results : null;
+ } else {
+ current = mw.onStreamEvent(current, ctx);
+ }
+ }
+ return current;
+ };
+ }
+ async function runStreamEndHook(middlewares, ctx) {
+ for (const mw of middlewares) {
+ if (mw.onStreamEnd) {
+ await mw.onStreamEnd(ctx);
+ }
+ }
+ }
+ function createMiddlewareContext(modality, modelId, provider, streaming, request) {
+ return {
+ modality,
+ modelId,
+ provider,
+ streaming,
+ request,
+ response: void 0,
+ state: /* @__PURE__ */ new Map(),
+ startTime: Date.now(),
+ endTime: void 0
+ };
+ }
+ function createStreamContext(state) {
+ return { state };
+ }
+
  // src/core/llm.ts
  var DEFAULT_MAX_ITERATIONS = 10;
  function llm(options) {
- const { model: modelRef, config: explicitConfig = {}, params, system, tools, toolStrategy, structure } = options;
+ const {
+ model: modelRef,
+ config: explicitConfig = {},
+ params,
+ system,
+ tools,
+ toolStrategy,
+ structure,
+ middleware = []
+ } = options;
  const providerConfig = modelRef.providerConfig ?? {};
  const config = {
  ...providerConfig,
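
For orientation, a minimal sketch of how a caller might register the new `middleware` option introduced above (the `modelRef` value is a placeholder; the middleware factories are the ones added later in this diff):

import { llm, loggingMiddleware, parsedObjectMiddleware } from "@providerprotocol/ai";

// modelRef stands in for however a model reference is obtained in this package.
const chat = llm({
  model: modelRef,
  middleware: [
    loggingMiddleware({ level: "debug" }),
    parsedObjectMiddleware()
  ]
});
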
@@ -171,7 +266,8 @@ function llm(options) {
  toolStrategy,
  structure,
  history,
- messages
+ messages,
+ middleware
  );
  },
  stream(historyOrInput, ...inputs) {
@@ -193,7 +289,8 @@ function llm(options) {
  toolStrategy,
  structure,
  history,
- messages
+ messages,
+ middleware
  );
  }
  };
@@ -261,7 +358,7 @@ function inputToMessage(input) {
  }
  throw new Error("Invalid inference input");
  }
- async function executeGenerate(model, config, system, params, tools, toolStrategy, structure, history, newMessages) {
+ async function executeGenerate(model, config, system, params, tools, toolStrategy, structure, history, newMessages, middleware) {
  validateMediaCapabilities(
  [...history, ...newMessages],
  model.capabilities,
@@ -273,56 +370,92 @@ async function executeGenerate(model, config, system, params, tools, toolStrateg
  const usages = [];
  let cycles = 0;
  let structuredData;
- while (cycles < maxIterations + 1) {
- cycles++;
- const request = {
- messages: allMessages,
- system,
- params,
- tools,
- structure,
- config
- };
- const response = await model.complete(request);
- usages.push(response.usage);
- allMessages.push(response.message);
- if (response.data !== void 0) {
- structuredData = response.data;
- }
- if (response.message.hasToolCalls && tools && tools.length > 0) {
+ const initialRequest = {
+ messages: allMessages,
+ system,
+ params,
+ tools,
+ structure,
+ config
+ };
+ const ctx = createMiddlewareContext(
+ "llm",
+ model.modelId,
+ model.provider.name,
+ false,
+ initialRequest
+ );
+ try {
+ await runHook(middleware, "onStart", ctx);
+ await runHook(middleware, "onRequest", ctx);
+ while (cycles < maxIterations + 1) {
+ cycles++;
+ const request = {
+ messages: allMessages,
+ system,
+ params,
+ tools,
+ structure,
+ config
+ };
+ const response = await model.complete(request);
+ usages.push(response.usage);
+ allMessages.push(response.message);
  if (response.data !== void 0) {
- break;
+ structuredData = response.data;
  }
- if (cycles >= maxIterations) {
- await toolStrategy?.onMaxIterations?.(maxIterations);
- throw new UPPError(
- `Tool execution exceeded maximum iterations (${maxIterations})`,
- ErrorCode.InvalidRequest,
- model.provider.name,
- ModalityType.LLM
+ if (response.message.hasToolCalls && tools && tools.length > 0) {
+ if (response.data !== void 0) {
+ break;
+ }
+ if (cycles >= maxIterations) {
+ await toolStrategy?.onMaxIterations?.(maxIterations);
+ throw new UPPError(
+ `Tool execution exceeded maximum iterations (${maxIterations})`,
+ ErrorCode.InvalidRequest,
+ model.provider.name,
+ ModalityType.LLM
+ );
+ }
+ const results = await executeTools(
+ response.message,
+ tools,
+ toolStrategy,
+ toolExecutions,
+ void 0,
+ middleware,
+ ctx
  );
+ allMessages.push(new ToolResultMessage(results));
+ continue;
  }
- const results = await executeTools(
- response.message,
- tools,
- toolStrategy,
- toolExecutions
- );
- allMessages.push(new ToolResultMessage(results));
- continue;
+ break;
  }
- break;
+ const data = structure ? structuredData : void 0;
+ const turn = createTurn(
+ allMessages.slice(history.length),
+ toolExecutions,
+ aggregateUsage(usages),
+ cycles,
+ data
+ );
+ ctx.response = {
+ message: turn.response,
+ usage: turn.usage,
+ stopReason: "end_turn",
+ data
+ };
+ ctx.endTime = Date.now();
+ await runHook(middleware, "onResponse", ctx, true);
+ await runHook(middleware, "onEnd", ctx, true);
+ return turn;
+ } catch (error) {
+ const err = toError(error);
+ await runErrorHook(middleware, err, ctx);
+ throw err;
  }
- const data = structure ? structuredData : void 0;
- return createTurn(
- allMessages.slice(history.length),
- toolExecutions,
- aggregateUsage(usages),
- cycles,
- data
- );
  }
- function executeStream(model, config, system, params, tools, toolStrategy, structure, history, newMessages) {
+ function executeStream(model, config, system, params, tools, toolStrategy, structure, history, newMessages, middleware) {
  validateMediaCapabilities(
  [...history, ...newMessages],
  model.capabilities,
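
executeGenerate now drives the hooks in a fixed order: onStart and onRequest run in registration order, onResponse and onEnd run in reverse (runHook(..., true)), and onError fires on failure. A hand-rolled middleware sketch relying only on the context fields produced by createMiddlewareContext above (illustrative, not part of the package):

// Illustrative only; hook names and ctx fields are taken from the runner code in this diff.
const timingMiddleware = {
  name: "timing",
  async onStart(ctx) {
    ctx.state.set("timing:start", Date.now());
  },
  async onEnd(ctx) {
    const started = ctx.state.get("timing:start");
    console.log(`${ctx.provider}/${ctx.modelId} finished in ${Date.now() - started}ms`);
  },
  async onError(error, ctx) {
    console.error(`${ctx.provider} request failed: ${error.message}`);
  }
};
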
@@ -336,6 +469,23 @@ function executeStream(model, config, system, params, tools, toolStrategy, struc
  let generatorError = null;
  let structuredData;
  let generatorCompleted = false;
+ const initialRequest = {
+ messages: allMessages,
+ system,
+ params,
+ tools,
+ structure,
+ config
+ };
+ const ctx = createMiddlewareContext(
+ "llm",
+ model.modelId,
+ model.provider.name,
+ true,
+ initialRequest
+ );
+ const streamCtx = createStreamContext(ctx.state);
+ const transformer = createStreamTransformer(middleware, streamCtx);
  let resolveGenerator;
  let rejectGenerator;
  let generatorSettled = false;
@@ -373,6 +523,8 @@ function executeStream(model, config, system, params, tools, toolStrategy, struc
  async function* generateStream() {
  try {
  ensureNotAborted();
+ await runHook(middleware, "onStart", ctx);
+ await runHook(middleware, "onRequest", ctx);
  while (cycles < maxIterations + 1) {
  cycles++;
  ensureNotAborted();
@@ -388,7 +540,15 @@ function executeStream(model, config, system, params, tools, toolStrategy, struc
  const streamResult = model.stream(request);
  for await (const event of streamResult) {
  ensureNotAborted();
- yield event;
+ const transformed = transformer(event);
+ if (transformed === null) continue;
+ if (Array.isArray(transformed)) {
+ for (const e of transformed) {
+ yield e;
+ }
+ } else {
+ yield transformed;
+ }
  }
  const response = await streamResult.response;
  usages.push(response.usage);
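
The transformer applied above treats a null return from onStreamEvent as "drop this event" and an array return as "fan out into several events". A small filtering middleware sketch built on that contract (the event type value passed in is a placeholder):

// Illustrative only: suppress events of one type; null is interpreted by
// createStreamTransformer as "do not yield this event".
function dropEventType(unwantedType) {
  return {
    name: "drop-event-type",
    onStreamEvent(event) {
      return event.type === unwantedType ? null : event;
    }
  };
}
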
@@ -415,22 +575,34 @@ function executeStream(model, config, system, params, tools, toolStrategy, struc
  tools,
  toolStrategy,
  toolExecutions,
- (event) => toolEvents.push(event)
+ (event) => toolEvents.push(event),
+ middleware,
+ ctx
  );
  for (const event of toolEvents) {
  ensureNotAborted();
- yield event;
+ const transformed = transformer(event);
+ if (transformed === null) continue;
+ if (Array.isArray(transformed)) {
+ for (const e of transformed) {
+ yield e;
+ }
+ } else {
+ yield transformed;
+ }
  }
  allMessages.push(new ToolResultMessage(results));
  continue;
  }
  break;
  }
+ await runStreamEndHook(middleware, streamCtx);
  generatorCompleted = true;
  resolveGenerator();
  } catch (error) {
  const err = toError(error);
  generatorError = err;
+ await runErrorHook(middleware, err, ctx);
  rejectGenerator(err);
  throw err;
  } finally {
@@ -451,17 +623,27 @@ function executeStream(model, config, system, params, tools, toolStrategy, struc
  throw generatorError;
  }
  const data = structure ? structuredData : void 0;
- return createTurn(
+ const turn = createTurn(
  allMessages.slice(history.length),
  toolExecutions,
  aggregateUsage(usages),
  cycles,
  data
  );
+ ctx.response = {
+ message: turn.response,
+ usage: turn.usage,
+ stopReason: "end_turn",
+ data
+ };
+ ctx.endTime = Date.now();
+ await runHook(middleware, "onResponse", ctx, true);
+ await runHook(middleware, "onEnd", ctx, true);
+ return turn;
  };
  return createStreamResult(generateStream(), createTurnPromise, abortController);
  }
- async function executeTools(message, tools, toolStrategy, executions, onEvent) {
+ async function executeTools(message, tools, toolStrategy, executions, onEvent, middleware = [], ctx) {
  const toolCalls = message.toolCalls ?? [];
  const results = [];
  const toolMap = new Map(tools.map((t) => [t.name, t]));
@@ -498,6 +680,9 @@ async function executeTools(message, tools, toolStrategy, executions, onEvent) {
  }
  try {
  await toolStrategy?.onToolCall?.(tool, effectiveParams);
+ if (ctx) {
+ await runToolHook(middleware, "onToolCall", tool, effectiveParams, ctx);
+ }
  } catch (error) {
  return endWithError(toError(error).message);
  }
@@ -557,6 +742,9 @@ async function executeTools(message, tools, toolStrategy, executions, onEvent) {
  result = afterResult.result;
  }
  }
+ if (ctx) {
+ await runToolHook(middleware, "onToolResult", tool, result, ctx);
+ }
  const execution = {
  toolName,
  toolCallId: call.toolCallId,
@@ -640,7 +828,7 @@ function validateMediaCapabilities(messages, capabilities, providerName) {

  // src/core/embedding.ts
  function embedding(options) {
- const { model: modelRef, config: explicitConfig = {}, params } = options;
+ const { model: modelRef, config: explicitConfig = {}, params, middleware = [] } = options;
  const providerConfig = modelRef.providerConfig ?? {};
  const config = {
  ...providerConfig,
@@ -664,9 +852,9 @@ function embedding(options) {
  function embed(input, embedOptions) {
  const inputs = Array.isArray(input) ? input : [input];
  if (embedOptions?.chunked) {
- return createChunkedStream(boundModel, inputs, params, config, embedOptions);
+ return createChunkedStream(boundModel, inputs, params, config, embedOptions, middleware);
  }
- return executeEmbed(boundModel, inputs, params, config, embedOptions?.signal);
+ return executeEmbed(boundModel, inputs, params, config, embedOptions?.signal, embedOptions?.inputType, middleware);
  }
  return {
  model: boundModel,
@@ -674,14 +862,36 @@ function embedding(options) {
  embed
  };
  }
- async function executeEmbed(model, inputs, params, config, signal) {
- const response = await model.embed({
+ async function executeEmbed(model, inputs, params, config, signal, inputType, middleware = []) {
+ const request = {
  inputs,
  params,
  config: config ?? {},
- signal
- });
- return normalizeResponse(response, model.provider.name);
+ signal,
+ inputType
+ };
+ const ctx = createMiddlewareContext(
+ "embedding",
+ model.modelId,
+ model.provider.name,
+ false,
+ request
+ );
+ try {
+ await runHook(middleware, "onStart", ctx);
+ await runHook(middleware, "onRequest", ctx);
+ const response = await model.embed(request);
+ const result = normalizeResponse(response, model.provider.name);
+ ctx.response = response;
+ ctx.endTime = Date.now();
+ await runHook(middleware, "onResponse", ctx, true);
+ await runHook(middleware, "onEnd", ctx, true);
+ return result;
+ } catch (error) {
+ const err = toError(error);
+ await runErrorHook(middleware, err, ctx);
+ throw err;
+ }
  }
  function normalizeResponse(response, providerName) {
  return {
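
executeEmbed now threads both middleware and an optional inputType through to model.embed. A usage sketch, assuming embedding and EmbeddingInputType are exported from the package entry point (only llm and the middleware factories are visible in the export hunk at the end of this diff; modelRef is a placeholder):

// Illustrative only; option names follow the embed()/executeEmbed signatures above.
const embedder = embedding({
  model: modelRef,
  middleware: [loggingMiddleware({ level: "info" })]
});
const result = await embedder.embed(["hello world"], {
  inputType: EmbeddingInputType.Query
});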
@@ -726,7 +936,7 @@ function decodeBase64(b64, providerName) {
  );
  }
  }
- function createChunkedStream(model, inputs, params, config, options) {
+ function createChunkedStream(model, inputs, params, config, options, middleware = []) {
  const abortController = new AbortController();
  const batchSize = options.batchSize ?? model.maxBatchSize;
  const concurrency = options.concurrency ?? 1;
@@ -767,6 +977,20 @@ function createChunkedStream(model, inputs, params, config, options) {
  options.signal.removeEventListener("abort", onExternalAbort);
  }
  };
+ const request = {
+ inputs,
+ params,
+ config: config ?? {},
+ signal: abortController.signal,
+ inputType: options.inputType
+ };
+ const ctx = createMiddlewareContext(
+ "embedding",
+ model.modelId,
+ model.provider.name,
+ true,
+ request
+ );
  async function* generate() {
  const total = inputs.length;
  const allEmbeddings = [];
@@ -776,6 +1000,8 @@ function createChunkedStream(model, inputs, params, config, options) {
  batches.push({ inputs: inputs.slice(i, i + batchSize), startIndex: i });
  }
  try {
+ await runHook(middleware, "onStart", ctx);
+ await runHook(middleware, "onRequest", ctx);
  for (let i = 0; i < batches.length; i += concurrency) {
  if (abortController.signal.aborted || options.signal?.aborted) {
  throw cancelError();
@@ -821,12 +1047,18 @@ function createChunkedStream(model, inputs, params, config, options) {
  const orderedEmbeddings = [...allEmbeddings].sort(
  (left, right) => left.index - right.index
  );
- resolveResult({
+ const result = {
  embeddings: orderedEmbeddings,
  usage: { totalTokens }
- });
+ };
+ ctx.response = { embeddings: orderedEmbeddings.map((e) => ({ vector: e.vector, index: e.index })), usage: { totalTokens } };
+ ctx.endTime = Date.now();
+ await runHook(middleware, "onResponse", ctx, true);
+ await runHook(middleware, "onEnd", ctx, true);
+ resolveResult(result);
  } catch (error) {
  const err = toError(error);
+ await runErrorHook(middleware, err, ctx);
  rejectResult(err);
  throw err;
  } finally {
@@ -843,7 +1075,7 @@ function createChunkedStream(model, inputs, params, config, options) {

  // src/core/image.ts
  function image(options) {
- const { model: modelRef, config: explicitConfig = {}, params } = options;
+ const { model: modelRef, config: explicitConfig = {}, params, middleware = [] } = options;
  const providerConfig = modelRef.providerConfig ?? {};
  const config = {
  ...providerConfig,
@@ -878,47 +1110,88 @@ function image(options) {
  capabilities,
  async generate(input, options2) {
  const prompt = normalizeInput(input);
+ const request = {
+ prompt,
+ params,
+ config,
+ signal: options2?.signal
+ };
+ const ctx = createMiddlewareContext(
+ "image",
+ boundModel.modelId,
+ provider.name,
+ false,
+ request
+ );
  try {
- const response = await boundModel.generate({
- prompt,
- params,
- config,
- signal: options2?.signal
- });
- return {
+ await runHook(middleware, "onStart", ctx);
+ await runHook(middleware, "onRequest", ctx);
+ const response = await boundModel.generate(request);
+ const result = {
  images: response.images,
  metadata: response.metadata,
  usage: response.usage
  };
+ ctx.response = response;
+ ctx.endTime = Date.now();
+ await runHook(middleware, "onResponse", ctx, true);
+ await runHook(middleware, "onEnd", ctx, true);
+ return result;
  } catch (error) {
+ const err = toError(error);
+ await runErrorHook(middleware, err, ctx);
  throw normalizeImageError(error);
  }
  }
  };
  if (capabilities.streaming && boundModel.stream) {
- const stream = boundModel.stream;
+ const streamFn = boundModel.stream;
  instance.stream = function(input) {
  const prompt = normalizeInput(input);
  const abortController = new AbortController();
- const providerStream = stream({
+ const request = {
  prompt,
  params,
  config,
  signal: abortController.signal
- });
- const resultPromise = providerStream.response.then((response) => ({
- images: response.images,
- metadata: response.metadata,
- usage: response.usage
- })).catch((error) => {
- throw normalizeImageError(error);
- });
+ };
+ const ctx = createMiddlewareContext(
+ "image",
+ boundModel.modelId,
+ provider.name,
+ true,
+ request
+ );
+ const providerStream = streamFn(request);
+ const resultPromise = (async () => {
+ try {
+ const response = await providerStream.response;
+ const result = {
+ images: response.images,
+ metadata: response.metadata,
+ usage: response.usage
+ };
+ ctx.response = response;
+ ctx.endTime = Date.now();
+ await runHook(middleware, "onResponse", ctx, true);
+ await runHook(middleware, "onEnd", ctx, true);
+ return result;
+ } catch (error) {
+ const err = toError(error);
+ await runErrorHook(middleware, err, ctx);
+ throw normalizeImageError(error);
+ }
+ })();
  async function* wrappedStream() {
  try {
+ await runHook(middleware, "onStart", ctx);
+ await runHook(middleware, "onRequest", ctx);
  for await (const event of providerStream) {
  yield event;
  }
  } catch (error) {
+ const err = toError(error);
+ await runErrorHook(middleware, err, ctx);
  throw normalizeImageError(error);
  }
  }
@@ -1818,6 +2091,274 @@ var EmbeddingInputType = {
  Query: "query"
  };

+ // src/utils/partial-json.ts
+ function parsePartialJson(json) {
+ const trimmed = json.trim();
+ if (trimmed === "") {
+ return { value: void 0, isComplete: false };
+ }
+ try {
+ const value = JSON.parse(trimmed);
+ return { value, isComplete: true };
+ } catch {
+ }
+ try {
+ const repaired = repairJson(trimmed);
+ const value = JSON.parse(repaired);
+ return { value, isComplete: false };
+ } catch {
+ return { value: void 0, isComplete: false };
+ }
+ }
+ function repairJson(json) {
+ let result = json;
+ const stack = [];
+ let inString = false;
+ let escape = false;
+ for (let i = 0; i < result.length; i++) {
+ const char = result[i];
+ if (escape) {
+ escape = false;
+ continue;
+ }
+ if (char === "\\" && inString) {
+ escape = true;
+ continue;
+ }
+ if (char === '"' && !escape) {
+ inString = !inString;
+ }
+ if (!inString) {
+ if (char === "{") {
+ stack.push("{");
+ } else if (char === "[") {
+ stack.push("[");
+ } else if (char === "}") {
+ if (stack.length > 0 && stack[stack.length - 1] === "{") {
+ stack.pop();
+ }
+ } else if (char === "]") {
+ if (stack.length > 0 && stack[stack.length - 1] === "[") {
+ stack.pop();
+ }
+ }
+ }
+ }
+ if (inString) {
+ const unicodeMatch = result.match(/\\u[0-9a-fA-F]{0,3}$/);
+ if (unicodeMatch) {
+ result = result.slice(0, -unicodeMatch[0].length);
+ }
+ if (result.endsWith("\\")) {
+ result = result.slice(0, -1);
+ }
+ result += '"';
+ inString = false;
+ }
+ result = cleanupTrailingIncomplete(result);
+ while (stack.length > 0) {
+ const open = stack.pop();
+ if (open === "{") {
+ result += "}";
+ } else {
+ result += "]";
+ }
+ }
+ return result;
+ }
+ function cleanupTrailingIncomplete(json) {
+ let result = json.trim();
+ let changed = true;
+ while (changed) {
+ changed = false;
+ const trimmed = result.trim();
+ if (trimmed.endsWith(",")) {
+ result = trimmed.slice(0, -1);
+ changed = true;
+ continue;
+ }
+ if (trimmed.endsWith(":")) {
+ const colonIndex = trimmed.length - 1;
+ let keyStart = colonIndex - 1;
+ while (keyStart >= 0 && /\s/.test(trimmed[keyStart])) {
+ keyStart--;
+ }
+ if (keyStart >= 0 && trimmed[keyStart] === '"') {
+ keyStart--;
+ while (keyStart >= 0 && trimmed[keyStart] !== '"') {
+ keyStart--;
+ }
+ keyStart--;
+ while (keyStart >= 0 && /\s/.test(trimmed[keyStart])) {
+ keyStart--;
+ }
+ if (keyStart >= 0 && trimmed[keyStart] === ",") {
+ result = trimmed.slice(0, keyStart);
+ } else {
+ result = trimmed.slice(0, keyStart + 1);
+ }
+ changed = true;
+ continue;
+ }
+ }
+ const literalMatch = trimmed.match(/(,?\s*)(t(?:r(?:ue?)?)?|f(?:a(?:l(?:se?)?)?)?|n(?:u(?:ll?)?)?)$/i);
+ if (literalMatch && literalMatch[2]) {
+ const partial = literalMatch[2].toLowerCase();
+ const literals = ["true", "false", "null"];
+ const match = literals.find((lit) => lit.startsWith(partial) && partial !== lit);
+ if (match) {
+ result = trimmed.slice(0, -literalMatch[2].length) + match;
+ changed = true;
+ continue;
+ }
+ }
+ const numberMatch = trimmed.match(/(,?\s*)(-?(?:\d+\.|\d*\.?\d+[eE][+-]?|\d+[eE]|-))$/);
+ if (numberMatch && numberMatch[2]) {
+ const partial = numberMatch[2];
+ if (/[.eE+-]$/.test(partial)) {
+ if (partial === "-") {
+ result = trimmed.slice(0, -(numberMatch[0]?.length ?? 0)).trimEnd();
+ } else {
+ result = trimmed.slice(0, -1);
+ }
+ changed = true;
+ continue;
+ }
+ }
+ }
+ return result;
+ }
+
+ // src/middleware/parsed-object.ts
+ var ACCUMULATED_TEXT_KEY = "parsedObject:text";
+ var ACCUMULATED_ARGS_KEY = "parsedObject:args";
+ function getAccumulatedText(state) {
+ let map = state.get(ACCUMULATED_TEXT_KEY);
+ if (!map) {
+ map = /* @__PURE__ */ new Map();
+ state.set(ACCUMULATED_TEXT_KEY, map);
+ }
+ return map;
+ }
+ function getAccumulatedArgs(state) {
+ let map = state.get(ACCUMULATED_ARGS_KEY);
+ if (!map) {
+ map = /* @__PURE__ */ new Map();
+ state.set(ACCUMULATED_ARGS_KEY, map);
+ }
+ return map;
+ }
+ function parsedObjectMiddleware(options = {}) {
+ const { parseObjects = true, parseToolCalls = true } = options;
+ return {
+ name: "parsed-object",
+ onStreamEvent(event, ctx) {
+ if (parseObjects && event.type === StreamEventType.ObjectDelta) {
+ const accumulatedText = getAccumulatedText(ctx.state);
+ const current = accumulatedText.get(event.index) ?? "";
+ const newText = current + (event.delta.text ?? "");
+ accumulatedText.set(event.index, newText);
+ const parseResult = parsePartialJson(newText);
+ const parsedEvent = {
+ ...event,
+ delta: {
+ ...event.delta,
+ parsed: parseResult.value
+ }
+ };
+ return parsedEvent;
+ }
+ if (parseToolCalls && event.type === StreamEventType.ToolCallDelta) {
+ const accumulatedArgs = getAccumulatedArgs(ctx.state);
+ const current = accumulatedArgs.get(event.index) ?? "";
+ const newJson = current + (event.delta.argumentsJson ?? "");
+ accumulatedArgs.set(event.index, newJson);
+ const parseResult = parsePartialJson(newJson);
+ const parsedEvent = {
+ ...event,
+ delta: {
+ ...event.delta,
+ parsed: parseResult.value
+ }
+ };
+ return parsedEvent;
+ }
+ return event;
+ },
+ onStreamEnd(ctx) {
+ ctx.state.delete(ACCUMULATED_TEXT_KEY);
+ ctx.state.delete(ACCUMULATED_ARGS_KEY);
+ }
+ };
+ }
+
+ // src/middleware/logging.ts
+ var LOG_LEVELS = {
+ debug: 0,
+ info: 1,
+ warn: 2,
+ error: 3
+ };
+ function loggingMiddleware(options = {}) {
+ const {
+ level = "info",
+ logStreamEvents = false,
+ logToolCalls = true,
+ logger,
+ prefix = "[PP]"
+ } = options;
+ const minLevel = LOG_LEVELS[level];
+ const log = (logLevel, message, data) => {
+ if (LOG_LEVELS[logLevel] < minLevel) {
+ return;
+ }
+ const fullMessage = `${prefix} ${message}`;
+ if (logger) {
+ logger(logLevel, fullMessage, data);
+ } else {
+ const consoleMethod = logLevel === "error" ? console.error : logLevel === "warn" ? console.warn : console.log;
+ if (data) {
+ consoleMethod(fullMessage, data);
+ } else {
+ consoleMethod(fullMessage);
+ }
+ }
+ };
+ return {
+ name: "logging",
+ onStart(ctx) {
+ const streamingLabel = ctx.streaming ? "(streaming)" : "";
+ log("info", `[${ctx.provider}] Starting ${ctx.modality} request ${streamingLabel}`.trim());
+ log("debug", `[${ctx.provider}] Model: ${ctx.modelId}`);
+ },
+ onEnd(ctx) {
+ const duration = ctx.endTime ? ctx.endTime - ctx.startTime : 0;
+ log("info", `[${ctx.provider}] Completed in ${duration}ms`);
+ },
+ onError(error, ctx) {
+ const duration = Date.now() - ctx.startTime;
+ log("error", `[${ctx.provider}] Error after ${duration}ms: ${error.message}`);
+ },
+ onStreamEvent(event, ctx) {
+ if (logStreamEvents) {
+ log("debug", `Stream event: ${event.type}`, { index: event.index });
+ }
+ return event;
+ },
+ onToolCall(tool, params, ctx) {
+ if (logToolCalls) {
+ log("info", `[${ctx.provider}] Tool call: ${tool.name}`);
+ log("debug", `[${ctx.provider}] Tool params:`, { params });
+ }
+ },
+ onToolResult(tool, result, ctx) {
+ if (logToolCalls) {
+ log("debug", `[${ctx.provider}] Tool result: ${tool.name}`, { result });
+ }
+ }
+ };
+ }
+
  // src/index.ts
  var ai = {
  /** LLM instance factory */
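
To illustrate what repairJson buys parsePartialJson, a few hand-traced inputs (illustrative values, not fixtures shipped with the package):

parsePartialJson('{"city": "Par');   // { value: { city: "Par" }, isComplete: false }  (string closed, brace closed)
parsePartialJson('{"a": 1, "b":');   // { value: { a: 1 }, isComplete: false }         (dangling key removed)
parsePartialJson('{"done": true}');  // { value: { done: true }, isComplete: true }    (already valid JSON)
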
@@ -1875,11 +2416,16 @@ export {
  isUserMessage,
  isVideoBlock,
  llm,
+ loggingMiddleware,
  messageStart,
  messageStop,
+ objectDelta,
+ parsedObjectMiddleware,
  reasoning,
  text,
  textDelta,
- toolCallDelta
+ toolCallDelta,
+ toolExecutionEnd,
+ toolExecutionStart
  };
  //# sourceMappingURL=index.js.map
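
Taken together, a consumer-side sketch of the new streaming surface (assuming the stream result is async-iterable as the generator code above suggests, and that delta.parsed is the field attached by parsedObjectMiddleware; modelRef remains a placeholder):

import { llm, loggingMiddleware, parsedObjectMiddleware } from "@providerprotocol/ai";

const chat = llm({
  model: modelRef,
  middleware: [loggingMiddleware(), parsedObjectMiddleware()]
});

for await (const event of chat.stream("Return a JSON summary of today")) {
  // parsedObjectMiddleware attaches a best-effort parsed value to object and tool-call deltas.
  if (event.delta?.parsed !== undefined) {
    console.log(event.delta.parsed);
  }
}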