@reverbia/sdk 1.0.0-next.20251219154503 → 1.0.0-next.20251219170400

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1795,7 +1795,7 @@ var import_react2 = require("react");
 var import_watermelondb = require("@nozbe/watermelondb");
 var import_migrations = require("@nozbe/watermelondb/Schema/migrations");
 var chatStorageSchema = (0, import_watermelondb.appSchema)({
-  version: 2,
+  version: 3,
   tables: [
     (0, import_watermelondb.tableSchema)({
       name: "history",
@@ -1813,7 +1813,8 @@ var chatStorageSchema = (0, import_watermelondb.appSchema)({
         { name: "usage", type: "string", isOptional: true },
         { name: "sources", type: "string", isOptional: true },
         { name: "response_duration", type: "number", isOptional: true },
-        { name: "was_stopped", type: "boolean", isOptional: true }
+        { name: "was_stopped", type: "boolean", isOptional: true },
+        { name: "error", type: "string", isOptional: true }
       ]
     }),
     (0, import_watermelondb.tableSchema)({
@@ -1838,6 +1839,15 @@ var chatStorageMigrations = (0, import_migrations.schemaMigrations)({
          columns: [{ name: "was_stopped", type: "boolean", isOptional: true }]
        })
      ]
+    },
+    {
+      toVersion: 3,
+      steps: [
+        (0, import_migrations.addColumns)({
+          table: "history",
+          columns: [{ name: "error", type: "string", isOptional: true }]
+        })
+      ]
     }
   ]
 });
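For readers who want to mirror this change in their own WatermelonDB setup, here is a minimal sketch of what the bumped chat-storage schema and its v2 → v3 migration likely look like at the source level. It is reconstructed from the bundled output above, not taken from the SDK's actual source file, and the unchanged columns and tables are elided.

```ts
// Sketch only: reconstructed from the bundled output above.
import { appSchema, tableSchema } from "@nozbe/watermelondb";
import { schemaMigrations, addColumns } from "@nozbe/watermelondb/Schema/migrations";

export const chatStorageSchema = appSchema({
  version: 3, // bumped from 2
  tables: [
    tableSchema({
      name: "history",
      columns: [
        // ...existing columns elided...
        { name: "was_stopped", type: "boolean", isOptional: true },
        { name: "error", type: "string", isOptional: true }, // new in v3
      ],
    }),
    // ...other tables elided...
  ],
});

export const chatStorageMigrations = schemaMigrations({
  migrations: [
    // ...v1 -> v2 step elided...
    {
      toVersion: 3,
      steps: [
        addColumns({
          table: "history",
          columns: [{ name: "error", type: "string", isOptional: true }],
        }),
      ],
    },
  ],
});
```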
@@ -1893,6 +1903,9 @@ __decorateClass([
 __decorateClass([
   (0, import_decorators.field)("was_stopped")
 ], Message.prototype, "wasStopped", 2);
+__decorateClass([
+  (0, import_decorators.text)("error")
+], Message.prototype, "error", 2);
 var Conversation = class extends import_watermelondb2.Model {
 };
 Conversation.table = "conversations";
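The `__decorateClass` calls are the compiled form of WatermelonDB's property decorators. At the source level the new field on the `Message` model presumably looks roughly like the sketch below; only the two relevant fields are shown and the rest are elided.

```ts
// Sketch of the Message model, reconstructed from the compiled decorator calls above.
import { Model } from "@nozbe/watermelondb";
import { field, text } from "@nozbe/watermelondb/decorators";

export class Message extends Model {
  static table = "history"; // table name per the schema above

  // ...other decorated fields elided...
  @field("was_stopped") wasStopped?: boolean;
  @text("error") error?: string; // new: persisted error for failed messages
}
```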
@@ -1947,7 +1960,8 @@ function messageToStored(message) {
     usage: message.usage,
     sources: message.sources,
     responseDuration: message.responseDuration,
-    wasStopped: message.wasStopped
+    wasStopped: message.wasStopped,
+    error: message.error
   };
 }
 function conversationToStored(conversation) {
@@ -2037,6 +2051,7 @@ async function createMessageOp(ctx, opts) {
       if (opts.vector) msg._setRaw("vector", JSON.stringify(opts.vector));
       if (opts.embeddingModel) msg._setRaw("embedding_model", opts.embeddingModel);
       if (opts.wasStopped) msg._setRaw("was_stopped", opts.wasStopped);
+      if (opts.error) msg._setRaw("error", opts.error);
     });
   });
   return messageToStored(created);
@@ -2056,6 +2071,20 @@ async function updateMessageEmbeddingOp(ctx, uniqueId, vector, embeddingModel) {
   });
   return messageToStored(message);
 }
+async function updateMessageErrorOp(ctx, uniqueId, error) {
+  let message;
+  try {
+    message = await ctx.messagesCollection.find(uniqueId);
+  } catch {
+    return null;
+  }
+  await ctx.database.write(async () => {
+    await message.update((msg) => {
+      msg._setRaw("error", error);
+    });
+  });
+  return messageToStored(message);
+}
 function cosineSimilarity(a, b) {
   if (a.length !== b.length) return 0;
   let dotProduct = 0;
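`updateMessageErrorOp` follows the SDK's existing pattern for message ops: look the record up by id, return `null` if it no longer exists, and apply the change inside a `database.write` block. A typed sketch of the same operation, with an assumed shape for the internal storage context and reusing the `Message` model sketched earlier:

```ts
// Typed sketch of the operation above. StorageContext is an assumption; only the
// two members the function uses are modeled.
import { Database, Collection } from "@nozbe/watermelondb";

interface StorageContext {
  database: Database;
  messagesCollection: Collection<Message>;
}

async function updateMessageErrorOp(
  ctx: StorageContext,
  uniqueId: string,
  error: string
): Promise<Message | null> {
  let message: Message;
  try {
    // Collection.find rejects when the id is unknown, hence the try/catch.
    message = await ctx.messagesCollection.find(uniqueId);
  } catch {
    return null;
  }
  await ctx.database.write(async () => {
    await message.update((msg) => {
      msg._setRaw("error", error); // raw column write, as in the bundled code
    });
  });
  // The real implementation maps the model through messageToStored() before returning.
  return message;
}
```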
@@ -2273,7 +2302,8 @@ function useChatStorage(options) {
     let messagesToSend = [];
     if (includeHistory && !providedMessages) {
       const storedMessages = await getMessages(convId);
-      const limitedMessages = storedMessages.slice(-maxHistoryMessages);
+      const validMessages = storedMessages.filter((msg) => !msg.error);
+      const limitedMessages = validMessages.slice(-maxHistoryMessages);
       messagesToSend = limitedMessages.map(storedToLlmapiMessage);
     } else if (providedMessages) {
       messagesToSend = providedMessages;
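Note the ordering: messages that carry an `error` are filtered out first, and `maxHistoryMessages` is applied to what remains, so a failed exchange does not consume history slots. A minimal sketch of that selection logic in isolation:

```ts
// Standalone sketch of the history-selection change above.
interface HistoryEntry {
  role: string;
  content: string;
  error?: string; // set when the message failed
}

function selectHistory(stored: HistoryEntry[], maxHistoryMessages: number): HistoryEntry[] {
  // Drop failed messages first, then keep the most recent maxHistoryMessages.
  const valid = stored.filter((msg) => !msg.error);
  return valid.slice(-maxHistoryMessages);
}

// Example: with maxHistoryMessages = 2, the failed entry is skipped entirely.
const history = selectHistory(
  [
    { role: "user", content: "hi" },
    { role: "user", content: "broken", error: "No response data received" },
    { role: "assistant", content: "hello" },
  ],
  2
);
// history -> [{ role: "user", content: "hi" }, { role: "assistant", content: "hello" }]
```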
@@ -2372,14 +2402,37 @@ function useChatStorage(options) {
           userMessage: storedUserMessage,
           assistantMessage: storedAssistantMessage2
         };
-      } catch (err) {
+      } catch {
+        return {
+          data: null,
+          error: "Request aborted",
+          toolExecution: abortedResult.toolExecution,
+          userMessage: storedUserMessage
+        };
       }
     }
+    const errorMessage = result.error || "No response data received";
+    try {
+      await updateMessageErrorOp(
+        storageCtx,
+        storedUserMessage.uniqueId,
+        errorMessage
+      );
+      await createMessageOp(storageCtx, {
+        conversationId: convId,
+        role: "assistant",
+        content: "",
+        model: model || "",
+        responseDuration,
+        error: errorMessage
+      });
+    } catch {
+    }
     return {
       data: null,
-      error: result.error || "No response data received",
+      error: errorMessage,
       toolExecution: result.toolExecution,
-      userMessage: storedUserMessage
+      userMessage: { ...storedUserMessage, error: errorMessage }
     };
   }
   const responseData = result.data;
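When no usable response comes back, the new code persists the failure twice: the user message is annotated via `updateMessageErrorOp`, and an empty assistant message carrying the same `error` is stored, before that error string is returned to the caller. Purely as an illustration (the hook's public send function is not named in this diff, and `SendFailure`/`describeFailure` are hypothetical names), a caller could surface the result roughly like this:

```ts
// Illustration only: the shape mirrors the return object above.
interface SendFailure {
  data: null;
  error: string;
  toolExecution?: unknown;
  userMessage?: { uniqueId: string; error?: string };
}

function describeFailure(result: SendFailure): string {
  // The user message comes back with the persisted error attached, so a UI can
  // mark that specific bubble as failed and offer a retry.
  const target = result.userMessage ? `message ${result.userMessage.uniqueId}` : "request";
  return `${target} failed: ${result.error}`;
}

console.log(
  describeFailure({
    data: null,
    error: "No response data received",
    userMessage: { uniqueId: "msg_123", error: "No response data received" }, // placeholder id
  })
);
// -> "message msg_123 failed: No response data received"
```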
@@ -2517,7 +2570,7 @@ __decorateClass([
 ], ModelPreference.prototype, "models", 2);
 
 // src/lib/db/schema.ts
-var SDK_SCHEMA_VERSION = 4;
+var SDK_SCHEMA_VERSION = 5;
 var sdkSchema = (0, import_watermelondb6.appSchema)({
   version: SDK_SCHEMA_VERSION,
   tables: [
@@ -2538,7 +2591,8 @@ var sdkSchema = (0, import_watermelondb6.appSchema)({
         { name: "usage", type: "string", isOptional: true },
         { name: "sources", type: "string", isOptional: true },
         { name: "response_duration", type: "number", isOptional: true },
-        { name: "was_stopped", type: "boolean", isOptional: true }
+        { name: "was_stopped", type: "boolean", isOptional: true },
+        { name: "error", type: "string", isOptional: true }
       ]
     }),
     (0, import_watermelondb6.tableSchema)({
@@ -2605,6 +2659,16 @@ var sdkMigrations = (0, import_migrations2.schemaMigrations)({
         ]
       })
      ]
+    },
+    // v4 -> v5: Added error column to history for error persistence
+    {
+      toVersion: 5,
+      steps: [
+        (0, import_migrations2.addColumns)({
+          table: "history",
+          columns: [{ name: "error", type: "string", isOptional: true }]
+        })
+      ]
     }
   ]
 });
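Both the standalone chat-storage schema and the combined SDK schema get the same column, so `SDK_SCHEMA_VERSION` moves from 4 to 5 in lockstep with the new migration entry. An app that builds its own WatermelonDB database from these exports would wire them up along these lines (a sketch; the adapter options are standard WatermelonDB ones, and `dbName` plus the model list are assumptions, not something this diff prescribes):

```ts
// Sketch: wiring the bumped schema and migrations into a WatermelonDB database.
import { Database } from "@nozbe/watermelondb";
import SQLiteAdapter from "@nozbe/watermelondb/adapters/sqlite";

const adapter = new SQLiteAdapter({
  dbName: "reverbia-sdk", // hypothetical database name
  schema: sdkSchema, // now version 5
  migrations: sdkMigrations, // includes the v4 -> v5 addColumns step
});

const database = new Database({
  adapter,
  modelClasses: [Message, Conversation /* , ...other SDK models */],
});
```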
@@ -173,6 +173,10 @@ type LlmapiMessage = {
      */
     content?: Array<LlmapiMessageContentPart>;
     role?: LlmapiRole;
+    /**
+     * ToolCalls contains tool/function calls made by the assistant (only for assistant role)
+     */
+    tool_calls?: Array<LlmapiToolCall>;
 };
 /**
  * ImageURL is used when Type=image_url
@@ -347,6 +351,30 @@ type LlmapiSearchUsage = {
      */
     cost_micro_usd?: number;
 };
+type LlmapiToolCall = {
+    function?: LlmapiToolCallFunction;
+    /**
+     * ID is the unique identifier for this tool call
+     */
+    id?: string;
+    /**
+     * Type is the type of tool call (always "function" for now)
+     */
+    type?: string;
+};
+/**
+ * Function contains the function call details
+ */
+type LlmapiToolCallFunction = {
+    /**
+     * Arguments is the JSON string of arguments to pass to the function
+     */
+    arguments?: string;
+    /**
+     * Name is the name of the function to call
+     */
+    name?: string;
+};
 
 type AuthToken = string | undefined;
 interface Auth {
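The new `tool_calls` field mirrors the OpenAI-style shape: each entry has an `id`, a `type` of `"function"`, and a `function` carrying a `name` plus JSON-encoded `arguments`. For example, an assistant turn requesting a hypothetical `get_weather` tool would be represented as:

```ts
// Illustration of the new types; get_weather, the id, and the arguments are made up.
const assistantTurn: LlmapiMessage = {
  role: "assistant",
  content: [],
  tool_calls: [
    {
      id: "call_abc123",
      type: "function",
      function: {
        name: "get_weather",
        arguments: JSON.stringify({ city: "Lisbon", unit: "celsius" }),
      },
    },
  ],
};
```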
@@ -978,6 +1006,8 @@ interface StoredMessage {
     sources?: SearchSource[];
     responseDuration?: number;
     wasStopped?: boolean;
+    /** If set, indicates the message failed with this error */
+    error?: string;
 }
 interface StoredConversation {
     uniqueId: string;
@@ -1002,6 +1032,8 @@ interface CreateMessageOptions {
     vector?: number[];
     embeddingModel?: string;
     wasStopped?: boolean;
+    /** If set, indicates the message failed with this error */
+    error?: string;
 }
 interface CreateConversationOptions {
     conversationId?: string;
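`StoredMessage.error` is what reads return, while `CreateMessageOptions.error` is what writes accept, which is how the bundled code can store an empty assistant message that records only the failure. A hedged sketch of such an options literal (field names follow the bundled `createMessageOp` call above; the concrete values are placeholders):

```ts
// Sketch: options for persisting a failed assistant message; values are placeholders.
const failedAssistantMessage: CreateMessageOptions = {
  conversationId: "conv_123", // placeholder conversation id
  role: "assistant",
  content: "", // no usable response text
  model: "example-model", // placeholder model name
  responseDuration: 1234, // placeholder duration
  error: "No response data received",
};
```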
@@ -1063,6 +1095,7 @@ declare class Message extends Model {
     sources?: SearchSource[];
     responseDuration?: number;
     wasStopped?: boolean;
+    error?: string;
 }
 declare class Conversation extends Model {
     static table: string;
@@ -1248,6 +1281,7 @@ declare const sdkSchema: Readonly<{
  * Migration history:
  * - v2 → v3: Added `was_stopped` column to history table
  * - v3 → v4: Added `modelPreferences` table for settings storage
+ * - v4 → v5: Added `error` column to history table for error persistence
  */
 declare const sdkMigrations: Readonly<{
     validated: true;
@@ -173,6 +173,10 @@ type LlmapiMessage = {
      */
     content?: Array<LlmapiMessageContentPart>;
     role?: LlmapiRole;
+    /**
+     * ToolCalls contains tool/function calls made by the assistant (only for assistant role)
+     */
+    tool_calls?: Array<LlmapiToolCall>;
 };
 /**
  * ImageURL is used when Type=image_url
@@ -347,6 +351,30 @@ type LlmapiSearchUsage = {
      */
     cost_micro_usd?: number;
 };
+type LlmapiToolCall = {
+    function?: LlmapiToolCallFunction;
+    /**
+     * ID is the unique identifier for this tool call
+     */
+    id?: string;
+    /**
+     * Type is the type of tool call (always "function" for now)
+     */
+    type?: string;
+};
+/**
+ * Function contains the function call details
+ */
+type LlmapiToolCallFunction = {
+    /**
+     * Arguments is the JSON string of arguments to pass to the function
+     */
+    arguments?: string;
+    /**
+     * Name is the name of the function to call
+     */
+    name?: string;
+};
 
 type AuthToken = string | undefined;
 interface Auth {
@@ -978,6 +1006,8 @@ interface StoredMessage {
     sources?: SearchSource[];
     responseDuration?: number;
     wasStopped?: boolean;
+    /** If set, indicates the message failed with this error */
+    error?: string;
 }
 interface StoredConversation {
     uniqueId: string;
@@ -1002,6 +1032,8 @@ interface CreateMessageOptions {
     vector?: number[];
     embeddingModel?: string;
     wasStopped?: boolean;
+    /** If set, indicates the message failed with this error */
+    error?: string;
 }
 interface CreateConversationOptions {
     conversationId?: string;
@@ -1063,6 +1095,7 @@ declare class Message extends Model {
     sources?: SearchSource[];
     responseDuration?: number;
     wasStopped?: boolean;
+    error?: string;
 }
 declare class Conversation extends Model {
     static table: string;
@@ -1248,6 +1281,7 @@ declare const sdkSchema: Readonly<{
  * Migration history:
  * - v2 → v3: Added `was_stopped` column to history table
  * - v3 → v4: Added `modelPreferences` table for settings storage
+ * - v4 → v5: Added `error` column to history table for error persistence
  */
 declare const sdkMigrations: Readonly<{
     validated: true;
@@ -1704,7 +1704,7 @@ import {
   addColumns
 } from "@nozbe/watermelondb/Schema/migrations";
 var chatStorageSchema = appSchema({
-  version: 2,
+  version: 3,
   tables: [
     tableSchema({
       name: "history",
@@ -1722,7 +1722,8 @@ var chatStorageSchema = appSchema({
         { name: "usage", type: "string", isOptional: true },
         { name: "sources", type: "string", isOptional: true },
         { name: "response_duration", type: "number", isOptional: true },
-        { name: "was_stopped", type: "boolean", isOptional: true }
+        { name: "was_stopped", type: "boolean", isOptional: true },
+        { name: "error", type: "string", isOptional: true }
       ]
     }),
     tableSchema({
@@ -1747,6 +1748,15 @@ var chatStorageMigrations = schemaMigrations({
          columns: [{ name: "was_stopped", type: "boolean", isOptional: true }]
        })
      ]
+    },
+    {
+      toVersion: 3,
+      steps: [
+        addColumns({
+          table: "history",
+          columns: [{ name: "error", type: "string", isOptional: true }]
+        })
+      ]
     }
   ]
 });
@@ -1802,6 +1812,9 @@ __decorateClass([
 __decorateClass([
   field("was_stopped")
 ], Message.prototype, "wasStopped", 2);
+__decorateClass([
+  text("error")
+], Message.prototype, "error", 2);
 var Conversation = class extends Model {
 };
 Conversation.table = "conversations";
@@ -1856,7 +1869,8 @@ function messageToStored(message) {
     usage: message.usage,
     sources: message.sources,
     responseDuration: message.responseDuration,
-    wasStopped: message.wasStopped
+    wasStopped: message.wasStopped,
+    error: message.error
   };
 }
 function conversationToStored(conversation) {
@@ -1946,6 +1960,7 @@ async function createMessageOp(ctx, opts) {
       if (opts.vector) msg._setRaw("vector", JSON.stringify(opts.vector));
       if (opts.embeddingModel) msg._setRaw("embedding_model", opts.embeddingModel);
       if (opts.wasStopped) msg._setRaw("was_stopped", opts.wasStopped);
+      if (opts.error) msg._setRaw("error", opts.error);
     });
   });
   return messageToStored(created);
@@ -1965,6 +1980,20 @@ async function updateMessageEmbeddingOp(ctx, uniqueId, vector, embeddingModel) {
   });
   return messageToStored(message);
 }
+async function updateMessageErrorOp(ctx, uniqueId, error) {
+  let message;
+  try {
+    message = await ctx.messagesCollection.find(uniqueId);
+  } catch {
+    return null;
+  }
+  await ctx.database.write(async () => {
+    await message.update((msg) => {
+      msg._setRaw("error", error);
+    });
+  });
+  return messageToStored(message);
+}
 function cosineSimilarity(a, b) {
   if (a.length !== b.length) return 0;
   let dotProduct = 0;
@@ -2182,7 +2211,8 @@ function useChatStorage(options) {
     let messagesToSend = [];
     if (includeHistory && !providedMessages) {
       const storedMessages = await getMessages(convId);
-      const limitedMessages = storedMessages.slice(-maxHistoryMessages);
+      const validMessages = storedMessages.filter((msg) => !msg.error);
+      const limitedMessages = validMessages.slice(-maxHistoryMessages);
       messagesToSend = limitedMessages.map(storedToLlmapiMessage);
     } else if (providedMessages) {
       messagesToSend = providedMessages;
@@ -2281,14 +2311,37 @@ function useChatStorage(options) {
           userMessage: storedUserMessage,
           assistantMessage: storedAssistantMessage2
         };
-      } catch (err) {
+      } catch {
+        return {
+          data: null,
+          error: "Request aborted",
+          toolExecution: abortedResult.toolExecution,
+          userMessage: storedUserMessage
+        };
       }
     }
+    const errorMessage = result.error || "No response data received";
+    try {
+      await updateMessageErrorOp(
+        storageCtx,
+        storedUserMessage.uniqueId,
+        errorMessage
+      );
+      await createMessageOp(storageCtx, {
+        conversationId: convId,
+        role: "assistant",
+        content: "",
+        model: model || "",
+        responseDuration,
+        error: errorMessage
+      });
+    } catch {
+    }
     return {
       data: null,
-      error: result.error || "No response data received",
+      error: errorMessage,
       toolExecution: result.toolExecution,
-      userMessage: storedUserMessage
+      userMessage: { ...storedUserMessage, error: errorMessage }
     };
   }
   const responseData = result.data;
@@ -2430,7 +2483,7 @@ __decorateClass([
 ], ModelPreference.prototype, "models", 2);
 
 // src/lib/db/schema.ts
-var SDK_SCHEMA_VERSION = 4;
+var SDK_SCHEMA_VERSION = 5;
 var sdkSchema = appSchema2({
   version: SDK_SCHEMA_VERSION,
   tables: [
@@ -2451,7 +2504,8 @@ var sdkSchema = appSchema2({
         { name: "usage", type: "string", isOptional: true },
         { name: "sources", type: "string", isOptional: true },
         { name: "response_duration", type: "number", isOptional: true },
-        { name: "was_stopped", type: "boolean", isOptional: true }
+        { name: "was_stopped", type: "boolean", isOptional: true },
+        { name: "error", type: "string", isOptional: true }
       ]
     }),
     tableSchema2({
@@ -2518,6 +2572,16 @@ var sdkMigrations = schemaMigrations2({
         ]
       })
      ]
+    },
+    // v4 -> v5: Added error column to history for error persistence
+    {
+      toVersion: 5,
+      steps: [
+        addColumns2({
+          table: "history",
+          columns: [{ name: "error", type: "string", isOptional: true }]
+        })
+      ]
     }
   ]
 });
@@ -9,6 +9,10 @@ type LlmapiMessage = {
      */
     content?: Array<LlmapiMessageContentPart>;
     role?: LlmapiRole;
+    /**
+     * ToolCalls contains tool/function calls made by the assistant (only for assistant role)
+     */
+    tool_calls?: Array<LlmapiToolCall>;
 };
 /**
  * ImageURL is used when Type=image_url
@@ -38,6 +42,30 @@ type LlmapiMessageContentPart = {
  * Role is the message role (system, user, assistant)
  */
 type LlmapiRole = string;
+type LlmapiToolCall = {
+    function?: LlmapiToolCallFunction;
+    /**
+     * ID is the unique identifier for this tool call
+     */
+    id?: string;
+    /**
+     * Type is the type of tool call (always "function" for now)
+     */
+    type?: string;
+};
+/**
+ * Function contains the function call details
+ */
+type LlmapiToolCallFunction = {
+    /**
+     * Arguments is the JSON string of arguments to pass to the function
+     */
+    arguments?: string;
+    /**
+     * Name is the name of the function to call
+     */
+    name?: string;
+};
 
 /**
  * Converts an array of Vercel AI {@link UIMessage} objects into the
@@ -9,6 +9,10 @@ type LlmapiMessage = {
      */
     content?: Array<LlmapiMessageContentPart>;
     role?: LlmapiRole;
+    /**
+     * ToolCalls contains tool/function calls made by the assistant (only for assistant role)
+     */
+    tool_calls?: Array<LlmapiToolCall>;
 };
 /**
  * ImageURL is used when Type=image_url
@@ -38,6 +42,30 @@ type LlmapiMessageContentPart = {
  * Role is the message role (system, user, assistant)
  */
 type LlmapiRole = string;
+type LlmapiToolCall = {
+    function?: LlmapiToolCallFunction;
+    /**
+     * ID is the unique identifier for this tool call
+     */
+    id?: string;
+    /**
+     * Type is the type of tool call (always "function" for now)
+     */
+    type?: string;
+};
+/**
+ * Function contains the function call details
+ */
+type LlmapiToolCallFunction = {
+    /**
+     * Arguments is the JSON string of arguments to pass to the function
+     */
+    arguments?: string;
+    /**
+     * Name is the name of the function to call
+     */
+    name?: string;
+};
 
 /**
  * Converts an array of Vercel AI {@link UIMessage} objects into the
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@reverbia/sdk",
-  "version": "1.0.0-next.20251219154503",
+  "version": "1.0.0-next.20251219170400",
   "description": "",
   "main": "./dist/index.cjs",
   "module": "./dist/index.mjs",
@@ -72,7 +72,7 @@
   "homepage": "https://github.com/zeta-chain/ai-sdk#readme",
   "dependencies": {
     "@huggingface/transformers": "^3.8.0",
-    "@reverbia/portal": "1.0.0-next.20251217051358",
+    "@reverbia/portal": "1.0.0-next.20251219155822",
     "ai": "5.0.93",
     "pdfjs-dist": "^4.10.38",
     "tesseract.js": "^6.0.1"