@hebo-ai/gateway 0.4.0-beta.3 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (92)
  1. package/README.md +40 -5
  2. package/dist/config.js +21 -7
  3. package/dist/endpoints/chat-completions/converters.d.ts +3 -3
  4. package/dist/endpoints/chat-completions/converters.js +16 -8
  5. package/dist/endpoints/chat-completions/handler.js +34 -27
  6. package/dist/endpoints/chat-completions/otel.d.ts +6 -0
  7. package/dist/endpoints/chat-completions/otel.js +127 -0
  8. package/dist/endpoints/embeddings/handler.js +19 -10
  9. package/dist/endpoints/embeddings/otel.d.ts +6 -0
  10. package/dist/endpoints/embeddings/otel.js +35 -0
  11. package/dist/endpoints/models/handler.js +3 -4
  12. package/dist/errors/gateway.d.ts +1 -1
  13. package/dist/errors/gateway.js +3 -4
  14. package/dist/errors/openai.js +11 -12
  15. package/dist/errors/utils.d.ts +3 -4
  16. package/dist/errors/utils.js +6 -6
  17. package/dist/gateway.js +1 -1
  18. package/dist/lifecycle.js +71 -29
  19. package/dist/middleware/matcher.js +1 -1
  20. package/dist/models/amazon/presets.d.ts +37 -37
  21. package/dist/models/amazon/presets.js +1 -1
  22. package/dist/models/anthropic/presets.d.ts +56 -56
  23. package/dist/models/cohere/presets.d.ts +54 -54
  24. package/dist/models/cohere/presets.js +2 -2
  25. package/dist/models/google/presets.d.ts +31 -31
  26. package/dist/models/google/presets.js +1 -1
  27. package/dist/models/meta/presets.d.ts +42 -42
  28. package/dist/models/openai/presets.d.ts +96 -96
  29. package/dist/models/openai/presets.js +1 -1
  30. package/dist/models/types.d.ts +1 -1
  31. package/dist/models/voyage/presets.d.ts +92 -92
  32. package/dist/models/voyage/presets.js +1 -1
  33. package/dist/providers/registry.js +2 -2
  34. package/dist/telemetry/baggage.d.ts +1 -0
  35. package/dist/telemetry/baggage.js +24 -0
  36. package/dist/telemetry/fetch.d.ts +2 -1
  37. package/dist/telemetry/fetch.js +13 -3
  38. package/dist/telemetry/gen-ai.d.ts +5 -0
  39. package/dist/telemetry/gen-ai.js +60 -0
  40. package/dist/telemetry/http.d.ts +3 -0
  41. package/dist/telemetry/http.js +57 -0
  42. package/dist/telemetry/memory.d.ts +2 -0
  43. package/dist/telemetry/memory.js +27 -0
  44. package/dist/telemetry/span.d.ts +6 -3
  45. package/dist/telemetry/span.js +24 -36
  46. package/dist/telemetry/stream.d.ts +3 -7
  47. package/dist/telemetry/stream.js +26 -29
  48. package/dist/types.d.ts +16 -15
  49. package/dist/utils/headers.d.ts +1 -1
  50. package/dist/utils/headers.js +7 -9
  51. package/dist/utils/request.d.ts +0 -4
  52. package/dist/utils/request.js +0 -9
  53. package/dist/utils/response.js +1 -1
  54. package/package.json +5 -2
  55. package/src/config.ts +28 -7
  56. package/src/endpoints/chat-completions/converters.ts +18 -11
  57. package/src/endpoints/chat-completions/handler.ts +46 -28
  58. package/src/endpoints/chat-completions/otel.ts +161 -0
  59. package/src/endpoints/embeddings/handler.test.ts +2 -2
  60. package/src/endpoints/embeddings/handler.ts +28 -10
  61. package/src/endpoints/embeddings/otel.ts +56 -0
  62. package/src/endpoints/models/handler.ts +3 -5
  63. package/src/errors/gateway.ts +5 -5
  64. package/src/errors/openai.ts +25 -17
  65. package/src/errors/utils.ts +6 -7
  66. package/src/gateway.ts +1 -1
  67. package/src/lifecycle.ts +85 -32
  68. package/src/middleware/matcher.ts +1 -1
  69. package/src/models/amazon/presets.ts +1 -1
  70. package/src/models/cohere/presets.ts +2 -2
  71. package/src/models/google/presets.ts +1 -1
  72. package/src/models/openai/presets.ts +1 -1
  73. package/src/models/types.ts +1 -1
  74. package/src/models/voyage/presets.ts +1 -1
  75. package/src/providers/registry.ts +2 -2
  76. package/src/telemetry/baggage.ts +27 -0
  77. package/src/telemetry/fetch.ts +15 -3
  78. package/src/telemetry/gen-ai.ts +88 -0
  79. package/src/telemetry/http.ts +65 -0
  80. package/src/telemetry/memory.ts +36 -0
  81. package/src/telemetry/span.ts +28 -40
  82. package/src/telemetry/stream.ts +36 -40
  83. package/src/types.ts +18 -18
  84. package/src/utils/headers.ts +8 -19
  85. package/src/utils/request.ts +0 -11
  86. package/src/utils/response.ts +1 -1
  87. package/dist/telemetry/otel.d.ts +0 -2
  88. package/dist/telemetry/otel.js +0 -50
  89. package/dist/telemetry/utils.d.ts +0 -4
  90. package/dist/telemetry/utils.js +0 -223
  91. package/src/telemetry/otel.ts +0 -91
  92. package/src/telemetry/utils.ts +0 -273
@@ -4,7 +4,7 @@ export declare const llama31_8b: <const O extends {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -82,7 +82,7 @@ export declare const llama31_70b: <const O extends {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -160,7 +160,7 @@ export declare const llama31_405b: <const O extends {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -238,7 +238,7 @@ export declare const llama32_1b: <const O extends {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -316,7 +316,7 @@ export declare const llama32_3b: <const O extends {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -394,7 +394,7 @@ export declare const llama32_11b: <const O extends {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -472,7 +472,7 @@ export declare const llama32_90b: <const O extends {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -550,7 +550,7 @@ export declare const llama33_70b: <const O extends {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -628,7 +628,7 @@ export declare const llama4Scout: <const O extends {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -706,7 +706,7 @@ export declare const llama4Maverick: <const O extends {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -785,7 +785,7 @@ export declare const llama: {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -862,7 +862,7 @@ export declare const llama: {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -940,7 +940,7 @@ export declare const llama: {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -1017,7 +1017,7 @@ export declare const llama: {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -1094,7 +1094,7 @@ export declare const llama: {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -1171,7 +1171,7 @@ export declare const llama: {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -1248,7 +1248,7 @@ export declare const llama: {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -1325,7 +1325,7 @@ export declare const llama: {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -1402,7 +1402,7 @@ export declare const llama: {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -1479,7 +1479,7 @@ export declare const llama: {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -1556,7 +1556,7 @@ export declare const llama: {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -1633,7 +1633,7 @@ export declare const llama: {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -1711,7 +1711,7 @@ export declare const llama: {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -1788,7 +1788,7 @@ export declare const llama: {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -1865,7 +1865,7 @@ export declare const llama: {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -1942,7 +1942,7 @@ export declare const llama: {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -2019,7 +2019,7 @@ export declare const llama: {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -2096,7 +2096,7 @@ export declare const llama: {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -2173,7 +2173,7 @@ export declare const llama: {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -2250,7 +2250,7 @@ export declare const llama: {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -2328,7 +2328,7 @@ export declare const llama: {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -2405,7 +2405,7 @@ export declare const llama: {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -2483,7 +2483,7 @@ export declare const llama: {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -2560,7 +2560,7 @@ export declare const llama: {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -2637,7 +2637,7 @@ export declare const llama: {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -2715,7 +2715,7 @@ export declare const llama: {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -2792,7 +2792,7 @@ export declare const llama: {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -2869,7 +2869,7 @@ export declare const llama: {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -2946,7 +2946,7 @@ export declare const llama: {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -3024,7 +3024,7 @@ export declare const llama: {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -3102,7 +3102,7 @@ export declare const llama: {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;
@@ -3179,7 +3179,7 @@ export declare const llama: {
  knowledge?: string | undefined;
  modalities?: {
  input?: readonly ("text" | "file" | "image" | "audio" | "video" | "pdf")[] | undefined;
- output?: readonly ("text" | "image" | "audio" | "video" | "embeddings")[] | undefined;
+ output?: readonly ("text" | "embedding" | "image" | "audio" | "video")[] | undefined;
  } | undefined;
  context?: number | undefined;
  capabilities?: readonly ("temperature" | "reasoning" | "attachments" | "tool_call" | "structured_output")[] | undefined;