@llumiverse/core 0.17.0 → 0.18.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (140)
  1. package/README.md +1 -1
  2. package/lib/cjs/CompletionStream.js.map +1 -1
  3. package/lib/cjs/Driver.js +6 -6
  4. package/lib/cjs/Driver.js.map +1 -1
  5. package/lib/cjs/async.js +3 -3
  6. package/lib/cjs/async.js.map +1 -1
  7. package/lib/cjs/capability/bedrock.js +183 -0
  8. package/lib/cjs/capability/bedrock.js.map +1 -0
  9. package/lib/cjs/capability/openai.js +122 -0
  10. package/lib/cjs/capability/openai.js.map +1 -0
  11. package/lib/cjs/capability/vertexai.js +86 -0
  12. package/lib/cjs/capability/vertexai.js.map +1 -0
  13. package/lib/cjs/capability.js +52 -0
  14. package/lib/cjs/capability.js.map +1 -0
  15. package/lib/cjs/formatters/generic.js +6 -6
  16. package/lib/cjs/formatters/generic.js.map +1 -1
  17. package/lib/cjs/formatters/index.js.map +1 -1
  18. package/lib/cjs/formatters/nova.js +11 -11
  19. package/lib/cjs/formatters/nova.js.map +1 -1
  20. package/lib/cjs/formatters/openai.js +25 -11
  21. package/lib/cjs/formatters/openai.js.map +1 -1
  22. package/lib/cjs/index.js +2 -1
  23. package/lib/cjs/index.js.map +1 -1
  24. package/lib/cjs/json.js +1 -1
  25. package/lib/cjs/json.js.map +1 -1
  26. package/lib/cjs/options.js +8 -43
  27. package/lib/cjs/options.js.map +1 -1
  28. package/lib/cjs/resolver.js +2 -2
  29. package/lib/esm/CompletionStream.js.map +1 -1
  30. package/lib/esm/Driver.js +3 -3
  31. package/lib/esm/Driver.js.map +1 -1
  32. package/lib/esm/async.js +3 -3
  33. package/lib/esm/async.js.map +1 -1
  34. package/lib/esm/capability/bedrock.js +180 -0
  35. package/lib/esm/capability/bedrock.js.map +1 -0
  36. package/lib/esm/capability/openai.js +119 -0
  37. package/lib/esm/capability/openai.js.map +1 -0
  38. package/lib/esm/capability/vertexai.js +83 -0
  39. package/lib/esm/capability/vertexai.js.map +1 -0
  40. package/lib/esm/capability.js +47 -0
  41. package/lib/esm/capability.js.map +1 -0
  42. package/lib/esm/formatters/generic.js +1 -1
  43. package/lib/esm/formatters/generic.js.map +1 -1
  44. package/lib/esm/formatters/index.js.map +1 -1
  45. package/lib/esm/formatters/nova.js +5 -5
  46. package/lib/esm/formatters/nova.js.map +1 -1
  47. package/lib/esm/formatters/openai.js +17 -4
  48. package/lib/esm/formatters/openai.js.map +1 -1
  49. package/lib/esm/index.js +2 -1
  50. package/lib/esm/index.js.map +1 -1
  51. package/lib/esm/json.js +1 -1
  52. package/lib/esm/json.js.map +1 -1
  53. package/lib/esm/options.js +3 -37
  54. package/lib/esm/options.js.map +1 -1
  55. package/lib/esm/resolver.js +2 -2
  56. package/lib/types/CompletionStream.d.ts +1 -1
  57. package/lib/types/CompletionStream.d.ts.map +1 -1
  58. package/lib/types/Driver.d.ts +3 -3
  59. package/lib/types/Driver.d.ts.map +1 -1
  60. package/lib/types/async.d.ts +2 -2
  61. package/lib/types/async.d.ts.map +1 -1
  62. package/lib/types/capability/bedrock.d.ts +7 -0
  63. package/lib/types/capability/bedrock.d.ts.map +1 -0
  64. package/lib/types/capability/openai.d.ts +11 -0
  65. package/lib/types/capability/openai.d.ts.map +1 -0
  66. package/lib/types/capability/vertexai.d.ts +11 -0
  67. package/lib/types/capability/vertexai.d.ts.map +1 -0
  68. package/lib/types/capability.d.ts +5 -0
  69. package/lib/types/capability.d.ts.map +1 -0
  70. package/lib/types/formatters/commons.d.ts +1 -1
  71. package/lib/types/formatters/commons.d.ts.map +1 -1
  72. package/lib/types/formatters/generic.d.ts +2 -2
  73. package/lib/types/formatters/generic.d.ts.map +1 -1
  74. package/lib/types/formatters/index.d.ts +0 -3
  75. package/lib/types/formatters/index.d.ts.map +1 -1
  76. package/lib/types/formatters/nova.d.ts +2 -2
  77. package/lib/types/formatters/nova.d.ts.map +1 -1
  78. package/lib/types/formatters/openai.d.ts +3 -2
  79. package/lib/types/formatters/openai.d.ts.map +1 -1
  80. package/lib/types/index.d.ts +2 -1
  81. package/lib/types/index.d.ts.map +1 -1
  82. package/lib/types/json.d.ts +1 -7
  83. package/lib/types/json.d.ts.map +1 -1
  84. package/lib/types/options.d.ts +2 -13
  85. package/lib/types/options.d.ts.map +1 -1
  86. package/lib/types/validation.d.ts +1 -1
  87. package/lib/types/validation.d.ts.map +1 -1
  88. package/package.json +3 -2
  89. package/src/CompletionStream.ts +5 -5
  90. package/src/Driver.ts +5 -5
  91. package/src/async.ts +5 -8
  92. package/src/capability/bedrock.ts +187 -0
  93. package/src/capability/openai.ts +124 -0
  94. package/src/capability/vertexai.ts +88 -0
  95. package/src/capability.ts +49 -0
  96. package/src/formatters/commons.ts +1 -1
  97. package/src/formatters/generic.ts +2 -2
  98. package/src/formatters/index.ts +0 -5
  99. package/src/formatters/nova.ts +6 -6
  100. package/src/formatters/openai.ts +19 -5
  101. package/src/index.ts +3 -2
  102. package/src/json.ts +2 -10
  103. package/src/options.ts +12 -50
  104. package/src/resolver.ts +2 -2
  105. package/src/validation.ts +3 -3
  106. package/lib/cjs/options/bedrock.js +0 -343
  107. package/lib/cjs/options/bedrock.js.map +0 -1
  108. package/lib/cjs/options/groq.js +0 -37
  109. package/lib/cjs/options/groq.js.map +0 -1
  110. package/lib/cjs/options/openai.js +0 -123
  111. package/lib/cjs/options/openai.js.map +0 -1
  112. package/lib/cjs/options/vertexai.js +0 -257
  113. package/lib/cjs/options/vertexai.js.map +0 -1
  114. package/lib/cjs/types.js +0 -80
  115. package/lib/cjs/types.js.map +0 -1
  116. package/lib/esm/options/bedrock.js +0 -340
  117. package/lib/esm/options/bedrock.js.map +0 -1
  118. package/lib/esm/options/groq.js +0 -34
  119. package/lib/esm/options/groq.js.map +0 -1
  120. package/lib/esm/options/openai.js +0 -120
  121. package/lib/esm/options/openai.js.map +0 -1
  122. package/lib/esm/options/vertexai.js +0 -253
  123. package/lib/esm/options/vertexai.js.map +0 -1
  124. package/lib/esm/types.js +0 -77
  125. package/lib/esm/types.js.map +0 -1
  126. package/lib/types/options/bedrock.d.ts +0 -32
  127. package/lib/types/options/bedrock.d.ts.map +0 -1
  128. package/lib/types/options/groq.d.ts +0 -12
  129. package/lib/types/options/groq.d.ts.map +0 -1
  130. package/lib/types/options/openai.d.ts +0 -21
  131. package/lib/types/options/openai.d.ts.map +0 -1
  132. package/lib/types/options/vertexai.d.ts +0 -52
  133. package/lib/types/options/vertexai.d.ts.map +0 -1
  134. package/lib/types/types.d.ts +0 -323
  135. package/lib/types/types.d.ts.map +0 -1
  136. package/src/options/bedrock.ts +0 -388
  137. package/src/options/groq.ts +0 -47
  138. package/src/options/openai.ts +0 -148
  139. package/src/options/vertexai.ts +0 -312
  140. package/src/types.ts +0 -405
package/lib/types/formatters/index.d.ts CHANGED
@@ -1,6 +1,3 @@
1
- import { JSONSchema } from "../types.js";
2
- import { PromptSegment } from "../types.js";
3
- export type PromptFormatter<T = any> = (messages: PromptSegment[], schema?: JSONSchema) => T;
4
1
  export * from "./commons.js";
5
2
  export * from "./generic.js";
6
3
  export * from "./openai.js";
package/lib/types/formatters/index.d.ts.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/formatters/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAC;AACzC,OAAO,EAAE,aAAa,EAAE,MAAM,aAAa,CAAC;AAE5C,MAAM,MAAM,eAAe,CAAC,CAAC,GAAG,GAAG,IAAI,CAAC,QAAQ,EAAE,aAAa,EAAE,EAAE,MAAM,CAAC,EAAE,UAAU,KAAK,CAAC,CAAC;AAE7F,cAAc,cAAc,CAAC;AAC7B,cAAc,cAAc,CAAC;AAC7B,cAAc,aAAa,CAAC;AAC5B,cAAc,WAAW,CAAC"}
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/formatters/index.ts"],"names":[],"mappings":"AAAA,cAAc,cAAc,CAAC;AAC7B,cAAc,cAAc,CAAC;AAC7B,cAAc,aAAa,CAAC;AAC5B,cAAc,WAAW,CAAC"}
package/lib/types/formatters/nova.d.ts CHANGED
@@ -1,5 +1,5 @@
1
- import { JSONSchema } from "../types.js";
2
- import { PromptSegment } from "../index.js";
1
+ import { JSONSchema } from "@llumiverse/common";
2
+ import { PromptSegment } from "@llumiverse/common";
3
3
  export interface NovaMessage {
4
4
  role: 'user' | 'assistant';
5
5
  content: NovaMessagePart[];
package/lib/types/formatters/nova.d.ts.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"file":"nova.d.ts","sourceRoot":"","sources":["../../../src/formatters/nova.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAC;AACzC,OAAO,EAAc,aAAa,EAAsB,MAAM,aAAa,CAAC;AAI5E,MAAM,WAAW,WAAW;IACxB,IAAI,EAAE,MAAM,GAAG,WAAW,CAAC;IAC3B,OAAO,EAAE,eAAe,EAAE,CAAA;CAC7B;AAED,MAAM,WAAW,iBAAiB;IAC9B,IAAI,EAAE,MAAM,CAAA;CACf;AAED,UAAU,eAAe;IACrB,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,KAAK,CAAC,EAAE;QACJ,MAAM,EAAE,MAAM,GAAG,KAAK,GAAG,KAAK,GAAG,MAAM,CAAC;QACxC,MAAM,EAAE;YACJ,KAAK,EAAE,MAAM,CAAA;SAChB,CAAA;KACJ,CAAA;IACD,KAAK,CAAC,EAAE;QACJ,MAAM,EAAE,KAAK,GAAG,KAAK,GAAG,KAAK,GAAG,MAAM,GAAG,UAAU,GAAG,KAAK,GAAG,MAAM,GAAG,KAAK,GAAG,KAAK,CAAC;QACrF,MAAM,EAAE;YAEJ,UAAU,CAAC,EAAE;gBACT,GAAG,EAAE,MAAM,CAAC;gBACZ,WAAW,EAAE,MAAM,CAAA;aACtB,CAAA;YAED,KAAK,CAAC,EAAE,MAAM,CAAA;SACjB,CAAA;KACJ,CAAA;CACJ;AAED,MAAM,WAAW,kBAAkB;IAC/B,MAAM,CAAC,EAAE,iBAAiB,EAAE,CAAC;IAC7B,QAAQ,EAAE,WAAW,EAAE,CAAC;IACxB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,IAAI,CAAC,EAAE,MAAM,CAAC;CACjB;AAED;;GAEG;AAEH,wBAAsB,gBAAgB,CAAC,QAAQ,EAAE,aAAa,EAAE,EAAE,MAAM,CAAC,EAAE,UAAU,GAAG,OAAO,CAAC,kBAAkB,CAAC,CA6FlH"}
1
+ {"version":3,"file":"nova.d.ts","sourceRoot":"","sources":["../../../src/formatters/nova.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,oBAAoB,CAAC;AAChD,OAAO,EAAc,aAAa,EAAE,MAAM,oBAAoB,CAAC;AAI/D,MAAM,WAAW,WAAW;IACxB,IAAI,EAAE,MAAM,GAAG,WAAW,CAAC;IAC3B,OAAO,EAAE,eAAe,EAAE,CAAA;CAC7B;AAED,MAAM,WAAW,iBAAiB;IAC9B,IAAI,EAAE,MAAM,CAAA;CACf;AAED,UAAU,eAAe;IACrB,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,KAAK,CAAC,EAAE;QACJ,MAAM,EAAE,MAAM,GAAG,KAAK,GAAG,KAAK,GAAG,MAAM,CAAC;QACxC,MAAM,EAAE;YACJ,KAAK,EAAE,MAAM,CAAA;SAChB,CAAA;KACJ,CAAA;IACD,KAAK,CAAC,EAAE;QACJ,MAAM,EAAE,KAAK,GAAG,KAAK,GAAG,KAAK,GAAG,MAAM,GAAG,UAAU,GAAG,KAAK,GAAG,MAAM,GAAG,KAAK,GAAG,KAAK,CAAC;QACrF,MAAM,EAAE;YAEJ,UAAU,CAAC,EAAE;gBACT,GAAG,EAAE,MAAM,CAAC;gBACZ,WAAW,EAAE,MAAM,CAAA;aACtB,CAAA;YAED,KAAK,CAAC,EAAE,MAAM,CAAA;SACjB,CAAA;KACJ,CAAA;CACJ;AAED,MAAM,WAAW,kBAAkB;IAC/B,MAAM,CAAC,EAAE,iBAAiB,EAAE,CAAC;IAC7B,QAAQ,EAAE,WAAW,EAAE,CAAC;IACxB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,IAAI,CAAC,EAAE,MAAM,CAAC;CACjB;AAED;;GAEG;AAEH,wBAAsB,gBAAgB,CAAC,QAAQ,EAAE,aAAa,EAAE,EAAE,MAAM,CAAC,EAAE,UAAU,GAAG,OAAO,CAAC,kBAAkB,CAAC,CA6FlH"}
package/lib/types/formatters/openai.d.ts CHANGED
@@ -1,5 +1,5 @@
1
- import { PromptOptions } from "../index.js";
2
- import { PromptSegment } from "../types.js";
1
+ import { PromptOptions } from "@llumiverse/common";
2
+ import { PromptSegment } from "@llumiverse/common";
3
3
  export interface OpenAITextMessage {
4
4
  content: string;
5
5
  role: "system" | "user" | "assistant";
@@ -26,6 +26,7 @@ export interface OpenAIContentPartImage {
26
26
  url: string;
27
27
  };
28
28
  }
29
+ export declare const noStructuredOutputModels: string[];
29
30
  /**
30
31
  * OpenAI text only prompts
31
32
  * @param segments
package/lib/types/formatters/openai.d.ts.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"file":"openai.d.ts","sourceRoot":"","sources":["../../../src/formatters/openai.ts"],"names":[],"mappings":"AAAA,OAAO,EAAc,aAAa,EAAE,MAAM,aAAa,CAAC;AAExD,OAAO,EAAE,aAAa,EAAE,MAAM,aAAa,CAAC;AAG5C,MAAM,WAAW,iBAAiB;IAC9B,OAAO,EAAE,MAAM,CAAC;IAChB,IAAI,EAAE,QAAQ,GAAG,MAAM,GAAG,WAAW,CAAC;CACzC;AAED,MAAM,WAAW,aAAa;IAC1B,OAAO,EAAE,CAAC,qBAAqB,GAAG,sBAAsB,CAAC,EAAE,CAAA;IAC3D,IAAI,EAAE,QAAQ,GAAG,MAAM,GAAG,WAAW,CAAC;IACtC,IAAI,CAAC,EAAE,MAAM,CAAC;CACjB;AACD,MAAM,WAAW,iBAAiB;IAC9B,IAAI,EAAE,MAAM,CAAC;IACb,YAAY,EAAE,MAAM,CAAC;IACrB,OAAO,EAAE,MAAM,CAAC;CACnB;AACD,MAAM,MAAM,kBAAkB,GAAG,aAAa,GAAG,iBAAiB,CAAC;AAEnE,MAAM,WAAW,qBAAqB;IAClC,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAA;CACf;AAED,MAAM,WAAW,sBAAsB;IACnC,IAAI,EAAE,WAAW,CAAC;IAClB,SAAS,EAAE;QACP,MAAM,CAAC,EAAE,MAAM,GAAG,KAAK,GAAG,MAAM,CAAA;QAChC,GAAG,EAAE,MAAM,CAAA;KACd,CAAA;CACJ;AAED;;;;GAIG;AACH,wBAAgB,0BAA0B,CAAC,QAAQ,EAAE,aAAa,EAAE,GAAG,iBAAiB,EAAE,CAoBzF;AAGD,wBAAsB,gCAAgC,CAAC,QAAQ,EAAE,aAAa,EAAE,EAAE,IAAI,EAAE,aAAa,GAAG,4BAA4B,GAAG,OAAO,CAAC,kBAAkB,EAAE,CAAC,CAyFnK;AAED,MAAM,WAAW,4BAA4B;IACzC,UAAU,CAAC,EAAE,OAAO,CAAA;IACpB,oBAAoB,CAAC,EAAE,OAAO,CAAA;IAC9B,MAAM,CAAC,EAAE,MAAM,CAAA;CAClB"}
1
+ {"version":3,"file":"openai.d.ts","sourceRoot":"","sources":["../../../src/formatters/openai.ts"],"names":[],"mappings":"AAAA,OAAO,EAAc,aAAa,EAAE,MAAM,oBAAoB,CAAC;AAE/D,OAAO,EAAE,aAAa,EAAE,MAAM,oBAAoB,CAAC;AAGnD,MAAM,WAAW,iBAAiB;IAC9B,OAAO,EAAE,MAAM,CAAC;IAChB,IAAI,EAAE,QAAQ,GAAG,MAAM,GAAG,WAAW,CAAC;CACzC;AAED,MAAM,WAAW,aAAa;IAC1B,OAAO,EAAE,CAAC,qBAAqB,GAAG,sBAAsB,CAAC,EAAE,CAAA;IAC3D,IAAI,EAAE,QAAQ,GAAG,MAAM,GAAG,WAAW,CAAC;IACtC,IAAI,CAAC,EAAE,MAAM,CAAC;CACjB;AACD,MAAM,WAAW,iBAAiB;IAC9B,IAAI,EAAE,MAAM,CAAC;IACb,YAAY,EAAE,MAAM,CAAC;IACrB,OAAO,EAAE,MAAM,CAAC;CACnB;AACD,MAAM,MAAM,kBAAkB,GAAG,aAAa,GAAG,iBAAiB,CAAC;AAEnE,MAAM,WAAW,qBAAqB;IAClC,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAA;CACf;AAED,MAAM,WAAW,sBAAsB;IACnC,IAAI,EAAE,WAAW,CAAC;IAClB,SAAS,EAAE;QACP,MAAM,CAAC,EAAE,MAAM,GAAG,KAAK,GAAG,MAAM,CAAA;QAChC,GAAG,EAAE,MAAM,CAAA;KACd,CAAA;CACJ;AAID,eAAO,MAAM,wBAAwB,EAAE,MAAM,EAU5C,CAAA;AAED;;;;GAIG;AACH,wBAAgB,0BAA0B,CAAC,QAAQ,EAAE,aAAa,EAAE,GAAG,iBAAiB,EAAE,CAoBzF;AAGD,wBAAsB,gCAAgC,CAAC,QAAQ,EAAE,aAAa,EAAE,EAAE,IAAI,EAAE,aAAa,GAAG,4BAA4B,GAAG,OAAO,CAAC,kBAAkB,EAAE,CAAC,CAyFnK;AAED,MAAM,WAAW,4BAA4B;IACzC,UAAU,CAAC,EAAE,OAAO,CAAA;IACpB,oBAAoB,CAAC,EAAE,OAAO,CAAA;IAC9B,MAAM,CAAC,EAAE,MAAM,CAAA;CAClB"}
package/lib/types/index.d.ts CHANGED
@@ -1,6 +1,7 @@
1
1
  export * from "./Driver.js";
2
2
  export * from "./json.js";
3
3
  export * from "./stream.js";
4
- export * from "./types.js";
5
4
  export * from "./options.js";
5
+ export * from "./capability.js";
6
+ export * from "@llumiverse/common";
6
7
  //# sourceMappingURL=index.d.ts.map
package/lib/types/index.d.ts.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,aAAa,CAAC;AAC5B,cAAc,WAAW,CAAC;AAC1B,cAAc,aAAa,CAAC;AAC5B,cAAc,YAAY,CAAC;AAC3B,cAAc,cAAc,CAAC"}
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,aAAa,CAAC;AAC5B,cAAc,WAAW,CAAC;AAC1B,cAAc,aAAa,CAAC;AAC5B,cAAc,cAAc,CAAC;AAC7B,cAAc,iBAAiB,CAAC;AAChC,cAAc,oBAAoB,CAAC"}
package/lib/types/json.d.ts CHANGED
@@ -1,11 +1,5 @@
1
+ import { JSONValue } from "@llumiverse/common";
1
2
  export declare function extractAndParseJSON(text: string): JSONValue;
2
- export type JSONPrimitive = string | number | boolean | null;
3
- export type JSONArray = JSONValue[];
4
- export type JSONObject = {
5
- [key: string]: JSONValue;
6
- };
7
- export type JSONComposite = JSONArray | JSONObject;
8
- export type JSONValue = JSONPrimitive | JSONComposite;
9
3
  export declare class JsonParser {
10
4
  text: string;
11
5
  pos: number;
package/lib/types/json.d.ts.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"file":"json.d.ts","sourceRoot":"","sources":["../../src/json.ts"],"names":[],"mappings":"AAQA,wBAAgB,mBAAmB,CAAC,IAAI,EAAE,MAAM,GAAG,SAAS,CAE3D;AAED,MAAM,MAAM,aAAa,GAAG,MAAM,GAAG,MAAM,GAAG,OAAO,GAAG,IAAI,CAAC;AAC7D,MAAM,MAAM,SAAS,GAAG,SAAS,EAAE,CAAC;AACpC,MAAM,MAAM,UAAU,GAAG;IAAE,CAAC,GAAG,EAAE,MAAM,GAAG,SAAS,CAAA;CAAE,CAAC;AACtD,MAAM,MAAM,aAAa,GAAG,SAAS,GAAG,UAAU,CAAC;AACnD,MAAM,MAAM,SAAS,GAAG,aAAa,GAAG,aAAa,CAAC;AAqBtD,qBAAa,UAAU;IAGA,IAAI,EAAE,MAAM;IAF/B,GAAG,EAAE,MAAM,CAAK;gBAEG,IAAI,EAAE,MAAM;IAE/B,IAAI,CAAC,CAAC,EAAE,MAAM;IAKd,kBAAkB;IAQlB,OAAO;IAwBP,UAAU;IAkCV,UAAU;IA+BV,SAAS;IAoBT,SAAS;IAWT,MAAM,CAAC,KAAK,CAAC,IAAI,EAAE,MAAM;CAQ5B;AAGD,wBAAgB,SAAS,CAAC,IAAI,EAAE,MAAM,GAAG,SAAS,CAYjD"}
1
+ {"version":3,"file":"json.d.ts","sourceRoot":"","sources":["../../src/json.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAE,MAAM,oBAAoB,CAAC;AAS/C,wBAAgB,mBAAmB,CAAC,IAAI,EAAE,MAAM,GAAG,SAAS,CAE3D;AAkBD,qBAAa,UAAU;IAGA,IAAI,EAAE,MAAM;IAF/B,GAAG,EAAE,MAAM,CAAK;gBAEG,IAAI,EAAE,MAAM;IAE/B,IAAI,CAAC,CAAC,EAAE,MAAM;IAKd,kBAAkB;IAQlB,OAAO;IAwBP,UAAU;IAkCV,UAAU;IA+BV,SAAS;IAoBT,SAAS;IAWT,MAAM,CAAC,KAAK,CAAC,IAAI,EAAE,MAAM;CAQ5B;AAGD,wBAAgB,SAAS,CAAC,IAAI,EAAE,MAAM,GAAG,SAAS,CAYjD"}
package/lib/types/options.d.ts CHANGED
@@ -1,14 +1,3 @@
1
- import { ModelOptions, ModelOptionsInfo } from "./types.js";
2
- export interface TextFallbackOptions {
3
- _option_id: "text-fallback";
4
- max_tokens?: number;
5
- temperature?: number;
6
- top_p?: number;
7
- top_k?: number;
8
- presence_penalty?: number;
9
- frequency_penalty?: number;
10
- stop_sequence?: string[];
11
- }
12
- export declare const textOptionsFallback: ModelOptionsInfo;
13
- export declare function getOptions(provider?: string, model?: string, options?: ModelOptions): ModelOptionsInfo;
1
+ import { ModelOptions, ModelOptionsInfo } from "@llumiverse/common";
2
+ export declare function getOptions(model: string, provider?: string, options?: ModelOptions): ModelOptionsInfo;
14
3
  //# sourceMappingURL=options.d.ts.map
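Note the signature change to getOptions just above: 0.17.0 declared getOptions(provider?, model?, options?), while 0.18.0 declares getOptions(model, provider?, options?), so the model id is now the first and only required argument. A minimal call sketch against the 0.18.0 declaration; the provider string is an illustrative placeholder, and getOptions is assumed to be reachable from the package root since lib/types/index.d.ts re-exports ./options.js:

import { getOptions } from "@llumiverse/core";

// 0.18.0: the model id comes first and is required; provider and options stay optional.
const info = getOptions("anthropic.claude-3-5-sonnet-20240620-v1:0", "bedrock");
// info is typed as ModelOptionsInfo per the declaration above.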
package/lib/types/options.d.ts.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"file":"options.d.ts","sourceRoot":"","sources":["../../src/options.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAA6B,MAAM,YAAY,CAAC;AAMvF,MAAM,WAAW,mBAAmB;IAChC,UAAU,EAAE,eAAe,CAAC;IAC5B,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,iBAAiB,CAAC,EAAE,MAAM,CAAC;IAC3B,aAAa,CAAC,EAAE,MAAM,EAAE,CAAC;CAC5B;AAED,eAAO,MAAM,mBAAmB,EAAE,gBA6BjC,CAAC;AAEF,wBAAgB,UAAU,CAAC,QAAQ,CAAC,EAAE,MAAM,EAAE,KAAK,CAAC,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,YAAY,GAAG,gBAAgB,CAatG"}
1
+ {"version":3,"file":"options.d.ts","sourceRoot":"","sources":["../../src/options.ts"],"names":[],"mappings":"AAAA,OAAO,EAMH,YAAY,EACZ,gBAAgB,EACnB,MAAM,oBAAoB,CAAC;AAE5B,wBAAgB,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,QAAQ,CAAC,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,YAAY,GAAG,gBAAgB,CAarG"}
package/lib/types/validation.d.ts CHANGED
@@ -1,4 +1,4 @@
1
- import { ResultValidationError } from "./types.js";
1
+ import { ResultValidationError } from "@llumiverse/common";
2
2
  export declare class ValidationError extends Error implements ResultValidationError {
3
3
  code: 'validation_error' | 'json_error';
4
4
  constructor(code: 'validation_error' | 'json_error', message: string);
package/lib/types/validation.d.ts.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"file":"validation.d.ts","sourceRoot":"","sources":["../../src/validation.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,qBAAqB,EAAE,MAAM,YAAY,CAAC;AAgBnD,qBAAa,eAAgB,SAAQ,KAAM,YAAW,qBAAqB;IAE5D,IAAI,EAAE,kBAAkB,GAAG,YAAY;gBAAvC,IAAI,EAAE,kBAAkB,GAAG,YAAY,EAC9C,OAAO,EAAE,MAAM;CAKtB;AAED,wBAAgB,cAAc,CAAC,IAAI,EAAE,GAAG,EAAE,MAAM,EAAE,MAAM,OA4CvD"}
1
+ {"version":3,"file":"validation.d.ts","sourceRoot":"","sources":["../../src/validation.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,qBAAqB,EAAE,MAAM,oBAAoB,CAAC;AAgB3D,qBAAa,eAAgB,SAAQ,KAAM,YAAW,qBAAqB;IAE5D,IAAI,EAAE,kBAAkB,GAAG,YAAY;gBAAvC,IAAI,EAAE,kBAAkB,GAAG,YAAY,EAC9C,OAAO,EAAE,MAAM;CAKtB;AAED,wBAAgB,cAAc,CAAC,IAAI,EAAE,GAAG,EAAE,MAAM,EAAE,MAAM,OA4CvD"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@llumiverse/core",
3
- "version": "0.17.0",
3
+ "version": "0.18.0",
4
4
  "type": "module",
5
5
  "description": "Provide an universal API to LLMs. Support for existing LLMs can be added by writing a driver.",
6
6
  "files": [
@@ -72,7 +72,8 @@
72
72
  "dependencies": {
73
73
  "@types/node": "^22.5.0",
74
74
  "ajv": "^8.16.0",
75
- "ajv-formats": "^3.0.1"
75
+ "ajv-formats": "^3.0.1",
76
+ "@llumiverse/common": "0.18.0"
76
77
  },
77
78
  "ts_dual_module": {
78
79
  "outDir": "lib",
package/src/CompletionStream.ts CHANGED
@@ -1,5 +1,5 @@
1
1
  import { AbstractDriver } from "./Driver.js";
2
- import { CompletionStream, DriverOptions, ExecutionOptions, ExecutionResponse, ExecutionTokenUsage } from "./types.js";
2
+ import { CompletionStream, DriverOptions, ExecutionOptions, ExecutionResponse, ExecutionTokenUsage } from "@llumiverse/common";
3
3
 
4
4
  export class DefaultCompletionStream<PromptT = any> implements CompletionStream<PromptT> {
5
5
 
@@ -35,7 +35,7 @@ export class DefaultCompletionStream<PromptT = any> implements CompletionStream<
35
35
  if (typeof chunk === 'string') {
36
36
  chunks.push(chunk);
37
37
  yield chunk;
38
- }else{
38
+ } else {
39
39
  if (chunk.finish_reason) { //Do not replace non-null values with null values
40
40
  finish_reason = chunk.finish_reason; //Used to skip empty finish_reason chunks coming after "stop" or "length"
41
41
  }
@@ -43,14 +43,14 @@ export class DefaultCompletionStream<PromptT = any> implements CompletionStream<
43
43
  //Tokens returned include prior parts of stream,
44
44
  //so overwrite rather than accumulate
45
45
  //Math.max used as some models report final token count at beginning of stream
46
- promptTokens = Math.max(promptTokens,chunk.token_usage.prompt ?? 0);
47
- resultTokens = Math.max(resultTokens ?? 0,chunk.token_usage.result ?? 0);
46
+ promptTokens = Math.max(promptTokens, chunk.token_usage.prompt ?? 0);
47
+ resultTokens = Math.max(resultTokens ?? 0, chunk.token_usage.result ?? 0);
48
48
  }
49
49
  if (chunk.result) {
50
50
  chunks.push(chunk.result);
51
51
  yield chunk.result;
52
52
  }
53
- }
53
+ }
54
54
  }
55
55
  }
56
56
 
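The Math.max pattern in the hunk above follows the comments around it: chunks carry cumulative token totals rather than deltas, and some models send the final total at the start of the stream, so keeping the running maximum avoids both double-counting and losing an early total to a later empty value. A small illustrative sketch with made-up chunk values:

// Two shapes of prompt-token reporting for the same request, both meaning 12 prompt tokens:
// cumulative totals on every chunk, or the final total up front followed by empty values.
const perChunkTotals: Array<number | undefined> = [12, 12, 12];
const finalTotalFirst: Array<number | undefined> = [12, undefined, undefined];

let promptTokens = 0;
for (const prompt of finalTotalFirst) {
    promptTokens = Math.max(promptTokens, prompt ?? 0); // same pattern as DefaultCompletionStream
}
// promptTokens === 12, and the same loop over perChunkTotals also ends at 12.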
package/src/Driver.ts CHANGED
@@ -26,7 +26,7 @@ import {
26
26
  TrainingJob,
27
27
  TrainingOptions,
28
28
  TrainingPromptOptions
29
- } from "./types.js";
29
+ } from "@llumiverse/common";
30
30
  import { validateResult } from "./validation.js";
31
31
 
32
32
  const ConsoleLogger: Logger = {
@@ -78,7 +78,7 @@ export interface Driver<PromptT = unknown> {
78
78
 
79
79
  getTrainingJob(jobId: string): Promise<TrainingJob>;
80
80
 
81
- //list models available for this environement
81
+ //list models available for this environment
82
82
  listModels(params?: ModelSearchPayload): Promise<AIModel[]>;
83
83
 
84
84
  //list models that can be trained
@@ -199,13 +199,13 @@ export abstract class AbstractDriver<OptionsT extends DriverOptions = DriverOpti
199
199
  }
200
200
 
201
201
  /**
202
- * Must be overrided if the implementation cannot stream.
202
+ * Must be overridden if the implementation cannot stream.
203
203
  * Some implementation may be able to stream for certain models but not for others.
204
204
  * You must overwrite and return false if the current model doesn't support streaming.
205
205
  * The default implementation returns true, so it is assumed that the streaming can be done.
206
206
  * If this method returns false then the streaming execution will fallback on a blocking execution streaming the entire response as a single event.
207
207
  * @param options the execution options containing the target model name.
208
- * @returns true if the exeuction can be streamed false otherwise.
208
+ * @returns true if the execution can be streamed false otherwise.
209
209
  */
210
210
  protected canStream(_options: ExecutionOptions) {
211
211
  return Promise.resolve(true);
@@ -229,7 +229,7 @@ export abstract class AbstractDriver<OptionsT extends DriverOptions = DriverOpti
229
229
  //Cannot be made abstract, as abstract methods are required in the derived class
230
230
  }
231
231
 
232
- //list models available for this environement
232
+ //list models available for this environment
233
233
  abstract listModels(params?: ModelSearchPayload): Promise<AIModel[]>;
234
234
 
235
235
  //check that it is possible to connect to the environment
package/src/async.ts CHANGED
@@ -1,5 +1,5 @@
1
1
  import type { ServerSentEvent } from "api-fetch-client";
2
- import { CompletionChunk } from "./types.js";
2
+ import { CompletionChunk } from "@llumiverse/common";
3
3
 
4
4
  export async function* asyncMap<T, R>(asyncIterable: AsyncIterable<T>, callback: (value: T, index: number) => R) {
5
5
  let i = 0;
@@ -19,7 +19,7 @@ export function oneAsyncIterator<T>(value: T): AsyncIterable<T> {
19
19
  * Given a ReadableStream of server sent events, tran
20
20
  */
21
21
  export function transformSSEStream(stream: ReadableStream<ServerSentEvent>, transform: (data: string) => CompletionChunk): ReadableStream<CompletionChunk> & AsyncIterable<CompletionChunk> {
22
- // on node and bun the readablestream is an async iterable
22
+ // on node and bun the ReadableStream is an async iterable
23
23
  return stream.pipeThrough(new TransformStream<ServerSentEvent, CompletionChunk>({
24
24
  transform(event: ServerSentEvent, controller) {
25
25
  if (event.type === 'event' && event.data && event.data !== '[DONE]') {
@@ -27,7 +27,7 @@ export function transformSSEStream(stream: ReadableStream<ServerSentEvent>, tran
27
27
  const result = transform(event.data) ?? ''
28
28
  controller.enqueue(result);
29
29
  } catch (err) {
30
- // double check for the last event whicb is not a JSON - at this time togetherai and mistralai returrns the string [DONE]
30
+ // double check for the last event which is not a JSON - at this time togetherai and mistralai returns the string [DONE]
31
31
  // do nothing - happens if data is not a JSON - the last event data is the [DONE] string
32
32
  }
33
33
  }
@@ -35,7 +35,7 @@ export function transformSSEStream(stream: ReadableStream<ServerSentEvent>, tran
35
35
  })) as ReadableStream<CompletionChunk> & AsyncIterable<CompletionChunk>;
36
36
  }
37
37
 
38
- export class EventStream<T, ReturnT = any> implements AsyncIterable<T>{
38
+ export class EventStream<T, ReturnT = any> implements AsyncIterable<T> {
39
39
 
40
40
  private queue: T[] = [];
41
41
  private pending?: {
@@ -58,7 +58,7 @@ export class EventStream<T, ReturnT = any> implements AsyncIterable<T>{
58
58
  }
59
59
 
60
60
  /**
61
- * Close the stream. This means the stream cannot be feeded anymore.
61
+ * Close the stream. This means the stream cannot be fed anymore.
62
62
  * But the consumer can still consume the remaining events.
63
63
  */
64
64
  close(value?: ReturnT) {
@@ -140,6 +140,3 @@ export async function* transformAsyncIterator<T, V>(
140
140
  // for await (const chunk of stream) {
141
141
  // console.log('++++chunk:', chunk);
142
142
  // }
143
-
144
-
145
-
package/src/capability/bedrock.ts CHANGED
@@ -0,0 +1,187 @@
1
+ import { ModelModalities, ModelCapabilities } from "@llumiverse/common";
2
+
3
+ // Record of Bedrock model capabilities keyed by model ID.
4
+ const RECORD_MODEL_CAPABILITIES: Record<string, ModelCapabilities> = {
5
+ "foundation-model/ai21.jamba-1-5-large-v1:0": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
6
+ "foundation-model/ai21.jamba-1-5-mini-v1:0": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
7
+ "foundation-model/ai21.jamba-instruct-v1:0": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
8
+ "foundation-model/amazon.nova-canvas-v1:0": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { image: true, text: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
9
+ "foundation-model/amazon.nova-lite-v1:0": { input: { text: true, image: true, video: true, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
10
+ "foundation-model/amazon.nova-micro-v1:0": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
11
+ "foundation-model/amazon.nova-pro-v1:0": { input: { text: true, image: true, video: true, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
12
+ "foundation-model/amazon.titan-text-express-v1": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
13
+ "foundation-model/amazon.titan-text-lite-v1": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
14
+ "foundation-model/amazon.titan-text-premier-v1:0": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
15
+ "foundation-model/amazon.titan-tg1-large": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
16
+ "foundation-model/anthropic.claude-3-5-haiku-20241022-v1:0": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
17
+ "foundation-model/anthropic.claude-3-5-sonnet-20240620-v1:0": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
18
+ "foundation-model/anthropic.claude-3-5-sonnet-20241022-v2:0": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
19
+ "foundation-model/anthropic.claude-3-haiku-20240307-v1:0": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
20
+ "foundation-model/anthropic.claude-3-opus-20240229-v1:0": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
21
+ "foundation-model/anthropic.claude-3-sonnet-20240229-v1:0": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
22
+ "foundation-model/anthropic.claude-instant-v1": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
23
+ "foundation-model/anthropic.claude-v2": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
24
+ "foundation-model/anthropic.claude-v2:1": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
25
+ "foundation-model/cohere.command-light-text-v14": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
26
+ "foundation-model/cohere.command-r-plus-v1:0": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
27
+ "foundation-model/cohere.command-r-v1:0": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
28
+ "foundation-model/cohere.command-text-v14": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
29
+ "foundation-model/meta.llama3-1-405b-instruct-v1:0": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
30
+ "foundation-model/meta.llama3-1-70b-instruct-v1:0": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
31
+ "foundation-model/meta.llama3-1-8b-instruct-v1:0": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
32
+ "foundation-model/meta.llama3-70b-instruct-v1:0": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
33
+ "foundation-model/meta.llama3-8b-instruct-v1:0": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
34
+ "foundation-model/mistral.mixtral-8x7b-instruct-v0:1": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
35
+ "foundation-model/mistral.mistral-7b-instruct-v0:2": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
36
+ "foundation-model/mistral.mistral-large-2402-v1:0": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
37
+ "foundation-model/mistral.mistral-large-2407-v1:0": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
38
+ "foundation-model/mistral.mistral-small-2402-v1:0": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
39
+ "inference-profile/us.amazon.nova-lite-v1:0": { input: { text: true, image: true, video: true, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
40
+ "inference-profile/us.amazon.nova-micro-v1:0": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
41
+ "inference-profile/us.amazon.nova-premier-v1:0": { input: { text: true, image: true, video: true, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
42
+ "inference-profile/us.amazon.nova-pro-v1:0": { input: { text: true, image: true, video: true, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
43
+ "inference-profile/us.anthropic.claude-3-5-haiku-20241022-v1:0": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
44
+ "inference-profile/us.anthropic.claude-3-5-sonnet-20240620-v1:0": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
45
+ "inference-profile/us.anthropic.claude-3-5-sonnet-20241022-v2:0": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
46
+ "inference-profile/us.anthropic.claude-3-7-sonnet-20250219-v1:0": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
47
+ "inference-profile/us.anthropic.claude-3-haiku-20240307-v1:0": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
48
+ "inference-profile/us.anthropic.claude-3-opus-20240229-v1:0": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
49
+ "inference-profile/us.anthropic.claude-3-sonnet-20240229-v1:0": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
50
+ "inference-profile/us.deepseek.r1-v1:0": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
51
+ "inference-profile/us.meta.llama3-1-70b-instruct-v1:0": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
52
+ "inference-profile/us.meta.llama3-1-8b-instruct-v1:0": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
53
+ "inference-profile/us.meta.llama3-2-1b-instruct-v1:0": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
54
+ "inference-profile/us.meta.llama3-2-11b-instruct-v1:0": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
55
+ "inference-profile/us.meta.llama3-2-3b-instruct-v1:0": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
56
+ "inference-profile/us.meta.llama3-2-90b-instruct-v1:0": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
57
+ "inference-profile/us.meta.llama3-3-70b-instruct-v1:0": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
58
+ "inference-profile/us.meta.llama4-maverick-17b-instruct-v1:0": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
59
+ "inference-profile/us.meta.llama4-scout-17b-instruct-v1:0": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
60
+ "inference-profile/us.mistral.pixtral-large-2502-v1:0": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
61
+ "inference-profile/us.writer.palmyra-x4-v1:0": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
62
+ "inference-profile/us.writer.palmyra-x5-v1:0": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false }
63
+ };
64
+
65
+ // Populate RECORD_FAMILY_CAPABILITIES as a const record (lowest common denominator for each family)
66
+ const RECORD_FAMILY_CAPABILITIES: Record<string, ModelCapabilities> = {
67
+ "foundation-model/ai21.jamba": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
68
+ "foundation-model/amazon.nova": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: false, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
69
+ "foundation-model/amazon.titan": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
70
+ "foundation-model/anthropic.claude": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
71
+ "foundation-model/anthropic.claude-3": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
72
+ "foundation-model/anthropic.claude-3-5": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
73
+ "foundation-model/anthropic.claude-3-7": { input: { text: true, image: true, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
74
+ "foundation-model/cohere.command": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
75
+ "foundation-model/meta.llama3": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
76
+ "foundation-model/mistral.mistral": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
77
+ "foundation-model/mistral.mistral-large": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
78
+ "foundation-model/mistral.mixtral": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
79
+ "inference-profile/us.anthropic.claude-3-haiku": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
80
+ "inference-profile/us.anthropic.claude-3-5-sonnet": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: true },
81
+ "inference-profile/us.anthropic.claude-3-opus": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
82
+ "inference-profile/us.anthropic.claude-3-sonnet": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
83
+ "inference-profile/us.deepseek.r1": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
84
+ "inference-profile/us.meta.llama3": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
85
+ "inference-profile/us.meta.llama4-maverick-17b": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
86
+ "inference-profile/us.meta.llama4-scout-17b": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: false, tool_support_streaming: false },
87
+ "inference-profile/us.mistral.pixtral": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false },
88
+ "inference-profile/us.writer.palmyra": { input: { text: true, image: false, video: false, audio: false, embed: false }, output: { text: true, image: false, video: false, audio: false, embed: false }, tool_support: true, tool_support_streaming: false }
89
+ };
90
+
91
+ /**
92
+ * Extract the model identifier from an ARN or inference profile
93
+ * @param modelName The full model ARN or name
94
+ * @returns The normalized model identifier
95
+ */
96
+ function normalizeModelName(modelName: string): string {
97
+ const modelLower = modelName.toLowerCase();
98
+ if (modelLower.includes("inference-profile")) {
99
+ const parts = modelLower.split("/");
100
+ if (parts.length > 1) {
101
+ const providerModel = parts[parts.length - 1];
102
+ const modelParts = providerModel.split(".");
103
+ if (modelParts.length > 1 && modelParts[1] === "deepseek") {
104
+ return `deepseek-${modelParts.slice(2).join(".")}`;
105
+ }
106
+ return modelParts.length > 2 ? modelParts.slice(2).join(".") : providerModel;
107
+ }
108
+ }
109
+ return modelLower;
110
+ }
111
+
112
+ // Fallback pattern lists for inferring modalities and tool support
113
+ const IMAGE_INPUT_MODELS = ["image"]; // fallback: if model id contains 'image', supports image input
114
+ const VIDEO_INPUT_MODELS = ["video"];
115
+ const AUDIO_INPUT_MODELS = ["audio"];
116
+ const TEXT_INPUT_MODELS = ["text"];
117
+ const IMAGE_OUTPUT_MODELS = ["image"];
118
+ const VIDEO_OUTPUT_MODELS = ["video"];
119
+ const AUDIO_OUTPUT_MODELS = ["audio"];
120
+ const TEXT_OUTPUT_MODELS = ["text"];
121
+ const EMBEDDING_OUTPUT_MODELS = ["embed"];
122
+ const TOOL_SUPPORT_MODELS = ["tool", "sonnet", "opus", "nova", "palmyra", "command-r", "mistral-large", "pixtral"];
123
+
124
+ function modelMatches(modelName: string, patterns: string[]): boolean {
125
+ return patterns.some(pattern => modelName.includes(pattern));
126
+ }
127
+
128
+ /**
129
+ * Get the full ModelCapabilities for a Bedrock model.
130
+ * Checks RECORD_MODEL_CAPABILITIES first, then falls back to pattern-based inference.
131
+ */
132
+ export function getModelCapabilitiesBedrock(model: string): ModelCapabilities {
133
+ // Normalize ARN or inference-profile to model ID
134
+ let normalized = model;
135
+ const arnPattern = /^arn:aws:bedrock:[^:]+:[^:]*:(inference-profile|foundation-model)\/.+/i;
136
+ if (arnPattern.test(model)) {
137
+ // Extract after last occurrence of 'foundation-model/' or 'inference-profile/'
138
+ const foundationIdx = model.lastIndexOf('foundation-model/');
139
+ const inferenceIdx = model.lastIndexOf('inference-profile/');
140
+ if (foundationIdx !== -1) {
141
+ normalized = model.substring(foundationIdx);
142
+ } else if (inferenceIdx !== -1) {
143
+ normalized = model.substring(inferenceIdx);
144
+ }
145
+ }
146
+ // Standardize region for inference-profile to 'us' for record lookup
147
+ // This allows support for different AWS regions by mapping all to 'us'
148
+ if (normalized.startsWith("inference-profile/")) {
149
+ normalized = normalized.replace(/^inference-profile\/[^.]+\./, "inference-profile/us.");
150
+ }
151
+
152
+ // 1. Exact match in record
153
+ const record = RECORD_MODEL_CAPABILITIES[normalized];
154
+ if (record) return record;
155
+
156
+ // 2. Fallback: find the longest matching family prefix in RECORD_FAMILY_CAPABILITIES
157
+ let bestFamilyKey = undefined;
158
+ let bestFamilyLength = 0;
159
+ for (const key of Object.keys(RECORD_FAMILY_CAPABILITIES)) {
160
+ if (normalized.startsWith(key) && key.length > bestFamilyLength) {
161
+ bestFamilyKey = key;
162
+ bestFamilyLength = key.length;
163
+ }
164
+ }
165
+ if (bestFamilyKey) {
166
+ return RECORD_FAMILY_CAPABILITIES[bestFamilyKey];
167
+ }
168
+
169
+ // 3. Fallback: infer from normalized name
170
+ normalized = normalizeModelName(normalized);
171
+ const input: ModelModalities = {
172
+ text: modelMatches(normalized, TEXT_INPUT_MODELS) || undefined,
173
+ image: modelMatches(normalized, IMAGE_INPUT_MODELS) || undefined,
174
+ video: modelMatches(normalized, VIDEO_INPUT_MODELS) || undefined,
175
+ audio: modelMatches(normalized, AUDIO_INPUT_MODELS) || undefined,
176
+ embed: false
177
+ };
178
+ const output: ModelModalities = {
179
+ text: modelMatches(normalized, TEXT_OUTPUT_MODELS) || undefined,
180
+ image: modelMatches(normalized, IMAGE_OUTPUT_MODELS) || undefined,
181
+ video: modelMatches(normalized, VIDEO_OUTPUT_MODELS) || undefined,
182
+ audio: modelMatches(normalized, AUDIO_OUTPUT_MODELS) || undefined,
183
+ embed: modelMatches(normalized, EMBEDDING_OUTPUT_MODELS) || undefined
184
+ };
185
+ const tool_support = modelMatches(normalized, TOOL_SUPPORT_MODELS) || undefined;
186
+ return { input, output, tool_support };
187
+ }
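A minimal lookup sketch for the new capability table; the import path is an assumption (lib/types/index.d.ts adds export * from "./capability.js", but this diff does not show whether capability.ts re-exports the Bedrock helper), and the second model id is deliberately made up to exercise the family fallback:

// Assumed import path; adjust to wherever getModelCapabilitiesBedrock is actually re-exported.
import { getModelCapabilitiesBedrock } from "@llumiverse/core";

// Exact match: the ARN is reduced to its "foundation-model/..." suffix and found in RECORD_MODEL_CAPABILITIES.
const sonnet = getModelCapabilitiesBedrock(
    "arn:aws:bedrock:us-east-1::foundation-model/anthropic.claude-3-5-sonnet-20240620-v1:0"
);
// sonnet.input.image === true and sonnet.tool_support === true, per the record above.

// Family fallback: a hypothetical future Claude 3.5 id matches the longest family prefix
// "foundation-model/anthropic.claude-3-5" and inherits that family's capabilities.
const future = getModelCapabilitiesBedrock("foundation-model/anthropic.claude-3-5-sonnet-20990101-v9:0");
// future.tool_support_streaming === true, per the family record above.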