@llumiverse/core 0.13.0 → 0.15.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (95)
  1. package/README.md +12 -10
  2. package/lib/cjs/CompletionStream.js +6 -8
  3. package/lib/cjs/CompletionStream.js.map +1 -1
  4. package/lib/cjs/Driver.js +20 -16
  5. package/lib/cjs/Driver.js.map +1 -1
  6. package/lib/cjs/async.js +9 -6
  7. package/lib/cjs/async.js.map +1 -1
  8. package/lib/cjs/formatters/claude.js +37 -9
  9. package/lib/cjs/formatters/claude.js.map +1 -1
  10. package/lib/cjs/formatters/commons.js +2 -3
  11. package/lib/cjs/formatters/commons.js.map +1 -1
  12. package/lib/cjs/formatters/generic.js +2 -2
  13. package/lib/cjs/formatters/generic.js.map +1 -1
  14. package/lib/cjs/formatters/index.js +2 -1
  15. package/lib/cjs/formatters/index.js.map +1 -1
  16. package/lib/cjs/formatters/llama2.js +1 -2
  17. package/lib/cjs/formatters/llama2.js.map +1 -1
  18. package/lib/cjs/formatters/llama3.js +42 -0
  19. package/lib/cjs/formatters/llama3.js.map +1 -0
  20. package/lib/cjs/formatters/openai.js +59 -5
  21. package/lib/cjs/formatters/openai.js.map +1 -1
  22. package/lib/cjs/index.js +1 -0
  23. package/lib/cjs/index.js.map +1 -1
  24. package/lib/cjs/json.js +3 -3
  25. package/lib/cjs/json.js.map +1 -1
  26. package/lib/cjs/resolver.js +45 -0
  27. package/lib/cjs/resolver.js.map +1 -0
  28. package/lib/cjs/stream.js +11 -0
  29. package/lib/cjs/stream.js.map +1 -0
  30. package/lib/cjs/types.js.map +1 -1
  31. package/lib/cjs/validation.js +43 -6
  32. package/lib/cjs/validation.js.map +1 -1
  33. package/lib/esm/CompletionStream.js +6 -8
  34. package/lib/esm/CompletionStream.js.map +1 -1
  35. package/lib/esm/Driver.js +19 -15
  36. package/lib/esm/Driver.js.map +1 -1
  37. package/lib/esm/async.js +4 -1
  38. package/lib/esm/async.js.map +1 -1
  39. package/lib/esm/formatters/claude.js +36 -7
  40. package/lib/esm/formatters/claude.js.map +1 -1
  41. package/lib/esm/formatters/commons.js +1 -1
  42. package/lib/esm/formatters/commons.js.map +1 -1
  43. package/lib/esm/formatters/index.js +2 -1
  44. package/lib/esm/formatters/index.js.map +1 -1
  45. package/lib/esm/formatters/llama3.js +39 -0
  46. package/lib/esm/formatters/llama3.js.map +1 -0
  47. package/lib/esm/formatters/openai.js +57 -3
  48. package/lib/esm/formatters/openai.js.map +1 -1
  49. package/lib/esm/index.js +1 -0
  50. package/lib/esm/index.js.map +1 -1
  51. package/lib/esm/resolver.js +42 -0
  52. package/lib/esm/resolver.js.map +1 -0
  53. package/lib/esm/stream.js +8 -0
  54. package/lib/esm/stream.js.map +1 -0
  55. package/lib/esm/types.js.map +1 -1
  56. package/lib/esm/validation.js +38 -4
  57. package/lib/esm/validation.js.map +1 -1
  58. package/lib/types/CompletionStream.d.ts +5 -5
  59. package/lib/types/CompletionStream.d.ts.map +1 -1
  60. package/lib/types/Driver.d.ts +5 -5
  61. package/lib/types/Driver.d.ts.map +1 -1
  62. package/lib/types/async.d.ts +1 -1
  63. package/lib/types/async.d.ts.map +1 -1
  64. package/lib/types/formatters/claude.d.ts +12 -6
  65. package/lib/types/formatters/claude.d.ts.map +1 -1
  66. package/lib/types/formatters/index.d.ts +2 -1
  67. package/lib/types/formatters/index.d.ts.map +1 -1
  68. package/lib/types/formatters/llama3.d.ts +7 -0
  69. package/lib/types/formatters/llama3.d.ts.map +1 -0
  70. package/lib/types/formatters/openai.d.ts +18 -1
  71. package/lib/types/formatters/openai.d.ts.map +1 -1
  72. package/lib/types/index.d.ts +1 -0
  73. package/lib/types/index.d.ts.map +1 -1
  74. package/lib/types/resolver.d.ts +2 -0
  75. package/lib/types/resolver.d.ts.map +1 -0
  76. package/lib/types/stream.d.ts +2 -0
  77. package/lib/types/stream.d.ts.map +1 -0
  78. package/lib/types/types.d.ts +8 -5
  79. package/lib/types/types.d.ts.map +1 -1
  80. package/lib/types/validation.d.ts +1 -2
  81. package/lib/types/validation.d.ts.map +1 -1
  82. package/package.json +87 -85
  83. package/src/CompletionStream.ts +4 -10
  84. package/src/Driver.ts +28 -27
  85. package/src/async.ts +5 -1
  86. package/src/formatters/claude.ts +58 -20
  87. package/src/formatters/commons.ts +1 -1
  88. package/src/formatters/index.ts +5 -3
  89. package/src/formatters/llama3.ts +55 -0
  90. package/src/formatters/openai.ts +95 -6
  91. package/src/index.ts +2 -1
  92. package/src/resolver.ts +39 -0
  93. package/src/stream.ts +8 -0
  94. package/src/types.ts +15 -12
  95. package/src/validation.ts +48 -7
package/package.json CHANGED
@@ -1,89 +1,91 @@
1
1
  {
2
- "name": "@llumiverse/core",
3
- "version": "0.13.0",
4
- "type": "module",
5
- "description": "Provide an universal API to LLMs. Support for existing LLMs can be added by writing a driver.",
6
- "files": [
7
- "lib",
8
- "src"
9
- ],
10
- "keywords": [
11
- "llm",
12
- "ai",
13
- "prompt",
14
- "prompt engineering",
15
- "ml",
16
- "machine learning",
17
- "embeddings",
18
- "training",
19
- "model",
20
- "universal",
21
- "api",
22
- "chatgpt",
23
- "openai",
24
- "vertexai",
25
- "bedrock",
26
- "replicate",
27
- "huggingface",
28
- "togetherai"
29
- ],
30
- "types": "./lib/types/index.d.ts",
31
- "typesVersions": {
32
- "*": {
33
- "async": [
34
- "./lib/types/async.d.ts"
35
- ],
36
- "formatters": [
37
- "./lib/types/formatters/index.d.ts"
38
- ]
39
- }
40
- },
41
- "exports": {
42
- ".": {
43
- "types": "./lib/types/index.d.ts",
44
- "import": "./lib/esm/index.js",
45
- "require": "./lib/cjs/index.js"
46
- },
47
- "./async": {
48
- "types": "./lib/types/async.d.ts",
49
- "import": "./lib/esm/async.js",
50
- "require": "./lib/cjs/async.js"
51
- },
52
- "./formatters": {
53
- "types": "./lib/types/formatters/index.d.ts",
54
- "import": "./lib/esm/formatters/index.js",
55
- "require": "./lib/cjs/formatters/index.js"
56
- }
57
- },
58
- "scripts": {
59
- "test": "vitest run",
60
- "build": "tsmod build",
61
- "clean": "rimraf ./lib tsconfig.tsbuildinfo"
62
- },
63
- "author": "Llumiverse",
64
- "license": "Apache-2.0",
65
- "homepage": "https://github.com/llumiverse/llumiverse",
66
- "repository": {
67
- "type": "git",
68
- "url": "git+ssh://git@github.com/llumiverse/llumiverse.git"
69
- },
70
- "devDependencies": {
71
- "@types/eventsource": "^1.1.15",
72
- "@types/json-schema": "^7.0.15",
73
- "api-fetch-client": "^0.8.6",
74
- "rimraf": "^5.0.5",
75
- "ts-dual-module": "^0.6.3",
76
- "typescript": "^5.4.2",
77
- "vitest": "^1.4.0"
2
+ "name": "@llumiverse/core",
3
+ "version": "0.15.0",
4
+ "type": "module",
5
+ "description": "Provide an universal API to LLMs. Support for existing LLMs can be added by writing a driver.",
6
+ "files": [
7
+ "lib",
8
+ "src"
9
+ ],
10
+ "keywords": [
11
+ "llm",
12
+ "ai",
13
+ "prompt",
14
+ "prompt engineering",
15
+ "ml",
16
+ "machine learning",
17
+ "embeddings",
18
+ "training",
19
+ "model",
20
+ "universal",
21
+ "api",
22
+ "chatgpt",
23
+ "openai",
24
+ "vertexai",
25
+ "bedrock",
26
+ "replicate",
27
+ "huggingface",
28
+ "togetherai"
29
+ ],
30
+ "types": "./lib/types/index.d.ts",
31
+ "typesVersions": {
32
+ "*": {
33
+ "async": [
34
+ "./lib/types/async.d.ts"
35
+ ],
36
+ "formatters": [
37
+ "./lib/types/formatters/index.d.ts"
38
+ ]
39
+ }
40
+ },
41
+ "exports": {
42
+ ".": {
43
+ "types": "./lib/types/index.d.ts",
44
+ "import": "./lib/esm/index.js",
45
+ "require": "./lib/cjs/index.js"
78
46
  },
79
- "dependencies": {
80
- "json-schema": "^0.4.0"
47
+ "./async": {
48
+ "types": "./lib/types/async.d.ts",
49
+ "import": "./lib/esm/async.js",
50
+ "require": "./lib/cjs/async.js"
81
51
  },
82
- "ts_dual_module": {
83
- "outDir": "lib",
84
- "exports": {
85
- "async": "async.js",
86
- "formatters": "formatters/index.js"
87
- }
52
+ "./formatters": {
53
+ "types": "./lib/types/formatters/index.d.ts",
54
+ "import": "./lib/esm/formatters/index.js",
55
+ "require": "./lib/cjs/formatters/index.js"
56
+ }
57
+ },
58
+ "author": "Llumiverse",
59
+ "license": "Apache-2.0",
60
+ "homepage": "https://github.com/llumiverse/llumiverse",
61
+ "repository": {
62
+ "type": "git",
63
+ "url": "git+ssh://git@github.com/llumiverse/llumiverse.git"
64
+ },
65
+ "devDependencies": {
66
+ "@types/eventsource": "^1.1.15",
67
+ "@types/json-schema": "^7.0.15",
68
+ "api-fetch-client": "^0.13.0",
69
+ "rimraf": "^5.0.5",
70
+ "ts-dual-module": "^0.6.3",
71
+ "typescript": "^5.4.2",
72
+ "vitest": "^1.4.0"
73
+ },
74
+ "dependencies": {
75
+ "@types/node": "^22.5.0",
76
+ "ajv": "^8.16.0",
77
+ "ajv-formats": "^3.0.1"
78
+ },
79
+ "ts_dual_module": {
80
+ "outDir": "lib",
81
+ "exports": {
82
+ "async": "async.js",
83
+ "formatters": "formatters/index.js"
88
84
  }
89
- }
85
+ },
86
+ "scripts": {
87
+ "test": "vitest run",
88
+ "build": "npx tsmod build",
89
+ "clean": "rimraf ./lib tsconfig.tsbuildinfo"
90
+ }
91
+ }
@@ -1,17 +1,14 @@
1
1
  import { AbstractDriver } from "./Driver.js";
2
- import { ExecutionResponse, CompletionStream, DriverOptions, ExecutionOptions, PromptSegment } from "./types.js";
2
+ import { CompletionStream, DriverOptions, ExecutionOptions, ExecutionResponse } from "./types.js";
3
3
 
4
4
  export class DefaultCompletionStream<PromptT = any> implements CompletionStream<PromptT> {
5
5
 
6
6
  chunks: string[];
7
- prompt: PromptT;
8
7
  completion: ExecutionResponse<PromptT> | undefined;
9
8
 
10
9
  constructor(public driver: AbstractDriver<DriverOptions, PromptT>,
11
- segments: PromptSegment[],
10
+ public prompt: PromptT,
12
11
  public options: ExecutionOptions) {
13
- this.driver = driver;
14
- this.prompt = this.driver.createPrompt(segments, options);
15
12
  this.chunks = [];
16
13
  }
17
14
 
@@ -38,6 +35,7 @@ export class DefaultCompletionStream<PromptT = any> implements CompletionStream<
38
35
  }
39
36
 
40
37
  const content = chunks.join('');
38
+
41
39
  const promptTokens = typeof this.prompt === 'string' ? this.prompt.length : JSON.stringify(this.prompt).length;
42
40
  const resultTokens = content.length; //TODO use chunks.length ?
43
41
 
@@ -59,14 +57,11 @@ export class DefaultCompletionStream<PromptT = any> implements CompletionStream<
59
57
 
60
58
  export class FallbackCompletionStream<PromptT = any> implements CompletionStream<PromptT> {
61
59
 
62
- prompt: PromptT;
63
60
  completion: ExecutionResponse<PromptT> | undefined;
64
61
 
65
62
  constructor(public driver: AbstractDriver<DriverOptions, PromptT>,
66
- segments: PromptSegment[],
63
+ public prompt: PromptT,
67
64
  public options: ExecutionOptions) {
68
- this.driver = driver;
69
- this.prompt = this.driver.createPrompt(segments, options);
70
65
  }
71
66
 
72
67
  async *[Symbol.asyncIterator]() {
@@ -82,4 +77,3 @@ export class FallbackCompletionStream<PromptT = any> implements CompletionStream
82
77
  this.completion = completion;
83
78
  }
84
79
  }
85
-
package/src/Driver.ts CHANGED
@@ -5,7 +5,7 @@
5
5
  */
6
6
 
7
7
  import { DefaultCompletionStream, FallbackCompletionStream } from "./CompletionStream.js";
8
- import { formatLlama2Prompt, formatTextPrompt } from "./formatters/index.js";
8
+ import { formatLlama2Prompt, formatLlama3Prompt, formatTextPrompt } from "./formatters/index.js";
9
9
  import {
10
10
  AIModel,
11
11
  Completion,
@@ -62,14 +62,14 @@ function applyExecutionDefaults(options: ExecutionOptions): ExecutionOptions {
62
62
  export interface Driver<PromptT = unknown> {
63
63
 
64
64
  /**
65
- *
66
- * @param segments
67
- * @param completion
65
+ *
66
+ * @param segments
67
+ * @param completion
68
68
  * @param model the model to train
69
69
  */
70
- createTrainingPrompt(options: TrainingPromptOptions): string;
70
+ createTrainingPrompt(options: TrainingPromptOptions): Promise<string>;
71
71
 
72
- createPrompt(segments: PromptSegment[], opts: PromptOptions): PromptT;
72
+ createPrompt(segments: PromptSegment[], opts: PromptOptions): Promise<PromptT>;
73
73
 
74
74
  execute(segments: PromptSegment[], options: ExecutionOptions): Promise<ExecutionResponse<PromptT>>;
75
75
 
@@ -111,8 +111,8 @@ export abstract class AbstractDriver<OptionsT extends DriverOptions = DriverOpti
111
111
  this.logger = createLogger(opts.logger);
112
112
  }
113
113
 
114
- createTrainingPrompt(options: TrainingPromptOptions): string {
115
- const prompt = this.createPrompt(options.segments, { resultSchema: options.schema, model: options.model })
114
+ async createTrainingPrompt(options: TrainingPromptOptions): Promise<string> {
115
+ const prompt = await this.createPrompt(options.segments, { result_schema: options.schema, model: options.model })
116
116
  return JSON.stringify({
117
117
  prompt,
118
118
  completion: typeof options.completion === 'string' ? options.completion : JSON.stringify(options.completion)
@@ -132,9 +132,9 @@ export abstract class AbstractDriver<OptionsT extends DriverOptions = DriverOpti
132
132
  }
133
133
 
134
134
  validateResult(result: Completion, options: ExecutionOptions) {
135
- if (!result.error && options.resultSchema) {
135
+ if (!result.error && options.result_schema) {
136
136
  try {
137
- result.result = validateResult(result.result, options.resultSchema);
137
+ result.result = validateResult(result.result, options.result_schema);
138
138
  } catch (error: any) {
139
139
  this.logger?.error({ err: error, data: result.result }, `[${this.provider}] [${options.model}] ${error.code ? '[' + error.code + '] ' : ''}Result validation error: ${error.message}`);
140
140
  result.error = {
@@ -148,14 +148,13 @@ export abstract class AbstractDriver<OptionsT extends DriverOptions = DriverOpti
148
148
 
149
149
  async execute(segments: PromptSegment[], options: ExecutionOptions): Promise<ExecutionResponse<PromptT>> {
150
150
  options = applyExecutionDefaults(options);
151
- const prompt = this.createPrompt(segments, options);
151
+ const prompt = await this.createPrompt(segments, options);
152
152
  return this._execute(prompt, options);
153
153
  }
154
154
 
155
155
  async _execute(prompt: PromptT, options: ExecutionOptions): Promise<ExecutionResponse<PromptT>> {
156
156
  this.logger.debug(
157
- `[${this.provider}] Executing ${options.model} with prompt`, prompt,
158
- );
157
+ `[${this.provider}] Executing prompt on ${options.model}`);
159
158
  try {
160
159
  const start = Date.now();
161
160
  const result = await this.requestCompletion(prompt, options);
@@ -171,35 +170,38 @@ export abstract class AbstractDriver<OptionsT extends DriverOptions = DriverOpti
171
170
  // by default no stream is supported. we block and we return all at once
172
171
  async stream(segments: PromptSegment[], options: ExecutionOptions): Promise<CompletionStream<PromptT>> {
173
172
  options = applyExecutionDefaults(options);
173
+ const prompt = await this.createPrompt(segments, options);
174
174
  const canStream = await this.canStream(options);
175
175
  if (canStream) {
176
- return new DefaultCompletionStream(this, segments, options);
176
+ return new DefaultCompletionStream(this, prompt, options);
177
177
  } else {
178
- return new FallbackCompletionStream(this, segments, options);
178
+ return new FallbackCompletionStream(this, prompt, options);
179
179
  }
180
180
  }
181
181
 
182
182
  /**
183
183
  * Override this method to provide a custom prompt formatter
184
- * @param segments
185
- * @param options
186
- * @returns
184
+ * @param segments
185
+ * @param options
186
+ * @returns
187
187
  */
188
- protected formatPrompt(segments: PromptSegment[], opts: PromptOptions): PromptT {
189
- if (/\bllama2?\b/i.test(opts.model)) {
190
- return formatLlama2Prompt(segments, opts.resultSchema) as PromptT;
188
+ protected async formatPrompt(segments: PromptSegment[], opts: PromptOptions): Promise<PromptT> {
189
+ if (/\bllama.?2\b/i.test(opts.model)) {
190
+ return formatLlama2Prompt(segments, opts.result_schema) as PromptT;
191
+ } else if (/\bllama.?3\b/i.test(opts.model)) {
192
+ return formatLlama3Prompt(segments, opts.result_schema) as PromptT;
191
193
  } else {
192
- return formatTextPrompt(segments, opts.resultSchema) as PromptT;
194
+ return formatTextPrompt(segments, opts.result_schema) as PromptT;
193
195
  }
194
196
  }
195
197
 
196
- public createPrompt(segments: PromptSegment[], opts: PromptOptions): PromptT {
197
- return opts.format ? opts.format(segments, opts.resultSchema) : this.formatPrompt(segments, opts);
198
+ public async createPrompt(segments: PromptSegment[], opts: PromptOptions): Promise<PromptT> {
199
+ return await (opts.format ? opts.format(segments, opts.result_schema) : this.formatPrompt(segments, opts));
198
200
  }
199
201
 
200
202
  /**
201
203
  * Must be overrided if the implementation cannot stream.
202
- * Some implementation may be able to stream for certain models but not for others.
204
+ * Some implementation may be able to stream for certain models but not for others.
203
205
  * You must overwrite and return false if the current model doesn't support streaming.
204
206
  * The default implementation returns true, so it is assumed that the streaming can be done.
205
207
  * If this method returns false then the streaming execution will fallback on a blocking execution streaming the entire response as a single event.
@@ -213,7 +215,7 @@ export abstract class AbstractDriver<OptionsT extends DriverOptions = DriverOpti
213
215
  /**
214
216
  * Get a list of models that can be trained.
215
217
  * The default is to return an empty array
216
- * @returns
218
+ * @returns
217
219
  */
218
220
  async listTrainableModels(): Promise<AIModel[]> {
219
221
  return [];
@@ -233,4 +235,3 @@ export abstract class AbstractDriver<OptionsT extends DriverOptions = DriverOpti
233
235
  abstract generateEmbeddings(options: EmbeddingsOptions): Promise<EmbeddingsResult>;
234
236
 
235
237
  }
236
-
package/src/async.ts CHANGED
@@ -104,8 +104,12 @@ export class EventStream<T, ReturnT = any> implements AsyncIterable<T>{
104
104
  **/
105
105
  export async function* transformAsyncIterator<T, V>(
106
106
  originalGenerator: AsyncIterable<T>,
107
- transform: (value: T) => V | Promise<V>
107
+ transform: (value: T) => V | Promise<V>,
108
+ initCallback?: () => V | Promise<V>
108
109
  ): AsyncIterable<V> {
110
+ if (initCallback) {
111
+ yield initCallback();
112
+ }
109
113
  for await (const value of originalGenerator) {
110
114
  yield transform(value);
111
115
  }
@@ -1,14 +1,21 @@
1
1
  import { JSONSchema4 } from "json-schema";
2
2
  import { PromptRole, PromptSegment } from "../index.js";
3
+ import { readStreamAsBase64 } from "../stream.js";
3
4
  import { getJSONSafetyNotice } from "./commons.js";
4
5
 
5
6
  export interface ClaudeMessage {
6
7
  role: 'user' | 'assistant',
7
- content: {
8
- type: "image" | "text",
9
- source?: string, // only set for images
10
- text?: string // only set for text messages
11
- }[]
8
+ content: ClaudeMessagePart[]
9
+ }
10
+
11
+ interface ClaudeMessagePart {
12
+ type: "image" | "text",
13
+ source?: {
14
+ type: "base64",
15
+ media_type: string,
16
+ data: string,
17
+ }, // only set for images
18
+ text?: string // only set for text messages
12
19
  }
13
20
 
14
21
  export interface ClaudeMessagesPrompt {
@@ -20,19 +27,50 @@ export interface ClaudeMessagesPrompt {
20
27
  * A formatter user by Bedrock to format prompts for claude related models
21
28
  */
22
29
 
23
- export function formatClaudePrompt(segments: PromptSegment[], schema?: JSONSchema4): ClaudeMessagesPrompt {
30
+ export async function formatClaudePrompt(segments: PromptSegment[], schema?: JSONSchema4): Promise<ClaudeMessagesPrompt> {
24
31
  const system: string[] = [];
25
32
  const safety: string[] = [];
26
33
  const messages: ClaudeMessage[] = [];
27
34
 
28
- for (const msg of segments) {
29
- if (msg.role === PromptRole.system) {
30
- system.push(msg.content);
31
- } else if (msg.role === PromptRole.safety) {
32
- safety.push(msg.content);
35
+ //TODO type: 'image' -> detect from f.mime_type
36
+ for (const segment of segments) {
37
+
38
+ const parts: ClaudeMessagePart[] = [];
39
+ if (segment.files) for (const f of segment.files) {
40
+ const source = await f.getStream();
41
+ const data = await readStreamAsBase64(source);
42
+ parts.push({
43
+ type: 'image',
44
+ source: {
45
+ type: "base64",
46
+ media_type: f.mime_type || 'image/png',
47
+ data
48
+ }
49
+ })
50
+ }
51
+
52
+ if (segment.content) {
53
+ parts.push({
54
+ type: "text",
55
+ text: segment.content
56
+ })
57
+ }
58
+
59
+ if (segment.role === PromptRole.system) {
60
+ system.push(segment.content);
61
+ } else if (segment.role === PromptRole.safety) {
62
+ safety.push(segment.content);
63
+ } else if (messages.length > 0 && messages[messages.length - 1].role === segment.role) {
64
+ //concatenate messages of the same role (Claude requires alternative user and assistant roles)
65
+ messages[messages.length - 1].content.push(...parts);
33
66
  } else {
34
- messages.push({ content: [{ type: "text", text: msg.content }], role: msg.role });
67
+ messages.push({
68
+ role: segment.role,
69
+ content: parts
70
+ });
35
71
  }
72
+
73
+
36
74
  }
37
75
 
38
76
  if (schema) {
@@ -57,24 +95,24 @@ export function formatClaudePrompt(segments: PromptSegment[], schema?: JSONSchem
57
95
  /*if (schema) {
58
96
  messages.push({
59
97
  role: "user",
60
- content: [{
98
+ content: [{
61
99
  type: "text",
62
100
  text: getJSONSafetyNotice(schema)
63
101
  }]
64
102
  });
65
103
  }*/
66
104
 
67
- /*start Claude's message to amke sure it answers properly in JSON
68
- if enabled, this requires to add the { to Claude's response*/
69
- if (schema) {
105
+ /*start Claude's message to amke sure it answers properly in JSON
106
+ if enabled, this requires to add the { to Claude's response*/
107
+ if (schema) {
70
108
  messages.push({
71
109
  role: "assistant",
72
- content: [{
73
- type: "text",
110
+ content: [{
111
+ type: "text",
74
112
  text: "{"
75
- }]});
113
+ }]
114
+ });
76
115
  }
77
-
78
116
  // put system mesages first and safety last
79
117
  return {
80
118
  system: systemMessage,
@@ -1,5 +1,5 @@
1
1
  import { JSONSchema4 } from "json-schema";
2
2
 
3
3
  export function getJSONSafetyNotice(schema: JSONSchema4) {
4
- return "The answer must be a JSON object using the following JSON Schema:\n" + JSON.stringify(schema);
4
+ return "The answer must be a JSON object using the following JSON Schema:\n" + JSON.stringify(schema, undefined, 2);
5
5
  }
@@ -3,8 +3,10 @@ import { PromptSegment } from "../types.js";
3
3
 
4
4
  export type PromptFormatter<T = any> = (messages: PromptSegment[], schema?: JSONSchema4) => T;
5
5
 
6
- export * from "./commons.js"
6
+ export * from "./claude.js";
7
+ export * from "./commons.js";
7
8
  export * from "./generic.js";
8
9
  export * from "./llama2.js";
9
- export * from "./claude.js";
10
- export * from "./openai.js";
10
+ export * from "./llama3.js";
11
+ export * from "./openai.js";
12
+
@@ -0,0 +1,55 @@
1
+ import { JSONSchema4 } from "json-schema";
2
+ import { PromptRole, PromptSegment } from "../index.js";
3
+
4
+ /**
5
+ * A formatter user by Bedrock to format prompts for claude related models
6
+ */
7
+
8
+ export async function formatLlama3Prompt(segments: PromptSegment[], schema?: JSONSchema4): Promise<string> {
9
+
10
+ let messages: string[] = []
11
+ segments.filter(s => s.role !== PromptRole.safety ).forEach(s => {
12
+ messages.push(formatLlama3Message(s.role, s.content))
13
+ })
14
+
15
+ if (schema) {
16
+ messages.push(formatLlama3Message("user", formatSchemaInstruction(schema)));
17
+ }
18
+
19
+ //add safety
20
+ let safetyMsg = `
21
+ IMPORTANT: This is the most important instruction, you cannot answer against the following rules:
22
+ `
23
+ const safety = segments.filter(s => s.role === PromptRole.safety);
24
+ safety.forEach(s => {
25
+ messages.push(formatLlama3Message("system", safetyMsg + s.content))
26
+ })
27
+
28
+ let prompt = "<|begin_of_text|>"
29
+ prompt += messages.join("\n\n")
30
+
31
+ return prompt
32
+
33
+ }
34
+
35
+
36
+ function formatLlama3Message(role: string, content: string) {
37
+
38
+ let message = `<|start_header_id|>${role}<|end_header_id|>\n`
39
+ message += content
40
+ message += `\n<|eot_id|>`
41
+
42
+ return message
43
+
44
+ }
45
+
46
+ function formatSchemaInstruction(schema: object) {
47
+
48
+ return `You must answer using the following JSONSchema.
49
+ Do not write anything other than a JSON object corresponding to the schema:
50
+ <schema>
51
+ ${JSON.stringify(schema, undefined, 2)}
52
+ </schema>
53
+ `
54
+
55
+ }