@llumiverse/core 0.9.0 → 0.9.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. package/lib/cjs/Driver.js +2 -2
  2. package/lib/cjs/Driver.js.map +1 -1
  3. package/lib/cjs/formatters/claude.js +67 -0
  4. package/lib/cjs/formatters/claude.js.map +1 -0
  5. package/lib/cjs/formatters/generic.js +27 -0
  6. package/lib/cjs/formatters/generic.js.map +1 -0
  7. package/lib/cjs/formatters/index.js +31 -0
  8. package/lib/cjs/formatters/index.js.map +1 -0
  9. package/lib/cjs/formatters/llama2.js +49 -0
  10. package/lib/cjs/formatters/llama2.js.map +1 -0
  11. package/lib/cjs/formatters/openai.js +24 -0
  12. package/lib/cjs/formatters/openai.js.map +1 -0
  13. package/lib/cjs/index.js +1 -1
  14. package/lib/cjs/index.js.map +1 -1
  15. package/lib/esm/Driver.js +1 -1
  16. package/lib/esm/Driver.js.map +1 -1
  17. package/lib/esm/formatters/claude.js +63 -0
  18. package/lib/esm/formatters/claude.js.map +1 -0
  19. package/lib/esm/formatters/generic.js +23 -0
  20. package/lib/esm/formatters/generic.js.map +1 -0
  21. package/lib/esm/formatters/index.js +27 -0
  22. package/lib/esm/formatters/index.js.map +1 -0
  23. package/lib/esm/formatters/llama2.js +45 -0
  24. package/lib/esm/formatters/llama2.js.map +1 -0
  25. package/lib/esm/formatters/openai.js +20 -0
  26. package/lib/esm/formatters/openai.js.map +1 -0
  27. package/lib/esm/index.js +1 -1
  28. package/lib/esm/index.js.map +1 -1
  29. package/lib/types/formatters/claude.d.ts +19 -0
  30. package/lib/types/formatters/claude.d.ts.map +1 -0
  31. package/lib/types/formatters/generic.d.ts +8 -0
  32. package/lib/types/formatters/generic.d.ts.map +1 -0
  33. package/lib/types/{formatters.d.ts → formatters/index.d.ts} +2 -2
  34. package/lib/types/formatters/index.d.ts.map +1 -0
  35. package/lib/types/formatters/llama2.d.ts +4 -0
  36. package/lib/types/formatters/llama2.d.ts.map +1 -0
  37. package/lib/types/formatters/openai.d.ts +4 -0
  38. package/lib/types/formatters/openai.d.ts.map +1 -0
  39. package/lib/types/index.d.ts +1 -1
  40. package/lib/types/index.d.ts.map +1 -1
  41. package/package.json +2 -2
  42. package/src/Driver.ts +1 -1
  43. package/src/formatters/claude.ts +85 -0
  44. package/src/formatters/generic.ts +34 -0
  45. package/src/formatters/index.ts +33 -0
  46. package/src/formatters/llama2.ts +58 -0
  47. package/src/formatters/openai.ts +22 -0
  48. package/src/index.ts +1 -1
  49. package/lib/cjs/formatters.js +0 -117
  50. package/lib/cjs/formatters.js.map +0 -1
  51. package/lib/esm/formatters.js +0 -113
  52. package/lib/esm/formatters.js.map +0 -1
  53. package/lib/types/formatters.d.ts.map +0 -1
  54. package/src/formatters.ts +0 -147
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@llumiverse/core",
- "version": "0.9.0",
+ "version": "0.9.2",
  "type": "module",
  "description": "Provide an universal API to LLMs. Support for existing LLMs can be added by writing a driver.",
  "files": [
@@ -78,4 +78,4 @@
  "async": "async.js"
  }
  }
- }
+ }
package/src/Driver.ts CHANGED
@@ -5,7 +5,7 @@
   */

  import { DefaultCompletionStream, FallbackCompletionStream } from "./CompletionStream.js";
- import { PromptFormatters } from "./formatters.js";
+ import { PromptFormatters } from "./formatters/index.js";
  import {
      AIModel,
      Completion,
package/src/formatters/claude.ts ADDED
@@ -0,0 +1,85 @@
+ import { JSONSchema4 } from "json-schema";
+ import { PromptRole, PromptSegment } from "../index.js";
+
+ export interface ClaudeMessage {
+     role: 'user' | 'assistant',
+     content: {
+         type: "image" | "text",
+         source?: string, // only set for images
+         text?: string // only set for text messages
+     }[]
+ }
+
+ export interface ClaudeMessagesPrompt {
+     system?: string;
+     messages: ClaudeMessage[]
+ }
+
+ /**
+  * A formatter user by Bedrock to format prompts for claude related models
+  */
+
+ export function claudeMessages(segments: PromptSegment[], schema?: JSONSchema4): ClaudeMessagesPrompt {
+     const system: string[] = [];
+     const safety: string[] = [];
+     const messages: ClaudeMessage[] = [];
+
+     for (const msg of segments) {
+         if (msg.role === PromptRole.system) {
+             system.push(msg.content);
+         } else if (msg.role === PromptRole.safety) {
+             safety.push(msg.content);
+         } else {
+             messages.push({ content: [{ type: "text", text: msg.content }], role: msg.role });
+         }
+     }
+
+     if (schema) {
+         safety.push(`You must answer using the following JSONSchema:
+ ---
+ ${JSON.stringify(schema)}
+ ---`);
+     }
+
+
+     // messages must contains at least 1 item. If the prompt doesn;t contains a user message (but only system messages)
+     // we need to put the system messages in the messages array
+
+     let systemMessage = system.join('\n').trim();
+     if (messages.length === 0) {
+         if (!systemMessage) {
+             throw new Error('Prompt must contain at least one message');
+         }
+         messages.push({ content: [{ type: "text", text: systemMessage }], role: 'user' });
+         systemMessage = safety.join('\n');
+     } else if (safety.length > 0) {
+         systemMessage = systemMessage + '\n\nIMPORTANT: ' + safety.join('\n');
+     }
+
+     /*start Claude's message to amke sure it answers properly in JSON
+     if enabled, this requires to add the { to Claude's response
+     if (schema) {
+         messages.push({
+             role: "assistant",
+             content: [{
+                 type: "text",
+                 text: "{"
+             }]});
+     }*/
+
+     // put system mesages first and safety last
+     return {
+         system: systemMessage,
+         messages
+     }
+ }
+
+ // export function claude(messages: PromptSegment[], schema?: JSONSchema4) {
+ //     const prompt = genericColonSeparator(messages, schema, {
+ //         user: "\nHuman",
+ //         assistant: "\nAssistant",
+ //         system: "\nHuman",
+ //     });
+
+ //     return "\n\n" + prompt + "\n\nAssistant:";
+ // }
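To make the new Bedrock/Claude formatter concrete, here is a minimal usage sketch (not part of the diff): the relative import mirrors the one used by formatters/index.ts below, the segment literals are illustrative, and the segment shape is inferred from how the formatter reads .role and .content.

    import { claudeMessages } from "./claude.js";
    import { PromptRole } from "../index.js";

    // System segments collapse into the top-level `system` string; other segments
    // become Claude messages holding a single text content block. Safety segments,
    // when present, are appended to the system prompt after an "IMPORTANT:" marker.
    const prompt = claudeMessages([
        { role: PromptRole.system, content: "You are terse." },
        { role: PromptRole.user, content: "List three colors." },
    ]);
    // prompt.system   -> "You are terse."
    // prompt.messages -> [{ role: "user", content: [{ type: "text", text: "List three colors." }] }]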
package/src/formatters/generic.ts ADDED
@@ -0,0 +1,34 @@
+ import { JSONSchema4 } from "json-schema";
+ import { PromptRole, PromptSegment } from "../index.js";
+
+ export function genericColonSeparator(
+     messages: PromptSegment[],
+     schema?: JSONSchema4,
+     labels: {
+         user: string;
+         assistant: string;
+         system: string;
+     } = { user: "User", assistant: "Assistant", system: "System" }
+ ) {
+     const promptMessages = [];
+     for (const m of messages) {
+         if (m.role === PromptRole.user) {
+             promptMessages.push(`${labels?.user}: ${m.content.trim()}`);
+         }
+         if (m.role === PromptRole.assistant) {
+             promptMessages.push(`${labels.assistant}: ${m.content.trim()}`);
+         }
+         if (m.role === PromptRole.system) {
+             promptMessages.push(`${labels.system}: ${m.content.trim()}`);
+         }
+     }
+
+     if (schema) {
+         promptMessages.push(`${labels.system}: You must answer using the following JSONSchema:
+ ---
+ ${JSON.stringify(schema)}
+ ---`);
+     }
+
+     return promptMessages.join("\n\n");
+ }
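A quick sketch of the generic fallback formatter (illustrative, not part of the diff): each segment is rendered as a "Label: content" line, and passing a JSON schema appends a System instruction asking for JSON output.

    import { genericColonSeparator } from "./generic.js";
    import { PromptRole } from "../index.js";

    // Segments are rendered as "Label: content" and joined with blank lines.
    const text = genericColonSeparator([
        { role: PromptRole.user, content: "Hello" },
        { role: PromptRole.assistant, content: "Hi there" },
    ]);
    // "User: Hello\n\nAssistant: Hi there"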
package/src/formatters/index.ts ADDED
@@ -0,0 +1,33 @@
+ import { JSONSchema4 } from "json-schema";
+ import { genericColonSeparator } from "./generic.js";
+ import { llama2 } from "./llama2.js";
+ import { openAI } from "./openai.js";
+ import {
+     PromptFormats,
+     PromptSegment
+ } from "../types.js";
+ import { claudeMessages } from "./claude.js";
+
+ export function inferFormatterFromModelName(modelName: string): PromptFormats {
+     const name = modelName.toLowerCase();
+     if (name.includes("llama")) {
+         return PromptFormats.llama2;
+     } else if (name.includes("gpt")) {
+         return PromptFormats.openai;
+     } else if (name.includes("claude")) {
+         return PromptFormats.claude;
+     } else {
+         return PromptFormats.genericTextLLM;
+     }
+ }
+
+ export const PromptFormatters: Record<
+     PromptFormats,
+     (messages: PromptSegment[], schema?: JSONSchema4) => any
+ > = {
+     openai: openAI,
+     llama2: llama2,
+     claude: claudeMessages,
+     genericTextLLM: genericColonSeparator,
+ };
+
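This index module is the piece Driver.ts now imports. A small sketch of how its two exports work together (the model id and segment values are illustrative, not taken from the package):

    import { inferFormatterFromModelName, PromptFormatters } from "./index.js";
    import { PromptRole } from "../index.js";

    // Format inference is a substring match on the lowercased model name
    // ("llama", "gpt", "claude"), with the generic text formatter as fallback.
    const format = inferFormatterFromModelName("anthropic.claude-v2"); // PromptFormats.claude
    const prompt = PromptFormatters[format]([
        { role: PromptRole.user, content: "Ping" },
    ]);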
package/src/formatters/llama2.ts ADDED
@@ -0,0 +1,58 @@
+ import { JSONSchema4 } from "json-schema";
+ import { PromptRole, PromptSegment } from "../index.js";
+
+ export function llama2(messages: PromptSegment[], schema?: JSONSchema4) {
+     const BOS = "<s>";
+     const EOS = "</s>";
+     const INST = "[INST]";
+     const INST_END = "[/INST]";
+     const SYS = "<<SYS>>\n";
+     const SYS_END = "\n<</SYS>>";
+
+     const promptMessages = [BOS];
+     const specialTokens = [BOS, EOS, INST, INST_END, SYS, SYS_END];
+
+     for (const m of messages) {
+         if (m.role === PromptRole.user) {
+             if (specialTokens.includes(m.content)) {
+                 throw new Error(
+                     `Cannot use special token ${m.content.trim()} in user message`
+                 );
+             }
+             promptMessages.push(`${INST} ${m.content.trim()} ${INST_END}`);
+         }
+         if (m.role === PromptRole.assistant) {
+             promptMessages.push(`${m.content.trim()}`);
+         }
+         if (m.role === PromptRole.system) {
+             promptMessages.push(`${SYS}${m.content.trim()}${SYS_END}`);
+         }
+     }
+
+     for (const m of messages ?? []) {
+         if (m.role === PromptRole.safety) {
+             promptMessages.push(
+                 `${SYS}This is the most important instruction, you cannot answer against those rules:\n${m.content.trim()}${SYS_END}}`
+             );
+         }
+     }
+
+     if (schema) {
+         promptMessages.push(formatSchemaInstruction(schema));
+     }
+
+     promptMessages.push(EOS);
+
+     return promptMessages.join("\n\n");
+ }
+
+ function formatSchemaInstruction(schema: JSONSchema4) {
+     const schema_instruction = `<<SYS>>You must answer using the following JSONSchema.
+ Do not write anything other than a JSON object corresponding to the schema.
+ <schema>
+ ${JSON.stringify(schema)}
+ </schema>
+ <</SYS>>`;
+
+     return schema_instruction;
+ }
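For reference, a sketch of what the Llama 2 formatter produces (segment values are illustrative, not part of the diff):

    import { llama2 } from "./llama2.js";
    import { PromptRole } from "../index.js";

    // User text is wrapped in [INST] ... [/INST], system text in <<SYS>> ... <</SYS>>,
    // the whole prompt is bracketed by <s> ... </s>, and parts are joined by blank lines.
    const text = llama2([
        { role: PromptRole.system, content: "Answer briefly." },
        { role: PromptRole.user, content: "Name a prime number." },
    ]);
    // "<s>\n\n<<SYS>>\nAnswer briefly.\n<</SYS>>\n\n[INST] Name a prime number. [/INST]\n\n</s>"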
package/src/formatters/openai.ts ADDED
@@ -0,0 +1,22 @@
+ import { PromptRole } from "../index.js";
+ import { PromptSegment } from "../types.js";
+ import OpenAI from "openai";
+
+ export function openAI(segments: PromptSegment[]) {
+     const system: OpenAI.Chat.ChatCompletionMessageParam[] = [];
+     const others: OpenAI.Chat.ChatCompletionMessageParam[] = [];
+     const safety: OpenAI.Chat.ChatCompletionMessageParam[] = [];
+
+     for (const msg of segments) {
+         if (msg.role === PromptRole.system) {
+             system.push({ content: msg.content, role: "system" });
+         } else if (msg.role === PromptRole.safety) {
+             safety.push({ content: msg.content, role: "system" });
+         } else {
+             others.push({ content: msg.content, role: "user" });
+         }
+     }
+
+     // put system mesages first and safety last
+     return system.concat(others).concat(safety);
+ }
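And a sketch of the OpenAI formatter's output ordering (segment values are illustrative, not part of the diff):

    import { openAI } from "./openai.js";
    import { PromptRole } from "../index.js";

    // System segments come first, everything else (emitted with role "user") sits in
    // the middle, and safety segments (also sent as "system") come last.
    const messages = openAI([
        { role: PromptRole.user, content: "What is 2 + 2?" },
        { role: PromptRole.system, content: "Reply with a number only." },
    ]);
    // [ { role: "system", content: "Reply with a number only." },
    //   { role: "user", content: "What is 2 + 2?" } ]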
package/src/index.ts CHANGED
@@ -1,4 +1,4 @@
  export * from "./Driver.js";
- export * from "./formatters.js";
+ export * from "./formatters/index.js";
  export * from "./json.js";
  export * from "./types.js";
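Because the root index still re-exports the formatters (now from ./formatters/index.js), consumer imports from the package entry point are unchanged. A minimal consumer-side sketch (the model id is illustrative):

    import { inferFormatterFromModelName, PromptFormatters, PromptRole } from "@llumiverse/core";

    const format = inferFormatterFromModelName("gpt-4");
    const messages = PromptFormatters[format]([{ role: PromptRole.user, content: "Hello" }]);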
package/lib/cjs/formatters.js DELETED
@@ -1,117 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.PromptFormatters = exports.inferFormatterFromModelName = void 0;
- const types_js_1 = require("./types.js");
- function inferFormatterFromModelName(modelName) {
-     const name = modelName.toLowerCase();
-     if (name.includes("llama")) {
-         return types_js_1.PromptFormats.llama2;
-     }
-     else if (name.includes("gpt")) {
-         return types_js_1.PromptFormats.openai;
-     }
-     else if (name.includes("claude")) {
-         return types_js_1.PromptFormats.claude;
-     }
-     else {
-         return types_js_1.PromptFormats.genericTextLLM;
-     }
- }
- exports.inferFormatterFromModelName = inferFormatterFromModelName;
- exports.PromptFormatters = {
-     openai: openAI,
-     llama2: llama2,
-     claude: claude,
-     genericTextLLM: genericColonSeparator,
- };
- function openAI(segments) {
-     const system = [];
-     const others = [];
-     const safety = [];
-     for (const msg of segments) {
-         if (msg.role === types_js_1.PromptRole.system) {
-             system.push({ content: msg.content, role: "system" });
-         }
-         else if (msg.role === types_js_1.PromptRole.safety) {
-             safety.push({ content: msg.content, role: "system" });
-         }
-         else {
-             others.push({ content: msg.content, role: "user" });
-         }
-     }
-     // put system mesages first and safety last
-     return system.concat(others).concat(safety);
- }
- function llama2(messages, schema) {
-     const BOS = "<s>";
-     const EOS = "</s>";
-     const INST = "[INST]";
-     const INST_END = "[/INST]";
-     const SYS = "<<SYS>>\n";
-     const SYS_END = "\n<</SYS>>";
-     const promptMessages = [BOS];
-     const specialTokens = [BOS, EOS, INST, INST_END, SYS, SYS_END];
-     for (const m of messages) {
-         if (m.role === types_js_1.PromptRole.user) {
-             if (specialTokens.includes(m.content)) {
-                 throw new Error(`Cannot use special token ${m.content.trim()} in user message`);
-             }
-             promptMessages.push(`${INST} ${m.content.trim()} ${INST_END}`);
-         }
-         if (m.role === types_js_1.PromptRole.assistant) {
-             promptMessages.push(`${m.content.trim()}`);
-         }
-         if (m.role === types_js_1.PromptRole.system) {
-             promptMessages.push(`${SYS}${m.content.trim()}${SYS_END}`);
-         }
-     }
-     for (const m of messages ?? []) {
-         if (m.role === types_js_1.PromptRole.safety) {
-             promptMessages.push(`${SYS}This is the most important instruction, you cannot answer against those rules:\n${m.content.trim()}${SYS_END}}`);
-         }
-     }
-     if (schema) {
-         promptMessages.push(formatSchemaInstruction(schema));
-     }
-     promptMessages.push(EOS);
-     return promptMessages.join("\n\n");
- }
- function genericColonSeparator(messages, schema, labels = { user: "User", assistant: "Assistant", system: "System" }) {
-     const promptMessages = [];
-     for (const m of messages) {
-         if (m.role === types_js_1.PromptRole.user) {
-             promptMessages.push(`${labels?.user}: ${m.content.trim()}`);
-         }
-         if (m.role === types_js_1.PromptRole.assistant) {
-             promptMessages.push(`${labels.assistant}: ${m.content.trim()}`);
-         }
-         if (m.role === types_js_1.PromptRole.system) {
-             promptMessages.push(`${labels.system}: ${m.content.trim()}`);
-         }
-     }
-     if (schema) {
-         promptMessages.push(`${labels.system}: You must answer using the following JSONSchema:
- ---
- ${JSON.stringify(schema)}
- ---`);
-     }
-     return promptMessages.join("\n\n");
- }
- function claude(messages, schema) {
-     const prompt = genericColonSeparator(messages, schema, {
-         user: "\nHuman",
-         assistant: "\nAssistant",
-         system: "\nHuman",
-     });
-     return "\n\n" + prompt + "\n\nAssistant:";
- }
- function formatSchemaInstruction(schema) {
-     const schema_instruction = `<<SYS>>You must answer using the following JSONSchema.
- Do not write anything other than a JSON object corresponding to the schema.
- <schema>
- ${JSON.stringify(schema)}
- </schema>
- <</SYS>>`;
-     return schema_instruction;
- }
- //# sourceMappingURL=formatters.js.map
package/lib/cjs/formatters.js.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"formatters.js","sourceRoot":"","sources":["../../src/formatters.ts"],"names":[],"mappings":";;;AAEA,yCAIoB;AAEpB,SAAgB,2BAA2B,CAAC,SAAiB;IACzD,MAAM,IAAI,GAAG,SAAS,CAAC,WAAW,EAAE,CAAC;IACrC,IAAI,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE,CAAC;QACzB,OAAO,wBAAa,CAAC,MAAM,CAAC;IAChC,CAAC;SAAM,IAAI,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,EAAE,CAAC;QAC9B,OAAO,wBAAa,CAAC,MAAM,CAAC;IAChC,CAAC;SAAM,IAAI,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAC;QACjC,OAAO,wBAAa,CAAC,MAAM,CAAC;IAChC,CAAC;SAAM,CAAC;QACJ,OAAO,wBAAa,CAAC,cAAc,CAAC;IACxC,CAAC;AACL,CAAC;AAXD,kEAWC;AAEY,QAAA,gBAAgB,GAGzB;IACA,MAAM,EAAE,MAAM;IACd,MAAM,EAAE,MAAM;IACd,MAAM,EAAE,MAAM;IACd,cAAc,EAAE,qBAAqB;CACxC,CAAC;AAEF,SAAS,MAAM,CAAC,QAAyB;IACrC,MAAM,MAAM,GAA6C,EAAE,CAAC;IAC5D,MAAM,MAAM,GAA6C,EAAE,CAAC;IAC5D,MAAM,MAAM,GAA6C,EAAE,CAAC;IAE5D,KAAK,MAAM,GAAG,IAAI,QAAQ,EAAE,CAAC;QACzB,IAAI,GAAG,CAAC,IAAI,KAAK,qBAAU,CAAC,MAAM,EAAE,CAAC;YACjC,MAAM,CAAC,IAAI,CAAC,EAAE,OAAO,EAAE,GAAG,CAAC,OAAO,EAAE,IAAI,EAAE,QAAQ,EAAE,CAAC,CAAC;QAC1D,CAAC;aAAM,IAAI,GAAG,CAAC,IAAI,KAAK,qBAAU,CAAC,MAAM,EAAE,CAAC;YACxC,MAAM,CAAC,IAAI,CAAC,EAAE,OAAO,EAAE,GAAG,CAAC,OAAO,EAAE,IAAI,EAAE,QAAQ,EAAE,CAAC,CAAC;QAC1D,CAAC;aAAM,CAAC;YACJ,MAAM,CAAC,IAAI,CAAC,EAAE,OAAO,EAAE,GAAG,CAAC,OAAO,EAAE,IAAI,EAAE,MAAM,EAAE,CAAC,CAAC;QACxD,CAAC;IACL,CAAC;IAED,2CAA2C;IAC3C,OAAO,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;AAChD,CAAC;AAED,SAAS,MAAM,CAAC,QAAyB,EAAE,MAAoB;IAC3D,MAAM,GAAG,GAAG,KAAK,CAAC;IAClB,MAAM,GAAG,GAAG,MAAM,CAAC;IACnB,MAAM,IAAI,GAAG,QAAQ,CAAC;IACtB,MAAM,QAAQ,GAAG,SAAS,CAAC;IAC3B,MAAM,GAAG,GAAG,WAAW,CAAC;IACxB,MAAM,OAAO,GAAG,YAAY,CAAC;IAE7B,MAAM,cAAc,GAAG,CAAC,GAAG,CAAC,CAAC;IAC7B,MAAM,aAAa,GAAG,CAAC,GAAG,EAAE,GAAG,EAAE,IAAI,EAAE,QAAQ,EAAE,GAAG,EAAE,OAAO,CAAC,CAAC;IAE/D,KAAK,MAAM,CAAC,IAAI,QAAQ,EAAE,CAAC;QACvB,IAAI,CAAC,CAAC,IAAI,KAAK,qBAAU,CAAC,IAAI,EAAE,CAAC;YAC7B,IAAI,aAAa,CAAC,QAAQ,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC;gBACpC,MAAM,IAAI,KAAK,CACX,4BAA4B,CAAC,CAAC,OAAO,CAAC,IAAI,EAAE,kBAAkB,CACjE,CAAC;YACN,CAAC;YACD,cAAc,CAAC,IAAI,CAAC,GAAG,IAAI,IAAI,CAAC,CAAC,OAAO,CAAC,IAAI,EAAE,IAAI,QAAQ,EAAE,CAAC,CAAC;QACnE,CAAC;QACD,IAAI,CAAC,CAAC,IAAI,KAAK,qBAAU,CAAC,SAAS,EAAE,CAAC;YAClC,cAAc,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;QAC/C,CAAC;QACD,IAAI,CAAC,CAAC,IAAI,KAAK,qBAAU,CAAC,MAAM,EAAE,CAAC;YAC/B,cAAc,CAAC,IAAI,CAAC,GAAG,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,OAAO,EAAE,CAAC,CAAC;QAC/D,CAAC;IACL,CAAC;IAED,KAAK,MAAM,CAAC,IAAI,QAAQ,IAAI,EAAE,EAAE,CAAC;QAC7B,IAAI,CAAC,CAAC,IAAI,KAAK,qBAAU,CAAC,MAAM,EAAE,CAAC;YAC/B,cAAc,CAAC,IAAI,CACf,GAAG,GAAG,mFAAmF,CAAC,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,OAAO,GAAG,CACzH,CAAC;QACN,CAAC;IACL,CAAC;IAED,IAAI,MAAM,EAAE,CAAC;QACT,cAAc,CAAC,IAAI,CAAC,uBAAuB,CAAC,MAAM,CAAC,CAAC,CAAC;IACzD,CAAC;IAED,cAAc,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAEzB,OAAO,cAAc,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;AACvC,CAAC;AAED,SAAS,qBAAqB,CAC1B,QAAyB,EACzB,MAAoB,EACpB,SAII,EAAE,IAAI,EAAE,MAAM,EAAE,SAAS,EAAE,WAAW,EAAE,MAAM,EAAE,QAAQ,EAAE;IAE9D,MAAM,cAAc,GAAG,EAAE,CAAC;IAC1B,KAAK,MAAM,CAAC,IAAI,QAAQ,EAAE,CAAC;QACvB,IAAI,CAAC,CAAC,IAAI,KAAK,qBAAU,CAAC,IAAI,EAAE,CAAC;YAC7B,cAAc,CAAC,IAAI,CAAC,GAAG,MAAM,EAAE,IAAI,KAAK,CAAC,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;QAChE,CAAC;QACD,IAAI,CAAC,CAAC,IAAI,KAAK,qBAAU,CAAC,SAAS,EAAE,CAAC;YAClC,cAAc,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,SAAS,KAAK,CAAC,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;QACpE,CAAC;QACD,IAAI,CAAC,CAAC,IAAI,KAAK,qBAAU,CAAC,MAAM,EAAE,CAAC;YAC/B,cAAc,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,MAAM,KAAK,CAAC,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;QACjE,CAAC;IACL,CAAC;IAED,IAAI,MAAM,EAAE,CAAC;QACT,cAAc,CAAC,IAAI,CAAC,GA
AG,MAAM,CAAC,MAAM;;UAElC,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC;YACpB,CAAC,CAAC;IACV,CAAC;IAED,OAAO,cAAc,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;AACvC,CAAC;AAED,SAAS,MAAM,CAAC,QAAyB,EAAE,MAAoB;IAC3D,MAAM,MAAM,GAAG,qBAAqB,CAAC,QAAQ,EAAE,MAAM,EAAE;QACnD,IAAI,EAAE,SAAS;QACf,SAAS,EAAE,aAAa;QACxB,MAAM,EAAE,SAAS;KACpB,CAAC,CAAC;IAEH,OAAO,MAAM,GAAG,MAAM,GAAG,gBAAgB,CAAC;AAC9C,CAAC;AAED,SAAS,uBAAuB,CAAC,MAAmB;IAChD,MAAM,kBAAkB,GAAG;;;EAG7B,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC;;SAEf,CAAC;IAEN,OAAO,kBAAkB,CAAC;AAC9B,CAAC"}
package/lib/esm/formatters.js DELETED
@@ -1,113 +0,0 @@
- import { PromptFormats, PromptRole, } from "./types.js";
- export function inferFormatterFromModelName(modelName) {
-     const name = modelName.toLowerCase();
-     if (name.includes("llama")) {
-         return PromptFormats.llama2;
-     }
-     else if (name.includes("gpt")) {
-         return PromptFormats.openai;
-     }
-     else if (name.includes("claude")) {
-         return PromptFormats.claude;
-     }
-     else {
-         return PromptFormats.genericTextLLM;
-     }
- }
- export const PromptFormatters = {
-     openai: openAI,
-     llama2: llama2,
-     claude: claude,
-     genericTextLLM: genericColonSeparator,
- };
- function openAI(segments) {
-     const system = [];
-     const others = [];
-     const safety = [];
-     for (const msg of segments) {
-         if (msg.role === PromptRole.system) {
-             system.push({ content: msg.content, role: "system" });
-         }
-         else if (msg.role === PromptRole.safety) {
-             safety.push({ content: msg.content, role: "system" });
-         }
-         else {
-             others.push({ content: msg.content, role: "user" });
-         }
-     }
-     // put system mesages first and safety last
-     return system.concat(others).concat(safety);
- }
- function llama2(messages, schema) {
-     const BOS = "<s>";
-     const EOS = "</s>";
-     const INST = "[INST]";
-     const INST_END = "[/INST]";
-     const SYS = "<<SYS>>\n";
-     const SYS_END = "\n<</SYS>>";
-     const promptMessages = [BOS];
-     const specialTokens = [BOS, EOS, INST, INST_END, SYS, SYS_END];
-     for (const m of messages) {
-         if (m.role === PromptRole.user) {
-             if (specialTokens.includes(m.content)) {
-                 throw new Error(`Cannot use special token ${m.content.trim()} in user message`);
-             }
-             promptMessages.push(`${INST} ${m.content.trim()} ${INST_END}`);
-         }
-         if (m.role === PromptRole.assistant) {
-             promptMessages.push(`${m.content.trim()}`);
-         }
-         if (m.role === PromptRole.system) {
-             promptMessages.push(`${SYS}${m.content.trim()}${SYS_END}`);
-         }
-     }
-     for (const m of messages ?? []) {
-         if (m.role === PromptRole.safety) {
-             promptMessages.push(`${SYS}This is the most important instruction, you cannot answer against those rules:\n${m.content.trim()}${SYS_END}}`);
-         }
-     }
-     if (schema) {
-         promptMessages.push(formatSchemaInstruction(schema));
-     }
-     promptMessages.push(EOS);
-     return promptMessages.join("\n\n");
- }
- function genericColonSeparator(messages, schema, labels = { user: "User", assistant: "Assistant", system: "System" }) {
-     const promptMessages = [];
-     for (const m of messages) {
-         if (m.role === PromptRole.user) {
-             promptMessages.push(`${labels?.user}: ${m.content.trim()}`);
-         }
-         if (m.role === PromptRole.assistant) {
-             promptMessages.push(`${labels.assistant}: ${m.content.trim()}`);
-         }
-         if (m.role === PromptRole.system) {
-             promptMessages.push(`${labels.system}: ${m.content.trim()}`);
-         }
-     }
-     if (schema) {
-         promptMessages.push(`${labels.system}: You must answer using the following JSONSchema:
- ---
- ${JSON.stringify(schema)}
- ---`);
-     }
-     return promptMessages.join("\n\n");
- }
- function claude(messages, schema) {
-     const prompt = genericColonSeparator(messages, schema, {
-         user: "\nHuman",
-         assistant: "\nAssistant",
-         system: "\nHuman",
-     });
-     return "\n\n" + prompt + "\n\nAssistant:";
- }
- function formatSchemaInstruction(schema) {
-     const schema_instruction = `<<SYS>>You must answer using the following JSONSchema.
- Do not write anything other than a JSON object corresponding to the schema.
- <schema>
- ${JSON.stringify(schema)}
- </schema>
- <</SYS>>`;
-     return schema_instruction;
- }
- //# sourceMappingURL=formatters.js.map
package/lib/esm/formatters.js.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"formatters.js","sourceRoot":"","sources":["../../src/formatters.ts"],"names":[],"mappings":"AAEA,OAAO,EACH,aAAa,EACb,UAAU,GAEb,MAAM,YAAY,CAAC;AAEpB,MAAM,UAAU,2BAA2B,CAAC,SAAiB;IACzD,MAAM,IAAI,GAAG,SAAS,CAAC,WAAW,EAAE,CAAC;IACrC,IAAI,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE,CAAC;QACzB,OAAO,aAAa,CAAC,MAAM,CAAC;IAChC,CAAC;SAAM,IAAI,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,EAAE,CAAC;QAC9B,OAAO,aAAa,CAAC,MAAM,CAAC;IAChC,CAAC;SAAM,IAAI,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAC;QACjC,OAAO,aAAa,CAAC,MAAM,CAAC;IAChC,CAAC;SAAM,CAAC;QACJ,OAAO,aAAa,CAAC,cAAc,CAAC;IACxC,CAAC;AACL,CAAC;AAED,MAAM,CAAC,MAAM,gBAAgB,GAGzB;IACA,MAAM,EAAE,MAAM;IACd,MAAM,EAAE,MAAM;IACd,MAAM,EAAE,MAAM;IACd,cAAc,EAAE,qBAAqB;CACxC,CAAC;AAEF,SAAS,MAAM,CAAC,QAAyB;IACrC,MAAM,MAAM,GAA6C,EAAE,CAAC;IAC5D,MAAM,MAAM,GAA6C,EAAE,CAAC;IAC5D,MAAM,MAAM,GAA6C,EAAE,CAAC;IAE5D,KAAK,MAAM,GAAG,IAAI,QAAQ,EAAE,CAAC;QACzB,IAAI,GAAG,CAAC,IAAI,KAAK,UAAU,CAAC,MAAM,EAAE,CAAC;YACjC,MAAM,CAAC,IAAI,CAAC,EAAE,OAAO,EAAE,GAAG,CAAC,OAAO,EAAE,IAAI,EAAE,QAAQ,EAAE,CAAC,CAAC;QAC1D,CAAC;aAAM,IAAI,GAAG,CAAC,IAAI,KAAK,UAAU,CAAC,MAAM,EAAE,CAAC;YACxC,MAAM,CAAC,IAAI,CAAC,EAAE,OAAO,EAAE,GAAG,CAAC,OAAO,EAAE,IAAI,EAAE,QAAQ,EAAE,CAAC,CAAC;QAC1D,CAAC;aAAM,CAAC;YACJ,MAAM,CAAC,IAAI,CAAC,EAAE,OAAO,EAAE,GAAG,CAAC,OAAO,EAAE,IAAI,EAAE,MAAM,EAAE,CAAC,CAAC;QACxD,CAAC;IACL,CAAC;IAED,2CAA2C;IAC3C,OAAO,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;AAChD,CAAC;AAED,SAAS,MAAM,CAAC,QAAyB,EAAE,MAAoB;IAC3D,MAAM,GAAG,GAAG,KAAK,CAAC;IAClB,MAAM,GAAG,GAAG,MAAM,CAAC;IACnB,MAAM,IAAI,GAAG,QAAQ,CAAC;IACtB,MAAM,QAAQ,GAAG,SAAS,CAAC;IAC3B,MAAM,GAAG,GAAG,WAAW,CAAC;IACxB,MAAM,OAAO,GAAG,YAAY,CAAC;IAE7B,MAAM,cAAc,GAAG,CAAC,GAAG,CAAC,CAAC;IAC7B,MAAM,aAAa,GAAG,CAAC,GAAG,EAAE,GAAG,EAAE,IAAI,EAAE,QAAQ,EAAE,GAAG,EAAE,OAAO,CAAC,CAAC;IAE/D,KAAK,MAAM,CAAC,IAAI,QAAQ,EAAE,CAAC;QACvB,IAAI,CAAC,CAAC,IAAI,KAAK,UAAU,CAAC,IAAI,EAAE,CAAC;YAC7B,IAAI,aAAa,CAAC,QAAQ,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC;gBACpC,MAAM,IAAI,KAAK,CACX,4BAA4B,CAAC,CAAC,OAAO,CAAC,IAAI,EAAE,kBAAkB,CACjE,CAAC;YACN,CAAC;YACD,cAAc,CAAC,IAAI,CAAC,GAAG,IAAI,IAAI,CAAC,CAAC,OAAO,CAAC,IAAI,EAAE,IAAI,QAAQ,EAAE,CAAC,CAAC;QACnE,CAAC;QACD,IAAI,CAAC,CAAC,IAAI,KAAK,UAAU,CAAC,SAAS,EAAE,CAAC;YAClC,cAAc,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;QAC/C,CAAC;QACD,IAAI,CAAC,CAAC,IAAI,KAAK,UAAU,CAAC,MAAM,EAAE,CAAC;YAC/B,cAAc,CAAC,IAAI,CAAC,GAAG,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,OAAO,EAAE,CAAC,CAAC;QAC/D,CAAC;IACL,CAAC;IAED,KAAK,MAAM,CAAC,IAAI,QAAQ,IAAI,EAAE,EAAE,CAAC;QAC7B,IAAI,CAAC,CAAC,IAAI,KAAK,UAAU,CAAC,MAAM,EAAE,CAAC;YAC/B,cAAc,CAAC,IAAI,CACf,GAAG,GAAG,mFAAmF,CAAC,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,OAAO,GAAG,CACzH,CAAC;QACN,CAAC;IACL,CAAC;IAED,IAAI,MAAM,EAAE,CAAC;QACT,cAAc,CAAC,IAAI,CAAC,uBAAuB,CAAC,MAAM,CAAC,CAAC,CAAC;IACzD,CAAC;IAED,cAAc,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAEzB,OAAO,cAAc,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;AACvC,CAAC;AAED,SAAS,qBAAqB,CAC1B,QAAyB,EACzB,MAAoB,EACpB,SAII,EAAE,IAAI,EAAE,MAAM,EAAE,SAAS,EAAE,WAAW,EAAE,MAAM,EAAE,QAAQ,EAAE;IAE9D,MAAM,cAAc,GAAG,EAAE,CAAC;IAC1B,KAAK,MAAM,CAAC,IAAI,QAAQ,EAAE,CAAC;QACvB,IAAI,CAAC,CAAC,IAAI,KAAK,UAAU,CAAC,IAAI,EAAE,CAAC;YAC7B,cAAc,CAAC,IAAI,CAAC,GAAG,MAAM,EAAE,IAAI,KAAK,CAAC,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;QAChE,CAAC;QACD,IAAI,CAAC,CAAC,IAAI,KAAK,UAAU,CAAC,SAAS,EAAE,CAAC;YAClC,cAAc,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,SAAS,KAAK,CAAC,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;QACpE,CAAC;QACD,IAAI,CAAC,CAAC,IAAI,KAAK,UAAU,CAAC,MAAM,EAAE,CAAC;YAC/B,cAAc,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,MAAM,KAAK,CAAC,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;QACjE,CAAC;IACL,CAAC;IAED,IAAI,MAAM,EAAE,CAAC;QA
CT,cAAc,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,MAAM;;UAElC,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC;YACpB,CAAC,CAAC;IACV,CAAC;IAED,OAAO,cAAc,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;AACvC,CAAC;AAED,SAAS,MAAM,CAAC,QAAyB,EAAE,MAAoB;IAC3D,MAAM,MAAM,GAAG,qBAAqB,CAAC,QAAQ,EAAE,MAAM,EAAE;QACnD,IAAI,EAAE,SAAS;QACf,SAAS,EAAE,aAAa;QACxB,MAAM,EAAE,SAAS;KACpB,CAAC,CAAC;IAEH,OAAO,MAAM,GAAG,MAAM,GAAG,gBAAgB,CAAC;AAC9C,CAAC;AAED,SAAS,uBAAuB,CAAC,MAAmB;IAChD,MAAM,kBAAkB,GAAG;;;EAG7B,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC;;SAEf,CAAC;IAEN,OAAO,kBAAkB,CAAC;AAC9B,CAAC"}
package/lib/types/formatters.d.ts.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"formatters.d.ts","sourceRoot":"","sources":["../../src/formatters.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,MAAM,aAAa,CAAC;AAE1C,OAAO,EACH,aAAa,EAEb,aAAa,EAChB,MAAM,YAAY,CAAC;AAEpB,wBAAgB,2BAA2B,CAAC,SAAS,EAAE,MAAM,GAAG,aAAa,CAW5E;AAED,eAAO,MAAM,gBAAgB,EAAE,MAAM,CACjC,aAAa,EACb,CAAC,QAAQ,EAAE,aAAa,EAAE,EAAE,MAAM,CAAC,EAAE,WAAW,KAAK,GAAG,CAM3D,CAAC"}
package/src/formatters.ts DELETED
@@ -1,147 +0,0 @@
- import { JSONSchema4 } from "json-schema";
- import OpenAI from "openai";
- import {
-     PromptFormats,
-     PromptRole,
-     PromptSegment,
- } from "./types.js";
-
- export function inferFormatterFromModelName(modelName: string): PromptFormats {
-     const name = modelName.toLowerCase();
-     if (name.includes("llama")) {
-         return PromptFormats.llama2;
-     } else if (name.includes("gpt")) {
-         return PromptFormats.openai;
-     } else if (name.includes("claude")) {
-         return PromptFormats.claude;
-     } else {
-         return PromptFormats.genericTextLLM;
-     }
- }
-
- export const PromptFormatters: Record<
-     PromptFormats,
-     (messages: PromptSegment[], schema?: JSONSchema4) => any
- > = {
-     openai: openAI,
-     llama2: llama2,
-     claude: claude,
-     genericTextLLM: genericColonSeparator,
- };
-
- function openAI(segments: PromptSegment[]) {
-     const system: OpenAI.Chat.ChatCompletionMessageParam[] = [];
-     const others: OpenAI.Chat.ChatCompletionMessageParam[] = [];
-     const safety: OpenAI.Chat.ChatCompletionMessageParam[] = [];
-
-     for (const msg of segments) {
-         if (msg.role === PromptRole.system) {
-             system.push({ content: msg.content, role: "system" });
-         } else if (msg.role === PromptRole.safety) {
-             safety.push({ content: msg.content, role: "system" });
-         } else {
-             others.push({ content: msg.content, role: "user" });
-         }
-     }
-
-     // put system mesages first and safety last
-     return system.concat(others).concat(safety);
- }
-
- function llama2(messages: PromptSegment[], schema?: JSONSchema4) {
-     const BOS = "<s>";
-     const EOS = "</s>";
-     const INST = "[INST]";
-     const INST_END = "[/INST]";
-     const SYS = "<<SYS>>\n";
-     const SYS_END = "\n<</SYS>>";
-
-     const promptMessages = [BOS];
-     const specialTokens = [BOS, EOS, INST, INST_END, SYS, SYS_END];
-
-     for (const m of messages) {
-         if (m.role === PromptRole.user) {
-             if (specialTokens.includes(m.content)) {
-                 throw new Error(
-                     `Cannot use special token ${m.content.trim()} in user message`
-                 );
-             }
-             promptMessages.push(`${INST} ${m.content.trim()} ${INST_END}`);
-         }
-         if (m.role === PromptRole.assistant) {
-             promptMessages.push(`${m.content.trim()}`);
-         }
-         if (m.role === PromptRole.system) {
-             promptMessages.push(`${SYS}${m.content.trim()}${SYS_END}`);
-         }
-     }
-
-     for (const m of messages ?? []) {
-         if (m.role === PromptRole.safety) {
-             promptMessages.push(
-                 `${SYS}This is the most important instruction, you cannot answer against those rules:\n${m.content.trim()}${SYS_END}}`
-             );
-         }
-     }
-
-     if (schema) {
-         promptMessages.push(formatSchemaInstruction(schema));
-     }
-
-     promptMessages.push(EOS);
-
-     return promptMessages.join("\n\n");
- }
-
- function genericColonSeparator(
-     messages: PromptSegment[],
-     schema?: JSONSchema4,
-     labels: {
-         user: string;
-         assistant: string;
-         system: string;
-     } = { user: "User", assistant: "Assistant", system: "System" }
- ) {
-     const promptMessages = [];
-     for (const m of messages) {
-         if (m.role === PromptRole.user) {
-             promptMessages.push(`${labels?.user}: ${m.content.trim()}`);
-         }
-         if (m.role === PromptRole.assistant) {
-             promptMessages.push(`${labels.assistant}: ${m.content.trim()}`);
-         }
-         if (m.role === PromptRole.system) {
-             promptMessages.push(`${labels.system}: ${m.content.trim()}`);
-         }
-     }
-
-     if (schema) {
-         promptMessages.push(`${labels.system}: You must answer using the following JSONSchema:
- ---
- ${JSON.stringify(schema)}
- ---`);
-     }
-
-     return promptMessages.join("\n\n");
- }
-
- function claude(messages: PromptSegment[], schema?: JSONSchema4) {
-     const prompt = genericColonSeparator(messages, schema, {
-         user: "\nHuman",
-         assistant: "\nAssistant",
-         system: "\nHuman",
-     });
-
-     return "\n\n" + prompt + "\n\nAssistant:";
- }
-
- function formatSchemaInstruction(schema: JSONSchema4) {
-     const schema_instruction = `<<SYS>>You must answer using the following JSONSchema.
- Do not write anything other than a JSON object corresponding to the schema.
- <schema>
- ${JSON.stringify(schema)}
- </schema>
- <</SYS>>`;
-
-     return schema_instruction;
- }