modprompt 0.10.12 → 0.11.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cls.d.ts +10 -9
- package/dist/interfaces.d.ts +18 -31
- package/dist/main.d.ts +2 -2
- package/dist/main.js +60 -26
- package/dist/main.min.js +1 -1
- package/dist/utils.d.ts +3 -2
- package/package.json +1 -1
package/dist/cls.d.ts
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import { LmTemplate, PromptBlock,
|
|
1
|
+
import { LmTemplate, PromptBlock, HistoryTurn, SpacingSlots, LmToolsDef, ToolSpec, ToolTurn, LmTags } from "./interfaces.js";
|
|
2
2
|
/**
|
|
3
3
|
* Represents a modified language model template.
|
|
4
4
|
*
|
|
@@ -13,8 +13,9 @@ declare class PromptTemplate {
|
|
|
13
13
|
history: Array<HistoryTurn>;
|
|
14
14
|
toolsDef: LmToolsDef | null;
|
|
15
15
|
tools: Array<ToolSpec>;
|
|
16
|
+
tags: LmTags;
|
|
16
17
|
system?: PromptBlock;
|
|
17
|
-
shots?: Array<
|
|
18
|
+
shots?: Array<HistoryTurn>;
|
|
18
19
|
stop?: Array<string>;
|
|
19
20
|
linebreaks?: SpacingSlots;
|
|
20
21
|
afterShot?: string;
|
|
@@ -122,14 +123,14 @@ declare class PromptTemplate {
|
|
|
122
123
|
* @example
|
|
123
124
|
* tpl.addShot('Is it raining?', 'No, it is sunny.');
|
|
124
125
|
*/
|
|
125
|
-
addShot(user: string, assistant: string,
|
|
126
|
+
addShot(user: string, assistant: string, tools?: Record<string, ToolTurn>): PromptTemplate;
|
|
126
127
|
/**
|
|
127
128
|
* Adds multiple shots (user-assistant interactions) to the template.
|
|
128
129
|
*
|
|
129
130
|
* This function allows you to add multiple turns to the conversation. Each turn is represented by an object
|
|
130
131
|
* with a 'user' property (the user's message) and an 'assistant' property (the assistant's response).
|
|
131
132
|
*
|
|
132
|
-
* @param {Array<
|
|
133
|
+
* @param {Array<HistoryTurn>} shots - An array of objects, where each object represents a user-assistant interaction.
|
|
133
134
|
* @returns {PromptTemplate} - A reference to the current `PromptTemplate` instance for chaining.
|
|
134
135
|
*
|
|
135
136
|
* @example
|
|
@@ -139,14 +140,14 @@ declare class PromptTemplate {
|
|
|
139
140
|
* { user: 'What is the weather like tomorrow?', assistant: 'I am sorry, but I can\'t predict the future.' }
|
|
140
141
|
* ]);
|
|
141
142
|
*/
|
|
142
|
-
addShots(shots: Array<
|
|
143
|
+
addShots(shots: Array<HistoryTurn>): PromptTemplate;
|
|
143
144
|
/**
|
|
144
145
|
* Render a turn block
|
|
145
146
|
*
|
|
146
|
-
* @param {
|
|
147
|
+
* @param {HistoryTurn} shot the shot to render
|
|
147
148
|
* @returns {string} ther rendered text
|
|
148
149
|
*/
|
|
149
|
-
renderShot(shot:
|
|
150
|
+
renderShot(shot: HistoryTurn): string;
|
|
150
151
|
/**
|
|
151
152
|
* Renders the template into a string representation.
|
|
152
153
|
*
|
|
@@ -174,9 +175,9 @@ declare class PromptTemplate {
|
|
|
174
175
|
* @param {HistoryTurn} turn the history turn
|
|
175
176
|
* @returns {PromptTemplate}
|
|
176
177
|
*/
|
|
177
|
-
pushToHistory(turn: HistoryTurn): PromptTemplate;
|
|
178
|
+
pushToHistory(turn: HistoryTurn, extractThinking?: boolean): PromptTemplate;
|
|
178
179
|
private _buildSystemBlock;
|
|
179
|
-
private
|
|
180
|
+
private _buildToolsResponse;
|
|
180
181
|
private _buildToolsBlock;
|
|
181
182
|
private _buildUserBlock;
|
|
182
183
|
private _buildAssistantBlock;
|
package/dist/interfaces.d.ts
CHANGED
|
@@ -86,32 +86,6 @@ interface PromptBlock {
|
|
|
86
86
|
*/
|
|
87
87
|
message?: string;
|
|
88
88
|
}
|
|
89
|
-
/**
|
|
90
|
-
* Represents a single turn in a conversation, consisting of a user message followed by an assistant response.
|
|
91
|
-
*
|
|
92
|
-
* @interface TurnBlock
|
|
93
|
-
* @typedef {TurnBlock}
|
|
94
|
-
*
|
|
95
|
-
* @example
|
|
96
|
-
* const turnExample: TurnBlock = {
|
|
97
|
-
* user: 'What's the weather like?',
|
|
98
|
-
* assistant: 'It's sunny today!'
|
|
99
|
-
* };
|
|
100
|
-
*/
|
|
101
|
-
interface TurnBlock {
|
|
102
|
-
/**
|
|
103
|
-
* The message content from the user.
|
|
104
|
-
*/
|
|
105
|
-
user: string;
|
|
106
|
-
/**
|
|
107
|
-
* The corresponding response from the assistant.
|
|
108
|
-
*/
|
|
109
|
-
assistant: string;
|
|
110
|
-
/**
|
|
111
|
-
* Optional tool usage in the turn.
|
|
112
|
-
*/
|
|
113
|
-
tool?: string;
|
|
114
|
-
}
|
|
115
89
|
/**
|
|
116
90
|
* Definition of language model tools.
|
|
117
91
|
*
|
|
@@ -139,6 +113,10 @@ interface LmToolsDef {
|
|
|
139
113
|
*/
|
|
140
114
|
response: string;
|
|
141
115
|
}
|
|
116
|
+
interface LmTags {
|
|
117
|
+
think?: string;
|
|
118
|
+
endThink?: string;
|
|
119
|
+
}
|
|
142
120
|
/**
|
|
143
121
|
* Represents a template for language modeling, detailing the structure and interaction elements of a conversation.
|
|
144
122
|
*
|
|
@@ -194,7 +172,7 @@ interface LmTemplate {
|
|
|
194
172
|
*
|
|
195
173
|
* Useful for simulating multi-turn interactions.
|
|
196
174
|
*/
|
|
197
|
-
shots?: Array<
|
|
175
|
+
shots?: Array<HistoryTurn>;
|
|
198
176
|
/**
|
|
199
177
|
* Tool definitions for the template.
|
|
200
178
|
*/
|
|
@@ -219,6 +197,7 @@ interface LmTemplate {
|
|
|
219
197
|
* A prefix like a bos token to insert before content
|
|
220
198
|
*/
|
|
221
199
|
prefix?: string;
|
|
200
|
+
tags?: LmTags;
|
|
222
201
|
}
|
|
223
202
|
/**
|
|
224
203
|
* Image data associated with a message or response.
|
|
@@ -242,6 +221,10 @@ interface ImgData {
|
|
|
242
221
|
*/
|
|
243
222
|
data: string;
|
|
244
223
|
}
|
|
224
|
+
interface ToolTurn {
|
|
225
|
+
call: ToolCallSpec;
|
|
226
|
+
response: Array<Record<string, any>>;
|
|
227
|
+
}
|
|
245
228
|
/**
|
|
246
229
|
* Represents a turn in the conversation history, including user and assistant messages, optional tool usage, and associated images.
|
|
247
230
|
*
|
|
@@ -261,13 +244,17 @@ interface HistoryTurn {
|
|
|
261
244
|
*/
|
|
262
245
|
user: string;
|
|
263
246
|
/**
|
|
264
|
-
* The
|
|
247
|
+
* The final response from the assistant.
|
|
265
248
|
*/
|
|
266
249
|
assistant: string;
|
|
267
250
|
/**
|
|
268
|
-
* Optional
|
|
251
|
+
* Optional thinking tag content
|
|
252
|
+
*/
|
|
253
|
+
think?: string;
|
|
254
|
+
/**
|
|
255
|
+
* Optional tools usage in the turn.
|
|
269
256
|
*/
|
|
270
|
-
|
|
257
|
+
tools?: Record<string, ToolTurn>;
|
|
271
258
|
/**
|
|
272
259
|
* Array of images associated with the turn.
|
|
273
260
|
*/
|
|
@@ -314,4 +301,4 @@ interface ToolSpec {
|
|
|
314
301
|
};
|
|
315
302
|
};
|
|
316
303
|
}
|
|
317
|
-
export { SpacingSlots, PromptBlock,
|
|
304
|
+
export { SpacingSlots, PromptBlock, LmTemplate, HistoryTurn, ImgData, LmToolsDef, ToolSpec, ToolCallSpec, ToolTurn, LmTags, };
|
package/dist/main.d.ts
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
1
|
import { templates } from "./db.js";
|
|
2
2
|
import { PromptTemplate } from "./cls.js";
|
|
3
|
-
import { SpacingSlots, PromptBlock,
|
|
4
|
-
export { templates, PromptTemplate, SpacingSlots, PromptBlock,
|
|
3
|
+
import { SpacingSlots, PromptBlock, LmTemplate, ImgData, HistoryTurn, ToolSpec } from "./interfaces.js";
|
|
4
|
+
export { templates, PromptTemplate, SpacingSlots, PromptBlock, LmTemplate, ImgData, HistoryTurn, ToolSpec };
|
package/dist/main.js
CHANGED
|
@@ -30,6 +30,10 @@ const templates = {
|
|
|
30
30
|
"system": {
|
|
31
31
|
"schema": "<|im_start|>system\n{system}<|im_end|>"
|
|
32
32
|
},
|
|
33
|
+
"tags": {
|
|
34
|
+
"endThink": "</think>",
|
|
35
|
+
"think": "<think>"
|
|
36
|
+
},
|
|
33
37
|
"user": "<|im_start|>user\n{prompt}<|im_end|>"
|
|
34
38
|
},
|
|
35
39
|
"chatml-tools": {
|
|
@@ -49,6 +53,10 @@ const templates = {
|
|
|
49
53
|
"message": "You are a helpful assistant with tool calling capabilities. You may call one or more functions to assist with the user query.\nYou are provided with function signatures within <tools></tools> XML tags:\n<tools>\n{tools}\n</tools>\n\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\n<tool_call>\n[{\"name\": <function-name>, \"arguments\": <args-json-object>}]\n</tool_call>",
|
|
50
54
|
"schema": "<|im_start|>system\n{system}<|im_end|>"
|
|
51
55
|
},
|
|
56
|
+
"tags": {
|
|
57
|
+
"endThink": "</think>",
|
|
58
|
+
"think": "<think>"
|
|
59
|
+
},
|
|
52
60
|
"tools": {
|
|
53
61
|
"call": "<tool_call>\n{tools}\n</tool_call>",
|
|
54
62
|
"def": "{system}",
|
|
@@ -452,7 +460,7 @@ const templates = {
|
|
|
452
460
|
"<|im_sep|>"
|
|
453
461
|
],
|
|
454
462
|
"system": {
|
|
455
|
-
"message": "You are a helpful assistant with some tools.\n<|tool|>\n{tools}\n<|/tool
|
|
463
|
+
"message": "You are a helpful assistant with some tools.\n<|tool|>\n{tools}\n<|/tool|>",
|
|
456
464
|
"schema": "<|im_start|>system<|im_sep|>{system}<|im_end|>"
|
|
457
465
|
},
|
|
458
466
|
"tools": {
|
|
@@ -545,14 +553,14 @@ function extractBetweenTags(text, startTag, endTag) {
|
|
|
545
553
|
// Find start position
|
|
546
554
|
const startIndex = text.indexOf(startTag);
|
|
547
555
|
if (startIndex === -1)
|
|
548
|
-
return
|
|
556
|
+
return text;
|
|
549
557
|
// Calculate content boundaries
|
|
550
558
|
let contentStart = startIndex + startTag.length;
|
|
551
559
|
let contentEnd;
|
|
552
560
|
if (endTag) {
|
|
553
561
|
contentEnd = text.indexOf(endTag, contentStart);
|
|
554
562
|
if (contentEnd === -1)
|
|
555
|
-
return
|
|
563
|
+
return text;
|
|
556
564
|
}
|
|
557
565
|
else {
|
|
558
566
|
// Find next newline for self-closing tags
|
|
@@ -561,7 +569,16 @@ function extractBetweenTags(text, startTag, endTag) {
|
|
|
561
569
|
contentEnd = text.length;
|
|
562
570
|
}
|
|
563
571
|
// Extract content
|
|
564
|
-
|
|
572
|
+
return text.substring(contentStart, contentEnd).trim();
|
|
573
|
+
}
|
|
574
|
+
catch (error) {
|
|
575
|
+
throw new Error(`Error parsing content between tags ${startTag} ${endTag}: ${error}`);
|
|
576
|
+
}
|
|
577
|
+
}
|
|
578
|
+
function extractToolSpec(text, startTag, endTag) {
|
|
579
|
+
try {
|
|
580
|
+
// Extract content
|
|
581
|
+
const content = extractBetweenTags(text, startTag, endTag);
|
|
565
582
|
// Parse JSON content
|
|
566
583
|
let parsed = JSON.parse(content);
|
|
567
584
|
if (!Array.isArray(parsed)) {
|
|
@@ -588,6 +605,7 @@ class PromptTemplate {
|
|
|
588
605
|
history = [];
|
|
589
606
|
toolsDef = null;
|
|
590
607
|
tools = [];
|
|
608
|
+
tags = {};
|
|
591
609
|
system;
|
|
592
610
|
shots;
|
|
593
611
|
stop;
|
|
@@ -621,12 +639,15 @@ class PromptTemplate {
|
|
|
621
639
|
this.name = tpl.name;
|
|
622
640
|
this.user = tpl.user;
|
|
623
641
|
this.assistant = tpl.assistant;
|
|
624
|
-
this.system = tpl
|
|
625
|
-
this.shots = tpl
|
|
626
|
-
this.stop = tpl
|
|
627
|
-
this.linebreaks = tpl
|
|
628
|
-
this.afterShot = tpl
|
|
629
|
-
this.prefix = tpl
|
|
642
|
+
this.system = tpl?.system;
|
|
643
|
+
this.shots = tpl?.shots;
|
|
644
|
+
this.stop = tpl?.stop;
|
|
645
|
+
this.linebreaks = tpl?.linebreaks;
|
|
646
|
+
this.afterShot = tpl?.afterShot;
|
|
647
|
+
this.prefix = tpl?.prefix;
|
|
648
|
+
if (tpl?.tags) {
|
|
649
|
+
this.tags = tpl?.tags;
|
|
650
|
+
}
|
|
630
651
|
if (tpl?.tools) {
|
|
631
652
|
this.toolsDef = tpl.tools;
|
|
632
653
|
const toolCallStartEnd = this.toolsDef?.call.split("{tools}");
|
|
@@ -831,14 +852,14 @@ class PromptTemplate {
|
|
|
831
852
|
* @example
|
|
832
853
|
* tpl.addShot('Is it raining?', 'No, it is sunny.');
|
|
833
854
|
*/
|
|
834
|
-
addShot(user, assistant,
|
|
835
|
-
if (
|
|
855
|
+
addShot(user, assistant, tools) {
|
|
856
|
+
if (tools && !this.toolsDef) {
|
|
836
857
|
throw new Error("This template does not support tools");
|
|
837
858
|
}
|
|
838
859
|
if (!this.shots) {
|
|
839
860
|
this.shots = [];
|
|
840
861
|
}
|
|
841
|
-
this.shots.push({ user, assistant,
|
|
862
|
+
this.shots.push({ user, assistant, tools });
|
|
842
863
|
return this;
|
|
843
864
|
}
|
|
844
865
|
/**
|
|
@@ -847,7 +868,7 @@ class PromptTemplate {
|
|
|
847
868
|
* This function allows you to add multiple turns to the conversation. Each turn is represented by an object
|
|
848
869
|
* with a 'user' property (the user's message) and an 'assistant' property (the assistant's response).
|
|
849
870
|
*
|
|
850
|
-
* @param {Array<
|
|
871
|
+
* @param {Array<HistoryTurn>} shots - An array of objects, where each object represents a user-assistant interaction.
|
|
851
872
|
* @returns {PromptTemplate} - A reference to the current `PromptTemplate` instance for chaining.
|
|
852
873
|
*
|
|
853
874
|
* @example
|
|
@@ -864,7 +885,7 @@ class PromptTemplate {
|
|
|
864
885
|
/**
|
|
865
886
|
* Render a turn block
|
|
866
887
|
*
|
|
867
|
-
* @param {
|
|
888
|
+
* @param {HistoryTurn} shot the shot to render
|
|
868
889
|
* @returns {string} ther rendered text
|
|
869
890
|
*/
|
|
870
891
|
renderShot(shot) {
|
|
@@ -879,8 +900,8 @@ class PromptTemplate {
|
|
|
879
900
|
_assistantMsg += "\n\n"
|
|
880
901
|
}*/
|
|
881
902
|
buf.push(this._buildAssistantBlock(_assistantMsg));
|
|
882
|
-
if (shot?.
|
|
883
|
-
buf.push(this.
|
|
903
|
+
if (shot?.tools) {
|
|
904
|
+
buf.push(this._buildToolsResponse(shot.tools));
|
|
884
905
|
}
|
|
885
906
|
return buf.join("");
|
|
886
907
|
}
|
|
@@ -930,7 +951,7 @@ class PromptTemplate {
|
|
|
930
951
|
for (const turn of this.history) {
|
|
931
952
|
buf.push(this.renderShot(turn));
|
|
932
953
|
}
|
|
933
|
-
if (this.history[this.history.length - 1]?.
|
|
954
|
+
if (this.history[this.history.length - 1]?.tools) {
|
|
934
955
|
isToolResponse = true;
|
|
935
956
|
}
|
|
936
957
|
}
|
|
@@ -962,7 +983,16 @@ class PromptTemplate {
|
|
|
962
983
|
* @param {HistoryTurn} turn the history turn
|
|
963
984
|
* @returns {PromptTemplate}
|
|
964
985
|
*/
|
|
965
|
-
pushToHistory(turn) {
|
|
986
|
+
pushToHistory(turn, extractThinking = true) {
|
|
987
|
+
if (extractThinking) {
|
|
988
|
+
if (this.tags?.endThink && this.tags?.think) {
|
|
989
|
+
const tks = turn.assistant.split(this.tags.endThink);
|
|
990
|
+
if (tks.length > 1) {
|
|
991
|
+
turn.think = extractBetweenTags(turn.assistant, this.tags.think, this.tags.endThink);
|
|
992
|
+
turn.assistant = tks[1];
|
|
993
|
+
}
|
|
994
|
+
}
|
|
995
|
+
}
|
|
966
996
|
this.history.push(turn);
|
|
967
997
|
return this;
|
|
968
998
|
}
|
|
@@ -1002,11 +1032,15 @@ class PromptTemplate {
|
|
|
1002
1032
|
}
|
|
1003
1033
|
return res;
|
|
1004
1034
|
}
|
|
1005
|
-
|
|
1035
|
+
_buildToolsResponse(toolTurns) {
|
|
1006
1036
|
if (!this.toolsDef) {
|
|
1007
1037
|
throw new Error("No tools def in template to build tool response");
|
|
1008
1038
|
}
|
|
1009
|
-
|
|
1039
|
+
const buf = new Array();
|
|
1040
|
+
for (const v of Object.values(toolTurns)) {
|
|
1041
|
+
buf.push(this.toolsDef.response.replace("{tools_response}", JSON.stringify(v.response)));
|
|
1042
|
+
}
|
|
1043
|
+
return buf.join("");
|
|
1010
1044
|
}
|
|
1011
1045
|
_buildToolsBlock(raw = false) {
|
|
1012
1046
|
if (!this.toolsDef) {
|
|
@@ -1045,7 +1079,7 @@ class PromptTemplate {
|
|
|
1045
1079
|
return buf.join("");
|
|
1046
1080
|
}
|
|
1047
1081
|
_buildAssistantBlock(msg) {
|
|
1048
|
-
let
|
|
1082
|
+
let txt = "";
|
|
1049
1083
|
let amsg = this.assistant;
|
|
1050
1084
|
if (this?.linebreaks?.assistant) {
|
|
1051
1085
|
amsg += "\n".repeat(this.linebreaks.assistant);
|
|
@@ -1053,12 +1087,12 @@ class PromptTemplate {
|
|
|
1053
1087
|
if (this._extraAssistant.length > 0) {
|
|
1054
1088
|
amsg += this._extraAssistant;
|
|
1055
1089
|
}
|
|
1056
|
-
|
|
1090
|
+
txt += amsg;
|
|
1057
1091
|
if (msg) {
|
|
1058
1092
|
// this is a shot
|
|
1059
|
-
|
|
1093
|
+
txt += msg;
|
|
1060
1094
|
}
|
|
1061
|
-
return
|
|
1095
|
+
return txt;
|
|
1062
1096
|
}
|
|
1063
1097
|
_load(name) {
|
|
1064
1098
|
try {
|
|
@@ -1075,7 +1109,7 @@ class PromptTemplate {
|
|
|
1075
1109
|
}
|
|
1076
1110
|
}
|
|
1077
1111
|
_parseToolCallString(raw) {
|
|
1078
|
-
return
|
|
1112
|
+
return extractToolSpec(raw, this._toolCallStart, this._toolCallEnd ?? undefined);
|
|
1079
1113
|
}
|
|
1080
1114
|
}
|
|
1081
1115
|
|
package/dist/main.min.js
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
var $tpl=function(s){"use strict";const t={alpaca:{assistant:"### Response:",id:"alpaca",linebreaks:{system:2,user:2},name:"Alpaca",system:{message:"Below is an instruction that describes a task. Write a response that appropriately completes the request.",schema:"{system}"},user:"### Instruction:\n{prompt}"},chatml:{afterShot:"<|im_end|>\n",assistant:"<|im_start|>assistant",id:"chatml",linebreaks:{assistant:1,system:1,user:1},name:"ChatMl",stop:["<|im_end|>"],system:{schema:"<|im_start|>system\n{system}<|im_end|>"},user:"<|im_start|>user\n{prompt}<|im_end|>"},"chatml-tools":{afterShot:"<|im_end|>",assistant:"<|im_start|>assistant",id:"chatml-tools",linebreaks:{assistant:1,system:1,user:1},name:"ChatMl tools",stop:["<|im_end|>"],system:{message:'You are a helpful assistant with tool calling capabilities. You may call one or more functions to assist with the user query.\nYou are provided with function signatures within <tools></tools> XML tags:\n<tools>\n{tools}\n</tools>\n\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\n<tool_call>\n[{"name": <function-name>, "arguments": <args-json-object>}]\n</tool_call>',schema:"<|im_start|>system\n{system}<|im_end|>"},tools:{call:"<tool_call>\n{tools}\n</tool_call>",def:"{system}",response:"<|im_start|>user\n<tool_response>\n{tools_response}\n</tool_response><|im_end|>"},user:"<|im_start|>user\n{prompt}<|im_end|>"},codestral:{afterShot:"\n",assistant:" [/INST]",id:"codestral",linebreaks:{system:2},name:"Codestral",stop:["</s>"],system:{schema:"<<SYS>>\n{system}\n<</SYS>>"},user:"[INST] 
{prompt}"},"command-r":{assistant:"<|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|>",id:"command-r",linebreaks:{user:1},name:"Command-R",prefix:"<BOS_TOKEN>",stop:["<|END_OF_TURN_TOKEN|>"],system:{schema:"<|START_OF_TURN_TOKEN|><|SYSTEM_TOKEN|>{system}<|END_OF_TURN_TOKEN|>"},user:"<|START_OF_TURN_TOKEN|><|USER_TOKEN|>{prompt}<|END_OF_TURN_TOKEN|>"},deephermes:{afterShot:"<|eot_id|>\n\n",assistant:"<|start_header_id|>assistant<|end_header_id|>",id:"deephermes",name:"Deephermes",stop:["<|eot_id|>","<|end_of_text|>"],system:{message:'You are a function calling AI model. You are provided with function signatures within <tools></tools> XML tags. You may call one or more functions to assist with the user query. Don\'t make assumptions about what values to plug into functions. Here are the available tools: <tools> {tools} </tools>. For each function call return a json object with function name and arguments within <tool_call></tool_call> XML tags as follows:\n<tool_call>\n[{"arguments": <args-dict>, "name": <function-name>}]\n</tool_call>',schema:"<|start_header_id|>system<|end_header_id|>\n\n{system}<|eot_id|>"},tools:{call:"<tool_call>\n{tools}\n</tool_call>",def:"{system}",response:"<|start_header_id|>user<|end_header_id|>\n<tool_response>\n{tools_response}\n</tool_response><|eot_id|>"},user:"<|start_header_id|>user<|end_header_id|>\n{prompt}<|eot_id|>"},deepseek:{afterShot:"\n",assistant:"### Response:",id:"deepseek",linebreaks:{system:1,user:1},name:"Deepseek",stop:["<|EOT|>","### Instruction:"],system:{message:"You are an AI programming assistant, utilizing the DeepSeek Coder model, developed by DeepSeek Company, and you only answer questions related to computer science. 
For politically sensitive questions, security and privacy issues, and other non-computer science questions, you will refuse to answer.",schema:"{system}"},user:"### Instruction:\n{prompt}"},deepseek2:{assistant:"Assistant:",id:"deepseek2",linebreaks:{system:2,user:2},name:"Deepseek 2",stop:["<|end▁of▁sentence|>","<|tool▁calls▁end|>"],system:{schema:"<|begin▁of▁sentence|>{system}"},user:"User: {prompt}"},deepseek3:{afterShot:"<|end▁of▁sentence|>",assistant:"<|Assistant|>",id:"deepseek3",linebreaks:{system:2,user:2},name:"Deepseek 3",stop:["<|end▁of▁sentence|>","<|tool▁calls▁end|>"],system:{schema:"<|begin▁of▁sentence|>{system}"},user:"<|User|>{prompt}"},exaone:{afterShot:"[|endofturn|]",assistant:"[|assistant|]",id:"exaone",linebreaks:{system:1,user:1},name:"Exaone",stop:["[|endofturn|]"],system:{message:"You are EXAONE model from LG AI Research, a helpful assistant.",schema:"[|system|]{system}[|endofturn|]"},user:"[|user|]{prompt}[|endofturn|]"},gemma:{afterShot:"<end_of_turn>",assistant:"<start_of_turn>model",id:"gemma",name:"Gemma",stop:["<end_of_turn>"],user:"<start_of_turn>user\n{prompt}\n <end_of_turn>\n "},granite:{afterShot:"<|end_of_text|>\n",assistant:"<|start_of_role|>assistant<|end_of_role|>",id:"granite",linebreaks:{system:1,user:1},name:"Granite",stop:["<|end_of_text|>","<|start_of_role|>"],system:{message:"You are Granite, developed by IBM. You are a helpful AI assistant.",schema:"<|start_of_role|>system<|end_of_role|>{system}<|end_of_text|>"},user:"<|start_of_role|>user<|end_of_role|>{prompt}<|end_of_text|>"},"granite-think":{afterShot:"<|end_of_text|>\n",assistant:"<|start_of_role|>assistant<|end_of_role|>",id:"granite-think",linebreaks:{system:1,user:1},name:"Granite think",stop:["<|end_of_text|>","<|start_of_role|>"],system:{message:"You are Granite, developed by IBM. You are a helpful AI assistant. Respond to every user query in a comprehensive and detailed way. You can write down your thoughts and reasoning process before responding. 
In the thought process, engage in a comprehensive cycle of analysis, summarization, exploration, reassessment, reflection, backtracing, and iteration to develop well-considered thinking process. In the response section, based on various attempts, explorations, and reflections from the thoughts section, systematically present the final solution that you deem correct. The response should summarize the thought process. Write your thoughts after 'Here is my thought process:' and write your response after 'Here is my response:' for each user query.",schema:"<|start_of_role|>system<|end_of_role|>{system}<|end_of_text|>"},user:"<|start_of_role|>user<|end_of_role|>{prompt}<|end_of_text|>"},"granite-tools":{afterShot:"<|end_of_text|>\n",assistant:"<|start_of_role|>assistant<|end_of_role|>",id:"granite-tools",linebreaks:{system:1,tools:1,user:1},name:"Granite tools",stop:["<|end_of_text|>","<|start_of_role|>"],system:{message:"You are Granite, developed by IBM. You are a helpful AI assistant with access to the following tools. When a tool is required to answer the user's query, respond with <|tool_call|> followed by a JSON list of tools used. If a tool does not exist in the provided list of tools, notify the user that you do not have the ability to fulfill the request.",schema:"<|start_of_role|>system<|end_of_role|>{system}<|end_of_text|>"},tools:{call:"<|tool_call|>{tools}",def:"<|start_of_role|>tools<|end_of_role|>{tools}<|end_of_text|>",response:"<|start_of_role|>tool_response<|end_of_role|>{tools_response}<|end_of_text|>\n"},user:"<|start_of_role|>user<|end_of_role|>{prompt}<|end_of_text|>"},llama:{assistant:" [/INST] ",id:"llama",linebreaks:{system:2,user:0},name:"Llama",prefix:"<s>",stop:["</s>"],system:{message:"You are a helpful, respectful and honest assistant. Always answer as helpfully as possible\n\nIf a question does not make any sense, or is not factually coherent, explain why instead of answering something not correct. 
If you don't know the answer to a question, please don't share false information.",schema:"[INST] <<SYS>>\n{system}\n<</SYS>>"},user:"{prompt}"},llama3:{afterShot:"<|eot_id|>\n\n",assistant:"<|start_header_id|>assistant<|end_header_id|>",id:"llama3",name:"Llama 3",stop:["<|eot_id|>","<|end_of_text|>"],system:{schema:"<|start_header_id|>system<|end_header_id|>\n\n{system}<|eot_id|>"},user:"<|start_header_id|>user<|end_header_id|>\n\n{prompt}<|eot_id|>"},"llama3-think":{afterShot:"<|eot_id|>\n\n",assistant:"<|start_header_id|>assistant<|end_header_id|>",id:"llama3-think",name:"Llama 3 think",stop:["<|eot_id|>","<|end_of_text|>"],system:{message:"You are a deep thinking AI, you may use extremely long chains of thought to deeply consider the problem and deliberate with yourself via systematic reasoning processes to help come to a correct solution prior to answering. You should enclose your thoughts and internal monologue inside <think> </think> tags, and then provide your solution or response to the problem.",schema:"<|start_header_id|>system<|end_header_id|>\n\n{system}<|eot_id|>"},user:"<|start_header_id|>user<|end_header_id|>\n\n{prompt}<|eot_id|>"},llava:{assistant:"ASSISTANT:",id:"llava",linebreaks:{user:1},name:"Llava",user:"USER: {prompt}"},minichat:{afterShot:"\n",assistant:"[|Assistant|]",id:"minichat",name:"Minichat",prefix:"<s> ",stop:["</s>","[|User|]"],user:"[|User|] {prompt} </s>"},mistral:{afterShot:"\n",assistant:" [/INST]",id:"mistral",name:"Mistral",stop:["</s>"],user:"[INST] {prompt}"},"mistral-system":{afterShot:"\n",assistant:" [/INST]",id:"mistral-system",name:"Mistral system",stop:["</s>"],system:{schema:"[SYSTEM_PROMPT]{system}[/SYSTEM_PROMPT] "},user:"[INST] {prompt}"},"mistral-system-tools":{afterShot:"\n",assistant:"",id:"mistral-system-tools",name:"Mistral system tools",stop:["</s>"],system:{schema:"[SYSTEM_PROMPT]{system}[/SYSTEM_PROMPT] 
"},tools:{call:"[TOOL_CALLS]{tools}",def:"[AVAILABLE_TOOLS]{tools}[/AVAILABLE_TOOLS]",response:"[TOOL_RESULTS]{tools_response}[/TOOL_RESULTS]"},user:"[INST] {prompt} [/INST]"},nemotron:{afterShot:"\n\n",assistant:"<extra_id_1>Assistant\n",id:"nemotron",linebreaks:{system:2,user:1},name:"Nemotron",system:{schema:"<extra_id_0>System\n{system}"},user:"<extra_id_1>User\n{prompt}"},none:{assistant:"",id:"none",name:"No template",user:"{prompt}"},openchat:{assistant:"GPT4 Assistant:",id:"openchat",name:"OpenChat",stop:["<|end_of_turn|>"],user:"GPT4 User: {prompt}<|end_of_turn|>"},"openchat-correct":{assistant:"GPT4 Correct Assistant:",id:"openchat-correct",name:"OpenChat correct",stop:["<|end_of_turn|>"],user:"GPT4 Correct User: {prompt}<|end_of_turn|>"},orca:{assistant:"### Response:",id:"orca",linebreaks:{system:2,user:2},name:"Orca",system:{message:"You are an AI assistant that follows instruction extremely well. Help as much as you can.",schema:"### System:\n{system}"},user:"### User:\n{prompt}"},phi3:{afterShot:"<|end|>\n",assistant:"<|assistant|>",id:"phi3",name:"Phi 3",stop:["<|end|>","<|user|>"],system:{schema:"<|system|> {system}<|end|>"},user:"<|user|> {prompt}<|end|>"},phi4:{afterShot:"<|im_end|>\n",assistant:"<|im_start|>assistant<|im_sep|>",id:"phi4",name:"Phi 4",stop:["<|im_end|>","<|im_sep|>"],system:{schema:"<|im_start|>system<|im_sep|>{system}<|im_end|>"},user:"<|im_start|>user<|im_sep|>{prompt}<|im_end|>"},"phi4-tools":{afterShot:"<|im_end|>\n",assistant:"<|im_start|>assistant<|im_sep|>",id:"phi4-tools",name:"Phi 4 tools",stop:["<|im_end|>","<|im_sep|>"],system:{message:'You are a helpful assistant with some tools.\n<|tool|>\n{tools}\n<|/tool|>\n\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\n<tool_call>\n[{"name": <function-name>, "arguments": 
<args-json-object>}]\n</tool_call>',schema:"<|im_start|>system<|im_sep|>{system}<|im_end|>"},tools:{call:"<|tool_call|>\n{tools}\n<|/tool_call|>",def:"{system}",response:"<|im_start|>user\n<|tool_response|>\n{tools_response}\n<|/tool_response|><|im_end|>"},user:"<|im_start|>user<|im_sep|>{prompt}<|im_end|>"},reka:{afterShot:" <sep> ",assistant:"assistant:",id:"reka",name:"Reka",stop:["<sep>","<|endoftext|>"],user:"human: {prompt} <sep> "},vicuna:{assistant:"### ASSISTANT:",id:"vicuna",linebreaks:{user:2},name:"Vicuna",user:"USER: {prompt}"},vicuna_system:{assistant:"### ASSISTANT:",id:"vicuna_system",linebreaks:{system:2,user:2},name:"Vicuna system",system:{schema:"SYSTEM: {system}"},user:"USER: {prompt}"},wizard_vicuna:{assistant:"### ASSISTANT:",id:"wizard_vicuna",linebreaks:{user:2},name:"Wizard Vicuna",stop:["<|endoftext|>"],user:"### Human:\n{prompt}"},wizardlm:{assistant:"ASSISTANT:",id:"wizardlm",linebreaks:{user:1},name:"WizardLM",system:{message:"You are a helpful AI assistant.",schema:"{system}"},user:"USER: {prompt}"},zephyr:{afterShot:"\n",assistant:"<|assistant|>",id:"zephyr",linebreaks:{assistant:1,system:1,user:1},name:"Zephyr",stop:["<|endoftext|>"],system:{schema:"<|system|>\n{system}<|endoftext|>"},user:"<|user|>\n{prompt}<|endoftext|>"}};class e{id;name;user;assistant;history=[];toolsDef=null;tools=[];system;shots;stop;linebreaks;afterShot;prefix;_extraSystem="";_extraAssistant="";_replacePrompt="";_replaceSystem="";_toolCallStart="";_toolCallEnd=null;constructor(s){let t;if(t="string"==typeof s?this._load(s):s,this.id=t.id,this.name=t.name,this.user=t.user,this.assistant=t.assistant,this.system=t.system,this.shots=t.shots,this.stop=t.stop,this.linebreaks=t.linebreaks,this.afterShot=t.afterShot,this.prefix=t.prefix,t?.tools){this.toolsDef=t.tools;const s=this.toolsDef?.call.split("{tools}");if(!s)throw new Error(`Tool definition malformed in template ${this.name}`);if(0==s.length)throw new Error(`Tool definition malformed in template 
${this.name}: no start tool call definition`);this._toolCallStart=s[0],s.length>1&&(this._toolCallEnd=s[1])}}get hasTools(){return this.tools.length>0}addTool(s){if(!this?.toolsDef)throw new Error("This template does not support tools");return this.tools.push(s),this}processAnswer(s){if(!this.hasTools)return{isToolCall:!1,toolsCall:[]};let t=!1,e=new Array;if(s.trim().includes(this._toolCallStart)){t=!0;const o=this._parseToolCallString(s);try{if(!Array.isArray(o))throw new Error(`error parsing tool call response from model: the response object is not an Array:\n${o}`);e=o}catch(t){throw new Error(`error parsing tool call response from model:\n${s}`)}}return{isToolCall:t,toolsCall:e}}encodeToolResponse(s){if(!this.toolsDef)throw new Error("can not encode tool response: the template has no tools definition");return this.toolsDef.response.replace("{tools_response}",`${s}`)}cloneTo(s,t=!0){const o=new e(s);return t&&this?.shots&&this.shots.forEach((s=>{o.addShot(s.user,s.assistant)})),this._extraSystem.length>0&&o.afterSystem(this._extraSystem),this._replaceSystem.length>0&&o.replaceSystem(this._replaceSystem),this._extraAssistant.length>0&&o.afterAssistant(this._extraAssistant),this._replacePrompt.length>0&&o.replacePrompt(this._replacePrompt),o}toJson(){const s={id:this.id,name:this.name,user:this.user,assistant:this.assistant};return this?.prefix&&(s.prefix=this.prefix),this?.system&&(s.system=this.system),this?.shots&&(s.shots=this.shots),this?.afterShot&&(s.afterShot=this.afterShot),this?.stop&&(s.stop=this.stop),this?.linebreaks&&(s.linebreaks=this.linebreaks),s}replaceSystem(s){return this.system?(this._replaceSystem=s,this):this}afterSystem(s){return this.system?(this._extraSystem=s,this):this}afterAssistant(s){return this._extraAssistant=s,this}replacePrompt(s){return this._replacePrompt=s,this}addShot(s,t,e){if(e&&!this.toolsDef)throw new Error("This template does not support tools");return 
this.shots||(this.shots=[]),this.shots.push({user:s,assistant:t,tool:e}),this}addShots(s){return s.forEach((s=>this.addShot(s.user,s.assistant))),this}renderShot(s){const t=[];t.push(this._buildUserBlock(s.user));let e=s.assistant;return this.afterShot&&(e+=this.afterShot),t.push(this._buildAssistantBlock(e)),s?.tool&&t.push(this._buildToolResponse(s.tool)),t.join("")}render(s=!0){const t=new Array;this.prefix&&t.push(this.prefix);const e="{system}"==this?.toolsDef?.def,o=this._buildSystemBlock(s,e);if(o.length>0&&(t.push(o),this?.linebreaks?.system&&t.push("\n".repeat(this.linebreaks.system))),this.toolsDef&&!e){const s=this._buildToolsBlock();s.length>0&&(t.push(s),this?.linebreaks?.tools&&t.push("\n".repeat(this.linebreaks.tools)))}if(this?.shots)for(const s of this.shots)t.push(this.renderShot(s));let a=!1;if(this.history.length>0){for(const s of this.history)t.push(this.renderShot(s));this.history[this.history.length-1]?.tool&&(a=!0)}return a||t.push(this._buildUserBlock()),t.push(this._buildAssistantBlock()),t.join("")}prompt(s,t=!0){return this.render(t).replace("{prompt}",s)}pushToHistory(s){return this.history.push(s),this}_buildSystemBlock(s,t=!1){let e="";return this?.system?(this._replaceSystem.length>0&&(this.system.message=this._replaceSystem),this.system?.message?(this._extraSystem.length>0&&(this.system.message=this.system.message+this._extraSystem),e=this.system.schema.replace("{system}",this.system.message)):this._extraSystem.length>0&&(e=this.system.schema.replace("{system}",this._extraSystem)),""==e&&(s||(e=this.system.schema)),t&&this.tools.length>0&&(e=e.replace("{tools}",this._buildToolsBlock(!0))),e):""}_buildToolResponse(s){if(!this.toolsDef)throw new Error("No tools def in template to build tool response");return this.toolsDef.response.replace("{tools_response}",s)}_buildToolsBlock(s=!1){if(!this.toolsDef)throw new Error("Can not build tools block: no tools definition found in template");let t="";if(0==this.tools.length)return"";const 
e=JSON.stringify(this.tools);return s?e:(t+=this.toolsDef.def.replace("{tools}",e),t)}_buildUserBlock(s){let t=[],e=this.user;return this._replacePrompt.length>0&&(e=e.replace("{prompt}",this._replacePrompt)),t.push(e),this?.linebreaks?.user&&t.push("\n".repeat(this.linebreaks.user)),s&&(t[0]=this.user.replace("{prompt}",s)),t.join("")}_buildAssistantBlock(s){let t=[],e=this.assistant;return this?.linebreaks?.assistant&&(e+="\n".repeat(this.linebreaks.assistant)),this._extraAssistant.length>0&&(e+=this._extraAssistant),t.push(e),s&&t.push(s),t.join("")}_load(s){try{if(s in t)return t[s];throw new Error(`Template ${s} not found`)}catch(t){throw new Error(`Error loading template ${s}: ${t}`)}}_parseToolCallString(s){return function(s,t,e){try{const o=s.indexOf(t);if(-1===o)return[];let a,n=o+t.length;if(e){if(a=s.indexOf(e,n),-1===a)return[]}else a=s.indexOf("\n",n),-1===a&&(a=s.length);const r=s.substring(n,a).trim();let i=JSON.parse(r);return Array.isArray(i)||(i=[i]),i}catch(s){throw new Error(`Error parsing tool response content: ${s}`)}}(s,this._toolCallStart,this._toolCallEnd??void 0)}}return s.PromptTemplate=e,s.templates=t,s}({});
|
|
1
|
+
// Generated, minified build artifact (modprompt 0.11.0 dist bundle) — do not hand-edit;
// regenerate from the TypeScript sources instead.
// NOTE(review): the line breaks below appear to fall inside string literals, which
// suggests this text was re-wrapped by a diff viewer; the on-disk file is presumably
// a single line — confirm against the published tarball before editing.
//
// IIFE layout, exposed as the namespace object `$tpl`:
//   t — registry of prompt templates keyed by id (alpaca, chatml, chatml-tools,
//       codestral, command-r, deephermes, deepseek/2/3, exaone, gemma, granite*,
//       llama/llama3*, llava, minichat, mistral*, nemotron, none, openchat*,
//       orca, phi3, phi4*, reka, vicuna*, wizardlm, zephyr). Each entry carries
//       user/assistant turn schemas with {prompt}/{system} placeholders, plus
//       optional stop words, linebreak counts, afterShot suffix, think tags,
//       and tools call/def/response schemas using {tools}/{tools_response}.
//   e(text, startTag, endTag?) — extractBetweenTags helper: returns the trimmed
//       substring between startTag and endTag (or up to the next newline / end of
//       string when endTag is absent); returns the input unchanged when a tag is
//       not found; wraps parse failures in an Error.
//   o — the PromptTemplate class: constructed from a template id (looked up in t
//       via _load) or a template object; manages few-shot examples (addShot/
//       addShots), conversation history (pushToHistory, which splits the
//       assistant text on tags.endThink to separate the `think` segment when
//       think tags are defined), tool definitions (addTool, processAnswer,
//       encodeToolResponse, _parseToolCallString), and final prompt assembly
//       (render/prompt, via the _build*Block helpers).
// Exports: s.PromptTemplate = o and s.templates = t on the returned namespace.
var $tpl=function(s){"use strict";const t={alpaca:{assistant:"### Response:",id:"alpaca",linebreaks:{system:2,user:2},name:"Alpaca",system:{message:"Below is an instruction that describes a task. Write a response that appropriately completes the request.",schema:"{system}"},user:"### Instruction:\n{prompt}"},chatml:{afterShot:"<|im_end|>\n",assistant:"<|im_start|>assistant",id:"chatml",linebreaks:{assistant:1,system:1,user:1},name:"ChatMl",stop:["<|im_end|>"],system:{schema:"<|im_start|>system\n{system}<|im_end|>"},tags:{endThink:"</think>",think:"<think>"},user:"<|im_start|>user\n{prompt}<|im_end|>"},"chatml-tools":{afterShot:"<|im_end|>",assistant:"<|im_start|>assistant",id:"chatml-tools",linebreaks:{assistant:1,system:1,user:1},name:"ChatMl tools",stop:["<|im_end|>"],system:{message:'You are a helpful assistant with tool calling capabilities. You may call one or more functions to assist with the user query.\nYou are provided with function signatures within <tools></tools> XML tags:\n<tools>\n{tools}\n</tools>\n\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\n<tool_call>\n[{"name": <function-name>, "arguments": <args-json-object>}]\n</tool_call>',schema:"<|im_start|>system\n{system}<|im_end|>"},tags:{endThink:"</think>",think:"<think>"},tools:{call:"<tool_call>\n{tools}\n</tool_call>",def:"{system}",response:"<|im_start|>user\n<tool_response>\n{tools_response}\n</tool_response><|im_end|>"},user:"<|im_start|>user\n{prompt}<|im_end|>"},codestral:{afterShot:"\n",assistant:" [/INST]",id:"codestral",linebreaks:{system:2},name:"Codestral",stop:["</s>"],system:{schema:"<<SYS>>\n{system}\n<</SYS>>"},user:"[INST] 
{prompt}"},"command-r":{assistant:"<|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|>",id:"command-r",linebreaks:{user:1},name:"Command-R",prefix:"<BOS_TOKEN>",stop:["<|END_OF_TURN_TOKEN|>"],system:{schema:"<|START_OF_TURN_TOKEN|><|SYSTEM_TOKEN|>{system}<|END_OF_TURN_TOKEN|>"},user:"<|START_OF_TURN_TOKEN|><|USER_TOKEN|>{prompt}<|END_OF_TURN_TOKEN|>"},deephermes:{afterShot:"<|eot_id|>\n\n",assistant:"<|start_header_id|>assistant<|end_header_id|>",id:"deephermes",name:"Deephermes",stop:["<|eot_id|>","<|end_of_text|>"],system:{message:'You are a function calling AI model. You are provided with function signatures within <tools></tools> XML tags. You may call one or more functions to assist with the user query. Don\'t make assumptions about what values to plug into functions. Here are the available tools: <tools> {tools} </tools>. For each function call return a json object with function name and arguments within <tool_call></tool_call> XML tags as follows:\n<tool_call>\n[{"arguments": <args-dict>, "name": <function-name>}]\n</tool_call>',schema:"<|start_header_id|>system<|end_header_id|>\n\n{system}<|eot_id|>"},tools:{call:"<tool_call>\n{tools}\n</tool_call>",def:"{system}",response:"<|start_header_id|>user<|end_header_id|>\n<tool_response>\n{tools_response}\n</tool_response><|eot_id|>"},user:"<|start_header_id|>user<|end_header_id|>\n{prompt}<|eot_id|>"},deepseek:{afterShot:"\n",assistant:"### Response:",id:"deepseek",linebreaks:{system:1,user:1},name:"Deepseek",stop:["<|EOT|>","### Instruction:"],system:{message:"You are an AI programming assistant, utilizing the DeepSeek Coder model, developed by DeepSeek Company, and you only answer questions related to computer science. 
For politically sensitive questions, security and privacy issues, and other non-computer science questions, you will refuse to answer.",schema:"{system}"},user:"### Instruction:\n{prompt}"},deepseek2:{assistant:"Assistant:",id:"deepseek2",linebreaks:{system:2,user:2},name:"Deepseek 2",stop:["<|end▁of▁sentence|>","<|tool▁calls▁end|>"],system:{schema:"<|begin▁of▁sentence|>{system}"},user:"User: {prompt}"},deepseek3:{afterShot:"<|end▁of▁sentence|>",assistant:"<|Assistant|>",id:"deepseek3",linebreaks:{system:2,user:2},name:"Deepseek 3",stop:["<|end▁of▁sentence|>","<|tool▁calls▁end|>"],system:{schema:"<|begin▁of▁sentence|>{system}"},user:"<|User|>{prompt}"},exaone:{afterShot:"[|endofturn|]",assistant:"[|assistant|]",id:"exaone",linebreaks:{system:1,user:1},name:"Exaone",stop:["[|endofturn|]"],system:{message:"You are EXAONE model from LG AI Research, a helpful assistant.",schema:"[|system|]{system}[|endofturn|]"},user:"[|user|]{prompt}[|endofturn|]"},gemma:{afterShot:"<end_of_turn>",assistant:"<start_of_turn>model",id:"gemma",name:"Gemma",stop:["<end_of_turn>"],user:"<start_of_turn>user\n{prompt}\n <end_of_turn>\n "},granite:{afterShot:"<|end_of_text|>\n",assistant:"<|start_of_role|>assistant<|end_of_role|>",id:"granite",linebreaks:{system:1,user:1},name:"Granite",stop:["<|end_of_text|>","<|start_of_role|>"],system:{message:"You are Granite, developed by IBM. You are a helpful AI assistant.",schema:"<|start_of_role|>system<|end_of_role|>{system}<|end_of_text|>"},user:"<|start_of_role|>user<|end_of_role|>{prompt}<|end_of_text|>"},"granite-think":{afterShot:"<|end_of_text|>\n",assistant:"<|start_of_role|>assistant<|end_of_role|>",id:"granite-think",linebreaks:{system:1,user:1},name:"Granite think",stop:["<|end_of_text|>","<|start_of_role|>"],system:{message:"You are Granite, developed by IBM. You are a helpful AI assistant. Respond to every user query in a comprehensive and detailed way. You can write down your thoughts and reasoning process before responding. 
In the thought process, engage in a comprehensive cycle of analysis, summarization, exploration, reassessment, reflection, backtracing, and iteration to develop well-considered thinking process. In the response section, based on various attempts, explorations, and reflections from the thoughts section, systematically present the final solution that you deem correct. The response should summarize the thought process. Write your thoughts after 'Here is my thought process:' and write your response after 'Here is my response:' for each user query.",schema:"<|start_of_role|>system<|end_of_role|>{system}<|end_of_text|>"},user:"<|start_of_role|>user<|end_of_role|>{prompt}<|end_of_text|>"},"granite-tools":{afterShot:"<|end_of_text|>\n",assistant:"<|start_of_role|>assistant<|end_of_role|>",id:"granite-tools",linebreaks:{system:1,tools:1,user:1},name:"Granite tools",stop:["<|end_of_text|>","<|start_of_role|>"],system:{message:"You are Granite, developed by IBM. You are a helpful AI assistant with access to the following tools. When a tool is required to answer the user's query, respond with <|tool_call|> followed by a JSON list of tools used. If a tool does not exist in the provided list of tools, notify the user that you do not have the ability to fulfill the request.",schema:"<|start_of_role|>system<|end_of_role|>{system}<|end_of_text|>"},tools:{call:"<|tool_call|>{tools}",def:"<|start_of_role|>tools<|end_of_role|>{tools}<|end_of_text|>",response:"<|start_of_role|>tool_response<|end_of_role|>{tools_response}<|end_of_text|>\n"},user:"<|start_of_role|>user<|end_of_role|>{prompt}<|end_of_text|>"},llama:{assistant:" [/INST] ",id:"llama",linebreaks:{system:2,user:0},name:"Llama",prefix:"<s>",stop:["</s>"],system:{message:"You are a helpful, respectful and honest assistant. Always answer as helpfully as possible\n\nIf a question does not make any sense, or is not factually coherent, explain why instead of answering something not correct. 
If you don't know the answer to a question, please don't share false information.",schema:"[INST] <<SYS>>\n{system}\n<</SYS>>"},user:"{prompt}"},llama3:{afterShot:"<|eot_id|>\n\n",assistant:"<|start_header_id|>assistant<|end_header_id|>",id:"llama3",name:"Llama 3",stop:["<|eot_id|>","<|end_of_text|>"],system:{schema:"<|start_header_id|>system<|end_header_id|>\n\n{system}<|eot_id|>"},user:"<|start_header_id|>user<|end_header_id|>\n\n{prompt}<|eot_id|>"},"llama3-think":{afterShot:"<|eot_id|>\n\n",assistant:"<|start_header_id|>assistant<|end_header_id|>",id:"llama3-think",name:"Llama 3 think",stop:["<|eot_id|>","<|end_of_text|>"],system:{message:"You are a deep thinking AI, you may use extremely long chains of thought to deeply consider the problem and deliberate with yourself via systematic reasoning processes to help come to a correct solution prior to answering. You should enclose your thoughts and internal monologue inside <think> </think> tags, and then provide your solution or response to the problem.",schema:"<|start_header_id|>system<|end_header_id|>\n\n{system}<|eot_id|>"},user:"<|start_header_id|>user<|end_header_id|>\n\n{prompt}<|eot_id|>"},llava:{assistant:"ASSISTANT:",id:"llava",linebreaks:{user:1},name:"Llava",user:"USER: {prompt}"},minichat:{afterShot:"\n",assistant:"[|Assistant|]",id:"minichat",name:"Minichat",prefix:"<s> ",stop:["</s>","[|User|]"],user:"[|User|] {prompt} </s>"},mistral:{afterShot:"\n",assistant:" [/INST]",id:"mistral",name:"Mistral",stop:["</s>"],user:"[INST] {prompt}"},"mistral-system":{afterShot:"\n",assistant:" [/INST]",id:"mistral-system",name:"Mistral system",stop:["</s>"],system:{schema:"[SYSTEM_PROMPT]{system}[/SYSTEM_PROMPT] "},user:"[INST] {prompt}"},"mistral-system-tools":{afterShot:"\n",assistant:"",id:"mistral-system-tools",name:"Mistral system tools",stop:["</s>"],system:{schema:"[SYSTEM_PROMPT]{system}[/SYSTEM_PROMPT] 
"},tools:{call:"[TOOL_CALLS]{tools}",def:"[AVAILABLE_TOOLS]{tools}[/AVAILABLE_TOOLS]",response:"[TOOL_RESULTS]{tools_response}[/TOOL_RESULTS]"},user:"[INST] {prompt} [/INST]"},nemotron:{afterShot:"\n\n",assistant:"<extra_id_1>Assistant\n",id:"nemotron",linebreaks:{system:2,user:1},name:"Nemotron",system:{schema:"<extra_id_0>System\n{system}"},user:"<extra_id_1>User\n{prompt}"},none:{assistant:"",id:"none",name:"No template",user:"{prompt}"},openchat:{assistant:"GPT4 Assistant:",id:"openchat",name:"OpenChat",stop:["<|end_of_turn|>"],user:"GPT4 User: {prompt}<|end_of_turn|>"},"openchat-correct":{assistant:"GPT4 Correct Assistant:",id:"openchat-correct",name:"OpenChat correct",stop:["<|end_of_turn|>"],user:"GPT4 Correct User: {prompt}<|end_of_turn|>"},orca:{assistant:"### Response:",id:"orca",linebreaks:{system:2,user:2},name:"Orca",system:{message:"You are an AI assistant that follows instruction extremely well. Help as much as you can.",schema:"### System:\n{system}"},user:"### User:\n{prompt}"},phi3:{afterShot:"<|end|>\n",assistant:"<|assistant|>",id:"phi3",name:"Phi 3",stop:["<|end|>","<|user|>"],system:{schema:"<|system|> {system}<|end|>"},user:"<|user|> {prompt}<|end|>"},phi4:{afterShot:"<|im_end|>\n",assistant:"<|im_start|>assistant<|im_sep|>",id:"phi4",name:"Phi 4",stop:["<|im_end|>","<|im_sep|>"],system:{schema:"<|im_start|>system<|im_sep|>{system}<|im_end|>"},user:"<|im_start|>user<|im_sep|>{prompt}<|im_end|>"},"phi4-tools":{afterShot:"<|im_end|>\n",assistant:"<|im_start|>assistant<|im_sep|>",id:"phi4-tools",name:"Phi 4 tools",stop:["<|im_end|>","<|im_sep|>"],system:{message:"You are a helpful assistant with some tools.\n<|tool|>\n{tools}\n<|/tool|>",schema:"<|im_start|>system<|im_sep|>{system}<|im_end|>"},tools:{call:"<|tool_call|>\n{tools}\n<|/tool_call|>",def:"{system}",response:"<|im_start|>user\n<|tool_response|>\n{tools_response}\n<|/tool_response|><|im_end|>"},user:"<|im_start|>user<|im_sep|>{prompt}<|im_end|>"},reka:{afterShot:" <sep> 
",assistant:"assistant:",id:"reka",name:"Reka",stop:["<sep>","<|endoftext|>"],user:"human: {prompt} <sep> "},vicuna:{assistant:"### ASSISTANT:",id:"vicuna",linebreaks:{user:2},name:"Vicuna",user:"USER: {prompt}"},vicuna_system:{assistant:"### ASSISTANT:",id:"vicuna_system",linebreaks:{system:2,user:2},name:"Vicuna system",system:{schema:"SYSTEM: {system}"},user:"USER: {prompt}"},wizard_vicuna:{assistant:"### ASSISTANT:",id:"wizard_vicuna",linebreaks:{user:2},name:"Wizard Vicuna",stop:["<|endoftext|>"],user:"### Human:\n{prompt}"},wizardlm:{assistant:"ASSISTANT:",id:"wizardlm",linebreaks:{user:1},name:"WizardLM",system:{message:"You are a helpful AI assistant.",schema:"{system}"},user:"USER: {prompt}"},zephyr:{afterShot:"\n",assistant:"<|assistant|>",id:"zephyr",linebreaks:{assistant:1,system:1,user:1},name:"Zephyr",stop:["<|endoftext|>"],system:{schema:"<|system|>\n{system}<|endoftext|>"},user:"<|user|>\n{prompt}<|endoftext|>"}};function e(s,t,e){try{const o=s.indexOf(t);if(-1===o)return s;let a,n=o+t.length;if(e){if(a=s.indexOf(e,n),-1===a)return s}else a=s.indexOf("\n",n),-1===a&&(a=s.length);return s.substring(n,a).trim()}catch(s){throw new Error(`Error parsing content between tags ${t} ${e}: ${s}`)}}class o{id;name;user;assistant;history=[];toolsDef=null;tools=[];tags={};system;shots;stop;linebreaks;afterShot;prefix;_extraSystem="";_extraAssistant="";_replacePrompt="";_replaceSystem="";_toolCallStart="";_toolCallEnd=null;constructor(s){let t;if(t="string"==typeof s?this._load(s):s,this.id=t.id,this.name=t.name,this.user=t.user,this.assistant=t.assistant,this.system=t?.system,this.shots=t?.shots,this.stop=t?.stop,this.linebreaks=t?.linebreaks,this.afterShot=t?.afterShot,this.prefix=t?.prefix,t?.tags&&(this.tags=t?.tags),t?.tools){this.toolsDef=t.tools;const s=this.toolsDef?.call.split("{tools}");if(!s)throw new Error(`Tool definition malformed in template ${this.name}`);if(0==s.length)throw new Error(`Tool definition malformed in template ${this.name}: no start 
tool call definition`);this._toolCallStart=s[0],s.length>1&&(this._toolCallEnd=s[1])}}get hasTools(){return this.tools.length>0}addTool(s){if(!this?.toolsDef)throw new Error("This template does not support tools");return this.tools.push(s),this}processAnswer(s){if(!this.hasTools)return{isToolCall:!1,toolsCall:[]};let t=!1,e=new Array;if(s.trim().includes(this._toolCallStart)){t=!0;const o=this._parseToolCallString(s);try{if(!Array.isArray(o))throw new Error(`error parsing tool call response from model: the response object is not an Array:\n${o}`);e=o}catch(t){throw new Error(`error parsing tool call response from model:\n${s}`)}}return{isToolCall:t,toolsCall:e}}encodeToolResponse(s){if(!this.toolsDef)throw new Error("can not encode tool response: the template has no tools definition");return this.toolsDef.response.replace("{tools_response}",`${s}`)}cloneTo(s,t=!0){const e=new o(s);return t&&this?.shots&&this.shots.forEach((s=>{e.addShot(s.user,s.assistant)})),this._extraSystem.length>0&&e.afterSystem(this._extraSystem),this._replaceSystem.length>0&&e.replaceSystem(this._replaceSystem),this._extraAssistant.length>0&&e.afterAssistant(this._extraAssistant),this._replacePrompt.length>0&&e.replacePrompt(this._replacePrompt),e}toJson(){const s={id:this.id,name:this.name,user:this.user,assistant:this.assistant};return this?.prefix&&(s.prefix=this.prefix),this?.system&&(s.system=this.system),this?.shots&&(s.shots=this.shots),this?.afterShot&&(s.afterShot=this.afterShot),this?.stop&&(s.stop=this.stop),this?.linebreaks&&(s.linebreaks=this.linebreaks),s}replaceSystem(s){return this.system?(this._replaceSystem=s,this):this}afterSystem(s){return this.system?(this._extraSystem=s,this):this}afterAssistant(s){return this._extraAssistant=s,this}replacePrompt(s){return this._replacePrompt=s,this}addShot(s,t,e){if(e&&!this.toolsDef)throw new Error("This template does not support tools");return 
this.shots||(this.shots=[]),this.shots.push({user:s,assistant:t,tools:e}),this}addShots(s){return s.forEach((s=>this.addShot(s.user,s.assistant))),this}renderShot(s){const t=[];t.push(this._buildUserBlock(s.user));let e=s.assistant;return this.afterShot&&(e+=this.afterShot),t.push(this._buildAssistantBlock(e)),s?.tools&&t.push(this._buildToolsResponse(s.tools)),t.join("")}render(s=!0){const t=new Array;this.prefix&&t.push(this.prefix);const e="{system}"==this?.toolsDef?.def,o=this._buildSystemBlock(s,e);if(o.length>0&&(t.push(o),this?.linebreaks?.system&&t.push("\n".repeat(this.linebreaks.system))),this.toolsDef&&!e){const s=this._buildToolsBlock();s.length>0&&(t.push(s),this?.linebreaks?.tools&&t.push("\n".repeat(this.linebreaks.tools)))}if(this?.shots)for(const s of this.shots)t.push(this.renderShot(s));let a=!1;if(this.history.length>0){for(const s of this.history)t.push(this.renderShot(s));this.history[this.history.length-1]?.tools&&(a=!0)}return a||t.push(this._buildUserBlock()),t.push(this._buildAssistantBlock()),t.join("")}prompt(s,t=!0){return this.render(t).replace("{prompt}",s)}pushToHistory(s,t=!0){if(t&&this.tags?.endThink&&this.tags?.think){const t=s.assistant.split(this.tags.endThink);t.length>1&&(s.think=e(s.assistant,this.tags.think,this.tags.endThink),s.assistant=t[1])}return this.history.push(s),this}_buildSystemBlock(s,t=!1){let e="";return this?.system?(this._replaceSystem.length>0&&(this.system.message=this._replaceSystem),this.system?.message?(this._extraSystem.length>0&&(this.system.message=this.system.message+this._extraSystem),e=this.system.schema.replace("{system}",this.system.message)):this._extraSystem.length>0&&(e=this.system.schema.replace("{system}",this._extraSystem)),""==e&&(s||(e=this.system.schema)),t&&this.tools.length>0&&(e=e.replace("{tools}",this._buildToolsBlock(!0))),e):""}_buildToolsResponse(s){if(!this.toolsDef)throw new Error("No tools def in template to build tool response");const t=new Array;for(const e of 
Object.values(s))t.push(this.toolsDef.response.replace("{tools_response}",JSON.stringify(e.response)));return t.join("")}_buildToolsBlock(s=!1){if(!this.toolsDef)throw new Error("Can not build tools block: no tools definition found in template");let t="";if(0==this.tools.length)return"";const e=JSON.stringify(this.tools);return s?e:(t+=this.toolsDef.def.replace("{tools}",e),t)}_buildUserBlock(s){let t=[],e=this.user;return this._replacePrompt.length>0&&(e=e.replace("{prompt}",this._replacePrompt)),t.push(e),this?.linebreaks?.user&&t.push("\n".repeat(this.linebreaks.user)),s&&(t[0]=this.user.replace("{prompt}",s)),t.join("")}_buildAssistantBlock(s){let t="",e=this.assistant;return this?.linebreaks?.assistant&&(e+="\n".repeat(this.linebreaks.assistant)),this._extraAssistant.length>0&&(e+=this._extraAssistant),t+=e,s&&(t+=s),t}_load(s){try{if(s in t)return t[s];throw new Error(`Template ${s} not found`)}catch(t){throw new Error(`Error loading template ${s}: ${t}`)}}_parseToolCallString(s){return function(s,t,o){try{const a=e(s,t,o);let n=JSON.parse(a);return Array.isArray(n)||(n=[n]),n}catch(s){throw new Error(`Error parsing tool response content: ${s}`)}}(s,this._toolCallStart,this._toolCallEnd??void 0)}}return s.PromptTemplate=o,s.templates=t,s}({});
|
package/dist/utils.d.ts
CHANGED
|
@@ -1,3 +1,4 @@
|
|
|
1
1
|
import { ToolCallSpec } from "./interfaces.js";
|
|
2
|
-
declare function extractBetweenTags(text: string, startTag: string, endTag?: string):
|
|
3
|
-
|
|
2
|
+
declare function extractBetweenTags(text: string, startTag: string, endTag?: string): string;
|
|
3
|
+
declare function extractToolSpec(text: string, startTag: string, endTag?: string): ToolCallSpec[];
|
|
4
|
+
export { extractBetweenTags, extractToolSpec };
|