modprompt 0.9.2 → 0.9.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/db.d.ts +1 -1
- package/dist/main.js +317 -244
- package/dist/main.min.js +1 -1
- package/package.json +1 -1
package/dist/db.d.ts
CHANGED
package/dist/main.js
CHANGED
|
@@ -1,357 +1,430 @@
|
|
|
1
|
+
// Autogenerated code: do not edit
|
|
1
2
|
const templates = {
|
|
2
|
-
"none": {
|
|
3
|
-
"id": "none",
|
|
4
|
-
"name": "No template",
|
|
5
|
-
"user": "{prompt}",
|
|
6
|
-
"assistant": "",
|
|
7
|
-
},
|
|
8
3
|
"alpaca": {
|
|
4
|
+
"assistant": "### Response:",
|
|
9
5
|
"id": "alpaca",
|
|
6
|
+
"linebreaks": {
|
|
7
|
+
"system": 2,
|
|
8
|
+
"user": 2
|
|
9
|
+
},
|
|
10
10
|
"name": "Alpaca",
|
|
11
11
|
"system": {
|
|
12
|
-
"schema": "{system}",
|
|
13
12
|
"message": "Below is an instruction that describes a task. Write a response that appropriately completes the request.",
|
|
13
|
+
"schema": "{system}"
|
|
14
14
|
},
|
|
15
|
-
"user": "### Instruction:\n{prompt}"
|
|
16
|
-
"assistant": "### Response:",
|
|
17
|
-
"linebreaks": {
|
|
18
|
-
"system": 2,
|
|
19
|
-
"user": 2
|
|
20
|
-
}
|
|
15
|
+
"user": "### Instruction:\n{prompt}"
|
|
21
16
|
},
|
|
22
|
-
"
|
|
23
|
-
"
|
|
24
|
-
"
|
|
17
|
+
"cerebrum": {
|
|
18
|
+
"assistant": "Ai:",
|
|
19
|
+
"id": "cerebrum",
|
|
20
|
+
"linebreaks": {
|
|
21
|
+
"user": 1
|
|
22
|
+
},
|
|
23
|
+
"name": "Cerebrum",
|
|
24
|
+
"prefix": "<s>",
|
|
25
|
+
"stop": [
|
|
26
|
+
"</s>"
|
|
27
|
+
],
|
|
25
28
|
"system": {
|
|
26
|
-
"
|
|
27
|
-
"
|
|
29
|
+
"message": "A chat between a user and a thinking artificial intelligence assistant. The assistant describes its thought process and gives helpful and detailed answers to the user's questions.",
|
|
30
|
+
"schema": "{system}"
|
|
28
31
|
},
|
|
29
|
-
"user": "{prompt}"
|
|
30
|
-
|
|
32
|
+
"user": "User: {prompt}"
|
|
33
|
+
},
|
|
34
|
+
"chatml": {
|
|
35
|
+
"afterShot": " <|im_end|>\n",
|
|
36
|
+
"assistant": "<|im_start|>assistant",
|
|
37
|
+
"id": "chatml",
|
|
31
38
|
"linebreaks": {
|
|
32
|
-
"
|
|
33
|
-
"
|
|
39
|
+
"assistant": 1,
|
|
40
|
+
"system": 1,
|
|
41
|
+
"user": 1
|
|
34
42
|
},
|
|
35
|
-
"
|
|
36
|
-
"stop": [
|
|
43
|
+
"name": "ChatMl",
|
|
44
|
+
"stop": [
|
|
45
|
+
"<|im_end|>"
|
|
46
|
+
],
|
|
47
|
+
"system": {
|
|
48
|
+
"schema": "<|im_start|>system\n{system}<|im_end|>"
|
|
49
|
+
},
|
|
50
|
+
"user": "<|im_start|>user\n{prompt}<|im_end|>"
|
|
37
51
|
},
|
|
38
|
-
"
|
|
39
|
-
"
|
|
40
|
-
"name": "Mistral",
|
|
41
|
-
"user": "[INST] {prompt}",
|
|
52
|
+
"codestral": {
|
|
53
|
+
"afterShot": "\n",
|
|
42
54
|
"assistant": " [/INST]",
|
|
43
|
-
"
|
|
44
|
-
"
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
"
|
|
48
|
-
"
|
|
55
|
+
"id": "codestral",
|
|
56
|
+
"linebreaks": {
|
|
57
|
+
"system": 2
|
|
58
|
+
},
|
|
59
|
+
"name": "Codestral",
|
|
60
|
+
"stop": [
|
|
61
|
+
"</s>"
|
|
62
|
+
],
|
|
49
63
|
"system": {
|
|
50
|
-
"schema": "
|
|
51
|
-
"message": "You are an AI assistant that follows instruction extremely well. Help as much as you can.",
|
|
64
|
+
"schema": "<<SYS>>\n{system}\n<</SYS>>"
|
|
52
65
|
},
|
|
53
|
-
"user": "
|
|
54
|
-
|
|
66
|
+
"user": "[INST] {prompt}"
|
|
67
|
+
},
|
|
68
|
+
"command-r": {
|
|
69
|
+
"assistant": "<|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|>",
|
|
70
|
+
"id": "command-r",
|
|
55
71
|
"linebreaks": {
|
|
56
|
-
"
|
|
57
|
-
"user": 2
|
|
72
|
+
"user": 1
|
|
58
73
|
},
|
|
74
|
+
"name": "Command-R",
|
|
75
|
+
"prefix": "<BOS_TOKEN>",
|
|
76
|
+
"stop": [
|
|
77
|
+
"<|END_OF_TURN_TOKEN|>"
|
|
78
|
+
],
|
|
79
|
+
"system": {
|
|
80
|
+
"schema": "<|START_OF_TURN_TOKEN|><|SYSTEM_TOKEN|>{system}<|END_OF_TURN_TOKEN|>"
|
|
81
|
+
},
|
|
82
|
+
"user": "<|START_OF_TURN_TOKEN|><|USER_TOKEN|>{prompt}<|END_OF_TURN_TOKEN|>"
|
|
59
83
|
},
|
|
60
|
-
"
|
|
61
|
-
"
|
|
62
|
-
"
|
|
63
|
-
"
|
|
64
|
-
"assistant": "### ASSISTANT:",
|
|
84
|
+
"deepseek": {
|
|
85
|
+
"afterShot": "\n",
|
|
86
|
+
"assistant": "### Response:",
|
|
87
|
+
"id": "deepseek",
|
|
65
88
|
"linebreaks": {
|
|
66
|
-
"
|
|
89
|
+
"system": 1,
|
|
90
|
+
"user": 1
|
|
67
91
|
},
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
92
|
+
"name": "Deepseek",
|
|
93
|
+
"stop": [
|
|
94
|
+
"<|EOT|>",
|
|
95
|
+
"### Instruction:"
|
|
96
|
+
],
|
|
72
97
|
"system": {
|
|
73
|
-
"
|
|
98
|
+
"message": "You are an AI programming assistant, utilizing the DeepSeek Coder model, developed by DeepSeek Company, and you only answer questions related to computer science. For politically sensitive questions, security and privacy issues, and other non-computer science questions, you will refuse to answer.",
|
|
99
|
+
"schema": "{system}"
|
|
74
100
|
},
|
|
75
|
-
"user": "
|
|
76
|
-
|
|
101
|
+
"user": "### Instruction:\n{prompt}"
|
|
102
|
+
},
|
|
103
|
+
"deepseek2": {
|
|
104
|
+
"assistant": "Assistant:",
|
|
105
|
+
"id": "deepseek2",
|
|
77
106
|
"linebreaks": {
|
|
78
107
|
"system": 2,
|
|
79
108
|
"user": 2
|
|
80
109
|
},
|
|
110
|
+
"name": "Deepseek 2",
|
|
111
|
+
"stop": [
|
|
112
|
+
"<|end▁of▁sentence|>",
|
|
113
|
+
"<|tool▁calls▁end|>"
|
|
114
|
+
],
|
|
115
|
+
"system": {
|
|
116
|
+
"schema": "<|begin▁of▁sentence|>{system}"
|
|
117
|
+
},
|
|
118
|
+
"user": "User: {prompt}"
|
|
81
119
|
},
|
|
82
|
-
"
|
|
83
|
-
"
|
|
84
|
-
"
|
|
85
|
-
"
|
|
86
|
-
"assistant": "### ASSISTANT:",
|
|
120
|
+
"deepseek3": {
|
|
121
|
+
"afterShot": "<|end▁of▁sentence|>",
|
|
122
|
+
"assistant": "<|Assistant|>",
|
|
123
|
+
"id": "deepseek3",
|
|
87
124
|
"linebreaks": {
|
|
125
|
+
"system": 2,
|
|
88
126
|
"user": 2
|
|
89
127
|
},
|
|
90
|
-
"
|
|
128
|
+
"name": "Deepseek 3",
|
|
129
|
+
"stop": [
|
|
130
|
+
"<|end▁of▁sentence|>",
|
|
131
|
+
"<|tool▁calls▁end|>"
|
|
132
|
+
],
|
|
133
|
+
"system": {
|
|
134
|
+
"schema": "<|begin▁of▁sentence|>{system}"
|
|
135
|
+
},
|
|
136
|
+
"user": "<|User|>{prompt}"
|
|
137
|
+
},
|
|
138
|
+
"gemma": {
|
|
139
|
+
"afterShot": "\n",
|
|
140
|
+
"assistant": "<end_of_turn>\n<start_of_turn>model",
|
|
141
|
+
"id": "gemma",
|
|
142
|
+
"name": "Gemma",
|
|
143
|
+
"stop": [
|
|
144
|
+
"<end_of_turn>"
|
|
145
|
+
],
|
|
146
|
+
"user": "<start_of_turn>user\n{prompt}"
|
|
91
147
|
},
|
|
92
148
|
"guanaco": {
|
|
93
|
-
"id": "guanaco",
|
|
94
|
-
"name": "Guanaco",
|
|
95
|
-
"user": "### Human: {prompt}",
|
|
96
149
|
"assistant": "### Assistant:",
|
|
150
|
+
"id": "guanaco",
|
|
97
151
|
"linebreaks": {
|
|
98
152
|
"user": 1
|
|
99
153
|
},
|
|
154
|
+
"name": "Guanaco",
|
|
155
|
+
"user": "### Human: {prompt}"
|
|
100
156
|
},
|
|
101
|
-
"
|
|
102
|
-
"
|
|
103
|
-
"
|
|
104
|
-
"system": {
|
|
105
|
-
"schema": "<|im_start|>system\n{system}\n<|im_end|>",
|
|
106
|
-
},
|
|
107
|
-
"user": "<|im_start|>user\n{prompt}<|im_end|>",
|
|
108
|
-
"assistant": "<|im_start|>assistant",
|
|
157
|
+
"human_response": {
|
|
158
|
+
"assistant": "### RESPONSE:",
|
|
159
|
+
"id": "human_response",
|
|
109
160
|
"linebreaks": {
|
|
110
|
-
"system": 1,
|
|
111
|
-
"user": 1,
|
|
112
161
|
"assistant": 1,
|
|
162
|
+
"user": 2
|
|
113
163
|
},
|
|
114
|
-
"
|
|
115
|
-
"
|
|
164
|
+
"name": "Human response",
|
|
165
|
+
"user": "### HUMAN:\n{prompt}"
|
|
116
166
|
},
|
|
117
|
-
"
|
|
118
|
-
"
|
|
119
|
-
"
|
|
120
|
-
"system": {
|
|
121
|
-
"schema": "<|system|>\n{system}<|endoftext|>",
|
|
122
|
-
},
|
|
123
|
-
"user": "<|user|>\n{prompt}<|endoftext|>",
|
|
124
|
-
"assistant": "<|assistant|>",
|
|
167
|
+
"llama": {
|
|
168
|
+
"assistant": " [/INST] ",
|
|
169
|
+
"id": "llama",
|
|
125
170
|
"linebreaks": {
|
|
126
|
-
"system":
|
|
127
|
-
"user":
|
|
128
|
-
"assistant": 1,
|
|
171
|
+
"system": 2,
|
|
172
|
+
"user": 0
|
|
129
173
|
},
|
|
130
|
-
"
|
|
131
|
-
"
|
|
174
|
+
"name": "Llama",
|
|
175
|
+
"prefix": "<s>",
|
|
176
|
+
"stop": [
|
|
177
|
+
"</s>"
|
|
178
|
+
],
|
|
179
|
+
"system": {
|
|
180
|
+
"message": "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible\n\nIf a question does not make any sense, or is not factually coherent, explain why instead of answering something not correct. If you don't know the answer to a question, please don't share false information.",
|
|
181
|
+
"schema": "[INST] <<SYS>>\n{system}\n<</SYS>>"
|
|
182
|
+
},
|
|
183
|
+
"user": "{prompt}"
|
|
132
184
|
},
|
|
133
|
-
"
|
|
134
|
-
"
|
|
135
|
-
"
|
|
185
|
+
"llama3": {
|
|
186
|
+
"afterShot": "<|eot_id|>\n\n",
|
|
187
|
+
"assistant": "<|start_header_id|>assistant<|end_header_id|>",
|
|
188
|
+
"id": "llama3",
|
|
189
|
+
"name": "Llama 3",
|
|
190
|
+
"stop": [
|
|
191
|
+
"<|eot_id|>",
|
|
192
|
+
"<|end_of_text|>"
|
|
193
|
+
],
|
|
136
194
|
"system": {
|
|
137
|
-
"schema": "<|system
|
|
138
|
-
"message": "You are a router. Below is the query from the users, please call the correct function and generate the parameters to call the function."
|
|
195
|
+
"schema": "<|start_header_id|>system<|end_header_id|>\n\n{system}<|eot_id|>"
|
|
139
196
|
},
|
|
140
|
-
"user": "<|user
|
|
141
|
-
"assistant": "<|assistant|>",
|
|
142
|
-
"afterShot": "\n",
|
|
143
|
-
"stop": ["<|end|>"]
|
|
197
|
+
"user": "<|start_header_id|>user<|end_header_id|>\n\n{prompt}<|eot_id|>"
|
|
144
198
|
},
|
|
145
199
|
"llava": {
|
|
146
|
-
"id": "llava",
|
|
147
|
-
"name": "Llava",
|
|
148
|
-
"user": "USER: {prompt}",
|
|
149
200
|
"assistant": "ASSISTANT:",
|
|
201
|
+
"id": "llava",
|
|
150
202
|
"linebreaks": {
|
|
151
|
-
"user": 1
|
|
203
|
+
"user": 1
|
|
152
204
|
},
|
|
205
|
+
"name": "Llava",
|
|
206
|
+
"user": "USER: {prompt}"
|
|
153
207
|
},
|
|
154
|
-
"
|
|
155
|
-
"
|
|
156
|
-
"
|
|
157
|
-
"
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
|
|
161
|
-
|
|
162
|
-
|
|
208
|
+
"minichat": {
|
|
209
|
+
"afterShot": "\n",
|
|
210
|
+
"assistant": "[|Assistant|]",
|
|
211
|
+
"id": "minichat",
|
|
212
|
+
"name": "Minichat",
|
|
213
|
+
"prefix": "<s> ",
|
|
214
|
+
"stop": [
|
|
215
|
+
"</s>",
|
|
216
|
+
"[|User|]"
|
|
217
|
+
],
|
|
218
|
+
"user": "[|User|] {prompt} </s>"
|
|
219
|
+
},
|
|
220
|
+
"mistral": {
|
|
221
|
+
"afterShot": "\n",
|
|
222
|
+
"assistant": " [/INST]",
|
|
223
|
+
"id": "mistral",
|
|
224
|
+
"name": "Mistral",
|
|
225
|
+
"stop": [
|
|
226
|
+
"</s>"
|
|
227
|
+
],
|
|
228
|
+
"user": "[INST] {prompt}"
|
|
229
|
+
},
|
|
230
|
+
"nemotron": {
|
|
231
|
+
"afterShot": "\n\n",
|
|
232
|
+
"assistant": "<extra_id_1>Assistant",
|
|
233
|
+
"id": "nemotron",
|
|
163
234
|
"linebreaks": {
|
|
164
|
-
"system":
|
|
165
|
-
"user": 1
|
|
235
|
+
"system": 2,
|
|
236
|
+
"user": 1
|
|
166
237
|
},
|
|
167
|
-
|
|
168
|
-
"wizardlm": {
|
|
169
|
-
"id": "wizardlm",
|
|
170
|
-
"name": "WizardLM",
|
|
238
|
+
"name": "Nemotron",
|
|
171
239
|
"system": {
|
|
172
|
-
"schema": "{system}"
|
|
173
|
-
"message": "You are a helpful AI assistant."
|
|
240
|
+
"schema": "<extra_id_0>System\n{system}"
|
|
174
241
|
},
|
|
175
|
-
"user": "
|
|
176
|
-
|
|
177
|
-
|
|
178
|
-
|
|
242
|
+
"user": "<extra_id_1>User\n{prompt}"
|
|
243
|
+
},
|
|
244
|
+
"none": {
|
|
245
|
+
"assistant": "",
|
|
246
|
+
"id": "none",
|
|
247
|
+
"name": "No template",
|
|
248
|
+
"user": "{prompt}"
|
|
249
|
+
},
|
|
250
|
+
"octopus": {
|
|
251
|
+
"afterShot": "\n",
|
|
252
|
+
"assistant": "<|assistant|>",
|
|
253
|
+
"id": "octopus",
|
|
254
|
+
"name": "Octopus",
|
|
255
|
+
"stop": [
|
|
256
|
+
"<|end|>"
|
|
257
|
+
],
|
|
258
|
+
"system": {
|
|
259
|
+
"message": "You are a router. Below is the query from the users, please call the correct function and generate the parameters to call the function.",
|
|
260
|
+
"schema": "<|system|>{system}<|end|>"
|
|
179
261
|
},
|
|
262
|
+
"user": "<|user|>{prompt}<|end|>"
|
|
180
263
|
},
|
|
181
264
|
"openchat": {
|
|
265
|
+
"assistant": "GPT4 Assistant:",
|
|
182
266
|
"id": "openchat",
|
|
183
267
|
"name": "OpenChat",
|
|
184
|
-
"
|
|
185
|
-
|
|
186
|
-
|
|
268
|
+
"stop": [
|
|
269
|
+
"<|end_of_turn|>"
|
|
270
|
+
],
|
|
271
|
+
"user": "GPT4 User: {prompt}<|end_of_turn|>"
|
|
187
272
|
},
|
|
188
273
|
"openchat-correct": {
|
|
274
|
+
"assistant": "GPT4 Correct Assistant:",
|
|
189
275
|
"id": "openchat-correct",
|
|
190
276
|
"name": "OpenChat correct",
|
|
191
|
-
"
|
|
192
|
-
|
|
193
|
-
|
|
277
|
+
"stop": [
|
|
278
|
+
"<|end_of_turn|>"
|
|
279
|
+
],
|
|
280
|
+
"user": "GPT4 Correct User: {prompt}<|end_of_turn|>"
|
|
194
281
|
},
|
|
195
|
-
"
|
|
196
|
-
"
|
|
197
|
-
"
|
|
198
|
-
"user": "### HUMAN:\n{prompt}",
|
|
199
|
-
"assistant": "### RESPONSE:",
|
|
282
|
+
"opencodeinterpreter": {
|
|
283
|
+
"assistant": "<|Assistant|>",
|
|
284
|
+
"id": "opencodeinterpreter",
|
|
200
285
|
"linebreaks": {
|
|
201
|
-
"user": 2
|
|
202
|
-
"assistant": 1
|
|
286
|
+
"user": 2
|
|
203
287
|
},
|
|
288
|
+
"name": "Open code interpreter",
|
|
289
|
+
"stop": [
|
|
290
|
+
"<|EOT|>",
|
|
291
|
+
"<|User|>"
|
|
292
|
+
],
|
|
293
|
+
"user": "<|User|>\n{prompt}"
|
|
204
294
|
},
|
|
205
|
-
"
|
|
206
|
-
"
|
|
207
|
-
"
|
|
208
|
-
"
|
|
209
|
-
|
|
210
|
-
|
|
211
|
-
|
|
212
|
-
"
|
|
295
|
+
"orca": {
|
|
296
|
+
"assistant": "### Response:",
|
|
297
|
+
"id": "orca",
|
|
298
|
+
"linebreaks": {
|
|
299
|
+
"system": 2,
|
|
300
|
+
"user": 2
|
|
301
|
+
},
|
|
302
|
+
"name": "Orca",
|
|
303
|
+
"system": {
|
|
304
|
+
"message": "You are an AI assistant that follows instruction extremely well. Help as much as you can.",
|
|
305
|
+
"schema": "### System:\n{system}"
|
|
306
|
+
},
|
|
307
|
+
"user": "### User:\n{prompt}"
|
|
213
308
|
},
|
|
214
309
|
"phi": {
|
|
215
|
-
"id": "phi",
|
|
216
|
-
"name": "Phi",
|
|
217
|
-
"user": "Instruct: {prompt}",
|
|
218
310
|
"assistant": "Output:",
|
|
311
|
+
"id": "phi",
|
|
219
312
|
"linebreaks": {
|
|
220
313
|
"user": 1
|
|
221
314
|
},
|
|
222
|
-
"
|
|
315
|
+
"name": "Phi",
|
|
316
|
+
"stop": [
|
|
317
|
+
"</s>",
|
|
318
|
+
"Instruct:"
|
|
319
|
+
],
|
|
320
|
+
"user": "Instruct: {prompt}"
|
|
223
321
|
},
|
|
224
322
|
"phi3": {
|
|
323
|
+
"afterShot": "<|end|>\n",
|
|
324
|
+
"assistant": "<|assistant|>",
|
|
225
325
|
"id": "phi3",
|
|
226
326
|
"name": "Phi 3",
|
|
227
|
-
"
|
|
228
|
-
|
|
327
|
+
"stop": [
|
|
328
|
+
"<|end|>",
|
|
329
|
+
"<|user|>"
|
|
330
|
+
],
|
|
229
331
|
"system": {
|
|
230
|
-
"schema": "<|system|> {system}<|end|>"
|
|
332
|
+
"schema": "<|system|> {system}<|end|>"
|
|
231
333
|
},
|
|
232
|
-
"
|
|
233
|
-
"stop": ["<|end|>", "<|user|>"]
|
|
334
|
+
"user": "<|user|> {prompt}<|end|>"
|
|
234
335
|
},
|
|
235
|
-
"
|
|
236
|
-
"
|
|
237
|
-
"
|
|
336
|
+
"phi4": {
|
|
337
|
+
"afterShot": "<|im_end|>\n",
|
|
338
|
+
"assistant": "<|im_start|>assistant<|im_sep|>",
|
|
339
|
+
"id": "phi4",
|
|
340
|
+
"name": "Phi 4",
|
|
341
|
+
"stop": [
|
|
342
|
+
"<|im_end|>",
|
|
343
|
+
"<|im_sep|>"
|
|
344
|
+
],
|
|
238
345
|
"system": {
|
|
239
|
-
"schema": "{system}"
|
|
240
|
-
"message": "You are an AI programming assistant, utilizing the DeepSeek Coder model, developed by DeepSeek Company, and you only answer questions related to computer science. For politically sensitive questions, security and privacy issues, and other non-computer science questions, you will refuse to answer."
|
|
346
|
+
"schema": "<|im_start|>system<|im_sep|>{system}<|im_end|>"
|
|
241
347
|
},
|
|
242
|
-
"
|
|
243
|
-
|
|
244
|
-
|
|
348
|
+
"user": "<|im_start|>user<|im_sep|>{prompt}<|im_end|>"
|
|
349
|
+
},
|
|
350
|
+
"synthia-cot": {
|
|
351
|
+
"assistant": "ASSISTANT:",
|
|
352
|
+
"id": "synthia-cot",
|
|
245
353
|
"linebreaks": {
|
|
246
|
-
"user": 1,
|
|
247
354
|
"system": 1,
|
|
355
|
+
"user": 1
|
|
248
356
|
},
|
|
249
|
-
"
|
|
250
|
-
},
|
|
251
|
-
"deepseek2": {
|
|
252
|
-
"id": "deepseek2",
|
|
253
|
-
"name": "Deepseek v2",
|
|
357
|
+
"name": "Synthia CoT",
|
|
254
358
|
"system": {
|
|
255
|
-
"
|
|
359
|
+
"message": "Elaborate on the topic using a Tree of Thoughts and backtrack when necessary to construct a clear, cohesive Chain of Thought reasoning. Always answer without hesitation.",
|
|
360
|
+
"schema": "SYSTEM: {system}"
|
|
256
361
|
},
|
|
257
|
-
"user": "
|
|
258
|
-
|
|
362
|
+
"user": "USER: {prompt}"
|
|
363
|
+
},
|
|
364
|
+
"vicuna": {
|
|
365
|
+
"assistant": "### ASSISTANT:",
|
|
366
|
+
"id": "vicuna",
|
|
259
367
|
"linebreaks": {
|
|
260
|
-
"user": 2
|
|
261
|
-
"system": 2,
|
|
368
|
+
"user": 2
|
|
262
369
|
},
|
|
263
|
-
"
|
|
370
|
+
"name": "Vicuna",
|
|
371
|
+
"user": "USER: {prompt}"
|
|
264
372
|
},
|
|
265
|
-
"
|
|
266
|
-
"
|
|
267
|
-
"
|
|
268
|
-
"user": "<|User|>\n{prompt}",
|
|
269
|
-
"assistant": "<|Assistant|>",
|
|
373
|
+
"vicuna_system": {
|
|
374
|
+
"assistant": "### ASSISTANT:",
|
|
375
|
+
"id": "vicuna_system",
|
|
270
376
|
"linebreaks": {
|
|
377
|
+
"system": 2,
|
|
271
378
|
"user": 2
|
|
272
379
|
},
|
|
273
|
-
"
|
|
274
|
-
},
|
|
275
|
-
"cerebrum": {
|
|
276
|
-
"id": "cerebrum",
|
|
277
|
-
"name": "Cerebrum",
|
|
380
|
+
"name": "Vicuna system",
|
|
278
381
|
"system": {
|
|
279
|
-
"schema": "{system}"
|
|
280
|
-
"message": "A chat between a user and a thinking artificial intelligence assistant. The assistant describes its thought process and gives helpful and detailed answers to the user's questions."
|
|
281
|
-
},
|
|
282
|
-
"user": "User: {prompt}",
|
|
283
|
-
"assistant": "Ai:",
|
|
284
|
-
"linebreaks": {
|
|
285
|
-
"user": 1
|
|
382
|
+
"schema": "SYSTEM: {system}"
|
|
286
383
|
},
|
|
287
|
-
"
|
|
288
|
-
"stop": ["</s>"]
|
|
384
|
+
"user": "USER: {prompt}"
|
|
289
385
|
},
|
|
290
|
-
"
|
|
291
|
-
"
|
|
292
|
-
"
|
|
293
|
-
"user": "<|START_OF_TURN_TOKEN|><|USER_TOKEN|>{prompt}<|END_OF_TURN_TOKEN|>",
|
|
294
|
-
"assistant": "<|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|>",
|
|
295
|
-
"prefix": "<BOS_TOKEN>",
|
|
296
|
-
"stop": [
|
|
297
|
-
"<|END_OF_TURN_TOKEN|>"
|
|
298
|
-
],
|
|
386
|
+
"wizard_vicuna": {
|
|
387
|
+
"assistant": "### ASSISTANT:",
|
|
388
|
+
"id": "wizard_vicuna",
|
|
299
389
|
"linebreaks": {
|
|
300
|
-
"user":
|
|
390
|
+
"user": 2
|
|
301
391
|
},
|
|
302
|
-
"
|
|
303
|
-
"schema": "<|START_OF_TURN_TOKEN|><|SYSTEM_TOKEN|>{system}<|END_OF_TURN_TOKEN|>"
|
|
304
|
-
}
|
|
305
|
-
},
|
|
306
|
-
"llama3": {
|
|
307
|
-
"id": "llama3",
|
|
308
|
-
"name": "Llama 3",
|
|
309
|
-
"user": "<|start_header_id|>user<|end_header_id|>\n\n{prompt}<|eot_id|>",
|
|
310
|
-
"assistant": "<|start_header_id|>assistant<|end_header_id|>",
|
|
392
|
+
"name": "Wizard Vicuna",
|
|
311
393
|
"stop": [
|
|
312
|
-
"<|
|
|
313
|
-
"<|end_of_text|>"
|
|
394
|
+
"<|endoftext|>"
|
|
314
395
|
],
|
|
315
|
-
"
|
|
316
|
-
"system": {
|
|
317
|
-
"schema": "<|start_header_id|>system<|end_header_id|>\n\n{system}<|eot_id|>"
|
|
318
|
-
}
|
|
396
|
+
"user": "### Human:\n{prompt}"
|
|
319
397
|
},
|
|
320
|
-
"
|
|
321
|
-
"
|
|
322
|
-
"
|
|
323
|
-
"user": "[INST] {prompt}",
|
|
324
|
-
"assistant": " [/INST]",
|
|
325
|
-
"stop": ["</s>"],
|
|
326
|
-
"afterShot": "\n",
|
|
398
|
+
"wizardlm": {
|
|
399
|
+
"assistant": "ASSISTANT:",
|
|
400
|
+
"id": "wizardlm",
|
|
327
401
|
"linebreaks": {
|
|
328
|
-
"
|
|
402
|
+
"user": 1
|
|
329
403
|
},
|
|
404
|
+
"name": "WizardLM",
|
|
330
405
|
"system": {
|
|
331
|
-
"
|
|
406
|
+
"message": "You are a helpful AI assistant.",
|
|
407
|
+
"schema": "{system}"
|
|
332
408
|
},
|
|
409
|
+
"user": "USER: {prompt}"
|
|
333
410
|
},
|
|
334
|
-
"
|
|
335
|
-
"
|
|
336
|
-
"
|
|
337
|
-
"
|
|
338
|
-
"assistant": "<end_of_turn>\n<start_of_turn>model",
|
|
339
|
-
"stop": ["<end_of_turn>"],
|
|
340
|
-
"afterShot": "\n"
|
|
341
|
-
},
|
|
342
|
-
"nemotron": {
|
|
343
|
-
"id": "nemotron",
|
|
344
|
-
"name": "Nemotron",
|
|
345
|
-
"user": "<extra_id_1>User\n{prompt}",
|
|
346
|
-
"assistant": "<extra_id_1>Assistant",
|
|
411
|
+
"zephyr": {
|
|
412
|
+
"afterShot": "\n",
|
|
413
|
+
"assistant": "<|assistant|>",
|
|
414
|
+
"id": "zephyr",
|
|
347
415
|
"linebreaks": {
|
|
348
|
-
"
|
|
349
|
-
"
|
|
416
|
+
"assistant": 1,
|
|
417
|
+
"system": 1,
|
|
418
|
+
"user": 1
|
|
350
419
|
},
|
|
420
|
+
"name": "Zephyr",
|
|
421
|
+
"stop": [
|
|
422
|
+
"<|endoftext|>"
|
|
423
|
+
],
|
|
351
424
|
"system": {
|
|
352
|
-
"schema": "
|
|
425
|
+
"schema": "<|system|>\n{system}<|endoftext|>"
|
|
353
426
|
},
|
|
354
|
-
"
|
|
427
|
+
"user": "<|user|>\n{prompt}<|endoftext|>"
|
|
355
428
|
}
|
|
356
429
|
};
|
|
357
430
|
|
|
@@ -705,13 +778,13 @@ class PromptTemplate {
|
|
|
705
778
|
_buildAssistantBlock(msg) {
|
|
706
779
|
let buf = [];
|
|
707
780
|
let amsg = this.assistant;
|
|
781
|
+
if (this?.linebreaks?.assistant) {
|
|
782
|
+
amsg += "\n".repeat(this.linebreaks.assistant);
|
|
783
|
+
}
|
|
708
784
|
if (this._extraAssistant.length > 0) {
|
|
709
785
|
amsg += this._extraAssistant;
|
|
710
786
|
}
|
|
711
787
|
buf.push(amsg);
|
|
712
|
-
if (this?.linebreaks?.assistant) {
|
|
713
|
-
buf.push("\n".repeat(this.linebreaks.assistant));
|
|
714
|
-
}
|
|
715
788
|
if (msg) {
|
|
716
789
|
// this is a shot
|
|
717
790
|
buf.push(msg);
|
package/dist/main.min.js
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
var $tpl=function(s){"use strict";const e={none:{id:"none",name:"No template",user:"{prompt}",assistant:""},alpaca:{id:"alpaca",name:"Alpaca",system:{schema:"{system}",message:"Below is an instruction that describes a task. Write a response that appropriately completes the request."},user:"### Instruction:\n{prompt}",assistant:"### Response:",linebreaks:{system:2,user:2}},llama:{id:"llama",name:"Llama",system:{schema:"[INST] <<SYS>>\n{system}\n<</SYS>>",message:"You are a helpful, respectful and honest assistant. Always answer as helpfully as possible\n\nIf a question does not make any sense, or is not factually coherent, explain why instead of answering something not correct. If you don't know the answer to a question, please don't share false information."},user:"{prompt}",assistant:" [/INST] ",linebreaks:{system:2,user:0},prefix:"<s>",stop:["</s>"]},mistral:{id:"mistral",name:"Mistral",user:"[INST] {prompt}",assistant:" [/INST]",stop:["</s>"],afterShot:"\n"},orca:{id:"orca",name:"Orca",system:{schema:"### System:\n{system}",message:"You are an AI assistant that follows instruction extremely well. 
Help as much as you can."},user:"### User:\n{prompt}",assistant:"### Response:",linebreaks:{system:2,user:2}},vicuna:{id:"vicuna",name:"Vicuna",user:"USER: {prompt}",assistant:"### ASSISTANT:",linebreaks:{user:2}},vicuna_system:{id:"vicuna_system",name:"Vicuna system",system:{schema:"SYSTEM: {system}"},user:"USER: {prompt}",assistant:"### ASSISTANT:",linebreaks:{system:2,user:2}},wizard_vicuna:{id:"wizard_vicuna",name:"Wizard Vicuna",user:"### Human:\n{prompt}",assistant:"### ASSISTANT:",linebreaks:{user:2},stop:["<|endoftext|>"]},guanaco:{id:"guanaco",name:"Guanaco",user:"### Human: {prompt}",assistant:"### Assistant:",linebreaks:{user:1}},chatml:{id:"chatml",name:"ChatMl",system:{schema:"<|im_start|>system\n{system}\n<|im_end|>"},user:"<|im_start|>user\n{prompt}<|im_end|>",assistant:"<|im_start|>assistant",linebreaks:{system:1,user:1,assistant:1},stop:["<|im_end|>"],afterShot:" <|im_end|>"},zephyr:{id:"zephyr",name:"Zephyr",system:{schema:"<|system|>\n{system}<|endoftext|>"},user:"<|user|>\n{prompt}<|endoftext|>",assistant:"<|assistant|>",linebreaks:{system:1,user:1,assistant:1},afterShot:"\n",stop:["<|endoftext|>"]},octopus:{id:"octopus",name:"Octopus",system:{schema:"<|system|>{system}<|end|>",message:"You are a router. Below is the query from the users, please call the correct function and generate the parameters to call the function."},user:"<|user|>{prompt}<|end|>",assistant:"<|assistant|>",afterShot:"\n",stop:["<|end|>"]},llava:{id:"llava",name:"Llava",user:"USER: {prompt}",assistant:"ASSISTANT:",linebreaks:{user:1}},"synthia-cot":{id:"synthia-cot",name:"Synthia CoT",system:{schema:"SYSTEM: {system}",message:"Elaborate on the topic using a Tree of Thoughts and backtrack when necessary to construct a clear, cohesive Chain of Thought reasoning. 
Always answer without hesitation."},user:"USER: {prompt}",assistant:"ASSISTANT:",linebreaks:{system:1,user:1}},wizardlm:{id:"wizardlm",name:"WizardLM",system:{schema:"{system}",message:"You are a helpful AI assistant."},user:"USER: {prompt}",assistant:"ASSISTANT:",linebreaks:{user:1}},openchat:{id:"openchat",name:"OpenChat",user:"GPT4 User: {prompt}<|end_of_turn|>",assistant:"GPT4 Assistant:",stop:["<|end_of_turn|>"]},"openchat-correct":{id:"openchat-correct",name:"OpenChat correct",user:"GPT4 Correct User: {prompt}<|end_of_turn|>",assistant:"GPT4 Correct Assistant:",stop:["<|end_of_turn|>"]},human_response:{id:"human_response",name:"Human response",user:"### HUMAN:\n{prompt}",assistant:"### RESPONSE:",linebreaks:{user:2,assistant:1}},minichat:{id:"minichat",name:"Minichat",user:"[|User|] {prompt} </s>",assistant:"[|Assistant|]",stop:["</s>","[|User|]"],afterShot:"\n",prefix:"<s> "},phi:{id:"phi",name:"Phi",user:"Instruct: {prompt}",assistant:"Output:",linebreaks:{user:1},stop:["</s>","Instruct:"]},phi3:{id:"phi3",name:"Phi 3",user:"<|user|> {prompt}<|end|>",assistant:"<|assistant|>",system:{schema:"<|system|> {system}<|end|>"},afterShot:"<|end|>\n",stop:["<|end|>","<|user|>"]},deepseek:{id:"deepseek",name:"Deepseek",system:{schema:"{system}",message:"You are an AI programming assistant, utilizing the DeepSeek Coder model, developed by DeepSeek Company, and you only answer questions related to computer science. 
For politically sensitive questions, security and privacy issues, and other non-computer science questions, you will refuse to answer."},afterShot:"\n",user:"### Instruction:\n{prompt}",assistant:"### Response:",linebreaks:{user:1,system:1},stop:["<|EOT|>","### Instruction:"]},deepseek2:{id:"deepseek2",name:"Deepseek v2",system:{schema:"<|begin▁of▁sentence|>{system}"},user:"User: {prompt}",assistant:"Assistant:",linebreaks:{user:2,system:2},stop:["<|end▁of▁sentence|>","<|tool▁calls▁end|>"]},opencodeinterpreter:{id:"opencodeinterpreter",name:"Open code interpreter",user:"<|User|>\n{prompt}",assistant:"<|Assistant|>",linebreaks:{user:2},stop:["<|EOT|>","<|User|>"]},cerebrum:{id:"cerebrum",name:"Cerebrum",system:{schema:"{system}",message:"A chat between a user and a thinking artificial intelligence assistant. The assistant describes its thought process and gives helpful and detailed answers to the user's questions."},user:"User: {prompt}",assistant:"Ai:",linebreaks:{user:1},prefix:"<s>",stop:["</s>"]},"command-r":{id:"command-r",name:"Command-R",user:"<|START_OF_TURN_TOKEN|><|USER_TOKEN|>{prompt}<|END_OF_TURN_TOKEN|>",assistant:"<|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|>",prefix:"<BOS_TOKEN>",stop:["<|END_OF_TURN_TOKEN|>"],linebreaks:{user:1},system:{schema:"<|START_OF_TURN_TOKEN|><|SYSTEM_TOKEN|>{system}<|END_OF_TURN_TOKEN|>"}},llama3:{id:"llama3",name:"Llama 3",user:"<|start_header_id|>user<|end_header_id|>\n\n{prompt}<|eot_id|>",assistant:"<|start_header_id|>assistant<|end_header_id|>",stop:["<|eot_id|>","<|end_of_text|>"],afterShot:"<|eot_id|>\n\n",system:{schema:"<|start_header_id|>system<|end_header_id|>\n\n{system}<|eot_id|>"}},codestral:{id:"codestral",name:"Codestral",user:"[INST] {prompt}",assistant:" 
[/INST]",stop:["</s>"],afterShot:"\n",linebreaks:{system:2},system:{schema:"<<SYS>>\n{system}\n<</SYS>>"}},gemma:{id:"gemma",name:"Gemma",user:"<start_of_turn>user\n{prompt}",assistant:"<end_of_turn>\n<start_of_turn>model",stop:["<end_of_turn>"],afterShot:"\n"},nemotron:{id:"nemotron",name:"Nemotron",user:"<extra_id_1>User\n{prompt}",assistant:"<extra_id_1>Assistant",linebreaks:{system:2,user:1},system:{schema:"<extra_id_0>System\n{system}"},afterShot:"\n\n"}};class t{id;name;user;assistant;history=[];system;shots;stop;linebreaks;afterShot;prefix;_extraSystem="";_extraAssistant="";_replacePrompt="";_replaceSystem="";constructor(s){let e;e="string"==typeof s?this._load(s):s,this.id=e.id,this.name=e.name,this.user=e.user,this.assistant=e.assistant,this.system=e.system,this.shots=e.shots,this.stop=e.stop,this.linebreaks=e.linebreaks,this.afterShot=e.afterShot,this.prefix=e.prefix}cloneTo(s,e=!0){const a=new t(s);return e&&this?.shots&&this.shots.forEach((s=>{a.addShot(s.user,s.assistant)})),this._extraSystem.length>0&&a.afterSystem(this._extraSystem),this._replaceSystem.length>0&&a.replaceSystem(this._replaceSystem),this._extraAssistant.length>0&&a.afterAssistant(this._extraAssistant),this._replacePrompt.length>0&&a.replacePrompt(this._replacePrompt),a}toJson(){const s={id:this.id,name:this.name,user:this.user,assistant:this.assistant};return this?.prefix&&(s.prefix=this.prefix),this?.system&&(s.system=this.system),this?.shots&&(s.shots=this.shots),this?.afterShot&&(s.afterShot=this.afterShot),this?.stop&&(s.stop=this.stop),this?.linebreaks&&(s.linebreaks=this.linebreaks),s}replaceSystem(s){return this.system?(this._replaceSystem=s,this):this}afterSystem(s){return this.system?(this._extraSystem=s,this):this}afterAssistant(s){return this._extraAssistant=s,this}replacePrompt(s){return this._replacePrompt=s,this}addShot(s,e){this?.shots||(this.shots=[]);let t=e;return this.shots.push({user:s,assistant:t}),this}addShots(s){return 
s.forEach((s=>this.addShot(s.user,s.assistant))),this}renderShot(s){const e=[];e.push(this._buildUserBlock(s.user));let t=s.assistant;return this.afterShot&&(t+=this.afterShot),e.push(this._buildAssistantBlock(t)),e.join("")}render(s=!1){const e=new Array;this.prefix&&e.push(this.prefix);const t=this._buildSystemBlock(s);if(t.length>0&&(e.push(t),this?.linebreaks?.system&&e.push("\n".repeat(this.linebreaks.system))),this?.shots)for(const s of this.shots)e.push(this.renderShot(s));for(const s of this.history)e.push(this.renderShot(s));return e.push(this._buildUserBlock()),e.push(this._buildAssistantBlock()),e.join("")}prompt(s){return this.render().replace("{prompt}",s)}pushToHistory(s){return this.history.push(s),this}_buildSystemBlock(s){let e="";return this?.system?(this._replaceSystem&&(this.system.message=this._replaceSystem),this.system?.message?(e=this.system.schema.replace("{system}",this.system.message),this._extraSystem&&(e+=this._extraSystem)):s||(e=this.system.schema),e):""}_buildUserBlock(s){let e=[],t=this.user;return this._replacePrompt.length>0&&(t=t.replace("{prompt}",this._replacePrompt)),e.push(t),this?.linebreaks?.user&&e.push("\n".repeat(this.linebreaks.user)),s&&(e[0]=this.user.replace("{prompt}",s)),e.join("")}_buildAssistantBlock(s){let e=[],t=this.assistant;return this._extraAssistant.length>0&&(t+=this._extraAssistant),e.push(t),this?.linebreaks?.assistant&&e.push("\n".repeat(this.linebreaks.assistant)),s&&e.push(s),e.join("")}_load(s){try{if(s in e)return e[s];throw new Error(`Template ${s} not found`)}catch(e){throw new Error(`Error loading template ${s}: ${e}`)}}}return s.PromptTemplate=t,s.templates=e,s}({});
|
|
1
|
+
// Autogenerated, minified build output — do not edit by hand; regenerate from source.
// IIFE that populates and returns the `$tpl` namespace object with:
//   - $tpl.templates: map of LLM prompt-template definitions keyed by id
//     (alpaca, cerebrum, chatml, codestral, command-r, deepseek*, gemma, llama,
//     llama3, mistral, nemotron, phi*, vicuna*, zephyr, ...). Each entry holds
//     the `user` and `assistant` turn markers (with a `{prompt}` placeholder),
//     and optionally a `system` schema/message, `stop` token list, `prefix`,
//     `afterShot` suffix, and per-role `linebreaks` counts.
//   - $tpl.PromptTemplate: class (minified as `t`) constructed from a template
//     id (looked up via `_load`, which throws if unknown) or a raw template
//     object. Supports few-shot examples (addShot/addShots), conversation
//     history (pushToHistory), system/prompt text overrides (replaceSystem,
//     afterSystem, replacePrompt, afterAssistant), cloning into another
//     template (cloneTo), serialization (toJson), and final prompt assembly
//     via render()/prompt() which concatenate prefix, system block, shots,
//     history, and the current user/assistant blocks.
// NOTE(review): line breaks below fall inside string literals as displayed,
// so no comments are inserted inside the minified body.
var $tpl=function(s){"use strict";const e={alpaca:{assistant:"### Response:",id:"alpaca",linebreaks:{system:2,user:2},name:"Alpaca",system:{message:"Below is an instruction that describes a task. Write a response that appropriately completes the request.",schema:"{system}"},user:"### Instruction:\n{prompt}"},cerebrum:{assistant:"Ai:",id:"cerebrum",linebreaks:{user:1},name:"Cerebrum",prefix:"<s>",stop:["</s>"],system:{message:"A chat between a user and a thinking artificial intelligence assistant. The assistant describes its thought process and gives helpful and detailed answers to the user's questions.",schema:"{system}"},user:"User: {prompt}"},chatml:{afterShot:" <|im_end|>\n",assistant:"<|im_start|>assistant",id:"chatml",linebreaks:{assistant:1,system:1,user:1},name:"ChatMl",stop:["<|im_end|>"],system:{schema:"<|im_start|>system\n{system}<|im_end|>"},user:"<|im_start|>user\n{prompt}<|im_end|>"},codestral:{afterShot:"\n",assistant:" [/INST]",id:"codestral",linebreaks:{system:2},name:"Codestral",stop:["</s>"],system:{schema:"<<SYS>>\n{system}\n<</SYS>>"},user:"[INST] {prompt}"},"command-r":{assistant:"<|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|>",id:"command-r",linebreaks:{user:1},name:"Command-R",prefix:"<BOS_TOKEN>",stop:["<|END_OF_TURN_TOKEN|>"],system:{schema:"<|START_OF_TURN_TOKEN|><|SYSTEM_TOKEN|>{system}<|END_OF_TURN_TOKEN|>"},user:"<|START_OF_TURN_TOKEN|><|USER_TOKEN|>{prompt}<|END_OF_TURN_TOKEN|>"},deepseek:{afterShot:"\n",assistant:"### Response:",id:"deepseek",linebreaks:{system:1,user:1},name:"Deepseek",stop:["<|EOT|>","### Instruction:"],system:{message:"You are an AI programming assistant, utilizing the DeepSeek Coder model, developed by DeepSeek Company, and you only answer questions related to computer science. 
For politically sensitive questions, security and privacy issues, and other non-computer science questions, you will refuse to answer.",schema:"{system}"},user:"### Instruction:\n{prompt}"},deepseek2:{assistant:"Assistant:",id:"deepseek2",linebreaks:{system:2,user:2},name:"Deepseek 2",stop:["<|end▁of▁sentence|>","<|tool▁calls▁end|>"],system:{schema:"<|begin▁of▁sentence|>{system}"},user:"User: {prompt}"},deepseek3:{afterShot:"<|end▁of▁sentence|>",assistant:"<|Assistant|>",id:"deepseek3",linebreaks:{system:2,user:2},name:"Deepseek 3",stop:["<|end▁of▁sentence|>","<|tool▁calls▁end|>"],system:{schema:"<|begin▁of▁sentence|>{system}"},user:"<|User|>{prompt}"},gemma:{afterShot:"\n",assistant:"<end_of_turn>\n<start_of_turn>model",id:"gemma",name:"Gemma",stop:["<end_of_turn>"],user:"<start_of_turn>user\n{prompt}"},guanaco:{assistant:"### Assistant:",id:"guanaco",linebreaks:{user:1},name:"Guanaco",user:"### Human: {prompt}"},human_response:{assistant:"### RESPONSE:",id:"human_response",linebreaks:{assistant:1,user:2},name:"Human response",user:"### HUMAN:\n{prompt}"},llama:{assistant:" [/INST] ",id:"llama",linebreaks:{system:2,user:0},name:"Llama",prefix:"<s>",stop:["</s>"],system:{message:"You are a helpful, respectful and honest assistant. Always answer as helpfully as possible\n\nIf a question does not make any sense, or is not factually coherent, explain why instead of answering something not correct. 
If you don't know the answer to a question, please don't share false information.",schema:"[INST] <<SYS>>\n{system}\n<</SYS>>"},user:"{prompt}"},llama3:{afterShot:"<|eot_id|>\n\n",assistant:"<|start_header_id|>assistant<|end_header_id|>",id:"llama3",name:"Llama 3",stop:["<|eot_id|>","<|end_of_text|>"],system:{schema:"<|start_header_id|>system<|end_header_id|>\n\n{system}<|eot_id|>"},user:"<|start_header_id|>user<|end_header_id|>\n\n{prompt}<|eot_id|>"},llava:{assistant:"ASSISTANT:",id:"llava",linebreaks:{user:1},name:"Llava",user:"USER: {prompt}"},minichat:{afterShot:"\n",assistant:"[|Assistant|]",id:"minichat",name:"Minichat",prefix:"<s> ",stop:["</s>","[|User|]"],user:"[|User|] {prompt} </s>"},mistral:{afterShot:"\n",assistant:" [/INST]",id:"mistral",name:"Mistral",stop:["</s>"],user:"[INST] {prompt}"},nemotron:{afterShot:"\n\n",assistant:"<extra_id_1>Assistant",id:"nemotron",linebreaks:{system:2,user:1},name:"Nemotron",system:{schema:"<extra_id_0>System\n{system}"},user:"<extra_id_1>User\n{prompt}"},none:{assistant:"",id:"none",name:"No template",user:"{prompt}"},octopus:{afterShot:"\n",assistant:"<|assistant|>",id:"octopus",name:"Octopus",stop:["<|end|>"],system:{message:"You are a router. 
Below is the query from the users, please call the correct function and generate the parameters to call the function.",schema:"<|system|>{system}<|end|>"},user:"<|user|>{prompt}<|end|>"},openchat:{assistant:"GPT4 Assistant:",id:"openchat",name:"OpenChat",stop:["<|end_of_turn|>"],user:"GPT4 User: {prompt}<|end_of_turn|>"},"openchat-correct":{assistant:"GPT4 Correct Assistant:",id:"openchat-correct",name:"OpenChat correct",stop:["<|end_of_turn|>"],user:"GPT4 Correct User: {prompt}<|end_of_turn|>"},opencodeinterpreter:{assistant:"<|Assistant|>",id:"opencodeinterpreter",linebreaks:{user:2},name:"Open code interpreter",stop:["<|EOT|>","<|User|>"],user:"<|User|>\n{prompt}"},orca:{assistant:"### Response:",id:"orca",linebreaks:{system:2,user:2},name:"Orca",system:{message:"You are an AI assistant that follows instruction extremely well. Help as much as you can.",schema:"### System:\n{system}"},user:"### User:\n{prompt}"},phi:{assistant:"Output:",id:"phi",linebreaks:{user:1},name:"Phi",stop:["</s>","Instruct:"],user:"Instruct: {prompt}"},phi3:{afterShot:"<|end|>\n",assistant:"<|assistant|>",id:"phi3",name:"Phi 3",stop:["<|end|>","<|user|>"],system:{schema:"<|system|> {system}<|end|>"},user:"<|user|> {prompt}<|end|>"},phi4:{afterShot:"<|im_end|>\n",assistant:"<|im_start|>assistant<|im_sep|>",id:"phi4",name:"Phi 4",stop:["<|im_end|>","<|im_sep|>"],system:{schema:"<|im_start|>system<|im_sep|>{system}<|im_end|>"},user:"<|im_start|>user<|im_sep|>{prompt}<|im_end|>"},"synthia-cot":{assistant:"ASSISTANT:",id:"synthia-cot",linebreaks:{system:1,user:1},name:"Synthia CoT",system:{message:"Elaborate on the topic using a Tree of Thoughts and backtrack when necessary to construct a clear, cohesive Chain of Thought reasoning. 
Always answer without hesitation.",schema:"SYSTEM: {system}"},user:"USER: {prompt}"},vicuna:{assistant:"### ASSISTANT:",id:"vicuna",linebreaks:{user:2},name:"Vicuna",user:"USER: {prompt}"},vicuna_system:{assistant:"### ASSISTANT:",id:"vicuna_system",linebreaks:{system:2,user:2},name:"Vicuna system",system:{schema:"SYSTEM: {system}"},user:"USER: {prompt}"},wizard_vicuna:{assistant:"### ASSISTANT:",id:"wizard_vicuna",linebreaks:{user:2},name:"Wizard Vicuna",stop:["<|endoftext|>"],user:"### Human:\n{prompt}"},wizardlm:{assistant:"ASSISTANT:",id:"wizardlm",linebreaks:{user:1},name:"WizardLM",system:{message:"You are a helpful AI assistant.",schema:"{system}"},user:"USER: {prompt}"},zephyr:{afterShot:"\n",assistant:"<|assistant|>",id:"zephyr",linebreaks:{assistant:1,system:1,user:1},name:"Zephyr",stop:["<|endoftext|>"],system:{schema:"<|system|>\n{system}<|endoftext|>"},user:"<|user|>\n{prompt}<|endoftext|>"}};class t{id;name;user;assistant;history=[];system;shots;stop;linebreaks;afterShot;prefix;_extraSystem="";_extraAssistant="";_replacePrompt="";_replaceSystem="";constructor(s){let e;e="string"==typeof s?this._load(s):s,this.id=e.id,this.name=e.name,this.user=e.user,this.assistant=e.assistant,this.system=e.system,this.shots=e.shots,this.stop=e.stop,this.linebreaks=e.linebreaks,this.afterShot=e.afterShot,this.prefix=e.prefix}cloneTo(s,e=!0){const a=new t(s);return e&&this?.shots&&this.shots.forEach((s=>{a.addShot(s.user,s.assistant)})),this._extraSystem.length>0&&a.afterSystem(this._extraSystem),this._replaceSystem.length>0&&a.replaceSystem(this._replaceSystem),this._extraAssistant.length>0&&a.afterAssistant(this._extraAssistant),this._replacePrompt.length>0&&a.replacePrompt(this._replacePrompt),a}toJson(){const s={id:this.id,name:this.name,user:this.user,assistant:this.assistant};return 
this?.prefix&&(s.prefix=this.prefix),this?.system&&(s.system=this.system),this?.shots&&(s.shots=this.shots),this?.afterShot&&(s.afterShot=this.afterShot),this?.stop&&(s.stop=this.stop),this?.linebreaks&&(s.linebreaks=this.linebreaks),s}replaceSystem(s){return this.system?(this._replaceSystem=s,this):this}afterSystem(s){return this.system?(this._extraSystem=s,this):this}afterAssistant(s){return this._extraAssistant=s,this}replacePrompt(s){return this._replacePrompt=s,this}addShot(s,e){this?.shots||(this.shots=[]);let t=e;return this.shots.push({user:s,assistant:t}),this}addShots(s){return s.forEach((s=>this.addShot(s.user,s.assistant))),this}renderShot(s){const e=[];e.push(this._buildUserBlock(s.user));let t=s.assistant;return this.afterShot&&(t+=this.afterShot),e.push(this._buildAssistantBlock(t)),e.join("")}render(s=!1){const e=new Array;this.prefix&&e.push(this.prefix);const t=this._buildSystemBlock(s);if(t.length>0&&(e.push(t),this?.linebreaks?.system&&e.push("\n".repeat(this.linebreaks.system))),this?.shots)for(const s of this.shots)e.push(this.renderShot(s));for(const s of this.history)e.push(this.renderShot(s));return e.push(this._buildUserBlock()),e.push(this._buildAssistantBlock()),e.join("")}prompt(s){return this.render().replace("{prompt}",s)}pushToHistory(s){return this.history.push(s),this}_buildSystemBlock(s){let e="";return this?.system?(this._replaceSystem&&(this.system.message=this._replaceSystem),this.system?.message?(e=this.system.schema.replace("{system}",this.system.message),this._extraSystem&&(e+=this._extraSystem)):s||(e=this.system.schema),e):""}_buildUserBlock(s){let e=[],t=this.user;return this._replacePrompt.length>0&&(t=t.replace("{prompt}",this._replacePrompt)),e.push(t),this?.linebreaks?.user&&e.push("\n".repeat(this.linebreaks.user)),s&&(e[0]=this.user.replace("{prompt}",s)),e.join("")}_buildAssistantBlock(s){let e=[],t=this.assistant;return 
this?.linebreaks?.assistant&&(t+="\n".repeat(this.linebreaks.assistant)),this._extraAssistant.length>0&&(t+=this._extraAssistant),e.push(t),s&&e.push(s),e.join("")}_load(s){try{if(s in e)return e[s];throw new Error(`Template ${s} not found`)}catch(e){throw new Error(`Error loading template ${s}: ${e}`)}}}return s.PromptTemplate=t,s.templates=e,s}({});
|