modprompt 0.5.0 → 0.6.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cls.d.ts CHANGED
@@ -1,4 +1,10 @@
1
1
  import { LmTemplate, PromptBlock, TurnBlock, SpacingSlots, HistoryTurn } from "./interfaces";
2
+ /**
3
+ * Represents a modified language model template.
4
+ *
5
+ * @example
6
+ * const tpl = new PromptTemplate('alpaca');
7
+ */
2
8
  declare class PromptTemplate {
3
9
  id: string;
4
10
  name: string;
@@ -15,18 +21,147 @@ declare class PromptTemplate {
15
21
  _extraAssistant: string;
16
22
  _replacePrompt: string;
17
23
  _replaceSystem: string;
24
+ /**
25
+ * Constructs a new `PromptTemplate` instance.
26
+ *
27
+ * @param template - Either the name of the template to load or an instance of `LmTemplate`.
28
+ *
29
+ * @example
30
+ * const tpl = new PromptTemplate('alpaca');
31
+ */
18
32
  constructor(template: string | LmTemplate);
33
+ /**
34
+ * Clones the current `PromptTemplate` instance to a new instance of `PromptTemplate`.
35
+ *
36
+ * This function creates a new `PromptTemplate` instance with the same state as the current instance.
37
+ * It is useful when you want to work with a copy of the current template without modifying the original one.
38
+ *
39
+ * @param {string | LmTemplate} template - The id or template instance of the new `PromptTemplate` to make
40
+ * @param {boolean} keepShots - Keep the shots for the template instance: this will also clone the shots
41
+ * @returns {PromptTemplate} - A new `PromptTemplate` instance with the same state as the current one.
42
+ *
43
+ * @example
44
+ * const tpl = new PromptTemplate('alpaca');
45
+ * const clonedTpl = tpl.cloneTo('chatml');
46
+ * console.log(clonedTpl);
47
+ */
19
48
  cloneTo(template: string | LmTemplate, keepShots?: boolean): PromptTemplate;
49
+ /**
50
+ * Converts the current `PromptTemplate` instance to a JSON object.
51
+ *
52
+ * This function serializes the current state of the `PromptTemplate` instance into a JSON object,
53
+ * which can be used for storing the template or transmitting it over a network.
54
+ *
55
+ * @returns {LmTemplate} - A JSON object representing the current state of the `PromptTemplate`.
56
+ *
57
+ * @example
58
+ * const tpl = new PromptTemplate('alpaca');
59
+ * const json = tpl.toJson();
60
+ * console.log(json);
61
+ */
20
62
  toJson(): LmTemplate;
63
+ /**
64
+ * Replaces the system block with a given message.
65
+ *
66
+ * @param msg - The message to replace the system block with.
67
+ * @returns A reference to the current `PromptTemplate` instance for chaining.
68
+ *
69
+ * @example
70
+ * tpl.replaceSystem('You are a javascript expert');
71
+ */
21
72
  replaceSystem(msg: string): PromptTemplate;
73
+ /**
74
+ * Appends a given message after the system message.
75
+ *
76
+ * @param msg - The message to append.
77
+ * @returns A reference to the current `PromptTemplate` instance for chaining.
78
+ *
79
+ * @example
80
+ * tpl.afterSystem('You are a javascript expert');
81
+ */
22
82
  afterSystem(msg: string): PromptTemplate;
83
+ /**
84
+ * Appends a given message after the assistant prompt token.
85
+ *
86
+ * @param msg - The message to append.
87
+ * @returns A reference to the current `PromptTemplate` instance for chaining.
88
+ *
89
+ * @example
90
+ * tpl.afterAssistant('( answer in json )');
91
+ */
23
92
  afterAssistant(msg: string): PromptTemplate;
93
+ /**
94
+ * Replaces the `{prompt}` placeholder in the user message with a given message.
95
+ *
96
+ * @param msg - The message to replace the placeholder with.
97
+ * @returns A reference to the current `PromptTemplate` instance for chaining.
98
+ *
99
+ * @example
100
+ * tpl.replacePrompt(fix this invalid json:\n\n```json\n{prompt}\n```);
101
+ */
24
102
  replacePrompt(msg: string): PromptTemplate;
103
+ /**
104
+ * Adds a new shot (a user-assistant interaction) to the template.
105
+ *
106
+ * @param user - The user's message.
107
+ * @param assistant - The assistant's response.
108
+ * @returns A reference to the current `PromptTemplate` instance for chaining.
109
+ *
110
+ * @example
111
+ * tpl.addShot('Is it raining?', 'No, it is sunny.');
112
+ */
25
113
  addShot(user: string, assistant: string): PromptTemplate;
114
+ /**
115
+ * Adds multiple shots (user-assistant interactions) to the template.
116
+ *
117
+ * This function allows you to add multiple turns to the conversation. Each turn is represented by an object
118
+ * with a 'user' property (the user's message) and an 'assistant' property (the assistant's response).
119
+ *
120
+ * @param {Array<TurnBlock>} shots - An array of objects, where each object represents a user-assistant interaction.
121
+ * @returns {PromptTemplate} - A reference to the current `PromptTemplate` instance for chaining.
122
+ *
123
+ * @example
124
+ * const tpl = new PromptTemplate('alpaca');
125
+ * tpl.addShots([
126
+ * { user: 'What is the weather like?', assistant: 'It is sunny today!' },
127
+ * { user: 'What is the weather like tomorrow?', assistant: 'I am sorry, but I can\'t predict the future.' }
128
+ * ]);
129
+ */
26
130
  addShots(shots: Array<TurnBlock>): PromptTemplate;
131
+ /**
132
+ * Render a turn block
133
+ *
134
+ * @param {TurnBlock | HistoryTurn} shot the shot to render
135
+ * @returns {string} the rendered text
136
+ */
27
137
  renderShot(shot: TurnBlock | HistoryTurn): string;
138
+ /**
139
+ * Renders the template into a string representation.
140
+ *
141
+ * @returns The rendered template as a string.
142
+ *
143
+ * @example
144
+ * const rendered = tpl.render();
145
+ * console.log(rendered);
146
+ */
28
147
  render(skip_empty_system?: boolean): string;
148
+ /**
149
+ * Renders the template with the provided message replacing the `{prompt}` placeholder.
150
+ *
151
+ * @param msg - The message to use for replacing the `{prompt}` placeholder.
152
+ * @returns The rendered template with the provided message.
153
+ *
154
+ * @example
155
+ * const prompted = tpl.prompt("list the planets in the solar system");
156
+ * console.log(prompted);
157
+ */
29
158
  prompt(msg: string): string;
159
+ /**
160
+ * Push a turn into history
161
+ *
162
+ * @param {HistoryTurn} turn the history turn
163
+ * @returns {PromptTemplate}
164
+ */
30
165
  pushToHistory(turn: HistoryTurn): PromptTemplate;
31
166
  private _buildSystemBlock;
32
167
  private _buildUserBlock;
@@ -1,32 +1,157 @@
1
+ /**
2
+ * Defines the spacing (in terms of line breaks) to be applied between different parts of the conversation.
3
+ *
4
+ * @interface SpacingSlots
5
+ * @typedef {SpacingSlots}
6
+ *
7
+ * @example
8
+ * const spacingExample: SpacingSlots = {
9
+ * system: 2,
10
+ * user: 1,
11
+ * assistant: 1
12
+ * };
13
+ */
1
14
  interface SpacingSlots {
15
+ /**
16
+ * Number of line breaks to be applied after the system message.
17
+ */
2
18
  system?: number;
19
+ /**
20
+ * Number of line breaks to be applied after the user message.
21
+ */
3
22
  user?: number;
23
+ /**
24
+ * Number of line breaks to be applied after the assistant message.
25
+ */
4
26
  assistant?: number;
5
27
  }
28
+ /**
29
+ * Represents a block of system-level prompts or instructions in the conversation.
30
+ *
31
+ * @example
32
+ * const promptExample: PromptBlock = {
33
+ * schema: '### System: {system}',
34
+ * message: 'Some system message'
35
+ * };
36
+ */
6
37
  interface PromptBlock {
38
+ /**
39
+ * The schema or format for the system message.
40
+ *
41
+ * Can include placeholders like `{system}` which can be programmatically replaced with actual messages later.
42
+ */
7
43
  schema: string;
44
+ /**
45
+ * Optional default message content for the system.
46
+ *
47
+ * Used if a dynamic value isn't provided for `{system}` placeholder.
48
+ */
8
49
  message?: string;
9
50
  }
51
+ /**
52
+ * Represents a single turn in a conversation, consisting of a user message followed by an assistant response.
53
+ *
54
+ * @example
55
+ * const turnExample: TurnBlock = {
56
+ * user: 'What\'s the weather like?',
57
+ * assistant: 'It\'s sunny today!'
58
+ * };
59
+ */
10
60
  interface TurnBlock {
61
+ /**
62
+ * The message content from the user.
63
+ */
11
64
  user: string;
65
+ /**
66
+ * The corresponding response from the assistant.
67
+ */
12
68
  assistant: string;
13
69
  }
70
+ /**
71
+ * Represents a template for language modeling, detailing the structure and interaction elements of a conversation.
72
+ *
73
+ * @example
74
+ * const sampleTemplate: LmTemplate = {
75
+ * id: "alpaca",
76
+ * name: "Alpaca",
77
+ * system: {
78
+ * schema: "{system}",
79
+ * message: "Below is an instruction that describes a task. Write a response that appropriately completes the request.",
80
+ * },
81
+ * user: "### Instruction:\n{prompt}",
82
+ * assistant: "### Response:",
83
+ * linebreaks: {
84
+ * system: 2,
85
+ * user: 2,
86
+ * }
87
+ * };
88
+ */
14
89
  interface LmTemplate {
90
+ /**
91
+ * The id slug of the template.
92
+ */
15
93
  id: string;
94
+ /**
95
+ * The name of the template.
96
+ */
16
97
  name: string;
98
+ /**
99
+ * The default message template for the user.
100
+ *
101
+ * Includes a `{prompt}` placeholder which can be programmatically replaced later.
102
+ */
17
103
  user: string;
104
+ /**
105
+ * The default message template for the assistant.
106
+ */
18
107
  assistant: string;
108
+ /**
109
+ * Optional prompt block that represents system-level messages or instructions.
110
+ */
19
111
  system?: PromptBlock;
112
+ /**
113
+ * Optional array of turn blocks representing back-and-forths between the user and the assistant.
114
+ *
115
+ * Useful for simulating multi-turn interactions.
116
+ */
20
117
  shots?: Array<TurnBlock>;
118
+ /**
119
+ * Optional array of strings that signal the end of a conversation.
120
+ *
121
+ * These strings can be used to detect when a conversation should be terminated.
122
+ */
21
123
  stop?: Array<string>;
124
+ /**
125
+ * Optional specifications for line breaks between different message types.
126
+ *
127
+ * This can be used to format the rendered conversation.
128
+ */
22
129
  linebreaks?: SpacingSlots;
130
+ /**
131
+ * String to display after a shot
132
+ */
23
133
  afterShot?: string;
134
+ /**
135
+ * A prefix like a bos token to insert before content
136
+ */
24
137
  prefix?: string;
25
138
  }
139
+ /**
140
+ * Image data
141
+ *
142
+ * @interface ImgData
143
+ * @typedef {ImgData}
144
+ */
26
145
  interface ImgData {
27
146
  id: number;
28
147
  data: string;
29
148
  }
149
+ /**
150
+ * A history turn
151
+ *
152
+ * @interface HistoryTurn
153
+ * @typedef {HistoryTurn}
154
+ */
30
155
  interface HistoryTurn {
31
156
  user: string;
32
157
  assistant: string;
package/dist/mod.es.mjs CHANGED
@@ -165,8 +165,8 @@ const templates = {
165
165
  "stop": ["<|end_of_turn|>"]
166
166
  },
167
167
  "openchat-correct": {
168
- "id": "openchat",
169
- "name": "OpenChat",
168
+ "id": "openchat-corect",
169
+ "name": "OpenChat correct",
170
170
  "user": "GPT4 Correct User: {prompt}<|end_of_turn|>",
171
171
  "assistant": "GPT4 Correct Assistant:",
172
172
  "stop": ["<|end_of_turn|>"]
@@ -184,10 +184,11 @@ const templates = {
184
184
  "minichat": {
185
185
  "id": "minichat",
186
186
  "name": "Minichat",
187
- "user": "<s> [|User|] {prompt} </s>",
187
+ "user": "[|User|] {prompt} </s>",
188
188
  "assistant": "[|Assistant|]",
189
189
  "stop": ["</s>", "[|User|]"],
190
190
  "afterShot": "\n",
191
+ "prefix": "<s> "
191
192
  },
192
193
  "phi": {
193
194
  "id": "phi",
@@ -206,6 +207,7 @@ const templates = {
206
207
  "schema": "{system}",
207
208
  "message": "You are an AI programming assistant, utilizing the DeepSeek Coder model, developed by DeepSeek Company, and you only answer questions related to computer science. For politically sensitive questions, security and privacy issues, and other non-computer science questions, you will refuse to answer."
208
209
  },
210
+ "afterShot": "\n",
209
211
  "user": "### Instruction:\n{prompt}",
210
212
  "assistant": "### Response:",
211
213
  "linebreaks": {
@@ -213,9 +215,56 @@ const templates = {
213
215
  "system": 1,
214
216
  },
215
217
  "stop": ["<|EOT|>", "### Instruction:"]
218
+ },
219
+ "opencodeinterpreter": {
220
+ "id": "opencodeinterpreter",
221
+ "name": "Open code interpreter",
222
+ "user": "<|User|>\n{prompt}",
223
+ "assistant": "<|Assistant|>",
224
+ "linebreaks": {
225
+ "user": 2
226
+ },
227
+ "stop": ["<|EOT|>", "<|User|>"]
228
+ },
229
+ "cerebrum": {
230
+ "id": "cerebrum",
231
+ "name": "Cerebrum",
232
+ "system": {
233
+ "schema": "{system}",
234
+ "message": "A chat between a user and a thinking artificial intelligence assistant. The assistant describes its thought process and gives helpful and detailed answers to the user's questions."
235
+ },
236
+ "user": "User: {prompt}",
237
+ "assistant": "Ai:",
238
+ "linebreaks": {
239
+ "user": 1
240
+ },
241
+ "prefix": "<s>",
242
+ "stop": ["</s>"]
243
+ },
244
+ "command-r": {
245
+ "id": "command-r",
246
+ "name": "Command-R",
247
+ "user": "<|START_OF_TURN_TOKEN|><|USER_TOKEN|>{prompt}<|END_OF_TURN_TOKEN|>",
248
+ "assistant": "<|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|>",
249
+ "prefix": "<BOS_TOKEN>",
250
+ "stop": [
251
+ "<|END_OF_TURN_TOKEN|>"
252
+ ],
253
+ "linebreaks": {
254
+ "user": 1,
255
+ },
256
+ "system": {
257
+ "schema": "<|START_OF_TURN_TOKEN|><|SYSTEM_TOKEN|>{system}<|END_OF_TURN_TOKEN|>"
258
+ }
216
259
  }
217
260
  };
218
261
 
262
+ /**
263
+ * Represents a modified language model template.
264
+ *
265
+ * @example
266
+ * const tpl = new PromptTemplate('alpaca');
267
+ */
219
268
  class PromptTemplate {
220
269
  id;
221
270
  name;
@@ -228,10 +277,19 @@ class PromptTemplate {
228
277
  linebreaks;
229
278
  afterShot;
230
279
  prefix;
280
+ // internal state
231
281
  _extraSystem = "";
232
282
  _extraAssistant = "";
233
283
  _replacePrompt = "";
234
284
  _replaceSystem = "";
285
+ /**
286
+ * Constructs a new `PromptTemplate` instance.
287
+ *
288
+ * @param template - Either the name of the template to load or an instance of `LmTemplate`.
289
+ *
290
+ * @example
291
+ * const tpl = new PromptTemplate('alpaca');
292
+ */
235
293
  constructor(template) {
236
294
  let tpl;
237
295
  if (typeof template == "string") {
@@ -251,6 +309,21 @@ class PromptTemplate {
251
309
  this.afterShot = tpl.afterShot;
252
310
  this.prefix = tpl.prefix;
253
311
  }
312
+ /**
313
+ * Clones the current `PromptTemplate` instance to a new instance of `PromptTemplate`.
314
+ *
315
+ * This function creates a new `PromptTemplate` instance with the same state as the current instance.
316
+ * It is useful when you want to work with a copy of the current template without modifying the original one.
317
+ *
318
+ * @param {string | LmTemplate} template - The id or template instance of the new `PromptTemplate` to make
319
+ * @param {boolean} keepShots - Keep the shots for the template instance: this will also clone the shots
320
+ * @returns {PromptTemplate} - A new `PromptTemplate` instance with the same state as the current one.
321
+ *
322
+ * @example
323
+ * const tpl = new PromptTemplate('alpaca');
324
+ * const clonedTpl = tpl.cloneTo('chatml');
325
+ * console.log(clonedTpl);
326
+ */
254
327
  cloneTo(template, keepShots = true) {
255
328
  const tpl = new PromptTemplate(template);
256
329
  if (keepShots) {
@@ -274,6 +347,19 @@ class PromptTemplate {
274
347
  }
275
348
  return tpl;
276
349
  }
350
+ /**
351
+ * Converts the current `PromptTemplate` instance to a JSON object.
352
+ *
353
+ * This function serializes the current state of the `PromptTemplate` instance into a JSON object,
354
+ * which can be used for storing the template or transmitting it over a network.
355
+ *
356
+ * @returns {LmTemplate} - A JSON object representing the current state of the `PromptTemplate`.
357
+ *
358
+ * @example
359
+ * const tpl = new PromptTemplate('alpaca');
360
+ * const json = tpl.toJson();
361
+ * console.log(json);
362
+ */
277
363
  toJson() {
278
364
  const res = {
279
365
  id: this.id,
@@ -301,6 +387,15 @@ class PromptTemplate {
301
387
  }
302
388
  return res;
303
389
  }
390
+ /**
391
+ * Replaces the system block with a given message.
392
+ *
393
+ * @param msg - The message to replace the system block with.
394
+ * @returns A reference to the current `PromptTemplate` instance for chaining.
395
+ *
396
+ * @example
397
+ * tpl.replaceSystem('You are a javascript expert');
398
+ */
304
399
  replaceSystem(msg) {
305
400
  if (!this.system) {
306
401
  return this;
@@ -308,6 +403,15 @@ class PromptTemplate {
308
403
  this._replaceSystem = msg;
309
404
  return this;
310
405
  }
406
+ /**
407
+ * Appends a given message after the system message.
408
+ *
409
+ * @param msg - The message to append.
410
+ * @returns A reference to the current `PromptTemplate` instance for chaining.
411
+ *
412
+ * @example
413
+ * tpl.afterSystem('You are a javascript expert');
414
+ */
311
415
  afterSystem(msg) {
312
416
  if (!this.system) {
313
417
  return this;
@@ -315,14 +419,42 @@ class PromptTemplate {
315
419
  this._extraSystem = msg;
316
420
  return this;
317
421
  }
422
+ /**
423
+ * Appends a given message after the assistant prompt token.
424
+ *
425
+ * @param msg - The message to append.
426
+ * @returns A reference to the current `PromptTemplate` instance for chaining.
427
+ *
428
+ * @example
429
+ * tpl.afterAssistant('( answer in json )');
430
+ */
318
431
  afterAssistant(msg) {
319
432
  this._extraAssistant = msg;
320
433
  return this;
321
434
  }
435
+ /**
436
+ * Replaces the `{prompt}` placeholder in the user message with a given message.
437
+ *
438
+ * @param msg - The message to replace the placeholder with.
439
+ * @returns A reference to the current `PromptTemplate` instance for chaining.
440
+ *
441
+ * @example
442
+ * tpl.replacePrompt(fix this invalid json:\n\n```json\n{prompt}\n```);
443
+ */
322
444
  replacePrompt(msg) {
323
445
  this._replacePrompt = msg;
324
446
  return this;
325
447
  }
448
+ /**
449
+ * Adds a new shot (a user-assistant interaction) to the template.
450
+ *
451
+ * @param user - The user's message.
452
+ * @param assistant - The assistant's response.
453
+ * @returns A reference to the current `PromptTemplate` instance for chaining.
454
+ *
455
+ * @example
456
+ * tpl.addShot('Is it raining?', 'No, it is sunny.');
457
+ */
326
458
  addShot(user, assistant) {
327
459
  if (!this?.shots) {
328
460
  this.shots = [];
@@ -334,10 +466,32 @@ class PromptTemplate {
334
466
  });
335
467
  return this;
336
468
  }
469
+ /**
470
+ * Adds multiple shots (user-assistant interactions) to the template.
471
+ *
472
+ * This function allows you to add multiple turns to the conversation. Each turn is represented by an object
473
+ * with a 'user' property (the user's message) and an 'assistant' property (the assistant's response).
474
+ *
475
+ * @param {Array<TurnBlock>} shots - An array of objects, where each object represents a user-assistant interaction.
476
+ * @returns {PromptTemplate} - A reference to the current `PromptTemplate` instance for chaining.
477
+ *
478
+ * @example
479
+ * const tpl = new PromptTemplate('alpaca');
480
+ * tpl.addShots([
481
+ * { user: 'What is the weather like?', assistant: 'It is sunny today!' },
482
+ * { user: 'What is the weather like tomorrow?', assistant: 'I am sorry, but I can\'t predict the future.' }
483
+ * ]);
484
+ */
337
485
  addShots(shots) {
338
486
  shots.forEach((s) => this.addShot(s.user, s.assistant));
339
487
  return this;
340
488
  }
489
+ /**
490
+ * Render a turn block
491
+ *
492
+ * @param {TurnBlock | HistoryTurn} shot the shot to render
493
+ * @returns {string} the rendered text
494
+ */
341
495
  renderShot(shot) {
342
496
  const buf = [];
343
497
  buf.push(this._buildUserBlock(shot.user));
@@ -351,11 +505,22 @@ class PromptTemplate {
351
505
  buf.push(this._buildAssistantBlock(_assistantMsg));
352
506
  return buf.join("");
353
507
  }
508
+ /**
509
+ * Renders the template into a string representation.
510
+ *
511
+ * @returns The rendered template as a string.
512
+ *
513
+ * @example
514
+ * const rendered = tpl.render();
515
+ * console.log(rendered);
516
+ */
354
517
  render(skip_empty_system = false) {
355
518
  const buf = new Array();
519
+ // prefix
356
520
  if (this.prefix) {
357
521
  buf.push(this.prefix);
358
522
  }
523
+ // system prompt if any
359
524
  const _systemBlock = this._buildSystemBlock(skip_empty_system);
360
525
  if (_systemBlock.length > 0) {
361
526
  buf.push(_systemBlock);
@@ -363,21 +528,42 @@ class PromptTemplate {
363
528
  buf.push("\n".repeat(this.linebreaks.system));
364
529
  }
365
530
  }
531
+ // shots
366
532
  if (this?.shots) {
367
533
  for (const shot of this.shots) {
368
534
  buf.push(this.renderShot(shot));
369
535
  }
370
536
  }
537
+ // history
371
538
  for (const turn of this.history) {
372
539
  buf.push(this.renderShot(turn));
373
540
  }
541
+ // user block
374
542
  buf.push(this._buildUserBlock());
543
+ // assistant block
375
544
  buf.push(this._buildAssistantBlock());
545
+ //console.log(buf)
376
546
  return buf.join("");
377
547
  }
548
+ /**
549
+ * Renders the template with the provided message replacing the `{prompt}` placeholder.
550
+ *
551
+ * @param msg - The message to use for replacing the `{prompt}` placeholder.
552
+ * @returns The rendered template with the provided message.
553
+ *
554
+ * @example
555
+ * const prompted = tpl.prompt("list the planets in the solar system");
556
+ * console.log(prompted);
557
+ */
378
558
  prompt(msg) {
379
559
  return this.render().replace("{prompt}", msg);
380
560
  }
561
+ /**
562
+ * Push a turn into history
563
+ *
564
+ * @param {HistoryTurn} turn the history turn
565
+ * @returns {PromptTemplate}
566
+ */
381
567
  pushToHistory(turn) {
382
568
  this.history.push(turn);
383
569
  return this;
@@ -403,6 +589,7 @@ class PromptTemplate {
403
589
  }
404
590
  _buildUserBlock(msg) {
405
591
  let buf = [];
592
+ // prompt replacement
406
593
  let _userBlock = this.user;
407
594
  if (this._replacePrompt.length > 0) {
408
595
  _userBlock = _userBlock.replace("{prompt}", this._replacePrompt);
@@ -412,6 +599,7 @@ class PromptTemplate {
412
599
  buf.push("\n".repeat(this.linebreaks.user));
413
600
  }
414
601
  if (msg) {
602
+ // this is a shot
415
603
  buf[0] = _userBlock.replace("{prompt}", msg);
416
604
  }
417
605
  return buf.join("");
@@ -427,6 +615,7 @@ class PromptTemplate {
427
615
  buf.push("\n".repeat(this.linebreaks.assistant));
428
616
  }
429
617
  if (msg) {
618
+ // this is a shot
430
619
  buf.push(msg);
431
620
  }
432
621
  return buf.join("");
@@ -434,6 +623,7 @@ class PromptTemplate {
434
623
  _load(name) {
435
624
  try {
436
625
  if (name in templates) {
626
+ //console.log("Loading", name)
437
627
  return templates[name];
438
628
  }
439
629
  else {
package/dist/mod.min.js CHANGED
@@ -1 +1 @@
1
- var $tpl=function(s){"use strict";const t={none:{id:"none",name:"No template",user:"{prompt}",assistant:""},alpaca:{id:"alpaca",name:"Alpaca",system:{schema:"{system}",message:"Below is an instruction that describes a task. Write a response that appropriately completes the request."},user:"### Instruction:\n{prompt}",assistant:"### Response:",linebreaks:{system:2,user:2}},llama:{id:"llama",name:"Llama",system:{schema:"[INST] <<SYS>>\n{system}\n<</SYS>>",message:"You are a helpful, respectful and honest assistant. Always answer as helpfully as possible\n\nIf a question does not make any sense, or is not factually coherent, explain why instead of answering something not correct. If you don't know the answer to a question, please don't share false information."},user:"{prompt}",assistant:" [/INST] ",linebreaks:{system:2,user:0},prefix:"<s>",stop:["</s>"]},mistral:{id:"mistral",name:"Mistral",user:"[INST] {prompt}",assistant:" [/INST]",stop:["</s>"],afterShot:"\n",prefix:"<s>"},orca:{id:"orca",name:"Orca",system:{schema:"### System:\n{system}",message:"You are an AI assistant that follows instruction extremely well. 
Help as much as you can."},user:"### User:\n{prompt}",assistant:"### Response:",linebreaks:{system:2,user:2}},vicuna:{id:"vicuna",name:"Vicuna",user:"USER: {prompt}",assistant:"### ASSISTANT:",linebreaks:{user:2}},vicuna_system:{id:"vicuna_system",name:"Vicuna system",system:{schema:"SYSTEM: {system}"},user:"USER: {prompt}",assistant:"### ASSISTANT:",linebreaks:{system:2,user:2}},wizard_vicuna:{id:"wizard_vicuna",name:"Wizard Vicuna",user:"### Human:\n{prompt}",assistant:"### ASSISTANT:",linebreaks:{user:2},stop:["<|endoftext|>"]},guanaco:{id:"guanaco",name:"Guanaco",user:"### Human: {prompt}",assistant:"### Assistant:",linebreaks:{user:1}},chatml:{id:"chatml",name:"ChatMl",system:{schema:"<|im_start|>system\n{system}\n<|im_end|>"},user:"<|im_start|>user\n{prompt}<|im_end|>",assistant:"<|im_start|>assistant",linebreaks:{system:1,user:1,assistant:1},stop:["<|im_end|>"],afterShot:" <|im_end|>"},zephyr:{id:"zephyr",name:"Zephyr",system:{schema:"<|system|>\n{system}</s>"},user:"<|user|>\n{prompt}</s>",assistant:"<|assistant|>",linebreaks:{system:1,user:1,assistant:1},afterShot:"\n"},"synthia-cot":{id:"synthia-cot",name:"Synthia CoT",system:{schema:"SYSTEM: {system}",message:"Elaborate on the topic using a Tree of Thoughts and backtrack when necessary to construct a clear, cohesive Chain of Thought reasoning. 
Always answer without hesitation."},user:"USER: {prompt}",assistant:"ASSISTANT:",linebreaks:{system:1,user:1}},wizardlm:{id:"wizardlm",name:"WizardLM",system:{schema:"{system}",message:"You are a helpful AI assistant."},user:"USER: {prompt}",assistant:"ASSISTANT:",linebreaks:{user:1}},openchat:{id:"openchat",name:"OpenChat",user:"GPT4 User: {prompt}<|end_of_turn|>",assistant:"GPT4 Assistant:",stop:["<|end_of_turn|>"]},"openchat-correct":{id:"openchat",name:"OpenChat",user:"GPT4 Correct User: {prompt}<|end_of_turn|>",assistant:"GPT4 Correct Assistant:",stop:["<|end_of_turn|>"]},human_response:{id:"human_response",name:"Human response",user:"### HUMAN:\n{prompt}",assistant:"### RESPONSE:",linebreaks:{user:2,assistant:1}},minichat:{id:"minichat",name:"Minichat",user:"<s> [|User|] {prompt} </s>",assistant:"[|Assistant|]",stop:["</s>","[|User|]"],afterShot:"\n"},phi:{id:"phi",name:"Phi",user:"Instruct: {prompt}",assistant:"Output:",linebreaks:{user:1},stop:["</s>","Instruct:"]},deepseek:{id:"deepseek",name:"Deepseek",system:{schema:"{system}",message:"You are an AI programming assistant, utilizing the DeepSeek Coder model, developed by DeepSeek Company, and you only answer questions related to computer science. 
For politically sensitive questions, security and privacy issues, and other non-computer science questions, you will refuse to answer."},user:"### Instruction:\n{prompt}",assistant:"### Response:",linebreaks:{user:1,system:1},stop:["<|EOT|>","### Instruction:"]}};class e{id;name;user;assistant;history=[];system;shots;stop;linebreaks;afterShot;prefix;_extraSystem="";_extraAssistant="";_replacePrompt="";_replaceSystem="";constructor(s){let t;t="string"==typeof s?this._load(s):s,this.id=t.id,this.name=t.name,this.user=t.user,this.assistant=t.assistant,this.system=t.system,this.shots=t.shots,this.stop=t.stop,this.linebreaks=t.linebreaks,this.afterShot=t.afterShot,this.prefix=t.prefix}cloneTo(s,t=!0){const a=new e(s);return t&&this?.shots&&this.shots.forEach((s=>{a.addShot(s.user,s.assistant)})),this._extraSystem.length>0&&a.afterSystem(this._extraSystem),this._replaceSystem.length>0&&a.replaceSystem(this._replaceSystem),this._extraAssistant.length>0&&a.afterAssistant(this._extraAssistant),this._replacePrompt.length>0&&a.replacePrompt(this._replacePrompt),a}toJson(){const s={id:this.id,name:this.name,user:this.user,assistant:this.assistant};return this?.prefix&&(s.prefix=this.prefix),this?.system&&(s.system=this.system),this?.shots&&(s.shots=this.shots),this?.afterShot&&(s.afterShot=this.afterShot),this?.stop&&(s.stop=this.stop),this?.linebreaks&&(s.linebreaks=this.linebreaks),s}replaceSystem(s){return this.system?(this._replaceSystem=s,this):this}afterSystem(s){return this.system?(this._extraSystem=s,this):this}afterAssistant(s){return this._extraAssistant=s,this}replacePrompt(s){return this._replacePrompt=s,this}addShot(s,t){this?.shots||(this.shots=[]);let e=t;return this.shots.push({user:s,assistant:e}),this}addShots(s){return s.forEach((s=>this.addShot(s.user,s.assistant))),this}renderShot(s){const t=[];t.push(this._buildUserBlock(s.user));let e=s.assistant;return 
this.afterShot?e+=this.afterShot:e+="\n\n",t.push(this._buildAssistantBlock(e)),t.join("")}render(s=!1){const t=new Array;this.prefix&&t.push(this.prefix);const e=this._buildSystemBlock(s);if(e.length>0&&(t.push(e),this?.linebreaks?.system&&t.push("\n".repeat(this.linebreaks.system))),this?.shots)for(const s of this.shots)t.push(this.renderShot(s));for(const s of this.history)t.push(this.renderShot(s));return t.push(this._buildUserBlock()),t.push(this._buildAssistantBlock()),t.join("")}prompt(s){return this.render().replace("{prompt}",s)}pushToHistory(s){return this.history.push(s),this}_buildSystemBlock(s){let t="";return this?.system?(this._replaceSystem&&(this.system.message=this._replaceSystem),this.system?.message?(t=this.system.schema.replace("{system}",this.system.message),this._extraSystem&&(t+=this._extraSystem)):s||(t=this.system.schema),t):""}_buildUserBlock(s){let t=[],e=this.user;return this._replacePrompt.length>0&&(e=e.replace("{prompt}",this._replacePrompt)),t.push(e),this?.linebreaks?.user&&t.push("\n".repeat(this.linebreaks.user)),s&&(t[0]=e.replace("{prompt}",s)),t.join("")}_buildAssistantBlock(s){let t=[],e=this.assistant;return this._extraAssistant.length>0&&(e+=this._extraAssistant),t.push(e),this?.linebreaks?.assistant&&t.push("\n".repeat(this.linebreaks.assistant)),s&&t.push(s),t.join("")}_load(s){try{if(s in t)return t[s];throw new Error(`Template ${s} not found`)}catch(t){throw new Error(`Error loading template ${s}: ${t}`)}}}return s.PromptTemplate=e,s.templates=t,s}({});
1
+ // Minified IIFE bundle: builds a map `t` of prompt-template definitions (alpaca, llama,
+ // mistral, chatml, zephyr, etc.) and a class `e` (published as `PromptTemplate`), then
+ // exposes both on the export object: `s.PromptTemplate = e; s.templates = t`.
+ // PromptTemplate: constructor accepts a template id string (resolved via `_load`, which
+ // throws if the id is not a key of `t`) or a template object; `render`/`prompt` assemble
+ // prefix + system block + few-shot examples + history + user/assistant blocks, and
+ // `cloneTo` copies shots plus the `_extra*`/`_replace*` customizations to a new instance.
+ // NOTE(review): the raw line breaks below fall inside double-quoted string literals and
+ // after a bare `return` — invalid JS if literal, so presumably a diff-viewer wrapping
+ // artifact; confirm exact bytes against the published dist file before editing.
+ // NOTE(review): the `"openchat-correct"` entry has `id:"openchat-corect"` (key/id
+ // mismatch, looks like a typo) — confirm intent upstream before relying on `.id`.
+ var $tpl=function(s){"use strict";const t={none:{id:"none",name:"No template",user:"{prompt}",assistant:""},alpaca:{id:"alpaca",name:"Alpaca",system:{schema:"{system}",message:"Below is an instruction that describes a task. Write a response that appropriately completes the request."},user:"### Instruction:\n{prompt}",assistant:"### Response:",linebreaks:{system:2,user:2}},llama:{id:"llama",name:"Llama",system:{schema:"[INST] <<SYS>>\n{system}\n<</SYS>>",message:"You are a helpful, respectful and honest assistant. Always answer as helpfully as possible\n\nIf a question does not make any sense, or is not factually coherent, explain why instead of answering something not correct. If you don't know the answer to a question, please don't share false information."},user:"{prompt}",assistant:" [/INST] ",linebreaks:{system:2,user:0},prefix:"<s>",stop:["</s>"]},mistral:{id:"mistral",name:"Mistral",user:"[INST] {prompt}",assistant:" [/INST]",stop:["</s>"],afterShot:"\n",prefix:"<s>"},orca:{id:"orca",name:"Orca",system:{schema:"### System:\n{system}",message:"You are an AI assistant that follows instruction extremely well. 
Help as much as you can."},user:"### User:\n{prompt}",assistant:"### Response:",linebreaks:{system:2,user:2}},vicuna:{id:"vicuna",name:"Vicuna",user:"USER: {prompt}",assistant:"### ASSISTANT:",linebreaks:{user:2}},vicuna_system:{id:"vicuna_system",name:"Vicuna system",system:{schema:"SYSTEM: {system}"},user:"USER: {prompt}",assistant:"### ASSISTANT:",linebreaks:{system:2,user:2}},wizard_vicuna:{id:"wizard_vicuna",name:"Wizard Vicuna",user:"### Human:\n{prompt}",assistant:"### ASSISTANT:",linebreaks:{user:2},stop:["<|endoftext|>"]},guanaco:{id:"guanaco",name:"Guanaco",user:"### Human: {prompt}",assistant:"### Assistant:",linebreaks:{user:1}},chatml:{id:"chatml",name:"ChatMl",system:{schema:"<|im_start|>system\n{system}\n<|im_end|>"},user:"<|im_start|>user\n{prompt}<|im_end|>",assistant:"<|im_start|>assistant",linebreaks:{system:1,user:1,assistant:1},stop:["<|im_end|>"],afterShot:" <|im_end|>"},zephyr:{id:"zephyr",name:"Zephyr",system:{schema:"<|system|>\n{system}</s>"},user:"<|user|>\n{prompt}</s>",assistant:"<|assistant|>",linebreaks:{system:1,user:1,assistant:1},afterShot:"\n"},"synthia-cot":{id:"synthia-cot",name:"Synthia CoT",system:{schema:"SYSTEM: {system}",message:"Elaborate on the topic using a Tree of Thoughts and backtrack when necessary to construct a clear, cohesive Chain of Thought reasoning. 
Always answer without hesitation."},user:"USER: {prompt}",assistant:"ASSISTANT:",linebreaks:{system:1,user:1}},wizardlm:{id:"wizardlm",name:"WizardLM",system:{schema:"{system}",message:"You are a helpful AI assistant."},user:"USER: {prompt}",assistant:"ASSISTANT:",linebreaks:{user:1}},openchat:{id:"openchat",name:"OpenChat",user:"GPT4 User: {prompt}<|end_of_turn|>",assistant:"GPT4 Assistant:",stop:["<|end_of_turn|>"]},"openchat-correct":{id:"openchat-corect",name:"OpenChat correct",user:"GPT4 Correct User: {prompt}<|end_of_turn|>",assistant:"GPT4 Correct Assistant:",stop:["<|end_of_turn|>"]},human_response:{id:"human_response",name:"Human response",user:"### HUMAN:\n{prompt}",assistant:"### RESPONSE:",linebreaks:{user:2,assistant:1}},minichat:{id:"minichat",name:"Minichat",user:"[|User|] {prompt} </s>",assistant:"[|Assistant|]",stop:["</s>","[|User|]"],afterShot:"\n",prefix:"<s> "},phi:{id:"phi",name:"Phi",user:"Instruct: {prompt}",assistant:"Output:",linebreaks:{user:1},stop:["</s>","Instruct:"]},deepseek:{id:"deepseek",name:"Deepseek",system:{schema:"{system}",message:"You are an AI programming assistant, utilizing the DeepSeek Coder model, developed by DeepSeek Company, and you only answer questions related to computer science. For politically sensitive questions, security and privacy issues, and other non-computer science questions, you will refuse to answer."},afterShot:"\n",user:"### Instruction:\n{prompt}",assistant:"### Response:",linebreaks:{user:1,system:1},stop:["<|EOT|>","### Instruction:"]},opencodeinterpreter:{id:"opencodeinterpreter",name:"Open code interpreter",user:"<|User|>\n{prompt}",assistant:"<|Assistant|>",linebreaks:{user:2},stop:["<|EOT|>","<|User|>"]},cerebrum:{id:"cerebrum",name:"Cerebrum",system:{schema:"{system}",message:"A chat between a user and a thinking artificial intelligence assistant. 
The assistant describes its thought process and gives helpful and detailed answers to the user's questions."},user:"User: {prompt}",assistant:"Ai:",linebreaks:{user:1},prefix:"<s>",stop:["</s>"]},"command-r":{id:"command-r",name:"Command-R",user:"<|START_OF_TURN_TOKEN|><|USER_TOKEN|>{prompt}<|END_OF_TURN_TOKEN|>",assistant:"<|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|>",prefix:"<BOS_TOKEN>",stop:["<|END_OF_TURN_TOKEN|>"],linebreaks:{user:1},system:{schema:"<|START_OF_TURN_TOKEN|><|SYSTEM_TOKEN|>{system}<|END_OF_TURN_TOKEN|>"}}};class e{id;name;user;assistant;history=[];system;shots;stop;linebreaks;afterShot;prefix;_extraSystem="";_extraAssistant="";_replacePrompt="";_replaceSystem="";constructor(s){let t;t="string"==typeof s?this._load(s):s,this.id=t.id,this.name=t.name,this.user=t.user,this.assistant=t.assistant,this.system=t.system,this.shots=t.shots,this.stop=t.stop,this.linebreaks=t.linebreaks,this.afterShot=t.afterShot,this.prefix=t.prefix}cloneTo(s,t=!0){const a=new e(s);return t&&this?.shots&&this.shots.forEach((s=>{a.addShot(s.user,s.assistant)})),this._extraSystem.length>0&&a.afterSystem(this._extraSystem),this._replaceSystem.length>0&&a.replaceSystem(this._replaceSystem),this._extraAssistant.length>0&&a.afterAssistant(this._extraAssistant),this._replacePrompt.length>0&&a.replacePrompt(this._replacePrompt),a}toJson(){const s={id:this.id,name:this.name,user:this.user,assistant:this.assistant};return this?.prefix&&(s.prefix=this.prefix),this?.system&&(s.system=this.system),this?.shots&&(s.shots=this.shots),this?.afterShot&&(s.afterShot=this.afterShot),this?.stop&&(s.stop=this.stop),this?.linebreaks&&(s.linebreaks=this.linebreaks),s}replaceSystem(s){return this.system?(this._replaceSystem=s,this):this}afterSystem(s){return this.system?(this._extraSystem=s,this):this}afterAssistant(s){return this._extraAssistant=s,this}replacePrompt(s){return this._replacePrompt=s,this}addShot(s,t){this?.shots||(this.shots=[]);let e=t;return 
this.shots.push({user:s,assistant:e}),this}addShots(s){return s.forEach((s=>this.addShot(s.user,s.assistant))),this}renderShot(s){const t=[];t.push(this._buildUserBlock(s.user));let e=s.assistant;return this.afterShot?e+=this.afterShot:e+="\n\n",t.push(this._buildAssistantBlock(e)),t.join("")}render(s=!1){const t=new Array;this.prefix&&t.push(this.prefix);const e=this._buildSystemBlock(s);if(e.length>0&&(t.push(e),this?.linebreaks?.system&&t.push("\n".repeat(this.linebreaks.system))),this?.shots)for(const s of this.shots)t.push(this.renderShot(s));for(const s of this.history)t.push(this.renderShot(s));return t.push(this._buildUserBlock()),t.push(this._buildAssistantBlock()),t.join("")}prompt(s){return this.render().replace("{prompt}",s)}pushToHistory(s){return this.history.push(s),this}_buildSystemBlock(s){let t="";return this?.system?(this._replaceSystem&&(this.system.message=this._replaceSystem),this.system?.message?(t=this.system.schema.replace("{system}",this.system.message),this._extraSystem&&(t+=this._extraSystem)):s||(t=this.system.schema),t):""}_buildUserBlock(s){let t=[],e=this.user;return this._replacePrompt.length>0&&(e=e.replace("{prompt}",this._replacePrompt)),t.push(e),this?.linebreaks?.user&&t.push("\n".repeat(this.linebreaks.user)),s&&(t[0]=e.replace("{prompt}",s)),t.join("")}_buildAssistantBlock(s){let t=[],e=this.assistant;return this._extraAssistant.length>0&&(e+=this._extraAssistant),t.push(e),this?.linebreaks?.assistant&&t.push("\n".repeat(this.linebreaks.assistant)),s&&t.push(s),t.join("")}_load(s){try{if(s in t)return t[s];throw new Error(`Template ${s} not found`)}catch(t){throw new Error(`Error loading template ${s}: ${t}`)}}}return s.PromptTemplate=e,s.templates=t,s}({});
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "modprompt",
3
- "version": "0.5.0",
3
+ "version": "0.6.1",
4
4
  "description": "Prompt templates for language models",
5
5
  "license": "MIT",
6
6
  "scripts": {