modprompt 0.4.0 → 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -6,7 +6,7 @@ A collection of prompt templates for language models
6
6
 
7
7
  - Classic templates formats for different models
8
8
  - Easily modify and adapt templates on-the-fly
9
- - Few shots support
9
+ - Few shots and conversation history support
10
10
 
11
11
  :books: [Api doc](https://synw.github.io/modprompt/)
12
12
 
@@ -77,7 +77,7 @@ The template have system messages support if the original template supports it.
77
77
  To replace a system message:
78
78
 
79
79
  ```js
80
- tpl.system("You are a javascript specialist");
80
+ tpl.replaceSystem("You are a javascript specialist");
81
81
  ```
82
82
 
83
83
  Rendering for an Alpaca template:
package/dist/cls.d.ts CHANGED
@@ -1,4 +1,10 @@
1
1
  import { LmTemplate, PromptBlock, TurnBlock, SpacingSlots, HistoryTurn } from "./interfaces";
2
+ /**
3
+ * Represents a modified language model template.
4
+ *
5
+ * @example
6
+ * const tpl = new PromptTemplate('alpaca');
7
+ */
2
8
  declare class PromptTemplate {
3
9
  id: string;
4
10
  name: string;
@@ -15,17 +21,147 @@ declare class PromptTemplate {
15
21
  _extraAssistant: string;
16
22
  _replacePrompt: string;
17
23
  _replaceSystem: string;
24
+ /**
25
+ * Constructs a new `PromptTemplate` instance.
26
+ *
27
+ * @param template - Either the name of the template to load or an instance of `LmTemplate`.
28
+ *
29
+ * @example
30
+ * const tpl = new PromptTemplate('alpaca');
31
+ */
18
32
  constructor(template: string | LmTemplate);
33
+ /**
34
+ * Clones the current `PromptTemplate` instance to a new instance of `PromptTemplate`.
35
+ *
36
+ * This function creates a new `PromptTemplate` instance with the same state as the current instance.
37
+ * It is useful when you want to work with a copy of the current template without modifying the original one.
38
+ *
39
+ * @param {string | LmTemplate} template - The id or template instance of the new `PromptTemplate` to make
40
+ * @param {boolean} keepShots - Keep the shots for the template instance: this will also clone the shots
41
+ * @returns {PromptTemplate} - A new `PromptTemplate` instance with the same state as the current one.
42
+ *
43
+ * @example
44
+ * const tpl = new PromptTemplate('alpaca');
45
+ * const clonedTpl = tpl.cloneTo('chatml');
46
+ * console.log(clonedTpl);
47
+ */
19
48
  cloneTo(template: string | LmTemplate, keepShots?: boolean): PromptTemplate;
49
+ /**
50
+ * Converts the current `PromptTemplate` instance to a JSON object.
51
+ *
52
+ * This function serializes the current state of the `PromptTemplate` instance into a JSON object,
53
+ * which can be used for storing the template or transmitting it over a network.
54
+ *
55
+ * @returns {LmTemplate} - A JSON object representing the current state of the `PromptTemplate`.
56
+ *
57
+ * @example
58
+ * const tpl = new PromptTemplate('alpaca');
59
+ * const json = tpl.toJson();
60
+ * console.log(json);
61
+ */
20
62
  toJson(): LmTemplate;
63
+ /**
64
+ * Replaces the system block with a given message.
65
+ *
66
+ * @param msg - The message to replace the system block with.
67
+ * @returns A reference to the current `PromptTemplate` instance for chaining.
68
+ *
69
+ * @example
70
+ * tpl.replaceSystem('You are a javascript expert');
71
+ */
21
72
  replaceSystem(msg: string): PromptTemplate;
73
+ /**
74
+ * Appends a given message after the system message.
75
+ *
76
+ * @param msg - The message to append.
77
+ * @returns A reference to the current `PromptTemplate` instance for chaining.
78
+ *
79
+ * @example
80
+ * tpl.afterSystem('You are a javascript expert');
81
+ */
22
82
  afterSystem(msg: string): PromptTemplate;
83
+ /**
84
+ * Appends a given message after the assistant prompt token.
85
+ *
86
+ * @param msg - The message to append.
87
+ * @returns A reference to the current `PromptTemplate` instance for chaining.
88
+ *
89
+ * @example
90
+ * tpl.afterAssistant('( answer in json )');
91
+ */
23
92
  afterAssistant(msg: string): PromptTemplate;
93
+ /**
94
+ * Replaces the `{prompt}` placeholder in the user message with a given message.
95
+ *
96
+ * @param msg - The message to replace the placeholder with.
97
+ * @returns A reference to the current `PromptTemplate` instance for chaining.
98
+ *
99
+ * @example
100
+ * tpl.replacePrompt(fix this invalid json:\n\n```json\n{prompt}\n```);
101
+ */
24
102
  replacePrompt(msg: string): PromptTemplate;
103
+ /**
104
+ * Adds a new shot (a user-assistant interaction) to the template.
105
+ *
106
+ * @param user - The user's message.
107
+ * @param assistant - The assistant's response.
108
+ * @returns A reference to the current `PromptTemplate` instance for chaining.
109
+ *
110
+ * @example
111
+ * tpl.addShot('Is it raining?', 'No, it is sunny.');
112
+ */
25
113
  addShot(user: string, assistant: string): PromptTemplate;
114
+ /**
115
+ * Adds multiple shots (user-assistant interactions) to the template.
116
+ *
117
+ * This function allows you to add multiple turns to the conversation. Each turn is represented by an object
118
+ * with a 'user' property (the user's message) and an 'assistant' property (the assistant's response).
119
+ *
120
+ * @param {Array<TurnBlock>} shots - An array of objects, where each object represents a user-assistant interaction.
121
+ * @returns {PromptTemplate} - A reference to the current `PromptTemplate` instance for chaining.
122
+ *
123
+ * @example
124
+ * const tpl = new PromptTemplate('alpaca');
125
+ * tpl.addShots([
126
+ * { user: 'What is the weather like?', assistant: 'It is sunny today!' },
127
+ * { user: 'What is the weather like tomorrow?', assistant: 'I am sorry, but I can\'t predict the future.' }
128
+ * ]);
129
+ */
130
+ addShots(shots: Array<TurnBlock>): PromptTemplate;
131
+ /**
132
+ * Render a turn block
133
+ *
134
+ * @param {TurnBlock | HistoryTurn} shot the shot to render
135
+ * @returns {string} the rendered text
136
+ */
26
137
  renderShot(shot: TurnBlock | HistoryTurn): string;
138
+ /**
139
+ * Renders the template into a string representation.
140
+ *
141
+ * @returns The rendered template as a string.
142
+ *
143
+ * @example
144
+ * const rendered = tpl.render();
145
+ * console.log(rendered);
146
+ */
27
147
  render(skip_empty_system?: boolean): string;
148
+ /**
149
+ * Renders the template with the provided message replacing the `{prompt}` placeholder.
150
+ *
151
+ * @param msg - The message to use for replacing the `{prompt}` placeholder.
152
+ * @returns The rendered template with the provided message.
153
+ *
154
+ * @example
155
+ * const prompted = tpl.prompt("list the planets in the solar system");
156
+ * console.log(prompted);
157
+ */
28
158
  prompt(msg: string): string;
159
+ /**
160
+ * Push a turn into history
161
+ *
162
+ * @param {HistoryTurn} turn the history turn
163
+ * @returns {PromptTemplate}
164
+ */
29
165
  pushToHistory(turn: HistoryTurn): PromptTemplate;
30
166
  private _buildSystemBlock;
31
167
  private _buildUserBlock;
@@ -1,32 +1,157 @@
1
+ /**
2
+ * Defines the spacing (in terms of line breaks) to be applied between different parts of the conversation.
3
+ *
4
+ * @interface SpacingSlots
5
+ * @typedef {SpacingSlots}
6
+ *
7
+ * @example
8
+ * const spacingExample: SpacingSlots = {
9
+ * system: 2,
10
+ * user: 1,
11
+ * assistant: 1
12
+ * };
13
+ */
1
14
  interface SpacingSlots {
15
+ /**
16
+ * Number of line breaks to be applied after the system message.
17
+ */
2
18
  system?: number;
19
+ /**
20
+ * Number of line breaks to be applied after the user message.
21
+ */
3
22
  user?: number;
23
+ /**
24
+ * Number of line breaks to be applied after the assistant message.
25
+ */
4
26
  assistant?: number;
5
27
  }
28
+ /**
29
+ * Represents a block of system-level prompts or instructions in the conversation.
30
+ *
31
+ * @example
32
+ * const promptExample: PromptBlock = {
33
+ * schema: '### System: {system}',
34
+ * message: 'Some system message'
35
+ * };
36
+ */
6
37
  interface PromptBlock {
38
+ /**
39
+ * The schema or format for the system message.
40
+ *
41
+ * Can include placeholders like `{system}` which can be programmatically replaced with actual messages later.
42
+ */
7
43
  schema: string;
44
+ /**
45
+ * Optional default message content for the system.
46
+ *
47
+ * Used if a dynamic value isn't provided for `{system}` placeholder.
48
+ */
8
49
  message?: string;
9
50
  }
51
+ /**
52
+ * Represents a single turn in a conversation, consisting of a user message followed by an assistant response.
53
+ *
54
+ * @example
55
+ * const turnExample: TurnBlock = {
56
+ * user: 'What\'s the weather like?',
57
+ * assistant: 'It\'s sunny today!'
58
+ * };
59
+ */
10
60
  interface TurnBlock {
61
+ /**
62
+ * The message content from the user.
63
+ */
11
64
  user: string;
65
+ /**
66
+ * The corresponding response from the assistant.
67
+ */
12
68
  assistant: string;
13
69
  }
70
+ /**
71
+ * Represents a template for language modeling, detailing the structure and interaction elements of a conversation.
72
+ *
73
+ * @example
74
+ * const sampleTemplate: LmTemplate = {
75
+ * id: "alpaca",
76
+ * name: "Alpaca",
77
+ * system: {
78
+ * schema: "{system}",
79
+ * message: "Below is an instruction that describes a task. Write a response that appropriately completes the request.",
80
+ * },
81
+ * user: "### Instruction:\n{prompt}",
82
+ * assistant: "### Response:",
83
+ * linebreaks: {
84
+ * system: 2,
85
+ * user: 2,
86
+ * }
87
+ * };
88
+ */
14
89
  interface LmTemplate {
90
+ /**
91
+ * The id slug of the template.
92
+ */
15
93
  id: string;
94
+ /**
95
+ * The name of the template.
96
+ */
16
97
  name: string;
98
+ /**
99
+ * The default message template for the user.
100
+ *
101
+ * Includes a `{prompt}` placeholder which can be programmatically replaced later.
102
+ */
17
103
  user: string;
104
+ /**
105
+ * The default message template for the assistant.
106
+ */
18
107
  assistant: string;
108
+ /**
109
+ * Optional prompt block that represents system-level messages or instructions.
110
+ */
19
111
  system?: PromptBlock;
112
+ /**
113
+ * Optional array of turn blocks representing back-and-forths between the user and the assistant.
114
+ *
115
+ * Useful for simulating multi-turn interactions.
116
+ */
20
117
  shots?: Array<TurnBlock>;
118
+ /**
119
+ * Optional array of strings that signal the end of a conversation.
120
+ *
121
+ * These strings can be used to detect when a conversation should be terminated.
122
+ */
21
123
  stop?: Array<string>;
124
+ /**
125
+ * Optional specifications for line breaks between different message types.
126
+ *
127
+ * This can be used to format the rendered conversation.
128
+ */
22
129
  linebreaks?: SpacingSlots;
130
+ /**
131
+ * String to display after a shot
132
+ */
23
133
  afterShot?: string;
134
+ /**
135
+ * A prefix like a bos token to insert before content
136
+ */
24
137
  prefix?: string;
25
138
  }
139
+ /**
140
+ * Image data
141
+ *
142
+ * @interface ImgData
143
+ * @typedef {ImgData}
144
+ */
26
145
  interface ImgData {
27
146
  id: number;
28
147
  data: string;
29
148
  }
149
+ /**
150
+ * A history turn
151
+ *
152
+ * @interface HistoryTurn
153
+ * @typedef {HistoryTurn}
154
+ */
30
155
  interface HistoryTurn {
31
156
  user: string;
32
157
  assistant: string;
package/dist/mod.es.mjs CHANGED
@@ -184,10 +184,11 @@ const templates = {
184
184
  "minichat": {
185
185
  "id": "minichat",
186
186
  "name": "Minichat",
187
- "user": "<s> [|User|] {prompt} </s>",
187
+ "user": "[|User|] {prompt} </s>",
188
188
  "assistant": "[|Assistant|]",
189
189
  "stop": ["</s>", "[|User|]"],
190
190
  "afterShot": "\n",
191
+ "prefix": "<s> "
191
192
  },
192
193
  "phi": {
193
194
  "id": "phi",
@@ -206,6 +207,7 @@ const templates = {
206
207
  "schema": "{system}",
207
208
  "message": "You are an AI programming assistant, utilizing the DeepSeek Coder model, developed by DeepSeek Company, and you only answer questions related to computer science. For politically sensitive questions, security and privacy issues, and other non-computer science questions, you will refuse to answer."
208
209
  },
210
+ "afterShot": "\n",
209
211
  "user": "### Instruction:\n{prompt}",
210
212
  "assistant": "### Response:",
211
213
  "linebreaks": {
@@ -213,9 +215,40 @@ const templates = {
213
215
  "system": 1,
214
216
  },
215
217
  "stop": ["<|EOT|>", "### Instruction:"]
216
- }
218
+ },
219
+ "opencodeinterpreter": {
220
+ "id": "opencodeinterpreter",
221
+ "name": "Open code interpreter",
222
+ "user": "<|User|>\n{prompt}",
223
+ "assistant": "<|Assistant|>",
224
+ "linebreaks": {
225
+ "user": 2
226
+ },
227
+ "stop": ["<|EOT|>", "<|User|>"]
228
+ },
229
+ "cerebrum": {
230
+ "id": "cerebrum",
231
+ "name": "Cerebrum",
232
+ "system": {
233
+ "schema": "{system}",
234
+ "message": "A chat between a user and a thinking artificial intelligence assistant. The assistant describes its thought process and gives helpful and detailed answers to the user's questions."
235
+ },
236
+ "user": "User: {prompt}",
237
+ "assistant": "Ai:",
238
+ "linebreaks": {
239
+ "user": 1
240
+ },
241
+ "prefix": "<s>",
242
+ "stop": ["</s>"]
243
+ },
217
244
  };
218
245
 
246
+ /**
247
+ * Represents a modified language model template.
248
+ *
249
+ * @example
250
+ * const tpl = new PromptTemplate('alpaca');
251
+ */
219
252
  class PromptTemplate {
220
253
  id;
221
254
  name;
@@ -228,10 +261,19 @@ class PromptTemplate {
228
261
  linebreaks;
229
262
  afterShot;
230
263
  prefix;
264
+ // internal state
231
265
  _extraSystem = "";
232
266
  _extraAssistant = "";
233
267
  _replacePrompt = "";
234
268
  _replaceSystem = "";
269
+ /**
270
+ * Constructs a new `PromptTemplate` instance.
271
+ *
272
+ * @param template - Either the name of the template to load or an instance of `LmTemplate`.
273
+ *
274
+ * @example
275
+ * const tpl = new PromptTemplate('alpaca');
276
+ */
235
277
  constructor(template) {
236
278
  let tpl;
237
279
  if (typeof template == "string") {
@@ -251,6 +293,21 @@ class PromptTemplate {
251
293
  this.afterShot = tpl.afterShot;
252
294
  this.prefix = tpl.prefix;
253
295
  }
296
+ /**
297
+ * Clones the current `PromptTemplate` instance to a new instance of `PromptTemplate`.
298
+ *
299
+ * This function creates a new `PromptTemplate` instance with the same state as the current instance.
300
+ * It is useful when you want to work with a copy of the current template without modifying the original one.
301
+ *
302
+ * @param {string | LmTemplate} template - The id or template instance of the new `PromptTemplate` to make
303
+ * @param {boolean} keepShots - Keep the shots for the template instance: this will also clone the shots
304
+ * @returns {PromptTemplate} - A new `PromptTemplate` instance with the same state as the current one.
305
+ *
306
+ * @example
307
+ * const tpl = new PromptTemplate('alpaca');
308
+ * const clonedTpl = tpl.cloneTo('chatml');
309
+ * console.log(clonedTpl);
310
+ */
254
311
  cloneTo(template, keepShots = true) {
255
312
  const tpl = new PromptTemplate(template);
256
313
  if (keepShots) {
@@ -274,6 +331,19 @@ class PromptTemplate {
274
331
  }
275
332
  return tpl;
276
333
  }
334
+ /**
335
+ * Converts the current `PromptTemplate` instance to a JSON object.
336
+ *
337
+ * This function serializes the current state of the `PromptTemplate` instance into a JSON object,
338
+ * which can be used for storing the template or transmitting it over a network.
339
+ *
340
+ * @returns {LmTemplate} - A JSON object representing the current state of the `PromptTemplate`.
341
+ *
342
+ * @example
343
+ * const tpl = new PromptTemplate('alpaca');
344
+ * const json = tpl.toJson();
345
+ * console.log(json);
346
+ */
277
347
  toJson() {
278
348
  const res = {
279
349
  id: this.id,
@@ -301,6 +371,15 @@ class PromptTemplate {
301
371
  }
302
372
  return res;
303
373
  }
374
+ /**
375
+ * Replaces the system block with a given message.
376
+ *
377
+ * @param msg - The message to replace the system block with.
378
+ * @returns A reference to the current `PromptTemplate` instance for chaining.
379
+ *
380
+ * @example
381
+ * tpl.replaceSystem('You are a javascript expert');
382
+ */
304
383
  replaceSystem(msg) {
305
384
  if (!this.system) {
306
385
  return this;
@@ -308,6 +387,15 @@ class PromptTemplate {
308
387
  this._replaceSystem = msg;
309
388
  return this;
310
389
  }
390
+ /**
391
+ * Appends a given message after the system message.
392
+ *
393
+ * @param msg - The message to append.
394
+ * @returns A reference to the current `PromptTemplate` instance for chaining.
395
+ *
396
+ * @example
397
+ * tpl.afterSystem('You are a javascript expert');
398
+ */
311
399
  afterSystem(msg) {
312
400
  if (!this.system) {
313
401
  return this;
@@ -315,14 +403,42 @@ class PromptTemplate {
315
403
  this._extraSystem = msg;
316
404
  return this;
317
405
  }
406
+ /**
407
+ * Appends a given message after the assistant prompt token.
408
+ *
409
+ * @param msg - The message to append.
410
+ * @returns A reference to the current `PromptTemplate` instance for chaining.
411
+ *
412
+ * @example
413
+ * tpl.afterAssistant('( answer in json )');
414
+ */
318
415
  afterAssistant(msg) {
319
416
  this._extraAssistant = msg;
320
417
  return this;
321
418
  }
419
+ /**
420
+ * Replaces the `{prompt}` placeholder in the user message with a given message.
421
+ *
422
+ * @param msg - The message to replace the placeholder with.
423
+ * @returns A reference to the current `PromptTemplate` instance for chaining.
424
+ *
425
+ * @example
426
+ * tpl.replacePrompt(fix this invalid json:\n\n```json\n{prompt}\n```);
427
+ */
322
428
  replacePrompt(msg) {
323
429
  this._replacePrompt = msg;
324
430
  return this;
325
431
  }
432
+ /**
433
+ * Adds a new shot (a user-assistant interaction) to the template.
434
+ *
435
+ * @param user - The user's message.
436
+ * @param assistant - The assistant's response.
437
+ * @returns A reference to the current `PromptTemplate` instance for chaining.
438
+ *
439
+ * @example
440
+ * tpl.addShot('Is it raining?', 'No, it is sunny.');
441
+ */
326
442
  addShot(user, assistant) {
327
443
  if (!this?.shots) {
328
444
  this.shots = [];
@@ -334,6 +450,32 @@ class PromptTemplate {
334
450
  });
335
451
  return this;
336
452
  }
453
+ /**
454
+ * Adds multiple shots (user-assistant interactions) to the template.
455
+ *
456
+ * This function allows you to add multiple turns to the conversation. Each turn is represented by an object
457
+ * with a 'user' property (the user's message) and an 'assistant' property (the assistant's response).
458
+ *
459
+ * @param {Array<TurnBlock>} shots - An array of objects, where each object represents a user-assistant interaction.
460
+ * @returns {PromptTemplate} - A reference to the current `PromptTemplate` instance for chaining.
461
+ *
462
+ * @example
463
+ * const tpl = new PromptTemplate('alpaca');
464
+ * tpl.addShots([
465
+ * { user: 'What is the weather like?', assistant: 'It is sunny today!' },
466
+ * { user: 'What is the weather like tomorrow?', assistant: 'I am sorry, but I can\'t predict the future.' }
467
+ * ]);
468
+ */
469
+ addShots(shots) {
470
+ shots.forEach((s) => this.addShot(s.user, s.assistant));
471
+ return this;
472
+ }
473
+ /**
474
+ * Render a turn block
475
+ *
476
+ * @param {TurnBlock | HistoryTurn} shot the shot to render
477
+ * @returns {string} the rendered text
478
+ */
337
479
  renderShot(shot) {
338
480
  const buf = [];
339
481
  buf.push(this._buildUserBlock(shot.user));
@@ -347,11 +489,22 @@ class PromptTemplate {
347
489
  buf.push(this._buildAssistantBlock(_assistantMsg));
348
490
  return buf.join("");
349
491
  }
492
+ /**
493
+ * Renders the template into a string representation.
494
+ *
495
+ * @returns The rendered template as a string.
496
+ *
497
+ * @example
498
+ * const rendered = tpl.render();
499
+ * console.log(rendered);
500
+ */
350
501
  render(skip_empty_system = false) {
351
502
  const buf = new Array();
503
+ // prefix
352
504
  if (this.prefix) {
353
505
  buf.push(this.prefix);
354
506
  }
507
+ // system prompt if any
355
508
  const _systemBlock = this._buildSystemBlock(skip_empty_system);
356
509
  if (_systemBlock.length > 0) {
357
510
  buf.push(_systemBlock);
@@ -359,21 +512,42 @@ class PromptTemplate {
359
512
  buf.push("\n".repeat(this.linebreaks.system));
360
513
  }
361
514
  }
515
+ // shots
362
516
  if (this?.shots) {
363
517
  for (const shot of this.shots) {
364
518
  buf.push(this.renderShot(shot));
365
519
  }
366
520
  }
521
+ // history
367
522
  for (const turn of this.history) {
368
523
  buf.push(this.renderShot(turn));
369
524
  }
525
+ // user block
370
526
  buf.push(this._buildUserBlock());
527
+ // assistant block
371
528
  buf.push(this._buildAssistantBlock());
529
+ //console.log(buf)
372
530
  return buf.join("");
373
531
  }
532
+ /**
533
+ * Renders the template with the provided message replacing the `{prompt}` placeholder.
534
+ *
535
+ * @param msg - The message to use for replacing the `{prompt}` placeholder.
536
+ * @returns The rendered template with the provided message.
537
+ *
538
+ * @example
539
+ * const prompted = tpl.prompt("list the planets in the solar system");
540
+ * console.log(prompted);
541
+ */
374
542
  prompt(msg) {
375
543
  return this.render().replace("{prompt}", msg);
376
544
  }
545
+ /**
546
+ * Push a turn into history
547
+ *
548
+ * @param {HistoryTurn} turn the history turn
549
+ * @returns {PromptTemplate}
550
+ */
377
551
  pushToHistory(turn) {
378
552
  this.history.push(turn);
379
553
  return this;
@@ -399,6 +573,7 @@ class PromptTemplate {
399
573
  }
400
574
  _buildUserBlock(msg) {
401
575
  let buf = [];
576
+ // prompt replacement
402
577
  let _userBlock = this.user;
403
578
  if (this._replacePrompt.length > 0) {
404
579
  _userBlock = _userBlock.replace("{prompt}", this._replacePrompt);
@@ -408,6 +583,7 @@ class PromptTemplate {
408
583
  buf.push("\n".repeat(this.linebreaks.user));
409
584
  }
410
585
  if (msg) {
586
+ // this is a shot
411
587
  buf[0] = _userBlock.replace("{prompt}", msg);
412
588
  }
413
589
  return buf.join("");
@@ -423,6 +599,7 @@ class PromptTemplate {
423
599
  buf.push("\n".repeat(this.linebreaks.assistant));
424
600
  }
425
601
  if (msg) {
602
+ // this is a shot
426
603
  buf.push(msg);
427
604
  }
428
605
  return buf.join("");
@@ -430,6 +607,7 @@ class PromptTemplate {
430
607
  _load(name) {
431
608
  try {
432
609
  if (name in templates) {
610
+ //console.log("Loading", name)
433
611
  return templates[name];
434
612
  }
435
613
  else {
package/dist/mod.min.js CHANGED
@@ -1 +1 @@
1
- var $tpl=function(s){"use strict";const t={none:{id:"none",name:"No template",user:"{prompt}",assistant:""},alpaca:{id:"alpaca",name:"Alpaca",system:{schema:"{system}",message:"Below is an instruction that describes a task. Write a response that appropriately completes the request."},user:"### Instruction:\n{prompt}",assistant:"### Response:",linebreaks:{system:2,user:2}},llama:{id:"llama",name:"Llama",system:{schema:"[INST] <<SYS>>\n{system}\n<</SYS>>",message:"You are a helpful, respectful and honest assistant. Always answer as helpfully as possible\n\nIf a question does not make any sense, or is not factually coherent, explain why instead of answering something not correct. If you don't know the answer to a question, please don't share false information."},user:"{prompt}",assistant:" [/INST] ",linebreaks:{system:2,user:0},prefix:"<s>",stop:["</s>"]},mistral:{id:"mistral",name:"Mistral",user:"[INST] {prompt}",assistant:" [/INST]",stop:["</s>"],afterShot:"\n",prefix:"<s>"},orca:{id:"orca",name:"Orca",system:{schema:"### System:\n{system}",message:"You are an AI assistant that follows instruction extremely well. 
Help as much as you can."},user:"### User:\n{prompt}",assistant:"### Response:",linebreaks:{system:2,user:2}},vicuna:{id:"vicuna",name:"Vicuna",user:"USER: {prompt}",assistant:"### ASSISTANT:",linebreaks:{user:2}},vicuna_system:{id:"vicuna_system",name:"Vicuna system",system:{schema:"SYSTEM: {system}"},user:"USER: {prompt}",assistant:"### ASSISTANT:",linebreaks:{system:2,user:2}},wizard_vicuna:{id:"wizard_vicuna",name:"Wizard Vicuna",user:"### Human:\n{prompt}",assistant:"### ASSISTANT:",linebreaks:{user:2},stop:["<|endoftext|>"]},guanaco:{id:"guanaco",name:"Guanaco",user:"### Human: {prompt}",assistant:"### Assistant:",linebreaks:{user:1}},chatml:{id:"chatml",name:"ChatMl",system:{schema:"<|im_start|>system\n{system}\n<|im_end|>"},user:"<|im_start|>user\n{prompt}<|im_end|>",assistant:"<|im_start|>assistant",linebreaks:{system:1,user:1,assistant:1},stop:["<|im_end|>"],afterShot:" <|im_end|>"},zephyr:{id:"zephyr",name:"Zephyr",system:{schema:"<|system|>\n{system}</s>"},user:"<|user|>\n{prompt}</s>",assistant:"<|assistant|>",linebreaks:{system:1,user:1,assistant:1},afterShot:"\n"},"synthia-cot":{id:"synthia-cot",name:"Synthia CoT",system:{schema:"SYSTEM: {system}",message:"Elaborate on the topic using a Tree of Thoughts and backtrack when necessary to construct a clear, cohesive Chain of Thought reasoning. 
Always answer without hesitation."},user:"USER: {prompt}",assistant:"ASSISTANT:",linebreaks:{system:1,user:1}},wizardlm:{id:"wizardlm",name:"WizardLM",system:{schema:"{system}",message:"You are a helpful AI assistant."},user:"USER: {prompt}",assistant:"ASSISTANT:",linebreaks:{user:1}},openchat:{id:"openchat",name:"OpenChat",user:"GPT4 User: {prompt}<|end_of_turn|>",assistant:"GPT4 Assistant:",stop:["<|end_of_turn|>"]},"openchat-correct":{id:"openchat",name:"OpenChat",user:"GPT4 Correct User: {prompt}<|end_of_turn|>",assistant:"GPT4 Correct Assistant:",stop:["<|end_of_turn|>"]},human_response:{id:"human_response",name:"Human response",user:"### HUMAN:\n{prompt}",assistant:"### RESPONSE:",linebreaks:{user:2,assistant:1}},minichat:{id:"minichat",name:"Minichat",user:"<s> [|User|] {prompt} </s>",assistant:"[|Assistant|]",stop:["</s>","[|User|]"],afterShot:"\n"},phi:{id:"phi",name:"Phi",user:"Instruct: {prompt}",assistant:"Output:",linebreaks:{user:1},stop:["</s>","Instruct:"]},deepseek:{id:"deepseek",name:"Deepseek",system:{schema:"{system}",message:"You are an AI programming assistant, utilizing the DeepSeek Coder model, developed by DeepSeek Company, and you only answer questions related to computer science. 
For politically sensitive questions, security and privacy issues, and other non-computer science questions, you will refuse to answer."},user:"### Instruction:\n{prompt}",assistant:"### Response:",linebreaks:{user:1,system:1},stop:["<|EOT|>","### Instruction:"]}};class e{id;name;user;assistant;history=[];system;shots;stop;linebreaks;afterShot;prefix;_extraSystem="";_extraAssistant="";_replacePrompt="";_replaceSystem="";constructor(s){let t;t="string"==typeof s?this._load(s):s,this.id=t.id,this.name=t.name,this.user=t.user,this.assistant=t.assistant,this.system=t.system,this.shots=t.shots,this.stop=t.stop,this.linebreaks=t.linebreaks,this.afterShot=t.afterShot,this.prefix=t.prefix}cloneTo(s,t=!0){const a=new e(s);return t&&this?.shots&&this.shots.forEach((s=>{a.addShot(s.user,s.assistant)})),this._extraSystem.length>0&&a.afterSystem(this._extraSystem),this._replaceSystem.length>0&&a.replaceSystem(this._replaceSystem),this._extraAssistant.length>0&&a.afterAssistant(this._extraAssistant),this._replacePrompt.length>0&&a.replacePrompt(this._replacePrompt),a}toJson(){const s={id:this.id,name:this.name,user:this.user,assistant:this.assistant};return this?.prefix&&(s.prefix=this.prefix),this?.system&&(s.system=this.system),this?.shots&&(s.shots=this.shots),this?.afterShot&&(s.afterShot=this.afterShot),this?.stop&&(s.stop=this.stop),this?.linebreaks&&(s.linebreaks=this.linebreaks),s}replaceSystem(s){return this.system?(this._replaceSystem=s,this):this}afterSystem(s){return this.system?(this._extraSystem=s,this):this}afterAssistant(s){return this._extraAssistant=s,this}replacePrompt(s){return this._replacePrompt=s,this}addShot(s,t){this?.shots||(this.shots=[]);let e=t;return this.shots.push({user:s,assistant:e}),this}renderShot(s){const t=[];t.push(this._buildUserBlock(s.user));let e=s.assistant;return this.afterShot?e+=this.afterShot:e+="\n\n",t.push(this._buildAssistantBlock(e)),t.join("")}render(s=!1){const t=new Array;this.prefix&&t.push(this.prefix);const 
e=this._buildSystemBlock(s);if(e.length>0&&(t.push(e),this?.linebreaks?.system&&t.push("\n".repeat(this.linebreaks.system))),this?.shots)for(const s of this.shots)t.push(this.renderShot(s));for(const s of this.history)t.push(this.renderShot(s));return t.push(this._buildUserBlock()),t.push(this._buildAssistantBlock()),t.join("")}prompt(s){return this.render().replace("{prompt}",s)}pushToHistory(s){return this.history.push(s),this}_buildSystemBlock(s){let t="";return this?.system?(this._replaceSystem&&(this.system.message=this._replaceSystem),this.system?.message?(t=this.system.schema.replace("{system}",this.system.message),this._extraSystem&&(t+=this._extraSystem)):s||(t=this.system.schema),t):""}_buildUserBlock(s){let t=[],e=this.user;return this._replacePrompt.length>0&&(e=e.replace("{prompt}",this._replacePrompt)),t.push(e),this?.linebreaks?.user&&t.push("\n".repeat(this.linebreaks.user)),s&&(t[0]=e.replace("{prompt}",s)),t.join("")}_buildAssistantBlock(s){let t=[],e=this.assistant;return this._extraAssistant.length>0&&(e+=this._extraAssistant),t.push(e),this?.linebreaks?.assistant&&t.push("\n".repeat(this.linebreaks.assistant)),s&&t.push(s),t.join("")}_load(s){try{if(s in t)return t[s];throw new Error(`Template ${s} not found`)}catch(t){throw new Error(`Error loading template ${s}: ${t}`)}}}return s.PromptTemplate=e,s.templates=t,s}({});
1
+ var $tpl=function(s){"use strict";const t={none:{id:"none",name:"No template",user:"{prompt}",assistant:""},alpaca:{id:"alpaca",name:"Alpaca",system:{schema:"{system}",message:"Below is an instruction that describes a task. Write a response that appropriately completes the request."},user:"### Instruction:\n{prompt}",assistant:"### Response:",linebreaks:{system:2,user:2}},llama:{id:"llama",name:"Llama",system:{schema:"[INST] <<SYS>>\n{system}\n<</SYS>>",message:"You are a helpful, respectful and honest assistant. Always answer as helpfully as possible\n\nIf a question does not make any sense, or is not factually coherent, explain why instead of answering something not correct. If you don't know the answer to a question, please don't share false information."},user:"{prompt}",assistant:" [/INST] ",linebreaks:{system:2,user:0},prefix:"<s>",stop:["</s>"]},mistral:{id:"mistral",name:"Mistral",user:"[INST] {prompt}",assistant:" [/INST]",stop:["</s>"],afterShot:"\n",prefix:"<s>"},orca:{id:"orca",name:"Orca",system:{schema:"### System:\n{system}",message:"You are an AI assistant that follows instruction extremely well. 
Help as much as you can."},user:"### User:\n{prompt}",assistant:"### Response:",linebreaks:{system:2,user:2}},vicuna:{id:"vicuna",name:"Vicuna",user:"USER: {prompt}",assistant:"### ASSISTANT:",linebreaks:{user:2}},vicuna_system:{id:"vicuna_system",name:"Vicuna system",system:{schema:"SYSTEM: {system}"},user:"USER: {prompt}",assistant:"### ASSISTANT:",linebreaks:{system:2,user:2}},wizard_vicuna:{id:"wizard_vicuna",name:"Wizard Vicuna",user:"### Human:\n{prompt}",assistant:"### ASSISTANT:",linebreaks:{user:2},stop:["<|endoftext|>"]},guanaco:{id:"guanaco",name:"Guanaco",user:"### Human: {prompt}",assistant:"### Assistant:",linebreaks:{user:1}},chatml:{id:"chatml",name:"ChatMl",system:{schema:"<|im_start|>system\n{system}\n<|im_end|>"},user:"<|im_start|>user\n{prompt}<|im_end|>",assistant:"<|im_start|>assistant",linebreaks:{system:1,user:1,assistant:1},stop:["<|im_end|>"],afterShot:" <|im_end|>"},zephyr:{id:"zephyr",name:"Zephyr",system:{schema:"<|system|>\n{system}</s>"},user:"<|user|>\n{prompt}</s>",assistant:"<|assistant|>",linebreaks:{system:1,user:1,assistant:1},afterShot:"\n"},"synthia-cot":{id:"synthia-cot",name:"Synthia CoT",system:{schema:"SYSTEM: {system}",message:"Elaborate on the topic using a Tree of Thoughts and backtrack when necessary to construct a clear, cohesive Chain of Thought reasoning. 
Always answer without hesitation."},user:"USER: {prompt}",assistant:"ASSISTANT:",linebreaks:{system:1,user:1}},wizardlm:{id:"wizardlm",name:"WizardLM",system:{schema:"{system}",message:"You are a helpful AI assistant."},user:"USER: {prompt}",assistant:"ASSISTANT:",linebreaks:{user:1}},openchat:{id:"openchat",name:"OpenChat",user:"GPT4 User: {prompt}<|end_of_turn|>",assistant:"GPT4 Assistant:",stop:["<|end_of_turn|>"]},"openchat-correct":{id:"openchat",name:"OpenChat",user:"GPT4 Correct User: {prompt}<|end_of_turn|>",assistant:"GPT4 Correct Assistant:",stop:["<|end_of_turn|>"]},human_response:{id:"human_response",name:"Human response",user:"### HUMAN:\n{prompt}",assistant:"### RESPONSE:",linebreaks:{user:2,assistant:1}},minichat:{id:"minichat",name:"Minichat",user:"[|User|] {prompt} </s>",assistant:"[|Assistant|]",stop:["</s>","[|User|]"],afterShot:"\n",prefix:"<s> "},phi:{id:"phi",name:"Phi",user:"Instruct: {prompt}",assistant:"Output:",linebreaks:{user:1},stop:["</s>","Instruct:"]},deepseek:{id:"deepseek",name:"Deepseek",system:{schema:"{system}",message:"You are an AI programming assistant, utilizing the DeepSeek Coder model, developed by DeepSeek Company, and you only answer questions related to computer science. For politically sensitive questions, security and privacy issues, and other non-computer science questions, you will refuse to answer."},afterShot:"\n",user:"### Instruction:\n{prompt}",assistant:"### Response:",linebreaks:{user:1,system:1},stop:["<|EOT|>","### Instruction:"]},opencodeinterpreter:{id:"opencodeinterpreter",name:"Open code interpreter",user:"<|User|>\n{prompt}",assistant:"<|Assistant|>",linebreaks:{user:2},stop:["<|EOT|>","<|User|>"]},cerebrum:{id:"cerebrum",name:"Cerebrum",system:{schema:"{system}",message:"A chat between a user and a thinking artificial intelligence assistant. 
The assistant describes its thought process and gives helpful and detailed answers to the user's questions."},user:"User: {prompt}",assistant:"Ai:",linebreaks:{user:1},prefix:"<s>",stop:["</s>"]}};class e{id;name;user;assistant;history=[];system;shots;stop;linebreaks;afterShot;prefix;_extraSystem="";_extraAssistant="";_replacePrompt="";_replaceSystem="";constructor(s){let t;t="string"==typeof s?this._load(s):s,this.id=t.id,this.name=t.name,this.user=t.user,this.assistant=t.assistant,this.system=t.system,this.shots=t.shots,this.stop=t.stop,this.linebreaks=t.linebreaks,this.afterShot=t.afterShot,this.prefix=t.prefix}cloneTo(s,t=!0){const a=new e(s);return t&&this?.shots&&this.shots.forEach((s=>{a.addShot(s.user,s.assistant)})),this._extraSystem.length>0&&a.afterSystem(this._extraSystem),this._replaceSystem.length>0&&a.replaceSystem(this._replaceSystem),this._extraAssistant.length>0&&a.afterAssistant(this._extraAssistant),this._replacePrompt.length>0&&a.replacePrompt(this._replacePrompt),a}toJson(){const s={id:this.id,name:this.name,user:this.user,assistant:this.assistant};return this?.prefix&&(s.prefix=this.prefix),this?.system&&(s.system=this.system),this?.shots&&(s.shots=this.shots),this?.afterShot&&(s.afterShot=this.afterShot),this?.stop&&(s.stop=this.stop),this?.linebreaks&&(s.linebreaks=this.linebreaks),s}replaceSystem(s){return this.system?(this._replaceSystem=s,this):this}afterSystem(s){return this.system?(this._extraSystem=s,this):this}afterAssistant(s){return this._extraAssistant=s,this}replacePrompt(s){return this._replacePrompt=s,this}addShot(s,t){this?.shots||(this.shots=[]);let e=t;return this.shots.push({user:s,assistant:e}),this}addShots(s){return s.forEach((s=>this.addShot(s.user,s.assistant))),this}renderShot(s){const t=[];t.push(this._buildUserBlock(s.user));let e=s.assistant;return this.afterShot?e+=this.afterShot:e+="\n\n",t.push(this._buildAssistantBlock(e)),t.join("")}render(s=!1){const t=new Array;this.prefix&&t.push(this.prefix);const 
e=this._buildSystemBlock(s);if(e.length>0&&(t.push(e),this?.linebreaks?.system&&t.push("\n".repeat(this.linebreaks.system))),this?.shots)for(const s of this.shots)t.push(this.renderShot(s));for(const s of this.history)t.push(this.renderShot(s));return t.push(this._buildUserBlock()),t.push(this._buildAssistantBlock()),t.join("")}prompt(s){return this.render().replace("{prompt}",s)}pushToHistory(s){return this.history.push(s),this}_buildSystemBlock(s){let t="";return this?.system?(this._replaceSystem&&(this.system.message=this._replaceSystem),this.system?.message?(t=this.system.schema.replace("{system}",this.system.message),this._extraSystem&&(t+=this._extraSystem)):s||(t=this.system.schema),t):""}_buildUserBlock(s){let t=[],e=this.user;return this._replacePrompt.length>0&&(e=e.replace("{prompt}",this._replacePrompt)),t.push(e),this?.linebreaks?.user&&t.push("\n".repeat(this.linebreaks.user)),s&&(t[0]=e.replace("{prompt}",s)),t.join("")}_buildAssistantBlock(s){let t=[],e=this.assistant;return this._extraAssistant.length>0&&(e+=this._extraAssistant),t.push(e),this?.linebreaks?.assistant&&t.push("\n".repeat(this.linebreaks.assistant)),s&&t.push(s),t.join("")}_load(s){try{if(s in t)return t[s];throw new Error(`Template ${s} not found`)}catch(t){throw new Error(`Error loading template ${s}: ${t}`)}}}return s.PromptTemplate=e,s.templates=t,s}({});
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "modprompt",
-  "version": "0.4.0",
+  "version": "0.6.0",
   "description": "Prompt templates for language models",
   "license": "MIT",
   "scripts": {