modelfusion 0.40.0 → 0.40.1

This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
@@ -142,7 +142,10 @@ class CohereTextGenerationModel extends AbstractModel_js_1.AbstractModel {
     withPromptFormat(promptFormat) {
         return new PromptFormatTextGenerationModel_js_1.PromptFormatTextGenerationModel({
             model: this.withSettings({
-                stopSequences: promptFormat.stopSequences,
+                stopSequences: [
+                    ...(this.settings.stopSequences ?? []),
+                    ...promptFormat.stopSequences,
+                ],
             }),
             promptFormat,
         });
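The same change is repeated for every text generation model below: withPromptFormat used to overwrite the model's configured stopSequences with the prompt format's, and now concatenates the two lists. A minimal TypeScript sketch of the new merge, using a hypothetical helper whose parameters stand in for this.settings.stopSequences and promptFormat.stopSequences:

// Sketch of the merge introduced in 0.40.1 (hypothetical helper, not part of the library):
// the model's own stop sequences (possibly undefined) come first, the prompt
// format's stop sequences are appended, and nothing is dropped anymore.
function mergeStopSequences(
  modelStops: string[] | undefined,
  formatStops: string[]
): string[] {
  return [...(modelStops ?? []), ...formatStops];
}

mergeStopSequences(["\nUser:"], ["\n\n"]); // => ["\nUser:", "\n\n"]
mergeStopSequences(undefined, ["\n\n"]);   // => ["\n\n"]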
@@ -136,7 +136,10 @@ export class CohereTextGenerationModel extends AbstractModel {
     withPromptFormat(promptFormat) {
         return new PromptFormatTextGenerationModel({
             model: this.withSettings({
-                stopSequences: promptFormat.stopSequences,
+                stopSequences: [
+                    ...(this.settings.stopSequences ?? []),
+                    ...promptFormat.stopSequences,
+                ],
             }),
             promptFormat,
         });
@@ -108,7 +108,10 @@ class LlamaCppTextGenerationModel extends AbstractModel_js_1.AbstractModel {
     withPromptFormat(promptFormat) {
         return new PromptFormatTextGenerationModel_js_1.PromptFormatTextGenerationModel({
             model: this.withSettings({
-                stopSequences: promptFormat.stopSequences,
+                stopSequences: [
+                    ...(this.settings.stopSequences ?? []),
+                    ...promptFormat.stopSequences,
+                ],
             }),
             promptFormat,
         });
@@ -102,7 +102,10 @@ export class LlamaCppTextGenerationModel extends AbstractModel {
     withPromptFormat(promptFormat) {
         return new PromptFormatTextGenerationModel({
             model: this.withSettings({
-                stopSequences: promptFormat.stopSequences,
+                stopSequences: [
+                    ...(this.settings.stopSequences ?? []),
+                    ...promptFormat.stopSequences,
+                ],
             }),
             promptFormat,
         });
@@ -242,7 +242,10 @@ class OpenAITextGenerationModel extends AbstractModel_js_1.AbstractModel {
     withPromptFormat(promptFormat) {
         return new PromptFormatTextGenerationModel_js_1.PromptFormatTextGenerationModel({
             model: this.withSettings({
-                stopSequences: promptFormat.stopSequences,
+                stopSequences: [
+                    ...(this.settings.stopSequences ?? []),
+                    ...promptFormat.stopSequences,
+                ],
             }),
             promptFormat,
         });
@@ -233,7 +233,10 @@ export class OpenAITextGenerationModel extends AbstractModel {
     withPromptFormat(promptFormat) {
         return new PromptFormatTextGenerationModel({
             model: this.withSettings({
-                stopSequences: promptFormat.stopSequences,
+                stopSequences: [
+                    ...(this.settings.stopSequences ?? []),
+                    ...promptFormat.stopSequences,
+                ],
             }),
             promptFormat,
         });
@@ -329,7 +329,12 @@ class OpenAIChatModel extends AbstractModel_js_1.AbstractModel {
     }
     withPromptFormat(promptFormat) {
         return new PromptFormatTextGenerationModel_js_1.PromptFormatTextGenerationModel({
-            model: this.withSettings({ stopSequences: promptFormat.stopSequences }),
+            model: this.withSettings({
+                stopSequences: [
+                    ...(this.settings.stopSequences ?? []),
+                    ...promptFormat.stopSequences,
+                ],
+            }),
             promptFormat,
         });
     }
@@ -320,7 +320,12 @@ export class OpenAIChatModel extends AbstractModel {
     }
     withPromptFormat(promptFormat) {
         return new PromptFormatTextGenerationModel({
-            model: this.withSettings({ stopSequences: promptFormat.stopSequences }),
+            model: this.withSettings({
+                stopSequences: [
+                    ...(this.settings.stopSequences ?? []),
+                    ...promptFormat.stopSequences,
+                ],
+            }),
             promptFormat,
         });
     }
package/package.json CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "modelfusion",
   "description": "Build multimodal applications, chatbots, and agents with JavaScript and TypeScript.",
-  "version": "0.40.0",
+  "version": "0.40.1",
   "author": "Lars Grammel",
   "license": "MIT",
   "keywords": [
@@ -60,7 +60,10 @@ class PromptFormatTextGenerationModel {
     withPromptFormat(promptFormat) {
         return new PromptFormatTextGenerationModel({
             model: this.withSettings({
-                stopSequences: promptFormat.stopSequences,
+                stopSequences: [
+                    ...(this.settings.stopSequences ?? []),
+                    ...promptFormat.stopSequences,
+                ],
             }),
             promptFormat,
         });
@@ -57,7 +57,10 @@ export class PromptFormatTextGenerationModel {
     withPromptFormat(promptFormat) {
         return new PromptFormatTextGenerationModel({
             model: this.withSettings({
-                stopSequences: promptFormat.stopSequences,
+                stopSequences: [
+                    ...(this.settings.stopSequences ?? []),
+                    ...promptFormat.stopSequences,
+                ],
             }),
             promptFormat,
         });
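Because PromptFormatTextGenerationModel.withPromptFormat applies the same merge, stop sequences now accumulate when prompt formats are layered on a model that already defines its own. A minimal sketch of that accumulation, using placeholder shapes rather than the real modelfusion classes:

// Placeholder types and function, for illustration only: applying two prompt
// formats in sequence keeps the stops from both plus the model's own stops,
// instead of ending up with only the last format's stops.
type Settings = { stopSequences?: string[] };

const applyFormat = (
  settings: Settings,
  format: { stopSequences: string[] }
): Settings => ({
  ...settings,
  stopSequences: [...(settings.stopSequences ?? []), ...format.stopSequences],
});

let settings: Settings = { stopSequences: ["\nUser:"] };          // configured on the model
settings = applyFormat(settings, { stopSequences: ["\n\n"] });    // first prompt format
settings = applyFormat(settings, { stopSequences: ["###"] });     // second prompt format
// settings.stopSequences is now ["\nUser:", "\n\n", "###"]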