@bedrockio/ai 0.9.0 → 0.9.2

package/CHANGELOG.md CHANGED
@@ -1,3 +1,11 @@
+ ## 0.9.2
+
+ - Strip empty input out of resulting messages.
+
+ ## 0.9.1
+
+ - Pass interpolated instructions back in result.
+
  ## 0.9.0
 
  - Function calls are now handled out of the box.
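Taken together, these two patch releases change the shape of the `prompt()` result: 0.9.1 passes the interpolated instructions back on the result, and 0.9.2 strips messages with empty content before they are echoed into the returned history. A minimal sketch of the new surface, assuming the named client export is importable from the package root; the constructor options, template name, and params are illustrative and not part of this diff:

```js
import { OpenAiClient } from '@bedrockio/ai';

// Hypothetical setup; only the shape of the prompt() result below is
// taken from this diff.
const client = new OpenAiClient({ apiKey: process.env.OPENAI_API_KEY });

const { result, instructions, messages } = await client.prompt({
  template: 'summarize',
  params: { topic: 'release notes' },
});

console.log(instructions); // 0.9.1: interpolated instructions returned
console.log(messages.every((m) => m.content)); // 0.9.2: no empty messages
```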
@@ -23,7 +23,7 @@ class BaseClient {
  */
  async prompt(options) {
  options = this.normalizeOptions(options);
- const { output, stream, schema } = options;
+ const { output, stream, schema, instructions } = options;
  const response = await this.runPrompt(options);
  if (!stream) {
  this.debug('Response:', response, options);
@@ -45,6 +45,7 @@ class BaseClient {
  return {
  result,
  response,
+ instructions,
  ...this.normalizeResponse(response, options),
  };
  }
@@ -106,6 +107,12 @@ class BaseClient {
  getTemplateSource(name) {
  return this.renderer.getTemplateSource(name);
  }
+ getFilteredMessages(options) {
+ const { messages = [] } = options;
+ return messages.filter((message) => {
+ return message.content;
+ });
+ }
  // Protected
  runPrompt(options) {
  void options;
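The `getFilteredMessages` helper added here implements the 0.9.2 changelog entry: any message whose `content` is falsy (an empty string, `null`, or `undefined`) is dropped before the input messages are echoed back by the providers' `normalizeResponse` methods. Its effect in isolation, with made-up sample messages:

```js
// Same predicate as the helper added above; the messages are examples.
const messages = [
  { role: 'user', content: 'Hello' },
  { role: 'user', content: '' }, // dropped: empty content
  { role: 'assistant', content: 'Hi there' },
];

const filtered = messages.filter((message) => message.content);
console.log(filtered.length); // 2
```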
@@ -156,14 +163,14 @@ class BaseClient {
  }
  normalizeInputs(options) {
  options = this.normalizeTemplateOptions(options);
- let { system, output = 'text' } = options;
+ let { instructions, output = 'text' } = options;
  if (output === 'json') {
- system = [system, 'Output only valid JSON.'].join('\n\n');
+ instructions = [instructions, 'Output only valid JSON.'].join('\n\n');
  }
  const messages = this.normalizeMessages(options);
  return {
- system,
  messages,
+ instructions,
  };
  }
  normalizeTemplateOptions(options) {
@@ -175,7 +182,7 @@ class BaseClient {
  params,
  template,
  });
- let system = '';
+ let instructions = '';
  let { messages = [] } = options;
  // Templates may contain multiple roles, ie SYSTEM or USER, making them
  // useful for one-off prompting. However in a multi-turn conversation
@@ -188,7 +195,7 @@ class BaseClient {
  const { title = 'system', content } = section;
  const role = title.toLowerCase();
  if (role === 'system') {
- system += [system, content].join('\n');
+ instructions += [instructions, content].join('\n');
  }
  else if (!hasUserMessages) {
  messages = [
@@ -200,10 +207,10 @@ class BaseClient {
  ];
  }
  }
- system = system.trim();
+ instructions = instructions.trim();
  return {
  ...options,
- system,
+ instructions,
  messages,
  };
  }
@@ -274,7 +281,9 @@ class BaseClient {
  exports.default = BaseClient;
  /**
  * @typedef {Object} PromptOptions
- * @property {string|PromptMessage[]} input - Input to use.
+ * @property {string} input - Basic input that becomes the user message.
+ * @property {string} instructions
+ * @property {PromptMessage[]} messages - Full message input.
  * @property {string} [model] - The model to use.
  * @property {boolean} stream - Stream response.
  * @property {Object} [schema] - A JSON schema compatible object that defines the output shape.
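The typedef change above documents the rename that runs through this release: the `system` option becomes `instructions`, and a plain-string `input` is now distinguished from the full `messages` array. A sketch of how a caller adapts, assuming `system` was previously accepted as a top-level prompt option as the removed destructurings suggest (`client` as in the earlier sketch):

```js
// Before (0.9.0), per the removed `system` option:
await client.prompt({
  system: 'You are a terse assistant.',
  input: 'Explain the change.',
});

// After (0.9.1+), the same intent is expressed with `instructions`:
await client.prompt({
  instructions: 'You are a terse assistant.',
  input: 'Explain the change.',
});
```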
@@ -22,7 +22,7 @@ class AnthropicClient extends BaseClient_js_1.default {
  return data.map((o) => o.id);
  }
  async runPrompt(options) {
- const { model, messages, temperature, system = '', stream = false, tokens = DEFAULT_TOKENS, } = options;
+ const { model, messages, temperature, stream = false, tokens = DEFAULT_TOKENS, instructions: system = '', } = options;
  const params = {
  model,
  stream,
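Note the alias in the new destructuring: the renamed `instructions` option is mapped back onto a local `system` binding, so Anthropic's native `system` request parameter is unchanged; only the package-level option name moves.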
@@ -57,10 +57,9 @@ class AnthropicClient extends BaseClient_js_1.default {
  return toolBlock?.input || null;
  }
  normalizeResponse(response, options) {
- const { messages } = options;
  return {
  messages: [
- ...messages,
+ ...this.getFilteredMessages(options),
  ...response.content
  .filter((item) => {
  return item.type === 'text';
@@ -113,7 +112,7 @@ class AnthropicClient extends BaseClient_js_1.default {
  return {
  type: 'stop',
  messages: [
- ...options.messages,
+ ...this.getFilteredMessages(options),
  {
  role: 'assistant',
  content: options.buffer,
@@ -43,7 +43,7 @@ class OpenAiClient extends BaseClient_js_1.default {
  return names;
  }
  async runPrompt(options) {
- const { model, tools, verbosity, temperature, prevResponseId, messages: input, system: instructions = '', tool_choice = 'auto', stream = false, } = options;
+ const { model, tools, verbosity, temperature, prevResponseId, messages: input, instructions = '', tool_choice = 'auto', stream = false, } = options;
  const params = {
  model,
  tools,
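For OpenAI the rename is more direct: the option now matches the `instructions` request field the client already sends, so the old `system: instructions` alias in the destructuring is simply dropped.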
@@ -91,10 +91,9 @@ class OpenAiClient extends BaseClient_js_1.default {
  return JSON.parse(last.text);
  }
  normalizeResponse(response, options) {
- const { messages } = options;
  return {
  messages: [
- ...messages,
+ ...this.getFilteredMessages(options),
  {
  role: 'assistant',
  content: response.output_text,
@@ -150,8 +149,9 @@ class OpenAiClient extends BaseClient_js_1.default {
  return {
  type: 'stop',
  id: event.response.id,
+ instructions: options.instructions,
  messages: [
- ...options.messages,
+ ...this.getFilteredMessages(options),
  {
  role: 'assistant',
  content: output?.content[0].text,
@@ -21,7 +21,7 @@ export default class BaseClient {
  */
  async prompt(options) {
  options = this.normalizeOptions(options);
- const { output, stream, schema } = options;
+ const { output, stream, schema, instructions } = options;
  const response = await this.runPrompt(options);
  if (!stream) {
  this.debug('Response:', response, options);
@@ -43,6 +43,7 @@ export default class BaseClient {
  return {
  result,
  response,
+ instructions,
  ...this.normalizeResponse(response, options),
  };
  }
@@ -104,6 +105,12 @@ export default class BaseClient {
  getTemplateSource(name) {
  return this.renderer.getTemplateSource(name);
  }
+ getFilteredMessages(options) {
+ const { messages = [] } = options;
+ return messages.filter((message) => {
+ return message.content;
+ });
+ }
  // Protected
  runPrompt(options) {
  void options;
@@ -154,14 +161,14 @@ export default class BaseClient {
  }
  normalizeInputs(options) {
  options = this.normalizeTemplateOptions(options);
- let { system, output = 'text' } = options;
+ let { instructions, output = 'text' } = options;
  if (output === 'json') {
- system = [system, 'Output only valid JSON.'].join('\n\n');
+ instructions = [instructions, 'Output only valid JSON.'].join('\n\n');
  }
  const messages = this.normalizeMessages(options);
  return {
- system,
  messages,
+ instructions,
  };
  }
  normalizeTemplateOptions(options) {
@@ -173,7 +180,7 @@ export default class BaseClient {
  params,
  template,
  });
- let system = '';
+ let instructions = '';
  let { messages = [] } = options;
  // Templates may contain multiple roles, ie SYSTEM or USER, making them
  // useful for one-off prompting. However in a multi-turn conversation
@@ -186,7 +193,7 @@ export default class BaseClient {
  const { title = 'system', content } = section;
  const role = title.toLowerCase();
  if (role === 'system') {
- system += [system, content].join('\n');
+ instructions += [instructions, content].join('\n');
  }
  else if (!hasUserMessages) {
  messages = [
@@ -198,10 +205,10 @@ export default class BaseClient {
  ];
  }
  }
- system = system.trim();
+ instructions = instructions.trim();
  return {
  ...options,
- system,
+ instructions,
  messages,
  };
  }
@@ -271,7 +278,9 @@ export default class BaseClient {
  }
  /**
  * @typedef {Object} PromptOptions
- * @property {string|PromptMessage[]} input - Input to use.
+ * @property {string} input - Basic input that becomes the user message.
+ * @property {string} instructions
+ * @property {PromptMessage[]} messages - Full message input.
  * @property {string} [model] - The model to use.
  * @property {boolean} stream - Stream response.
  * @property {Object} [schema] - A JSON schema compatible object that defines the output shape.
@@ -16,7 +16,7 @@ export class AnthropicClient extends BaseClient {
  return data.map((o) => o.id);
  }
  async runPrompt(options) {
- const { model, messages, temperature, system = '', stream = false, tokens = DEFAULT_TOKENS, } = options;
+ const { model, messages, temperature, stream = false, tokens = DEFAULT_TOKENS, instructions: system = '', } = options;
  const params = {
  model,
  stream,
@@ -51,10 +51,9 @@ export class AnthropicClient extends BaseClient {
  return toolBlock?.input || null;
  }
  normalizeResponse(response, options) {
- const { messages } = options;
  return {
  messages: [
- ...messages,
+ ...this.getFilteredMessages(options),
  ...response.content
  .filter((item) => {
  return item.type === 'text';
@@ -107,7 +106,7 @@ export class AnthropicClient extends BaseClient {
  return {
  type: 'stop',
  messages: [
- ...options.messages,
+ ...this.getFilteredMessages(options),
  {
  role: 'assistant',
  content: options.buffer,
@@ -37,7 +37,7 @@ export class OpenAiClient extends BaseClient {
  return names;
  }
  async runPrompt(options) {
- const { model, tools, verbosity, temperature, prevResponseId, messages: input, system: instructions = '', tool_choice = 'auto', stream = false, } = options;
+ const { model, tools, verbosity, temperature, prevResponseId, messages: input, instructions = '', tool_choice = 'auto', stream = false, } = options;
  const params = {
  model,
  tools,
@@ -85,10 +85,9 @@ export class OpenAiClient extends BaseClient {
  return JSON.parse(last.text);
  }
  normalizeResponse(response, options) {
- const { messages } = options;
  return {
  messages: [
- ...messages,
+ ...this.getFilteredMessages(options),
  {
  role: 'assistant',
  content: response.output_text,
@@ -144,8 +143,9 @@ export class OpenAiClient extends BaseClient {
  return {
  type: 'stop',
  id: event.response.id,
+ instructions: options.instructions,
  messages: [
- ...options.messages,
+ ...this.getFilteredMessages(options),
  {
  role: 'assistant',
  content: output?.content[0].text,
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@bedrockio/ai",
- "version": "0.9.0",
+ "version": "0.9.2",
  "description": "Bedrock wrapper for common AI chatbots.",
  "type": "module",
  "scripts": {
@@ -22,6 +22,7 @@ export default class BaseClient {
  * @param {string} name
  */
  getTemplateSource(name: string): any;
+ getFilteredMessages(options: any): any;
  runPrompt(options: any): void;
  runStream(options: any): void;
  getTextResponse(response: any): void;
@@ -42,8 +43,8 @@ export default class BaseClient {
  */
  normalizeOptions(options: any): any;
  normalizeInputs(options: any): {
- system: any;
  messages: any[];
+ instructions: any;
  };
  normalizeTemplateOptions(options: any): any;
  normalizeMessages(options: any): any[];
@@ -56,9 +57,14 @@ export default class BaseClient {
  }
  export type PromptOptions = {
  /**
- * - Input to use.
+ * - Basic input that becomes the user message.
  */
- input: string | PromptMessage[];
+ input: string;
+ instructions: string;
+ /**
+ * - Full message input.
+ */
+ messages: PromptMessage[];
  /**
  * - The model to use.
  */
@@ -1 +1 @@
- {"version":3,"file":"BaseClient.d.ts","sourceRoot":"","sources":["../src/BaseClient.js"],"names":[],"mappings":"AAKA;IACE,0BASC;IARC,aAIC;IACD,2BAEE;IAKJ;;;;;OAKG;IACH,gBAFW,aAAa,gBAgCvB;IAED;;;;;OAKG;IACH,gBAHW,aAAa,GAAG,aAAa,gCAsDvC;IAED;;;;OAIG;IACH,wBAFW,MAAM,OAIhB;IAID,8BAGC;IAED,8BAGC;IAED,qCAGC;IAED;;OAEG;IACH,0CAGC;IAED;;OAEG;IACH,oDAIC;IAED;;OAEG;IACH,oDAIC;IAID;;OAEG;IACH,oCAOC;IAED;;;MAeC;IAED,4CAiDC;IAED,uCAoBC;IAED;;;MA4BC;IAED,uDAWC;IAED,kDAMC;CACF;;;;;WAIa,MAAM,GAAC,aAAa,EAAE;;;;YACtB,MAAM;;;;YACN,OAAO;;;;;;;;aAEP,MAAM,GAAG,MAAM;;;;;;;;;;;sBAOf,MAAM;;;UAKN,QAAQ,GAAG,MAAM,GAAG,WAAW;aAC/B,MAAM;;iCAvVa,sBAAsB"}
+ {"version":3,"file":"BaseClient.d.ts","sourceRoot":"","sources":["../src/BaseClient.js"],"names":[],"mappings":"AAKA;IACE,0BASC;IARC,aAIC;IACD,2BAEE;IAKJ;;;;;OAKG;IACH,gBAFW,aAAa,gBAiCvB;IAED;;;;;OAKG;IACH,gBAHW,aAAa,GAAG,aAAa,gCAsDvC;IAED;;;;OAIG;IACH,wBAFW,MAAM,OAIhB;IAED,uCAKC;IAID,8BAGC;IAED,8BAGC;IAED,qCAGC;IAED;;OAEG;IACH,0CAGC;IAED;;OAEG;IACH,oDAIC;IAED;;OAEG;IACH,oDAIC;IAID;;OAEG;IACH,oCAOC;IAED;;;MAeC;IAED,4CAiDC;IAED,uCAoBC;IAED;;;MA4BC;IAED,uDAWC;IAED,kDAMC;CACF;;;;;WAIa,MAAM;kBACN,MAAM;;;;cACN,aAAa,EAAE;;;;YACf,MAAM;;;;YACN,OAAO;;;;;;;;aAEP,MAAM,GAAG,MAAM;;;;;;;;;;;sBAOf,MAAM;;;UAKN,QAAQ,GAAG,MAAM,GAAG,WAAW;aAC/B,MAAM;;iCAjWa,sBAAsB"}
@@ -1 +1 @@
- {"version":3,"file":"anthropic.d.ts","sourceRoot":"","sources":["../src/anthropic.js"],"names":[],"mappings":"AAMA;IACE,6BAA2C;IAIzC,kBAAoC;IAGtC;;;OAGG;IACH,4BAGC;IAED;;wGA2BC;IAED;;wGAKC;IAED,oCAKC;IASD;;;;;;MAkBC;IAED;;;MAKC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;MAsCC;IAID;;;;;;;;;;MA0CC;IAED,oCAKC;IAUD;;;;MAOC;IAID;;;;MAOC;IAED;;;;MAQC;CACF;uBApOsB,iBAAiB;sBAFlB,mBAAmB"}
+ {"version":3,"file":"anthropic.d.ts","sourceRoot":"","sources":["../src/anthropic.js"],"names":[],"mappings":"AAMA;IACE,6BAA2C;IAIzC,kBAAoC;IAGtC;;;OAGG;IACH,4BAGC;IAED;;wGA2BC;IAED;;wGAKC;IAED,oCAKC;IASD;;;;;;MAiBC;IAED;;;MAKC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;MAsCC;IAID;;;;;;;;;;MA0CC;IAED,oCAKC;IAUD;;;;MAOC;IAID;;;;MAOC;IAED;;;;MAQC;CACF;uBAnOsB,iBAAiB;sBAFlB,mBAAmB"}
@@ -1 +1 @@
- {"version":3,"file":"openai.d.ts","sourceRoot":"","sources":["../src/openai.js"],"names":[],"mappings":"AAIA;IACE,6BAAoC;IAIlC,eAAiC;IAGnC;;;;OAIG;IACH,kBAHW,cAAc,qBAgCxB;IAED;;yFAiCC;IAED;;yFAKC;IAED,oCAEC;IAwBD;;;;;;;MAeC;IAED;;;MAKC;IAID;;;;;;;;;;MAmBC;CA0CF;6BAQA,GAAC,GAAK,KAAK,GACL,SAAS,GACT,WAAW,GACX,aAAa,GACb,YAAY,GACZ,WAAW,GACX,QAAQ,GACR,OAAO,GACP,OAAO,GACP,MAAM,GACN,QAAQ;uBA3NQ,iBAAiB;mBAFrB,QAAQ"}
+ {"version":3,"file":"openai.d.ts","sourceRoot":"","sources":["../src/openai.js"],"names":[],"mappings":"AAIA;IACE,6BAAoC;IAIlC,eAAiC;IAGnC;;;;OAIG;IACH,kBAHW,cAAc,qBAgCxB;IAED;;yFAiCC;IAED;;yFAKC;IAED,oCAEC;IAwBD;;;;;;;MAcC;IAED;;;MAKC;IAID;;;;;;;;;;MAmBC;CA2CF;6BAQA,GAAC,GAAK,KAAK,GACL,SAAS,GACT,WAAW,GACX,aAAa,GACb,YAAY,GACZ,WAAW,GACX,QAAQ,GACR,OAAO,GACP,OAAO,GACP,MAAM,GACN,QAAQ;uBA3NQ,iBAAiB;mBAFrB,QAAQ"}