@bedrockio/ai 0.9.0 → 0.9.1

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
package/CHANGELOG.md CHANGED
@@ -1,3 +1,7 @@
+## 0.9.1
+
+- Pass interpolated instructions back in result.
+
 ## 0.9.0
 
 - Function calls are now handled out of the box.
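The headline change in 0.9.1 is that `prompt()` now passes the interpolated instructions back in its result. A minimal sketch of the new return shape, assuming the root package exports `OpenAiClient` and takes constructor options along these lines (both are assumptions, not shown in this diff):

```js
import { OpenAiClient } from '@bedrockio/ai';

// Hypothetical setup; adjust the import path and options to your install.
const client = new OpenAiClient({ apiKey: process.env.OPENAI_API_KEY });

const { result, response, instructions } = await client.prompt({
  instructions: 'You are a terse assistant.',
  input: 'What changed in 0.9.1?',
  output: 'json',
});

// New in 0.9.1: `instructions` echoes the final system text that was sent.
// With output: 'json' it also includes the appended "Output only valid JSON."
console.log(instructions);
```

Getting the exact text back is handy for logging or replaying the prompt, since templating and the JSON suffix mean it can differ from what was passed in.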
@@ -23,7 +23,7 @@ class BaseClient {
      */
     async prompt(options) {
         options = this.normalizeOptions(options);
-        const { output, stream, schema } = options;
+        const { output, stream, schema, instructions } = options;
         const response = await this.runPrompt(options);
         if (!stream) {
             this.debug('Response:', response, options);
@@ -45,6 +45,7 @@ class BaseClient {
         return {
             result,
             response,
+            instructions,
             ...this.normalizeResponse(response, options),
         };
     }
@@ -156,14 +157,14 @@ class BaseClient {
     }
     normalizeInputs(options) {
         options = this.normalizeTemplateOptions(options);
-        let { system, output = 'text' } = options;
+        let { instructions, output = 'text' } = options;
         if (output === 'json') {
-            system = [system, 'Output only valid JSON.'].join('\n\n');
+            instructions = [instructions, 'Output only valid JSON.'].join('\n\n');
         }
         const messages = this.normalizeMessages(options);
         return {
-            system,
             messages,
+            instructions,
         };
     }
     normalizeTemplateOptions(options) {
@@ -175,7 +176,7 @@ class BaseClient {
             params,
             template,
         });
-        let system = '';
+        let instructions = '';
         let { messages = [] } = options;
         // Templates may contain multiple roles, ie SYSTEM or USER, making them
         // useful for one-off prompting. However in a multi-turn conversation
@@ -188,7 +189,7 @@ class BaseClient {
             const { title = 'system', content } = section;
             const role = title.toLowerCase();
             if (role === 'system') {
-                system += [system, content].join('\n');
+                instructions += [instructions, content].join('\n');
             }
             else if (!hasUserMessages) {
                 messages = [
@@ -200,10 +201,10 @@ class BaseClient {
                 ];
             }
         }
-        system = system.trim();
+        instructions = instructions.trim();
         return {
             ...options,
-            system,
+            instructions,
             messages,
         };
     }
@@ -274,7 +275,9 @@ class BaseClient {
 exports.default = BaseClient;
 /**
  * @typedef {Object} PromptOptions
- * @property {string|PromptMessage[]} input - Input to use.
+ * @property {string} input - Basic input to be comes user message.
+ * @property {string} instructions
+ * @property {PromptMessage[]} messages - Full message input.
  * @property {string} [model] - The model to use.
  * @property {boolean} stream - Stream response.
  * @property {Object} [schema] - A JSON schema compatible object that defines the output shape.
@@ -22,7 +22,7 @@ class AnthropicClient extends BaseClient_js_1.default {
         return data.map((o) => o.id);
     }
     async runPrompt(options) {
-        const { model, messages, temperature, system = '', stream = false, tokens = DEFAULT_TOKENS, } = options;
+        const { model, messages, temperature, stream = false, tokens = DEFAULT_TOKENS, instructions: system = '', } = options;
         const params = {
             model,
             stream,
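The hunk above shows the user-facing side of the rename: options are now read as `instructions`, and AnthropicClient aliases the value back to the Anthropic SDK's `system` parameter in the destructuring. A hedged migration sketch; nothing in this diff shows the old `system` key still being honored, so treat it as dropped:

```js
// 0.9.0: the system text was passed as `system`.
await client.prompt({ system: 'Answer in French.', input: 'Bonjour' });

// 0.9.1: pass `instructions` instead; a leftover `system` key would not be
// picked up by the destructuring shown above.
await client.prompt({ instructions: 'Answer in French.', input: 'Bonjour' });
```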
@@ -43,7 +43,7 @@ class OpenAiClient extends BaseClient_js_1.default {
         return names;
     }
     async runPrompt(options) {
-        const { model, tools, verbosity, temperature, prevResponseId, messages: input, system: instructions = '', tool_choice = 'auto', stream = false, } = options;
+        const { model, tools, verbosity, temperature, prevResponseId, messages: input, instructions = '', tool_choice = 'auto', stream = false, } = options;
         const params = {
             model,
             tools,
@@ -150,6 +150,7 @@ class OpenAiClient extends BaseClient_js_1.default {
         return {
             type: 'stop',
             id: event.response.id,
+            instructions: options.instructions,
             messages: [
                 ...options.messages,
                 {
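In the OpenAI client's stream handling, the terminal `stop` event now carries `instructions` alongside `id` and `messages`. A sketch of a consumer; the async-iteration style is an assumption, only the event shape comes from this diff:

```js
const stream = await client.prompt({
  stream: true,
  instructions: 'Be brief.',
  input: 'Ping?',
});

for await (const event of stream) {
  if (event.type === 'stop') {
    // `instructions` plus `messages` is enough to replay the exchange or
    // seed a follow-up turn without re-deriving the system text.
    const { id, instructions, messages } = event;
  }
}
```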
@@ -21,7 +21,7 @@ export default class BaseClient {
      */
     async prompt(options) {
         options = this.normalizeOptions(options);
-        const { output, stream, schema } = options;
+        const { output, stream, schema, instructions } = options;
         const response = await this.runPrompt(options);
         if (!stream) {
             this.debug('Response:', response, options);
@@ -43,6 +43,7 @@ export default class BaseClient {
         return {
             result,
             response,
+            instructions,
             ...this.normalizeResponse(response, options),
         };
     }
@@ -154,14 +155,14 @@ export default class BaseClient {
     }
     normalizeInputs(options) {
         options = this.normalizeTemplateOptions(options);
-        let { system, output = 'text' } = options;
+        let { instructions, output = 'text' } = options;
         if (output === 'json') {
-            system = [system, 'Output only valid JSON.'].join('\n\n');
+            instructions = [instructions, 'Output only valid JSON.'].join('\n\n');
        }
         const messages = this.normalizeMessages(options);
         return {
-            system,
             messages,
+            instructions,
         };
     }
     normalizeTemplateOptions(options) {
@@ -173,7 +174,7 @@ export default class BaseClient {
             params,
             template,
         });
-        let system = '';
+        let instructions = '';
         let { messages = [] } = options;
         // Templates may contain multiple roles, ie SYSTEM or USER, making them
         // useful for one-off prompting. However in a multi-turn conversation
@@ -186,7 +187,7 @@ export default class BaseClient {
             const { title = 'system', content } = section;
             const role = title.toLowerCase();
             if (role === 'system') {
-                system += [system, content].join('\n');
+                instructions += [instructions, content].join('\n');
             }
             else if (!hasUserMessages) {
                 messages = [
@@ -198,10 +199,10 @@ export default class BaseClient {
                 ];
             }
         }
-        system = system.trim();
+        instructions = instructions.trim();
         return {
             ...options,
-            system,
+            instructions,
             messages,
         };
     }
@@ -271,7 +272,9 @@ export default class BaseClient {
 }
 /**
  * @typedef {Object} PromptOptions
- * @property {string|PromptMessage[]} input - Input to use.
+ * @property {string} input - Basic input to be comes user message.
+ * @property {string} instructions
+ * @property {PromptMessage[]} messages - Full message input.
  * @property {string} [model] - The model to use.
  * @property {boolean} stream - Stream response.
  * @property {Object} [schema] - A JSON schema compatible object that defines the output shape.
@@ -16,7 +16,7 @@ export class AnthropicClient extends BaseClient {
         return data.map((o) => o.id);
     }
     async runPrompt(options) {
-        const { model, messages, temperature, system = '', stream = false, tokens = DEFAULT_TOKENS, } = options;
+        const { model, messages, temperature, stream = false, tokens = DEFAULT_TOKENS, instructions: system = '', } = options;
         const params = {
             model,
             stream,
@@ -37,7 +37,7 @@ export class OpenAiClient extends BaseClient {
         return names;
     }
     async runPrompt(options) {
-        const { model, tools, verbosity, temperature, prevResponseId, messages: input, system: instructions = '', tool_choice = 'auto', stream = false, } = options;
+        const { model, tools, verbosity, temperature, prevResponseId, messages: input, instructions = '', tool_choice = 'auto', stream = false, } = options;
         const params = {
             model,
             tools,
@@ -144,6 +144,7 @@ export class OpenAiClient extends BaseClient {
         return {
             type: 'stop',
             id: event.response.id,
+            instructions: options.instructions,
             messages: [
                 ...options.messages,
                 {
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@bedrockio/ai",
-  "version": "0.9.0",
+  "version": "0.9.1",
   "description": "Bedrock wrapper for common AI chatbots.",
   "type": "module",
   "scripts": {
@@ -42,8 +42,8 @@ export default class BaseClient {
      */
     normalizeOptions(options: any): any;
     normalizeInputs(options: any): {
-        system: any;
         messages: any[];
+        instructions: any;
     };
     normalizeTemplateOptions(options: any): any;
     normalizeMessages(options: any): any[];
@@ -56,9 +56,14 @@ export default class BaseClient {
 }
 export type PromptOptions = {
     /**
-     * - Input to use.
+     * - Basic input to be comes user message.
      */
-    input: string | PromptMessage[];
+    input: string;
+    instructions: string;
+    /**
+     * - Full message input.
+     */
+    messages: PromptMessage[];
     /**
      * - The model to use.
      */
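The typings make the input split explicit: the old `input: string | PromptMessage[]` union becomes a string-only `input` plus a separate `messages` array. Both call styles, sketched below; the `{ role, content }` message shape is an assumption, since `PromptMessage` is not spelled out in this diff:

```js
// Simple form: `input` becomes the single user message.
await client.prompt({ input: 'Hello' });

// Full form: pass the whole conversation as `messages`.
await client.prompt({
  instructions: 'Be terse.',
  messages: [
    { role: 'user', content: 'Hello' },
    { role: 'assistant', content: 'Hi there.' },
    { role: 'user', content: 'Recap our chat.' },
  ],
});
```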
@@ -1 +1 @@
-{"version":3,"file":"BaseClient.d.ts","sourceRoot":"","sources":["../src/BaseClient.js"],"names":[],"mappings":"AAKA;IACE,0BASC;IARC,aAIC;IACD,2BAEE;IAKJ;;;;;OAKG;IACH,gBAFW,aAAa,gBAgCvB;IAED;;;;;OAKG;IACH,gBAHW,aAAa,GAAG,aAAa,gCAsDvC;IAED;;;;OAIG;IACH,wBAFW,MAAM,OAIhB;IAID,8BAGC;IAED,8BAGC;IAED,qCAGC;IAED;;OAEG;IACH,0CAGC;IAED;;OAEG;IACH,oDAIC;IAED;;OAEG;IACH,oDAIC;IAID;;OAEG;IACH,oCAOC;IAED;;;MAeC;IAED,4CAiDC;IAED,uCAoBC;IAED;;;MA4BC;IAED,uDAWC;IAED,kDAMC;CACF;;;;;WAIa,MAAM,GAAC,aAAa,EAAE;;;;YACtB,MAAM;;;;YACN,OAAO;;;;;;;;aAEP,MAAM,GAAG,MAAM;;;;;;;;;;;sBAOf,MAAM;;;UAKN,QAAQ,GAAG,MAAM,GAAG,WAAW;aAC/B,MAAM;;iCAvVa,sBAAsB"}
+{"version":3,"file":"BaseClient.d.ts","sourceRoot":"","sources":["../src/BaseClient.js"],"names":[],"mappings":"AAKA;IACE,0BASC;IARC,aAIC;IACD,2BAEE;IAKJ;;;;;OAKG;IACH,gBAFW,aAAa,gBAiCvB;IAED;;;;;OAKG;IACH,gBAHW,aAAa,GAAG,aAAa,gCAsDvC;IAED;;;;OAIG;IACH,wBAFW,MAAM,OAIhB;IAID,8BAGC;IAED,8BAGC;IAED,qCAGC;IAED;;OAEG;IACH,0CAGC;IAED;;OAEG;IACH,oDAIC;IAED;;OAEG;IACH,oDAIC;IAID;;OAEG;IACH,oCAOC;IAED;;;MAeC;IAED,4CAiDC;IAED,uCAoBC;IAED;;;MA4BC;IAED,uDAWC;IAED,kDAMC;CACF;;;;;WAIa,MAAM;kBACN,MAAM;;;;cACN,aAAa,EAAE;;;;YACf,MAAM;;;;YACN,OAAO;;;;;;;;aAEP,MAAM,GAAG,MAAM;;;;;;;;;;;sBAOf,MAAM;;;UAKN,QAAQ,GAAG,MAAM,GAAG,WAAW;aAC/B,MAAM;;iCA1Va,sBAAsB"}
@@ -1 +1 @@
-{"version":3,"file":"openai.d.ts","sourceRoot":"","sources":["../src/openai.js"],"names":[],"mappings":"AAIA;IACE,6BAAoC;IAIlC,eAAiC;IAGnC;;;;OAIG;IACH,kBAHW,cAAc,qBAgCxB;IAED;;yFAiCC;IAED;;yFAKC;IAED,oCAEC;IAwBD;;;;;;;MAeC;IAED;;;MAKC;IAID;;;;;;;;;;MAmBC;CA0CF;6BAQA,GAAC,GAAK,KAAK,GACL,SAAS,GACT,WAAW,GACX,aAAa,GACb,YAAY,GACZ,WAAW,GACX,QAAQ,GACR,OAAO,GACP,OAAO,GACP,MAAM,GACN,QAAQ;uBA3NQ,iBAAiB;mBAFrB,QAAQ"}
+{"version":3,"file":"openai.d.ts","sourceRoot":"","sources":["../src/openai.js"],"names":[],"mappings":"AAIA;IACE,6BAAoC;IAIlC,eAAiC;IAGnC;;;;OAIG;IACH,kBAHW,cAAc,qBAgCxB;IAED;;yFAiCC;IAED;;yFAKC;IAED,oCAEC;IAwBD;;;;;;;MAeC;IAED;;;MAKC;IAID;;;;;;;;;;MAmBC;CA2CF;6BAQA,GAAC,GAAK,KAAK,GACL,SAAS,GACT,WAAW,GACX,aAAa,GACb,YAAY,GACZ,WAAW,GACX,QAAQ,GACR,OAAO,GACP,OAAO,GACP,MAAM,GACN,QAAQ;uBA5NQ,iBAAiB;mBAFrB,QAAQ"}