@bedrockio/ai 0.5.1 → 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,3 +1,9 @@
+ ## 0.6.0
+
+ - Moved template rendering out to external package.
+ - Normalized messages input.
+ - MCP use with Anthropic.
+
  ## 0.5.1

  - Added basic api key authorization.
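
Taken together, 0.6.0 moves template rendering into the external `@bedrockio/templates` package, normalizes all prompt input into a `system` string plus a `messages` array, and adds MCP server support to the Anthropic client. For orientation, here is a minimal usage sketch consistent with the diffs below; the `AnthropicClient` export name, the template name, the params, and the MCP server URL are illustrative assumptions, not taken from the package documentation.

```js
// Hypothetical usage of @bedrockio/ai 0.6.0. The export name, template name,
// params, and MCP server URL are assumptions for illustration only.
import { AnthropicClient } from '@bedrockio/ai';

const client = new AnthropicClient({
  // Directory of prompt templates, now rendered via @bedrockio/templates.
  templates: './templates',
  // Credential and model options omitted; they are unchanged in this release.
});

const result = await client.prompt({
  template: 'summarize',               // hypothetical template name
  params: { topic: 'release notes' },  // interpolated by the renderer
  input: 'Summarize the 0.6.0 changes.',
  tools: [
    // OpenAI-style MCP entry; mapped to Anthropic's mcp_servers internally.
    { type: 'mcp', server_label: 'docs', server_url: 'https://example.com/mcp' },
  ],
});
```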
@@ -1,8 +1,8 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
+ const templates_1 = require("@bedrockio/templates");
  const code_js_1 = require("./utils/code.js");
  const json_js_1 = require("./utils/json.js");
- const templates_js_1 = require("./utils/templates.js");
  class BaseClient {
  constructor(options) {
  this.options = {
@@ -10,7 +10,9 @@ class BaseClient {
  model: this.constructor.DEFAULT_MODEL,
  ...options,
  };
- this.templates = null;
+ this.renderer = new templates_1.TemplateRenderer({
+ dir: options.templates,
+ });
  }
  // Public
  /**
@@ -20,8 +22,8 @@
  * @param {PromptOptions} options
  */
  async prompt(options) {
- options = await this.normalizeOptions(options);
- const { input, output, stream, schema } = options;
+ options = this.normalizeOptions(options);
+ const { output, stream, schema } = options;
  const response = await this.runPrompt(options);
  if (!stream) {
  this.debug('Response:', response);
@@ -46,7 +48,7 @@
  if (output === 'messages') {
  return {
  result,
- ...this.getMessagesResponse(input, response),
+ ...this.getMessagesResponse(response, options),
  };
  }
  else {
@@ -60,7 +62,7 @@
  * @returns {AsyncIterator}
  */
  async *stream(options) {
- options = await this.normalizeOptions(options);
+ options = this.normalizeOptions(options);
  const extractor = this.getMessageExtractor(options);
  try {
  const stream = await this.runStream(options);
@@ -103,10 +105,6 @@ class BaseClient {
  };
  }
  }
- async buildTemplate(options) {
- const template = await this.resolveTemplate(options);
- return (0, templates_js_1.renderTemplate)(template, options);
- }
  // Protected
  runPrompt(options) {
  void options;
@@ -142,24 +140,56 @@ class BaseClient {
  throw new Error('Method not implemented.');
  }
  // Private
- async normalizeOptions(options) {
- options = {
- input: '',
- output: 'text',
+ /**
+ * @returns {Object}
+ */
+ normalizeOptions(options) {
+ return {
  ...this.options,
  ...options,
+ ...this.normalizeInputs(options),
+ ...this.normalizeSchema(options),
  };
- options.input = this.normalizeInput(options);
- options.schema = this.normalizeSchema(options);
- options.instructions ||= await this.resolveInstructions(options);
- return options;
  }
- normalizeInput(options) {
- let { input = '', output } = options;
- if (typeof input === 'string') {
- if (output === 'json') {
- input += '\nOutput only valid JSON.';
+ normalizeInputs(options) {
+ const { template, params, output = 'text' } = options;
+ const { sections } = this.renderer.run({
+ params,
+ template,
+ });
+ let system = '';
+ let messages = [];
+ for (let section of sections) {
+ const { title = 'system', content } = section;
+ const role = title.toLowerCase();
+ if (role === 'system') {
+ system += [system, content].join('\n');
  }
+ else {
+ messages = [
+ ...messages,
+ {
+ role,
+ content,
+ },
+ ];
+ }
+ }
+ messages = [...messages, ...this.normalizeInput(options)];
+ if (output === 'json') {
+ system = [system, 'Output only valid JSON.'].join('\n\n');
+ }
+ return {
+ system,
+ messages,
+ };
+ }
+ normalizeInput(options) {
+ let { input = '' } = options;
+ if (!input) {
+ input = [];
+ }
+ else if (typeof input === 'string') {
  input = [
  {
  role: 'user',
@@ -174,6 +204,7 @@ class BaseClient {
  if (!schema) {
  return;
  }
+ let hasWrappedSchema = false;
  // Convert to JSON schema.
  schema = schema.toJSON?.() || schema;
  if (schema?.type === 'array') {
@@ -185,9 +216,12 @@
  required: ['items'],
  additionalProperties: false,
  };
- options.hasWrappedSchema = true;
+ hasWrappedSchema = true;
  }
- return schema;
+ return {
+ schema,
+ hasWrappedSchema,
+ };
  }
  getMessageExtractor(options) {
  const { extractMessages } = options;
@@ -208,21 +242,6 @@
  console.debug(`${message}\n${JSON.stringify(arg, null, 2)}\n`);
  }
  }
- async resolveInstructions(options) {
- if (options.template) {
- const template = await this.resolveTemplate(options);
- return (0, templates_js_1.renderTemplate)(template, options);
- }
- }
- async resolveTemplate(options) {
- const { template } = options;
- await this.loadTemplates();
- return this.templates[template] || template;
- }
- async loadTemplates() {
- const { templates } = this.options;
- this.templates ||= await (0, templates_js_1.loadTemplates)(templates);
- }
  }
  exports.default = BaseClient;
  /**
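
In the new `normalizeInputs` above, the rendered template is split into sections: untitled sections are folded into a single `system` string, titled sections become messages whose role is the lowercased title, a string `input` is appended as a final user message, and `output: 'json'` joins an "Output only valid JSON." instruction onto the system prompt. A small sketch of that data flow follows; the section shape is inferred from the loop above, and exact whitespace handling is elided.

```js
// Sketch only: the section objects returned by TemplateRenderer#run are
// assumed to look like this, based on the destructuring in normalizeInputs.
const sections = [
  { content: 'You are a release-notes assistant.' },          // no title -> system
  { title: 'User', content: 'Summarize the 0.6.0 changes.' }, // title -> message role
];

// normalizeInputs folds the sections into roughly:
const normalized = {
  system: 'You are a release-notes assistant.',
  messages: [{ role: 'user', content: 'Summarize the 0.6.0 changes.' }],
};
// A string `input` option is appended as one more user message, and for
// output: 'json' the sentence 'Output only valid JSON.' is joined onto system.
```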
@@ -23,16 +23,20 @@ class AnthropicClient extends BaseClient_js_1.default {
  }
  async runPrompt(options) {
  const { input, model, temperature, instructions, stream = false, tokens = DEFAULT_TOKENS, } = options;
- // @ts-ignore
- return await this.client.messages.create({
+ const params = {
  model,
  stream,
  temperature,
  max_tokens: tokens,
  system: instructions,
- ...this.getSchemaOptions(options),
  messages: input,
- });
+ ...this.getToolOptions(options),
+ };
+ const clientOptions = this.getClientOptions(params);
+ this.debug('Params:', params);
+ this.debug('Options:', options);
+ // @ts-ignore
+ return await this.client.messages.create(params, clientOptions);
  }
  async runStream(options) {
  return await this.runPrompt({
@@ -53,10 +57,11 @@ class AnthropicClient extends BaseClient_js_1.default {
  });
  return toolBlock?.input || null;
  }
- getMessagesResponse(input, response) {
+ getMessagesResponse(response, options) {
+ const { messages } = options;
  return {
  messages: [
- ...input,
+ ...messages,
  ...response.content
  .filter((item) => {
  return item.type === 'text';
@@ -90,31 +95,57 @@ class AnthropicClient extends BaseClient_js_1.default {
  }
  }
  // Private
- getSchemaOptions(options) {
- const { output } = options;
- if (output?.type) {
- let schema = output;
- if (schema.type === 'array') {
- schema = {
- type: 'object',
- properties: {
- items: schema,
- },
- required: ['items'],
- additionalProperties: false,
- };
- }
+ getToolOptions(options) {
+ let { tools = [], schema } = options;
+ let toolChoice;
+ if (schema) {
+ tools.push({
+ name: 'schema',
+ description: 'Follow the schema for JSON output.',
+ input_schema: schema,
+ });
+ toolChoice = {
+ type: 'tool',
+ name: 'schema',
+ };
+ }
+ else {
+ // The default.
+ toolChoice = {
+ type: 'auto',
+ };
+ }
+ const mcpServers = tools
+ .filter((tool) => {
+ return tool.type === 'mcp';
+ })
+ .map((tool) => {
+ return this.mapMcpTool(tool);
+ });
+ tools = tools.filter((tool) => {
+ return tool.type !== 'mcp';
+ });
+ return {
+ tools,
+ mcp_servers: mcpServers,
+ tool_choice: toolChoice,
+ };
+ }
+ // Map OpenAI-like input of MCP servers as "tools" to
+ // Anthropic's mcp_servers.
+ mapMcpTool(tool) {
+ const { server_label, server_url } = tool;
+ return {
+ type: 'url',
+ name: server_label,
+ url: server_url,
+ };
+ }
+ getClientOptions(params) {
+ if (params.mcp_servers) {
  return {
- tools: [
- {
- name: 'schema',
- description: 'Follow the schema for JSON output.',
- input_schema: schema,
- },
- ],
- tool_choice: {
- type: 'tool',
- name: 'schema',
+ headers: {
+ 'anthropic-beta': 'mcp-client-2025-04-04',
  },
  };
  }
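
The Anthropic client now accepts MCP servers declared as OpenAI-style entries in the `tools` array and splits them out into the `mcp_servers` request parameter, sending the MCP beta header whenever any are present. A standalone sketch of that split; the helper name and example URL are illustrative.

```js
// Sketch of the mapping that getToolOptions/mapMcpTool perform above:
// OpenAI-style { type: 'mcp' } tool entries become Anthropic mcp_servers,
// and the remaining tools are passed through unchanged.
function splitMcpTools(tools = []) {
  const mcpServers = tools
    .filter((tool) => tool.type === 'mcp')
    .map(({ server_label, server_url }) => ({
      type: 'url',
      name: server_label,
      url: server_url,
    }));
  const plainTools = tools.filter((tool) => tool.type !== 'mcp');
  return { tools: plainTools, mcp_servers: mcpServers };
}

// Example (the URL is illustrative):
splitMcpTools([
  { type: 'mcp', server_label: 'docs', server_url: 'https://example.com/mcp' },
]);
// -> { tools: [], mcp_servers: [{ type: 'url', name: 'docs', url: 'https://example.com/mcp' }] }
// When mcp_servers is non-empty, getClientOptions also sets the request header
// 'anthropic-beta': 'mcp-client-2025-04-04'.
```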
@@ -21,11 +21,11 @@ class OpenAiClient extends BaseClient_js_1.default {
  return data.map((o) => o.id);
  }
  async runPrompt(options) {
- const { input, model, tools, verbosity, temperature, instructions, prevResponseId, stream = false, } = options;
+ const { model, tools, verbosity, temperature, prevResponseId, messages: input, system: instructions, stream = false, } = options;
  const params = {
  model,
- input,
  tools,
+ input,
  stream,
  temperature,
  instructions,
@@ -67,10 +67,11 @@ class OpenAiClient extends BaseClient_js_1.default {
  const last = outputs[outputs.length - 1];
  return JSON.parse(last.text);
  }
- getMessagesResponse(input, response) {
+ getMessagesResponse(response, options) {
+ const { messages } = options;
  return {
  messages: [
- ...input,
+ ...messages,
  {
  role: 'assistant',
  content: response.output_text,
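
With `getMessagesResponse` now taking `(response, options)`, the history returned for `output: 'messages'` is built from the normalized `options.messages` plus the assistant reply, so it can be fed straight back into the next call. A hedged sketch of that round trip; client construction is omitted, and it assumes an array `input` passes through normalization unchanged, which the string branch shown earlier suggests but the diff does not show.

```js
// Multi-turn sketch using output: 'messages'. Assumes `client` is an
// already-constructed client from this package and that an array `input`
// is passed through normalization as-is.
const first = await client.prompt({
  input: 'Suggest a name for this release.',
  output: 'messages',
});

// first.messages holds the prior turn plus the assistant reply, so it can be
// used as the next call's history.
const second = await client.prompt({
  input: [...first.messages, { role: 'user', content: 'Make it shorter.' }],
  output: 'messages',
});
```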
@@ -1,6 +1,6 @@
+ import { TemplateRenderer } from '@bedrockio/templates';
  import { parseCode } from './utils/code.js';
  import { createMessageExtractor } from './utils/json.js';
- import { loadTemplates, renderTemplate } from './utils/templates.js';
  export default class BaseClient {
  constructor(options) {
  this.options = {
@@ -8,7 +8,9 @@ export default class BaseClient {
  model: this.constructor.DEFAULT_MODEL,
  ...options,
  };
- this.templates = null;
+ this.renderer = new TemplateRenderer({
+ dir: options.templates,
+ });
  }
  // Public
  /**
@@ -18,8 +20,8 @@ export default class BaseClient {
  * @param {PromptOptions} options
  */
  async prompt(options) {
- options = await this.normalizeOptions(options);
- const { input, output, stream, schema } = options;
+ options = this.normalizeOptions(options);
+ const { output, stream, schema } = options;
  const response = await this.runPrompt(options);
  if (!stream) {
  this.debug('Response:', response);
@@ -44,7 +46,7 @@ export default class BaseClient {
  if (output === 'messages') {
  return {
  result,
- ...this.getMessagesResponse(input, response),
+ ...this.getMessagesResponse(response, options),
  };
  }
  else {
@@ -58,7 +60,7 @@ export default class BaseClient {
  * @returns {AsyncIterator}
  */
  async *stream(options) {
- options = await this.normalizeOptions(options);
+ options = this.normalizeOptions(options);
  const extractor = this.getMessageExtractor(options);
  try {
  const stream = await this.runStream(options);
@@ -101,10 +103,6 @@ export default class BaseClient {
  };
  }
  }
- async buildTemplate(options) {
- const template = await this.resolveTemplate(options);
- return renderTemplate(template, options);
- }
  // Protected
  runPrompt(options) {
  void options;
@@ -140,24 +138,56 @@ export default class BaseClient {
  throw new Error('Method not implemented.');
  }
  // Private
- async normalizeOptions(options) {
- options = {
- input: '',
- output: 'text',
+ /**
+ * @returns {Object}
+ */
+ normalizeOptions(options) {
+ return {
  ...this.options,
  ...options,
+ ...this.normalizeInputs(options),
+ ...this.normalizeSchema(options),
  };
- options.input = this.normalizeInput(options);
- options.schema = this.normalizeSchema(options);
- options.instructions ||= await this.resolveInstructions(options);
- return options;
  }
- normalizeInput(options) {
- let { input = '', output } = options;
- if (typeof input === 'string') {
- if (output === 'json') {
- input += '\nOutput only valid JSON.';
+ normalizeInputs(options) {
+ const { template, params, output = 'text' } = options;
+ const { sections } = this.renderer.run({
+ params,
+ template,
+ });
+ let system = '';
+ let messages = [];
+ for (let section of sections) {
+ const { title = 'system', content } = section;
+ const role = title.toLowerCase();
+ if (role === 'system') {
+ system += [system, content].join('\n');
  }
+ else {
+ messages = [
+ ...messages,
+ {
+ role,
+ content,
+ },
+ ];
+ }
+ }
+ messages = [...messages, ...this.normalizeInput(options)];
+ if (output === 'json') {
+ system = [system, 'Output only valid JSON.'].join('\n\n');
+ }
+ return {
+ system,
+ messages,
+ };
+ }
+ normalizeInput(options) {
+ let { input = '' } = options;
+ if (!input) {
+ input = [];
+ }
+ else if (typeof input === 'string') {
  input = [
  {
  role: 'user',
@@ -172,6 +202,7 @@ export default class BaseClient {
  if (!schema) {
  return;
  }
+ let hasWrappedSchema = false;
  // Convert to JSON schema.
  schema = schema.toJSON?.() || schema;
  if (schema?.type === 'array') {
@@ -183,9 +214,12 @@
  required: ['items'],
  additionalProperties: false,
  };
- options.hasWrappedSchema = true;
+ hasWrappedSchema = true;
  }
- return schema;
+ return {
+ schema,
+ hasWrappedSchema,
+ };
  }
  getMessageExtractor(options) {
  const { extractMessages } = options;
@@ -206,21 +240,6 @@ export default class BaseClient {
  console.debug(`${message}\n${JSON.stringify(arg, null, 2)}\n`);
  }
  }
- async resolveInstructions(options) {
- if (options.template) {
- const template = await this.resolveTemplate(options);
- return renderTemplate(template, options);
- }
- }
- async resolveTemplate(options) {
- const { template } = options;
- await this.loadTemplates();
- return this.templates[template] || template;
- }
- async loadTemplates() {
- const { templates } = this.options;
- this.templates ||= await loadTemplates(templates);
- }
  }
  /**
  * @typedef {Object} PromptOptions
@@ -17,16 +17,20 @@ export class AnthropicClient extends BaseClient {
  }
  async runPrompt(options) {
  const { input, model, temperature, instructions, stream = false, tokens = DEFAULT_TOKENS, } = options;
- // @ts-ignore
- return await this.client.messages.create({
+ const params = {
  model,
  stream,
  temperature,
  max_tokens: tokens,
  system: instructions,
- ...this.getSchemaOptions(options),
  messages: input,
- });
+ ...this.getToolOptions(options),
+ };
+ const clientOptions = this.getClientOptions(params);
+ this.debug('Params:', params);
+ this.debug('Options:', options);
+ // @ts-ignore
+ return await this.client.messages.create(params, clientOptions);
  }
  async runStream(options) {
  return await this.runPrompt({
@@ -47,10 +51,11 @@ export class AnthropicClient extends BaseClient {
  });
  return toolBlock?.input || null;
  }
- getMessagesResponse(input, response) {
+ getMessagesResponse(response, options) {
+ const { messages } = options;
  return {
  messages: [
- ...input,
+ ...messages,
  ...response.content
  .filter((item) => {
  return item.type === 'text';
@@ -84,31 +89,57 @@ export class AnthropicClient extends BaseClient {
  }
  }
  // Private
- getSchemaOptions(options) {
- const { output } = options;
- if (output?.type) {
- let schema = output;
- if (schema.type === 'array') {
- schema = {
- type: 'object',
- properties: {
- items: schema,
- },
- required: ['items'],
- additionalProperties: false,
- };
- }
+ getToolOptions(options) {
+ let { tools = [], schema } = options;
+ let toolChoice;
+ if (schema) {
+ tools.push({
+ name: 'schema',
+ description: 'Follow the schema for JSON output.',
+ input_schema: schema,
+ });
+ toolChoice = {
+ type: 'tool',
+ name: 'schema',
+ };
+ }
+ else {
+ // The default.
+ toolChoice = {
+ type: 'auto',
+ };
+ }
+ const mcpServers = tools
+ .filter((tool) => {
+ return tool.type === 'mcp';
+ })
+ .map((tool) => {
+ return this.mapMcpTool(tool);
+ });
+ tools = tools.filter((tool) => {
+ return tool.type !== 'mcp';
+ });
+ return {
+ tools,
+ mcp_servers: mcpServers,
+ tool_choice: toolChoice,
+ };
+ }
+ // Map OpenAI-like input of MCP servers as "tools" to
+ // Anthropic's mcp_servers.
+ mapMcpTool(tool) {
+ const { server_label, server_url } = tool;
+ return {
+ type: 'url',
+ name: server_label,
+ url: server_url,
+ };
+ }
+ getClientOptions(params) {
+ if (params.mcp_servers) {
  return {
- tools: [
- {
- name: 'schema',
- description: 'Follow the schema for JSON output.',
- input_schema: schema,
- },
- ],
- tool_choice: {
- type: 'tool',
- name: 'schema',
+ headers: {
+ 'anthropic-beta': 'mcp-client-2025-04-04',
  },
  };
  }
@@ -15,11 +15,11 @@ export class OpenAiClient extends BaseClient {
  return data.map((o) => o.id);
  }
  async runPrompt(options) {
- const { input, model, tools, verbosity, temperature, instructions, prevResponseId, stream = false, } = options;
+ const { model, tools, verbosity, temperature, prevResponseId, messages: input, system: instructions, stream = false, } = options;
  const params = {
  model,
- input,
  tools,
+ input,
  stream,
  temperature,
  instructions,
@@ -61,10 +61,11 @@ export class OpenAiClient extends BaseClient {
  const last = outputs[outputs.length - 1];
  return JSON.parse(last.text);
  }
- getMessagesResponse(input, response) {
+ getMessagesResponse(response, options) {
+ const { messages } = options;
  return {
  messages: [
- ...input,
+ ...messages,
  {
  role: 'assistant',
  content: response.output_text,
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@bedrockio/ai",
- "version": "0.5.1",
+ "version": "0.6.0",
  "description": "Bedrock wrapper for common AI chatbots.",
  "type": "module",
  "scripts": {
@@ -35,11 +35,10 @@
  "url": "https://github.com/bedrockio/router"
  },
  "dependencies": {
- "@anthropic-ai/sdk": "^0.65.0",
+ "@anthropic-ai/sdk": "^0.66.0",
+ "@bedrockio/templates": "^0.1.0",
  "@google/generative-ai": "^0.21.0",
- "glob": "^11.0.1",
- "mustache": "^4.2.0",
- "openai": "^6.1.0",
+ "openai": "^6.3.0",
  "partial-json": "^0.1.7"
  },
  "devDependencies": {
@@ -1,7 +1,7 @@
  export default class BaseClient {
  constructor(options: any);
  options: any;
- templates: any;
+ renderer: TemplateRenderer;
  /**
  * Interpolates vars into the provided template as instructions and runs the
  * prompt.
@@ -16,7 +16,6 @@ export default class BaseClient {
  * @returns {AsyncIterator}
  */
  stream(options: PromptOptions & StreamOptions): AsyncIterator<any, any, any>;
- buildTemplate(options: any): Promise<any>;
  runPrompt(options: any): void;
  runStream(options: any): void;
  getTextResponse(response: any): void;
@@ -32,14 +31,21 @@ export default class BaseClient {
  * @returns {Object}
  */
  normalizeStreamEvent(event: any): any;
- normalizeOptions(options: any): Promise<any>;
+ /**
+ * @returns {Object}
+ */
+ normalizeOptions(options: any): any;
+ normalizeInputs(options: any): {
+ system: string;
+ messages: any[];
+ };
  normalizeInput(options: any): any;
- normalizeSchema(options: any): any;
+ normalizeSchema(options: any): {
+ schema: any;
+ hasWrappedSchema: boolean;
+ };
  getMessageExtractor(options: any): (event: any) => any;
  debug(message: any, arg: any): void;
- resolveInstructions(options: any): Promise<any>;
- resolveTemplate(options: any): Promise<any>;
- loadTemplates(): Promise<void>;
  }
  export type PromptOptions = {
  /**
@@ -78,4 +84,5 @@ export type PromptMessage = {
  role: "system" | "user" | "assistant";
  content: string;
  };
+ import { TemplateRenderer } from '@bedrockio/templates';
  //# sourceMappingURL=BaseClient.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"BaseClient.d.ts","sourceRoot":"","sources":["../src/BaseClient.js"],"names":[],"mappings":"AAIA;IACE,0BAOC;IANC,aAIC;IACD,eAAqB;IAKvB;;;;;OAKG;IACH,gBAFW,aAAa,gBAuCvB;IAED;;;;;OAKG;IACH,gBAHW,aAAa,GAAG,aAAa,gCAsDvC;IAED,0CAGC;IAID,8BAGC;IAED,8BAGC;IAED,qCAGC;IAED;;OAEG;IACH,0CAGC;IAED;;OAEG;IACH,oDAGC;IAED;;OAEG;IACH,sCAGC;IAID,6CAaC;IAED,kCAiBC;IAED,mCAuBC;IAED,uDAWC;IAED,oCAMC;IAED,gDAKC;IAED,4CAIC;IAED,+BAGC;CACF;;;;;WAIa,MAAM,GAAC,aAAa,EAAE;;;;YACtB,MAAM;;;;YACN,OAAO;;;;;;;;aAEP,KAAK,GAAG,MAAM,GAAG,MAAM,GAAG,UAAU;;;;;;;;;;;sBAOpC,MAAM;;;UAKN,QAAQ,GAAG,MAAM,GAAG,WAAW;aAC/B,MAAM"}
+ {"version":3,"file":"BaseClient.d.ts","sourceRoot":"","sources":["../src/BaseClient.js"],"names":[],"mappings":"AAKA;IACE,0BASC;IARC,aAIC;IACD,2BAEE;IAKJ;;;;;OAKG;IACH,gBAFW,aAAa,gBAuCvB;IAED;;;;;OAKG;IACH,gBAHW,aAAa,GAAG,aAAa,gCAsDvC;IAID,8BAGC;IAED,8BAGC;IAED,qCAGC;IAED;;OAEG;IACH,0CAGC;IAED;;OAEG;IACH,oDAGC;IAED;;OAEG;IACH,sCAGC;IAID;;OAEG;IACH,oCAOC;IAED;;;MAsCC;IAED,kCAeC;IAED;;;MA4BC;IAED,uDAWC;IAED,oCAMC;CACF;;;;;WAIa,MAAM,GAAC,aAAa,EAAE;;;;YACtB,MAAM;;;;YACN,OAAO;;;;;;;;aAEP,KAAK,GAAG,MAAM,GAAG,MAAM,GAAG,UAAU;;;;;;;;;;;sBAOpC,MAAM;;;UAKN,QAAQ,GAAG,MAAM,GAAG,WAAW;aAC/B,MAAM;;iCAlTa,sBAAsB"}
@@ -13,7 +13,7 @@ export class AnthropicClient extends BaseClient {
  _request_id?: string | null;
  } & import("@anthropic-ai/sdk/core/streaming.js").Stream<Anthropic.Messages.RawMessageStreamEvent>>;
  getTextResponse(response: any): any;
- getMessagesResponse(input: any, response: any): {
+ getMessagesResponse(response: any, options: any): {
  messages: any[];
  };
  normalizeStreamEvent(event: any): {
@@ -23,15 +23,25 @@ export class AnthropicClient extends BaseClient {
  type: string;
  text: any;
  };
- getSchemaOptions(options: any): {
- tools: {
- name: string;
- description: string;
- input_schema: any;
- }[];
+ getToolOptions(options: any): {
+ tools: any;
+ mcp_servers: any;
  tool_choice: {
  type: string;
  name: string;
+ } | {
+ type: string;
+ name?: undefined;
+ };
+ };
+ mapMcpTool(tool: any): {
+ type: string;
+ name: any;
+ url: any;
+ };
+ getClientOptions(params: any): {
+ headers: {
+ 'anthropic-beta': string;
  };
  };
  }
@@ -1 +1 @@
- {"version":3,"file":"anthropic.d.ts","sourceRoot":"","sources":["../src/anthropic.js"],"names":[],"mappings":"AAMA;IACE,6BAA2C;IAIzC,kBAAoC;IAGtC;;;OAGG;IACH,4BAGC;IAED;;wGAoBC;IAED;;wGAMC;IAED,oCAKC;IASD;;MAgBC;IAED;;;;;;MAgBC;IAID;;;;;;;;;;MA8BC;CACF;uBAtIsB,iBAAiB;sBAFlB,mBAAmB"}
+ {"version":3,"file":"anthropic.d.ts","sourceRoot":"","sources":["../src/anthropic.js"],"names":[],"mappings":"AAMA;IACE,6BAA2C;IAIzC,kBAAoC;IAGtC;;;OAGG;IACH,4BAGC;IAED;;wGA2BC;IAED;;wGAMC;IAED,oCAKC;IASD;;MAiBC;IAED;;;;;;MAgBC;IAID;;;;;;;;;;MAsCC;IAID;;;;MAOC;IAED;;;;MAQC;CACF;uBA3KsB,iBAAiB;sBAFlB,mBAAmB"}
package/types/openai.d.ts CHANGED
@@ -13,7 +13,7 @@ export class OpenAiClient extends BaseClient {
  _request_id?: string | null;
  } & import("openai/core/streaming.js").Stream<OpenAI.Responses.ResponseStreamEvent>>;
  getTextResponse(response: any): any;
- getMessagesResponse(input: any, response: any): {
+ getMessagesResponse(response: any, options: any): {
  messages: any[];
  prevResponseId: any;
  };
@@ -1 +1 @@
- {"version":3,"file":"openai.d.ts","sourceRoot":"","sources":["../src/openai.js"],"names":[],"mappings":"AAIA;IACE,6BAAoC;IAIlC,eAAiC;IAGnC;;;OAGG;IACH,4BAGC;IAED;;yFA+BC;IAED;;yFAKC;IAED,oCAEC;IAwBD;;;MAaC;IAID;;;;;;;;;;MAmBC;IAED;;;;;;;;;;;;;;;;;;;;;;;;MAyBC;CACF;uBArJsB,iBAAiB;mBAFrB,QAAQ"}
+ {"version":3,"file":"openai.d.ts","sourceRoot":"","sources":["../src/openai.js"],"names":[],"mappings":"AAIA;IACE,6BAAoC;IAIlC,eAAiC;IAGnC;;;OAGG;IACH,4BAGC;IAED;;yFA+BC;IAED;;yFAKC;IAED,oCAEC;IAwBD;;;MAcC;IAID;;;;;;;;;;MAmBC;IAED;;;;;;;;;;;;;;;;;;;;;;;;MAyBC;CACF;uBAtJsB,iBAAiB;mBAFrB,QAAQ"}
@@ -1,83 +0,0 @@
- "use strict";
- var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.loadTemplates = loadTemplates;
- exports.renderTemplate = renderTemplate;
- const promises_1 = __importDefault(require("fs/promises"));
- const path_1 = __importDefault(require("path"));
- const glob_1 = require("glob");
- const mustache_1 = __importDefault(require("mustache"));
- async function loadTemplates(dir) {
- const result = {};
- const files = await (0, glob_1.glob)(path_1.default.join(dir, '*.md'));
- if (!files.length) {
- throw new Error(`No templates found in: ${dir}.`);
- }
- for (let file of files) {
- const base = path_1.default.basename(file, '.md');
- result[base] = await loadTemplate(file);
- }
- return result;
- }
- function renderTemplate(template, options) {
- let params = {
- ...options,
- ...options.params,
- };
- params = mapObjects(params);
- params = wrapProxy(params);
- return mustache_1.default.render(template, params);
- }
- // Utils
- async function loadTemplate(file) {
- return await promises_1.default.readFile(file, 'utf-8');
- }
- // Transform arrays and object to versions
- // that are more understandable in the context
- // of a template that may have meaningful whitespace.
- function mapObjects(params) {
- const result = {};
- for (let [key, value] of Object.entries(params)) {
- if (Array.isArray(value)) {
- value = mapArray(value);
- }
- else if (typeof value === 'object') {
- value = JSON.stringify(value, null, 2);
- }
- result[key] = value;
- }
- return result;
- }
- function mapArray(arr) {
- // Only map simple arrays of primitives.
- if (typeof arr[0] === 'string') {
- arr = arr
- .map((el) => {
- return `- ${el}`;
- })
- .join('\n');
- }
- return arr;
- }
- // Wrap params with a proxy object that reports
- // as having all properties. If one is accessed
- // that does not exist then return the original
- // token. This way templates can be partially
- // interpolated and re-interpolated later.
- function wrapProxy(params) {
- return new Proxy(params, {
- has() {
- return true;
- },
- get(target, prop) {
- if (prop in target) {
- return target[prop];
- }
- else {
- return `{{{${prop.toString()}}}}`;
- }
- },
- });
- }
@@ -1,76 +0,0 @@
- import fs from 'fs/promises';
- import path from 'path';
- import { glob } from 'glob';
- import Mustache from 'mustache';
- export async function loadTemplates(dir) {
- const result = {};
- const files = await glob(path.join(dir, '*.md'));
- if (!files.length) {
- throw new Error(`No templates found in: ${dir}.`);
- }
- for (let file of files) {
- const base = path.basename(file, '.md');
- result[base] = await loadTemplate(file);
- }
- return result;
- }
- export function renderTemplate(template, options) {
- let params = {
- ...options,
- ...options.params,
- };
- params = mapObjects(params);
- params = wrapProxy(params);
- return Mustache.render(template, params);
- }
- // Utils
- async function loadTemplate(file) {
- return await fs.readFile(file, 'utf-8');
- }
- // Transform arrays and object to versions
- // that are more understandable in the context
- // of a template that may have meaningful whitespace.
- function mapObjects(params) {
- const result = {};
- for (let [key, value] of Object.entries(params)) {
- if (Array.isArray(value)) {
- value = mapArray(value);
- }
- else if (typeof value === 'object') {
- value = JSON.stringify(value, null, 2);
- }
- result[key] = value;
- }
- return result;
- }
- function mapArray(arr) {
- // Only map simple arrays of primitives.
- if (typeof arr[0] === 'string') {
- arr = arr
- .map((el) => {
- return `- ${el}`;
- })
- .join('\n');
- }
- return arr;
- }
- // Wrap params with a proxy object that reports
- // as having all properties. If one is accessed
- // that does not exist then return the original
- // token. This way templates can be partially
- // interpolated and re-interpolated later.
- function wrapProxy(params) {
- return new Proxy(params, {
- has() {
- return true;
- },
- get(target, prop) {
- if (prop in target) {
- return target[prop];
- }
- else {
- return `{{{${prop.toString()}}}}`;
- }
- },
- });
- }