@bedrockio/ai 0.2.1 → 0.4.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63)
  1. package/.claude/settings.local.json +11 -0
  2. package/CHANGELOG.md +34 -0
  3. package/README.md +59 -17
  4. package/__mocks__/@anthropic-ai/sdk.js +16 -22
  5. package/__mocks__/@google/generative-ai.js +1 -1
  6. package/__mocks__/openai.js +33 -28
  7. package/dist/cjs/BaseClient.js +242 -126
  8. package/dist/cjs/anthropic.js +115 -93
  9. package/dist/cjs/google.js +74 -86
  10. package/dist/cjs/index.js +24 -25
  11. package/dist/cjs/openai.js +114 -69
  12. package/dist/cjs/package.json +1 -0
  13. package/dist/cjs/utils/code.js +11 -0
  14. package/dist/cjs/utils/json.js +53 -0
  15. package/dist/cjs/utils/templates.js +83 -0
  16. package/dist/cjs/xai.js +14 -0
  17. package/dist/esm/BaseClient.js +243 -0
  18. package/dist/esm/anthropic.js +116 -0
  19. package/dist/esm/google.js +75 -0
  20. package/dist/esm/index.js +25 -0
  21. package/dist/esm/openai.js +113 -0
  22. package/dist/esm/utils/code.js +8 -0
  23. package/dist/esm/utils/json.js +50 -0
  24. package/dist/esm/utils/templates.js +76 -0
  25. package/dist/esm/xai.js +10 -0
  26. package/eslint.config.js +2 -0
  27. package/package.json +18 -17
  28. package/src/BaseClient.js +239 -89
  29. package/src/anthropic.js +96 -56
  30. package/src/google.js +6 -12
  31. package/src/index.js +20 -16
  32. package/src/openai.js +97 -31
  33. package/src/utils/code.js +9 -0
  34. package/src/utils/json.js +58 -0
  35. package/src/utils/templates.js +87 -0
  36. package/src/xai.js +12 -0
  37. package/tsconfig.cjs.json +8 -0
  38. package/tsconfig.esm.json +8 -0
  39. package/tsconfig.types.json +9 -0
  40. package/types/BaseClient.d.ts +68 -26
  41. package/types/BaseClient.d.ts.map +1 -1
  42. package/types/anthropic.d.ts +26 -2
  43. package/types/anthropic.d.ts.map +1 -1
  44. package/types/google.d.ts.map +1 -1
  45. package/types/index.d.ts +4 -3
  46. package/types/index.d.ts.map +1 -1
  47. package/types/openai.d.ts +45 -2
  48. package/types/openai.d.ts.map +1 -1
  49. package/types/util.d.ts +1 -1
  50. package/types/util.d.ts.map +1 -1
  51. package/types/utils/code.d.ts +2 -0
  52. package/types/utils/code.d.ts.map +1 -0
  53. package/types/utils/json.d.ts +2 -0
  54. package/types/utils/json.d.ts.map +1 -0
  55. package/types/utils/templates.d.ts +3 -0
  56. package/types/utils/templates.d.ts.map +1 -0
  57. package/types/utils.d.ts +4 -0
  58. package/types/utils.d.ts.map +1 -0
  59. package/types/xai.d.ts +4 -0
  60. package/types/xai.d.ts.map +1 -0
  61. package/vitest.config.js +10 -0
  62. package/dist/cjs/util.js +0 -47
  63. package/src/util.js +0 -42
package/src/google.js CHANGED
@@ -1,7 +1,6 @@
1
1
  import { GoogleGenerativeAI } from '@google/generative-ai';
2
2
 
3
3
  import BaseClient from './BaseClient.js';
4
- import { transformResponse } from './util.js';
5
4
 
6
5
  const DEFAULT_MODEL = 'models/gemini-2.0-flash-exp';
7
6
 
@@ -29,8 +28,11 @@ export class GoogleClient extends BaseClient {
29
28
  const { model = DEFAULT_MODEL, output = 'text', stream = false } = options;
30
29
  const { client } = this;
31
30
 
32
- const generator = client.getGenerativeModel({ model });
31
+ const generator = client.getGenerativeModel({
32
+ model,
33
+ });
33
34
 
35
+ // @ts-ignore
34
36
  const messages = await this.getMessages(options);
35
37
 
36
38
  const prompts = messages.map((message) => {
@@ -44,11 +46,6 @@ export class GoogleClient extends BaseClient {
44
46
  } else {
45
47
  response = await generator.generateContent(prompts);
46
48
  }
47
- // const response = await client.chat.completions.create({
48
- // model,
49
- // messages,
50
- // stream,
51
- // });
52
49
 
53
50
  if (output === 'raw') {
54
51
  return response;
@@ -60,13 +57,10 @@ export class GoogleClient extends BaseClient {
60
57
  });
61
58
  const [message] = parts;
62
59
 
63
- return transformResponse({
64
- ...options,
65
- messages,
66
- message,
67
- });
60
+ return message;
68
61
  }
69
62
  async getStream(options) {
63
+ // @ts-ignore
70
64
  const response = await super.getStream(options);
71
65
  // @ts-ignore
72
66
  return response.stream;
package/src/index.js CHANGED
@@ -1,20 +1,24 @@
1
- import { OpenAiClient } from './openai.js';
2
- import { GoogleClient } from './google.js';
3
1
  import { AnthropicClient } from './anthropic.js';
2
+ import { GoogleClient } from './google.js';
3
+ import { OpenAiClient } from './openai.js';
4
+ import { XAiClient } from './xai.js';
5
+
6
+ export function createClient(options = {}) {
7
+ const { platform } = options;
8
+
9
+ if (!platform) {
10
+ throw new Error('No platform specified.');
11
+ }
4
12
 
5
- export class Client {
6
- constructor(options) {
7
- const { platform, ...rest } = options;
8
- if (platform === 'openai' || platform === 'gpt') {
9
- return new OpenAiClient(rest);
10
- } else if (platform === 'google' || platform === 'gemini') {
11
- return new GoogleClient(rest);
12
- } else if (platform === 'anthropic' || platform === 'claude') {
13
- return new AnthropicClient(rest);
14
- } else if (platform) {
15
- throw new Error(`Unknown platform "${platform}".`);
16
- } else {
17
- throw new Error('Platform required.');
18
- }
13
+ if (platform === 'openai' || platform === 'gpt') {
14
+ return new OpenAiClient(options);
15
+ } else if (platform === 'google' || platform === 'gemini') {
16
+ return new GoogleClient(options);
17
+ } else if (platform === 'anthropic' || platform === 'claude') {
18
+ return new AnthropicClient(options);
19
+ } else if (platform === 'xai' || platform === 'grok') {
20
+ return new XAiClient(options);
21
+ } else if (platform) {
22
+ throw new Error(`Unknown platform "${platform}".`);
19
23
  }
20
24
  }
package/src/openai.js CHANGED
@@ -1,16 +1,13 @@
1
1
  import OpenAI from 'openai';
2
2
 
3
3
  import BaseClient from './BaseClient.js';
4
- import { transformResponse } from './util.js';
5
-
6
- const DEFAULT_MODEL = 'gpt-4o';
7
4
 
8
5
  export class OpenAiClient extends BaseClient {
6
+ static DEFAULT_MODEL = 'gpt-5-nano';
7
+
9
8
  constructor(options) {
10
9
  super(options);
11
- this.client = new OpenAI({
12
- ...options,
13
- });
10
+ this.client = new OpenAI(options);
14
11
  }
15
12
 
16
13
  /**
@@ -22,46 +19,115 @@ export class OpenAiClient extends BaseClient {
22
19
  return data.map((o) => o.id);
23
20
  }
24
21
 
25
- async getCompletion(options) {
26
- const { model = DEFAULT_MODEL, output = 'text', stream = false } = options;
27
- const { client } = this;
22
+ async runPrompt(options) {
23
+ const {
24
+ input,
25
+ model,
26
+ tools,
27
+ verbosity,
28
+ temperature,
29
+ instructions,
30
+ prevResponseId,
31
+ stream = false,
32
+ } = options;
28
33
 
29
- const messages = await this.getMessages(options);
30
- const response = await client.chat.completions.create({
34
+ const params = {
31
35
  model,
32
- messages,
36
+ input,
37
+ tools,
33
38
  stream,
34
- });
39
+ temperature,
40
+ instructions,
41
+ previous_response_id: prevResponseId,
35
42
 
36
- if (output === 'raw') {
37
- return response;
38
- }
43
+ text: {
44
+ format: this.getOutputFormat(options),
45
+ verbosity,
46
+ },
47
+ };
39
48
 
40
- const { message } = response.choices[0];
49
+ this.debug('Params:', params);
41
50
 
42
- return transformResponse({
51
+ // @ts-ignore
52
+ return await this.client.responses.create(params);
53
+ }
54
+
55
+ async runStream(options) {
56
+ return await this.runPrompt({
43
57
  ...options,
44
- messages,
45
- message,
58
+ stream: true,
46
59
  });
47
60
  }
48
61
 
49
- getStreamedChunk(chunk, started) {
50
- const [choice] = chunk.choices;
62
+ getTextResponse(response) {
63
+ return response.output_text;
64
+ }
65
+
66
+ getStructuredResponse(response) {
67
+ return JSON.parse(response.output_text);
68
+ }
69
+
70
+ getMessagesResponse(input, response) {
71
+ return {
72
+ messages: [
73
+ ...input,
74
+ {
75
+ role: 'assistant',
76
+ content: response.output_text,
77
+ },
78
+ ],
79
+ // Note that this ability currently only
80
+ // exists for OpenAI compatible providers.
81
+ prevResponseId: response.id,
82
+ };
83
+ }
84
+
85
+ // Private
51
86
 
52
- let type;
53
- if (!started) {
54
- type = 'start';
55
- } else if (choice.finish_reason === 'stop') {
56
- type = 'stop';
87
+ getOutputFormat(options) {
88
+ let { output, schema } = options;
89
+ if (output === 'json') {
90
+ return {
91
+ type: 'json_object',
92
+ };
93
+ } else if (schema) {
94
+ return {
95
+ type: 'json_schema',
96
+ // Name is required but arbitrary.
97
+ name: 'schema',
98
+ strict: true,
99
+ schema,
100
+ };
57
101
  } else {
58
- type = 'chunk';
102
+ return {
103
+ type: 'text',
104
+ };
59
105
  }
106
+ }
60
107
 
61
- if (type) {
108
+ normalizeStreamEvent(event) {
109
+ const { type } = event;
110
+
111
+ if (type === 'response.created') {
112
+ return {
113
+ type: 'start',
114
+ id: event.response.id,
115
+ };
116
+ } else if (type === 'response.completed') {
117
+ return {
118
+ type: 'stop',
119
+ id: event.response.id,
120
+ usage: event.response.usage,
121
+ };
122
+ } else if (type === 'response.output_text.delta') {
123
+ return {
124
+ type: 'delta',
125
+ delta: event.delta,
126
+ };
127
+ } else if (type === 'response.output_text.done') {
62
128
  return {
63
- type,
64
- text: choice.delta.content || '',
129
+ type: 'done',
130
+ text: event.text,
65
131
  };
66
132
  }
67
133
  }
@@ -0,0 +1,9 @@
1
// Matches a whole markdown code fence: opening ``` with an optional
// language tag, the fenced content, and a closing ``` at the end of
// the (trimmed) string. The `s` flag lets `.` span newlines.
const CODE_REG = /^```\w*(.+)```$/s;

/**
 * Strips a surrounding markdown code fence from `content`, returning
 * the inner code. If the input is not a single fenced block (including
 * when text follows the closing fence), the input is returned unchanged.
 *
 * @param {string} content
 * @returns {string}
 */
export function parseCode(content) {
  // Anchoring with `$` ensures we only unwrap when the fence spans the
  // whole string; previously trailing text after the closing fence was
  // silently dropped.
  const match = content.trim().match(CODE_REG);
  if (match) {
    content = match[1].trim();
  }
  return content;
}
@@ -0,0 +1,58 @@
1
+ import { OBJ, STR, parse } from 'partial-json';
2
+
3
/**
 * Builds a stream handler that accumulates JSON deltas and extracts
 * the partial values for each of the given keys as they arrive.
 *
 * @param {string[]} keys - Keys in the streamed JSON object to watch.
 * @returns {(delta: string) => object[]} Handler that takes the next
 *   streamed chunk and returns the extractions that produced output.
 */
export function createMessageExtractor(keys) {
  let buffer = '';
  const extractors = keys.map((key) => createExtractor(key));

  return (delta) => {
    buffer += delta;
    const results = [];
    for (const extract of extractors) {
      const extracted = extract(buffer);
      if (extracted) {
        results.push(extracted);
      }
    }
    return results;
  };
}
19
+
20
// Creates a stateful extractor for a single key. Each call receives the
// full accumulated buffer (not just the latest delta) and returns the
// current text for that key plus the newly-added portion, or undefined
// once extraction has finished or nothing is available yet.
function createExtractor(key) {
  let lastText = '';
  let done = false;
  return (buffer) => {
    if (done) {
      return;
    }

    const text = extractText(buffer, key);

    if (!text) {
      return;
    }

    // Don't finish while the buffer has whitespace as it
    // may be in the middle of trying to extract.
    // NOTE(review): this only checks for a single trailing space, not
    // other whitespace ("\n", "\t") — confirm that is intentional.
    if (text === lastText && !buffer.endsWith(' ')) {
      done = true;
    }
    // Only the part that arrived since the previous call.
    const delta = text.slice(lastText.length);

    lastText = text;

    return {
      key,
      text,
      delta,
      done,
    };
  };
}
51
+
52
// Pulls the current (possibly partial) string value for `key` out of an
// incomplete JSON buffer. Returns '' when the key is not present yet and
// undefined for an empty buffer.
function extractText(input, key) {
  if (!input) {
    return;
  }
  return parse(input, STR | OBJ)?.[key] || '';
}
@@ -0,0 +1,87 @@
1
+ import fs from 'fs/promises';
2
+ import path from 'path';
3
+
4
+ import { glob } from 'glob';
5
+ import Mustache from 'mustache';
6
+
7
+ export async function loadTemplates(dir) {
8
+ const result = {};
9
+ const files = await glob(path.join(dir, '*.md'));
10
+
11
+ if (!files.length) {
12
+ throw new Error(`No templates found in: ${dir}.`);
13
+ }
14
+
15
+ for (let file of files) {
16
+ const base = path.basename(file, '.md');
17
+ result[base] = await loadTemplate(file);
18
+ }
19
+
20
+ return result;
21
+ }
22
+
23
+ export function renderTemplate(template, options) {
24
+ let params = {
25
+ ...options,
26
+ ...options.params,
27
+ };
28
+
29
+ params = mapObjects(params);
30
+ params = wrapProxy(params);
31
+ return Mustache.render(template, params);
32
+ }
33
+
34
+ // Utils
35
+
36
+ async function loadTemplate(file) {
37
+ return await fs.readFile(file, 'utf-8');
38
+ }
39
+
40
// Transform arrays and objects to versions that are more
// understandable in the context of a template that may have
// meaningful whitespace.
function mapObjects(params) {
  const result = {};
  for (let [key, value] of Object.entries(params)) {
    if (Array.isArray(value)) {
      value = mapArray(value);
    } else if (value !== null && typeof value === 'object') {
      // Guard against null: `typeof null === 'object'`, and stringifying
      // it would render the literal text "null" into templates.
      value = JSON.stringify(value, null, 2);
    }
    result[key] = value;
  }
  return result;
}

// Renders an array as a markdown bullet list, but only for simple
// arrays of primitives (detected via the first element); anything
// else is passed through unchanged.
function mapArray(arr) {
  if (typeof arr[0] === 'string') {
    arr = arr
      .map((el) => {
        return `- ${el}`;
      })
      .join('\n');
  }
  return arr;
}
67
+
68
// Wrap params with a proxy object that reports as having all
// properties. If one is accessed that does not exist then the
// original token is returned. This way templates can be partially
// interpolated and re-interpolated later.
function wrapProxy(params) {
  const handler = {
    has() {
      return true;
    },

    get(target, prop) {
      return prop in target ? target[prop] : `{{{${String(prop)}}}}`;
    },
  };
  return new Proxy(params, handler);
}
package/src/xai.js ADDED
@@ -0,0 +1,12 @@
1
+ import { OpenAiClient } from './openai.js';
2
+
3
/**
 * Client for the xAI (Grok) platform. xAI exposes an
 * OpenAI-compatible API, so this simply points the OpenAI
 * client at the x.ai endpoint.
 */
export class XAiClient extends OpenAiClient {
  static DEFAULT_MODEL = 'grok-4-fast';

  constructor(options) {
    // baseURL is set after the spread so callers cannot override it.
    const config = {
      ...options,
      baseURL: 'https://api.x.ai/v1',
    };
    super(config);
  }
}
@@ -0,0 +1,8 @@
1
+ {
2
+ "extends": "./tsconfig.json",
3
+ "compilerOptions": {
4
+ "target": "ES2022",
5
+ "module": "CommonJS",
6
+ "outDir": "dist/cjs"
7
+ }
8
+ }
@@ -0,0 +1,8 @@
1
+ {
2
+ "extends": "./tsconfig.json",
3
+ "compilerOptions": {
4
+ "module": "ESNext",
5
+ "outDir": "dist/esm",
6
+ "useDefineForClassFields": true
7
+ }
8
+ }
@@ -0,0 +1,9 @@
1
+ {
2
+ "extends": "./tsconfig.json",
3
+ "compilerOptions": {
4
+ "outDir": "types",
5
+ "declaration": true,
6
+ "declarationMap": true,
7
+ "emitDeclarationOnly": true,
8
+ }
9
+ }
@@ -3,37 +3,79 @@ export default class BaseClient {
3
3
  options: any;
4
4
  templates: any;
5
5
  /**
6
- * Interpolates vars into the provided template and
7
- * runs the chat completion. The "output" option may
8
- * be omitted and will default to `"text"`.
9
- * {@link https://github.com/bedrockio/ai?tab=readme-ov-file#bedrockioai Documentation}
6
+ * Interpolates vars into the provided template as instructions and runs the
7
+ * prompt.
10
8
  *
11
- * @param {object} options
12
- * @param {string} options.model - The model to use.
13
- * @param {"raw" | "text" | "json" | "messages"} [options.output] - The output to use.
14
- * @param {Object.<string, any>} [options.other] - Additional props
15
- * will be interpolated in the template.
16
- */
17
- prompt(options: {
18
- model: string;
19
- output?: "raw" | "text" | "json" | "messages";
20
- other?: {
21
- [x: string]: any;
22
- };
23
- }): Promise<void>;
9
+ * @param {PromptOptions} options
10
+ */
11
+ prompt(options: PromptOptions): Promise<any>;
24
12
  /**
25
13
  * Streams the prompt response.
14
+ *
15
+ * @param {PromptOptions & StreamOptions} options
26
16
  * @returns {AsyncIterator}
27
17
  */
28
- stream(options: any): AsyncIterator<any, any, any>;
29
- getMessages(options: any): Promise<{
30
- role: any;
31
- content: any;
32
- }[]>;
33
- loadTemplates(): Promise<void>;
18
+ stream(options: PromptOptions & StreamOptions): AsyncIterator<any, any, any>;
19
+ buildTemplate(options: any): Promise<any>;
20
+ runPrompt(options: any): void;
21
+ runStream(options: any): void;
22
+ getTextResponse(response: any): void;
23
+ /**
24
+ * @returns {Object}
25
+ */
26
+ getStructuredResponse(response: any): any;
27
+ /**
28
+ * @returns {Object}
29
+ */
30
+ getMessagesResponse(input: any, response: any): any;
31
+ /**
32
+ * @returns {Object}
33
+ */
34
+ normalizeStreamEvent(event: any): any;
35
+ normalizeOptions(options: any): Promise<any>;
36
+ normalizeInput(options: any): any;
37
+ normalizeSchema(options: any): any;
38
+ getMessageExtractor(options: any): (event: any) => any;
39
+ debug(message: any, arg: any): void;
40
+ resolveInstructions(options: any): Promise<any>;
34
41
  resolveTemplate(options: any): Promise<any>;
35
- getStream(options: any): Promise<void>;
36
- getCompletion(options: any): void;
37
- getStreamedChunk(chunk: any, started: any): void;
42
+ loadTemplates(): Promise<void>;
38
43
  }
44
+ export type PromptOptions = {
45
+ /**
46
+ * - Input to use.
47
+ */
48
+ input: string | PromptMessage[];
49
+ /**
50
+ * - The model to use.
51
+ */
52
+ model?: string;
53
+ /**
54
+ * - Stream response.
55
+ */
56
+ stream: boolean;
57
+ /**
58
+ * - A JSON schema compatible object that defines the output shape.
59
+ */
60
+ schema?: any;
61
+ /**
62
+ * - The return value type.
63
+ */
64
+ output?: "raw" | "text" | "json" | "messages";
65
+ /**
66
+ * - Params to be interpolated into the template.
67
+ * May also be passed as additional props to options.
68
+ */
69
+ params?: any;
70
+ };
71
+ export type StreamOptions = {
72
+ /**
73
+ * - Key in JSON response to extract a message stream from.
74
+ */
75
+ extractMessages?: string;
76
+ };
77
+ export type PromptMessage = {
78
+ role: "system" | "user" | "assistant";
79
+ content: string;
80
+ };
39
81
  //# sourceMappingURL=BaseClient.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"BaseClient.d.ts","sourceRoot":"","sources":["../src/BaseClient.js"],"names":[],"mappings":"AAMA;IACE,0BAGC;IAFC,aAAsB;IACtB,eAAqB;IAGvB;;;;;;;;;;;OAWG;IACH,gBALG;QAAwB,KAAK,EAArB,MAAM;QACyC,MAAM,GAArD,KAAK,GAAG,MAAM,GAAG,MAAM,GAAG,UAAU;QACL,KAAK,GAApC;gBAAQ,MAAM,GAAE,GAAG;SAAC;KAE9B,iBAYA;IAED;;;OAGG;IACH,mDAeC;IAED;;;SAqBC;IAED,+BAGC;IAED,4CAcC;IAED,uCAMC;IAED,kCAGC;IAED,iDAIC;CACF"}
1
+ {"version":3,"file":"BaseClient.d.ts","sourceRoot":"","sources":["../src/BaseClient.js"],"names":[],"mappings":"AAIA;IACE,0BAOC;IANC,aAIC;IACD,eAAqB;IAKvB;;;;;OAKG;IACH,gBAFW,aAAa,gBAuCvB;IAED;;;;;OAKG;IACH,gBAHW,aAAa,GAAG,aAAa,gCAsDvC;IAED,0CAGC;IAID,8BAGC;IAED,8BAGC;IAED,qCAGC;IAED;;OAEG;IACH,0CAGC;IAED;;OAEG;IACH,oDAGC;IAED;;OAEG;IACH,sCAGC;IAID,6CAaC;IAED,kCAiBC;IAED,mCAuBC;IAED,uDAWC;IAED,oCAMC;IAED,gDAKC;IAED,4CAIC;IAED,+BAGC;CACF;;;;;WAIa,MAAM,GAAC,aAAa,EAAE;;;;YACtB,MAAM;;;;YACN,OAAO;;;;;;;;aAEP,KAAK,GAAG,MAAM,GAAG,MAAM,GAAG,UAAU;;;;;;;;;;;sBAOpC,MAAM;;;UAKN,QAAQ,GAAG,MAAM,GAAG,WAAW;aAC/B,MAAM"}
@@ -1,15 +1,39 @@
1
1
  export class AnthropicClient extends BaseClient {
2
+ static DEFAULT_MODEL: string;
2
3
  client: Anthropic;
3
4
  /**
4
5
  * Lists available models.
5
6
  * {@link https://docs.anthropic.com/en/docs/about-claude/models Documentation}
6
7
  */
7
8
  models(): Promise<string[]>;
8
- getCompletion(options: any): Promise<any>;
9
- getStreamedChunk(chunk: any): {
9
+ runPrompt(options: any): Promise<Anthropic.Messages.Message & {
10
+ _request_id?: string | null;
11
+ } & import("@anthropic-ai/sdk/core/streaming.js").Stream<Anthropic.Messages.RawMessageStreamEvent>>;
12
+ runStream(options: any): Promise<Anthropic.Messages.Message & {
13
+ _request_id?: string | null;
14
+ } & import("@anthropic-ai/sdk/core/streaming.js").Stream<Anthropic.Messages.RawMessageStreamEvent>>;
15
+ getTextResponse(response: any): any;
16
+ getMessagesResponse(input: any, response: any): {
17
+ messages: any[];
18
+ };
19
+ normalizeStreamEvent(event: any): {
20
+ type: string;
21
+ text?: undefined;
22
+ } | {
10
23
  type: string;
11
24
  text: any;
12
25
  };
26
+ getSchemaOptions(options: any): {
27
+ tools: {
28
+ name: string;
29
+ description: string;
30
+ input_schema: any;
31
+ }[];
32
+ tool_choice: {
33
+ type: string;
34
+ name: string;
35
+ };
36
+ };
13
37
  }
14
38
  import BaseClient from './BaseClient.js';
15
39
  import Anthropic from '@anthropic-ai/sdk';
@@ -1 +1 @@
1
- {"version":3,"file":"anthropic.d.ts","sourceRoot":"","sources":["../src/anthropic.js"],"names":[],"mappings":"AAQA;IAGI,kBAEE;IAGJ;;;OAGG;IACH,4BAGC;IAED,0CAsCC;IAED;;;MAiBC;CACF;uBAjFsB,iBAAiB;sBAFlB,mBAAmB"}
1
+ {"version":3,"file":"anthropic.d.ts","sourceRoot":"","sources":["../src/anthropic.js"],"names":[],"mappings":"AAMA;IACE,6BAA2C;IAIzC,kBAAoC;IAGtC;;;OAGG;IACH,4BAGC;IAED;;wGAoBC;IAED;;wGAMC;IAED,oCAKC;IASD;;MAgBC;IAED;;;;;;MAgBC;IAID;;;;;;;;;;MA8BC;CACF;uBAtIsB,iBAAiB;sBAFlB,mBAAmB"}
@@ -1 +1 @@
1
- {"version":3,"file":"google.d.ts","sourceRoot":"","sources":["../src/google.js"],"names":[],"mappings":"AAOA;IAII,2BAA4C;IAG9C;;;OAGG;IACH,4BAOC;IAED,0CAwCC;IACD,sCAIC;IAED;;;MAkBC;CACF;uBA3FsB,iBAAiB;mCAFL,uBAAuB"}
1
+ {"version":3,"file":"google.d.ts","sourceRoot":"","sources":["../src/google.js"],"names":[],"mappings":"AAMA;IAII,2BAA4C;IAG9C;;;OAGG;IACH,4BAOC;IAED,0CAkCC;IACD,sCAKC;IAED;;;MAkBC;CACF;uBArFsB,iBAAiB;mCAFL,uBAAuB"}
package/types/index.d.ts CHANGED
@@ -1,4 +1,5 @@
1
- export class Client {
2
- constructor(options: any);
3
- }
1
+ export function createClient(options?: {}): AnthropicClient | GoogleClient | OpenAiClient;
2
+ import { AnthropicClient } from './anthropic.js';
3
+ import { GoogleClient } from './google.js';
4
+ import { OpenAiClient } from './openai.js';
4
5
  //# sourceMappingURL=index.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.js"],"names":[],"mappings":"AAIA;IACE,0BAaC;CACF"}
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.js"],"names":[],"mappings":"AAKA,0FAkBC;gCAvB+B,gBAAgB;6BACnB,aAAa;6BACb,aAAa"}