@bedrockio/ai 0.3.0 → 0.4.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +25 -0
- package/README.md +58 -17
- package/dist/cjs/BaseClient.js +242 -182
- package/dist/cjs/anthropic.js +115 -93
- package/dist/cjs/google.js +74 -80
- package/dist/cjs/index.js +23 -75
- package/dist/cjs/openai.js +114 -72
- package/dist/cjs/package.json +1 -0
- package/dist/cjs/utils/code.js +11 -0
- package/dist/cjs/utils/json.js +53 -0
- package/dist/cjs/utils/templates.js +83 -0
- package/dist/cjs/xai.js +11 -20
- package/dist/esm/BaseClient.js +243 -0
- package/dist/esm/anthropic.js +116 -0
- package/dist/esm/google.js +75 -0
- package/dist/esm/index.js +25 -0
- package/dist/esm/openai.js +113 -0
- package/dist/esm/utils/code.js +8 -0
- package/dist/esm/utils/json.js +50 -0
- package/dist/esm/utils/templates.js +76 -0
- package/dist/esm/xai.js +10 -0
- package/package.json +25 -18
- package/types/BaseClient.d.ts +67 -26
- package/types/BaseClient.d.ts.map +1 -1
- package/types/anthropic.d.ts +26 -2
- package/types/anthropic.d.ts.map +1 -1
- package/types/google.d.ts.map +1 -1
- package/types/index.d.ts +4 -11
- package/types/index.d.ts.map +1 -1
- package/types/openai.d.ts +45 -2
- package/types/openai.d.ts.map +1 -1
- package/types/utils/code.d.ts +2 -0
- package/types/utils/code.d.ts.map +1 -0
- package/types/utils/json.d.ts +2 -0
- package/types/utils/json.d.ts.map +1 -0
- package/types/utils/templates.d.ts +3 -0
- package/types/utils/templates.d.ts.map +1 -0
- package/types/utils.d.ts +4 -0
- package/types/utils.d.ts.map +1 -0
- package/types/xai.d.ts.map +1 -1
- package/.prettierignore +0 -1
- package/.prettierrc.cjs +0 -1
- package/__mocks__/@anthropic-ai/sdk.js +0 -43
- package/__mocks__/@google/generative-ai.js +0 -59
- package/__mocks__/openai.js +0 -48
- package/dist/cjs/util.js +0 -62
- package/src/BaseClient.js +0 -195
- package/src/anthropic.js +0 -97
- package/src/google.js +0 -91
- package/src/index.js +0 -72
- package/src/openai.js +0 -71
- package/src/util.js +0 -60
- package/src/xai.js +0 -19
package/dist/esm/openai.js
ADDED
@@ -0,0 +1,113 @@
+import OpenAI from 'openai';
+import BaseClient from './BaseClient.js';
+export class OpenAiClient extends BaseClient {
+    static DEFAULT_MODEL = 'gpt-5-nano';
+    constructor(options) {
+        super(options);
+        this.client = new OpenAI(options);
+    }
+    /**
+     * Lists available models.
+     * {@link https://platform.openai.com/docs/models Documentation}
+     */
+    async models() {
+        const { data } = await this.client.models.list();
+        return data.map((o) => o.id);
+    }
+    async runPrompt(options) {
+        const { input, model, tools, verbosity, temperature, instructions, prevResponseId, stream = false, } = options;
+        const params = {
+            model,
+            input,
+            tools,
+            stream,
+            temperature,
+            instructions,
+            previous_response_id: prevResponseId,
+            text: {
+                format: this.getOutputFormat(options),
+                verbosity,
+            },
+        };
+        this.debug('Params:', params);
+        // @ts-ignore
+        return await this.client.responses.create(params);
+    }
+    async runStream(options) {
+        return await this.runPrompt({
+            ...options,
+            stream: true,
+        });
+    }
+    getTextResponse(response) {
+        return response.output_text;
+    }
+    getStructuredResponse(response) {
+        return JSON.parse(response.output_text);
+    }
+    getMessagesResponse(input, response) {
+        return {
+            messages: [
+                ...input,
+                {
+                    role: 'assistant',
+                    content: response.output_text,
+                },
+            ],
+            // Note that this ability currently only
+            // exists for OpenAI compatible providers.
+            prevResponseId: response.id,
+        };
+    }
+    // Private
+    getOutputFormat(options) {
+        let { output, schema } = options;
+        if (output === 'json') {
+            return {
+                type: 'json_object',
+            };
+        }
+        else if (schema) {
+            return {
+                type: 'json_schema',
+                // Name is required but arbitrary.
+                name: 'schema',
+                strict: true,
+                schema,
+            };
+        }
+        else {
+            return {
+                type: 'text',
+            };
+        }
+    }
+    normalizeStreamEvent(event) {
+        const { type } = event;
+        if (type === 'response.created') {
+            return {
+                type: 'start',
+                id: event.response.id,
+            };
+        }
+        else if (type === 'response.completed') {
+            return {
+                type: 'stop',
+                id: event.response.id,
+                usage: event.response.usage,
+            };
+        }
+        else if (type === 'response.output_text.delta') {
+            return {
+                type: 'delta',
+                delta: event.delta,
+            };
+        }
+        else if (type === 'response.output_text.done') {
+            return {
+                type: 'done',
+                text: event.text,
+            };
+        }
+    }
+}
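For orientation, a minimal sketch of driving this compiled client directly. The import path and credential handling are illustrative assumptions (the package root only exports createClient); runStream resolves to the async-iterable Responses stream, and normalizeStreamEvent collapses its events into the start/delta/done/stop shapes defined above.

// Sketch only: import path and credentials are assumptions.
import { OpenAiClient } from '@bedrockio/ai/dist/esm/openai.js';

const client = new OpenAiClient({ apiKey: process.env.OPENAI_API_KEY });

const stream = await client.runStream({
  model: OpenAiClient.DEFAULT_MODEL,
  input: 'Write a haiku about package diffs.',
});

for await (const event of stream) {
  const normalized = client.normalizeStreamEvent(event);
  if (!normalized) {
    continue; // other Responses API event types are ignored
  }
  if (normalized.type === 'delta') {
    process.stdout.write(normalized.delta);
  } else if (normalized.type === 'stop') {
    console.log('\nUsage:', normalized.usage);
  }
}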
package/dist/esm/utils/json.js
ADDED
@@ -0,0 +1,50 @@
+import { OBJ, STR, parse } from 'partial-json';
+export function createMessageExtractor(keys) {
+    let buffer = '';
+    const extractors = keys.map((key) => {
+        return createExtractor(key);
+    });
+    return (delta) => {
+        buffer += delta;
+        return extractors
+            .map((extractor) => {
+            return extractor(buffer);
+        })
+            .filter((extracted) => {
+            return extracted;
+        });
+    };
+}
+function createExtractor(key) {
+    let lastText = '';
+    let done = false;
+    return (buffer) => {
+        if (done) {
+            return;
+        }
+        const text = extractText(buffer, key);
+        if (!text) {
+            return;
+        }
+        // Don't finish while the buffer has whitespace as it
+        // may be in the middle of trying to extract.
+        if (text === lastText && !buffer.endsWith(' ')) {
+            done = true;
+        }
+        const delta = text.slice(lastText.length);
+        lastText = text;
+        return {
+            key,
+            text,
+            delta,
+            done,
+        };
+    };
+}
+function extractText(input, key) {
+    if (!input) {
+        return;
+    }
+    const parsed = parse(input, STR | OBJ);
+    return parsed?.[key] || '';
+}
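This extractor appears to back the new extractMessages stream option declared in BaseClient.d.ts: it keeps re-parsing the accumulating, possibly incomplete JSON buffer with partial-json and emits a delta per tracked key. A small illustration (the import path is hypothetical):

import { createMessageExtractor } from '@bedrockio/ai/dist/esm/utils/json.js'; // hypothetical path

const extract = createMessageExtractor(['title']);

// Feed deltas as they arrive from a model streaming a JSON object.
extract('{"ti');            // [] - "title" cannot be parsed yet
extract('tle": "Hel');      // [{ key: 'title', text: 'Hel', delta: 'Hel', done: false }]
extract('lo"');             // [{ key: 'title', text: 'Hello', delta: 'lo', done: false }]
extract(', "body": "Hi"}'); // [{ key: 'title', text: 'Hello', delta: '', done: true }]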
package/dist/esm/utils/templates.js
ADDED
@@ -0,0 +1,76 @@
+import fs from 'fs/promises';
+import path from 'path';
+import { glob } from 'glob';
+import Mustache from 'mustache';
+export async function loadTemplates(dir) {
+    const result = {};
+    const files = await glob(path.join(dir, '*.md'));
+    if (!files.length) {
+        throw new Error(`No templates found in: ${dir}.`);
+    }
+    for (let file of files) {
+        const base = path.basename(file, '.md');
+        result[base] = await loadTemplate(file);
+    }
+    return result;
+}
+export function renderTemplate(template, options) {
+    let params = {
+        ...options,
+        ...options.params,
+    };
+    params = mapObjects(params);
+    params = wrapProxy(params);
+    return Mustache.render(template, params);
+}
+// Utils
+async function loadTemplate(file) {
+    return await fs.readFile(file, 'utf-8');
+}
+// Transform arrays and object to versions
+// that are more understandable in the context
+// of a template that may have meaningful whitespace.
+function mapObjects(params) {
+    const result = {};
+    for (let [key, value] of Object.entries(params)) {
+        if (Array.isArray(value)) {
+            value = mapArray(value);
+        }
+        else if (typeof value === 'object') {
+            value = JSON.stringify(value, null, 2);
+        }
+        result[key] = value;
+    }
+    return result;
+}
+function mapArray(arr) {
+    // Only map simple arrays of primitives.
+    if (typeof arr[0] === 'string') {
+        arr = arr
+            .map((el) => {
+            return `- ${el}`;
+        })
+            .join('\n');
+    }
+    return arr;
+}
+// Wrap params with a proxy object that reports
+// as having all properties. If one is accessed
+// that does not exist then return the original
+// token. This way templates can be partially
+// interpolated and re-interpolated later.
+function wrapProxy(params) {
+    return new Proxy(params, {
+        has() {
+            return true;
+        },
+        get(target, prop) {
+            if (prop in target) {
+                return target[prop];
+            }
+            else {
+                return `{{{${prop.toString()}}}}`;
+            }
+        },
+    });
+}
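The proxy wrapper is what makes partial interpolation work: unknown tokens render back to themselves so a template can be filled in over several passes, while string arrays become bullet lists. A short sketch (the import path is hypothetical):

import { renderTemplate } from '@bedrockio/ai/dist/esm/utils/templates.js'; // hypothetical path

const template = 'Topics:\n{{{topics}}}\n\nTone: {{{tone}}}';

const output = renderTemplate(template, {
  params: {
    topics: ['pricing', 'support'],
  },
});

// String arrays are rendered as bullets and the unresolved tone
// token is preserved for a later render pass:
//
// Topics:
// - pricing
// - support
//
// Tone: {{{tone}}}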
package/dist/esm/xai.js
ADDED
package/package.json
CHANGED
@@ -1,21 +1,25 @@
 {
   "name": "@bedrockio/ai",
-  "version": "0.
+  "version": "0.4.4",
   "description": "Bedrock wrapper for common AI chatbots.",
   "type": "module",
   "scripts": {
-    "test": "
-    "
+    "test": "vitest run",
+    "test:live": "node --env-file=.env.live ./node_modules/.bin/vitest run",
     "lint": "eslint",
     "build": "scripts/build",
     "eject": "scripts/eject",
-    "
+    "build:cjs": "tsc -p tsconfig.cjs.json",
+    "build:esm": "tsc -p tsconfig.esm.json && tsc-alias -f -p tsconfig.esm.json",
+    "build:types": "tsc -p tsconfig.types.json",
+    "prepublish": "yarn build"
   },
   "types": "types/index.d.ts",
   "main": "./dist/cjs/index.js",
   "exports": {
     ".": {
-      "
+      "types": "./types/index.d.ts",
+      "import": "./dist/esm/index.js",
       "require": "./dist/cjs/index.js"
     }
   },
@@ -31,27 +35,30 @@
     "url": "https://github.com/bedrockio/router"
   },
   "dependencies": {
-    "@anthropic-ai/sdk": "^0.
+    "@anthropic-ai/sdk": "^0.65.0",
     "@google/generative-ai": "^0.21.0",
     "glob": "^11.0.1",
     "mustache": "^4.2.0",
-    "openai": "^
+    "openai": "^6.1.0",
+    "partial-json": "^0.1.7"
   },
   "devDependencies": {
-    "@
-    "@babel/core": "^7.26.0",
-    "@babel/eslint-parser": "^7.26.5",
-    "@babel/preset-env": "^7.26.0",
+    "@bedrockio/eslint-plugin": "^1.2.2",
     "@bedrockio/prettier-config": "^1.0.2",
-    "
-    "eslint
-    "
-    "
-    "
-    "typescript": "^5.7.3"
+    "@bedrockio/yada": "^1.8.0",
+    "eslint": "^9.36.0",
+    "tsc-alias": "^1.8.16",
+    "typescript": "^5.9.3",
+    "vitest": "^3.2.4"
   },
+  "files": [
+    "dist/**",
+    "types/**",
+    "README.md",
+    "CHANGELOG.md"
+  ],
   "volta": {
-    "node": "22.
+    "node": "22.20.0",
     "yarn": "1.22.22"
   }
 }
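The reworked exports map makes the package properly dual-format: import now resolves the new dist/esm build, require keeps resolving dist/cjs, and both pick up types/index.d.ts. In practice:

// consumer.mjs (resolved to dist/esm via the "import" condition)
import { createClient } from '@bedrockio/ai';

// consumer.cjs (resolved to dist/cjs via the "require" condition)
const { createClient } = require('@bedrockio/ai');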
package/types/BaseClient.d.ts
CHANGED
@@ -3,38 +3,79 @@ export default class BaseClient {
     options: any;
     templates: any;
     /**
-     * Interpolates vars into the provided template and
-     *
-     * be omitted and will default to `"text"`.
-     * {@link https://github.com/bedrockio/ai?tab=readme-ov-file#bedrockioai Documentation}
+     * Interpolates vars into the provided template as instructions and runs the
+     * prompt.
      *
-     * @param {
-
-
-     * @param {Object.<string, any>} [options.other] - Additional props
-     * will be interpolated in the template.
-     */
-    prompt(options: {
-        model: string;
-        output?: "raw" | "text" | "json" | "messages";
-        other?: {
-            [x: string]: any;
-        };
-    }): Promise<void>;
+     * @param {PromptOptions} options
+     */
+    prompt(options: PromptOptions): Promise<any>;
     /**
      * Streams the prompt response.
+     *
+     * @param {PromptOptions & StreamOptions} options
      * @returns {AsyncIterator}
      */
-    stream(options:
-    getMessages(options: any): Promise<{
-        role: any;
-        content: any;
-    }[]>;
+    stream(options: PromptOptions & StreamOptions): AsyncIterator<any, any, any>;
     buildTemplate(options: any): Promise<any>;
-
+    runPrompt(options: any): void;
+    runStream(options: any): void;
+    getTextResponse(response: any): void;
+    /**
+     * @returns {Object}
+     */
+    getStructuredResponse(response: any): any;
+    /**
+     * @returns {Object}
+     */
+    getMessagesResponse(input: any, response: any): any;
+    /**
+     * @returns {Object}
+     */
+    normalizeStreamEvent(event: any): any;
+    normalizeOptions(options: any): Promise<any>;
+    normalizeInput(options: any): any;
+    normalizeSchema(options: any): any;
+    getMessageExtractor(options: any): (event: any) => any;
+    debug(message: any, arg: any): void;
+    resolveInstructions(options: any): Promise<any>;
     resolveTemplate(options: any): Promise<any>;
-
-    getCompletion(options: any): void;
-    getStreamedChunk(chunk: any, started: any): void;
+    loadTemplates(): Promise<void>;
 }
+export type PromptOptions = {
+    /**
+     * - Input to use.
+     */
+    input: string | PromptMessage[];
+    /**
+     * - The model to use.
+     */
+    model?: string;
+    /**
+     * - Stream response.
+     */
+    stream: boolean;
+    /**
+     * - A JSON schema compatible object that defines the output shape.
+     */
+    schema?: any;
+    /**
+     * - The return value type.
+     */
+    output?: "raw" | "text" | "json" | "messages";
+    /**
+     * - Params to be interpolated into the template.
+     * May also be passed as additional props to options.
+     */
+    params?: any;
+};
+export type StreamOptions = {
+    /**
+     * - Key in JSON response to extract a message stream from.
+     */
+    extractMessages?: string;
+};
+export type PromptMessage = {
+    role: "system" | "user" | "assistant";
+    content: string;
+};
 //# sourceMappingURL=BaseClient.d.ts.map
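The old inline options object is replaced by the PromptOptions typedef above. A hedged sketch of a call using those fields; how the client is constructed and what prompt resolves to for a given output value are outside this diff:

const result = await client.prompt({
  model: 'gpt-5-nano',
  input: [{ role: 'user', content: 'Summarize the 0.4.4 release in one sentence.' }],
  // JSON schema compatible object defining the output shape.
  schema: {
    type: 'object',
    properties: {
      summary: { type: 'string' },
    },
    required: ['summary'],
    additionalProperties: false,
  },
});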
package/types/BaseClient.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"BaseClient.d.ts","sourceRoot":"","sources":["../src/BaseClient.js"],"names":[],"mappings":"
+{"version":3,"file":"BaseClient.d.ts","sourceRoot":"","sources":["../src/BaseClient.js"],"names":[],"mappings":"AAIA;IACE,0BAOC;IANC,aAIC;IACD,eAAqB;IAKvB;;;;;OAKG;IACH,gBAFW,aAAa,gBAuCvB;IAED;;;;;OAKG;IACH,gBAHW,aAAa,GAAG,aAAa,gCAsDvC;IAED,0CAGC;IAID,8BAGC;IAED,8BAGC;IAED,qCAGC;IAED;;OAEG;IACH,0CAGC;IAED;;OAEG;IACH,oDAGC;IAED;;OAEG;IACH,sCAGC;IAID,6CAaC;IAED,kCAiBC;IAED,mCAuBC;IAED,uDAWC;IAED,oCAMC;IAED,gDAKC;IAED,4CAIC;IAED,+BAGC;CACF;;;;;WAIa,MAAM,GAAC,aAAa,EAAE;;;;YACtB,MAAM;;;;YACN,OAAO;;;;;;;;aAEP,KAAK,GAAG,MAAM,GAAG,MAAM,GAAG,UAAU;;;;;;;;;;;sBAOpC,MAAM;;;UAKN,QAAQ,GAAG,MAAM,GAAG,WAAW;aAC/B,MAAM"}
package/types/anthropic.d.ts
CHANGED
@@ -1,15 +1,39 @@
 export class AnthropicClient extends BaseClient {
+    static DEFAULT_MODEL: string;
     client: Anthropic;
     /**
      * Lists available models.
      * {@link https://docs.anthropic.com/en/docs/about-claude/models Documentation}
      */
     models(): Promise<string[]>;
-
-
+    runPrompt(options: any): Promise<Anthropic.Messages.Message & {
+        _request_id?: string | null;
+    } & import("@anthropic-ai/sdk/core/streaming.js").Stream<Anthropic.Messages.RawMessageStreamEvent>>;
+    runStream(options: any): Promise<Anthropic.Messages.Message & {
+        _request_id?: string | null;
+    } & import("@anthropic-ai/sdk/core/streaming.js").Stream<Anthropic.Messages.RawMessageStreamEvent>>;
+    getTextResponse(response: any): any;
+    getMessagesResponse(input: any, response: any): {
+        messages: any[];
+    };
+    normalizeStreamEvent(event: any): {
+        type: string;
+        text?: undefined;
+    } | {
         type: string;
         text: any;
     };
+    getSchemaOptions(options: any): {
+        tools: {
+            name: string;
+            description: string;
+            input_schema: any;
+        }[];
+        tool_choice: {
+            type: string;
+            name: string;
+        };
+    };
 }
 import BaseClient from './BaseClient.js';
 import Anthropic from '@anthropic-ai/sdk';
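The getSchemaOptions signature suggests structured output on Anthropic is implemented as a single forced tool whose input_schema is the caller's schema. A rough sketch of what that declaration implies, not the package's actual code; the tool name here is a placeholder:

function getSchemaOptions({ schema }) {
  return {
    tools: [
      {
        name: 'structured_output', // placeholder name
        description: 'Return the structured output.',
        input_schema: schema,
      },
    ],
    tool_choice: {
      // Anthropic's "tool" choice forces the named tool to be called.
      type: 'tool',
      name: 'structured_output',
    },
  };
}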
package/types/anthropic.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"anthropic.d.ts","sourceRoot":"","sources":["../src/anthropic.js"],"names":[],"mappings":"
+{"version":3,"file":"anthropic.d.ts","sourceRoot":"","sources":["../src/anthropic.js"],"names":[],"mappings":"AAMA;IACE,6BAA2C;IAIzC,kBAAoC;IAGtC;;;OAGG;IACH,4BAGC;IAED;;wGAoBC;IAED;;wGAMC;IAED,oCAKC;IASD;;MAgBC;IAED;;;;;;MAgBC;IAID;;;;;;;;;;MA8BC;CACF;uBAtIsB,iBAAiB;sBAFlB,mBAAmB"}
package/types/google.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"google.d.ts","sourceRoot":"","sources":["../src/google.js"],"names":[],"mappings":"
+{"version":3,"file":"google.d.ts","sourceRoot":"","sources":["../src/google.js"],"names":[],"mappings":"AAMA;IAII,2BAA4C;IAG9C;;;OAGG;IACH,4BAOC;IAED,0CAkCC;IACD,sCAKC;IAED;;;MAkBC;CACF;uBArFsB,iBAAiB;mCAFL,uBAAuB"}
package/types/index.d.ts
CHANGED
@@ -1,12 +1,5 @@
-export
-
-}
-
-    constructor(options: any);
-    clients: {};
-    prompt(options: any): any;
-    stream(options: any): any;
-    buildTemplate(options: any): any;
-    getClient(options: any): any;
-}
+export function createClient(options?: {}): AnthropicClient | GoogleClient | OpenAiClient;
+import { AnthropicClient } from './anthropic.js';
+import { GoogleClient } from './google.js';
+import { OpenAiClient } from './openai.js';
 //# sourceMappingURL=index.d.ts.map
package/types/index.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.js"],"names":[],"mappings":"AAKA
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.js"],"names":[],"mappings":"AAKA,0FAkBC;gCAvB+B,gBAAgB;6BACnB,aAAa;6BACb,aAAa"}
package/types/openai.d.ts
CHANGED
@@ -1,14 +1,57 @@
 export class OpenAiClient extends BaseClient {
+    static DEFAULT_MODEL: string;
     client: OpenAI;
     /**
      * Lists available models.
      * {@link https://platform.openai.com/docs/models Documentation}
      */
     models(): Promise<string[]>;
-
-
+    runPrompt(options: any): Promise<OpenAI.Responses.Response & {
+        _request_id?: string | null;
+    } & import("openai/core/streaming.js").Stream<OpenAI.Responses.ResponseStreamEvent>>;
+    runStream(options: any): Promise<OpenAI.Responses.Response & {
+        _request_id?: string | null;
+    } & import("openai/core/streaming.js").Stream<OpenAI.Responses.ResponseStreamEvent>>;
+    getTextResponse(response: any): any;
+    getMessagesResponse(input: any, response: any): {
+        messages: any[];
+        prevResponseId: any;
+    };
+    getOutputFormat(options: any): {
+        type: string;
+        name?: undefined;
+        strict?: undefined;
+        schema?: undefined;
+    } | {
+        type: string;
+        name: string;
+        strict: boolean;
+        schema: any;
+    };
+    normalizeStreamEvent(event: any): {
+        type: string;
+        id: any;
+        usage?: undefined;
+        delta?: undefined;
+        text?: undefined;
+    } | {
+        type: string;
+        id: any;
+        usage: any;
+        delta?: undefined;
+        text?: undefined;
+    } | {
+        type: string;
+        delta: any;
+        id?: undefined;
+        usage?: undefined;
+        text?: undefined;
+    } | {
         type: string;
         text: any;
+        id?: undefined;
+        usage?: undefined;
+        delta?: undefined;
     };
 }
 import BaseClient from './BaseClient.js';
package/types/openai.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"openai.d.ts","sourceRoot":"","sources":["../src/openai.js"],"names":[],"mappings":"
+{"version":3,"file":"openai.d.ts","sourceRoot":"","sources":["../src/openai.js"],"names":[],"mappings":"AAIA;IACE,6BAAoC;IAIlC,eAAiC;IAGnC;;;OAGG;IACH,4BAGC;IAED;;yFA+BC;IAED;;yFAKC;IAED,oCAEC;IAMD;;;MAaC;IAID;;;;;;;;;;MAmBC;IAED;;;;;;;;;;;;;;;;;;;;;;;;MAyBC;CACF;uBAnIsB,iBAAiB;mBAFrB,QAAQ"}
package/types/utils/code.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"code.d.ts","sourceRoot":"","sources":["../../src/utils/code.js"],"names":[],"mappings":"AAEA,6CAMC"}
package/types/utils/json.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"json.d.ts","sourceRoot":"","sources":["../../src/utils/json.js"],"names":[],"mappings":"AAEA,oDAKU,UAAK,SAUd"}
package/types/utils/templates.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"templates.d.ts","sourceRoot":"","sources":["../../src/utils/templates.js"],"names":[],"mappings":"AAMA,qDAcC;AAED,iEASC"}
package/types/utils.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"utils.d.ts","sourceRoot":"","sources":["../src/utils.js"],"names":[],"mappings":"AAQA,qDAcC;AAED,6CAMC;AAED,iEASC"}
package/types/xai.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"xai.d.ts","sourceRoot":"","sources":["../src/xai.js"],"names":[],"mappings":"
+{"version":3,"file":"xai.d.ts","sourceRoot":"","sources":["../src/xai.js"],"names":[],"mappings":"AAEA;CASC;6BAX4B,aAAa"}
package/.prettierignore
DELETED
@@ -1 +0,0 @@
-test/templates
package/.prettierrc.cjs
DELETED
@@ -1 +0,0 @@
-module.exports = require('@bedrockio/prettier-config');