@bedrockio/ai 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.prettierignore +1 -0
- package/.prettierrc.cjs +1 -0
- package/CHANGELOG.md +3 -0
- package/README.md +84 -0
- package/__mocks__/@anthropic-ai/sdk.js +43 -0
- package/__mocks__/openai.js +48 -0
- package/dist/cjs/Wrapper.js +63 -0
- package/dist/cjs/anthropic.js +19 -0
- package/dist/cjs/index.js +3 -0
- package/dist/cjs/openai.js +49 -0
- package/dist/cjs/util.js +29 -0
- package/package.json +50 -0
- package/src/BaseClient.js +134 -0
- package/src/anthropic.js +96 -0
- package/src/index.js +17 -0
- package/src/openai.js +67 -0
- package/src/util.js +42 -0
- package/types/Link.d.ts +2 -0
- package/types/Link.d.ts.map +1 -0
- package/types/NavLink.d.ts +2 -0
- package/types/NavLink.d.ts.map +1 -0
- package/types/Redirect.d.ts +2 -0
- package/types/Redirect.d.ts.map +1 -0
- package/types/Route.d.ts +22 -0
- package/types/Route.d.ts.map +1 -0
- package/types/Router.d.ts +4 -0
- package/types/Router.d.ts.map +1 -0
- package/types/hoc.d.ts +2 -0
- package/types/hoc.d.ts.map +1 -0
- package/types/index.d.ts +1 -0
- package/types/index.d.ts.map +1 -0
- package/types/location.d.ts +2 -0
- package/types/location.d.ts.map +1 -0
- package/types/navigate.d.ts +8 -0
- package/types/navigate.d.ts.map +1 -0
- package/types/params.d.ts +2 -0
- package/types/params.d.ts.map +1 -0
- package/types/search.d.ts +2 -0
- package/types/search.d.ts.map +1 -0
package/.prettierignore
ADDED
@@ -0,0 +1 @@
+test/templates
package/.prettierrc.cjs
ADDED
@@ -0,0 +1 @@
+module.exports = require('@bedrockio/prettier-config');
package/CHANGELOG.md
ADDED
package/README.md
ADDED
@@ -0,0 +1,84 @@
+# @bedrockio/ai
+
+This package provides a thin wrapper for common AI chatbots. It standardizes
+usage to allow different platforms to be swapped easily and supports templated
+usage.
+
+- [Install](#install)
+- [Usage](#usage)
+
+## Install
+
+```bash
+yarn add @bedrockio/ai
+```
+
+## Usage
+
+```js
+import { Client } from '@bedrockio/ai';
+
+const client = new Client({
+  // Directory containing templates
+  templates: './test/templates',
+  // Platform: openai|gpt|anthropic|claude
+  platform: 'openai',
+  // Your API key
+  apiKey: 'my-api-key',
+});
+
+// Get a one-time response.
+const response = await client.prompt({
+  // The template file to use.
+  file: 'classify-fruits',
+  // The form of output. May be raw|text|messages|json.
+  // Default is "text".
+  output: 'json',
+  // A custom template may be passed if "file" is not.
+  template: 'custom',
+
+  // All other variables will be
+  // interpolated into the template.
+  text: 'a long yellow fruit',
+  fruit: 'banana, apple, pear',
+});
+
+// Stream the results
+const stream = await client.stream({
+  file: 'classify-fruits',
+  // ...
+});
+
+// Will return an AsyncIterator
+for await (const chunk of stream) {
+  console.info(chunk.text);
+}
+
+// List available models
+const models = await client.models();
+```
+
+## Templates
+
+Template files must be markdown (`.md`) and live in your templates
+directory. They may be a simple text description or delineated roles:
+
+````
+--- SYSTEM ---
+
+This is a list of fruits: {{fruits}}
+
+--- USER ---
+
+Which fruit do you think the following input most closely resembles?
+
+Please provide your response as a JSON object containing:
+
+- "name" {string} - The name of the fruit.
+- "reason" {string} - The reason you believe it matches.
+- "certainty" {number} - Your confidence in your answer from 0 to 1.
+
+```
+{{text}}
+```
+````
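Note: the `classify-fruits.md` template referenced in the README above is not among the published files, so its contents are unknown. As a rough sketch of how the interpolation fits together, the same call could instead pass an inline `template` string; the wording below is an assumption made up for illustration, not the real template.

```js
import { Client } from '@bedrockio/ai';

const client = new Client({
  platform: 'openai',
  templates: './test/templates',
  apiKey: 'my-api-key',
});

// Hypothetical inline template standing in for classify-fruits.md.
// {{fruit}} and {{text}} are interpolated by Mustache before the
// rendered text is split into system/user messages.
const response = await client.prompt({
  output: 'json',
  template: [
    '--- SYSTEM ---',
    'This is a list of fruits: {{fruit}}',
    '--- USER ---',
    'Which fruit does the following input most closely resemble?',
    'Respond as a JSON object with "name", "reason" and "certainty".',
    '{{text}}',
  ].join('\n'),
  text: 'a long yellow fruit',
  fruit: 'banana, apple, pear',
});
```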
package/__mocks__/@anthropic-ai/sdk.js
ADDED
@@ -0,0 +1,43 @@
+let mock;
+
+function Anthropic() {
+  return {
+    messages: {
+      create(options) {
+        if (options.stream) {
+          return streamMock();
+        } else {
+          return mock;
+        }
+      },
+    },
+  };
+}
+
+function setResponse(data) {
+  mock = data;
+}
+
+async function* streamMock() {
+  const content = mock.content[0].text;
+  const size = Math.floor(content.length / 3);
+  const one = content.slice(0, size);
+  const two = content.slice(size, 2 * size);
+  const three = content.slice(2 * size);
+  yield wrapChunk(one, 'content_block_start');
+  yield wrapChunk(two, 'content_block_delta');
+  yield wrapChunk(three, 'message_stop');
+}
+
+function wrapChunk(str, type) {
+  return {
+    type,
+    delta: {
+      text: str,
+    },
+  };
+}
+
+Anthropic.setResponse = setResponse;
+
+module.exports = Anthropic;
package/__mocks__/openai.js
ADDED
@@ -0,0 +1,48 @@
+let mock;
+
+function OpenAI() {
+  return {
+    chat: {
+      completions: {
+        create(options) {
+          if (options.stream) {
+            return streamMock();
+          } else {
+            return mock;
+          }
+        },
+      },
+    },
+  };
+}
+
+async function* streamMock() {
+  const content = mock.choices[0].message.content;
+  const size = Math.floor(content.length / 3);
+  const one = content.slice(0, size);
+  const two = content.slice(size, 2 * size);
+  const three = content.slice(2 * size);
+  yield wrapChunk(one);
+  yield wrapChunk(two);
+  yield wrapChunk(three);
+}
+
+function wrapChunk(str) {
+  return {
+    choices: [
+      {
+        delta: {
+          content: str,
+        },
+      },
+    ],
+  };
+}
+
+function setResponse(data) {
+  mock = data;
+}
+
+OpenAI.setResponse = setResponse;
+
+module.exports = OpenAI;
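The two files above are Jest manual mocks that stand in for the real `openai` and `@anthropic-ai/sdk` packages during tests. A minimal sketch of how a test might drive the OpenAI mock, assuming a `classify-fruits.md` template exists under `./test/templates`; the test itself is illustrative and not part of the package:

```js
import OpenAI from 'openai';
import { Client } from '@bedrockio/ai';

// The manual mock in __mocks__/openai.js replaces the real SDK.
jest.mock('openai');

test('returns the mocked completion text', async () => {
  // Shape mirrors what the client reads: response.choices[0].message.content.
  OpenAI.setResponse({
    choices: [{ message: { content: 'banana' } }],
  });

  const client = new Client({
    platform: 'openai',
    templates: './test/templates',
    apiKey: 'test-key',
  });

  const result = await client.prompt({ file: 'classify-fruits' });

  expect(result).toBe('banana');
});
```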
package/dist/cjs/Wrapper.js
ADDED
@@ -0,0 +1,63 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+  value: true
+});
+exports.default = void 0;
+var _mustache = _interopRequireDefault(require("mustache"));
+var _util = require("./util.js");
+function _interopRequireDefault(e) { return e && e.__esModule ? e : { default: e }; }
+const MESSAGES_REG = /(?:^|\n)-{3,}\s*(\w+)\s*-{3,}(.*?)(?=\n-{3,}|$)/gs;
+class Wrapper {
+  constructor(options) {
+    this.options = options;
+  }
+  async getMessages(options) {
+    const template = await this.resolveTemplate(options);
+    const raw = _mustache.default.render(template, transformParams(options));
+    const messages = [];
+    for (let match of raw.matchAll(MESSAGES_REG)) {
+      const [, role, content] = match;
+      messages.push({
+        role: role.toLowerCase(),
+        content: content.trim()
+      });
+    }
+    return messages;
+  }
+  async loadTemplates() {
+    const {
+      templates
+    } = this.options;
+    this.templates ||= await (0, _util.loadTemplates)(templates);
+  }
+  async resolveTemplate(options) {
+    await this.loadTemplates();
+    let {
+      file,
+      template
+    } = options;
+    if (!template && file) {
+      template = this.templates[file];
+    }
+    if (!template) {
+      throw new Error('No template provided.');
+    }
+    return template;
+  }
+}
+exports.default = Wrapper;
+function transformParams(params) {
+  const result = {};
+  for (let [key, value] of Object.entries(params)) {
+    if (Array.isArray(value)) {
+      value = value.map(el => {
+        return `- ${el}`;
+      }).join('\n');
+    } else if (typeof value === 'object') {
+      value = JSON.stringify(value, null, 2);
+    }
+    result[key] = value;
+  }
+  return result;
+}
package/dist/cjs/anthropic.js
ADDED
@@ -0,0 +1,19 @@
+"use strict";
+
+var _sdk = _interopRequireDefault(require("@anthropic-ai/sdk"));
+function _interopRequireDefault(e) { return e && e.__esModule ? e : { default: e }; }
+const client = new _sdk.default({
+  apiKey: process.env['ANTHROPIC_API_KEY'] // This is the default and can be omitted
+});
+async function main() {
+  const message = await client.messages.create({
+    max_tokens: 1024,
+    messages: [{
+      role: 'user',
+      content: 'Hello, Claude'
+    }],
+    model: 'claude-3-5-sonnet-latest'
+  });
+  console.log(message.content);
+}
+main();
package/dist/cjs/openai.js
ADDED
@@ -0,0 +1,49 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+  value: true
+});
+exports.OpenAiClient = void 0;
+var _openai = _interopRequireDefault(require("openai"));
+var _Wrapper = _interopRequireDefault(require("./Wrapper.js"));
+var _util = require("./util.js");
+function _interopRequireDefault(e) { return e && e.__esModule ? e : { default: e }; }
+const DEFAULT_MODEL = 'gpt-4o';
+class OpenAiClient extends _Wrapper.default {
+  constructor(options) {
+    super(options);
+    this.client = new _openai.default({
+      ...options
+    });
+  }
+  async prompt(options) {
+    const messages = await this.getMessages(options);
+    return await runCompletion(this.client, messages, options);
+  }
+}
+exports.OpenAiClient = OpenAiClient;
+async function runCompletion(client, messages, options) {
+  const {
+    output = 'text',
+    model = DEFAULT_MODEL
+  } = options;
+  const response = await client.chat.completions.create({
+    model,
+    messages
+  });
+  let content = response.choices[0].message.content;
+  if (output === 'raw') {
+    return response;
+  } else if (output === 'text') {
+    return content;
+  } else if (output === 'messages') {
+    const {
+      message
+    } = response.choices[0];
+    return [...messages, message];
+  } else if (output === 'json') {
+    return (0, _util.parse)(content);
+  } else {
+    throw new Error(`Unknown output type "${output}".`);
+  }
+}
package/dist/cjs/util.js
ADDED
@@ -0,0 +1,29 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+  value: true
+});
+exports.loadTemplates = loadTemplates;
+exports.parse = parse;
+var _promises = _interopRequireDefault(require("fs/promises"));
+var _path = _interopRequireDefault(require("path"));
+var _glob = require("glob");
+function _interopRequireDefault(e) { return e && e.__esModule ? e : { default: e }; }
+const JSON_REG = /([{[].+[}\]])/s;
+async function loadTemplates(dir) {
+  const result = {};
+  const files = await (0, _glob.glob)(_path.default.join(dir, '*.md'));
+  for (let file of files) {
+    const base = _path.default.basename(file, '.md');
+    result[base] = await _promises.default.readFile(file, 'utf-8');
+  }
+  return result;
+}
+function parse(content) {
+  try {
+    const match = content.match(JSON_REG);
+    return JSON.parse(match[1]);
+  } catch (error) {
+    throw new Error('Unable to derive JSON object in response.');
+  }
+}
package/package.json
ADDED
@@ -0,0 +1,50 @@
+{
+  "name": "@bedrockio/ai",
+  "version": "0.1.0",
+  "description": "Bedrock wrapper for common AI chatbots.",
+  "type": "module",
+  "scripts": {
+    "test": "node --no-warnings node_modules/.bin/jest",
+    "types": "tsc",
+    "lint": "eslint",
+    "build": "scripts/build",
+    "eject": "scripts/eject",
+    "prepublish": "yarn build && yarn types"
+  },
+  "main": "./dist/cjs/index.js",
+  "types": "types/index.d.ts",
+  "contributors": [
+    {
+      "name": "Andrew Plummer",
+      "email": "andrew@rekall.ai"
+    }
+  ],
+  "license": "MIT",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/bedrockio/router"
+  },
+  "dependencies": {
+    "@anthropic-ai/sdk": "^0.33.1",
+    "glob": "^11.0.1",
+    "mustache": "^4.2.0",
+    "openai": "^4.79.1"
+  },
+  "devDependencies": {
+    "@babel/cli": "^7.26.4",
+    "@babel/core": "^7.26.0",
+    "@babel/eslint-parser": "^7.26.5",
+    "@babel/preset-env": "^7.26.0",
+    "@bedrockio/prettier-config": "^1.0.2",
+    "eslint": "^8.33.0",
+    "eslint-plugin-bedrock": "^1.0.27",
+    "eslint-plugin-import": "^2.31.0",
+    "jest": "^29.7.0",
+    "prettier-eslint": "^16.3.0",
+    "typescript": "^5.7.3"
+  },
+  "volta": {
+    "node": "22.12.0",
+    "yarn": "1.22.22"
+  }
+}
package/src/BaseClient.js
ADDED
@@ -0,0 +1,134 @@
+import Mustache from 'mustache';
+
+import { loadTemplates } from './util.js';
+
+const MESSAGES_REG = /(?:^|\n)-{3,}\s*(\w+)\s*-{3,}(.*?)(?=\n-{3,}|$)/gs;
+
+export default class BaseClient {
+  constructor(options) {
+    this.options = options;
+    this.templates = null;
+  }
+
+  /**
+   * Interpolates vars into the provided template and
+   * runs the chat completion. The "output" option may
+   * be omitted and will default to `"text"`.
+   * {@link https://github.com/bedrockio/ai?tab=readme-ov-file#bedrockioai Documentation}
+   *
+   * @param {object} options
+   * @param {string} options.model - The model to use.
+   * @param {"raw" | "text" | "json" | "messages"} [options.output] - The output to use.
+   * @param {Object.<string, any>} [options.other] - Additional props
+   * will be interpolated in the template.
+   */
+  async prompt(options) {
+    options = {
+      ...this.options,
+      ...options,
+    };
+
+    const messages = await this.getMessages(options);
+    return await this.getCompletion({
+      ...options,
+      messages,
+    });
+  }
+
+  /**
+   * Streams the prompt response.
+   * @returns {AsyncIterator}
+   */
+  async *stream(options) {
+    const stream = await this.prompt({
+      ...options,
+      output: 'raw',
+      stream: true,
+    });
+
+    let started = false;
+
+    // @ts-ignore
+    for await (const chunk of stream) {
+      const resolved = this.getStreamedChunk(chunk, started);
+      started = true;
+
+      // @ts-ignore
+      if (resolved) {
+        yield resolved;
+      }
+    }
+  }
+
+  async getMessages(options) {
+    const template = await this.resolveTemplate(options);
+    const raw = Mustache.render(template, transformParams(options));
+
+    const messages = [];
+    for (let match of raw.matchAll(MESSAGES_REG)) {
+      const [, role, content] = match;
+      messages.push({
+        role: role.toLowerCase(),
+        content: content.trim(),
+      });
+    }
+
+    if (!messages.length) {
+      messages.push({
+        role: 'user',
+        content: raw.trim(),
+      });
+    }
+
+    return messages;
+  }
+
+  async loadTemplates() {
+    const { templates } = this.options;
+    this.templates ||= await loadTemplates(templates);
+  }
+
+  async resolveTemplate(options) {
+    await this.loadTemplates();
+
+    let { file, template } = options;
+
+    if (!template && file) {
+      template = this.templates[file];
+    }
+
+    if (!template) {
+      throw new Error('No template provided.');
+    }
+
+    return template;
+  }
+
+  getCompletion(options) {
+    void options;
+    new Error('Method not implemented.');
+  }
+
+  getStreamedChunk(chunk, started) {
+    void chunk;
+    void started;
+    new Error('Method not implemented.');
+  }
+}
+
+function transformParams(params) {
+  const result = {};
+  for (let [key, value] of Object.entries(params)) {
+    if (Array.isArray(value)) {
+      value = value
+        .map((el) => {
+          return `- ${el}`;
+        })
+        .join('\n');
+    } else if (typeof value === 'object') {
+      value = JSON.stringify(value, null, 2);
+    }
+    result[key] = value;
+  }
+  return result;
+}
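`getMessages` above is what turns a rendered template into a chat message array: `MESSAGES_REG` splits the text on `--- ROLE ---` delimiters, and anything without delimiters becomes a single `user` message. A small standalone sketch of that splitting step, with made-up input text:

```js
const MESSAGES_REG = /(?:^|\n)-{3,}\s*(\w+)\s*-{3,}(.*?)(?=\n-{3,}|$)/gs;

// Example of a template after Mustache interpolation.
const rendered = [
  '--- SYSTEM ---',
  'This is a list of fruits: banana, apple, pear',
  '--- USER ---',
  'Which fruit is long and yellow?',
].join('\n');

const messages = [];
for (const match of rendered.matchAll(MESSAGES_REG)) {
  const [, role, content] = match;
  messages.push({ role: role.toLowerCase(), content: content.trim() });
}

console.log(messages);
// [
//   { role: 'system', content: 'This is a list of fruits: banana, apple, pear' },
//   { role: 'user', content: 'Which fruit is long and yellow?' }
// ]
```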
package/src/anthropic.js
ADDED
@@ -0,0 +1,96 @@
+import Anthropic from '@anthropic-ai/sdk';
+
+import BaseClient from './BaseClient.js';
+import { transformResponse } from './util.js';
+
+const MODELS_URL = 'https://docs.anthropic.com/en/docs/about-claude/models';
+const DEFAULT_MODEL = 'claude-3-5-sonnet-latest';
+
+export class AnthropicClient extends BaseClient {
+  constructor(options) {
+    super(options);
+    this.client = new Anthropic({
+      ...options,
+    });
+  }
+
+  /**
+   * Lists available models.
+   */
+  async models() {
+    const { data } = await this.client.models.list();
+    return data.map((o) => o.id);
+  }
+
+  async getCompletion(options) {
+    const {
+      model = DEFAULT_MODEL,
+      max_tokens = 2048,
+      output = 'text',
+      stream = false,
+      messages,
+    } = options;
+    const { client } = this;
+
+    const { system, user } = splitMessages(messages);
+
+    if (!model) {
+      throw new Error(
+        `No model specified. Available models are here: ${MODELS_URL}.`,
+      );
+    }
+
+    const response = await client.messages.create({
+      max_tokens,
+      messages: user,
+      system,
+      model,
+      stream,
+    });
+
+    if (output === 'raw') {
+      return response;
+    }
+
+    // @ts-ignore
+    const message = response.content[0];
+
+    return transformResponse({
+      ...options,
+      messages,
+      message,
+    });
+  }
+
+  getStreamedChunk(chunk) {
+    // @ts-ignore
+    let type;
+    if (chunk.type === 'content_block_start') {
+      type = 'start';
+    } else if (chunk.type === 'content_block_delta') {
+      type = 'chunk';
+    } else if (chunk.type === 'message_stop') {
+      type = 'stop';
+    }
+
+    if (type) {
+      return {
+        type,
+        text: chunk.delta?.text || '',
+      };
+    }
+  }
+}
+
+function splitMessages(messages) {
+  const system = [];
+  const user = [];
+  for (let message of messages) {
+    if (message.role === 'system') {
+      system.push(message);
+    } else {
+      user.push(message);
+    }
+  }
+  return { system: system.join('\n'), user };
+}
package/src/index.js
ADDED
@@ -0,0 +1,17 @@
+import { OpenAiClient } from './openai.js';
+import { AnthropicClient } from './anthropic.js';
+
+export class Client {
+  constructor(options) {
+    const { platform, ...rest } = options;
+    if (platform === 'openai' || platform === 'gpt') {
+      return new OpenAiClient(rest);
+    } else if (platform === 'anthropic' || platform === 'claude') {
+      return new AnthropicClient(rest);
+    } else if (platform) {
+      throw new Error(`Unknown platform "${platform}".`);
+    } else {
+      throw new Error('Platform required.');
+    }
+  }
+}
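`Client` is a thin factory rather than a class in its own right: the constructor returns an `OpenAiClient` or `AnthropicClient` instance, and `gpt`/`claude` are accepted aliases. A brief usage sketch; the key and template path are placeholders:

```js
import { Client } from '@bedrockio/ai';

// Actually returns an AnthropicClient ('claude' is an alias for 'anthropic').
const client = new Client({
  platform: 'claude',
  templates: './test/templates',
  apiKey: process.env.ANTHROPIC_API_KEY,
});

// Unrecognized platforms throw:
// new Client({ platform: 'gemini' }) -> Error: Unknown platform "gemini".
```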
package/src/openai.js
ADDED
@@ -0,0 +1,67 @@
+import OpenAI from 'openai';
+
+import BaseClient from './BaseClient.js';
+import { transformResponse } from './util.js';
+
+const DEFAULT_MODEL = 'gpt-4o';
+
+export class OpenAiClient extends BaseClient {
+  constructor(options) {
+    super(options);
+    this.client = new OpenAI({
+      ...options,
+    });
+  }
+
+  /**
+   * Lists available models.
+   */
+  async models() {
+    const { data } = await this.client.models.list();
+    return data.map((o) => o.id);
+  }
+
+  async getCompletion(options) {
+    const { model = DEFAULT_MODEL, output = 'text', stream = false } = options;
+    const { client } = this;
+
+    const messages = await this.getMessages(options);
+    const response = await client.chat.completions.create({
+      model,
+      messages,
+      stream,
+    });
+
+    if (output === 'raw') {
+      return response;
+    }
+
+    const { message } = response.choices[0];
+
+    return transformResponse({
+      ...options,
+      messages,
+      message,
+    });
+  }
+
+  getStreamedChunk(chunk, started) {
+    const [choice] = chunk.choices;
+
+    let type;
+    if (!started) {
+      type = 'start';
+    } else if (choice.finish_reason === 'stop') {
+      type = 'stop';
+    } else {
+      type = 'chunk';
+    }
+
+    if (type) {
+      return {
+        type,
+        text: choice.delta.content || '',
+      };
+    }
+  }
+}
package/src/util.js
ADDED
@@ -0,0 +1,42 @@
+import fs from 'fs/promises';
+
+import path from 'path';
+
+import { glob } from 'glob';
+
+const JSON_REG = /([{[].+[}\]])/s;
+
+export async function loadTemplates(dir) {
+  const result = {};
+  const files = await glob(path.join(dir, '*.md'));
+
+  for (let file of files) {
+    const base = path.basename(file, '.md');
+    result[base] = await fs.readFile(file, 'utf-8');
+  }
+
+  return result;
+}
+
+export function parse(content) {
+  try {
+    const match = content.match(JSON_REG);
+    return JSON.parse(match[1]);
+  } catch (error) {
+    throw new Error('Unable to derive JSON object in response.');
+  }
+}
+
+export function transformResponse(options) {
+  const { output = 'text', messages, message } = options;
+  const content = message.content || message.text;
+  if (output === 'text') {
+    return content;
+  } else if (output === 'messages') {
+    return [...messages, message];
+  } else if (output === 'json') {
+    return parse(content);
+  } else {
+    throw new Error(`Unknown output type "${output}".`);
+  }
+}
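`parse()` above is what backs `output: 'json'`: it grabs the first `{...}` or `[...]` span from the reply before calling `JSON.parse`, so responses wrapped in extra prose still parse. A quick standalone check of that regex, with an invented reply:

```js
const JSON_REG = /([{[].+[}\]])/s;

// A made-up model reply that wraps the JSON in prose.
const reply =
  'Sure! Here is the classification: { "name": "banana", "certainty": 0.9 } Let me know if you need more.';

const match = reply.match(JSON_REG);
console.log(JSON.parse(match[1]));
// { name: 'banana', certainty: 0.9 }
```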
package/types/Link.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"Link.d.ts","sourceRoot":"","sources":["../src/Link.js"],"names":[],"mappings":"AAEA,kFAWC"}
package/types/NavLink.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"NavLink.d.ts","sourceRoot":"","sources":["../src/NavLink.js"],"names":[],"mappings":"AAIA,qFAkBC"}
package/types/Redirect.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"Redirect.d.ts","sourceRoot":"","sources":["../src/Redirect.js"],"names":[],"mappings":"AAIA,kDASC"}
package/types/Route.d.ts
ADDED
@@ -0,0 +1,22 @@
+/**
+ * @typedef {import('react').ReactNode|import('react').ElementType} Node
+ *
+ * @typedef {Object} RouteProps
+ * @property {string} path - The path to use.
+ * @property {Node} render - The component or element to render.
+ *
+ * @param {RouteProps} props
+ */
+export default function Route(props: RouteProps): string | number | boolean | import("react").ReactElement<any, string | import("react").JSXElementConstructor<any>> | Iterable<import("react").ReactNode> | import("react").ReactPortal;
+export type Node = import("react").ReactNode | import("react").ElementType;
+export type RouteProps = {
+    /**
+     * - The path to use.
+     */
+    path: string;
+    /**
+     * - The component or element to render.
+     */
+    render: Node;
+};
+//# sourceMappingURL=Route.d.ts.map
package/types/Route.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"Route.d.ts","sourceRoot":"","sources":["../src/Route.js"],"names":[],"mappings":"AAAA;;;;;;;;GAQG;AACH,qCAFW,UAAU,0LAWpB;mBAjBY,OAAO,OAAO,EAAE,SAAS,GAAC,OAAO,OAAO,EAAE,WAAW;;;;;UAGpD,MAAM;;;;YACN,IAAI"}
package/types/Router.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"Router.d.ts","sourceRoot":"","sources":["../src/Router.js"],"names":[],"mappings":"AA6BA,oFAsDC;AAED,iCAEC;AAxED,yDAA2D"}
package/types/hoc.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"hoc.d.ts","sourceRoot":"","sources":["../src/hoc.js"],"names":[],"mappings":"AAEA,oGAgBC"}
package/types/index.d.ts
ADDED
@@ -0,0 +1 @@
+//# sourceMappingURL=index.d.ts.map
package/types/index.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.js"],"names":[],"mappings":""}
package/types/location.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"location.d.ts","sourceRoot":"","sources":["../src/location.js"],"names":[],"mappings":"AAEA,mCAGC"}
package/types/navigate.d.ts
ADDED
@@ -0,0 +1,8 @@
+export function useNavigate(): typeof navigate;
+declare function navigate(path: any, state: any): void;
+declare namespace navigate {
+    export { replace };
+}
+declare function replace(path: any, state: any): void;
+export {};
+//# sourceMappingURL=navigate.d.ts.map
package/types/navigate.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"navigate.d.ts","sourceRoot":"","sources":["../src/navigate.js"],"names":[],"mappings":"AAYA,+CAEC;AAZD,uDAEC;;;;AAED,sDAEC"}
package/types/params.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"params.d.ts","sourceRoot":"","sources":["../src/params.js"],"names":[],"mappings":"AAEA,iCAGC"}
package/types/search.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"search.d.ts","sourceRoot":"","sources":["../src/search.js"],"names":[],"mappings":"AAEA,iCAGC"}