@push.rocks/smartai 0.1.0 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist_ts/00_commitinfo_data.js +1 -1
- package/dist_ts/provider.anthropic.js +84 -3
- package/dist_ts/provider.xai.d.ts +43 -0
- package/dist_ts/provider.xai.js +141 -0
- package/license +19 -0
- package/package.json +1 -1
- package/readme.md +26 -3
- package/ts/00_commitinfo_data.ts +1 -1
- package/ts/provider.anthropic.ts +96 -2
- package/ts/provider.xai.ts +183 -0
package/dist_ts/00_commitinfo_data.js
CHANGED

@@ -3,7 +3,7 @@
  */
 export const commitinfo = {
   name: '@push.rocks/smartai',
-  version: '0.1.0',
+  version: '0.3.0',
   description: 'A TypeScript library for integrating and interacting with multiple AI models, offering capabilities for chat and potentially audio responses.'
 };
 //# sourceMappingURL=data:application/json;base64,…
package/dist_ts/provider.anthropic.js
CHANGED

@@ -108,10 +108,91 @@ export class AnthropicProvider extends MultiModalModel {
         throw new Error('Audio generation is not yet supported by Anthropic.');
     }
     async vision(optionsArg) {
-
+        const base64Image = optionsArg.image.toString('base64');
+        const content = [
+            {
+                type: 'text',
+                text: optionsArg.prompt
+            },
+            {
+                type: 'image',
+                source: {
+                    type: 'base64',
+                    media_type: 'image/jpeg',
+                    data: base64Image
+                }
+            }
+        ];
+        const result = await this.anthropicApiClient.messages.create({
+            model: 'claude-3-opus-20240229',
+            messages: [{
+                    role: 'user',
+                    content
+                }],
+            max_tokens: 1024
+        });
+        // Extract text content from the response
+        let message = '';
+        for (const block of result.content) {
+            if ('text' in block) {
+                message += block.text;
+            }
+        }
+        return message;
     }
     async document(optionsArg) {
-
+        // Convert PDF documents to images using SmartPDF
+        const smartpdfInstance = new plugins.smartpdf.SmartPdf();
+        let documentImageBytesArray = [];
+        for (const pdfDocument of optionsArg.pdfDocuments) {
+            const documentImageArray = await smartpdfInstance.convertPDFToPngBytes(pdfDocument);
+            documentImageBytesArray = documentImageBytesArray.concat(documentImageArray);
+        }
+        // Convert message history to Anthropic format
+        const messages = optionsArg.messageHistory.map(msg => ({
+            role: msg.role === 'assistant' ? 'assistant' : 'user',
+            content: msg.content
+        }));
+        // Create content array with text and images
+        const content = [
+            {
+                type: 'text',
+                text: optionsArg.userMessage
+            }
+        ];
+        // Add each document page as an image
+        for (const imageBytes of documentImageBytesArray) {
+            content.push({
+                type: 'image',
+                source: {
+                    type: 'base64',
+                    media_type: 'image/jpeg',
+                    data: Buffer.from(imageBytes).toString('base64')
+                }
+            });
+        }
+        const result = await this.anthropicApiClient.messages.create({
+            model: 'claude-3-opus-20240229',
+            system: optionsArg.systemMessage,
+            messages: [
+                ...messages,
+                { role: 'user', content }
+            ],
+            max_tokens: 4096
+        });
+        // Extract text content from the response
+        let message = '';
+        for (const block of result.content) {
+            if ('text' in block) {
+                message += block.text;
+            }
+        }
+        return {
+            message: {
+                role: 'assistant',
+                content: message
+            }
+        };
     }
 }
-//# sourceMappingURL=data:application/json;base64,
+//# sourceMappingURL=data:application/json;base64,…
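The new vision() and document() methods above mirror the TypeScript source shown further down in this diff: the image is base64-encoded and sent as an image/jpeg block next to the prompt, and the text blocks of the response are concatenated into one string. A minimal usage sketch, going through a SmartAi instance as in the readme examples later in this diff (the file path and the start() call are illustrative assumptions):

```typescript
import { promises as fs } from 'fs';
import { SmartAi } from '@push.rocks/smartai';

const smartAi = new SmartAi({ anthropicToken: 'your-anthropic-token' });
await smartAi.start(); // assumption: providers are initialized via a start() call

// The provider hardcodes media_type: 'image/jpeg', so JPEG input is the safe choice.
const imageBuffer = await fs.readFile('./photo.jpg'); // hypothetical path
const description = await smartAi.anthropicProvider.vision({
  image: imageBuffer,
  prompt: 'Analyze this image in detail'
});
console.log(description);
```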
package/dist_ts/provider.xai.d.ts
ADDED

@@ -0,0 +1,43 @@
+import * as plugins from './plugins.js';
+import { MultiModalModel } from './abstract.classes.multimodal.js';
+export interface IXAIProviderOptions {
+    xaiToken: string;
+}
+export declare class XAIProvider extends MultiModalModel {
+    private options;
+    openAiApiClient: plugins.openai.default;
+    smartpdfInstance: plugins.smartpdf.SmartPdf;
+    constructor(optionsArg: IXAIProviderOptions);
+    start(): Promise<void>;
+    stop(): Promise<void>;
+    chatStream(input: ReadableStream<Uint8Array>): Promise<ReadableStream<string>>;
+    chat(optionsArg: {
+        systemMessage: string;
+        userMessage: string;
+        messageHistory: {
+            role: string;
+            content: string;
+        }[];
+    }): Promise<{
+        role: 'assistant';
+        message: string;
+    }>;
+    audio(optionsArg: {
+        message: string;
+    }): Promise<NodeJS.ReadableStream>;
+    vision(optionsArg: {
+        image: Buffer;
+        prompt: string;
+    }): Promise<string>;
+    document(optionsArg: {
+        systemMessage: string;
+        userMessage: string;
+        pdfDocuments: Uint8Array[];
+        messageHistory: {
+            role: string;
+            content: string;
+        }[];
+    }): Promise<{
+        message: any;
+    }>;
+}
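This declaration file pins down the X.AI surface: chat() takes systemMessage, userMessage, and a messageHistory array, and resolves to { role: 'assistant'; message: string }. A minimal sketch against that contract (whether XAIProvider is re-exported from the package entry point is an assumption):

```typescript
import { XAIProvider } from '@push.rocks/smartai'; // assumption: exported from the package entry point

const xai = new XAIProvider({ xaiToken: 'your-xai-token' });
await xai.start(); // creates the OpenAI-compatible client pointed at https://api.x.ai/v1

const reply = await xai.chat({
  systemMessage: 'You are a concise assistant.',
  userMessage: 'What models does this provider use?',
  messageHistory: [] // earlier { role, content } turns, if any
});
console.log(reply.role, reply.message); // 'assistant', followed by the model's answer
```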
package/dist_ts/provider.xai.js
ADDED

@@ -0,0 +1,141 @@
+import * as plugins from './plugins.js';
+import * as paths from './paths.js';
+import { MultiModalModel } from './abstract.classes.multimodal.js';
+export class XAIProvider extends MultiModalModel {
+    constructor(optionsArg) {
+        super();
+        this.options = optionsArg;
+    }
+    async start() {
+        this.openAiApiClient = new plugins.openai.default({
+            apiKey: this.options.xaiToken,
+            baseURL: 'https://api.x.ai/v1',
+        });
+        this.smartpdfInstance = new plugins.smartpdf.SmartPdf();
+    }
+    async stop() { }
+    async chatStream(input) {
+        // Create a TextDecoder to handle incoming chunks
+        const decoder = new TextDecoder();
+        let buffer = '';
+        let currentMessage = null;
+        // Create a TransformStream to process the input
+        const transform = new TransformStream({
+            async transform(chunk, controller) {
+                buffer += decoder.decode(chunk, { stream: true });
+                // Try to parse complete JSON messages from the buffer
+                while (true) {
+                    const newlineIndex = buffer.indexOf('\n');
+                    if (newlineIndex === -1)
+                        break;
+                    const line = buffer.slice(0, newlineIndex);
+                    buffer = buffer.slice(newlineIndex + 1);
+                    if (line.trim()) {
+                        try {
+                            const message = JSON.parse(line);
+                            currentMessage = {
+                                role: message.role || 'user',
+                                content: message.content || '',
+                            };
+                        }
+                        catch (e) {
+                            console.error('Failed to parse message:', e);
+                        }
+                    }
+                }
+                // If we have a complete message, send it to X.AI
+                if (currentMessage) {
+                    const stream = await this.openAiApiClient.chat.completions.create({
+                        model: 'grok-2-latest',
+                        messages: [{ role: currentMessage.role, content: currentMessage.content }],
+                        stream: true,
+                    });
+                    // Process each chunk from X.AI
+                    for await (const chunk of stream) {
+                        const content = chunk.choices[0]?.delta?.content;
+                        if (content) {
+                            controller.enqueue(content);
+                        }
+                    }
+                    currentMessage = null;
+                }
+            },
+            flush(controller) {
+                if (buffer) {
+                    try {
+                        const message = JSON.parse(buffer);
+                        controller.enqueue(message.content || '');
+                    }
+                    catch (e) {
+                        console.error('Failed to parse remaining buffer:', e);
+                    }
+                }
+            }
+        });
+        // Connect the input to our transform stream
+        return input.pipeThrough(transform);
+    }
+    async chat(optionsArg) {
+        // Prepare messages array with system message, history, and user message
+        const messages = [
+            { role: 'system', content: optionsArg.systemMessage },
+            ...optionsArg.messageHistory.map(msg => ({
+                role: msg.role,
+                content: msg.content
+            })),
+            { role: 'user', content: optionsArg.userMessage }
+        ];
+        // Call X.AI's chat completion API
+        const completion = await this.openAiApiClient.chat.completions.create({
+            model: 'grok-2-latest',
+            messages: messages,
+            stream: false,
+        });
+        // Return the assistant's response
+        return {
+            role: 'assistant',
+            message: completion.choices[0]?.message?.content || ''
+        };
+    }
+    async audio(optionsArg) {
+        throw new Error('Audio generation is not supported by X.AI');
+    }
+    async vision(optionsArg) {
+        throw new Error('Vision tasks are not supported by X.AI');
+    }
+    async document(optionsArg) {
+        // First convert PDF documents to images
+        let pdfDocumentImageBytesArray = [];
+        for (const pdfDocument of optionsArg.pdfDocuments) {
+            const documentImageArray = await this.smartpdfInstance.convertPDFToPngBytes(pdfDocument);
+            pdfDocumentImageBytesArray = pdfDocumentImageBytesArray.concat(documentImageArray);
+        }
+        // Convert images to base64 for inclusion in the message
+        const imageBase64Array = pdfDocumentImageBytesArray.map(bytes => Buffer.from(bytes).toString('base64'));
+        // Combine document images into the user message
+        const enhancedUserMessage = `
+    ${optionsArg.userMessage}
+
+    Document contents (as images):
+    ${imageBase64Array.map((img, i) => `Image ${i + 1}: <image data>`).join('\n')}
+    `;
+        // Use chat completion to analyze the documents
+        const messages = [
+            { role: 'system', content: optionsArg.systemMessage },
+            ...optionsArg.messageHistory.map(msg => ({
+                role: msg.role,
+                content: msg.content
+            })),
+            { role: 'user', content: enhancedUserMessage }
+        ];
+        const completion = await this.openAiApiClient.chat.completions.create({
+            model: 'grok-2-latest',
+            messages: messages,
+            stream: false,
+        });
+        return {
+            message: completion.choices[0]?.message?.content || ''
+        };
+    }
+}
+//# sourceMappingURL=data:application/json;base64,…
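chatStream() expects newline-delimited JSON on its input stream: each complete line is parsed into a { role, content } message, forwarded to grok-2-latest with stream: true, and the response tokens are enqueued on the output stream. A sketch of framing one message that way (xai stands for a started XAIProvider instance, as in the previous sketch):

```typescript
// One JSON message per line, terminated by '\n', matching the buffer-splitting
// logic in the TransformStream above.
const payload = JSON.stringify({ role: 'user', content: 'Hello, Grok!' }) + '\n';

const input = new ReadableStream<Uint8Array>({
  start(controller) {
    controller.enqueue(new TextEncoder().encode(payload));
    controller.close();
  },
});

const output = await xai.chatStream(input);
const reader = output.getReader();
for (;;) {
  const { done, value } = await reader.read();
  if (done) break;
  process.stdout.write(value); // value is a string token from the model
}
```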
package/license
ADDED

@@ -0,0 +1,19 @@
+Copyright (c) 2024 Task Venture Capital GmbH (hello@task.vc)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@push.rocks/smartai",
-  "version": "0.1.0",
+  "version": "0.3.0",
   "private": false,
   "description": "A TypeScript library for integrating and interacting with multiple AI models, offering capabilities for chat and potentially audio responses.",
   "main": "dist_ts/index.js",
package/readme.md
CHANGED

@@ -24,9 +24,17 @@ This command installs the package and adds it to your project's dependencies.
   openaiToken: 'your-openai-token'
 ```
 
+### X.AI
+- Models: Grok-2-latest
+- Features: Chat, Streaming, Document Processing
+- Configuration:
+```typescript
+xaiToken: 'your-xai-token'
+```
+
 ### Anthropic
 - Models: Claude-3-opus-20240229
-- Features: Chat, Streaming
+- Features: Chat, Streaming, Vision, Document Processing
 - Configuration:
 ```typescript
 anthropicToken: 'your-anthropic-token'

@@ -75,6 +83,7 @@ import { SmartAi } from '@push.rocks/smartai';
 
 const smartAi = new SmartAi({
   openaiToken: 'your-openai-token',
+  xaiToken: 'your-xai-token',
   anthropicToken: 'your-anthropic-token',
   perplexityToken: 'your-perplexity-token',
   groqToken: 'your-groq-token',

@@ -148,7 +157,7 @@ const audioStream = await smartAi.openaiProvider.audio({
 
 ### Document Processing
 
-For providers that support document processing (OpenAI and Ollama):
+For providers that support document processing (OpenAI, Ollama, and Anthropic):
 
 ```typescript
 // Using OpenAI

@@ -166,6 +175,14 @@ const analysis = await smartAi.ollamaProvider.document({
   messageHistory: [],
   pdfDocuments: [pdfBuffer] // Uint8Array of PDF content
 });
+
+// Using Anthropic with Claude 3
+const anthropicAnalysis = await smartAi.anthropicProvider.document({
+  systemMessage: 'You are a document analysis assistant',
+  userMessage: 'Please analyze this document and extract key information',
+  messageHistory: [],
+  pdfDocuments: [pdfBuffer] // Uint8Array of PDF content
+});
 ```
 
 Both providers will:

@@ -175,7 +192,7 @@ Both providers will:
 
 ### Vision Processing
 
-For providers that support vision tasks (OpenAI and Ollama):
+For providers that support vision tasks (OpenAI, Ollama, and Anthropic):
 
 ```typescript
 // Using OpenAI's GPT-4 Vision

@@ -189,6 +206,12 @@ const analysis = await smartAi.ollamaProvider.vision({
   image: imageBuffer,
   prompt: 'Analyze this image in detail'
 });
+
+// Using Anthropic's Claude 3
+const anthropicAnalysis = await smartAi.anthropicProvider.vision({
+  image: imageBuffer,
+  prompt: 'Please analyze this image and describe what you see'
+});
 ```
 
 ## Error Handling
package/ts/00_commitinfo_data.ts
CHANGED
package/ts/provider.anthropic.ts
CHANGED

@@ -2,6 +2,9 @@ import * as plugins from './plugins.js';
 import * as paths from './paths.js';
 import { MultiModalModel } from './abstract.classes.multimodal.js';
 import type { ChatOptions, ChatResponse, ChatMessage } from './abstract.classes.multimodal.js';
+import type { ImageBlockParam, TextBlockParam } from '@anthropic-ai/sdk/resources/messages';
+
+type ContentBlock = ImageBlockParam | TextBlockParam;
 
 export interface IAnthropicProviderOptions {
   anthropicToken: string;

@@ -132,7 +135,40 @@ export class AnthropicProvider extends MultiModalModel {
   }
 
   public async vision(optionsArg: { image: Buffer; prompt: string }): Promise<string> {
-
+    const base64Image = optionsArg.image.toString('base64');
+
+    const content: ContentBlock[] = [
+      {
+        type: 'text',
+        text: optionsArg.prompt
+      },
+      {
+        type: 'image',
+        source: {
+          type: 'base64',
+          media_type: 'image/jpeg',
+          data: base64Image
+        }
+      }
+    ];
+
+    const result = await this.anthropicApiClient.messages.create({
+      model: 'claude-3-opus-20240229',
+      messages: [{
+        role: 'user',
+        content
+      }],
+      max_tokens: 1024
+    });
+
+    // Extract text content from the response
+    let message = '';
+    for (const block of result.content) {
+      if ('text' in block) {
+        message += block.text;
+      }
+    }
+    return message;
   }
 
   public async document(optionsArg: {

@@ -141,6 +177,64 @@ export class AnthropicProvider extends MultiModalModel {
     pdfDocuments: Uint8Array[];
     messageHistory: ChatMessage[];
   }): Promise<{ message: any }> {
-
+    // Convert PDF documents to images using SmartPDF
+    const smartpdfInstance = new plugins.smartpdf.SmartPdf();
+    let documentImageBytesArray: Uint8Array[] = [];
+
+    for (const pdfDocument of optionsArg.pdfDocuments) {
+      const documentImageArray = await smartpdfInstance.convertPDFToPngBytes(pdfDocument);
+      documentImageBytesArray = documentImageBytesArray.concat(documentImageArray);
+    }
+
+    // Convert message history to Anthropic format
+    const messages = optionsArg.messageHistory.map(msg => ({
+      role: msg.role === 'assistant' ? 'assistant' as const : 'user' as const,
+      content: msg.content
+    }));
+
+    // Create content array with text and images
+    const content: ContentBlock[] = [
+      {
+        type: 'text',
+        text: optionsArg.userMessage
+      }
+    ];
+
+    // Add each document page as an image
+    for (const imageBytes of documentImageBytesArray) {
+      content.push({
+        type: 'image',
+        source: {
+          type: 'base64',
+          media_type: 'image/jpeg',
+          data: Buffer.from(imageBytes).toString('base64')
+        }
+      });
+    }
+
+    const result = await this.anthropicApiClient.messages.create({
+      model: 'claude-3-opus-20240229',
+      system: optionsArg.systemMessage,
+      messages: [
+        ...messages,
+        { role: 'user', content }
+      ],
+      max_tokens: 4096
+    });
+
+    // Extract text content from the response
+    let message = '';
+    for (const block of result.content) {
+      if ('text' in block) {
+        message += block.text;
+      }
+    }
+
+    return {
+      message: {
+        role: 'assistant',
+        content: message
+      }
+    };
   }
 }
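document() renders every page of every PDF to PNG bytes via SmartPdf, then attaches each page as a base64 image block after the user's text block, with the system message passed separately to the Messages API. Called directly on the provider it might look like this (a sketch; the direct AnthropicProvider import and the file path are assumptions — the readme shows the same call through SmartAi):

```typescript
import { promises as fs } from 'fs';
import { AnthropicProvider } from '@push.rocks/smartai'; // assumption: exported from the package entry point

const anthropic = new AnthropicProvider({ anthropicToken: 'your-anthropic-token' });
await anthropic.start();

const pdfBuffer = await fs.readFile('./contract.pdf'); // hypothetical path
const result = await anthropic.document({
  systemMessage: 'You are a document analysis assistant',
  userMessage: 'List the parties and the key dates in this document.',
  messageHistory: [],
  pdfDocuments: [new Uint8Array(pdfBuffer)], // each rendered page becomes one image block
});
console.log(result.message.content);
```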
package/ts/provider.xai.ts
ADDED

@@ -0,0 +1,183 @@
+import * as plugins from './plugins.js';
+import * as paths from './paths.js';
+import { MultiModalModel } from './abstract.classes.multimodal.js';
+import type { ChatOptions, ChatResponse, ChatMessage } from './abstract.classes.multimodal.js';
+import type { ChatCompletionMessageParam } from 'openai/resources/chat/completions';
+
+export interface IXAIProviderOptions {
+  xaiToken: string;
+}
+
+export class XAIProvider extends MultiModalModel {
+  private options: IXAIProviderOptions;
+  public openAiApiClient: plugins.openai.default;
+  public smartpdfInstance: plugins.smartpdf.SmartPdf;
+
+  constructor(optionsArg: IXAIProviderOptions) {
+    super();
+    this.options = optionsArg;
+  }
+
+  public async start() {
+    this.openAiApiClient = new plugins.openai.default({
+      apiKey: this.options.xaiToken,
+      baseURL: 'https://api.x.ai/v1',
+    });
+    this.smartpdfInstance = new plugins.smartpdf.SmartPdf();
+  }
+
+  public async stop() {}
+
+  public async chatStream(input: ReadableStream<Uint8Array>): Promise<ReadableStream<string>> {
+    // Create a TextDecoder to handle incoming chunks
+    const decoder = new TextDecoder();
+    let buffer = '';
+    let currentMessage: { role: string; content: string; } | null = null;
+
+    // Create a TransformStream to process the input
+    const transform = new TransformStream<Uint8Array, string>({
+      async transform(chunk, controller) {
+        buffer += decoder.decode(chunk, { stream: true });
+
+        // Try to parse complete JSON messages from the buffer
+        while (true) {
+          const newlineIndex = buffer.indexOf('\n');
+          if (newlineIndex === -1) break;
+
+          const line = buffer.slice(0, newlineIndex);
+          buffer = buffer.slice(newlineIndex + 1);
+
+          if (line.trim()) {
+            try {
+              const message = JSON.parse(line);
+              currentMessage = {
+                role: message.role || 'user',
+                content: message.content || '',
+              };
+            } catch (e) {
+              console.error('Failed to parse message:', e);
+            }
+          }
+        }
+
+        // If we have a complete message, send it to X.AI
+        if (currentMessage) {
+          const stream = await this.openAiApiClient.chat.completions.create({
+            model: 'grok-2-latest',
+            messages: [{ role: currentMessage.role, content: currentMessage.content }],
+            stream: true,
+          });
+
+          // Process each chunk from X.AI
+          for await (const chunk of stream) {
+            const content = chunk.choices[0]?.delta?.content;
+            if (content) {
+              controller.enqueue(content);
+            }
+          }
+
+          currentMessage = null;
+        }
+      },
+
+      flush(controller) {
+        if (buffer) {
+          try {
+            const message = JSON.parse(buffer);
+            controller.enqueue(message.content || '');
+          } catch (e) {
+            console.error('Failed to parse remaining buffer:', e);
+          }
+        }
+      }
+    });
+
+    // Connect the input to our transform stream
+    return input.pipeThrough(transform);
+  }
+
+  public async chat(optionsArg: {
+    systemMessage: string;
+    userMessage: string;
+    messageHistory: { role: string; content: string; }[];
+  }): Promise<{ role: 'assistant'; message: string; }> {
+    // Prepare messages array with system message, history, and user message
+    const messages: ChatCompletionMessageParam[] = [
+      { role: 'system', content: optionsArg.systemMessage },
+      ...optionsArg.messageHistory.map(msg => ({
+        role: msg.role as 'system' | 'user' | 'assistant',
+        content: msg.content
+      })),
+      { role: 'user', content: optionsArg.userMessage }
+    ];
+
+    // Call X.AI's chat completion API
+    const completion = await this.openAiApiClient.chat.completions.create({
+      model: 'grok-2-latest',
+      messages: messages,
+      stream: false,
+    });
+
+    // Return the assistant's response
+    return {
+      role: 'assistant',
+      message: completion.choices[0]?.message?.content || ''
+    };
+  }
+
+  public async audio(optionsArg: { message: string }): Promise<NodeJS.ReadableStream> {
+    throw new Error('Audio generation is not supported by X.AI');
+  }
+
+  public async vision(optionsArg: { image: Buffer; prompt: string }): Promise<string> {
+    throw new Error('Vision tasks are not supported by X.AI');
+  }
+
+  public async document(optionsArg: {
+    systemMessage: string;
+    userMessage: string;
+    pdfDocuments: Uint8Array[];
+    messageHistory: { role: string; content: string; }[];
+  }): Promise<{ message: any }> {
+    // First convert PDF documents to images
+    let pdfDocumentImageBytesArray: Uint8Array[] = [];
+
+    for (const pdfDocument of optionsArg.pdfDocuments) {
+      const documentImageArray = await this.smartpdfInstance.convertPDFToPngBytes(pdfDocument);
+      pdfDocumentImageBytesArray = pdfDocumentImageBytesArray.concat(documentImageArray);
+    }
+
+    // Convert images to base64 for inclusion in the message
+    const imageBase64Array = pdfDocumentImageBytesArray.map(bytes =>
+      Buffer.from(bytes).toString('base64')
+    );
+
+    // Combine document images into the user message
+    const enhancedUserMessage = `
+    ${optionsArg.userMessage}
+
+    Document contents (as images):
+    ${imageBase64Array.map((img, i) => `Image ${i + 1}: <image data>`).join('\n')}
+    `;
+
+    // Use chat completion to analyze the documents
+    const messages: ChatCompletionMessageParam[] = [
+      { role: 'system', content: optionsArg.systemMessage },
+      ...optionsArg.messageHistory.map(msg => ({
+        role: msg.role as 'system' | 'user' | 'assistant',
+        content: msg.content
+      })),
+      { role: 'user', content: enhancedUserMessage }
+    ];
+
+    const completion = await this.openAiApiClient.chat.completions.create({
+      model: 'grok-2-latest',
+      messages: messages,
+      stream: false,
+    });
+
+    return {
+      message: completion.choices[0]?.message?.content || ''
+    };
+  }
+}
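Note one behavioral difference from the Anthropic path: as written, the map callback in document() discards the base64 strings (img is unused) and interpolates only the literal `Image N: <image data>` placeholders, so grok-2-latest receives page markers in plain text rather than the page images themselves. A usage sketch (import path and file path are assumptions, as above):

```typescript
import { promises as fs } from 'fs';
import { XAIProvider } from '@push.rocks/smartai'; // assumption: exported from the package entry point

const xai = new XAIProvider({ xaiToken: 'your-xai-token' });
await xai.start();

const pdfBuffer = await fs.readFile('./report.pdf'); // hypothetical path
const analysis = await xai.document({
  systemMessage: 'You are a document analysis assistant',
  userMessage: 'Summarize this report.',
  messageHistory: [],
  pdfDocuments: [new Uint8Array(pdfBuffer)],
});
console.log(analysis.message);
```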