outlet-orm 7.0.0 → 9.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +130 -2
- package/package.json +1 -1
- package/src/AI/AIPromptEnhancer.js +170 -0
- package/src/AI/AIQueryBuilder.js +234 -0
- package/src/AI/AIQueryOptimizer.js +185 -0
- package/src/AI/AISeeder.js +181 -0
- package/src/AI/AiBridgeManager.js +287 -0
- package/src/AI/Builders/TextBuilder.js +170 -0
- package/src/AI/Contracts/AudioProviderContract.js +29 -0
- package/src/AI/Contracts/ChatProviderContract.js +38 -0
- package/src/AI/Contracts/EmbeddingsProviderContract.js +19 -0
- package/src/AI/Contracts/ImageProviderContract.js +19 -0
- package/src/AI/Contracts/ModelsProviderContract.js +26 -0
- package/src/AI/Contracts/ToolContract.js +25 -0
- package/src/AI/Facades/AiBridge.js +79 -0
- package/src/AI/MCPServer.js +113 -0
- package/src/AI/Providers/ClaudeProvider.js +64 -0
- package/src/AI/Providers/CustomOpenAIProvider.js +238 -0
- package/src/AI/Providers/GeminiProvider.js +68 -0
- package/src/AI/Providers/GrokProvider.js +46 -0
- package/src/AI/Providers/MistralProvider.js +21 -0
- package/src/AI/Providers/OllamaProvider.js +249 -0
- package/src/AI/Providers/OllamaTurboProvider.js +32 -0
- package/src/AI/Providers/OnnProvider.js +46 -0
- package/src/AI/Providers/OpenAIProvider.js +471 -0
- package/src/AI/Support/AudioNormalizer.js +37 -0
- package/src/AI/Support/ChatNormalizer.js +42 -0
- package/src/AI/Support/Document.js +77 -0
- package/src/AI/Support/DocumentAttachmentMapper.js +101 -0
- package/src/AI/Support/EmbeddingsNormalizer.js +30 -0
- package/src/AI/Support/Exceptions/ProviderError.js +22 -0
- package/src/AI/Support/FileSecurity.js +56 -0
- package/src/AI/Support/ImageNormalizer.js +62 -0
- package/src/AI/Support/JsonSchemaValidator.js +73 -0
- package/src/AI/Support/Message.js +40 -0
- package/src/AI/Support/StreamChunk.js +45 -0
- package/src/AI/Support/ToolChatRunner.js +160 -0
- package/src/AI/Support/ToolRegistry.js +62 -0
- package/src/AI/Tools/SystemInfoTool.js +25 -0
- package/src/index.js +67 -1
- package/types/index.d.ts +326 -0
|
@@ -0,0 +1,185 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
/**
 * AIQueryOptimizer
 * Uses LLM to analyze and optimize SQL queries.
 * Sends the query + schema to an AI provider and returns optimization suggestions.
 *
 * @since 8.0.0
 */
class AIQueryOptimizer {
  /**
   * @param {import('./AiBridgeManager')} manager - AI provider orchestrator used for chat calls
   * @param {Object} [connection] - DatabaseConnection instance (optional, for schema introspection)
   */
  constructor(manager, connection = null) {
    this._manager = manager;
    this._connection = connection;
    // Defaults; override per-instance via using() or per-call via options.model.
    this._provider = 'openai';
    this._model = 'gpt-4o-mini';
  }

  /**
   * Set the provider and model to use.
   * @param {string} provider
   * @param {string} model
   * @returns {this}
   */
  using(provider, model) {
    this._provider = provider;
    this._model = model;
    return this;
  }

  /**
   * Analyze a SQL query and return optimization suggestions.
   * Falls back to the original SQL when the model response is unusable.
   * @param {string} sql - The SQL query to optimize
   * @param {Object} [options={}] - May include schema, dialect, model
   * @returns {Promise<{original: string, optimized: string, suggestions: Array, explanation: string, indexes: Array, raw_response: Object}>}
   */
  async optimize(sql, options = {}) {
    const schema = options.schema || await this._introspectSchema();
    const dialect = options.dialect || this._connection?.config?.client || 'mysql';

    const systemPrompt = `You are a senior database performance engineer. Analyze SQL queries and provide optimizations.

DATABASE DIALECT: ${dialect}
${schema ? `\nDATABASE SCHEMA:\n${schema}` : ''}

RULES:
- Return a JSON object with keys:
  "optimized" (the optimized SQL query),
  "suggestions" (array of optimization suggestions, each with "type", "description", "impact"),
  "explanation" (brief overall explanation),
  "indexes" (array of recommended CREATE INDEX statements, if any).
- Preserve query semantics — the optimized query must return the same results.
- Consider: indexes, query rewriting, subquery elimination, JOIN optimization, proper use of LIMIT/OFFSET.
- Rate impact as "high", "medium", or "low".`;

    const messages = [
      { role: 'system', content: systemPrompt },
      { role: 'user', content: `Optimize this SQL query:\n\n${sql}` },
    ];

    // Low temperature: we want deterministic, conservative rewrites.
    const res = await this._manager.chat(this._provider, messages, {
      model: options.model || this._model,
      temperature: 0.2,
      max_tokens: 2048,
      response_format: 'json',
      json_schema: {
        name: 'optimization_result',
        schema: {
          type: 'object',
          properties: {
            optimized: { type: 'string' },
            suggestions: {
              type: 'array',
              items: {
                type: 'object',
                properties: {
                  type: { type: 'string' },
                  description: { type: 'string' },
                  impact: { type: 'string' },
                },
              },
            },
            explanation: { type: 'string' },
            indexes: { type: 'array', items: { type: 'string' } },
          },
          required: ['optimized', 'suggestions'],
        },
      },
    });

    const parsed = this._extractResult(res);
    return {
      original: sql,
      optimized: parsed.optimized || sql,
      suggestions: parsed.suggestions || [],
      explanation: parsed.explanation || '',
      indexes: parsed.indexes || [],
      raw_response: res,
    };
  }

  /**
   * Analyze query execution plan using EXPLAIN, then ask the LLM for insights.
   * @param {string} sql
   * @param {Object} [options={}] - May include model (per-call override)
   * @returns {Promise<{plan: Array, analysis: string}>}
   * @throws {Error} When no database connection is available.
   */
  async explain(sql, options = {}) {
    if (!this._connection) throw new Error('Database connection required for EXPLAIN.');
    const dialect = this._connection.config?.client || 'mysql';

    // EXPLAIN syntax differs per dialect.
    let plan;
    if (dialect === 'pg' || dialect === 'postgresql') {
      plan = await this._connection.raw(`EXPLAIN (FORMAT JSON) ${sql}`);
    } else if (dialect === 'sqlite' || dialect === 'sqlite3') {
      plan = await this._connection.raw(`EXPLAIN QUERY PLAN ${sql}`);
    } else {
      plan = await this._connection.raw(`EXPLAIN ${sql}`);
    }

    // Ask LLM to analyze the execution plan
    const messages = [
      { role: 'system', content: 'You are a database performance expert. Analyze this EXPLAIN plan and provide actionable insights.' },
      { role: 'user', content: `EXPLAIN output for query "${sql}":\n\n${JSON.stringify(plan, null, 2)}` },
    ];

    const res = await this._manager.chat(this._provider, messages, {
      model: options.model || this._model,
      temperature: 0.2,
      max_tokens: 1024,
    });

    // Cover the response shapes of the supported providers.
    const analysis = res?.output_text || res?.choices?.[0]?.message?.content || res?.content?.[0]?.text || '';
    return { plan: Array.isArray(plan) ? plan : [plan], analysis };
  }

  /**
   * Build a compact textual schema summary ("TABLE name: col type, ...") for the prompt.
   * Best-effort: returns '' on any failure so optimize() still works without schema context.
   * @private
   * @returns {Promise<string>}
   */
  async _introspectSchema() {
    if (!this._connection) return '';
    try {
      const dialect = this._connection.config?.client || 'mysql';
      let tables = [];
      if (dialect === 'pg' || dialect === 'postgresql') {
        const res = await this._connection.raw("SELECT table_name FROM information_schema.tables WHERE table_schema = 'public'");
        tables = (res.rows || res).map(r => r.table_name);
      } else if (dialect === 'sqlite' || dialect === 'sqlite3') {
        const res = await this._connection.raw("SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'");
        tables = (Array.isArray(res) ? res : []).map(r => r.name);
      } else {
        const res = await this._connection.raw('SHOW TABLES');
        tables = (Array.isArray(res) ? res[0] || res : res).map(r => Object.values(r)[0]);
      }

      const parts = [];
      for (const table of tables.slice(0, 30)) { // Limit to 30 tables to keep the prompt small
        try {
          let cols;
          if (dialect === 'pg' || dialect === 'postgresql') {
            // Filter by table_schema too — the table list above is restricted to 'public',
            // so without this filter same-named tables in other schemas would leak columns.
            const cRes = await this._connection.raw(`SELECT column_name, data_type FROM information_schema.columns WHERE table_name = '${table}' AND table_schema = 'public'`);
            cols = (cRes.rows || cRes).map(c => `${c.column_name} ${c.data_type}`);
          } else if (dialect === 'sqlite' || dialect === 'sqlite3') {
            const cRes = await this._connection.raw(`PRAGMA table_info("${table}")`);
            cols = (Array.isArray(cRes) ? cRes : []).map(c => `${c.name} ${c.type}`);
          } else {
            const cRes = await this._connection.raw(`DESCRIBE \`${table}\``);
            cols = (Array.isArray(cRes) ? cRes[0] || cRes : cRes).map(c => `${c.Field} ${c.Type}`);
          }
          parts.push(`TABLE ${table}: ${cols.join(', ')}`);
        } catch { /* skip tables we cannot describe */ }
      }
      return parts.join('\n');
    } catch { return ''; }
  }

  /**
   * Extract and parse the JSON payload from a provider response.
   * Handles the common response shapes (OpenAI, Claude, Ollama) and strips
   * markdown code fences some models emit around JSON. Returns {} on failure
   * so callers can apply their own fallbacks.
   * @private
   * @param {Object} res
   * @returns {Object}
   */
  _extractResult(res) {
    let content = res?.output_text || res?.choices?.[0]?.message?.content || res?.content?.[0]?.text || res?.message?.content || '';
    if (typeof content !== 'string') content = JSON.stringify(content);
    // Strip ```json ... ``` fences if present; JSON.parse would otherwise fail.
    const fenced = content.match(/```(?:json)?\s*([\s\S]*?)```/i);
    if (fenced) content = fenced[1];
    try { return JSON.parse(content.trim()); } catch { return {}; }
  }
}
|
|
184
|
+
|
|
185
|
+
module.exports = AIQueryOptimizer;
|
|
@@ -0,0 +1,181 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
/**
 * AISeeder
 * AI-powered data seeding that uses LLM to generate realistic, contextual seed data.
 * Instead of hard-coded Faker data, asks the LLM to produce domain-specific records.
 *
 * @since 8.0.0
 */
class AISeeder {
  /**
   * @param {import('./AiBridgeManager')} manager - AI provider orchestrator used for chat calls
   * @param {Object} connection - DatabaseConnection instance
   */
  constructor(manager, connection) {
    this._manager = manager;
    this._connection = connection;
    this._provider = 'openai';
    this._model = 'gpt-4o-mini';
  }

  /**
   * Set the provider and model to use.
   * @param {string} provider
   * @param {string} model
   * @returns {this}
   */
  using(provider, model) {
    this._provider = provider;
    this._model = model;
    return this;
  }

  /**
   * Generate and insert seed data for a table.
   * Records that fail to insert (FK violations, etc.) are skipped, not fatal.
   * @param {string} table - Table name
   * @param {number} [count=10] - Number of records to generate
   * @param {Object} [context={}] - Additional context (e.g., domain, constraints, description, locale)
   * @returns {Promise<{records: Array, inserted: number}>}
   */
  async seed(table, count = 10, context = {}) {
    const records = await this._generateRecords(table, count, context);

    let inserted = 0;
    if (records.length > 0 && this._connection) {
      const quote = this._identQuoter();
      for (const record of records) {
        try {
          const columns = Object.keys(record);
          const values = Object.values(record);
          const placeholders = columns.map(() => '?').join(', ');
          // Identifiers are quoted per-dialect (see _identQuoter); values are bound, never inlined.
          const sql = `INSERT INTO ${quote(table)} (${columns.map(c => quote(c)).join(', ')}) VALUES (${placeholders})`;
          await this._connection.raw(sql, values);
          inserted++;
        } catch (err) {
          // Skip records that fail (FK violations, etc.) — seeding is best-effort.
          continue;
        }
      }
    }

    return { records, inserted };
  }

  /**
   * Generate seed records without inserting them.
   * @param {string} table
   * @param {number} [count=10]
   * @param {Object} [context={}]
   * @returns {Promise<Array>}
   */
  async generate(table, count = 10, context = {}) {
    return this._generateRecords(table, count, context);
  }

  /**
   * Shared generation pipeline used by both seed() and generate():
   * introspect schema, prompt the LLM for JSON records, parse the response.
   * @private
   * @param {string} table
   * @param {number} count
   * @param {Object} context
   * @returns {Promise<Array>}
   */
  async _generateRecords(table, count, context) {
    const schema = await this._getTableSchema(table);
    const systemPrompt = this._buildSystemPrompt(table, schema, count, context);
    const messages = [
      { role: 'system', content: systemPrompt },
      { role: 'user', content: `Generate ${count} realistic seed records for the "${table}" table.${context.description ? ' Context: ' + context.description : ''}` },
    ];

    const res = await this._manager.chat(this._provider, messages, {
      model: this._model,
      temperature: 0.8, // Higher temp for creative, diverse data
      max_tokens: 4096,
      response_format: 'json',
      json_schema: {
        name: 'seed_data',
        schema: {
          type: 'object',
          properties: {
            records: { type: 'array', items: { type: 'object' } },
          },
          required: ['records'],
        },
      },
    });

    return this._extractRecords(res);
  }

  /**
   * Return an identifier-quoting function for the connection's dialect.
   * MySQL uses backticks; PostgreSQL and SQLite use standard double quotes
   * (backticks are invalid SQL on PostgreSQL).
   * @private
   * @returns {(name: string) => string}
   */
  _identQuoter() {
    const dialect = this._connection?.config?.client || 'mysql';
    const q = (dialect === 'pg' || dialect === 'postgresql' || dialect === 'sqlite' || dialect === 'sqlite3') ? '"' : '`';
    return (name) => `${q}${name}${q}`;
  }

  /**
   * Describe a table's columns as "name type NULL/NOT NULL [DEFAULT ...]" lines.
   * Best-effort: returns a placeholder string when no connection or on error.
   * @private
   * @param {string} table
   * @returns {Promise<string>}
   */
  async _getTableSchema(table) {
    if (!this._connection) return '(no connection)';
    try {
      const dialect = this._connection.config?.client || 'mysql';
      if (dialect === 'pg' || dialect === 'postgresql') {
        // table_schema filter prevents pulling columns of same-named tables in other schemas.
        const res = await this._connection.raw(
          `SELECT column_name, data_type, is_nullable, column_default FROM information_schema.columns WHERE table_name = '${table}' AND table_schema = 'public' ORDER BY ordinal_position`
        );
        return (res.rows || res).map(c => `${c.column_name} ${c.data_type}${c.is_nullable === 'YES' ? ' NULL' : ' NOT NULL'}${c.column_default ? ' DEFAULT ' + c.column_default : ''}`).join('\n');
      } else if (dialect === 'sqlite' || dialect === 'sqlite3') {
        const res = await this._connection.raw(`PRAGMA table_info("${table}")`);
        return (Array.isArray(res) ? res : []).map(c => `${c.name} ${c.type}${c.notnull ? ' NOT NULL' : ' NULL'}${c.dflt_value ? ' DEFAULT ' + c.dflt_value : ''}`).join('\n');
      } else {
        const res = await this._connection.raw(`DESCRIBE \`${table}\``);
        return (Array.isArray(res) ? res[0] || res : res).map(c => `${c.Field} ${c.Type}${c.Null === 'YES' ? ' NULL' : ' NOT NULL'}${c.Default ? ' DEFAULT ' + c.Default : ''}${c.Extra ? ' ' + c.Extra : ''}`).join('\n');
      }
    } catch (err) {
      return `(error: ${err.message})`;
    }
  }

  /**
   * Build the system prompt describing table, schema, and generation rules.
   * @private
   */
  _buildSystemPrompt(table, schema, count, context) {
    return `You are a database seed data generator. Generate realistic, diverse, and contextually appropriate test data.

TABLE: ${table}
SCHEMA:
${schema}

RULES:
- Return a JSON object with a "records" key containing an array of ${count} objects.
- Each object should have keys matching the column names (exclude auto-increment id columns).
- Use realistic names, emails, dates, etc. — not lorem ipsum.
- Respect data types and constraints (NOT NULL, defaults).
- Foreign key values should use integers 1-${Math.max(5, count)}.
- Dates should be in ISO format (YYYY-MM-DD or YYYY-MM-DD HH:MM:SS).
${context.locale ? `- Use locale: ${context.locale}` : ''}
${context.domain ? `- Domain context: ${context.domain}` : ''}`;
  }

  /**
   * Parse the records array out of a provider response.
   * Accepts {records: [...]} or a bare array; returns [] on any parse failure.
   * @private
   * @param {Object} res
   * @returns {Array}
   */
  _extractRecords(res) {
    let content = res?.output_text || res?.choices?.[0]?.message?.content || res?.content?.[0]?.text || res?.message?.content || '';
    if (typeof content !== 'string') content = JSON.stringify(content);
    try {
      const parsed = JSON.parse(content);
      return Array.isArray(parsed.records) ? parsed.records : (Array.isArray(parsed) ? parsed : []);
    } catch {
      return [];
    }
  }
}
|
|
180
|
+
|
|
181
|
+
module.exports = AISeeder;
|
|
@@ -0,0 +1,287 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
const OpenAIProvider = require('./Providers/OpenAIProvider');
|
|
4
|
+
const OllamaProvider = require('./Providers/OllamaProvider');
|
|
5
|
+
const OllamaTurboProvider = require('./Providers/OllamaTurboProvider');
|
|
6
|
+
const OnnProvider = require('./Providers/OnnProvider');
|
|
7
|
+
const GeminiProvider = require('./Providers/GeminiProvider');
|
|
8
|
+
const GrokProvider = require('./Providers/GrokProvider');
|
|
9
|
+
const ClaudeProvider = require('./Providers/ClaudeProvider');
|
|
10
|
+
const MistralProvider = require('./Providers/MistralProvider');
|
|
11
|
+
const CustomOpenAIProvider = require('./Providers/CustomOpenAIProvider');
|
|
12
|
+
const ProviderError = require('./Support/Exceptions/ProviderError');
|
|
13
|
+
const ToolRegistry = require('./Support/ToolRegistry');
|
|
14
|
+
const ToolChatRunner = require('./Support/ToolChatRunner');
|
|
15
|
+
|
|
16
|
+
const BEARER_PREFIX = 'Bearer ';
|
|
17
|
+
|
|
18
|
+
/**
 * AiBridgeManager
 * Central orchestrator for all AI providers. Supports provider registration,
 * per-call overrides, capability delegation (chat, stream, embeddings, images,
 * audio, models), tool registration, and chatWithTools loop.
 */
class AiBridgeManager {
  /**
   * @param {Object} config - Per-provider config blocks (api_key/endpoint/etc.)
   */
  constructor(config = {}) {
    this._providers = {};
    this._toolRegistry = new ToolRegistry();
    this._options = config.options || {};

    // Auto-register providers from config
    if (config.openai?.api_key) {
      this._providers.openai = new OpenAIProvider(config.openai.api_key);
    }
    if (config.ollama?.endpoint) {
      this._providers.ollama = new OllamaProvider(config.ollama.endpoint);
    }
    if (config.ollama_turbo?.api_key) {
      const ep = config.ollama_turbo.endpoint || 'https://ollama.com';
      this._providers.ollama_turbo = new OllamaTurboProvider(config.ollama_turbo.api_key, ep);
    }
    if (config.onn?.api_key) {
      this._providers.onn = new OnnProvider(config.onn.api_key);
    }
    if (config.gemini?.api_key) {
      this._providers.gemini = new GeminiProvider(config.gemini.api_key);
    }
    if (config.grok?.api_key) {
      this._providers.grok = new GrokProvider(config.grok.api_key);
    }
    if (config.claude?.api_key) {
      this._providers.claude = new ClaudeProvider(config.claude.api_key);
    }
    if (config.mistral?.api_key) {
      const ep = config.mistral.endpoint || 'https://api.mistral.ai/v1/chat/completions';
      this._providers.mistral = new MistralProvider(config.mistral.api_key, ep);
    }
    if (config.openai_custom?.api_key && config.openai_custom?.base_url) {
      const c = config.openai_custom;
      this._providers.openai_custom = new CustomOpenAIProvider(
        c.api_key, c.base_url, c.paths || {},
        c.auth_header || 'Authorization', c.auth_prefix || BEARER_PREFIX,
        c.extra_headers || {}
      );
    }
    // OpenRouter (OpenAI-compatible)
    if (config.openrouter?.api_key) {
      const base = config.openrouter.base_url || 'https://openrouter.ai/api/v1';
      const hdrs = {};
      if (config.openrouter.referer) hdrs['HTTP-Referer'] = config.openrouter.referer;
      if (config.openrouter.title) hdrs['X-Title'] = config.openrouter.title;
      this._providers.openrouter = new CustomOpenAIProvider(
        config.openrouter.api_key, base,
        { chat: '/chat/completions', embeddings: '/embeddings', image: '/images/generations', tts: '/audio/speech', stt: '/audio/transcriptions' },
        'Authorization', BEARER_PREFIX, hdrs
      );
    }
  }

  // ─── Provider resolution ───

  /**
   * True when options carry per-call connection overrides that should build
   * a fresh (non-cached) provider instance.
   * @private
   */
  _hasOverrides(options) {
    const keys = ['api_key', 'endpoint', 'base_url', 'chat_endpoint', 'auth_header', 'auth_prefix', 'paths', 'extra_headers'];
    return keys.some(k => options[k] !== undefined);
  }

  /**
   * Build a provider instance purely from per-call options.
   * Returns null when options are insufficient for the named provider.
   * @private
   */
  _buildProviderFromOptions(name, options) {
    switch (name) {
      case 'openai': {
        const api = options.api_key;
        if (api) return new OpenAIProvider(api, options.chat_endpoint || 'https://api.openai.com/v1/chat/completions');
        break;
      }
      // Ollama needs no API key — a default local endpoint is always usable.
      case 'ollama': return new OllamaProvider(options.endpoint || 'http://localhost:11434');
      case 'ollama_turbo': {
        const api = options.api_key;
        if (api) return new OllamaTurboProvider(api, options.endpoint || 'https://ollama.com');
        break;
      }
      case 'onn': {
        const api = options.api_key;
        if (api) return new OnnProvider(api, options.endpoint || 'https://api.onn.ai/v1/chat');
        break;
      }
      case 'gemini': {
        const api = options.api_key;
        if (api) return new GeminiProvider(api, options.endpoint);
        break;
      }
      case 'grok': {
        const api = options.api_key;
        if (api) return new GrokProvider(api, options.endpoint);
        break;
      }
      case 'claude': {
        const api = options.api_key;
        if (api) return new ClaudeProvider(api, options.endpoint);
        break;
      }
      case 'mistral': {
        const api = options.api_key;
        if (api) return new MistralProvider(api, options.endpoint || 'https://api.mistral.ai/v1/chat/completions');
        break;
      }
      case 'openai_custom': {
        const api = options.api_key;
        const base = options.base_url;
        if (api && base) {
          return new CustomOpenAIProvider(api, base, options.paths || {},
            options.auth_header || 'Authorization', options.auth_prefix || BEARER_PREFIX,
            options.extra_headers || {});
        }
        break;
      }
      case 'openrouter': {
        const api = options.api_key;
        if (api) {
          const base = options.base_url || 'https://openrouter.ai/api/v1';
          const hdrs = {};
          if (options.referer) hdrs['HTTP-Referer'] = options.referer;
          if (options.title) hdrs['X-Title'] = options.title;
          return new CustomOpenAIProvider(api, base,
            { chat: '/chat/completions', embeddings: '/embeddings', image: '/images/generations', tts: '/audio/speech', stt: '/audio/transcriptions' },
            'Authorization', BEARER_PREFIX, hdrs
          );
        }
        break;
      }
    }
    return null;
  }

  /**
   * Resolve a provider: per-call overrides win, then registered instances,
   * then a lazily-built (and cached) instance from options/defaults.
   * @private
   */
  _resolveProvider(name, options = {}) {
    if (this._hasOverrides(options)) {
      const p = this._buildProviderFromOptions(name, options);
      if (p) return p;
    }
    if (this._providers[name]) return this._providers[name];
    const p = this._buildProviderFromOptions(name, options);
    if (p) { this._providers[name] = p; }
    return p || null;
  }

  // ─── Public API ───

  /**
   * Get a registered provider by name.
   * @param {string} name
   * @returns {Object|null}
   */
  provider(name) {
    return this._providers[name] || null;
  }

  /**
   * Register a provider instance.
   * @param {string} name
   * @param {Object} provider
   * @returns {this}
   */
  registerProvider(name, provider) {
    this._providers[name] = provider;
    return this;
  }

  // ─── Chat ───

  /**
   * Send a chat completion request.
   * @param {string} provider
   * @param {Array} messages
   * @param {Object} [options={}]
   * @throws {ProviderError} When the provider is unknown or lacks chat support.
   */
  async chat(provider, messages, options = {}) {
    const p = this._resolveProvider(provider, options);
    if (!p || typeof p.chat !== 'function') throw ProviderError.unsupported(provider, 'chat');
    return p.chat(messages, options);
  }

  // ─── Stream ───

  /**
   * Stream raw chat deltas.
   * @yields {*} Provider-specific chunks.
   */
  async *stream(provider, messages, options = {}) {
    const p = this._resolveProvider(provider, options);
    if (!p || typeof p.stream !== 'function') throw ProviderError.unsupported(provider, 'streaming');
    yield* p.stream(messages, options);
  }

  // ─── Stream Events ───

  /**
   * Stream typed events ({type, data}); wraps plain stream() when the
   * provider has no native streamEvents().
   */
  async *streamEvents(provider, messages, options = {}) {
    const p = this._resolveProvider(provider, options);
    if (!p || typeof p.supportsStreaming !== 'function' || !p.supportsStreaming()) {
      throw ProviderError.unsupported(provider, 'streaming');
    }
    if (typeof p.streamEvents === 'function') {
      yield* p.streamEvents(messages, options);
      return;
    }
    // Fallback: adapt the plain delta stream into the event shape.
    for await (const chunk of p.stream(messages, options)) {
      yield { type: 'delta', data: chunk };
    }
    yield { type: 'end', data: null };
  }

  // ─── Embeddings ───

  /**
   * Create embeddings for the given inputs.
   */
  async embeddings(provider, inputs, options = {}) {
    const p = this._resolveProvider(provider, options);
    if (!p || typeof p.embeddings !== 'function') throw ProviderError.unsupported(provider, 'embeddings');
    return p.embeddings(inputs, options);
  }

  // ─── Models ───

  /**
   * List the provider's available models.
   * Uses _resolveProvider (like chat/embeddings) so per-call overrides and
   * default-endpoint providers (e.g. local ollama) work here too.
   * @param {string} provider
   * @param {Object} [options={}] - Optional per-call overrides (api_key, endpoint, ...)
   */
  async models(provider, options = {}) {
    const p = this._resolveProvider(provider, options);
    if (!p || typeof p.listModels !== 'function') throw ProviderError.unsupported(provider, 'models');
    return p.listModels();
  }

  /**
   * Fetch a single model's metadata by id.
   * @param {string} provider
   * @param {string} id
   * @param {Object} [options={}] - Optional per-call overrides
   */
  async model(provider, id, options = {}) {
    const p = this._resolveProvider(provider, options);
    if (!p || typeof p.getModel !== 'function') throw ProviderError.unsupported(provider, 'model');
    return p.getModel(id);
  }

  // ─── Images ───

  /**
   * Generate an image from a text prompt.
   */
  async image(provider, prompt, options = {}) {
    const p = this._resolveProvider(provider, options);
    if (!p || typeof p.generateImage !== 'function') throw ProviderError.unsupported(provider, 'image');
    return p.generateImage(prompt, options);
  }

  // ─── Audio ───

  /**
   * Text-to-speech.
   */
  async tts(provider, text, options = {}) {
    const p = this._resolveProvider(provider, options);
    if (!p || typeof p.textToSpeech !== 'function') throw ProviderError.unsupported(provider, 'tts');
    return p.textToSpeech(text, options);
  }

  /**
   * Speech-to-text (transcription) from an audio file path.
   */
  async stt(provider, filePath, options = {}) {
    const p = this._resolveProvider(provider, options);
    if (!p || typeof p.speechToText !== 'function') throw ProviderError.unsupported(provider, 'stt');
    return p.speechToText(filePath, options);
  }

  // ─── Fluent builder ───

  /**
   * Start a fluent text-generation builder bound to this manager.
   * (Lazy require avoids a circular dependency at module load time.)
   */
  text() {
    const TextBuilder = require('./Builders/TextBuilder');
    return new TextBuilder(this);
  }

  // ─── Tools API ───

  /**
   * Register a tool for use with chatWithTools.
   * @returns {this}
   */
  registerTool(tool) {
    this._toolRegistry.register(tool);
    return this;
  }

  /**
   * Look up a registered tool by name.
   */
  tool(name) {
    return this._toolRegistry.get(name);
  }

  /**
   * All registered tools.
   */
  tools() {
    return this._toolRegistry.all();
  }

  /**
   * Run a tool-calling chat loop until the model produces a final answer.
   */
  async chatWithTools(provider, messages, options = {}) {
    const runner = new ToolChatRunner(this);
    return runner.run(provider, messages, options);
  }
}
|
|
286
|
+
|
|
287
|
+
module.exports = AiBridgeManager;
|