outlet-orm 7.0.0 → 9.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +130 -2
- package/package.json +1 -1
- package/src/AI/AIPromptEnhancer.js +170 -0
- package/src/AI/AIQueryBuilder.js +234 -0
- package/src/AI/AIQueryOptimizer.js +185 -0
- package/src/AI/AISeeder.js +181 -0
- package/src/AI/AiBridgeManager.js +287 -0
- package/src/AI/Builders/TextBuilder.js +170 -0
- package/src/AI/Contracts/AudioProviderContract.js +29 -0
- package/src/AI/Contracts/ChatProviderContract.js +38 -0
- package/src/AI/Contracts/EmbeddingsProviderContract.js +19 -0
- package/src/AI/Contracts/ImageProviderContract.js +19 -0
- package/src/AI/Contracts/ModelsProviderContract.js +26 -0
- package/src/AI/Contracts/ToolContract.js +25 -0
- package/src/AI/Facades/AiBridge.js +79 -0
- package/src/AI/MCPServer.js +113 -0
- package/src/AI/Providers/ClaudeProvider.js +64 -0
- package/src/AI/Providers/CustomOpenAIProvider.js +238 -0
- package/src/AI/Providers/GeminiProvider.js +68 -0
- package/src/AI/Providers/GrokProvider.js +46 -0
- package/src/AI/Providers/MistralProvider.js +21 -0
- package/src/AI/Providers/OllamaProvider.js +249 -0
- package/src/AI/Providers/OllamaTurboProvider.js +32 -0
- package/src/AI/Providers/OnnProvider.js +46 -0
- package/src/AI/Providers/OpenAIProvider.js +471 -0
- package/src/AI/Support/AudioNormalizer.js +37 -0
- package/src/AI/Support/ChatNormalizer.js +42 -0
- package/src/AI/Support/Document.js +77 -0
- package/src/AI/Support/DocumentAttachmentMapper.js +101 -0
- package/src/AI/Support/EmbeddingsNormalizer.js +30 -0
- package/src/AI/Support/Exceptions/ProviderError.js +22 -0
- package/src/AI/Support/FileSecurity.js +56 -0
- package/src/AI/Support/ImageNormalizer.js +62 -0
- package/src/AI/Support/JsonSchemaValidator.js +73 -0
- package/src/AI/Support/Message.js +40 -0
- package/src/AI/Support/StreamChunk.js +45 -0
- package/src/AI/Support/ToolChatRunner.js +160 -0
- package/src/AI/Support/ToolRegistry.js +62 -0
- package/src/AI/Tools/SystemInfoTool.js +25 -0
- package/src/index.js +67 -1
- package/types/index.d.ts +326 -0
|
@@ -0,0 +1,170 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
const ChatNormalizer = require('../Support/ChatNormalizer');
|
|
4
|
+
const StreamChunk = require('../Support/StreamChunk');
|
|
5
|
+
|
|
6
|
+
/**
 * TextBuilder
 * Fluent builder for text generation over AiBridge providers.
 * Keeps method names short and explicit, reducing array option errors.
 *
 * @example
 * const result = await manager.text()
 *   .using('openai', 'gpt-4o-mini')
 *   .withPrompt('Explain quantum computing')
 *   .withMaxTokens(200)
 *   .asText();
 */
class TextBuilder {
  static ERR_MISSING_USING = 'Provider and model must be set via using().';

  /**
   * @param {import('../AiBridgeManager')} manager - manager that performs the
   *   actual chat/stream calls; only consulted by the terminal methods.
   */
  constructor(manager) {
    this._manager = manager;
    this._provider = null;
    this._model = null;
    this._providerConfig = {};
    this._messages = [];
    this._systemPrompt = null;
    this._maxTokens = null;
    this._temperature = null;
    this._topP = null;
  }

  /**
   * Set the provider and model (required before any terminal method).
   * @param {string} provider
   * @param {string} model
   * @param {Object} [providerConfig={}]
   * @returns {this}
   */
  using(provider, model, providerConfig = {}) {
    this._provider = provider;
    this._model = model;
    // Shallow-copy so the override helpers (withApiKey, withEndpoint, ...)
    // never mutate the caller's own config object.
    this._providerConfig = { ...providerConfig };
    return this;
  }

  /**
   * Add a user prompt message.
   * @param {string} text
   * @param {Array} [attachments=[]] - attached only when non-empty
   * @returns {this}
   */
  withPrompt(text, attachments = []) {
    const msg = { role: 'user', content: text };
    if (attachments.length > 0) msg.attachments = attachments;
    this._messages.push(msg);
    return this;
  }

  /**
   * Alias for withPrompt; forwards attachments as well.
   * @param {string} text
   * @param {Array} [attachments=[]]
   * @returns {this}
   */
  prompt(text, attachments = []) { return this.withPrompt(text, attachments); }

  /**
   * Set the system prompt (prepended once at build time).
   * @param {string} text
   * @returns {this}
   */
  withSystemPrompt(text) {
    this._systemPrompt = text;
    return this;
  }

  /** @param {number} tokens @returns {this} */
  withMaxTokens(tokens) { this._maxTokens = tokens; return this; }

  /** @param {number} t @returns {this} */
  usingTemperature(t) { this._temperature = t; return this; }

  /** @param {number} p @returns {this} */
  usingTopP(p) { this._topP = p; return this; }

  // ─── Override helpers ───
  // Each writes into the builder's private config copy; the object passed to
  // using() is never touched.

  /** @param {string} key @returns {this} */
  withApiKey(key) { this._providerConfig.api_key = key; return this; }
  /** @param {string} ep @returns {this} */
  withEndpoint(ep) { this._providerConfig.endpoint = ep; return this; }
  /** @param {string} url @returns {this} */
  withBaseUrl(url) { this._providerConfig.base_url = url; return this; }
  /** @param {string} url @returns {this} */
  withChatEndpoint(url) { this._providerConfig.chat_endpoint = url; return this; }
  /** @param {string} header @param {string} [prefix='Bearer '] @returns {this} */
  withAuthHeader(header, prefix = 'Bearer ') { this._providerConfig.auth_header = header; this._providerConfig.auth_prefix = prefix; return this; }
  /** @param {Object} headers @returns {this} */
  withExtraHeaders(headers) { this._providerConfig.extra_headers = headers; return this; }
  /** @param {Object} paths @returns {this} */
  withPaths(paths) { this._providerConfig.paths = paths; return this; }

  // ─── Private helpers ───

  /**
   * Assemble the outgoing message list, prepending the system prompt if set.
   * @private
   * @returns {Array<{role: string, content: string}>}
   */
  _buildMessages() {
    const msgs = [...this._messages];
    if (this._systemPrompt) {
      msgs.unshift({ role: 'system', content: this._systemPrompt });
    }
    return msgs;
  }

  /**
   * Merge provider config with the explicitly-set generation options.
   * Null sentinels mean "not set" so 0 values are still honored.
   * @private
   * @returns {Object}
   */
  _callOptions() {
    const opts = { ...this._providerConfig };
    if (this._model) opts.model = this._model;
    if (this._maxTokens !== null) opts.max_tokens = this._maxTokens;
    if (this._temperature !== null) opts.temperature = this._temperature;
    if (this._topP !== null) opts.top_p = this._topP;
    return opts;
  }

  // ─── Terminal methods ───

  /**
   * Execute and return normalized text response.
   * @returns {Promise<{text: string, raw: Object, usage: Object|null, finish_reason: string|null}>}
   * @throws {Error} if using() was not called first
   */
  async asText() {
    if (!this._provider || !this._model) throw new Error(TextBuilder.ERR_MISSING_USING);
    const res = await this._manager.chat(this._provider, this._buildMessages(), this._callOptions());
    const norm = ChatNormalizer.normalize(res);
    return {
      text: norm.text || '',
      raw: res,
      usage: norm.usage || null,
      finish_reason: norm.finish_reason || null,
    };
  }

  /**
   * Execute and return raw provider response.
   * @returns {Promise<Object>}
   * @throws {Error} if using() was not called first
   */
  async asRaw() {
    if (!this._provider || !this._model) throw new Error(TextBuilder.ERR_MISSING_USING);
    return this._manager.chat(this._provider, this._buildMessages(), this._callOptions());
  }

  /**
   * Execute as a streaming generator of StreamChunk objects.
   * Strings and unknown chunk shapes are wrapped as delta chunks; object
   * chunks are mapped field-by-field onto StreamChunk.
   * @returns {AsyncGenerator<StreamChunk>}
   * @throws {Error} if using() was not called first
   */
  async *asStream() {
    if (!this._provider || !this._model) throw new Error(TextBuilder.ERR_MISSING_USING);
    for await (const chunk of this._manager.stream(this._provider, this._buildMessages(), this._callOptions())) {
      if (typeof chunk === 'string') {
        yield StreamChunk.delta(chunk);
      } else if (chunk && typeof chunk === 'object') {
        const text = String(chunk.delta || chunk.text || '');
        yield new StreamChunk(text, chunk.usage || null, chunk.finish_reason || null,
          chunk.type || 'delta', chunk.tool_calls || [], chunk.tool_results || []);
      } else {
        yield StreamChunk.delta(String(chunk));
      }
    }
  }
}
|
|
169
|
+
|
|
170
|
+
module.exports = TextBuilder;
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
/**
 * AudioProviderContract
 * Abstract base for providers offering text-to-speech and speech-to-text.
 * Both methods reject by default; concrete providers must override them.
 */
class AudioProviderContract {
  /**
   * Convert text into spoken audio.
   * @param {string} text
   * @param {Object} [options={}]
   * @returns {Promise<{audio: string, mime: string}>} `audio` is base64-encoded
   * @throws {Error} always, unless overridden
   */
  async textToSpeech(text, options = {}) { throw new Error('Not implemented: textToSpeech()'); }

  /**
   * Transcribe an audio file into text.
   * @param {string} filePath
   * @param {Object} [options={}]
   * @returns {Promise<{text: string, raw?: Object}>}
   * @throws {Error} always, unless overridden
   */
  async speechToText(filePath, options = {}) { throw new Error('Not implemented: speechToText()'); }
}
|
|
28
|
+
|
|
29
|
+
module.exports = AudioProviderContract;
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
/**
 * ChatProviderContract
 * Abstract base defining the chat provider interface. Every chat provider
 * extends this class and overrides chat() / stream(); providers that can
 * stream natively also override supportsStreaming().
 */
class ChatProviderContract {
  /**
   * Send a chat message and return the raw provider response object.
   * @param {Array<{role: string, content: string}>} messages
   * @param {Object} [options={}]
   * @returns {Promise<Object>}
   * @throws {Error} always, unless overridden
   */
  async chat(messages, options = {}) { throw new Error('Not implemented: chat()'); }

  /**
   * Stream a chat completion as an async generator of text or chunk objects.
   * @param {Array<{role: string, content: string}>} messages
   * @param {Object} [options={}]
   * @yields {string|Object}
   * @throws {Error} always, unless overridden
   */
  async *stream(messages, options = {}) { throw new Error('Not implemented: stream()'); }

  /**
   * Whether this provider supports streaming; conservative default is no.
   * @returns {boolean}
   */
  supportsStreaming() { return false; }
}
|
|
37
|
+
|
|
38
|
+
module.exports = ChatProviderContract;
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
/**
 * EmbeddingsProviderContract
 * Abstract base for providers capable of producing vector embeddings.
 * Concrete providers must override embeddings().
 */
class EmbeddingsProviderContract {
  /**
   * Produce one embedding vector per input string.
   * @param {string[]} inputs
   * @param {Object} [options={}]
   * @returns {Promise<{embeddings: number[][], usage?: Object, raw?: Object}>}
   * @throws {Error} always, unless overridden
   */
  async embeddings(inputs, options = {}) { throw new Error('Not implemented: embeddings()'); }
}
|
|
18
|
+
|
|
19
|
+
module.exports = EmbeddingsProviderContract;
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
/**
 * ImageProviderContract
 * Abstract base for providers capable of image generation.
 * Concrete providers must override generateImage().
 */
class ImageProviderContract {
  /**
   * Generate one or more images from a text prompt.
   * @param {string} prompt
   * @param {Object} [options={}]
   * @returns {Promise<{images: Array, meta?: Object, raw?: Object}>}
   * @throws {Error} always, unless overridden
   */
  async generateImage(prompt, options = {}) { throw new Error('Not implemented: generateImage()'); }
}
|
|
18
|
+
|
|
19
|
+
module.exports = ImageProviderContract;
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
/**
 * ModelsProviderContract
 * Abstract base for providers that can enumerate and describe their models.
 * Concrete providers must override both methods.
 */
class ModelsProviderContract {
  /**
   * List models metadata as returned by the provider.
   * @returns {Promise<Array>}
   * @throws {Error} always, unless overridden
   */
  async listModels() { throw new Error('Not implemented: listModels()'); }

  /**
   * Retrieve metadata for a single model by id/name.
   * @param {string} id
   * @returns {Promise<Object>}
   * @throws {Error} always, unless overridden
   */
  async getModel(id) { throw new Error('Not implemented: getModel()'); }
}
|
|
25
|
+
|
|
26
|
+
module.exports = ModelsProviderContract;
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
/**
 * ToolContract
 * Abstract base for tools usable by AI providers (function calling).
 * Subclasses supply a name, a description, a JSON-Schema parameter spec,
 * and an execute() implementation that returns a string result.
 */
class ToolContract {
  /** @returns {string} unique tool name */
  name() { throw new Error('Not implemented: name()'); }

  /** @returns {string} human-readable description shown to the model */
  description() { throw new Error('Not implemented: description()'); }

  /** @returns {Object} JSON Schema of parameters */
  schema() { throw new Error('Not implemented: schema()'); }

  /**
   * Execute the tool with given arguments. Must return a string result.
   * @param {Object} args
   * @returns {Promise<string>|string}
   * @throws {Error} always, unless overridden
   */
  execute(args) { throw new Error('Not implemented: execute()'); }
}
|
|
24
|
+
|
|
25
|
+
module.exports = ToolContract;
|
|
@@ -0,0 +1,79 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* AiBridge Facade
|
|
5
|
+
*
|
|
6
|
+
* Convenience entry-point mirroring AiBridge\Facades\AiBridge in PHP.
|
|
7
|
+
* Provides static-like helpers that delegate to an AiBridgeManager instance.
|
|
8
|
+
*/
|
|
9
|
+
|
|
10
|
+
const ImageNormalizer = require('../Support/ImageNormalizer');
|
|
11
|
+
const AudioNormalizer = require('../Support/AudioNormalizer');
|
|
12
|
+
const EmbeddingsNormalizer = require('../Support/EmbeddingsNormalizer');
|
|
13
|
+
|
|
14
|
+
let _manager = null;

const AiBridge = {
  /**
   * Bind an AiBridgeManager instance so all helpers delegate to it.
   * @param {import('../AiBridgeManager')} manager
   */
  setManager(manager) {
    _manager = manager;
  },

  /**
   * Return the bound manager (or null).
   * @returns {import('../AiBridgeManager')|null}
   */
  getManager() {
    return _manager;
  },

  /* ─── Normalization helpers (static, no manager needed) ────── */

  normalizeImages(raw) {
    return ImageNormalizer.normalize(raw);
  },

  normalizeTTSAudio(raw) {
    return AudioNormalizer.normalizeTTS(raw);
  },

  normalizeSTTAudio(raw) {
    return AudioNormalizer.normalizeSTT(raw);
  },

  normalizeEmbeddings(raw) {
    return EmbeddingsNormalizer.normalize(raw);
  },

  /* ─── Delegated helpers (require bound manager) ────────────── */

  /**
   * Create a new TextBuilder via the bound manager.
   * @returns {import('../Builders/TextBuilder')}
   * @throws {Error} if no manager is bound
   */
  text() {
    if (!_manager) throw new Error('AiBridge facade: no manager bound. Call AiBridge.setManager(manager) first.');
    return _manager.text();
  },

  /**
   * Shorthand for manager.chat(). Forwards ALL arguments unchanged —
   * the manager's chat signature is (provider, messages, options), so a
   * fixed two-parameter delegate would silently drop the options.
   * @throws {Error} if no manager is bound
   */
  async chat(...args) {
    if (!_manager) throw new Error('AiBridge facade: no manager bound.');
    return _manager.chat(...args);
  },

  /**
   * Shorthand for manager.provider()
   * @param {string} name
   * @throws {Error} if no manager is bound
   */
  provider(name) {
    if (!_manager) throw new Error('AiBridge facade: no manager bound.');
    return _manager.provider(name);
  },
};
|
|
78
|
+
|
|
79
|
+
module.exports = AiBridge;
|
package/src/AI/MCPServer.js
CHANGED
|
@@ -112,6 +112,33 @@ const TOOL_DEFINITIONS = [
|
|
|
112
112
|
},
|
|
113
113
|
required: ['filePath', 'consent']
|
|
114
114
|
}
|
|
115
|
+
},
|
|
116
|
+
{
|
|
117
|
+
name: 'ai_query',
|
|
118
|
+
description: 'Convert a natural language question into SQL and execute it. Requires an AI provider (AiBridge).',
|
|
119
|
+
inputSchema: {
|
|
120
|
+
type: 'object',
|
|
121
|
+
properties: {
|
|
122
|
+
question: { type: 'string', description: 'Natural language question, e.g. "Show me the top 5 users by order count"' },
|
|
123
|
+
provider: { type: 'string', description: 'AI provider to use (default: openai)' },
|
|
124
|
+
model: { type: 'string', description: 'AI model to use (default: gpt-4o-mini)' },
|
|
125
|
+
safe_mode: { type: 'boolean', description: 'Only allow SELECT queries (default: true)' }
|
|
126
|
+
},
|
|
127
|
+
required: ['question']
|
|
128
|
+
}
|
|
129
|
+
},
|
|
130
|
+
{
|
|
131
|
+
name: 'query_optimize',
|
|
132
|
+
description: 'Analyze a SQL query using AI and return optimization suggestions, rewritten query, and index recommendations.',
|
|
133
|
+
inputSchema: {
|
|
134
|
+
type: 'object',
|
|
135
|
+
properties: {
|
|
136
|
+
sql: { type: 'string', description: 'The SQL query to optimize' },
|
|
137
|
+
provider: { type: 'string', description: 'AI provider to use (default: openai)' },
|
|
138
|
+
model: { type: 'string', description: 'AI model to use (default: gpt-4o-mini)' }
|
|
139
|
+
},
|
|
140
|
+
required: ['sql']
|
|
141
|
+
}
|
|
115
142
|
}
|
|
116
143
|
];
|
|
117
144
|
|
|
@@ -313,6 +340,8 @@ class MCPServer extends EventEmitter {
|
|
|
313
340
|
case 'model_list': return this._toolModelList();
|
|
314
341
|
case 'backup_create': return this._toolBackupCreate(args);
|
|
315
342
|
case 'backup_restore': return this._toolBackupRestore(args);
|
|
343
|
+
case 'ai_query': return this._toolAiQuery(args);
|
|
344
|
+
case 'query_optimize': return this._toolQueryOptimize(args);
|
|
316
345
|
default:
|
|
317
346
|
throw new Error(`Unknown tool: ${name}`);
|
|
318
347
|
}
|
|
@@ -610,6 +639,90 @@ class MCPServer extends EventEmitter {
|
|
|
610
639
|
return `Backup restored from: ${args.filePath}`;
|
|
611
640
|
}
|
|
612
641
|
|
|
642
|
+
// ── ai_query (NL → SQL) ───────────────────────────────────────
|
|
643
|
+
|
|
644
|
+
async _toolAiQuery(args) {
|
|
645
|
+
if (!args.question) throw new Error('A natural language question is required.');
|
|
646
|
+
const conn = await this._getConnection();
|
|
647
|
+
const manager = this._getAiBridgeManager();
|
|
648
|
+
if (!manager) throw new Error('AiBridge is not configured. Set OPENAI_API_KEY or configure a provider.');
|
|
649
|
+
|
|
650
|
+
const AIQueryBuilder = require('./AIQueryBuilder');
|
|
651
|
+
const builder = new AIQueryBuilder(manager, conn);
|
|
652
|
+
|
|
653
|
+
if (args.provider || args.model) {
|
|
654
|
+
builder.using(args.provider || 'openai', args.model || 'gpt-4o-mini');
|
|
655
|
+
}
|
|
656
|
+
if (args.safe_mode === false) {
|
|
657
|
+
builder.safeMode(false);
|
|
658
|
+
}
|
|
659
|
+
|
|
660
|
+
const result = await builder.query(args.question);
|
|
661
|
+
return {
|
|
662
|
+
sql: result.sql,
|
|
663
|
+
params: result.params,
|
|
664
|
+
explanation: result.explanation,
|
|
665
|
+
results: result.results,
|
|
666
|
+
error: result.error || null
|
|
667
|
+
};
|
|
668
|
+
}
|
|
669
|
+
|
|
670
|
+
// ── query_optimize ─────────────────────────────────────────────
|
|
671
|
+
|
|
672
|
+
async _toolQueryOptimize(args) {
|
|
673
|
+
if (!args.sql) throw new Error('SQL query is required.');
|
|
674
|
+
const conn = await this._getConnection();
|
|
675
|
+
const manager = this._getAiBridgeManager();
|
|
676
|
+
if (!manager) throw new Error('AiBridge is not configured. Set OPENAI_API_KEY or configure a provider.');
|
|
677
|
+
|
|
678
|
+
const AIQueryOptimizer = require('./AIQueryOptimizer');
|
|
679
|
+
const optimizer = new AIQueryOptimizer(manager, conn);
|
|
680
|
+
|
|
681
|
+
if (args.provider || args.model) {
|
|
682
|
+
optimizer.using(args.provider || 'openai', args.model || 'gpt-4o-mini');
|
|
683
|
+
}
|
|
684
|
+
|
|
685
|
+
const result = await optimizer.optimize(args.sql);
|
|
686
|
+
return {
|
|
687
|
+
original: result.original,
|
|
688
|
+
optimized: result.optimized,
|
|
689
|
+
suggestions: result.suggestions,
|
|
690
|
+
explanation: result.explanation,
|
|
691
|
+
indexes: result.indexes
|
|
692
|
+
};
|
|
693
|
+
}
|
|
694
|
+
|
|
695
|
+
  // ── AiBridge manager helper ────────────────────────────────────

  /**
   * Lazily creates (and caches on this._aiBridgeManager) an AiBridge manager
   * configured from environment variables. Returns null when no provider
   * env var is set or when the AiBridgeManager module cannot be loaded —
   * AI tooling is treated as optional, so failure here is non-fatal.
   * @returns {import('./AiBridgeManager')|null}
   */
  _getAiBridgeManager() {
    if (this._aiBridgeManager) return this._aiBridgeManager;

    try {
      const AiBridgeManager = require('./AiBridgeManager');
      const config = {};

      // Auto-detect providers from env
      if (process.env.OPENAI_API_KEY) config.openai = { api_key: process.env.OPENAI_API_KEY };
      if (process.env.OLLAMA_ENDPOINT) config.ollama = { endpoint: process.env.OLLAMA_ENDPOINT };
      if (process.env.CLAUDE_API_KEY) config.claude = { api_key: process.env.CLAUDE_API_KEY };
      if (process.env.GEMINI_API_KEY) config.gemini = { api_key: process.env.GEMINI_API_KEY };
      if (process.env.GROK_API_KEY) config.grok = { api_key: process.env.GROK_API_KEY };
      if (process.env.MISTRAL_API_KEY) config.mistral = { api_key: process.env.MISTRAL_API_KEY };
      if (process.env.ONN_API_KEY) config.onn = { api_key: process.env.ONN_API_KEY };

      // No provider configured at all → AI features unavailable.
      if (Object.keys(config).length === 0) return null;

      this._aiBridgeManager = new AiBridgeManager(config);
      return this._aiBridgeManager;
    } catch {
      // Deliberate best-effort: a broken/missing AiBridge module just
      // disables AI tools rather than crashing the MCP server.
      return null;
    }
  }
|
|
725
|
+
|
|
613
726
|
// ─── Template helpers ──────────────────────────────────────────
|
|
614
727
|
|
|
615
728
|
_extractTableName(migrationName) {
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
/**
 * ClaudeProvider
 * Minimal client for the Anthropic Messages API.
 * System-role messages are downgraded to user role before sending.
 * Streaming is simulated by slicing the full completion into 60-char chunks.
 */
class ClaudeProvider {
  /**
   * @param {string} apiKey
   * @param {string} [endpoint='https://api.anthropic.com/v1/messages']
   */
  constructor(apiKey, endpoint = 'https://api.anthropic.com/v1/messages') {
    this.apiKey = apiKey;
    this.endpoint = endpoint;
  }

  /**
   * Request headers required by the Anthropic API.
   * @private
   * @returns {Object}
   */
  _headers() {
    return {
      'x-api-key': this.apiKey,
      'anthropic-version': '2023-06-01',
      'Content-Type': 'application/json',
      'Accept': 'application/json',
    };
  }

  /**
   * Send a chat completion request and return the parsed JSON response.
   * @param {Array<{role: string, content: string}>} messages
   * @param {Object} [options={}] - model, max_tokens, temperature
   * @returns {Promise<Object>}
   */
  async chat(messages, options = {}) {
    // Claude rejects system-role entries inside `messages`; send them as user turns.
    const outgoing = messages.map((m) =>
      (m.role || '') === 'system' ? { role: 'user', content: m.content } : m
    );

    const requestBody = {
      model: options.model || 'claude-3-opus-20240229',
      max_tokens: options.max_tokens || 512,
      messages: outgoing,
    };
    if (options.temperature !== undefined) requestBody.temperature = options.temperature;

    const response = await fetch(this.endpoint, {
      method: 'POST',
      headers: this._headers(),
      body: JSON.stringify(requestBody),
    });
    const parsed = await response.json();
    return parsed || {};
  }

  /**
   * Simulated streaming: run a full chat, then yield 60-char slices of the text.
   * @param {Array<{role: string, content: string}>} messages
   * @param {Object} [options={}]
   * @yields {string}
   */
  async *stream(messages, options = {}) {
    const full = await this.chat(messages, options);
    const text = full?.content?.[0]?.text || '';
    let cursor = 0;
    while (cursor < text.length) {
      yield text.slice(cursor, cursor + 60);
      cursor += 60;
    }
  }

  /** @returns {boolean} true — streaming is available, though simulated */
  supportsStreaming() { return true; } // simulated
}
|
|
63
|
+
|
|
64
|
+
module.exports = ClaudeProvider;
|