@dangao/bun-server 1.12.1 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +32 -0
- package/dist/ai/ai-module.d.ts +24 -0
- package/dist/ai/ai-module.d.ts.map +1 -0
- package/dist/ai/decorators.d.ts +25 -0
- package/dist/ai/decorators.d.ts.map +1 -0
- package/dist/ai/errors.d.ts +39 -0
- package/dist/ai/errors.d.ts.map +1 -0
- package/dist/ai/index.d.ts +12 -0
- package/dist/ai/index.d.ts.map +1 -0
- package/dist/ai/providers/anthropic-provider.d.ts +23 -0
- package/dist/ai/providers/anthropic-provider.d.ts.map +1 -0
- package/dist/ai/providers/google-provider.d.ts +20 -0
- package/dist/ai/providers/google-provider.d.ts.map +1 -0
- package/dist/ai/providers/ollama-provider.d.ts +17 -0
- package/dist/ai/providers/ollama-provider.d.ts.map +1 -0
- package/dist/ai/providers/openai-provider.d.ts +28 -0
- package/dist/ai/providers/openai-provider.d.ts.map +1 -0
- package/dist/ai/service.d.ts +40 -0
- package/dist/ai/service.d.ts.map +1 -0
- package/dist/ai/tools/tool-executor.d.ts +15 -0
- package/dist/ai/tools/tool-executor.d.ts.map +1 -0
- package/dist/ai/tools/tool-registry.d.ts +39 -0
- package/dist/ai/tools/tool-registry.d.ts.map +1 -0
- package/dist/ai/types.d.ts +134 -0
- package/dist/ai/types.d.ts.map +1 -0
- package/dist/ai-guard/ai-guard-module.d.ts +18 -0
- package/dist/ai-guard/ai-guard-module.d.ts.map +1 -0
- package/dist/ai-guard/decorators.d.ts +16 -0
- package/dist/ai-guard/decorators.d.ts.map +1 -0
- package/dist/ai-guard/detectors/content-moderator.d.ts +26 -0
- package/dist/ai-guard/detectors/content-moderator.d.ts.map +1 -0
- package/dist/ai-guard/detectors/injection-detector.d.ts +13 -0
- package/dist/ai-guard/detectors/injection-detector.d.ts.map +1 -0
- package/dist/ai-guard/detectors/pii-detector.d.ts +11 -0
- package/dist/ai-guard/detectors/pii-detector.d.ts.map +1 -0
- package/dist/ai-guard/index.d.ts +8 -0
- package/dist/ai-guard/index.d.ts.map +1 -0
- package/dist/ai-guard/service.d.ts +21 -0
- package/dist/ai-guard/service.d.ts.map +1 -0
- package/dist/ai-guard/types.d.ts +59 -0
- package/dist/ai-guard/types.d.ts.map +1 -0
- package/dist/conversation/conversation-module.d.ts +25 -0
- package/dist/conversation/conversation-module.d.ts.map +1 -0
- package/dist/conversation/decorators.d.ts +28 -0
- package/dist/conversation/decorators.d.ts.map +1 -0
- package/dist/conversation/index.d.ts +8 -0
- package/dist/conversation/index.d.ts.map +1 -0
- package/dist/conversation/service.d.ts +43 -0
- package/dist/conversation/service.d.ts.map +1 -0
- package/dist/conversation/stores/database-store.d.ts +46 -0
- package/dist/conversation/stores/database-store.d.ts.map +1 -0
- package/dist/conversation/stores/memory-store.d.ts +17 -0
- package/dist/conversation/stores/memory-store.d.ts.map +1 -0
- package/dist/conversation/stores/redis-store.d.ts +39 -0
- package/dist/conversation/stores/redis-store.d.ts.map +1 -0
- package/dist/conversation/types.d.ts +64 -0
- package/dist/conversation/types.d.ts.map +1 -0
- package/dist/embedding/embedding-module.d.ts +20 -0
- package/dist/embedding/embedding-module.d.ts.map +1 -0
- package/dist/embedding/index.d.ts +6 -0
- package/dist/embedding/index.d.ts.map +1 -0
- package/dist/embedding/providers/ollama-embedding-provider.d.ts +18 -0
- package/dist/embedding/providers/ollama-embedding-provider.d.ts.map +1 -0
- package/dist/embedding/providers/openai-embedding-provider.d.ts +18 -0
- package/dist/embedding/providers/openai-embedding-provider.d.ts.map +1 -0
- package/dist/embedding/service.d.ts +27 -0
- package/dist/embedding/service.d.ts.map +1 -0
- package/dist/embedding/types.d.ts +25 -0
- package/dist/embedding/types.d.ts.map +1 -0
- package/dist/index.d.ts +8 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +2638 -1
- package/dist/mcp/decorators.d.ts +42 -0
- package/dist/mcp/decorators.d.ts.map +1 -0
- package/dist/mcp/index.d.ts +6 -0
- package/dist/mcp/index.d.ts.map +1 -0
- package/dist/mcp/mcp-module.d.ts +22 -0
- package/dist/mcp/mcp-module.d.ts.map +1 -0
- package/dist/mcp/registry.d.ts +23 -0
- package/dist/mcp/registry.d.ts.map +1 -0
- package/dist/mcp/server.d.ts +29 -0
- package/dist/mcp/server.d.ts.map +1 -0
- package/dist/mcp/types.d.ts +60 -0
- package/dist/mcp/types.d.ts.map +1 -0
- package/dist/prompt/index.d.ts +6 -0
- package/dist/prompt/index.d.ts.map +1 -0
- package/dist/prompt/prompt-module.d.ts +23 -0
- package/dist/prompt/prompt-module.d.ts.map +1 -0
- package/dist/prompt/service.d.ts +47 -0
- package/dist/prompt/service.d.ts.map +1 -0
- package/dist/prompt/stores/file-store.d.ts +36 -0
- package/dist/prompt/stores/file-store.d.ts.map +1 -0
- package/dist/prompt/stores/memory-store.d.ts +17 -0
- package/dist/prompt/stores/memory-store.d.ts.map +1 -0
- package/dist/prompt/types.d.ts +68 -0
- package/dist/prompt/types.d.ts.map +1 -0
- package/dist/rag/chunkers/markdown-chunker.d.ts +11 -0
- package/dist/rag/chunkers/markdown-chunker.d.ts.map +1 -0
- package/dist/rag/chunkers/text-chunker.d.ts +11 -0
- package/dist/rag/chunkers/text-chunker.d.ts.map +1 -0
- package/dist/rag/decorators.d.ts +24 -0
- package/dist/rag/decorators.d.ts.map +1 -0
- package/dist/rag/index.d.ts +7 -0
- package/dist/rag/index.d.ts.map +1 -0
- package/dist/rag/rag-module.d.ts +23 -0
- package/dist/rag/rag-module.d.ts.map +1 -0
- package/dist/rag/service.d.ts +36 -0
- package/dist/rag/service.d.ts.map +1 -0
- package/dist/rag/types.d.ts +56 -0
- package/dist/rag/types.d.ts.map +1 -0
- package/dist/vector-store/index.d.ts +6 -0
- package/dist/vector-store/index.d.ts.map +1 -0
- package/dist/vector-store/stores/memory-store.d.ts +17 -0
- package/dist/vector-store/stores/memory-store.d.ts.map +1 -0
- package/dist/vector-store/stores/pinecone-store.d.ts +27 -0
- package/dist/vector-store/stores/pinecone-store.d.ts.map +1 -0
- package/dist/vector-store/stores/qdrant-store.d.ts +29 -0
- package/dist/vector-store/stores/qdrant-store.d.ts.map +1 -0
- package/dist/vector-store/types.d.ts +60 -0
- package/dist/vector-store/types.d.ts.map +1 -0
- package/dist/vector-store/vector-store-module.d.ts +20 -0
- package/dist/vector-store/vector-store-module.d.ts.map +1 -0
- package/docs/ai.md +500 -0
- package/docs/best-practices.md +83 -8
- package/docs/database.md +23 -0
- package/docs/guide.md +90 -27
- package/docs/migration.md +81 -7
- package/docs/security.md +23 -0
- package/docs/zh/ai.md +441 -0
- package/docs/zh/best-practices.md +43 -0
- package/docs/zh/database.md +23 -0
- package/docs/zh/guide.md +40 -1
- package/docs/zh/migration.md +39 -0
- package/docs/zh/security.md +23 -0
- package/package.json +2 -2
- package/src/ai/ai-module.ts +62 -0
- package/src/ai/decorators.ts +30 -0
- package/src/ai/errors.ts +71 -0
- package/src/ai/index.ts +11 -0
- package/src/ai/providers/anthropic-provider.ts +190 -0
- package/src/ai/providers/google-provider.ts +179 -0
- package/src/ai/providers/ollama-provider.ts +126 -0
- package/src/ai/providers/openai-provider.ts +242 -0
- package/src/ai/service.ts +155 -0
- package/src/ai/tools/tool-executor.ts +38 -0
- package/src/ai/tools/tool-registry.ts +91 -0
- package/src/ai/types.ts +145 -0
- package/src/ai-guard/ai-guard-module.ts +50 -0
- package/src/ai-guard/decorators.ts +21 -0
- package/src/ai-guard/detectors/content-moderator.ts +80 -0
- package/src/ai-guard/detectors/injection-detector.ts +48 -0
- package/src/ai-guard/detectors/pii-detector.ts +64 -0
- package/src/ai-guard/index.ts +7 -0
- package/src/ai-guard/service.ts +100 -0
- package/src/ai-guard/types.ts +61 -0
- package/src/conversation/conversation-module.ts +63 -0
- package/src/conversation/decorators.ts +47 -0
- package/src/conversation/index.ts +7 -0
- package/src/conversation/service.ts +133 -0
- package/src/conversation/stores/database-store.ts +125 -0
- package/src/conversation/stores/memory-store.ts +57 -0
- package/src/conversation/stores/redis-store.ts +101 -0
- package/src/conversation/types.ts +68 -0
- package/src/embedding/embedding-module.ts +52 -0
- package/src/embedding/index.ts +5 -0
- package/src/embedding/providers/ollama-embedding-provider.ts +39 -0
- package/src/embedding/providers/openai-embedding-provider.ts +47 -0
- package/src/embedding/service.ts +55 -0
- package/src/embedding/types.ts +27 -0
- package/src/index.ts +10 -0
- package/src/mcp/decorators.ts +60 -0
- package/src/mcp/index.ts +5 -0
- package/src/mcp/mcp-module.ts +58 -0
- package/src/mcp/registry.ts +72 -0
- package/src/mcp/server.ts +164 -0
- package/src/mcp/types.ts +63 -0
- package/src/prompt/index.ts +5 -0
- package/src/prompt/prompt-module.ts +61 -0
- package/src/prompt/service.ts +93 -0
- package/src/prompt/stores/file-store.ts +135 -0
- package/src/prompt/stores/memory-store.ts +82 -0
- package/src/prompt/types.ts +84 -0
- package/src/rag/chunkers/markdown-chunker.ts +40 -0
- package/src/rag/chunkers/text-chunker.ts +30 -0
- package/src/rag/decorators.ts +26 -0
- package/src/rag/index.ts +6 -0
- package/src/rag/rag-module.ts +78 -0
- package/src/rag/service.ts +134 -0
- package/src/rag/types.ts +47 -0
- package/src/vector-store/index.ts +5 -0
- package/src/vector-store/stores/memory-store.ts +69 -0
- package/src/vector-store/stores/pinecone-store.ts +123 -0
- package/src/vector-store/stores/qdrant-store.ts +147 -0
- package/src/vector-store/types.ts +77 -0
- package/src/vector-store/vector-store-module.ts +50 -0
- package/tests/ai/ai-module.test.ts +46 -0
- package/tests/ai/ai-service.test.ts +91 -0
- package/tests/ai/tool-registry.test.ts +57 -0
- package/tests/ai-guard/ai-guard-module.test.ts +23 -0
- package/tests/ai-guard/content-moderator.test.ts +65 -0
- package/tests/ai-guard/pii-detector.test.ts +41 -0
- package/tests/conversation/conversation-module.test.ts +26 -0
- package/tests/conversation/conversation-service.test.ts +64 -0
- package/tests/conversation/memory-store.test.ts +68 -0
- package/tests/embedding/embedding-service.test.ts +55 -0
- package/tests/mcp/mcp-server.test.ts +85 -0
- package/tests/prompt/prompt-module.test.ts +30 -0
- package/tests/prompt/prompt-service.test.ts +74 -0
- package/tests/rag/chunkers.test.ts +58 -0
- package/tests/rag/rag-service.test.ts +66 -0
- package/tests/vector-store/memory-vector-store.test.ts +84 -0
- package/tests/interceptor/perf/interceptor-performance.test.ts +0 -340
- package/tests/perf/optimization.test.ts +0 -182
- package/tests/perf/regression.test.ts +0 -120
package/dist/index.js
CHANGED
|
@@ -13065,12 +13065,2574 @@ class ServiceMetricsCollector {
|
|
|
13065
13065
|
this.metricsIntegration?.stop();
|
|
13066
13066
|
}
|
|
13067
13067
|
}
|
|
13068
|
+
// src/ai/types.ts
// Dependency-injection tokens for the AI module. Symbols guarantee
// collision-free injector keys; AI_TOOL_METADATA_KEY is a plain string
// because it is used as a Reflect-metadata key (written by the AiTool
// decorator, read by ToolRegistry.scanAndRegister).
var AI_SERVICE_TOKEN = Symbol("@dangao/bun-server:ai:service");
var AI_MODULE_OPTIONS_TOKEN = Symbol("@dangao/bun-server:ai:options");
var AI_TOOL_REGISTRY_TOKEN = Symbol("@dangao/bun-server:ai:tool-registry");
var AI_TOOL_METADATA_KEY = "@dangao/bun-server:ai:tool";
|
|
13073
|
+
// src/ai/errors.ts
init_http_exception();

/**
 * Base class for errors raised by a specific AI provider.
 * Prefixes the message with the provider name and defaults to
 * HTTP 502 (bad gateway), since the upstream provider — not this
 * server — is at fault.
 */
class AiProviderError extends HttpException {
  provider;
  constructor(message, provider, statusCode = 502) {
    super(statusCode, `[${provider}] ${message}`);
    this.provider = provider;
  }
}

/**
 * Provider returned HTTP 429. The retry delay (when known) is kept on
 * `retryAfterMs` so callers can back off programmatically instead of
 * parsing the message text.
 */
class AiRateLimitError extends AiProviderError {
  retryAfterMs;
  constructor(provider, retryAfterMs) {
    super(retryAfterMs ? `Rate limit exceeded. Retry after ${retryAfterMs}ms` : "Rate limit exceeded", provider, 429);
    this.retryAfterMs = retryAfterMs;
  }
}

/**
 * The request exceeded the provider's context window (HTTP 413).
 * `maxTokens` (when known) is exposed so callers can truncate and retry.
 */
class AiContextLengthError extends AiProviderError {
  maxTokens;
  constructor(provider, maxTokens) {
    super(maxTokens ? `Context length exceeded (max ${maxTokens} tokens)` : "Context length exceeded", provider, 413);
    this.maxTokens = maxTokens;
  }
}

/**
 * The provider did not answer within the configured deadline (HTTP 504).
 * `timeoutMs` records the deadline that was exceeded.
 */
class AiTimeoutError extends AiProviderError {
  timeoutMs;
  constructor(provider, timeoutMs) {
    super(`Request timed out after ${timeoutMs}ms`, provider, 504);
    this.timeoutMs = timeoutMs;
  }
}

/** Raised when the AI module is used before any provider was configured. */
class AiNoProviderError extends HttpException {
  constructor() {
    super(500, "No AI providers configured. Call AiModule.forRoot() first.");
  }
}

/**
 * Raised after fallback exhausted every configured provider; the
 * per-provider error strings are joined into the message.
 */
class AiAllProvidersFailed extends HttpException {
  constructor(errors) {
    super(502, `All AI providers failed: ${errors.join("; ")}`);
  }
}
|
|
13113
|
+
// src/ai/decorators.ts
/**
 * Method decorator that tags a class method as an AI tool.
 * Stores the tool definition under AI_TOOL_METADATA_KEY via
 * Reflect.defineMetadata; ToolRegistry.scanAndRegister later reads this
 * metadata and wires the decorated method up as the tool's executor.
 * NOTE(review): relies on a reflect-metadata-style Reflect.defineMetadata
 * being available at runtime — confirm the bundle loads that polyfill.
 */
function AiTool(definition) {
  return (target, propertyKey) => {
    Reflect.defineMetadata(AI_TOOL_METADATA_KEY, definition, target, propertyKey);
  };
}
|
|
13119
|
+
// src/ai/service.ts
|
|
13120
|
+
init_decorators();
|
|
13121
|
+
init_decorators();
|
|
13122
|
+
|
|
13123
|
+
// src/ai/tools/tool-executor.ts
/**
 * Runs model-requested tool calls against a ToolRegistry and converts
 * each result (or failure) into a `role: "tool"` chat message.
 */
class ToolExecutor {
  registry;

  constructor(registry) {
    this.registry = registry;
  }

  /** Execute all tool calls concurrently; resolves to one tool message per call. */
  async executeAll(toolCalls) {
    return Promise.all(toolCalls.map((call) => this.executeOne(call)));
  }

  /**
   * Execute a single tool call. A throwing tool never rejects: the error
   * is folded into the message content so the model can see what happened.
   */
  async executeOne(call) {
    try {
      const result = await this.registry.execute(call.name, call.arguments);
      const output = typeof result === "string" ? result : JSON.stringify(result, null, 2);
      return { role: "tool", content: output, toolCallId: call.id };
    } catch (err) {
      const reason = err instanceof Error ? err.message : String(err);
      return {
        role: "tool",
        content: `Error executing tool "${call.name}": ${reason}`,
        toolCallId: call.id
      };
    }
  }
}
|
|
13148
|
+
|
|
13149
|
+
// src/ai/service.ts
/**
 * Facade over one or more chat-completion providers.
 * Responsibilities visible here:
 *  - provider registry + default selection (first entry, or `default: true`),
 *  - automatic tool-call loop in complete() (bounded by tools.maxIterations),
 *  - optional provider fallback and per-request timeout in completeSingle().
 */
class AiService {
  // name -> provider instance
  providers = new Map;
  // name used when a request does not specify `provider`
  defaultProviderName;
  // raw module options (providers, fallback, timeout, tools)
  options;
  // set lazily via setToolRegistry(); null disables the tool loop
  toolExecutor = null;
  constructor(options) {
    this.options = options;
    for (const entry of options.providers) {
      // entry.provider is a constructor; instantiate with its own config
      const provider = new entry.provider(entry.config);
      this.providers.set(entry.name, provider);
      // first provider wins by default; a later `default: true` overrides it
      if (entry.default || !this.defaultProviderName) {
        this.defaultProviderName = entry.name;
      }
    }
  }
  /** Enable tool execution by wrapping the registry in a ToolExecutor. */
  setToolRegistry(registry) {
    this.toolExecutor = new ToolExecutor(registry);
  }
  /**
   * Complete a chat request, transparently executing any tool calls the
   * model makes and feeding the results back, up to maxIterations rounds.
   * After the budget is spent, one final call is made with `tools: []`
   * to force a plain-text answer.
   */
  async complete(request) {
    const maxIterations = this.options.tools?.maxIterations ?? 10;
    let messages = [...request.messages];
    let iteration = 0;
    while (iteration < maxIterations) {
      const response = await this.completeSingle({ ...request, messages });
      // done: no tool calls requested, or tools are not wired up at all
      if (!response.toolCalls || response.toolCalls.length === 0 || !this.toolExecutor) {
        return response;
      }
      // echo the assistant turn (with its tool calls) back into history
      messages = [
        ...messages,
        { role: "assistant", content: response.content, toolCalls: response.toolCalls }
      ];
      const toolResults = await this.toolExecutor.executeAll(response.toolCalls);
      messages = [...messages, ...toolResults];
      iteration++;
    }
    return this.completeSingle({ ...request, messages, tools: [] });
  }
  /**
   * Stream a completion from the resolved provider.
   * NOTE(review): unlike complete(), this bypasses fallback and timeout —
   * confirm that is intentional.
   */
  stream(request) {
    const provider = this.getProvider(request.provider);
    return provider.stream(request);
  }
  /** Token count estimate, always using the default provider's heuristic. */
  countTokens(messages) {
    const provider = this.getProvider();
    return provider.countTokens(messages);
  }
  /** Resolve a provider by name (or the default); throws if none match. */
  getProvider(name) {
    const providerName = name ?? this.defaultProviderName;
    if (!providerName)
      throw new AiNoProviderError;
    const provider = this.providers.get(providerName);
    if (!provider)
      throw new AiNoProviderError;
    return provider;
  }
  /** Names of all configured providers, in registration order. */
  getProviderNames() {
    return Array.from(this.providers.keys());
  }
  /**
   * One provider round-trip. Without fallback: single provider, with
   * timeout. With fallback: try the target first, then every other
   * provider in registration order; if all fail, throw with the
   * collected per-provider error strings.
   */
  async completeSingle(request) {
    const targetName = request.provider ?? this.defaultProviderName;
    if (!targetName)
      throw new AiNoProviderError;
    const fallback = this.options.fallback ?? false;
    const timeout = this.options.timeout ?? 30000;
    if (!fallback) {
      return this.withTimeout(this.getProvider(targetName).complete(request), timeout, targetName);
    }
    const names = [
      targetName,
      ...Array.from(this.providers.keys()).filter((n) => n !== targetName)
    ];
    const errors = [];
    for (const name of names) {
      try {
        const provider = this.providers.get(name);
        if (!provider)
          continue;
        // `await` here is load-bearing: it keeps rejections inside this try
        return await this.withTimeout(provider.complete({ ...request, provider: name }), timeout, name);
      } catch (err) {
        errors.push(`${name}: ${err instanceof Error ? err.message : String(err)}`);
      }
    }
    throw new AiAllProvidersFailed(errors);
  }
  /**
   * Race `promise` against a timer; rejects with AiTimeoutError after `ms`.
   * NOTE(review): the underlying provider request is not aborted on
   * timeout — it keeps running in the background.
   */
  withTimeout(promise, ms, providerName) {
    return new Promise((resolve2, reject) => {
      const timer = setTimeout(() => reject(new AiTimeoutError(providerName, ms)), ms);
      promise.then((val) => {
        clearTimeout(timer);
        resolve2(val);
      }, (err) => {
        clearTimeout(timer);
        reject(err);
      });
    });
  }
}
// Legacy (experimental) decorator emit: marks AiService @Injectable and
// injects the options token into constructor parameter 0.
AiService = __legacyDecorateClassTS([
  Injectable(),
  __legacyDecorateParamTS(0, Inject(AI_MODULE_OPTIONS_TOKEN)),
  __legacyMetadataTS("design:paramtypes", [
    typeof AiModuleOptions === "undefined" ? Object : AiModuleOptions
  ])
], AiService);
|
|
13253
|
+
// src/ai/ai-module.ts
|
|
13254
|
+
init_module();
|
|
13255
|
+
|
|
13256
|
+
// src/ai/tools/tool-registry.ts
/**
 * Registry of AI tools, keyed by tool name. Tools are either registered
 * directly or discovered on decorated class instances.
 */
class ToolRegistry {
  tools = new Map;

  /** Register a tool object ({ name, description, parameters, execute }). Last write wins. */
  register(tool) {
    this.tools.set(tool.name, tool);
  }

  /**
   * Discover @AiTool-decorated methods on an instance's prototype and
   * register each one, binding `execute` back to the instance.
   */
  scanAndRegister(instance) {
    const proto = Object.getPrototypeOf(instance);
    for (const methodName of Object.getOwnPropertyNames(proto)) {
      if (methodName === "constructor") continue;
      const metadata = Reflect.getMetadata(AI_TOOL_METADATA_KEY, proto, methodName);
      if (!metadata) continue;
      const method = instance[methodName];
      if (typeof method !== "function") continue;
      this.tools.set(metadata.name, {
        ...metadata,
        execute: (args) => method.call(instance, args)
      });
    }
  }

  /** Provider-facing tool definitions: name/description/parameters only. */
  getDefinitions() {
    const definitions = [];
    for (const { name, description, parameters } of this.tools.values()) {
      definitions.push({ name, description, parameters });
    }
    return definitions;
  }

  /** Run a registered tool by name; rejects if the name is unknown. */
  async execute(name, args) {
    const tool = this.tools.get(name);
    if (!tool) {
      throw new Error(`Tool "${name}" not found in registry`);
    }
    return tool.execute(args);
  }

  /** Whether a tool with this name is registered. */
  has(name) {
    return this.tools.has(name);
  }

  /** Number of registered tools. */
  get size() {
    return this.tools.size;
  }
}
|
|
13299
|
+
|
|
13300
|
+
// src/ai/ai-module.ts
/**
 * Static module that wires the AI service, its options, and the tool
 * registry into the DI container via module metadata.
 */
class AiModule {
  /**
   * Configure the module: instantiates AiService eagerly, attaches a
   * fresh ToolRegistry, and appends providers/exports onto any existing
   * module metadata.
   * NOTE(review): because providers/exports are appended to `existing`,
   * calling forRoot() twice without reset() accumulates duplicate
   * registrations — confirm whether that is intended.
   */
  static forRoot(options) {
    const toolRegistry = new ToolRegistry;
    const aiService = new AiService(options);
    aiService.setToolRegistry(toolRegistry);
    const providers2 = [
      { provide: AI_MODULE_OPTIONS_TOKEN, useValue: options },
      { provide: AI_SERVICE_TOKEN, useValue: aiService },
      { provide: AI_TOOL_REGISTRY_TOKEN, useValue: toolRegistry },
      AiService
    ];
    // merge with whatever @Module() (or a prior forRoot) already recorded
    const existing = Reflect.getMetadata(MODULE_METADATA_KEY, AiModule) || {};
    Reflect.defineMetadata(MODULE_METADATA_KEY, {
      ...existing,
      providers: [...existing.providers || [], ...providers2],
      exports: [
        ...existing.exports || [],
        AI_SERVICE_TOKEN,
        AI_TOOL_REGISTRY_TOKEN,
        AiService
      ]
    }, AiModule);
    return AiModule;
  }
  /** Drop all module metadata (used by tests to get a clean slate). */
  static reset() {
    Reflect.deleteMetadata(MODULE_METADATA_KEY, AiModule);
  }
}
// Register base (empty) module metadata via the legacy decorator helper.
AiModule = __legacyDecorateClassTS([
  Module({ providers: [] })
], AiModule);
|
|
13332
|
+
// src/ai/providers/openai-provider.ts
// Built-in per-model pricing, merged with (and overridable by)
// `config.pricing` in the OpenAIProvider constructor.
// Units: USD per 1,000,000 tokens — estimateCost() divides the
// token-weighted sum by 1e6.
var DEFAULT_PRICING = {
  "gpt-4o": { input: 2.5, output: 10 },
  "gpt-4o-mini": { input: 0.15, output: 0.6 },
  "gpt-4-turbo": { input: 10, output: 30 },
  "gpt-3.5-turbo": { input: 0.5, output: 1.5 }
};
|
|
13339
|
+
|
|
13340
|
+
/**
 * Chat-completion provider backed by the OpenAI REST API
 * (POST {baseUrl}/chat/completions), with tool-call support,
 * SSE streaming re-emitted as simplified SSE chunks, and a
 * pricing-table-based cost estimate.
 */
class OpenAIProvider {
  name = "openai";
  apiKey;
  baseUrl;        // normalized without trailing slash
  defaultModel;   // used when a request omits `model`
  pricing;        // USD per 1M tokens, DEFAULT_PRICING overlaid by config
  constructor(config) {
    this.apiKey = config.apiKey;
    this.baseUrl = (config.baseUrl ?? "https://api.openai.com/v1").replace(/\/$/, "");
    this.defaultModel = config.defaultModel ?? "gpt-4o";
    this.pricing = { ...DEFAULT_PRICING, ...config.pricing ?? {} };
  }
  /**
   * Non-streaming completion. Maps the OpenAI response shape onto the
   * library's normalized { content, toolCalls, usage, finishReason }.
   * NOTE(review): every finish_reason other than "tool_calls" is
   * collapsed to "stop" — e.g. "length" is not distinguished.
   */
  async complete(request) {
    const model = request.model ?? this.defaultModel;
    const body = {
      model,
      messages: request.messages,
      temperature: request.temperature,
      max_tokens: request.maxTokens
    };
    if (request.tools && request.tools.length > 0) {
      // translate to OpenAI's function-tool envelope
      body["tools"] = request.tools.map((t) => ({
        type: "function",
        function: { name: t.name, description: t.description, parameters: t.parameters }
      }));
    }
    const response = await this.post("/chat/completions", body);
    const choice = response.choices?.[0];
    const usage = response.usage ?? { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0 };
    const message = choice?.message;
    return {
      content: message?.content ?? "",
      toolCalls: message?.tool_calls?.map((tc) => ({
        id: tc.id ?? "",
        name: tc.function?.name ?? "",
        // arguments arrive as a JSON string; bad JSON degrades to {}
        arguments: this.safeParseToolArguments(tc.function?.arguments)
      })),
      model,
      provider: this.name,
      usage: {
        promptTokens: usage.prompt_tokens ?? 0,
        completionTokens: usage.completion_tokens ?? 0,
        totalTokens: usage.total_tokens ?? 0,
        estimatedCostUsd: this.estimateCost(model, usage.prompt_tokens ?? 0, usage.completion_tokens ?? 0)
      },
      finishReason: choice?.finish_reason === "tool_calls" ? "tool_calls" : "stop"
    };
  }
  /**
   * Streaming completion. Reads the OpenAI SSE stream and re-emits
   * simplified SSE events: {content, done:false} per delta, {done:true}
   * on [DONE], {error, done:true} on HTTP/network failure. The stream
   * is always closed in `finally`.
   * NOTE(review): tool-call deltas in the stream are ignored — only
   * text content is forwarded.
   */
  stream(request) {
    const model = request.model ?? this.defaultModel;
    const body = {
      model,
      messages: request.messages,
      temperature: request.temperature,
      max_tokens: request.maxTokens,
      stream: true
    };
    if (request.tools && request.tools.length > 0) {
      body["tools"] = request.tools.map((t) => ({
        type: "function",
        function: { name: t.name, description: t.description, parameters: t.parameters }
      }));
    }
    const encoder = new TextEncoder;
    // captured locally: `this` is not available inside start()
    const apiKey = this.apiKey;
    const baseUrl = this.baseUrl;
    return new ReadableStream({
      async start(controller2) {
        try {
          const res = await fetch(`${baseUrl}/chat/completions`, {
            method: "POST",
            headers: {
              "Content-Type": "application/json",
              Authorization: `Bearer ${apiKey}`
            },
            body: JSON.stringify(body)
          });
          if (!res.ok || !res.body) {
            const err = await res.text();
            controller2.enqueue(encoder.encode(`data: ${JSON.stringify({ error: err, done: true })}

`));
            controller2.close();
            return;
          }
          const reader = res.body.getReader();
          const dec = new TextDecoder;
          let buf = "";  // holds a partial SSE line across reads
          while (true) {
            const { done, value } = await reader.read();
            if (done)
              break;
            buf += dec.decode(value, { stream: true });
            const lines = buf.split(`
`);
            buf = lines.pop() ?? "";
            for (const line of lines) {
              if (line.startsWith("data: ")) {
                const data = line.slice(6).trim();
                if (data === "[DONE]") {
                  controller2.enqueue(encoder.encode(`data: ${JSON.stringify({ done: true })}

`));
                  continue;
                }
                try {
                  const parsed = JSON.parse(data);
                  const delta = parsed.choices?.[0]?.delta;
                  const chunk = { content: delta?.content ?? "", done: false };
                  controller2.enqueue(encoder.encode(`data: ${JSON.stringify(chunk)}

`));
                } catch {}  // skip malformed SSE payloads silently
              }
            }
          }
        } catch (err) {
          controller2.enqueue(encoder.encode(`data: ${JSON.stringify({ error: String(err), done: true })}

`));
        } finally {
          controller2.close();
        }
      }
    });
  }
  /** Rough token estimate: total content length / 4 chars-per-token heuristic. */
  countTokens(messages) {
    return Math.ceil(messages.reduce((sum, m) => sum + m.content.length, 0) / 4);
  }
  /**
   * POST JSON to the API and translate HTTP failures into typed errors:
   * 429 -> AiRateLimitError, 413 -> AiContextLengthError,
   * 408/504 -> AiTimeoutError, other non-2xx -> AiProviderError.
   * NOTE(review): Retry-After may be an HTTP-date rather than seconds,
   * in which case Number() yields NaN — confirm seconds-only is OK here.
   */
  async post(path, body) {
    const res = await fetch(`${this.baseUrl}${path}`, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        Authorization: `Bearer ${this.apiKey}`
      },
      body: JSON.stringify(body)
    });
    if (res.status === 429) {
      const retryAfter = res.headers.get("retry-after");
      throw new AiRateLimitError(this.name, retryAfter ? Number(retryAfter) * 1000 : undefined);
    }
    if (res.status === 413) {
      throw new AiContextLengthError(this.name);
    }
    if (res.status === 408 || res.status === 504) {
      // 30000 is a reported placeholder, not the actual elapsed time
      throw new AiTimeoutError(this.name, 30000);
    }
    if (!res.ok) {
      const text = await res.text();
      throw new AiProviderError(text, this.name, res.status);
    }
    return await res.json();
  }
  /**
   * Parse a tool-call arguments JSON string defensively: anything that
   * is missing, malformed, or not an object becomes {}.
   */
  safeParseToolArguments(argumentsJson) {
    if (!argumentsJson) {
      return {};
    }
    try {
      const parsed = JSON.parse(argumentsJson);
      if (typeof parsed === "object" && parsed !== null) {
        return parsed;
      }
      return {};
    } catch {
      return {};
    }
  }
  /** Estimated cost in USD; 0 for models absent from the pricing table. */
  estimateCost(model, promptTokens, completionTokens) {
    const pricing = this.pricing[model];
    if (!pricing)
      return 0;
    // pricing is per 1M tokens, hence the 1e6 divisor
    return (promptTokens * pricing.input + completionTokens * pricing.output) / 1e6;
  }
}
|
|
13515
|
+
// src/ai/providers/anthropic-provider.ts
|
|
13516
|
+
class AnthropicProvider {
|
|
13517
|
+
name = "anthropic";
|
|
13518
|
+
apiKey;
|
|
13519
|
+
baseUrl;
|
|
13520
|
+
defaultModel;
|
|
13521
|
+
anthropicVersion;
|
|
13522
|
+
constructor(config) {
|
|
13523
|
+
this.apiKey = config.apiKey;
|
|
13524
|
+
this.baseUrl = (config.baseUrl ?? "https://api.anthropic.com").replace(/\/$/, "");
|
|
13525
|
+
this.defaultModel = config.defaultModel ?? "claude-3-7-sonnet-20250219";
|
|
13526
|
+
this.anthropicVersion = config.anthropicVersion ?? "2023-06-01";
|
|
13527
|
+
}
|
|
13528
|
+
async complete(request) {
|
|
13529
|
+
const model = request.model ?? this.defaultModel;
|
|
13530
|
+
const systemMessages = request.messages.filter((m) => m.role === "system");
|
|
13531
|
+
const chatMessages = request.messages.filter((m) => m.role !== "system");
|
|
13532
|
+
const body = {
|
|
13533
|
+
model,
|
|
13534
|
+
messages: chatMessages.map((m) => ({
|
|
13535
|
+
role: m.role === "tool" ? "user" : m.role,
|
|
13536
|
+
content: m.role === "tool" ? [{ type: "tool_result", tool_use_id: m.toolCallId, content: m.content }] : m.content
|
|
13537
|
+
})),
|
|
13538
|
+
max_tokens: request.maxTokens ?? 4096,
|
|
13539
|
+
temperature: request.temperature
|
|
13540
|
+
};
|
|
13541
|
+
if (systemMessages.length > 0) {
|
|
13542
|
+
body["system"] = systemMessages.map((m) => m.content).join(`
|
|
13543
|
+
`);
|
|
13544
|
+
}
|
|
13545
|
+
if (request.tools && request.tools.length > 0) {
|
|
13546
|
+
body["tools"] = request.tools.map((t) => ({
|
|
13547
|
+
name: t.name,
|
|
13548
|
+
description: t.description,
|
|
13549
|
+
input_schema: t.parameters
|
|
13550
|
+
}));
|
|
13551
|
+
}
|
|
13552
|
+
const response = await this.post("/v1/messages", body);
|
|
13553
|
+
const usage = response["usage"] ?? { input_tokens: 0, output_tokens: 0 };
|
|
13554
|
+
let content = "";
|
|
13555
|
+
const toolCalls = [];
|
|
13556
|
+
const contentBlocks = response["content"];
|
|
13557
|
+
for (const block of contentBlocks) {
|
|
13558
|
+
if (block["type"] === "text")
|
|
13559
|
+
content += block["text"];
|
|
13560
|
+
else if (block["type"] === "tool_use") {
|
|
13561
|
+
toolCalls.push({
|
|
13562
|
+
id: block["id"],
|
|
13563
|
+
name: block["name"],
|
|
13564
|
+
arguments: block["input"]
|
|
13565
|
+
});
|
|
13566
|
+
}
|
|
13567
|
+
}
|
|
13568
|
+
return {
|
|
13569
|
+
content,
|
|
13570
|
+
toolCalls: toolCalls.length > 0 ? toolCalls : undefined,
|
|
13571
|
+
model,
|
|
13572
|
+
provider: this.name,
|
|
13573
|
+
usage: {
|
|
13574
|
+
promptTokens: usage.input_tokens,
|
|
13575
|
+
completionTokens: usage.output_tokens,
|
|
13576
|
+
totalTokens: usage.input_tokens + usage.output_tokens
|
|
13577
|
+
},
|
|
13578
|
+
finishReason: response["stop_reason"] === "tool_use" ? "tool_calls" : "stop"
|
|
13579
|
+
};
|
|
13580
|
+
}
|
|
13581
|
+
stream(request) {
  // Stream a chat completion from the Anthropic Messages API, re-emitting it
  // as a ReadableStream of SSE-style `data: {content, done}` byte frames.
  const model = request.model ?? this.defaultModel;
  // Anthropic takes system text as a top-level `system` field, not a message.
  const systemMessages = request.messages.filter((m) => m.role === "system");
  const chatMessages = request.messages.filter((m) => m.role !== "system");
  const body = {
    model,
    messages: chatMessages.map((m) => ({ role: m.role, content: m.content })),
    max_tokens: request.maxTokens ?? 4096,
    temperature: request.temperature,
    stream: true
  };
  if (systemMessages.length > 0) {
    body["system"] = systemMessages.map((m) => m.content).join(`
`);
  }
  // Captured locally: `this` is not available inside start() below.
  const apiKey = this.apiKey;
  const baseUrl = this.baseUrl;
  const anthropicVersion = this.anthropicVersion;
  const encoder = new TextEncoder;
  return new ReadableStream({
    async start(controller2) {
      try {
        const res = await fetch(`${baseUrl}/v1/messages`, {
          method: "POST",
          headers: {
            "Content-Type": "application/json",
            "x-api-key": apiKey,
            "anthropic-version": anthropicVersion
          },
          body: JSON.stringify(body)
        });
        if (!res.ok || !res.body) {
          // Surface HTTP errors as a terminal frame instead of throwing.
          controller2.enqueue(encoder.encode(`data: ${JSON.stringify({ error: await res.text(), done: true })}

`));
          controller2.close();
          return;
        }
        const reader = res.body.getReader();
        const dec = new TextDecoder;
        let buf = "";
        while (true) {
          const { done, value } = await reader.read();
          if (done)
            break;
          buf += dec.decode(value, { stream: true });
          // Split on newlines; keep the trailing partial line buffered for
          // the next chunk.
          const lines = buf.split(`
`);
          buf = lines.pop() ?? "";
          for (const line of lines) {
            if (line.startsWith("data: ")) {
              try {
                const parsed = JSON.parse(line.slice(6));
                if (parsed.type === "content_block_delta") {
                  // Text delta: forward the incremental content.
                  controller2.enqueue(encoder.encode(`data: ${JSON.stringify({ content: parsed.delta?.text ?? "", done: false })}

`));
                } else if (parsed.type === "message_stop") {
                  // End of message: emit the terminal frame.
                  controller2.enqueue(encoder.encode(`data: ${JSON.stringify({ done: true })}

`));
                }
              } catch {}
              // Malformed JSON lines are intentionally skipped (best-effort).
            }
          }
        }
      } catch (err) {
        // Network/stream failure: report as a terminal error frame.
        controller2.enqueue(encoder.encode(`data: ${JSON.stringify({ error: String(err), done: true })}

`));
      } finally {
        controller2.close();
      }
    }
  });
}
|
|
13657
|
+
countTokens(messages) {
|
|
13658
|
+
return Math.ceil(messages.reduce((sum, m) => sum + m.content.length, 0) / 4);
|
|
13659
|
+
}
|
|
13660
|
+
async post(path, body) {
  // POST a JSON body to the Anthropic API and return the parsed JSON
  // response. Maps well-known HTTP failures onto typed provider errors:
  // 429 -> AiRateLimitError, 413 -> AiContextLengthError, any other
  // non-2xx -> AiProviderError carrying the response text and status.
  const res = await fetch(`${this.baseUrl}${path}`, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      "x-api-key": this.apiKey,
      "anthropic-version": this.anthropicVersion
    },
    body: JSON.stringify(body)
  });
  if (res.status === 429)
    throw new AiRateLimitError(this.name);
  if (res.status === 413)
    throw new AiContextLengthError(this.name);
  if (!res.ok)
    throw new AiProviderError(await res.text(), this.name, res.status);
  return res.json();
}
|
|
13678
|
+
}
|
|
13679
|
+
// src/ai/providers/ollama-provider.ts
class OllamaProvider {
  // Chat-completion provider backed by a local Ollama server's /api/chat
  // endpoint. No API key is required.
  name = "ollama";
  baseUrl;
  defaultModel;
  constructor(config = {}) {
    // Strip a single trailing slash so path joins stay clean.
    this.baseUrl = (config.baseUrl ?? "http://localhost:11434").replace(/\/$/, "");
    this.defaultModel = config.defaultModel ?? "llama3.2";
  }
  // Non-streaming completion; normalizes Ollama's response (message content,
  // eval counts) into the provider-agnostic result shape.
  async complete(request) {
    const model = request.model ?? this.defaultModel;
    const res = await fetch(`${this.baseUrl}/api/chat`, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({
        model,
        messages: request.messages.map((m) => ({ role: m.role, content: m.content })),
        stream: false,
        options: {
          temperature: request.temperature,
          num_predict: request.maxTokens
        }
      })
    });
    if (!res.ok) {
      throw new AiProviderError(await res.text(), this.name, res.status);
    }
    const data = await res.json();
    const message = data["message"];
    // Ollama reports token usage as eval counts; default to 0 when absent.
    const evalCount = data["eval_count"] ?? 0;
    const promptEvalCount = data["prompt_eval_count"] ?? 0;
    return {
      content: message["content"] ?? "",
      model,
      provider: this.name,
      usage: {
        promptTokens: promptEvalCount,
        completionTokens: evalCount,
        totalTokens: promptEvalCount + evalCount
      },
      // NOTE(review): always reports "stop"; Ollama's own done_reason (if
      // any) is not inspected — confirm whether tool/length stops matter here.
      finishReason: "stop"
    };
  }
  // Streaming completion: Ollama emits newline-delimited JSON objects; each
  // is re-emitted as an SSE-style `data: {content, done}` byte frame.
  stream(request) {
    const model = request.model ?? this.defaultModel;
    // Captured locally: `this` is not available inside start() below.
    const baseUrl = this.baseUrl;
    const encoder = new TextEncoder;
    return new ReadableStream({
      async start(controller2) {
        try {
          const res = await fetch(`${baseUrl}/api/chat`, {
            method: "POST",
            headers: { "Content-Type": "application/json" },
            body: JSON.stringify({
              model,
              messages: request.messages.map((m) => ({ role: m.role, content: m.content })),
              stream: true,
              options: {
                temperature: request.temperature,
                num_predict: request.maxTokens
              }
            })
          });
          if (!res.ok || !res.body) {
            // Surface HTTP errors as a terminal frame instead of throwing.
            controller2.enqueue(encoder.encode(`data: ${JSON.stringify({ error: await res.text(), done: true })}

`));
            controller2.close();
            return;
          }
          const reader = res.body.getReader();
          const dec = new TextDecoder;
          let buf = "";
          while (true) {
            const { done, value } = await reader.read();
            if (done)
              break;
            buf += dec.decode(value, { stream: true });
            // Split on newlines; keep the trailing partial line buffered.
            const lines = buf.split(`
`);
            buf = lines.pop() ?? "";
            for (const line of lines) {
              if (!line.trim())
                continue;
              try {
                // Each non-empty line is one standalone JSON object.
                const parsed = JSON.parse(line);
                const msgContent = parsed["message"]?.["content"] ?? "";
                const isDone = Boolean(parsed["done"]);
                controller2.enqueue(encoder.encode(`data: ${JSON.stringify({ content: msgContent, done: isDone })}

`));
              } catch {}
              // Malformed lines are intentionally skipped (best-effort).
            }
          }
        } catch (err) {
          // Network/stream failure: report as a terminal error frame.
          controller2.enqueue(encoder.encode(`data: ${JSON.stringify({ error: String(err), done: true })}

`));
        } finally {
          controller2.close();
        }
      }
    });
  }
  // Heuristic token estimate: roughly 4 characters per token.
  countTokens(messages) {
    return Math.ceil(messages.reduce((sum, m) => sum + m.content.length, 0) / 4);
  }
}
|
|
13787
|
+
// src/ai/providers/google-provider.ts
class GoogleProvider {
  // Chat-completion provider backed by the Google Generative Language
  // (Gemini) REST API.
  name = "google";
  apiKey;
  defaultModel;
  baseUrl;
  constructor(config) {
    this.apiKey = config.apiKey;
    this.defaultModel = config.defaultModel ?? "gemini-2.0-flash";
    // Strip a single trailing slash so path joins stay clean.
    this.baseUrl = (config.baseUrl ?? "https://generativelanguage.googleapis.com/v1beta").replace(/\/$/, "");
  }
  // Non-streaming completion: maps the unified request onto Gemini's
  // generateContent payload and normalizes text parts, functionCall parts
  // and usageMetadata back into the provider-agnostic result shape.
  async complete(request) {
    const model = request.model ?? this.defaultModel;
    const { contents, systemInstruction } = this.toGeminiMessages(request.messages);
    const body = {
      contents,
      generationConfig: {
        temperature: request.temperature,
        maxOutputTokens: request.maxTokens
      }
    };
    if (systemInstruction)
      body["system_instruction"] = { parts: [{ text: systemInstruction }] };
    if (request.tools && request.tools.length > 0) {
      body["tools"] = [{
        functionDeclarations: request.tools.map((t) => ({
          name: t.name,
          description: t.description,
          parameters: t.parameters
        }))
      }];
    }
    const res = await fetch(`${this.baseUrl}/models/${model}:generateContent?key=${this.apiKey}`, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify(body)
    });
    if (res.status === 429)
      throw new AiRateLimitError(this.name);
    if (!res.ok)
      throw new AiProviderError(await res.text(), this.name, res.status);
    const data = await res.json();
    const candidate = data["candidates"]?.[0];
    const parts = candidate?.["content"]?.["parts"] ?? [];
    let content = "";
    const toolCalls = [];
    for (const part of parts) {
      if (part["text"])
        content += part["text"];
      else if (part["functionCall"]) {
        const fc = part["functionCall"];
        toolCalls.push({
          // FIX: `fc-${Date.now()}` alone produced identical ids for multiple
          // function calls returned in the same response (same millisecond).
          // Suffixing the array index keeps ids unique per completion.
          id: `fc-${Date.now()}-${toolCalls.length}`,
          name: fc["name"],
          arguments: fc["args"]
        });
      }
    }
    const usageMeta = data["usageMetadata"] ?? {};
    return {
      content,
      toolCalls: toolCalls.length > 0 ? toolCalls : undefined,
      model,
      provider: this.name,
      usage: {
        promptTokens: usageMeta["promptTokenCount"] ?? 0,
        completionTokens: usageMeta["candidatesTokenCount"] ?? 0,
        totalTokens: usageMeta["totalTokenCount"] ?? 0
      },
      finishReason: toolCalls.length > 0 ? "tool_calls" : "stop"
    };
  }
  // Streaming completion via streamGenerateContent (SSE), re-emitted as
  // `data: {content, done}` byte frames; a final {done: true} frame is
  // always sent after the upstream stream ends.
  stream(request) {
    const model = request.model ?? this.defaultModel;
    const { contents, systemInstruction } = this.toGeminiMessages(request.messages);
    // Captured locally: `this` is not available inside start() below.
    const apiKey = this.apiKey;
    const baseUrl = this.baseUrl;
    const encoder = new TextEncoder;
    const body = {
      contents,
      generationConfig: { temperature: request.temperature, maxOutputTokens: request.maxTokens }
    };
    if (systemInstruction)
      body["system_instruction"] = { parts: [{ text: systemInstruction }] };
    return new ReadableStream({
      async start(controller2) {
        try {
          const res = await fetch(`${baseUrl}/models/${model}:streamGenerateContent?key=${apiKey}&alt=sse`, {
            method: "POST",
            headers: { "Content-Type": "application/json" },
            body: JSON.stringify(body)
          });
          if (!res.ok || !res.body) {
            // Surface HTTP errors as a terminal frame instead of throwing.
            controller2.enqueue(encoder.encode(`data: ${JSON.stringify({ error: await res.text(), done: true })}

`));
            controller2.close();
            return;
          }
          const reader = res.body.getReader();
          const dec = new TextDecoder;
          let buf = "";
          while (true) {
            const { done, value } = await reader.read();
            if (done)
              break;
            buf += dec.decode(value, { stream: true });
            // Split on newlines; keep the trailing partial line buffered.
            const lines = buf.split(`
`);
            buf = lines.pop() ?? "";
            for (const line of lines) {
              if (line.startsWith("data: ")) {
                try {
                  const parsed = JSON.parse(line.slice(6));
                  const candidate = parsed["candidates"]?.[0];
                  const parts = candidate?.["content"]?.["parts"] ?? [];
                  const text = parts.map((p) => p["text"] ?? "").join("");
                  controller2.enqueue(encoder.encode(`data: ${JSON.stringify({ content: text, done: false })}

`));
                } catch {}
                // Malformed lines are intentionally skipped (best-effort).
              }
            }
          }
          // Upstream finished cleanly: emit the terminal frame.
          controller2.enqueue(encoder.encode(`data: ${JSON.stringify({ done: true })}

`));
        } catch (err) {
          // Network/stream failure: report as a terminal error frame.
          controller2.enqueue(encoder.encode(`data: ${JSON.stringify({ error: String(err), done: true })}

`));
        } finally {
          controller2.close();
        }
      }
    });
  }
  // Heuristic token estimate: roughly 4 characters per token.
  countTokens(messages) {
    return Math.ceil(messages.reduce((sum, m) => sum + m.content.length, 0) / 4);
  }
  // Converts unified messages to Gemini's format: system messages are pulled
  // out into a single newline-joined systemInstruction string, assistant
  // messages become role "model", everything else becomes role "user".
  toGeminiMessages(messages) {
    const systemParts = messages.filter((m) => m.role === "system").map((m) => m.content);
    const chatMessages = messages.filter((m) => m.role !== "system");
    const contents = chatMessages.map((m) => ({
      role: m.role === "assistant" ? "model" : "user",
      parts: [{ text: m.content }]
    }));
    return {
      contents,
      systemInstruction: systemParts.length > 0 ? systemParts.join(`
`) : undefined
    };
  }
}
|
|
13941
|
+
// src/conversation/types.ts
// Dependency-injection tokens for resolving the conversation service and
// its options from the module container.
var CONVERSATION_SERVICE_TOKEN = Symbol("@dangao/bun-server:conversation:service");
var CONVERSATION_OPTIONS_TOKEN = Symbol("@dangao/bun-server:conversation:options");
|
|
13944
|
+
// src/conversation/service.ts
|
|
13945
|
+
init_decorators();
|
|
13946
|
+
init_decorators();
|
|
13947
|
+
class ConversationService {
  // Conversation lifecycle manager on top of a pluggable store, with
  // optional auto-trimming and summarizer-based history compression.
  store;
  maxMessages;      // size cap enforced by autoTrim (default 100)
  autoTrim;         // trim after each append when summarization doesn't run
  summaryThreshold; // message count at which summarization kicks in (optional)
  constructor(options) {
    this.store = options.store;
    this.maxMessages = options.maxMessages ?? 100;
    this.autoTrim = options.autoTrim ?? true;
    this.summaryThreshold = options.summaryThreshold;
  }
  // Create a new conversation, optionally seeded with metadata.
  async create(metadata) {
    return this.store.create(metadata);
  }
  // Fetch a conversation by id (store-defined null/absent semantics).
  async get(id) {
    return this.store.get(id);
  }
  // Return the message history for a conversation, or [] if it doesn't exist.
  async getHistory(id) {
    const conv = await this.store.get(id);
    return conv?.messages ?? [];
  }
  // Append a message. If a summarizer is supplied and the configured
  // threshold is reached, the history is compressed via summarization;
  // otherwise autoTrim (when enabled) caps the history at maxMessages.
  async appendMessage(id, message, options) {
    await this.store.appendMessage(id, message);
    const opts = options ?? {};
    const summarizer = opts.summarizer;
    const summaryThreshold = this.summaryThreshold;
    if (summaryThreshold && summarizer) {
      const conv = await this.store.get(id);
      if (conv && conv.messages.length >= summaryThreshold) {
        await this.summarizeAndCompress(id, conv.messages, summarizer);
        return;
      }
    }
    if (this.autoTrim) {
      await this.store.trim(id, this.maxMessages);
    }
  }
  // Delete a conversation; returns the store's deletion result.
  async delete(id) {
    return this.store.delete(id);
  }
  // List conversation ids known to the store.
  async list() {
    return this.store.list();
  }
  // Explicitly compress a conversation's history using the given summarizer.
  async summarize(id, summarizer) {
    const conv = await this.store.get(id);
    if (!conv)
      return;
    await this.summarizeAndCompress(id, conv.messages, summarizer);
  }
  // Replace older history with a single system summary message, keeping the
  // newest quarter (maxMessages / 4) of messages verbatim.
  async summarizeAndCompress(id, messages, summarizer) {
    const keepCount = Math.floor(this.maxMessages / 4);
    const toSummarize = messages.slice(0, -keepCount);
    const toKeep = messages.slice(-keepCount);
    if (toSummarize.length === 0)
      return;
    try {
      const summary = await summarizer(toSummarize);
      const summaryMessage = {
        role: "system",
        content: `[Conversation summary: ${summary}]`
      };
      const conv = await this.store.get(id);
      if (!conv)
        return;
      const newMessages = [summaryMessage, ...toKeep];
      // FIX: the previous code called `trim(id, 0)` once per existing
      // message; a single trim-to-zero already empties the history, so the
      // loop only added redundant store round-trips.
      await this.store.trim(id, 0);
      for (const msg of newMessages) {
        await this.store.appendMessage(id, msg);
      }
    } catch {
      // Summarizer failed: fall back to plain trimming so the conversation
      // still respects the size cap.
      await this.store.trim(id, this.maxMessages);
    }
  }
}
|
|
14023
|
+
// Apply transpiled legacy-TS decorator metadata: mark the service as
// injectable and bind constructor parameter 0 to the options DI token.
ConversationService = __legacyDecorateClassTS([
  Injectable(),
  __legacyDecorateParamTS(0, Inject(CONVERSATION_OPTIONS_TOKEN)),
  __legacyMetadataTS("design:paramtypes", [
    // ConversationModuleOptions is a type-only name at runtime; fall back
    // to Object when it doesn't exist as a value.
    typeof ConversationModuleOptions === "undefined" ? Object : ConversationModuleOptions
  ])
], ConversationService);
|
|
14030
|
+
// src/conversation/decorators.ts
function InjectConversation() {
  // Parameter decorator: records (via reflect-metadata) which parameter
  // indexes of the decorated method should receive the conversation object.
  return (target, propertyKey, parameterIndex) => {
    const metadataKey = "conversation:inject:params";
    const indexes = Reflect.getMetadata(metadataKey, target, propertyKey) ?? [];
    indexes.push(parameterIndex);
    Reflect.defineMetadata(metadataKey, indexes, target, propertyKey);
  };
}
|
|
14038
|
+
function extractConversationId(ctx) {
  // Resolution order: the ?conversationId= query parameter wins, then the
  // x-conversation-id request header; undefined when neither is present.
  const { searchParams } = new URL(ctx.request.url);
  const candidates = [
    searchParams.get("conversationId"),
    ctx.request.headers.get("x-conversation-id")
  ];
  for (const candidate of candidates) {
    if (candidate)
      return candidate;
  }
  return;
}
|
|
14048
|
+
// src/conversation/conversation-module.ts
|
|
14049
|
+
init_module();
|
|
14050
|
+
|
|
14051
|
+
// src/conversation/stores/memory-store.ts
|
|
14052
|
+
class MemoryConversationStore {
|
|
14053
|
+
conversations = new Map;
|
|
14054
|
+
async create(metadata = {}) {
|
|
14055
|
+
const id = crypto.randomUUID();
|
|
14056
|
+
const conversation = {
|
|
14057
|
+
id,
|
|
14058
|
+
messages: [],
|
|
14059
|
+
metadata,
|
|
14060
|
+
createdAt: new Date,
|
|
14061
|
+
updatedAt: new Date
|
|
14062
|
+
};
|
|
14063
|
+
this.conversations.set(id, conversation);
|
|
14064
|
+
return { ...conversation, messages: [] };
|
|
14065
|
+
}
|
|
14066
|
+
async get(id) {
|
|
14067
|
+
const conv = this.conversations.get(id);
|
|
14068
|
+
if (!conv)
|
|
14069
|
+
return null;
|
|
14070
|
+
return { ...conv, messages: [...conv.messages] };
|
|
14071
|
+
}
|
|
14072
|
+
async appendMessage(id, message) {
|
|
14073
|
+
const conv = this.conversations.get(id);
|
|
14074
|
+
if (!conv)
|
|
14075
|
+
throw new Error(`Conversation "${id}" not found`);
|
|
14076
|
+
conv.messages.push(message);
|
|
14077
|
+
conv.updatedAt = new Date;
|
|
14078
|
+
}
|
|
14079
|
+
async trim(id, maxMessages) {
|
|
14080
|
+
const conv = this.conversations.get(id);
|
|
14081
|
+
if (!conv)
|
|
14082
|
+
return;
|
|
14083
|
+
if (conv.messages.length > maxMessages) {
|
|
14084
|
+
conv.messages = conv.messages.slice(-maxMessages);
|
|
14085
|
+
conv.updatedAt = new Date;
|
|
14086
|
+
}
|
|
14087
|
+
}
|
|
14088
|
+
async delete(id) {
|
|
14089
|
+
return this.conversations.delete(id);
|
|
14090
|
+
}
|
|
14091
|
+
async list() {
|
|
14092
|
+
return Array.from(this.conversations.keys());
|
|
14093
|
+
}
|
|
14094
|
+
get size() {
|
|
14095
|
+
return this.conversations.size;
|
|
14096
|
+
}
|
|
14097
|
+
}
|
|
14098
|
+
|
|
14099
|
+
// src/conversation/conversation-module.ts
class ConversationModule {
  // Static DI module: forRoot() registers the conversation providers on the
  // module's reflect-metadata and returns the module class itself.
  static forRoot(options = {}) {
    const resolvedOptions = {
      ...options,
      // Default to the in-process store when none is supplied.
      store: options.store ?? new MemoryConversationStore
    };
    const service2 = new ConversationService(resolvedOptions);
    const providers2 = [
      { provide: CONVERSATION_OPTIONS_TOKEN, useValue: resolvedOptions },
      { provide: CONVERSATION_SERVICE_TOKEN, useValue: service2 },
      ConversationService
    ];
    // Merge with any metadata already registered (forRoot may be called
    // after other decorators have populated it).
    const existing = Reflect.getMetadata(MODULE_METADATA_KEY, ConversationModule) || {};
    Reflect.defineMetadata(MODULE_METADATA_KEY, {
      ...existing,
      providers: [...existing.providers || [], ...providers2],
      exports: [
        ...existing.exports || [],
        CONVERSATION_SERVICE_TOKEN,
        ConversationService
      ]
    }, ConversationModule);
    return ConversationModule;
  }
  // Test/teardown helper: wipe the module metadata registered by forRoot().
  static reset() {
    Reflect.deleteMetadata(MODULE_METADATA_KEY, ConversationModule);
  }
}
// Register the class as a module with an (initially empty) provider list.
ConversationModule = __legacyDecorateClassTS([
  Module({ providers: [] })
], ConversationModule);
|
|
14131
|
+
// src/conversation/stores/redis-store.ts
class RedisConversationStore {
  // Conversation storage on a Redis client. Each conversation is one JSON
  // string at key `${keyPrefix}${id}` with a TTL refreshed on every save.
  client;
  keyPrefix; // defaults to "conv:"
  ttl;       // seconds; defaults to 86400 (one day)
  constructor(config) {
    this.client = config.client;
    this.keyPrefix = config.keyPrefix ?? "conv:";
    this.ttl = config.ttl ?? 86400;
  }
  // Create and persist a new conversation with a random UUID.
  async create(metadata = {}) {
    const id = crypto.randomUUID();
    const conversation = {
      id,
      messages: [],
      metadata,
      createdAt: new Date,
      updatedAt: new Date
    };
    await this.save(conversation);
    return conversation;
  }
  // Load and revive a conversation (Date fields are serialized as strings).
  async get(id) {
    const raw = await this.client.get(this.key(id));
    if (!raw)
      return null;
    const parsed = JSON.parse(raw);
    parsed.createdAt = new Date(parsed.createdAt);
    parsed.updatedAt = new Date(parsed.updatedAt);
    return parsed;
  }
  // Read-modify-write append; throws when the conversation does not exist.
  // NOTE(review): not atomic — concurrent appends to the same id can lose
  // messages; confirm whether callers serialize access.
  async appendMessage(id, message) {
    const conv = await this.get(id);
    if (!conv)
      throw new Error(`Conversation "${id}" not found`);
    conv.messages.push(message);
    conv.updatedAt = new Date;
    await this.save(conv);
  }
  // Keep only the newest maxMessages entries; persists only when changed.
  async trim(id, maxMessages) {
    const conv = await this.get(id);
    if (!conv)
      return;
    if (conv.messages.length > maxMessages) {
      conv.messages = conv.messages.slice(-maxMessages);
      conv.updatedAt = new Date;
      await this.save(conv);
    }
  }
  // Delete the key; true when Redis reports at least one key removed.
  async delete(id) {
    const result = await this.client.del(this.key(id));
    return Number(result) > 0;
  }
  // List ids by scanning keys with the prefix and stripping it.
  // NOTE(review): KEYS is O(n) over the whole keyspace and blocks Redis —
  // consider SCAN for production-sized datasets.
  async list() {
    const keys = await this.client.keys(`${this.keyPrefix}*`);
    return keys.map((k) => k.slice(this.keyPrefix.length));
  }
  // Build the Redis key for a conversation id.
  key(id) {
    return `${this.keyPrefix}${id}`;
  }
  // Persist the conversation as JSON with the configured TTL ("EX" seconds).
  async save(conversation) {
    await this.client.set(this.key(conversation.id), JSON.stringify(conversation), "EX", this.ttl);
  }
}
|
|
14195
|
+
// src/conversation/stores/database-store.ts
class DatabaseConversationStore {
  // Conversation storage on a SQL database. Messages and metadata are kept
  // as JSON text columns; the table is created lazily on first use.
  db;
  tableName;           // defaults to "conversations"
  initialized = false; // guards the one-time CREATE TABLE
  constructor(config) {
    this.db = config.database;
    this.tableName = config.tableName ?? "conversations";
  }
  // Insert a new conversation row with a random UUID and ISO timestamps.
  async create(metadata = {}) {
    await this.ensureTable();
    const id = crypto.randomUUID();
    const now = new Date().toISOString();
    await this.db.execute(`INSERT INTO ${this.tableName} (id, messages, metadata, created_at, updated_at) VALUES (?, ?, ?, ?, ?)`, [id, "[]", JSON.stringify(metadata), now, now]);
    return { id, messages: [], metadata, createdAt: new Date(now), updatedAt: new Date(now) };
  }
  // Load one conversation row, or null when the id is unknown.
  async get(id) {
    await this.ensureTable();
    const rows = await this.db.query(`SELECT * FROM ${this.tableName} WHERE id = ?`, [id]);
    if (!rows.length)
      return null;
    return this.rowToConversation(rows[0]);
  }
  // Read-modify-write append of the JSON messages column; throws when the
  // conversation does not exist. NOTE(review): not atomic under concurrent
  // writers — verify callers serialize access per conversation.
  async appendMessage(id, message) {
    await this.ensureTable();
    const conv = await this.get(id);
    if (!conv)
      throw new Error(`Conversation "${id}" not found`);
    const messages = [...conv.messages, message];
    const now = new Date().toISOString();
    await this.db.execute(`UPDATE ${this.tableName} SET messages = ?, updated_at = ? WHERE id = ?`, [JSON.stringify(messages), now, id]);
  }
  // Keep only the newest maxMessages entries; no-op when already within cap.
  async trim(id, maxMessages) {
    await this.ensureTable();
    const conv = await this.get(id);
    if (!conv || conv.messages.length <= maxMessages)
      return;
    const trimmed = conv.messages.slice(-maxMessages);
    const now = new Date().toISOString();
    await this.db.execute(`UPDATE ${this.tableName} SET messages = ?, updated_at = ? WHERE id = ?`, [JSON.stringify(trimmed), now, id]);
  }
  // Delete the row for this id.
  // NOTE(review): always returns true, even when no row matched — the other
  // stores return whether something was actually removed; confirm callers
  // don't rely on an accurate deletion flag here.
  async delete(id) {
    await this.ensureTable();
    await this.db.execute(`DELETE FROM ${this.tableName} WHERE id = ?`, [id]);
    return true;
  }
  // All stored conversation ids.
  async list() {
    await this.ensureTable();
    const rows = await this.db.query(`SELECT id FROM ${this.tableName}`);
    return rows.map((r) => r.id);
  }
  // Lazily create the backing table once per store instance.
  async ensureTable() {
    if (this.initialized)
      return;
    await this.db.execute(`
      CREATE TABLE IF NOT EXISTS ${this.tableName} (
        id TEXT PRIMARY KEY,
        messages TEXT NOT NULL DEFAULT '[]',
        metadata TEXT NOT NULL DEFAULT '{}',
        created_at TEXT NOT NULL,
        updated_at TEXT NOT NULL
      )
    `);
    this.initialized = true;
  }
  // Map a raw DB row (JSON/ISO text columns) back to a conversation object.
  rowToConversation(row) {
    return {
      id: row["id"],
      messages: JSON.parse(row["messages"]),
      metadata: JSON.parse(row["metadata"]),
      createdAt: new Date(row["created_at"]),
      updatedAt: new Date(row["updated_at"])
    };
  }
}
|
|
14270
|
+
// src/prompt/types.ts
// Dependency-injection tokens for resolving the prompt service and its
// options from the module container.
var PROMPT_SERVICE_TOKEN = Symbol("@dangao/bun-server:prompt:service");
var PROMPT_OPTIONS_TOKEN = Symbol("@dangao/bun-server:prompt:options");
|
|
14273
|
+
function extractVariables(content) {
  // Collect the unique {{name}} placeholder names from a template,
  // preserving first-seen order.
  const seen = new Set;
  for (const match of content.matchAll(/\{\{(\w+)\}\}/g)) {
    seen.add(match[1]);
  }
  return [...seen];
}
|
|
14282
|
+
function renderTemplate(content, vars) {
  // Substitute each {{name}} with vars[name]. Names missing from vars
  // (null/undefined) are left as the original placeholder text.
  return content.replace(/\{\{(\w+)\}\}/g, (placeholder, name) => {
    const value = vars[name];
    return value ?? placeholder;
  });
}
|
|
14285
|
+
// src/prompt/service.ts
|
|
14286
|
+
init_decorators();
|
|
14287
|
+
init_decorators();
|
|
14288
|
+
init_http_exception();
|
|
14289
|
+
class PromptService {
  // Thin service over a prompt-template store, adding HTTP 404 errors for
  // missing templates and {{variable}} rendering.
  store;
  constructor(options) {
    this.store = options.store;
  }
  // Fetch the latest version of a template; 404s when it doesn't exist.
  async get(id) {
    const template = await this.store.get(id);
    if (!template)
      throw new HttpException(404, `Prompt template "${id}" not found`);
    return template;
  }
  // Fetch a specific stored version; 404s when id or version is missing.
  async getVersion(id, version) {
    const template = await this.store.getVersion(id, version);
    if (!template)
      throw new HttpException(404, `Prompt template "${id}" version ${version} not found`);
    return template;
  }
  // List all templates (store-defined shape).
  async list() {
    return this.store.list();
  }
  // Create a new template; delegated entirely to the store.
  async create(input) {
    return this.store.create(input);
  }
  // Update an existing template; delegated entirely to the store.
  async update(id, input) {
    return this.store.update(id, input);
  }
  // Delete a template; delegated entirely to the store.
  async delete(id) {
    return this.store.delete(id);
  }
  // Render the latest version of a template with the given variables.
  async render(id, vars) {
    const template = await this.get(id);
    return renderTemplate(template.content, vars);
  }
  // Render a specific version of a template with the given variables.
  async renderVersion(id, version, vars) {
    const template = await this.getVersion(id, version);
    return renderTemplate(template.content, vars);
  }
}
// Apply transpiled legacy-TS decorator metadata: mark the service as
// injectable and bind constructor parameter 0 to the options DI token.
PromptService = __legacyDecorateClassTS([
  Injectable(),
  __legacyDecorateParamTS(0, Inject(PROMPT_OPTIONS_TOKEN)),
  __legacyMetadataTS("design:paramtypes", [
    // PromptModuleOptions is a type-only name at runtime; fall back to
    // Object when it doesn't exist as a value.
    typeof PromptModuleOptions === "undefined" ? Object : PromptModuleOptions
  ])
], PromptService);
|
|
14334
|
+
// src/prompt/prompt-module.ts
|
|
14335
|
+
init_module();
|
|
14336
|
+
|
|
14337
|
+
// src/prompt/stores/memory-store.ts
class InMemoryPromptStore {
  // In-process prompt store. `templates` holds the latest version per id;
  // `history` holds a Map of version-number -> snapshot per id.
  templates = new Map;
  history = new Map;
  // Latest version of a template, or null when unknown.
  // NOTE(review): returns the live stored object (not a copy) — callers
  // could mutate it; create()/update() return snapshots instead.
  async get(id) {
    return this.templates.get(id) ?? null;
  }
  // A specific stored version snapshot, or null when unknown.
  async getVersion(id, version) {
    return this.history.get(id)?.get(version) ?? null;
  }
  // Latest versions of all templates.
  async list() {
    return Array.from(this.templates.values());
  }
  // Create version 1 of a template; throws when the id already exists.
  // Variables are extracted from {{name}} placeholders in the content.
  async create(input) {
    const id = input.id ?? crypto.randomUUID();
    if (this.templates.has(id)) {
      throw new Error(`Prompt template "${id}" already exists`);
    }
    const now = new Date;
    const template = {
      id,
      name: input.name,
      content: input.content,
      version: 1,
      variables: extractVariables(input.content),
      description: input.description,
      createdAt: now,
      updatedAt: now
    };
    this.templates.set(id, template);
    this.saveVersion(template);
    // Return a snapshot so callers cannot mutate the stored record.
    return { ...template };
  }
  // Produce a new version: unspecified fields carry over, version is
  // incremented, and variables are re-extracted from the (possibly new)
  // content. Throws when the id is unknown.
  async update(id, input) {
    const existing = this.templates.get(id);
    if (!existing)
      throw new Error(`Prompt template "${id}" not found`);
    const now = new Date;
    const content = input.content ?? existing.content;
    const updated = {
      ...existing,
      name: input.name ?? existing.name,
      content,
      description: input.description ?? existing.description,
      version: existing.version + 1,
      variables: extractVariables(content),
      updatedAt: now
    };
    this.templates.set(id, updated);
    this.saveVersion(updated);
    return { ...updated };
  }
  // Remove a template and its entire version history; true when the latest
  // entry actually existed.
  async delete(id) {
    const existed = this.templates.delete(id);
    this.history.delete(id);
    return existed;
  }
  // Record a per-version snapshot (shallow copy) in the history map.
  saveVersion(template) {
    if (!this.history.has(template.id)) {
      this.history.set(template.id, new Map);
    }
    this.history.get(template.id).set(template.version, { ...template });
  }
}
|
|
14401
|
+
|
|
14402
|
+
// src/prompt/prompt-module.ts
|
|
14403
|
+
// src/prompt/prompt-module.ts
// DI module wiring for the prompt subsystem.
class PromptModule {
  // Builds the module's provider list (defaulting the store to an in-memory
  // implementation) and merges it into this class's module metadata.
  static forRoot(options = {}) {
    const resolvedOptions = {
      ...options,
      store: options.store ?? new InMemoryPromptStore
    };
    const service = new PromptService(resolvedOptions);
    const extraProviders = [
      { provide: PROMPT_OPTIONS_TOKEN, useValue: resolvedOptions },
      { provide: PROMPT_SERVICE_TOKEN, useValue: service },
      PromptService
    ];
    const current = Reflect.getMetadata(MODULE_METADATA_KEY, PromptModule) || {};
    const merged = {
      ...current,
      providers: [...current.providers || [], ...extraProviders],
      exports: [...current.exports || [], PROMPT_SERVICE_TOKEN, PromptService]
    };
    Reflect.defineMetadata(MODULE_METADATA_KEY, merged, PromptModule);
    return PromptModule;
  }
  // Clears the accumulated module metadata (used between tests).
  static reset() {
    Reflect.deleteMetadata(MODULE_METADATA_KEY, PromptModule);
  }
}
|
|
14431
|
+
// Bundler-emitted decorator application: registers PromptModule as a
// @Module with an empty provider list (forRoot fills providers later).
PromptModule = __legacyDecorateClassTS([
  Module({ providers: [] })
], PromptModule);
|
|
14434
|
+
// src/prompt/stores/file-store.ts
|
|
14435
|
+
// src/prompt/stores/file-store.ts
// PromptStore persisted as one JSON file per template under `promptsDir`,
// with an InMemoryPromptStore as the authoritative in-process cache.
// All file I/O is best-effort: failures are swallowed by design.
class FilePromptStore {
  promptsDir;
  memory;
  loaded = false;
  constructor(config = {}) {
    this.promptsDir = config.promptsDir ?? "./.prompts";
    this.memory = new InMemoryPromptStore;
  }
  // Lazily loads from disk, then delegates to the in-memory cache.
  async get(id) {
    await this.ensureLoaded();
    return this.memory.get(id);
  }
  async getVersion(id, version) {
    await this.ensureLoaded();
    return this.memory.getVersion(id, version);
  }
  async list() {
    await this.ensureLoaded();
    return this.memory.list();
  }
  // Creates in memory first, then persists the new template to disk.
  async create(input) {
    await this.ensureLoaded();
    const template = await this.memory.create(input);
    await this.writeFile(template);
    return template;
  }
  async update(id, input) {
    await this.ensureLoaded();
    const template = await this.memory.update(id, input);
    await this.writeFile(template);
    return template;
  }
  // Deletes from memory and best-effort truncates the on-disk file.
  async delete(id) {
    await this.ensureLoaded();
    const deleted = await this.memory.delete(id);
    if (deleted) {
      try {
        const path = `${this.promptsDir}/${id}.json`;
        // Fix: the original `await Bun.file(path).exists() && Bun.write(path, "")`
        // never awaited the write, so write errors escaped this try/catch as
        // unhandled rejections and delete() could resolve before truncation.
        if (await Bun.file(path).exists()) {
          await Bun.write(path, "");
        }
      } catch {}
    }
    return deleted;
  }
  // One-shot load of every *.json file in promptsDir into the memory store.
  // `loaded` is set before reading so concurrent callers do not double-load;
  // unreadable/invalid files are skipped silently.
  async ensureLoaded() {
    if (this.loaded)
      return;
    this.loaded = true;
    try {
      const glob = new Bun.Glob("*.json");
      const files = Array.from(glob.scanSync(this.promptsDir));
      for (const file of files) {
        try {
          const content = await Bun.file(`${this.promptsDir}/${file}`).text();
          // Empty files mark deleted templates (see delete()).
          if (!content.trim())
            continue;
          const data = JSON.parse(content);
          // Fall back to the filename when the JSON carries no id.
          const id = data.id ?? file.replace(/\.json$/, "");
          await this.memory.create({ id, ...data }).catch(() => {});
        } catch {}
      }
    } catch {}
  }
  // Best-effort persistence of the latest template state (no version history
  // is written; only the current content survives a restart).
  async writeFile(template) {
    try {
      const content = JSON.stringify({
        id: template.id,
        name: template.name,
        content: template.content,
        description: template.description,
        variables: extractVariables(template.content)
      }, null, 2);
      await Bun.write(`${this.promptsDir}/${template.id}.json`, content);
    } catch {}
  }
}
|
|
14510
|
+
// src/embedding/types.ts
// DI tokens for the embedding module (unique symbols, so lookups cannot collide).
var EMBEDDING_SERVICE_TOKEN = Symbol("@dangao/bun-server:embedding:service");
var EMBEDDING_OPTIONS_TOKEN = Symbol("@dangao/bun-server:embedding:options");

// src/embedding/service.ts
// Bundler-generated lazy-init calls. The duplicate call is emitted by the
// bundler; presumably init_decorators is idempotent — TODO confirm against
// the generated init_decorators implementation.
init_decorators();
init_decorators();
|
|
14516
|
+
// Thin facade over a configured embedding provider that adds batching.
class EmbeddingService2 {
  provider;
  batchSize;
  // options.provider carries { provider: <class>, config } — the class is
  // instantiated here with its config. Batch size defaults to 100.
  constructor(options) {
    const { provider: ProviderClass, config } = options.provider;
    this.provider = new ProviderClass(config);
    this.batchSize = options.batchSize ?? 100;
  }
  // Embeds a single text via the underlying provider.
  async embed(text) {
    return this.provider.embed(text);
  }
  // Embeds texts in sequential slices of `batchSize`, preserving input order.
  async embedBatch(texts) {
    const collected = [];
    let offset = 0;
    while (offset < texts.length) {
      const slice = texts.slice(offset, offset + this.batchSize);
      collected.push(...await this.provider.embedBatch(slice));
      offset += this.batchSize;
    }
    return collected;
  }
  // Vector dimensionality reported by the provider.
  get dimensions() {
    return this.provider.dimensions;
  }
  // Human-readable provider identifier.
  get providerName() {
    return this.provider.name;
  }
}
|
|
14542
|
+
// Bundler-emitted decorator application for EmbeddingService2:
// @Injectable() with EMBEDDING_OPTIONS_TOKEN injected as parameter 0.
EmbeddingService2 = __legacyDecorateClassTS([
  Injectable(),
  __legacyDecorateParamTS(0, Inject(EMBEDDING_OPTIONS_TOKEN)),
  __legacyMetadataTS("design:paramtypes", [
    typeof EmbeddingModuleOptions === "undefined" ? Object : EmbeddingModuleOptions
  ])
], EmbeddingService2);

// src/embedding/embedding-module.ts
// Bundler-generated lazy init of the module-decorator chunk.
init_module();
|
|
14551
|
+
// DI module wiring for the embedding subsystem.
class EmbeddingModule {
  // Instantiates the service eagerly and merges providers/exports into this
  // class's accumulated module metadata.
  static forRoot(options) {
    const service = new EmbeddingService2(options);
    const extraProviders = [
      { provide: EMBEDDING_OPTIONS_TOKEN, useValue: options },
      { provide: EMBEDDING_SERVICE_TOKEN, useValue: service },
      EmbeddingService2
    ];
    const current = Reflect.getMetadata(MODULE_METADATA_KEY, EmbeddingModule) || {};
    const merged = {
      ...current,
      providers: [...current.providers || [], ...extraProviders],
      exports: [...current.exports || [], EMBEDDING_SERVICE_TOKEN, EmbeddingService2]
    };
    Reflect.defineMetadata(MODULE_METADATA_KEY, merged, EmbeddingModule);
    return EmbeddingModule;
  }
  // Clears the accumulated module metadata (used between tests).
  static reset() {
    Reflect.deleteMetadata(MODULE_METADATA_KEY, EmbeddingModule);
  }
}
|
|
14575
|
+
// Bundler-emitted decorator application: registers EmbeddingModule as a
// @Module with an empty provider list (forRoot fills providers later).
EmbeddingModule = __legacyDecorateClassTS([
  Module({ providers: [] })
], EmbeddingModule);
|
|
14578
|
+
// src/embedding/providers/openai-embedding-provider.ts
|
|
14579
|
+
// src/embedding/providers/openai-embedding-provider.ts
// Embedding provider backed by the OpenAI /v1/embeddings endpoint.
class OpenAIEmbeddingProvider {
  name = "openai";
  dimensions;
  apiKey;
  model;
  baseUrl;
  constructor(config) {
    this.apiKey = config.apiKey;
    this.model = config.model ?? "text-embedding-3-small";
    // Normalize a trailing slash so path concatenation stays clean.
    this.baseUrl = (config.baseUrl ?? "https://api.openai.com/v1").replace(/\/$/, "");
    // Known output sizes for the text-embedding-3 family:
    // the "large" variant produces 3072-dim vectors, otherwise 1536.
    this.dimensions = this.model.includes("large") ? 3072 : 1536;
  }
  // Single-text embedding, implemented on top of the batch call.
  async embed(text) {
    const [first] = await this.embedBatch([text]);
    return first;
  }
  // Sends all texts in one request; maps HTTP 429 to AiRateLimitError and
  // any other failure to AiProviderError carrying the response body.
  async embedBatch(texts) {
    const response = await fetch(`${this.baseUrl}/embeddings`, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        Authorization: `Bearer ${this.apiKey}`
      },
      body: JSON.stringify({ input: texts, model: this.model })
    });
    if (response.status === 429) {
      throw new AiRateLimitError("openai-embedding");
    }
    if (!response.ok) {
      throw new AiProviderError(await response.text(), "openai-embedding", response.status);
    }
    const payload = await response.json();
    return payload.data.map((entry) => entry.embedding);
  }
}
|
|
14612
|
+
// src/embedding/providers/ollama-embedding-provider.ts
|
|
14613
|
+
// src/embedding/providers/ollama-embedding-provider.ts
// Embedding provider backed by a local Ollama server (/api/embed).
class OllamaEmbeddingProvider {
  name = "ollama";
  dimensions;
  baseUrl;
  model;
  constructor(config = {}) {
    // Normalize a trailing slash so path concatenation stays clean.
    this.baseUrl = (config.baseUrl ?? "http://localhost:11434").replace(/\/$/, "");
    this.model = config.model ?? "nomic-embed-text";
    // Ollama does not report dimensionality up front; callers may override.
    this.dimensions = config.dimensions ?? 768;
  }
  // Embeds one text; throws AiProviderError on any non-OK response.
  async embed(text) {
    const response = await fetch(`${this.baseUrl}/api/embed`, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ model: this.model, input: text })
    });
    if (!response.ok) {
      throw new AiProviderError(await response.text(), "ollama-embedding", response.status);
    }
    const payload = await response.json();
    // The /api/embed response wraps results in an `embeddings` array.
    return payload.embeddings[0];
  }
  // No native batch endpoint is used: all texts are embedded concurrently.
  async embedBatch(texts) {
    const pending = [];
    for (const text of texts) {
      pending.push(this.embed(text));
    }
    return Promise.all(pending);
  }
}
|
|
14638
|
+
// src/vector-store/types.ts
// DI tokens for the vector-store module (unique symbols, so lookups cannot collide).
var VECTOR_STORE_TOKEN = Symbol("@dangao/bun-server:vector-store:store");
var VECTOR_STORE_OPTIONS_TOKEN = Symbol("@dangao/bun-server:vector-store:options");
|
|
14641
|
+
// Cosine similarity of two equal-length numeric vectors.
// Returns 0 (rather than throwing) for mismatched lengths or when either
// vector has zero magnitude.
function cosineSimilarity(a, b) {
  if (a.length !== b.length)
    return 0;
  let dotProduct = 0;
  let magnitudeA = 0;
  let magnitudeB = 0;
  for (let index = 0; index < a.length; index++) {
    const left = a[index];
    const right = b[index];
    dotProduct += left * right;
    magnitudeA += left * left;
    magnitudeB += right * right;
  }
  const denominator = Math.sqrt(magnitudeA) * Math.sqrt(magnitudeB);
  if (denominator === 0)
    return 0;
  return dotProduct / denominator;
}
|
|
14655
|
+
// src/vector-store/vector-store-module.ts
|
|
14656
|
+
init_module(); // bundler-generated lazy init of the module-decorator chunk
|
|
14657
|
+
|
|
14658
|
+
// src/vector-store/stores/memory-store.ts
|
|
14659
|
+
// src/vector-store/stores/memory-store.ts
// In-process vector store. Documents live in one Map keyed by
// "collection:id" (or bare id when no collection is given).
class MemoryVectorStore {
  documents = new Map;
  // Inserts or replaces a single document.
  async upsert(document) {
    this.documents.set(this.key(document.id, document.collection), document);
  }
  // Inserts or replaces many documents.
  async upsertBatch(documents) {
    for (const document of documents) {
      this.documents.set(this.key(document.id, document.collection), document);
    }
  }
  // Fetches a document by id (and optional collection), or null.
  async get(id, collection) {
    return this.documents.get(this.key(id, collection)) ?? null;
  }
  // Brute-force cosine-similarity search over all stored documents,
  // optionally restricted by collection and a caller-supplied predicate.
  // Results are sorted by descending score and truncated to topK.
  async search(query, options = {}) {
    const { topK = 5, minScore = 0, collection, filter: predicate } = options;
    const matches = [];
    for (const document of this.documents.values()) {
      if (collection && document.collection !== collection)
        continue;
      if (predicate && !predicate(document))
        continue;
      const score = cosineSimilarity(query, document.vector);
      if (score >= minScore)
        matches.push({ document, score });
    }
    matches.sort((left, right) => right.score - left.score);
    return matches.slice(0, topK);
  }
  // Removes a single document; returns whether it existed.
  async delete(id, collection) {
    return this.documents.delete(this.key(id, collection));
  }
  // Removes every document belonging to the given collection.
  async deleteCollection(collection) {
    const staleKeys = [];
    for (const [key, document] of this.documents.entries()) {
      if (document.collection === collection)
        staleKeys.push(key);
    }
    for (const key of staleKeys) {
      this.documents.delete(key);
    }
  }
  // Counts all documents, or only those in the given collection.
  async count(collection) {
    if (!collection)
      return this.documents.size;
    let total = 0;
    for (const document of this.documents.values()) {
      if (document.collection === collection)
        total += 1;
    }
    return total;
  }
  // Composite map key; collection-less documents use the bare id.
  key(id, collection) {
    return collection ? `${collection}:${id}` : id;
  }
}
|
|
14711
|
+
|
|
14712
|
+
// src/vector-store/vector-store-module.ts
|
|
14713
|
+
// src/vector-store/vector-store-module.ts
// DI module wiring for the vector-store subsystem.
class VectorStoreModule {
  // Defaults the store to an in-memory implementation and merges
  // providers/exports into this class's accumulated module metadata.
  static forRoot(options = {}) {
    const store = options.store ?? new MemoryVectorStore;
    const extraProviders = [
      { provide: VECTOR_STORE_OPTIONS_TOKEN, useValue: options },
      { provide: VECTOR_STORE_TOKEN, useValue: store }
    ];
    const current = Reflect.getMetadata(MODULE_METADATA_KEY, VectorStoreModule) || {};
    const merged = {
      ...current,
      providers: [...current.providers || [], ...extraProviders],
      exports: [...current.exports || [], VECTOR_STORE_TOKEN]
    };
    Reflect.defineMetadata(MODULE_METADATA_KEY, merged, VectorStoreModule);
    return VectorStoreModule;
  }
  // Clears the accumulated module metadata (used between tests).
  static reset() {
    Reflect.deleteMetadata(MODULE_METADATA_KEY, VectorStoreModule);
  }
}
|
|
14735
|
+
// Bundler-emitted decorator application: registers VectorStoreModule as a
// @Module with an empty provider list (forRoot fills providers later).
VectorStoreModule = __legacyDecorateClassTS([
  Module({ providers: [] })
], VectorStoreModule);
|
|
14738
|
+
// src/vector-store/stores/pinecone-store.ts
|
|
14739
|
+
// src/vector-store/stores/pinecone-store.ts
// VectorStore backed by a Pinecone index over its REST API.
// Collections are emulated with a `collection` metadata field, since a
// single index/namespace is shared. All requests are best-effort: request()
// returns null on any non-OK response rather than throwing.
class PineconeVectorStore {
  apiKey;
  // Index host URL (trailing slash stripped).
  host;
  // Pinecone namespace; empty string targets the default namespace.
  namespace;
  constructor(config) {
    this.apiKey = config.apiKey;
    this.host = config.host.replace(/\/$/, "");
    this.namespace = config.namespace ?? "";
  }
  // Single-document upsert via the batch path.
  async upsert(document) {
    await this.upsertBatch([document]);
  }
  // Maps documents to Pinecone's {id, values, metadata} shape; content and
  // collection are folded into metadata so they survive round-trips.
  // NOTE(review): d.metadata is spread last, so a caller-supplied "content"
  // or "collection" metadata key would shadow the real values — presumably
  // intentional; confirm.
  async upsertBatch(documents) {
    const vectors = documents.map((d) => ({
      id: d.id,
      values: d.vector,
      metadata: { content: d.content, collection: d.collection ?? "", ...d.metadata }
    }));
    await this.request("/vectors/upsert", "POST", { vectors, namespace: this.namespace });
  }
  // Fetches one vector by id; returns null when missing or on request failure.
  async get(id) {
    const res = await this.request(`/vectors/fetch?ids=${encodeURIComponent(id)}&namespace=${this.namespace}`, "GET");
    const vector = res?.["vectors"]?.[id];
    if (!vector)
      return null;
    const metadata = vector["metadata"] ?? {};
    return {
      id,
      vector: vector["values"],
      content: metadata["content"] ?? "",
      // Empty-string collection (stored for collection-less docs) maps back to undefined.
      collection: metadata["collection"] || undefined,
      metadata
    };
  }
  // Similarity query. minScore from options is NOT applied here (Pinecone
  // query has no score threshold in this payload); returned documents carry
  // an empty `vector` because values are not requested.
  async search(query, options = {}) {
    const { topK = 5, collection } = options;
    const filter2 = collection ? { collection: { $eq: collection } } : undefined;
    const res = await this.request("/query", "POST", {
      vector: query,
      topK,
      includeMetadata: true,
      namespace: this.namespace,
      filter: filter2
    });
    return (res?.["matches"] ?? []).map((match) => {
      const metadata = match["metadata"] ?? {};
      return {
        document: {
          id: match["id"],
          vector: [],
          content: metadata["content"] ?? "",
          collection: metadata["collection"] || undefined,
          metadata
        },
        score: match["score"]
      };
    });
  }
  // Deletes by id. Always reports true — request failures are swallowed
  // by request(), so success is not actually verified.
  async delete(id) {
    await this.request("/vectors/delete", "POST", { ids: [id], namespace: this.namespace });
    return true;
  }
  // Deletes every vector whose metadata matches the emulated collection.
  async deleteCollection(collection) {
    await this.request("/vectors/delete", "POST", {
      deleteAll: false,
      namespace: this.namespace,
      filter: { collection: { $eq: collection } }
    });
  }
  // Total vectors in this namespace per index stats; 0 when unavailable.
  // NOTE(review): reads `vectorCount` from the stats response — confirm the
  // field casing against the deployed Pinecone API version.
  async count() {
    const res = await this.request("/describe_index_stats", "POST", {});
    const ns = res?.["namespaces"]?.[this.namespace];
    return ns?.vectorCount ?? 0;
  }
  // Shared HTTP helper; returns the parsed JSON body, or null on any
  // non-OK status (errors are deliberately not surfaced to callers).
  async request(path, method, body) {
    const res = await fetch(`${this.host}${path}`, {
      method,
      headers: {
        "Api-Key": this.apiKey,
        "Content-Type": "application/json"
      },
      body: body !== undefined ? JSON.stringify(body) : undefined
    });
    if (!res.ok)
      return null;
    return res.json();
  }
}
|
|
14827
|
+
// src/vector-store/stores/qdrant-store.ts
|
|
14828
|
+
// src/vector-store/stores/qdrant-store.ts
// VectorStore backed by a Qdrant collection over its REST API.
// Qdrant point ids here are numeric, so string ids are converted with
// parseInt; the original string id is preserved in payload.original_id.
// Logical "collections" are emulated with a `collection` payload field.
class QdrantVectorStore {
  url;
  collectionName;
  apiKey;
  constructor(config) {
    this.url = (config.url ?? "http://localhost:6333").replace(/\/$/, "");
    this.collectionName = config.collectionName;
    this.apiKey = config.apiKey;
  }
  // Single-document upsert via the batch path.
  async upsert(document) {
    await this.upsertBatch([document]);
  }
  // Maps documents to Qdrant points. Non-numeric ids fall back to the batch
  // index, which can collide across batches — kept as-is for compatibility.
  async upsertBatch(documents) {
    const points = documents.map((d, idx) => ({
      id: this.toNumericId(d.id) ?? idx,
      vector: d.vector,
      payload: {
        original_id: d.id,
        content: d.content,
        collection: d.collection ?? "",
        ...d.metadata ?? {}
      }
    }));
    await this.request(`/collections/${this.collectionName}/points`, "PUT", { points });
  }
  // Fetches one point by id; null when the id is non-numeric, missing,
  // or the request fails.
  async get(id) {
    const numId = this.toNumericId(id);
    // Fix: was `if (!numId)`, which rejected the valid numeric id "0"
    // (parseInt("0") === 0 is falsy). Only null means "not numeric".
    if (numId === null)
      return null;
    const res = await this.request(`/collections/${this.collectionName}/points/${numId}`, "GET");
    const point = res?.["result"];
    if (!point)
      return null;
    const payload = point["payload"] ?? {};
    return {
      id: payload["original_id"] ?? id,
      vector: point["vector"] ?? [],
      content: payload["content"] ?? "",
      // Empty-string collection (stored for collection-less docs) maps back to undefined.
      collection: payload["collection"] || undefined,
      metadata: payload
    };
  }
  // Similarity search; optionally filtered to one emulated collection.
  // Returned documents carry an empty `vector` (payload only is requested).
  async search(query, options = {}) {
    const { topK = 5, minScore = 0, collection } = options;
    const body = {
      vector: query,
      limit: topK,
      with_payload: true,
      score_threshold: minScore
    };
    if (collection) {
      body["filter"] = { must: [{ key: "collection", match: { value: collection } }] };
    }
    const res = await this.request(`/collections/${this.collectionName}/points/search`, "POST", body);
    return (res?.["result"] ?? []).map((hit) => {
      const payload = hit["payload"] ?? {};
      return {
        document: {
          id: payload["original_id"] ?? String(hit["id"]),
          vector: [],
          content: payload["content"] ?? "",
          collection: payload["collection"] || undefined,
          metadata: payload
        },
        score: hit["score"]
      };
    });
  }
  // Deletes one point by id; false when the id is non-numeric. Request
  // failures are swallowed by request(), so true is not a delivery guarantee.
  async delete(id) {
    const numId = this.toNumericId(id);
    // Fix: same falsy-zero bug as get() — id "0" was reported as not deleted.
    if (numId === null)
      return false;
    await this.request(`/collections/${this.collectionName}/points/delete`, "POST", { points: [numId] });
    return true;
  }
  // Deletes every point whose payload matches the emulated collection.
  async deleteCollection(collection) {
    await this.request(`/collections/${this.collectionName}/points/delete`, "POST", { filter: { must: [{ key: "collection", match: { value: collection } }] } });
  }
  // Total points in the Qdrant collection; 0 when unavailable.
  async count() {
    const res = await this.request(`/collections/${this.collectionName}/points/count`, "POST", {});
    return res?.["result"]?.count ?? 0;
  }
  // parseInt-based conversion; returns null for non-numeric ids.
  // NOTE(review): parseInt("12abc") === 12, so mixed ids are accepted
  // leniently — kept for backward compatibility.
  toNumericId(id) {
    const n = parseInt(id, 10);
    return isNaN(n) ? null : n;
  }
  // Shared HTTP helper; returns parsed JSON, or null on any non-OK status.
  async request(path, method, body) {
    const headers = { "Content-Type": "application/json" };
    if (this.apiKey)
      headers["api-key"] = this.apiKey;
    const res = await fetch(`${this.url}${path}`, {
      method,
      headers,
      body: body !== undefined ? JSON.stringify(body) : undefined
    });
    if (!res.ok)
      return null;
    return res.json();
  }
}
|
|
14928
|
+
// src/rag/types.ts
// DI tokens for the RAG module (unique symbols, so lookups cannot collide).
var RAG_SERVICE_TOKEN = Symbol("@dangao/bun-server:rag:service");
var RAG_OPTIONS_TOKEN = Symbol("@dangao/bun-server:rag:options");

// src/rag/service.ts
// Bundler-generated lazy-init calls; the duplicate is emitted by the bundler
// and presumably harmless (init_decorators guards itself — TODO confirm).
init_decorators();
init_decorators();
|
|
14934
|
+
|
|
14935
|
+
// src/rag/chunkers/text-chunker.ts
|
|
14936
|
+
// src/rag/chunkers/text-chunker.ts
// Fixed-window text chunker: emits chunkSize-character slices that overlap
// by chunkOverlap characters. Whitespace-only slices are dropped.
class TextChunker {
  chunkSize;
  chunkOverlap;
  constructor(chunkSize = 512, chunkOverlap = 50) {
    this.chunkSize = chunkSize;
    this.chunkOverlap = chunkOverlap;
  }
  // Splits `text` into overlapping chunks ({ content } objects).
  chunk(text) {
    const chunks = [];
    // Fix: when chunkOverlap >= chunkSize the raw step is <= 0 and the loop
    // never advances (infinite loop / OOM). Clamp the stride to at least 1;
    // behavior is unchanged for all valid configurations (overlap < size).
    const step = Math.max(1, this.chunkSize - this.chunkOverlap);
    for (let start = 0; start < text.length; start += step) {
      const end = Math.min(start + this.chunkSize, text.length);
      const content = text.slice(start, end).trim();
      if (content.length > 0) {
        chunks.push({ content });
      }
      // The final window reached the end of the text — stop.
      if (end >= text.length)
        break;
    }
    return chunks;
  }
}
|
|
14958
|
+
|
|
14959
|
+
// src/rag/chunkers/markdown-chunker.ts
|
|
14960
|
+
// src/rag/chunkers/markdown-chunker.ts
// Structure-aware chunker for markdown: splits on h1–h3 headings first,
// then on blank-line paragraph boundaries when a section is too large.
class MarkdownChunker {
  maxChunkSize;
  constructor(maxChunkSize = 1024) {
    this.maxChunkSize = maxChunkSize;
  }
  // Splits `text` into chunks ({ content } objects) of at most
  // maxChunkSize characters where paragraph boundaries allow it.
  chunk(text) {
    const result = [];
    // Lookahead split keeps each heading attached to its own section.
    const sections = text.split(/(?=^#{1,3} )/m).filter((section) => section.trim());
    for (const section of sections) {
      if (section.length <= this.maxChunkSize) {
        result.push({ content: section.trim() });
        continue;
      }
      // Oversized section: re-accumulate paragraph by paragraph.
      let buffer = "";
      const paragraphs = section.split(/\n\n+/).filter((paragraph) => paragraph.trim());
      for (const paragraph of paragraphs) {
        const candidate = buffer ? buffer + "\n\n" + paragraph : paragraph;
        if (candidate.length > this.maxChunkSize && buffer) {
          // Flush the full buffer and start a new chunk with this paragraph.
          result.push({ content: buffer.trim() });
          buffer = paragraph;
        } else {
          buffer = candidate;
        }
      }
      if (buffer.trim())
        result.push({ content: buffer.trim() });
    }
    // Degenerate input (no usable sections) falls back to one trimmed chunk.
    return result.length > 0 ? result : [{ content: text.trim() }];
  }
}
|
|
14993
|
+
|
|
14994
|
+
// src/rag/service.ts
|
|
14995
|
+
// src/rag/service.ts
// Retrieval-augmented-generation pipeline: ingest (chunk + embed + store)
// and retrieve (embed query + similarity search + format context).
class RagService {
  options;
  embeddingService;
  vectorStore;
  // Chunker for plain text/URL sources.
  textChunker;
  // Chunker for .md/.mdx file sources.
  markdownChunker;
  constructor(options, embeddingService, vectorStore) {
    this.options = options;
    this.embeddingService = embeddingService;
    this.vectorStore = vectorStore;
    this.textChunker = new TextChunker(options.chunkSize, options.chunkOverlap);
    this.markdownChunker = new MarkdownChunker(options.chunkSize);
  }
  // Loads a source, chunks it, embeds all chunks in one batch, and upserts
  // each chunk (with provenance metadata) into the vector store.
  // Returns the number of chunks ingested (0 for blank sources).
  // Assumes embedBatch preserves input order so vectors[i] matches chunks[i].
  async ingest(source, collection) {
    const targetCollection = collection ?? this.options.collection ?? "rag";
    const text = await this.loadText(source);
    if (!text.trim())
      return 0;
    // Only file sources with a markdown extension get the structure-aware chunker.
    const isMarkdown = source.type === "file" && (source.path.endsWith(".md") || source.path.endsWith(".mdx"));
    const chunker = isMarkdown ? this.markdownChunker : this.textChunker;
    const chunks = chunker.chunk(text);
    const vectors = await this.embeddingService.embedBatch(chunks.map((c) => c.content));
    // Upserts run sequentially, one await per chunk.
    for (let i = 0;i < chunks.length; i++) {
      const chunk = chunks[i];
      const vector = vectors[i];
      await this.vectorStore.upsert({
        id: crypto.randomUUID(),
        vector,
        content: chunk.content,
        collection: targetCollection,
        metadata: {
          // Later spreads win: source metadata overrides chunk metadata,
          // and the provenance fields below override both.
          ...chunk.metadata ?? {},
          ...source.metadata ?? {},
          sourceType: source.type,
          ...source.type === "file" ? { sourcePath: source.path } : {},
          ...source.type === "url" ? { sourceUrl: source.url } : {}
        }
      });
    }
    return chunks.length;
  }
  // Embeds the query and returns the top matching chunks plus a
  // "[1] ...\n\n[2] ..." formatted string ("" when nothing matched).
  async retrieve(query, collection) {
    const targetCollection = collection ?? this.options.collection ?? "rag";
    const queryVector = await this.embeddingService.embed(query);
    const results = await this.vectorStore.search(queryVector, {
      topK: this.options.topK ?? 5,
      minScore: this.options.minScore ?? 0.5,
      collection: targetCollection
    });
    const chunks = results.map((r) => ({
      content: r.document.content,
      score: r.score,
      metadata: r.document.metadata
    }));
    const formatted = chunks.length > 0 ? chunks.map((c, i) => `[${i + 1}] ${c.content}`).join(`

`) : "";
    return { chunks, formatted };
  }
  // Convenience wrapper: returns a ready-to-use context prompt,
  // or "" when retrieval found nothing.
  async buildContextPrompt(query, collection) {
    const context2 = await this.retrieve(query, collection);
    if (!context2.formatted)
      return "";
    return `Use the following context to answer the question:

${context2.formatted}`;
  }
  // Resolves a source descriptor to raw text: inline text, a local file
  // (read via Bun), or a fetched URL (throws on non-OK responses).
  async loadText(source) {
    switch (source.type) {
      case "text":
        return source.content;
      case "file": {
        const file = Bun.file(source.path);
        return file.text();
      }
      case "url": {
        const res = await fetch(source.url);
        if (!res.ok)
          throw new Error(`Failed to fetch ${source.url}: ${res.status}`);
        return res.text();
      }
    }
  }
}
|
|
15079
|
+
// Bundler-emitted decorator application for RagService: @Injectable() with
// options, embedding service, and vector store injected by token into
// constructor parameters 0..2.
RagService = __legacyDecorateClassTS([
  Injectable(),
  __legacyDecorateParamTS(0, Inject(RAG_OPTIONS_TOKEN)),
  __legacyDecorateParamTS(1, Inject(EMBEDDING_SERVICE_TOKEN)),
  __legacyDecorateParamTS(2, Inject(VECTOR_STORE_TOKEN)),
  __legacyMetadataTS("design:paramtypes", [
    typeof RagModuleOptions === "undefined" ? Object : RagModuleOptions,
    typeof EmbeddingService === "undefined" ? Object : EmbeddingService,
    typeof VectorStore === "undefined" ? Object : VectorStore
  ])
], RagService);
// src/rag/decorators.ts
// Metadata key under which @Rag() stores its per-handler options.
var RAG_METADATA_KEY = "@dangao/bun-server:rag:collection";
|
|
15092
|
+
// Method decorator that tags a handler with RAG options (e.g. collection)
// under RAG_METADATA_KEY for later discovery via reflect-metadata.
function Rag(options = {}) {
  const decorator = (target, propertyKey) => {
    Reflect.defineMetadata(RAG_METADATA_KEY, options, target, propertyKey);
  };
  return decorator;
}
|
|
15097
|
+
// src/rag/rag-module.ts
init_module(); // bundler-generated lazy init of the module-decorator chunk
|
|
15099
|
+
// DI module wiring for the RAG subsystem. Depends on the embedding and
// vector-store modules, which are added to this module's imports.
class RagModule {
  // Applies option defaults, registers a factory that resolves the service's
  // dependencies from the container, and merges everything into this class's
  // accumulated module metadata.
  static forRoot(options = {}) {
    const resolvedOptions = {
      collection: "rag",
      chunkSize: 512,
      chunkOverlap: 50,
      topK: 5,
      minScore: 0.5,
      ...options
    };
    const extraProviders = [
      { provide: RAG_OPTIONS_TOKEN, useValue: resolvedOptions },
      {
        provide: RAG_SERVICE_TOKEN,
        // Lazily constructed so the embedding service and vector store are
        // resolved from the container at instantiation time.
        useFactory: (container) => {
          const embeddingService = container.resolve(EMBEDDING_SERVICE_TOKEN);
          const vectorStore = container.resolve(VECTOR_STORE_TOKEN);
          return new RagService(resolvedOptions, embeddingService, vectorStore);
        }
      },
      RagService
    ];
    const current = Reflect.getMetadata(MODULE_METADATA_KEY, RagModule) || {};
    const merged = {
      ...current,
      imports: [...current.imports || [], EmbeddingModule, VectorStoreModule],
      providers: [...current.providers || [], ...extraProviders],
      exports: [...current.exports || [], RAG_SERVICE_TOKEN, RagService]
    };
    Reflect.defineMetadata(MODULE_METADATA_KEY, merged, RagModule);
    return RagModule;
  }
  // Clears the accumulated module metadata (used between tests).
  static reset() {
    Reflect.deleteMetadata(MODULE_METADATA_KEY, RagModule);
  }
}
|
|
15142
|
+
// Bundler-emitted decorator application: registers RagModule as a @Module
// with its static imports; forRoot fills providers later.
RagModule = __legacyDecorateClassTS([
  Module({
    imports: [EmbeddingModule, VectorStoreModule],
    providers: []
  })
], RagModule);
// src/mcp/types.ts
// DI tokens and metadata keys for the MCP (Model Context Protocol) module.
var MCP_SERVER_TOKEN = Symbol("@dangao/bun-server:mcp:server");
var MCP_OPTIONS_TOKEN = Symbol("@dangao/bun-server:mcp:options");
var MCP_TOOL_METADATA_KEY = "@dangao/bun-server:mcp:tool";
var MCP_RESOURCE_METADATA_KEY = "@dangao/bun-server:mcp:resource";
|
|
15153
|
+
// src/mcp/decorators.ts
|
|
15154
|
+
// Method decorator that registers an MCP tool definition on the handler,
// for later discovery by McpRegistry.scan().
function McpTool(definition) {
  const attach = (target, propertyKey) => {
    Reflect.defineMetadata(MCP_TOOL_METADATA_KEY, definition, target, propertyKey);
  };
  return attach;
}
|
|
15159
|
+
function McpResource(definition) {
|
|
15160
|
+
return (target, propertyKey) => {
|
|
15161
|
+
Reflect.defineMetadata(MCP_RESOURCE_METADATA_KEY, definition, target, propertyKey);
|
|
15162
|
+
};
|
|
15163
|
+
}
|
|
15164
|
+
function McpParam(name) {
|
|
15165
|
+
return (target, propertyKey, parameterIndex) => {
|
|
15166
|
+
const existing = Reflect.getMetadata("mcp:params", target, propertyKey) ?? [];
|
|
15167
|
+
existing.push({ index: parameterIndex, name });
|
|
15168
|
+
Reflect.defineMetadata("mcp:params", existing, target, propertyKey);
|
|
15169
|
+
};
|
|
15170
|
+
}
|
|
15171
|
+
// src/mcp/registry.ts
|
|
15172
|
+
class McpRegistry {
|
|
15173
|
+
tools = new Map;
|
|
15174
|
+
resources = new Map;
|
|
15175
|
+
scan(instance) {
|
|
15176
|
+
const proto = Object.getPrototypeOf(instance);
|
|
15177
|
+
const methodNames = Object.getOwnPropertyNames(proto).filter((key) => key !== "constructor");
|
|
15178
|
+
for (const methodName of methodNames) {
|
|
15179
|
+
const toolDef = Reflect.getMetadata(MCP_TOOL_METADATA_KEY, proto, methodName);
|
|
15180
|
+
if (toolDef) {
|
|
15181
|
+
const method = instance[methodName];
|
|
15182
|
+
this.tools.set(toolDef.name, {
|
|
15183
|
+
...toolDef,
|
|
15184
|
+
execute: (args) => method.call(instance, args)
|
|
15185
|
+
});
|
|
15186
|
+
}
|
|
15187
|
+
const resourceDef = Reflect.getMetadata(MCP_RESOURCE_METADATA_KEY, proto, methodName);
|
|
15188
|
+
if (resourceDef) {
|
|
15189
|
+
const method = instance[methodName];
|
|
15190
|
+
this.resources.set(resourceDef.uri, {
|
|
15191
|
+
...resourceDef,
|
|
15192
|
+
read: (params) => method.call(instance, params)
|
|
15193
|
+
});
|
|
15194
|
+
}
|
|
15195
|
+
}
|
|
15196
|
+
}
|
|
15197
|
+
getTools() {
|
|
15198
|
+
return Array.from(this.tools.values());
|
|
15199
|
+
}
|
|
15200
|
+
getResources() {
|
|
15201
|
+
return Array.from(this.resources.values());
|
|
15202
|
+
}
|
|
15203
|
+
getTool(name) {
|
|
15204
|
+
return this.tools.get(name);
|
|
15205
|
+
}
|
|
15206
|
+
getResource(uri) {
|
|
15207
|
+
return this.resources.get(uri);
|
|
15208
|
+
}
|
|
15209
|
+
}
|
|
15210
|
+
// src/mcp/server.ts
|
|
15211
|
+
class McpServer {
|
|
15212
|
+
registry;
|
|
15213
|
+
serverInfo;
|
|
15214
|
+
constructor(registry, serverInfo) {
|
|
15215
|
+
this.registry = registry;
|
|
15216
|
+
this.serverInfo = serverInfo;
|
|
15217
|
+
}
|
|
15218
|
+
async handle(request) {
|
|
15219
|
+
try {
|
|
15220
|
+
const result = await this.dispatch(request.method, request.params);
|
|
15221
|
+
return { jsonrpc: "2.0", id: request.id ?? null, result };
|
|
15222
|
+
} catch (err) {
|
|
15223
|
+
return {
|
|
15224
|
+
jsonrpc: "2.0",
|
|
15225
|
+
id: request.id ?? null,
|
|
15226
|
+
error: {
|
|
15227
|
+
code: err instanceof McpError ? err.code : -32603,
|
|
15228
|
+
message: err instanceof Error ? err.message : "Internal error"
|
|
15229
|
+
}
|
|
15230
|
+
};
|
|
15231
|
+
}
|
|
15232
|
+
}
|
|
15233
|
+
async handleHttp(req) {
|
|
15234
|
+
const body = await req.json();
|
|
15235
|
+
const response = await this.handle(body);
|
|
15236
|
+
return new Response(JSON.stringify(response), {
|
|
15237
|
+
headers: { "Content-Type": "application/json" }
|
|
15238
|
+
});
|
|
15239
|
+
}
|
|
15240
|
+
createSseResponse() {
|
|
15241
|
+
const registry = this.registry;
|
|
15242
|
+
const serverInfo = this.serverInfo;
|
|
15243
|
+
const encoder = new TextEncoder;
|
|
15244
|
+
const stream = new ReadableStream({
|
|
15245
|
+
start(controller2) {
|
|
15246
|
+
const initEvent = `event: endpoint
|
|
15247
|
+
data: ${JSON.stringify({
|
|
15248
|
+
type: "endpoint",
|
|
15249
|
+
method: "POST"
|
|
15250
|
+
})}
|
|
15251
|
+
|
|
15252
|
+
`;
|
|
15253
|
+
controller2.enqueue(encoder.encode(initEvent));
|
|
15254
|
+
const pingInterval = setInterval(() => {
|
|
15255
|
+
try {
|
|
15256
|
+
controller2.enqueue(encoder.encode(`: ping
|
|
15257
|
+
|
|
15258
|
+
`));
|
|
15259
|
+
} catch {
|
|
15260
|
+
clearInterval(pingInterval);
|
|
15261
|
+
}
|
|
15262
|
+
}, 15000);
|
|
15263
|
+
}
|
|
15264
|
+
});
|
|
15265
|
+
return new Response(stream, {
|
|
15266
|
+
headers: {
|
|
15267
|
+
"Content-Type": "text/event-stream",
|
|
15268
|
+
"Cache-Control": "no-cache",
|
|
15269
|
+
Connection: "keep-alive",
|
|
15270
|
+
"X-MCP-Server": `${serverInfo.name}/${serverInfo.version}`
|
|
15271
|
+
}
|
|
15272
|
+
});
|
|
15273
|
+
}
|
|
15274
|
+
async dispatch(method, params) {
|
|
15275
|
+
switch (method) {
|
|
15276
|
+
case "initialize":
|
|
15277
|
+
return {
|
|
15278
|
+
protocolVersion: "2024-11-05",
|
|
15279
|
+
capabilities: { tools: {}, resources: {} },
|
|
15280
|
+
serverInfo: this.serverInfo
|
|
15281
|
+
};
|
|
15282
|
+
case "tools/list":
|
|
15283
|
+
return {
|
|
15284
|
+
tools: this.registry.getTools().map((t) => ({
|
|
15285
|
+
name: t.name,
|
|
15286
|
+
description: t.description,
|
|
15287
|
+
inputSchema: t.inputSchema
|
|
15288
|
+
}))
|
|
15289
|
+
};
|
|
15290
|
+
case "tools/call": {
|
|
15291
|
+
const { name, arguments: args = {} } = params;
|
|
15292
|
+
const tool = this.registry.getTool(name);
|
|
15293
|
+
if (!tool)
|
|
15294
|
+
throw new McpError(-32601, `Tool "${name}" not found`);
|
|
15295
|
+
const result = await tool.execute(args);
|
|
15296
|
+
return {
|
|
15297
|
+
content: [
|
|
15298
|
+
{
|
|
15299
|
+
type: "text",
|
|
15300
|
+
text: typeof result === "string" ? result : JSON.stringify(result, null, 2)
|
|
15301
|
+
}
|
|
15302
|
+
]
|
|
15303
|
+
};
|
|
15304
|
+
}
|
|
15305
|
+
case "resources/list":
|
|
15306
|
+
return {
|
|
15307
|
+
resources: this.registry.getResources().map((r) => ({
|
|
15308
|
+
uri: r.uri,
|
|
15309
|
+
name: r.name,
|
|
15310
|
+
description: r.description,
|
|
15311
|
+
mimeType: r.mimeType
|
|
15312
|
+
}))
|
|
15313
|
+
};
|
|
15314
|
+
case "resources/read": {
|
|
15315
|
+
const { uri } = params;
|
|
15316
|
+
const resource = this.registry.getResource(uri);
|
|
15317
|
+
if (!resource)
|
|
15318
|
+
throw new McpError(-32601, `Resource "${uri}" not found`);
|
|
15319
|
+
const content = await resource.read({});
|
|
15320
|
+
return {
|
|
15321
|
+
contents: [
|
|
15322
|
+
{
|
|
15323
|
+
uri,
|
|
15324
|
+
mimeType: resource.mimeType ?? "application/json",
|
|
15325
|
+
text: typeof content === "string" ? content : JSON.stringify(content)
|
|
15326
|
+
}
|
|
15327
|
+
]
|
|
15328
|
+
};
|
|
15329
|
+
}
|
|
15330
|
+
case "ping":
|
|
15331
|
+
return {};
|
|
15332
|
+
default:
|
|
15333
|
+
throw new McpError(-32601, `Method "${method}" not found`);
|
|
15334
|
+
}
|
|
15335
|
+
}
|
|
15336
|
+
}
|
|
15337
|
+
|
|
15338
|
+
class McpError extends Error {
|
|
15339
|
+
code;
|
|
15340
|
+
constructor(code, message) {
|
|
15341
|
+
super(message);
|
|
15342
|
+
this.code = code;
|
|
15343
|
+
}
|
|
15344
|
+
}
|
|
15345
|
+
// src/mcp/mcp-module.ts
|
|
15346
|
+
init_module();
|
|
15347
|
+
class McpModule {
|
|
15348
|
+
static forRoot(options) {
|
|
15349
|
+
const registry = new McpRegistry;
|
|
15350
|
+
const server = new McpServer(registry, options.serverInfo);
|
|
15351
|
+
const resolvedOptions = {
|
|
15352
|
+
transport: "sse",
|
|
15353
|
+
path: "/mcp",
|
|
15354
|
+
...options
|
|
15355
|
+
};
|
|
15356
|
+
const providers2 = [
|
|
15357
|
+
{ provide: MCP_OPTIONS_TOKEN, useValue: resolvedOptions },
|
|
15358
|
+
{ provide: MCP_SERVER_TOKEN, useValue: server },
|
|
15359
|
+
{ provide: McpRegistry, useValue: registry }
|
|
15360
|
+
];
|
|
15361
|
+
const existing = Reflect.getMetadata(MODULE_METADATA_KEY, McpModule) || {};
|
|
15362
|
+
Reflect.defineMetadata(MODULE_METADATA_KEY, {
|
|
15363
|
+
...existing,
|
|
15364
|
+
providers: [...existing.providers || [], ...providers2],
|
|
15365
|
+
exports: [
|
|
15366
|
+
...existing.exports || [],
|
|
15367
|
+
MCP_SERVER_TOKEN,
|
|
15368
|
+
McpRegistry
|
|
15369
|
+
]
|
|
15370
|
+
}, McpModule);
|
|
15371
|
+
return McpModule;
|
|
15372
|
+
}
|
|
15373
|
+
static reset() {
|
|
15374
|
+
Reflect.deleteMetadata(MODULE_METADATA_KEY, McpModule);
|
|
15375
|
+
}
|
|
15376
|
+
}
|
|
15377
|
+
McpModule = __legacyDecorateClassTS([
|
|
15378
|
+
Module({ providers: [] })
|
|
15379
|
+
], McpModule);
|
|
15380
|
+
// src/ai-guard/types.ts
|
|
15381
|
+
var AI_GUARD_SERVICE_TOKEN = Symbol("@dangao/bun-server:ai-guard:service");
|
|
15382
|
+
var AI_GUARD_OPTIONS_TOKEN = Symbol("@dangao/bun-server:ai-guard:options");
|
|
15383
|
+
var AI_GUARD_METADATA_KEY = "@dangao/bun-server:ai-guard:options";
|
|
15384
|
+
// src/ai-guard/decorators.ts
|
|
15385
|
+
function AiGuard(options = {}) {
|
|
15386
|
+
return (target, propertyKey) => {
|
|
15387
|
+
Reflect.defineMetadata(AI_GUARD_METADATA_KEY, options, target, propertyKey);
|
|
15388
|
+
};
|
|
15389
|
+
}
|
|
15390
|
+
// src/ai-guard/service.ts
|
|
15391
|
+
init_decorators();
|
|
15392
|
+
init_decorators();
|
|
15393
|
+
|
|
15394
|
+
// src/ai-guard/detectors/pii-detector.ts
|
|
15395
|
+
var PII_PATTERNS = [
|
|
15396
|
+
{
|
|
15397
|
+
type: "email",
|
|
15398
|
+
regex: /\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Z|a-z]{2,}\b/g,
|
|
15399
|
+
replacement: "[EMAIL]"
|
|
15400
|
+
},
|
|
15401
|
+
{
|
|
15402
|
+
type: "phone",
|
|
15403
|
+
regex: /(\+?1[-.\s]?)?\(?\d{3}\)?[-.\s]?\d{3}[-.\s]?\d{4}\b/g,
|
|
15404
|
+
replacement: "[PHONE]"
|
|
15405
|
+
},
|
|
15406
|
+
{
|
|
15407
|
+
type: "ssn",
|
|
15408
|
+
regex: /\b\d{3}-\d{2}-\d{4}\b/g,
|
|
15409
|
+
replacement: "[SSN]"
|
|
15410
|
+
},
|
|
15411
|
+
{
|
|
15412
|
+
type: "credit_card",
|
|
15413
|
+
regex: /\b(?:\d[ -]?){13,16}\b/g,
|
|
15414
|
+
replacement: "[CREDIT_CARD]"
|
|
15415
|
+
},
|
|
15416
|
+
{
|
|
15417
|
+
type: "ip_address",
|
|
15418
|
+
regex: /\b(?:\d{1,3}\.){3}\d{1,3}\b/g,
|
|
15419
|
+
replacement: "[IP_ADDRESS]"
|
|
15420
|
+
},
|
|
15421
|
+
{
|
|
15422
|
+
type: "url_with_credentials",
|
|
15423
|
+
regex: /https?:\/\/[^:@\s]+:[^:@\s]+@[^\s]+/g,
|
|
15424
|
+
replacement: "[URL_WITH_CREDENTIALS]"
|
|
15425
|
+
}
|
|
15426
|
+
];
|
|
15427
|
+
|
|
15428
|
+
class PiiDetector {
|
|
15429
|
+
detect(text, redact = true) {
|
|
15430
|
+
const foundTypes = new Set;
|
|
15431
|
+
let sanitized = text;
|
|
15432
|
+
for (const pattern of PII_PATTERNS) {
|
|
15433
|
+
if (pattern.regex.test(text)) {
|
|
15434
|
+
foundTypes.add(pattern.type);
|
|
15435
|
+
if (redact) {
|
|
15436
|
+
sanitized = sanitized.replace(pattern.regex, pattern.replacement);
|
|
15437
|
+
}
|
|
15438
|
+
}
|
|
15439
|
+
pattern.regex.lastIndex = 0;
|
|
15440
|
+
}
|
|
15441
|
+
return {
|
|
15442
|
+
detected: foundTypes.size > 0,
|
|
15443
|
+
sanitized,
|
|
15444
|
+
types: Array.from(foundTypes)
|
|
15445
|
+
};
|
|
15446
|
+
}
|
|
15447
|
+
}
|
|
15448
|
+
|
|
15449
|
+
// src/ai-guard/detectors/content-moderator.ts
|
|
15450
|
+
class ContentModerator {
|
|
15451
|
+
config;
|
|
15452
|
+
constructor(config = {}) {
|
|
15453
|
+
this.config = config;
|
|
15454
|
+
}
|
|
15455
|
+
async moderate(text) {
|
|
15456
|
+
if (this.config.moderator) {
|
|
15457
|
+
return this.config.moderator(text);
|
|
15458
|
+
}
|
|
15459
|
+
if (this.config.openaiApiKey) {
|
|
15460
|
+
return this.moderateWithOpenAI(text);
|
|
15461
|
+
}
|
|
15462
|
+
return { flagged: false, categories: {}, scores: {} };
|
|
15463
|
+
}
|
|
15464
|
+
isBlocked(result) {
|
|
15465
|
+
if (!result.flagged)
|
|
15466
|
+
return false;
|
|
15467
|
+
const blockCategories = this.config.blockCategories;
|
|
15468
|
+
if (!blockCategories || blockCategories.length === 0)
|
|
15469
|
+
return true;
|
|
15470
|
+
return blockCategories.some((cat) => result.categories[cat]);
|
|
15471
|
+
}
|
|
15472
|
+
async moderateWithOpenAI(text) {
|
|
15473
|
+
const res = await fetch("https://api.openai.com/v1/moderations", {
|
|
15474
|
+
method: "POST",
|
|
15475
|
+
headers: {
|
|
15476
|
+
"Content-Type": "application/json",
|
|
15477
|
+
Authorization: `Bearer ${this.config.openaiApiKey}`
|
|
15478
|
+
},
|
|
15479
|
+
body: JSON.stringify({ input: text })
|
|
15480
|
+
});
|
|
15481
|
+
if (!res.ok) {
|
|
15482
|
+
return { flagged: false, categories: {}, scores: {} };
|
|
15483
|
+
}
|
|
15484
|
+
const data = await res.json();
|
|
15485
|
+
const result = data.results[0];
|
|
15486
|
+
if (!result)
|
|
15487
|
+
return { flagged: false, categories: {}, scores: {} };
|
|
15488
|
+
return {
|
|
15489
|
+
flagged: result.flagged,
|
|
15490
|
+
categories: result.categories,
|
|
15491
|
+
scores: result.category_scores
|
|
15492
|
+
};
|
|
15493
|
+
}
|
|
15494
|
+
}
|
|
15495
|
+
|
|
15496
|
+
// src/ai-guard/detectors/injection-detector.ts
|
|
15497
|
+
var INJECTION_PATTERNS = [
|
|
15498
|
+
{ pattern: /ignore\s+(all\s+)?previous\s+instructions?/i, reason: "ignore_instructions", weight: 0.9 },
|
|
15499
|
+
{ pattern: /forget\s+(all\s+)?previous\s+instructions?/i, reason: "forget_instructions", weight: 0.9 },
|
|
15500
|
+
{ pattern: /you\s+are\s+now\s+(?:a\s+)?(?:different|new|another)/i, reason: "role_override", weight: 0.7 },
|
|
15501
|
+
{ pattern: /disregard\s+(?:your\s+)?(?:previous\s+)?(?:instructions?|guidelines?|rules?)/i, reason: "disregard_rules", weight: 0.8 },
|
|
15502
|
+
{ pattern: /system\s*:\s*(?:you|your|ignore)/i, reason: "fake_system_message", weight: 0.8 },
|
|
15503
|
+
{ pattern: /\[system\]/i, reason: "system_tag_injection", weight: 0.6 },
|
|
15504
|
+
{ pattern: /act\s+as\s+(?:an?\s+)?(?:unrestricted|unfiltered|jailbreak)/i, reason: "jailbreak_attempt", weight: 0.95 },
|
|
15505
|
+
{ pattern: /jailbreak|DAN\s+mode|developer\s+mode/i, reason: "jailbreak_keyword", weight: 0.85 },
|
|
15506
|
+
{ pattern: /print\s+your\s+(?:system\s+)?prompt|reveal\s+your\s+instructions?/i, reason: "prompt_extraction", weight: 0.7 }
|
|
15507
|
+
];
|
|
15508
|
+
|
|
15509
|
+
class PromptInjectionDetector {
|
|
15510
|
+
threshold;
|
|
15511
|
+
constructor(sensitivity = "medium") {
|
|
15512
|
+
this.threshold = sensitivity === "low" ? 0.85 : sensitivity === "medium" ? 0.7 : 0.55;
|
|
15513
|
+
}
|
|
15514
|
+
detect(text) {
|
|
15515
|
+
let maxScore = 0;
|
|
15516
|
+
let detectedReason;
|
|
15517
|
+
for (const { pattern, reason, weight } of INJECTION_PATTERNS) {
|
|
15518
|
+
if (pattern.test(text)) {
|
|
15519
|
+
if (weight > maxScore) {
|
|
15520
|
+
maxScore = weight;
|
|
15521
|
+
detectedReason = reason;
|
|
15522
|
+
}
|
|
15523
|
+
}
|
|
15524
|
+
}
|
|
15525
|
+
return {
|
|
15526
|
+
detected: maxScore >= this.threshold,
|
|
15527
|
+
confidence: maxScore,
|
|
15528
|
+
reason: detectedReason
|
|
15529
|
+
};
|
|
15530
|
+
}
|
|
15531
|
+
}
|
|
15532
|
+
|
|
15533
|
+
// src/ai-guard/service.ts
|
|
15534
|
+
init_http_exception();
|
|
15535
|
+
class AiGuardService {
|
|
15536
|
+
piiDetector;
|
|
15537
|
+
contentModerator;
|
|
15538
|
+
injectionDetector;
|
|
15539
|
+
options;
|
|
15540
|
+
constructor(options) {
|
|
15541
|
+
this.options = options;
|
|
15542
|
+
this.piiDetector = options.piiDetection ? new PiiDetector : null;
|
|
15543
|
+
if (options.moderation) {
|
|
15544
|
+
const modConfig = typeof options.moderation === "object" ? options.moderation : {};
|
|
15545
|
+
this.contentModerator = new ContentModerator(modConfig);
|
|
15546
|
+
} else {
|
|
15547
|
+
this.contentModerator = null;
|
|
15548
|
+
}
|
|
15549
|
+
if (options.promptInjection) {
|
|
15550
|
+
const injConfig = typeof options.promptInjection === "object" ? options.promptInjection : {};
|
|
15551
|
+
this.injectionDetector = new PromptInjectionDetector(injConfig.sensitivity);
|
|
15552
|
+
} else {
|
|
15553
|
+
this.injectionDetector = null;
|
|
15554
|
+
}
|
|
15555
|
+
}
|
|
15556
|
+
async check(text) {
|
|
15557
|
+
let workingText = text;
|
|
15558
|
+
const result = { allowed: true };
|
|
15559
|
+
if (this.piiDetector) {
|
|
15560
|
+
const redact = typeof this.options.piiDetection === "object" ? this.options.piiDetection.redact !== false : true;
|
|
15561
|
+
const piiResult = this.piiDetector.detect(workingText, redact);
|
|
15562
|
+
result.pii = piiResult;
|
|
15563
|
+
if (redact && piiResult.detected) {
|
|
15564
|
+
workingText = piiResult.sanitized;
|
|
15565
|
+
}
|
|
15566
|
+
}
|
|
15567
|
+
if (this.injectionDetector) {
|
|
15568
|
+
const injResult = this.injectionDetector.detect(workingText);
|
|
15569
|
+
result.injection = injResult;
|
|
15570
|
+
if (injResult.detected) {
|
|
15571
|
+
result.allowed = false;
|
|
15572
|
+
}
|
|
15573
|
+
}
|
|
15574
|
+
if (this.contentModerator && result.allowed) {
|
|
15575
|
+
const modResult = await this.contentModerator.moderate(workingText);
|
|
15576
|
+
result.moderation = modResult;
|
|
15577
|
+
if (this.contentModerator.isBlocked(modResult)) {
|
|
15578
|
+
result.allowed = false;
|
|
15579
|
+
}
|
|
15580
|
+
}
|
|
15581
|
+
result.sanitizedInput = workingText;
|
|
15582
|
+
return result;
|
|
15583
|
+
}
|
|
15584
|
+
async checkOrThrow(text) {
|
|
15585
|
+
const result = await this.check(text);
|
|
15586
|
+
if (!result.allowed) {
|
|
15587
|
+
const reason = result.injection?.detected ? "Prompt injection detected" : result.moderation?.flagged ? "Content violates usage policies" : "Content not allowed";
|
|
15588
|
+
throw new HttpException(400, reason);
|
|
15589
|
+
}
|
|
15590
|
+
return result.sanitizedInput ?? text;
|
|
15591
|
+
}
|
|
15592
|
+
}
|
|
15593
|
+
AiGuardService = __legacyDecorateClassTS([
|
|
15594
|
+
Injectable(),
|
|
15595
|
+
__legacyDecorateParamTS(0, Inject(AI_GUARD_OPTIONS_TOKEN)),
|
|
15596
|
+
__legacyMetadataTS("design:paramtypes", [
|
|
15597
|
+
typeof AiGuardModuleOptions === "undefined" ? Object : AiGuardModuleOptions
|
|
15598
|
+
])
|
|
15599
|
+
], AiGuardService);
|
|
15600
|
+
// src/ai-guard/ai-guard-module.ts
|
|
15601
|
+
init_module();
|
|
15602
|
+
class AiGuardModule {
|
|
15603
|
+
static forRoot(options = {}) {
|
|
15604
|
+
const service6 = new AiGuardService(options);
|
|
15605
|
+
const providers2 = [
|
|
15606
|
+
{ provide: AI_GUARD_OPTIONS_TOKEN, useValue: options },
|
|
15607
|
+
{ provide: AI_GUARD_SERVICE_TOKEN, useValue: service6 },
|
|
15608
|
+
AiGuardService
|
|
15609
|
+
];
|
|
15610
|
+
const existing = Reflect.getMetadata(MODULE_METADATA_KEY, AiGuardModule) || {};
|
|
15611
|
+
Reflect.defineMetadata(MODULE_METADATA_KEY, {
|
|
15612
|
+
...existing,
|
|
15613
|
+
providers: [...existing.providers || [], ...providers2],
|
|
15614
|
+
exports: [
|
|
15615
|
+
...existing.exports || [],
|
|
15616
|
+
AI_GUARD_SERVICE_TOKEN,
|
|
15617
|
+
AiGuardService
|
|
15618
|
+
]
|
|
15619
|
+
}, AiGuardModule);
|
|
15620
|
+
return AiGuardModule;
|
|
15621
|
+
}
|
|
15622
|
+
static reset() {
|
|
15623
|
+
Reflect.deleteMetadata(MODULE_METADATA_KEY, AiGuardModule);
|
|
15624
|
+
}
|
|
15625
|
+
}
|
|
15626
|
+
AiGuardModule = __legacyDecorateClassTS([
|
|
15627
|
+
Module({ providers: [] })
|
|
15628
|
+
], AiGuardModule);
|
|
13068
15629
|
export {
|
|
13069
15630
|
validateParameters,
|
|
13070
15631
|
validateObjectSync,
|
|
13071
15632
|
validateObject,
|
|
13072
15633
|
scanInterceptorMetadata,
|
|
13073
15634
|
requiresAuth,
|
|
15635
|
+
renderTemplate,
|
|
13074
15636
|
registerReflector,
|
|
13075
15637
|
isValidateClass,
|
|
13076
15638
|
isGlobalModule,
|
|
@@ -13086,6 +15648,8 @@ export {
|
|
|
13086
15648
|
getColumnMetadata,
|
|
13087
15649
|
getClassValidationMetadata,
|
|
13088
15650
|
getAuthMetadata,
|
|
15651
|
+
extractVariables,
|
|
15652
|
+
extractConversationId,
|
|
13089
15653
|
createUserKeyGenerator,
|
|
13090
15654
|
createTokenKeyGenerator,
|
|
13091
15655
|
createSwaggerUIMiddleware,
|
|
@@ -13105,17 +15669,21 @@ export {
|
|
|
13105
15669
|
createCustomValidator,
|
|
13106
15670
|
createCorsMiddleware,
|
|
13107
15671
|
createClient,
|
|
15672
|
+
cosineSimilarity,
|
|
13108
15673
|
contextStore,
|
|
13109
15674
|
checkRoles,
|
|
13110
15675
|
applyDecorators,
|
|
13111
15676
|
WeightedRoundRobinLoadBalancer,
|
|
13112
15677
|
WebSocketGatewayRegistry,
|
|
13113
15678
|
WebSocketGateway,
|
|
15679
|
+
VectorStoreModule,
|
|
13114
15680
|
ValidationError,
|
|
13115
15681
|
ValidateNested,
|
|
13116
15682
|
ValidateIf,
|
|
13117
15683
|
ValidateClass,
|
|
13118
15684
|
Validate,
|
|
15685
|
+
VECTOR_STORE_TOKEN,
|
|
15686
|
+
VECTOR_STORE_OPTIONS_TOKEN,
|
|
13119
15687
|
UserInfoRequestInterceptor,
|
|
13120
15688
|
UseMiddleware,
|
|
13121
15689
|
UseGuards,
|
|
@@ -13127,6 +15695,9 @@ export {
|
|
|
13127
15695
|
TransactionInterceptor,
|
|
13128
15696
|
Tracer,
|
|
13129
15697
|
TraceIdRequestInterceptor,
|
|
15698
|
+
ToolRegistry,
|
|
15699
|
+
ToolExecutor,
|
|
15700
|
+
TextChunker,
|
|
13130
15701
|
TestingModuleBuilder,
|
|
13131
15702
|
TestingModule,
|
|
13132
15703
|
TestHttpClient,
|
|
@@ -13166,38 +15737,57 @@ export {
|
|
|
13166
15737
|
Repository,
|
|
13167
15738
|
Reflector,
|
|
13168
15739
|
RedisSessionStore,
|
|
15740
|
+
RedisConversationStore,
|
|
13169
15741
|
RedisCacheStore,
|
|
13170
15742
|
RateLimiter,
|
|
13171
15743
|
RateLimit,
|
|
13172
15744
|
RandomLoadBalancer,
|
|
15745
|
+
RagService,
|
|
15746
|
+
RagModule,
|
|
15747
|
+
Rag,
|
|
13173
15748
|
ROLES_METADATA_KEY,
|
|
13174
15749
|
REFLECTOR_TOKEN,
|
|
15750
|
+
RAG_SERVICE_TOKEN,
|
|
15751
|
+
RAG_OPTIONS_TOKEN,
|
|
15752
|
+
RAG_METADATA_KEY,
|
|
13175
15753
|
QueueService,
|
|
13176
15754
|
QueueModule,
|
|
13177
15755
|
Queue,
|
|
13178
15756
|
QueryMap,
|
|
13179
15757
|
Query,
|
|
15758
|
+
QdrantVectorStore,
|
|
13180
15759
|
QUEUE_SERVICE_TOKEN,
|
|
13181
15760
|
QUEUE_OPTIONS_TOKEN,
|
|
13182
15761
|
Property,
|
|
13183
15762
|
Propagation,
|
|
15763
|
+
PromptService,
|
|
15764
|
+
PromptModule,
|
|
15765
|
+
PromptInjectionDetector,
|
|
13184
15766
|
PrometheusFormatter,
|
|
13185
15767
|
PrimaryKey,
|
|
15768
|
+
PineconeVectorStore,
|
|
15769
|
+
PiiDetector,
|
|
13186
15770
|
PermissionInterceptor,
|
|
13187
15771
|
Permission,
|
|
13188
15772
|
PerformanceHarness,
|
|
13189
15773
|
ParamBinder,
|
|
13190
15774
|
Param,
|
|
13191
15775
|
PUT,
|
|
15776
|
+
PROMPT_SERVICE_TOKEN,
|
|
15777
|
+
PROMPT_OPTIONS_TOKEN,
|
|
13192
15778
|
POST,
|
|
13193
15779
|
PERMISSION_METADATA_KEY,
|
|
13194
15780
|
PATCH,
|
|
13195
15781
|
OrmService,
|
|
13196
15782
|
OptionalAuthGuard,
|
|
15783
|
+
OpenAIProvider,
|
|
15784
|
+
OpenAIEmbeddingProvider,
|
|
13197
15785
|
OnOpen,
|
|
13198
15786
|
OnMessage,
|
|
13199
15787
|
OnEvent,
|
|
13200
15788
|
OnClose,
|
|
15789
|
+
OllamaProvider,
|
|
15790
|
+
OllamaEmbeddingProvider,
|
|
13201
15791
|
ORM_SERVICE_TOKEN,
|
|
13202
15792
|
ON_EVENT_METADATA_KEY,
|
|
13203
15793
|
OAuth2Service,
|
|
@@ -13217,15 +15807,28 @@ export {
|
|
|
13217
15807
|
MiddlewarePipeline,
|
|
13218
15808
|
MetricsModule,
|
|
13219
15809
|
MetricsCollector,
|
|
15810
|
+
MemoryVectorStore,
|
|
13220
15811
|
MemoryTraceCollector,
|
|
13221
15812
|
MemorySessionStore,
|
|
13222
15813
|
MemoryQueueStore,
|
|
15814
|
+
MemoryConversationStore,
|
|
13223
15815
|
MemoryCacheStore,
|
|
15816
|
+
McpTool,
|
|
15817
|
+
McpServer,
|
|
15818
|
+
McpResource,
|
|
15819
|
+
McpRegistry,
|
|
15820
|
+
McpParam,
|
|
15821
|
+
McpModule,
|
|
13224
15822
|
MaxLength,
|
|
13225
15823
|
Max,
|
|
13226
15824
|
Matches,
|
|
15825
|
+
MarkdownChunker,
|
|
13227
15826
|
METRICS_SERVICE_TOKEN,
|
|
13228
15827
|
METRICS_OPTIONS_TOKEN,
|
|
15828
|
+
MCP_TOOL_METADATA_KEY,
|
|
15829
|
+
MCP_SERVER_TOKEN,
|
|
15830
|
+
MCP_RESOURCE_METADATA_KEY,
|
|
15831
|
+
MCP_OPTIONS_TOKEN,
|
|
13229
15832
|
LoggerModule,
|
|
13230
15833
|
LoggerExtension,
|
|
13231
15834
|
LogLevel2 as LogLevel,
|
|
@@ -13278,7 +15881,9 @@ export {
|
|
|
13278
15881
|
InterceptorRegistry,
|
|
13279
15882
|
InterceptorChain,
|
|
13280
15883
|
Injectable,
|
|
15884
|
+
InjectConversation,
|
|
13281
15885
|
Inject,
|
|
15886
|
+
InMemoryPromptStore,
|
|
13282
15887
|
INTERCEPTOR_REGISTRY_TOKEN,
|
|
13283
15888
|
HttpException,
|
|
13284
15889
|
HealthModule,
|
|
@@ -13287,12 +15892,14 @@ export {
|
|
|
13287
15892
|
HEALTH_OPTIONS_TOKEN,
|
|
13288
15893
|
HEALTH_INDICATORS_TOKEN,
|
|
13289
15894
|
GuardRegistry,
|
|
15895
|
+
GoogleProvider,
|
|
13290
15896
|
Global,
|
|
13291
15897
|
GUARD_REGISTRY_TOKEN,
|
|
13292
15898
|
GUARDS_METADATA_KEY,
|
|
13293
15899
|
GLOBAL_MODULE_METADATA_KEY,
|
|
13294
15900
|
GET,
|
|
13295
15901
|
ForbiddenException,
|
|
15902
|
+
FilePromptStore,
|
|
13296
15903
|
ExecutionContextImpl,
|
|
13297
15904
|
ExceptionFilterRegistry,
|
|
13298
15905
|
EventModule,
|
|
@@ -13302,16 +15909,21 @@ export {
|
|
|
13302
15909
|
Equals,
|
|
13303
15910
|
Entity,
|
|
13304
15911
|
EnableCacheProxy,
|
|
15912
|
+
EmbeddingService2 as EmbeddingService,
|
|
15913
|
+
EmbeddingModule,
|
|
13305
15914
|
EVENT_OPTIONS_TOKEN,
|
|
13306
15915
|
EVENT_LISTENER_SCANNER_TOKEN,
|
|
13307
15916
|
EVENT_LISTENER_CLASS_METADATA_KEY,
|
|
13308
15917
|
EVENT_EMITTER_TOKEN,
|
|
15918
|
+
EMBEDDING_SERVICE_TOKEN,
|
|
15919
|
+
EMBEDDING_OPTIONS_TOKEN,
|
|
13309
15920
|
DrizzleBaseRepository,
|
|
13310
15921
|
DebugModule,
|
|
13311
15922
|
DatabaseService2 as DatabaseService,
|
|
13312
15923
|
DatabaseModule,
|
|
13313
15924
|
DatabaseHealthIndicator,
|
|
13314
15925
|
DatabaseExtension,
|
|
15926
|
+
DatabaseConversationStore,
|
|
13315
15927
|
DatabaseConnectionManager,
|
|
13316
15928
|
DashboardService,
|
|
13317
15929
|
DashboardModule,
|
|
@@ -13322,11 +15934,14 @@ export {
|
|
|
13322
15934
|
DATABASE_OPTIONS_TOKEN,
|
|
13323
15935
|
DASHBOARD_OPTIONS_TOKEN,
|
|
13324
15936
|
Cron,
|
|
15937
|
+
ConversationService,
|
|
15938
|
+
ConversationModule,
|
|
13325
15939
|
ControllerRegistry,
|
|
13326
15940
|
Controller,
|
|
13327
15941
|
ContextService,
|
|
13328
15942
|
Context2 as ContextParam,
|
|
13329
15943
|
Context,
|
|
15944
|
+
ContentModerator,
|
|
13330
15945
|
Contains,
|
|
13331
15946
|
Container,
|
|
13332
15947
|
ConsoleTraceCollector,
|
|
@@ -13352,6 +15967,8 @@ export {
|
|
|
13352
15967
|
CacheEvictInterceptor,
|
|
13353
15968
|
CacheEvict,
|
|
13354
15969
|
Cache,
|
|
15970
|
+
CONVERSATION_SERVICE_TOKEN,
|
|
15971
|
+
CONVERSATION_OPTIONS_TOKEN,
|
|
13355
15972
|
CONTEXT_SERVICE_TOKEN,
|
|
13356
15973
|
CONFIG_SERVICE_TOKEN,
|
|
13357
15974
|
CONFIG_CENTER_TOKEN,
|
|
@@ -13381,5 +15998,25 @@ export {
|
|
|
13381
15998
|
ApiResponse,
|
|
13382
15999
|
ApiParam,
|
|
13383
16000
|
ApiOperation,
|
|
13384
|
-
ApiBody
|
|
16001
|
+
ApiBody,
|
|
16002
|
+
AnthropicProvider,
|
|
16003
|
+
AiTool,
|
|
16004
|
+
AiTimeoutError,
|
|
16005
|
+
AiService,
|
|
16006
|
+
AiRateLimitError,
|
|
16007
|
+
AiProviderError,
|
|
16008
|
+
AiNoProviderError,
|
|
16009
|
+
AiModule,
|
|
16010
|
+
AiGuardService,
|
|
16011
|
+
AiGuardModule,
|
|
16012
|
+
AiGuard,
|
|
16013
|
+
AiContextLengthError,
|
|
16014
|
+
AiAllProvidersFailed,
|
|
16015
|
+
AI_TOOL_REGISTRY_TOKEN,
|
|
16016
|
+
AI_TOOL_METADATA_KEY,
|
|
16017
|
+
AI_SERVICE_TOKEN,
|
|
16018
|
+
AI_MODULE_OPTIONS_TOKEN,
|
|
16019
|
+
AI_GUARD_SERVICE_TOKEN,
|
|
16020
|
+
AI_GUARD_OPTIONS_TOKEN,
|
|
16021
|
+
AI_GUARD_METADATA_KEY
|
|
13385
16022
|
};
|