@juspay/neurolink 3.0.1 → 4.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +57 -6
- package/README.md +235 -2
- package/dist/agent/direct-tools.d.ts +6 -6
- package/dist/chat/client-utils.d.ts +92 -0
- package/dist/chat/client-utils.js +298 -0
- package/dist/chat/index.d.ts +27 -0
- package/dist/chat/index.js +41 -0
- package/dist/chat/session-storage.d.ts +77 -0
- package/dist/chat/session-storage.js +233 -0
- package/dist/chat/session.d.ts +95 -0
- package/dist/chat/session.js +257 -0
- package/dist/chat/sse-handler.d.ts +49 -0
- package/dist/chat/sse-handler.js +266 -0
- package/dist/chat/types.d.ts +73 -0
- package/dist/chat/types.js +5 -0
- package/dist/chat/websocket-chat-handler.d.ts +36 -0
- package/dist/chat/websocket-chat-handler.js +262 -0
- package/dist/cli/commands/config.js +12 -12
- package/dist/cli/commands/mcp.js +3 -4
- package/dist/cli/index.d.ts +0 -7
- package/dist/cli/index.js +247 -28
- package/dist/config/configManager.d.ts +60 -0
- package/dist/config/configManager.js +300 -0
- package/dist/config/types.d.ts +136 -0
- package/dist/config/types.js +43 -0
- package/dist/core/analytics.d.ts +23 -0
- package/dist/core/analytics.js +131 -0
- package/dist/core/constants.d.ts +41 -0
- package/dist/core/constants.js +50 -0
- package/dist/core/defaults.d.ts +18 -0
- package/dist/core/defaults.js +29 -0
- package/dist/core/evaluation-config.d.ts +29 -0
- package/dist/core/evaluation-config.js +144 -0
- package/dist/core/evaluation-providers.d.ts +30 -0
- package/dist/core/evaluation-providers.js +187 -0
- package/dist/core/evaluation.d.ts +117 -0
- package/dist/core/evaluation.js +528 -0
- package/dist/core/factory.js +33 -25
- package/dist/core/types.d.ts +165 -6
- package/dist/core/types.js +3 -4
- package/dist/index.d.ts +9 -4
- package/dist/index.js +25 -4
- package/dist/lib/agent/direct-tools.d.ts +6 -6
- package/dist/lib/chat/client-utils.d.ts +92 -0
- package/dist/lib/chat/client-utils.js +298 -0
- package/dist/lib/chat/index.d.ts +27 -0
- package/dist/lib/chat/index.js +41 -0
- package/dist/lib/chat/session-storage.d.ts +77 -0
- package/dist/lib/chat/session-storage.js +233 -0
- package/dist/lib/chat/session.d.ts +95 -0
- package/dist/lib/chat/session.js +257 -0
- package/dist/lib/chat/sse-handler.d.ts +49 -0
- package/dist/lib/chat/sse-handler.js +266 -0
- package/dist/lib/chat/types.d.ts +73 -0
- package/dist/lib/chat/types.js +5 -0
- package/dist/lib/chat/websocket-chat-handler.d.ts +36 -0
- package/dist/lib/chat/websocket-chat-handler.js +262 -0
- package/dist/lib/config/configManager.d.ts +60 -0
- package/dist/lib/config/configManager.js +300 -0
- package/dist/lib/config/types.d.ts +136 -0
- package/dist/lib/config/types.js +43 -0
- package/dist/lib/core/analytics.d.ts +23 -0
- package/dist/lib/core/analytics.js +131 -0
- package/dist/lib/core/constants.d.ts +41 -0
- package/dist/lib/core/constants.js +50 -0
- package/dist/lib/core/defaults.d.ts +18 -0
- package/dist/lib/core/defaults.js +29 -0
- package/dist/lib/core/evaluation-config.d.ts +29 -0
- package/dist/lib/core/evaluation-config.js +144 -0
- package/dist/lib/core/evaluation-providers.d.ts +30 -0
- package/dist/lib/core/evaluation-providers.js +187 -0
- package/dist/lib/core/evaluation.d.ts +117 -0
- package/dist/lib/core/evaluation.js +528 -0
- package/dist/lib/core/factory.js +33 -26
- package/dist/lib/core/types.d.ts +165 -6
- package/dist/lib/core/types.js +3 -4
- package/dist/lib/index.d.ts +9 -4
- package/dist/lib/index.js +25 -4
- package/dist/lib/mcp/contracts/mcpContract.d.ts +118 -0
- package/dist/lib/mcp/contracts/mcpContract.js +5 -0
- package/dist/lib/mcp/function-calling.js +11 -3
- package/dist/lib/mcp/logging.js +5 -0
- package/dist/lib/mcp/neurolink-mcp-client.js +2 -1
- package/dist/lib/mcp/orchestrator.js +18 -9
- package/dist/lib/mcp/registry.d.ts +49 -16
- package/dist/lib/mcp/registry.js +80 -6
- package/dist/lib/mcp/servers/ai-providers/ai-workflow-tools.js +5 -4
- package/dist/lib/mcp/tool-integration.js +1 -1
- package/dist/lib/mcp/tool-registry.d.ts +55 -34
- package/dist/lib/mcp/tool-registry.js +111 -97
- package/dist/lib/mcp/unified-mcp.js +6 -1
- package/dist/lib/mcp/unified-registry.d.ts +12 -4
- package/dist/lib/mcp/unified-registry.js +17 -4
- package/dist/lib/neurolink.d.ts +26 -0
- package/dist/lib/neurolink.js +43 -1
- package/dist/lib/providers/agent-enhanced-provider.d.ts +11 -2
- package/dist/lib/providers/agent-enhanced-provider.js +86 -15
- package/dist/lib/providers/amazonBedrock.d.ts +9 -1
- package/dist/lib/providers/amazonBedrock.js +26 -2
- package/dist/lib/providers/analytics-helper.d.ts +53 -0
- package/dist/lib/providers/analytics-helper.js +151 -0
- package/dist/lib/providers/anthropic.d.ts +11 -1
- package/dist/lib/providers/anthropic.js +29 -4
- package/dist/lib/providers/azureOpenAI.d.ts +3 -1
- package/dist/lib/providers/azureOpenAI.js +28 -4
- package/dist/lib/providers/function-calling-provider.d.ts +9 -1
- package/dist/lib/providers/function-calling-provider.js +14 -1
- package/dist/lib/providers/googleAIStudio.d.ts +15 -1
- package/dist/lib/providers/googleAIStudio.js +32 -2
- package/dist/lib/providers/googleVertexAI.d.ts +9 -1
- package/dist/lib/providers/googleVertexAI.js +31 -2
- package/dist/lib/providers/huggingFace.d.ts +3 -1
- package/dist/lib/providers/huggingFace.js +26 -3
- package/dist/lib/providers/mcp-provider.d.ts +9 -1
- package/dist/lib/providers/mcp-provider.js +12 -0
- package/dist/lib/providers/mistralAI.d.ts +3 -1
- package/dist/lib/providers/mistralAI.js +25 -2
- package/dist/lib/providers/ollama.d.ts +3 -1
- package/dist/lib/providers/ollama.js +27 -4
- package/dist/lib/providers/openAI.d.ts +15 -1
- package/dist/lib/providers/openAI.js +32 -2
- package/dist/lib/proxy/proxy-fetch.js +8 -7
- package/dist/lib/services/streaming/streaming-manager.d.ts +29 -0
- package/dist/lib/services/streaming/streaming-manager.js +244 -0
- package/dist/lib/services/types.d.ts +155 -0
- package/dist/lib/services/types.js +2 -0
- package/dist/lib/services/websocket/websocket-server.d.ts +34 -0
- package/dist/lib/services/websocket/websocket-server.js +304 -0
- package/dist/lib/telemetry/index.d.ts +15 -0
- package/dist/lib/telemetry/index.js +22 -0
- package/dist/lib/telemetry/telemetry-service.d.ts +47 -0
- package/dist/lib/telemetry/telemetry-service.js +259 -0
- package/dist/lib/utils/streaming-utils.d.ts +67 -0
- package/dist/lib/utils/streaming-utils.js +201 -0
- package/dist/mcp/contracts/mcpContract.d.ts +118 -0
- package/dist/mcp/contracts/mcpContract.js +5 -0
- package/dist/mcp/function-calling.js +11 -3
- package/dist/mcp/logging.js +5 -0
- package/dist/mcp/neurolink-mcp-client.js +2 -1
- package/dist/mcp/orchestrator.js +18 -9
- package/dist/mcp/registry.d.ts +49 -16
- package/dist/mcp/registry.js +80 -6
- package/dist/mcp/servers/ai-providers/ai-workflow-tools.d.ts +2 -2
- package/dist/mcp/servers/ai-providers/ai-workflow-tools.js +5 -4
- package/dist/mcp/tool-integration.js +1 -1
- package/dist/mcp/tool-registry.d.ts +55 -34
- package/dist/mcp/tool-registry.js +111 -97
- package/dist/mcp/unified-mcp.js +6 -1
- package/dist/mcp/unified-registry.d.ts +12 -4
- package/dist/mcp/unified-registry.js +17 -4
- package/dist/neurolink.d.ts +26 -0
- package/dist/neurolink.js +43 -1
- package/dist/providers/agent-enhanced-provider.d.ts +11 -2
- package/dist/providers/agent-enhanced-provider.js +86 -15
- package/dist/providers/amazonBedrock.d.ts +9 -1
- package/dist/providers/amazonBedrock.js +26 -2
- package/dist/providers/analytics-helper.d.ts +53 -0
- package/dist/providers/analytics-helper.js +151 -0
- package/dist/providers/anthropic.d.ts +11 -1
- package/dist/providers/anthropic.js +29 -4
- package/dist/providers/azureOpenAI.d.ts +3 -1
- package/dist/providers/azureOpenAI.js +29 -4
- package/dist/providers/function-calling-provider.d.ts +9 -1
- package/dist/providers/function-calling-provider.js +14 -1
- package/dist/providers/googleAIStudio.d.ts +15 -1
- package/dist/providers/googleAIStudio.js +32 -2
- package/dist/providers/googleVertexAI.d.ts +9 -1
- package/dist/providers/googleVertexAI.js +31 -2
- package/dist/providers/huggingFace.d.ts +3 -1
- package/dist/providers/huggingFace.js +26 -3
- package/dist/providers/mcp-provider.d.ts +9 -1
- package/dist/providers/mcp-provider.js +12 -0
- package/dist/providers/mistralAI.d.ts +3 -1
- package/dist/providers/mistralAI.js +25 -2
- package/dist/providers/ollama.d.ts +3 -1
- package/dist/providers/ollama.js +27 -4
- package/dist/providers/openAI.d.ts +15 -1
- package/dist/providers/openAI.js +33 -2
- package/dist/proxy/proxy-fetch.js +8 -7
- package/dist/services/streaming/streaming-manager.d.ts +29 -0
- package/dist/services/streaming/streaming-manager.js +244 -0
- package/dist/services/types.d.ts +155 -0
- package/dist/services/types.js +2 -0
- package/dist/services/websocket/websocket-server.d.ts +34 -0
- package/dist/services/websocket/websocket-server.js +304 -0
- package/dist/telemetry/index.d.ts +15 -0
- package/dist/telemetry/index.js +22 -0
- package/dist/telemetry/telemetry-service.d.ts +47 -0
- package/dist/telemetry/telemetry-service.js +261 -0
- package/dist/utils/streaming-utils.d.ts +67 -0
- package/dist/utils/streaming-utils.js +201 -0
- package/package.json +18 -2
package/dist/telemetry/telemetry-service.js
ADDED
@@ -0,0 +1,261 @@
+import { NodeSDK } from "@opentelemetry/sdk-node";
+import { metrics, trace } from "@opentelemetry/api";
+import { getNodeAutoInstrumentations } from "@opentelemetry/auto-instrumentations-node";
+import { OTLPTraceExporter } from "@opentelemetry/exporter-trace-otlp-http";
+import { OTLPMetricExporter } from "@opentelemetry/exporter-metrics-otlp-http";
+import { Resource } from "@opentelemetry/resources";
+import { ATTR_SERVICE_NAME, ATTR_SERVICE_VERSION, } from "@opentelemetry/semantic-conventions";
+import { PeriodicExportingMetricReader } from "@opentelemetry/sdk-metrics";
+export class TelemetryService {
+    static instance;
+    sdk;
+    enabled = false;
+    meter;
+    tracer;
+    // Optional Metrics (only created when enabled)
+    aiRequestCounter;
+    aiRequestDuration;
+    aiTokensUsed;
+    aiProviderErrors;
+    mcpToolCalls;
+    connectionCounter;
+    responseTimeHistogram;
+    constructor() {
+        // Check if telemetry is enabled
+        this.enabled = this.isTelemetryEnabled();
+        if (this.enabled) {
+            this.initializeTelemetry();
+        }
+        else {
+            console.log("[Telemetry] Disabled - set NEUROLINK_TELEMETRY_ENABLED=true or configure OTEL_EXPORTER_OTLP_ENDPOINT to enable");
+        }
+    }
+    static getInstance() {
+        if (!TelemetryService.instance) {
+            TelemetryService.instance = new TelemetryService();
+        }
+        return TelemetryService.instance;
+    }
+    isTelemetryEnabled() {
+        return (process.env.NEUROLINK_TELEMETRY_ENABLED === "true" ||
+            process.env.OTEL_EXPORTER_OTLP_ENDPOINT !== undefined);
+    }
+    initializeTelemetry() {
+        try {
+            const resource = new Resource({
+                [ATTR_SERVICE_NAME]: process.env.OTEL_SERVICE_NAME || "neurolink-ai",
+                [ATTR_SERVICE_VERSION]: process.env.OTEL_SERVICE_VERSION || "3.0.1",
+            });
+            this.sdk = new NodeSDK({
+                resource,
+                traceExporter: new OTLPTraceExporter({
+                    url: process.env.OTEL_EXPORTER_OTLP_TRACES_ENDPOINT ||
+                        `${process.env.OTEL_EXPORTER_OTLP_ENDPOINT}/v1/traces`,
+                }),
+                // Note: Metric reader configured separately
+                instrumentations: [getNodeAutoInstrumentations()],
+            });
+            this.meter = metrics.getMeter("neurolink-ai");
+            this.tracer = trace.getTracer("neurolink-ai");
+            this.initializeMetrics();
+            console.log("[Telemetry] Initialized with endpoint:", process.env.OTEL_EXPORTER_OTLP_ENDPOINT);
+        }
+        catch (error) {
+            console.error("[Telemetry] Failed to initialize:", error);
+            this.enabled = false;
+        }
+    }
+    initializeMetrics() {
+        if (!this.enabled || !this.meter) {
+            return;
+        }
+        this.aiRequestCounter = this.meter.createCounter("ai_requests_total", {
+            description: "Total number of AI requests",
+        });
+        this.aiRequestDuration = this.meter.createHistogram("ai_request_duration_ms", {
+            description: "AI request duration in milliseconds",
+        });
+        this.aiTokensUsed = this.meter.createCounter("ai_tokens_used_total", {
+            description: "Total number of AI tokens used",
+        });
+        this.aiProviderErrors = this.meter.createCounter("ai_provider_errors_total", {
+            description: "Total number of AI provider errors",
+        });
+        this.mcpToolCalls = this.meter.createCounter("mcp_tool_calls_total", {
+            description: "Total number of MCP tool calls",
+        });
+        this.connectionCounter = this.meter.createCounter("connections_total", {
+            description: "Total number of connections",
+        });
+        this.responseTimeHistogram = this.meter.createHistogram("response_time_ms", {
+            description: "Response time in milliseconds",
+        });
+    }
+    async initialize() {
+        if (!this.enabled) {
+            return;
+        }
+        try {
+            await this.sdk?.start();
+            console.log("[Telemetry] SDK started successfully");
+        }
+        catch (error) {
+            console.error("[Telemetry] Failed to start SDK:", error);
+            this.enabled = false;
+        }
+    }
+    // AI Operation Tracing (NO-OP when disabled)
+    async traceAIRequest(provider, operation) {
+        if (!this.enabled || !this.tracer) {
+            return await operation(); // Direct execution when disabled
+        }
+        const span = this.tracer.startSpan(`ai.${provider}.generate_text`, {
+            attributes: {
+                "ai.provider": provider,
+                "ai.operation": "generate_text",
+            },
+        });
+        try {
+            const result = await operation();
+            span.setStatus({ code: 1 }); // OK
+            return result;
+        }
+        catch (error) {
+            span.setStatus({
+                code: 2,
+                message: error instanceof Error ? error.message : "Unknown error",
+            }); // ERROR
+            span.recordException(error);
+            throw error;
+        }
+        finally {
+            span.end();
+        }
+    }
+    // Metrics Recording (NO-OP when disabled)
+    recordAIRequest(provider, model, tokens, duration) {
+        if (!this.enabled || !this.aiRequestCounter) {
+            return;
+        }
+        const labels = { provider, model };
+        this.aiRequestCounter.add(1, labels);
+        this.aiRequestDuration?.record(duration, labels);
+        this.aiTokensUsed?.add(tokens, labels);
+    }
+    recordAIError(provider, error) {
+        if (!this.enabled || !this.aiProviderErrors) {
+            return;
+        }
+        this.aiProviderErrors.add(1, {
+            provider,
+            error: error.name,
+            message: error.message.substring(0, 100), // Limit message length
+        });
+    }
+    recordMCPToolCall(toolName, duration, success) {
+        if (!this.enabled || !this.mcpToolCalls) {
+            return;
+        }
+        this.mcpToolCalls.add(1, {
+            tool: toolName,
+            success: success.toString(),
+            duration_bucket: this.getDurationBucket(duration),
+        });
+    }
+    recordConnection(type) {
+        if (!this.enabled || !this.connectionCounter) {
+            return;
+        }
+        this.connectionCounter.add(1, { connection_type: type });
+    }
+    recordResponseTime(endpoint, method, duration) {
+        if (!this.enabled || !this.responseTimeHistogram) {
+            return;
+        }
+        this.responseTimeHistogram.record(duration, {
+            endpoint,
+            method,
+            status_bucket: this.getStatusBucket(duration),
+        });
+    }
+    // Custom Metrics
+    recordCustomMetric(name, value, labels) {
+        if (!this.enabled || !this.meter) {
+            return;
+        }
+        const counter = this.meter.createCounter(`custom_${name}`, {
+            description: `Custom metric: ${name}`,
+        });
+        counter.add(value, labels || {});
+    }
+    recordCustomHistogram(name, value, labels) {
+        if (!this.enabled || !this.meter) {
+            return;
+        }
+        const histogram = this.meter.createHistogram(`custom_${name}_histogram`, {
+            description: `Custom histogram: ${name}`,
+        });
+        histogram.record(value, labels || {});
+    }
+    // Health Checks
+    async getHealthMetrics() {
+        const memoryUsage = process.memoryUsage();
+        return {
+            timestamp: Date.now(),
+            memoryUsage,
+            uptime: process.uptime(),
+            activeConnections: 0, // Would need to be provided by calling code
+            errorRate: 0, // Would need to be calculated from metrics
+            averageResponseTime: 0, // Would need to be calculated from metrics
+        };
+    }
+    // Telemetry Status
+    isEnabled() {
+        return this.enabled;
+    }
+    getStatus() {
+        return {
+            enabled: this.enabled,
+            endpoint: process.env.OTEL_EXPORTER_OTLP_ENDPOINT,
+            service: process.env.OTEL_SERVICE_NAME || "neurolink-ai",
+            version: process.env.OTEL_SERVICE_VERSION || "3.0.1",
+        };
+    }
+    // Helper methods
+    getDurationBucket(duration) {
+        if (duration < 100) {
+            return "fast";
+        }
+        if (duration < 500) {
+            return "medium";
+        }
+        if (duration < 1000) {
+            return "slow";
+        }
+        return "very_slow";
+    }
+    getStatusBucket(duration) {
+        if (duration < 200) {
+            return "excellent";
+        }
+        if (duration < 500) {
+            return "good";
+        }
+        if (duration < 1000) {
+            return "acceptable";
+        }
+        return "poor";
+    }
+    // Cleanup
+    async shutdown() {
+        if (this.enabled && this.sdk) {
+            try {
+                await this.sdk.shutdown();
+                console.log("[Telemetry] SDK shutdown completed");
+            }
+            catch (error) {
+                console.error("[Telemetry] Error during shutdown:", error);
+            }
+        }
+    }
+}
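
For orientation, a minimal usage sketch of the new TelemetryService singleton added above. The import path and the provider/model labels are assumptions for illustration; the method names, signatures, and env-var gating come directly from the code in this diff.

// Hypothetical import path -- adjust to however the package actually exposes the service.
import { TelemetryService } from "@juspay/neurolink/dist/telemetry/telemetry-service.js";

// Every call below is a no-op unless NEUROLINK_TELEMETRY_ENABLED=true
// or OTEL_EXPORTER_OTLP_ENDPOINT is set in the environment.
const telemetry = TelemetryService.getInstance();
await telemetry.initialize();

// Wrap a provider call in a span, then record request metrics around it.
const callProvider = async () => "example response"; // stand-in for a real generate call
const started = Date.now();
const result = await telemetry.traceAIRequest("openai", callProvider);
console.log(result);
telemetry.recordAIRequest("openai", "gpt-4o", 1234 /* tokens */, Date.now() - started);

await telemetry.shutdown(); // flush exporters before exit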
package/dist/utils/streaming-utils.d.ts
ADDED
@@ -0,0 +1,67 @@
+/**
+ * Phase 2: Enhanced Streaming Infrastructure
+ * Streaming utilities for progress tracking and metadata enhancement
+ */
+import type { StreamingProgressData, StreamingMetadata, ProgressCallback, EnhancedStreamTextOptions } from "../core/types.js";
+export interface UIProgressHandler {
+    onProgress: (progress: StreamingProgressData) => void;
+    onComplete: (metadata: StreamingMetadata) => void;
+    onError: (error: Error) => void;
+}
+export interface StreamingStats {
+    totalChunks: number;
+    totalBytes: number;
+    duration: number;
+    averageChunkSize: number;
+    provider: string;
+    model: string;
+}
+/**
+ * Enhanced streaming utilities for progress tracking and metadata
+ */
+export declare class StreamingEnhancer {
+    /**
+     * Add progress tracking to a readable stream
+     */
+    static addProgressTracking(stream: ReadableStream, callback?: ProgressCallback, options?: {
+        streamId?: string;
+        bufferSize?: number;
+    }): ReadableStream;
+    /**
+     * Add metadata headers to streaming response
+     */
+    static addMetadataHeaders(response: Response, stats: StreamingStats): Response;
+    /**
+     * Create progress callback for UI integration
+     */
+    static createProgressCallback(ui: UIProgressHandler): ProgressCallback;
+    /**
+     * Estimate remaining time based on current progress
+     */
+    static estimateRemainingTime(totalBytes: number, elapsedTime: number, chunkCount: number): number | undefined;
+    /**
+     * Create enhanced streaming configuration
+     */
+    static createStreamingConfig(options: EnhancedStreamTextOptions): {
+        progressTracking: boolean;
+        callback?: ProgressCallback;
+        metadata: boolean;
+        bufferSize: number;
+        headers: boolean;
+    };
+}
+/**
+ * Streaming performance monitor for debugging and optimization
+ */
+export declare class StreamingMonitor {
+    private static activeStreams;
+    static registerStream(streamId: string): void;
+    static updateStream(streamId: string, progress: StreamingProgressData): void;
+    static completeStream(streamId: string): void;
+    static getActiveStreams(): StreamingProgressData[];
+    static getStreamStats(): {
+        activeCount: number;
+        totalBytesActive: number;
+        averageProgress: number;
+    };
+}
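
A short sketch of how the declarations above fit together: implementing UIProgressHandler and turning it into a ProgressCallback. The import path is an assumption; only fields that appear in this diff are used.

// Hypothetical import path for the compiled module declared above.
import { StreamingEnhancer, type UIProgressHandler } from "@juspay/neurolink/dist/utils/streaming-utils.js";

const ui: UIProgressHandler = {
  onProgress: (p) =>
    console.log(`[${p.streamId}] ${p.phase}: ${p.chunkCount} chunks, ${p.totalBytes} bytes`),
  onComplete: (m) =>
    console.log(`done in ${m.totalDuration}ms (~${m.throughputBytesPerSecond} B/s)`),
  onError: (err) => console.error("stream failed:", err),
};

// Yields a ProgressCallback suitable for StreamingEnhancer.addProgressTracking().
const onProgress = StreamingEnhancer.createProgressCallback(ui);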
package/dist/utils/streaming-utils.js
ADDED
@@ -0,0 +1,201 @@
+/**
+ * Phase 2: Enhanced Streaming Infrastructure
+ * Streaming utilities for progress tracking and metadata enhancement
+ */
+/**
+ * Enhanced streaming utilities for progress tracking and metadata
+ */
+export class StreamingEnhancer {
+    /**
+     * Add progress tracking to a readable stream
+     */
+    static addProgressTracking(stream, callback, options) {
+        const streamId = options?.streamId || `stream_${Date.now()}`;
+        const startTime = Date.now();
+        let chunkCount = 0;
+        let totalBytes = 0;
+        let lastProgressTime = startTime;
+        return new ReadableStream({
+            start(controller) {
+                if (callback) {
+                    callback({
+                        chunkCount: 0,
+                        totalBytes: 0,
+                        chunkSize: 0,
+                        elapsedTime: 0,
+                        streamId,
+                        phase: "initializing",
+                    });
+                }
+            },
+            async pull(controller) {
+                const reader = stream.getReader();
+                try {
+                    const { done, value } = await reader.read();
+                    if (done) {
+                        controller.close();
+                        if (callback) {
+                            const elapsedTime = Date.now() - startTime;
+                            callback({
+                                chunkCount,
+                                totalBytes,
+                                chunkSize: totalBytes > 0 ? Math.round(totalBytes / chunkCount) : 0,
+                                elapsedTime,
+                                streamId,
+                                phase: "complete",
+                            });
+                        }
+                        return;
+                    }
+                    // Track progress
+                    chunkCount++;
+                    const chunkSize = value
+                        ? new TextEncoder().encode(value.toString()).length
+                        : 0;
+                    totalBytes += chunkSize;
+                    const currentTime = Date.now();
+                    const elapsedTime = currentTime - startTime;
+                    const timeSinceLastProgress = currentTime - lastProgressTime;
+                    // Call progress callback
+                    if (callback && (timeSinceLastProgress > 100 || chunkCount === 1)) {
+                        // Throttle to max 10 calls/second
+                        const estimatedRemaining = StreamingEnhancer.estimateRemainingTime(totalBytes, elapsedTime, chunkCount);
+                        callback({
+                            chunkCount,
+                            totalBytes,
+                            chunkSize,
+                            elapsedTime,
+                            estimatedRemaining,
+                            streamId,
+                            phase: "streaming",
+                        });
+                        lastProgressTime = currentTime;
+                    }
+                    controller.enqueue(value);
+                }
+                catch (error) {
+                    controller.error(error);
+                    if (callback) {
+                        callback({
+                            chunkCount,
+                            totalBytes,
+                            chunkSize: 0,
+                            elapsedTime: Date.now() - startTime,
+                            streamId,
+                            phase: "error",
+                        });
+                    }
+                }
+                finally {
+                    reader.releaseLock();
+                }
+            },
+        });
+    }
+    /**
+     * Add metadata headers to streaming response
+     */
+    static addMetadataHeaders(response, stats) {
+        const headers = new Headers(response.headers);
+        headers.set("X-Streaming-Chunks", stats.totalChunks.toString());
+        headers.set("X-Streaming-Bytes", stats.totalBytes.toString());
+        headers.set("X-Streaming-Duration", stats.duration.toString());
+        headers.set("X-Streaming-Avg-Chunk-Size", stats.averageChunkSize.toString());
+        headers.set("X-Streaming-Provider", stats.provider);
+        headers.set("X-Streaming-Model", stats.model);
+        headers.set("X-Streaming-Throughput", Math.round(stats.totalBytes / (stats.duration / 1000)).toString());
+        return new Response(response.body, {
+            status: response.status,
+            statusText: response.statusText,
+            headers,
+        });
+    }
+    /**
+     * Create progress callback for UI integration
+     */
+    static createProgressCallback(ui) {
+        return (progress) => {
+            try {
+                ui.onProgress(progress);
+                if (progress.phase === "complete") {
+                    ui.onComplete({
+                        startTime: Date.now() - progress.elapsedTime,
+                        endTime: Date.now(),
+                        totalDuration: progress.elapsedTime,
+                        averageChunkSize: Math.round(progress.totalBytes / progress.chunkCount),
+                        maxChunkSize: progress.chunkSize, // This would need to be tracked better in real implementation
+                        minChunkSize: progress.chunkSize, // This would need to be tracked better in real implementation
+                        throughputBytesPerSecond: Math.round(progress.totalBytes / (progress.elapsedTime / 1000)),
+                        streamingProvider: "unknown", // Would be passed from provider
+                        modelUsed: "unknown", // Would be passed from provider
+                    });
+                }
+            }
+            catch (error) {
+                ui.onError(error);
+            }
+        };
+    }
+    /**
+     * Estimate remaining time based on current progress
+     */
+    static estimateRemainingTime(totalBytes, elapsedTime, chunkCount) {
+        if (elapsedTime < 1000 || chunkCount < 3) {
+            return undefined; // Not enough data for estimation
+        }
+        const bytesPerMs = totalBytes / elapsedTime;
+        const avgChunkSize = totalBytes / chunkCount;
+        // Rough estimation assuming similar chunk sizes going forward
+        // This is a simple heuristic - real implementation might be more sophisticated
+        const estimatedTotalBytes = avgChunkSize * (chunkCount + 10); // Assume 10 more chunks
+        const remainingBytes = estimatedTotalBytes - totalBytes;
+        return Math.max(0, remainingBytes / bytesPerMs);
+    }
+    /**
+     * Create enhanced streaming configuration
+     */
+    static createStreamingConfig(options) {
+        return {
+            progressTracking: options.enableProgressTracking ?? false,
+            callback: options.progressCallback,
+            metadata: options.includeStreamingMetadata ?? false,
+            bufferSize: options.streamingBufferSize ?? 8192,
+            headers: options.enableStreamingHeaders ?? false,
+        };
+    }
+}
+/**
+ * Streaming performance monitor for debugging and optimization
+ */
+export class StreamingMonitor {
+    static activeStreams = new Map();
+    static registerStream(streamId) {
+        this.activeStreams.set(streamId, {
+            chunkCount: 0,
+            totalBytes: 0,
+            chunkSize: 0,
+            elapsedTime: 0,
+            streamId,
+            phase: "initializing",
+        });
+    }
+    static updateStream(streamId, progress) {
+        this.activeStreams.set(streamId, progress);
+    }
+    static completeStream(streamId) {
+        this.activeStreams.delete(streamId);
+    }
+    static getActiveStreams() {
+        return Array.from(this.activeStreams.values());
+    }
+    static getStreamStats() {
+        const streams = this.getActiveStreams();
+        return {
+            activeCount: streams.length,
+            totalBytesActive: streams.reduce((sum, s) => sum + s.totalBytes, 0),
+            averageProgress: streams.length > 0
+                ? streams.reduce((sum, s) => sum + s.elapsedTime, 0) / streams.length
+                : 0,
+        };
+    }
+}
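
To illustrate the implementation above, a small sketch that wraps a synthetic text stream with addProgressTracking and consumes it; in practice the source stream would come from a provider's streaming response. The import path is an assumption, as before.

import { StreamingEnhancer } from "@juspay/neurolink/dist/utils/streaming-utils.js"; // assumed path

// Synthetic source stream standing in for a provider's streaming response.
const source = new ReadableStream<string>({
  start(controller) {
    controller.enqueue("Hello, ");
    controller.enqueue("world!");
    controller.close();
  },
});

const tracked = StreamingEnhancer.addProgressTracking(
  source,
  (progress) => console.log(progress.phase, progress.chunkCount, progress.totalBytes),
  { streamId: "demo-stream" },
);

// Reading the wrapped stream drives the callbacks: "initializing" -> "streaming" -> "complete".
const reader = tracked.getReader();
for (let r = await reader.read(); !r.done; r = await reader.read()) {
  process.stdout.write(String(r.value));
}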
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@juspay/neurolink",
-  "version": "3.0.1",
+  "version": "4.0.0",
   "description": "Universal AI Development Platform with working MCP integration, multi-provider support, and professional CLI. Built-in tools operational, 58+ external MCP servers discoverable. Connect to filesystem, GitHub, database operations, and more. Build, test, and deploy AI applications with 9 major providers: OpenAI, Anthropic, Google AI, AWS Bedrock, Azure, Hugging Face, Ollama, and Mistral AI.",
   "author": {
     "name": "Juspay Technologies",
@@ -132,16 +132,32 @@
     "@google/generative-ai": "^0.24.1",
     "@huggingface/inference": "^2.8.0",
     "@modelcontextprotocol/sdk": "^1.13.0",
+    "@opentelemetry/api": "^1.9.0",
+    "@opentelemetry/auto-instrumentations-node": "^0.52.1",
+    "@opentelemetry/exporter-logs-otlp-http": "^0.54.2",
+    "@opentelemetry/exporter-metrics-otlp-http": "^0.54.2",
+    "@opentelemetry/exporter-trace-otlp-http": "^0.54.2",
+    "@opentelemetry/instrumentation-fetch": "^0.54.2",
+    "@opentelemetry/instrumentation-http": "^0.54.2",
+    "@opentelemetry/propagator-b3": "^1.30.1",
+    "@opentelemetry/resource-detector-aws": "^1.12.0",
+    "@opentelemetry/resources": "^1.30.1",
+    "@opentelemetry/sdk-logs": "^0.54.2",
+    "@opentelemetry/sdk-metrics": "^1.30.1",
+    "@opentelemetry/sdk-node": "^0.54.2",
+    "@opentelemetry/semantic-conventions": "^1.34.0",
+    "@types/ws": "^8.18.1",
     "ai": "^4.0.0",
     "chalk": "^5.3.0",
     "cors": "^2.8.5",
     "dotenv": "^16.5.0",
     "express": "^5.1.0",
     "inquirer": "^9.2.15",
-    "undici": "^6.6.2",
     "ora": "^7.0.1",
     "playwright": "^1.52.0",
+    "undici": "^6.6.2",
     "uuid": "^11.1.0",
+    "ws": "^8.18.3",
     "yargs": "^17.7.2",
     "zod": "^3.22.0",
     "zod-to-json-schema": "^3.24.5"
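
The new @opentelemetry/* and ws dependencies above back the telemetry and WebSocket modules added in this release. For reference, a sketch of the environment variables the TelemetryService reads; the variable names come from the code earlier in this diff, while the values shown are illustrative only.

// Illustrative values only; setting either of the first two variables enables telemetry.
process.env.NEUROLINK_TELEMETRY_ENABLED = "true";
process.env.OTEL_EXPORTER_OTLP_ENDPOINT = "http://localhost:4318"; // example OTLP/HTTP collector
process.env.OTEL_EXPORTER_OTLP_TRACES_ENDPOINT = "http://localhost:4318/v1/traces"; // optional override
process.env.OTEL_SERVICE_NAME = "my-neurolink-app"; // defaults to "neurolink-ai"
process.env.OTEL_SERVICE_VERSION = "1.2.3"; // defaults to "3.0.1" in this build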