@juspay/neurolink 9.55.0 → 9.55.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +4 -0
- package/dist/browser/neurolink.min.js +279 -277
- package/dist/cli/commands/proxy.js +1 -1
- package/dist/lib/neurolink.js +10 -2
- package/dist/lib/services/server/ai/observability/instrumentation.d.ts +2 -3
- package/dist/lib/services/server/ai/observability/instrumentation.js +22 -18
- package/dist/lib/tasks/backends/bullmqBackend.js +15 -3
- package/dist/neurolink.js +10 -2
- package/dist/services/server/ai/observability/instrumentation.d.ts +2 -3
- package/dist/services/server/ai/observability/instrumentation.js +22 -18
- package/dist/tasks/backends/bullmqBackend.js +15 -3
- package/package.json +5 -5
- package/dist/lib/utils/imageCompressor.d.ts +0 -25
- package/dist/lib/utils/imageCompressor.js +0 -141
- package/dist/utils/imageCompressor.d.ts +0 -25
- package/dist/utils/imageCompressor.js +0 -140
|
@@ -776,7 +776,7 @@ async function initializeProxyOpenTelemetry() {
|
|
|
776
776
|
const observabilityConfig = buildObservabilityConfigFromEnv();
|
|
777
777
|
const langfuseConfig = observabilityConfig?.langfuse;
|
|
778
778
|
const langfuseEnabled = langfuseConfig?.enabled === true;
|
|
779
|
-
initializeOpenTelemetry({
|
|
779
|
+
await initializeOpenTelemetry({
|
|
780
780
|
enabled: langfuseEnabled,
|
|
781
781
|
publicKey: langfuseConfig?.publicKey || "",
|
|
782
782
|
secretKey: langfuseConfig?.secretKey || "",
|
package/dist/lib/neurolink.js
CHANGED
|
@@ -1472,7 +1472,11 @@ Current user's request: ${currentInput}`;
|
|
|
1472
1472
|
message: "Starting Langfuse observability initialization",
|
|
1473
1473
|
});
|
|
1474
1474
|
// Initialize OpenTelemetry (sets defaults from config)
|
|
1475
|
-
initializeOpenTelemetry(langfuseConfig)
|
|
1475
|
+
void initializeOpenTelemetry(langfuseConfig).catch((err) => {
|
|
1476
|
+
logger.error("[NeuroLink] OpenTelemetry initialization failed", {
|
|
1477
|
+
error: err instanceof Error ? err.message : String(err),
|
|
1478
|
+
});
|
|
1479
|
+
});
|
|
1476
1480
|
const healthStatus = getLangfuseHealthStatus();
|
|
1477
1481
|
const langfuseInitDurationNs = process.hrtime.bigint() - langfuseInitStartTime;
|
|
1478
1482
|
if (healthStatus.initialized &&
|
|
@@ -2234,7 +2238,11 @@ Current user's request: ${currentInput}`;
|
|
|
2234
2238
|
try {
|
|
2235
2239
|
const langfuseConfig = this.observabilityConfig?.langfuse;
|
|
2236
2240
|
if (langfuseConfig?.enabled) {
|
|
2237
|
-
initializeOpenTelemetry(langfuseConfig)
|
|
2241
|
+
void initializeOpenTelemetry(langfuseConfig).catch((err) => {
|
|
2242
|
+
logger.error("[NeuroLink] OpenTelemetry initialization failed", {
|
|
2243
|
+
error: err instanceof Error ? err.message : String(err),
|
|
2244
|
+
});
|
|
2245
|
+
});
|
|
2238
2246
|
logger.debug("[NeuroLink] Langfuse observability initialized via public method");
|
|
2239
2247
|
}
|
|
2240
2248
|
else {
|
|
@@ -6,7 +6,6 @@
|
|
|
6
6
|
*
|
|
7
7
|
* Flow: Vercel AI SDK → OpenTelemetry Spans → LangfuseSpanProcessor → Langfuse Platform
|
|
8
8
|
*/
|
|
9
|
-
import { LangfuseSpanProcessor } from "@langfuse/otel";
|
|
10
9
|
import { trace } from "@opentelemetry/api";
|
|
11
10
|
import { LoggerProvider } from "@opentelemetry/sdk-logs";
|
|
12
11
|
import { type SpanProcessor } from "@opentelemetry/sdk-trace-base";
|
|
@@ -26,7 +25,7 @@ import type { LangfuseConfig, LangfuseContext } from "../../../../types/index.js
|
|
|
26
25
|
*
|
|
27
26
|
* @param config - Langfuse configuration passed from parent application
|
|
28
27
|
*/
|
|
29
|
-
export declare function initializeOpenTelemetry(config: LangfuseConfig): void;
|
|
28
|
+
export declare function initializeOpenTelemetry(config: LangfuseConfig): Promise<void>;
|
|
30
29
|
/**
|
|
31
30
|
* Flush all pending spans to Langfuse
|
|
32
31
|
*/
|
|
@@ -38,7 +37,7 @@ export declare function shutdownOpenTelemetry(): Promise<void>;
|
|
|
38
37
|
/**
|
|
39
38
|
* Get the Langfuse span processor
|
|
40
39
|
*/
|
|
41
|
-
export declare function getLangfuseSpanProcessor():
|
|
40
|
+
export declare function getLangfuseSpanProcessor(): SpanProcessor | null;
|
|
42
41
|
/**
|
|
43
42
|
* Get the tracer provider
|
|
44
43
|
*/
|
|
@@ -6,7 +6,6 @@
|
|
|
6
6
|
*
|
|
7
7
|
* Flow: Vercel AI SDK → OpenTelemetry Spans → LangfuseSpanProcessor → Langfuse Platform
|
|
8
8
|
*/
|
|
9
|
-
import { LangfuseSpanProcessor } from "@langfuse/otel";
|
|
10
9
|
import { metrics, SpanStatusCode, trace } from "@opentelemetry/api";
|
|
11
10
|
import { W3CTraceContextPropagator } from "@opentelemetry/core";
|
|
12
11
|
import { OTLPLogExporter } from "@opentelemetry/exporter-logs-otlp-http";
|
|
@@ -503,8 +502,19 @@ class ContextEnricher {
|
|
|
503
502
|
return Promise.resolve();
|
|
504
503
|
}
|
|
505
504
|
}
|
|
506
|
-
function createLangfuseProcessor(config) {
|
|
507
|
-
|
|
505
|
+
async function createLangfuseProcessor(config) {
|
|
506
|
+
let mod;
|
|
507
|
+
try {
|
|
508
|
+
mod = await import(/* @vite-ignore */ "@langfuse/otel");
|
|
509
|
+
}
|
|
510
|
+
catch (err) {
|
|
511
|
+
const e = err instanceof Error ? err : null;
|
|
512
|
+
if (e?.code === "ERR_MODULE_NOT_FOUND" && e.message.includes("langfuse")) {
|
|
513
|
+
throw new Error('Langfuse observability requires "@langfuse/otel". Install it with:\n pnpm add @langfuse/otel', { cause: err });
|
|
514
|
+
}
|
|
515
|
+
throw err;
|
|
516
|
+
}
|
|
517
|
+
return new mod.LangfuseSpanProcessor({
|
|
508
518
|
publicKey: config.publicKey,
|
|
509
519
|
secretKey: config.secretKey,
|
|
510
520
|
baseUrl: config.baseUrl || "https://cloud.langfuse.com",
|
|
@@ -513,7 +523,7 @@ function createLangfuseProcessor(config) {
|
|
|
513
523
|
shouldExportSpan: () => true,
|
|
514
524
|
});
|
|
515
525
|
}
|
|
516
|
-
function initializeExternalOpenTelemetryMode(config, resource, otlpEndpoint, serviceName, langfuseRequested, hasLangfuseCreds) {
|
|
526
|
+
async function initializeExternalOpenTelemetryMode(config, resource, otlpEndpoint, serviceName, langfuseRequested, hasLangfuseCreds) {
|
|
517
527
|
if (langfuseRequested && !hasLangfuseCreds) {
|
|
518
528
|
if (!otlpEndpoint) {
|
|
519
529
|
logger.warn(`${LOG_PREFIX} External provider mode requested Langfuse but credentials are missing, and no OTLP endpoint is configured; skipping initialization`, {
|
|
@@ -535,7 +545,7 @@ function initializeExternalOpenTelemetryMode(config, resource, otlpEndpoint, ser
|
|
|
535
545
|
isCredentialsValid = hasLangfuseCreds;
|
|
536
546
|
langfuseProcessor =
|
|
537
547
|
langfuseRequested && hasLangfuseCreds
|
|
538
|
-
? createLangfuseProcessor(config)
|
|
548
|
+
? await createLangfuseProcessor(config)
|
|
539
549
|
: null;
|
|
540
550
|
usingExternalProvider = true;
|
|
541
551
|
isInitialized = true;
|
|
@@ -548,11 +558,9 @@ function initializeExternalOpenTelemetryMode(config, resource, otlpEndpoint, ser
|
|
|
548
558
|
// Auto-detect: skip if consumer already registered a LangfuseSpanProcessor.
|
|
549
559
|
//
|
|
550
560
|
// Detection strategy (ordered by robustness):
|
|
551
|
-
// 1.
|
|
552
|
-
// the same @langfuse/otel package instance (same module identity).
|
|
553
|
-
// 2. Duck-type check for Langfuse-specific public member
|
|
561
|
+
// 1. Duck-type check for Langfuse-specific public member
|
|
554
562
|
// (`langfuseClient` property) — survives minification.
|
|
555
|
-
//
|
|
563
|
+
// 2. `constructor.name === "LangfuseSpanProcessor"` — last resort,
|
|
556
564
|
// brittle under minification or bundler renaming.
|
|
557
565
|
//
|
|
558
566
|
// NOTE: `_registeredSpanProcessors` is an internal OpenTelemetry field.
|
|
@@ -566,10 +574,6 @@ function initializeExternalOpenTelemetryMode(config, resource, otlpEndpoint, ser
|
|
|
566
574
|
if (p === null || p === undefined || typeof p !== "object") {
|
|
567
575
|
return false;
|
|
568
576
|
}
|
|
569
|
-
// Prefer instanceof — works when same @langfuse/otel package is shared
|
|
570
|
-
if (p instanceof LangfuseSpanProcessor) {
|
|
571
|
-
return true;
|
|
572
|
-
}
|
|
573
577
|
// Duck-type: Langfuse processor exposes a langfuseClient property
|
|
574
578
|
if ("langfuseClient" in p) {
|
|
575
579
|
return true;
|
|
@@ -626,7 +630,7 @@ function initializeExternalOpenTelemetryMode(config, resource, otlpEndpoint, ser
|
|
|
626
630
|
isInitialized = true;
|
|
627
631
|
}
|
|
628
632
|
}
|
|
629
|
-
function initializeStandaloneOpenTelemetryMode(config, resource, otlpEndpoint, serviceName, langfuseRequested, hasLangfuseCreds) {
|
|
633
|
+
async function initializeStandaloneOpenTelemetryMode(config, resource, otlpEndpoint, serviceName, langfuseRequested, hasLangfuseCreds) {
|
|
630
634
|
if ((!langfuseRequested || !hasLangfuseCreds) && !otlpEndpoint) {
|
|
631
635
|
if (langfuseRequested && !hasLangfuseCreds) {
|
|
632
636
|
logger.warn(`${LOG_PREFIX} Langfuse requested but credentials are missing, and no OTLP endpoint is configured; skipping initialization`, {
|
|
@@ -652,7 +656,7 @@ function initializeStandaloneOpenTelemetryMode(config, resource, otlpEndpoint, s
|
|
|
652
656
|
isCredentialsValid = hasLangfuseCreds;
|
|
653
657
|
langfuseProcessor =
|
|
654
658
|
langfuseRequested && hasLangfuseCreds
|
|
655
|
-
? createLangfuseProcessor(config)
|
|
659
|
+
? await createLangfuseProcessor(config)
|
|
656
660
|
: null;
|
|
657
661
|
logger.debug(`${LOG_PREFIX} Standalone observability mode`, {
|
|
658
662
|
langfuseEnabled: !!langfuseProcessor,
|
|
@@ -741,7 +745,7 @@ function initializeStandaloneOpenTelemetryMode(config, resource, otlpEndpoint, s
|
|
|
741
745
|
*
|
|
742
746
|
* @param config - Langfuse configuration passed from parent application
|
|
743
747
|
*/
|
|
744
|
-
export function initializeOpenTelemetry(config) {
|
|
748
|
+
export async function initializeOpenTelemetry(config) {
|
|
745
749
|
// Guard against multiple initializations — but always update config
|
|
746
750
|
// so that later NeuroLink instances can change traceNameFormat,
|
|
747
751
|
// autoDetectOperationName, and other configuration preferences
|
|
@@ -771,10 +775,10 @@ export function initializeOpenTelemetry(config) {
|
|
|
771
775
|
const serviceName = process.env.OTEL_SERVICE_NAME || "neurolink";
|
|
772
776
|
const resource = createOtelResource(config, serviceName);
|
|
773
777
|
if (shouldUseExternal) {
|
|
774
|
-
initializeExternalOpenTelemetryMode(config, resource, otlpEndpoint, serviceName, langfuseRequested, hasLangfuseCreds);
|
|
778
|
+
await initializeExternalOpenTelemetryMode(config, resource, otlpEndpoint, serviceName, langfuseRequested, hasLangfuseCreds);
|
|
775
779
|
return;
|
|
776
780
|
}
|
|
777
|
-
initializeStandaloneOpenTelemetryMode(config, resource, otlpEndpoint, serviceName, langfuseRequested, hasLangfuseCreds);
|
|
781
|
+
await initializeStandaloneOpenTelemetryMode(config, resource, otlpEndpoint, serviceName, langfuseRequested, hasLangfuseCreds);
|
|
778
782
|
}
|
|
779
783
|
/**
|
|
780
784
|
* Flush all pending spans to Langfuse
|
|
@@ -6,10 +6,21 @@
|
|
|
6
6
|
* - One-shot tasks → BullMQ delayed jobs
|
|
7
7
|
* - Survives process restarts (Redis-persisted)
|
|
8
8
|
*/
|
|
9
|
-
import { Queue, Worker } from "bullmq";
|
|
10
9
|
import { logger } from "../../utils/logger.js";
|
|
11
10
|
import { TaskError } from "../errors.js";
|
|
12
11
|
import { TASK_DEFAULTS, } from "../../types/index.js";
|
|
12
|
+
async function loadBullMQ() {
|
|
13
|
+
try {
|
|
14
|
+
return await import(/* @vite-ignore */ "bullmq");
|
|
15
|
+
}
|
|
16
|
+
catch (err) {
|
|
17
|
+
const e = err instanceof Error ? err : null;
|
|
18
|
+
if (e?.code === "ERR_MODULE_NOT_FOUND" && e.message.includes("bullmq")) {
|
|
19
|
+
throw new Error('BullMQ task backend requires the "bullmq" package. Install it with:\n pnpm add bullmq', { cause: err });
|
|
20
|
+
}
|
|
21
|
+
throw err;
|
|
22
|
+
}
|
|
23
|
+
}
|
|
13
24
|
const QUEUE_NAME = "neurolink-tasks";
|
|
14
25
|
export class BullMQBackend {
|
|
15
26
|
name = "bullmq";
|
|
@@ -21,9 +32,10 @@ export class BullMQBackend {
|
|
|
21
32
|
this.config = config;
|
|
22
33
|
}
|
|
23
34
|
async initialize() {
|
|
35
|
+
const { Queue: BullQueue, Worker: BullWorker } = await loadBullMQ();
|
|
24
36
|
const connection = this.getConnectionConfig();
|
|
25
|
-
this.queue = new Queue(QUEUE_NAME, { connection });
|
|
26
|
-
this.worker = new Worker(QUEUE_NAME, async (job) => {
|
|
37
|
+
this.queue = new BullQueue(QUEUE_NAME, { connection });
|
|
38
|
+
this.worker = new BullWorker(QUEUE_NAME, async (job) => {
|
|
27
39
|
const taskId = job.data.taskId;
|
|
28
40
|
const task = job.data.task;
|
|
29
41
|
const executor = this.executors.get(taskId);
|
package/dist/neurolink.js
CHANGED
|
@@ -1472,7 +1472,11 @@ Current user's request: ${currentInput}`;
|
|
|
1472
1472
|
message: "Starting Langfuse observability initialization",
|
|
1473
1473
|
});
|
|
1474
1474
|
// Initialize OpenTelemetry (sets defaults from config)
|
|
1475
|
-
initializeOpenTelemetry(langfuseConfig)
|
|
1475
|
+
void initializeOpenTelemetry(langfuseConfig).catch((err) => {
|
|
1476
|
+
logger.error("[NeuroLink] OpenTelemetry initialization failed", {
|
|
1477
|
+
error: err instanceof Error ? err.message : String(err),
|
|
1478
|
+
});
|
|
1479
|
+
});
|
|
1476
1480
|
const healthStatus = getLangfuseHealthStatus();
|
|
1477
1481
|
const langfuseInitDurationNs = process.hrtime.bigint() - langfuseInitStartTime;
|
|
1478
1482
|
if (healthStatus.initialized &&
|
|
@@ -2234,7 +2238,11 @@ Current user's request: ${currentInput}`;
|
|
|
2234
2238
|
try {
|
|
2235
2239
|
const langfuseConfig = this.observabilityConfig?.langfuse;
|
|
2236
2240
|
if (langfuseConfig?.enabled) {
|
|
2237
|
-
initializeOpenTelemetry(langfuseConfig)
|
|
2241
|
+
void initializeOpenTelemetry(langfuseConfig).catch((err) => {
|
|
2242
|
+
logger.error("[NeuroLink] OpenTelemetry initialization failed", {
|
|
2243
|
+
error: err instanceof Error ? err.message : String(err),
|
|
2244
|
+
});
|
|
2245
|
+
});
|
|
2238
2246
|
logger.debug("[NeuroLink] Langfuse observability initialized via public method");
|
|
2239
2247
|
}
|
|
2240
2248
|
else {
|
|
@@ -6,7 +6,6 @@
|
|
|
6
6
|
*
|
|
7
7
|
* Flow: Vercel AI SDK → OpenTelemetry Spans → LangfuseSpanProcessor → Langfuse Platform
|
|
8
8
|
*/
|
|
9
|
-
import { LangfuseSpanProcessor } from "@langfuse/otel";
|
|
10
9
|
import { trace } from "@opentelemetry/api";
|
|
11
10
|
import { LoggerProvider } from "@opentelemetry/sdk-logs";
|
|
12
11
|
import { type SpanProcessor } from "@opentelemetry/sdk-trace-base";
|
|
@@ -26,7 +25,7 @@ import type { LangfuseConfig, LangfuseContext } from "../../../../types/index.js
|
|
|
26
25
|
*
|
|
27
26
|
* @param config - Langfuse configuration passed from parent application
|
|
28
27
|
*/
|
|
29
|
-
export declare function initializeOpenTelemetry(config: LangfuseConfig): void;
|
|
28
|
+
export declare function initializeOpenTelemetry(config: LangfuseConfig): Promise<void>;
|
|
30
29
|
/**
|
|
31
30
|
* Flush all pending spans to Langfuse
|
|
32
31
|
*/
|
|
@@ -38,7 +37,7 @@ export declare function shutdownOpenTelemetry(): Promise<void>;
|
|
|
38
37
|
/**
|
|
39
38
|
* Get the Langfuse span processor
|
|
40
39
|
*/
|
|
41
|
-
export declare function getLangfuseSpanProcessor():
|
|
40
|
+
export declare function getLangfuseSpanProcessor(): SpanProcessor | null;
|
|
42
41
|
/**
|
|
43
42
|
* Get the tracer provider
|
|
44
43
|
*/
|
|
@@ -6,7 +6,6 @@
|
|
|
6
6
|
*
|
|
7
7
|
* Flow: Vercel AI SDK → OpenTelemetry Spans → LangfuseSpanProcessor → Langfuse Platform
|
|
8
8
|
*/
|
|
9
|
-
import { LangfuseSpanProcessor } from "@langfuse/otel";
|
|
10
9
|
import { metrics, SpanStatusCode, trace } from "@opentelemetry/api";
|
|
11
10
|
import { W3CTraceContextPropagator } from "@opentelemetry/core";
|
|
12
11
|
import { OTLPLogExporter } from "@opentelemetry/exporter-logs-otlp-http";
|
|
@@ -503,8 +502,19 @@ class ContextEnricher {
|
|
|
503
502
|
return Promise.resolve();
|
|
504
503
|
}
|
|
505
504
|
}
|
|
506
|
-
function createLangfuseProcessor(config) {
|
|
507
|
-
|
|
505
|
+
async function createLangfuseProcessor(config) {
|
|
506
|
+
let mod;
|
|
507
|
+
try {
|
|
508
|
+
mod = await import(/* @vite-ignore */ "@langfuse/otel");
|
|
509
|
+
}
|
|
510
|
+
catch (err) {
|
|
511
|
+
const e = err instanceof Error ? err : null;
|
|
512
|
+
if (e?.code === "ERR_MODULE_NOT_FOUND" && e.message.includes("langfuse")) {
|
|
513
|
+
throw new Error('Langfuse observability requires "@langfuse/otel". Install it with:\n pnpm add @langfuse/otel', { cause: err });
|
|
514
|
+
}
|
|
515
|
+
throw err;
|
|
516
|
+
}
|
|
517
|
+
return new mod.LangfuseSpanProcessor({
|
|
508
518
|
publicKey: config.publicKey,
|
|
509
519
|
secretKey: config.secretKey,
|
|
510
520
|
baseUrl: config.baseUrl || "https://cloud.langfuse.com",
|
|
@@ -513,7 +523,7 @@ function createLangfuseProcessor(config) {
|
|
|
513
523
|
shouldExportSpan: () => true,
|
|
514
524
|
});
|
|
515
525
|
}
|
|
516
|
-
function initializeExternalOpenTelemetryMode(config, resource, otlpEndpoint, serviceName, langfuseRequested, hasLangfuseCreds) {
|
|
526
|
+
async function initializeExternalOpenTelemetryMode(config, resource, otlpEndpoint, serviceName, langfuseRequested, hasLangfuseCreds) {
|
|
517
527
|
if (langfuseRequested && !hasLangfuseCreds) {
|
|
518
528
|
if (!otlpEndpoint) {
|
|
519
529
|
logger.warn(`${LOG_PREFIX} External provider mode requested Langfuse but credentials are missing, and no OTLP endpoint is configured; skipping initialization`, {
|
|
@@ -535,7 +545,7 @@ function initializeExternalOpenTelemetryMode(config, resource, otlpEndpoint, ser
|
|
|
535
545
|
isCredentialsValid = hasLangfuseCreds;
|
|
536
546
|
langfuseProcessor =
|
|
537
547
|
langfuseRequested && hasLangfuseCreds
|
|
538
|
-
? createLangfuseProcessor(config)
|
|
548
|
+
? await createLangfuseProcessor(config)
|
|
539
549
|
: null;
|
|
540
550
|
usingExternalProvider = true;
|
|
541
551
|
isInitialized = true;
|
|
@@ -548,11 +558,9 @@ function initializeExternalOpenTelemetryMode(config, resource, otlpEndpoint, ser
|
|
|
548
558
|
// Auto-detect: skip if consumer already registered a LangfuseSpanProcessor.
|
|
549
559
|
//
|
|
550
560
|
// Detection strategy (ordered by robustness):
|
|
551
|
-
// 1.
|
|
552
|
-
// the same @langfuse/otel package instance (same module identity).
|
|
553
|
-
// 2. Duck-type check for Langfuse-specific public member
|
|
561
|
+
// 1. Duck-type check for Langfuse-specific public member
|
|
554
562
|
// (`langfuseClient` property) — survives minification.
|
|
555
|
-
//
|
|
563
|
+
// 2. `constructor.name === "LangfuseSpanProcessor"` — last resort,
|
|
556
564
|
// brittle under minification or bundler renaming.
|
|
557
565
|
//
|
|
558
566
|
// NOTE: `_registeredSpanProcessors` is an internal OpenTelemetry field.
|
|
@@ -566,10 +574,6 @@ function initializeExternalOpenTelemetryMode(config, resource, otlpEndpoint, ser
|
|
|
566
574
|
if (p === null || p === undefined || typeof p !== "object") {
|
|
567
575
|
return false;
|
|
568
576
|
}
|
|
569
|
-
// Prefer instanceof — works when same @langfuse/otel package is shared
|
|
570
|
-
if (p instanceof LangfuseSpanProcessor) {
|
|
571
|
-
return true;
|
|
572
|
-
}
|
|
573
577
|
// Duck-type: Langfuse processor exposes a langfuseClient property
|
|
574
578
|
if ("langfuseClient" in p) {
|
|
575
579
|
return true;
|
|
@@ -626,7 +630,7 @@ function initializeExternalOpenTelemetryMode(config, resource, otlpEndpoint, ser
|
|
|
626
630
|
isInitialized = true;
|
|
627
631
|
}
|
|
628
632
|
}
|
|
629
|
-
function initializeStandaloneOpenTelemetryMode(config, resource, otlpEndpoint, serviceName, langfuseRequested, hasLangfuseCreds) {
|
|
633
|
+
async function initializeStandaloneOpenTelemetryMode(config, resource, otlpEndpoint, serviceName, langfuseRequested, hasLangfuseCreds) {
|
|
630
634
|
if ((!langfuseRequested || !hasLangfuseCreds) && !otlpEndpoint) {
|
|
631
635
|
if (langfuseRequested && !hasLangfuseCreds) {
|
|
632
636
|
logger.warn(`${LOG_PREFIX} Langfuse requested but credentials are missing, and no OTLP endpoint is configured; skipping initialization`, {
|
|
@@ -652,7 +656,7 @@ function initializeStandaloneOpenTelemetryMode(config, resource, otlpEndpoint, s
|
|
|
652
656
|
isCredentialsValid = hasLangfuseCreds;
|
|
653
657
|
langfuseProcessor =
|
|
654
658
|
langfuseRequested && hasLangfuseCreds
|
|
655
|
-
? createLangfuseProcessor(config)
|
|
659
|
+
? await createLangfuseProcessor(config)
|
|
656
660
|
: null;
|
|
657
661
|
logger.debug(`${LOG_PREFIX} Standalone observability mode`, {
|
|
658
662
|
langfuseEnabled: !!langfuseProcessor,
|
|
@@ -741,7 +745,7 @@ function initializeStandaloneOpenTelemetryMode(config, resource, otlpEndpoint, s
|
|
|
741
745
|
*
|
|
742
746
|
* @param config - Langfuse configuration passed from parent application
|
|
743
747
|
*/
|
|
744
|
-
export function initializeOpenTelemetry(config) {
|
|
748
|
+
export async function initializeOpenTelemetry(config) {
|
|
745
749
|
// Guard against multiple initializations — but always update config
|
|
746
750
|
// so that later NeuroLink instances can change traceNameFormat,
|
|
747
751
|
// autoDetectOperationName, and other configuration preferences
|
|
@@ -771,10 +775,10 @@ export function initializeOpenTelemetry(config) {
|
|
|
771
775
|
const serviceName = process.env.OTEL_SERVICE_NAME || "neurolink";
|
|
772
776
|
const resource = createOtelResource(config, serviceName);
|
|
773
777
|
if (shouldUseExternal) {
|
|
774
|
-
initializeExternalOpenTelemetryMode(config, resource, otlpEndpoint, serviceName, langfuseRequested, hasLangfuseCreds);
|
|
778
|
+
await initializeExternalOpenTelemetryMode(config, resource, otlpEndpoint, serviceName, langfuseRequested, hasLangfuseCreds);
|
|
775
779
|
return;
|
|
776
780
|
}
|
|
777
|
-
initializeStandaloneOpenTelemetryMode(config, resource, otlpEndpoint, serviceName, langfuseRequested, hasLangfuseCreds);
|
|
781
|
+
await initializeStandaloneOpenTelemetryMode(config, resource, otlpEndpoint, serviceName, langfuseRequested, hasLangfuseCreds);
|
|
778
782
|
}
|
|
779
783
|
/**
|
|
780
784
|
* Flush all pending spans to Langfuse
|
|
@@ -6,10 +6,21 @@
|
|
|
6
6
|
* - One-shot tasks → BullMQ delayed jobs
|
|
7
7
|
* - Survives process restarts (Redis-persisted)
|
|
8
8
|
*/
|
|
9
|
-
import { Queue, Worker } from "bullmq";
|
|
10
9
|
import { logger } from "../../utils/logger.js";
|
|
11
10
|
import { TaskError } from "../errors.js";
|
|
12
11
|
import { TASK_DEFAULTS, } from "../../types/index.js";
|
|
12
|
+
async function loadBullMQ() {
|
|
13
|
+
try {
|
|
14
|
+
return await import(/* @vite-ignore */ "bullmq");
|
|
15
|
+
}
|
|
16
|
+
catch (err) {
|
|
17
|
+
const e = err instanceof Error ? err : null;
|
|
18
|
+
if (e?.code === "ERR_MODULE_NOT_FOUND" && e.message.includes("bullmq")) {
|
|
19
|
+
throw new Error('BullMQ task backend requires the "bullmq" package. Install it with:\n pnpm add bullmq', { cause: err });
|
|
20
|
+
}
|
|
21
|
+
throw err;
|
|
22
|
+
}
|
|
23
|
+
}
|
|
13
24
|
const QUEUE_NAME = "neurolink-tasks";
|
|
14
25
|
export class BullMQBackend {
|
|
15
26
|
name = "bullmq";
|
|
@@ -21,9 +32,10 @@ export class BullMQBackend {
|
|
|
21
32
|
this.config = config;
|
|
22
33
|
}
|
|
23
34
|
async initialize() {
|
|
35
|
+
const { Queue: BullQueue, Worker: BullWorker } = await loadBullMQ();
|
|
24
36
|
const connection = this.getConnectionConfig();
|
|
25
|
-
this.queue = new Queue(QUEUE_NAME, { connection });
|
|
26
|
-
this.worker = new Worker(QUEUE_NAME, async (job) => {
|
|
37
|
+
this.queue = new BullQueue(QUEUE_NAME, { connection });
|
|
38
|
+
this.worker = new BullWorker(QUEUE_NAME, async (job) => {
|
|
27
39
|
const taskId = job.data.taskId;
|
|
28
40
|
const task = job.data.task;
|
|
29
41
|
const executor = this.executors.get(taskId);
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@juspay/neurolink",
|
|
3
|
-
"version": "9.55.
|
|
3
|
+
"version": "9.55.2",
|
|
4
4
|
"packageManager": "pnpm@10.15.1",
|
|
5
5
|
"description": "Universal AI Development Platform with working MCP integration, multi-provider support, and professional CLI. Built-in tools operational, 58+ external MCP servers discoverable. Connect to filesystem, GitHub, database operations, and more. Build, test, and deploy AI applications with 13 providers: OpenAI, Anthropic, Google AI, AWS Bedrock, Azure, Hugging Face, Ollama, and Mistral AI.",
|
|
6
6
|
"author": {
|
|
@@ -212,7 +212,6 @@
|
|
|
212
212
|
"@google/genai": "^1.43.0",
|
|
213
213
|
"@huggingface/inference": "^4.13.14",
|
|
214
214
|
"@juspay/hippocampus": "^0.1.4",
|
|
215
|
-
"@langfuse/otel": "^5.0.1",
|
|
216
215
|
"@modelcontextprotocol/sdk": "^1.27.1",
|
|
217
216
|
"@openrouter/ai-sdk-provider": "^2.2.3",
|
|
218
217
|
"@opentelemetry/api-logs": "^0.214.0",
|
|
@@ -229,7 +228,6 @@
|
|
|
229
228
|
"@picovoice/cobra-node": "^3.0.2",
|
|
230
229
|
"adm-zip": "^0.5.16",
|
|
231
230
|
"ai": "^6.0.134",
|
|
232
|
-
"bullmq": "^5.52.2",
|
|
233
231
|
"chalk": "^5.6.2",
|
|
234
232
|
"croner": "^9.1.0",
|
|
235
233
|
"csv-parser": "^3.2.0",
|
|
@@ -249,8 +247,6 @@
|
|
|
249
247
|
"open": "^11.0.0",
|
|
250
248
|
"ora": "^9.3.0",
|
|
251
249
|
"p-limit": "^7.3.0",
|
|
252
|
-
"pdf-parse": "^2.4.5",
|
|
253
|
-
"pdf-to-img": "^5.0.0",
|
|
254
250
|
"pptxgenjs": "^4.0.1",
|
|
255
251
|
"redis": "^5.11.0",
|
|
256
252
|
"tar-stream": "^3.1.8",
|
|
@@ -275,6 +271,10 @@
|
|
|
275
271
|
}
|
|
276
272
|
},
|
|
277
273
|
"optionalDependencies": {
|
|
274
|
+
"@langfuse/otel": "^5.0.1",
|
|
275
|
+
"bullmq": "^5.52.2",
|
|
276
|
+
"pdf-parse": "^2.4.5",
|
|
277
|
+
"pdf-to-img": "^5.0.0",
|
|
278
278
|
"@fastify/cors": "^11.2.0",
|
|
279
279
|
"@fastify/rate-limit": "^10.3.0",
|
|
280
280
|
"@hono/node-server": "^1.19.9",
|
|
@@ -1,25 +0,0 @@
|
|
|
1
|
-
import type { CompressionOptions, CompressionResult, ProviderName } from "../types/index.js";
|
|
2
|
-
/**
|
|
3
|
-
* Provider-specific image size limits in bytes
|
|
4
|
-
*/
|
|
5
|
-
export declare const PROVIDER_IMAGE_LIMITS: Record<ProviderName, number>;
|
|
6
|
-
/**
|
|
7
|
-
* Compress an image to meet provider-specific size limits
|
|
8
|
-
* @param imageBuffer - Input image buffer
|
|
9
|
-
* @param options - Compression options including provider name
|
|
10
|
-
* @returns Compressed image buffer with metadata
|
|
11
|
-
*/
|
|
12
|
-
export declare function compressImage(imageBuffer: Buffer, options: CompressionOptions): Promise<CompressionResult>;
|
|
13
|
-
/**
|
|
14
|
-
* Check if an image needs compression for a specific provider
|
|
15
|
-
* @param imageBuffer - Input image buffer
|
|
16
|
-
* @param provider - AI provider name
|
|
17
|
-
* @returns True if compression is needed
|
|
18
|
-
*/
|
|
19
|
-
export declare function needsCompression(imageBuffer: Buffer, provider: ProviderName): boolean;
|
|
20
|
-
/**
|
|
21
|
-
* Get the size limit for a specific provider
|
|
22
|
-
* @param provider - AI provider name
|
|
23
|
-
* @returns Size limit in bytes
|
|
24
|
-
*/
|
|
25
|
-
export declare function getProviderSizeLimit(provider: ProviderName): number;
|
|
@@ -1,141 +0,0 @@
|
|
|
1
|
-
import sharp from "sharp";
|
|
2
|
-
import { withTimeout } from "./async/index.js";
|
|
3
|
-
const SUPPORTED_FORMATS = [
|
|
4
|
-
"jpeg",
|
|
5
|
-
"png",
|
|
6
|
-
"webp",
|
|
7
|
-
];
|
|
8
|
-
const IMAGE_COMPRESSION_TIMEOUT_MS = 30_000;
|
|
9
|
-
/**
|
|
10
|
-
* Provider-specific image size limits in bytes
|
|
11
|
-
*/
|
|
12
|
-
export const PROVIDER_IMAGE_LIMITS = {
|
|
13
|
-
openai: 20 * 1024 * 1024, // 20MB
|
|
14
|
-
"openai-compatible": 20 * 1024 * 1024, // 20MB (same as OpenAI)
|
|
15
|
-
anthropic: 5 * 1024 * 1024, // 5MB
|
|
16
|
-
"google-ai": 4 * 1024 * 1024, // 4MB
|
|
17
|
-
vertex: 4 * 1024 * 1024, // 4MB
|
|
18
|
-
bedrock: 5 * 1024 * 1024, // 5MB
|
|
19
|
-
azure: 20 * 1024 * 1024, // 20MB
|
|
20
|
-
mistral: 5 * 1024 * 1024, // 5MB
|
|
21
|
-
huggingface: 10 * 1024 * 1024, // 10MB
|
|
22
|
-
ollama: 100 * 1024 * 1024, // 100MB (local, no strict limit)
|
|
23
|
-
openrouter: 20 * 1024 * 1024, // 20MB
|
|
24
|
-
sagemaker: 5 * 1024 * 1024, // 5MB
|
|
25
|
-
litellm: 20 * 1024 * 1024, // 20MB (proxy, use OpenAI default)
|
|
26
|
-
auto: 5 * 1024 * 1024, // 5MB (conservative fallback)
|
|
27
|
-
};
|
|
28
|
-
/**
|
|
29
|
-
* Compress an image to meet provider-specific size limits
|
|
30
|
-
* @param imageBuffer - Input image buffer
|
|
31
|
-
* @param options - Compression options including provider name
|
|
32
|
-
* @returns Compressed image buffer with metadata
|
|
33
|
-
*/
|
|
34
|
-
export async function compressImage(imageBuffer, options) {
|
|
35
|
-
const { provider, quality = 80, maxDimension, format } = options;
|
|
36
|
-
const sizeLimit = PROVIDER_IMAGE_LIMITS[provider];
|
|
37
|
-
const originalSize = imageBuffer.length;
|
|
38
|
-
// Get original metadata
|
|
39
|
-
const image = sharp(imageBuffer);
|
|
40
|
-
const metadata = await withTimeout(image.metadata(), IMAGE_COMPRESSION_TIMEOUT_MS, "Timed out reading image metadata");
|
|
41
|
-
if (!metadata.width || !metadata.height) {
|
|
42
|
-
throw new Error("Unable to read image dimensions");
|
|
43
|
-
}
|
|
44
|
-
// If image is already under limit and no format conversion needed, return as-is
|
|
45
|
-
if (originalSize <= sizeLimit && !format && !maxDimension) {
|
|
46
|
-
return {
|
|
47
|
-
buffer: imageBuffer,
|
|
48
|
-
originalSize,
|
|
49
|
-
compressedSize: originalSize,
|
|
50
|
-
compressionRatio: 1,
|
|
51
|
-
metadata: {
|
|
52
|
-
width: metadata.width,
|
|
53
|
-
height: metadata.height,
|
|
54
|
-
format: metadata.format ?? "unknown",
|
|
55
|
-
},
|
|
56
|
-
};
|
|
57
|
-
}
|
|
58
|
-
// Prepare compression pipeline
|
|
59
|
-
let pipeline = sharp(imageBuffer);
|
|
60
|
-
// Resize if needed
|
|
61
|
-
if (maxDimension) {
|
|
62
|
-
const needsResize = metadata.width > maxDimension || metadata.height > maxDimension;
|
|
63
|
-
if (needsResize) {
|
|
64
|
-
pipeline = pipeline.resize(maxDimension, maxDimension, {
|
|
65
|
-
fit: "inside",
|
|
66
|
-
withoutEnlargement: true,
|
|
67
|
-
});
|
|
68
|
-
}
|
|
69
|
-
}
|
|
70
|
-
// Resolve target format — validate metadata.format against supported set
|
|
71
|
-
const rawFormat = metadata.format;
|
|
72
|
-
const targetFormat = format ??
|
|
73
|
-
(SUPPORTED_FORMATS.includes(rawFormat)
|
|
74
|
-
? rawFormat
|
|
75
|
-
: "jpeg");
|
|
76
|
-
const applyFormat = (p, q) => {
|
|
77
|
-
switch (targetFormat) {
|
|
78
|
-
case "jpeg":
|
|
79
|
-
return p.jpeg({ quality: q, mozjpeg: true });
|
|
80
|
-
case "png":
|
|
81
|
-
return p.png({ quality: q, compressionLevel: 9 });
|
|
82
|
-
case "webp":
|
|
83
|
-
return p.webp({ quality: q });
|
|
84
|
-
}
|
|
85
|
-
};
|
|
86
|
-
// Compress
|
|
87
|
-
let compressedBuffer = await withTimeout(applyFormat(pipeline, quality).toBuffer(), IMAGE_COMPRESSION_TIMEOUT_MS, "Timed out compressing image");
|
|
88
|
-
let currentQuality = quality;
|
|
89
|
-
// Iteratively reduce quality if still over limit
|
|
90
|
-
// Note: the sharp pipeline must be rebuilt on each iteration because
|
|
91
|
-
// sharp does not support modifying quality settings after creation.
|
|
92
|
-
while (compressedBuffer.length > sizeLimit && currentQuality > 10) {
|
|
93
|
-
currentQuality -= 10;
|
|
94
|
-
let p = sharp(imageBuffer);
|
|
95
|
-
if (maxDimension) {
|
|
96
|
-
p = p.resize(maxDimension, maxDimension, {
|
|
97
|
-
fit: "inside",
|
|
98
|
-
withoutEnlargement: true,
|
|
99
|
-
});
|
|
100
|
-
}
|
|
101
|
-
compressedBuffer = await withTimeout(applyFormat(p, currentQuality).toBuffer(), IMAGE_COMPRESSION_TIMEOUT_MS, "Timed out compressing image");
|
|
102
|
-
}
|
|
103
|
-
// Final check
|
|
104
|
-
if (compressedBuffer.length > sizeLimit) {
|
|
105
|
-
throw new Error(`Unable to compress image to ${sizeLimit} bytes for provider ${provider}. ` +
|
|
106
|
-
`Final size: ${compressedBuffer.length} bytes. ` +
|
|
107
|
-
`Try using a smaller image or lower maxDimension.`);
|
|
108
|
-
}
|
|
109
|
-
// Get final metadata
|
|
110
|
-
const finalMetadata = await withTimeout(sharp(compressedBuffer).metadata(), IMAGE_COMPRESSION_TIMEOUT_MS, "Timed out reading compressed image metadata");
|
|
111
|
-
return {
|
|
112
|
-
buffer: compressedBuffer,
|
|
113
|
-
originalSize,
|
|
114
|
-
compressedSize: compressedBuffer.length,
|
|
115
|
-
compressionRatio: originalSize / compressedBuffer.length,
|
|
116
|
-
metadata: {
|
|
117
|
-
width: finalMetadata.width ?? 0,
|
|
118
|
-
height: finalMetadata.height ?? 0,
|
|
119
|
-
format: targetFormat,
|
|
120
|
-
},
|
|
121
|
-
};
|
|
122
|
-
}
|
|
123
|
-
/**
|
|
124
|
-
* Check if an image needs compression for a specific provider
|
|
125
|
-
* @param imageBuffer - Input image buffer
|
|
126
|
-
* @param provider - AI provider name
|
|
127
|
-
* @returns True if compression is needed
|
|
128
|
-
*/
|
|
129
|
-
export function needsCompression(imageBuffer, provider) {
|
|
130
|
-
const sizeLimit = PROVIDER_IMAGE_LIMITS[provider];
|
|
131
|
-
return imageBuffer.length > sizeLimit;
|
|
132
|
-
}
|
|
133
|
-
/**
|
|
134
|
-
* Get the size limit for a specific provider
|
|
135
|
-
* @param provider - AI provider name
|
|
136
|
-
* @returns Size limit in bytes
|
|
137
|
-
*/
|
|
138
|
-
export function getProviderSizeLimit(provider) {
|
|
139
|
-
return PROVIDER_IMAGE_LIMITS[provider];
|
|
140
|
-
}
|
|
141
|
-
//# sourceMappingURL=imageCompressor.js.map
|
|
@@ -1,25 +0,0 @@
|
|
|
1
|
-
import type { CompressionOptions, CompressionResult, ProviderName } from "../types/index.js";
|
|
2
|
-
/**
|
|
3
|
-
* Provider-specific image size limits in bytes
|
|
4
|
-
*/
|
|
5
|
-
export declare const PROVIDER_IMAGE_LIMITS: Record<ProviderName, number>;
|
|
6
|
-
/**
|
|
7
|
-
* Compress an image to meet provider-specific size limits
|
|
8
|
-
* @param imageBuffer - Input image buffer
|
|
9
|
-
* @param options - Compression options including provider name
|
|
10
|
-
* @returns Compressed image buffer with metadata
|
|
11
|
-
*/
|
|
12
|
-
export declare function compressImage(imageBuffer: Buffer, options: CompressionOptions): Promise<CompressionResult>;
|
|
13
|
-
/**
|
|
14
|
-
* Check if an image needs compression for a specific provider
|
|
15
|
-
* @param imageBuffer - Input image buffer
|
|
16
|
-
* @param provider - AI provider name
|
|
17
|
-
* @returns True if compression is needed
|
|
18
|
-
*/
|
|
19
|
-
export declare function needsCompression(imageBuffer: Buffer, provider: ProviderName): boolean;
|
|
20
|
-
/**
|
|
21
|
-
* Get the size limit for a specific provider
|
|
22
|
-
* @param provider - AI provider name
|
|
23
|
-
* @returns Size limit in bytes
|
|
24
|
-
*/
|
|
25
|
-
export declare function getProviderSizeLimit(provider: ProviderName): number;
|