@far-world-labs/verblets 0.1.7 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.vitest.config.examples.js +4 -0
- package/DEVELOPING.md +1 -1
- package/package.json +5 -2
- package/scripts/clear-redis.js +74 -0
- package/src/chains/conversation/README.md +26 -0
- package/src/chains/conversation/index.examples.js +398 -0
- package/src/chains/conversation/index.js +126 -0
- package/src/chains/conversation/index.spec.js +148 -0
- package/src/chains/conversation/turn-policies.js +93 -0
- package/src/chains/conversation/turn-policies.md +123 -0
- package/src/chains/conversation/turn-policies.spec.js +135 -0
- package/src/chains/expect/index.js +34 -0
- package/src/chains/intersections/README.md +20 -6
- package/src/chains/intersections/index.examples.js +9 -8
- package/src/chains/intersections/index.js +39 -187
- package/src/chains/llm-logger/README.md +291 -133
- package/src/chains/llm-logger/index.js +451 -65
- package/src/chains/llm-logger/index.spec.js +85 -24
- package/src/chains/llm-logger/schema.json +105 -0
- package/src/chains/set-interval/index.examples.js +34 -6
- package/src/chains/set-interval/index.js +53 -32
- package/src/constants/common.js +7 -1
- package/src/constants/models.js +2 -1
- package/src/index.js +11 -1
- package/src/lib/assert/README.md +84 -0
- package/src/lib/assert/index.js +50 -0
- package/src/lib/ring-buffer/README.md +50 -428
- package/src/lib/ring-buffer/index.js +148 -987
- package/src/lib/ring-buffer/index.spec.js +388 -0
- package/src/verblets/conversation-turn/README.md +33 -0
- package/src/verblets/conversation-turn/index.examples.js +218 -0
- package/src/verblets/conversation-turn/index.js +68 -0
- package/src/verblets/conversation-turn/index.spec.js +77 -0
- package/src/verblets/conversation-turn-multi/README.md +31 -0
- package/src/verblets/conversation-turn-multi/index.examples.js +160 -0
- package/src/verblets/conversation-turn-multi/index.js +104 -0
- package/src/verblets/conversation-turn-multi/index.spec.js +63 -0
- package/src/verblets/intent/index.examples.js +1 -1
- package/src/verblets/intersection/index.js +46 -5
- package/src/verblets/people-list/README.md +28 -0
- package/src/verblets/people-list/index.examples.js +184 -0
- package/src/verblets/people-list/index.js +44 -0
- package/src/verblets/people-list/index.spec.js +49 -0
|
@@ -1,34 +1,50 @@
|
|
|
1
1
|
/**
|
|
2
|
-
* LLM Logger -
|
|
3
|
-
*
|
|
4
|
-
* Creates a sophisticated logger instance that can be used with the global logger service.
|
|
5
|
-
* This is NOT automatically used - users must explicitly create and set it.
|
|
2
|
+
* LLM Enhanced Logger - Transparent Proxy for Structured Logging
|
|
6
3
|
*
|
|
7
4
|
* Features:
|
|
8
|
-
* -
|
|
9
|
-
* -
|
|
10
|
-
* -
|
|
11
|
-
* -
|
|
5
|
+
* - Drop-in replacement for existing structured loggers
|
|
6
|
+
* - Preserves all original log properties
|
|
7
|
+
* - AI enrichment through attachments merged into output
|
|
8
|
+
* - Ring buffer with fully parallel batch processing
|
|
9
|
+
* - NDJSON bulk processing for efficient LLM interaction
|
|
10
|
+
* - Host logger integration for library internals
|
|
12
11
|
*/
|
|
13
12
|
|
|
14
13
|
import RingBuffer from '../../lib/ring-buffer/index.js';
|
|
14
|
+
import assert from '../../lib/assert/index.js';
|
|
15
15
|
|
|
16
16
|
/**
|
|
17
17
|
* @typedef {Object} LogEntry
|
|
18
18
|
* @property {string} id - Unique identifier for the log entry
|
|
19
19
|
* @property {number} ts - Timestamp when the log was created
|
|
20
|
-
* @property {*} raw - The original log data
|
|
21
|
-
* @property {Object} fileContext - File context information
|
|
20
|
+
* @property {*} raw - The original log data (any structure)
|
|
22
21
|
* @property {Map} meta - Additional metadata
|
|
22
|
+
* @property {Object} attachments - AI enrichments to merge with output
|
|
23
|
+
* @property {Object} aiMeta - AI metadata (not output with structured logs)
|
|
23
24
|
*/
|
|
24
25
|
|
|
25
26
|
/**
|
|
26
27
|
* @typedef {Object} LogLaneConfig
|
|
27
28
|
* @property {string} laneId - Unique identifier for the lane
|
|
28
|
-
* @property {Function} writer - Function to write logs (receives array of strings)
|
|
29
|
+
* @property {Function} writer - Function to write logs (receives array of objects/strings)
|
|
29
30
|
* @property {Function} [filters] - Optional filter function for log entries
|
|
30
31
|
*/
|
|
31
32
|
|
|
33
|
+
/**
|
|
34
|
+
* @typedef {Object} LogProcessor
|
|
35
|
+
* @property {string} processorId - Unique identifier for the processor
|
|
36
|
+
* @property {Function} process - Function to process NDJSON log batches
|
|
37
|
+
* @property {number} [batchSize=10] - Batch size for processing
|
|
38
|
+
* @property {string} [description] - Description for LLM context
|
|
39
|
+
*/
|
|
40
|
+
|
|
41
|
+
/**
|
|
42
|
+
* @typedef {Object} BulkAdjustment
|
|
43
|
+
* @property {string} logId - ID of the log to adjust
|
|
44
|
+
* @property {Object} adjustments - Key-value pairs of paths to values
|
|
45
|
+
* @property {Object} [aiMeta] - AI metadata adjustments
|
|
46
|
+
*/
|
|
47
|
+
|
|
32
48
|
/**
|
|
33
49
|
* Extract file context information from the call stack
|
|
34
50
|
*/
|
|
@@ -51,12 +67,69 @@ function extractFileContext() {
|
|
|
51
67
|
return { filePath: 'unknown', line: 0 };
|
|
52
68
|
}
|
|
53
69
|
|
|
70
|
+
/**
 * Set value at JSON path (a.b.c syntax)
 *
 * Walks the dotted path from the root, replacing any missing or
 * non-object intermediate segment with a fresh plain object, then
 * assigns the value at the final segment. Mutates `obj` in place.
 */
function setAtPath(obj, path, value) {
  const segments = path.split('.');
  const leaf = segments.pop();
  let node = obj;

  for (const segment of segments) {
    const child = node[segment];
    // Create a container whenever the segment is absent or not traversable.
    if (child === null || typeof child !== 'object') {
      node[segment] = {};
    }
    node = node[segment];
  }

  node[leaf] = value;
}
|
|
87
|
+
|
|
88
|
+
/**
 * Get value at JSON path (a.b.c syntax)
 *
 * Resolves each dotted segment in turn; yields undefined as soon as a
 * segment is missing or the current node is not a traversable object.
 */
function getAtPath(obj, path) {
  return path.split('.').reduce((node, segment) => {
    if (node == null || typeof node !== 'object' || !(segment in node)) {
      return undefined;
    }
    return node[segment];
  }, obj);
}
|
|
104
|
+
|
|
105
|
+
/**
 * Convert log entries to NDJSON format for bulk processing
 *
 * Each entry becomes one JSON line carrying the original raw payload,
 * the AI attachments, and the internal id/timestamp.
 *
 * Fixes:
 * - A primitive `raw` (string/number/boolean) previously vanished or was
 *   mangled by object spread (a string spreads into char-indexed keys);
 *   it is now wrapped as `{ data: raw }`, matching the lane-output
 *   convention used elsewhere in this file.
 * - The internal `id` is emitted last so a raw `id` property can never
 *   mask it — processors match `BulkAdjustment.logId` against this
 *   internal id, so masking it would break adjustment application.
 *
 * @param {LogEntry[]} logs - Ring-buffer entries to serialize
 * @returns {string} newline-delimited JSON, one entry per line
 */
function logsToNDJSON(logs) {
  return logs
    .map((log) => {
      const raw =
        log.raw !== null && typeof log.raw === 'object' ? log.raw : { data: log.raw };
      return JSON.stringify({
        ts: log.ts,
        ...raw, // Spread original properties
        attachments: log.attachments,
        id: log.id, // Internal id last: must win for logId matching
      });
    })
    .join('\n');
}
|
|
120
|
+
|
|
54
121
|
/**
|
|
55
122
|
* Create console writer function
|
|
56
123
|
*/
|
|
57
124
|
/**
 * Create console writer function
 *
 * Returns a lane writer that prints every log to the console with the
 * given prefix. String logs pass through as-is; anything else is
 * serialized with JSON.stringify.
 */
export function createConsoleWriter(prefix = '') {
  return (logs) => {
    for (const log of logs) {
      const text = typeof log === 'string' ? log : JSON.stringify(log);
      console.log(prefix + text);
    }
  };
}
|
|
62
135
|
|
|
@@ -70,82 +143,356 @@ export function createFileWriter(filePath) {
|
|
|
70
143
|
}
|
|
71
144
|
|
|
72
145
|
/**
 * Create host logger integration - allows library to use external logger
 *
 * Each level method tries the host logger's matching method first, then
 * walks a fallback chain (e.g. fatal -> error -> log), and finally calls
 * the host logger itself if it is a plain function. Returns undefined
 * when nothing is callable, matching the original behavior.
 *
 * Refactor: the original repeated the same typeof/fallback ladder seven
 * times; it is collapsed into one dispatch helper driven by per-level
 * fallback lists, which keeps the chains identical and in one place.
 *
 * @param {Object|Function} hostLogger - External logger object or function
 * @returns {Object} level-method facade (log/info/warn/error/debug/trace/fatal)
 */
export function createHostLoggerIntegration(hostLogger) {
  // Try each named method in order; fall back to invoking hostLogger directly.
  const dispatch = (methods, data) => {
    for (const method of methods) {
      if (typeof hostLogger[method] === 'function') {
        return hostLogger[method](data);
      }
    }
    if (typeof hostLogger === 'function') {
      return hostLogger(data);
    }
    return undefined;
  };

  return {
    log: (data) => dispatch(['log'], data),
    info: (data) => dispatch(['info', 'log'], data),
    warn: (data) => dispatch(['warn', 'log'], data),
    error: (data) => dispatch(['error', 'log'], data),
    debug: (data) => dispatch(['debug', 'log'], data),
    trace: (data) => dispatch(['trace', 'log'], data),
    fatal: (data) => dispatch(['fatal', 'error', 'log'], data),
  };
}
|
|
215
|
+
|
|
216
|
+
/**
|
|
217
|
+
* Create an Enhanced LLM Logger instance
|
|
74
218
|
*
|
|
75
219
|
* @param {Object} config - Configuration object
|
|
76
|
-
* @param {number} [config.ringBufferSize=
|
|
220
|
+
* @param {number} [config.ringBufferSize=5000] - Size of the ring buffer
|
|
77
221
|
* @param {LogLaneConfig[]} [config.lanes=[]] - Lane configurations
|
|
78
|
-
* @param {
|
|
79
|
-
* @
|
|
222
|
+
* @param {LogProcessor[]} [config.processors=[]] - Log processors for enhancement
|
|
223
|
+
* @param {number} [config.flushInterval=1000] - Flush interval in milliseconds
|
|
224
|
+
* @param {boolean} [config.immediateFlush=false] - Whether to flush immediately
|
|
225
|
+
* @param {Object} [config.hostLogger] - Host logger for library internals
|
|
226
|
+
* @returns {Object} Enhanced logger instance
|
|
80
227
|
*/
|
|
81
228
|
export function createLLMLogger(config = {}) {
|
|
82
|
-
const {
|
|
229
|
+
const {
|
|
230
|
+
ringBufferSize = 5000,
|
|
231
|
+
lanes = [],
|
|
232
|
+
processors = [],
|
|
233
|
+
flushInterval = 1000,
|
|
234
|
+
immediateFlush = false,
|
|
235
|
+
hostLogger = null,
|
|
236
|
+
} = config;
|
|
83
237
|
|
|
84
238
|
// Initialize ring buffer
|
|
85
239
|
const ringBuffer = new RingBuffer(ringBufferSize);
|
|
86
240
|
|
|
241
|
+
// Keep track of all log entries for legacy API compatibility
|
|
242
|
+
const allLogs = [];
|
|
243
|
+
|
|
244
|
+
// Host logger integration for internal logging
|
|
245
|
+
const internalLogger = hostLogger ? createHostLoggerIntegration(hostLogger) : null;
|
|
246
|
+
|
|
87
247
|
// Lane buffers for batching
|
|
88
248
|
const laneBuffers = new Map();
|
|
249
|
+
|
|
89
250
|
lanes.forEach((lane) => {
|
|
251
|
+
assert(typeof lane.laneId !== 'undefined', 'Each lane must have an laneId property').toBe(true);
|
|
90
252
|
laneBuffers.set(lane.laneId, []);
|
|
91
253
|
});
|
|
92
254
|
|
|
93
|
-
//
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
255
|
+
// Processor state tracking - just track latest processed offsets
|
|
256
|
+
const processorOffsets = new Map();
|
|
257
|
+
const processorReaders = new Map();
|
|
258
|
+
|
|
259
|
+
// Register processors and start parallel processing
|
|
260
|
+
processors.forEach((processor) => {
|
|
261
|
+
const readerId = ringBuffer.registerReader();
|
|
262
|
+
processorReaders.set(processor.processorId, readerId);
|
|
263
|
+
processorOffsets.set(processor.processorId, -1);
|
|
264
|
+
|
|
265
|
+
const batchSize = processor.batchSize || 10;
|
|
266
|
+
|
|
267
|
+
// Fully parallel processing loop - no coordination
|
|
268
|
+
const processLoop = async () => {
|
|
269
|
+
try {
|
|
270
|
+
const batch = await ringBuffer.readBatch(readerId, batchSize);
|
|
271
|
+
|
|
272
|
+
// Convert to NDJSON for LLM processing
|
|
273
|
+
const ndjsonData = logsToNDJSON(batch.data);
|
|
274
|
+
|
|
275
|
+
// Create context comment for LLM
|
|
276
|
+
const contextComment = `# Log Processing Context
|
|
277
|
+
# Processor: ${processor.processorId} (${processor.description || 'No description'})
|
|
278
|
+
# Batch size: ${batch.data.length} logs
|
|
279
|
+
# Task: Analyze the following NDJSON log entries and return bulk adjustments
|
|
280
|
+
#
|
|
281
|
+
# Expected response format (array of BulkAdjustment objects):
|
|
282
|
+
# [
|
|
283
|
+
# {
|
|
284
|
+
# "logId": "log-id-here",
|
|
285
|
+
# "adjustments": {
|
|
286
|
+
# "path.to.field": "value",
|
|
287
|
+
# "another.path": { "nested": "object" }
|
|
288
|
+
# },
|
|
289
|
+
# "aiMeta": {
|
|
290
|
+
# "skip": false,
|
|
291
|
+
# "confidence": 0.95
|
|
292
|
+
# }
|
|
293
|
+
# }
|
|
294
|
+
# ]
|
|
295
|
+
#
|
|
296
|
+
# NDJSON Log Data:
|
|
297
|
+
`;
|
|
298
|
+
|
|
299
|
+
const fullInput = contextComment + ndjsonData;
|
|
300
|
+
|
|
301
|
+
// Process the batch and get bulk adjustments
|
|
302
|
+
const bulkAdjustments = await processor.process(fullInput);
|
|
303
|
+
|
|
304
|
+
// Apply adjustments in processor order (index determines priority)
|
|
305
|
+
if (Array.isArray(bulkAdjustments)) {
|
|
306
|
+
for (const adjustment of bulkAdjustments) {
|
|
307
|
+
const logIndex = allLogs.findIndex((log) => log.id === adjustment.logId);
|
|
308
|
+
if (logIndex !== -1) {
|
|
309
|
+
const logEntry = allLogs[logIndex];
|
|
310
|
+
|
|
311
|
+
// Apply regular adjustments to attachments
|
|
312
|
+
if (adjustment.adjustments) {
|
|
313
|
+
for (const [path, value] of Object.entries(adjustment.adjustments)) {
|
|
314
|
+
setAtPath(logEntry.attachments, path, value);
|
|
315
|
+
}
|
|
316
|
+
}
|
|
317
|
+
|
|
318
|
+
// Apply AI metadata (last write wins per processor order)
|
|
319
|
+
if (adjustment.aiMeta) {
|
|
320
|
+
if (!logEntry.aiMeta) {
|
|
321
|
+
logEntry.aiMeta = {};
|
|
322
|
+
}
|
|
323
|
+
Object.assign(logEntry.aiMeta, adjustment.aiMeta);
|
|
324
|
+
}
|
|
325
|
+
}
|
|
326
|
+
}
|
|
327
|
+
}
|
|
328
|
+
|
|
329
|
+
// Update processed offset
|
|
330
|
+
processorOffsets.set(processor.processorId, batch.lastOffset);
|
|
331
|
+
|
|
332
|
+
// Continue processing immediately (fully parallel)
|
|
333
|
+
setTimeout(processLoop, 0);
|
|
334
|
+
} catch (error) {
|
|
335
|
+
if (internalLogger) {
|
|
336
|
+
internalLogger.error(`Processor ${processor.processorId} error: ${error.message}`);
|
|
103
337
|
}
|
|
338
|
+
setTimeout(processLoop, 1000); // Retry after delay
|
|
104
339
|
}
|
|
105
|
-
setTimeout(flushLoop, flushInterval);
|
|
106
340
|
};
|
|
107
|
-
|
|
341
|
+
|
|
342
|
+
// Start processing loop
|
|
343
|
+
processLoop();
|
|
108
344
|
});
|
|
109
345
|
|
|
346
|
+
// Flush function that can be called immediately or on timer
|
|
347
|
+
const flushLanes = () => {
|
|
348
|
+
for (const [laneId, buffer] of laneBuffers) {
|
|
349
|
+
if (buffer.length > 0) {
|
|
350
|
+
const lane = lanes.find((l) => l.laneId === laneId);
|
|
351
|
+
if (lane) {
|
|
352
|
+
// Filter out logs marked for skipping
|
|
353
|
+
const logsToWrite = buffer.filter((logData) => {
|
|
354
|
+
return !logData.aiMeta?.skip;
|
|
355
|
+
});
|
|
356
|
+
|
|
357
|
+
if (logsToWrite.length > 0) {
|
|
358
|
+
lane.writer(logsToWrite);
|
|
359
|
+
}
|
|
360
|
+
buffer.length = 0;
|
|
361
|
+
}
|
|
362
|
+
}
|
|
363
|
+
}
|
|
364
|
+
};
|
|
365
|
+
|
|
366
|
+
// Flush loops for each lane (only if not immediate flush mode)
|
|
367
|
+
if (!immediateFlush) {
|
|
368
|
+
lanes.forEach((_lane) => {
|
|
369
|
+
const flushLoop = () => {
|
|
370
|
+
flushLanes();
|
|
371
|
+
setTimeout(flushLoop, flushInterval);
|
|
372
|
+
};
|
|
373
|
+
flushLoop();
|
|
374
|
+
});
|
|
375
|
+
}
|
|
376
|
+
|
|
110
377
|
/**
|
|
111
|
-
* Process a log entry
|
|
378
|
+
* Process a log entry - accepts any structured data
|
|
112
379
|
*/
|
|
113
380
|
function processLog(data, level = 'log') {
|
|
114
381
|
const logEntry = {
|
|
115
|
-
id: Date.now()
|
|
382
|
+
id: `${Date.now()}-${Math.random().toString(36).substr(2, 9)}`,
|
|
116
383
|
ts: new Date(),
|
|
117
|
-
raw: data,
|
|
384
|
+
raw: data, // Store original data as-is (any structure)
|
|
118
385
|
meta: new Map([
|
|
119
386
|
['level', level],
|
|
120
387
|
['fileContext', extractFileContext()],
|
|
121
388
|
]),
|
|
389
|
+
attachments: {}, // AI enrichments to merge
|
|
390
|
+
aiMeta: {}, // AI metadata (not included in output)
|
|
122
391
|
};
|
|
123
392
|
|
|
124
|
-
// Add to ring buffer
|
|
125
|
-
|
|
126
|
-
|
|
127
|
-
//
|
|
128
|
-
|
|
393
|
+
// Add to ring buffer
|
|
394
|
+
ringBuffer.write(logEntry);
|
|
395
|
+
|
|
396
|
+
// Also add to our legacy array for compatibility
|
|
397
|
+
allLogs.push(logEntry);
|
|
129
398
|
|
|
130
|
-
//
|
|
399
|
+
// Keep only the last ringBufferSize entries
|
|
400
|
+
if (allLogs.length > ringBufferSize) {
|
|
401
|
+
allLogs.shift();
|
|
402
|
+
}
|
|
403
|
+
|
|
404
|
+
// Process through lanes
|
|
131
405
|
for (const lane of lanes) {
|
|
132
406
|
if (!lane.filters || lane.filters(logEntry)) {
|
|
133
|
-
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
|
|
140
|
-
|
|
407
|
+
let outputData;
|
|
408
|
+
|
|
409
|
+
// Handle different data types appropriately
|
|
410
|
+
if (data === null || data === undefined) {
|
|
411
|
+
// Handle null/undefined
|
|
412
|
+
outputData = {
|
|
413
|
+
data,
|
|
414
|
+
id: logEntry.id,
|
|
415
|
+
ts: logEntry.ts,
|
|
416
|
+
level,
|
|
417
|
+
...logEntry.attachments,
|
|
418
|
+
};
|
|
419
|
+
} else if (
|
|
420
|
+
typeof data === 'string' ||
|
|
421
|
+
typeof data === 'number' ||
|
|
422
|
+
typeof data === 'boolean'
|
|
423
|
+
) {
|
|
424
|
+
// Handle primitives - wrap in data property
|
|
425
|
+
outputData = {
|
|
426
|
+
data,
|
|
427
|
+
id: logEntry.id,
|
|
428
|
+
ts: logEntry.ts,
|
|
429
|
+
level,
|
|
430
|
+
...logEntry.attachments,
|
|
431
|
+
};
|
|
432
|
+
} else if (typeof data === 'object' && data !== null) {
|
|
433
|
+
// Handle objects - merge properties
|
|
434
|
+
outputData = {
|
|
435
|
+
...data, // Spread all original properties
|
|
436
|
+
...logEntry.attachments, // Merge AI enrichments
|
|
437
|
+
// Add internal metadata only if not already present
|
|
438
|
+
...(data.id ? {} : { id: logEntry.id }),
|
|
439
|
+
...(data.ts ? {} : { ts: logEntry.ts }),
|
|
440
|
+
...(data.level ? {} : { level }),
|
|
441
|
+
};
|
|
442
|
+
} else {
|
|
443
|
+
// Fallback for other types
|
|
444
|
+
outputData = {
|
|
445
|
+
data,
|
|
446
|
+
id: logEntry.id,
|
|
447
|
+
ts: logEntry.ts,
|
|
448
|
+
level,
|
|
449
|
+
...logEntry.attachments,
|
|
450
|
+
};
|
|
141
451
|
}
|
|
452
|
+
|
|
453
|
+
// Add aiMeta for filtering but don't include in final output
|
|
454
|
+
outputData.aiMeta = logEntry.aiMeta;
|
|
455
|
+
|
|
456
|
+
laneBuffers.get(lane.laneId).push(outputData);
|
|
457
|
+
}
|
|
458
|
+
}
|
|
459
|
+
|
|
460
|
+
// Immediate flush if enabled
|
|
461
|
+
if (immediateFlush) {
|
|
462
|
+
flushLanes();
|
|
463
|
+
}
|
|
464
|
+
|
|
465
|
+
// Log to host logger if configured
|
|
466
|
+
if (internalLogger) {
|
|
467
|
+
const logMessage = `LLM Logger processed: ${JSON.stringify(data)}`;
|
|
468
|
+
switch (level) {
|
|
469
|
+
case 'error':
|
|
470
|
+
internalLogger.error(logMessage);
|
|
471
|
+
break;
|
|
472
|
+
case 'warn':
|
|
473
|
+
internalLogger.warn(logMessage);
|
|
474
|
+
break;
|
|
475
|
+
case 'info':
|
|
476
|
+
internalLogger.info(logMessage);
|
|
477
|
+
break;
|
|
478
|
+
case 'debug':
|
|
479
|
+
internalLogger.debug(logMessage);
|
|
480
|
+
break;
|
|
481
|
+
case 'trace':
|
|
482
|
+
internalLogger.trace(logMessage);
|
|
483
|
+
break;
|
|
484
|
+
case 'fatal':
|
|
485
|
+
internalLogger.fatal(logMessage);
|
|
486
|
+
break;
|
|
487
|
+
default:
|
|
488
|
+
internalLogger.log(logMessage);
|
|
142
489
|
}
|
|
143
490
|
}
|
|
144
491
|
}
|
|
145
492
|
|
|
146
|
-
// Return logger instance
|
|
493
|
+
// Return enhanced logger instance
|
|
147
494
|
return {
|
|
148
|
-
// Standard logger interface
|
|
495
|
+
// Standard logger interface - accepts any structured data
|
|
149
496
|
log: (data) => processLog(data, 'log'),
|
|
150
497
|
info: (data) => processLog(data, 'info'),
|
|
151
498
|
warn: (data) => processLog(data, 'warn'),
|
|
@@ -154,31 +501,59 @@ export function createLLMLogger(config = {}) {
|
|
|
154
501
|
trace: (data) => processLog(data, 'trace'),
|
|
155
502
|
fatal: (data) => processLog(data, 'fatal'),
|
|
156
503
|
|
|
157
|
-
//
|
|
504
|
+
// Enhanced attachment API
|
|
505
|
+
attachToLog: (logId, path, value) => {
|
|
506
|
+
const logIndex = allLogs.findIndex((log) => log.id === logId);
|
|
507
|
+
if (logIndex !== -1) {
|
|
508
|
+
setAtPath(allLogs[logIndex].attachments, path, value);
|
|
509
|
+
return true;
|
|
510
|
+
}
|
|
511
|
+
return false;
|
|
512
|
+
},
|
|
513
|
+
|
|
514
|
+
getLogAttachment: (logId, path) => {
|
|
515
|
+
const log = allLogs.find((log) => log.id === logId);
|
|
516
|
+
return log ? getAtPath(log.attachments, path) : undefined;
|
|
517
|
+
},
|
|
518
|
+
|
|
519
|
+
markLogSkippable: (logId, skip = true) => {
|
|
520
|
+
const logIndex = allLogs.findIndex((log) => log.id === logId);
|
|
521
|
+
if (logIndex !== -1) {
|
|
522
|
+
allLogs[logIndex].aiMeta.skip = skip;
|
|
523
|
+
return true;
|
|
524
|
+
}
|
|
525
|
+
return false;
|
|
526
|
+
},
|
|
527
|
+
|
|
528
|
+
// Ring buffer access - legacy API compatibility
|
|
158
529
|
ringBuffer: {
|
|
159
|
-
all: () =>
|
|
160
|
-
size: () =>
|
|
161
|
-
clear: () =>
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
|
|
530
|
+
all: () => [...allLogs],
|
|
531
|
+
size: () => allLogs.length,
|
|
532
|
+
clear: () => {
|
|
533
|
+
allLogs.length = 0;
|
|
534
|
+
ringBuffer.clear();
|
|
535
|
+
},
|
|
536
|
+
tail: (count) => allLogs.slice(-count),
|
|
537
|
+
head: (count) => allLogs.slice(0, count),
|
|
538
|
+
filter: (predicate) => allLogs.filter(predicate),
|
|
539
|
+
},
|
|
540
|
+
|
|
541
|
+
// Processor status
|
|
542
|
+
getProcessorOffsets: () => new Map(processorOffsets),
|
|
543
|
+
|
|
544
|
+
// Host logger integration
|
|
545
|
+
setHostLogger: (newHostLogger) => {
|
|
546
|
+
config.hostLogger = newHostLogger;
|
|
547
|
+
return createHostLoggerIntegration(newHostLogger);
|
|
166
548
|
},
|
|
167
549
|
|
|
168
550
|
// Utility methods
|
|
169
551
|
flush: () => {
|
|
170
|
-
|
|
171
|
-
if (buffer.length > 0) {
|
|
172
|
-
const lane = lanes.find((l) => l.laneId === laneId);
|
|
173
|
-
if (lane) {
|
|
174
|
-
lane.writer([...buffer]);
|
|
175
|
-
buffer.length = 0;
|
|
176
|
-
}
|
|
177
|
-
}
|
|
178
|
-
}
|
|
552
|
+
flushLanes();
|
|
179
553
|
},
|
|
180
554
|
|
|
181
555
|
clear: () => {
|
|
556
|
+
allLogs.length = 0;
|
|
182
557
|
ringBuffer.clear();
|
|
183
558
|
for (const buffer of laneBuffers.values()) {
|
|
184
559
|
buffer.length = 0;
|
|
@@ -189,6 +564,17 @@ export function createLLMLogger(config = {}) {
|
|
|
189
564
|
ringBufferSize,
|
|
190
565
|
flushInterval,
|
|
191
566
|
lanes: [...lanes],
|
|
567
|
+
processors: [...processors],
|
|
568
|
+
hostLogger: !!config.hostLogger,
|
|
569
|
+
}),
|
|
570
|
+
|
|
571
|
+
getStats: () => ({
|
|
572
|
+
...ringBuffer.getStats(),
|
|
573
|
+
processorOffsets: Object.fromEntries(processorOffsets),
|
|
574
|
+
processors: processors.map((p) => ({
|
|
575
|
+
id: p.processorId,
|
|
576
|
+
processedOffset: processorOffsets.get(p.processorId),
|
|
577
|
+
})),
|
|
192
578
|
}),
|
|
193
579
|
};
|
|
194
580
|
}
|