@kylindc/ccxray 1.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,71 @@
1
+ 'use strict';
2
+
3
+ const store = require('./store');
4
+
5
+ // Strip req/res from broadcast — browser only needs summary for the turn list
6
// Build a lightweight summary of a log entry for SSE broadcast.
// The heavy req/res payloads are omitted — the browser's turn list only
// needs headline fields plus an optional token breakdown.
function summarizeEntry(entry) {
  const {
    id, ts, sessionId, method, url, elapsed, status, isSSE,
    usage, cost, maxContext, cwd,
  } = entry;

  // Token breakdown is passed through (minus internals) when present.
  let tokens = null;
  if (entry.tokens) {
    const t = entry.tokens;
    tokens = {
      system: t.system,
      tools: t.tools,
      messages: t.messages,
      total: t.total,
      contextBreakdown: t.contextBreakdown,
      perMessage: t.perMessage || null,
    };
  }

  return {
    id, ts, sessionId, method, url, elapsed, status, isSSE,
    usage, cost, maxContext, cwd,
    model: entry.model || null,
    msgCount: entry.msgCount || 0,
    toolCount: entry.toolCount || 0,
    toolCalls: entry.toolCalls || [],
    isSubagent: entry.isSubagent || false,
    title: entry.title || null,
    stopReason: entry.stopReason || '',
    thinkingDuration: entry.thinkingDuration || null,
    duplicateToolCalls: entry.duplicateToolCalls || null,
    tokens,
  };
}
29
+
30
// Push a summarized entry to every connected SSE client.
function broadcast(entry) {
  const payload = JSON.stringify(summarizeEntry(entry));
  store.sseClients.forEach((client) => {
    client.write(`data: ${payload}\n\n`);
  });
}
36
+
37
// Tell all SSE clients whether a session currently has in-flight requests,
// plus when it was last seen.
function broadcastSessionStatus(sessionId) {
  const inFlight = store.activeRequests[sessionId] || 0;
  const payload = JSON.stringify({
    _type: 'session_status',
    sessionId,
    active: inFlight > 0,
    lastSeenAt: store.sessionMeta[sessionId]?.lastSeenAt || null,
  });
  store.sseClients.forEach((client) => client.write(`data: ${payload}\n\n`));
}
45
+
46
// Announce a paused (intercepted) request so the UI can surface it for editing.
function broadcastPendingRequest(requestId, parsedBody, sessionId) {
  const payload = JSON.stringify({
    _type: 'pending_request',
    requestId,
    sessionId,
    body: parsedBody,
  });
  store.sseClients.forEach((client) => client.write(`data: ${payload}\n\n`));
}
53
+
54
// Notify clients that interception was switched on/off for a session.
function broadcastInterceptToggle(sessionId, enabled) {
  const payload = JSON.stringify({ _type: 'intercept_toggled', sessionId, enabled });
  store.sseClients.forEach((client) => client.write(`data: ${payload}\n\n`));
}
58
+
59
// Notify clients that a paused request was released or discarded.
function broadcastInterceptRemoved(requestId) {
  const payload = JSON.stringify({ _type: 'intercept_removed', requestId });
  store.sseClients.forEach((client) => client.write(`data: ${payload}\n\n`));
}
63
+
64
// Public API: entry summarization plus the SSE broadcast helpers.
module.exports = {
  summarizeEntry,
  broadcast,
  broadcastSessionStatus,
  broadcastPendingRequest,
  broadcastInterceptToggle,
  broadcastInterceptRemoved,
};
@@ -0,0 +1,36 @@
1
+ 'use strict';
2
+
3
+ const { createLocalStorage } = require('./local');
4
+
5
/**
 * Create the appropriate storage adapter based on STORAGE_BACKEND env var.
 *
 * STORAGE_BACKEND=local (default) — local filesystem
 * STORAGE_BACKEND=s3 — S3/R2 (requires @aws-sdk/client-s3)
 *
 * @returns {import('./interface').StorageAdapter}
 */
function createStorage() {
  const backend = (process.env.STORAGE_BACKEND || 'local').toLowerCase();

  if (backend === 's3') {
    // S3 module is required lazily so the SDK stays an optional dependency.
    const { createS3Storage } = require('./s3');
    return createS3Storage({
      bucket: process.env.S3_BUCKET,
      region: process.env.S3_REGION || 'auto',
      endpoint: process.env.S3_ENDPOINT || undefined,
      prefix: process.env.S3_PREFIX || 'logs/',
    });
  }

  // Any other value (including the default) falls back to local filesystem.
  const path = require('path');
  const os = require('os');
  const home = process.env.CCXRAY_HOME || path.join(os.homedir(), '.ccxray');
  const logsDir = process.env.LOGS_DIR || path.join(home, 'logs');
  return createLocalStorage(logsDir);
}

module.exports = { createStorage };
@@ -0,0 +1,26 @@
1
+ 'use strict';
2
+
3
+ /**
4
+ * Storage adapter interface for ccxray log persistence.
5
+ *
6
+ * Every adapter must implement these methods. The proxy uses `id` (timestamp-based)
7
+ * and `suffix` (_req.json, _res.json, _sse.txt) to identify files.
8
+ *
9
+ * @typedef {Object} StorageAdapter
10
+ * @property {() => Promise<void>} init
11
+ * Ensure storage backend is ready (create dirs, check credentials, etc.).
12
+ *
13
+ * @property {(id: string, suffix: string, data: string|Buffer) => Promise<void>} write
14
+ * Write a log artifact. Fire-and-forget callers should .catch() the promise.
15
+ *
16
+ * @property {(id: string, suffix: string) => Promise<string>} read
17
+ * Read a log artifact as UTF-8 string. Throws if not found.
18
+ *
19
+ * @property {() => Promise<string[]>} list
20
+ * List all filenames in the log store (e.g. ['2025-03-17T12-00-00-000_req.json', ...]).
21
+ *
22
+ * @property {(id: string, suffix: string) => Promise<{mtimeMs: number}>} stat
23
+ * Get metadata (at minimum mtimeMs) for a log artifact. Throws if not found.
24
+ */
25
+
26
+ module.exports = {};
@@ -0,0 +1,79 @@
1
+ 'use strict';
2
+
3
+ const fs = require('fs');
4
+ const fsp = fs.promises;
5
+ const path = require('path');
6
+
7
/**
 * Local filesystem storage adapter.
 *
 * Artifacts live directly in `logsDir` as `<id><suffix>` files; an
 * append-only `index.ndjson` and a content-addressed `shared/` directory
 * sit alongside them.
 *
 * @param {string} logsDir — absolute path to the logs directory
 * @returns {import('./interface').StorageAdapter}
 */
function createLocalStorage(logsDir) {
  const sharedDir = path.join(logsDir, 'shared');
  const indexPath = path.join(logsDir, 'index.ndjson');

  // Resolve an artifact's on-disk path from its id + suffix.
  const artifactPath = (id, suffix) => path.join(logsDir, id + suffix);

  return {
    // Create the directory layout; idempotent (mkdir -p semantics).
    async init() {
      await fsp.mkdir(logsDir, { recursive: true });
      await fsp.mkdir(sharedDir, { recursive: true });
    },

    async write(id, suffix, data) {
      await fsp.writeFile(artifactPath(id, suffix), data);
    },

    async read(id, suffix) {
      return fsp.readFile(artifactPath(id, suffix), 'utf8');
    },

    async list() {
      return fsp.readdir(logsDir);
    },

    async stat(id, suffix) {
      return fsp.stat(artifactPath(id, suffix));
    },

    // ── Index (index.ndjson) ──────────────────────────────────────────

    async appendIndex(line) {
      await fsp.appendFile(indexPath, line);
    },

    // A missing index file reads as empty rather than throwing.
    async readIndex() {
      try {
        return await fsp.readFile(indexPath, 'utf8');
      } catch (err) {
        if (err.code !== 'ENOENT') throw err;
        return '';
      }
    },

    // ── Shared content-addressed storage (shared/) ───────────────────

    // Flag 'wx' = exclusive create: an existing file is left untouched.
    async writeSharedIfAbsent(filename, data) {
      try {
        await fsp.writeFile(path.join(sharedDir, filename), data, { flag: 'wx' });
      } catch (err) {
        if (err.code !== 'EEXIST') throw err;
      }
    },

    async readShared(filename) {
      return fsp.readFile(path.join(sharedDir, filename), 'utf8');
    },

    // A missing shared dir lists as empty rather than throwing.
    async listShared() {
      try {
        return await fsp.readdir(sharedDir);
      } catch (err) {
        if (err.code !== 'ENOENT') throw err;
        return [];
      }
    },
  };
}

module.exports = { createLocalStorage };
@@ -0,0 +1,91 @@
1
+ 'use strict';
2
+
3
/**
 * S3/R2 storage adapter.
 *
 * Requires: @aws-sdk/client-s3 (or compatible R2 endpoint).
 * Configure via environment variables:
 *   S3_BUCKET, S3_REGION, S3_ENDPOINT (for R2/MinIO), S3_PREFIX
 *
 * @param {Object} opts
 * @param {string} opts.bucket
 * @param {string} [opts.region='auto']
 * @param {string} [opts.endpoint]
 * @param {string} [opts.prefix='logs/']
 * @returns {import('./interface').StorageAdapter}
 */
function createS3Storage(opts) {
  const { bucket, region = 'auto', endpoint, prefix = 'logs/' } = opts;

  // The SDK is loaded lazily so it stays an optional dependency.
  let client = null;
  function getClient() {
    if (!client) {
      const { S3Client } = require('@aws-sdk/client-s3');
      const config = { region };
      if (endpoint) {
        config.endpoint = endpoint;
        config.forcePathStyle = true; // custom endpoints (R2/MinIO) need path-style
      }
      client = new S3Client(config);
    }
    return client;
  }

  const key = (id, suffix) => prefix + id + suffix;

  return {
    // Verify bucket access up front.
    async init() {
      const { HeadBucketCommand } = require('@aws-sdk/client-s3');
      await getClient().send(new HeadBucketCommand({ Bucket: bucket }));
    },

    async write(id, suffix, data) {
      const { PutObjectCommand } = require('@aws-sdk/client-s3');
      const contentType = suffix.endsWith('.json') ? 'application/json' : 'text/plain';
      await getClient().send(new PutObjectCommand({
        Bucket: bucket,
        Key: key(id, suffix),
        Body: data,
        ContentType: contentType,
      }));
    },

    async read(id, suffix) {
      const { GetObjectCommand } = require('@aws-sdk/client-s3');
      const res = await getClient().send(
        new GetObjectCommand({ Bucket: bucket, Key: key(id, suffix) }),
      );
      return res.Body.transformToString('utf-8');
    },

    // Page through all objects under the prefix, returning bare filenames.
    async list() {
      const { ListObjectsV2Command } = require('@aws-sdk/client-s3');
      const names = [];
      let continuationToken;
      for (;;) {
        const page = await getClient().send(new ListObjectsV2Command({
          Bucket: bucket,
          Prefix: prefix,
          ContinuationToken: continuationToken,
        }));
        for (const obj of page.Contents || []) {
          names.push(obj.Key.slice(prefix.length));
        }
        continuationToken = page.IsTruncated ? page.NextContinuationToken : null;
        if (!continuationToken) break;
      }
      return names;
    },

    async stat(id, suffix) {
      const { HeadObjectCommand } = require('@aws-sdk/client-s3');
      const res = await getClient().send(
        new HeadObjectCommand({ Bucket: bucket, Key: key(id, suffix) }),
      );
      // LastModified can be absent on some S3-compatible stores; fall back to now.
      const mtimeMs = res.LastModified ? res.LastModified.getTime() : Date.now();
      return { mtimeMs };
    },
  };
}

module.exports = { createS3Storage };
@@ -0,0 +1,108 @@
1
+ 'use strict';
2
+
3
// ── In-memory store & SSE clients ───────────────────────────────────
// Ring-buffer cap for the in-memory entry list (override via CCXRAY_MAX_ENTRIES).
const MAX_ENTRIES = parseInt(process.env.CCXRAY_MAX_ENTRIES || '5000', 10);
const entries = [];     // log entries, newest appended at the end
const sseClients = [];  // live SSE response objects to broadcast to

// Drop the oldest entries in place so `entries` never exceeds MAX_ENTRIES.
function trimEntries() {
  if (entries.length > MAX_ENTRIES) {
    entries.splice(0, entries.length - MAX_ENTRIES);
  }
}
13
+
14
// ── Rate limit state (from Anthropic response headers) ──────────────
let rateLimitState = null;

// ── Session tracking ────────────────────────────────────────────────
let currentSessionId = null;  // session the most recent request belonged to
let lastMsgCount = 0;         // previous request's message count (new-session heuristic)
let sessionCounter = 0;       // total sessions observed since startup

// ── Session metadata (cwd per session) ──────────────────────────────
const sessionMeta = {}; // { sessionId: { cwd, lastSeenAt } }
const activeRequests = {}; // sessionId → in-flight count
const sessionCosts = new Map(); // sessionId → accumulated cost

// ── Version Index (cc_version → { reqId, b2Len, firstSeen }) ────────
const versionIndex = new Map();

// ── Intercept (request pause) ────────────────────────────────────────
const interceptSessions = new Set();  // sessions with interception enabled
const pendingRequests = new Map();    // requestId → paused request state
// Intercept hold timeout; consumed elsewhere — presumably seconds, verify at call site.
let interceptTimeout = 120;
34
+
35
// True for the 1-token, system-less "quota" probe message used to test
// account limits — it should not be treated as a real conversation turn.
function isQuotaCheck(req) {
  if (req?.max_tokens !== 1 || req?.system) return false;
  const msgs = req?.messages;
  return msgs?.length === 1 && msgs[0]?.content === 'quota';
}
39
+
40
// Pull the working directory out of the system prompt text.
// Quota probes get a sentinel value so they don't pollute per-cwd grouping.
function extractCwd(req) {
  if (isQuotaCheck(req)) return '(quota-check)';
  const sys = req?.system;
  if (!sys) return null;
  const text = Array.isArray(sys)
    ? sys.map((block) => block.text || '').join('\n')
    : String(sys);
  const match = text.match(/Primary working directory: (.+)/);
  return match ? match[1].trim() : null;
}
47
+
48
// Recover the Claude Code session id from request metadata, handling both
// the current and legacy user_id encodings.
function extractSessionId(req) {
  const uid = req?.metadata?.user_id || '';
  // New format: user_id is JSON like {"session_id":"xxx-yyy"}
  const asJson = uid.match(/"session_id"\s*:\s*"([a-f0-9-]+)"/);
  if (asJson) return asJson[1];
  // Legacy format: user_id is "session_xxx-yyy"
  const legacy = uid.match(/session_([a-f0-9-]+)/);
  return legacy ? legacy[1] : null;
}
57
+
58
// Decide which session a request belongs to and whether it starts a new one.
// Prefers the real session id from request metadata; without one, falls back
// to a heuristic: no current session, or a shrinking message count (the
// conversation was reset), means a new session.
// NOTE(review): mutates module state (currentSessionId, lastMsgCount,
// sessionCounter) — assumes one logical request stream at a time.
function detectSession(req) {
  const realId = extractSessionId(req);
  const isNew = realId ? (realId !== currentSessionId) : (!currentSessionId || (req?.messages?.length || 0) < lastMsgCount);
  if (isNew) {
    sessionCounter++;
    // 'direct-api' labels traffic that carries no session metadata at all.
    currentSessionId = realId || 'direct-api';
  }
  lastMsgCount = req?.messages?.length || 0;
  return { sessionId: currentSessionId, isNewSession: isNew };
}
68
+
69
// Print a magenta, star-framed banner announcing a new session, followed by
// the command needed to resume it.
function printSessionBanner(sessionId) {
  const width = 60;
  const label = ` NEW SESSION ${sessionId.slice(0, 8)} `;
  const left = Math.max(0, Math.floor((width - label.length) / 2));
  const right = width - left - label.length;
  const banner = '★'.repeat(left) + label + '★'.repeat(right);
  console.log();
  console.log('\x1b[1;35m' + banner + '\x1b[0m');
  console.log(`\x1b[35m claude --continue ${sessionId}\x1b[0m`);
  console.log();
}
80
+
81
+ function getRateLimitState() { return rateLimitState; }
82
+ function setRateLimitState(state) { rateLimitState = state; }
83
+ function getInterceptTimeout() { return interceptTimeout; }
84
+ function setInterceptTimeout(val) { interceptTimeout = val; }
85
+ function getCurrentSessionId() { return currentSessionId; }
86
+
87
+ module.exports = {
88
+ MAX_ENTRIES,
89
+ entries,
90
+ trimEntries,
91
+ sseClients,
92
+ getRateLimitState,
93
+ setRateLimitState,
94
+ sessionMeta,
95
+ activeRequests,
96
+ sessionCosts,
97
+ versionIndex,
98
+ interceptSessions,
99
+ pendingRequests,
100
+ getInterceptTimeout,
101
+ setInterceptTimeout,
102
+ getCurrentSessionId,
103
+ isQuotaCheck,
104
+ extractCwd,
105
+ extractSessionId,
106
+ detectSession,
107
+ printSessionBanner,
108
+ };
@@ -0,0 +1,150 @@
1
+ 'use strict';
2
+
3
+ const { safeCountTokens } = require('./helpers');
4
+
5
// ── System prompt diff helpers ───────────────────────────────────────

// Ownership label attached to each recognized prompt block — surfaced as the
// `owner` field on computeBlockDiff rows (Anthropic's base prompt vs. the
// user's own configuration).
const BLOCK_OWNERS_SERVER = {
  billingHeader: 'anthropic', coreIdentity: 'anthropic', coreInstructions: 'anthropic',
  customSkills: 'user', pluginSkills: 'user', mcpServersList: 'user', settingsJson: 'user', envAndGit: 'user',
  customAgents: 'user', autoMemory: 'user',
};
12
+
13
// Classify which agent a system-prompt array belongs to.
// Returns { key, label }: key is a URL-safe slug, label is display text.
function extractAgentType(sys) {
  if (!Array.isArray(sys) || sys.length < 2) {
    return { key: 'unknown', label: 'Unknown' };
  }
  const identityBlock = (sys[1]?.text || '').trim();
  const promptBlock = (sys[2]?.text || '').trim();

  // Primary: main Claude Code prompt
  if (promptBlock.startsWith('You are an interactive agent')) {
    return { key: 'claude-code', label: 'Claude Code' };
  }
  // Known sub-agents by prompt content (checked before the identity fallback —
  // sub-agents also carry "You are Claude Code" in block 1)
  if (promptBlock.startsWith('Generate a concise')) {
    return { key: 'title-generator', label: 'Title Generator' };
  }
  // Fallback: older versions put the identity in block 1
  if (identityBlock.startsWith('You are Claude Code')) {
    return { key: 'claude-code', label: 'Claude Code' };
  }
  // "You are a/an/the <role>" pattern
  const m = promptBlock.match(/^You are (?:a |an |the )?(.+?)(?:\s+for\s|\s+that\s|\s+specializ|\s*[,.]|\n)/i);
  if (!m) return { key: 'agent', label: 'Agent' };
  const role = m[1].trim();
  const slug = role.toLowerCase().replace(/\s+/g, '-').replace(/[^a-z0-9-]/g, '').slice(0, 40);
  const label = role.replace(/\b\w/g, (c) => c.toUpperCase());
  return { key: slug || 'agent', label: label || 'Agent' };
}
32
+
33
// Split the main system-prompt block (b2) into named sections using the
// first occurrence of each known marker. Everything before the earliest
// marker is the core instructions; each marker owns the text up to the next.
function splitB2IntoBlocks(b2) {
  const markerDefs = [
    { key: 'customSkills', pattern: /# User'?s Current Configuration/ },
    { key: 'customAgents', pattern: /\*\*Available custom agents/ },
    { key: 'mcpServersList', pattern: /\*\*Configured MCP servers/ },
    { key: 'pluginSkills', pattern: /\*\*Available plugin skills/ },
    { key: 'settingsJson', pattern: /\*\*User's settings\.json/ },
    { key: 'envAndGit', pattern: /# Environment\n|<env>/ },
    { key: 'autoMemory', pattern: /# auto memory\n|You have a persistent, file-based memory/ },
  ];

  // Locate each marker that actually appears, then order by position.
  const found = [];
  for (const { key, pattern } of markerDefs) {
    const hit = pattern.exec(b2);
    if (hit) found.push({ key, index: hit.index });
  }
  found.sort((x, y) => x.index - y.index);

  const blocks = {};
  blocks.coreInstructions = b2.slice(0, found.length > 0 ? found[0].index : b2.length);
  found.forEach(({ key, index }, i) => {
    const end = i + 1 < found.length ? found[i + 1].index : b2.length;
    blocks[key] = b2.slice(index, end);
  });
  return blocks;
}
59
+
60
// Compare two b2 prompts block-by-block: token counts, ownership attribution,
// and a unified diff for any block whose text changed.
function computeBlockDiff(b2A, b2B) {
  const blocksA = splitB2IntoBlocks(b2A);
  const blocksB = splitB2IntoBlocks(b2B);
  const ALL_BLOCKS = ['coreInstructions', 'customSkills', 'customAgents', 'mcpServersList', 'pluginSkills', 'settingsJson', 'envAndGit', 'autoMemory'];

  const summarize = (block) => {
    const textA = blocksA[block] || '';
    const textB = blocksB[block] || '';
    const aTokens = safeCountTokens(textA);
    const bTokens = safeCountTokens(textB);
    const changed = textA !== textB;
    return {
      block,
      tokA: aTokens,
      tokB: bTokens,
      aTokens,
      bTokens,
      delta: bTokens - aTokens,
      status: changed ? 'changed' : 'same',
      owner: BLOCK_OWNERS_SERVER[block] || 'anthropic',
      textB,
      // Diffs are only computed for changed blocks; unchanged ones get ''.
      blockDiff: changed ? computeUnifiedDiff(textA, textB, block, block) : '',
    };
  };

  return ALL_BLOCKS.map(summarize);
}
76
+
77
// Produce a unified-diff string (---/+++ header plus @@ hunks) between two
// texts. Uses a greedy line matcher with a 5-line lookahead instead of a full
// LCS, then groups nearby changes into hunks with 3 lines of context.
function computeUnifiedDiff(textA, textB, labelA, labelB) {
  const linesA = textA.split('\n');
  const linesB = textB.split('\n');
  const result = [`--- ${labelA}`, `+++ ${labelB}`];

  // Phase 1: flat change list (' ' unchanged, '+' added, '-' removed).
  const changes = [];
  let i = 0, j = 0;
  while (i < linesA.length || j < linesB.length) {
    if (i < linesA.length && j < linesB.length && linesA[i] === linesB[j]) {
      changes.push({ type: ' ', line: linesA[i] }); i++; j++;
    } else if (j < linesB.length && (i >= linesA.length || linesA[i] !== linesB[j])) {
      // Look up to 5 lines ahead in A for the current B line; if found, the
      // intervening A lines were deletions, otherwise B's line is an addition.
      let matchAhead = -1;
      for (let k = 1; k <= 5 && i + k < linesA.length; k++) {
        if (linesA[i + k] === linesB[j]) { matchAhead = k; break; }
      }
      if (matchAhead === -1) {
        changes.push({ type: '+', line: linesB[j] }); j++;
      } else {
        for (let k = 0; k < matchAhead; k++) { changes.push({ type: '-', line: linesA[i] }); i++; }
      }
    } else {
      // Only A has lines left: everything remaining is a deletion.
      changes.push({ type: '-', line: linesA[i] }); i++;
    }
  }

  // Phase 2: merge change points whose CONTEXT windows overlap into hunk
  // index ranges over `changes`.
  const CONTEXT = 3;
  const hunkStarts = [];
  for (let ci = 0; ci < changes.length; ci++) {
    if (changes[ci].type !== ' ') hunkStarts.push(ci);
  }
  const hunkRanges = [];
  let ri = 0;
  while (ri < hunkStarts.length) {
    const start = Math.max(0, hunkStarts[ri] - CONTEXT);
    let end = Math.min(changes.length - 1, hunkStarts[ri] + CONTEXT);
    // Absorb subsequent change points that fall within reach of this hunk.
    while (ri + 1 < hunkStarts.length && hunkStarts[ri + 1] <= end + CONTEXT) {
      ri++;
      end = Math.min(changes.length - 1, hunkStarts[ri] + CONTEXT);
    }
    hunkRanges.push([start, end]);
    ri++;
  }

  // Phase 3: emit @@ headers and bodies, tracking 1-based line numbers in A
  // (advances on ' ' and '-') and B (advances on ' ' and '+').
  let aLine = 1, bLine = 1, ci2 = 0;
  for (const [hStart, hEnd] of hunkRanges) {
    // Advance counters over the unchanged gap before this hunk.
    while (ci2 < hStart) {
      if (changes[ci2].type !== '+') aLine++;
      if (changes[ci2].type !== '-') bLine++;
      ci2++;
    }
    const hunkA = aLine, hunkB = bLine;
    let aCount = 0, bCount = 0;
    const hunkLines = [];
    for (let ci3 = hStart; ci3 <= hEnd; ci3++) {
      const c = changes[ci3];
      hunkLines.push(c.type + c.line);
      if (c.type !== '+') aCount++;
      if (c.type !== '-') bCount++;
    }
    result.push(`@@ -${hunkA},${aCount} +${hunkB},${bCount} @@`);
    for (const l of hunkLines) result.push(l);
    // Advance the counters past this hunk before processing the next one.
    for (let ci3 = hStart; ci3 <= hEnd; ci3++) {
      if (changes[ci3].type !== '+') aLine++;
      if (changes[ci3].type !== '-') bLine++;
    }
    ci2 = hEnd + 1;
  }
  return result.join('\n');
}
143
+
144
// System-prompt analysis API: agent classification, block splitting, and diffs.
module.exports = {
  BLOCK_OWNERS_SERVER,
  extractAgentType,
  splitB2IntoBlocks,
  computeBlockDiff,
  computeUnifiedDiff,
};