claude-usage-dashboard 1.4.0 → 1.4.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +114 -77
- package/bin/cli.cjs +20 -20
- package/bin/cli.js +16 -16
- package/bin/cli.sh +11 -11
- package/package.json +43 -43
- package/public/css/style.css +265 -265
- package/public/index.html +108 -108
- package/public/js/api.js +16 -16
- package/public/js/app.js +304 -304
- package/public/js/charts/cache-efficiency.js +29 -29
- package/public/js/charts/cost-comparison.js +39 -39
- package/public/js/charts/model-distribution.js +56 -56
- package/public/js/charts/project-distribution.js +103 -103
- package/public/js/charts/session-stats.js +117 -117
- package/public/js/charts/token-trend.js +357 -357
- package/public/js/components/date-picker.js +35 -35
- package/public/js/components/plan-selector.js +57 -57
- package/server/aggregator.js +151 -151
- package/server/credentials.js +112 -112
- package/server/index.js +45 -45
- package/server/parser.js +129 -129
- package/server/pricing.js +52 -52
- package/server/routes/api.js +141 -130
- package/server/sync.js +69 -69
package/server/pricing.js
CHANGED
|
@@ -1,52 +1,52 @@
|
|
|
1
|
-
export const MODEL_PRICING = {
|
|
2
|
-
'claude-opus-4-6': {
|
|
3
|
-
input_price_per_mtok: 5,
|
|
4
|
-
output_price_per_mtok: 25,
|
|
5
|
-
cache_read_price_per_mtok: 0.50,
|
|
6
|
-
cache_creation_price_per_mtok: 6.25,
|
|
7
|
-
},
|
|
8
|
-
'claude-sonnet-4-6': {
|
|
9
|
-
input_price_per_mtok: 3,
|
|
10
|
-
output_price_per_mtok: 15,
|
|
11
|
-
cache_read_price_per_mtok: 0.30,
|
|
12
|
-
cache_creation_price_per_mtok: 3.75,
|
|
13
|
-
},
|
|
14
|
-
'claude-haiku-4-5': {
|
|
15
|
-
input_price_per_mtok: 1,
|
|
16
|
-
output_price_per_mtok: 5,
|
|
17
|
-
cache_read_price_per_mtok: 0.10,
|
|
18
|
-
cache_creation_price_per_mtok: 1.25,
|
|
19
|
-
},
|
|
20
|
-
};
|
|
21
|
-
|
|
22
|
-
export const PLAN_DEFAULTS = {
|
|
23
|
-
pro: 20,
|
|
24
|
-
max5x: 100,
|
|
25
|
-
max20x: 200,
|
|
26
|
-
};
|
|
27
|
-
|
|
28
|
-
export function getModelPricing(modelId) {
|
|
29
|
-
return MODEL_PRICING[modelId] || null;
|
|
30
|
-
}
|
|
31
|
-
|
|
32
|
-
/**
|
|
33
|
-
* Calculate the API cost for a single usage record.
|
|
34
|
-
* Returns 0 for unknown models.
|
|
35
|
-
*
|
|
36
|
-
* In Claude Code logs, input_tokens is the non-cached input.
|
|
37
|
-
* cache_read_tokens and cache_creation_tokens are separate, additive fields.
|
|
38
|
-
* cost = input * input_rate + cache_read * read_rate + cache_creation * write_rate + output * output_rate
|
|
39
|
-
*/
|
|
40
|
-
export function calculateRecordCost(record) {
|
|
41
|
-
const pricing = MODEL_PRICING[record.model];
|
|
42
|
-
if (!pricing) return 0;
|
|
43
|
-
|
|
44
|
-
const M = 1_000_000;
|
|
45
|
-
|
|
46
|
-
return (
|
|
47
|
-
(record.input_tokens / M) * pricing.input_price_per_mtok +
|
|
48
|
-
(record.cache_read_tokens / M) * pricing.cache_read_price_per_mtok +
|
|
49
|
-
(record.cache_creation_tokens / M) * pricing.cache_creation_price_per_mtok +
|
|
50
|
-
(record.output_tokens / M) * pricing.output_price_per_mtok
|
|
51
|
-
);
|
|
52
|
-
}
|
|
1
|
+
// Per-model token pricing used by calculateRecordCost().
// All rates are USD per million tokens (MTok). Cache reads are billed below
// the plain input rate and cache creation above it (see the relative values).
export const MODEL_PRICING = {
  'claude-opus-4-6': {
    input_price_per_mtok: 5,
    output_price_per_mtok: 25,
    cache_read_price_per_mtok: 0.50,
    cache_creation_price_per_mtok: 6.25,
  },
  'claude-sonnet-4-6': {
    input_price_per_mtok: 3,
    output_price_per_mtok: 15,
    cache_read_price_per_mtok: 0.30,
    cache_creation_price_per_mtok: 3.75,
  },
  'claude-haiku-4-5': {
    input_price_per_mtok: 1,
    output_price_per_mtok: 5,
    cache_read_price_per_mtok: 0.10,
    cache_creation_price_per_mtok: 1.25,
  },
};
|
|
21
|
+
|
|
22
|
+
// Default subscription price in USD per plan, used by the /cost route when the
// client supplies no custom price. The per-day breakdown in routes/api.js
// divides this by days-in-month, i.e. it is treated as a monthly amount.
export const PLAN_DEFAULTS = {
  pro: 20,
  max5x: 100,
  max20x: 200,
};
|
|
27
|
+
|
|
28
|
+
/**
 * Look up the pricing table entry for a model id.
 *
 * @param {string} modelId - Key into MODEL_PRICING (e.g. 'claude-sonnet-4-6').
 * @returns {object|null} The per-MTok pricing entry, or null for unknown models.
 */
export function getModelPricing(modelId) {
  const entry = MODEL_PRICING[modelId];
  if (entry) {
    return entry;
  }
  return null;
}
|
|
31
|
+
|
|
32
|
+
/**
 * Calculate the estimated API cost in USD for a single usage record.
 * Returns 0 for unknown models.
 *
 * In Claude Code logs, input_tokens is the non-cached input.
 * cache_read_tokens and cache_creation_tokens are separate, additive fields.
 * cost = input * input_rate + cache_read * read_rate + cache_creation * write_rate + output * output_rate
 *
 * Token fields that are missing on the record are treated as 0, so a partial
 * record contributes a partial cost instead of propagating NaN into totals.
 *
 * @param {object} record - Parsed usage record with a `model` id and token counts.
 * @returns {number} Unrounded cost in USD (callers round for display).
 */
export function calculateRecordCost(record) {
  const pricing = MODEL_PRICING[record.model];
  if (!pricing) return 0;

  const M = 1_000_000;

  return (
    ((record.input_tokens ?? 0) / M) * pricing.input_price_per_mtok +
    ((record.cache_read_tokens ?? 0) / M) * pricing.cache_read_price_per_mtok +
    ((record.cache_creation_tokens ?? 0) / M) * pricing.cache_creation_price_per_mtok +
    ((record.output_tokens ?? 0) / M) * pricing.output_price_per_mtok
  );
}
|
package/server/routes/api.js
CHANGED
|
@@ -1,130 +1,141 @@
|
|
|
1
|
-
import { Router } from 'express';
|
|
2
|
-
import { parseLogDirectory, parseMultiMachineDirectory } from '../parser.js';
|
|
3
|
-
import { syncLocalToShared } from '../sync.js';
|
|
4
|
-
import { filterByDateRange, autoGranularity, aggregateByTime, aggregateBySession, aggregateByProject, aggregateByModel, aggregateCache } from '../aggregator.js';
|
|
5
|
-
import { calculateRecordCost, PLAN_DEFAULTS } from '../pricing.js';
|
|
6
|
-
import { createQuotaFetcher } from '../quota.js';
|
|
7
|
-
import { getSubscriptionInfo } from '../credentials.js';
|
|
8
|
-
|
|
9
|
-
export function createApiRouter(logBaseDir, options = {}) {
|
|
10
|
-
const router = Router();
|
|
11
|
-
const CACHE_TTL_MS = options.cacheTtlMs || 5000;
|
|
12
|
-
let cachedRecords = [];
|
|
13
|
-
let lastRefreshed = null;
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
const
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
const
|
|
82
|
-
const
|
|
83
|
-
const
|
|
84
|
-
const
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
const
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
const
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
1
|
+
import { Router } from 'express';
|
|
2
|
+
import { parseLogDirectory, parseMultiMachineDirectory } from '../parser.js';
|
|
3
|
+
import { syncLocalToShared } from '../sync.js';
|
|
4
|
+
import { filterByDateRange, autoGranularity, aggregateByTime, aggregateBySession, aggregateByProject, aggregateByModel, aggregateCache } from '../aggregator.js';
|
|
5
|
+
import { calculateRecordCost, PLAN_DEFAULTS } from '../pricing.js';
|
|
6
|
+
import { createQuotaFetcher } from '../quota.js';
|
|
7
|
+
import { getSubscriptionInfo } from '../credentials.js';
|
|
8
|
+
|
|
9
|
+
/**
 * Build the Express router serving the dashboard API.
 *
 * @param {string} logBaseDir - Root directory of the local Claude Code JSONL logs.
 * @param {object} [options]
 * @param {number} [options.cacheTtlMs=5000] - How long parsed records are reused before re-parsing.
 * @param {string} [options.syncDir] - Shared multi-machine directory; enables background sync and multi-machine parsing.
 * @param {number} [options.syncIntervalMs=30000] - Background sync period when syncDir is set.
 * @param {string} [options.machineName] - Name for this machine's folder under syncDir.
 * @param {object} [options.quotaFetcher] - Injectable quota fetcher (defaults to createQuotaFetcher()).
 * @param {Function} [options.getSubscriptionInfo] - Injectable subscription-info provider.
 * @returns {import('express').Router}
 */
export function createApiRouter(logBaseDir, options = {}) {
  const router = Router();
  const CACHE_TTL_MS = options.cacheTtlMs || 5000;
  let cachedRecords = [];
  let lastRefreshed = null;

  // Background sync: runs periodically without blocking API requests
  if (options.syncDir) {
    const SYNC_INTERVAL_MS = options.syncIntervalMs || 30000;
    const runBackgroundSync = () => {
      syncLocalToShared(logBaseDir, options.syncDir, options.machineName).catch(err => {
        console.warn('Background sync failed:', err.message);
      });
    };
    runBackgroundSync();
    // unref() so a pending timer never keeps the process alive at shutdown.
    setInterval(runBackgroundSync, SYNC_INTERVAL_MS).unref();
  }

  // Re-parse logs at most once per CACHE_TTL_MS; otherwise serve the cache.
  // On parse failure the previous cache is kept and served.
  function refreshRecords() {
    const now = Date.now();
    if (lastRefreshed && (now - lastRefreshed) < CACHE_TTL_MS) return cachedRecords;
    try {
      if (options.syncDir) {
        cachedRecords = parseMultiMachineDirectory(options.syncDir);
      } else {
        cachedRecords = parseLogDirectory(logBaseDir);
      }
      lastRefreshed = now;
      console.log(`Parsed ${cachedRecords.length} records${options.syncDir ? ' (sync mode)' : ''}`);
    } catch (err) {
      console.error('Failed to parse log directory:', err.message);
      // Stamp the first failure too, so a persistently broken directory is
      // not re-parsed on every single request.
      if (!lastRefreshed) lastRefreshed = now;
    }
    return cachedRecords;
  }

  // Common ?from/&to/&project/&model filtering used by every aggregate route.
  function applyFilters(query) {
    let records = filterByDateRange(refreshRecords(), query.from, query.to);
    if (query.project) records = records.filter(r => r.project === query.project);
    if (query.model) records = records.filter(r => r.model === query.model);
    return records;
  }

  // GET /usage — time-bucketed token usage plus overall totals.
  router.get('/usage', (req, res) => {
    try {
      const records = applyFilters(req.query);
      const granularity = req.query.granularity || autoGranularity(req.query.from, req.query.to);
      const buckets = aggregateByTime(records, granularity);
      const total = { input_tokens: 0, output_tokens: 0, cache_read_tokens: 0, cache_creation_tokens: 0, estimated_api_cost_usd: 0 };
      for (const r of records) {
        total.input_tokens += r.input_tokens; total.output_tokens += r.output_tokens;
        total.cache_read_tokens += r.cache_read_tokens; total.cache_creation_tokens += r.cache_creation_tokens;
        total.estimated_api_cost_usd += calculateRecordCost(r);
      }
      total.estimated_api_cost_usd = Math.round(total.estimated_api_cost_usd * 100) / 100;
      res.json({ granularity, buckets, total });
    } catch (err) {
      res.status(500).json({ error: err.message, code: 'PARSE_ERROR' });
    }
  });

  router.get('/models', (req, res) => { res.json({ models: aggregateByModel(applyFilters(req.query)) }); });
  router.get('/projects', (req, res) => { res.json({ projects: aggregateByProject(applyFilters(req.query)) }); });

  // GET /sessions — per-session aggregates with sorting and pagination.
  router.get('/sessions', (req, res) => {
    const records = applyFilters(req.query);
    let sessions = aggregateBySession(records);
    const sort = req.query.sort || 'date';
    const order = req.query.order || 'desc';
    // Sort comparators produce descending order; 'asc' reverses afterwards.
    const sortFn = { date: (a, b) => new Date(b.startTime) - new Date(a.startTime), cost: (a, b) => b.estimated_cost_usd - a.estimated_cost_usd, tokens: (a, b) => b.total_tokens - a.total_tokens }[sort] || ((a, b) => new Date(b.startTime) - new Date(a.startTime));
    sessions.sort(sortFn);
    if (order === 'asc') sessions.reverse();
    const totalTokens = sessions.reduce((sum, s) => sum + s.total_tokens, 0);
    const totalCost = sessions.reduce((sum, s) => sum + s.estimated_cost_usd, 0);
    // Explicit radix + clamp to >= 1: hex-looking or negative query values
    // previously produced nonsense slice() bounds.
    const page = Math.max(1, Number.parseInt(req.query.page, 10) || 1);
    const limit = Math.max(1, Number.parseInt(req.query.limit, 10) || 20);
    const totalSessions = sessions.length;
    const totalPages = Math.ceil(totalSessions / limit);
    sessions = sessions.slice((page - 1) * limit, page * limit);
    res.json({ sessions, pagination: { page, limit, total_sessions: totalSessions, total_pages: totalPages }, totals: { total_tokens: totalTokens, estimated_cost_usd: Math.round(totalCost * 100) / 100 } });
  });

  // GET /cost — subscription vs. pay-per-use API cost comparison.
  router.get('/cost', (req, res) => {
    const records = applyFilters(req.query);
    const plan = req.query.plan || 'max5x';
    const customPrice = req.query.customPrice ? parseFloat(req.query.customPrice) : null;
    // Accept only a finite, positive custom price; NaN/zero/negative fall back
    // to the plan default (a negative customPrice used to be accepted verbatim).
    const subscriptionCost = Number.isFinite(customPrice) && customPrice > 0
      ? customPrice
      : (PLAN_DEFAULTS[plan] || 100);
    let apiCost = 0;
    for (const r of records) apiCost += calculateRecordCost(r);
    apiCost = Math.round(apiCost * 100) / 100;
    const dayMap = new Map();
    for (const r of records) {
      const d = new Date(r.timestamp);
      const day = `${d.getFullYear()}-${String(d.getMonth()+1).padStart(2,'0')}-${String(d.getDate()).padStart(2,'0')}`;
      dayMap.set(day, (dayMap.get(day) || 0) + calculateRecordCost(r));
    }
    // NOTE(review): day keys are built in local time but days-in-month uses
    // UTC — confirm this is intended around month boundaries.
    const costPerDay = Array.from(dayMap.entries()).sort(([a], [b]) => a.localeCompare(b)).map(([date, cost]) => {
      const d = new Date(date);
      const daysInMonth = new Date(d.getUTCFullYear(), d.getUTCMonth() + 1, 0).getUTCDate();
      return { date, api_cost: Math.round(cost * 100) / 100, subscription_daily: Math.round((subscriptionCost / daysInMonth) * 100) / 100 };
    });
    const savings = apiCost - subscriptionCost;
    res.json({ plan, subscription_cost_usd: subscriptionCost, api_equivalent_cost_usd: apiCost, savings_usd: Math.round(savings * 100) / 100, savings_percent: apiCost > 0 ? Math.round((savings / apiCost) * 1000) / 10 : 0, cost_per_day: costPerDay });
  });

  router.get('/cache', (req, res) => { res.json(aggregateCache(applyFilters(req.query))); });

  // GET /quota — live quota from the injected/default fetcher; failures are
  // reported in-band rather than as an HTTP error.
  const quotaFetcher = options.quotaFetcher || createQuotaFetcher();
  router.get('/quota', async (req, res) => {
    try {
      const data = await quotaFetcher.fetchQuota();
      res.json(data);
    } catch (err) {
      res.json({ available: false, error: err.message });
    }
  });

  router.get('/subscription', (req, res) => {
    const info = options.getSubscriptionInfo ? options.getSubscriptionInfo() : getSubscriptionInfo();
    res.json(info || { plan: null, subscriptionType: null, rateLimitTier: null });
  });

  // GET /status — cache freshness diagnostics.
  router.get('/status', (req, res) => {
    refreshRecords();
    res.json({
      record_count: cachedRecords.length,
      last_refreshed: lastRefreshed ? new Date(lastRefreshed).toISOString() : null,
      cache_ttl_ms: CACHE_TTL_MS,
    });
  });

  return router;
}
|
package/server/sync.js
CHANGED
|
@@ -1,69 +1,69 @@
|
|
|
1
|
-
import fs from 'fs/promises';
|
|
2
|
-
import path from 'path';
|
|
3
|
-
|
|
4
|
-
const ILLEGAL_CHARS = /[/\\:*?"<>|]/g;
|
|
5
|
-
|
|
6
|
-
export function sanitizeMachineName(name) {
|
|
7
|
-
let clean = name.replace(ILLEGAL_CHARS, '-').trim().replace(/^\.+|\.+$/g, '').trim();
|
|
8
|
-
return clean || 'unknown-host';
|
|
9
|
-
}
|
|
10
|
-
|
|
11
|
-
export async function syncLocalToShared(localDir, syncDir, machineName) {
|
|
12
|
-
const safeName = sanitizeMachineName(machineName);
|
|
13
|
-
const machineDir = path.join(syncDir, safeName);
|
|
14
|
-
let syncedFiles = 0;
|
|
15
|
-
const startTime = Date.now();
|
|
16
|
-
|
|
17
|
-
let projectDirs;
|
|
18
|
-
try {
|
|
19
|
-
const entries = await fs.readdir(localDir, { withFileTypes: true });
|
|
20
|
-
projectDirs = entries.filter(d => d.isDirectory());
|
|
21
|
-
} catch {
|
|
22
|
-
return { syncedFiles, machineName: safeName };
|
|
23
|
-
}
|
|
24
|
-
|
|
25
|
-
for (const dir of projectDirs) {
|
|
26
|
-
const localProjPath = path.join(localDir, dir.name);
|
|
27
|
-
let files;
|
|
28
|
-
try {
|
|
29
|
-
files = (await fs.readdir(localProjPath)).filter(f => f.endsWith('.jsonl'));
|
|
30
|
-
} catch {
|
|
31
|
-
continue;
|
|
32
|
-
}
|
|
33
|
-
|
|
34
|
-
for (const file of files) {
|
|
35
|
-
const localFile = path.join(localProjPath, file);
|
|
36
|
-
const sharedFile = path.join(machineDir, dir.name, file);
|
|
37
|
-
|
|
38
|
-
try {
|
|
39
|
-
const localStat = await fs.stat(localFile);
|
|
40
|
-
let needsSync = false;
|
|
41
|
-
|
|
42
|
-
try {
|
|
43
|
-
const sharedStat = await fs.stat(sharedFile);
|
|
44
|
-
needsSync = localStat.size > sharedStat.size;
|
|
45
|
-
} catch {
|
|
46
|
-
needsSync = true;
|
|
47
|
-
}
|
|
48
|
-
|
|
49
|
-
if (needsSync) {
|
|
50
|
-
await fs.mkdir(path.join(machineDir, dir.name), { recursive: true });
|
|
51
|
-
await fs.copyFile(localFile, sharedFile);
|
|
52
|
-
syncedFiles++;
|
|
53
|
-
}
|
|
54
|
-
} catch (err) {
|
|
55
|
-
console.warn(`Sync warning: failed to sync ${file}: ${err.message}`);
|
|
56
|
-
}
|
|
57
|
-
}
|
|
58
|
-
}
|
|
59
|
-
|
|
60
|
-
const elapsed = Date.now() - startTime;
|
|
61
|
-
if (syncedFiles > 0) {
|
|
62
|
-
console.log(`Synced ${syncedFiles} files to ${machineDir}`);
|
|
63
|
-
}
|
|
64
|
-
if (elapsed > 30000) {
|
|
65
|
-
console.warn(`Sync took ${Math.round(elapsed / 1000)}s — shared folder may be on a slow mount`);
|
|
66
|
-
}
|
|
67
|
-
|
|
68
|
-
return { syncedFiles, machineName: safeName };
|
|
69
|
-
}
|
|
1
|
+
import fs from 'fs/promises';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
|
|
4
|
+
// Characters that are not safe in directory names across common filesystems.
const ILLEGAL_CHARS = /[/\\:*?"<>|]/g;

/**
 * Turn an arbitrary machine name into a safe directory name.
 * Illegal filesystem characters become '-'; surrounding whitespace and
 * leading/trailing dots are stripped. Falls back to 'unknown-host' when the
 * result is empty — including when the input is null/undefined, which
 * previously threw a TypeError.
 *
 * @param {string} name - Raw machine name (any value is tolerated).
 * @returns {string} A non-empty, filesystem-safe name.
 */
export function sanitizeMachineName(name) {
  const clean = String(name ?? '')
    .replace(ILLEGAL_CHARS, '-')
    .trim()
    .replace(/^\.+|\.+$/g, '')
    .trim();
  return clean || 'unknown-host';
}
|
|
10
|
+
|
|
11
|
+
/**
 * One-way sync of local JSONL logs into a per-machine folder under a shared
 * directory: <syncDir>/<sanitized machine name>/<project>/<file>.jsonl.
 *
 * A file is copied when the shared copy is missing or smaller than the local
 * one (the size comparison assumes log files are append-only — TODO confirm).
 * Individual file failures are logged and skipped; this function never throws.
 *
 * @param {string} localDir - Local log root containing one directory per project.
 * @param {string} syncDir - Shared root collecting logs from every machine.
 * @param {string} machineName - Raw machine name; sanitized before use as a path.
 * @returns {Promise<{syncedFiles: number, machineName: string}>} Number of files
 *   copied and the sanitized machine name actually used.
 */
export async function syncLocalToShared(localDir, syncDir, machineName) {
  const safeName = sanitizeMachineName(machineName);
  const machineDir = path.join(syncDir, safeName);
  let syncedFiles = 0;
  const startTime = Date.now();

  // Enumerate project directories; an unreadable or missing localDir is a no-op.
  let projectDirs;
  try {
    const entries = await fs.readdir(localDir, { withFileTypes: true });
    projectDirs = entries.filter(d => d.isDirectory());
  } catch {
    return { syncedFiles, machineName: safeName };
  }

  for (const dir of projectDirs) {
    const localProjPath = path.join(localDir, dir.name);
    let files;
    try {
      files = (await fs.readdir(localProjPath)).filter(f => f.endsWith('.jsonl'));
    } catch {
      continue; // project directory vanished or is unreadable — skip it
    }

    for (const file of files) {
      const localFile = path.join(localProjPath, file);
      const sharedFile = path.join(machineDir, dir.name, file);

      try {
        const localStat = await fs.stat(localFile);
        let needsSync = false;

        try {
          const sharedStat = await fs.stat(sharedFile);
          // Copy only when the local file has grown past the shared copy.
          needsSync = localStat.size > sharedStat.size;
        } catch {
          needsSync = true; // shared copy does not exist yet
        }

        if (needsSync) {
          await fs.mkdir(path.join(machineDir, dir.name), { recursive: true });
          await fs.copyFile(localFile, sharedFile);
          syncedFiles++;
        }
      } catch (err) {
        // Best-effort: one bad file must not abort the whole sync pass.
        console.warn(`Sync warning: failed to sync ${file}: ${err.message}`);
      }
    }
  }

  const elapsed = Date.now() - startTime;
  if (syncedFiles > 0) {
    console.log(`Synced ${syncedFiles} files to ${machineDir}`);
  }
  if (elapsed > 30000) {
    console.warn(`Sync took ${Math.round(elapsed / 1000)}s — shared folder may be on a slow mount`);
  }

  return { syncedFiles, machineName: safeName };
}
|