agentfit 0.1.3 → 0.1.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/workflows/release.yml +4 -0
- package/README.md +0 -2
- package/app/(dashboard)/ai-insights/page.tsx +271 -0
- package/app/(dashboard)/models/page.tsx +21 -0
- package/app/(dashboard)/page.tsx +2 -0
- package/app/(dashboard)/sessions/[id]/page.tsx +16 -2
- package/app/(dashboard)/settings/page.tsx +168 -0
- package/app/api/analyze/aggregate/route.ts +88 -0
- package/app/api/analyze/estimate/route.ts +62 -0
- package/app/api/analyze/route.ts +142 -0
- package/app/api/cc-versions/route.ts +84 -0
- package/app/api/config/route.ts +35 -0
- package/bin/agentfit.mjs +18 -8
- package/components/analyze-confirm-dialog.tsx +81 -0
- package/components/app-sidebar.tsx +14 -0
- package/components/data-provider.tsx +4 -2
- package/components/model-usage-chart.tsx +216 -0
- package/components/overview-cards.tsx +1 -1
- package/components/session-ai-analysis.tsx +318 -0
- package/components/sessions-table.tsx +169 -15
- package/components/version-lag-chart.tsx +284 -0
- package/electron/main.mjs +61 -34
- package/generated/prisma/browser.ts +5 -0
- package/generated/prisma/client.ts +5 -0
- package/generated/prisma/internal/class.ts +14 -4
- package/generated/prisma/internal/prismaNamespace.ts +95 -2
- package/generated/prisma/internal/prismaNamespaceBrowser.ts +19 -1
- package/generated/prisma/models/Session.ts +57 -1
- package/generated/prisma/models/SessionAnalysis.ts +1321 -0
- package/generated/prisma/models.ts +1 -0
- package/lib/config.ts +45 -0
- package/lib/db.ts +1 -1
- package/lib/openai.ts +253 -0
- package/lib/parse-codex.ts +2 -0
- package/lib/parse-logs.ts +21 -7
- package/lib/queries.ts +5 -1
- package/lib/sync.ts +17 -5
- package/package.json +2 -1
- package/prisma/migrations/20260404151230_add_session_analysis/migration.sql +18 -0
- package/prisma/migrations/20260405230736_add_cli_version/migration.sql +41 -0
- package/prisma/migrations/20260406205546_add_model_counts/migration.sql +42 -0
- package/prisma/schema.prisma +16 -0
- package/prisma/schema.sql +20 -0
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
import { NextRequest, NextResponse } from 'next/server'
|
|
2
|
+
import path from 'path'
|
|
3
|
+
import os from 'os'
|
|
4
|
+
import fs from 'fs'
|
|
5
|
+
import { parseSessionDetail } from '@/lib/session-detail'
|
|
6
|
+
import { extractUserMessages, estimateCost } from '@/lib/openai'
|
|
7
|
+
|
|
8
|
+
export const dynamic = 'force-dynamic'
|
|
9
|
+
|
|
10
|
+
function findSessionFile(sessionId: string): string | null {
|
|
11
|
+
const projectsDir = path.join(os.homedir(), '.claude', 'projects')
|
|
12
|
+
if (!fs.existsSync(projectsDir)) return null
|
|
13
|
+
for (const dir of fs.readdirSync(projectsDir)) {
|
|
14
|
+
const candidate = path.join(projectsDir, dir, `${sessionId}.jsonl`)
|
|
15
|
+
if (fs.existsSync(candidate)) return candidate
|
|
16
|
+
}
|
|
17
|
+
return null
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
export async function POST(request: NextRequest) {
|
|
21
|
+
try {
|
|
22
|
+
const body = await request.json()
|
|
23
|
+
const sessionIds: string[] = Array.isArray(body.sessionIds)
|
|
24
|
+
? body.sessionIds
|
|
25
|
+
: body.sessionId
|
|
26
|
+
? [body.sessionId]
|
|
27
|
+
: []
|
|
28
|
+
|
|
29
|
+
if (sessionIds.length === 0) {
|
|
30
|
+
return NextResponse.json({ error: 'Missing sessionId or sessionIds' }, { status: 400 })
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
let totalMessages = 0
|
|
34
|
+
let totalInputTokens = 0
|
|
35
|
+
let totalOutputTokens = 0
|
|
36
|
+
let totalCostUSD = 0
|
|
37
|
+
|
|
38
|
+
for (const sessionId of sessionIds) {
|
|
39
|
+
const filePath = findSessionFile(sessionId)
|
|
40
|
+
if (!filePath) continue
|
|
41
|
+
|
|
42
|
+
const detail = parseSessionDetail(filePath, sessionId)
|
|
43
|
+
const messages = extractUserMessages(detail.chatLog)
|
|
44
|
+
const estimate = estimateCost(messages)
|
|
45
|
+
|
|
46
|
+
totalMessages += estimate.messageCount
|
|
47
|
+
totalInputTokens += estimate.estimatedInputTokens
|
|
48
|
+
totalOutputTokens += estimate.estimatedOutputTokens
|
|
49
|
+
totalCostUSD += estimate.estimatedCostUSD
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
return NextResponse.json({
|
|
53
|
+
sessionCount: sessionIds.length,
|
|
54
|
+
messageCount: totalMessages,
|
|
55
|
+
estimatedInputTokens: totalInputTokens,
|
|
56
|
+
estimatedOutputTokens: totalOutputTokens,
|
|
57
|
+
estimatedCostUSD: totalCostUSD,
|
|
58
|
+
})
|
|
59
|
+
} catch (error) {
|
|
60
|
+
return NextResponse.json({ error: (error as Error).message }, { status: 500 })
|
|
61
|
+
}
|
|
62
|
+
}
|
|
@@ -0,0 +1,142 @@
|
|
|
1
|
+
import { NextRequest, NextResponse } from 'next/server'
|
|
2
|
+
import path from 'path'
|
|
3
|
+
import os from 'os'
|
|
4
|
+
import fs from 'fs'
|
|
5
|
+
import { prisma } from '@/lib/db'
|
|
6
|
+
import { getOpenAIKey } from '@/lib/config'
|
|
7
|
+
import { parseSessionDetail } from '@/lib/session-detail'
|
|
8
|
+
import { extractUserMessages, classifyMessages } from '@/lib/openai'
|
|
9
|
+
|
|
10
|
+
export const dynamic = 'force-dynamic'
|
|
11
|
+
|
|
12
|
+
function findSessionFile(sessionId: string): string | null {
|
|
13
|
+
const projectsDir = path.join(os.homedir(), '.claude', 'projects')
|
|
14
|
+
if (!fs.existsSync(projectsDir)) return null
|
|
15
|
+
for (const dir of fs.readdirSync(projectsDir)) {
|
|
16
|
+
const candidate = path.join(projectsDir, dir, `${sessionId}.jsonl`)
|
|
17
|
+
if (fs.existsSync(candidate)) return candidate
|
|
18
|
+
}
|
|
19
|
+
return null
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
// GET — retrieve analysis results
|
|
23
|
+
export async function GET(request: NextRequest) {
|
|
24
|
+
const sessionId = request.nextUrl.searchParams.get('sessionId')
|
|
25
|
+
const status = request.nextUrl.searchParams.get('status')
|
|
26
|
+
|
|
27
|
+
try {
|
|
28
|
+
if (status === 'true') {
|
|
29
|
+
// Return all analyzed session IDs
|
|
30
|
+
const analyses = await prisma.sessionAnalysis.findMany({
|
|
31
|
+
select: { sessionId: true, analyzedAt: true, totalMessages: true, costUSD: true },
|
|
32
|
+
})
|
|
33
|
+
return NextResponse.json({ analyses })
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
if (sessionId) {
|
|
37
|
+
const analysis = await prisma.sessionAnalysis.findUnique({
|
|
38
|
+
where: { sessionId },
|
|
39
|
+
})
|
|
40
|
+
if (!analysis) {
|
|
41
|
+
return NextResponse.json({ analysis: null })
|
|
42
|
+
}
|
|
43
|
+
return NextResponse.json({
|
|
44
|
+
analysis: {
|
|
45
|
+
...analysis,
|
|
46
|
+
classifications: JSON.parse(analysis.classifications),
|
|
47
|
+
},
|
|
48
|
+
})
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
return NextResponse.json({ error: 'Missing sessionId or status param' }, { status: 400 })
|
|
52
|
+
} catch (error) {
|
|
53
|
+
return NextResponse.json({ error: (error as Error).message }, { status: 500 })
|
|
54
|
+
}
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
// POST — trigger analysis for a session
|
|
58
|
+
// POST — trigger analysis for a session
//
// Flow: require an OpenAI key → return cached result unless `force` →
// locate and parse the session JSONL → classify its user messages via
// OpenAI → upsert the result. Classifications are persisted as a JSON
// string but returned to the client as parsed objects.
export async function POST(request: NextRequest) {
  const apiKey = getOpenAIKey()
  if (!apiKey) {
    return NextResponse.json({ error: 'No OpenAI API key configured. Go to Settings to add one.' }, { status: 401 })
  }

  try {
    const body = await request.json()
    const sessionId = body.sessionId as string
    if (!sessionId) {
      return NextResponse.json({ error: 'Missing sessionId' }, { status: 400 })
    }

    // Check if already analyzed — a cached result is returned unless the
    // caller passes `force: true`, so re-opening a session never incurs a
    // second OpenAI charge.
    const existing = await prisma.sessionAnalysis.findUnique({
      where: { sessionId },
    })
    if (existing && !body.force) {
      return NextResponse.json({
        analysis: {
          ...existing,
          // stored as a JSON string; decode before returning
          classifications: JSON.parse(existing.classifications),
        },
        cached: true,
      })
    }

    // Find and parse the session
    const filePath = findSessionFile(sessionId)
    if (!filePath) {
      return NextResponse.json({ error: 'Session JSONL not found' }, { status: 404 })
    }

    const detail = parseSessionDetail(filePath, sessionId)
    const messages = extractUserMessages(detail.chatLog)

    if (messages.length === 0) {
      return NextResponse.json({ error: 'No user messages to analyze' }, { status: 400 })
    }

    // Classify — the only step that actually calls OpenAI (costs money).
    const result = await classifyMessages(apiKey, messages)

    // Store results (upsert in case of re-analysis)
    const analysis = await prisma.sessionAnalysis.upsert({
      where: { sessionId },
      create: {
        sessionId,
        model: result.model,
        classifications: JSON.stringify(result.classifications),
        totalMessages: result.totalMessages,
        inputTokens: result.inputTokens,
        outputTokens: result.outputTokens,
        costUSD: result.costUSD,
      },
      update: {
        model: result.model,
        classifications: JSON.stringify(result.classifications),
        totalMessages: result.totalMessages,
        inputTokens: result.inputTokens,
        outputTokens: result.outputTokens,
        costUSD: result.costUSD,
        // refresh the timestamp on forced re-analysis
        analyzedAt: new Date(),
      },
    })

    return NextResponse.json({
      analysis: {
        ...analysis,
        // reuse the in-memory objects rather than re-parsing the stored string
        classifications: result.classifications,
      },
      cached: false,
    })
  } catch (error) {
    const message = (error as Error).message
    // Surface OpenAI-specific errors clearly
    // NOTE(review): matching on message substrings is brittle — assumes the
    // OpenAI client embeds the HTTP status in the error text; verify.
    if (message.includes('401') || message.includes('Incorrect API key')) {
      return NextResponse.json({ error: 'Invalid OpenAI API key' }, { status: 401 })
    }
    if (message.includes('429')) {
      return NextResponse.json({ error: 'OpenAI rate limit exceeded. Try again later.' }, { status: 429 })
    }
    return NextResponse.json({ error: message }, { status: 500 })
  }
}
|
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
import { NextResponse } from 'next/server'
|
|
2
|
+
import fs from 'fs'
|
|
3
|
+
import path from 'path'
|
|
4
|
+
|
|
5
|
+
export const dynamic = 'force-dynamic'
|
|
6
|
+
|
|
7
|
+
// npm registry metadata endpoint for the Claude Code package.
const NPM_URL = 'https://registry.npmjs.org/@anthropic-ai/claude-code'
// On-disk cache location, relative to the server's working directory.
const CACHE_FILE = path.resolve(process.cwd(), 'data', 'cc-versions.json')
const CACHE_TTL = 86_400_000 // 24 hours

// Cached map of version → publish timestamp, plus when it was fetched.
interface VersionCache {
  versions: Record<string, string>
  fetchedAt: number
}

// Process-lifetime cache; backed by CACHE_FILE across restarts.
let memCache: VersionCache | null = null
|
|
17
|
+
|
|
18
|
+
function readDiskCache(): VersionCache | null {
|
|
19
|
+
try {
|
|
20
|
+
if (fs.existsSync(CACHE_FILE)) {
|
|
21
|
+
return JSON.parse(fs.readFileSync(CACHE_FILE, 'utf-8'))
|
|
22
|
+
}
|
|
23
|
+
} catch {}
|
|
24
|
+
return null
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
function writeDiskCache(cache: VersionCache) {
|
|
28
|
+
try {
|
|
29
|
+
const dir = path.dirname(CACHE_FILE)
|
|
30
|
+
if (!fs.existsSync(dir)) fs.mkdirSync(dir, { recursive: true })
|
|
31
|
+
fs.writeFileSync(CACHE_FILE, JSON.stringify(cache))
|
|
32
|
+
} catch {}
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
function isFresh(cache: VersionCache | null): boolean {
|
|
36
|
+
return !!cache && Date.now() - cache.fetchedAt < CACHE_TTL
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
export async function GET() {
|
|
40
|
+
try {
|
|
41
|
+
// 1. Check in-memory cache
|
|
42
|
+
if (isFresh(memCache)) {
|
|
43
|
+
return NextResponse.json(memCache!.versions)
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
// 2. Check disk cache (survives server restarts)
|
|
47
|
+
const disk = readDiskCache()
|
|
48
|
+
if (isFresh(disk)) {
|
|
49
|
+
memCache = disk
|
|
50
|
+
return NextResponse.json(disk!.versions)
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
// 3. Fetch from npm registry
|
|
54
|
+
const res = await fetch(NPM_URL, {
|
|
55
|
+
headers: { Accept: 'application/json' },
|
|
56
|
+
})
|
|
57
|
+
if (!res.ok) {
|
|
58
|
+
// Fall back to stale cache if available
|
|
59
|
+
if (memCache) return NextResponse.json(memCache.versions)
|
|
60
|
+
if (disk) return NextResponse.json(disk.versions)
|
|
61
|
+
throw new Error(`npm registry returned ${res.status}`)
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
const pkg = await res.json()
|
|
65
|
+
const time: Record<string, string> = pkg.time || {}
|
|
66
|
+
|
|
67
|
+
// Filter to only version entries (exclude "created", "modified")
|
|
68
|
+
const versions: Record<string, string> = {}
|
|
69
|
+
for (const [key, value] of Object.entries(time)) {
|
|
70
|
+
if (/^\d+\.\d+\.\d+$/.test(key)) {
|
|
71
|
+
versions[key] = value as string
|
|
72
|
+
}
|
|
73
|
+
}
|
|
74
|
+
|
|
75
|
+
const cache: VersionCache = { versions, fetchedAt: Date.now() }
|
|
76
|
+
memCache = cache
|
|
77
|
+
writeDiskCache(cache)
|
|
78
|
+
|
|
79
|
+
return NextResponse.json(versions)
|
|
80
|
+
} catch (error) {
|
|
81
|
+
console.error('Failed to fetch CC versions:', error)
|
|
82
|
+
return NextResponse.json({}, { status: 500 })
|
|
83
|
+
}
|
|
84
|
+
}
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
import { NextRequest, NextResponse } from 'next/server'
|
|
2
|
+
import { getOpenAIKey, setOpenAIKey, clearOpenAIKey } from '@/lib/config'
|
|
3
|
+
|
|
4
|
+
export const dynamic = 'force-dynamic'
|
|
5
|
+
|
|
6
|
+
// GET — check if API key is configured (returns masked key, not the actual key)
|
|
7
|
+
export async function GET() {
|
|
8
|
+
const key = getOpenAIKey()
|
|
9
|
+
return NextResponse.json({
|
|
10
|
+
hasOpenAIKey: !!key,
|
|
11
|
+
maskedKey: key ? `${key.slice(0, 7)}...${key.slice(-4)}` : null,
|
|
12
|
+
})
|
|
13
|
+
}
|
|
14
|
+
|
|
15
|
+
// POST — save or clear the API key
|
|
16
|
+
export async function POST(request: NextRequest) {
|
|
17
|
+
try {
|
|
18
|
+
const body = await request.json()
|
|
19
|
+
|
|
20
|
+
if (body.action === 'clear') {
|
|
21
|
+
clearOpenAIKey()
|
|
22
|
+
return NextResponse.json({ success: true })
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
const apiKey = body.apiKey as string
|
|
26
|
+
if (!apiKey?.trim()) {
|
|
27
|
+
return NextResponse.json({ error: 'Missing apiKey' }, { status: 400 })
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
setOpenAIKey(apiKey.trim())
|
|
31
|
+
return NextResponse.json({ success: true })
|
|
32
|
+
} catch (error) {
|
|
33
|
+
return NextResponse.json({ error: (error as Error).message }, { status: 500 })
|
|
34
|
+
}
|
|
35
|
+
}
|
package/bin/agentfit.mjs
CHANGED
|
@@ -1,13 +1,26 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
2
|
|
|
3
3
|
import { execSync, spawn } from 'child_process'
|
|
4
|
-
import {
|
|
4
|
+
import { createServer } from 'net'
|
|
5
|
+
import { existsSync } from 'fs'
|
|
5
6
|
import path from 'path'
|
|
6
7
|
import { fileURLToPath } from 'url'
|
|
7
8
|
|
|
8
9
|
const __dirname = path.dirname(fileURLToPath(import.meta.url))
|
|
9
10
|
const ROOT = path.resolve(__dirname, '..')
|
|
10
|
-
|
|
11
|
+
|
|
12
|
+
// Probe ports starting at `startPort`, resolving with the first one that
// accepts a listener. Any bind error (e.g. port in use) retries the next
// port; the promise never rejects.
function findAvailablePort(startPort) {
  return new Promise((resolve) => {
    const probe = createServer()
    probe.on('error', () => resolve(findAvailablePort(startPort + 1)))
    probe.listen(startPort, () => {
      // Port is free — release it immediately and hand the number back.
      probe.close(() => resolve(startPort))
    })
  })
}
|
|
21
|
+
|
|
22
|
+
const preferredPort = parseInt(process.env.AGENTFIT_PORT || process.env.PORT || '3000', 10)
|
|
23
|
+
const PORT = await findAvailablePort(preferredPort)
|
|
11
24
|
|
|
12
25
|
function info(msg) {
|
|
13
26
|
console.log(`\x1b[1;34m==>\x1b[0m ${msg}`)
|
|
@@ -23,12 +36,6 @@ function run(cmd, opts = {}) {
|
|
|
23
36
|
execSync(cmd, { cwd: ROOT, stdio: 'inherit', ...opts })
|
|
24
37
|
}
|
|
25
38
|
|
|
26
|
-
// ─── Ensure .env exists ─────────────────────────────────────────────
|
|
27
|
-
const envPath = path.join(ROOT, '.env')
|
|
28
|
-
if (!existsSync(envPath)) {
|
|
29
|
-
writeFileSync(envPath, 'DATABASE_URL="file:./agentfit.db"\n')
|
|
30
|
-
}
|
|
31
|
-
|
|
32
39
|
// ─── First-run setup: prisma generate + migrate ─────────────────────
|
|
33
40
|
const generatedClient = path.join(ROOT, 'generated', 'prisma')
|
|
34
41
|
if (!existsSync(generatedClient)) {
|
|
@@ -49,6 +56,9 @@ if (!existsSync(nextDir)) {
|
|
|
49
56
|
}
|
|
50
57
|
|
|
51
58
|
// ─── Start server ───────────────────────────────────────────────────
|
|
59
|
+
if (PORT !== preferredPort) {
|
|
60
|
+
info(`Port ${preferredPort} is in use, using ${PORT} instead`)
|
|
61
|
+
}
|
|
52
62
|
ok(`Starting AgentFit on http://localhost:${PORT}`)
|
|
53
63
|
console.log(' Press Ctrl+C to stop.\n')
|
|
54
64
|
|
|
@@ -0,0 +1,81 @@
|
|
|
1
|
+
'use client'
|
|
2
|
+
|
|
3
|
+
import {
|
|
4
|
+
Dialog,
|
|
5
|
+
DialogContent,
|
|
6
|
+
DialogFooter,
|
|
7
|
+
DialogHeader,
|
|
8
|
+
DialogTitle,
|
|
9
|
+
} from '@/components/ui/dialog'
|
|
10
|
+
import { Button } from '@/components/ui/button'
|
|
11
|
+
import { Loader2 } from 'lucide-react'
|
|
12
|
+
import { formatCost } from '@/lib/format'
|
|
13
|
+
|
|
14
|
+
// Props for the cost-confirmation dialog shown before an AI analysis run.
interface AnalyzeConfirmDialogProps {
  open: boolean
  onOpenChange: (open: boolean) => void
  onConfirm: () => void
  loading: boolean
  // Cost estimate to preview; null while the estimate is still loading.
  // NOTE(review): shape matches the /api/analyze/estimate response — confirm caller.
  estimate: {
    sessionCount: number
    messageCount: number
    estimatedCostUSD: number
  } | null
}

// Confirmation dialog that previews the scope and estimated OpenAI cost of
// an analysis run before the user commits. Shows a spinner until `estimate`
// arrives; the Analyze button stays disabled until then and while `loading`.
export function AnalyzeConfirmDialog({
  open,
  onOpenChange,
  onConfirm,
  loading,
  estimate,
}: AnalyzeConfirmDialogProps) {
  return (
    <Dialog open={open} onOpenChange={onOpenChange}>
      <DialogContent>
        <DialogHeader>
          <DialogTitle>Run AI Analysis</DialogTitle>
        </DialogHeader>
        {estimate ? (
          <div className="space-y-3">
            <div className="rounded-lg border p-4 space-y-2">
              <div className="flex justify-between text-sm">
                <span className="text-muted-foreground">Sessions</span>
                <span className="font-medium">{estimate.sessionCount}</span>
              </div>
              <div className="flex justify-between text-sm">
                <span className="text-muted-foreground">User messages</span>
                <span className="font-medium">{estimate.messageCount}</span>
              </div>
              <div className="flex justify-between text-sm">
                <span className="text-muted-foreground">Model</span>
                <span className="font-medium">gpt-4.1-mini</span>
              </div>
              <div className="flex justify-between text-sm border-t pt-2">
                <span className="text-muted-foreground">Estimated cost</span>
                <span className="font-bold">{formatCost(estimate.estimatedCostUSD)}</span>
              </div>
            </div>
            <p className="text-xs text-muted-foreground">
              Each user message will be classified by type, role, skill level, and sentiment.
              Results are cached — you won't be charged again for the same session.
            </p>
          </div>
        ) : (
          <div className="flex items-center justify-center py-8">
            <Loader2 className="h-5 w-5 animate-spin text-muted-foreground" />
          </div>
        )}
        <DialogFooter>
          <Button variant="outline" onClick={() => onOpenChange(false)}>
            Cancel
          </Button>
          <Button onClick={onConfirm} disabled={!estimate || loading}>
            {loading && <Loader2 className="mr-1 h-3 w-3 animate-spin" />}
            Analyze
          </Button>
        </DialogFooter>
      </DialogContent>
    </Dialog>
  )
}
|
|
@@ -9,14 +9,17 @@ import {
|
|
|
9
9
|
Camera,
|
|
10
10
|
ChevronRight,
|
|
11
11
|
Coins,
|
|
12
|
+
Cpu,
|
|
12
13
|
FileText,
|
|
13
14
|
FolderOpen,
|
|
14
15
|
GitBranch,
|
|
15
16
|
HeartPulse,
|
|
17
|
+
Key,
|
|
16
18
|
LayoutDashboard,
|
|
17
19
|
ListTree,
|
|
18
20
|
Puzzle,
|
|
19
21
|
Settings,
|
|
22
|
+
Sparkles,
|
|
20
23
|
Terminal,
|
|
21
24
|
Wrench,
|
|
22
25
|
} from 'lucide-react'
|
|
@@ -74,6 +77,7 @@ const navGroups: NavGroup[] = [
|
|
|
74
77
|
{ title: 'Token Breakdown', icon: Coins, href: '/tokens' },
|
|
75
78
|
{ title: 'Tool Usage', icon: Wrench, href: '/tools' },
|
|
76
79
|
{ title: 'Command Usage', icon: Terminal, href: '/commands' },
|
|
80
|
+
{ title: 'Model Usage', icon: Cpu, href: '/models' },
|
|
77
81
|
],
|
|
78
82
|
},
|
|
79
83
|
{
|
|
@@ -82,6 +86,7 @@ const navGroups: NavGroup[] = [
|
|
|
82
86
|
items: [
|
|
83
87
|
{ title: 'Personality Fit', icon: Brain, href: '/personality' },
|
|
84
88
|
{ title: 'Session Flow', icon: GitBranch, href: '/flow' },
|
|
89
|
+
{ title: 'AI Insights', icon: Sparkles, href: '/ai-insights' },
|
|
85
90
|
{ title: 'Image Analysis', icon: Camera, href: '/images' },
|
|
86
91
|
{ title: 'Reports', icon: FileText, href: '/reports' },
|
|
87
92
|
],
|
|
@@ -170,6 +175,15 @@ export function AppSidebar() {
|
|
|
170
175
|
<span>Data Management</span>
|
|
171
176
|
</SidebarMenuButton>
|
|
172
177
|
</SidebarMenuItem>
|
|
178
|
+
<SidebarMenuItem>
|
|
179
|
+
<SidebarMenuButton
|
|
180
|
+
render={<Link href="/settings" />}
|
|
181
|
+
isActive={pathname === '/settings'}
|
|
182
|
+
>
|
|
183
|
+
<Key className="h-4 w-4" />
|
|
184
|
+
<span>Settings</span>
|
|
185
|
+
</SidebarMenuButton>
|
|
186
|
+
</SidebarMenuItem>
|
|
173
187
|
</SidebarMenu>
|
|
174
188
|
</SidebarGroupContent>
|
|
175
189
|
</SidebarGroup>
|
|
@@ -131,8 +131,10 @@ function filterData(raw: UsageData | null, range: TimeRange, project: string): U
|
|
|
131
131
|
toolUsage[tool] = (toolUsage[tool] || 0) + count
|
|
132
132
|
}
|
|
133
133
|
|
|
134
|
-
// Models
|
|
135
|
-
|
|
134
|
+
// Models — aggregate at message level
|
|
135
|
+
for (const [m, count] of Object.entries(s.modelCounts || {})) {
|
|
136
|
+
models[m] = (models[m] || 0) + count
|
|
137
|
+
}
|
|
136
138
|
}
|
|
137
139
|
|
|
138
140
|
const projects = Array.from(projectMap.values()).sort((a, b) => b.totalCost - a.totalCost)
|