create-claudeportal 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/cli.js +37 -0
- package/dist/assets/index-BBU5K5iA.js +132 -0
- package/dist/assets/index-fNmv07eE.css +1 -0
- package/dist/index.html +13 -0
- package/index.html +12 -0
- package/mockups/01-chat-conversation-v2.html +803 -0
- package/mockups/01-chat-conversation.html +592 -0
- package/mockups/02-activity-feed.html +648 -0
- package/mockups/03-focused-workspace.html +680 -0
- package/mockups/04-documents-mode.html +1556 -0
- package/package.json +54 -0
- package/server/index.js +140 -0
- package/server/lib/detect-tools.js +93 -0
- package/server/lib/file-scanner.js +46 -0
- package/server/lib/file-watcher.js +45 -0
- package/server/lib/fix-npm-prefix.js +61 -0
- package/server/lib/folder-scanner.js +43 -0
- package/server/lib/install-tools.js +122 -0
- package/server/lib/platform.js +18 -0
- package/server/lib/sse-manager.js +36 -0
- package/server/lib/terminal.js +95 -0
- package/server/lib/validate-folder-path.js +17 -0
- package/server/lib/validate-path.js +13 -0
- package/server/routes/detect.js +64 -0
- package/server/routes/doc-events.js +94 -0
- package/server/routes/events.js +37 -0
- package/server/routes/folder.js +195 -0
- package/server/routes/github.js +21 -0
- package/server/routes/health.js +16 -0
- package/server/routes/install.js +102 -0
- package/server/routes/project.js +18 -0
- package/server/routes/scaffold.js +45 -0
- package/skills-lock.json +15 -0
- package/tsconfig.app.json +17 -0
- package/tsconfig.node.json +11 -0
- package/tsconfig.tsbuildinfo +1 -0
- package/ui/app.js +747 -0
- package/ui/index.html +272 -0
- package/ui/styles.css +788 -0
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
// Embedded-terminal support: node-pty is an optional native dependency.
// NOTE(review): `path` appears unused in the visible code of this module —
// confirm against the full file before removing.
const path = require('path')
const { getExpandedPath } = require('./detect-tools')

// Holds the node-pty module object when the native addon loaded, or null
// when it did not; isPtyAvailable()/createTerminal() check this.
let pty = null

// Try to load node-pty — it requires native compilation and may not be available
try {
  pty = require('node-pty')
} catch {
  console.log(' ⚠ node-pty not available — embedded terminal disabled')
  console.log(' Users can still open their native terminal')
}
|
|
13
|
+
|
|
14
|
+
// Reports whether the optional node-pty native module loaded successfully,
// i.e. whether the embedded terminal can be offered at all.
function isPtyAvailable() {
  const loaded = pty !== null
  return loaded
}
|
|
17
|
+
|
|
18
|
+
/**
 * Spawn a PTY-backed shell process for the embedded terminal.
 *
 * @param {string} [cwd] - Working directory for the shell; falls back to the
 *   user's home directory (HOME, or USERPROFILE on Windows), then to the
 *   server's own working directory.
 * @returns {object} The node-pty process handle.
 * @throws {Error} When node-pty failed to load (see isPtyAvailable()).
 */
function createTerminal(cwd) {
  if (!pty) {
    throw new Error('node-pty is not available')
  }

  const shell = process.platform === 'win32'
    ? 'powershell.exe'
    : (process.env.SHELL || (process.platform === 'darwin' ? 'zsh' : 'bash'))

  // Build a clean environment — remove CLAUDECODE so Claude Code
  // doesn't think it's nested inside another session
  const env = { ...process.env, TERM: 'xterm-256color', PATH: getExpandedPath() }
  delete env.CLAUDECODE

  // Fix: HOME is not set on Windows (the win32 branch above shows Windows is
  // supported), so fall back to USERPROFILE and finally to process.cwd() so
  // pty.spawn always receives a usable cwd.
  const workingDir = cwd || process.env.HOME || process.env.USERPROFILE || process.cwd()

  const ptyProcess = pty.spawn(shell, [], {
    name: 'xterm-256color',
    cols: 80,
    rows: 24,
    cwd: workingDir,
    env,
  })

  return ptyProcess
}
|
|
42
|
+
|
|
43
|
+
/**
 * Bridge a WebSocket connection to a PTY process: PTY output and exit events
 * stream to the browser as JSON messages; browser 'input'/'resize' messages
 * drive the PTY.
 *
 * @param {object} ws - WebSocket with .on(event, cb) and .send(string).
 * @param {object} ptyProcess - node-pty handle (onData/onExit/write/resize/kill).
 * @returns {{detach: Function}} Handle letting the server detach this
 *   connection without killing the PTY (used when a newer connection takes
 *   over the same PTY).
 */
function attachToWebSocket(ws, ptyProcess) {
  let detached = false

  // Terminal output → browser
  const dataHandler = ptyProcess.onData((data) => {
    try {
      ws.send(JSON.stringify({ type: 'output', data }))
    } catch {
      // WebSocket might be closed
    }
  })

  // Terminal exit → browser
  const exitHandler = ptyProcess.onExit(({ exitCode }) => {
    try {
      ws.send(JSON.stringify({ type: 'exit', exitCode }))
    } catch {
      // WebSocket might be closed
    }
  })

  // Browser input → terminal
  ws.on('message', (msg) => {
    if (detached) return
    try {
      const parsed = JSON.parse(msg.toString())
      switch (parsed.type) {
        case 'input':
          ptyProcess.write(parsed.data)
          break
        case 'resize':
          ptyProcess.resize(parsed.cols, parsed.rows)
          break
      }
    } catch {
      // Invalid message
    }
  })

  ws.on('close', () => {
    // Only kill if this PTY hasn't already been replaced by a new connection.
    // Bug fix: the original killed unconditionally despite this comment —
    // after detach(), a newer WebSocket owns the PTY and killing it here
    // would tear down that new session.
    if (detached) return
    detached = true
    dataHandler.dispose()
    exitHandler.dispose()
    try { ptyProcess.kill() } catch {}
  })

  // Allow the server to detach this connection before ws closes
  // (e.g. when a new connection replaces it)
  return { detach() { detached = true; dataHandler.dispose(); exitHandler.dispose() } }
}
|
|
94
|
+
|
|
95
|
+
// Public API: terminal creation, WebSocket bridging, and availability probe.
module.exports = { createTerminal, attachToWebSocket, isPtyAvailable }
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
const os = require('os')
|
|
2
|
+
const path = require('path')
|
|
3
|
+
const fs = require('fs')
|
|
4
|
+
|
|
5
|
+
/**
 * Validate a user-supplied folder path.
 *
 * @param {string} folderPath - Raw path from the client.
 * @returns {string|null} Resolved absolute path when it is an existing
 *   directory inside the user's home directory; null otherwise.
 */
function validateFolderPath(folderPath) {
  if (!folderPath || typeof folderPath !== 'string') return null
  const resolved = path.resolve(folderPath)
  const home = os.homedir()
  // Security fix: a plain startsWith(home) also accepts sibling directories
  // such as "/Users/alice-evil" for home "/Users/alice". Require either the
  // home directory itself or a path-separator boundary after it.
  if (resolved !== home && !resolved.startsWith(home + path.sep)) return null
  if (!fs.existsSync(resolved)) return null
  try {
    const stat = fs.statSync(resolved)
    if (!stat.isDirectory()) return null
  } catch { return null }
  return resolved
}
|
|
16
|
+
|
|
17
|
+
// Used by route handlers that accept folder paths from the client.
module.exports = { validateFolderPath }
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
const os = require('os')
|
|
2
|
+
const path = require('path')
|
|
3
|
+
const fs = require('fs')
|
|
4
|
+
|
|
5
|
+
/**
 * Validate a user-supplied project path.
 *
 * NOTE(review): unlike validateFolderPath, this accepts regular files as
 * well as directories — confirm whether callers rely on that before
 * tightening it.
 *
 * @param {string} projectPath - Raw path from the client.
 * @returns {string|null} Resolved absolute path when it exists inside the
 *   user's home directory; null otherwise.
 */
function validateProjectPath(projectPath) {
  if (!projectPath || typeof projectPath !== 'string') return null
  const resolved = path.resolve(projectPath)
  const home = os.homedir()
  // Security fix: plain startsWith(home) also accepts sibling paths such as
  // "/Users/alice-evil" for home "/Users/alice". Require a separator boundary.
  if (resolved !== home && !resolved.startsWith(home + path.sep)) return null
  if (!fs.existsSync(resolved)) return null
  return resolved
}
|
|
12
|
+
|
|
13
|
+
// Used by route handlers that accept project paths from the client.
module.exports = { validateProjectPath }
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
const express = require('express')
|
|
2
|
+
const path = require('path')
|
|
3
|
+
const fs = require('fs')
|
|
4
|
+
const os = require('os')
|
|
5
|
+
const { detectAll } = require('../lib/detect-tools')
|
|
6
|
+
|
|
7
|
+
const router = express.Router()

// GET /detect — current tool-detection results plus any cloud-synced folders
// found under the user's home directory. Never cached, since tools may be
// installed between calls.
router.get('/detect', (_req, res) => {
  const payload = detectAll()
  payload.drives = detectCloudDrives()
  res.setHeader('Cache-Control', 'no-store')
  res.json(payload)
})
|
|
15
|
+
|
|
16
|
+
/**
 * Discover common cloud-drive and document folders under the home directory:
 * iCloud Drive, Dropbox, Google Drive / OneDrive (via ~/Library/CloudStorage
 * on macOS), plus ~/Documents and ~/Desktop.
 *
 * @returns {Array<{id: string, name: string, path: string}>} Only folders
 *   that actually exist on disk.
 */
function detectCloudDrives() {
  const home = os.homedir()
  const drives = []

  // Record a candidate only when the folder exists on disk.
  const addIfPresent = (id, name, drivePath) => {
    if (fs.existsSync(drivePath)) drives.push({ id, name, path: drivePath })
  }

  // iCloud Drive
  addIfPresent('icloud', 'iCloud Drive', path.join(home, 'Library', 'Mobile Documents', 'com~apple~CloudDocs'))

  // Dropbox
  addIfPresent('dropbox', 'Dropbox', path.join(home, 'Dropbox'))

  // Google Drive / OneDrive — check CloudStorage variants (macOS)
  const cloudStorage = path.join(home, 'Library', 'CloudStorage')
  if (fs.existsSync(cloudStorage)) {
    try {
      for (const entry of fs.readdirSync(cloudStorage)) {
        const fullPath = path.join(cloudStorage, entry)
        // Robustness fix: guard each stat individually so one unreadable
        // entry (e.g. a broken symlink) doesn't abort scanning the rest.
        try {
          if (!fs.statSync(fullPath).isDirectory()) continue
        } catch { continue }

        if (entry.startsWith('GoogleDrive-')) {
          drives.push({ id: 'gdrive', name: 'Google Drive', path: fullPath })
        } else if (entry.startsWith('OneDrive-')) {
          const label = entry.replace('OneDrive-', 'OneDrive — ')
          drives.push({ id: 'onedrive', name: label, path: fullPath })
        }
      }
    } catch {}
  }

  // ~/Documents and ~/Desktop as common project locations
  addIfPresent('documents', 'Documents', path.join(home, 'Documents'))
  addIfPresent('desktop', 'Desktop', path.join(home, 'Desktop'))

  return drives
}
|
|
63
|
+
|
|
64
|
+
// Express router exposing GET /detect.
module.exports = router
|
|
@@ -0,0 +1,94 @@
|
|
|
1
|
+
const express = require('express')
|
|
2
|
+
const fs = require('fs')
|
|
3
|
+
const path = require('path')
|
|
4
|
+
const { validateFolderPath } = require('../lib/validate-folder-path')
|
|
5
|
+
|
|
6
|
+
// SSE comment-ping interval; keeps proxies from dropping idle connections.
const KEEP_ALIVE_INTERVAL = 30 * 1000 // 30 seconds
// Hard cap on a single SSE connection's lifetime; clients are expected to
// reconnect (presumably handled by EventSource auto-reconnect — verify in UI).
const AUTO_CLOSE_TIMEOUT = 5 * 60 * 1000 // 5 minutes

// Shared watcher instances, keyed by outputDir
const watchers = new Map() // outputDir -> { watcher, refCount }
|
|
11
|
+
|
|
12
|
+
/**
 * Build the /doc-events SSE router: streams "output-created" events whenever
 * a file appears or changes in <folderPath>/output. A single chokidar
 * watcher per outputDir is shared across connections via refCounting.
 *
 * @param {object} sseManager - Connection registry with addConnection /
 *   send / hasConnections (see server/lib/sse-manager.js).
 * @returns {object} Express router.
 */
function createDocEventsRouter(sseManager) {
  const router = express.Router()

  // GET /doc-events?folderPath=… — long-lived SSE response.
  router.get('/doc-events', (req, res) => {
    const folderPath = validateFolderPath(req.query.folderPath)
    if (!folderPath) return res.status(400).end()

    // NOTE(review): string concatenation rather than path.join — fine on
    // POSIX, but this key would differ from path.join output on Windows;
    // confirm Windows is out of scope before relying on it.
    const outputDir = folderPath + '/output'

    res.writeHead(200, {
      'Content-Type': 'text/event-stream',
      'Cache-Control': 'no-cache',
      'Connection': 'keep-alive',
    })
    res.write('\n')

    sseManager.addConnection(outputDir, res)

    // Ensure the watched directory exists before chokidar starts on it.
    if (!fs.existsSync(outputDir)) {
      fs.mkdirSync(outputDir, { recursive: true })
    }

    // Reuse existing watcher for this outputDir, or create a new one
    if (watchers.has(outputDir)) {
      watchers.get(outputDir).refCount++
    } else {
      const chokidar = require('chokidar')
      const watcher = chokidar.watch(outputDir, { depth: 0, ignoreInitial: true })

      // Forward a single file event to every SSE subscriber of this outputDir.
      const emitFile = (filePath) => {
        const relative = path.relative(outputDir, filePath)
        sseManager.send(outputDir, 'output-created', { file: relative, path: filePath })
      }
      watcher.on('add', emitFile)
      watcher.on('change', emitFile)

      watcher.on('ready', () => {
        // Catch files created during watcher startup
        try {
          const existing = fs.readdirSync(outputDir).filter(f => !f.startsWith('.'))
          for (const file of existing) {
            const filePath = path.join(outputDir, file)
            const stat = fs.statSync(filePath)
            // Only emit for files created in the last 5 minutes (avoids old outputs)
            if (Date.now() - stat.mtimeMs < 5 * 60 * 1000) {
              sseManager.send(outputDir, 'output-created', { file, path: filePath })
            }
          }
        } catch {}
      })

      watchers.set(outputDir, { watcher, refCount: 1 })
    }

    // Periodic SSE comment line so intermediaries keep the socket open.
    const keepAlive = setInterval(() => {
      try { res.write(': ping\n\n') } catch {}
    }, KEEP_ALIVE_INTERVAL)

    // Forcibly end long-lived connections; the watcher itself survives while
    // other connections still reference it.
    const autoClose = setTimeout(() => {
      res.end()
    }, AUTO_CLOSE_TIMEOUT)

    res.on('close', () => {
      clearInterval(keepAlive)
      clearTimeout(autoClose)
      // Defer to ensure SSEManager's close handler removes the connection first
      setTimeout(() => {
        const entry = watchers.get(outputDir)
        if (entry) {
          entry.refCount--
          // Drop the shared watcher once nothing references this outputDir.
          if (entry.refCount <= 0 && !sseManager.hasConnections(outputDir)) {
            entry.watcher.close()
            watchers.delete(outputDir)
          }
        }
      }, 0)
    })
  })

  return router
}

module.exports = { createDocEventsRouter }
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
const express = require('express')
|
|
2
|
+
const { validateProjectPath } = require('../lib/validate-path')
|
|
3
|
+
|
|
4
|
+
/**
 * Build the /events SSE router for project file-change notifications.
 *
 * @param {object} sseManager - Connection registry (addConnection / hasConnections).
 * @param {object} fileWatcher - Watcher with watch(path) / unwatch(path).
 * @returns {object} Express router.
 */
function createEventsRouter(sseManager, fileWatcher) {
  const router = express.Router()

  // GET /events?projectPath=… — long-lived SSE response.
  router.get('/events', (req, res) => {
    const projectPath = validateProjectPath(req.query.projectPath)
    if (!projectPath) return res.status(400).end()

    const sseHeaders = {
      'Content-Type': 'text/event-stream',
      'Cache-Control': 'no-cache',
      'Connection': 'keep-alive',
    }
    res.writeHead(200, sseHeaders)
    res.write('\n')

    sseManager.addConnection(projectPath, res)
    fileWatcher.watch(projectPath)

    // Periodic SSE comment line so intermediaries keep the socket open.
    const pingTimer = setInterval(() => {
      try { res.write(': ping\n\n') } catch {}
    }, 30000)

    res.on('close', () => {
      clearInterval(pingTimer)
      // Defer to ensure SSEManager's close handler removes the connection first
      setTimeout(() => {
        if (!sseManager.hasConnections(projectPath)) fileWatcher.unwatch(projectPath)
      }, 0)
    })
  })

  return router
}
|
|
36
|
+
|
|
37
|
+
// Factory export so the server can inject its shared sseManager/fileWatcher.
module.exports = { createEventsRouter }
|
|
@@ -0,0 +1,195 @@
|
|
|
1
|
+
const express = require('express')
|
|
2
|
+
const fs = require('fs')
|
|
3
|
+
const path = require('path')
|
|
4
|
+
const os = require('os')
|
|
5
|
+
const { validateFolderPath } = require('../lib/validate-folder-path')
|
|
6
|
+
const { scanFolder } = require('../lib/folder-scanner')
|
|
7
|
+
|
|
8
|
+
const router = express.Router()

// Upper bound for file contents served to the browser by the read endpoints.
const MAX_READ_SIZE = 10 * 1024 * 1024 // 10MB
|
|
11
|
+
|
|
12
|
+
// GET /folder-scan?folderPath=… — summarize the contents of a validated folder.
router.get('/folder-scan', (req, res) => {
  const folderPath = validateFolderPath(req.query.folderPath)
  if (!folderPath) return res.status(400).json({ error: 'Invalid folderPath' })

  try {
    res.json(scanFolder(folderPath))
  } catch (err) {
    res.status(500).json({ error: err.message })
  }
})
|
|
23
|
+
|
|
24
|
+
// GET /read-output?path=… — read a UTF-8 file, restricted to paths under the
// home directory AND inside an "output" directory. Returns content plus
// size/mtime metadata.
router.get('/read-output', (req, res) => {
  const rawPath = req.query.path
  if (!rawPath || typeof rawPath !== 'string') {
    return res.status(400).json({ error: 'Missing path' })
  }

  const resolved = path.resolve(rawPath)
  const home = os.homedir()
  // Security fix: plain startsWith(home) also accepts sibling directories
  // such as "/Users/alice-evil" for home "/Users/alice".
  if (resolved !== home && !resolved.startsWith(home + path.sep)) {
    return res.status(400).json({ error: 'Path outside home directory' })
  }

  // Require an "output" ancestor directory. Fix: split on path.sep instead of
  // searching for the literal substring '/output/', which never matches on
  // platforms whose separator is not '/'.
  const segments = resolved.split(path.sep)
  if (!segments.slice(0, -1).includes('output')) {
    return res.status(403).json({ error: 'Can only read files from output directories' })
  }

  if (!fs.existsSync(resolved)) {
    return res.status(404).json({ error: 'File not found' })
  }

  let stat
  try {
    stat = fs.statSync(resolved)
  } catch (err) {
    return res.status(500).json({ error: err.message })
  }

  if (stat.size > MAX_READ_SIZE) {
    return res.status(413).json({ error: 'File too large (max 10MB)' })
  }

  try {
    const content = fs.readFileSync(resolved, 'utf8')
    res.json({
      content,
      path: resolved,
      size: stat.size,
      modified: stat.mtime.toISOString(),
    })
  } catch (err) {
    res.status(500).json({ error: err.message })
  }
})
|
|
64
|
+
|
|
65
|
+
// POST /ensure-output-dir?folderPath=… — create <folder>/output if needed and
// return its absolute path.
router.post('/ensure-output-dir', (req, res) => {
  const folderPath = validateFolderPath(req.query.folderPath)
  if (!folderPath) return res.status(400).json({ error: 'Invalid folderPath' })

  const outputDir = path.join(folderPath, 'output')
  try {
    fs.mkdirSync(outputDir, { recursive: true })
  } catch (err) {
    return res.status(500).json({ error: err.message })
  }
  res.json({ outputDir })
})
|
|
77
|
+
|
|
78
|
+
// Read any file under the home directory (for "Open a document" feature)
router.get('/read-file', (req, res) => {
  const rawPath = req.query.path
  if (!rawPath || typeof rawPath !== 'string') {
    return res.status(400).json({ error: 'path query parameter required' })
  }

  const resolved = path.resolve(rawPath)
  const home = os.homedir()
  // Security fix: plain startsWith(home) also accepts sibling paths such as
  // "/Users/alice-evil" for home "/Users/alice".
  if (resolved !== home && !resolved.startsWith(home + path.sep)) {
    return res.status(403).json({ error: 'Path outside home directory' })
  }

  if (!fs.existsSync(resolved)) {
    return res.status(404).json({ error: 'File not found' })
  }

  try {
    const stat = fs.statSync(resolved)
    // Consistency fix: reuse the shared MAX_READ_SIZE cap instead of a
    // duplicated inline 10MB constant.
    if (stat.size > MAX_READ_SIZE) {
      return res.status(413).json({ error: 'File too large (max 10MB)' })
    }
    const content = fs.readFileSync(resolved, 'utf8')
    res.json({ content, path: resolved, size: stat.size, modified: stat.mtime.toISOString() })
  } catch (err) {
    res.status(500).json({ error: err.message })
  }
})
|
|
105
|
+
|
|
106
|
+
// Find the most recently modified file in the output directory
router.get('/latest-output', (req, res) => {
  const folderPath = validateFolderPath(req.query.folderPath)
  if (!folderPath) return res.status(400).json({ error: 'Invalid folderPath' })

  const outputDir = path.join(folderPath, 'output')
  if (!fs.existsSync(outputDir)) return res.json({ path: null })

  try {
    const files = []
    for (const name of fs.readdirSync(outputDir)) {
      if (name.startsWith('.')) continue
      const fullPath = path.join(outputDir, name)
      // Robustness fix: skip entries we cannot stat (e.g. broken symlinks)
      // instead of failing the whole request with a 500.
      try {
        files.push({ name, path: fullPath, mtime: fs.statSync(fullPath).mtimeMs })
      } catch {}
    }
    // Newest first.
    files.sort((a, b) => b.mtime - a.mtime)

    if (files.length === 0) return res.json({ path: null })
    res.json({ path: files[0].path, name: files[0].name })
  } catch (err) {
    res.status(500).json({ error: err.message })
  }
})
|
|
130
|
+
|
|
131
|
+
// Open native folder picker (macOS: AppleScript, Linux: zenity)
router.get('/pick-folder', (req, res) => {
  const { execSync } = require('child_process')

  const pickerCommand = process.platform === 'darwin'
    ? `osascript -e 'POSIX path of (choose folder with prompt "Choose a folder")'`
    : process.platform === 'linux'
      ? 'zenity --file-selection --directory --title="Choose a folder" 2>/dev/null'
      : null

  if (!pickerCommand) {
    return res.status(501).json({ error: 'Folder picker not supported on this platform' })
  }

  try {
    const selected = execSync(pickerCommand, { encoding: 'utf8', timeout: 60000 }).trim()
    if (!selected) return res.status(400).json({ error: 'No folder selected' })

    // Remove trailing slash if present
    const cleaned = selected.endsWith('/') ? selected.slice(0, -1) : selected
    res.json({ path: cleaned })
  } catch (err) {
    // User cancelled the dialog
    if (err.status === 1 || err.message?.includes('User canceled')) {
      return res.json({ path: null })
    }
    res.status(500).json({ error: err.message })
  }
})
|
|
164
|
+
|
|
165
|
+
// Open native file picker (macOS: AppleScript, Linux: zenity)
router.get('/pick-file', (req, res) => {
  const { execSync } = require('child_process')

  const pickerCommand = process.platform === 'darwin'
    ? `osascript -e 'POSIX path of (choose file with prompt "Choose a document" of type {"md", "txt", "csv", "json", "html"})'`
    : process.platform === 'linux'
      ? 'zenity --file-selection --title="Choose a document" --file-filter="Documents|*.md *.txt *.csv *.json *.html" 2>/dev/null'
      : null

  if (!pickerCommand) {
    return res.status(501).json({ error: 'File picker not supported on this platform' })
  }

  try {
    const selected = execSync(pickerCommand, { encoding: 'utf8', timeout: 60000 }).trim()
    if (!selected) return res.json({ path: null })
    res.json({ path: selected })
  } catch (err) {
    // exit status 1 / "User canceled" means the dialog was dismissed.
    if (err.status === 1 || err.message?.includes('User canceled')) {
      return res.json({ path: null })
    }
    res.status(500).json({ error: err.message })
  }
})
|
|
194
|
+
|
|
195
|
+
// Express router exposing the folder/file reading and picker endpoints.
module.exports = router
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
const express = require('express')
|
|
2
|
+
const { execSync } = require('child_process')
|
|
3
|
+
const { getExpandedPath } = require('../lib/detect-tools')
|
|
4
|
+
|
|
5
|
+
const router = express.Router()

// GET /github/repos — list up to 20 of the user's repositories via the
// GitHub CLI. Never cached; degrades to an empty list with a hint when `gh`
// is missing or unauthenticated.
router.get('/github/repos', (req, res) => {
  res.setHeader('Cache-Control', 'no-store')
  try {
    const output = execSync('gh repo list --json name,url --limit 20', {
      encoding: 'utf8',
      timeout: 10000,
      // Expanded PATH so `gh` is found even when the server inherited a
      // minimal environment (e.g. launched from a GUI).
      env: { ...process.env, PATH: getExpandedPath() },
    })
    res.json({ repos: JSON.parse(output) })
  } catch {
    // Fix: drop the unused `err` binding (bare catch, matching the style used
    // elsewhere in this package). Deliberate best-effort: the UI treats an
    // empty list + message as "not connected" rather than an error.
    res.json({ repos: [], error: 'Could not fetch repos. Is GitHub CLI authenticated?' })
  }
})

module.exports = router
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
const express = require('express')
|
|
2
|
+
const { scanProject } = require('../lib/file-scanner')
|
|
3
|
+
const { validateProjectPath } = require('../lib/validate-path')
|
|
4
|
+
const router = express.Router()

// GET /health-scan?projectPath=… — run the project health checks and return
// them as { checks: [...] }.
router.get('/health-scan', (req, res) => {
  const projectPath = validateProjectPath(req.query.projectPath)
  if (!projectPath) return res.status(400).json({ error: 'Invalid or missing projectPath' })
  try {
    const checks = scanProject(projectPath)
    res.json({ checks })
  } catch (err) {
    res.status(500).json({ error: err.message })
  }
})

module.exports = router
|
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
const express = require('express')
|
|
2
|
+
const { installTool } = require('../lib/install-tools')
|
|
3
|
+
const { fixNpmPrefix } = require('../lib/fix-npm-prefix')
|
|
4
|
+
const { detectAll } = require('../lib/detect-tools')
|
|
5
|
+
|
|
6
|
+
const router = express.Router()

// Install a single tool. Streams progress as SSE-style data events and
// always terminates the stream with a { status: 'complete' } record.
router.post('/install/:toolId', async (req, res) => {
  const { toolId } = req.params

  res.setHeader('Content-Type', 'text/event-stream')
  res.setHeader('Cache-Control', 'no-cache')
  res.setHeader('Connection', 'keep-alive')
  res.setHeader('X-Accel-Buffering', 'no')

  const send = (data) => {
    // Guard the write: the client may disconnect mid-install.
    try { res.write(`data: ${JSON.stringify(data)}\n\n`) } catch {}
  }

  send({ tool: toolId, status: 'installing', message: `Installing ${toolId}...` })

  // Fix: guard the await — a rejected installTool promise previously left
  // the response open forever and surfaced as an unhandled rejection.
  try {
    const result = await installTool(toolId, (message) => {
      send({ tool: toolId, status: 'progress', message })
    })

    if (result.success) {
      send({ tool: toolId, status: 'done' })
    } else {
      send({ tool: toolId, status: 'error', error: result.error })
    }
  } catch (err) {
    send({ tool: toolId, status: 'error', error: err.message })
  }

  send({ status: 'complete' })
  res.end()
})
|
|
36
|
+
|
|
37
|
+
// Install all missing required tools, streaming progress as SSE-style data
// events; finishes with { status: 'complete', results, tools }.
router.post('/install-all', async (req, res) => {
  res.setHeader('Content-Type', 'text/event-stream')
  res.setHeader('Cache-Control', 'no-cache')
  res.setHeader('Connection', 'keep-alive')
  res.setHeader('X-Accel-Buffering', 'no')

  let clientDisconnected = false
  req.on('close', () => { clientDisconnected = true })

  const send = (data) => {
    if (clientDisconnected) return
    try { res.write(`data: ${JSON.stringify(data)}\n\n`) } catch {}
  }

  // Step 1: Fix npm prefix on Mac (avoid sudo)
  send({ status: 'phase', message: 'Preparing npm...' })
  // Fix: guard fixNpmPrefix — a throw here previously crashed the handler
  // before any install ran. Non-fatal: installs may still succeed.
  try {
    const prefixResult = fixNpmPrefix()
    if (prefixResult.fixed) {
      send({ status: 'info', message: 'npm configured for global installs (no sudo needed)' })
    }
  } catch (err) {
    send({ status: 'info', message: `Could not adjust npm prefix: ${err.message}` })
  }

  // Step 2: Detect what's missing
  const { tools } = detectAll()
  const missing = tools.filter((t) => !t.installed && t.required)

  if (missing.length === 0) {
    send({ status: 'complete', message: 'All tools already installed!' })
    res.end()
    return
  }

  send({ status: 'info', message: `Installing ${missing.length} tool(s)...` })

  // Step 3: Install each missing tool
  const results = []
  for (const tool of missing) {
    if (clientDisconnected) break
    send({ tool: tool.id, status: 'installing', message: `Installing ${tool.name}...` })

    // Fix: guard the await so one rejected install records an error and the
    // loop moves on, instead of aborting the whole stream.
    try {
      const result = await installTool(tool.id, (message) => {
        send({ tool: tool.id, status: 'progress', message })
      })

      if (result.success) {
        send({ tool: tool.id, status: 'done', message: `${tool.name} installed` })
        results.push({ id: tool.id, success: true })
      } else {
        send({ tool: tool.id, status: 'error', error: result.error })
        results.push({ id: tool.id, success: false, error: result.error })
      }
    } catch (err) {
      send({ tool: tool.id, status: 'error', error: err.message })
      results.push({ id: tool.id, success: false, error: err.message })
    }
  }

  // Step 4: Re-detect to confirm
  const updated = detectAll()

  send({
    status: 'complete',
    results,
    tools: updated.tools,
  })

  res.end()
})

module.exports = router
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
const express = require('express')
|
|
2
|
+
const { execSync } = require('child_process')
|
|
3
|
+
const { validateProjectPath } = require('../lib/validate-path')
|
|
4
|
+
const router = express.Router()

// GET /project-info?projectPath=… — basic git metadata for a project.
router.get('/project-info', (req, res) => {
  const projectPath = validateProjectPath(req.query.projectPath)
  if (!projectPath) return res.status(400).json({ error: 'Invalid or missing projectPath' })

  // Both git lookups are deliberately best-effort: non-repos or repos without
  // an origin yield null. Fix: add timeouts (matching the execSync convention
  // used elsewhere in this package) so a wedged git cannot hang the request.
  let gitBranch = null
  let gitRemote = null
  try { gitBranch = execSync('git rev-parse --abbrev-ref HEAD', { cwd: projectPath, encoding: 'utf8', timeout: 10000 }).trim() } catch {}
  try { gitRemote = execSync('git remote get-url origin', { cwd: projectPath, encoding: 'utf8', timeout: 10000 }).trim() } catch {}

  res.json({ path: projectPath, gitBranch, gitRemote })
})

module.exports = router
|