@vectorasystems/cli 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/vectora.js +197 -0
- package/package.json +31 -0
- package/src/commands/artifacts.js +88 -0
- package/src/commands/auth.js +207 -0
- package/src/commands/chat.js +116 -0
- package/src/commands/config.js +70 -0
- package/src/commands/projects.js +182 -0
- package/src/commands/run.js +115 -0
- package/src/commands/status.js +47 -0
- package/src/commands/ui.js +22 -0
- package/src/commands/usage.js +62 -0
- package/src/lib/api-client.js +172 -0
- package/src/lib/auth-store.js +62 -0
- package/src/lib/config-store.js +60 -0
- package/src/lib/constants.js +94 -0
- package/src/lib/errors.js +62 -0
- package/src/lib/output.js +98 -0
- package/src/lib/sse-client.js +92 -0
- package/src/lib/workspace-scanner.js +227 -0
- package/src/tui/App.js +73 -0
- package/src/tui/components/Header.js +18 -0
- package/src/tui/components/PhaseTimeline.js +31 -0
- package/src/tui/components/ProjectList.js +43 -0
- package/src/tui/components/StatusBar.js +19 -0
- package/src/tui/hooks/useApi.js +43 -0
- package/src/tui/hooks/useProject.js +41 -0
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
// @vectora/cli — credential persistence (~/.vectora/credentials)
|
|
2
|
+
import fs from 'node:fs/promises';
|
|
3
|
+
import path from 'node:path';
|
|
4
|
+
import os from 'node:os';
|
|
5
|
+
|
|
6
|
+
// On-disk location for stored credentials: ~/.vectora/credentials (a JSON document).
const CREDENTIALS_DIR = path.join(os.homedir(), '.vectora');
const CREDENTIALS_PATH = path.join(CREDENTIALS_DIR, 'credentials');
|
|
8
|
+
|
|
9
|
+
/**
|
|
10
|
+
* @typedef {Object} Credentials
|
|
11
|
+
* @property {'device-flow'|'api-key'} method
|
|
12
|
+
* @property {string} apiToken — the vk_ prefixed API key
|
|
13
|
+
* @property {string} userId
|
|
14
|
+
* @property {string} orgId
|
|
15
|
+
* @property {string} [email]
|
|
16
|
+
* @property {string} [orgName]
|
|
17
|
+
* @property {string} createdAt
|
|
18
|
+
*/
|
|
19
|
+
|
|
20
|
+
/**
 * Load stored credentials from ~/.vectora/credentials.
 * @returns {Promise<Credentials|null>} parsed credentials, or null when the
 *   file is missing, unreadable, or contains invalid JSON.
 */
export async function getCredentials() {
  let raw;
  try {
    raw = await fs.readFile(CREDENTIALS_PATH, 'utf-8');
  } catch {
    return null; // no credentials file — not logged in
  }
  try {
    return JSON.parse(raw);
  } catch {
    return null; // corrupt file — treat as not logged in
  }
}
|
|
32
|
+
|
|
33
|
+
/**
 * Persist credentials to disk with owner-only permissions.
 *
 * The directory is created with mode 0o700 and the file is written with
 * mode 0o600 up front, rather than chmod-ing after the write, so the API
 * token is never world-readable even briefly. Mode flags are ignored on
 * Windows, so a best-effort chmod is kept for pre-existing files on Unix
 * (writeFile's `mode` only applies when the file is first created).
 *
 * @param {Credentials} creds
 */
export async function saveCredentials(creds) {
  await fs.mkdir(CREDENTIALS_DIR, { recursive: true, mode: 0o700 });
  await fs.writeFile(CREDENTIALS_PATH, JSON.stringify(creds, null, 2), {
    encoding: 'utf-8',
    mode: 0o600,
  });
  // Tighten permissions on a file that already existed with a looser mode.
  try { await fs.chmod(CREDENTIALS_PATH, 0o600); } catch { /* Windows */ }
}
|
|
43
|
+
|
|
44
|
+
/**
 * Remove the credentials file. All failures are ignored — a missing file is
 * simply the "already logged out" case.
 */
export async function clearCredentials() {
  try {
    await fs.rm(CREDENTIALS_PATH, { force: true });
  } catch {
    // Nothing to clear (or no permission) — treat as success.
  }
}
|
|
50
|
+
|
|
51
|
+
/**
 * Resolve the stored API token, failing loudly when the user is not
 * authenticated.
 * @returns {Promise<string>} the vk_-prefixed API token
 * @throws {AuthError} when no credentials (or no token) are stored
 */
export async function requireToken() {
  const creds = await getCredentials();
  const token = creds?.apiToken;
  if (token) return token;
  // Lazy import keeps this module free of a static dependency on errors.js.
  const { AuthError } = await import('./errors.js');
  throw new AuthError();
}
|
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
// @vectora/cli — config persistence (~/.vectora/config.json)
|
|
2
|
+
import Conf from 'conf';
|
|
3
|
+
|
|
4
|
+
// Hard-coded fallbacks — also the source of truth for which config keys exist.
const DEFAULTS = {
  apiUrl: 'https://api.vectora.build',
  appUrl: 'https://vectora.build',
  defaultWorkspace: null,
  defaultProject: null,
  outputFormat: 'table',
  color: true,
};

// Valid key names for get/set — derived from DEFAULTS so they cannot drift.
const ALLOWED_KEYS = new Set(Object.keys(DEFAULTS));

// Persistent backing store (the `conf` package). Values are validated against
// the schema on every set(). NOTE(review): the on-disk path is determined by
// `conf` from projectName and is platform-dependent — confirm against docs.
const config = new Conf({
  projectName: 'vectora',
  defaults: DEFAULTS,
  schema: {
    apiUrl: { type: 'string' },
    appUrl: { type: 'string' },
    defaultWorkspace: { type: ['string', 'null'] },
    defaultProject: { type: ['string', 'null'] },
    outputFormat: { type: 'string', enum: ['table', 'json'] },
    color: { type: 'boolean' },
  },
});
|
|
27
|
+
|
|
28
|
+
/**
 * Full effective configuration: stored values layered over DEFAULTS.
 * Returns a fresh object; mutating it does not touch the store.
 * @returns {object}
 */
export function getConfig() {
  return Object.assign({}, DEFAULTS, config.store);
}
|
|
31
|
+
|
|
32
|
+
/**
 * Read a single config value.
 * @param {string} key — one of ALLOWED_KEYS
 * @returns {unknown} the stored value (or its default)
 * @throws {Error} when `key` is not a known config key
 */
export function getConfigValue(key) {
  if (!ALLOWED_KEYS.has(key)) {
    throw new Error(`Unknown config key: ${key}`);
  }
  return config.get(key);
}
|
|
36
|
+
|
|
37
|
+
/**
 * Set a config value with basic coercion.
 *
 * - `color` accepts the string 'true' (or boolean true) as truthy; anything
 *   else becomes false (CLI arguments arrive as strings).
 * - Passing 'null' or '' clears the key by DELETING it, which reverts it to
 *   its default. Previously this wrote a literal `null`, which violated the
 *   Conf schema for non-nullable keys (e.g. `apiUrl` is `type: 'string'`)
 *   and made Conf throw instead of clearing.
 *
 * @param {string} key — one of ALLOWED_KEYS
 * @param {string|boolean} value
 * @throws {Error} when `key` is not a known config key
 */
export function setConfigValue(key, value) {
  if (!ALLOWED_KEYS.has(key)) throw new Error(`Unknown config key: ${key}`);
  // Type coercion for boolean
  if (key === 'color') {
    config.set(key, value === 'true' || value === true);
    return;
  }
  // Clearing: delete so Conf falls back to the key's default value.
  if (value === 'null' || value === '') {
    config.delete(key);
    return;
  }
  config.set(key, value);
}
|
|
51
|
+
|
|
52
|
+
/**
 * Raw contents of the backing store.
 * NOTE(review): returns the live store object — presumably callers treat it
 * as read-only; confirm before mutating the result.
 * @returns {object}
 */
export function listConfig() {
  return config.store;
}
|
|
55
|
+
|
|
56
|
+
/**
 * Delete all persisted config values (Conf's clear()).
 */
export function resetConfig() {
  config.clear();
}
|
|
59
|
+
|
|
60
|
+
export { ALLOWED_KEYS };
|
|
@@ -0,0 +1,94 @@
|
|
|
1
|
+
// @vectora/cli — constants
|
|
2
|
+
// Inlined from @vectora/engine to avoid dependency in API-connected mode.
|
|
3
|
+
|
|
4
|
+
// CLI version string. NOTE(review): presumably kept in sync with
// package.json's "version" by hand — confirm the release process.
export const VERSION = '0.1.0';

// Branding used by the TUI and output helpers.
export const BRAND = {
  name: 'VECTORA',
  tagline: 'AI-powered product development',
  accent: '#00e5ff',
  green: '#00c853',
};

/** Forge orchestrator phases (MVP pipeline). */
export const FORGE_PHASES = [
  'analyze-codebase',
  'plan-mvp',
  'scope-loop',
  'plan-roadmap',
  'package-handoff',
];

/** Temper orchestrator phases (post-MVP pipeline). */
export const TEMPER_PHASES = [
  'assess-health',
  'plan-growth',
  'harden-reliability',
  'plan-scale',
  'plan-platform',
];

/** All valid phases for `vectora run` (Forge first, then Temper). */
export const VALID_PHASES = [...FORGE_PHASES, ...TEMPER_PHASES];

/** Directories to skip during workspace scanning (matched by exact name). */
export const WORKSPACE_IGNORED_DIRS = new Set([
  '.git', 'node_modules', '.next', 'dist', 'build', 'coverage',
  '.turbo', '.cache', 'vendor', 'target', '.idea', '.vscode',
  '.svn', '__pycache__', '.tox', '.eggs', '.mypy_cache',
  '.pytest_cache', '.nuxt', '.output', 'out',
]);

/**
 * Manifest files to detect during workspace scanning. Names in this set are
 * also exempt from the scanner's dotfile skip (e.g. '.env.example').
 */
export const WORKSPACE_MANIFEST_FILES = new Set([
  'package.json', 'pnpm-lock.yaml', 'yarn.lock', 'package-lock.json',
  'tsconfig.json', 'next.config.js', 'next.config.mjs', 'next.config.ts',
  'pyproject.toml', 'requirements.txt', 'go.mod', 'Cargo.toml',
  'pom.xml', 'build.gradle', 'docker-compose.yml', 'Dockerfile',
  'README.md', '.env.example',
]);

/**
 * Manifest files whose CONTENTS are safe to read and send to the API.
 * Only small, non-secret config files. Lock files excluded (too large).
 */
export const WORKSPACE_MANIFEST_CONTENT_FILES = new Set([
  'package.json',
  'tsconfig.json',
  'next.config.js', 'next.config.mjs', 'next.config.ts',
  'pyproject.toml', 'requirements.txt',
  'go.mod', 'go.sum',
  'Cargo.toml',
  'pom.xml', 'build.gradle',
  'docker-compose.yml', 'Dockerfile',
  '.env.example',
]);

/**
 * File patterns that should NEVER have their contents read.
 * Matched against the LOWERCASED filename: strings match exactly or as a
 * suffix, RegExp entries are tested directly (see isSensitiveFile in
 * workspace-scanner.js).
 */
export const SENSITIVE_FILE_PATTERNS = [
  '.env', '.env.local', '.env.production', '.env.development',
  '.env.staging', '.env.test',
  /^\.env\..+/, // .env.anything (except .env.example handled above)
  '.npmrc', '.pypirc',
  'credentials', 'credentials.json',
  'secrets.yml', 'secrets.yaml', 'secrets.json',
  'id_rsa', 'id_ed25519', 'id_ecdsa',
  /\.pem$/, /\.key$/, /\.p12$/, /\.pfx$/,
  /\.tfstate$/,
];

/** Default cap on sampled file paths per workspace scan. */
export const DEFAULT_WORKSPACE_SCAN_MAX_FILES = 800;

/** Max size in bytes for reading a manifest file's contents (100KB). */
export const MAX_MANIFEST_CONTENT_SIZE = 100 * 1024;

/** Exit codes (see handleError in errors.js). */
export const EXIT = {
  OK: 0,      // success
  ERROR: 1,   // generic or API failure
  AUTH: 2,    // not authenticated
  OFFLINE: 3, // API unreachable
};
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
// @vectora/cli — error classes and handler
|
|
2
|
+
import chalk from 'chalk';
|
|
3
|
+
import { EXIT } from './constants.js';
|
|
4
|
+
|
|
5
|
+
/**
 * Error raised when the API returns a non-success HTTP response.
 * Carries the HTTP status and an optional machine-readable error code.
 */
export class VectoraApiError extends Error {
  /**
   * @param {string} message — human-readable description
   * @param {number} statusCode — HTTP status from the API
   * @param {string} [code] — machine-readable error code, if the API sent one
   */
  constructor(message, statusCode, code) {
    super(message);
    this.name = 'VectoraApiError';
    Object.assign(this, { statusCode, code });
  }
}
|
|
13
|
+
|
|
14
|
+
/**
 * Error raised when the Vectora API cannot be reached at all — a network
 * failure rather than an HTTP error response.
 */
export class OfflineError extends Error {
  /** @param {string} apiUrl — base URL that could not be reached */
  constructor(apiUrl) {
    super(`Cannot reach the Vectora API at ${apiUrl}`);
    this.name = 'OfflineError';
  }
}
|
|
20
|
+
|
|
21
|
+
/**
 * Error raised when a command requires authentication but no (valid)
 * credentials are stored.
 */
export class AuthError extends Error {
  /** @param {string} [message] — override for the default login hint */
  constructor(message = 'Not authenticated. Run `vectora login` to authenticate.') {
    super(message);
    this.name = 'AuthError';
  }
}
|
|
27
|
+
|
|
28
|
+
/**
 * Top-level error handler: prints a colored, user-facing message for each
 * known error type and sets the matching process exit code. Never throws.
 * @param {Error} error
 */
export function handleError(error) {
  // Connectivity failures get their own exit code plus a remediation hint.
  if (error instanceof OfflineError) {
    console.error(chalk.red('Offline — ') + error.message);
    console.error(chalk.dim('Check your internet connection or run: vectora config set apiUrl <url>'));
    process.exitCode = EXIT.OFFLINE;
    return;
  }

  // Missing/expired credentials.
  if (error instanceof AuthError) {
    console.error(chalk.red('Auth — ') + error.message);
    process.exitCode = EXIT.AUTH;
    return;
  }

  // API returned an HTTP error; add hints for statuses users can act on.
  if (error instanceof VectoraApiError) {
    const { statusCode } = error;
    const label = statusCode >= 500 ? 'Server error' : 'API error';
    console.error(chalk.red(`${label} (${statusCode}) — `) + error.message);
    if (statusCode === 403) {
      console.error(chalk.dim('Your plan may not include this feature. Check your billing at the dashboard.'));
    }
    if (statusCode === 429) {
      console.error(chalk.dim('Rate limited. Wait a moment and try again.'));
    }
    process.exitCode = EXIT.ERROR;
    return;
  }

  // Anything else: generic failure.
  console.error(chalk.red('Error — ') + (error.message || String(error)));
  process.exitCode = EXIT.ERROR;
}
|
|
@@ -0,0 +1,98 @@
|
|
|
1
|
+
// @vectora/cli — output formatters
|
|
2
|
+
import chalk from 'chalk';
|
|
3
|
+
import Table from 'cli-table3';
|
|
4
|
+
|
|
5
|
+
/**
 * Print a unicode box-drawing table to stdout.
 * @param {string[]} head — column headers (rendered bold cyan)
 * @param {string[][]} rows — cell values, one inner array per row
 */
export function renderTable(head, rows) {
  // Box-drawing character set for the table border.
  const borderChars = {
    top: '─', 'top-mid': '┬', 'top-left': '┌', 'top-right': '┐',
    bottom: '─', 'bottom-mid': '┴', 'bottom-left': '└', 'bottom-right': '┘',
    left: '│', 'left-mid': '├', mid: '─', 'mid-mid': '┼',
    right: '│', 'right-mid': '┤', middle: '│',
  };
  const table = new Table({
    head: head.map((label) => chalk.cyan.bold(label)),
    style: { head: [], border: ['dim'] },
    chars: borderChars,
  });
  rows.forEach((row) => table.push(row));
  console.log(table.toString());
}
|
|
24
|
+
|
|
25
|
+
/**
 * Print any value as pretty-printed (2-space indented) JSON.
 * @param {unknown} data
 */
export function renderJson(data) {
  const text = JSON.stringify(data, null, 2);
  console.log(text);
}
|
|
32
|
+
|
|
33
|
+
/**
 * Colorize a phase status string: green for completed, red for failed,
 * yellow for running, dim for queued and anything unrecognized.
 * @param {string} status
 * @returns {string}
 */
export function renderPhaseStatus(status) {
  const palette = new Map([
    ['completed', chalk.green],
    ['failed', chalk.red],
    ['running', chalk.yellow],
    ['queued', chalk.dim],
  ]);
  const paint = palette.get(status) ?? chalk.dim;
  return paint(status);
}
|
|
47
|
+
|
|
48
|
+
/**
 * Render a 10-segment visual readiness bar, e.g. "[███░░░░░░░] 30%".
 *
 * The score is clamped to [0, 100] (non-finite input becomes 0) before
 * rendering — previously a negative or >100 score made `'█'.repeat()` or
 * `'░'.repeat()` throw a RangeError.
 *
 * @param {number} score — 0 to 100; out-of-range values are clamped
 * @returns {string}
 */
export function renderReadinessBar(score) {
  const clamped = Math.min(100, Math.max(0, Number.isFinite(score) ? score : 0));
  const filled = Math.round(clamped / 10);
  const empty = 10 - filled;
  const bar = chalk.green('█'.repeat(filled)) + chalk.dim('░'.repeat(empty));
  return `[${bar}] ${clamped}%`;
}
|
|
59
|
+
|
|
60
|
+
/**
 * Format an ISO timestamp as a short relative string ("just now", "5m ago",
 * "3h ago"), falling back to a locale date string for anything older than a
 * day. Empty/missing input renders as a dim em-dash.
 * @param {string} iso
 * @returns {string}
 */
export function renderTime(iso) {
  if (!iso) return chalk.dim('—');
  const MINUTE = 60_000;
  const HOUR = 60 * MINUTE;
  const DAY = 24 * HOUR;
  const elapsed = Date.now() - new Date(iso).getTime();
  let label;
  if (elapsed < MINUTE) {
    label = 'just now';
  } else if (elapsed < HOUR) {
    label = `${Math.floor(elapsed / MINUTE)}m ago`;
  } else if (elapsed < DAY) {
    label = `${Math.floor(elapsed / HOUR)}h ago`;
  } else {
    label = new Date(iso).toLocaleDateString();
  }
  return chalk.dim(label);
}
|
|
75
|
+
|
|
76
|
+
/**
 * Print a green-check success line.
 * @param {string} msg
 */
export function success(msg) {
  console.log(`${chalk.green('✓')} ${msg}`);
}
|
|
83
|
+
|
|
84
|
+
/**
 * Print a yellow-bang warning line.
 * @param {string} msg
 */
export function warn(msg) {
  console.log(`${chalk.yellow('!')} ${msg}`);
}
|
|
91
|
+
|
|
92
|
+
/**
 * Print a cyan-i informational line.
 * @param {string} msg
 */
export function info(msg) {
  console.log(`${chalk.cyan('i')} ${msg}`);
}
|
|
@@ -0,0 +1,92 @@
|
|
|
1
|
+
// @vectora/cli — SSE stream consumer
|
|
2
|
+
// Handles both GET-based (phase SSE) and POST-based (idea-chat SSE) patterns.
|
|
3
|
+
|
|
4
|
+
/**
 * Parse a server-sent-events byte stream into `{ event, data }` objects.
 *
 * Per the WHATWG SSE spec, the colon after a field name may be followed by
 * at most ONE optional space, and lines may end in CRLF — both forms are
 * accepted here (the original only matched "field: " with a space and left
 * a trailing "\r" in values on CRLF streams). `data` payloads are
 * JSON-parsed; frames with malformed JSON are dropped silently. Comment
 * lines (": ping" keepalives) and blank lines are ignored.
 *
 * Each data line is dispatched immediately rather than accumulated until a
 * blank line — presumably the Vectora API emits one JSON object per data
 * line; this matches the original behavior.
 *
 * @param {ReadableStream} body — e.g. `res.body` from fetch
 * @yields {{ event: string, data: any }}
 */
export async function* parseSseStream(body) {
  const reader = body.getReader();
  const decoder = new TextDecoder();
  let buffer = '';
  let currentEvent = '';

  // Strip an optional "field:" prefix (with at most one following space);
  // returns null when the line is not that field.
  const fieldValue = (line, field) => {
    if (!line.startsWith(`${field}:`)) return null;
    let value = line.slice(field.length + 1);
    if (value.startsWith(' ')) value = value.slice(1);
    return value;
  };

  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      buffer += decoder.decode(value, { stream: true });
      const lines = buffer.split('\n');
      buffer = lines.pop() ?? ''; // keep the trailing partial line

      for (let line of lines) {
        if (line.endsWith('\r')) line = line.slice(0, -1); // CRLF support

        const eventName = fieldValue(line, 'event');
        if (eventName !== null) {
          currentEvent = eventName.trim();
          continue;
        }
        const data = fieldValue(line, 'data');
        if (data !== null) {
          try {
            yield { event: currentEvent, data: JSON.parse(data) };
          } catch { /* skip malformed frames */ }
          currentEvent = '';
        }
        // Comments (": ping") and blank lines fall through and are ignored.
      }
    }
  } finally {
    reader.releaseLock();
  }
}
|
|
42
|
+
|
|
43
|
+
/**
 * Stream phase progress events for a job via GET SSE.
 *
 * The URL is built with `URL`/`searchParams` so the token is properly
 * percent-encoded instead of string-concatenated. NOTE(review): the token
 * travels in the query string — presumably required because this GET SSE
 * endpoint authenticates via query param; confirm it is excluded from
 * server access logs, or move it to an Authorization header like
 * streamIdeaChat does.
 *
 * On failure the server's error message is surfaced when available,
 * mirroring streamIdeaChat's error handling.
 *
 * @param {string} baseUrl — API base, e.g. https://api.vectora.build
 * @param {string} jobId
 * @param {string} token — vk_ API token
 * @yields {{ event: string, data: object }}
 * @throws {Error} when the response is not OK or has no body
 */
export async function* streamPhaseProgress(baseUrl, jobId, token) {
  const url = new URL(`${baseUrl}/v1/phases/${jobId}`);
  url.searchParams.set('token', token);

  const res = await fetch(url, {
    signal: AbortSignal.timeout(5 * 60 * 1000), // 5 min cap on the stream
  });

  if (!res.ok || !res.body) {
    let errMsg = `Phase SSE failed: HTTP ${res.status}`;
    try {
      const errBody = await res.json();
      errMsg = errBody.error ?? errBody.message ?? errMsg;
    } catch { /* non-JSON error body — keep the generic message */ }
    throw new Error(errMsg);
  }

  yield* parseSseStream(res.body);
}
|
|
61
|
+
|
|
62
|
+
/**
 * Stream idea-chat events for a project via POST SSE.
 * @param {string} baseUrl — API base URL
 * @param {string} token — vk_ API token (sent as a Bearer header)
 * @param {string} projectId
 * @param {string} message — the user's chat message
 * @param {object} [opts] — extra body fields merged into the request payload
 * @yields {{ event: string, data: object }}
 * @throws {Error} with the server-provided message when the request fails
 */
export async function* streamIdeaChat(baseUrl, token, projectId, message, opts) {
  const FIVE_MINUTES = 5 * 60 * 1000;
  const res = await fetch(`${baseUrl}/v1/projects/${projectId}/idea-chat`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${token}`,
    },
    body: JSON.stringify({ message, ...opts }),
    signal: AbortSignal.timeout(FIVE_MINUTES),
  });

  if (!res.ok || !res.body) {
    let reason = `Idea chat failed: HTTP ${res.status}`;
    try {
      const payload = await res.json();
      reason = payload.error ?? payload.message ?? reason;
    } catch {
      // Error body was not JSON — keep the generic HTTP message.
    }
    throw new Error(reason);
  }

  yield* parseSseStream(res.body);
}
|
|
@@ -0,0 +1,227 @@
|
|
|
1
|
+
// @vectora/cli — workspace snapshot collector
|
|
2
|
+
// Hardened BFS scanner with symlink loop detection, binary skip,
|
|
3
|
+
// cross-platform path normalization, and manifest content reading.
|
|
4
|
+
import fs from 'node:fs/promises';
|
|
5
|
+
import path from 'node:path';
|
|
6
|
+
import {
|
|
7
|
+
WORKSPACE_IGNORED_DIRS,
|
|
8
|
+
WORKSPACE_MANIFEST_FILES,
|
|
9
|
+
WORKSPACE_MANIFEST_CONTENT_FILES,
|
|
10
|
+
SENSITIVE_FILE_PATTERNS,
|
|
11
|
+
DEFAULT_WORKSPACE_SCAN_MAX_FILES,
|
|
12
|
+
MAX_MANIFEST_CONTENT_SIZE,
|
|
13
|
+
} from './constants.js';
|
|
14
|
+
|
|
15
|
+
// Binary file extensions — files matching these are skipped during sampling.
// Matching is done by isBinaryFile() against the LOWERCASED path.extname().
// NOTE(review): the '.DS_Store' entry can never match — extname('.DS_Store')
// is '' (a leading-dot filename has no extension) and the lookup is
// lowercased anyway; dotfiles are also filtered earlier by the scanner's
// hidden-file skip, so the entry is dead but harmless.
const BINARY_EXTENSIONS = new Set([
  '.png', '.jpg', '.jpeg', '.gif', '.bmp', '.ico', '.webp', '.svg',
  '.mp3', '.mp4', '.wav', '.avi', '.mov', '.mkv', '.flac',
  '.zip', '.gz', '.tar', '.rar', '.7z', '.bz2', '.xz',
  '.wasm', '.so', '.dll', '.dylib', '.exe', '.bin',
  '.pdf', '.doc', '.docx', '.xls', '.xlsx', '.ppt', '.pptx',
  '.ttf', '.otf', '.woff', '.woff2', '.eot',
  '.pyc', '.pyo', '.class', '.o', '.a', '.lib',
  '.sqlite', '.db', '.sqlite3',
  '.DS_Store', '.lock',
]);
|
|
27
|
+
|
|
28
|
+
/**
 * Infer conventional source-root directories ('src', 'app', …) from the
 * sampled file list. Only top-level directories that actually contain at
 * least one scanned file are reported.
 * @param {string[]} files — forward-slash relative paths
 * @returns {string[]} sorted, de-duplicated root directory names
 */
function inferSourceRoots(files) {
  const KNOWN_ROOTS = new Set(['src', 'app', 'pages', 'server', 'lib', 'cmd', 'internal', 'pkg']);
  const found = files
    .map((file) => file.split('/'))
    .filter((parts) => parts.length >= 2 && KNOWN_ROOTS.has(parts[0]))
    .map((parts) => parts[0]);
  return [...new Set(found)].sort();
}
|
|
42
|
+
|
|
43
|
+
/**
 * True when a filename matches the sensitive-file deny-list and must never
 * have its contents read (even if it is manifest-like). Comparison is
 * case-insensitive: string patterns match the exact name or a suffix,
 * RegExp patterns are tested directly.
 * @param {string} name — bare filename (no directory part)
 * @returns {boolean}
 */
function isSensitiveFile(name) {
  const lowered = name.toLowerCase();
  for (const pattern of SENSITIVE_FILE_PATTERNS) {
    const hit = pattern instanceof RegExp
      ? pattern.test(lowered)
      : lowered === pattern || lowered.endsWith(pattern);
    if (hit) return true;
  }
  return false;
}
|
|
54
|
+
|
|
55
|
+
/**
 * Convert Windows backslash separators to forward slashes so relative paths
 * are stable across platforms in the snapshot payload.
 * @param {string} p
 * @returns {string}
 */
function normalizePath(p) {
  return p.split('\\').join('/');
}
|
|
61
|
+
|
|
62
|
+
/**
 * Read a manifest file as UTF-8 text, or return null when it is missing,
 * unreadable, or larger than MAX_MANIFEST_CONTENT_SIZE bytes.
 * @param {string} absPath — absolute path to the file
 * @returns {Promise<string|null>}
 */
async function readManifestContent(absPath) {
  try {
    const { size } = await fs.stat(absPath);
    if (size > MAX_MANIFEST_CONTENT_SIZE) return null;
    return await fs.readFile(absPath, 'utf-8');
  } catch {
    return null; // missing file, permission error, etc. — caller skips it
  }
}
|
|
76
|
+
|
|
77
|
+
/**
 * Collect a workspace snapshot via BFS directory traversal.
 *
 * Hardened against:
 * - Symlink loops (tracks visited directory realpaths)
 * - Binary files (skipped by extension when sampling)
 * - Sensitive files (contents never read; see isSensitiveFile)
 * - Windows paths (normalized to forward slashes)
 * - Deep nesting (MAX_DEPTH cap)
 * - Oversized manifests (content reads are size-capped)
 *
 * NOTE(review): once the sample cap stops the walk early, `totalFiles` and
 * `totalDirs` reflect only the portion visited, and `truncated` is a
 * heuristic (`totalFiles > safeMax`) rather than an exact indicator.
 *
 * @param {string} workspaceRoot — absolute path to the workspace
 * @param {number} [maxFiles] — cap on sample files (default 800, clamped to [50, 5000])
 * @returns {Promise<object>} snapshot: sampled paths, manifests + safe
 *   contents, inferred source roots, counts, and up to 10 skip warnings
 */
export async function collectWorkspaceSnapshot(workspaceRoot, maxFiles = DEFAULT_WORKSPACE_SCAN_MAX_FILES) {
  const safeMax = Math.max(50, Math.min(maxFiles, 5000));
  const MAX_DEPTH = 30;
  const queue = [{ rel: '', depth: 0 }]; // BFS frontier of relative dir paths
  const visitedDirs = new Set(); // realpath set for symlink loop detection
  const sampleFiles = [];
  const manifests = [];
  const keyFiles = [];
  const manifestContents = {}; // relPath → file content (safe manifests only)
  const warnings = []; // capped at 10 entries
  let totalFiles = 0;
  let totalDirs = 0;

  // Resolve workspace root realpath once
  let rootReal;
  try {
    rootReal = await fs.realpath(workspaceRoot);
  } catch {
    rootReal = workspaceRoot;
  }

  // Stop early once the sample is full — remaining queued dirs are skipped.
  while (queue.length && sampleFiles.length < safeMax) {
    const { rel, depth } = queue.shift();

    // Depth guard
    if (depth > MAX_DEPTH) {
      if (warnings.length < 10) warnings.push(`Skipped: ${rel} (depth > ${MAX_DEPTH})`);
      continue;
    }

    const abs = path.join(rootReal, rel);

    // Symlink loop detection: resolve realpath and check if visited
    let realDir;
    try {
      realDir = await fs.realpath(abs);
    } catch {
      continue; // broken symlink or permission error
    }

    if (visitedDirs.has(realDir)) {
      if (warnings.length < 10) warnings.push(`Skipped symlink loop: ${rel}`);
      continue;
    }
    visitedDirs.add(realDir);

    let entries;
    try {
      entries = await fs.readdir(abs, { withFileTypes: true });
    } catch {
      continue; // permission denied or other fs error
    }

    for (const entry of entries) {
      // Skip ignored directories by name
      if (WORKSPACE_IGNORED_DIRS.has(entry.name)) continue;

      // Skip hidden files/dirs (dotfiles) except known ones (e.g. .env.example)
      if (entry.name.startsWith('.') && !WORKSPACE_MANIFEST_FILES.has(entry.name)) continue;

      const entryRel = normalizePath(rel ? `${rel}/${entry.name}` : entry.name);

      if (entry.isDirectory() || entry.isSymbolicLink()) {
        // For symlinks, verify the target is a directory (stat follows links)
        if (entry.isSymbolicLink()) {
          try {
            const linkTarget = path.join(abs, entry.name);
            const stat = await fs.stat(linkTarget);
            if (!stat.isDirectory()) {
              // It's a symlink to a file — treat as file below
              totalFiles++;
              if (sampleFiles.length < safeMax && !isBinaryFile(entry.name)) {
                sampleFiles.push(entryRel);
              }
              continue;
            }
          } catch {
            continue; // broken symlink
          }
        }
        totalDirs++;
        queue.push({ rel: entryRel, depth: depth + 1 });
      } else if (entry.isFile()) {
        totalFiles++;

        const isManifest = WORKSPACE_MANIFEST_FILES.has(entry.name);
        if (isManifest) {
          manifests.push(entryRel);
        }

        // Key files: manifests at workspace root (rel === '' at the root)
        if (!rel && isManifest) {
          keyFiles.push(entryRel);
        }

        // Read safe manifest contents (not sensitive, in the allow-list)
        if (WORKSPACE_MANIFEST_CONTENT_FILES.has(entry.name) && !isSensitiveFile(entry.name)) {
          const content = await readManifestContent(path.join(abs, entry.name));
          if (content !== null) {
            manifestContents[entryRel] = content;
          }
        }

        // Sample file path (skip binaries)
        if (sampleFiles.length < safeMax && !isBinaryFile(entry.name)) {
          sampleFiles.push(entryRel);
        }
      }
      // Other entry kinds (FIFOs, sockets, …) are ignored.
    }
  }

  const srcRoots = inferSourceRoots(sampleFiles);

  return {
    scannedAt: new Date().toISOString(),
    sampledFileCount: sampleFiles.length,
    maxFiles: safeMax,
    manifests,
    manifestContents,
    srcRoots,
    keyFiles,
    sampleFiles,
    totalFiles,
    totalDirs,
    truncated: totalFiles > safeMax,
    // Spread-in `warnings` only when non-empty so the key is absent otherwise.
    ...(warnings.length > 0 && { warnings }),
  };
}
|
|
220
|
+
|
|
221
|
+
/**
 * True when a filename's extension marks it as binary content. The
 * extension is lowercased before lookup, so matching is case-insensitive.
 * @param {string} name — bare filename
 * @returns {boolean}
 */
function isBinaryFile(name) {
  return BINARY_EXTENSIONS.has(path.extname(name).toLowerCase());
}