inflight-cli 1.0.4 → 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +19 -26
- package/dist/commands/login.js +42 -47
- package/dist/commands/logout.d.ts +1 -0
- package/dist/commands/logout.js +14 -0
- package/dist/commands/preview.d.ts +7 -0
- package/dist/commands/preview.js +256 -0
- package/dist/commands/share.js +14 -19
- package/dist/commands/workspace.d.ts +1 -0
- package/dist/commands/workspace.js +51 -0
- package/dist/index.js +14 -12
- package/dist/lib/api.d.ts +3 -3
- package/dist/lib/api.js +4 -3
- package/dist/lib/config.d.ts +1 -1
- package/dist/lib/config.js +7 -4
- package/dist/lib/env.d.ts +2 -0
- package/dist/lib/env.js +2 -0
- package/dist/lib/files.d.ts +29 -0
- package/dist/lib/files.js +187 -0
- package/dist/lib/git.d.ts +25 -0
- package/dist/lib/git.js +158 -13
- package/dist/lib/progress.d.ts +18 -0
- package/dist/lib/progress.js +106 -0
- package/dist/lib/share-api.d.ts +48 -0
- package/dist/lib/share-api.js +186 -0
- package/dist/lib/vercel.d.ts +14 -7
- package/dist/lib/vercel.js +92 -105
- package/dist/providers/index.d.ts +1 -3
- package/dist/providers/vercel.d.ts +1 -2
- package/dist/providers/vercel.js +61 -17
- package/package.json +2 -2
package/dist/lib/api.js
CHANGED
|
@@ -1,5 +1,6 @@
|
|
|
1
|
-
|
|
2
|
-
|
|
1
|
+
import { API_URL } from "./env.js";
|
|
2
|
+
export async function apiGetMe(apiKey) {
|
|
3
|
+
const res = await fetch(`${API_URL}/api/cli/me`, {
|
|
3
4
|
headers: { Authorization: `Bearer ${apiKey}` },
|
|
4
5
|
});
|
|
5
6
|
if (!res.ok)
|
|
@@ -7,7 +8,7 @@ export async function apiGetMe(apiKey, apiUrl) {
|
|
|
7
8
|
return res.json();
|
|
8
9
|
}
|
|
9
10
|
export async function apiCreateVersion(opts) {
|
|
10
|
-
const res = await fetch(`${
|
|
11
|
+
const res = await fetch(`${API_URL}/api/cli/version/create`, {
|
|
11
12
|
method: "POST",
|
|
12
13
|
headers: {
|
|
13
14
|
"Content-Type": "application/json",
|
package/dist/lib/config.d.ts
CHANGED
|
@@ -1,9 +1,9 @@
|
|
|
1
1
|
export interface GlobalAuth {
|
|
2
2
|
apiKey: string;
|
|
3
|
-
apiUrl: string;
|
|
4
3
|
}
|
|
5
4
|
export declare function readGlobalAuth(): GlobalAuth | null;
|
|
6
5
|
export declare function writeGlobalAuth(auth: GlobalAuth): void;
|
|
6
|
+
export declare function clearGlobalAuth(): void;
|
|
7
7
|
export interface WorkspaceConfig {
|
|
8
8
|
workspaceId: string;
|
|
9
9
|
}
|
package/dist/lib/config.js
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
import { homedir, platform } from "os";
|
|
2
2
|
import { join } from "path";
|
|
3
|
-
import { readFileSync, writeFileSync, mkdirSync, existsSync } from "fs";
|
|
3
|
+
import { readFileSync, writeFileSync, mkdirSync, existsSync, unlinkSync } from "fs";
|
|
4
4
|
function getGlobalConfigDir() {
|
|
5
5
|
if (platform() === "darwin") {
|
|
6
6
|
return join(homedir(), "Library", "Application Support", "co.inflight.cli");
|
|
@@ -11,13 +11,11 @@ function getGlobalConfigDir() {
|
|
|
11
11
|
return join(homedir(), ".local", "share", "co.inflight.cli");
|
|
12
12
|
}
|
|
13
13
|
const AUTH_FILE = join(getGlobalConfigDir(), "auth.json");
|
|
14
|
-
const DEFAULT_API_URL = process.env.INFLIGHT_API_URL ?? "https://api.inflight.co";
|
|
15
14
|
export function readGlobalAuth() {
|
|
16
15
|
if (!existsSync(AUTH_FILE))
|
|
17
16
|
return null;
|
|
18
17
|
try {
|
|
19
|
-
|
|
20
|
-
return { ...auth, apiUrl: DEFAULT_API_URL };
|
|
18
|
+
return JSON.parse(readFileSync(AUTH_FILE, "utf-8"));
|
|
21
19
|
}
|
|
22
20
|
catch {
|
|
23
21
|
return null;
|
|
@@ -27,6 +25,11 @@ export function writeGlobalAuth(auth) {
|
|
|
27
25
|
mkdirSync(getGlobalConfigDir(), { recursive: true });
|
|
28
26
|
writeFileSync(AUTH_FILE, JSON.stringify(auth, null, 2), { mode: 0o600 });
|
|
29
27
|
}
|
|
28
|
+
/**
 * Delete the persisted auth file, logging the user out locally.
 *
 * Fix: the previous existsSync-then-unlinkSync pair had a TOCTOU race —
 * if the file disappeared between the check and the unlink, the call threw.
 * Clearing auth is best-effort, so any unlink failure (already absent,
 * permissions) is swallowed: callers treat "cleared" and "was never there"
 * identically.
 */
export function clearGlobalAuth() {
    try {
        unlinkSync(AUTH_FILE);
    }
    catch {
        // File already absent or not removable — nothing to clear.
    }
}
|
|
30
33
|
// --- Project-level config (per-directory, like .vercel/project.json) ---
|
|
31
34
|
const WORKSPACE_FILE = ".inflight/workspace.json";
|
|
32
35
|
export function readWorkspaceConfig(cwd) {
|
package/dist/lib/files.d.ts
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* File Utilities
|
|
3
|
+
*
|
|
4
|
+
* Handles file reading and chunking for uploading codebases to the share API.
|
|
5
|
+
* Properly handles binary files by encoding them as base64.
|
|
6
|
+
*
|
|
7
|
+
* Ported from inflight-claude-plugin/plugin/local-mcp/utils/file-utils.ts
|
|
8
|
+
*/
|
|
9
|
+
export interface FileContent {
|
|
10
|
+
content: string;
|
|
11
|
+
encoding: "utf-8" | "base64";
|
|
12
|
+
}
|
|
13
|
+
export interface FileMap {
|
|
14
|
+
[path: string]: string | FileContent;
|
|
15
|
+
}
|
|
16
|
+
export declare function readProjectFiles(rootDir: string): FileMap;
|
|
17
|
+
export declare const MAX_CHUNK_SIZE: number;
|
|
18
|
+
export declare const CHUNK_THRESHOLD: number;
|
|
19
|
+
export declare function calculateTotalSize(files: FileMap): number;
|
|
20
|
+
export declare function needsChunkedUpload(files: FileMap): boolean;
|
|
21
|
+
export declare function chunkFiles(files: FileMap, maxChunkSize?: number): FileMap[];
|
|
22
|
+
export declare function getChunkStats(chunks: FileMap[]): {
|
|
23
|
+
totalChunks: number;
|
|
24
|
+
chunkSizes: number[];
|
|
25
|
+
totalFiles: number;
|
|
26
|
+
totalSize: number;
|
|
27
|
+
};
|
|
28
|
+
export declare function getFileCount(files: FileMap): number;
|
|
29
|
+
export declare function formatSize(bytes: number): string;
|
|
@@ -0,0 +1,187 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* File Utilities
|
|
3
|
+
*
|
|
4
|
+
* Handles file reading and chunking for uploading codebases to the share API.
|
|
5
|
+
* Properly handles binary files by encoding them as base64.
|
|
6
|
+
*
|
|
7
|
+
* Ported from inflight-claude-plugin/plugin/local-mcp/utils/file-utils.ts
|
|
8
|
+
*/
|
|
9
|
+
import * as fs from "fs";
|
|
10
|
+
import * as path from "path";
|
|
11
|
+
/**
 * Extensions whose files are always uploaded base64-encoded rather than as text.
 * NOTE(review): .svg is XML text but is treated as a binary asset here — confirm intentional.
 */
const BINARY_EXTENSIONS = new Set([
    // Images
    ".png", ".jpg", ".jpeg", ".gif", ".webp", ".ico", ".svg", ".bmp", ".tiff", ".avif", ".heic", ".heif",
    // Fonts
    ".woff", ".woff2", ".ttf", ".eot", ".otf",
    // Audio/Video
    ".mp3", ".mp4", ".webm", ".ogg", ".wav", ".m4a", ".flac", ".avi", ".mov", ".mkv",
    // Documents
    ".pdf", ".doc", ".docx", ".xls", ".xlsx", ".ppt", ".pptx",
    // Archives
    ".zip", ".tar", ".gz", ".bz2", ".7z", ".rar",
    // Other binary
    ".exe", ".dll", ".so", ".dylib", ".bin", ".dat", ".db", ".sqlite", ".sqlite3", ".wasm", ".lockb",
]);
// True when the file's extension (case-insensitive) is a known binary format.
function isBinaryFile(filename) {
    return BINARY_EXTENSIONS.has(path.extname(filename).toLowerCase());
}
|
|
29
|
+
/**
 * Heuristic binary sniff over the first 8 KiB of a buffer.
 *
 * A NUL byte is conclusive. Otherwise the buffer counts as binary when more
 * than 10% of the sampled bytes are control characters other than tab, LF,
 * or CR. An empty buffer is reported as text.
 */
function containsBinaryContent(buffer) {
    const sampleSize = Math.min(buffer.length, 8192);
    let controlBytes = 0;
    for (let i = 0; i < sampleSize; i++) {
        const byte = buffer[i];
        if (byte === 0)
            return true; // NUL never appears in normal text
        const isAllowedControl = byte === 9 || byte === 10 || byte === 13;
        if (byte < 32 && !isAllowedControl)
            controlBytes++;
    }
    return controlBytes / sampleSize > 0.1;
}
|
|
42
|
+
// Repo-relative paths matching any of these are never uploaded: VCS internals,
// dependency trees, build artifacts, caches, lock files, OS/editor droppings,
// and coverage output.
const EXCLUDE_PATTERNS = [
    // Version control
    /^\.git$/,
    /(^|\/)\.git\//,
    // Dependencies
    /(^|\/)node_modules\//,
    // Build outputs
    /(^|\/)\.next\//,
    /(^|\/)dist\//,
    /(^|\/)build\//,
    /(^|\/)out\//,
    /(^|\/)\.output\//,
    /(^|\/)\.svelte-kit\//,
    // Cache directories
    /(^|\/)\.vercel\//,
    /(^|\/)\.turbo\//,
    /(^|\/)\.cache\//,
    /(^|\/)\.parcel-cache\//,
    /(^|\/)\.vite\//,
    /(^|\/)\.nuxt\//,
    /(^|\/)\.expo\//,
    // Lock files
    /package-lock\.json$/,
    /yarn\.lock$/,
    /pnpm-lock\.yaml$/,
    /bun\.lockb$/,
    // OS/editor files
    /\.DS_Store$/,
    /\.log$/,
    /Thumbs\.db$/,
    // Coverage
    /(^|\/)coverage\//,
    /(^|\/)\.nyc_output\//,
];
// Basename patterns for env files (.env, .env.local, foo.env.bak, ...),
// which may hold secrets and are always excluded.
const ENV_PATTERNS = [/^\.env/, /\.env\./];
// True when a "/"-separated repo-relative path should be skipped during upload.
function shouldExclude(filePath) {
    const baseName = filePath.split("/").pop() ?? "";
    return (EXCLUDE_PATTERNS.some((pattern) => pattern.test(filePath)) ||
        ENV_PATTERNS.some((pattern) => pattern.test(baseName)));
}
|
|
85
|
+
/**
 * Recursively read every file under rootDir into a FileMap, skipping
 * excluded paths (see shouldExclude) and encoding binary files as base64.
 *
 * Fix: relative paths are normalized to "/" separators before matching.
 * Previously on Windows, path.relative() produced "\"-separated paths that
 * never matched the "/"-based EXCLUDE_PATTERNS — node_modules, .git, etc.
 * were silently included — and FileMap keys were platform-dependent.
 * On POSIX (path.sep === "/") the normalization is a no-op.
 *
 * Unreadable directories and files are skipped silently (best-effort upload).
 */
export function readProjectFiles(rootDir) {
    const files = {};
    function walkDir(dir) {
        let entries;
        try {
            entries = fs.readdirSync(dir);
        }
        catch {
            return; // unreadable directory — skip it
        }
        for (const entry of entries) {
            const fullPath = path.join(dir, entry);
            // Normalize to "/" so exclusion matching and FileMap keys are
            // platform-independent.
            const relativePath = path.relative(rootDir, fullPath).split(path.sep).join("/");
            if (shouldExclude(relativePath))
                continue;
            let stat;
            try {
                stat = fs.statSync(fullPath);
            }
            catch {
                continue; // broken symlink or permission error
            }
            if (stat.isDirectory()) {
                walkDir(fullPath);
            }
            else if (stat.isFile()) {
                try {
                    const buffer = fs.readFileSync(fullPath);
                    // Extension check first; content sniffing only as a fallback.
                    const isBinaryByExt = isBinaryFile(entry);
                    const isBinaryByContent = !isBinaryByExt && containsBinaryContent(buffer);
                    if (isBinaryByExt || isBinaryByContent) {
                        files[relativePath] = { content: buffer.toString("base64"), encoding: "base64" };
                    }
                    else {
                        files[relativePath] = buffer.toString("utf-8");
                    }
                }
                catch {
                    // Skip files that can't be read
                }
            }
        }
    }
    walkDir(rootDir);
    return files;
}
|
|
131
|
+
// ---------------------------------------------------------------------------
// Chunking: large projects are uploaded across multiple requests.
// Sizes are measured in JS string length (UTF-16 code units / base64 chars) —
// an approximation of payload bytes, assumed close enough for budgeting.
// ---------------------------------------------------------------------------
/** Maximum payload per upload chunk. */
export const MAX_CHUNK_SIZE = 2 * 1024 * 1024; // 2MB per chunk
/** Total project size above which the chunked upload path is used. */
export const CHUNK_THRESHOLD = 3 * 1024 * 1024; // 3MB triggers chunked mode
// Size of one FileMap entry (plain string or {content, encoding} record).
function getFileSize(file) {
    if (typeof file === "string")
        return file.length;
    return file.content.length;
}
/** Sum of the sizes of every file in the map. */
export function calculateTotalSize(files) {
    let total = 0;
    for (const file of Object.values(files))
        total += getFileSize(file);
    return total;
}
/** Whether the map is big enough to require chunked upload. */
export function needsChunkedUpload(files) {
    return calculateTotalSize(files) > CHUNK_THRESHOLD;
}
/**
 * Partition a FileMap into chunks of at most maxChunkSize each.
 * Files are packed smallest-first; a single file larger than the limit
 * is shipped in a chunk of its own.
 */
export function chunkFiles(files, maxChunkSize = MAX_CHUNK_SIZE) {
    const chunks = [];
    let pending = {};
    let pendingSize = 0;
    const flush = () => {
        if (Object.keys(pending).length > 0) {
            chunks.push(pending);
            pending = {};
            pendingSize = 0;
        }
    };
    const smallestFirst = Object.entries(files).sort((a, b) => getFileSize(a[1]) - getFileSize(b[1]));
    for (const [filePath, file] of smallestFirst) {
        const size = getFileSize(file);
        if (size > maxChunkSize) {
            // Oversized file: seal the current chunk, ship this one alone.
            flush();
            chunks.push({ [filePath]: file });
            continue;
        }
        if (pendingSize + size > maxChunkSize)
            flush();
        pending[filePath] = file;
        pendingSize += size;
    }
    flush();
    return chunks;
}
/** Summary statistics for a set of chunks (for progress/log output). */
export function getChunkStats(chunks) {
    const chunkSizes = chunks.map((chunk) => calculateTotalSize(chunk));
    return {
        totalChunks: chunks.length,
        chunkSizes,
        totalFiles: chunks.reduce((n, chunk) => n + Object.keys(chunk).length, 0),
        totalSize: chunkSizes.reduce((n, size) => n + size, 0),
    };
}
|
|
178
|
+
/** Number of entries in a FileMap. */
export function getFileCount(files) {
    return Object.keys(files).length;
}
/** Human-readable size: "512 B", "1.5 KB", "2.0 MB". */
export function formatSize(bytes) {
    const KB = 1024;
    const MB = 1024 * 1024;
    if (bytes >= MB)
        return `${(bytes / MB).toFixed(1)} MB`;
    if (bytes >= KB)
        return `${(bytes / KB).toFixed(1)} KB`;
    return `${bytes} B`;
}
|
package/dist/lib/git.d.ts
CHANGED
|
@@ -7,5 +7,30 @@ export interface GitInfo {
|
|
|
7
7
|
isDirty: boolean;
|
|
8
8
|
diff: string | null;
|
|
9
9
|
}
|
|
10
|
+
export interface DiffScope {
|
|
11
|
+
mode: "branch" | "uncommitted" | "staged" | "commits" | "files";
|
|
12
|
+
commitCount?: number;
|
|
13
|
+
paths?: string[];
|
|
14
|
+
}
|
|
15
|
+
export interface GitDiffResult {
|
|
16
|
+
diff: string;
|
|
17
|
+
diffStat: string;
|
|
18
|
+
baseBranch: string;
|
|
19
|
+
currentBranch: string;
|
|
20
|
+
}
|
|
21
|
+
export declare function getDefaultBranch(cwd: string): string;
|
|
22
|
+
export declare function getRemoteUrl(cwd: string): string | null;
|
|
23
|
+
/**
|
|
24
|
+
* Get a structured diff result for the share API, supporting multiple scope modes.
|
|
25
|
+
*/
|
|
26
|
+
export declare function getGitDiffResult(cwd: string, scope?: DiffScope): GitDiffResult | null;
|
|
27
|
+
/**
|
|
28
|
+
* Parse a --stat output into structured file change info.
|
|
29
|
+
*/
|
|
30
|
+
export declare function parseDiffStat(diffStat: string): Array<{
|
|
31
|
+
file: string;
|
|
32
|
+
insertions: number;
|
|
33
|
+
deletions: number;
|
|
34
|
+
}>;
|
|
10
35
|
export declare function getGitInfo(cwd: string): GitInfo;
|
|
11
36
|
export declare function isGitRepo(cwd: string): boolean;
|
package/dist/lib/git.js
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import { execSync } from "child_process";
|
|
1
|
+
import { execSync, execFileSync } from "child_process";
|
|
2
2
|
function run(cmd, cwd) {
|
|
3
3
|
try {
|
|
4
4
|
return execSync(cmd, { cwd, stdio: ["pipe", "pipe", "pipe"] })
|
|
@@ -9,7 +9,15 @@ function run(cmd, cwd) {
|
|
|
9
9
|
return null;
|
|
10
10
|
}
|
|
11
11
|
}
|
|
12
|
-
|
|
12
|
+
// Run a shell command with a 10MB stdout buffer — full git diffs can exceed
// Node's default maxBuffer. Returns trimmed stdout, or null on any failure.
function runLarge(cmd, cwd) {
    try {
        const stdout = execSync(cmd, { cwd, encoding: "utf-8", maxBuffer: 10 * 1024 * 1024 });
        return stdout.trim();
    }
    catch {
        return null;
    }
}
|
|
20
|
+
// Files matching these patterns are stripped from the diff before sending.
|
|
13
21
|
// They consume token budget without providing useful feedback signal.
|
|
14
22
|
const EXCLUDED_FILE_PATTERNS = [
|
|
15
23
|
/^(dist|build|\.next|out)\//,
|
|
@@ -30,33 +38,170 @@ function isExcludedFile(filePath) {
|
|
|
30
38
|
* Each file section starts with "diff --git a/<path> b/<path>".
|
|
31
39
|
*/
|
|
32
40
|
function filterDiff(raw) {
|
|
33
|
-
// Split on file boundaries, keeping the delimiter
|
|
34
41
|
const sections = raw.split(/(?=^diff --git )/m);
|
|
35
42
|
return sections
|
|
36
43
|
.filter((section) => {
|
|
37
44
|
const match = section.match(/^diff --git a\/.+ b\/(.+)/);
|
|
38
45
|
if (!match)
|
|
39
|
-
return true;
|
|
46
|
+
return true;
|
|
40
47
|
return !isExcludedFile(match[1]);
|
|
41
48
|
})
|
|
42
49
|
.join("");
|
|
43
50
|
}
|
|
51
|
+
// Upper bound on diff characters sent to the API (keeps token usage sane).
const MAX_DIFF_CHARS = 100_000;
// Strip excluded files from a raw diff, truncate to the cap, then trim.
// NOTE(review): truncation can cut a file section mid-hunk — assumed acceptable.
function capDiff(raw) {
    return filterDiff(raw).slice(0, MAX_DIFF_CHARS).trim();
}
|
|
44
56
|
// Diff of HEAD against its merge base with origin/main (or origin/master),
// falling back to the last commit when no merge base can be resolved.
// Returns the filtered/capped diff, or null when git produces no output.
function getDiff(cwd) {
    const mergeBase = run("git merge-base origin/main HEAD", cwd) ?? run("git merge-base origin/master HEAD", cwd);
    const range = mergeBase ? `${mergeBase}..HEAD` : "HEAD~1..HEAD";
    const raw = run(`git diff ${range}`, cwd);
    if (!raw)
        return null;
    return capDiff(raw);
}
|
|
63
|
+
/**
 * Best guess at the repository's default branch.
 *
 * Resolution order: origin's HEAD symbolic ref, then a local "main" branch,
 * then a local "master" branch, finally defaulting to "main" when nothing
 * can be resolved.
 */
export function getDefaultBranch(cwd) {
    const remoteHead = run("git symbolic-ref refs/remotes/origin/HEAD", cwd);
    if (remoteHead) {
        const branch = remoteHead.replace("refs/remotes/origin/", "");
        if (branch)
            return branch;
    }
    for (const candidate of ["main", "master"]) {
        try {
            // show-ref exits non-zero when the local branch doesn't exist.
            execSync(`git show-ref --verify --quiet refs/heads/${candidate}`, { cwd, stdio: "pipe" });
            return candidate;
        }
        catch {
            // Not present locally — try the next candidate.
        }
    }
    return "main";
}
|
|
85
|
+
// URL of the "origin" remote, or null when git fails (e.g. no origin remote).
export function getRemoteUrl(cwd) {
    return run("git remote get-url origin", cwd);
}
|
|
88
|
+
/**
 * Get a structured diff result for the share API, supporting multiple scope modes.
 *
 * Modes (scope.mode, default "branch"):
 *   - "uncommitted": working tree vs HEAD
 *   - "staged":      index vs HEAD
 *   - "commits":     last scope.commitCount commits (falls back to the root
 *                    commit when there isn't enough history)
 *   - "files":       branch diff restricted to scope.paths
 *   - "branch":      current branch vs the default branch, trying local base,
 *                    then origin/base, then plain "git diff HEAD"
 *
 * Returns null when not in a git repo, when the chosen scope yields no diff,
 * or when filtering/capping leaves nothing.
 */
export function getGitDiffResult(cwd, scope) {
    // Fails outside a git repo — treated as "no diff available".
    const currentBranch = run("git rev-parse --abbrev-ref HEAD", cwd);
    if (!currentBranch)
        return null;
    const baseBranch = getDefaultBranch(cwd);
    const mode = scope?.mode ?? "branch";
    let diff = "";
    let diffStat = "";
    switch (mode) {
        case "uncommitted": {
            // Working tree (staged + unstaged) relative to HEAD.
            diff = runLarge("git diff HEAD", cwd) ?? "";
            diffStat = run("git diff HEAD --stat", cwd) ?? "";
            break;
        }
        case "staged": {
            // Index only.
            diff = runLarge("git diff --cached", cwd) ?? "";
            diffStat = run("git diff --cached --stat", cwd) ?? "";
            break;
        }
        case "commits": {
            const count = scope?.commitCount ?? 1;
            diff = runLarge(`git diff HEAD~${count}...HEAD`, cwd) ?? "";
            diffStat = run(`git diff HEAD~${count}...HEAD --stat`, cwd) ?? "";
            if (!diff) {
                // Not enough history — diff from root commit
                const rootCommit = run("git rev-list --max-parents=0 HEAD", cwd)?.split("\n")[0];
                if (rootCommit) {
                    diff = runLarge(`git diff ${rootCommit}...HEAD`, cwd) ?? "";
                    diffStat = run(`git diff ${rootCommit}...HEAD --stat`, cwd) ?? "";
                }
            }
            break;
        }
        case "files": {
            const paths = scope?.paths ?? [];
            if (paths.length > 0) {
                // Try the local base branch first, then its origin-tracking ref.
                const targets = [`${baseBranch}...HEAD`, `origin/${baseBranch}...HEAD`];
                for (const target of targets) {
                    try {
                        // execFileSync (argv form) so user-supplied paths are
                        // never interpreted by a shell.
                        diff = execFileSync("git", ["diff", target, "--", ...paths], {
                            cwd,
                            encoding: "utf-8",
                            maxBuffer: 10 * 1024 * 1024,
                        }).trim();
                        diffStat = execFileSync("git", ["diff", target, "--stat", "--", ...paths], {
                            cwd,
                            encoding: "utf-8",
                        }).trim();
                    }
                    catch {
                        // Target ref missing — reset and try the next one.
                        diff = "";
                        diffStat = "";
                    }
                    if (diff)
                        break;
                }
            }
            break;
        }
        case "branch":
        default: {
            // Local base, then origin base, then uncommitted work as last resort.
            const targets = [`${baseBranch}...HEAD`, `origin/${baseBranch}...HEAD`, "HEAD"];
            for (const target of targets) {
                const cmd = target === "HEAD" ? "git diff HEAD" : `git diff ${target}`;
                diff = runLarge(cmd, cwd) ?? "";
                diffStat = run(`${cmd} --stat`, cwd) ?? "";
                if (diff)
                    break;
            }
            break;
        }
    }
    if (!diff.trim())
        return null;
    // Strip noisy files and enforce the size cap before shipping.
    const filtered = capDiff(diff);
    if (!filtered)
        return null;
    return { diff: filtered, diffStat, baseBranch, currentBranch };
}
|
|
170
|
+
/**
 * Parse `git diff --stat` output into per-file insertion/deletion counts.
 *
 * Each "file | N +++---" row reports N total changed lines plus a (possibly
 * scaled) bar of +/- marks; the marks are rescaled so insertions + deletions
 * approximate N. Rows without any marks attribute all N lines to insertions.
 * Binary rows ("| Bin ...") and the trailing summary line are skipped.
 */
export function parseDiffStat(diffStat) {
    const rows = [];
    for (const line of diffStat.split("\n")) {
        if (!line.includes("|"))
            continue;
        const match = line.match(/^\s*(.+?)\s*\|\s*(\d+)\s*([+-]*)/);
        if (!match)
            continue; // e.g. binary rows: "| Bin 0 -> 12 bytes"
        const file = match[1].trim();
        const total = parseInt(match[2], 10);
        const bar = match[3] ?? "";
        const plusMarks = (bar.match(/\+/g) ?? []).length;
        const minusMarks = (bar.match(/-/g) ?? []).length;
        if (plusMarks === 0 && minusMarks === 0) {
            rows.push({ file, insertions: total, deletions: 0 });
            continue;
        }
        // Rescale the bar marks so they sum (approximately) to `total`.
        const scale = total / (plusMarks + minusMarks);
        rows.push({
            file,
            insertions: Math.round(plusMarks * scale),
            deletions: Math.round(minusMarks * scale),
        });
    }
    return rows;
}
|
|
193
|
+
// Placeholder GitInfo returned by getGitInfo() when cwd is not a git repository.
const EMPTY_GIT_INFO = {
    branch: null,
    commitShort: null,
    commitFull: null,
    commitMessage: null,
    remoteUrl: null,
    isDirty: false,
    diff: null,
};
|
|
59
202
|
export function getGitInfo(cwd) {
|
|
203
|
+
if (!isGitRepo(cwd))
|
|
204
|
+
return EMPTY_GIT_INFO;
|
|
60
205
|
return {
|
|
61
206
|
branch: run("git rev-parse --abbrev-ref HEAD", cwd),
|
|
62
207
|
commitShort: run("git rev-parse --short HEAD", cwd),
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Maps raw technical progress messages from the share API to user-friendly equivalents.
|
|
3
|
+
*
|
|
4
|
+
* Phases:
|
|
5
|
+
* 0-12% - Local preparation (git, files, auth)
|
|
6
|
+
* 12-42% - Upload (standard or chunked)
|
|
7
|
+
* 42-70% - Claude analysis (server-side)
|
|
8
|
+
* 70-90% - Prototype deploy (install, start server, tunnel)
|
|
9
|
+
* 90-100% - InFlight version creation & finalization
|
|
10
|
+
*
|
|
11
|
+
* Ported from inflight-claude-plugin/plugin/local-mcp/utils/progress-messages.ts
|
|
12
|
+
*/
|
|
13
|
+
/**
|
|
14
|
+
* Create a progress message handler with built-in deduplication.
|
|
15
|
+
* Returns a function that converts raw progress to friendly messages,
|
|
16
|
+
* returning null for consecutive duplicates.
|
|
17
|
+
*/
|
|
18
|
+
export declare function createProgressHandler(): (percentage: number, rawMessage: string) => string | null;
|
|
@@ -0,0 +1,106 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Maps raw technical progress messages from the share API to user-friendly equivalents.
|
|
3
|
+
*
|
|
4
|
+
* Phases:
|
|
5
|
+
* 0-12% - Local preparation (git, files, auth)
|
|
6
|
+
* 12-42% - Upload (standard or chunked)
|
|
7
|
+
* 42-70% - Claude analysis (server-side)
|
|
8
|
+
* 70-90% - Prototype deploy (install, start server, tunnel)
|
|
9
|
+
* 90-100% - InFlight version creation & finalization
|
|
10
|
+
*
|
|
11
|
+
* Ported from inflight-claude-plugin/plugin/local-mcp/utils/progress-messages.ts
|
|
12
|
+
*/
|
|
13
|
+
// Raw-message → friendly-message table. Scanned in order by
// createProgressHandler(); the first matching pattern wins.
const CONTENT_MATCHERS = [
    // Local prep phase
    { pattern: /Starting share for/i, message: "Preparing your project..." },
    { pattern: /Checking Share API/i, message: "Preparing your project..." },
    { pattern: /Share API is healthy/i, message: "Preparing your project..." },
    { pattern: /Getting git info/i, message: "Loading your code..." },
    { pattern: /Reading project files|Reading all project/i, message: "Loading your code..." },
    { pattern: /Analyzing dependencies/i, message: "Loading your code..." },
    { pattern: /Reading.*analyzed files/i, message: "Loading your code..." },
    { pattern: /Authenticating with InFlight/i, message: "Loading your code..." },
    { pattern: /Starting share on server/i, message: "Uploading your project..." },
    // Chunked upload phase
    { pattern: /Large project.*chunked/i, message: "Uploading your project..." },
    { pattern: /Splitting into.*chunks/i, message: "Uploading your project..." },
    { pattern: /Initializing chunked/i, message: "Uploading your project..." },
    { pattern: /Uploading chunk/i, message: "Uploading your project..." },
    { pattern: /Finalizing upload/i, message: "Upload complete, starting analysis..." },
    // Server sandbox & upload phase
    { pattern: /Validating workspace/i, message: "Setting things up..." },
    { pattern: /Creating sandbox/i, message: "Setting things up..." },
    { pattern: /Uploading.*files/i, message: "Uploading your project..." },
    { pattern: /Writing git diff/i, message: "Preparing your changes for review..." },
    { pattern: /Preparing analysis/i, message: "Getting ready to analyze..." },
    // Claude installation & analysis phase
    { pattern: /Checking Claude|Installing Claude/i, message: "Getting ready to analyze..." },
    { pattern: /Setting up.*user|non-root/i, message: "Getting ready to analyze..." },
    { pattern: /Starting Claude/i, message: "Running a detailed analysis of your changes..." },
    { pattern: /Claude is analyzing/i, message: "Analyzing your changes..." },
    { pattern: /Claude is working/i, message: "Still analyzing..." },
    { pattern: /^Claude:/i, message: "Analyzing your changes..." },
    { pattern: /Analyzing\.\.\. \(\d+s\)/i, message: "Still analyzing..." },
    { pattern: /Claude finished/i, message: "Analysis complete! Building your preview..." },
    // Prototype found
    { pattern: /Prototype ready|Prototype found/i, message: "Analysis complete! Building your preview..." },
    // Deploy phase
    { pattern: /Configuring preview/i, message: "Building your preview..." },
    { pattern: /Installing dependencies/i, message: "Installing dependencies..." },
    { pattern: /Starting dev server/i, message: "Starting your preview..." },
    { pattern: /Waiting for server/i, message: "Starting your preview..." },
    { pattern: /Setting up preview tunnel/i, message: "Setting up your preview link..." },
    { pattern: /Preview tunnel ready/i, message: "Preview link is ready!" },
    { pattern: /Waiting for Vite/i, message: "Almost there..." },
    { pattern: /Preview ready/i, message: "Preview is live!" },
    { pattern: /Preview may still be compiling/i, message: "Almost there..." },
    // Finalization
    { pattern: /Creating InFlight version/i, message: "Creating your InFlight version..." },
    { pattern: /Tracking sandbox/i, message: "Saving your share..." },
    { pattern: /Generating diff summary/i, message: "Summarizing your changes..." },
    { pattern: /Generating review/i, message: "Preparing feedback questions..." },
    { pattern: /^Complete!$/i, message: "All done!" },
    { pattern: /Share complete/i, message: "All done!" },
];
// Percentage-based fallback when no content matcher fires: the first entry
// with percentage <= maxPct is used (see createProgressHandler).
const PHASE_MESSAGES = [
    { maxPct: 5, message: "Preparing your project..." },
    { maxPct: 12, message: "Loading your code..." },
    { maxPct: 42, message: "Uploading your project..." },
    { maxPct: 50, message: "Setting things up..." },
    { maxPct: 55, message: "Getting ready to analyze..." },
    { maxPct: 70, message: "Analyzing your changes..." },
    { maxPct: 80, message: "Building your preview..." },
    { maxPct: 90, message: "Setting up your preview link..." },
    { maxPct: 96, message: "Creating your InFlight version..." },
    { maxPct: 100, message: "Wrapping up..." },
];
|
|
77
|
+
/**
 * Build a stateful mapper from raw (percentage, message) progress events to
 * user-friendly status lines.
 *
 * Resolution order: the first CONTENT_MATCHERS entry whose pattern matches
 * the raw message; otherwise the first PHASE_MESSAGES entry covering the
 * percentage; otherwise a generic fallback. Consecutive duplicates are
 * suppressed by returning null.
 */
export function createProgressHandler() {
    let lastEmitted = "";
    const fromContent = (raw) => CONTENT_MATCHERS.find((m) => m.pattern.test(raw))?.message;
    const fromPhase = (pct) => PHASE_MESSAGES.find((p) => pct <= p.maxPct)?.message;
    return (percentage, rawMessage) => {
        const friendly = fromContent(rawMessage) ?? fromPhase(percentage) ?? "Working on it...";
        if (friendly === lastEmitted)
            return null; // consecutive duplicate — caller should print nothing
        lastEmitted = friendly;
        return friendly;
    };
}
|