sessioncast-cli 2.0.2 → 2.0.3
This diff reflects the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
- package/dist/agent/runner.js +23 -0
- package/dist/agent/session-handler.d.ts +1 -2
- package/dist/agent/session-handler.js +34 -79
- package/dist/agent/tmux-executor.d.ts +3 -33
- package/dist/agent/tmux-executor.js +3 -50
- package/dist/agent/tmux.d.ts +2 -6
- package/dist/agent/tmux.js +2 -9
- package/dist/agent/types.d.ts +0 -10
- package/dist/agent/websocket.d.ts +2 -21
- package/dist/agent/websocket.js +10 -46
- package/dist/commands/agent.js +3 -0
- package/dist/index.js +14 -0
- package/dist/sentry.d.ts +4 -0
- package/dist/sentry.js +87 -0
- package/package.json +2 -1
- package/dist/autopilot/index.d.ts +0 -94
- package/dist/autopilot/index.js +0 -322
- package/dist/autopilot/mission-analyzer.d.ts +0 -27
- package/dist/autopilot/mission-analyzer.js +0 -232
- package/dist/autopilot/project-detector.d.ts +0 -12
- package/dist/autopilot/project-detector.js +0 -326
- package/dist/autopilot/source-scanner.d.ts +0 -26
- package/dist/autopilot/source-scanner.js +0 -285
- package/dist/autopilot/speckit-generator.d.ts +0 -60
- package/dist/autopilot/speckit-generator.js +0 -511
- package/dist/autopilot/types.d.ts +0 -110
- package/dist/autopilot/types.js +0 -6
- package/dist/autopilot/workflow-generator.d.ts +0 -33
- package/dist/autopilot/workflow-generator.js +0 -278
- package/dist/commands/autopilot.d.ts +0 -30
- package/dist/commands/autopilot.js +0 -262
- package/dist/commands/project.d.ts +0 -33
- package/dist/commands/project.js +0 -350
- package/dist/project/executor.d.ts +0 -73
- package/dist/project/executor.js +0 -437
- package/dist/project/index.d.ts +0 -4
- package/dist/project/index.js +0 -20
- package/dist/project/manager.d.ts +0 -66
- package/dist/project/manager.js +0 -290
- package/dist/project/relay-client.d.ts +0 -37
- package/dist/project/relay-client.js +0 -204
- package/dist/project/types.d.ts +0 -48
- package/dist/project/types.js +0 -3
- package/dist/utils/fileUtils.d.ts +0 -28
- package/dist/utils/fileUtils.js +0 -159
package/dist/utils/fileUtils.js
DELETED
@@ -1,159 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-        desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || (function () {
-    var ownKeys = function(o) {
-        ownKeys = Object.getOwnPropertyNames || function (o) {
-            var ar = [];
-            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
-            return ar;
-        };
-        return ownKeys(o);
-    };
-    return function (mod) {
-        if (mod && mod.__esModule) return mod;
-        var result = {};
-        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
-        __setModuleDefault(result, mod);
-        return result;
-    };
-})();
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.sanitizeFilename = sanitizeFilename;
-exports.getUploadDirectory = getUploadDirectory;
-exports.handleUploadChunk = handleUploadChunk;
-exports.cleanupStaleUploads = cleanupStaleUploads;
-const fs = __importStar(require("fs"));
-const path = __importStar(require("path"));
-// Track ongoing uploads
-const pendingUploads = new Map();
-// Cleanup stale uploads after 5 minutes
-const UPLOAD_TIMEOUT_MS = 5 * 60 * 1000;
-/**
- * Sanitize filename to prevent path traversal attacks
- */
-function sanitizeFilename(filename) {
-    // Remove any path components
-    const basename = path.basename(filename);
-    // Remove potentially dangerous characters
-    return basename.replace(/[<>:"/\\|?*\x00-\x1f]/g, '_');
-}
-/**
- * Get the working directory for file uploads
- * Priority: SESSIONCAST_UPLOAD_DIR env var > current working directory
- */
-function getUploadDirectory() {
-    return process.env.SESSIONCAST_UPLOAD_DIR || process.cwd();
-}
-/**
- * Handle file upload chunk
- * Returns upload result when all chunks are received, null otherwise
- */
-async function handleUploadChunk(sessionId, meta, payload) {
-    const filename = sanitizeFilename(meta.filename);
-    const uploadKey = `${sessionId}:${filename}`;
-    const chunkIndex = parseInt(meta.chunkIndex, 10);
-    const totalChunks = parseInt(meta.totalChunks, 10);
-    const size = parseInt(meta.size, 10);
-    // Validate parameters
-    if (isNaN(chunkIndex) || isNaN(totalChunks) || isNaN(size)) {
-        return { success: false, error: 'Invalid upload metadata' };
-    }
-    // Check file size limit (10MB)
-    const MAX_FILE_SIZE = 10 * 1024 * 1024;
-    if (size > MAX_FILE_SIZE) {
-        return { success: false, error: `File too large. Max size: ${MAX_FILE_SIZE / 1024 / 1024}MB` };
-    }
-    // Get or create upload state
-    let state = pendingUploads.get(uploadKey);
-    if (!state) {
-        state = {
-            filename,
-            totalChunks,
-            receivedChunks: new Map(),
-            size,
-            mimeType: meta.mimeType,
-            createdAt: Date.now(),
-        };
-        pendingUploads.set(uploadKey, state);
-    }
-    // Decode and store chunk
-    try {
-        const chunkBuffer = Buffer.from(payload, 'base64');
-        state.receivedChunks.set(chunkIndex, chunkBuffer);
-        console.log(`[FileUpload] Received chunk ${chunkIndex + 1}/${totalChunks} for ${filename}`);
-    }
-    catch (e) {
-        pendingUploads.delete(uploadKey);
-        return { success: false, error: 'Failed to decode chunk data' };
-    }
-    // Check if all chunks received
-    if (state.receivedChunks.size === totalChunks) {
-        try {
-            // Assemble file from chunks
-            const chunks = [];
-            for (let i = 0; i < totalChunks; i++) {
-                const chunk = state.receivedChunks.get(i);
-                if (!chunk) {
-                    throw new Error(`Missing chunk ${i}`);
-                }
-                chunks.push(chunk);
-            }
-            const fileBuffer = Buffer.concat(chunks);
-            // Determine save path
-            const uploadDir = getUploadDirectory();
-            let savePath = path.join(uploadDir, filename);
-            // Handle filename collision
-            if (fs.existsSync(savePath)) {
-                const ext = path.extname(filename);
-                const basename = path.basename(filename, ext);
-                let counter = 1;
-                while (fs.existsSync(savePath)) {
-                    savePath = path.join(uploadDir, `${basename}_${counter}${ext}`);
-                    counter++;
-                }
-            }
-            // Write file
-            fs.writeFileSync(savePath, fileBuffer);
-            console.log(`[FileUpload] File saved: ${savePath}`);
-            // Cleanup
-            pendingUploads.delete(uploadKey);
-            return { success: true, path: savePath };
-        }
-        catch (e) {
-            pendingUploads.delete(uploadKey);
-            const errorMsg = e instanceof Error ? e.message : 'Unknown error';
-            return { success: false, error: `Failed to save file: ${errorMsg}` };
-        }
-    }
-    // Not all chunks received yet
-    return null;
-}
-/**
- * Cleanup stale uploads periodically
- */
-function cleanupStaleUploads() {
-    const now = Date.now();
-    for (const [key, state] of pendingUploads.entries()) {
-        if (now - state.createdAt > UPLOAD_TIMEOUT_MS) {
-            console.log(`[FileUpload] Cleaning up stale upload: ${state.filename}`);
-            pendingUploads.delete(key);
-        }
-    }
-}
-// Run cleanup every minute
-setInterval(cleanupStaleUploads, 60 * 1000);