@masslessai/push-todo 3.7.6 → 3.7.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/config.js +42 -1
- package/lib/daemon.js +280 -37
- package/lib/self-update.js +115 -0
- package/package.json +1 -1
package/lib/config.js
CHANGED
@@ -192,6 +192,27 @@ export function setAutoCompleteEnabled(enabled) {
   return setConfigValue('AUTO_COMPLETE', enabled ? 'true' : 'false');
 }
 
+/**
+ * Check if auto-update is enabled for daemon self-updates.
+ * Default: true (daemon checks npm hourly and updates when idle)
+ *
+ * @returns {boolean}
+ */
+export function getAutoUpdateEnabled() {
+  const value = getConfigValue('AUTO_UPDATE', 'true');
+  return value.toLowerCase() === 'true' || value === '1' || value.toLowerCase() === 'yes';
+}
+
+/**
+ * Set auto-update setting.
+ *
+ * @param {boolean} enabled
+ * @returns {boolean} True if successful
+ */
+export function setAutoUpdateEnabled(enabled) {
+  return setConfigValue('AUTO_UPDATE', enabled ? 'true' : 'false');
+}
+
 /**
  * Get the maximum batch size for queuing tasks.
  * Default: 5
@@ -270,6 +291,7 @@ export function showSettings() {
   const autoCommit = getAutoCommitEnabled();
   const autoMerge = getAutoMergeEnabled();
   const autoComplete = getAutoCompleteEnabled();
+  const autoUpdate = getAutoUpdateEnabled();
   const batchSize = getMaxBatchSize();
 
   console.log(` auto-commit: ${autoCommit ? 'ON' : 'OFF'}`);
@@ -281,6 +303,9 @@ export function showSettings() {
   console.log(` auto-complete: ${autoComplete ? 'ON' : 'OFF'}`);
   console.log(' Mark task completed after successful merge');
   console.log();
+  console.log(` auto-update: ${autoUpdate ? 'ON' : 'OFF'}`);
+  console.log(' Daemon auto-updates from npm when idle');
+  console.log();
   console.log(` batch-size: ${batchSize}`);
   console.log(' Max tasks for batch queue (1-20)');
   console.log();
@@ -346,6 +371,22 @@ export function toggleSetting(settingName) {
     return false;
   }
 
+  if (normalized === 'auto-update') {
+    const current = getAutoUpdateEnabled();
+    const newValue = !current;
+    if (setAutoUpdateEnabled(newValue)) {
+      console.log(`Auto-update is now ${newValue ? 'ON' : 'OFF'}`);
+      if (newValue) {
+        console.log('Daemon will auto-update from npm when idle (hourly check).');
+      } else {
+        console.log('Daemon will NOT auto-update. Manual updates required.');
+      }
+      return true;
+    }
+    console.error('Failed to update setting');
+    return false;
+  }
+
   if (normalized === 'batch-size') {
     const batchSize = getMaxBatchSize();
     console.log(`Current batch size: ${batchSize}`);
@@ -354,7 +395,7 @@ export function toggleSetting(settingName) {
   }
 
   console.error(`Unknown setting: ${settingName}`);
-  console.error('Available settings: auto-commit, auto-merge, auto-complete, batch-size');
+  console.error('Available settings: auto-commit, auto-merge, auto-complete, auto-update, batch-size');
   return false;
 }
 
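Usage note (not part of the published diff): the new AUTO_UPDATE flag is read and written through the helpers added above. A minimal sketch, assuming the functions are imported from this package's lib/config.js:

// Sketch: reading and toggling the auto-update setting introduced in this release.
import { getAutoUpdateEnabled, setAutoUpdateEnabled, toggleSetting } from './lib/config.js';

console.log('auto-update enabled?', getAutoUpdateEnabled()); // defaults to true

// Turn daemon self-updates off explicitly...
setAutoUpdateEnabled(false);

// ...or flip the current value, which is what the settings toggle above does:
toggleSetting('auto-update');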
package/lib/daemon.js
CHANGED
@@ -19,6 +19,8 @@ import { homedir, hostname, platform } from 'os';
 import { join, dirname } from 'path';
 import { fileURLToPath } from 'url';
 
+import { checkForUpdate, performUpdate } from './self-update.js';
+
 const __filename = fileURLToPath(import.meta.url);
 const __dirname = dirname(__filename);
 
@@ -230,6 +232,48 @@ function getAutoCompleteEnabled() {
   return v.toLowerCase() === 'true' || v === '1' || v.toLowerCase() === 'yes';
 }
 
+function getAutoUpdateEnabled() {
+  const v = getConfigValueFromFile('AUTO_UPDATE', 'true');
+  return v.toLowerCase() === 'true' || v === '1' || v.toLowerCase() === 'yes';
+}
+
+// ==================== Capabilities Detection ====================
+
+let cachedCapabilities = null;
+let lastCapabilityCheck = 0;
+const CAPABILITY_CHECK_INTERVAL = 3600000; // 1 hour
+
+function detectCapabilities() {
+  const caps = {
+    auto_merge: getAutoMergeEnabled(),
+    auto_complete: getAutoCompleteEnabled(),
+    auto_update: getAutoUpdateEnabled(),
+  };
+
+  try {
+    execFileSync('gh', ['--version'], { timeout: 5000, stdio: 'pipe' });
+    try {
+      execFileSync('gh', ['auth', 'status'], { timeout: 5000, stdio: 'pipe' });
+      caps.gh_cli = 'authenticated';
+    } catch {
+      caps.gh_cli = 'installed_not_authenticated';
+    }
+  } catch {
+    caps.gh_cli = 'not_installed';
+  }
+
+  return caps;
+}
+
+function getCapabilities() {
+  const now = Date.now();
+  if (!cachedCapabilities || now - lastCapabilityCheck > CAPABILITY_CHECK_INTERVAL) {
+    cachedCapabilities = detectCapabilities();
+    lastCapabilityCheck = now;
+  }
+  return cachedCapabilities;
+}
+
 // ==================== E2EE Decryption ====================
 
 let decryptTodoField = null;
@@ -335,6 +379,8 @@ async function fetchQueuedTasks() {
     heartbeatHeaders['X-Machine-Id'] = machineId;
     heartbeatHeaders['X-Machine-Name'] = machineName || 'Unknown Mac';
     heartbeatHeaders['X-Git-Remotes'] = gitRemotes.join(',');
+    heartbeatHeaders['X-Daemon-Version'] = getVersion();
+    heartbeatHeaders['X-Capabilities'] = JSON.stringify(getCapabilities());
   }
 
   const response = await apiRequest(`synced-todos?${params}`, {
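With the hunk above, every heartbeat also reports the daemon version and a capabilities snapshot. An illustrative sketch of the added headers (values are examples only; the machine fields come from the surrounding, unchanged code):

// Example heartbeat headers after this change (illustrative values, not real data).
const exampleHeartbeatHeaders = {
  'X-Machine-Id': 'example-machine-id',
  'X-Machine-Name': 'Example Mac',
  'X-Git-Remotes': 'github.com/example/repo',
  'X-Daemon-Version': '3.7.8',            // from getVersion()
  'X-Capabilities': JSON.stringify({
    auto_merge: true,                      // getAutoMergeEnabled()
    auto_complete: true,                   // getAutoCompleteEnabled()
    auto_update: true,                     // getAutoUpdateEnabled()
    gh_cli: 'authenticated'                // or 'installed_not_authenticated' / 'not_installed'
  })
};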
@@ -393,7 +439,15 @@ async function updateTaskStatus(displayNumber, status, extra = {}) {
     });
 
     const result = await response.json().catch(() => null);
-
+    if (!response.ok) {
+      logError(`Task status update failed: HTTP ${response.status} for #${displayNumber} -> ${status}`);
+      return false;
+    }
+    if (result?.success === false) {
+      logError(`Task status update rejected for #${displayNumber} -> ${status}: ${JSON.stringify(result)}`);
+      return false;
+    }
+    return true;
   } catch (error) {
     logError(`Failed to update task status: ${error.message}`);
     return false;
@@ -708,6 +762,110 @@ async function markTaskAsCompleted(displayNumber, taskId, comment) {
   }
 }
 
+/**
+ * Auto-heal: detect if a previous execution already completed work for this task.
+ * Checks for existing branch commits and PRs to avoid redundant re-execution.
+ * Returns true if the task was healed (status updated, no re-execution needed).
+ */
+async function autoHealExistingWork(displayNumber, summary, projectPath) {
+  const suffix = getWorktreeSuffix();
+  const branch = `push-${displayNumber}-${suffix}`;
+  const gitCwd = projectPath || process.cwd();
+
+  try {
+    // Check if branch has commits ahead of main
+    let hasCommits = false;
+    try {
+      const logResult = execSync(
+        `git log HEAD..origin/${branch} --oneline 2>/dev/null || git log HEAD..${branch} --oneline 2>/dev/null`,
+        { cwd: gitCwd, timeout: 10000, stdio: ['ignore', 'pipe', 'pipe'] }
+      ).toString().trim();
+      hasCommits = logResult.length > 0;
+    } catch {
+      // Branch doesn't exist — no previous work
+      return false;
+    }
+
+    if (!hasCommits) {
+      return false;
+    }
+
+    log(`Task #${displayNumber}: found existing commits on branch ${branch}`);
+
+    // Check for existing PR
+    let prUrl = null;
+    let prState = null;
+    try {
+      const prResult = execSync(
+        `gh pr list --head ${branch} --json url,state --jq '.[0]' 2>/dev/null`,
+        { cwd: gitCwd, timeout: 15000, stdio: ['ignore', 'pipe', 'pipe'] }
+      ).toString().trim();
+      if (prResult) {
+        const pr = JSON.parse(prResult);
+        prUrl = pr.url;
+        prState = pr.state; // OPEN or MERGED
+      }
+    } catch {
+      // gh not available or no PR found
+    }
+
+    if (prUrl && prState === 'MERGED') {
+      // PR already merged — task is fully done
+      log(`Task #${displayNumber}: PR already merged (${prUrl}), updating status`);
+      const executionSummary = `Auto-healed: previous execution completed and PR merged. PR: ${prUrl}`;
+      await updateTaskStatus(displayNumber, 'session_finished', {
+        summary: executionSummary
+      });
+      completedToday.push({
+        displayNumber, summary,
+        completedAt: new Date().toISOString(),
+        duration: 0, status: 'session_finished', prUrl
+      });
+      return true;
+    }
+
+    if (prUrl && prState === 'OPEN') {
+      // PR is open — work is done, just needs review
+      log(`Task #${displayNumber}: PR already open (${prUrl}), updating status`);
+      const executionSummary = `Auto-healed: previous execution completed. PR pending review: ${prUrl}`;
+      await updateTaskStatus(displayNumber, 'session_finished', {
+        summary: executionSummary
+      });
+      completedToday.push({
+        displayNumber, summary,
+        completedAt: new Date().toISOString(),
+        duration: 0, status: 'session_finished', prUrl
+      });
+      return true;
+    }
+
+    if (!prUrl) {
+      // Commits exist but no PR — create one and update status
+      log(`Task #${displayNumber}: commits exist but no PR, creating PR`);
+      const newPrUrl = createPRForTask(displayNumber, summary, projectPath);
+      if (newPrUrl) {
+        const executionSummary = `Auto-healed: previous execution had uncommitted PR. Created PR: ${newPrUrl}`;
+        await updateTaskStatus(displayNumber, 'session_finished', {
+          summary: executionSummary
+        });
+        completedToday.push({
+          displayNumber, summary,
+          completedAt: new Date().toISOString(),
+          duration: 0, status: 'session_finished', prUrl: newPrUrl
+        });
+        return true;
+      }
+      // PR creation failed — fall through to re-execute
+      log(`Task #${displayNumber}: PR creation failed, will re-execute`);
+    }
+
+    return false;
+  } catch (error) {
+    log(`Task #${displayNumber}: auto-heal check failed: ${error.message}`);
+    return false;
+  }
+}
+
 // ==================== Stuck Detection ====================
 
 function checkStuckPatterns(displayNumber, line) {
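The auto-heal routine above reduces to a small decision over branch commits and PR state. A condensed restatement for reference (classifyExistingWork is hypothetical and not part of the package; it only mirrors the branches of autoHealExistingWork):

// Hypothetical summary of autoHealExistingWork's possible outcomes.
function classifyExistingWork({ hasCommits, prState }) {
  if (!hasCommits) return 're-execute';                 // no prior work on push-<n>-<suffix>
  if (prState === 'MERGED') return 'mark-finished';     // healed: PR already merged
  if (prState === 'OPEN') return 'mark-finished';       // healed: PR awaiting review
  return 'create-pr-then-finish-or-re-execute';         // commits exist but no PR yet
}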
@@ -868,7 +1026,7 @@ function updateTaskDetail(displayNumber, updates) {
   updateStatusFile();
 }
 
-function executeTask(task) {
+async function executeTask(task) {
   // Decrypt E2EE fields
   task = decryptTaskFields(task);
 
@@ -905,7 +1063,7 @@ function executeTask(task) {
 
   if (!existsSync(projectPath)) {
     logError(`Task #${displayNumber}: Project path does not exist: ${projectPath}`);
-    updateTaskStatus(displayNumber, 'failed', {
+    await updateTaskStatus(displayNumber, 'failed', {
       error: `Project path not found: ${projectPath}`
     });
     return null;
@@ -914,8 +1072,16 @@ function executeTask(task) {
     log(`Task #${displayNumber}: Project ${gitRemote} -> ${projectPath}`);
   }
 
-  // Atomic task claiming
-  if (!claimTask(displayNumber)) {
+  // Atomic task claiming - must await to actually check the result
+  if (!(await claimTask(displayNumber))) {
+    log(`Task #${displayNumber}: claim failed, skipping`);
+    return null;
+  }
+
+  // Auto-heal: check if previous execution already completed work for this task
+  const healed = await autoHealExistingWork(displayNumber, summary, projectPath);
+  if (healed) {
+    log(`Task #${displayNumber}: auto-healed from previous execution, skipping re-execution`);
     return null;
   }
 
@@ -935,7 +1101,7 @@ function executeTask(task) {
   // Create worktree
   const worktreePath = createWorktree(displayNumber, projectPath);
   if (!worktreePath) {
-    updateTaskStatus(displayNumber, 'failed', { error: 'Failed to create git worktree' });
+    await updateTaskStatus(displayNumber, 'failed', { error: 'Failed to create git worktree' });
     taskDetails.delete(displayNumber);
     return null;
   }
@@ -952,15 +1118,8 @@ IMPORTANT:
 2. ALWAYS commit your changes before finishing. Use a descriptive commit message summarizing what you did. This is critical — uncommitted changes will be lost when the worktree is cleaned up.
 3. When you're done, the SessionEnd hook will automatically report completion to Supabase.`;
 
-  //
-
-    event: {
-      type: 'started',
-      timestamp: new Date().toISOString(),
-      machineName: getMachineName() || undefined,
-      summary: summary.slice(0, 100),
-    }
-  });
+  // Note: claimTask() already set status to 'running' with atomic: true
+  // No duplicate status update needed here (was causing race conditions)
 
   // Build Claude command
   const allowedTools = [
@@ -1029,10 +1188,10 @@ IMPORTANT:
       handleTaskCompletion(displayNumber, code);
     });
 
-    child.on('error', (error) => {
+    child.on('error', async (error) => {
      logError(`Task #${displayNumber} error: ${error.message}`);
      runningTasks.delete(displayNumber);
-      updateTaskStatus(displayNumber, 'failed', { error: error.message });
+      await updateTaskStatus(displayNumber, 'failed', { error: error.message });
      taskDetails.delete(displayNumber);
      updateStatusFile();
    });
@@ -1048,13 +1207,13 @@ IMPORTANT:
     return taskInfo;
   } catch (error) {
     logError(`Error starting Claude for task #${displayNumber}: ${error.message}`);
-    updateTaskStatus(displayNumber, 'failed', { error: error.message });
+    await updateTaskStatus(displayNumber, 'failed', { error: error.message });
     taskDetails.delete(displayNumber);
     return null;
   }
 }
 
-function handleTaskCompletion(displayNumber, exitCode) {
+async function handleTaskCompletion(displayNumber, exitCode) {
   const taskInfo = runningTasks.get(displayNumber);
   if (!taskInfo) return;
 
@@ -1097,11 +1256,18 @@ function handleTaskCompletion(displayNumber, exitCode) {
       executionSummary += ` PR: ${prUrl}`;
     }
 
-    updateTaskStatus(displayNumber, 'session_finished', {
+    const statusUpdated = await updateTaskStatus(displayNumber, 'session_finished', {
       duration,
       sessionId,
       summary: executionSummary
     });
+    if (!statusUpdated) {
+      logError(`Task #${displayNumber}: Failed to update status to session_finished — will retry`);
+      // Retry once
+      await updateTaskStatus(displayNumber, 'session_finished', {
+        duration, sessionId, summary: executionSummary
+      });
+    }
 
     if (NOTIFY_ON_COMPLETE) {
       const prNote = prUrl ? ' PR ready for review.' : '';
@@ -1124,7 +1290,10 @@ function handleTaskCompletion(displayNumber, exitCode) {
       const comment = semanticSummary
        ? `${semanticSummary} (${durationStr} on ${machineName})`
        : `Completed in ${durationStr} on ${machineName}`;
-      markTaskAsCompleted(displayNumber, taskId, comment);
+      const completed = await markTaskAsCompleted(displayNumber, taskId, comment);
+      if (!completed) {
+        logError(`Task #${displayNumber}: Failed to mark as completed — status is session_finished but not completed`);
+      }
     }
 
     completedToday.push({
@@ -1145,7 +1314,7 @@ function handleTaskCompletion(displayNumber, exitCode) {
      ? `${failureSummary}\nExit code ${exitCode}. Ran for ${durationStr} on ${machineName}.`
      : `Exit code ${exitCode}: ${stderr.slice(0, 200)}`;
 
-    updateTaskStatus(displayNumber, 'failed', { error: errorMsg });
+    await updateTaskStatus(displayNumber, 'failed', { error: errorMsg });
 
     if (NOTIFY_ON_FAILURE) {
       sendMacNotification(
@@ -1317,6 +1486,54 @@ async function checkTimeouts() {
   }
 }
 
+// ==================== Self-Update ====================
+
+let pendingUpdateVersion = null;
+
+function checkAndApplyUpdate() {
+  const currentVersion = getVersion();
+
+  // Check for update (throttled internally to once per hour)
+  if (!pendingUpdateVersion) {
+    const result = checkForUpdate(currentVersion);
+    if (result.available) {
+      log(`Update available: v${currentVersion} -> v${result.version}`);
+      pendingUpdateVersion = result.version;
+    }
+  }
+
+  // Only apply when no tasks are running
+  if (pendingUpdateVersion && runningTasks.size === 0) {
+    log(`Applying update to v${pendingUpdateVersion}...`);
+    sendMacNotification(
+      'Push Daemon Updating',
+      `v${currentVersion} → v${pendingUpdateVersion}`,
+      'Glass'
+    );
+
+    const success = performUpdate(pendingUpdateVersion);
+    if (success) {
+      log(`Update to v${pendingUpdateVersion} successful. Restarting daemon...`);
+
+      // Spawn new daemon from updated code, then exit
+      const daemonScript = join(__dirname, 'daemon.js');
+      const child = spawn(process.execPath, [daemonScript], {
+        detached: true,
+        stdio: ['ignore', 'ignore', 'ignore'],
+        env: { ...process.env, PUSH_DAEMON: '1' }
+      });
+      writeFileSync(PID_FILE, String(child.pid));
+      child.unref();
+
+      log(`New daemon spawned (PID: ${child.pid}). Old daemon exiting.`);
+      process.exit(0);
+    } else {
+      logError(`Update to v${pendingUpdateVersion} failed, will retry next hour`);
+      pendingUpdateVersion = null;
+    }
+  }
+}
+
 // ==================== Main Loop ====================
 
 async function pollAndExecute() {
@@ -1348,7 +1565,13 @@ async function pollAndExecute() {
       continue;
     }
 
-
+    // Skip tasks already completed this daemon session (prevents re-execution loop)
+    if (completedToday.some(c => c.displayNumber === displayNumber)) {
+      log(`Task #${displayNumber} already completed this session, skipping`);
+      continue;
+    }
+
+    await executeTask(task);
   }
 
   updateStatusFile();
@@ -1364,6 +1587,9 @@ async function mainLoop() {
   log(`Polling interval: ${POLL_INTERVAL / 1000}s`);
   log(`Max concurrent tasks: ${MAX_CONCURRENT_TASKS}`);
   log(`E2EE: ${e2eeAvailable ? 'Available' : 'Not available'}`);
+  log(`Auto-update: ${getAutoUpdateEnabled() ? 'Enabled' : 'Disabled'}`);
+  const caps = getCapabilities();
+  log(`Capabilities: gh=${caps.gh_cli}, auto-merge=${caps.auto_merge}, auto-complete=${caps.auto_complete}`);
   log(`Log file: ${LOG_FILE}`);
 
   // Show registered projects
@@ -1399,6 +1625,11 @@ async function mainLoop() {
     try {
       await checkTimeouts();
       await pollAndExecute();
+
+      // Self-update check (throttled to once per hour, only applies when idle)
+      if (getAutoUpdateEnabled()) {
+        checkAndApplyUpdate();
+      }
     } catch (error) {
       logError(`Poll error: ${error.message}`);
     }
@@ -1415,10 +1646,11 @@ async function mainLoop() {
 
 // ==================== Signal Handling ====================
 
-function cleanup() {
+async function cleanup() {
   log('Daemon shutting down...');
 
-  // Kill running tasks and
+  // Kill running tasks and collect status update promises
+  const statusPromises = [];
   for (const [displayNumber, taskInfo] of runningTasks) {
     log(`Killing task #${displayNumber}`);
     try {
@@ -1426,19 +1658,30 @@ function cleanup() {
     } catch {}
     // Mark as failed so the task doesn't stay as 'running' forever
     const duration = Math.floor((Date.now() - taskInfo.startTime) / 1000);
-
-
-
-
-
-
-
-
-
+    statusPromises.push(
+      updateTaskStatus(displayNumber, 'failed', {
+        error: `Daemon shutdown after ${duration}s`,
+        event: {
+          type: 'daemon_shutdown',
+          timestamp: new Date().toISOString(),
+          machineName: getMachineName() || undefined,
+          summary: `Daemon restarted after ${duration}s`,
+        }
+      })
+    );
     const projectPath = taskProjectPaths.get(displayNumber);
     cleanupWorktree(displayNumber, projectPath);
   }
 
+  // Wait for all status updates to land (max 5s timeout)
+  if (statusPromises.length > 0) {
+    log(`Waiting for ${statusPromises.length} status update(s) to complete...`);
+    await Promise.race([
+      Promise.allSettled(statusPromises),
+      new Promise(resolve => setTimeout(resolve, 5000))
+    ]);
+  }
+
   // Clean up files
   try { unlinkSync(PID_FILE); } catch {}
 
@@ -1453,11 +1696,11 @@ function cleanup() {
   process.exit(0);
 }
 
-process.on('SIGTERM', cleanup);
-process.on('SIGINT', cleanup);
+process.on('SIGTERM', () => cleanup().catch(() => process.exit(1)));
+process.on('SIGINT', () => cleanup().catch(() => process.exit(1)));
 process.on('uncaughtException', (error) => {
   logError(`Uncaught exception: ${error.message}`);
-  cleanup();
+  cleanup().catch(() => process.exit(1));
 });
 
 // ==================== Entry Point ====================
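The reworked cleanup() above bounds how long shutdown waits for in-flight status updates. The same pattern in isolation, as a sketch rather than package code:

// Sketch: wait for pending promises, but never longer than ms milliseconds, and never throw.
async function waitWithTimeout(promises, ms = 5000) {
  await Promise.race([
    Promise.allSettled(promises),
    new Promise(resolve => setTimeout(resolve, ms))
  ]);
}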
package/lib/self-update.js
ADDED

@@ -0,0 +1,115 @@
+/**
+ * Self-update module for Push daemon.
+ *
+ * Checks npm registry for newer versions and auto-updates.
+ * Safety: Only updates to versions published >1 hour ago.
+ * Throttle: Checks at most once per hour.
+ * Config: PUSH_AUTO_UPDATE (default true)
+ */
+
+import { execFileSync } from 'child_process';
+import { existsSync, readFileSync, writeFileSync, mkdirSync } from 'fs';
+import { homedir } from 'os';
+import { join } from 'path';
+
+const PUSH_DIR = join(homedir(), '.push');
+const LAST_UPDATE_CHECK_FILE = join(PUSH_DIR, 'last_update_check');
+const UPDATE_CHECK_INTERVAL = 3600000; // 1 hour
+
+/**
+ * Compare semver strings.
+ * @returns -1 if a < b, 0 if equal, 1 if a > b
+ */
+export function compareSemver(a, b) {
+  const pa = a.split('.').map(Number);
+  const pb = b.split('.').map(Number);
+  for (let i = 0; i < 3; i++) {
+    if ((pa[i] || 0) < (pb[i] || 0)) return -1;
+    if ((pa[i] || 0) > (pb[i] || 0)) return 1;
+  }
+  return 0;
+}
+
+/**
+ * Fetch latest version info from npm registry.
+ * @returns {{ version: string, publishedAt: string | null }} or null on failure
+ */
+function fetchLatestVersionInfo() {
+  try {
+    const result = execFileSync('npm', ['view', '@masslessai/push-todo', '--json'], {
+      timeout: 15000,
+      encoding: 'utf8',
+      stdio: ['ignore', 'pipe', 'pipe']
+    });
+    const data = JSON.parse(result);
+    const latest = data['dist-tags']?.latest || data.version;
+    return {
+      version: latest,
+      publishedAt: data.time?.[latest] || null
+    };
+  } catch {
+    return null;
+  }
+}
+
+/**
+ * Check if an update is available and safe to install.
+ * Throttled to once per hour. Enforces 1-hour age gate on new versions.
+ *
+ * @param {string} currentVersion
+ * @returns {{ available: boolean, version?: string, reason?: string }}
+ */
+export function checkForUpdate(currentVersion) {
+  // Throttle: check at most once per hour
+  if (existsSync(LAST_UPDATE_CHECK_FILE)) {
+    try {
+      const lastCheck = parseInt(readFileSync(LAST_UPDATE_CHECK_FILE, 'utf8').trim(), 10);
+      if (Date.now() - lastCheck < UPDATE_CHECK_INTERVAL) {
+        return { available: false, reason: 'throttled' };
+      }
+    } catch {}
+  }
+
+  // Record check time
+  try {
+    mkdirSync(PUSH_DIR, { recursive: true });
+    writeFileSync(LAST_UPDATE_CHECK_FILE, String(Date.now()));
+  } catch {}
+
+  const info = fetchLatestVersionInfo();
+  if (!info) {
+    return { available: false, reason: 'registry_unreachable' };
+  }
+
+  // Already up to date
+  if (compareSemver(currentVersion, info.version) >= 0) {
+    return { available: false, reason: 'up_to_date' };
+  }
+
+  // Safety: only update to versions published >1 hour ago
+  if (info.publishedAt) {
+    const publishedAge = Date.now() - new Date(info.publishedAt).getTime();
+    if (publishedAge < UPDATE_CHECK_INTERVAL) {
+      return { available: false, reason: 'too_recent', version: info.version };
+    }
+  }
+
+  return { available: true, version: info.version };
+}
+
+/**
+ * Install a specific version globally.
+ * @param {string} targetVersion
+ * @returns {boolean} true if update succeeded
+ */
+export function performUpdate(targetVersion) {
+  try {
+    execFileSync('npm', ['install', '-g', `@masslessai/push-todo@${targetVersion}`], {
+      timeout: 120000,
+      stdio: 'pipe'
+    });
+    return true;
+  } catch {
+    return false;
+  }
+}