jettypod 4.4.81 → 4.4.83
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/apps/dashboard/components/DragContext.tsx +33 -0
- package/apps/dashboard/components/DraggableCard.tsx +30 -2
- package/apps/dashboard/components/KanbanBoard.tsx +94 -86
- package/hooks/post-checkout +11 -1
- package/jettypod.js +2 -1
- package/lib/database.js +165 -9
- package/lib/db-export.js +20 -17
- package/lib/git-hooks/pre-commit +56 -16
- package/package.json +1 -1
package/apps/dashboard/components/DragContext.tsx
CHANGED

@@ -22,6 +22,11 @@ interface EpicDropZoneInfo {
   onReorder?: ReorderHandler;
 }
 
+interface CardPosition {
+  id: number;
+  rect: DOMRect;
+}
+
 interface DragContextType {
   isDragging: boolean;
   draggedItem: WorkItem | null;
@@ -38,6 +43,9 @@ interface DragContextType {
   updatePointerPosition: (x: number, y: number) => void;
   startDrag: (item: WorkItem, cardRect: DOMRect, pointerX: number, pointerY: number) => void;
   handleDrop: () => Promise<void>;
+  registerCardPosition: (id: number, rect: DOMRect) => void;
+  unregisterCard: (id: number) => void;
+  getCardPositions: () => CardPosition[];
 }
 
 const DragContext = createContext<DragContextType>({
@@ -56,6 +64,9 @@ const DragContext = createContext<DragContextType>({
   updatePointerPosition: () => {},
   startDrag: () => {},
   handleDrop: async () => {},
+  registerCardPosition: () => {},
+  unregisterCard: () => {},
+  getCardPositions: () => [],
 });
 
 interface DragProviderProps {
@@ -72,6 +83,7 @@ export function DragProvider({ children, renderDragOverlay }: DragProviderProps)
   const [draggedCardWidth, setDraggedCardWidth] = useState(0);
   const dropZonesRef = useRef<Map<string, DropZoneInfo>>(new Map());
   const epicDropZonesRef = useRef<Map<string, EpicDropZoneInfo>>(new Map());
+  const cardPositionsRef = useRef<Map<number, DOMRect>>(new Map());
   const pointerPositionRef = useRef<{ x: number; y: number }>({ x: 0, y: 0 });
 
   const registerDropZone = useCallback((id: string, info: DropZoneInfo) => {
@@ -90,6 +102,22 @@ export function DragProvider({ children, renderDragOverlay }: DragProviderProps)
     epicDropZonesRef.current.delete(id);
   }, []);
 
+  const registerCardPosition = useCallback((id: number, rect: DOMRect) => {
+    cardPositionsRef.current.set(id, rect);
+  }, []);
+
+  const unregisterCard = useCallback((id: number) => {
+    cardPositionsRef.current.delete(id);
+  }, []);
+
+  const getCardPositions = useCallback((): CardPosition[] => {
+    const positions: CardPosition[] = [];
+    cardPositionsRef.current.forEach((rect, id) => {
+      positions.push({ id, rect });
+    });
+    return positions;
+  }, []);
+
   const updatePointerPosition = useCallback((x: number, y: number) => {
     pointerPositionRef.current = { x, y };
     setDragPosition({ x, y });
@@ -187,6 +215,9 @@ export function DragProvider({ children, renderDragOverlay }: DragProviderProps)
         updatePointerPosition,
         startDrag,
         handleDrop,
+        registerCardPosition,
+        unregisterCard,
+        getCardPositions,
       }}
     >
       {children}
@@ -203,6 +234,8 @@ export function DragProvider({ children, renderDragOverlay }: DragProviderProps)
             pointerEvents: 'none',
             transform: 'scale(1.02)',
             boxShadow: '0 10px 30px rgba(0, 0, 0, 0.2)',
+            borderRadius: 8,
+            overflow: 'hidden',
           }}
         >
           {renderDragOverlay(draggedItem)}
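
The registry added above lets consumers read a snapshot of every visible card's DOMRect without querying the DOM mid-drag. As a minimal sketch (a hypothetical consumer hook, not part of this package), built only on the getCardPositions() API shown in this diff:

import { useCallback } from 'react';
import { useDragContext } from './DragContext';

// Hypothetical helper: hit-test the cached card rects to find which card
// (if any) is under a given pointer position. Nothing here touches the DOM;
// it relies entirely on the positions registered by DraggableCard.
export function useCardUnderPointer() {
  const { getCardPositions } = useDragContext();

  return useCallback((pointerX: number, pointerY: number): number | null => {
    const hit = getCardPositions().find(({ rect }) =>
      pointerX >= rect.left && pointerX <= rect.right &&
      pointerY >= rect.top && pointerY <= rect.bottom
    );
    return hit ? hit.id : null;
  }, [getCardPositions]);
}
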
package/apps/dashboard/components/DraggableCard.tsx
CHANGED

@@ -1,6 +1,6 @@
 'use client';
 
-import { useRef } from 'react';
+import { useRef, useEffect } from 'react';
 import { motion, PanInfo } from 'framer-motion';
 import type { WorkItem } from '@/lib/db';
 import { useDragContext } from './DragContext';
@@ -12,10 +12,38 @@ interface DraggableCardProps {
 }
 
 export function DraggableCard({ item, children, disabled = false }: DraggableCardProps) {
-  const { startDrag, setDraggedItem, updatePointerPosition, handleDrop } = useDragContext();
+  const { startDrag, setDraggedItem, updatePointerPosition, handleDrop, registerCardPosition, unregisterCard } = useDragContext();
   const wasDraggingRef = useRef(false);
   const cardRef = useRef<HTMLDivElement>(null);
 
+  // Register card position for optimized reorder calculations
+  useEffect(() => {
+    if (disabled || !cardRef.current) return;
+
+    const updatePosition = () => {
+      if (cardRef.current) {
+        registerCardPosition(item.id, cardRef.current.getBoundingClientRect());
+      }
+    };
+
+    // Initial registration
+    updatePosition();
+
+    // Update on resize/layout changes
+    const resizeObserver = new ResizeObserver(updatePosition);
+    resizeObserver.observe(cardRef.current);
+
+    // Update on scroll (positions are viewport-relative)
+    const scrollHandler = () => updatePosition();
+    window.addEventListener('scroll', scrollHandler, true);
+
+    return () => {
+      unregisterCard(item.id);
+      resizeObserver.disconnect();
+      window.removeEventListener('scroll', scrollHandler, true);
+    };
+  }, [item.id, disabled, registerCardPosition, unregisterCard]);
+
   if (disabled) {
     return <>{children}</>;
   }
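
The effect above pairs a ResizeObserver with a capture-phase scroll listener so the cached rect stays viewport-accurate as the board resizes or scrolls. The same pattern can be factored into a standalone hook; the sketch below is illustrative only and assumes a register/unregister callback pair like the context API above:

import { useEffect, type RefObject } from 'react';

// Illustrative hook (not part of the package): keeps a registry entry in
// sync with an element's viewport-relative bounding rect.
export function useTrackedRect(
  ref: RefObject<HTMLElement>,
  id: number,
  register: (id: number, rect: DOMRect) => void,
  unregister: (id: number) => void,
) {
  useEffect(() => {
    const el = ref.current;
    if (!el) return;

    const update = () => register(id, el.getBoundingClientRect());
    update(); // initial measurement

    const observer = new ResizeObserver(update);
    observer.observe(el);
    // capture phase so scrolls inside nested containers are also observed
    window.addEventListener('scroll', update, true);

    return () => {
      unregister(id);
      observer.disconnect();
      window.removeEventListener('scroll', update, true);
    };
  }, [ref, id, register, unregister]);
}
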
package/apps/dashboard/components/KanbanBoard.tsx
CHANGED

@@ -1,6 +1,6 @@
 'use client';
 
-import { useState, useCallback, useRef, useEffect } from 'react';
+import { useState, useCallback, useRef, useEffect, useMemo } from 'react';
 import Link from 'next/link';
 import { useRouter } from 'next/navigation';
 import type { WorkItem, InFlightItem, KanbanGroup } from '@/lib/db';
@@ -257,26 +257,23 @@ interface EpicGroupProps {
 
 function EpicGroup({ epicId, epicTitle, items, isInFlight = false, isDraggable = true, onTitleSave, onStatusChange, onEpicAssign, onOrderChange }: EpicGroupProps) {
   const containerRef = useRef<HTMLDivElement>(null);
-  const { isDragging, draggedItem, activeEpicZone, registerEpicDropZone, unregisterEpicDropZone } = useDragContext();
+  const { isDragging, draggedItem, activeEpicZone, registerEpicDropZone, unregisterEpicDropZone, getCardPositions } = useDragContext();
+
+  // Get item IDs in this epic for filtering
+  const itemIds = useMemo(() => new Set(items.map(item => item.id)), [items]);
 
   // Handle reorder within this epic - calculate new display_order based on pointer Y
   const handleEpicReorder = useCallback(async (itemId: number, pointerY: number) => {
-    if (!onOrderChange
-
-    //
-    const
-    const cardPositions
-
-
-
-
-
-      cardPositions.push({
-        id,
-        midY: (rect.top + rect.bottom) / 2,
-      });
-    }
-    });
+    if (!onOrderChange) return;
+
+    // Use cached card positions from registry, filtered to this epic's items
+    const allPositions = getCardPositions();
+    const cardPositions = allPositions
+      .filter(pos => itemIds.has(pos.id) && pos.id !== itemId)
+      .map(pos => ({
+        id: pos.id,
+        midY: (pos.rect.top + pos.rect.bottom) / 2,
+      }));
 
     // Skip reorder if this is the only item in the epic (no other cards to reorder against)
     if (cardPositions.length === 0) {
@@ -306,7 +303,7 @@ function EpicGroup({ epicId, epicTitle, items, isInFlight = false, isDraggable =
       // Show user-friendly error notification
      alert('Failed to reorder item. Please try again.');
     }
-  }, [onOrderChange]);
+  }, [onOrderChange, getCardPositions, itemIds]);
 
   // Register as epic drop zone
   useEffect(() => {
@@ -414,6 +411,78 @@ function KanbanColumn({ title, children, count }: KanbanColumnProps) {
   );
 }
 
+// Wrapper component that handles backlog reorder with access to drag context
+interface BacklogDropZoneWrapperProps {
+  backlog: Map<string, KanbanGroup>;
+  onStatusChange?: (id: number, newStatus: string) => Promise<void | { success: boolean; notFound?: boolean }>;
+  onOrderChange?: (id: number, newOrder: number) => Promise<void>;
+  children: React.ReactNode;
+}
+
+function BacklogDropZoneWrapper({ backlog, onStatusChange, onOrderChange, children }: BacklogDropZoneWrapperProps) {
+  const { getCardPositions } = useDragContext();
+
+  // Get all backlog item IDs for filtering positions
+  const backlogItemIds = useMemo(() => {
+    const ids = new Set<number>();
+    for (const group of backlog.values()) {
+      for (const item of group.items) {
+        ids.add(item.id);
+      }
+    }
+    return ids;
+  }, [backlog]);
+
+  // Handle reorder within backlog - calculate new display_order based on pointer Y
+  const handleBacklogReorder = useCallback(async (itemId: number, pointerY: number) => {
+    if (!onOrderChange) return;
+
+    // Use cached card positions from registry, filtered to backlog items
+    const allPositions = getCardPositions();
+    const cardPositions = allPositions
+      .filter(pos => backlogItemIds.has(pos.id) && pos.id !== itemId)
+      .map(pos => ({
+        id: pos.id,
+        midY: (pos.rect.top + pos.rect.bottom) / 2,
+      }));
+
+    // Sort by Y position
+    cardPositions.sort((a, b) => a.midY - b.midY);
+
+    // Find insertion index based on pointer Y
+    let insertIndex = cardPositions.length;
+    for (let i = 0; i < cardPositions.length; i++) {
+      if (pointerY < cardPositions[i].midY) {
+        insertIndex = i;
+        break;
+      }
+    }
+
+    // Calculate new display_order
+    // Use index * 10 to leave room for future insertions
+    const newOrder = insertIndex * 10;
+
+    await onOrderChange(itemId, newOrder);
+  }, [onOrderChange, getCardPositions, backlogItemIds]);
+
+  return (
+    <DropZone
+      targetStatus="backlog"
+      onDrop={async (itemId, newStatus) => {
+        if (onStatusChange) await onStatusChange(itemId, newStatus);
+      }}
+      onReorder={handleBacklogReorder}
+      allowReorder={true}
+      className="rounded-lg p-2 -m-2 min-h-[100px]"
+      highlightClassName="ring-2 ring-amber-400 bg-amber-100/50 dark:bg-amber-900/30"
+      reorderHighlightClassName="ring-2 ring-purple-400 bg-purple-100/50 dark:bg-purple-900/30"
+      data-testid="backlog-drop-zone"
+    >
+      {children}
+    </DropZone>
+  );
+}
+
 interface KanbanBoardProps {
   inFlight: InFlightItem[];
   backlog: Map<string, KanbanGroup>;
@@ -432,7 +501,6 @@ interface KanbanBoardProps {
 export function KanbanBoard({ inFlight, backlog, done, onTitleSave, onStatusChange, onOrderChange, onEpicAssign, onUndo, onRedo, canUndo, canRedo }: KanbanBoardProps) {
   const backlogCount = inFlight.length + Array.from(backlog.values()).reduce((sum, g) => sum + g.items.length, 0);
   const doneCount = Array.from(done.values()).reduce((sum, g) => sum + g.items.length, 0);
-  const backlogContainerRef = useRef<HTMLDivElement>(null);
 
   // Keyboard shortcuts for undo/redo (Cmd+Z / Cmd+Shift+Z)
   useEffect(() => {
@@ -475,59 +543,6 @@ export function KanbanBoard({ inFlight, backlog, done, onTitleSave, onStatusChan
     }
   }
 
-  // Get flat list of all backlog items for reordering
-  const getAllBacklogItems = useCallback((): WorkItem[] => {
-    const items: WorkItem[] = [];
-    for (const group of backlog.values()) {
-      items.push(...group.items);
-    }
-    return items;
-  }, [backlog]);
-
-  // Handle reorder within backlog - calculate new display_order based on pointer Y
-  const handleBacklogReorder = useCallback(async (itemId: number, pointerY: number) => {
-    if (!onOrderChange || !backlogContainerRef.current) return;
-
-    const items = getAllBacklogItems();
-    const draggedItem = items.find(item => item.id === itemId);
-    if (!draggedItem) return;
-
-    // Find all card elements in the backlog container
-    const cardElements = backlogContainerRef.current.querySelectorAll('[data-item-id]');
-    const cardPositions: { id: number; top: number; bottom: number; midY: number }[] = [];
-
-    cardElements.forEach((el) => {
-      const id = parseInt(el.getAttribute('data-item-id') || '0', 10);
-      if (id !== itemId) {
-        const rect = el.getBoundingClientRect();
-        cardPositions.push({
-          id,
-          top: rect.top,
-          bottom: rect.bottom,
-          midY: (rect.top + rect.bottom) / 2,
-        });
-      }
-    });
-
-    // Sort by Y position
-    cardPositions.sort((a, b) => a.midY - b.midY);
-
-    // Find insertion index based on pointer Y
-    let insertIndex = cardPositions.length;
-    for (let i = 0; i < cardPositions.length; i++) {
-      if (pointerY < cardPositions[i].midY) {
-        insertIndex = i;
-        break;
-      }
-    }
-
-    // Calculate new display_order
-    // Use index * 10 to leave room for future insertions
-    const newOrder = insertIndex * 10;
-
-    await onOrderChange(itemId, newOrder);
-  }, [getAllBacklogItems, onOrderChange]);
-
   // Render function for the drag overlay
   const renderDragOverlay = useCallback((item: WorkItem) => {
     // Find epic title if this is an in-flight item
@@ -595,19 +610,12 @@ export function KanbanBoard({ inFlight, backlog, done, onTitleSave, onStatusChan
       )}
 
       {/* Backlog Section - Drop Zone with Reordering */}
-      <
-
-
-
-        }}
-        onReorder={handleBacklogReorder}
-        allowReorder={true}
-        className="rounded-lg p-2 -m-2 min-h-[100px]"
-        highlightClassName="ring-2 ring-amber-400 bg-amber-100/50 dark:bg-amber-900/30"
-        reorderHighlightClassName="ring-2 ring-purple-400 bg-purple-100/50 dark:bg-purple-900/30"
-        data-testid="backlog-drop-zone"
+      <BacklogDropZoneWrapper
+        backlog={backlog}
+        onStatusChange={onStatusChange}
+        onOrderChange={onOrderChange}
       >
-        <div
+        <div>
           {/* Grouped Backlog Items */}
           {Array.from(backlog.entries()).map(([key, group]) => (
             <EpicGroup
@@ -627,7 +635,7 @@ export function KanbanBoard({ inFlight, backlog, done, onTitleSave, onStatusChan
             <p className="text-sm text-zinc-500 text-center py-4">Drop items here for backlog</p>
           )}
         </div>
-      </
+      </BacklogDropZoneWrapper>
 
       {backlogCount === 0 && inFlight.length === 0 && (
         <p className="text-sm text-zinc-500 text-center py-4">No items in backlog</p>
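
Both reorder paths (per-epic and backlog-wide) share the same midpoint rule: sort the other cards by vertical midpoint, take the index of the first midpoint below the pointer, and multiply by 10 to get the new display_order. A small self-contained sketch of that arithmetic (hypothetical helper, shown only to illustrate the calculation):

interface MidpointEntry { id: number; midY: number }

// Returns the display_order a dragged card should receive when dropped at pointerY,
// given the vertical midpoints of the other cards in the same zone.
function computeNewOrder(others: MidpointEntry[], pointerY: number): number {
  const sorted = [...others].sort((a, b) => a.midY - b.midY);
  let insertIndex = sorted.length;        // default: insert after the last card
  for (let i = 0; i < sorted.length; i++) {
    if (pointerY < sorted[i].midY) {      // pointer is above this card's midpoint
      insertIndex = i;
      break;
    }
  }
  return insertIndex * 10;                // spaced by 10 to leave room for later insertions
}

// Example: cards with midpoints at y = 100, 200, 300 and a drop at y = 250
// gives insertIndex = 2, so the new display_order is 20.
console.log(computeNewOrder([{ id: 1, midY: 100 }, { id: 2, midY: 200 }, { id: 3, midY: 300 }], 250));
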
package/hooks/post-checkout
CHANGED
@@ -8,6 +8,7 @@
  */
 
 const { importAll } = require('jettypod/lib/db-import');
+const { walCheckpoint } = require('jettypod/lib/database');
 const { execSync } = require('child_process');
 
 (async () => {
@@ -80,7 +81,16 @@ const { execSync } = require('child_process');
     }
   }
 
-  // SECOND:
+  // SECOND: Checkpoint WAL before importing to prevent corruption
+  // This flushes any pending writes to the main database file
+  try {
+    await walCheckpoint();
+  } catch (err) {
+    // Checkpoint failure shouldn't block - just log and continue
+    console.error('Post-checkout hook warning: WAL checkpoint failed:', err.message);
+  }
+
+  // THIRD: Import database snapshots
   try {
     await importAll();
     process.exit(0);
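
Note that walCheckpoint (added in lib/database.js below) resolves with a result object rather than rejecting, so the try/catch above is purely defensive. A sketch of inspecting that result explicitly, assuming the { success, pagesWritten, reason } shape documented in the database.js diff:

import { walCheckpoint } from 'jettypod/lib/database';

// Illustrative only: log the checkpoint outcome before importing snapshots,
// mirroring the hook's "log and continue" behaviour on failure.
async function checkpointBeforeImport(): Promise<void> {
  const result = await walCheckpoint();
  if (result.success) {
    console.log(`WAL checkpoint flushed ${result.pagesWritten} pages`);
  } else {
    // Not fatal: the snapshot import still runs afterwards.
    console.error(`WAL checkpoint skipped: ${result.reason ?? 'unknown reason'}`);
  }
}
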
package/jettypod.js
CHANGED
@@ -2801,7 +2801,8 @@ Quick commands:
   'stable-mode': ['speed_mode_complete'],
   'production-mode': ['stable_mode_complete'],
   'feature-planning': [],
-  'epic-planning': []
+  'epic-planning': [],
+  'chore-planning': []
 };
 
 // Validate skill name
package/lib/database.js
CHANGED
@@ -270,6 +270,36 @@ function initSchema() {
   });
 }
 
+/**
+ * Force a WAL checkpoint to flush all pending writes to the main database file
+ * Should be called before git checkout/merge operations to prevent WAL corruption
+ * @param {sqlite3.Database} [database] - Database connection (uses singleton if not provided)
+ * @returns {Promise<{success: boolean, pagesWritten: number}>} Checkpoint result
+ */
+function walCheckpoint(database) {
+  const db = database || (typeof getDb === 'function' ? getDb() : null);
+  if (!db) {
+    return Promise.resolve({ success: false, pagesWritten: 0, reason: 'No database connection' });
+  }
+
+  return new Promise((resolve) => {
+    // TRUNCATE mode: checkpoint and truncate the WAL file to zero bytes
+    db.get('PRAGMA wal_checkpoint(TRUNCATE)', [], (err, row) => {
+      if (err) {
+        // Don't reject - checkpoint failure shouldn't block operations
+        resolve({ success: false, pagesWritten: 0, reason: err.message });
+        return;
+      }
+
+      // row contains: busy (0=success), log (pages in WAL), checkpointed (pages written)
+      const success = row && row.busy === 0;
+      const pagesWritten = row ? row.checkpointed : 0;
+
+      resolve({ success, pagesWritten });
+    });
+  });
+}
+
 /**
  * Check database file integrity using SQLite's built-in integrity check
  * @param {sqlite3.Database} database - Database connection to check
@@ -390,27 +420,151 @@ async function waitForMigrations() {
   }
 }
 
+/**
+ * Attempt to recover work.db from JSON snapshots
+ * Used when database is missing or corrupted
+ * @returns {Promise<{recovered: boolean, itemCount: number}>} Recovery result
+ */
+async function recoverFromSnapshots() {
+  const snapshotsDir = path.join(getJettypodDir(), 'snapshots');
+  const jsonPath = path.join(snapshotsDir, 'work.json');
+
+  // Check if snapshots exist
+  if (!fs.existsSync(jsonPath)) {
+    return { recovered: false, itemCount: 0, reason: 'No snapshot file found' };
+  }
+
+  // Read and parse snapshot
+  let data;
+  try {
+    const jsonContent = fs.readFileSync(jsonPath, 'utf8');
+    data = JSON.parse(jsonContent);
+  } catch (err) {
+    return { recovered: false, itemCount: 0, reason: `Failed to read snapshot: ${err.message}` };
+  }
+
+  // Close existing connection and delete corrupted file
+  await closeDb();
+  const dbFilePath = getDbPath();
+
+  // Remove corrupted database files (including WAL files)
+  const filesToRemove = [dbFilePath, `${dbFilePath}-wal`, `${dbFilePath}-shm`];
+  for (const file of filesToRemove) {
+    if (fs.existsSync(file)) {
+      fs.unlinkSync(file);
+    }
+  }
+
+  // Reset singleton to force fresh connection
+  resetDb();
+
+  // Create fresh database (this will create schema)
+  const database = getDb();
+  await waitForMigrations();
+
+  // Import data from snapshot
+  const tableNames = Object.keys(data);
+  let totalItems = 0;
+
+  for (const tableName of tableNames) {
+    const rows = data[tableName];
+    if (!rows || rows.length === 0) continue;
+
+    // Get column names from first row
+    const columns = Object.keys(rows[0]);
+    const placeholders = columns.map(() => '?').join(', ');
+    const columnNames = columns.join(', ');
+    const insertSql = `INSERT INTO ${tableName} (${columnNames}) VALUES (${placeholders})`;
+
+    for (const row of rows) {
+      const values = columns.map(col => row[col]);
+      await new Promise((resolve, reject) => {
+        database.run(insertSql, values, (err) => {
+          if (err) reject(err);
+          else resolve();
+        });
+      });
+      totalItems++;
+    }
+  }
+
+  return { recovered: true, itemCount: totalItems };
+}
+
 /**
  * Run startup validation checks on the database
  * Call this early in application startup to detect corruption before operations fail
- *
- * @
+ * Automatically attempts recovery from snapshots if corruption is detected
+ * @returns {Promise<void>} Resolves if database is healthy (or recovered), rejects if unrecoverable
+ * @throws {Error} If database is corrupted and recovery fails
  */
 async function validateOnStartup() {
-
-
+  // Check if database file is missing
+  const dbFilePath = getDbPath();
+  const dbMissing = !fs.existsSync(dbFilePath);
+
+  if (dbMissing) {
+    // Try to recover from snapshots
+    console.log('⚠️ Database file missing, attempting recovery from snapshots...');
+    const result = await recoverFromSnapshots();
+    if (result.recovered) {
+      console.log(`✅ Recovered ${result.itemCount} items from snapshots`);
+      return;
+    } else {
+      // No snapshots - just create fresh DB (getDb will do this)
+      console.log('ℹ️ No snapshots found, creating fresh database');
+      getDb();
+      await waitForMigrations();
+      return;
+    }
+  }
 
-  //
+  // Database exists - check integrity
+  let database;
+  try {
+    database = getDb();
+    await waitForMigrations();
+  } catch (err) {
+    // Failed to even open the database - try recovery
+    console.log('⚠️ Database failed to open, attempting recovery from snapshots...');
+    const result = await recoverFromSnapshots();
+    if (result.recovered) {
+      console.log(`✅ Recovered ${result.itemCount} items from snapshots`);
+      return;
+    }
+    throw new Error(
+      `Database failed to open and recovery failed.\n` +
+      `Original error: ${err.message}\n` +
+      `Recovery error: ${result.reason}\n\n` +
+      `Manual recovery options:\n` +
+      `  1. Restore from backup: jettypod work restore-backup latest\n` +
+      `  2. Check ~/.jettypod-backups/ for global backups`
+    );
  }
+
+  // Check file integrity
   const integrity = await checkIntegrity(database);
   if (!integrity.ok) {
+    console.log('⚠️ Database corruption detected, attempting recovery from snapshots...');
+    const result = await recoverFromSnapshots();
+    if (result.recovered) {
+      console.log(`✅ Recovered ${result.itemCount} items from snapshots`);
+      // Verify the recovered database
+      const newDb = getDb();
+      const newIntegrity = await checkIntegrity(newDb);
+      if (newIntegrity.ok) {
+        return;
+      }
+    }
+
+    // Recovery failed or still corrupted
     const errorList = integrity.errors.join('\n - ');
     throw new Error(
-      `Database integrity check failed
+      `Database integrity check failed and automatic recovery was unsuccessful.\n` +
       `Errors found:\n - ${errorList}\n\n` +
-      `
+      `Manual recovery options:\n` +
       `  1. Restore from backup: jettypod work restore-backup latest\n` +
-      `  2.
-      `  3. Check ~/.jettypod-backups/ for global backups`
+      `  2. Check ~/.jettypod-backups/ for global backups`
     );
   }
 
@@ -427,7 +581,9 @@ module.exports = {
   waitForMigrations,
   validateSchema,
   checkIntegrity,
+  walCheckpoint,
   validateOnStartup,
+  recoverFromSnapshots,
   dbPath, // Deprecated: use getDbPath() for dynamic path
   jettypodDir // Deprecated: use getJettypodDir() for dynamic path
 };
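
A sketch of how application startup might consume the exported validateOnStartup; the wiring here is hypothetical, but the contract follows the diff: resolve when the database is healthy, freshly created, or recovered from snapshots, and throw only when recovery fails:

import { validateOnStartup } from 'jettypod/lib/database';

// Hypothetical startup wiring (not part of the package).
async function main(): Promise<void> {
  try {
    await validateOnStartup();
  } catch (err) {
    // Unrecoverable corruption: surface the manual recovery instructions and stop.
    console.error((err as Error).message);
    process.exit(1);
  }
  // ...continue with normal command handling
}

main();
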
package/lib/db-export.js
CHANGED
@@ -83,17 +83,16 @@ async function exportWorkDb() {
   try {
     fs.writeFileSync(outputPath, JSON.stringify(data, null, 2), 'utf8');
   } catch (err) {
-    //
-
+    // Throw error to block commit - snapshot backup is critical
+    let message = 'Failed to export work.json';
     if (err.code === 'EACCES') {
-
+      message += ': Permission denied - check directory permissions';
     } else if (err.code === 'ENOSPC') {
-
+      message += ': No space left on device';
     } else {
-
+      message += `: ${err.message}`;
     }
-
-    // Return path anyway so commit can continue
+    throw new Error(message);
   }
 
   return outputPath;
@@ -115,15 +114,16 @@ async function exportDatabaseDb() {
   try {
     fs.writeFileSync(outputPath, JSON.stringify({}, null, 2), 'utf8');
   } catch (err) {
-
+    // Throw error to block commit - snapshot backup is critical
+    let message = 'Failed to export database.json';
     if (err.code === 'EACCES') {
-
+      message += ': Permission denied - check directory permissions';
     } else if (err.code === 'ENOSPC') {
-
+      message += ': No space left on device';
     } else {
-
+      message += `: ${err.message}`;
     }
-
+    throw new Error(message);
   }
   return outputPath;
 }
@@ -142,15 +142,18 @@ async function exportDatabaseDb() {
   try {
     fs.writeFileSync(outputPath, JSON.stringify(data, null, 2), 'utf8');
   } catch (writeErr) {
-
+    // Throw error to block commit - snapshot backup is critical
+    let message = 'Failed to export database.json';
     if (writeErr.code === 'EACCES') {
-
+      message += ': Permission denied - check directory permissions';
     } else if (writeErr.code === 'ENOSPC') {
-
+      message += ': No space left on device';
     } else {
-
+      message += `: ${writeErr.message}`;
     }
-
+    db.close();
+    reject(new Error(message));
+    return;
     }
 
     db.close((closeErr) => {
package/lib/git-hooks/pre-commit
CHANGED
@@ -55,23 +55,63 @@ if (!checkBranchRestriction()) {
   process.exit(1);
 }
 
-//
-
-
-
-
-}
+// Export database snapshots (runs for all commits where .jettypod exists)
+async function exportSnapshots() {
+  const jettypodDir = path.join(process.cwd(), '.jettypod');
+  if (!fs.existsSync(jettypodDir)) {
+    return; // No JettyPod directory, skip export
+  }
+
+  console.log('\n📸 Exporting database snapshots...\n');
 
-
+  try {
+    const { exportAll } = require('../db-export');
+    const paths = await exportAll();
 
-
-
-
+    // Stage the snapshot files
+    execSync(`git add "${paths.work}" "${paths.database}"`, {
+      stdio: ['pipe', 'pipe', 'pipe']
+    });
 
-
-
-
-
-
-
+    console.log('✅ Snapshots exported and staged\n');
+  } catch (err) {
+    console.error('');
+    console.error('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━');
+    console.error('❌ Snapshot export failed! Commit blocked.');
+    console.error('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━');
+    console.error('');
+    console.error(`Error: ${err.message}`);
+    console.error('');
+    console.error('This prevents data loss - your work items must be backed up.');
+    console.error('Check disk space and directory permissions.');
+    console.error('');
+    process.exit(1);
+  }
 }
+
+// Main async flow
+(async () => {
+  // Export snapshots first (before tests, so they're included in commit)
+  await exportSnapshots();
+
+  // Check if we're in a real project (not a test directory)
+  const packageJsonPath = path.join(process.cwd(), 'package.json');
+  if (!fs.existsSync(packageJsonPath)) {
+    // Skip tests in test directories
+    process.exit(0);
+  }
+
+  console.log('🧪 Running tests before commit...\n');
+
+  try {
+    // Run tests
+    execSync('npm test', { stdio: 'inherit' });
+
+    console.log('\n✅ Tests passed! Proceeding with commit.\n');
+    process.exit(0);
+  } catch (err) {
+    console.log('\n❌ Tests failed! Commit blocked.\n');
+    console.log('Fix the failing tests or use --no-verify to skip this check.\n');
+    process.exit(1);
+  }
+})();
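
The hook relies on exportAll() resolving to the paths of both snapshot files. That shape is inferred from the paths.work / paths.database usage above rather than stated anywhere in this diff; a typed sketch of the assumed contract:

// Assumed return shape of exportAll(), inferred from how the pre-commit hook
// consumes it; the actual type lives in lib/db-export.js.
interface ExportPaths {
  work: string;      // path to the exported work.json snapshot
  database: string;  // path to the exported database.json snapshot
}

// Hypothetical reuse of the same export step outside the hook, e.g. from a
// maintenance script that wants the snapshot paths without staging them.
async function exportSnapshotPaths(
  exportAll: () => Promise<ExportPaths>
): Promise<string[]> {
  const { work, database } = await exportAll();
  return [work, database];
}
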