openwriter 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bin/pad.js +64 -0
- package/dist/client/assets/index-DNJs7lC-.js +205 -0
- package/dist/client/assets/index-WweytMO1.css +1 -0
- package/dist/client/index.html +16 -0
- package/dist/server/compact.js +214 -0
- package/dist/server/documents.js +230 -0
- package/dist/server/export-html-template.js +109 -0
- package/dist/server/export-routes.js +96 -0
- package/dist/server/gdoc-import.js +200 -0
- package/dist/server/git-sync.js +272 -0
- package/dist/server/helpers.js +87 -0
- package/dist/server/image-upload.js +55 -0
- package/dist/server/index.js +315 -0
- package/dist/server/link-routes.js +116 -0
- package/dist/server/markdown-parse.js +405 -0
- package/dist/server/markdown-serialize.js +263 -0
- package/dist/server/markdown.js +6 -0
- package/dist/server/mcp-client.js +37 -0
- package/dist/server/mcp.js +457 -0
- package/dist/server/plugin-loader.js +36 -0
- package/dist/server/plugin-types.js +5 -0
- package/dist/server/state.js +749 -0
- package/dist/server/sync-routes.js +75 -0
- package/dist/server/text-edit.js +249 -0
- package/dist/server/version-routes.js +79 -0
- package/dist/server/versions.js +198 -0
- package/dist/server/workspace-routes.js +176 -0
- package/dist/server/workspace-tags.js +33 -0
- package/dist/server/workspace-tree.js +200 -0
- package/dist/server/workspace-types.js +38 -0
- package/dist/server/workspaces.js +257 -0
- package/dist/server/ws.js +211 -0
- package/package.json +88 -0
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Express routes for GitHub sync.
|
|
3
|
+
* Mounted in index.ts — follows version-routes.ts pattern.
|
|
4
|
+
*/
|
|
5
|
+
import { Router } from 'express';
|
|
6
|
+
import { getSyncStatus, getCapabilities, getPendingFiles, setupWithGh, setupWithPat, connectExisting, pushSync, } from './git-sync.js';
|
|
7
|
+
/**
 * Build the Express router for GitHub sync endpoints.
 *
 * @param {(status: unknown) => void} broadcastSyncStatus - Callback that pushes
 *   a fresh sync status to connected clients after setup/push operations.
 * @returns The configured Router (status, capabilities, pending, setup, push).
 */
export function createSyncRouter(broadcastSyncStatus) {
    const router = Router();

    // GET /api/sync/status — current sync state.
    router.get('/api/sync/status', async (_req, res) => {
        try {
            const status = await getSyncStatus();
            res.json(status);
        }
        catch (err) {
            res.status(500).json({ state: 'error', error: err.message });
        }
    });

    // GET /api/sync/capabilities — what sync methods are available.
    router.get('/api/sync/capabilities', async (_req, res) => {
        try {
            const capabilities = await getCapabilities();
            res.json(capabilities);
        }
        catch (err) {
            res.status(500).json({ error: err.message });
        }
    });

    // GET /api/sync/pending — files waiting to be pushed.
    router.get('/api/sync/pending', async (_req, res) => {
        try {
            const pending = await getPendingFiles();
            res.json(pending);
        }
        catch (err) {
            res.status(500).json({ error: err.message });
        }
    });

    // POST /api/sync/setup — configure sync via one of three methods.
    router.post('/api/sync/setup', async (req, res) => {
        try {
            const { method, repoName, remoteUrl, pat, isPrivate } = req.body;
            const name = repoName || 'openwriter-docs';
            const makePrivate = isPrivate !== false;
            switch (method) {
                case 'gh':
                    await setupWithGh(name, makePrivate);
                    break;
                case 'pat':
                    if (!pat) {
                        res.status(400).json({ error: 'PAT is required' });
                        return;
                    }
                    await setupWithPat(pat, name, makePrivate);
                    break;
                case 'connect':
                    if (!remoteUrl) {
                        res.status(400).json({ error: 'Remote URL is required' });
                        return;
                    }
                    await connectExisting(remoteUrl, pat);
                    break;
                default:
                    res.status(400).json({ error: 'Invalid method. Use: gh, pat, or connect' });
                    return;
            }
            // Successful setup: broadcast the new status to live clients too.
            const status = await getSyncStatus();
            broadcastSyncStatus(status);
            res.json({ success: true, status });
        }
        catch (err) {
            res.status(500).json({ error: err.message });
        }
    });

    // POST /api/sync/push — push pending changes to the remote.
    router.post('/api/sync/push', async (_req, res) => {
        try {
            res.json(await pushSync(broadcastSyncStatus));
        }
        catch (err) {
            res.status(500).json({ state: 'error', error: err.message });
        }
    });

    return router;
}
|
|
@@ -0,0 +1,249 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Text matching + content manipulation helpers for fine-grained agent edits.
|
|
3
|
+
* Works on TipTap JSON node content arrays (text nodes with marks).
|
|
4
|
+
*/
|
|
5
|
+
/**
 * Locate `searchText` within the concatenated text of a flat array of
 * TipTap text nodes. Non-text nodes (and text nodes with empty text)
 * contribute nothing to the search string but keep their array indices.
 *
 * @returns {{startIdx:number,startOffset:number,endIdx:number,endOffset:number}|null}
 *   Indices/offsets bracketing the first occurrence, or null when absent.
 */
function findTextInContent(content, searchText) {
    // Concatenate the text nodes, recording each node's span in the
    // combined string so matches can be mapped back to node coordinates.
    const spans = [];
    let combined = '';
    content.forEach((node, idx) => {
        if (node.type === 'text' && node.text) {
            spans.push({ idx, start: combined.length, end: combined.length + node.text.length });
            combined += node.text;
        }
    });
    const from = combined.indexOf(searchText);
    if (from < 0)
        return null;
    const to = from + searchText.length;
    let startIdx = -1;
    let startOffset = 0;
    let endIdx = -1;
    let endOffset = 0;
    for (const span of spans) {
        if (startIdx < 0 && from >= span.start && from < span.end) {
            startIdx = span.idx;
            startOffset = from - span.start;
        }
        if (to > span.start && to <= span.end) {
            endIdx = span.idx;
            endOffset = to - span.start;
            break;
        }
    }
    if (startIdx < 0 || endIdx < 0)
        return null;
    return { startIdx, startOffset, endIdx, endOffset };
}
|
|
42
|
+
/**
 * Deep clone a JSON-serializable node via a stringify/parse round trip.
 * Adequate here because TipTap JSON contains only plain objects,
 * arrays, strings, numbers, and booleans.
 */
function cloneNode(node) {
    const encoded = JSON.stringify(node);
    return JSON.parse(encoded);
}
|
|
48
|
+
/**
 * Split a text node at a character offset into a [before, after] pair.
 * A split at (or beyond) either boundary yields null for the empty side
 * and a clone of the whole node for the other. Marks, when present, are
 * deep-copied onto both halves.
 */
function splitTextNode(node, offset) {
    if (offset <= 0)
        return [null, cloneNode(node)];
    if (offset >= node.text.length)
        return [cloneNode(node), null];
    // Interior split: build each half with the same mark set.
    const makeHalf = (text) => {
        const half = { type: 'text', text };
        if (node.marks)
            half.marks = cloneNode(node.marks);
        return half;
    };
    return [makeHalf(node.text.slice(0, offset)), makeHalf(node.text.slice(offset))];
}
|
|
71
|
+
/**
 * Apply a single text edit to a node's content array.
 *
 * The edit locates `edit.find` in the concatenated text (first occurrence
 * only) and then either replaces it with `edit.replace`, or — when
 * `replace` is undefined — keeps the text and modifies its marks via
 * `edit.addMark` / `edit.removeMark`.
 *
 * Returns the modified content array plus the character range of the edit
 * (for inline decoration tracking), or null when `edit.find` is not found.
 * The input `content` array is never mutated; all emitted nodes are clones.
 */
function applySingleEdit(content, edit) {
    const match = findTextInContent(content, edit.find);
    if (!match)
        return null;
    const { startIdx, startOffset, endIdx, endOffset } = match;
    // Calculate character offsets for the edit range (for inline decorations).
    // Only text nodes are counted, matching findTextInContent's flat string.
    let charsBefore = 0;
    for (let i = 0; i < startIdx; i++) {
        if (content[i].type === 'text')
            charsBefore += content[i].text.length;
    }
    const editFrom = charsBefore + startOffset;
    // Build new content array with the edit applied
    const result = [];
    // Copy nodes before the match
    for (let i = 0; i < startIdx; i++) {
        result.push(cloneNode(content[i]));
    }
    // Handle the matched region
    if (startIdx === endIdx) {
        // Match is within a single text node
        const node = content[startIdx];
        const [beforePart] = splitTextNode(node, startOffset);
        const [, afterPart] = splitTextNode(node, endOffset);
        if (beforePart)
            result.push(beforePart);
        if (edit.replace !== undefined) {
            // Replace text; the replacement inherits the original node's marks.
            const replaced = {
                type: 'text',
                text: edit.replace,
            };
            if (node.marks)
                replaced.marks = cloneNode(node.marks);
            if (edit.addMark) {
                replaced.marks = replaced.marks || [];
                replaced.marks.push(cloneNode(edit.addMark));
            }
            if (edit.removeMark) {
                replaced.marks = (replaced.marks || []).filter((m) => m.type !== edit.removeMark);
            }
            // An empty replacement string means "delete" — emit no node.
            if (replaced.text)
                result.push(replaced);
        }
        else {
            // Keep text, modify marks
            const matched = {
                type: 'text',
                text: node.text.slice(startOffset, endOffset),
                marks: node.marks ? cloneNode(node.marks) : [],
            };
            if (edit.addMark) {
                matched.marks = matched.marks || [];
                matched.marks.push(cloneNode(edit.addMark));
            }
            if (edit.removeMark) {
                matched.marks = (matched.marks || []).filter((m) => m.type !== edit.removeMark);
            }
            result.push(matched);
        }
        if (afterPart)
            result.push(afterPart);
    }
    else {
        // Match spans multiple text nodes
        // Start node: text after startOffset
        const startNode = content[startIdx];
        const [beforeStart] = splitTextNode(startNode, startOffset);
        if (beforeStart)
            result.push(beforeStart);
        if (edit.replace !== undefined) {
            // Replace entire matched span with new text; middle nodes are
            // dropped entirely and the replacement takes the START node's marks.
            const replaced = {
                type: 'text',
                text: edit.replace,
            };
            if (startNode.marks)
                replaced.marks = cloneNode(startNode.marks);
            if (edit.addMark) {
                replaced.marks = replaced.marks || [];
                replaced.marks.push(cloneNode(edit.addMark));
            }
            if (edit.removeMark) {
                replaced.marks = (replaced.marks || []).filter((m) => m.type !== edit.removeMark);
            }
            if (replaced.text)
                result.push(replaced);
        }
        else {
            // Keep text, modify marks on each matched node
            const startText = startNode.text.slice(startOffset);
            if (startText) {
                const modified = {
                    type: 'text',
                    text: startText,
                    marks: startNode.marks ? cloneNode(startNode.marks) : [],
                };
                if (edit.addMark) {
                    modified.marks = modified.marks || [];
                    modified.marks.push(cloneNode(edit.addMark));
                }
                if (edit.removeMark) {
                    modified.marks = (modified.marks || []).filter((m) => m.type !== edit.removeMark);
                }
                result.push(modified);
            }
            // Middle nodes: fully inside match
            for (let i = startIdx + 1; i < endIdx; i++) {
                const midNode = cloneNode(content[i]);
                if (edit.addMark) {
                    midNode.marks = midNode.marks || [];
                    midNode.marks.push(cloneNode(edit.addMark));
                }
                if (edit.removeMark) {
                    midNode.marks = (midNode.marks || []).filter((m) => m.type !== edit.removeMark);
                }
                result.push(midNode);
            }
            // End node: text before endOffset
            const endNode = content[endIdx];
            const endText = endNode.text.slice(0, endOffset);
            if (endText) {
                const modified = {
                    type: 'text',
                    text: endText,
                    marks: endNode.marks ? cloneNode(endNode.marks) : [],
                };
                if (edit.addMark) {
                    modified.marks = modified.marks || [];
                    modified.marks.push(cloneNode(edit.addMark));
                }
                if (edit.removeMark) {
                    modified.marks = (modified.marks || []).filter((m) => m.type !== edit.removeMark);
                }
                result.push(modified);
            }
        }
        // End node remainder (text after endOffset survives in both branches)
        const endNode = content[endIdx];
        const [, afterEnd] = splitTextNode(endNode, endOffset);
        if (afterEnd)
            result.push(afterEnd);
    }
    // Copy nodes after the match
    for (let i = endIdx + 1; i < content.length; i++) {
        result.push(cloneNode(content[i]));
    }
    // Calculate the edit end position in the new content
    const editTo = editFrom + (edit.replace !== undefined ? edit.replace.length : edit.find.length);
    // Mark-only additions are surfaced as 'insert'; everything else as 'rewrite'.
    const editType = edit.replace !== undefined ? 'rewrite' : (edit.addMark ? 'insert' : 'rewrite');
    return { content: result, from: editFrom, to: editTo, type: editType };
}
|
|
228
|
+
/**
 * Apply multiple text edits to a TipTap node (block-level node with inline content).
 * Edits are applied sequentially against the evolving content, so a later
 * edit can match text produced by an earlier one; an edit whose `find` text
 * is absent is silently skipped rather than failing the batch.
 *
 * Returns the modified node and the array of inline edit ranges for
 * decorations, or null when the node has no content array or no edit matched.
 *
 * NOTE(review): the initial filter keeps only `text` children, so any
 * non-text inline content (e.g. hard breaks, inline images) is dropped from
 * the returned node — presumably deliberate to keep offsets consistent with
 * applySingleEdit's text-only counting, but verify against callers.
 */
export function applyTextEditsToNode(node, edits) {
    if (!node.content || !Array.isArray(node.content))
        return null;
    let currentContent = node.content.filter((n) => n.type === 'text');
    const textEditRanges = [];
    for (const edit of edits) {
        const result = applySingleEdit(currentContent, edit);
        if (!result)
            continue;
        currentContent = result.content;
        textEditRanges.push({ from: result.from, to: result.to, type: result.type });
    }
    // No edit matched: report failure so the caller keeps the original node.
    if (textEditRanges.length === 0)
        return null;
    const modifiedNode = cloneNode(node);
    modifiedNode.content = currentContent;
    return { node: modifiedNode, textEdits: textEditRanges };
}
|
|
@@ -0,0 +1,79 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Express routes for document version history.
|
|
3
|
+
* Mounted in index.ts to keep the main file lean.
|
|
4
|
+
*/
|
|
5
|
+
import { Router } from 'express';
|
|
6
|
+
import { forceSnapshot, listVersions, getVersionContent, restoreVersion } from './versions.js';
|
|
7
|
+
import { markAllNodesAsPending } from './state.js';
|
|
8
|
+
/**
 * Build the Express router for document version-history endpoints.
 *
 * @param s - Server state facade: getDocId(), getFilePath(), updateDocument(),
 *   save(), broadcastDocumentSwitched().
 * @returns The configured Router (list, get, restore, snapshot).
 */
export function createVersionRouter(s) {
    const router = Router();

    // List versions for the active document (empty list when none is open).
    router.get('/api/versions', (_req, res) => {
        const activeDoc = s.getDocId();
        res.json(activeDoc ? listVersions(activeDoc) : []);
    });

    // Fetch the raw content of one version by timestamp.
    router.get('/api/versions/:ts', (req, res) => {
        const activeDoc = s.getDocId();
        const timestamp = parseInt(req.params.ts, 10);
        if (!activeDoc || isNaN(timestamp)) {
            res.status(400).json({ error: 'Invalid request' });
            return;
        }
        const content = getVersionContent(activeDoc, timestamp);
        if (!content) {
            res.status(404).json({ error: 'Version not found' });
            return;
        }
        res.json({ content });
    });

    // Restore a version, optionally flagging every node for review.
    router.post('/api/versions/:ts/restore', (req, res) => {
        const activeDoc = s.getDocId();
        const timestamp = parseInt(req.params.ts, 10);
        const mode = req.body.mode;
        if (!activeDoc || isNaN(timestamp) || !mode) {
            res.status(400).json({ error: 'docId, ts, and mode are required' });
            return;
        }
        // Safety net: snapshot the current state so the restore is undoable.
        try {
            forceSnapshot(activeDoc, s.getFilePath());
        }
        catch { /* best effort */ }
        const restored = restoreVersion(activeDoc, timestamp);
        if (!restored) {
            res.status(404).json({ error: 'Version not found' });
            return;
        }
        if (mode === 'review') {
            markAllNodesAsPending(restored.document, 'rewrite');
        }
        s.updateDocument(restored.document);
        s.save();
        const filePath = s.getFilePath();
        const filename = filePath ? filePath.split(/[/\\]/).pop() || '' : '';
        s.broadcastDocumentSwitched(restored.document, restored.title, filename);
        res.json({ success: true, mode });
    });

    // Manual checkpoint of the current file.
    router.post('/api/versions/snapshot', (_req, res) => {
        const activeDoc = s.getDocId();
        if (!activeDoc) {
            res.status(400).json({ error: 'No active document' });
            return;
        }
        try {
            forceSnapshot(activeDoc, s.getFilePath());
            res.json({ success: true });
        }
        catch (err) {
            res.status(500).json({ error: err.message });
        }
    });

    return router;
}
|
|
@@ -0,0 +1,198 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Document version history for OpenWriter.
|
|
3
|
+
* Automatic file-level snapshots so any document state can be recovered.
|
|
4
|
+
* Storage: ~/.openwriter/.versions/{docId}/{timestamp}.md
|
|
5
|
+
*/
|
|
6
|
+
import { existsSync, mkdirSync, readFileSync, writeFileSync, readdirSync, unlinkSync, statSync } from 'fs';
|
|
7
|
+
import { join } from 'path';
|
|
8
|
+
import { createHash, randomBytes } from 'crypto';
|
|
9
|
+
import { VERSIONS_DIR } from './helpers.js';
|
|
10
|
+
import { markdownToTiptap } from './markdown.js';
|
|
11
|
+
// ============================================================================
|
|
12
|
+
// DEDUP STATE
|
|
13
|
+
// ============================================================================
|
|
14
|
+
const lastSnapshot = new Map();
|
|
15
|
+
const MIN_INTERVAL_MS = 30_000; // 30 seconds between snapshots of same content
|
|
16
|
+
/**
 * Short content fingerprint: the first 16 hex characters of the
 * SHA-256 digest of the markdown text. Used only for dedup, so a
 * truncated digest is sufficient.
 */
function contentHash(markdown) {
    const digest = createHash('sha256').update(markdown).digest('hex');
    return digest.slice(0, 16);
}
|
|
19
|
+
// ============================================================================
|
|
20
|
+
// DOC ID
|
|
21
|
+
// ============================================================================
|
|
22
|
+
/**
 * Ensure metadata has a docId. Assigns an 8-char hex ID if missing.
 * Returns the docId (existing or newly assigned); `metadata` is mutated
 * in place when a new ID is created.
 *
 * Improvement: IDs come from crypto.randomBytes rather than hashing
 * Date.now() + Math.random() — cryptographically secure randomness with
 * the same 8-hex-char format, and no avoidable collision risk from
 * Math.random()'s non-secure PRNG.
 *
 * @param {object} metadata - Document metadata object (mutated in place).
 * @returns {string} The document ID.
 */
export function ensureDocId(metadata) {
    if (metadata.docId && typeof metadata.docId === 'string') {
        return metadata.docId;
    }
    const id = randomBytes(4).toString('hex'); // 4 random bytes -> 8 hex chars
    metadata.docId = id;
    return id;
}
|
|
37
|
+
// ============================================================================
|
|
38
|
+
// SNAPSHOT
|
|
39
|
+
// ============================================================================
|
|
40
|
+
/** Absolute directory that stores all snapshots for one document. */
function docDir(docId) {
    const dir = join(VERSIONS_DIR, docId);
    return dir;
}
|
|
43
|
+
/** Create the per-document snapshot directory if it does not yet exist. */
function ensureDocDir(docId) {
    const target = docDir(docId);
    if (existsSync(target))
        return;
    mkdirSync(target, { recursive: true });
}
|
|
48
|
+
/**
 * Populate the in-memory dedup map for `docId` from the newest snapshot
 * on disk, so a server restart does not immediately write a duplicate
 * snapshot. No-op when already seeded or when no snapshots exist.
 */
function seedLastSnapshot(docId) {
    if (lastSnapshot.has(docId))
        return;
    const dir = docDir(docId);
    if (!existsSync(dir))
        return;
    // Find the most recent timestamp among the .md snapshot files.
    let newest = -1;
    for (const name of readdirSync(dir)) {
        if (!name.endsWith('.md'))
            continue;
        const ts = parseInt(name.replace('.md', ''), 10);
        if (!Number.isNaN(ts) && ts > newest)
            newest = ts;
    }
    if (newest === -1)
        return;
    const content = readFileSync(join(dir, `${newest}.md`), 'utf-8');
    lastSnapshot.set(docId, { time: newest, hash: contentHash(content) });
}
|
|
69
|
+
/**
 * Snapshot after every writeToDisk() — deduplicated and throttled.
 * Skips when the content hash matches the previous snapshot, or when the
 * previous snapshot is younger than MIN_INTERVAL_MS (even if the content
 * changed). Called in best-effort mode (caller wraps in try/catch).
 */
export function snapshotIfNeeded(docId, filePath) {
    if (!docId || !filePath || !existsSync(filePath))
        return;
    seedLastSnapshot(docId);
    const markdown = readFileSync(filePath, 'utf-8');
    const hash = contentHash(markdown);
    const now = Date.now();
    const previous = lastSnapshot.get(docId);
    // Unchanged content, or still inside the throttle window: do nothing.
    if (previous && (hash === previous.hash || now - previous.time < MIN_INTERVAL_MS)) {
        return;
    }
    ensureDocDir(docId);
    writeFileSync(join(docDir(docId), `${now}.md`), markdown, 'utf-8');
    lastSnapshot.set(docId, { time: now, hash });
    pruneVersions(docId);
}
|
|
94
|
+
/**
 * Write a snapshot unconditionally — no dedup, no throttle. Used as a
 * safety net immediately before a version restore.
 */
export function forceSnapshot(docId, filePath) {
    if (!docId || !filePath || !existsSync(filePath))
        return;
    const now = Date.now();
    const markdown = readFileSync(filePath, 'utf-8');
    ensureDocDir(docId);
    writeFileSync(join(docDir(docId), `${now}.md`), markdown, 'utf-8');
    lastSnapshot.set(docId, { time: now, hash: contentHash(markdown) });
}
|
|
107
|
+
// ============================================================================
|
|
108
|
+
// LIST / GET
|
|
109
|
+
// ============================================================================
|
|
110
|
+
/**
 * List snapshot metadata for a docId, sorted newest-first.
 * Files that are not timestamp-named .md snapshots, or that cannot be
 * read, are silently skipped.
 */
export function listVersions(docId) {
    if (!docId)
        return [];
    const dir = docDir(docId);
    if (!existsSync(dir))
        return [];
    const versions = [];
    for (const name of readdirSync(dir)) {
        if (!name.endsWith('.md'))
            continue;
        const ts = parseInt(name.replace('.md', ''), 10);
        if (Number.isNaN(ts))
            continue;
        try {
            const fullPath = join(dir, name);
            const stat = statSync(fullPath);
            const text = readFileSync(fullPath, 'utf-8').trim();
            versions.push({
                timestamp: ts,
                date: new Date(ts).toISOString(),
                size: stat.size,
                wordCount: text ? text.split(/\s+/).length : 0,
            });
        }
        catch { /* skip unreadable snapshot */ }
    }
    return versions.sort((a, b) => b.timestamp - a.timestamp);
}
|
|
144
|
+
/**
 * Read the raw markdown content of a specific version, or null when the
 * docId is falsy or the snapshot file does not exist.
 */
export function getVersionContent(docId, ts) {
    if (!docId)
        return null;
    const file = join(docDir(docId), `${ts}.md`);
    return existsSync(file) ? readFileSync(file, 'utf-8') : null;
}
|
|
155
|
+
/**
 * Restore a version: parse the snapshot markdown into TipTap JSON and
 * return it for the caller to apply. Returns null when the snapshot is
 * missing (or empty).
 */
export function restoreVersion(docId, ts) {
    const markdown = getVersionContent(docId, ts);
    return markdown ? markdownToTiptap(markdown) : null;
}
|
|
165
|
+
// ============================================================================
|
|
166
|
+
// PRUNE
|
|
167
|
+
// ============================================================================
|
|
168
|
+
const MAX_VERSIONS = 50;
|
|
169
|
+
const KEEP_ALL_WITHIN_MS = 7 * 24 * 60 * 60 * 1000; // 7 days
|
|
170
|
+
/**
 * Enforce retention: when more than MAX_VERSIONS snapshots exist, delete
 * the excess oldest ones — but never anything newer than
 * KEEP_ALL_WITHIN_MS (everything from the last 7 days is kept).
 */
export function pruneVersions(docId) {
    if (!docId)
        return;
    const dir = docDir(docId);
    if (!existsSync(dir))
        return;
    const snapshots = readdirSync(dir)
        .filter((name) => name.endsWith('.md'))
        .map((name) => ({ name, ts: parseInt(name.replace('.md', ''), 10) }))
        .filter((entry) => !Number.isNaN(entry.ts))
        .sort((a, b) => b.ts - a.ts); // newest first
    if (snapshots.length <= MAX_VERSIONS)
        return;
    const cutoff = Date.now() - KEEP_ALL_WITHIN_MS;
    // Candidates are everything past the cap; only old ones are removed.
    for (const entry of snapshots.slice(MAX_VERSIONS)) {
        if (entry.ts >= cutoff)
            continue;
        try {
            unlinkSync(join(dir, entry.name));
        }
        catch { /* ignore */ }
    }
}
|