openwriter 0.9.1 → 0.9.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -10,8 +10,8 @@
10
10
  <link rel="preconnect" href="https://fonts.googleapis.com" />
11
11
  <link rel="preconnect" href="https://fonts.gstatic.com" crossorigin />
12
12
  <link href="https://fonts.googleapis.com/css2?family=Charter:ital,wght@0,400;0,700;1,400&family=Crimson+Pro:ital,wght@0,300;0,400;0,600;0,700;1,400&family=DM+Sans:ital,wght@0,400;0,500;0,600;0,700;1,400&family=DM+Serif+Display&family=IBM+Plex+Mono:wght@400;500;600&family=IBM+Plex+Sans:wght@400;500;600&family=Inter:wght@400;500;600;700&family=Libre+Baskerville:ital,wght@0,400;0,700;1,400&family=Literata:ital,opsz,wght@0,7..72,400;0,7..72,600;0,7..72,700;1,7..72,400&family=Newsreader:ital,opsz,wght@0,6..72,400;0,6..72,600;1,6..72,400&family=Playfair+Display:wght@400;600;700;900&family=Source+Serif+4:ital,opsz,wght@0,8..60,400;0,8..60,600;0,8..60,700;1,8..60,400&family=Space+Grotesk:wght@400;500;600;700&family=Space+Mono:wght@400;700&display=swap" rel="stylesheet" />
13
- <script type="module" crossorigin src="/assets/index-BYx7Ckkf.js"></script>
14
- <link rel="stylesheet" crossorigin href="/assets/index-BZqt_dqW.css">
13
+ <script type="module" crossorigin src="/assets/index-wRyjoTwK.js"></script>
14
+ <link rel="stylesheet" crossorigin href="/assets/index-ZQ1BICWp.css">
15
15
  </head>
16
16
  <body>
17
17
  <div id="root"></div>
@@ -6,6 +6,7 @@
6
6
  import { Client, OAuth1 } from '@xdevplatform/xdk';
7
7
  import { join, extname } from 'path';
8
8
  import { readFileSync, existsSync } from 'fs';
9
+ import sharp from 'sharp';
9
10
  import twitter from 'twitter-text';
10
11
  const { parseTweet } = twitter;
11
12
  function createXClient(config) {
@@ -196,9 +197,19 @@ const plugin = {
196
197
  res.status(400).json({ success: false, error: 'X API credentials not configured' });
197
198
  return;
198
199
  }
199
- const mediaBase64 = readFileSync(filePath).toString('base64');
200
+ let fileBuffer = readFileSync(filePath);
201
+ let uploadType = mediaType;
202
+ const origSize = fileBuffer.length;
203
+ // Compress large images or PNGs to JPEG to stay under X API limits
204
+ if (fileBuffer.length > 3 * 1024 * 1024 || ext === '.png') {
205
+ fileBuffer = Buffer.from(await sharp(fileBuffer).jpeg({ quality: 85 }).toBuffer());
206
+ uploadType = 'image/jpeg';
207
+ console.log(`[X Plugin] Compressed ${filePath}: ${(origSize / 1024 / 1024).toFixed(2)}MB → ${(fileBuffer.length / 1024 / 1024).toFixed(2)}MB`);
208
+ }
209
+ console.log(`[X Plugin] Uploading ${filePath}: ${(fileBuffer.length / 1024 / 1024).toFixed(2)}MB, type: ${uploadType}`);
210
+ const mediaBase64 = fileBuffer.toString('base64');
200
211
  const uploadResult = await client.media.upload({
201
- body: { media: mediaBase64, mediaCategory: 'tweet_image', mediaType },
212
+ body: { media: mediaBase64, mediaCategory: 'tweet_image', mediaType: uploadType },
202
213
  });
203
214
  const mediaId = uploadResult?.data?.id
204
215
  || uploadResult?.media_id_string;
@@ -211,6 +222,16 @@ const plugin = {
211
222
  }
212
223
  catch (err) {
213
224
  console.error('[X Plugin] Media upload failed:', err.message);
225
+ if (err.response) {
226
+ try {
227
+ const body = await err.response.text();
228
+ console.error('[X Plugin] X API response:', err.response.status, body);
229
+ }
230
+ catch { /* ignore */ }
231
+ }
232
+ if (err.data)
233
+ console.error('[X Plugin] Error data:', JSON.stringify(err.data));
234
+ console.error('[X Plugin] Full error:', JSON.stringify(err, Object.getOwnPropertyNames(err)));
214
235
  res.status(500).json({ success: false, error: err.message });
215
236
  }
216
237
  });
@@ -54,5 +54,40 @@ export function createImageRouter() {
54
54
  const src = `/_images/${req.file.filename}`;
55
55
  res.json({ src });
56
56
  });
57
+ // Download external URL and save locally
58
+ router.post('/api/download-image', async (req, res) => {
59
+ const { url } = req.body;
60
+ if (!url || typeof url !== 'string') {
61
+ res.status(400).json({ error: 'No URL provided' });
62
+ return;
63
+ }
64
+ try {
65
+ const response = await fetch(url);
66
+ if (!response.ok) {
67
+ res.status(400).json({ error: 'Failed to fetch image' });
68
+ return;
69
+ }
70
+ const contentType = response.headers.get('content-type') || 'image/png';
71
+ if (!contentType.startsWith('image/')) {
72
+ res.status(400).json({ error: 'URL is not an image' });
73
+ return;
74
+ }
75
+ const ext = contentType.includes('jpeg') || contentType.includes('jpg') ? '.jpg'
76
+ : contentType.includes('gif') ? '.gif'
77
+ : contentType.includes('webp') ? '.webp'
78
+ : '.png';
79
+ ensureImagesDir();
80
+ const filename = `${randomUUID().slice(0, 8)}${ext}`;
81
+ const filePath = join(getImagesDir(), filename);
82
+ const buffer = Buffer.from(await response.arrayBuffer());
83
+ const { writeFileSync } = await import('fs');
84
+ writeFileSync(filePath, buffer);
85
+ const src = `/_images/${filename}`;
86
+ res.json({ src });
87
+ }
88
+ catch {
89
+ res.status(500).json({ error: 'Download failed' });
90
+ }
91
+ });
57
92
  return router;
58
93
  }
@@ -460,12 +460,12 @@ export async function startHttpServer(options = {}) {
460
460
  // Agent marks
461
461
  app.post('/api/marks', (req, res) => {
462
462
  try {
463
- const { filename, text, note, nodeId } = req.body;
463
+ const { filename, text, note, nodeId, nodeIds } = req.body;
464
464
  if (!filename || !text || !nodeId) {
465
465
  res.status(400).json({ error: 'filename, text, and nodeId are required' });
466
466
  return;
467
467
  }
468
- const mark = addMark(filename, text, note || '', nodeId);
468
+ const mark = addMark(filename, text, note || '', nodeId, nodeIds);
469
469
  broadcastMarksChanged(filename);
470
470
  res.json({ success: true, mark });
471
471
  }
@@ -39,13 +39,14 @@ function writeMarkFile(filename, data) {
39
39
  }
40
40
  writeFileSync(path, JSON.stringify(data, null, 2));
41
41
  }
42
- export function addMark(filename, text, note, nodeId) {
42
+ export function addMark(filename, text, note, nodeId, nodeIds) {
43
43
  const data = readMarkFile(filename);
44
44
  const mark = {
45
45
  id: randomUUID().slice(0, 8),
46
46
  text,
47
47
  note,
48
48
  nodeId,
49
+ ...(nodeIds && nodeIds.length > 1 ? { nodeIds } : {}),
49
50
  createdAt: new Date().toISOString(),
50
51
  };
51
52
  data.marks.push(mark);
@@ -150,7 +151,13 @@ export function pruneStaleMarks(filename, validNodeIds) {
150
151
  return 0;
151
152
  const validSet = new Set(validNodeIds);
152
153
  const before = data.marks.length;
153
- data.marks = data.marks.filter((m) => validSet.has(m.nodeId));
154
+ data.marks = data.marks.filter((m) => {
155
+ // Multi-node mark: keep if ANY nodeId is still valid
156
+ if (m.nodeIds && m.nodeIds.length > 0) {
157
+ return m.nodeIds.some((id) => validSet.has(id));
158
+ }
159
+ return validSet.has(m.nodeId);
160
+ });
154
161
  const pruned = before - data.marks.length;
155
162
  if (pruned > 0)
156
163
  writeMarkFile(filename, data);
@@ -680,7 +680,8 @@ function applyChangesToDoc(doc, changes) {
680
680
  // Tweet thread: hard-delete paragraphs + adjacent HR immediately.
681
681
  // Tweet compose view can't handle pending deletes near HRs — hard-delete and resync.
682
682
  const delNode = found.parent[found.index];
683
- if (delNode.type === 'paragraph' && state.metadata?.tweetContext) {
683
+ const hardDeleteTypes = ['paragraph', 'image', 'imageLoading'];
684
+ if (hardDeleteTypes.includes(delNode.type) && state.metadata?.tweetContext) {
684
685
  const idx = found.index;
685
686
  if (idx > 0 && found.parent[idx - 1].type === 'horizontalRule') {
686
687
  found.parent.splice(idx, 1);
@@ -3,7 +3,7 @@
3
3
  * Mounted in index.ts to keep the main file lean.
4
4
  */
5
5
  import { Router } from 'express';
6
- import { listWorkspaces, getWorkspace, createWorkspace, deleteWorkspace, reorderWorkspaces, addDoc, removeDoc, moveDoc, reorderDoc, addContainerToWorkspace, removeContainer, renameContainer, renameWorkspace, reorderContainer, } from './workspaces.js';
6
+ import { listWorkspaces, getWorkspace, createWorkspace, deleteWorkspace, reorderWorkspaces, addDoc, removeDoc, moveDoc, reorderDoc, addContainerToWorkspace, removeContainer, renameContainer, renameWorkspace, reorderContainer, crossMoveContainer, promoteContainerToWorkspace, } from './workspaces.js';
7
7
  export function createWorkspaceRouter(b) {
8
8
  const router = Router();
9
9
  router.get('/api/workspaces', (_req, res) => {
@@ -36,6 +36,20 @@ export function createWorkspaceRouter(b) {
36
36
  res.status(400).json({ error: err.message });
37
37
  }
38
38
  });
39
+ // Promote container to standalone workspace
40
+ router.post('/api/workspaces/promote-container', (req, res) => {
41
+ try {
42
+ const { sourceWorkspace, containerId, afterWorkspaceFilename } = req.body;
43
+ if (!sourceWorkspace || !containerId)
44
+ return res.status(400).json({ error: 'sourceWorkspace and containerId required' });
45
+ const result = promoteContainerToWorkspace(sourceWorkspace, containerId, afterWorkspaceFilename ?? null);
46
+ b.broadcastWorkspacesChanged();
47
+ res.json(result);
48
+ }
49
+ catch (err) {
50
+ res.status(400).json({ error: err.message });
51
+ }
52
+ });
39
53
  router.get('/api/workspaces/:filename', (req, res) => {
40
54
  try {
41
55
  res.json(getWorkspace(req.params.filename));
@@ -149,6 +163,20 @@ export function createWorkspaceRouter(b) {
149
163
  res.status(400).json({ error: err.message });
150
164
  }
151
165
  });
166
+ // Cross-workspace container move
167
+ router.post('/api/workspaces/:targetFilename/containers/:containerId/cross-move', (req, res) => {
168
+ try {
169
+ const { sourceWorkspace } = req.body;
170
+ if (!sourceWorkspace)
171
+ return res.status(400).json({ error: 'sourceWorkspace required' });
172
+ const ws = crossMoveContainer(sourceWorkspace, req.params.targetFilename, req.params.containerId, req.body.targetContainerId ?? null, req.body.afterIdentifier ?? null);
173
+ b.broadcastWorkspacesChanged();
174
+ res.json(ws);
175
+ }
176
+ catch (err) {
177
+ res.status(400).json({ error: err.message });
178
+ }
179
+ });
152
180
  // Cross-workspace move (from one workspace to another)
153
181
  router.post('/api/workspaces/:targetFilename/docs/:docFile/cross-move', (req, res) => {
154
182
  try {
@@ -252,6 +252,51 @@ export function moveContainer(wsFile, containerId, targetContainerId, afterIdent
252
252
  writeWorkspace(wsFile, ws);
253
253
  return ws;
254
254
  }
255
+ export function crossMoveContainer(sourceWsFile, targetWsFile, containerId, targetContainerId, afterIdentifier) {
256
+ const sourceWs = getWorkspace(sourceWsFile);
257
+ const removed = removeNode(sourceWs.root, containerId);
258
+ if (removed.type !== 'container')
259
+ throw new Error(`Node "${containerId}" is not a container`);
260
+ writeWorkspace(sourceWsFile, sourceWs);
261
+ const targetWs = getWorkspace(targetWsFile);
262
+ // Insert into target — reuse moveNode-style logic
263
+ const target = targetContainerId === null
264
+ ? targetWs.root
265
+ : (() => { const f = findContainer(targetWs.root, targetContainerId); if (!f)
266
+ throw new Error('Target container not found'); return f.node.items; })();
267
+ if (afterIdentifier === null) {
268
+ target.unshift(removed);
269
+ }
270
+ else {
271
+ const afterIdx = target.findIndex((n) => (n.type === 'doc' && n.file === afterIdentifier) || (n.type === 'container' && n.id === afterIdentifier));
272
+ if (afterIdx === -1)
273
+ target.push(removed);
274
+ else
275
+ target.splice(afterIdx + 1, 0, removed);
276
+ }
277
+ writeWorkspace(targetWsFile, targetWs);
278
+ return targetWs;
279
+ }
280
+ export function promoteContainerToWorkspace(sourceWsFile, containerId, afterWorkspaceFilename) {
281
+ const sourceWs = getWorkspace(sourceWsFile);
282
+ const removed = removeNode(sourceWs.root, containerId);
283
+ if (removed.type !== 'container')
284
+ throw new Error(`Node "${containerId}" is not a container`);
285
+ writeWorkspace(sourceWsFile, sourceWs);
286
+ // Create new workspace with container's children as root
287
+ const slug = sanitizeFilename(removed.name).toLowerCase().replace(/\s+/g, '-');
288
+ const filename = `${slug}-${randomUUID().slice(0, 8)}.json`;
289
+ const workspace = { version: 2, title: removed.name, root: removed.items };
290
+ writeWorkspace(filename, workspace);
291
+ // Append to order then reposition
292
+ const order = readOrder();
293
+ order.push(filename);
294
+ writeOrder(order);
295
+ if (afterWorkspaceFilename !== null || order.length > 1) {
296
+ reorderWorkspaceAfter(filename, afterWorkspaceFilename);
297
+ }
298
+ return { filename, title: removed.name, docCount: countDocs(removed.items) };
299
+ }
255
300
  export function reorderWorkspaceAfter(filename, afterFilename) {
256
301
  ensureWorkspacesDir();
257
302
  const order = readOrder();
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "openwriter",
3
- "version": "0.9.1",
3
+ "version": "0.9.3",
4
4
  "description": "The open-source writing surface for AI agents. Markdown-native editor with pending change review — your agent writes, you accept or reject.",
5
5
  "type": "module",
6
6
  "license": "MIT",
package/skill/SKILL.md CHANGED
@@ -16,7 +16,7 @@ description: |
16
16
  Requires: OpenWriter MCP server configured. Browser UI at localhost:5050.
17
17
  metadata:
18
18
  author: travsteward
19
- version: "0.4.2"
19
+ version: "0.4.5"
20
20
  repository: https://github.com/travsteward/openwriter
21
21
  license: MIT
22
22
  ---
@@ -402,29 +402,38 @@ Threads are single documents with `horizontalRule` nodes separating each tweet.
402
402
 
403
403
  **Do NOT use `populate_document` for threads.** Use `create_document` with `content_type: "tweet"` + `empty: true`, then `write_to_pad` with `horizontalRule` JSON nodes between tweets. The `content_type` flag sets `tweetContext` metadata automatically.
404
404
 
405
- **Critical: `horizontalRule` separators MUST use TipTap JSON `{ type: "horizontalRule" }`.** Markdown `---` does NOT create proper HR nodes — the thread will render as a single tweet. Tweet text content can be plain markdown strings.
405
+ **THREE RULES for thread HRs:**
406
+
407
+ 1. **`horizontalRule` separators MUST use TipTap JSON `{ type: "horizontalRule" }`.** Markdown `---` does NOT create proper HR nodes.
408
+ 2. **Each HR must be its own change.** Do NOT use content arrays `[{type: "horizontalRule"}, {type: "paragraph", ...}]` — this silently drops the HR.
409
+ 3. **Send the ENTIRE thread in ONE `write_to_pad` call.** Do NOT split across multiple calls. Multiple calls create race conditions — if the user accepts changes between calls, pending HRs can be dropped. One call = atomic = no race conditions.
406
410
 
407
411
  ```
408
412
  1. create_document({ title: "Thread title", content_type: "tweet", empty: true })
409
413
  2. write_to_pad({ docId: "<docId>", changes: [
410
- { operation: "insert", afterNodeId: "end", content: "Tweet 1 text" },
414
+ { operation: "insert", afterNodeId: "end", content: "Tweet 1 paragraph 1" },
415
+ { operation: "insert", afterNodeId: "end", content: "Tweet 1 paragraph 2" },
411
416
  { operation: "insert", afterNodeId: "end", content: { type: "horizontalRule" } },
412
- { operation: "insert", afterNodeId: "end", content: "Tweet 2 text" },
417
+ { operation: "insert", afterNodeId: "end", content: "Tweet 2 paragraph 1" },
418
+ { operation: "insert", afterNodeId: "end", content: "Tweet 2 paragraph 2" },
413
419
  { operation: "insert", afterNodeId: "end", content: { type: "horizontalRule" } },
414
- { operation: "insert", afterNodeId: "end", content: "Tweet 3 text" }
420
+ { operation: "insert", afterNodeId: "end", content: "Tweet 3 paragraph 1" }
415
421
  ]})
416
422
  ```
417
423
 
424
+ **For long threads (many tweets):** still send in ONE call. The changes array can hold dozens of items. Atomicity matters more than streaming feel for threads — a half-built thread with missing HRs is worse than waiting for the full thread to arrive.
425
+
418
426
  ### Inserting New Tweets into Existing Threads
419
427
 
420
- **Insert HR + paragraph as ONE change with a content array.** Two separate calls will fail the browser resyncs on HR insertion and overwrites the second call.
428
+ **Mid-thread insertion is unreliable.** `afterNodeId: "end"` always means document end, not after your last insert. Inserting after specific node IDs mid-document has edge cases with pending changes and image nodes.
421
429
 
422
- ```
423
- write_to_pad({ docId: "...", changes: [
424
- { operation: "insert", afterNodeId: "<last-node-of-previous-tweet>",
425
- content: [{ type: "horizontalRule" }, { type: "paragraph", content: [{ type: "text", text: "New tweet" }] }] }
426
- ]})
427
- ```
430
+ **Preferred approach: rebuild the full thread.** Delete the document and recreate with all tweets in one atomic `write_to_pad` call. This is the only pattern that reliably produces correct thread structure.
431
+
432
+ **If you must insert mid-thread:** use a single `write_to_pad` call with the HR and all content targeting the same `afterNodeId` (the last node of the preceding tweet). Content inserts in reverse order when sharing an afterNodeId, so list changes in reverse. This is fragile — prefer full rebuild.
433
+
434
+ **Do NOT delete empty paragraphs after images.** Images create empty `<p>` nodes after them. These look like junk but HRs (thread separators) are dependent on them. Deleting the empty paragraph kills the HR too, merging two tweets into one. Leave them alone.
435
+
436
+ **NEVER bulk-delete text nodes in a thread that contains images.** Image nodes survive text deletion and become orphans — stranded in the wrong position with no surrounding content. The user must then manually delete every orphan image from the browser. This is catastrophic. If you need to reorder tweets, move text around the existing images, or delete the entire document and start fresh (which properly removes everything including images).
428
437
 
429
438
  ### Paragraph Spacing in Tweets
430
439