@cyber-dash-tech/revela 0.1.6 → 0.1.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/commands/help.ts +2 -1
- package/lib/commands/pdf.ts +41 -0
- package/lib/pdf/export.ts +320 -0
- package/package.json +2 -1
- package/plugin.ts +17 -0
package/lib/commands/help.ts
CHANGED
|
@@ -32,6 +32,7 @@ export async function handleHelp(
|
|
|
32
32
|
`\`/revela designs-add <url>\` — install a design from URL / github:user/repo\n` +
|
|
33
33
|
`\`/revela domains-add <url>\` — install a domain from URL / github:user/repo\n` +
|
|
34
34
|
`\`/revela designs-rm <name>\` — remove an installed design\n` +
|
|
35
|
-
`\`/revela domains-rm <name>\` — remove an installed domain`
|
|
35
|
+
`\`/revela domains-rm <name>\` — remove an installed domain\n` +
|
|
36
|
+
`\`/revela pdf <file>\` — export HTML slide deck to PDF`
|
|
36
37
|
)
|
|
37
38
|
}
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* lib/commands/pdf.ts
|
|
3
|
+
*
|
|
4
|
+
* Handler for `/revela pdf <file_path>` — exports an HTML slide deck to PDF.
|
|
5
|
+
*
|
|
6
|
+
* Output: same directory and base name as the input, with .pdf extension.
|
|
7
|
+
* Example: slides/my-deck.html → slides/my-deck.pdf
|
|
8
|
+
*/
|
|
9
|
+
|
|
10
|
+
import { resolve } from "path"
|
|
11
|
+
import { exportToPdf } from "../pdf/export"
|
|
12
|
+
|
|
13
|
+
export async function handlePdf(
|
|
14
|
+
filePath: string,
|
|
15
|
+
send: (text: string) => Promise<void>,
|
|
16
|
+
): Promise<void> {
|
|
17
|
+
if (!filePath) {
|
|
18
|
+
await send(
|
|
19
|
+
"**Usage:** `/revela pdf <file_path>`\n\n" +
|
|
20
|
+
"Example: `/revela pdf slides/my-deck.html`"
|
|
21
|
+
)
|
|
22
|
+
return
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
const abs = resolve(filePath)
|
|
26
|
+
await send(`Exporting \`${abs}\` to PDF...`)
|
|
27
|
+
|
|
28
|
+
try {
|
|
29
|
+
const result = await exportToPdf(filePath)
|
|
30
|
+
const secs = (result.durationMs / 1000).toFixed(1)
|
|
31
|
+
await send(
|
|
32
|
+
`**PDF exported successfully**\n\n` +
|
|
33
|
+
`- Output: \`${result.outputPath}\`\n` +
|
|
34
|
+
`- Slides: ${result.slideCount}\n` +
|
|
35
|
+
`- Time: ${secs}s`
|
|
36
|
+
)
|
|
37
|
+
} catch (e) {
|
|
38
|
+
const msg = e instanceof Error ? e.message : String(e)
|
|
39
|
+
await send(`**PDF export failed**\n\n\`\`\`\n${msg}\n\`\`\``)
|
|
40
|
+
}
|
|
41
|
+
}
|
|
@@ -0,0 +1,320 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* lib/pdf/export.ts
|
|
3
|
+
*
|
|
4
|
+
* HTML → PDF export using Puppeteer (screenshot each slide) + pdf-lib (assemble PDF).
|
|
5
|
+
*
|
|
6
|
+
* Algorithm:
|
|
7
|
+
* 1. Launch headless Chrome at 1920×1080 (canvas size)
|
|
8
|
+
* 2. Scan HTML for external http(s) image URLs, download them to a temp dir,
|
|
9
|
+
* rewrite the HTML to use file:// local paths — avoids CDN/CORS/headless issues
|
|
10
|
+
* 3. Navigate to the patched HTML file
|
|
11
|
+
* 4. For each .slide: force .reveal.visible, wait 800ms, screenshot .slide-canvas
|
|
12
|
+
* using offsetParent-chain absolute coordinates
|
|
13
|
+
* 5. Assemble screenshots into a multi-page PDF (16:9, 1920×1080pt per page) via pdf-lib
|
|
14
|
+
* 6. Write PDF alongside the HTML file (same directory, .html → .pdf)
|
|
15
|
+
* 7. Clean up temp dir
|
|
16
|
+
*
|
|
17
|
+
* Output path: replaces the .html extension with .pdf, same directory as input.
|
|
18
|
+
*/
|
|
19
|
+
|
|
20
|
+
import puppeteer from "puppeteer-core"
import { PDFDocument } from "pdf-lib"
import {
  existsSync,
  writeFileSync,
  readFileSync,
  mkdirSync,
  rmSync,
} from "fs"
import { tmpdir } from "os"
import { resolve, dirname, basename, join, extname } from "path"
import { pathToFileURL } from "url"
import { randomBytes } from "crypto"
|
|
32
|
+
|
|
33
|
+
// ── Constants ────────────────────────────────────────────────────────────────

/** Canonical slide canvas dimensions in pixels — must match the design system */
const CANVAS_W = 1920
const CANVAS_H = 1080

/** Candidate Chrome/Chromium binary locations on macOS and Linux — same list as measure.ts */
const CHROME_PATHS = [
  "/Applications/Google Chrome.app/Contents/MacOS/Google Chrome",
  "/Applications/Chromium.app/Contents/MacOS/Chromium",
  "/usr/bin/google-chrome-stable",
  "/usr/bin/google-chrome",
  "/usr/bin/chromium-browser",
  "/usr/bin/chromium",
]

/** Mime type → file extension mapping for downloaded images (fallback when the URL has no usable extension) */
const MIME_TO_EXT: Record<string, string> = {
  "image/jpeg": ".jpg",
  "image/jpg": ".jpg",
  "image/png": ".png",
  "image/gif": ".gif",
  "image/webp": ".webp",
  "image/svg+xml": ".svg",
  "image/avif": ".avif",
}
|
|
59
|
+
|
|
60
|
+
// ── Helpers ──────────────────────────────────────────────────────────────────
|
|
61
|
+
|
|
62
|
+
function findChromePath(): string {
|
|
63
|
+
for (const p of CHROME_PATHS) {
|
|
64
|
+
if (existsSync(p)) return p
|
|
65
|
+
}
|
|
66
|
+
throw new Error(
|
|
67
|
+
"Could not find a Chrome/Chromium installation.\n" +
|
|
68
|
+
"Tried:\n" + CHROME_PATHS.map((p) => ` ${p}`).join("\n")
|
|
69
|
+
)
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
/** Derive output PDF path from input HTML path (same dir, .html → .pdf) */
|
|
73
|
+
export function derivePdfPath(htmlFilePath: string): string {
|
|
74
|
+
const abs = resolve(htmlFilePath)
|
|
75
|
+
const dir = dirname(abs)
|
|
76
|
+
const name = basename(abs).replace(/\.html?$/i, "")
|
|
77
|
+
return join(dir, `${name}.pdf`)
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
/**
|
|
81
|
+
* Download all external http(s) images found in the HTML to a temp directory,
|
|
82
|
+
* rewrite their URLs to file:// local paths, and return the patched HTML + temp dir.
|
|
83
|
+
*
|
|
84
|
+
* On any per-image failure (network error, non-200, timeout) the original URL is
|
|
85
|
+
* preserved so the export degrades gracefully (blank image area) rather than failing.
|
|
86
|
+
*/
|
|
87
|
+
async function localizeExternalImages(
|
|
88
|
+
htmlContent: string,
|
|
89
|
+
tmpDir: string
|
|
90
|
+
): Promise<string> {
|
|
91
|
+
// Extract all unique http(s) URLs that appear in src="..." or url("...") contexts
|
|
92
|
+
const urlPattern = /(?:src=["']|url\(["']?)(https?:\/\/[^"')>\s]+)/g
|
|
93
|
+
const uniqueUrls = new Set<string>()
|
|
94
|
+
let match: RegExpExecArray | null
|
|
95
|
+
while ((match = urlPattern.exec(htmlContent)) !== null) {
|
|
96
|
+
uniqueUrls.add(match[1])
|
|
97
|
+
}
|
|
98
|
+
|
|
99
|
+
if (uniqueUrls.size === 0) return htmlContent
|
|
100
|
+
|
|
101
|
+
// Download each URL in parallel (10s timeout per image)
|
|
102
|
+
const urlToLocal = new Map<string, string>()
|
|
103
|
+
|
|
104
|
+
await Promise.allSettled(
|
|
105
|
+
Array.from(uniqueUrls).map(async (url, i) => {
|
|
106
|
+
try {
|
|
107
|
+
const controller = new AbortController()
|
|
108
|
+
const timer = setTimeout(() => controller.abort(), 10000)
|
|
109
|
+
const res = await fetch(url, {
|
|
110
|
+
signal: controller.signal,
|
|
111
|
+
headers: {
|
|
112
|
+
// Use a real browser UA to avoid CDN blocking headless/bot requests
|
|
113
|
+
"User-Agent":
|
|
114
|
+
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 " +
|
|
115
|
+
"(KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36",
|
|
116
|
+
},
|
|
117
|
+
})
|
|
118
|
+
clearTimeout(timer)
|
|
119
|
+
|
|
120
|
+
if (!res.ok) return // non-200 → keep original URL
|
|
121
|
+
|
|
122
|
+
const contentType = res.headers.get("content-type") ?? ""
|
|
123
|
+
const mimeBase = contentType.split(";")[0].trim().toLowerCase()
|
|
124
|
+
|
|
125
|
+
// Derive extension: prefer from URL, fall back to Content-Type
|
|
126
|
+
let ext = extname(new URL(url).pathname).toLowerCase()
|
|
127
|
+
if (!ext || ext.length > 6) {
|
|
128
|
+
ext = MIME_TO_EXT[mimeBase] ?? ".bin"
|
|
129
|
+
}
|
|
130
|
+
|
|
131
|
+
const localPath = join(tmpDir, `img-${i}${ext}`)
|
|
132
|
+
const buf = new Uint8Array(await res.arrayBuffer())
|
|
133
|
+
writeFileSync(localPath, buf)
|
|
134
|
+
urlToLocal.set(url, pathToFileURL(localPath).href)
|
|
135
|
+
} catch {
|
|
136
|
+
// Network error or timeout — leave original URL, Chrome will show broken image
|
|
137
|
+
}
|
|
138
|
+
})
|
|
139
|
+
)
|
|
140
|
+
|
|
141
|
+
// Replace all occurrences of each downloaded URL in the HTML
|
|
142
|
+
let patched = htmlContent
|
|
143
|
+
for (const [original, local] of urlToLocal) {
|
|
144
|
+
// Escape special regex chars in the URL
|
|
145
|
+
const escaped = original.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")
|
|
146
|
+
patched = patched.replace(new RegExp(escaped, "g"), local)
|
|
147
|
+
}
|
|
148
|
+
|
|
149
|
+
return patched
|
|
150
|
+
}
|
|
151
|
+
|
|
152
|
+
// ── Main export ──────────────────────────────────────────────────────────────

/** Result of a successful PDF export. */
export interface ExportResult {
  /** Absolute path of the written .pdf file (same directory as the input HTML). */
  outputPath: string
  /** Number of slides captured — one PDF page per slide. */
  slideCount: number
  /** Wall-clock export time in milliseconds. */
  durationMs: number
}
|
|
159
|
+
|
|
160
|
+
/**
|
|
161
|
+
* Export an HTML slide deck to PDF.
|
|
162
|
+
*
|
|
163
|
+
* @param htmlFilePath - Absolute or relative path to the HTML file.
|
|
164
|
+
* @returns ExportResult with output path, slide count, and duration.
|
|
165
|
+
*/
|
|
166
|
+
export async function exportToPdf(htmlFilePath: string): Promise<ExportResult> {
|
|
167
|
+
const startMs = Date.now()
|
|
168
|
+
const abs = resolve(htmlFilePath)
|
|
169
|
+
|
|
170
|
+
if (!existsSync(abs)) {
|
|
171
|
+
throw new Error(`File not found: ${abs}`)
|
|
172
|
+
}
|
|
173
|
+
|
|
174
|
+
if (!/\.html?$/i.test(abs)) {
|
|
175
|
+
throw new Error(`Not an HTML file: ${abs}`)
|
|
176
|
+
}
|
|
177
|
+
|
|
178
|
+
const outputPath = derivePdfPath(abs)
|
|
179
|
+
const executablePath = findChromePath()
|
|
180
|
+
|
|
181
|
+
// ── Step 1: Download external images and rewrite HTML ─────────────────────
|
|
182
|
+
const tmpDir = join("/tmp", `revela-pdf-${randomBytes(6).toString("hex")}`)
|
|
183
|
+
mkdirSync(tmpDir, { recursive: true })
|
|
184
|
+
|
|
185
|
+
let tmpHtmlPath: string
|
|
186
|
+
try {
|
|
187
|
+
const originalHtml = readFileSync(abs, "utf-8")
|
|
188
|
+
const patchedHtml = await localizeExternalImages(originalHtml, tmpDir)
|
|
189
|
+
tmpHtmlPath = join(tmpDir, "index.html")
|
|
190
|
+
writeFileSync(tmpHtmlPath, patchedHtml, "utf-8")
|
|
191
|
+
} catch (err) {
|
|
192
|
+
// If patching fails for any reason, fall back to original file
|
|
193
|
+
tmpHtmlPath = abs
|
|
194
|
+
}
|
|
195
|
+
|
|
196
|
+
const fileUrl = pathToFileURL(tmpHtmlPath).href
|
|
197
|
+
|
|
198
|
+
// ── Step 2: Launch Puppeteer and screenshot each slide ────────────────────
|
|
199
|
+
const browser = await puppeteer.launch({
|
|
200
|
+
executablePath,
|
|
201
|
+
headless: true,
|
|
202
|
+
args: [
|
|
203
|
+
"--no-sandbox",
|
|
204
|
+
"--disable-setuid-sandbox",
|
|
205
|
+
"--disable-dev-shm-usage",
|
|
206
|
+
// Allow file:// pages to load other local file:// resources (downloaded images)
|
|
207
|
+
"--allow-file-access-from-files",
|
|
208
|
+
`--window-size=${CANVAS_W},${CANVAS_H}`,
|
|
209
|
+
],
|
|
210
|
+
})
|
|
211
|
+
|
|
212
|
+
let screenshots: Buffer[] = []
|
|
213
|
+
|
|
214
|
+
try {
|
|
215
|
+
const page = await browser.newPage()
|
|
216
|
+
|
|
217
|
+
// Set exact canvas viewport so scale === 1
|
|
218
|
+
await page.setViewport({ width: CANVAS_W, height: CANVAS_H })
|
|
219
|
+
|
|
220
|
+
// All images are now local file:// — no external requests needed.
|
|
221
|
+
// domcontentloaded is sufficient; networkidle2 would waste time.
|
|
222
|
+
await page.goto(fileUrl, { waitUntil: "domcontentloaded", timeout: 30000 })
|
|
223
|
+
|
|
224
|
+
// Wait for fonts (Google Fonts may still be external) and CSS animations to settle
|
|
225
|
+
await new Promise((r) => setTimeout(r, 2000))
|
|
226
|
+
|
|
227
|
+
// Disable scroll-snap so offsetParent-based clip coords are accurate.
|
|
228
|
+
// Also ensure html/body are tall enough to contain all slides without clipping.
|
|
229
|
+
await page.evaluate(() => {
|
|
230
|
+
document.documentElement.style.scrollSnapType = "none"
|
|
231
|
+
document.documentElement.style.overflow = "visible"
|
|
232
|
+
document.body.style.overflow = "visible"
|
|
233
|
+
})
|
|
234
|
+
|
|
235
|
+
const slideCount: number = await page.evaluate(
|
|
236
|
+
() => document.querySelectorAll(".slide").length
|
|
237
|
+
)
|
|
238
|
+
|
|
239
|
+
if (slideCount === 0) {
|
|
240
|
+
throw new Error(
|
|
241
|
+
"No .slide elements found in the HTML file.\n" +
|
|
242
|
+
"Make sure this is a revela-generated slide deck."
|
|
243
|
+
)
|
|
244
|
+
}
|
|
245
|
+
|
|
246
|
+
// Screenshot each slide individually
|
|
247
|
+
for (let idx = 0; idx < slideCount; idx++) {
|
|
248
|
+
// Force reveal animations (no scrollIntoView — we use absolute coords)
|
|
249
|
+
await page.evaluate((i: number) => {
|
|
250
|
+
const slide = document.querySelectorAll(".slide")[i] as HTMLElement | null
|
|
251
|
+
if (!slide) return
|
|
252
|
+
slide.querySelectorAll(".reveal").forEach((el) => el.classList.add("visible"))
|
|
253
|
+
}, idx)
|
|
254
|
+
|
|
255
|
+
// Wait for CSS transitions and JS rendering (ECharts animations, etc.)
|
|
256
|
+
await new Promise((r) => setTimeout(r, 800))
|
|
257
|
+
|
|
258
|
+
// Compute .slide-canvas absolute position by walking the offsetParent chain.
|
|
259
|
+
// getBoundingClientRect() returns viewport-relative coords (always near 0,0) —
|
|
260
|
+
// unusable as screenshot clip coordinates without adding scrollY.
|
|
261
|
+
// offsetParent walk gives document-absolute coords that Puppeteer clip expects.
|
|
262
|
+
const clipRect = await page.evaluate((i: number) => {
|
|
263
|
+
const slide = document.querySelectorAll(".slide")[i] as HTMLElement | null
|
|
264
|
+
if (!slide) return null
|
|
265
|
+
const canvas = slide.querySelector(".slide-canvas") as HTMLElement | null
|
|
266
|
+
if (!canvas) return null
|
|
267
|
+
let top = 0
|
|
268
|
+
let left = 0
|
|
269
|
+
let el: HTMLElement | null = canvas
|
|
270
|
+
while (el) {
|
|
271
|
+
top += el.offsetTop
|
|
272
|
+
left += el.offsetLeft
|
|
273
|
+
el = el.offsetParent as HTMLElement | null
|
|
274
|
+
}
|
|
275
|
+
return { x: left, y: top, width: canvas.offsetWidth, height: canvas.offsetHeight }
|
|
276
|
+
}, idx)
|
|
277
|
+
|
|
278
|
+
if (clipRect && clipRect.width > 0 && clipRect.height > 0) {
|
|
279
|
+
const buf = await page.screenshot({ type: "png", clip: clipRect })
|
|
280
|
+
screenshots.push(buf as Buffer)
|
|
281
|
+
} else {
|
|
282
|
+
// Fallback: screenshot full viewport
|
|
283
|
+
const buf = await page.screenshot({ type: "png" })
|
|
284
|
+
screenshots.push(buf as Buffer)
|
|
285
|
+
}
|
|
286
|
+
}
|
|
287
|
+
} finally {
|
|
288
|
+
await browser.close()
|
|
289
|
+
// Clean up temp dir (downloaded images + patched HTML)
|
|
290
|
+
try {
|
|
291
|
+
rmSync(tmpDir, { recursive: true, force: true })
|
|
292
|
+
} catch {
|
|
293
|
+
// Non-fatal — /tmp will be cleaned by OS eventually
|
|
294
|
+
}
|
|
295
|
+
}
|
|
296
|
+
|
|
297
|
+
// ── Step 3: Assemble PDF with pdf-lib ─────────────────────────────────────
|
|
298
|
+
const pdfDoc = await PDFDocument.create()
|
|
299
|
+
|
|
300
|
+
for (const pngBuf of screenshots) {
|
|
301
|
+
const pngImage = await pdfDoc.embedPng(new Uint8Array(pngBuf))
|
|
302
|
+
// Each page is exactly the canvas size (points = pixels at 1:1 for screen PDF)
|
|
303
|
+
const page = pdfDoc.addPage([CANVAS_W, CANVAS_H])
|
|
304
|
+
page.drawImage(pngImage, {
|
|
305
|
+
x: 0,
|
|
306
|
+
y: 0,
|
|
307
|
+
width: CANVAS_W,
|
|
308
|
+
height: CANVAS_H,
|
|
309
|
+
})
|
|
310
|
+
}
|
|
311
|
+
|
|
312
|
+
const pdfBytes = await pdfDoc.save()
|
|
313
|
+
writeFileSync(outputPath, pdfBytes)
|
|
314
|
+
|
|
315
|
+
return {
|
|
316
|
+
outputPath,
|
|
317
|
+
slideCount: screenshots.length,
|
|
318
|
+
durationMs: Date.now() - startMs,
|
|
319
|
+
}
|
|
320
|
+
}
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@cyber-dash-tech/revela",
|
|
3
|
-
"version": "0.1.6",
|
|
3
|
+
"version": "0.1.8",
|
|
4
4
|
"description": "OpenCode plugin that turns AI into an HTML slide deck generator",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"main": "./index.ts",
|
|
@@ -54,6 +54,7 @@
|
|
|
54
54
|
"fflate": "^0.8.2",
|
|
55
55
|
"jimp": "^1.6.1",
|
|
56
56
|
"mammoth": "^1.12.0",
|
|
57
|
+
"pdf-lib": "^1.17.1",
|
|
57
58
|
"puppeteer-core": "^24.40.0",
|
|
58
59
|
"tslog": "^4.10.2",
|
|
59
60
|
"unpdf": "^1.4.0"
|
package/plugin.ts
CHANGED
|
@@ -17,6 +17,7 @@
|
|
|
17
17
|
import type { Plugin } from "@opencode-ai/plugin"
|
|
18
18
|
import { existsSync, readFileSync } from "fs"
|
|
19
19
|
import { extname, basename } from "path"
|
|
20
|
+
import { tmpdir } from "os"
|
|
20
21
|
import { seedBuiltinDesigns } from "./lib/design/designs"
|
|
21
22
|
import { seedBuiltinDomains } from "./lib/domain/domains"
|
|
22
23
|
import { buildPrompt } from "./lib/prompt-builder"
|
|
@@ -43,6 +44,7 @@ import {
|
|
|
43
44
|
handleDomainsAdd,
|
|
44
45
|
handleDomainsRemove,
|
|
45
46
|
} from "./lib/commands/domains"
|
|
47
|
+
import { handlePdf } from "./lib/commands/pdf"
|
|
46
48
|
import designsTool from "./tools/designs"
|
|
47
49
|
import domainsTool from "./tools/domains"
|
|
48
50
|
import researchSaveTool from "./tools/research-save"
|
|
@@ -141,6 +143,17 @@ const server: Plugin = (async (pluginCtx) => {
|
|
|
141
143
|
if (!(opencodeConfig.permission as Record<string, unknown>)["websearch"]) {
|
|
142
144
|
;(opencodeConfig.permission as Record<string, unknown>)["websearch"] = "deny"
|
|
143
145
|
}
|
|
146
|
+
|
|
147
|
+
// Allow read access to OS tmp dir for revela-extracted temp files.
|
|
148
|
+
// pre-read.ts writes DOCX/PPTX/XLSX extracted text to os.tmpdir(), then redirects
|
|
149
|
+
// args.filePath to that temp file. Without this, the read tool triggers an
|
|
150
|
+
// external_directory permission prompt (default: "ask") on every binary file extraction.
|
|
151
|
+
const tmp = tmpdir()
|
|
152
|
+
const perm = opencodeConfig.permission as Record<string, unknown>
|
|
153
|
+
if (typeof perm["external_directory"] !== "string") {
|
|
154
|
+
perm["external_directory"] ??= {}
|
|
155
|
+
;(perm["external_directory"] as Record<string, unknown>)[`${tmp}/**`] = "allow"
|
|
156
|
+
}
|
|
144
157
|
},
|
|
145
158
|
|
|
146
159
|
// ── Route all sub-commands to lib/commands/ handlers ──────────────────
|
|
@@ -198,6 +211,10 @@ const server: Plugin = (async (pluginCtx) => {
|
|
|
198
211
|
await handleDomainsRemove(param, send)
|
|
199
212
|
throw new Error("__REVELA_DOMAINS_RM_HANDLED__")
|
|
200
213
|
}
|
|
214
|
+
if (sub === "pdf") {
|
|
215
|
+
await handlePdf(param, send)
|
|
216
|
+
throw new Error("__REVELA_PDF_HANDLED__")
|
|
217
|
+
}
|
|
201
218
|
|
|
202
219
|
await send(`**Unknown sub-command:** \`${sub}\`\nRun \`/revela\` to see available commands.`)
|
|
203
220
|
throw new Error("__REVELA_UNKNOWN_HANDLED__")
|