nodebb-plugin-pdf-secure2 1.3.0 → 1.3.1

This diff shows the content changes between publicly released versions of this package, as published to one of the supported registries. The information in this diff is provided for informational purposes only and reflects the package versions exactly as they appear in their respective public registries.
@@ -154,7 +154,7 @@ Controllers.handleChat = async function (req, res) {
154
154
  const answer = await geminiChat.chat(safeName, trimmedQuestion, history || []);
155
155
  return res.json({ answer });
156
156
  } catch (err) {
157
- console.error('[PDF-Secure] Chat error:', err.message);
157
+ console.error('[PDF-Secure] Chat error:', err.message, err.status || '', err.code || '');
158
158
 
159
159
  if (err.message === 'File not found') {
160
160
  return res.status(404).json({ error: 'PDF not found' });
@@ -7,15 +7,9 @@ const GeminiChat = module.exports;
7
7
 
8
8
  let ai = null;
9
9
 
10
- // Cache maps
11
- const fileCache = new Map(); // filename -> {fileUri, uploadedAt}
12
- const contextCache = new Map(); // filename -> {cacheName, expiresAt}
13
- const uploadPromises = new Map(); // filename -> Promise (deduplication)
14
-
15
- const FILE_TTL = 48 * 60 * 60 * 1000; // 48 hours (Gemini Files API limit)
16
- const CONTEXT_TTL = 30 * 60 * 1000; // 30 minutes
17
- const CLEANUP_INTERVAL = 10 * 60 * 1000; // 10 minutes
18
- const SMALL_PDF_THRESHOLD = 8; // pages - skip caching for small PDFs
10
+ // In-memory cache for PDF base64 data (avoids re-reading from disk)
11
+ const pdfDataCache = new Map(); // filename -> { base64, cachedAt }
12
+ const PDF_DATA_TTL = 30 * 60 * 1000; // 30 minutes
19
13
 
20
14
  const SYSTEM_INSTRUCTION = `You are a helpful assistant that answers questions about the provided PDF document.
21
15
  Respond in the same language the user writes their question in.
@@ -23,20 +17,15 @@ Be concise and accurate. When referencing specific information, mention the rele
23
17
 
24
18
  const MODEL_NAME = 'gemini-2.5-flash';
25
19
 
26
- // Periodic cleanup of expired entries
20
+ // Periodic cleanup
27
21
  const cleanupTimer = setInterval(() => {
28
22
  const now = Date.now();
29
- for (const [key, entry] of fileCache.entries()) {
30
- if (now - entry.uploadedAt > FILE_TTL) {
31
- fileCache.delete(key);
32
- }
33
- }
34
- for (const [key, entry] of contextCache.entries()) {
35
- if (now > entry.expiresAt) {
36
- contextCache.delete(key);
23
+ for (const [key, entry] of pdfDataCache.entries()) {
24
+ if (now - entry.cachedAt > PDF_DATA_TTL) {
25
+ pdfDataCache.delete(key);
37
26
  }
38
27
  }
39
- }, CLEANUP_INTERVAL);
28
+ }, 10 * 60 * 1000);
40
29
  cleanupTimer.unref();
41
30
 
42
31
  GeminiChat.init = function (apiKey) {
@@ -58,105 +47,31 @@ GeminiChat.isAvailable = function () {
58
47
  return !!ai;
59
48
  };
60
49
 
61
- async function uploadFile(filename) {
50
+ // Read PDF and cache base64 in memory
51
+ async function getPdfBase64(filename) {
52
+ const cached = pdfDataCache.get(filename);
53
+ if (cached && Date.now() - cached.cachedAt < PDF_DATA_TTL) {
54
+ return cached.base64;
55
+ }
56
+
62
57
  const filePath = pdfHandler.resolveFilePath(filename);
63
58
  if (!filePath || !fs.existsSync(filePath)) {
64
59
  throw new Error('File not found');
65
60
  }
66
61
 
67
62
  const fileBuffer = await fs.promises.readFile(filePath);
68
- const uploadResult = await ai.files.upload({
69
- file: new Blob([fileBuffer], { type: 'application/pdf' }),
70
- config: { displayName: filename },
71
- });
72
-
73
- // Wait for file processing
74
- let file = uploadResult;
75
- while (file.state === 'PROCESSING') {
76
- await new Promise(r => setTimeout(r, 2000));
77
- file = await ai.files.get({ name: file.name });
78
- }
63
+ const base64 = fileBuffer.toString('base64');
79
64
 
80
- if (file.state === 'FAILED') {
81
- throw new Error('File processing failed');
82
- }
83
-
84
- return file;
65
+ pdfDataCache.set(filename, { base64, cachedAt: Date.now() });
66
+ return base64;
85
67
  }
86
68
 
87
- async function getPageCount(filename) {
88
- try {
89
- return await pdfHandler.getTotalPages(filename);
90
- } catch {
91
- return 0;
92
- }
93
- }
94
-
95
- GeminiChat.ensureCache = async function (filename) {
96
- // Check existing context cache
97
- const cached = contextCache.get(filename);
98
- if (cached && Date.now() < cached.expiresAt) {
99
- return { cacheName: cached.cacheName, useCache: true };
100
- }
101
-
102
- // Check if PDF is small enough to skip caching
103
- const pageCount = await getPageCount(filename);
104
- if (pageCount > 0 && pageCount < SMALL_PDF_THRESHOLD) {
105
- // Small PDF: use inline approach
106
- return { filename, useCache: false };
107
- }
108
-
109
- // Deduplicate concurrent upload requests for the same file
110
- if (uploadPromises.has(filename)) {
111
- return uploadPromises.get(filename);
112
- }
113
-
114
- const promise = (async () => {
115
- try {
116
- // Ensure file is uploaded
117
- let fileEntry = fileCache.get(filename);
118
- if (!fileEntry || Date.now() - fileEntry.uploadedAt > FILE_TTL) {
119
- const uploaded = await uploadFile(filename);
120
- fileEntry = { fileUri: uploaded.uri, uploadedAt: Date.now() };
121
- fileCache.set(filename, fileEntry);
122
- console.log('[PDF-Secure] File uploaded to Gemini:', filename);
123
- }
124
-
125
- // Create context cache
126
- const cache = await ai.caches.create({
127
- model: MODEL_NAME,
128
- config: {
129
- contents: [{
130
- role: 'user',
131
- parts: [{ fileData: { fileUri: fileEntry.fileUri, mimeType: 'application/pdf' } }],
132
- }],
133
- systemInstruction: SYSTEM_INSTRUCTION,
134
- ttl: `${CONTEXT_TTL / 1000}s`,
135
- },
136
- });
137
-
138
- const cacheEntry = {
139
- cacheName: cache.name,
140
- expiresAt: Date.now() + CONTEXT_TTL,
141
- };
142
- contextCache.set(filename, cacheEntry);
143
- console.log('[PDF-Secure] Context cache created for:', filename);
144
- return { cacheName: cache.name, useCache: true };
145
- } finally {
146
- uploadPromises.delete(filename);
147
- }
148
- })();
149
-
150
- uploadPromises.set(filename, promise);
151
- return promise;
152
- };
153
-
154
69
  GeminiChat.chat = async function (filename, question, history) {
155
70
  if (!ai) {
156
71
  throw new Error('AI chat is not configured');
157
72
  }
158
73
 
159
- const cacheInfo = await GeminiChat.ensureCache(filename);
74
+ const base64Data = await getPdfBase64(filename);
160
75
 
161
76
  // Build conversation contents from history
162
77
  const contents = [];
@@ -177,50 +92,29 @@ GeminiChat.chat = async function (filename, question, history) {
177
92
  parts: [{ text: question }],
178
93
  });
179
94
 
180
- let response;
181
-
182
- if (cacheInfo.useCache) {
183
- // Use cached context
184
- response = await ai.models.generateContent({
185
- model: MODEL_NAME,
186
- contents,
187
- config: {
188
- cachedContent: cacheInfo.cacheName,
189
- },
190
- });
191
- } else {
192
- // Inline PDF for small files
193
- const filePath = pdfHandler.resolveFilePath(filename);
194
- if (!filePath || !fs.existsSync(filePath)) {
195
- throw new Error('File not found');
196
- }
197
- const fileBuffer = await fs.promises.readFile(filePath);
198
- const base64Data = fileBuffer.toString('base64');
199
-
200
- // Prepend PDF as first message
201
- const inlineContents = [
202
- {
203
- role: 'user',
204
- parts: [
205
- { inlineData: { mimeType: 'application/pdf', data: base64Data } },
206
- { text: 'I am sharing a PDF document with you. Please use it to answer my questions.' },
207
- ],
208
- },
209
- {
210
- role: 'model',
211
- parts: [{ text: 'I have received the PDF document. I am ready to answer your questions about it.' }],
212
- },
213
- ...contents,
214
- ];
215
-
216
- response = await ai.models.generateContent({
217
- model: MODEL_NAME,
218
- contents: inlineContents,
219
- config: {
220
- systemInstruction: SYSTEM_INSTRUCTION,
221
- },
222
- });
223
- }
95
+ // Always use inline PDF — single API call, no upload/cache overhead
96
+ const inlineContents = [
97
+ {
98
+ role: 'user',
99
+ parts: [
100
+ { inlineData: { mimeType: 'application/pdf', data: base64Data } },
101
+ { text: 'I am sharing a PDF document with you. Please use it to answer my questions.' },
102
+ ],
103
+ },
104
+ {
105
+ role: 'model',
106
+ parts: [{ text: 'I have received the PDF document. I am ready to answer your questions about it.' }],
107
+ },
108
+ ...contents,
109
+ ];
110
+
111
+ const response = await ai.models.generateContent({
112
+ model: MODEL_NAME,
113
+ contents: inlineContents,
114
+ config: {
115
+ systemInstruction: SYSTEM_INSTRUCTION,
116
+ },
117
+ });
224
118
 
225
119
  const text = response?.candidates?.[0]?.content?.parts?.[0]?.text;
226
120
  if (!text) {
package/library.js CHANGED
@@ -132,7 +132,7 @@ plugin.init = async (params) => {
132
132
  'Expires': '0',
133
133
  'Referrer-Policy': 'no-referrer',
134
134
  'Permissions-Policy': 'accelerometer=(), camera=(), geolocation=(), gyroscope=(), magnetometer=(), microphone=(), payment=(), usb=()',
135
- 'Content-Security-Policy': "default-src 'self'; script-src 'self' 'unsafe-inline' 'unsafe-eval' https://cdnjs.cloudflare.com; style-src 'self' 'unsafe-inline' https://cdnjs.cloudflare.com; img-src 'self' data: blob: https://i.ibb.co; connect-src 'self'; frame-ancestors 'self'",
135
+ 'Content-Security-Policy': "default-src 'self'; script-src 'self' 'unsafe-inline' 'unsafe-eval' https://cdnjs.cloudflare.com; worker-src 'self' blob:; style-src 'self' 'unsafe-inline' https://cdnjs.cloudflare.com; img-src 'self' data: blob: https://cdnjs.cloudflare.com https://i.ibb.co; connect-src 'self'; frame-ancestors 'self'",
136
136
  });
137
137
 
138
138
  // Inject the filename, nonce, and key into the cached viewer
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "nodebb-plugin-pdf-secure2",
3
- "version": "1.3.0",
3
+ "version": "1.3.1",
4
4
  "description": "Secure PDF viewer plugin for NodeBB - prevents downloading, enables canvas-only rendering with Premium group support",
5
5
  "main": "library.js",
6
6
  "repository": {
Binary file