@realtimex/email-automator 2.2.1 → 2.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/api/server.ts +4 -2
- package/api/src/config/index.ts +3 -3
- package/bin/email-automator.js +4 -24
- package/dist/api/server.js +109 -0
- package/dist/api/src/config/index.js +88 -0
- package/dist/api/src/middleware/auth.js +119 -0
- package/dist/api/src/middleware/errorHandler.js +78 -0
- package/dist/api/src/middleware/index.js +4 -0
- package/dist/api/src/middleware/rateLimit.js +57 -0
- package/dist/api/src/middleware/validation.js +111 -0
- package/dist/api/src/routes/actions.js +173 -0
- package/dist/api/src/routes/auth.js +106 -0
- package/dist/api/src/routes/emails.js +100 -0
- package/dist/api/src/routes/health.js +33 -0
- package/dist/api/src/routes/index.js +19 -0
- package/dist/api/src/routes/migrate.js +61 -0
- package/dist/api/src/routes/rules.js +104 -0
- package/dist/api/src/routes/settings.js +178 -0
- package/dist/api/src/routes/sync.js +118 -0
- package/dist/api/src/services/eventLogger.js +41 -0
- package/dist/api/src/services/gmail.js +350 -0
- package/dist/api/src/services/intelligence.js +243 -0
- package/dist/api/src/services/microsoft.js +256 -0
- package/dist/api/src/services/processor.js +503 -0
- package/dist/api/src/services/scheduler.js +210 -0
- package/dist/api/src/services/supabase.js +59 -0
- package/dist/api/src/utils/contentCleaner.js +94 -0
- package/dist/api/src/utils/crypto.js +68 -0
- package/dist/api/src/utils/logger.js +119 -0
- package/package.json +5 -4
|
@@ -0,0 +1,210 @@
|
|
|
1
|
+
import { config } from '../config/index.js';
|
|
2
|
+
import { createLogger } from '../utils/logger.js';
|
|
3
|
+
import { EmailProcessorService } from './processor.js';
|
|
4
|
+
import { getServerSupabase } from './supabase.js';
|
|
5
|
+
const logger = createLogger('Scheduler');
|
|
6
|
+
class SyncScheduler {
    /** Registered jobs keyed by job id. */
    jobs = new Map();
    /** Supabase client, or null when the backend is not configured. */
    supabase = null;
    constructor() {
        this.supabase = getServerSupabase();
    }
    /**
     * Start the scheduler: registers the periodic global sync and the daily
     * cleanup job. No-ops (with a warning) when Supabase is not configured.
     */
    async start() {
        if (!this.supabase) {
            logger.warn('Supabase not configured, scheduler disabled');
            return;
        }
        logger.info('Starting sync scheduler');
        // Schedule periodic sync for all active accounts
        this.scheduleGlobalSync();
        // Schedule cleanup job
        this.scheduleCleanup();
    }
    /** Stop all timers and forget every registered job. */
    stop() {
        logger.info('Stopping sync scheduler');
        for (const job of this.jobs.values()) {
            if (job.timer) {
                clearInterval(job.timer);
            }
        }
        this.jobs.clear();
    }
    /**
     * Shared scaffolding for interval jobs: builds the job record, arms the
     * timer with overlap protection (a tick is skipped while the previous run
     * is still in flight), and registers the job in `this.jobs`.
     *
     * @param {object} spec
     * @param {string} spec.id - Unique job id (map key).
     * @param {string} spec.name - Human-readable job name.
     * @param {number} spec.interval - Tick interval in milliseconds.
     * @param {() => Promise<void>} spec.run - Async work to perform per tick.
     * @param {string} [spec.skipMessage] - Optional debug message logged when
     *   a tick is skipped because the previous run is still in progress.
     * @param {string} spec.failMessage - Error message logged when a run throws.
     */
    scheduleJob({ id, name, interval, run, skipMessage, failMessage }) {
        const job = {
            id,
            name,
            interval,
            lastRun: null,
            isRunning: false,
            timer: null,
        };
        job.timer = setInterval(async () => {
            if (job.isRunning) {
                if (skipMessage) {
                    logger.debug(skipMessage);
                }
                return;
            }
            job.isRunning = true;
            try {
                await run();
                job.lastRun = new Date();
            }
            catch (error) {
                logger.error(failMessage, error);
            }
            finally {
                job.isRunning = false;
            }
        }, interval);
        this.jobs.set(id, job);
    }
    /** Register the periodic global sync job. */
    scheduleGlobalSync() {
        const interval = config.processing.syncIntervalMs;
        this.scheduleJob({
            id: 'global-sync',
            name: 'Global Email Sync',
            interval,
            run: () => this.runGlobalSync(),
            skipMessage: 'Global sync already running, skipping',
            failMessage: 'Global sync failed',
        });
        logger.info(`Scheduled global sync every ${interval / 1000}s`);
    }
    /**
     * One pass of the global sync: fetch all active accounts, group them by
     * user, honour each user's sync-interval preference, and sync every
     * account through an EmailProcessorService.
     */
    async runGlobalSync() {
        if (!this.supabase)
            return;
        // Get all active accounts with their user settings
        const { data: accounts, error } = await this.supabase
            .from('email_accounts')
            .select(`
        id,
        user_id,
        provider,
        is_active
      `)
            .eq('is_active', true);
        if (error) {
            logger.error('Failed to fetch accounts for sync', error);
            return;
        }
        if (!accounts || accounts.length === 0) {
            logger.debug('No active accounts to sync');
            return;
        }
        logger.info(`Running global sync for ${accounts.length} accounts`);
        // Group by user to check their sync interval settings
        const userAccounts = new Map();
        for (const account of accounts) {
            const existing = userAccounts.get(account.user_id) || [];
            existing.push(account);
            userAccounts.set(account.user_id, existing);
        }
        // Process each user's accounts
        for (const [userId, userAccountList] of userAccounts) {
            if (await this.#wasRecentlySynced(userId)) {
                logger.debug(`Skipping sync for user ${userId}, last sync was recent`);
                continue;
            }
            // Sync each of this user's accounts; one failure must not abort the rest
            const processor = new EmailProcessorService(this.supabase);
            for (const account of userAccountList) {
                try {
                    await processor.syncAccount(account.id, userId);
                }
                catch (error) {
                    logger.error('Account sync failed', error, { accountId: account.id });
                }
            }
        }
    }
    /**
     * True when the user's most recent successful processing run is newer
     * than their configured sync interval (default 5 minutes).
     */
    async #wasRecentlySynced(userId) {
        // Check user's sync interval preference
        const { data: settings } = await this.supabase
            .from('user_settings')
            .select('sync_interval_minutes')
            .eq('user_id', userId)
            .single();
        const syncIntervalMs = (settings?.sync_interval_minutes || 5) * 60 * 1000;
        // Check last successful sync time
        const { data: lastLog } = await this.supabase
            .from('processing_logs')
            .select('started_at')
            .eq('user_id', userId)
            .eq('status', 'success')
            .order('started_at', { ascending: false })
            .limit(1)
            .single();
        if (!lastLog) {
            return false;
        }
        return Date.now() - new Date(lastLog.started_at).getTime() < syncIntervalMs;
    }
    /** Register the daily cleanup job. */
    scheduleCleanup() {
        this.scheduleJob({
            id: 'cleanup',
            name: 'Data Cleanup',
            interval: 24 * 60 * 60 * 1000, // Daily
            run: () => this.runCleanup(),
            failMessage: 'Cleanup failed',
        });
        logger.info('Scheduled daily cleanup');
    }
    /**
     * Daily maintenance: drops processing logs older than 30 days and emails
     * marked with the 'delete' action more than 7 days ago.
     */
    async runCleanup() {
        if (!this.supabase)
            return;
        logger.info('Running cleanup job');
        // Delete old processing logs (older than 30 days)
        const thirtyDaysAgo = new Date();
        thirtyDaysAgo.setDate(thirtyDaysAgo.getDate() - 30);
        const { error: logsError } = await this.supabase
            .from('processing_logs')
            .delete()
            .lt('started_at', thirtyDaysAgo.toISOString());
        if (logsError) {
            logger.error('Failed to cleanup old logs', logsError);
        }
        // Delete emails that were trashed more than 7 days ago
        const sevenDaysAgo = new Date();
        sevenDaysAgo.setDate(sevenDaysAgo.getDate() - 7);
        const { error: emailsError } = await this.supabase
            .from('emails')
            .delete()
            .eq('action_taken', 'delete')
            .lt('created_at', sevenDaysAgo.toISOString());
        if (emailsError) {
            logger.error('Failed to cleanup old emails', emailsError);
        }
        logger.info('Cleanup completed');
    }
    /** Status snapshot of every registered job (for health/diagnostics endpoints). */
    getJobStatus() {
        return Array.from(this.jobs.values()).map(job => ({
            id: job.id,
            name: job.name,
            lastRun: job.lastRun,
            isRunning: job.isRunning,
        }));
    }
}
|
|
195
|
+
// Module-level singleton: the scheduler is created lazily on first access.
let schedulerInstance = null;

/** Return the shared SyncScheduler, creating it on first call. */
export function getScheduler() {
    if (schedulerInstance === null) {
        schedulerInstance = new SyncScheduler();
    }
    return schedulerInstance;
}

/** Start the shared scheduler (fire-and-forget; start() handles its own errors). */
export function startScheduler() {
    void getScheduler().start();
}

/** Stop the shared scheduler if one was ever created. */
export function stopScheduler() {
    schedulerInstance?.stop();
}
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
import { createClient } from '@supabase/supabase-js';
|
|
2
|
+
import { config } from '../config/index.js';
|
|
3
|
+
import { createLogger } from '../utils/logger.js';
|
|
4
|
+
const logger = createLogger('SupabaseService');
|
|
5
|
+
let serverClient = null;
|
|
6
|
+
/**
 * Loose URL validation: accepts only values that start with an http(s) scheme.
 * Non-string inputs (null, undefined, numbers, ...) are treated as invalid
 * rather than throwing.
 * @param {unknown} url - Candidate URL.
 * @returns {boolean} True when the value looks like an absolute http(s) URL.
 */
export function isValidUrl(url) {
    try {
        const isHttp = url.startsWith('http://');
        const isHttps = url.startsWith('https://');
        return isHttp || isHttps;
    }
    catch {
        // .startsWith threw -> not a string -> not a valid URL.
        return false;
    }
}
|
|
14
|
+
/**
 * Lazily create (and memoize) the server-side Supabase client using the
 * anon key from config. Returns null when configuration is missing or
 * invalid, or when client construction throws.
 * @returns {object|null} Shared Supabase client, or null when unavailable.
 */
export function getServerSupabase() {
    // Reuse the client built on a previous call.
    if (serverClient) {
        return serverClient;
    }
    const { url, anonKey: key } = config.supabase;
    if (!url || !key || !isValidUrl(url)) {
        logger.warn('Supabase not configured or invalid URL - skipping client initialization', {
            url: url || 'missing'
        });
        return null;
    }
    try {
        // Server-side usage: no session persistence or token auto-refresh.
        serverClient = createClient(url, key, {
            auth: {
                autoRefreshToken: false,
                persistSession: false,
            },
        });
        logger.info('Server Supabase client initialized');
        return serverClient;
    }
    catch (error) {
        logger.error('Failed to initialize Supabase client', error);
        return null;
    }
}
|
|
40
|
+
/**
 * Build a Supabase client authenticated with the service-role key.
 * Unlike getServerSupabase, the result is NOT memoized — a fresh client is
 * created on every call. Returns null when configuration is missing/invalid
 * or construction throws.
 * @returns {object|null} Service-role Supabase client, or null when unavailable.
 */
export function getServiceRoleSupabase() {
    const { url, serviceRoleKey: key } = config.supabase;
    if (!url || !key || !isValidUrl(url)) {
        logger.warn('Service role Supabase not configured or invalid URL');
        return null;
    }
    try {
        // Server-side usage: no session persistence or token auto-refresh.
        return createClient(url, key, {
            auth: {
                autoRefreshToken: false,
                persistSession: false,
            },
        });
    }
    catch (error) {
        logger.error('Failed to initialize Service Role Supabase client', error);
        return null;
    }
}
|
|
@@ -0,0 +1,94 @@
|
|
|
1
|
+
export class ContentCleaner {
    /**
     * Cleans email body by removing noise, quoted replies, and footers.
     * Ported from Python ContentCleaner.
     *
     * Pipeline: lightweight HTML -> Markdown conversion, basic entity
     * decoding, quoted-reply/footer stripping, whitespace collapsing, and a
     * final pass that defuses LLM special tokens (prompt-injection hygiene).
     *
     * @param {string} text - Raw email body (HTML or plain text).
     * @returns {string} Cleaned plain-text/Markdown body.
     */
    static cleanEmailBody(text) {
        if (!text)
            return "";
        // 0. Lightweight HTML -> Markdown Conversion
        // Structure: <br>, <p> -> Newlines
        // (fixed: the previous pattern /<br\s*\/?\?>/ required a literal '?'
        // before '>', so ordinary <br>, <br/> and <br /> tags never matched)
        text = text.replace(/<br\s*\/?>/gi, '\n');
        text = text.replace(/<\/p>/gi, '\n\n');
        text = text.replace(/<p.*?>/gi, ''); // Open p tags just gone
        // Structure: Headers <h1>-<h6> -> # Title
        text = text.replace(/<h[1-6].*?>(.*?)<\/h[1-6]>/gsi, (match, p1) => `\n# ${p1}\n`);
        // Structure: Lists <li> -> - Item
        text = text.replace(/<li.*?>(.*?)<\/li>/gsi, (match, p1) => `\n- ${p1}`);
        text = text.replace(/<ul.*?>/gi, '');
        text = text.replace(/<\/ul>/gi, '\n');
        // Links: <a href="...">text</a> -> [text](href)
        text = text.replace(/<a\s+(?:[^>]*?\s+)?href="([^"]*)"[^>]*>(.*?)<\/a>/gsi, (match, href, content) => `[${content}](${href})`);
        // Images: <img src="..." alt="..."> -> ![alt](src)
        text = text.replace(/<img\s+(?:[^>]*?\s+)?src="([^"]*)"(?:[^>]*?\s+)?alt="([^"]*)"[^>]*>/gsi, (match, src, alt) => `![${alt}](${src})`);
        // Style/Script removal (strictly remove content)
        text = text.replace(/<script.*?>.*?<\/script>/gsi, '');
        text = text.replace(/<style.*?>.*?<\/style>/gsi, '');
        // Final Strip of remaining tags
        text = text.replace(/<[^>]+>/g, ' ');
        // Entity decoding (Basic)
        // (fixed: the replacements previously matched the already-decoded
        // characters, making them no-ops; they now decode the HTML entities)
        text = text.replace(/&nbsp;/gi, ' ');
        text = text.replace(/&amp;/gi, '&');
        text = text.replace(/&lt;/gi, '<');
        text = text.replace(/&gt;/gi, '>');
        text = text.replace(/&quot;/gi, '"');
        text = text.replace(/&#39;/gi, "'");
        const lines = text.split('\n');
        const cleanedLines = [];
        // Heuristics for reply headers.
        // NOTE(review): only the first pattern ("On ... wrote:") actually
        // triggers truncation below; the others are documented candidates
        // carried over from the Python port — confirm before enabling them.
        const replyHeaderPatterns = [
            /^On .* wrote:$/i,
            /^From: .*$/i,
            /^Sent: .*$/i,
            /^To: .*$/i,
            /^Subject: .*$/i
        ];
        // Heuristics for footers
        const footerPatterns = [
            /unsubscribe/i,
            /privacy policy/i,
            /terms of service/i,
            /view in browser/i,
            /copyright \d{4}/i
        ];
        for (let line of lines) {
            let lineStripped = line.trim();
            // 2. Quoted text removal (lines starting with >)
            if (lineStripped.startsWith('>')) {
                continue;
            }
            // 3. Check for specific reply separators
            // If we hit a reply header, we truncate the rest (Aggressive strategy per Python code)
            if (/^On .* wrote:$/i.test(lineStripped)) {
                break;
            }
            // 4. Footer removal (simple check on short lines)
            if (lineStripped.length < 100) {
                let isFooter = false;
                for (const pattern of footerPatterns) {
                    if (pattern.test(lineStripped)) {
                        isFooter = true;
                        break;
                    }
                }
                if (isFooter) {
                    continue;
                }
            }
            cleanedLines.push(line);
        }
        // Reassemble
        text = cleanedLines.join('\n');
        // Collapse multiple newlines
        text = text.replace(/\n{3,}/g, '\n\n');
        // Sanitize LLM Special Tokens (Prevent Prompt Injection/Confusion)
        // Break sequences like <|channel|>, [INST], <s>
        text = text.replace(/<\|/g, '< |');
        text = text.replace(/\|>/g, '| >');
        text = text.replace(/\[INST\]/gi, '[ INST ]');
        text = text.replace(/\[\/INST\]/gi, '[ /INST ]');
        // Re-escape <s>/</s> sentinel tokens as entities so downstream LLMs
        // never see the raw token (these can only appear here via entity
        // decoding above, since literal tags were already stripped).
        text = text.replace(/<s>/gi, '&lt;s&gt;');
        text = text.replace(/<\/s>/gi, '&lt;/s&gt;');
        return text.trim();
    }
}
|
|
@@ -0,0 +1,68 @@
|
|
|
1
|
+
import { createCipheriv, createDecipheriv, randomBytes, scryptSync } from 'crypto';
|
|
2
|
+
import { config } from '../config/index.js';
|
|
3
|
+
// Edge Functions compatible encryption (matching supabase/functions/_shared/encryption.ts)
|
|
4
|
+
const ALGORITHM = 'aes-256-gcm';
|
|
5
|
+
const IV_LENGTH = 12; // Match Edge Functions
|
|
6
|
+
const KEY_LENGTH = 32;
|
|
7
|
+
/**
 * Derive the 32-byte AES key from the configured secret.
 * Matches the Edge Functions derivation: the secret is right-padded with
 * '0' to 32 characters, truncated to 32, and used as raw UTF-8 bytes.
 * NOTE(review): this is a deliberately weak derivation (no salt/stretching),
 * kept for compatibility with supabase/functions/_shared/encryption.ts.
 * @returns {Buffer} 32-byte key for aes-256-gcm.
 */
function getKey() {
    const secret = config.security.encryptionKey || 'dev-key-not-secure';
    const normalized = secret.padEnd(32, '0').slice(0, 32);
    return Buffer.from(normalized, 'utf8');
}
|
|
12
|
+
/**
 * Encrypt a token with AES-256-GCM in the Edge Functions wire format:
 * base64(iv || ciphertext || authTag), with a random 12-byte IV.
 * @param {string} plaintext - Token to encrypt; falsy input yields ''.
 * @returns {string} Base64-encoded encrypted payload.
 */
export function encryptToken(plaintext) {
    if (!plaintext)
        return '';
    const key = getKey();
    const iv = randomBytes(IV_LENGTH);
    const cipher = createCipheriv(ALGORITHM, key, iv);
    const ciphertext = Buffer.concat([cipher.update(plaintext, 'utf8'), cipher.final()]);
    // Format: base64(iv + ciphertext + tag) - compatible with Edge Functions
    const payload = Buffer.concat([iv, ciphertext, cipher.getAuthTag()]);
    return payload.toString('base64');
}
|
|
24
|
+
/**
 * Decrypt a token, accepting three forms in order of preference:
 *  1. Edge Functions format: base64(iv || ciphertext || authTag)
 *  2. Legacy format: "salt:iv:tag:data" (each part base64, scrypt-derived key)
 *  3. Anything else is returned unchanged — assumed to be a plaintext token
 *     awaiting migration.
 * @param {string} encrypted - Stored token value; falsy input yields ''.
 * @returns {string} Decrypted token, or the input itself when undecryptable.
 */
export function decryptToken(encrypted) {
    if (!encrypted)
        return '';
    try {
        // Try Edge Functions format first: base64(iv + ciphertext + tag)
        const raw = Buffer.from(encrypted, 'base64');
        if (raw.length < IV_LENGTH + 16) {
            // Too short to hold an IV plus a 16-byte GCM tag - might be plaintext
            return encrypted;
        }
        const iv = raw.subarray(0, IV_LENGTH);
        const body = raw.subarray(IV_LENGTH, raw.length - 16);
        const tag = raw.subarray(raw.length - 16);
        const decipher = createDecipheriv(ALGORITHM, getKey(), iv);
        decipher.setAuthTag(tag);
        return Buffer.concat([decipher.update(body), decipher.final()]).toString('utf8');
    }
    catch (error) {
        // Try legacy format: salt:iv:tag:encrypted
        try {
            const parts = encrypted.split(':');
            if (parts.length === 4) {
                const [salt, iv, tag, data] = parts.map((part) => Buffer.from(part, 'base64'));
                const secret = config.security.encryptionKey || 'dev-key-not-secure';
                const key = scryptSync(secret, salt, KEY_LENGTH);
                const decipher = createDecipheriv(ALGORITHM, key, iv);
                decipher.setAuthTag(tag);
                return Buffer.concat([decipher.update(data), decipher.final()]).toString('utf8');
            }
        }
        catch {
            // Fall through to plaintext
        }
        // If all decryption fails, assume plaintext (for migration)
        return encrypted;
    }
}
|
|
66
|
+
/**
 * Generate a cryptographically secure random token.
 * @param {number} [length=32] - Number of random bytes; the returned hex
 *   string is twice this long.
 * @returns {string} Hex-encoded random token.
 */
export function generateSecureToken(length = 32) {
    const bytes = randomBytes(length);
    return bytes.toString('hex');
}
|
|
@@ -0,0 +1,119 @@
|
|
|
1
|
+
import { config } from '../config/index.js';
|
|
2
|
+
const LOG_COLORS = {
|
|
3
|
+
debug: '\x1b[36m', // cyan
|
|
4
|
+
info: '\x1b[32m', // green
|
|
5
|
+
warn: '\x1b[33m', // yellow
|
|
6
|
+
error: '\x1b[31m', // red
|
|
7
|
+
reset: '\x1b[0m',
|
|
8
|
+
};
|
|
9
|
+
const LOG_LEVELS = {
|
|
10
|
+
debug: 0,
|
|
11
|
+
info: 1,
|
|
12
|
+
warn: 2,
|
|
13
|
+
error: 3,
|
|
14
|
+
};
|
|
15
|
+
export class Logger {
    // Minimum level this instance emits: 'info' in production, 'debug' otherwise.
    minLevel;
    // Optional context label shown in every message, e.g. "Scheduler".
    context;
    // Shared Supabase client used to persist warn/error logs (null = disabled).
    static supabaseClient = null;
    // User id attached to persisted log rows (null for system-level logs).
    static currentUserId = null;
    constructor(context) {
        this.minLevel = config.isProduction ? 'info' : 'debug';
        this.context = context;
    }
    /**
     * Set the Supabase client and current user ID for DB persistence.
     * This is called by the auth middleware or server initialization.
     */
    static setPersistence(client, userId = null) {
        Logger.supabaseClient = client;
        Logger.currentUserId = userId;
    }
    /** True when `level` is at or above this logger's minimum level. */
    shouldLog(level) {
        return LOG_LEVELS[level] >= LOG_LEVELS[this.minLevel];
    }
    /**
     * Persist a log entry to the system_logs table. Only warn/error levels
     * are stored (to prevent bloat) unless meta._persist === true. Failures
     * are swallowed after a console.error to avoid recursive logging.
     */
    async saveToSupabase(level, message, meta) {
        const persistLevels = ['warn', 'error'];
        const shouldPersist = persistLevels.includes(level) || meta?._persist === true;
        if (shouldPersist && Logger.supabaseClient) {
            try {
                // Remove internal flags from meta before saving
                const { _persist, ...cleanMeta } = meta || {};
                await Logger.supabaseClient.from('system_logs').insert({
                    user_id: Logger.currentUserId,
                    level,
                    source: this.context || 'System',
                    message,
                    metadata: cleanMeta,
                    created_at: new Date().toISOString()
                });
            }
            catch (err) {
                // Fail silently to avoid infinite loops if logging fails
                console.error('[Logger] Failed to persist log to Supabase:', err);
            }
        }
    }
    /**
     * Render a log line: structured JSON in production, an ANSI-colored
     * human-readable line otherwise. The internal _persist flag is stripped
     * from meta before rendering.
     */
    formatMessage(level, message, meta) {
        const timestamp = new Date().toISOString();
        const contextStr = this.context ? `[${this.context}]` : '';
        const { _persist, ...cleanMeta } = meta || {};
        const metaStr = Object.keys(cleanMeta).length > 0 ? ` ${JSON.stringify(cleanMeta)}` : '';
        if (config.isProduction) {
            return JSON.stringify({
                timestamp,
                level,
                context: this.context,
                message,
                ...cleanMeta,
            });
        }
        const color = LOG_COLORS[level];
        const reset = LOG_COLORS.reset;
        return `${timestamp} ${color}${level.toUpperCase().padEnd(5)}${reset} ${contextStr} ${message}${metaStr}`;
    }
    /**
     * Shared emit path for all levels: level-gate, write to the matching
     * console method, and kick off DB persistence without blocking.
     * (Replaces the identical logic previously duplicated in
     * debug/info/warn/error.)
     */
    #emit(level, message, meta) {
        if (!this.shouldLog(level)) {
            return;
        }
        // console.debug/info/warn/error line up 1:1 with our level names.
        console[level](this.formatMessage(level, message, meta));
        // Intentionally not awaited: persistence must never block logging,
        // and saveToSupabase catches its own errors.
        void this.saveToSupabase(level, message, meta);
    }
    debug(message, meta) {
        this.#emit('debug', message, meta);
    }
    info(message, meta) {
        this.#emit('info', message, meta);
    }
    warn(message, meta) {
        this.#emit('warn', message, meta);
    }
    /**
     * Log an error. `error` may be an Error instance (name/message/stack are
     * extracted into meta) or any other truthy value (attached as meta.error).
     */
    error(message, error, meta) {
        const errorMeta = { ...meta };
        if (error instanceof Error) {
            errorMeta.errorName = error.name;
            errorMeta.errorMessage = error.message;
            errorMeta.stack = error.stack;
        }
        else if (error) {
            errorMeta.error = error;
        }
        this.#emit('error', message, errorMeta);
    }
    /** Create a nested logger whose context is "parent:child". */
    child(context) {
        return new Logger(this.context ? `${this.context}:${context}` : context);
    }
}
|
|
114
|
+
// Default logger instance (no context label; messages carry only the timestamp/level)
export const logger = new Logger();
// Factory for creating contextual loggers
// e.g. createLogger('Scheduler') prefixes every line with [Scheduler]
export function createLogger(context) {
    return new Logger(context);
}
|
package/package.json
CHANGED
|
@@ -1,8 +1,8 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@realtimex/email-automator",
|
|
3
|
-
"version": "2.
|
|
3
|
+
"version": "2.3.0",
|
|
4
4
|
"type": "module",
|
|
5
|
-
"main": "api/server.
|
|
5
|
+
"main": "dist/api/server.js",
|
|
6
6
|
"bin": {
|
|
7
7
|
"email-automator": "./bin/email-automator.js",
|
|
8
8
|
"email-automator-setup": "./bin/email-automator-setup.js",
|
|
@@ -30,10 +30,11 @@
|
|
|
30
30
|
"scripts": {
|
|
31
31
|
"dev": "vite",
|
|
32
32
|
"dev:api": "tsx watch api/server.ts",
|
|
33
|
-
"build": "
|
|
33
|
+
"build": "npm run build:ui && npm run build:api",
|
|
34
|
+
"build:ui": "vite build",
|
|
34
35
|
"build:api": "tsc -p tsconfig.api.json",
|
|
35
36
|
"preview": "vite preview",
|
|
36
|
-
"serve": "
|
|
37
|
+
"serve": "node dist/api/server.js",
|
|
37
38
|
"start": "node dist/api/server.js",
|
|
38
39
|
"test": "vitest",
|
|
39
40
|
"test:run": "vitest run",
|