teleton 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,8 @@
1
+ import {
2
+ telegramGetMyGiftsExecutor,
3
+ telegramGetMyGiftsTool
4
+ } from "./chunk-XBGUNXF2.js";
5
+ export {
6
+ telegramGetMyGiftsExecutor,
7
+ telegramGetMyGiftsTool
8
+ };
package/dist/index.js ADDED
@@ -0,0 +1,12 @@
1
+ import {
2
+ TonnetApp,
3
+ main
4
+ } from "./chunk-WXVHT6CI.js";
5
+ import "./chunk-XBGUNXF2.js";
6
+ import "./chunk-WDUHRPGA.js";
7
+ import "./chunk-UR2LQEKR.js";
8
+ import "./chunk-7NJ46ZIX.js";
9
+ export {
10
+ TonnetApp,
11
+ main
12
+ };
@@ -0,0 +1,60 @@
1
+ import {
2
+ AnthropicEmbeddingProvider,
3
+ CURRENT_SCHEMA_VERSION,
4
+ ChatStore,
5
+ ContextBuilder,
6
+ HybridSearch,
7
+ KnowledgeIndexer,
8
+ LocalEmbeddingProvider,
9
+ MemoryDatabase,
10
+ MessageStore,
11
+ NoopEmbeddingProvider,
12
+ SessionStore,
13
+ UserStore,
14
+ closeDatabase,
15
+ createEmbeddingProvider,
16
+ deserializeEmbedding,
17
+ embeddingToBlob,
18
+ ensureSchema,
19
+ ensureVectorTables,
20
+ getDatabase,
21
+ getSchemaVersion,
22
+ hashText,
23
+ initializeMemory,
24
+ runMigrations,
25
+ serializeEmbedding,
26
+ setSchemaVersion
27
+ } from "./chunk-WDUHRPGA.js";
28
+ import {
29
+ TaskStore,
30
+ getTaskStore
31
+ } from "./chunk-UR2LQEKR.js";
32
+ export {
33
+ AnthropicEmbeddingProvider,
34
+ CURRENT_SCHEMA_VERSION,
35
+ ChatStore,
36
+ ContextBuilder,
37
+ HybridSearch,
38
+ KnowledgeIndexer,
39
+ LocalEmbeddingProvider,
40
+ MemoryDatabase,
41
+ MessageStore,
42
+ NoopEmbeddingProvider,
43
+ SessionStore,
44
+ TaskStore,
45
+ UserStore,
46
+ closeDatabase,
47
+ createEmbeddingProvider,
48
+ deserializeEmbedding,
49
+ embeddingToBlob,
50
+ ensureSchema,
51
+ ensureVectorTables,
52
+ getDatabase,
53
+ getSchemaVersion,
54
+ getTaskStore,
55
+ hashText,
56
+ initializeMemory,
57
+ runMigrations,
58
+ serializeEmbedding,
59
+ setSchemaVersion
60
+ };
@@ -0,0 +1,59 @@
1
+ import {
2
+ getDatabase
3
+ } from "./chunk-WDUHRPGA.js";
4
+ import "./chunk-UR2LQEKR.js";
5
+ import {
6
+ TELETON_ROOT
7
+ } from "./chunk-7NJ46ZIX.js";
8
+
9
// src/session/migrate.ts
import { readFileSync, existsSync, renameSync } from "fs";
import { join } from "path";
var SESSIONS_JSON = join(TELETON_ROOT, "sessions.json");
var SESSIONS_JSON_BACKUP = join(TELETON_ROOT, "sessions.json.backup");
/**
 * One-shot migration of the legacy sessions.json store into the SQLite
 * `sessions` table.
 *
 * All rows are inserted inside a single transaction, so the JSON file is
 * only renamed to its backup name after every session has landed; on any
 * failure the file is left untouched and a later run can retry.
 *
 * @returns {number} number of sessions migrated (0 when there is nothing
 *   to migrate or the migration failed)
 */
function migrateSessionsToDb() {
  if (!existsSync(SESSIONS_JSON)) {
    return 0;
  }
  try {
    console.log("\u{1F504} Migrating sessions from JSON to SQLite...");
    const raw = readFileSync(SESSIONS_JSON, "utf-8");
    const store = JSON.parse(raw);
    const db = getDatabase().getDb();
    const insertStmt = db.prepare(`
      INSERT OR REPLACE INTO sessions (
        id, chat_id, started_at, updated_at, message_count,
        last_message_id, last_channel, last_to, context_tokens,
        model, provider, last_reset_date
      ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
    `);
    // Single transaction: atomic (all-or-nothing) and far faster than
    // one implicit commit per row.
    const insertAll = db.transaction((entries) => {
      let count = 0;
      for (const [chatId, session] of entries) {
        insertStmt.run(
          session.sessionId,
          chatId,
          session.createdAt,
          session.updatedAt,
          session.messageCount || 0,
          session.lastMessageId || null,
          session.lastChannel || null,
          session.lastTo || null,
          session.contextTokens || null,
          session.model || null,
          session.provider || null,
          session.lastResetDate || null
        );
        count++;
      }
      return count;
    });
    const migrated = insertAll(Object.entries(store));
    // Keep the old file as a backup rather than deleting it.
    renameSync(SESSIONS_JSON, SESSIONS_JSON_BACKUP);
    console.log(`\u2705 Migrated ${migrated} sessions to SQLite`);
    console.log(` Backup saved: ${SESSIONS_JSON_BACKUP}`);
    return migrated;
  } catch (error) {
    // Best-effort: log and report 0 so startup continues.
    console.error("\u274C Failed to migrate sessions:", error);
    return 0;
  }
}
export {
  migrateSessionsToDb
};
@@ -0,0 +1,14 @@
1
+ import {
2
+ ALLOWED_EXTENSIONS,
3
+ MAX_FILE_SIZES,
4
+ TELETON_ROOT,
5
+ WORKSPACE_PATHS,
6
+ WORKSPACE_ROOT
7
+ } from "./chunk-7NJ46ZIX.js";
8
+ export {
9
+ ALLOWED_EXTENSIONS,
10
+ MAX_FILE_SIZES,
11
+ TELETON_ROOT,
12
+ WORKSPACE_PATHS,
13
+ WORKSPACE_ROOT
14
+ };
@@ -0,0 +1,377 @@
1
+ import {
2
+ TELETON_ROOT
3
+ } from "./chunk-7NJ46ZIX.js";
4
+
5
+ // src/market/scraper.ts
6
+ import { chromium } from "playwright";
7
+
8
// src/market/scraper-db.ts
import Database from "better-sqlite3";
import { join } from "path";

// Single on-disk database holding all scraped gift-market data.
var DB_PATH = join(TELETON_ROOT, "gifts.db");

/**
 * Open (creating if needed) the gifts database and ensure the schema
 * exists. Idempotent: every table and index uses IF NOT EXISTS.
 * @returns an open better-sqlite3 Database handle
 */
function initScraperDb() {
  const db = new Database(DB_PATH);
  // WAL mode: readers are not blocked by the scraper's frequent writes.
  db.pragma("journal_mode = WAL");
  db.exec(`
    -- Collections (Plush Pepes, Heart Lockets, etc.)
    CREATE TABLE IF NOT EXISTS gift_collections (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      address TEXT UNIQUE NOT NULL,
      name TEXT NOT NULL,
      floor_ton REAL,
      floor_usd REAL,
      volume_7d REAL,
      listed_count INTEGER,
      owners INTEGER,
      supply INTEGER,
      created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
    );

    -- Mod\xE8les par collection (Cozy Galaxy, Milano, etc.)
    CREATE TABLE IF NOT EXISTS gift_models (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      collection_id INTEGER NOT NULL,
      name TEXT NOT NULL,
      floor_ton REAL,
      rarity_percent REAL,
      count INTEGER,
      created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      FOREIGN KEY (collection_id) REFERENCES gift_collections(id),
      UNIQUE(collection_id, name)
    );

    -- Historique des prix (pour trends)
    CREATE TABLE IF NOT EXISTS price_history (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      collection_id INTEGER,
      model_id INTEGER,
      floor_ton REAL NOT NULL,
      floor_usd REAL,
      timestamp DATETIME DEFAULT CURRENT_TIMESTAMP,
      FOREIGN KEY (collection_id) REFERENCES gift_collections(id),
      FOREIGN KEY (model_id) REFERENCES gift_models(id)
    );

    -- Index pour les requ\xEAtes fr\xE9quentes
    CREATE INDEX IF NOT EXISTS idx_price_history_collection ON price_history(collection_id, timestamp);
    CREATE INDEX IF NOT EXISTS idx_price_history_model ON price_history(model_id, timestamp);
    CREATE INDEX IF NOT EXISTS idx_models_collection ON gift_models(collection_id);
  `);
  return db;
}
64
/**
 * Insert a collection row or refresh an existing one, keyed by its
 * marketplace address. Missing numeric fields are stored as NULL.
 * @param db better-sqlite3 handle
 * @param collection { address, name, floorTON?, floorUSD?, volume7d? }
 * @returns {number} the row id of the (possibly pre-existing) collection
 */
function upsertCollection(db, collection) {
  const { address, name, floorTON, floorUSD, volume7d } = collection;
  const row = db.prepare(`
    INSERT INTO gift_collections (address, name, floor_ton, floor_usd, volume_7d, updated_at)
    VALUES (@address, @name, @floor_ton, @floor_usd, @volume_7d, CURRENT_TIMESTAMP)
    ON CONFLICT(address) DO UPDATE SET
      name = @name,
      floor_ton = @floor_ton,
      floor_usd = @floor_usd,
      volume_7d = @volume_7d,
      updated_at = CURRENT_TIMESTAMP
    RETURNING id
  `).get({
    address,
    name,
    floor_ton: floorTON || null,
    floor_usd: floorUSD || null,
    volume_7d: volume7d || null
  });
  return row.id;
}
85
/**
 * Insert a model row or refresh an existing one (unique per
 * collection+name). Rarity strings like "1.2%" are parsed to their
 * leading numeric value.
 * @returns {number} the row id of the model
 */
function upsertModel(db, collectionId, model) {
  const params = {
    collection_id: collectionId,
    name: model.name,
    floor_ton: model.floor || null,
    rarity_percent: model.pct ? parseFloat(model.pct) : null,
    count: model.count || null
  };
  const row = db.prepare(`
    INSERT INTO gift_models (collection_id, name, floor_ton, rarity_percent, count, updated_at)
    VALUES (@collection_id, @name, @floor_ton, @rarity_percent, @count, CURRENT_TIMESTAMP)
    ON CONFLICT(collection_id, name) DO UPDATE SET
      floor_ton = @floor_ton,
      rarity_percent = @rarity_percent,
      count = @count,
      updated_at = CURRENT_TIMESTAMP
    RETURNING id
  `).get(params);
  return row.id;
}
105
/**
 * Append one price-history sample.
 * @param collectionId collection the sample belongs to
 * @param modelId model-level sample, or null for collection-level
 * @param floorTon floor price in TON (required)
 * @param floorUsd floor price in USD, null when unknown
 */
function addPriceHistory(db, collectionId, modelId, floorTon, floorUsd = null) {
  db.prepare(`
    INSERT INTO price_history (collection_id, model_id, floor_ton, floor_usd)
    VALUES (?, ?, ?, ?)
  `).run(collectionId, modelId, floorTon, floorUsd);
}
112
/**
 * Summarize the scraper database: row counts for each table plus the
 * most recent collection update timestamp.
 * @returns {{collections: number, models: number, historyEntries: number, lastUpdate: string|null}}
 */
function getScraperStats(db) {
  const countRows = (table) => db.prepare(`SELECT COUNT(*) as count FROM ${table}`).get().count;
  const { last } = db.prepare("SELECT MAX(updated_at) as last FROM gift_collections").get();
  return {
    collections: countRows("gift_collections"),
    models: countRows("gift_models"),
    historyEntries: countRows("price_history"),
    lastUpdate: last
  };
}
124
+
125
// src/market/scraper.ts
var BASE_URL = "https://marketapp.ws";
var PARALLEL_WORKERS = 4;
/**
 * Scrape every model of one collection from its marketapp page.
 *
 * Opens the collection's NFT tab, expands the Filters > Model panel, then
 * scrolls the panel's virtual list while parsing the visible text into
 * model entries (name, floor, count, rarity %). Parsed models are
 * upserted into the DB, with a price-history sample for each model that
 * has a floor price.
 *
 * @param page Playwright page reused by the calling worker
 * @param collection { address, name }
 * @param db better-sqlite3 handle
 * @returns {Promise<number>} number of models stored; 0 when the Filters
 *   or Model UI is absent for this collection; -1 on any other error.
 */
async function scrapeAllModels(page, collection, db) {
  try {
    const url = `${BASE_URL}/collection/${collection.address}/?tab=nfts`;
    await page.goto(url, { waitUntil: "domcontentloaded", timeout: 3e4 });
    await page.waitForTimeout(2500);
    try {
      await page.click('button:has-text("Filters")', { timeout: 3e3 });
      await page.waitForTimeout(600);
    } catch (e) {
      // No Filters button => nothing to scrape on this page.
      return 0;
    }
    try {
      await page.click("text=Model", { timeout: 2e3 });
      await page.waitForTimeout(800);
    } catch (e) {
      // Collection has no per-model filter.
      return 0;
    }
    // Keyed by model name so rows seen again while scrolling overwrite
    // their earlier (possibly partial) entry.
    const allModels = /* @__PURE__ */ new Map();
    const wrapperHeight = await page.evaluate(() => {
      const wrappers = document.querySelectorAll(".virtual-scroll-wrapper");
      const wrapper = wrappers[1];
      return wrapper ? wrapper.scrollHeight : 0;
    });
    // Step through the virtual list; each step renders a new row window.
    for (let scrollPos = 0; scrollPos <= wrapperHeight + 500; scrollPos += 250) {
      const text = await page.evaluate((pos) => {
        const wrappers = document.querySelectorAll(".virtual-scroll-wrapper");
        const wrapper = wrappers[1];
        if (wrapper) {
          wrapper.scrollTop = pos;
          return wrapper.innerText;
        }
        return "";
      }, scrollPos);
      if (text) {
        const lines = text.split("\n").map((l) => l.trim()).filter((l) => l);
        let currentModel = null;
        for (const line of lines) {
          // Heuristic for a model name: short, not a bare number, not a
          // "Floor:" row and not the rarity percentage line.
          if (line.length > 1 && line.length < 50 && !line.match(/^[\d,.]+$/) && !line.startsWith("Floor:") && !line.includes("%")) {
            currentModel = { name: line, floor: null, count: null, pct: null };
          }
          if (line.startsWith("Floor:") && currentModel) {
            const match = line.match(/Floor:\s*([\d,.]+)/);
            if (match) currentModel.floor = parseFloat(match[1].replace(/,/g, ""));
          }
          if (currentModel && line.match(/^\d+$/) && !currentModel.count) {
            // Explicit radix: never rely on parseInt's default.
            currentModel.count = Number.parseInt(line, 10);
          }
          if (line.includes("%") && currentModel) {
            currentModel.pct = line;
            // The % row closes a model entry; keep only complete ones.
            if (currentModel.name && currentModel.floor) {
              allModels.set(currentModel.name, { ...currentModel });
            }
            currentModel = null;
          }
        }
      }
      await page.waitForTimeout(80);
    }
    const models = [...allModels.values()];
    const collectionId = upsertCollection(db, {
      address: collection.address,
      name: collection.name
    });
    for (const model of models) {
      const modelId = upsertModel(db, collectionId, model);
      if (model.floor) {
        addPriceHistory(db, collectionId, modelId, model.floor);
      }
    }
    return models.length;
  } catch (error) {
    // Sentinel for "page errored"; the caller renders it as a failure mark.
    return -1;
  }
}
202
/**
 * Build one scraping worker: a dedicated browser context + page bound to
 * the shared database. `scrape` processes a single collection via
 * scrapeAllModels; `close` disposes of the context (and its page).
 */
async function createWorker(browser, db) {
  const ctx = await browser.newContext({
    userAgent: "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36",
    viewport: { width: 1920, height: 1080 }
  });
  const workerPage = await ctx.newPage();
  const scrape = (collection) => scrapeAllModels(workerPage, collection, db);
  const close = () => ctx.close();
  return { page: workerPage, scrape, close };
}
218
/**
 * Scrape the marketapp front page for the gift-collection listing
 * (name, TON/USD floor, 7-day volume, collection address). Each resolved
 * collection is upserted and gets a collection-level price-history
 * sample. Collections whose address cannot be resolved are dropped.
 * @returns {Promise<Array>} collections with a non-null address
 */
async function getCollections(page, db) {
  await page.goto(`${BASE_URL}/?tab=gifts&sort_by=floor_desc`, {
    waitUntil: "domcontentloaded",
    timeout: 6e4
  });
  await page.waitForTimeout(4e3);
  // Force the lazily-rendered list to materialize by scrolling in chunks.
  let pass = 0;
  while (pass < 15) {
    await page.evaluate(() => window.scrollBy(0, 2e3));
    await page.waitForTimeout(200);
    pass++;
  }
  const collections = await page.evaluate(() => {
    const results = [];
    const lines = document.body.innerText.split("\n").map((l) => l.trim()).filter((l) => l);
    for (let i = 0; i < lines.length; i++) {
      // Each collection row ends with a "1% fee" marker; the name is the
      // line immediately before it.
      if (lines[i] !== "1% fee" || i === 0) continue;
      const name = lines[i - 1];
      if (name.length < 3 || name.length > 40 || name === "Name") continue;
      let floorTON = null;
      let floorUSD = null;
      let volume7d = null;
      let skipNext = 0;
      for (let j = i + 1; j < Math.min(i + 12, lines.length); j++) {
        const val = lines[j];
        // First bare number after the marker is the TON floor.
        if (floorTON === null && val.match(/^[\d,.]+$/)) {
          floorTON = parseFloat(val.replace(/,/g, ""));
          continue;
        }
        // "~$…" right after the floor is its USD equivalent.
        if (floorTON !== null && floorUSD === null && val.startsWith("~$")) {
          floorUSD = parseFloat(val.replace("~$", "").replace(/,/g, ""));
          skipNext = 2;
          continue;
        }
        // Skip the secondary price pair that follows the USD floor.
        if (skipNext > 0 && (val.match(/^[\d,.]+$/) || val.startsWith("~$"))) {
          skipNext--;
          continue;
        }
        // Next numeric token (with optional K/M suffix) is 7d volume.
        if (floorUSD !== null && volume7d === null && skipNext === 0) {
          const volMatch = val.match(/^([\d,.]+)(K|M)?$/);
          if (volMatch) {
            let vol = parseFloat(volMatch[1].replace(/,/g, ""));
            if (volMatch[2] === "K") vol *= 1e3;
            if (volMatch[2] === "M") vol *= 1e6;
            volume7d = vol;
            break;
          }
        }
        if (val === "1% fee") break;
      }
      if (name && floorTON) {
        results.push({ name, floorTON, floorUSD, volume7d, address: null });
      }
    }
    // Map visible names to collection addresses via anchor hrefs.
    const addressMap = /* @__PURE__ */ new Map();
    for (const link of document.querySelectorAll('a[href*="/collection/"]')) {
      const href = link.getAttribute("href");
      if (!href) continue;
      const match = href.match(/\/collection\/([^/?]+)/);
      if (!match) continue;
      const label = link.textContent?.trim().split("\n")[0];
      if (label && label.length > 2) addressMap.set(label, match[1]);
    }
    // Exact name match first, then a fuzzy prefix match as fallback.
    return results.map((r) => ({
      ...r,
      address: addressMap.get(r.name) || [...addressMap.entries()].find(
        ([k]) => k.toLowerCase().includes(r.name.toLowerCase().slice(0, 10))
      )?.[1] || null
    })).filter((r) => r.address);
  });
  for (const col of collections) {
    const collectionId = upsertCollection(db, col);
    addPriceHistory(db, collectionId, null, col.floorTON, col.floorUSD);
  }
  return collections;
}
296
/**
 * Full market scrape: discover all gift collections, then scrape every
 * model of each collection using a pool of parallel browser workers
 * pulling from a shared queue.
 *
 * @param {{workers?: number, limit?: number}} options pool size (default
 *   PARALLEL_WORKERS) and optional cap on collections processed
 * @returns {Promise<{success: boolean, collections: number, models: number,
 *   duration: number, error?: string}>} aggregate stats; never throws —
 *   failures come back as `{ success: false, error }`.
 */
async function runScraper(options) {
  const workers = options.workers || PARALLEL_WORKERS;
  const limit = options.limit || 0;
  console.log("=".repeat(60));
  console.log(`SCRAPER TOUS LES MOD\xC8LES (${workers} workers)`);
  console.log("=".repeat(60));
  const db = initScraperDb();
  const startTime = Date.now();
  // Rounded seconds since start — used by BOTH result branches (the
  // success path previously truncated via parseInt on a string while the
  // error path rounded; now they agree).
  const elapsedSeconds = () => Math.round((Date.now() - startTime) / 1e3);
  let browser = null;
  try {
    browser = await chromium.launch({ headless: true });
    console.log("\n1. Collections...");
    const mainCtx = await browser.newContext({
      userAgent: "Mozilla/5.0",
      viewport: { width: 1920, height: 1080 }
    });
    const mainPage = await mainCtx.newPage();
    const collections = await getCollections(mainPage, db);
    await mainCtx.close();
    console.log(` \u2713 ${collections.length} collections`);
    console.log(`
2. Workers (${workers})...`);
    const workerPool = await Promise.all(
      Array(workers).fill(null).map(() => createWorker(browser, db))
    );
    const toProcess = limit > 0 ? collections.slice(0, limit) : collections;
    console.log(`
3. Scraping ${toProcess.length} collections (TOUS les mod\xE8les)...
`);
    let completed = 0;
    let totalModels = 0;
    const queue = [...toProcess];
    // Each worker drains the shared queue; queue.shift() is safe because
    // workers only interleave at await points.
    async function processNext(worker) {
      while (queue.length > 0) {
        const col = queue.shift();
        if (!col) break;
        const count = await worker.scrape(col);
        completed++;
        const status = count > 0 ? `\u2713 ${count.toString().padStart(2)}` : count === 0 ? "- 0 " : "\u2717 ";
        if (count > 0) totalModels += count;
        const elapsed2 = ((Date.now() - startTime) / 1e3).toFixed(1);
        console.log(
          ` [${completed.toString().padStart(3)}/${toProcess.length}] ${col.name.padEnd(
            22
          )} ${status} (${elapsed2}s)`
        );
      }
    }
    await Promise.all(workerPool.map((w) => processNext(w)));
    await Promise.all(workerPool.map((w) => w.close()));
    const elapsed = ((Date.now() - startTime) / 1e3).toFixed(1);
    const stats = getScraperStats(db);
    console.log("\n" + "=".repeat(60));
    console.log(`TERMIN\xC9 en ${elapsed}s`);
    console.log("=".repeat(60));
    console.log(`Collections: ${stats.collections}`);
    console.log(`Mod\xE8les: ${stats.models}`);
    console.log(`Historique: ${stats.historyEntries}`);
    return {
      success: true,
      collections: stats.collections,
      models: stats.models,
      duration: elapsedSeconds()
    };
  } catch (error) {
    return {
      success: false,
      collections: 0,
      models: 0,
      duration: elapsedSeconds(),
      error: error instanceof Error ? error.message : String(error)
    };
  } finally {
    // Always release the browser and the database handle.
    if (browser) {
      await browser.close();
    }
    db.close();
  }
}
export {
  runScraper
};
@@ -0,0 +1,133 @@
1
// src/telegram/task-dependency-resolver.ts
// Upper bound on dependents handled per completion/failure event —
// guards against DoS via huge dependency fan-out.
var MAX_DEPENDENTS_PER_TASK = 10;
// Pause between consecutive dependent-task triggers.
var BATCH_TRIGGER_DELAY_MS = 500;
var TaskDependencyResolver = class {
  /**
   * @param taskStore store exposing getDependents / getTask / canExecute /
   *   cancelTask / failTask
   * @param bridge Telegram bridge giving access to the gramJS client
   */
  constructor(taskStore, bridge) {
    this.taskStore = taskStore;
    this.bridge = bridge;
  }
  /**
   * React to a successful task: collect its pending dependents whose
   * dependencies are all satisfied, then trigger them one by one with a
   * BATCH_TRIGGER_DELAY_MS pause between triggers.
   * At most MAX_DEPENDENTS_PER_TASK dependents are processed.
   * Never throws; errors are logged.
   */
  async onTaskComplete(completedTaskId) {
    try {
      const dependents = this.taskStore.getDependents(completedTaskId);
      if (dependents.length === 0) {
        return;
      }
      const limited = dependents.slice(0, MAX_DEPENDENTS_PER_TASK);
      if (dependents.length > MAX_DEPENDENTS_PER_TASK) {
        console.warn(
          `\u26A0\uFE0F Task ${completedTaskId} has ${dependents.length} dependents, only processing first ${MAX_DEPENDENTS_PER_TASK} (security limit)`
        );
      }
      console.log(
        `\u{1F4CA} Task ${completedTaskId} completed. Checking ${limited.length} dependent task(s)...`
      );
      const ready = [];
      for (const depId of limited) {
        const task = this.taskStore.getTask(depId);
        if (!task) {
          console.warn(`Dependent task ${depId} not found`);
          continue;
        }
        if (task.status !== "pending") {
          continue;
        }
        if (!this.taskStore.canExecute(depId)) {
          console.log(`\u23F3 Task ${depId} still waiting for dependencies`);
          continue;
        }
        ready.push(task.id);
      }
      let first = true;
      for (const taskId of ready) {
        if (!first) {
          await this.delay(BATCH_TRIGGER_DELAY_MS);
        }
        first = false;
        await this.triggerTask(taskId);
      }
    } catch (error) {
      console.error("Error in dependency resolver:", error);
    }
  }
  /** Promise that resolves after `ms` milliseconds. */
  delay(ms) {
    return new Promise((resolve) => setTimeout(resolve, ms));
  }
  /**
   * React to a failed task: cancel its pending dependents and cascade
   * the failure recursively down the chain. A dependent can opt out via
   * a payload with skipOnParentFailure: false.
   * At most MAX_DEPENDENTS_PER_TASK dependents are processed per level.
   * Never throws; errors are logged.
   */
  async onTaskFail(failedTaskId) {
    try {
      const dependents = this.taskStore.getDependents(failedTaskId);
      if (dependents.length === 0) {
        return;
      }
      const limited = dependents.slice(0, MAX_DEPENDENTS_PER_TASK);
      if (dependents.length > MAX_DEPENDENTS_PER_TASK) {
        console.warn(
          `\u26A0\uFE0F Task ${failedTaskId} has ${dependents.length} dependents, only cancelling first ${MAX_DEPENDENTS_PER_TASK} (security limit)`
        );
      }
      console.log(`\u274C Task ${failedTaskId} failed. Cancelling ${limited.length} dependent task(s)...`);
      for (const depId of limited) {
        const task = this.taskStore.getTask(depId);
        if (!task || task.status !== "pending") {
          continue;
        }
        // Cancelling is the default; an unparsable payload keeps it.
        let skipOnFailure = true;
        if (task.payload) {
          try {
            skipOnFailure = JSON.parse(task.payload).skipOnParentFailure !== false;
          } catch (e) {
          }
        }
        if (skipOnFailure) {
          this.taskStore.cancelTask(depId);
          console.log(` \u21B3 Cancelled task ${depId}: ${task.description}`);
          // Cascade the cancellation to this dependent's own dependents.
          await this.onTaskFail(depId);
        }
      }
    } catch (error) {
      console.error("Error handling task failure cascade:", error);
    }
  }
  /**
   * Fire a dependent task by posting "[TASK:uuid] description" to the
   * account's Saved Messages. Marks the task failed if sending fails.
   */
  async triggerTask(taskId) {
    try {
      const task = this.taskStore.getTask(taskId);
      if (!task) {
        console.warn(`Cannot trigger task ${taskId}: not found`);
        return;
      }
      console.log(`\u{1F680} Triggering dependent task: ${task.description}`);
      const gramJsClient = this.bridge.getClient().getClient();
      const me = await gramJsClient.getMe();
      await gramJsClient.sendMessage(me, {
        message: `[TASK:${taskId}] ${task.description}`
      });
      console.log(` \u21B3 Sent [TASK:${taskId}] to Saved Messages`);
    } catch (error) {
      console.error(`Error triggering task ${taskId}:`, error);
      this.taskStore.failTask(taskId, `Failed to trigger: ${error}`);
    }
  }
};
export {
  TaskDependencyResolver
};