@llblab/pi-telegram 0.2.3 → 0.2.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +1 -1
- package/README.md +1 -1
- package/docs/architecture.md +1 -1
- package/index.ts +12 -3
- package/package.json +2 -2
- package/tests/queue.test.ts +208 -0
package/CHANGELOG.md
CHANGED
|
@@ -4,7 +4,7 @@
|
|
|
4
4
|
|
|
5
5
|
- `[Docs]` Added short responsibility header comments to every project `.ts` file. Impact: file boundaries are easier to understand while navigating the growing `/lib` split.
|
|
6
6
|
- `[Naming]` Renamed extracted domain modules and mirrored regression suites to use repo-scoped bare domain filenames such as `api.ts`, `queue.ts`, and `queue.test.ts` instead of repeating `telegram-*` in every path. Impact: the internal topology is easier to scan and stays aligned with the repository-level Telegram scope.
|
|
7
|
-
- `[Controls]` Expanded Telegram session controls with a richer `/status` view, inline model selection, and thinking-level controls. Impact: more bridge configuration can be managed directly from Telegram.
|
|
7
|
+
- `[Controls]` Expanded Telegram session controls with a richer `/status` view, inline model selection, and thinking-level controls, and fixed the callback-selection path so idle model and thinking picks apply immediately instead of only becoming visible after a later Telegram interaction. Impact: more bridge configuration can be managed directly from Telegram with more predictable immediate feedback.
|
|
8
8
|
- `[Queue]` Upgraded Telegram turn queueing with previews, reaction-driven prioritization/removal, media-group handling, aborted-turn history preservation, and safer dispatch gating. Impact: follow-up handling is more transparent and less prone to lifecycle races.
|
|
9
9
|
- `[Rendering]` Added Telegram-oriented Markdown rendering and hardened reply streaming/chunking behavior, including narrower monospace Markdown table output without outer side borders, monospace list markers for unordered and ordered lists, and flattened nested quote indentation inside a single Telegram blockquote. Impact: formatted replies render more reliably while preserving literal code blocks and using width more efficiently on narrow Telegram clients.
|
|
10
10
|
- `[Runtime]` Hardened attachment delivery, polling/runtime behavior, Telegram session integration, preview-finalization and reply-transport routing into the replies domain, lazy Telegram API client routing into the Telegram API domain, turn-building extraction into its own domain, menu/model-resolution plus menu-state, pure menu-page derivation, pure menu render-payload builders, menu-message runtime, callback parsing, callback entry handling, callback mutation helpers, full model-callback planning and execution, and interface-polished callback effect ports into the menu domain, direct execute-from-update routing into the updates domain, model-switch restart glue extraction into the model-switch domain, and tool/command/lifecycle-hook registration extraction into a dedicated registration domain. Impact: the bridge is more robust as a daily Telegram frontend for pi.
|
package/README.md
CHANGED
|
@@ -110,7 +110,7 @@ Chat with your bot in Telegram DMs.
|
|
|
110
110
|
Additional fork-specific controls:
|
|
111
111
|
|
|
112
112
|
- `/status` now has a richer view with inline buttons for model and thinking controls, and joins the high-priority control queue when pi is busy
|
|
113
|
-
- `/model` opens the interactive model selector, joins the high-priority control queue when pi is busy, and can restart the active Telegram-owned run on the newly selected model, waiting for the current tool call to finish when needed
|
|
113
|
+
- `/model` opens the interactive model selector, applies idle selections immediately, joins the high-priority control queue when pi is busy, and can restart the active Telegram-owned run on the newly selected model, waiting for the current tool call to finish when needed
|
|
114
114
|
- `/compact` starts session compaction when pi and the Telegram queue are idle
|
|
115
115
|
- Queue reactions: `👍` prioritizes a waiting turn, `👎` removes it
|
|
116
116
|
|
package/docs/architecture.md
CHANGED
|
@@ -122,7 +122,7 @@ The bridge exposes Telegram-side session controls in addition to regular chat fo
|
|
|
122
122
|
Current operator controls include:
|
|
123
123
|
|
|
124
124
|
- `/status` for model, usage, cost, and context visibility, queued as a high-priority control item when needed
|
|
125
|
-
- Inline status buttons for model and thinking adjustments
|
|
125
|
+
- Inline status buttons for model and thinking adjustments, applying idle selections immediately while still respecting busy-run restart rules
|
|
126
126
|
- `/model` for interactive model selection, queued as a high-priority control item when needed and supporting in-flight restart of the active Telegram-owned run on a newly selected model
|
|
127
127
|
- `/compact` for Telegram-triggered pi session compaction when the bridge is idle
|
|
128
128
|
- Queue reactions using `👍` and `👎`
|
package/index.ts
CHANGED
|
@@ -1180,7 +1180,10 @@ export default function (pi: ExtensionAPI) {
|
|
|
1180
1180
|
query.data,
|
|
1181
1181
|
getCurrentTelegramModel(ctx),
|
|
1182
1182
|
{
|
|
1183
|
-
setThinkingLevel: (level) =>
|
|
1183
|
+
setThinkingLevel: (level) => {
|
|
1184
|
+
pi.setThinkingLevel(level);
|
|
1185
|
+
updateStatus(ctx);
|
|
1186
|
+
},
|
|
1184
1187
|
getCurrentThinkingLevel: () => pi.getThinkingLevel(),
|
|
1185
1188
|
updateStatusMessage: async () => showStatusMessage(state, ctx),
|
|
1186
1189
|
answerCallbackQuery,
|
|
@@ -1213,10 +1216,15 @@ export default function (pi: ExtensionAPI) {
|
|
|
1213
1216
|
setModel: (model) => pi.setModel(model),
|
|
1214
1217
|
setCurrentModel: (model) => {
|
|
1215
1218
|
currentTelegramModel = model;
|
|
1219
|
+
updateStatus(ctx);
|
|
1220
|
+
},
|
|
1221
|
+
setThinkingLevel: (level) => {
|
|
1222
|
+
pi.setThinkingLevel(level);
|
|
1223
|
+
updateStatus(ctx);
|
|
1216
1224
|
},
|
|
1217
|
-
setThinkingLevel: (level) => pi.setThinkingLevel(level),
|
|
1218
1225
|
stagePendingModelSwitch: (selection) => {
|
|
1219
1226
|
pendingTelegramModelSwitch = selection;
|
|
1227
|
+
updateStatus(ctx);
|
|
1220
1228
|
},
|
|
1221
1229
|
restartInterruptedTelegramTurn: (selection) => {
|
|
1222
1230
|
return restartTelegramModelSwitchContinuation({
|
|
@@ -1825,8 +1833,9 @@ export default function (pi: ExtensionAPI) {
|
|
|
1825
1833
|
systemPrompt: nextEvent.systemPrompt + suffix,
|
|
1826
1834
|
};
|
|
1827
1835
|
},
|
|
1828
|
-
onModelSelect: (event) => {
|
|
1836
|
+
onModelSelect: (event, ctx) => {
|
|
1829
1837
|
currentTelegramModel = (event as { model: Model<any> }).model;
|
|
1838
|
+
updateStatus(ctx);
|
|
1830
1839
|
},
|
|
1831
1840
|
onAgentStart: async (_event, ctx) => {
|
|
1832
1841
|
currentAbort = () => ctx.abort();
|
package/package.json
CHANGED
|
@@ -1,8 +1,8 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@llblab/pi-telegram",
|
|
3
|
-
"version": "0.2.3",
|
|
3
|
+
"version": "0.2.5",
|
|
4
4
|
"private": false,
|
|
5
|
-
"description": "Telegram DM bridge extension for pi",
|
|
5
|
+
"description": "Better Telegram DM bridge extension for pi",
|
|
6
6
|
"type": "module",
|
|
7
7
|
"keywords": [
|
|
8
8
|
"pi-package",
|
package/tests/queue.test.ts
CHANGED
|
@@ -2467,6 +2467,214 @@ test("Extension runtime applies reaction priority and removal before the next di
|
|
|
2467
2467
|
}
|
|
2468
2468
|
});
|
|
2469
2469
|
|
|
2470
|
+
test("Extension runtime applies idle model picks immediately and refreshes status", async () => {
|
|
2471
|
+
const agentDir = join(homedir(), ".pi", "agent");
|
|
2472
|
+
const configPath = join(agentDir, "telegram.json");
|
|
2473
|
+
const previousConfig = await readFile(configPath, "utf8").catch(
|
|
2474
|
+
() => undefined,
|
|
2475
|
+
);
|
|
2476
|
+
const previousArgv = [...process.argv];
|
|
2477
|
+
const handlers = new Map<
|
|
2478
|
+
string,
|
|
2479
|
+
(event: unknown, ctx: unknown) => Promise<unknown>
|
|
2480
|
+
>();
|
|
2481
|
+
const commands = new Map<
|
|
2482
|
+
string,
|
|
2483
|
+
{ handler: (args: string, ctx: unknown) => Promise<void> }
|
|
2484
|
+
>();
|
|
2485
|
+
const runtimeEvents: string[] = [];
|
|
2486
|
+
const statusEvents: string[] = [];
|
|
2487
|
+
const modelA = {
|
|
2488
|
+
provider: "openai",
|
|
2489
|
+
id: "gpt-a",
|
|
2490
|
+
reasoning: true,
|
|
2491
|
+
} as const;
|
|
2492
|
+
const modelB = {
|
|
2493
|
+
provider: "anthropic",
|
|
2494
|
+
id: "claude-b",
|
|
2495
|
+
reasoning: true,
|
|
2496
|
+
} as const;
|
|
2497
|
+
const setModels: Array<string> = [];
|
|
2498
|
+
const thinkingLevels: Array<string> = [];
|
|
2499
|
+
let secondUpdatesResolve: ((value: Response) => void) | undefined;
|
|
2500
|
+
const secondUpdates = new Promise<Response>((resolve) => {
|
|
2501
|
+
secondUpdatesResolve = resolve;
|
|
2502
|
+
});
|
|
2503
|
+
const pi = {
|
|
2504
|
+
on: (
|
|
2505
|
+
event: string,
|
|
2506
|
+
handler: (event: unknown, ctx: unknown) => Promise<unknown>,
|
|
2507
|
+
) => {
|
|
2508
|
+
handlers.set(event, handler);
|
|
2509
|
+
},
|
|
2510
|
+
registerCommand: (
|
|
2511
|
+
name: string,
|
|
2512
|
+
definition: { handler: (args: string, ctx: unknown) => Promise<void> },
|
|
2513
|
+
) => {
|
|
2514
|
+
commands.set(name, definition);
|
|
2515
|
+
},
|
|
2516
|
+
registerTool: () => {},
|
|
2517
|
+
sendUserMessage: () => {},
|
|
2518
|
+
getThinkingLevel: () => thinkingLevels.at(-1) ?? "medium",
|
|
2519
|
+
setModel: async (model: { provider: string; id: string }) => {
|
|
2520
|
+
setModels.push(`${model.provider}/${model.id}`);
|
|
2521
|
+
return true;
|
|
2522
|
+
},
|
|
2523
|
+
setThinkingLevel: (level: string) => {
|
|
2524
|
+
thinkingLevels.push(level);
|
|
2525
|
+
},
|
|
2526
|
+
} as never;
|
|
2527
|
+
const originalFetch = globalThis.fetch;
|
|
2528
|
+
let getUpdatesCalls = 0;
|
|
2529
|
+
let nextMessageId = 100;
|
|
2530
|
+
const callbackAnswers: string[] = [];
|
|
2531
|
+
globalThis.fetch = async (input, init) => {
|
|
2532
|
+
const url = typeof input === "string" ? input : input.toString();
|
|
2533
|
+
const method = url.split("/").at(-1) ?? "";
|
|
2534
|
+
const body =
|
|
2535
|
+
typeof init?.body === "string"
|
|
2536
|
+
? (JSON.parse(init.body) as Record<string, unknown>)
|
|
2537
|
+
: undefined;
|
|
2538
|
+
if (method === "deleteWebhook") {
|
|
2539
|
+
return { json: async () => ({ ok: true, result: true }) } as Response;
|
|
2540
|
+
}
|
|
2541
|
+
if (method === "getUpdates") {
|
|
2542
|
+
getUpdatesCalls += 1;
|
|
2543
|
+
if (getUpdatesCalls === 1) {
|
|
2544
|
+
return {
|
|
2545
|
+
json: async () => ({
|
|
2546
|
+
ok: true,
|
|
2547
|
+
result: [
|
|
2548
|
+
{
|
|
2549
|
+
_: "other",
|
|
2550
|
+
update_id: 1,
|
|
2551
|
+
message: {
|
|
2552
|
+
message_id: 60,
|
|
2553
|
+
chat: { id: 99, type: "private" },
|
|
2554
|
+
from: { id: 77, is_bot: false, first_name: "Test" },
|
|
2555
|
+
text: "/model",
|
|
2556
|
+
},
|
|
2557
|
+
},
|
|
2558
|
+
],
|
|
2559
|
+
}),
|
|
2560
|
+
} as Response;
|
|
2561
|
+
}
|
|
2562
|
+
if (getUpdatesCalls === 2) return secondUpdates;
|
|
2563
|
+
throw new DOMException("stop", "AbortError");
|
|
2564
|
+
}
|
|
2565
|
+
if (method === "sendMessage") {
|
|
2566
|
+
runtimeEvents.push(`send:${String(body?.text ?? "")}`);
|
|
2567
|
+
return {
|
|
2568
|
+
json: async () => ({
|
|
2569
|
+
ok: true,
|
|
2570
|
+
result: { message_id: nextMessageId++ },
|
|
2571
|
+
}),
|
|
2572
|
+
} as Response;
|
|
2573
|
+
}
|
|
2574
|
+
if (method === "editMessageText") {
|
|
2575
|
+
runtimeEvents.push(`edit:${String(body?.text ?? "")}`);
|
|
2576
|
+
return { json: async () => ({ ok: true, result: true }) } as Response;
|
|
2577
|
+
}
|
|
2578
|
+
if (method === "answerCallbackQuery") {
|
|
2579
|
+
callbackAnswers.push(String(body?.text ?? ""));
|
|
2580
|
+
return { json: async () => ({ ok: true, result: true }) } as Response;
|
|
2581
|
+
}
|
|
2582
|
+
if (method === "sendChatAction") {
|
|
2583
|
+
return { json: async () => ({ ok: true, result: true }) } as Response;
|
|
2584
|
+
}
|
|
2585
|
+
throw new Error(`Unexpected Telegram API method: ${method}`);
|
|
2586
|
+
};
|
|
2587
|
+
try {
|
|
2588
|
+
process.argv = [
|
|
2589
|
+
previousArgv[0] ?? "node",
|
|
2590
|
+
previousArgv[1] ?? "index.ts",
|
|
2591
|
+
"--models=anthropic/claude-b:high",
|
|
2592
|
+
];
|
|
2593
|
+
await mkdir(agentDir, { recursive: true });
|
|
2594
|
+
await writeFile(
|
|
2595
|
+
configPath,
|
|
2596
|
+
JSON.stringify(
|
|
2597
|
+
{ botToken: "123:abc", allowedUserId: 77, lastUpdateId: 0 },
|
|
2598
|
+
null,
|
|
2599
|
+
"\t",
|
|
2600
|
+
) + "\n",
|
|
2601
|
+
"utf8",
|
|
2602
|
+
);
|
|
2603
|
+
telegramExtension(pi);
|
|
2604
|
+
const ctx = {
|
|
2605
|
+
hasUI: true,
|
|
2606
|
+
cwd: process.cwd(),
|
|
2607
|
+
model: modelA,
|
|
2608
|
+
signal: undefined,
|
|
2609
|
+
ui: {
|
|
2610
|
+
theme: {
|
|
2611
|
+
fg: (_token: string, text: string) => text,
|
|
2612
|
+
},
|
|
2613
|
+
setStatus: (_slot: string, text: string) => {
|
|
2614
|
+
statusEvents.push(text);
|
|
2615
|
+
},
|
|
2616
|
+
notify: () => {},
|
|
2617
|
+
},
|
|
2618
|
+
sessionManager: {
|
|
2619
|
+
getEntries: () => [],
|
|
2620
|
+
},
|
|
2621
|
+
modelRegistry: {
|
|
2622
|
+
refresh: () => {},
|
|
2623
|
+
getAvailable: () => [modelA, modelB],
|
|
2624
|
+
isUsingOAuth: () => false,
|
|
2625
|
+
},
|
|
2626
|
+
getContextUsage: () => undefined,
|
|
2627
|
+
hasPendingMessages: () => false,
|
|
2628
|
+
isIdle: () => true,
|
|
2629
|
+
abort: () => {},
|
|
2630
|
+
} as never;
|
|
2631
|
+
await handlers.get("session_start")?.({}, ctx);
|
|
2632
|
+
await commands.get("telegram-connect")?.handler("", ctx);
|
|
2633
|
+
await waitForCondition(() =>
|
|
2634
|
+
runtimeEvents.some((event) => event === "send:<b>Choose a model:</b>"),
|
|
2635
|
+
);
|
|
2636
|
+
const statusCountBeforePick = statusEvents.length;
|
|
2637
|
+
secondUpdatesResolve?.({
|
|
2638
|
+
json: async () => ({
|
|
2639
|
+
ok: true,
|
|
2640
|
+
result: [
|
|
2641
|
+
{
|
|
2642
|
+
_: "other",
|
|
2643
|
+
update_id: 2,
|
|
2644
|
+
callback_query: {
|
|
2645
|
+
id: "cb-idle-1",
|
|
2646
|
+
from: { id: 77, is_bot: false, first_name: "Test" },
|
|
2647
|
+
data: "model:pick:0",
|
|
2648
|
+
message: {
|
|
2649
|
+
message_id: 100,
|
|
2650
|
+
chat: { id: 99, type: "private" },
|
|
2651
|
+
},
|
|
2652
|
+
},
|
|
2653
|
+
},
|
|
2654
|
+
],
|
|
2655
|
+
}),
|
|
2656
|
+
} as Response);
|
|
2657
|
+
await waitForCondition(() => setModels.length === 1);
|
|
2658
|
+
assert.deepEqual(setModels, ["anthropic/claude-b"]);
|
|
2659
|
+
assert.deepEqual(thinkingLevels, ["high"]);
|
|
2660
|
+
assert.equal(callbackAnswers.includes("Switched to claude-b"), true);
|
|
2661
|
+
assert.equal(statusEvents.length > statusCountBeforePick, true);
|
|
2662
|
+
assert.equal(
|
|
2663
|
+
runtimeEvents.some((event) => event.startsWith("edit:<b>Context:")),
|
|
2664
|
+
true,
|
|
2665
|
+
);
|
|
2666
|
+
await handlers.get("session_shutdown")?.({}, ctx);
|
|
2667
|
+
} finally {
|
|
2668
|
+
process.argv = previousArgv;
|
|
2669
|
+
globalThis.fetch = originalFetch;
|
|
2670
|
+
if (previousConfig === undefined) {
|
|
2671
|
+
await rm(configPath, { force: true });
|
|
2672
|
+
} else {
|
|
2673
|
+
await writeFile(configPath, previousConfig, "utf8");
|
|
2674
|
+
}
|
|
2675
|
+
}
|
|
2676
|
+
});
|
|
2677
|
+
|
|
2470
2678
|
test("Extension runtime switches model in flight and dispatches a continuation turn after abort", async () => {
|
|
2471
2679
|
const agentDir = join(homedir(), ".pi", "agent");
|
|
2472
2680
|
const configPath = join(agentDir, "telegram.json");
|