hoomanjs 1.1.0 → 1.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -20,6 +20,7 @@ It gives you:
20
20
 
21
21
  - a one-shot `exec` command for single prompts
22
22
  - a stateful `chat` interface for interactive sessions
23
 + - a `daemon` command for processing MCP channel notifications in the background
23
24
  - an Ink-powered `configure` workflow for editing app config, `instructions.md`, MCP servers, and installed skills
24
25
  - an `acp` command for running Hooman as an Agent Client Protocol (ACP) agent over stdio
25
26
 
@@ -28,6 +29,8 @@ It gives you:
28
29
  - Multiple LLM providers: `ollama`, `openai`, `anthropic`, `google`, `bedrock`
29
30
  - Local configuration under `~/.hooman`
30
31
  - MCP server support via `stdio`, `streamable-http`, and `sse`
32
+ - MCP server `instructions` support: server-provided instructions are appended to the agent system prompt
33
+ - MCP channel notification support through `hooman daemon --channel <name>`
31
34
  - Skill discovery / install / removal through the integrated configure flow
32
35
  - Interactive terminal UI for chat and configuration
33
36
 
@@ -132,9 +135,35 @@ Choose a toolkit size:
132
135
  hooman chat --toolkit max
133
136
  ```
134
137
 
138
+ ### `hooman daemon`
139
+
140
+ Run a long-lived daemon that subscribes to one or more MCP notification channels and feeds each received notification into the agent as a queued prompt.
141
+
142
+ ```bash
143
+ hooman daemon --channel hooman/channel
144
+ ```
145
+
146
+ Subscribe to multiple channels:
147
+
148
+ ```bash
149
+ hooman daemon --channel hooman/channel --channel alerts/channel
150
+ ```
151
+
152
+ Resume or pin a session id:
153
+
154
+ ```bash
155
+ hooman daemon --session my-daemon --channel hooman/channel
156
+ ```
157
+
158
+ Choose a toolkit size:
159
+
160
+ ```bash
161
+ hooman daemon --toolkit full --channel hoomanjs/channel
162
+ ```
163
+
135
164
  ### Toolkit Levels
136
165
 
137
- `exec`, `chat`, and `acp` support `-t, --toolkit <lite|full|max>`.
166
+ `exec`, `chat`, `daemon`, and `acp` support `-t, --toolkit <lite|full|max>`.
138
167
 
139
168
  - `lite` - time, fetch, long-term-memory, installed skills, and configured MCP server tools
140
169
  - `full` - `lite` plus filesystem, shell, and thinking tools
@@ -359,6 +388,15 @@ Supports `region`, `clientConfig`, and optional `apiKey`, with all other values
359
388
  }
360
389
  ```
361
390
 
391
+ ## MCP Notes
392
+
393
+ - MCP server `instructions` from the protocol `initialize` response are appended to Hooman's system prompt, after local `instructions.md` and session-specific prompt overrides.
394
+ - Hooman reads these instructions automatically from connected MCP servers when building the agent.
395
+ - `hooman daemon` can subscribe to server-published notification channels such as `hoomanjs/channel`.
396
+ - Only MCP servers that advertise the requested channel capability are subscribed.
397
+ - When a matching notification is received, Hooman uses `params.content` as the prompt if it is a string; otherwise it JSON-stringifies the notification params and sends that to the agent.
398
+ - Daemon mode processes notifications sequentially, reuses the same agent session over time, and **auto-approves tool calls**.
399
+
362
400
  ## Skills
363
401
 
364
402
  Skills are installed under:
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "hoomanjs",
3
- "version": "1.1.0",
3
+ "version": "1.3.0",
4
4
  "description": "Bun-powered local AI agent CLI with chat, exec, ACP, MCP, and skills support.",
5
5
  "author": {
6
6
  "name": "Vaibhav Pandey",
@@ -60,6 +60,7 @@
60
60
  "chromadb": "^3.4.3",
61
61
  "cli-spinners": "^3.4.0",
62
62
  "commander": "^14.0.3",
63
+ "fastq": "^1.20.1",
63
64
  "gray-matter": "^4.0.3",
64
65
  "handlebars": "^4.7.9",
65
66
  "ink": "^7.0.0",
package/src/cli.ts CHANGED
@@ -9,6 +9,7 @@ import { createToolApprovalHandler } from "./exec/approvals.ts";
9
9
  import { chat } from "./chat/index.tsx";
10
10
  import { configure } from "./configure/index.tsx";
11
11
  import { runAcpStdio } from "./acp/acp-agent.ts";
12
+ import { main as daemon } from "./daemon/index.ts";
12
13
 
13
14
  async function readPackageMeta(): Promise<{
14
15
  name: string;
@@ -122,6 +123,53 @@ program
122
123
  },
123
124
  );
124
125
 
126
+ program
127
+ .command("daemon")
128
+ .description(
129
+ "Run a background daemon that processes MCP channel notifications as prompts.",
130
+ )
131
+ .option("-s, --session <id>", "Session ID to use.")
132
+ .requiredOption(
133
+ "-c, --channel <name>",
134
+ "MCP notification channel to subscribe to (repeatable).",
135
+ (value: string, previous?: string[]) => [...(previous ?? []), value],
136
+ )
137
+ .option(
138
+ "--debug",
139
+ "Log each MCP channel notification payload to the console.",
140
+ )
141
+ .addOption(createToolkitOption())
142
+ .action(
143
+ async (options: {
144
+ session?: string;
145
+ toolkit?: Toolkit;
146
+ channel?: string[];
147
+ debug?: boolean;
148
+ }) => {
149
+ const sessionId = options.session?.trim() || crypto.randomUUID();
150
+ const channels = options.channel ?? [];
151
+ const {
152
+ agent,
153
+ mcp: { manager },
154
+ } = await bootstrap(
155
+ { sessionId, toolkit: options.toolkit ?? "full" },
156
+ true,
157
+ );
158
+ try {
159
+ await daemon({
160
+ agent,
161
+ manager,
162
+ channels,
163
+ debug: Boolean(options.debug),
164
+ });
165
+ } finally {
166
+ try {
167
+ await manager.disconnect();
168
+ } catch {}
169
+ }
170
+ },
171
+ );
172
+
125
173
  program
126
174
  .command("configure")
127
175
  .description("Manage app config, MCP servers, and installed skills.")
@@ -42,13 +42,14 @@ export async function create(
42
42
  ): Promise<Agent> {
43
43
  const sessionId = meta.sessionId;
44
44
  const userId = meta.userId ?? sessionId;
45
- const toolkit = meta.toolkit ?? "max";
45
+ const toolkit = meta.toolkit ?? "full";
46
46
  const llm = await modelProviders[config.llm.provider]!();
47
47
  const stm = createShortTermMemory(sessionId);
48
48
  const ltm = config.ltm.enabled ? createLongTermMemoryStore(config) : null;
49
49
  const skills = await createSkillsPrompt(registry);
50
50
  const tools = await mcp.manager.listPrefixedTools();
51
- const prompt = [system.content, meta.systemPrompt, skills.content]
51
+ const append = await mcp.manager.listServerInstructions();
52
+ const prompt = [system.content, meta.systemPrompt, ...append, skills.content]
52
53
  .filter((x) => !!x)
53
54
  .join(SECTION_BREAK);
54
55
  return new Agent({
@@ -1,8 +1,8 @@
1
1
  import { Config, type NamedMcpTransport } from "./config.ts";
2
- import { Manager } from "./manager.ts";
2
+ import { Manager, type ChannelMessage } from "./manager.ts";
3
3
 
4
4
  export { Config, Manager };
5
- export type { NamedMcpTransport };
5
+ export type { ChannelMessage, NamedMcpTransport };
6
6
  export { createMcpTools } from "./tools.ts";
7
7
 
8
8
  export function createMcpConfig(path: string): Config {
@@ -4,9 +4,22 @@ import { StdioClientTransport } from "@modelcontextprotocol/sdk/client/stdio.js"
4
4
  import { StreamableHTTPClientTransport } from "@modelcontextprotocol/sdk/client/streamableHttp.js";
5
5
  import { SSEClientTransport } from "@modelcontextprotocol/sdk/client/sse.js";
6
6
  import type { Transport } from "@modelcontextprotocol/sdk/shared/transport.js";
7
+ import { z } from "zod";
7
8
  import { Config, type NamedMcpTransport } from "./config.ts";
8
9
  import type { McpTransport } from "./types.ts";
9
10
 
11
+ export type ChannelMessageMeta = {
12
+ server: string;
13
+ channel: string;
14
+ method: string;
15
+ params: unknown;
16
+ };
17
+
18
+ export type ChannelMessage = {
19
+ prompt: string;
20
+ meta: ChannelMessageMeta;
21
+ };
22
+
10
23
  function transportFor(spec: McpTransport): Transport {
11
24
  switch (spec.type) {
12
25
  case "stdio":
@@ -106,12 +119,118 @@ export class Manager {
106
119
  }
107
120
  const map = this.instances!;
108
121
  const batches = await Promise.all(
109
- [...map.entries()].map(async ([serverKey, client]) =>
122
+ [...map.entries()].map(async ([server, client]) =>
110
123
  client
111
124
  .listTools()
112
- .then((tools) => tools.map((t) => new PrefixedMcpTool(serverKey, t))),
125
+ .then((tools) => tools.map((t) => new PrefixedMcpTool(server, t))),
113
126
  ),
114
127
  );
115
128
  return batches.flat();
116
129
  }
130
+
131
+ /**
132
+ * Collects optional server-level instructions from each connected MCP server.
133
+ */
134
+ public async listServerInstructions(): Promise<string[]> {
135
+ if (this.instances === null) {
136
+ this.reload();
137
+ }
138
+ const map = this.instances!;
139
+ const rows = await Promise.all(
140
+ [...map.entries()].map(async ([server, client]) => {
141
+ await client.connect();
142
+ const instructions = client.client.getInstructions()?.trim();
143
+ if (!instructions) {
144
+ return "";
145
+ }
146
+
147
+ return [`MCP server "${server}" instructions:`, "", instructions].join(
148
+ "\n",
149
+ );
150
+ }),
151
+ );
152
+ return rows.filter(Boolean);
153
+ }
154
+
155
+ public async subscribeToChannels(
156
+ channels: readonly string[],
157
+ onMessage: (message: ChannelMessage) => void,
158
+ ): Promise<() => void> {
159
+ if (this.instances === null) {
160
+ this.reload();
161
+ }
162
+
163
+ const map = this.instances!;
164
+ const requested = [
165
+ ...new Set(channels.map((c) => c.trim()).filter(Boolean)),
166
+ ];
167
+ if (requested.length === 0) {
168
+ return () => {};
169
+ }
170
+
171
+ const unsubs: Array<() => void> = [];
172
+ for (const [server, client] of map.entries()) {
173
+ await client.connect();
174
+ const experimental =
175
+ client.client.getServerCapabilities()?.experimental ?? {};
176
+ for (const channel of requested) {
177
+ if (!Object.hasOwn(experimental, channel)) {
178
+ continue;
179
+ }
180
+
181
+ const method = `notifications/${channel}`;
182
+ const schema = z.object({
183
+ method: z.literal(method),
184
+ params: z.unknown().optional(),
185
+ });
186
+ const handler = (notification: {
187
+ method: string;
188
+ params?: unknown;
189
+ }) => {
190
+ const { method, params } = notification;
191
+ const prompt = this.toChannelPrompt(method, params);
192
+ if (!prompt) {
193
+ return;
194
+ }
195
+
196
+ onMessage({
197
+ prompt,
198
+ meta: {
199
+ server,
200
+ channel,
201
+ method,
202
+ params,
203
+ },
204
+ });
205
+ };
206
+ client.client.setNotificationHandler(schema, handler);
207
+ unsubs.push(() => {
208
+ client.client.setNotificationHandler(schema, () => {});
209
+ });
210
+ }
211
+ }
212
+
213
+ return () => {
214
+ for (const off of unsubs) {
215
+ off();
216
+ }
217
+ };
218
+ }
219
+
220
+ private toChannelPrompt(method: string, params?: unknown): string {
221
+ if (
222
+ params &&
223
+ typeof params === "object" &&
224
+ "content" in params &&
225
+ typeof params.content === "string"
226
+ ) {
227
+ return params.content.trim();
228
+ }
229
+
230
+ try {
231
+ return JSON.stringify(params).trim();
232
+ } catch {
233
+ return String(params).trim();
234
+ }
235
+ }
117
236
  }
@@ -2,9 +2,20 @@ import { createHash } from "node:crypto";
2
2
  import fs from "node:fs/promises";
3
3
  import path from "node:path";
4
4
  import os from "node:os";
5
- import { tool } from "@strands-agents/sdk";
5
+ import {
6
+ DocumentBlock,
7
+ ImageBlock,
8
+ TextBlock,
9
+ VideoBlock,
10
+ tool,
11
+ type JSONValue,
12
+ } from "@strands-agents/sdk";
6
13
  import { getCwd } from "../utils/cwd-context.ts";
7
- import type { JSONValue } from "@strands-agents/sdk";
14
+ import {
15
+ detectDocumentFormat,
16
+ detectImageFormat,
17
+ detectVideoFormat,
18
+ } from "../utils/file-formats.ts";
8
19
  import { z } from "zod";
9
20
 
10
21
  const DEFAULT_READ_LIMIT = 250;
@@ -190,7 +201,7 @@ async function readTextFile(
190
201
  const buffer = await fs.readFile(filePath);
191
202
  if (isProbablyBinary(buffer)) {
192
203
  throw new Error(
193
- "File appears to be binary. Use get_file_info or read_file with as_base64 if you extend the tool for binary reads.",
204
 + "File appears to be binary. Call read_file again with `binary: true`: images (png/jpeg/gif/webp), videos (mp4/mov/mkv/webm/etc.), and documents (pdf/docx/csv/etc.) are returned as multimodal content blocks that the provider can forward to the model; unknown binary types come back as base64.",
194
205
  );
195
206
  }
196
207
 
@@ -211,14 +222,87 @@ async function readTextFile(
211
222
  };
212
223
  }
213
224
 
214
- async function readBinaryFile(filePath: string): Promise<{
215
- path: string;
216
- encoding: "base64";
217
- content: string;
218
- sizeBytes: number;
219
- }> {
225
+ type BinaryReadResult =
226
+ | Array<TextBlock | ImageBlock | VideoBlock | DocumentBlock>
227
+ | {
228
+ path: string;
229
+ encoding: "base64";
230
+ content: string;
231
+ sizeBytes: number;
232
+ };
233
+
234
+ async function readBinaryFile(
235
+ filePath: string,
236
+ options?: { maxBytes?: number },
237
+ ): Promise<BinaryReadResult> {
220
238
  await ensureFile(filePath);
239
+ const stat = await fs.stat(filePath);
240
+
241
+ if (stat.size > (options?.maxBytes ?? DEFAULT_MAX_READ_BYTES)) {
242
+ throw new Error(
243
+ `File too large to read safely (${stat.size} bytes). Use get_file_info for metadata or process the file with another tool.`,
244
+ );
245
+ }
246
+
221
247
  const buffer = await fs.readFile(filePath);
248
+ // ImageBlock / DocumentBlock expect Uint8Array; construct a zero-copy view.
249
+ const bytes = new Uint8Array(
250
+ buffer.buffer,
251
+ buffer.byteOffset,
252
+ buffer.byteLength,
253
+ );
254
+
255
+ const imageFormat = detectImageFormat(filePath);
256
+ if (imageFormat) {
257
+ const metadata = new TextBlock(
258
+ JSON.stringify({
259
+ path: filePath,
260
+ kind: "image",
261
+ format: imageFormat,
262
+ size_bytes: stat.size,
263
+ }),
264
+ );
265
+ const image = new ImageBlock({
266
+ format: imageFormat,
267
+ source: { bytes },
268
+ });
269
+ return [metadata, image];
270
+ }
271
+
272
+ const videoFormat = detectVideoFormat(filePath);
273
+ if (videoFormat) {
274
+ const metadata = new TextBlock(
275
+ JSON.stringify({
276
+ path: filePath,
277
+ kind: "video",
278
+ format: videoFormat,
279
+ size_bytes: stat.size,
280
+ }),
281
+ );
282
+ const video = new VideoBlock({
283
+ format: videoFormat,
284
+ source: { bytes },
285
+ });
286
+ return [metadata, video];
287
+ }
288
+
289
+ const documentFormat = detectDocumentFormat(filePath);
290
+ if (documentFormat) {
291
+ const metadata = new TextBlock(
292
+ JSON.stringify({
293
+ path: filePath,
294
+ kind: "document",
295
+ format: documentFormat,
296
+ size_bytes: stat.size,
297
+ }),
298
+ );
299
+ const document = new DocumentBlock({
300
+ name: path.basename(filePath),
301
+ format: documentFormat,
302
+ source: { bytes },
303
+ });
304
+ return [metadata, document];
305
+ }
222
306
 
223
307
  return {
224
308
  path: filePath,
@@ -437,7 +521,9 @@ function createFilesystemSchema() {
437
521
  binary: z
438
522
  .boolean()
439
523
  .optional()
440
- .describe("Return file as base64 instead of UTF-8 text."),
524
+ .describe(
525
+ "Read as binary. Images, videos, and documents are returned as multimodal content blocks (forwarded to the active provider's native media format where supported); other binary files come back as base64.",
526
+ ),
441
527
  }),
442
528
  readMultipleFiles: z.object({
443
529
  paths: z.array(z.string()).min(1).describe("List of file paths to read."),
@@ -519,17 +605,23 @@ export function createFilesystemTools() {
519
605
  tool({
520
606
  name: "read_file",
521
607
  description:
522
- "Read a text file with optional line offset/limit. For binary files, enable the `binary` option to return base64.",
608
+ "Read a file. Defaults to UTF-8 text with optional line offset/limit. Pass `binary: true` for non-text files: images (jpeg/png/gif/webp), videos (mp4/mov/mkv/webm/etc.), and documents (pdf/docx/csv/etc.) are returned as multimodal content blocks — the active model provider forwards them natively where supported (Bedrock for all; Anthropic for images + docs; Google for images + docs; OpenAI for images; Ollama for images) and logs a warning for unsupported kinds. Any other binary file is returned as base64.",
523
609
  inputSchema: schema.readFile,
524
610
  callback: async (input) => {
525
611
  const filePath = normalizeUserPath(input.path);
526
- const result = input.binary
527
- ? await readBinaryFile(filePath)
528
- : await readTextFile(filePath, {
529
- offset: input.offset,
530
- limit: input.limit,
531
- });
532
612
 
613
+ if (input.binary) {
614
+ // Binary reads can return SDK media blocks (ImageBlock / DocumentBlock)
615
+ // or a plain base64 JSON object. Both are accepted by FunctionTool's
616
+ // result wrapping, but the callback signature is JSONValue, so cast.
617
+ const result = await readBinaryFile(filePath);
618
+ return result as unknown as JSONValue;
619
+ }
620
+
621
+ const result = await readTextFile(filePath, {
622
+ offset: input.offset,
623
+ limit: input.limit,
624
+ });
533
625
  return toJsonValue(result);
534
626
  },
535
627
  }),
@@ -0,0 +1,60 @@
1
+ import path from "node:path";
2
+ import type {
3
+ DocumentFormat,
4
+ ImageFormat,
5
+ VideoFormat,
6
+ } from "@strands-agents/sdk";
7
+
8
+ // Extension → SDK media format. Values must match the unions the Strands SDK
9
+ // exposes so ImageBlock / VideoBlock / DocumentBlock construct cleanly. Each
10
+ // provider adapter (OpenAI, Anthropic, Bedrock, Google, Ollama) converts these
11
+ // into its native shape or gracefully drops unsupported ones with a warning —
12
+ // the paired TextBlock metadata still reaches the model either way.
13
+ const IMAGE_EXT_FORMATS: Record<string, ImageFormat> = {
14
+ ".png": "png",
15
+ ".jpg": "jpeg",
16
+ ".jpeg": "jpeg",
17
+ ".gif": "gif",
18
+ ".webp": "webp",
19
+ };
20
+
21
+ const VIDEO_EXT_FORMATS: Record<string, VideoFormat> = {
22
+ ".mp4": "mp4",
23
+ ".mov": "mov",
24
+ ".mkv": "mkv",
25
+ ".webm": "webm",
26
+ ".flv": "flv",
27
+ ".mpeg": "mpeg",
28
+ ".mpg": "mpg",
29
+ ".wmv": "wmv",
30
+ ".3gp": "3gp",
31
+ };
32
+
33
+ const DOCUMENT_EXT_FORMATS: Record<string, DocumentFormat> = {
34
+ ".pdf": "pdf",
35
+ ".csv": "csv",
36
+ ".doc": "doc",
37
+ ".docx": "docx",
38
+ ".xls": "xls",
39
+ ".xlsx": "xlsx",
40
+ ".html": "html",
41
+ ".htm": "html",
42
+ ".txt": "txt",
43
+ ".md": "md",
44
+ ".json": "json",
45
+ ".xml": "xml",
46
+ };
47
+
48
+ export function detectImageFormat(filePath: string): ImageFormat | undefined {
49
+ return IMAGE_EXT_FORMATS[path.extname(filePath).toLowerCase()];
50
+ }
51
+
52
+ export function detectVideoFormat(filePath: string): VideoFormat | undefined {
53
+ return VIDEO_EXT_FORMATS[path.extname(filePath).toLowerCase()];
54
+ }
55
+
56
+ export function detectDocumentFormat(
57
+ filePath: string,
58
+ ): DocumentFormat | undefined {
59
+ return DOCUMENT_EXT_FORMATS[path.extname(filePath).toLowerCase()];
60
+ }
@@ -0,0 +1,60 @@
1
+ import { stderr } from "node:process";
2
+ import { BeforeToolCallEvent, type Agent } from "@strands-agents/sdk";
3
+ import type {
4
+ ChannelMessage,
5
+ Manager as McpManager,
6
+ } from "../core/mcp/index.ts";
7
+ import { createQueue } from "./queue.ts";
8
+
9
+ type RunDaemonOptions = {
10
+ agent: Agent;
11
+ manager: McpManager;
12
+ channels: string[];
13
+ debug?: boolean;
14
+ };
15
+
16
+ function debug(text: string): void {
17
+ stderr.write(`[daemon] ${text}\n`);
18
+ }
19
+
20
+ export async function main(options: RunDaemonOptions): Promise<void> {
21
+ const channels = [
22
+ ...new Set(options.channels.map((value) => value.trim()).filter(Boolean)),
23
+ ];
24
+ if (channels.length === 0) {
25
+ throw new Error("At least one --channel <name> is required.");
26
+ }
27
+
28
+ // Daemon mode is non-interactive: approve tool calls by default.
29
+ options.agent.addHook(BeforeToolCallEvent, async () => {});
30
+
31
+ let fasterq: Awaited<ReturnType<typeof createQueue>>[0] | null = null;
32
+
33
+ const unsubscribe = await options.manager.subscribeToChannels(
34
+ channels,
35
+ (message) => {
36
+ if (fasterq != null) {
37
+ void fasterq.push(message);
38
+ }
39
+ },
40
+ );
41
+
42
+ const [queue, stop] = await createQueue(async (message: ChannelMessage) => {
43
+ debug(`processing → ${message.meta.server}:${message.meta.channel}`);
44
+ if (options.debug) {
45
+ debug(`raw → ${JSON.stringify(message.meta)}`);
46
+ }
47
+ try {
48
+ await options.agent.invoke(message.prompt);
49
+ } catch (error) {
50
+ const text = error instanceof Error ? error.message : String(error);
51
+ debug(
52
+ `turn failed → ${message.meta.server}:${message.meta.channel}: ${text}`,
53
+ );
54
+ }
55
+ }, unsubscribe);
56
+
57
+ fasterq = queue;
58
+
59
+ await stop();
60
+ }
@@ -0,0 +1,48 @@
1
+ import fastq from "fastq";
2
+ import type { ChannelMessage } from "../core/mcp/index.ts";
3
+
4
+ type MessageQueue = fastq.queueAsPromised<ChannelMessage, void>;
5
+
6
+ export async function createQueue(
7
+ handler: (message: ChannelMessage) => Promise<void>,
8
+ cleanup: () => void,
9
+ ): Promise<[MessageQueue, () => Promise<void>]> {
10
+ let stopping = false;
11
+ let resolver: (() => void) | null = null;
12
+ const queue: MessageQueue = fastq.promise(async (message: ChannelMessage) => {
13
+ await handler(message);
14
+ }, 1);
15
+
16
+ const stopper = new Promise<void>((resolve) => {
17
+ resolver = resolve;
18
+ });
19
+
20
+ const shutdown = () => {
21
+ if (stopping) {
22
+ return;
23
+ }
24
+ stopping = true;
25
+ queue.kill();
26
+ resolver?.();
27
+ };
28
+
29
+ const onSigInt = () => shutdown();
30
+ const onSigTerm = () => shutdown();
31
+
32
+ process.on("SIGINT", onSigInt);
33
+ process.on("SIGTERM", onSigTerm);
34
+
35
+ return [
36
+ queue,
37
+ async () => {
38
+ try {
39
+ await stopper;
40
+ } finally {
41
+ cleanup();
42
+ await queue.drained().catch(() => {});
43
+ process.off("SIGINT", onSigInt);
44
+ process.off("SIGTERM", onSigTerm);
45
+ }
46
+ },
47
+ ];
48
+ }