npcsh 0.3.31__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (91)
  1. npcsh/_state.py +942 -0
  2. npcsh/alicanto.py +1074 -0
  3. npcsh/guac.py +785 -0
  4. npcsh/mcp_helpers.py +357 -0
  5. npcsh/mcp_npcsh.py +822 -0
  6. npcsh/mcp_server.py +184 -0
  7. npcsh/npc.py +218 -0
  8. npcsh/npcsh.py +1161 -0
  9. npcsh/plonk.py +387 -269
  10. npcsh/pti.py +234 -0
  11. npcsh/routes.py +958 -0
  12. npcsh/spool.py +315 -0
  13. npcsh/wander.py +550 -0
  14. npcsh/yap.py +573 -0
  15. npcsh-1.0.0.dist-info/METADATA +596 -0
  16. npcsh-1.0.0.dist-info/RECORD +21 -0
  17. {npcsh-0.3.31.dist-info → npcsh-1.0.0.dist-info}/WHEEL +1 -1
  18. npcsh-1.0.0.dist-info/entry_points.txt +9 -0
  19. {npcsh-0.3.31.dist-info → npcsh-1.0.0.dist-info}/licenses/LICENSE +1 -1
  20. npcsh/audio.py +0 -210
  21. npcsh/cli.py +0 -545
  22. npcsh/command_history.py +0 -566
  23. npcsh/conversation.py +0 -291
  24. npcsh/data_models.py +0 -46
  25. npcsh/dataframes.py +0 -163
  26. npcsh/embeddings.py +0 -168
  27. npcsh/helpers.py +0 -641
  28. npcsh/image.py +0 -298
  29. npcsh/image_gen.py +0 -79
  30. npcsh/knowledge_graph.py +0 -1006
  31. npcsh/llm_funcs.py +0 -2027
  32. npcsh/load_data.py +0 -83
  33. npcsh/main.py +0 -5
  34. npcsh/model_runner.py +0 -189
  35. npcsh/npc_compiler.py +0 -2870
  36. npcsh/npc_sysenv.py +0 -383
  37. npcsh/npc_team/assembly_lines/test_pipeline.py +0 -181
  38. npcsh/npc_team/corca.npc +0 -13
  39. npcsh/npc_team/foreman.npc +0 -7
  40. npcsh/npc_team/npcsh.ctx +0 -11
  41. npcsh/npc_team/sibiji.npc +0 -4
  42. npcsh/npc_team/templates/analytics/celona.npc +0 -0
  43. npcsh/npc_team/templates/hr_support/raone.npc +0 -0
  44. npcsh/npc_team/templates/humanities/eriane.npc +0 -4
  45. npcsh/npc_team/templates/it_support/lineru.npc +0 -0
  46. npcsh/npc_team/templates/marketing/slean.npc +0 -4
  47. npcsh/npc_team/templates/philosophy/maurawa.npc +0 -0
  48. npcsh/npc_team/templates/sales/turnic.npc +0 -4
  49. npcsh/npc_team/templates/software/welxor.npc +0 -0
  50. npcsh/npc_team/tools/bash_executer.tool +0 -32
  51. npcsh/npc_team/tools/calculator.tool +0 -8
  52. npcsh/npc_team/tools/code_executor.tool +0 -16
  53. npcsh/npc_team/tools/generic_search.tool +0 -27
  54. npcsh/npc_team/tools/image_generation.tool +0 -25
  55. npcsh/npc_team/tools/local_search.tool +0 -149
  56. npcsh/npc_team/tools/npcsh_executor.tool +0 -9
  57. npcsh/npc_team/tools/screen_cap.tool +0 -27
  58. npcsh/npc_team/tools/sql_executor.tool +0 -26
  59. npcsh/response.py +0 -623
  60. npcsh/search.py +0 -248
  61. npcsh/serve.py +0 -1460
  62. npcsh/shell.py +0 -538
  63. npcsh/shell_helpers.py +0 -3529
  64. npcsh/stream.py +0 -700
  65. npcsh/video.py +0 -49
  66. npcsh-0.3.31.data/data/npcsh/npc_team/bash_executer.tool +0 -32
  67. npcsh-0.3.31.data/data/npcsh/npc_team/calculator.tool +0 -8
  68. npcsh-0.3.31.data/data/npcsh/npc_team/celona.npc +0 -0
  69. npcsh-0.3.31.data/data/npcsh/npc_team/code_executor.tool +0 -16
  70. npcsh-0.3.31.data/data/npcsh/npc_team/corca.npc +0 -13
  71. npcsh-0.3.31.data/data/npcsh/npc_team/eriane.npc +0 -4
  72. npcsh-0.3.31.data/data/npcsh/npc_team/foreman.npc +0 -7
  73. npcsh-0.3.31.data/data/npcsh/npc_team/generic_search.tool +0 -27
  74. npcsh-0.3.31.data/data/npcsh/npc_team/image_generation.tool +0 -25
  75. npcsh-0.3.31.data/data/npcsh/npc_team/lineru.npc +0 -0
  76. npcsh-0.3.31.data/data/npcsh/npc_team/local_search.tool +0 -149
  77. npcsh-0.3.31.data/data/npcsh/npc_team/maurawa.npc +0 -0
  78. npcsh-0.3.31.data/data/npcsh/npc_team/npcsh.ctx +0 -11
  79. npcsh-0.3.31.data/data/npcsh/npc_team/npcsh_executor.tool +0 -9
  80. npcsh-0.3.31.data/data/npcsh/npc_team/raone.npc +0 -0
  81. npcsh-0.3.31.data/data/npcsh/npc_team/screen_cap.tool +0 -27
  82. npcsh-0.3.31.data/data/npcsh/npc_team/sibiji.npc +0 -4
  83. npcsh-0.3.31.data/data/npcsh/npc_team/slean.npc +0 -4
  84. npcsh-0.3.31.data/data/npcsh/npc_team/sql_executor.tool +0 -26
  85. npcsh-0.3.31.data/data/npcsh/npc_team/test_pipeline.py +0 -181
  86. npcsh-0.3.31.data/data/npcsh/npc_team/turnic.npc +0 -4
  87. npcsh-0.3.31.data/data/npcsh/npc_team/welxor.npc +0 -0
  88. npcsh-0.3.31.dist-info/METADATA +0 -1853
  89. npcsh-0.3.31.dist-info/RECORD +0 -76
  90. npcsh-0.3.31.dist-info/entry_points.txt +0 -3
  91. {npcsh-0.3.31.dist-info → npcsh-1.0.0.dist-info}/top_level.txt +0 -0
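The +/− counts in the summary above can be reproduced locally, since wheels are plain zip archives. Below is a minimal sketch using only the Python standard library; it assumes both .whl files have already been downloaded, the file paths are placeholders, and modified files would still need a line-level diff (e.g. difflib) for exact counts.

import zipfile

OLD_WHL = "npcsh-0.3.31-py3-none-any.whl"  # placeholder: download the wheel first
NEW_WHL = "npcsh-1.0.0-py3-none-any.whl"   # placeholder: download the wheel first


def line_count(archive: zipfile.ZipFile, name: str) -> int:
    """Count the lines of one file stored inside a wheel."""
    with archive.open(name) as fh:
        return sum(1 for _ in fh)


with zipfile.ZipFile(OLD_WHL) as old, zipfile.ZipFile(NEW_WHL) as new:
    old_names, new_names = set(old.namelist()), set(new.namelist())
    for name in sorted(new_names - old_names):
        print(f"added   {name} +{line_count(new, name)} -0")
    for name in sorted(old_names - new_names):
        print(f"deleted {name} +0 -{line_count(old, name)}")
    for name in sorted(old_names & new_names):
        if old.read(name) != new.read(name):
            # Exact +/- counts for modified files require a line-level diff.
            print(f"changed {name}")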
npcsh/cli.py DELETED
@@ -1,545 +0,0 @@
-import argparse
-from .npc_sysenv import (
-    NPCSH_CHAT_MODEL,
-    NPCSH_CHAT_PROVIDER,
-    NPCSH_IMAGE_GEN_MODEL,
-    NPCSH_IMAGE_GEN_PROVIDER,
-    NPCSH_API_URL,
-    NPCSH_REASONING_MODEL,
-    NPCSH_REASONING_PROVIDER,
-    NPCSH_DB_PATH,
-    NPCSH_VISION_MODEL,
-    NPCSH_VISION_PROVIDER,
-    NPCSH_DB_PATH,
-    NPCSH_STREAM_OUTPUT,
-    NPCSH_SEARCH_PROVIDER,
-)
-from .serve import start_flask_server
-from .npc_compiler import (
-    initialize_npc_project,
-    conjure_team,
-    NPCCompiler,
-    NPC,
-    load_npc_from_file,
-)
-from .llm_funcs import (
-    check_llm_command,
-    execute_llm_command,
-    execute_llm_question,
-    handle_tool_call,
-    generate_image,
-    get_embeddings,
-    get_llm_response,
-    get_stream,
-    get_conversation,
-)
-from .plonk import plonk, action_space
-from .search import search_web
-from .shell_helpers import *
-import os
-
-# check if ./npc_team exists
-if os.path.exists("./npc_team"):
-
-    npc_directory = os.path.abspath("./npc_team/")
-else:
-    npc_directory = os.path.expanduser("~/.npcsh/npc_team/")
-
-npc_compiler = NPCCompiler(npc_directory, NPCSH_DB_PATH)
-
-
-def main():
-
-    parser = argparse.ArgumentParser(description="NPC utilities")
-    known_commands = {
-        "assemble",
-        "build",
-        "compile",
-        "chat",
-        "init",
-        "new",
-        "plonk",
-        "sample",
-        "search",
-        "select",
-        "serve",
-        "spool",
-        "tools",
-        "tool",
-        "local_search",
-        "rag",
-        "search",
-        "vixynt",
-        "ots",
-        "whisper",
-    }
-
-    # Only add prompt as default if first arg isn't a known command
-    if len(sys.argv) > 1 and sys.argv[1] not in known_commands:
-        parser.add_argument(
-            "prompt", nargs="?", help="Generic prompt to send to the default LLM"
-        )
-        parser.add_argument(
-            "--model", "-m", help="model to use", type=str, default=NPCSH_CHAT_MODEL
-        )
-        parser.add_argument(
-            "--provider",
-            "-pr",
-            help="provider to use",
-            type=str,
-            default=NPCSH_CHAT_PROVIDER,
-        )
-        parser.add_argument(
-            "-n", "--npc", help="name of the NPC", type=str, default="sibiji"
-        )
-
-        args = parser.parse_args()
-        db_conn = sqlite3.connect(NPCSH_DB_PATH)
-        if args.npc is None or args.npc == "sibiji":
-            npc = load_npc_from_file("~/.npcsh/npc_team/sibiji.npc", db_conn)
-        else:
-            npc = load_npc_from_file("./npc_team/" + args.npc + ".npc", db_conn)
-
-        response = check_llm_command(
-            args.prompt, model=args.model, provider=args.provider, npc=npc, stream=True
-        )
-        provider = args.provider
-        model = args.model
-        conversation_result = ""
-        for chunk in response:
-            if provider == "anthropic":
-                if chunk.type == "content_block_delta":
-                    chunk_content = chunk.delta.text
-                    if chunk_content:
-                        conversation_result += chunk_content
-                        print(chunk_content, end="")
-
-            elif (
-                provider == "openai"
-                or provider == "deepseek"
-                or provider == "openai-like"
-            ):
-                chunk_content = "".join(
-                    choice.delta.content
-                    for choice in chunk.choices
-                    if choice.delta.content is not None
-                )
-                if chunk_content:
-                    conversation_result += chunk_content
-                    print(chunk_content, end="")
-
-            elif provider == "ollama":
-                chunk_content = chunk["message"]["content"]
-                if chunk_content:
-                    conversation_result += chunk_content
-                    print(chunk_content, end="")
-        print("\n")
-        return
-
-    parser.add_argument(
-        "--model", "-m", help="model to use", type=str, default=NPCSH_CHAT_MODEL
-    )
-    parser.add_argument(
-        "--provider",
-        "-pr",
-        help="provider to use",
-        type=str,
-        default=NPCSH_CHAT_PROVIDER,
-    )
-
-    subparsers = parser.add_subparsers(dest="command", help="Commands")
-
-    # Generic prompt parser (for "npc 'prompt'")
-
-    # need it so that this prompt is just automatically resolved as the only argument if no positional ones are provided
-    # parser.add_argument(
-    #     "prompt", nargs="?", help="Generic prompt to send to the default LLM"
-    # )
-
-    ### ASSEMBLY LINE PARSER
-    assembly_parser = subparsers.add_parser("assemble", help="Run an NPC assembly line")
-    assembly_parser.add_argument("line", help="Assembly line to run")
-
-    ### BUILD PARSER
-    build_parser = subparsers.add_parser(
-        "build", help="Build a NPC team into a standalone executable server"
-    )
-    build_parser.add_argument(
-        "directory", nargs="?", default=".", help="Directory to build project in"
-    )
-
-    # chat
-    chat_parser = subparsers.add_parser("chat", help="chat with an NPC")
-    chat_parser.add_argument("-n", "--npc_name", help="name of npc")
-
-    # Compile command
-    compile_parser = subparsers.add_parser("compile", help="Compile an NPC")
-    compile_parser.add_argument("path", help="Path to NPC file")
-
-    # Conjure/init command
-    init_parser = subparsers.add_parser("init", help="Initialize a new NPC project")
-    init_parser.add_argument(
-        "directory", nargs="?", default=".", help="Directory to initialize project in"
-    )
-    init_parser.add_argument(
-        "--templates", "-t", help="agent templates(comma-separated list)", type=str
-    )
-    init_parser.add_argument(
-        "--context",
-        "-ctx",
-        help="important information when merging templates",
-        type=str,
-    )
-    ### NEW PARSER
-    new_parser = subparsers.add_parser(
-        "new", help="Create a new [NPC, tool, assembly_line, ]"
-    )
-    new_parser.add_argument(
-        "type",
-        help="Type of object to create",
-        choices=["npc", "tool", "assembly_line"],
-    )
-
-    new_parser.add_argument(
-        "--primary_directive",
-        "-pd",
-        help="primary directive (when making an npc)",
-        type=str,
-    )
-
-    new_parser.add_argument(
-        "--name",
-        "-n",
-        help="name",
-        type=str,
-    )
-
-    new_parser.add_argument(
-        "--description",
-        "-d",
-        help="description",
-        type=str,
-    )
-
-    new_parser.add_argument("--autogen", help="whether to auto gen", default=False)
-
-    ### plonk
-    plonk_parser = subparsers.add_parser("plonk", help="computer use with plonk!")
-    plonk_parser.add_argument(
-        "--task",
-        "-t",
-        help="the task for plonk to accomplish",
-        type=str,
-    )
-    plonk_parser.add_argument(
-        "--name",
-        "-n",
-        help="name of the NPC",
-        type=str,
-    )
-    plonk_parser.add_argument(
-        "--spell",
-        "-sp",
-        help="task for plonk to carry out",
-        type=str,
-    )
-
-    # sample
-    sampler_parser = subparsers.add_parser(
-        "sample", help="sample question one shot to an llm"
-    )
-    sampler_parser.add_argument("prompt", help="prompt for llm")
-    sampler_parser.add_argument(
-        "--npc",
-        "-n",
-        help="name of the NPC",
-        type=str,
-    )
-    select_parser = subparsers.add_parser("select", help="Select a SQL model to run")
-    select_parser.add_argument("model", help="Model to run")
-
-    # Serve command
-    serve_parser = subparsers.add_parser("serve", help="Start the Flask server")
-    serve_parser.add_argument("--port", "-p", help="Optional port")
-    serve_parser.add_argument(
-        "--cors", "-c", help="CORS origins (comma-separated list)", type=str
-    )
-    serve_parser.add_argument(
-        "--templates", "-t", help="agent templates(comma-separated list)", type=str
-    )
-    serve_parser.add_argument(
-        "--context",
-        "-ctx",
-        help="important information when merging templates",
-        type=str,
-    )
-    ### spool
-    spool_parser = subparsers.add_parser("spool", help="Start the Flask server")
-    spool_parser.add_argument("-n", "--npc", default="sibiji")
-
-    # Tools command
-    tools_parser = subparsers.add_parser("tools", help="print the available tools")
-
-    # Tool invocation
-    tool_parser = subparsers.add_parser("tool", help="invoke a tool")
-    tool_parser.add_argument("tool_name", help="name of the tool to invoke")
-    tool_parser.add_argument(
-        "--args", "-a", help="arguments for the tool", nargs="+", default=[]
-    )
-    tool_parser.add_argument(
-        "--flags", "-f", help="flags for the tool", nargs="+", default=[]
-    )
-
-    # Local search
-    local_search_parser = subparsers.add_parser("local_search", help="search locally")
-    local_search_parser.add_argument("query", help="search query")
-    local_search_parser.add_argument(
-        "--path", "-p", help="path to search in", default="."
-    )
-
-    # RAG search
-    rag_parser = subparsers.add_parser("rag", help="search for a term in the npcsh_db")
-    rag_parser.add_argument("--name", "-n", help="name of the NPC", required=True)
-    rag_parser.add_argument(
-        "--filename", "-f", help="filename to search in", required=True
-    )
-    rag_parser.add_argument("--query", "-q", help="search query", required=True)
-
-    # Web search
-    search_parser = subparsers.add_parser("search", help="search the web")
-    search_parser.add_argument("--query", "-q", help="search query")
-    search_parser.add_argument(
-        "--search_provider",
-        "-sp",
-        help="search provider",
-        default=NPCSH_SEARCH_PROVIDER,
-    )
-
-    # Image generation
-    vixynt_parser = subparsers.add_parser("vixynt", help="generate an image")
-    vixynt_parser.add_argument("spell", help="the prompt to generate the image")
-
-    # Screenshot analysis
-    ots_parser = subparsers.add_parser("ots", help="analyze screenshot")
-
-    # Voice chat
-    whisper_parser = subparsers.add_parser("whisper", help="start voice chat")
-    whisper_parser.add_argument("-n", "--npc_name", help="name of the NPC to chat with")
-
-    args = parser.parse_args()
-
-    # Handle NPC chat if the command matches an NPC name
-    if args.command and not args.command.startswith("-"):
-        try:
-            # Check if command is actually an NPC name
-            if os.path.exists(f"npcs/{args.command}.npc"):
-                start_npc_chat(args.command)
-                return
-        except Exception as e:
-            print(f"Error starting chat with NPC {args.command}: {e}")
-
-    if args.command == "serve":
-        if args.cors:
-            # Parse the CORS origins from the comma-separated string
-            cors_origins = [origin.strip() for origin in args.cors.split(",")]
-        else:
-            cors_origins = None
-        if args.templates:
-            templates = [template.strip() for template in args.templates.split(",")]
-        else:
-            templates = None
-        if args.context:
-            context = args.context.strip()
-        else:
-            context = None
-        if args.model:
-            model = args.model
-        else:
-            model = NPCSH_CHAT_MODEL
-        if args.provider:
-            provider = args.provider
-        else:
-            provider = NPCSH_CHAT_PROVIDER
-
-        if context is not None and os.environ.get("WERKZEUG_RUN_MAIN") != "true":
-            initialize_npc_project(
-                args.directory,
-                templates=templates,
-                context=context,
-                model=model,
-                provider=provider,
-            )
-
-        start_flask_server(
-            port=args.port if args.port else 5337,
-            cors_origins=cors_origins,
-        )
-    elif args.command == "chat":
-        npc_name = args.npc_name
-        npc_path = get_npc_path(npc_name, NPCSH_DB_PATH)
-        current_npc = load_npc_from_file(npc_path, sqlite3.connect(NPCSH_DB_PATH))
-        return enter_spool_mode(
-            model=args.model, provider=args.provider, npc=current_npc
-        )
-
-    elif args.command == "init":
-        if args.templates:
-            templates = [template.strip() for template in args.templates.split(",")]
-        else:
-            templates = None
-        if args.context:
-            context = args.context.strip()
-        else:
-            context = None
-        if args.model:
-            model = args.model
-        else:
-            model = NPCSH_CHAT_MODEL
-        if args.provider:
-            provider = args.provider
-        else:
-            provider = NPCSH_CHAT_PROVIDER
-
-        initialize_npc_project(
-            args.directory,
-            templates=templates,
-            context=context,
-            model=model,
-            provider=provider,
-        )
-
-    elif args.command == "compile":
-        npc_compiler = NPCCompiler(npc_directory, NPCSH_DB_PATH)
-        compiled = npc_compiler.compile(args.path)
-        print("NPC compiled to:", compiled)
-
-    elif args.command == "plonk":
-        task = args.task or args.spell
-        npc_name = args.name
-        plonk(
-            task,
-            action_space,
-            model=args.model or NPCSH_CHAT_MODEL,
-            provider=args.provider or NPCSH_CHAT_PROVIDER,
-        )
-
-    elif args.command == "sample":
-        db_conn = sqlite3.connect(NPCSH_DB_PATH)
-        if args.npc is None or args.npc == "sibiji":
-            npc = load_npc_from_file("~/.npcsh/npc_team/sibiji.npc", db_conn)
-        else:
-            npc = load_npc_from_file("./npc_team/" + args.npc + ".npc", db_conn)
-
-        result = get_llm_response(
-            args.prompt,
-            model=args.model,
-            provider=args.provider,
-        )
-        print(result["response"])
-    elif args.command == "vixynt":
-        if args.model == NPCSH_CHAT_MODEL:
-            model = NPCSH_IMAGE_GEN_MODEL
-        if args.provider == NPCSH_CHAT_PROVIDER:
-            provider = NPCSH_IMAGE_GEN_PROVIDER
-        image_path = generate_image(
-            args.spell,
-            model=args.model,
-            provider=args.provider,
-        )
-        print(f"Image generated at: {image_path}")
-
-    elif args.command == "ots":
-        if args.model == NPCSH_CHAT_MODEL:
-            model = NPCSH_VISION_MODEL
-        if args.provider == NPCSH_CHAT_PROVIDER:
-            provider = NPCSH_VISION_PROVIDER
-
-        result = ots(
-            "",
-            model=args.model,
-            provider=args.provider,
-        )
-        print(result["output"])
-
-    elif args.command == "whisper":
-        npc_name = args.npc_name
-        npc_path = get_npc_path(npc_name, NPCSH_DB_PATH)
-        current_npc = load_npc_from_file(npc_path, sqlite3.connect(NPCSH_DB_PATH))
-
-        enter_whisper_mode(npc=current_npc)
-
-    elif args.command == "tool":
-        result = invoke_tool(
-            args.tool_name,
-            args=args.args,
-            flags=args.flags,
-        )
-        print(result)
-
-    elif args.command == "tools":
-        tools = list_available_tools()
-        for tool in tools:
-            print(f"- {tool}")
-
-    elif args.command == "local_search":
-        results = perform_local_search(args.query, path=args.path)
-        for result in results:
-            print(f"- {result}")
-
-    elif args.command == "rag":
-        results = perform_rag_search(
-            npc_name=args.name,
-            filename=args.filename,
-            query=args.query,
-        )
-        for result in results:
-            print(f"- {result}")
-
-    elif args.command == "search":
-        results = search_web(args.query, provider=args.provider)
-        for result in results:
-            print(f"- {result}")
-
-    elif args.command == "new":
-        # create a new npc, tool, or assembly line
-        if args.type == "npc":
-            from .npc_creator import create_new_npc
-
-            create_new_npc(
-                name=args.name,
-                primary_directive=args.primary_directive,
-                description=args.description,
-                model=args.model or NPCSH_CHAT_MODEL,
-                provider=args.provider or NPCSH_CHAT_PROVIDER,
-                autogen=args.autogen,
-            )
-        elif args.type == "tool":
-            from .tool_creator import create_new_tool
-
-            create_new_tool(
-                name=args.name,
-                description=args.description,
-                autogen=args.autogen,
-            )
-        elif args.type == "assembly_line":
-            from .assembly_creator import create_new_assembly_line
-
-            create_new_assembly_line(
-                name=args.name,
-                description=args.description,
-                autogen=args.autogen,
-            )
-    elif args.command == "spool":
-        db_conn = sqlite3.connect(NPCSH_DB_PATH)
-        if args.npc is None or args.npc == "sibiji":
-            npc = load_npc_from_file("~/.npcsh/npc_team/sibiji.npc", db_conn)
-        else:
-            npc = load_npc_from_file("./npc_team/" + args.npc + ".npc", db_conn)
-        response = enter_spool_mode(
-            stream=True,
-            npc=npc,
-        )
-
-
-if __name__ == "__main__":
-    main()
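For reference, the deleted main() normalizes streaming chunks per provider: Anthropic content-block deltas, OpenAI-style choice deltas (also used for deepseek and openai-like), and Ollama message dicts. The sketch below shows that normalization pattern as a standalone function; the helper name extract_chunk_text and the SimpleNamespace stand-ins in the demo are illustrative only and are not part of either package version.

# Illustrative only: mirrors the provider-specific chunk handling in the
# deleted cli.py; real chunks come from the providers' streaming clients.
from types import SimpleNamespace


def extract_chunk_text(chunk, provider: str) -> str:
    """Return the text delta carried by one streaming chunk, or ""."""
    if provider == "anthropic":
        # Anthropic streams typed events; text arrives in content_block_delta.
        if getattr(chunk, "type", None) == "content_block_delta":
            return chunk.delta.text or ""
        return ""
    if provider in ("openai", "deepseek", "openai-like"):
        # OpenAI-style chunks carry one delta per choice.
        return "".join(
            choice.delta.content
            for choice in chunk.choices
            if choice.delta.content is not None
        )
    if provider == "ollama":
        # Ollama's Python client yields plain dicts.
        return chunk["message"]["content"]
    return ""


if __name__ == "__main__":
    # Fake Anthropic-style chunks, just to exercise the function offline.
    fake_stream = [
        SimpleNamespace(type="content_block_delta", delta=SimpleNamespace(text="Hel")),
        SimpleNamespace(type="message_stop"),
        SimpleNamespace(type="content_block_delta", delta=SimpleNamespace(text="lo")),
    ]
    print("".join(extract_chunk_text(c, "anthropic") for c in fake_stream))  # -> Hello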