npcpy 1.0.26__py3-none-any.whl → 1.2.32__py3-none-any.whl

This diff shows the contents of publicly released package versions as they appear in their respective public registries and is provided for informational purposes only.
Files changed (148)
  1. npcpy/__init__.py +0 -7
  2. npcpy/data/audio.py +16 -99
  3. npcpy/data/image.py +43 -42
  4. npcpy/data/load.py +83 -124
  5. npcpy/data/text.py +28 -28
  6. npcpy/data/video.py +8 -32
  7. npcpy/data/web.py +51 -23
  8. npcpy/ft/diff.py +110 -0
  9. npcpy/ft/ge.py +115 -0
  10. npcpy/ft/memory_trainer.py +171 -0
  11. npcpy/ft/model_ensembler.py +357 -0
  12. npcpy/ft/rl.py +360 -0
  13. npcpy/ft/sft.py +248 -0
  14. npcpy/ft/usft.py +128 -0
  15. npcpy/gen/audio_gen.py +24 -0
  16. npcpy/gen/embeddings.py +13 -13
  17. npcpy/gen/image_gen.py +262 -117
  18. npcpy/gen/response.py +615 -415
  19. npcpy/gen/video_gen.py +53 -7
  20. npcpy/llm_funcs.py +1869 -437
  21. npcpy/main.py +1 -1
  22. npcpy/memory/command_history.py +844 -510
  23. npcpy/memory/kg_vis.py +833 -0
  24. npcpy/memory/knowledge_graph.py +892 -1845
  25. npcpy/memory/memory_processor.py +81 -0
  26. npcpy/memory/search.py +188 -90
  27. npcpy/mix/debate.py +192 -3
  28. npcpy/npc_compiler.py +1672 -801
  29. npcpy/npc_sysenv.py +593 -1266
  30. npcpy/serve.py +3120 -0
  31. npcpy/sql/ai_function_tools.py +257 -0
  32. npcpy/sql/database_ai_adapters.py +186 -0
  33. npcpy/sql/database_ai_functions.py +163 -0
  34. npcpy/sql/model_runner.py +19 -19
  35. npcpy/sql/npcsql.py +706 -507
  36. npcpy/sql/sql_model_compiler.py +156 -0
  37. npcpy/tools.py +183 -0
  38. npcpy/work/plan.py +13 -279
  39. npcpy/work/trigger.py +3 -3
  40. npcpy-1.2.32.dist-info/METADATA +803 -0
  41. npcpy-1.2.32.dist-info/RECORD +54 -0
  42. npcpy/data/dataframes.py +0 -171
  43. npcpy/memory/deep_research.py +0 -125
  44. npcpy/memory/sleep.py +0 -557
  45. npcpy/modes/_state.py +0 -78
  46. npcpy/modes/alicanto.py +0 -1075
  47. npcpy/modes/guac.py +0 -785
  48. npcpy/modes/mcp_npcsh.py +0 -822
  49. npcpy/modes/npc.py +0 -213
  50. npcpy/modes/npcsh.py +0 -1158
  51. npcpy/modes/plonk.py +0 -409
  52. npcpy/modes/pti.py +0 -234
  53. npcpy/modes/serve.py +0 -1637
  54. npcpy/modes/spool.py +0 -312
  55. npcpy/modes/wander.py +0 -549
  56. npcpy/modes/yap.py +0 -572
  57. npcpy/npc_team/alicanto.npc +0 -2
  58. npcpy/npc_team/alicanto.png +0 -0
  59. npcpy/npc_team/assembly_lines/test_pipeline.py +0 -181
  60. npcpy/npc_team/corca.npc +0 -13
  61. npcpy/npc_team/foreman.npc +0 -7
  62. npcpy/npc_team/frederic.npc +0 -6
  63. npcpy/npc_team/frederic4.png +0 -0
  64. npcpy/npc_team/guac.png +0 -0
  65. npcpy/npc_team/jinxs/automator.jinx +0 -18
  66. npcpy/npc_team/jinxs/bash_executer.jinx +0 -31
  67. npcpy/npc_team/jinxs/calculator.jinx +0 -11
  68. npcpy/npc_team/jinxs/edit_file.jinx +0 -96
  69. npcpy/npc_team/jinxs/file_chat.jinx +0 -14
  70. npcpy/npc_team/jinxs/gui_controller.jinx +0 -28
  71. npcpy/npc_team/jinxs/image_generation.jinx +0 -29
  72. npcpy/npc_team/jinxs/internet_search.jinx +0 -30
  73. npcpy/npc_team/jinxs/local_search.jinx +0 -152
  74. npcpy/npc_team/jinxs/npcsh_executor.jinx +0 -31
  75. npcpy/npc_team/jinxs/python_executor.jinx +0 -8
  76. npcpy/npc_team/jinxs/screen_cap.jinx +0 -25
  77. npcpy/npc_team/jinxs/sql_executor.jinx +0 -33
  78. npcpy/npc_team/kadiefa.npc +0 -3
  79. npcpy/npc_team/kadiefa.png +0 -0
  80. npcpy/npc_team/npcsh.ctx +0 -9
  81. npcpy/npc_team/npcsh_sibiji.png +0 -0
  82. npcpy/npc_team/plonk.npc +0 -2
  83. npcpy/npc_team/plonk.png +0 -0
  84. npcpy/npc_team/plonkjr.npc +0 -2
  85. npcpy/npc_team/plonkjr.png +0 -0
  86. npcpy/npc_team/sibiji.npc +0 -5
  87. npcpy/npc_team/sibiji.png +0 -0
  88. npcpy/npc_team/spool.png +0 -0
  89. npcpy/npc_team/templates/analytics/celona.npc +0 -0
  90. npcpy/npc_team/templates/hr_support/raone.npc +0 -0
  91. npcpy/npc_team/templates/humanities/eriane.npc +0 -4
  92. npcpy/npc_team/templates/it_support/lineru.npc +0 -0
  93. npcpy/npc_team/templates/marketing/slean.npc +0 -4
  94. npcpy/npc_team/templates/philosophy/maurawa.npc +0 -0
  95. npcpy/npc_team/templates/sales/turnic.npc +0 -4
  96. npcpy/npc_team/templates/software/welxor.npc +0 -0
  97. npcpy/npc_team/yap.png +0 -0
  98. npcpy/routes.py +0 -958
  99. npcpy/work/mcp_helpers.py +0 -357
  100. npcpy/work/mcp_server.py +0 -194
  101. npcpy-1.0.26.data/data/npcpy/npc_team/alicanto.npc +0 -2
  102. npcpy-1.0.26.data/data/npcpy/npc_team/alicanto.png +0 -0
  103. npcpy-1.0.26.data/data/npcpy/npc_team/automator.jinx +0 -18
  104. npcpy-1.0.26.data/data/npcpy/npc_team/bash_executer.jinx +0 -31
  105. npcpy-1.0.26.data/data/npcpy/npc_team/calculator.jinx +0 -11
  106. npcpy-1.0.26.data/data/npcpy/npc_team/celona.npc +0 -0
  107. npcpy-1.0.26.data/data/npcpy/npc_team/corca.npc +0 -13
  108. npcpy-1.0.26.data/data/npcpy/npc_team/edit_file.jinx +0 -96
  109. npcpy-1.0.26.data/data/npcpy/npc_team/eriane.npc +0 -4
  110. npcpy-1.0.26.data/data/npcpy/npc_team/file_chat.jinx +0 -14
  111. npcpy-1.0.26.data/data/npcpy/npc_team/foreman.npc +0 -7
  112. npcpy-1.0.26.data/data/npcpy/npc_team/frederic.npc +0 -6
  113. npcpy-1.0.26.data/data/npcpy/npc_team/frederic4.png +0 -0
  114. npcpy-1.0.26.data/data/npcpy/npc_team/guac.png +0 -0
  115. npcpy-1.0.26.data/data/npcpy/npc_team/gui_controller.jinx +0 -28
  116. npcpy-1.0.26.data/data/npcpy/npc_team/image_generation.jinx +0 -29
  117. npcpy-1.0.26.data/data/npcpy/npc_team/internet_search.jinx +0 -30
  118. npcpy-1.0.26.data/data/npcpy/npc_team/kadiefa.npc +0 -3
  119. npcpy-1.0.26.data/data/npcpy/npc_team/kadiefa.png +0 -0
  120. npcpy-1.0.26.data/data/npcpy/npc_team/lineru.npc +0 -0
  121. npcpy-1.0.26.data/data/npcpy/npc_team/local_search.jinx +0 -152
  122. npcpy-1.0.26.data/data/npcpy/npc_team/maurawa.npc +0 -0
  123. npcpy-1.0.26.data/data/npcpy/npc_team/npcsh.ctx +0 -9
  124. npcpy-1.0.26.data/data/npcpy/npc_team/npcsh_executor.jinx +0 -31
  125. npcpy-1.0.26.data/data/npcpy/npc_team/npcsh_sibiji.png +0 -0
  126. npcpy-1.0.26.data/data/npcpy/npc_team/plonk.npc +0 -2
  127. npcpy-1.0.26.data/data/npcpy/npc_team/plonk.png +0 -0
  128. npcpy-1.0.26.data/data/npcpy/npc_team/plonkjr.npc +0 -2
  129. npcpy-1.0.26.data/data/npcpy/npc_team/plonkjr.png +0 -0
  130. npcpy-1.0.26.data/data/npcpy/npc_team/python_executor.jinx +0 -8
  131. npcpy-1.0.26.data/data/npcpy/npc_team/raone.npc +0 -0
  132. npcpy-1.0.26.data/data/npcpy/npc_team/screen_cap.jinx +0 -25
  133. npcpy-1.0.26.data/data/npcpy/npc_team/sibiji.npc +0 -5
  134. npcpy-1.0.26.data/data/npcpy/npc_team/sibiji.png +0 -0
  135. npcpy-1.0.26.data/data/npcpy/npc_team/slean.npc +0 -4
  136. npcpy-1.0.26.data/data/npcpy/npc_team/spool.png +0 -0
  137. npcpy-1.0.26.data/data/npcpy/npc_team/sql_executor.jinx +0 -33
  138. npcpy-1.0.26.data/data/npcpy/npc_team/test_pipeline.py +0 -181
  139. npcpy-1.0.26.data/data/npcpy/npc_team/turnic.npc +0 -4
  140. npcpy-1.0.26.data/data/npcpy/npc_team/welxor.npc +0 -0
  141. npcpy-1.0.26.data/data/npcpy/npc_team/yap.png +0 -0
  142. npcpy-1.0.26.dist-info/METADATA +0 -827
  143. npcpy-1.0.26.dist-info/RECORD +0 -139
  144. npcpy-1.0.26.dist-info/entry_points.txt +0 -11
  145. /npcpy/{modes → ft}/__init__.py +0 -0
  146. {npcpy-1.0.26.dist-info → npcpy-1.2.32.dist-info}/WHEEL +0 -0
  147. {npcpy-1.0.26.dist-info → npcpy-1.2.32.dist-info}/licenses/LICENSE +0 -0
  148. {npcpy-1.0.26.dist-info → npcpy-1.2.32.dist-info}/top_level.txt +0 -0
npcpy/modes/spool.py DELETED
@@ -1,312 +0,0 @@
- from npcpy.memory.command_history import CommandHistory, start_new_conversation, save_conversation_message
- from npcpy.data.load import load_pdf, load_csv, load_json, load_excel, load_txt
- from npcpy.data.image import capture_screenshot
- from npcpy.data.text import rag_search
-
- import os
- from npcpy.npc_sysenv import (
-     orange,
-     get_system_message,
-     render_markdown,
-     render_code_block,
-     print_and_process_stream_with_markdown,
-     NPCSH_VISION_MODEL, NPCSH_VISION_PROVIDER,
-     NPCSH_CHAT_MODEL, NPCSH_CHAT_PROVIDER,
-     NPCSH_STREAM_OUTPUT
- )
- from npcpy.llm_funcs import (get_llm_response,)
-
- from npcpy.npc_compiler import NPC
- from typing import Any, List, Dict, Union
- from npcpy.modes.yap import enter_yap_mode
-
-
- def enter_spool_mode(
-     npc=None,
-     team=None,
-     model: str = NPCSH_CHAT_MODEL,
-     provider: str = NPCSH_CHAT_PROVIDER,
-     vision_model: str = NPCSH_VISION_MODEL,
-     vision_provider: str = NPCSH_VISION_PROVIDER,
-     files: List[str] = None,
-     rag_similarity_threshold: float = 0.3,
-     messages: List[Dict] = None,
-     conversation_id: str = None,
-     stream: bool = NPCSH_STREAM_OUTPUT,
- ) -> Dict:
-     """
-     Function Description:
-         This function is used to enter the spool mode where files can be loaded into memory.
-     Args:
-         npc : Any : The NPC object.
-         files : List[str] : List of file paths to load into the context.
-     Returns:
-         Dict : The messages and output.
-     """
-
-     npc_info = f" (NPC: {npc.name})" if npc else ""
-     print(f"Entering spool mode{npc_info}. Type '/sq' to exit spool mode.")
-
-     spool_context = (
-         messages.copy() if messages else []
-     )  # Initialize context with messages
-
-     loaded_content = {}  # New dictionary to hold loaded content
-
-     # Create conversation ID if not provided
-     if not conversation_id:
-         conversation_id = start_new_conversation()
-
-     command_history = CommandHistory()
-     # Load specified files if any
-     if files:
-         for file in files:
-             extension = os.path.splitext(file)[1].lower()
-             try:
-                 if extension == ".pdf":
-                     content = load_pdf(file)["texts"].iloc[0]
-                 elif extension == ".csv":
-                     content = load_csv(file)
-                 else:
-                     print(f"Unsupported file type: {file}")
-                     continue
-                 loaded_content[file] = content
-                 print(f"Loaded content from: {file}")
-             except Exception as e:
-                 print(f"Error loading {file}: {str(e)}")
-
-     # Add system message to context
-     system_message = get_system_message(npc) if npc else "You are a helpful assistant."
-     if len(spool_context) > 0:
-         if spool_context[0]["role"] != "system":
-             spool_context.insert(0, {"role": "system", "content": system_message})
-     else:
-         spool_context.append({"role": "system", "content": system_message})
-     # Inherit last n messages if specified
-     if npc is not None:
-         if model is None:
-             model = npc.model
-         if provider is None:
-             provider = npc.provider
-
-     while True:
-         kwargs_to_pass = {}
-         if npc:
-             kwargs_to_pass["npc"] = npc
-
-         try:
-             user_input = input("spool:in> ").strip()
-             if len(user_input) == 0:
-                 continue
-             if user_input.lower() == "/sq":
-                 print("Exiting spool mode.")
-                 break
-
-             if user_input.lower() == "/whisper":  # Check for whisper command
-                 messages = enter_yap_mode(spool_context, npc)
-                 continue
-
-             if user_input.startswith("/ots"):
-                 command_parts = user_input.split()
-                 image_paths = []
-                 print('using vision model: ', vision_model)
-
-                 # Handle image loading/capturing
-                 if len(command_parts) > 1:
-                     # User provided image path(s)
-                     for img_path in command_parts[1:]:
-                         full_path = os.path.join(os.getcwd(), img_path)
-                         if os.path.exists(full_path):
-                             image_paths.append(full_path)
-                         else:
-                             print(f"Error: Image file not found at {full_path}")
-                 else:
-                     # Capture screenshot
-                     output = capture_screenshot(npc=npc)
-                     if output and "file_path" in output:
-                         image_paths.append(output["file_path"])
-                         print(f"Screenshot captured: {output['filename']}")
-
-                 if not image_paths:
-                     print("No valid images provided.")
-                     continue
-
-                 # Get user prompt about the image(s)
-                 user_prompt = input(
-                     "Enter a prompt for the LLM about these images (or press Enter to skip): "
-                 )
-                 if not user_prompt:
-                     user_prompt = "Please analyze these images."
-
-                 model = vision_model
-                 provider = vision_provider
-                 # Save the user message
-                 message_id = save_conversation_message(
-                     command_history,
-                     conversation_id,
-                     "user",
-                     user_prompt,
-                     wd=os.getcwd(),
-                     model=vision_model,
-                     provider=vision_provider,
-                     npc=npc.name if npc else None,
-                     team=team.name if team else None,
-                 )
-
-                 # Process the request with our unified approach
-                 response = get_llm_response(
-                     user_prompt,
-                     model=vision_model,
-                     provider=provider,
-                     messages=spool_context,
-                     images=image_paths,
-                     stream=stream,
-                     **kwargs_to_pass
-                 )
-
-                 # Extract the assistant's response
-                 assistant_reply = response['response']
-                 spool_context = response['messages']
-
-                 if stream:
-                     print(orange(f'spool:{npc.name}:{vision_model}>'), end='', flush=True)
-                     assistant_reply = print_and_process_stream_with_markdown(assistant_reply, model=model, provider=provider)
-
-                 spool_context.append({"role": "assistant", "content": assistant_reply})
-                 if assistant_reply.count("```") % 2 != 0:
-                     assistant_reply = assistant_reply + "```"
-                 # Save the assistant's response
-                 save_conversation_message(
-                     command_history,
-                     conversation_id,
-                     "assistant",
-                     assistant_reply,
-                     wd=os.getcwd(),
-                     model=vision_model,
-                     provider=vision_provider,
-                     npc=npc.name if npc else None,
-                     team=team.name if team else None,
-                 )
-
-                 # Display the response
-                 if not stream:
-                     render_markdown(assistant_reply)
-
-                 continue
-
-             # Handle RAG context
-             if loaded_content:
-                 context_content = ""
-                 for filename, content in loaded_content.items():
-                     retrieved_docs = rag_search(
-                         user_input,
-                         content,
-                         similarity_threshold=rag_similarity_threshold,
-                     )
-                     if retrieved_docs:
-                         context_content += (
-                             f"\n\nLoaded content from: {filename}\n{content}\n\n"
-                         )
-                 if len(context_content) > 0:
-                     user_input += f"""
-                     Here is the loaded content that may be relevant to your query:
-                     {context_content}
-                     Please reference it explicitly in your response and use it for answering.
-                     """
-
-             # Save user message
-             message_id = save_conversation_message(
-                 command_history,
-                 conversation_id,
-                 "user",
-                 user_input,
-                 wd=os.getcwd(),
-                 model=model,
-                 provider=provider,
-                 npc=npc.name if npc else None,
-                 team=team.name if team else None,
-             )
-
-             response = get_llm_response(
-                 user_input,
-                 model=model,
-                 provider=provider,
-                 messages=spool_context,
-                 stream=stream,
-                 **kwargs_to_pass
-             )
-
-             assistant_reply, spool_context = response['response'], response['messages']
-             if stream:
-                 print(orange(f'{npc.name if npc else "spool"}:{npc.model if npc else model}>'), end='', flush=True)
-                 assistant_reply = print_and_process_stream_with_markdown(assistant_reply, model=model, provider=provider)
-             # Save assistant message
-             save_conversation_message(
-                 command_history,
-                 conversation_id,
-                 "assistant",
-                 assistant_reply,
-                 wd=os.getcwd(),
-                 model=model,
-                 provider=provider,
-                 npc=npc.name if npc else None,
-                 team=team.name if team else None,
-             )
-
-             # Fix unfinished markdown notation
-             if assistant_reply.count("```") % 2 != 0:
-                 assistant_reply = assistant_reply + "```"
-
-             if not stream:
-                 render_markdown(assistant_reply)
-
-         except (KeyboardInterrupt, EOFError):
-             print("\nExiting spool mode.")
-             break
-
-     return {
-         "messages": spool_context,
-         "output": "\n".join(
-             [msg["content"] for msg in spool_context if msg["role"] == "assistant"]
-         ),
-     }
-
-
- def main():
-     # Example usage
-     import argparse
-     parser = argparse.ArgumentParser(description="Enter spool mode for chatting with an LLM")
-     parser.add_argument("--model", default=NPCSH_CHAT_MODEL, help="Model to use")
-     parser.add_argument("--provider", default=NPCSH_CHAT_PROVIDER, help="Provider to use")
-     parser.add_argument("--files", nargs="*", help="Files to load into context")
-     parser.add_argument("--stream", default="true", help="Use streaming mode")
-     parser.add_argument("--npc", type=str, default=os.path.expanduser('~/.npcsh/npc_team/sibiji.npc'), help="Path to NPC file")
-
-     args = parser.parse_args()
-
-     npc = NPC(file=args.npc)
-     print('npc: ', args.npc)
-     print(args.stream)
-     # Enter spool mode
-     enter_spool_mode(
-         npc=npc,
-         model=args.model,
-         provider=args.provider,
-         files=args.files,
-         stream=args.stream.lower() == "true",
-     )
-
-
- if __name__ == "__main__":
-     main()
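
For reference, a minimal usage sketch of the removed enter_spool_mode entry point, reconstructed only from the signature and main() shown above (the npcpy 1.0.26 API). The file path and threshold value below are placeholders, not values taken from the package.

    import os
    from npcpy.npc_compiler import NPC
    from npcpy.modes.spool import enter_spool_mode

    # Load the default NPC that main() used in 1.0.26.
    npc = NPC(file=os.path.expanduser('~/.npcsh/npc_team/sibiji.npc'))

    # Start an interactive spool session with a PDF preloaded for RAG lookups.
    # The loader above only handled .pdf and .csv files; other types were skipped.
    result = enter_spool_mode(
        npc=npc,
        files=['notes.pdf'],               # placeholder path
        rag_similarity_threshold=0.3,      # forwarded to rag_search for loaded files
        stream=True,
    )

    # The return value bundles the conversation and the concatenated assistant replies.
    print(result['output'])

The session loop itself is interactive (it reads from stdin until '/sq'), so the call blocks until the user exits spool mode.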