kollabor 0.4.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (128)
  1. core/__init__.py +18 -0
  2. core/application.py +578 -0
  3. core/cli.py +193 -0
  4. core/commands/__init__.py +43 -0
  5. core/commands/executor.py +277 -0
  6. core/commands/menu_renderer.py +319 -0
  7. core/commands/parser.py +186 -0
  8. core/commands/registry.py +331 -0
  9. core/commands/system_commands.py +479 -0
  10. core/config/__init__.py +7 -0
  11. core/config/llm_task_config.py +110 -0
  12. core/config/loader.py +501 -0
  13. core/config/manager.py +112 -0
  14. core/config/plugin_config_manager.py +346 -0
  15. core/config/plugin_schema.py +424 -0
  16. core/config/service.py +399 -0
  17. core/effects/__init__.py +1 -0
  18. core/events/__init__.py +12 -0
  19. core/events/bus.py +129 -0
  20. core/events/executor.py +154 -0
  21. core/events/models.py +258 -0
  22. core/events/processor.py +176 -0
  23. core/events/registry.py +289 -0
  24. core/fullscreen/__init__.py +19 -0
  25. core/fullscreen/command_integration.py +290 -0
  26. core/fullscreen/components/__init__.py +12 -0
  27. core/fullscreen/components/animation.py +258 -0
  28. core/fullscreen/components/drawing.py +160 -0
  29. core/fullscreen/components/matrix_components.py +177 -0
  30. core/fullscreen/manager.py +302 -0
  31. core/fullscreen/plugin.py +204 -0
  32. core/fullscreen/renderer.py +282 -0
  33. core/fullscreen/session.py +324 -0
  34. core/io/__init__.py +52 -0
  35. core/io/buffer_manager.py +362 -0
  36. core/io/config_status_view.py +272 -0
  37. core/io/core_status_views.py +410 -0
  38. core/io/input_errors.py +313 -0
  39. core/io/input_handler.py +2655 -0
  40. core/io/input_mode_manager.py +402 -0
  41. core/io/key_parser.py +344 -0
  42. core/io/layout.py +587 -0
  43. core/io/message_coordinator.py +204 -0
  44. core/io/message_renderer.py +601 -0
  45. core/io/modal_interaction_handler.py +315 -0
  46. core/io/raw_input_processor.py +946 -0
  47. core/io/status_renderer.py +845 -0
  48. core/io/terminal_renderer.py +586 -0
  49. core/io/terminal_state.py +551 -0
  50. core/io/visual_effects.py +734 -0
  51. core/llm/__init__.py +26 -0
  52. core/llm/api_communication_service.py +863 -0
  53. core/llm/conversation_logger.py +473 -0
  54. core/llm/conversation_manager.py +414 -0
  55. core/llm/file_operations_executor.py +1401 -0
  56. core/llm/hook_system.py +402 -0
  57. core/llm/llm_service.py +1629 -0
  58. core/llm/mcp_integration.py +386 -0
  59. core/llm/message_display_service.py +450 -0
  60. core/llm/model_router.py +214 -0
  61. core/llm/plugin_sdk.py +396 -0
  62. core/llm/response_parser.py +848 -0
  63. core/llm/response_processor.py +364 -0
  64. core/llm/tool_executor.py +520 -0
  65. core/logging/__init__.py +19 -0
  66. core/logging/setup.py +208 -0
  67. core/models/__init__.py +5 -0
  68. core/models/base.py +23 -0
  69. core/plugins/__init__.py +13 -0
  70. core/plugins/collector.py +212 -0
  71. core/plugins/discovery.py +386 -0
  72. core/plugins/factory.py +263 -0
  73. core/plugins/registry.py +152 -0
  74. core/storage/__init__.py +5 -0
  75. core/storage/state_manager.py +84 -0
  76. core/ui/__init__.py +6 -0
  77. core/ui/config_merger.py +176 -0
  78. core/ui/config_widgets.py +369 -0
  79. core/ui/live_modal_renderer.py +276 -0
  80. core/ui/modal_actions.py +162 -0
  81. core/ui/modal_overlay_renderer.py +373 -0
  82. core/ui/modal_renderer.py +591 -0
  83. core/ui/modal_state_manager.py +443 -0
  84. core/ui/widget_integration.py +222 -0
  85. core/ui/widgets/__init__.py +27 -0
  86. core/ui/widgets/base_widget.py +136 -0
  87. core/ui/widgets/checkbox.py +85 -0
  88. core/ui/widgets/dropdown.py +140 -0
  89. core/ui/widgets/label.py +78 -0
  90. core/ui/widgets/slider.py +185 -0
  91. core/ui/widgets/text_input.py +224 -0
  92. core/utils/__init__.py +11 -0
  93. core/utils/config_utils.py +656 -0
  94. core/utils/dict_utils.py +212 -0
  95. core/utils/error_utils.py +275 -0
  96. core/utils/key_reader.py +171 -0
  97. core/utils/plugin_utils.py +267 -0
  98. core/utils/prompt_renderer.py +151 -0
  99. kollabor-0.4.9.dist-info/METADATA +298 -0
  100. kollabor-0.4.9.dist-info/RECORD +128 -0
  101. kollabor-0.4.9.dist-info/WHEEL +5 -0
  102. kollabor-0.4.9.dist-info/entry_points.txt +2 -0
  103. kollabor-0.4.9.dist-info/licenses/LICENSE +21 -0
  104. kollabor-0.4.9.dist-info/top_level.txt +4 -0
  105. kollabor_cli_main.py +20 -0
  106. plugins/__init__.py +1 -0
  107. plugins/enhanced_input/__init__.py +18 -0
  108. plugins/enhanced_input/box_renderer.py +103 -0
  109. plugins/enhanced_input/box_styles.py +142 -0
  110. plugins/enhanced_input/color_engine.py +165 -0
  111. plugins/enhanced_input/config.py +150 -0
  112. plugins/enhanced_input/cursor_manager.py +72 -0
  113. plugins/enhanced_input/geometry.py +81 -0
  114. plugins/enhanced_input/state.py +130 -0
  115. plugins/enhanced_input/text_processor.py +115 -0
  116. plugins/enhanced_input_plugin.py +385 -0
  117. plugins/fullscreen/__init__.py +9 -0
  118. plugins/fullscreen/example_plugin.py +327 -0
  119. plugins/fullscreen/matrix_plugin.py +132 -0
  120. plugins/hook_monitoring_plugin.py +1299 -0
  121. plugins/query_enhancer_plugin.py +350 -0
  122. plugins/save_conversation_plugin.py +502 -0
  123. plugins/system_commands_plugin.py +93 -0
  124. plugins/tmux_plugin.py +795 -0
  125. plugins/workflow_enforcement_plugin.py +629 -0
  126. system_prompt/default.md +1286 -0
  127. system_prompt/default_win.md +265 -0
  128. system_prompt/example_with_trender.md +47 -0
@@ -0,0 +1,410 @@
1
+ """Core status views for the Kollabor CLI application."""
2
+
3
+ import logging
4
+ import psutil
5
+ from typing import List
6
+
7
+ from .status_renderer import StatusViewConfig, BlockConfig
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
class CoreStatusViews:
    """Provides default core status views for the application."""

    def __init__(self, llm_service=None, config=None):
        """Initialize core status views.

        Args:
            llm_service: LLM service instance for status data.
            config: Configuration manager for toggleable sections.
        """
        # Both collaborators are optional: every content provider below
        # degrades gracefully (defaults / "Unknown") when they are None.
        self.llm_service = llm_service
        self.config = config
24
+
25
+ def register_all_views(self, status_registry) -> None:
26
+ """Register all core status views with the registry.
27
+
28
+ Args:
29
+ status_registry: StatusViewRegistry to register views with.
30
+ """
31
+ try:
32
+ # View 0: Overview - Consolidated view with everything (priority 1100 - highest)
33
+ overview_view = StatusViewConfig(
34
+ name="Overview",
35
+ plugin_source="core",
36
+ priority=1100,
37
+ blocks=[
38
+ BlockConfig(
39
+ width_fraction=0.33,
40
+ content_provider=self._get_overview_ai_session_content,
41
+ title="AI & Session",
42
+ priority=100,
43
+ ),
44
+ BlockConfig(
45
+ width_fraction=0.33,
46
+ content_provider=self._get_overview_model_content,
47
+ title="Model",
48
+ priority=90,
49
+ ),
50
+ BlockConfig(
51
+ width_fraction=0.34,
52
+ content_provider=self._get_overview_performance_content,
53
+ title="Performance",
54
+ priority=80,
55
+ ),
56
+ ],
57
+ )
58
+ status_registry.register_status_view("core", overview_view)
59
+
60
+ # View 1: Session Stats (priority 1000)
61
+ session_view = StatusViewConfig(
62
+ name="Session Stats",
63
+ plugin_source="core",
64
+ priority=1000,
65
+ blocks=[
66
+ BlockConfig(
67
+ width_fraction=0.5,
68
+ content_provider=self._get_session_stats_content,
69
+ title="Session Stats",
70
+ priority=100,
71
+ ),
72
+ BlockConfig(
73
+ width_fraction=0.5,
74
+ content_provider=self._get_ai_status_content,
75
+ title="AI Status",
76
+ priority=90,
77
+ ),
78
+ ],
79
+ )
80
+ status_registry.register_status_view("core", session_view)
81
+
82
+ # View 2: Performance (priority 800)
83
+ performance_view = StatusViewConfig(
84
+ name="Performance",
85
+ plugin_source="core",
86
+ priority=800,
87
+ blocks=[
88
+ BlockConfig(
89
+ width_fraction=1.0,
90
+ content_provider=self._get_performance_content,
91
+ title="Performance",
92
+ priority=100,
93
+ )
94
+ ],
95
+ )
96
+ status_registry.register_status_view("core", performance_view)
97
+
98
+ # View 3: Minimal (priority 600)
99
+ minimal_view = StatusViewConfig(
100
+ name="Minimal",
101
+ plugin_source="core",
102
+ priority=600,
103
+ blocks=[
104
+ BlockConfig(
105
+ width_fraction=1.0,
106
+ content_provider=self._get_minimal_content,
107
+ title="Minimal",
108
+ priority=100,
109
+ )
110
+ ],
111
+ )
112
+ status_registry.register_status_view("core", minimal_view)
113
+
114
+ # View 4: LLM Details (priority 700)
115
+ llm_view = StatusViewConfig(
116
+ name="LLM Details",
117
+ plugin_source="core",
118
+ priority=700,
119
+ blocks=[
120
+ BlockConfig(
121
+ width_fraction=1.0,
122
+ content_provider=self._get_llm_details_content,
123
+ title="LLM Configuration",
124
+ priority=100,
125
+ )
126
+ ],
127
+ )
128
+ status_registry.register_status_view("core", llm_view)
129
+
130
+ logger.info(
131
+ "Registered 5 core status views: "
132
+ "Overview, Session Stats, Performance, LLM Details, Minimal"
133
+ )
134
+
135
+ except Exception as e:
136
+ logger.error(f"Failed to register core status views: {e}")
137
+
138
+ def _get_overview_ai_session_content(self) -> List[str]:
139
+ """Get AI and Session stats for Overview (left column)."""
140
+ try:
141
+ import os
142
+ from pathlib import Path
143
+
144
+ lines = []
145
+
146
+ # Get config toggles
147
+ show_ai = self._get_config("terminal.status.overview.show_ai", True)
148
+ show_session = self._get_config("terminal.status.overview.show_session", True)
149
+ show_directory = self._get_config("terminal.status.overview.show_directory", True)
150
+
151
+ # Current Directory
152
+ if show_directory:
153
+ cwd = Path.cwd()
154
+ # Show just the directory name, or full path if configured
155
+ show_full_path = self._get_config("terminal.status.overview.show_full_path", False)
156
+ if show_full_path:
157
+ dir_display = str(cwd)
158
+ else:
159
+ # Show home as ~ and just the folder name
160
+ try:
161
+ home = Path.home()
162
+ if cwd == home:
163
+ dir_display = "~"
164
+ elif cwd.is_relative_to(home):
165
+ rel_path = cwd.relative_to(home)
166
+ # Show last 2 parts of path for context
167
+ parts = rel_path.parts
168
+ if len(parts) > 2:
169
+ dir_display = f"~/{'/'.join(parts[-2:])}"
170
+ else:
171
+ dir_display = f"~/{rel_path}"
172
+ else:
173
+ # Show last 2 parts of absolute path
174
+ parts = cwd.parts
175
+ if len(parts) > 2:
176
+ dir_display = f"{'/'.join(parts[-2:])}"
177
+ else:
178
+ dir_display = str(cwd)
179
+ except Exception:
180
+ dir_display = cwd.name or str(cwd)
181
+
182
+ lines.append(f"Dir: {dir_display}")
183
+
184
+ # AI Status
185
+ if show_ai and self.llm_service:
186
+ processing = "* Processing" if self.llm_service.is_processing else "✓ Ready"
187
+ queue_size = 0
188
+ if hasattr(self.llm_service, "processing_queue"):
189
+ queue_size = self.llm_service.processing_queue.qsize()
190
+
191
+ lines.append(f"AI: {processing}")
192
+ if queue_size > 0:
193
+ lines.append(f"Queue: {queue_size}")
194
+
195
+ # Session Stats
196
+ if show_session and self.llm_service and hasattr(self.llm_service, "session_stats"):
197
+ stats = self.llm_service.session_stats
198
+ msgs = stats.get('messages', 0)
199
+ tokens_in = stats.get('input_tokens', 0)
200
+ tokens_out = stats.get('output_tokens', 0)
201
+ total_tokens = tokens_in + tokens_out
202
+
203
+ # Format tokens
204
+ if total_tokens < 1000:
205
+ token_display = f"{total_tokens}"
206
+ elif total_tokens < 1000000:
207
+ token_display = f"{total_tokens/1000:.1f}K"
208
+ else:
209
+ token_display = f"{total_tokens/1000000:.1f}M"
210
+
211
+ lines.append(f"Messages: {msgs}")
212
+ lines.append(f"Tokens: {token_display}")
213
+ lines.append(f"In/Out: {tokens_in}/{tokens_out}")
214
+
215
+ return lines if lines else ["Hidden"]
216
+
217
+ except Exception as e:
218
+ logger.error(f"Error getting AI/Session content: {e}")
219
+ return ["Error"]
220
+
221
+ def _get_overview_model_content(self) -> List[str]:
222
+ """Get Model info for Overview (middle column)."""
223
+ try:
224
+ lines = []
225
+
226
+ show_model = self._get_config("terminal.status.overview.show_model", True)
227
+
228
+ if show_model and self.llm_service and hasattr(self.llm_service, "api_service"):
229
+ api_service = self.llm_service.api_service
230
+ model = getattr(api_service, "model", "Unknown")
231
+ temp = getattr(api_service, "temperature", None)
232
+ max_tok = getattr(api_service, "max_tokens", None)
233
+ api_url = getattr(api_service, "api_url", "Unknown")
234
+
235
+ # Extract endpoint from URL
236
+ endpoint = "Unknown"
237
+ if api_url != "Unknown":
238
+ try:
239
+ from urllib.parse import urlparse
240
+ parsed = urlparse(api_url)
241
+ endpoint = parsed.hostname or api_url
242
+ except Exception:
243
+ endpoint = api_url
244
+
245
+ lines.append(f"Model: {model}")
246
+ if endpoint != "Unknown":
247
+ lines.append(f"Endpoint: {endpoint}")
248
+ if temp is not None:
249
+ lines.append(f"Temp: {temp}")
250
+ if max_tok is not None:
251
+ lines.append(f"Max Tokens: {max_tok}")
252
+
253
+ return lines if lines else ["Hidden"]
254
+
255
+ except Exception as e:
256
+ logger.error(f"Error getting model content: {e}")
257
+ return ["Error"]
258
+
259
+ def _get_overview_performance_content(self) -> List[str]:
260
+ """Get Performance info for Overview (right column)."""
261
+ try:
262
+ # Performance metrics disabled per user request
263
+ return []
264
+
265
+ except Exception as e:
266
+ logger.error(f"Error getting performance content: {e}")
267
+ return ["Error"]
268
+
269
+ def _get_config(self, key: str, default):
270
+ """Get config value with fallback to default."""
271
+ if self.config and hasattr(self.config, "get"):
272
+ return self.config.get(key, default)
273
+ return default
274
+
275
+ def _get_session_stats_content(self) -> List[str]:
276
+ """Get session statistics content."""
277
+ try:
278
+ # Get session stats from LLM service
279
+ if self.llm_service and hasattr(self.llm_service, "session_stats"):
280
+ stats = self.llm_service.session_stats
281
+ return [
282
+ f"Messages: {stats.get('messages', 0)}",
283
+ f"Tokens In: {stats.get('input_tokens', 0)}",
284
+ f"Tokens Out: {stats.get('output_tokens', 0)}",
285
+ ]
286
+ return ["Messages: 0", "Tokens: 0"]
287
+ except Exception:
288
+ return ["Session: N/A"]
289
+
290
+ def _get_ai_status_content(self) -> List[str]:
291
+ """Get AI status content."""
292
+ try:
293
+ if self.llm_service:
294
+ processing = (
295
+ "* Processing" if self.llm_service.is_processing else "✓ Ready"
296
+ )
297
+ if hasattr(self.llm_service, "processing_queue"):
298
+ queue_size = self.llm_service.processing_queue.qsize()
299
+ else:
300
+ queue_size = 0
301
+
302
+ # Get model and endpoint info from API service
303
+ model = "Unknown"
304
+ endpoint = "Unknown"
305
+ if hasattr(self.llm_service, "api_service"):
306
+ api_service = self.llm_service.api_service
307
+ model = getattr(api_service, "model", "Unknown")
308
+ api_url = getattr(api_service, "api_url", "Unknown")
309
+ # Extract domain from URL for cleaner display
310
+ if api_url != "Unknown":
311
+ try:
312
+ from urllib.parse import urlparse
313
+
314
+ parsed = urlparse(api_url)
315
+ endpoint = parsed.hostname or api_url
316
+ except Exception:
317
+ endpoint = api_url
318
+
319
+ return [
320
+ f"AI: {processing}",
321
+ f"Model: {model}",
322
+ f"Endpoint: {endpoint}",
323
+ f"Queue: {queue_size}",
324
+ ]
325
+ return ["AI: Unknown"]
326
+ except Exception:
327
+ return ["AI: N/A"]
328
+
329
+ def _get_performance_content(self) -> List[str]:
330
+ """Get performance content."""
331
+ try:
332
+ # Performance metrics disabled per user request
333
+ return []
334
+ except Exception:
335
+ return []
336
+
337
+ def _get_minimal_content(self) -> List[str]:
338
+ """Get minimal view content."""
339
+ try:
340
+ ai_status = "✓ Ready"
341
+ model = "Unknown"
342
+ if self.llm_service:
343
+ if self.llm_service.is_processing:
344
+ ai_status = "* Processing"
345
+
346
+ # Get model info
347
+ if hasattr(self.llm_service, "api_service"):
348
+ model = getattr(self.llm_service.api_service, "model", "Unknown")
349
+
350
+ messages = 0
351
+ tokens = 0
352
+ if self.llm_service and hasattr(self.llm_service, "session_stats"):
353
+ stats = self.llm_service.session_stats
354
+ messages = stats.get("messages", 0)
355
+ input_tokens = stats.get("input_tokens", 0)
356
+ output_tokens = stats.get("output_tokens", 0)
357
+ tokens = input_tokens + output_tokens
358
+
359
+ if tokens < 1000:
360
+ token_display = f"{tokens}"
361
+ else:
362
+ token_display = f"{tokens/1000:.1f}K"
363
+
364
+ return [
365
+ f"AI: {ai_status} ({model}) | Messages: {messages} "
366
+ f"| Tokens: {token_display}"
367
+ ]
368
+ except Exception:
369
+ return ["Status: N/A"]
370
+
371
+ def _get_llm_details_content(self) -> List[str]:
372
+ """Get detailed LLM configuration content."""
373
+ try:
374
+ if not self.llm_service:
375
+ return ["LLM: Not initialized"]
376
+
377
+ ai_status = (
378
+ "* Processing" if self.llm_service.is_processing else "✓ Ready"
379
+ )
380
+ model = "Unknown"
381
+ endpoint = "Unknown"
382
+ temperature = "Unknown"
383
+ max_tokens = "Unknown"
384
+
385
+ if hasattr(self.llm_service, "api_service"):
386
+ api_service = self.llm_service.api_service
387
+ model = getattr(api_service, "model", "Unknown")
388
+ temperature = getattr(api_service, "temperature", "Unknown")
389
+ max_tokens = getattr(api_service, "max_tokens", "Unknown")
390
+ api_url = getattr(api_service, "api_url", "Unknown")
391
+
392
+ # Extract domain from URL for cleaner display
393
+ if api_url != "Unknown":
394
+ try:
395
+ from urllib.parse import urlparse
396
+
397
+ parsed = urlparse(api_url)
398
+ endpoint = parsed.hostname or api_url
399
+ except Exception:
400
+ endpoint = api_url
401
+
402
+ return [
403
+ f"Status: {ai_status}",
404
+ f"Model: {model}",
405
+ f"Endpoint: {endpoint}",
406
+ f"Temperature: {temperature}",
407
+ f"Max Tokens: {max_tokens}",
408
+ ]
409
+ except Exception:
410
+ return ["LLM Details: N/A"]