aline-ai 0.5.4__py3-none-any.whl → 0.5.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (82)
  1. {aline_ai-0.5.4.dist-info → aline_ai-0.5.6.dist-info}/METADATA +1 -1
  2. aline_ai-0.5.6.dist-info/RECORD +95 -0
  3. realign/__init__.py +1 -1
  4. realign/adapters/antigravity.py +28 -20
  5. realign/adapters/base.py +46 -50
  6. realign/adapters/claude.py +14 -14
  7. realign/adapters/codex.py +7 -7
  8. realign/adapters/gemini.py +11 -11
  9. realign/adapters/registry.py +14 -10
  10. realign/claude_detector.py +2 -2
  11. realign/claude_hooks/__init__.py +3 -3
  12. realign/claude_hooks/permission_request_hook_installer.py +31 -32
  13. realign/claude_hooks/stop_hook.py +4 -1
  14. realign/claude_hooks/stop_hook_installer.py +30 -31
  15. realign/cli.py +23 -4
  16. realign/codex_detector.py +11 -11
  17. realign/commands/add.py +88 -65
  18. realign/commands/config.py +3 -12
  19. realign/commands/context.py +3 -1
  20. realign/commands/export_shares.py +86 -127
  21. realign/commands/import_shares.py +145 -155
  22. realign/commands/init.py +166 -30
  23. realign/commands/restore.py +18 -6
  24. realign/commands/search.py +14 -42
  25. realign/commands/upgrade.py +155 -11
  26. realign/commands/watcher.py +98 -219
  27. realign/commands/worker.py +29 -6
  28. realign/config.py +25 -20
  29. realign/context.py +1 -3
  30. realign/dashboard/app.py +34 -24
  31. realign/dashboard/screens/__init__.py +10 -1
  32. realign/dashboard/screens/create_agent.py +244 -0
  33. realign/dashboard/screens/create_event.py +3 -1
  34. realign/dashboard/screens/event_detail.py +14 -6
  35. realign/dashboard/screens/help_screen.py +114 -0
  36. realign/dashboard/screens/session_detail.py +3 -1
  37. realign/dashboard/screens/share_import.py +7 -3
  38. realign/dashboard/tmux_manager.py +54 -9
  39. realign/dashboard/widgets/config_panel.py +85 -1
  40. realign/dashboard/widgets/events_table.py +314 -70
  41. realign/dashboard/widgets/header.py +2 -1
  42. realign/dashboard/widgets/search_panel.py +37 -27
  43. realign/dashboard/widgets/sessions_table.py +404 -85
  44. realign/dashboard/widgets/terminal_panel.py +155 -175
  45. realign/dashboard/widgets/watcher_panel.py +6 -2
  46. realign/dashboard/widgets/worker_panel.py +10 -1
  47. realign/db/__init__.py +1 -1
  48. realign/db/base.py +5 -15
  49. realign/db/locks.py +0 -1
  50. realign/db/migration.py +82 -76
  51. realign/db/schema.py +2 -6
  52. realign/db/sqlite_db.py +23 -41
  53. realign/events/__init__.py +0 -1
  54. realign/events/event_summarizer.py +27 -15
  55. realign/events/session_summarizer.py +29 -15
  56. realign/file_lock.py +1 -0
  57. realign/hooks.py +150 -60
  58. realign/logging_config.py +12 -15
  59. realign/mcp_server.py +30 -51
  60. realign/mcp_watcher.py +0 -1
  61. realign/models/event.py +29 -20
  62. realign/prompts/__init__.py +7 -7
  63. realign/prompts/presets.py +15 -11
  64. realign/redactor.py +99 -59
  65. realign/triggers/__init__.py +9 -9
  66. realign/triggers/antigravity_trigger.py +30 -28
  67. realign/triggers/base.py +4 -3
  68. realign/triggers/claude_trigger.py +104 -85
  69. realign/triggers/codex_trigger.py +15 -5
  70. realign/triggers/gemini_trigger.py +57 -47
  71. realign/triggers/next_turn_trigger.py +3 -1
  72. realign/triggers/registry.py +6 -2
  73. realign/triggers/turn_status.py +3 -1
  74. realign/watcher_core.py +306 -131
  75. realign/watcher_daemon.py +8 -8
  76. realign/worker_core.py +3 -1
  77. realign/worker_daemon.py +3 -1
  78. aline_ai-0.5.4.dist-info/RECORD +0 -93
  79. {aline_ai-0.5.4.dist-info → aline_ai-0.5.6.dist-info}/WHEEL +0 -0
  80. {aline_ai-0.5.4.dist-info → aline_ai-0.5.6.dist-info}/entry_points.txt +0 -0
  81. {aline_ai-0.5.4.dist-info → aline_ai-0.5.6.dist-info}/licenses/LICENSE +0 -0
  82. {aline_ai-0.5.4.dist-info → aline_ai-0.5.6.dist-info}/top_level.txt +0 -0
realign/logging_config.py CHANGED
@@ -22,15 +22,15 @@ def get_log_level() -> int:
     Returns:
         int: Logging level constant from logging module
     """
-    level_name = os.getenv('REALIGN_LOG_LEVEL', 'INFO').upper()
+    level_name = os.getenv("REALIGN_LOG_LEVEL", "INFO").upper()

     # Map string to logging constant
     level_map = {
-        'DEBUG': logging.DEBUG,
-        'INFO': logging.INFO,
-        'WARNING': logging.WARNING,
-        'ERROR': logging.ERROR,
-        'CRITICAL': logging.CRITICAL,
+        "DEBUG": logging.DEBUG,
+        "INFO": logging.INFO,
+        "WARNING": logging.WARNING,
+        "ERROR": logging.ERROR,
+        "CRITICAL": logging.CRITICAL,
     }

     return level_map.get(level_name, logging.INFO)
@@ -46,12 +46,12 @@ def get_log_directory() -> Path:
     Returns:
         Path: Log directory path
     """
-    log_dir_str = os.getenv('REALIGN_LOG_DIR')
+    log_dir_str = os.getenv("REALIGN_LOG_DIR")

     if log_dir_str:
         log_dir = Path(log_dir_str).expanduser()
     else:
-        log_dir = Path.home() / '.aline' / '.logs'
+        log_dir = Path.home() / ".aline" / ".logs"

     # Create directory if it doesn't exist
     log_dir.mkdir(parents=True, exist_ok=True)
@@ -64,7 +64,7 @@ def setup_logger(
     log_file: Optional[str] = None,
     max_bytes: int = 10 * 1024 * 1024,  # 10MB
     backup_count: int = 5,
-    console_output: bool = False
+    console_output: bool = False,
 ) -> logging.Logger:
     """
     Set up a logger with file rotation and optional console output.
@@ -95,8 +95,7 @@ def setup_logger(

     # Standard formatter with timestamp, level, name, and message
     formatter = logging.Formatter(
-        fmt='[%(asctime)s] [%(levelname)s] [%(name)s] %(message)s',
-        datefmt='%Y-%m-%d %H:%M:%S'
+        fmt="[%(asctime)s] [%(levelname)s] [%(name)s] %(message)s", datefmt="%Y-%m-%d %H:%M:%S"
     )

     # File handler with rotation
@@ -106,10 +105,7 @@ def setup_logger(
         log_path = log_dir / log_file

         file_handler = RotatingFileHandler(
-            log_path,
-            maxBytes=max_bytes,
-            backupCount=backup_count,
-            encoding='utf-8'
+            log_path, maxBytes=max_bytes, backupCount=backup_count, encoding="utf-8"
         )
         file_handler.setLevel(logging.DEBUG)  # Capture all levels to file
         file_handler.setFormatter(formatter)
@@ -119,6 +115,7 @@ def setup_logger(
         # If file logging fails, fall back to stderr only
         # Don't let logging setup break the application
         import sys
+
         print(f"Warning: Failed to set up file logging: {e}", file=sys.stderr)

     # Optional console handler (stderr)
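
For reference, a minimal usage sketch of the logging setup shown above. It assumes realign.logging_config is importable under that path and that the logger name is the first positional argument of setup_logger (as in the presets.py hunk further down); REALIGN_LOG_LEVEL and REALIGN_LOG_DIR are the environment variables read by get_log_level() and get_log_directory().

import os

# Assumption: import path taken from the file name realign/logging_config.py.
from realign.logging_config import setup_logger

# Point logs at a scratch directory and raise verbosity via the env vars read above.
os.environ["REALIGN_LOG_DIR"] = "/tmp/aline-logs"   # default would be ~/.aline/.logs
os.environ["REALIGN_LOG_LEVEL"] = "DEBUG"           # unrecognized values fall back to INFO

# log_file, max_bytes, backup_count, and console_output match the signature in the diff;
# with the defaults, "example.log" rotates at 10 MB and keeps 5 backups.
logger = setup_logger("realign.example", log_file="example.log", console_output=True)
logger.debug("file handler captures DEBUG and above")
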
realign/mcp_server.py CHANGED
@@ -10,6 +10,7 @@ from urllib.parse import urlparse

 try:
     import httpx
+
     HTTPX_AVAILABLE = True
 except ImportError:
     HTTPX_AVAILABLE = False
@@ -33,15 +34,15 @@ def extract_share_id(share_url: str) -> str:
         https://example.com/share/xyz789/chat -> xyz789
     """
     # Remove trailing slash
-    url = share_url.rstrip('/')
+    url = share_url.rstrip("/")

     # Extract path components
     parsed = urlparse(url)
-    path_parts = [p for p in parsed.path.split('/') if p]
+    path_parts = [p for p in parsed.path.split("/") if p]

     # Find 'share' in path and get next component
-    if 'share' in path_parts:
-        share_idx = path_parts.index('share')
+    if "share" in path_parts:
+        share_idx = path_parts.index("share")
         if share_idx + 1 < len(path_parts):
             return path_parts[share_idx + 1]

@@ -59,7 +60,9 @@ def extract_base_url(share_url: str) -> str:
     return f"{parsed.scheme}://{parsed.netloc}"


-async def authenticate_share(share_url: str, password: Optional[str] = None) -> tuple[str, str, str]:
+async def authenticate_share(
+    share_url: str, password: Optional[str] = None
+) -> tuple[str, str, str]:
     """
     Authenticate with share and get session token.

@@ -97,16 +100,13 @@ async def authenticate_share(share_url: str, password: Optional[str] = None) ->
             password_hash = hashlib.sha256(password.encode()).hexdigest()

             auth_resp = await client.post(
-                f"{base_url}/api/share/{share_id}/auth",
-                json={"password_hash": password_hash}
+                f"{base_url}/api/share/{share_id}/auth", json={"password_hash": password_hash}
             )
             auth_resp.raise_for_status()
             data = auth_resp.json()
         else:
             # No password needed - create session directly
-            session_resp = await client.post(
-                f"{base_url}/api/share/{share_id}/session"
-            )
+            session_resp = await client.post(f"{base_url}/api/share/{share_id}/session")
             session_resp.raise_for_status()
             data = session_resp.json()

@@ -117,12 +117,7 @@
     return base_url, share_id, session_token


-async def ask_conversation(
-    base_url: str,
-    share_id: str,
-    session_token: str,
-    question: str
-) -> str:
+async def ask_conversation(base_url: str, share_id: str, session_token: str, question: str) -> str:
     """
     Send a question to the remote AI agent and receive the answer.

@@ -147,7 +142,7 @@ async def ask_conversation(
         resp = await client.post(
             f"{base_url}/api/chat/{share_id}",
             headers={"x-session-token": session_token},
-            json={"messages": [{"role": "user", "parts": [{"type": "text", "text": question}]}]}
+            json={"messages": [{"role": "user", "parts": [{"type": "text", "text": question}]}]},
         )
         resp.raise_for_status()

@@ -158,13 +153,13 @@

         async for chunk in resp.aiter_text():
             # Split into lines and process each
-            for line in chunk.split('\n'):
+            for line in chunk.split("\n"):
                 line = line.strip()
                 if not line:
                     continue

                 # Remove "data: " prefix if present
-                if line.startswith('data: '):
+                if line.startswith("data: "):
                     line = line[6:]

                 # Try to parse as JSON
@@ -176,8 +171,8 @@
                 if isinstance(data, dict):
                     # Extract text from text-delta chunks
                     # The field name is 'delta' not 'textDelta'
-                    if data.get('type') == 'text-delta':
-                        delta = data.get('delta', '')
+                    if data.get("type") == "text-delta":
+                        delta = data.get("delta", "")
                         if delta:
                             text_chunks.append(delta)

@@ -186,21 +181,22 @@
                     continue

     # Combine all text chunks to get the final answer
-    answer = ''.join(text_chunks)
+    answer = "".join(text_chunks)

     # Apply reasonable length limit to prevent overwhelming the MCP client
     # If answer is too long, truncate and add notice
     MAX_RESPONSE_LENGTH = 50000  # ~50KB of text
     if len(answer) > MAX_RESPONSE_LENGTH:
-        answer = answer[:MAX_RESPONSE_LENGTH] + "\n\n[Response truncated due to length. Please ask more specific questions to get complete answers.]"
+        answer = (
+            answer[:MAX_RESPONSE_LENGTH]
+            + "\n\n[Response truncated due to length. Please ask more specific questions to get complete answers.]"
+        )

     return answer if answer else "No response received from the agent."


 async def handle_ask_tool(
-    share_url: str,
-    question: str,
-    password: Optional[str] = None
+    share_url: str, question: str, password: Optional[str] = None
 ) -> list[TextContent]:
     """
     Handle the ask_shared_conversation tool.
@@ -221,21 +217,12 @@
         answer = await ask_conversation(base_url, share_id, token, question)

         # Return the answer
-        return [TextContent(
-            type="text",
-            text=answer
-        )]
+        return [TextContent(type="text", text=answer)]

     except ValueError as e:
-        return [TextContent(
-            type="text",
-            text=f"Authentication error: {str(e)}"
-        )]
+        return [TextContent(type="text", text=f"Authentication error: {str(e)}")]
     except Exception as e:
-        return [TextContent(
-            type="text",
-            text=f"Error querying shared conversation: {str(e)}"
-        )]
+        return [TextContent(type="text", text=f"Error querying shared conversation: {str(e)}")]


 @app.list_tools()
@@ -288,24 +275,17 @@ async def call_tool(name: str, arguments: Any) -> list[TextContent]:
         return await handle_ask_tool(
             share_url=arguments.get("share_url", ""),
             question=arguments.get("question", ""),
-            password=arguments.get("password")
+            password=arguments.get("password"),
         )
     else:
-        return [TextContent(
-            type="text",
-            text=f"Unknown tool: {name}"
-        )]
+        return [TextContent(type="text", text=f"Unknown tool: {name}")]


 async def async_main():
     """Main async entry point for the MCP server."""
     # Start stdio server
     async with stdio_server() as (read_stream, write_stream):
-        await app.run(
-            read_stream,
-            write_stream,
-            app.create_initialization_options()
-        )
+        await app.run(read_stream, write_stream, app.create_initialization_options())


 def main():
@@ -313,9 +293,8 @@ def main():
     # Check if httpx is available
     if not HTTPX_AVAILABLE:
         print(
-            "Error: httpx package is required for aline-mcp.\n"
-            "Install with: pip install httpx",
-            file=sys.stderr
+            "Error: httpx package is required for aline-mcp.\n" "Install with: pip install httpx",
+            file=sys.stderr,
         )
         sys.exit(1)

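
The hunks above spell out the HTTP flow behind the ask_shared_conversation tool: derive the base URL and share ID from the share link, create a session at /api/share/{id}/session (or authenticate at /api/share/{id}/auth with a SHA-256 password hash), then stream the answer from /api/chat/{id} and accumulate the "text-delta" chunks. Below is a rough client-side sketch of that flow under the same assumptions; it needs httpx installed and a server exposing these endpoints, and the session_token response key is assumed rather than shown in the diff.

import asyncio
import json
from urllib.parse import urlparse

import httpx


async def ask_share(share_url: str, question: str) -> str:
    # Derive base URL and share ID the way extract_base_url/extract_share_id do above.
    parsed = urlparse(share_url.rstrip("/"))
    base_url = f"{parsed.scheme}://{parsed.netloc}"
    parts = [p for p in parsed.path.split("/") if p]
    share_id = parts[parts.index("share") + 1]

    async with httpx.AsyncClient(timeout=60.0) as client:
        # No-password path: create a session directly (see authenticate_share above).
        resp = await client.post(f"{base_url}/api/share/{share_id}/session")
        resp.raise_for_status()
        token = resp.json()["session_token"]  # assumed response key

        # Stream the chat response and collect "text-delta" chunks, as ask_conversation does.
        chunks: list[str] = []
        async with client.stream(
            "POST",
            f"{base_url}/api/chat/{share_id}",
            headers={"x-session-token": token},
            json={"messages": [{"role": "user", "parts": [{"type": "text", "text": question}]}]},
        ) as stream:
            async for text in stream.aiter_text():
                for line in text.split("\n"):
                    line = line.strip()
                    if line.startswith("data: "):
                        line = line[6:]
                    if not line:
                        continue
                    try:
                        data = json.loads(line)
                    except json.JSONDecodeError:
                        continue
                    if isinstance(data, dict) and data.get("type") == "text-delta":
                        chunks.append(data.get("delta", ""))
    return "".join(chunks)


if __name__ == "__main__":
    # Hypothetical share URL, for illustration only.
    print(asyncio.run(ask_share("https://example.com/share/abc123", "What changed in 0.5.6?")))
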
realign/mcp_watcher.py CHANGED
@@ -32,4 +32,3 @@ _watcher_core.find_all_active_sessions = _find_all_active_sessions_proxy
 ReAlignConfig = _watcher_core.ReAlignConfig
 DialogueWatcher = _watcher_core.DialogueWatcher
 is_path_blacklisted = _watcher_core.is_path_blacklisted
-
realign/models/event.py CHANGED
@@ -8,9 +8,10 @@ from enum import Enum

 class EventSource(Enum):
     """How the event was generated."""
-    USER = "user"  # Manually created via `aline watcher event generate`
-    PRESET_DAY = "preset_day"  # Auto-generated daily aggregation
-    PRESET_WEEK = "preset_week"  # Auto-generated weekly aggregation
+
+    USER = "user"  # Manually created via `aline watcher event generate`
+    PRESET_DAY = "preset_day"  # Auto-generated daily aggregation
+    PRESET_WEEK = "preset_week"  # Auto-generated weekly aggregation


 @dataclass
@@ -21,17 +22,18 @@ class Event:
     An Event is a semantic abstraction that groups related sessions
     together based on user selection or time-based presets.
     """
-    id: str  # Unique event ID (UUID)
-    title: str  # Human-readable event title
-    description: Optional[str] = None  # Detailed description
-    source: EventSource = EventSource.USER  # How the event was generated
+
+    id: str  # Unique event ID (UUID)
+    title: str  # Human-readable event title
+    description: Optional[str] = None  # Detailed description
+    source: EventSource = EventSource.USER  # How the event was generated

     # Commit references
     commit_hashes: List[str] = field(default_factory=list)  # Full commit hashes

     # Temporal metadata
-    start_timestamp: Optional[datetime] = None  # Earliest commit timestamp
-    end_timestamp: Optional[datetime] = None  # Latest commit timestamp
+    start_timestamp: Optional[datetime] = None  # Earliest commit timestamp
+    end_timestamp: Optional[datetime] = None  # Latest commit timestamp
     created_at: datetime = field(default_factory=datetime.now)
     updated_at: datetime = field(default_factory=datetime.now)

@@ -41,9 +43,9 @@ class Event:
     session_ids: List[str] = field(default_factory=list)  # Related session IDs

     # Generation metadata
-    auto_generated: bool = True  # Whether auto-generated or manual
-    generation_method: Optional[str] = None  # e.g., "llm_clustering", "time_window"
-    confidence_score: Optional[float] = None  # Clustering confidence (0-1)
+    auto_generated: bool = True  # Whether auto-generated or manual
+    generation_method: Optional[str] = None  # e.g., "llm_clustering", "time_window"
+    confidence_score: Optional[float] = None  # Clustering confidence (0-1)

     # UI metadata (optional, for shares)
     ui_metadata: Optional[Dict[str, Any]] = None
@@ -66,11 +68,11 @@ class Event:
             "auto_generated": self.auto_generated,
             "generation_method": self.generation_method,
             "confidence_score": self.confidence_score,
-            "ui_metadata": self.ui_metadata
+            "ui_metadata": self.ui_metadata,
         }

     @classmethod
-    def from_dict(cls, data: Dict[str, Any]) -> 'Event':
+    def from_dict(cls, data: Dict[str, Any]) -> "Event":
         """Create Event from dict."""
         # Handle legacy data with event_type/status
         source_value = data.get("source") or data.get("event_type", "user")
@@ -83,8 +85,14 @@ class Event:
             description=data.get("description"),
             source=EventSource(source_value),
             commit_hashes=data.get("commit_hashes", []),
-            start_timestamp=datetime.fromisoformat(data["start_timestamp"]) if data.get("start_timestamp") else None,
-            end_timestamp=datetime.fromisoformat(data["end_timestamp"]) if data.get("end_timestamp") else None,
+            start_timestamp=(
+                datetime.fromisoformat(data["start_timestamp"])
+                if data.get("start_timestamp")
+                else None
+            ),
+            end_timestamp=(
+                datetime.fromisoformat(data["end_timestamp"]) if data.get("end_timestamp") else None
+            ),
             created_at=datetime.fromisoformat(data["created_at"]),
             updated_at=datetime.fromisoformat(data["updated_at"]),
             tags=data.get("tags", []),
@@ -93,13 +101,14 @@ class Event:
             auto_generated=data.get("auto_generated", True),
             generation_method=data.get("generation_method"),
             confidence_score=data.get("confidence_score"),
-            ui_metadata=data.get("ui_metadata")
+            ui_metadata=data.get("ui_metadata"),
         )


 @dataclass
 class EventCollection:
     """Container for all events in a project."""
+
     version: int = 1  # Schema version for migration
     events: List[Event] = field(default_factory=list)
     metadata: Dict[str, Any] = field(default_factory=dict)
@@ -117,14 +126,14 @@ class EventCollection:
         return {
             "version": self.version,
             "metadata": self.metadata,
-            "events": [e.to_dict() for e in self.events]
+            "events": [e.to_dict() for e in self.events],
         }

     @classmethod
-    def from_dict(cls, data: Dict[str, Any]) -> 'EventCollection':
+    def from_dict(cls, data: Dict[str, Any]) -> "EventCollection":
         """Create EventCollection from dict."""
         return cls(
             version=data.get("version", 1),
             events=[Event.from_dict(e) for e in data.get("events", [])],
-            metadata=data.get("metadata", {})
+            metadata=data.get("metadata", {}),
         )
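
The reformatted Event model above keeps its field set and its to_dict/from_dict contract intact. A small round-trip sketch, assuming the import path realign.models.event implied by the file location and that to_dict() writes timestamps as ISO strings (from_dict() parses them with datetime.fromisoformat, so that is the natural counterpart); all field values are made up for illustration.

from datetime import datetime

# Assumption: import path derived from the file path realign/models/event.py.
from realign.models.event import Event, EventSource

event = Event(
    id="00000000-0000-0000-0000-000000000000",  # illustrative UUID
    title="Refactor logging configuration",
    description="Sessions touching realign/logging_config.py",
    source=EventSource.USER,
    commit_hashes=["abc123def456"],
    start_timestamp=datetime(2024, 1, 1, 9, 0),
    end_timestamp=datetime(2024, 1, 1, 17, 30),
    tags=["refactor"],
    session_ids=["session-1"],
)

# Serialize and parse back; from_dict() also tolerates legacy payloads that
# still carry an "event_type" key instead of "source".
payload = event.to_dict()
restored = Event.from_dict(payload)
assert restored.title == event.title and restored.source is EventSource.USER
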
realign/prompts/__init__.py CHANGED
@@ -11,11 +11,11 @@ from .presets import (
 )

 __all__ = [
-    'PromptPreset',
-    'get_all_presets',
-    'get_preset_by_id',
-    'get_preset_by_index',
-    'load_custom_presets',
-    'display_preset_menu',
-    'prompt_for_custom_instructions',
+    "PromptPreset",
+    "get_all_presets",
+    "get_preset_by_id",
+    "get_preset_by_index",
+    "load_custom_presets",
+    "display_preset_menu",
+    "prompt_for_custom_instructions",
 ]
realign/prompts/presets.py CHANGED
@@ -13,7 +13,7 @@ import yaml

 from ..logging_config import setup_logger

-logger = setup_logger('realign.prompts.presets', 'presets.log')
+logger = setup_logger("realign.prompts.presets", "presets.log")


 @dataclass
@@ -139,29 +139,31 @@ def load_custom_presets() -> List[PromptPreset]:
     Returns:
         List of custom PromptPreset objects
     """
-    config_path = Path.home() / '.aline' / 'prompt_presets.yaml'
+    config_path = Path.home() / ".aline" / "prompt_presets.yaml"

     if not config_path.exists():
         logger.debug(f"No custom presets file found at {config_path}")
         return []

     try:
-        with open(config_path, 'r', encoding='utf-8') as f:
+        with open(config_path, "r", encoding="utf-8") as f:
             config = yaml.safe_load(f)

-        if not config or 'custom_presets' not in config:
+        if not config or "custom_presets" not in config:
             logger.warning("Custom presets file exists but has no 'custom_presets' key")
             return []

         custom_presets = []
-        for preset_data in config['custom_presets']:
+        for preset_data in config["custom_presets"]:
             try:
                 preset = PromptPreset(
-                    id=preset_data['id'],
-                    name=preset_data['name'],
-                    description=preset_data['description'],
-                    allow_custom_instructions=preset_data.get('allow_custom_instructions', True),
-                    custom_instructions_placeholder=preset_data.get('custom_instructions_placeholder', ''),
+                    id=preset_data["id"],
+                    name=preset_data["name"],
+                    description=preset_data["description"],
+                    allow_custom_instructions=preset_data.get("allow_custom_instructions", True),
+                    custom_instructions_placeholder=preset_data.get(
+                        "custom_instructions_placeholder", ""
+                    ),
                 )
                 custom_presets.append(preset)
                 logger.debug(f"Loaded custom preset: {preset.id}")
@@ -231,7 +233,9 @@ def prompt_for_custom_instructions(preset: PromptPreset) -> str:
     instructions = input("Instructions: ").strip()

     if instructions:
-        print(f"\n✓ Custom instructions added: {instructions[:50]}{'...' if len(instructions) > 50 else ''}")
+        print(
+            f"\n✓ Custom instructions added: {instructions[:50]}{'...' if len(instructions) > 50 else ''}"
+        )
     else:
         print("\n✓ No custom instructions")
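
Per load_custom_presets() above, user-defined presets live in ~/.aline/prompt_presets.yaml under a top-level custom_presets key, where each entry carries id, name, and description plus the optional allow_custom_instructions and custom_instructions_placeholder fields. A hedged sketch of generating such a file from Python; the preset values themselves are invented for illustration.

from pathlib import Path

import yaml

# File shape inferred from the keys load_custom_presets() reads in the diff above.
presets_config = {
    "custom_presets": [
        {
            "id": "changelog",  # illustrative preset
            "name": "Changelog entry",
            "description": "Summarize the session as a changelog entry.",
            "allow_custom_instructions": True,  # optional, defaults to True
            "custom_instructions_placeholder": "e.g. focus on user-facing changes",
        }
    ]
}

config_path = Path.home() / ".aline" / "prompt_presets.yaml"
config_path.parent.mkdir(parents=True, exist_ok=True)
config_path.write_text(yaml.safe_dump(presets_config, sort_keys=False), encoding="utf-8")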