glaip-sdk 0.0.5b1__py3-none-any.whl → 0.0.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43):
  1. glaip_sdk/__init__.py +1 -1
  2. glaip_sdk/_version.py +42 -19
  3. glaip_sdk/branding.py +3 -2
  4. glaip_sdk/cli/commands/__init__.py +1 -1
  5. glaip_sdk/cli/commands/agents.py +452 -285
  6. glaip_sdk/cli/commands/configure.py +14 -13
  7. glaip_sdk/cli/commands/mcps.py +30 -20
  8. glaip_sdk/cli/commands/models.py +5 -3
  9. glaip_sdk/cli/commands/tools.py +111 -106
  10. glaip_sdk/cli/display.py +48 -27
  11. glaip_sdk/cli/io.py +1 -1
  12. glaip_sdk/cli/main.py +26 -5
  13. glaip_sdk/cli/resolution.py +5 -4
  14. glaip_sdk/cli/utils.py +437 -188
  15. glaip_sdk/cli/validators.py +7 -2
  16. glaip_sdk/client/agents.py +276 -153
  17. glaip_sdk/client/base.py +69 -27
  18. glaip_sdk/client/tools.py +44 -26
  19. glaip_sdk/client/validators.py +154 -94
  20. glaip_sdk/config/constants.py +0 -2
  21. glaip_sdk/models.py +5 -4
  22. glaip_sdk/utils/__init__.py +7 -7
  23. glaip_sdk/utils/client_utils.py +191 -101
  24. glaip_sdk/utils/display.py +4 -2
  25. glaip_sdk/utils/general.py +8 -6
  26. glaip_sdk/utils/import_export.py +58 -25
  27. glaip_sdk/utils/rendering/formatting.py +12 -6
  28. glaip_sdk/utils/rendering/models.py +1 -1
  29. glaip_sdk/utils/rendering/renderer/base.py +523 -332
  30. glaip_sdk/utils/rendering/renderer/console.py +6 -5
  31. glaip_sdk/utils/rendering/renderer/debug.py +94 -52
  32. glaip_sdk/utils/rendering/renderer/stream.py +93 -48
  33. glaip_sdk/utils/rendering/steps.py +103 -39
  34. glaip_sdk/utils/rich_utils.py +1 -1
  35. glaip_sdk/utils/run_renderer.py +1 -1
  36. glaip_sdk/utils/serialization.py +9 -3
  37. glaip_sdk/utils/validation.py +2 -2
  38. glaip_sdk-0.0.7.dist-info/METADATA +183 -0
  39. glaip_sdk-0.0.7.dist-info/RECORD +55 -0
  40. glaip_sdk-0.0.5b1.dist-info/METADATA +0 -645
  41. glaip_sdk-0.0.5b1.dist-info/RECORD +0 -55
  42. {glaip_sdk-0.0.5b1.dist-info → glaip_sdk-0.0.7.dist-info}/WHEEL +0 -0
  43. {glaip_sdk-0.0.5b1.dist-info → glaip_sdk-0.0.7.dist-info}/entry_points.txt +0 -0
@@ -9,10 +9,10 @@ Authors:
9
9
  """
10
10
 
11
11
  import logging
12
- from collections.abc import AsyncGenerator
12
+ from collections.abc import AsyncGenerator, Iterator
13
13
  from contextlib import ExitStack
14
14
  from pathlib import Path
15
- from typing import Any, BinaryIO
15
+ from typing import Any, BinaryIO, NoReturn
16
16
 
17
17
  import httpx
18
18
 
@@ -36,14 +36,19 @@ class MultipartData:
36
36
  self.files = files
37
37
  self._exit_stack = ExitStack()
38
38
 
39
- def close(self):
39
+ def close(self) -> None:
40
40
  """Close all opened file handles."""
41
41
  self._exit_stack.close()
42
42
 
43
- def __enter__(self):
43
+ def __enter__(self) -> "MultipartData":
44
44
  return self
45
45
 
46
- def __exit__(self, _exc_type, _exc_val, _exc_tb):
46
+ def __exit__(
47
+ self,
48
+ _exc_type: type[BaseException] | None,
49
+ _exc_val: BaseException | None,
50
+ _exc_tb: Any,
51
+ ) -> None:
47
52
  self.close()
48
53
 
49
54
 
@@ -115,12 +120,38 @@ def find_by_name(
115
120
  return find_by_name_new(items, name, case_sensitive)
116
121
 
117
122
 
118
- def _parse_sse_line(line: str, buf: list, event_type: str = None, event_id: str = None):
119
- """Parse a single SSE line and return updated buffer and event metadata."""
120
- # Normalize CRLF and treat whitespace-only as blank
121
- line = line.rstrip("\r")
123
+ def _handle_blank_line(
124
+ buf: list[str],
125
+ event_type: str | None,
126
+ event_id: str | None,
127
+ ) -> tuple[list[str], str | None, str | None, dict[str, Any] | None, bool]:
128
+ """Handle blank SSE lines by returning accumulated data if buffer exists."""
129
+ if buf:
130
+ data = "\n".join(buf)
131
+ return (
132
+ [],
133
+ None,
134
+ None,
135
+ {
136
+ "event": event_type or "message",
137
+ "id": event_id,
138
+ "data": data,
139
+ },
140
+ False,
141
+ )
142
+ return buf, event_type, event_id, None, False
122
143
 
123
- if not line.strip(): # blank line
144
+
145
+ def _handle_data_line(
146
+ line: str,
147
+ buf: list[str],
148
+ event_type: str | None,
149
+ event_id: str | None,
150
+ ) -> tuple[list[str], str | None, str | None, dict[str, Any] | None, bool]:
151
+ """Handle data: lines, including [DONE] sentinel marker."""
152
+ data_line = line[5:].lstrip()
153
+
154
+ if data_line.strip() == "[DONE]":
124
155
  if buf:
125
156
  data = "\n".join(buf)
126
157
  return (
@@ -132,42 +163,59 @@ def _parse_sse_line(line: str, buf: list, event_type: str = None, event_id: str
132
163
  "id": event_id,
133
164
  "data": data,
134
165
  },
135
- False,
136
- ) # no completion
137
- return buf, event_type, event_id, None, False
166
+ True,
167
+ )
168
+ return buf, event_type, event_id, None, True
169
+
170
+ buf.append(data_line)
171
+ return buf, event_type, event_id, None, False
172
+
173
+
174
+ def _handle_field_line(
175
+ line: str,
176
+ field_type: str,
177
+ current_value: str | None,
178
+ ) -> str | None:
179
+ """Handle event: or id: field lines."""
180
+ if field_type == "event":
181
+ return line[6:].strip() or None
182
+ elif field_type == "id":
183
+ return line[3:].strip() or None
184
+ return current_value
185
+
186
+
187
+ def _parse_sse_line(
188
+ line: str,
189
+ buf: list[str],
190
+ event_type: str | None = None,
191
+ event_id: str | None = None,
192
+ ) -> tuple[list[str], str | None, str | None, dict[str, Any] | None, bool]:
193
+ """Parse a single SSE line and return updated buffer and event metadata."""
194
+ # Normalize CRLF and treat whitespace-only as blank
195
+ line = line.rstrip("\r")
196
+
197
+ if not line.strip(): # blank line
198
+ return _handle_blank_line(buf, event_type, event_id)
138
199
 
139
200
  if line.startswith(":"): # comment
140
201
  return buf, event_type, event_id, None, False
141
202
 
142
203
  if line.startswith("data:"):
143
- data_line = line[5:].lstrip()
144
- if data_line.strip() == "[DONE]": # sentinel end marker
145
- if buf:
146
- data = "\n".join(buf)
147
- return (
148
- [],
149
- None,
150
- None,
151
- {
152
- "event": event_type or "message",
153
- "id": event_id,
154
- "data": data,
155
- },
156
- True,
157
- ) # signal completion
158
- return buf, event_type, event_id, None, True
159
- buf.append(data_line)
160
- elif line.startswith("event:"):
161
- event_type = line[6:].strip() or None
204
+ return _handle_data_line(line, buf, event_type, event_id)
205
+
206
+ if line.startswith("event:"):
207
+ event_type = _handle_field_line(line, "event", event_type)
162
208
  elif line.startswith("id:"):
163
- event_id = line[3:].strip() or None
209
+ event_id = _handle_field_line(line, "id", event_id)
164
210
 
165
211
  return buf, event_type, event_id, None, False
166
212
 
167
213
 
168
214
  def _handle_streaming_error(
169
- e: Exception, timeout_seconds: float = None, agent_name: str = None
170
- ):
215
+ e: Exception,
216
+ timeout_seconds: float | None = None,
217
+ agent_name: str | None = None,
218
+ ) -> NoReturn:
171
219
  """Handle different types of streaming errors with appropriate logging and exceptions."""
172
220
  if isinstance(e, httpx.ReadTimeout):
173
221
  logger.error(f"Read timeout during streaming: {e}")
@@ -198,9 +246,38 @@ def _handle_streaming_error(
198
246
  raise
199
247
 
200
248
 
249
+ def _process_sse_line(
250
+ line: str, buf: list[str], event_type: str | None, event_id: str | None
251
+ ) -> tuple[list[str], str | None, str | None, dict[str, Any] | None, bool]:
252
+ """Process a single SSE line and return updated state."""
253
+ result = _parse_sse_line(line, buf, event_type, event_id)
254
+ buf, event_type, event_id, event_data, completed = result
255
+ return buf, event_type, event_id, event_data, completed
256
+
257
+
258
+ def _yield_event_data(event_data: dict[str, Any] | None) -> Iterator[dict[str, Any]]:
259
+ """Yield event data if available."""
260
+ if event_data:
261
+ yield event_data
262
+
263
+
264
+ def _flush_remaining_buffer(
265
+ buf: list[str], event_type: str | None, event_id: str | None
266
+ ) -> Iterator[dict[str, Any]]:
267
+ """Flush any remaining data in buffer."""
268
+ if buf:
269
+ yield {
270
+ "event": event_type or "message",
271
+ "id": event_id,
272
+ "data": "\n".join(buf),
273
+ }
274
+
275
+
201
276
  def iter_sse_events(
202
- response: httpx.Response, timeout_seconds: float = None, agent_name: str = None
203
- ):
277
+ response: httpx.Response,
278
+ timeout_seconds: float | None = None,
279
+ agent_name: str | None = None,
280
+ ) -> Iterator[dict[str, Any]]:
204
281
  """Iterate over Server-Sent Events with proper parsing.
205
282
 
206
283
  Args:
@@ -226,25 +303,16 @@ def iter_sse_events(
226
303
  if line is None:
227
304
  continue
228
305
 
229
- result = _parse_sse_line(line, buf, event_type, event_id)
230
- if len(result) == 5: # completion signal included
231
- buf, event_type, event_id, event_data, completed = result
232
- else: # normal case
233
- buf, event_type, event_id, event_data = result
234
- completed = False
306
+ buf, event_type, event_id, event_data, completed = _process_sse_line(
307
+ line, buf, event_type, event_id
308
+ )
235
309
 
236
- if event_data:
237
- yield event_data
310
+ yield from _yield_event_data(event_data)
238
311
  if completed:
239
312
  return
240
313
 
241
314
  # Flush any remaining data
242
- if buf:
243
- yield {
244
- "event": event_type or "message",
245
- "id": event_id,
246
- "data": "\n".join(buf),
247
- }
315
+ yield from _flush_remaining_buffer(buf, event_type, event_id)
248
316
 
249
317
  except Exception as e:
250
318
  _handle_streaming_error(e, timeout_seconds, agent_name)
@@ -279,11 +347,7 @@ async def aiter_sse_events(
279
347
  continue
280
348
 
281
349
  result = _parse_sse_line(line, buf, event_type, event_id)
282
- if len(result) == 5: # completion signal included
283
- buf, event_type, event_id, event_data, completed = result
284
- else: # normal case
285
- buf, event_type, event_id, event_data = result
286
- completed = False
350
+ buf, event_type, event_id, event_data, completed = result
287
351
 
288
352
  if event_data:
289
353
  yield event_data
@@ -302,6 +366,66 @@ async def aiter_sse_events(
302
366
  _handle_streaming_error(e, timeout_seconds, agent_name)
303
367
 
304
368
 
369
+ def _create_form_data(message: str) -> dict[str, Any]:
370
+ """Create form data with message and stream flag."""
371
+ return {"input": message, "message": message, "stream": True}
372
+
373
+
374
+ def _prepare_file_entry(
375
+ item: str | BinaryIO, stack: ExitStack
376
+ ) -> tuple[str, tuple[str, BinaryIO, str]]:
377
+ """Prepare a single file entry for multipart data."""
378
+ if isinstance(item, str):
379
+ return _prepare_path_entry(item, stack)
380
+ else:
381
+ return _prepare_stream_entry(item)
382
+
383
+
384
+ def _prepare_path_entry(
385
+ path_str: str, stack: ExitStack
386
+ ) -> tuple[str, tuple[str, BinaryIO, str]]:
387
+ """Prepare a file path entry."""
388
+ file_path = Path(path_str)
389
+ if not file_path.exists():
390
+ raise FileNotFoundError(f"File not found: {path_str}")
391
+
392
+ handle = stack.enter_context(open(file_path, "rb"))
393
+ return (
394
+ "files",
395
+ (
396
+ file_path.name,
397
+ handle,
398
+ "application/octet-stream",
399
+ ),
400
+ )
401
+
402
+
403
+ def _prepare_stream_entry(
404
+ file_obj: BinaryIO,
405
+ ) -> tuple[str, tuple[str, BinaryIO, str]]:
406
+ """Prepare a file object entry."""
407
+ if not hasattr(file_obj, "read"):
408
+ raise ValueError(f"Invalid file object: {file_obj}")
409
+
410
+ raw_name = getattr(file_obj, "name", "file")
411
+ filename = Path(raw_name).name if raw_name else "file"
412
+
413
+ try:
414
+ if hasattr(file_obj, "seek"):
415
+ file_obj.seek(0)
416
+ except (OSError, ValueError):
417
+ pass
418
+
419
+ return (
420
+ "files",
421
+ (
422
+ filename,
423
+ file_obj,
424
+ "application/octet-stream",
425
+ ),
426
+ )
427
+
428
+
305
429
  def prepare_multipart_data(message: str, files: list[str | BinaryIO]) -> MultipartData:
306
430
  """Prepare multipart form data for file uploads.
307
431
 
@@ -316,49 +440,15 @@ def prepare_multipart_data(message: str, files: list[str | BinaryIO]) -> Multipa
316
440
  FileNotFoundError: When a file path doesn't exist
317
441
  ValueError: When a file object is invalid
318
442
  """
319
- # Backend expects 'input' for the main prompt. Keep 'message' for
320
- # backward-compatibility with any legacy handlers.
321
- form_data = {"input": message, "message": message, "stream": True}
322
- file_list = []
323
-
324
- with ExitStack() as stack:
325
- multipart_data = MultipartData(form_data, [])
326
- multipart_data._exit_stack = stack
327
-
328
- for file_item in files:
329
- if isinstance(file_item, str):
330
- # File path - let httpx stream the file handle
331
- file_path = Path(file_item)
332
- if not file_path.exists():
333
- raise FileNotFoundError(f"File not found: {file_item}")
334
-
335
- # Open file and register for cleanup
336
- fh = stack.enter_context(open(file_path, "rb"))
337
- file_list.append(
338
- (
339
- "files",
340
- (
341
- file_path.name,
342
- fh,
343
- "application/octet-stream",
344
- ),
345
- )
346
- )
347
- else:
348
- # File-like object
349
- if hasattr(file_item, "name"):
350
- filename = getattr(file_item, "name", "file")
351
- else:
352
- filename = "file"
353
-
354
- if hasattr(file_item, "read"):
355
- # For file-like objects, we need to read them since httpx expects bytes
356
- file_content = file_item.read()
357
- file_list.append(
358
- ("files", (filename, file_content, "application/octet-stream"))
359
- )
360
- else:
361
- raise ValueError(f"Invalid file object: {file_item}")
362
-
363
- multipart_data.files = file_list
443
+ form_data = _create_form_data(message)
444
+ stack = ExitStack()
445
+ multipart_data = MultipartData(form_data, [])
446
+ multipart_data._exit_stack = stack
447
+
448
+ try:
449
+ file_entries = [_prepare_file_entry(item, stack) for item in files]
450
+ multipart_data.files = file_entries
364
451
  return multipart_data
452
+ except Exception:
453
+ stack.close()
454
+ raise
@@ -4,6 +4,8 @@ Authors:
4
4
  Raymond Christopher (raymond.christopher@gdplabs.id)
5
5
  """
6
6
 
7
+ from typing import Any
8
+
7
9
  from glaip_sdk.utils.rich_utils import RICH_AVAILABLE
8
10
 
9
11
 
@@ -34,7 +36,7 @@ def print_agent_output(output: str, title: str = "Agent Output") -> None:
34
36
  print("=" * (len(title) + 8))
35
37
 
36
38
 
37
- def print_agent_created(agent, title: str = "🤖 Agent Created") -> None:
39
+ def print_agent_created(agent: Any, title: str = "🤖 Agent Created") -> None:
38
40
  """Print agent creation success with rich formatting.
39
41
 
40
42
  Args:
@@ -68,7 +70,7 @@ def print_agent_created(agent, title: str = "🤖 Agent Created") -> None:
68
70
  print(f"Version: {getattr(agent, 'version', '1.0')}")
69
71
 
70
72
 
71
- def print_agent_updated(agent) -> None:
73
+ def print_agent_updated(agent: Any) -> None:
72
74
  """Print agent update success with rich formatting.
73
75
 
74
76
  Args:
@@ -5,7 +5,9 @@ Authors:
5
5
  """
6
6
 
7
7
  import re
8
+ from collections.abc import Iterable, Iterator
8
9
  from datetime import datetime
10
+ from typing import Any
9
11
  from uuid import UUID
10
12
 
11
13
  import click
@@ -58,7 +60,7 @@ def format_file_size(size_bytes: int) -> str:
58
60
  return f"{size_bytes:.1f} TB"
59
61
 
60
62
 
61
- def format_datetime(dt):
63
+ def format_datetime(dt: datetime | str | None) -> str:
62
64
  """Format datetime object to readable string.
63
65
 
64
66
  Args:
@@ -74,7 +76,9 @@ def format_datetime(dt):
74
76
  return str(dt)
75
77
 
76
78
 
77
- def progress_bar(iterable, description: str = "Processing"):
79
+ def progress_bar(
80
+ iterable: Iterable[Any], description: str = "Processing"
81
+ ) -> Iterator[Any]:
78
82
  """Simple progress bar using click.
79
83
 
80
84
  Args:
@@ -86,9 +90,7 @@ def progress_bar(iterable, description: str = "Processing"):
86
90
  """
87
91
  try:
88
92
  with click.progressbar(iterable, label=description) as bar:
89
- for item in bar:
90
- yield item
93
+ yield from bar
91
94
  except ImportError:
92
95
  # Fallback if click not available
93
- for item in iterable:
94
- yield item
96
+ yield from iterable
@@ -40,7 +40,9 @@ def extract_ids_from_export(items: list[Any]) -> list[str]:
40
40
  return ids
41
41
 
42
42
 
43
- def convert_export_to_import_format(data: dict[str, Any]) -> dict[str, Any]:
43
+ def convert_export_to_import_format(
44
+ data: dict[str, Any],
45
+ ) -> dict[str, Any]:
44
46
  """Convert export format to import-compatible format (extract IDs from objects).
45
47
 
46
48
  Args:
@@ -66,6 +68,58 @@ def convert_export_to_import_format(data: dict[str, Any]) -> dict[str, Any]:
66
68
  return import_data
67
69
 
68
70
 
71
+ def _get_default_array_fields() -> list[str]:
72
+ """Get default array fields that should be merged."""
73
+ return ["tools", "agents"]
74
+
75
+
76
+ def _should_use_cli_value(cli_value: Any) -> bool:
77
+ """Check if CLI value should be used."""
78
+ return cli_value is not None and (
79
+ not isinstance(cli_value, list | tuple) or len(cli_value) > 0
80
+ )
81
+
82
+
83
+ def _handle_array_field_merge(
84
+ key: str, cli_value: Any, import_data: dict[str, Any]
85
+ ) -> Any:
86
+ """Handle merging of array fields."""
87
+ import_value = import_data[key]
88
+ if isinstance(import_value, list):
89
+ return list(cli_value) + import_value
90
+ else:
91
+ return cli_value
92
+
93
+
94
+ def _merge_cli_values_with_import(
95
+ merged: dict[str, Any],
96
+ cli_args: dict[str, Any],
97
+ import_data: dict[str, Any],
98
+ array_fields: list[str],
99
+ ) -> None:
100
+ """Merge CLI values into merged dict."""
101
+ for key, cli_value in cli_args.items():
102
+ if _should_use_cli_value(cli_value):
103
+ # CLI value takes precedence (for non-empty values)
104
+ if key in array_fields and key in import_data:
105
+ # For array fields, combine CLI and imported values
106
+ merged[key] = _handle_array_field_merge(key, cli_value, import_data)
107
+ else:
108
+ merged[key] = cli_value
109
+ elif key in import_data:
110
+ # Use imported value if no CLI value
111
+ merged[key] = import_data[key]
112
+
113
+
114
+ def _add_import_only_fields(
115
+ merged: dict[str, Any], import_data: dict[str, Any]
116
+ ) -> None:
117
+ """Add fields that exist only in import data."""
118
+ for key, import_value in import_data.items():
119
+ if key not in merged:
120
+ merged[key] = import_value
121
+
122
+
69
123
  def merge_import_with_cli_args(
70
124
  import_data: dict[str, Any],
71
125
  cli_args: dict[str, Any],
@@ -87,32 +141,11 @@ def merge_import_with_cli_args(
87
141
  - Empty arrays/lists are treated as None (no override)
88
142
  """
89
143
  if array_fields is None:
90
- array_fields = ["tools", "agents"]
144
+ array_fields = _get_default_array_fields()
91
145
 
92
146
  merged = {}
93
-
94
- for key, cli_value in cli_args.items():
95
- if cli_value is not None and (
96
- not isinstance(cli_value, list | tuple) or len(cli_value) > 0
97
- ):
98
- # CLI value takes precedence (for non-empty values)
99
- if key in array_fields and key in import_data:
100
- # For array fields, combine CLI and imported values
101
- import_value = import_data[key]
102
- if isinstance(import_value, list):
103
- merged[key] = list(cli_value) + import_value
104
- else:
105
- merged[key] = cli_value
106
- else:
107
- merged[key] = cli_value
108
- elif key in import_data:
109
- # Use imported value if no CLI value
110
- merged[key] = import_data[key]
111
-
112
- # Add any import-only fields
113
- for key, import_value in import_data.items():
114
- if key not in merged:
115
- merged[key] = import_value
147
+ _merge_cli_values_with_import(merged, cli_args, import_data, array_fields)
148
+ _add_import_only_fields(merged, import_data)
116
149
 
117
150
  return merged
118
151
 
@@ -8,6 +8,8 @@ from __future__ import annotations
8
8
 
9
9
  import re
10
10
  import time
11
+ from collections.abc import Callable
12
+ from typing import Any
11
13
 
12
14
  # Constants for argument formatting
13
15
  DEFAULT_ARGS_MAX_LEN = 100
@@ -65,9 +67,7 @@ def redact_sensitive(text: str | dict | list) -> str | dict | list:
65
67
  for sensitive in ["password", "secret", "token", "key", "api_key"]
66
68
  ):
67
69
  result[key] = "••••••"
68
- elif isinstance(value, dict | list):
69
- result[key] = redact_sensitive(value)
70
- elif isinstance(value, str):
70
+ elif isinstance(value, dict | list) or isinstance(value, str):
71
71
  result[key] = redact_sensitive(value)
72
72
  else:
73
73
  result[key] = value
@@ -156,7 +156,7 @@ def get_step_icon(step_kind: str) -> str:
156
156
  return ""
157
157
 
158
158
 
159
- def is_step_finished(step) -> bool:
159
+ def is_step_finished(step: Any) -> bool:
160
160
  """Check if a step is finished.
161
161
 
162
162
  Args:
@@ -169,7 +169,9 @@ def is_step_finished(step) -> bool:
169
169
 
170
170
 
171
171
  def format_main_title(
172
- header_text: str, has_running_steps: bool, get_spinner_char: callable
172
+ header_text: str,
173
+ has_running_steps: bool,
174
+ get_spinner_char: Callable[[], str],
173
175
  ) -> str:
174
176
  """Generate the main panel title with dynamic status indicators.
175
177
 
@@ -191,7 +193,11 @@ def format_main_title(
191
193
 
192
194
 
193
195
  def print_header_once(
194
- console, text: str, last_header: str, rules_enabled: bool, style: str | None = None
196
+ console: Any,
197
+ text: str,
198
+ last_header: str,
199
+ rules_enabled: bool,
200
+ style: str | None = None,
195
201
  ) -> str:
196
202
  """Print header text only when it changes to avoid duplicate output.
197
203
 
@@ -25,7 +25,7 @@ class Step:
25
25
  started_at: float = field(default_factory=monotonic)
26
26
  duration_ms: int | None = None
27
27
 
28
- def finish(self, duration_raw: float | None):
28
+ def finish(self, duration_raw: float | None) -> None:
29
29
  if isinstance(duration_raw, int | float) and duration_raw > 0:
30
30
  # Use provided duration if it's a positive number (even if very small)
31
31
  self.duration_ms = round(float(duration_raw) * 1000)