mindroot 9.24.0__py3-none-any.whl → 10.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mindroot might be problematic. Click here for more details.

@@ -0,0 +1,592 @@
1
import asyncio
import json
import os
import re
import sys
import time
import traceback
from typing import Any, Dict, List

import aiofiles
import aiofiles.os

from mindroot.lib.utils.debug import debug_box
12
+
13
class ChatLog:
    """Persistent chat transcript for one (user, agent) pair.

    Messages are stored as JSON in ``chatlog_<log_id>.json`` under
    ``$CHATLOG_DIR/<user>/<agent>/`` (``CHATLOG_DIR`` defaults to
    ``data/chat``). Synchronous load/save methods exist for backward
    compatibility; the async variants (``save_log``, ``load_log``,
    ``add_message_async``) use aiofiles for non-blocking file I/O.
    """

    def __init__(self, log_id=0, agent=None, parent_log_id=None, context_length: int = 4096, user: str = None):
        """Create the log and immediately load any existing file for it.

        Args:
            log_id: Identifier used in the on-disk filename.
            agent: Agent name; becomes the last directory component. Required.
            parent_log_id: Optional id of the log that spawned this one.
            context_length: Stored on the instance; not otherwise used here.
            user: Username string, or any object exposing a ``username``
                attribute.

        Raises:
            ValueError: If ``user``/``agent`` is missing or empty, or ``user``
                is neither a string nor an object with ``username``.
        """
        self.log_id = log_id
        self.messages = []
        self.parent_log_id = parent_log_id
        self.agent = agent
        # The literal string 'None' is rejected too — presumably it appears
        # when a None value was stringified upstream (e.g. form/URL params).
        if user is None or user == '' or user == 'None':
            raise ValueError('User must be provided')
        # make sure user is string
        if not isinstance(user, str):
            # accept user-like objects by falling back to their username field
            if hasattr(user, 'username'):
                user = user.username
            else:
                raise ValueError('ChatLog(): user must be a string or have username field')
        self.user = user
        if agent is None or agent == '':
            raise ValueError('Agent must be provided')
        self.context_length = context_length
        # Final layout: <CHATLOG_DIR>/<user>/<agent>/
        self.log_dir = os.environ.get('CHATLOG_DIR', 'data/chat')
        self.log_dir = os.path.join(self.log_dir, self.user)
        self.log_dir = os.path.join(self.log_dir, self.agent)
        if not os.path.exists(self.log_dir):
            os.makedirs(self.log_dir)
        # For backward compatibility, we'll load synchronously in constructor
        # but provide async methods for new code
        self._load_log_sync()

    def _get_log_data(self) -> Dict[str, Any]:
        """Return the JSON-serializable snapshot written to disk."""
        return {
            'agent': self.agent,
            'log_id': self.log_id,
            'messages': self.messages,
            'parent_log_id': self.parent_log_id
        }

    def _calculate_message_length(self, message: Dict[str, str]) -> int:
        """Rough token estimate: ~1 token per 3 JSON characters.

        NOTE(review): count_tokens() uses ``// 4`` for the same purpose; the
        two heuristics disagree — confirm which divisor is intended.
        """
        return len(json.dumps(message)) // 3

    def _load_log_sync(self, log_id=None) -> None:
        """Synchronous version for backward compatibility.

        Loads ``chatlog_<log_id>.json`` into this instance, replacing agent,
        messages and parent_log_id. Missing file resets messages to [].
        """
        if log_id is None:
            log_id = self.log_id
        self.log_id = log_id
        log_file = os.path.join(self.log_dir, f'chatlog_{log_id}.json')
        if os.path.exists(log_file):
            with open(log_file, 'r') as f:
                log_data = json.load(f)
                self.agent = log_data.get('agent')
                self.messages = log_data.get('messages', [])
                self.parent_log_id = log_data.get('parent_log_id', None)
                print("Loaded log file at ", log_file)
                print("Message length: ", len(self.messages))
        else:
            print("Could not find log file at ", log_file)
            self.messages = []

    def _save_log_sync(self) -> None:
        """Synchronous version for backward compatibility.

        Writes the full log snapshot to ``chatlog_<log_id>.json``.
        """
        log_file = os.path.join(self.log_dir, f'chatlog_{self.log_id}.json')
        with open(log_file, 'w') as f:
            json.dump(self._get_log_data(), f, indent=2)

    def add_message(self, message: Dict[str, str]) -> None:
        """Append ``message`` to the log and persist it (synchronous path).

        NOTE(review): _add_message_impl returns False on the image path and
        None otherwise, so ``should_save`` is never truthy here; persistence
        actually happens inside _add_message_impl (new-role branch) or via
        the image check below. The merged-command branch appears never to be
        saved from this method — confirm whether that is intended.
        """
        should_save = self._add_message_impl(message)
        if should_save:
            self._save_log_sync()
        else:
            # Handle the image case that returned False - save synchronously
            if (len(self.messages) > 0 and
                isinstance(self.messages[-1].get('content'), list) and
                len(self.messages[-1]['content']) > 0 and
                self.messages[-1]['content'][0].get('type') == 'image'):
                self._save_log_sync()

    def _add_message_impl(self, message: Dict[str, str]) -> None:
        """Internal implementation shared by sync and async versions.

        Consecutive messages with the same role are merged: when both bodies
        parse as JSON command lists they are concatenated into a single list;
        otherwise the text parts are concatenated. Returns False after
        appending a message containing an image part (caller handles the
        save); returns None on every other path.
        """
        if len(self.messages)>0 and self.messages[-1]['role'] == message['role']:
            print("found repeat role")
            # normalize: if content is a plain string, wrap it in the
            # [{'type': 'text', 'text': ...}] part-list format
            if type(message['content']) == str:
                message['content'] = [{'type':'text', 'text': message['content']}]
            elif type(message['content']) == list:
                for part in message['content']:
                    if part['type'] == 'image':
                        print("found image")
                        # Images are never merged; append as-is and let the
                        # caller perform the save.
                        self.messages.append(message)
                        return False # Indicate caller should NOT save (we'll handle it)

            try:
                # Try to merge two JSON command payloads into one list.
                cmd_list = json.loads(self.messages[-1]['content'][0]['text'])
                if type(cmd_list) != list:
                    debug_box("1")
                    cmd_list = [cmd_list]
                new_json = json.loads(message['content'][0]['text'])
                if type(new_json) != list:
                    debug_box("2")
                    new_json = [new_json]
                new_cmd_list = cmd_list + new_json
                debug_box("3")
                self.messages[-1]['content'] = [{ 'type': 'text', 'text': json.dumps(new_cmd_list) }]
            except Exception as e:
                # assume previous message was not a command, was a string
                debug_box("4")
                print("Could not combine commands, probably normal if user message and previous system output, assuming string", e)
                if type(self.messages[-1]['content']) == str:
                    new_msg_text = self.messages[-1]['content'] + message['content'][0]['text']
                else:
                    new_msg_text = self.messages[-1]['content'][0]['text'] + message['content'][0]['text']
                # NOTE(review): this APPENDS a new combined message while the
                # previous one remains in the list, duplicating its text —
                # confirm whether replacing messages[-1] was intended.
                self.messages.append({'role': message['role'], 'content': [{'type': 'text', 'text': new_msg_text}]})
                #print('could not combine commands. probably normal if user message and previous system output', e)
                #print(self.messages[-1])
                #print(message)
                #raise e
        else:
            if len(self.messages)>0:
                print('roles do not repeat, last message role is ', self.messages[-1]['role'], 'new message role is ', message['role'])
            debug_box("5")
            self.messages.append(message)
            # NOTE(review): synchronous save also runs when invoked from
            # add_message_async, blocking the event loop — confirm acceptable.
            self._save_log_sync()

    async def add_message_async(self, message: Dict[str, str]) -> None:
        """Async version for new code that needs non-blocking operations.

        Mirrors add_message() but persists with the aiofiles-based
        save_log(); see the NOTE(review) comments there and in
        _add_message_impl about which paths actually save.
        """
        should_save = self._add_message_impl(message)
        if should_save:
            await self.save_log()
        else:
            # Handle the image case that returned False - save asynchronously
            if (len(self.messages) > 0 and
                isinstance(self.messages[-1].get('content'), list) and
                len(self.messages[-1]['content']) > 0 and
                self.messages[-1]['content'][0].get('type') == 'image'):
                await self.save_log()

    def get_history(self) -> List[Dict[str, str]]:
        """Return the live message list (callers can mutate it)."""
        return self.messages

    def get_recent(self, max_tokens: int = 4096) -> List[Dict[str, str]]:
        """Return a deep copy of the FULL history.

        NOTE(review): context-length trimming is currently disabled — the
        commented-out loop below was the trimming logic and ``max_tokens`` is
        ignored. The JSON round-trip returns a deep copy so callers cannot
        mutate the stored messages.
        """
        recent_messages = []
        total_length = 0
        #print('returning all messages', self.messages)
        json_messages = json.dumps(self.messages)
        return json.loads(json_messages)

        #for message in self.messages:
        #    message_length = self._calculate_message_length(message)
        #    if total_length + message_length <= max_tokens:
        #        recent_messages.append(message)
        #        total_length += message_length
        #    else:
        #        break
        #
        #return recent_messages

    async def save_log(self) -> None:
        """Asynchronously write the full log snapshot to disk."""
        log_file = os.path.join(self.log_dir, f'chatlog_{self.log_id}.json')
        async with aiofiles.open(log_file, 'w') as f:
            await f.write(json.dumps(self._get_log_data(), indent=2))


    async def load_log(self, log_id = None) -> None:
        """Asynchronously load ``chatlog_<log_id>.json`` into this instance.

        Missing file resets messages to []. Async counterpart of
        _load_log_sync().
        """
        if log_id is None:
            log_id = self.log_id
        self.log_id = log_id
        log_file = os.path.join(self.log_dir, f'chatlog_{log_id}.json')
        if await aiofiles.os.path.exists(log_file):
            async with aiofiles.open(log_file, 'r') as f:
                content = await f.read()
                log_data = json.loads(content)
                self.agent = log_data.get('agent')
                self.messages = log_data.get('messages', [])
                self.parent_log_id = log_data.get('parent_log_id', None)
                print("Loaded log file at ", log_file)
                print("Message length: ", len(self.messages))
        else:
            print("Could not find log file at ", log_file)
            self.messages = []

    def count_tokens(self) -> Dict[str, int]:
        """
        Count tokens in the chat log, providing both sequence totals and cumulative request totals.

        Token counts are an approximation: len(json.dumps(message)) // 4.

        Returns:
            Dict with the following keys:
            - input_tokens_sequence: Total tokens in all user messages
            - output_tokens_sequence: Total tokens in all assistant messages
            - input_tokens_total: Cumulative tokens sent to LLM across all requests
        """
        # Initialize counters
        input_tokens_sequence = 0  # Total tokens in all user messages
        output_tokens_sequence = 0  # Total tokens in all assistant messages
        input_tokens_total = 0  # Cumulative tokens sent to LLM across all requests

        # Process each message
        for i, message in enumerate(self.messages):
            # Calculate tokens in this message (rough approximation)
            message_tokens = len(json.dumps(message)) // 4

            # Add to appropriate sequence counter
            if message['role'] == 'assistant':
                output_tokens_sequence += message_tokens
            else:  # user or system
                input_tokens_sequence += message_tokens

            # For each assistant message, calculate the input tokens for that request
            # (which includes all previous messages). O(n^2) over the message
            # count by design: every request re-sends the whole prefix.
            if message['role'] == 'assistant':
                request_input_tokens = 0
                for j in range(i):
                    request_input_tokens += len(json.dumps(self.messages[j])) // 4
                input_tokens_total += request_input_tokens

        return {
            'input_tokens_sequence': input_tokens_sequence,
            'output_tokens_sequence': output_tokens_sequence,
            'input_tokens_total': input_tokens_total
        }
232
+
233
async def find_chatlog_file(log_id: str) -> str:
    """
    Find a chatlog file by its log_id.

    Searches every subdirectory of $CHATLOG_DIR (default ``data/chat``) for
    a file named ``chatlog_<log_id>.json``.

    Args:
        log_id: The log ID to search for

    Returns:
        The full path to the chatlog file if found, None otherwise
    """
    chat_dir = os.environ.get('CHATLOG_DIR', 'data/chat')
    target = f"chatlog_{log_id}.json"

    # os.walk is a lazy generator: asyncio.to_thread(os.walk, ...) would only
    # create the generator in the worker thread, and the blocking directory
    # traversal would then run on the event loop during iteration.
    # Materialize the whole listing inside the thread instead.
    entries = await asyncio.to_thread(lambda: list(os.walk(chat_dir)))
    for root, dirs, files in entries:
        if target in files:
            return os.path.join(root, target)

    return None
252
+
253
async def find_child_logs_by_parent_id(parent_log_id: str) -> List[str]:
    """
    Find all chat logs that have the given parent_log_id.

    Scans every ``chatlog_*.json`` under $CHATLOG_DIR (default ``data/chat``)
    and collects the ``log_id`` of each file whose ``parent_log_id`` matches.
    Unreadable or malformed files are skipped.

    Args:
        parent_log_id: The parent log ID to search for

    Returns:
        List of log IDs that have this parent_log_id
    """
    child_log_ids = []
    chat_dir = os.environ.get('CHATLOG_DIR', 'data/chat')

    # os.walk is lazy; materialize the listing inside the worker thread so
    # the blocking directory traversal does not run on the event loop.
    entries = await asyncio.to_thread(lambda: list(os.walk(chat_dir)))
    for root, dirs, files in entries:
        for file in files:
            if file.startswith("chatlog_") and file.endswith(".json"):
                try:
                    async with aiofiles.open(os.path.join(root, file), 'r') as f:
                        content = await f.read()
                    log_data = json.loads(content)
                    if log_data.get('parent_log_id') == parent_log_id:
                        # Extract log_id from the stored data
                        child_log_ids.append(log_data.get('log_id'))
                except (json.JSONDecodeError, IOError):
                    # Skip corrupt or unreadable files; best-effort scan.
                    continue

    return child_log_ids
281
+
282
def extract_delegate_task_log_ids(messages: List[Dict]) -> List[str]:
    """
    Extract log IDs from delegate_task and delegate_subtask commands in messages.

    Only assistant messages are examined. Message content may be a plain
    string or the part-list format [{'text': ...}, ...]; anything else is
    skipped. If the text is not a JSON command list, a regex fallback scans
    the raw text for delegate_task/delegate_subtask log_ids.

    Args:
        messages: List of chat messages

    Returns:
        List of log IDs found in delegate_task / delegate_subtask commands
    """
    log_ids = []

    for message in messages:
        if message['role'] != 'assistant':
            continue
        content = message['content']
        # Handle both string and list content formats
        if isinstance(content, str):
            text = content
        elif isinstance(content, list) and len(content) > 0 and 'text' in content[0]:
            text = content[0]['text']
        else:
            continue

        # Try to parse as JSON
        try:
            commands = json.loads(text)
            if not isinstance(commands, list):
                commands = [commands]

            for cmd in commands:
                # A JSON list may legally contain non-dict items (e.g. bare
                # strings); previously cmd.items() raised an uncaught
                # AttributeError on those. Skip them instead.
                if not isinstance(cmd, dict):
                    continue
                for key, value in cmd.items():
                    # value must be a dict before testing/indexing 'log_id';
                    # the 'in' test on a string would do a substring match.
                    if (key in ('delegate_task', 'delegate_subtask')
                            and isinstance(value, dict) and 'log_id' in value):
                        log_ids.append(value['log_id'])
        except (json.JSONDecodeError, TypeError, KeyError, AttributeError):
            # If not JSON, try regex to find log_ids in delegate_task commands
            matches = re.findall(r'"delegate_task"\s*:\s*{\s*"log_id"\s*:\s*"([^"]+)"', text)
            log_ids.extend(matches)
            # Also check for delegate_subtask commands
            matches = re.findall(r'"delegate_subtask"\s*:\s*{\s*"log_id"\s*:\s*"([^"]+)"', text)
            log_ids.extend(matches)

    return log_ids
326
+
327
async def get_cache_dir() -> str:
    """
    Get the directory for token count cache files.

    Creates the directory if it doesn't exist. Reads $TOKEN_CACHE_DIR
    (default ``data/token_cache``).
    """
    cache_dir = os.environ.get('TOKEN_CACHE_DIR', 'data/token_cache')
    # exist_ok avoids the check-then-create race the previous
    # exists()/makedirs() pair had when several tasks start concurrently.
    await aiofiles.os.makedirs(cache_dir, exist_ok=True)
    return cache_dir
336
+
337
async def get_cache_path(log_id: str) -> str:
    """Return the path of the token-count cache file for *log_id*."""
    base = await get_cache_dir()
    return os.path.join(base, f"tokens_{log_id}.json")
343
+
344
async def get_cached_token_counts(log_id: str, log_path: str) -> Dict[str, int]:
    """
    Get cached token counts if available and valid.

    Cache policy, based on file modification times:
      * no cache file, or the log changed after the cache was written -> None
      * cache written less than 3 minutes ago -> return it (rate-limits
        recalculation)
      * log untouched for over an hour -> treat as "finished" and return the
        cache regardless of its age
      * otherwise (valid but stale cache, log still active) -> None, which
        signals the caller to recalculate

    Args:
        log_id: The log ID
        log_path: Path to the actual log file

    Returns:
        Cached token counts if valid, None otherwise
    """
    cache_path = await get_cache_path(log_id)

    # If cache doesn't exist, return None
    if not await aiofiles.os.path.exists(cache_path):
        return None

    try:
        # Get modification times
        log_mtime = await aiofiles.os.path.getmtime(log_path)
        cache_mtime = await aiofiles.os.path.getmtime(cache_path)
        current_time = time.time()

        # If log was modified after cache was created, cache is invalid
        if log_mtime > cache_mtime:
            return None

        # Don't recalculate sooner than 3 minutes after last calculation
        if current_time - cache_mtime < 180:  # 3 minutes in seconds
            async with aiofiles.open(cache_path, 'r') as f:
                content = await f.read()
                return json.loads(content)

        # For logs that haven't been modified in over an hour, consider them "finished"
        # and use the cache regardless of when it was last calculated
        if current_time - log_mtime > 3600:  # 1 hour in seconds
            async with aiofiles.open(cache_path, 'r') as f:
                content = await f.read()
                return json.loads(content)

    except (json.JSONDecodeError, IOError) as e:
        # Best effort: a broken cache file is treated the same as no cache.
        print(f"Error reading token cache: {e}")

    # Fall-through: cache exists and is consistent with the log, but it is
    # older than 3 minutes and the log is still active — ask for a recalc.
    return None
388
+
389
async def save_token_counts_to_cache(log_id: str, token_counts: Dict[str, int]) -> None:
    """Persist *token_counts* as JSON in the cache file for *log_id*."""
    path = await get_cache_path(log_id)
    payload = json.dumps(token_counts)
    async with aiofiles.open(path, 'w') as f:
        await f.write(payload)
396
+
397
async def build_token_hierarchy(log_id: str, user: str = None, visited: set = None) -> Dict:
    """
    Build a hierarchical token count structure for a log and its children.

    Children are discovered two ways: log_ids referenced by
    delegate_task/delegate_subtask commands in this log's messages, and logs
    on disk whose ``parent_log_id`` equals *log_id*.

    Args:
        log_id: The log ID to build hierarchy for
        user: User for the log; if None it is extracted from the chatlog
            path (data/chat/<user>/<agent>/...) or defaulted to "system"
        visited: Set of already visited log IDs to prevent infinite recursion

    Returns:
        Dictionary with hierarchical structure containing:
        - log_id: The log ID
        - agent: Agent name recorded in the log ('unknown' if absent)
        - individual_counts: Token counts for this log only
        - cumulative_counts: Token counts including all children
        - children: List of child hierarchies
        Returns None if the log cannot be found or was already visited.
    """
    if visited is None:
        visited = set()

    if log_id in visited:
        return None  # Prevent infinite recursion

    visited.add(log_id)

    # Find the chatlog file
    chatlog_path = await find_chatlog_file(log_id)
    if not chatlog_path:
        return None

    # Load the chat log
    async with aiofiles.open(chatlog_path, 'r') as f:
        content = await f.read()
        log_data = json.loads(content)

    # Determine the owning user from the path layout
    # data/chat/<user>/<agent>/chatlog_<id>.json when not supplied.
    if user is None:
        try:
            path_parts = chatlog_path.split(os.sep)
            if len(path_parts) >= 4 and path_parts[-4] == 'chat':
                user = path_parts[-3]
            else:
                user = "system"
        except Exception:
            user = "system"

    # Create a temporary ChatLog instance to count tokens.
    # NOTE(review): ChatLog.__init__ does blocking directory creation and a
    # synchronous file load inside this coroutine — confirm acceptable here.
    temp_log = ChatLog(log_id=log_id, user=user, agent=log_data.get('agent', 'unknown'))
    temp_log.messages = log_data.get('messages', [])

    # Count tokens for this log only
    individual_counts = temp_log.count_tokens()

    # Find all child log IDs (delegation commands + on-disk parent links)
    delegated_log_ids = extract_delegate_task_log_ids(temp_log.messages)
    child_logs_by_parent = await find_child_logs_by_parent_id(log_id)
    all_child_log_ids = list(set(delegated_log_ids) | set(child_logs_by_parent))

    # Build child hierarchies; cumulative starts from this log's own counts.
    children = []
    cumulative_counts = {
        'input_tokens_sequence': individual_counts['input_tokens_sequence'],
        'output_tokens_sequence': individual_counts['output_tokens_sequence'],
        'input_tokens_total': individual_counts['input_tokens_total']
    }

    for child_id in all_child_log_ids:
        # NOTE(review): each child gets a COPY of visited, so a log reachable
        # through two different children is counted in both subtrees (only
        # cycles through the same path are cut) — confirm this is intended.
        child_hierarchy = await build_token_hierarchy(child_id, user, visited.copy())
        if child_hierarchy:
            children.append(child_hierarchy)
            # Add child's cumulative counts to our cumulative counts
            cumulative_counts['input_tokens_sequence'] += child_hierarchy['cumulative_counts']['input_tokens_sequence']
            cumulative_counts['output_tokens_sequence'] += child_hierarchy['cumulative_counts']['output_tokens_sequence']
            cumulative_counts['input_tokens_total'] += child_hierarchy['cumulative_counts']['input_tokens_total']

    return {
        'log_id': log_id,
        'agent': log_data.get('agent', 'unknown'),
        'individual_counts': individual_counts,
        'cumulative_counts': cumulative_counts,
        'children': children
    }
477
+
478
async def count_tokens_for_log_id(log_id: str, user: str = None, hierarchical: bool = False) -> Dict[str, int]:
    """
    Count tokens for a chat log identified by log_id, including any delegated tasks.

    Results are cached via get_cached_token_counts/save_token_counts_to_cache.

    Args:
        log_id: The log ID to count tokens for
        user: Optional owner; extracted from the chatlog path
            (data/chat/<user>/<agent>/...) or defaulted to "system" if omitted
        hierarchical: When True, return {'hierarchy': <tree>} from
            build_token_hierarchy instead of the flat structure

    Returns:
        Dictionary with token counts or None if log not found.
        If hierarchical=True, includes 'hierarchy' key with tree structure.
        If hierarchical=False (default), returns flat structure for backwards compatibility:
        the three per-session keys plus their 'combined_*' counterparts that
        include all recursively discovered child logs.
    """
    # Find the chatlog file
    chatlog_path = await find_chatlog_file(log_id)
    if not chatlog_path:
        return None

    # If hierarchical structure is requested, build and return it
    if hierarchical:
        # Check cache first for hierarchical data
        cached_counts = await get_cached_token_counts(log_id, chatlog_path)
        if cached_counts and 'hierarchy' in cached_counts:
            print(f"Using cached hierarchical token counts for {log_id}")
            return cached_counts

        print(f"Calculating hierarchical token counts for {log_id}")
        hierarchy = await build_token_hierarchy(log_id, user)
        if hierarchy:
            result = {'hierarchy': hierarchy}
            # Save hierarchical data to cache
            await save_token_counts_to_cache(log_id, result)
            return result
        return None

    # Check cache first
    cached_counts = await get_cached_token_counts(log_id, chatlog_path)
    if cached_counts:
        print(f"Using cached token counts for {log_id}")
        return cached_counts

    print(f"Calculating token counts for {log_id}")

    # Load the chat log
    async with aiofiles.open(chatlog_path, 'r') as f:
        content = await f.read()
        log_data = json.loads(content)

    # Get parent_log_id if it exists.
    # NOTE(review): parent_log_id is read but never used below — the comment
    # block further down suggests double-count protection was planned around
    # it; confirm whether that logic is still intended.
    parent_log_id = log_data.get('parent_log_id')

    # Create a temporary ChatLog instance to count tokens
    # Use provided user or try to determine from chatlog path or fallback to "system"
    if user is None:
        # Try to extract user from chatlog path: data/chat/{user}/{agent}/chatlog_{log_id}.json
        try:
            path_parts = chatlog_path.split(os.sep)
            if len(path_parts) >= 4 and path_parts[-4] == 'chat':
                extracted_user = path_parts[-3]  # User is third from the end
                user = extracted_user
                print(f"Extracted user '{user}' from chatlog path: {chatlog_path}")
            else:
                user = "system"  # Default fallback
        except Exception as e:
            print(f"Error extracting user from path {chatlog_path}: {e}")
            user = "system"  # Default fallback
    # NOTE(review): ChatLog.__init__ performs blocking directory creation and
    # a synchronous file load inside this coroutine — confirm acceptable.
    temp_log = ChatLog(log_id=log_id, user=user, agent=log_data.get('agent', 'unknown'))
    temp_log.messages = log_data.get('messages', [])

    # Count tokens for this log
    parent_counts = temp_log.count_tokens()

    # Create combined counts (starting with parent counts)
    combined_counts = {
        'input_tokens_sequence': parent_counts['input_tokens_sequence'],
        'output_tokens_sequence': parent_counts['output_tokens_sequence'],
        'input_tokens_total': parent_counts['input_tokens_total']
    }

    # Find delegated task log IDs
    delegated_log_ids = extract_delegate_task_log_ids(temp_log.messages)

    # Also find child logs by parent_log_id
    child_logs_by_parent = await find_child_logs_by_parent_id(log_id)

    # Combine all child log IDs (delegated tasks and parent_log_id children)
    all_child_log_ids = set(delegated_log_ids) | set(child_logs_by_parent)

    # If this log has a parent_log_id, we should not double-count it
    # (it will be counted as part of its parent's cumulative total)
    # But we still want to count its own children

    # Recursively count tokens for all child tasks. Recursive calls use the
    # flat (non-hierarchical) form and add their per-session counts here.
    for child_id in all_child_log_ids:
        delegated_counts = await count_tokens_for_log_id(child_id, user=user)
        if delegated_counts:
            combined_counts['input_tokens_sequence'] += delegated_counts['input_tokens_sequence']
            combined_counts['output_tokens_sequence'] += delegated_counts['output_tokens_sequence']
            combined_counts['input_tokens_total'] += delegated_counts['input_tokens_total']

    # Create final result with both parent and combined counts
    token_counts = {
        # Parent session only counts
        'input_tokens_sequence': parent_counts['input_tokens_sequence'],
        'output_tokens_sequence': parent_counts['output_tokens_sequence'],
        'input_tokens_total': parent_counts['input_tokens_total'],
        # Combined counts (parent + all subtasks)
        'combined_input_tokens_sequence': combined_counts['input_tokens_sequence'],
        'combined_output_tokens_sequence': combined_counts['output_tokens_sequence'],
        'combined_input_tokens_total': combined_counts['input_tokens_total']
    }

    # Save to cache
    await save_token_counts_to_cache(log_id, token_counts)

    return token_counts