mycode-sdk 0.7.5__tar.gz → 0.7.6__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: mycode-sdk
3
- Version: 0.7.5
3
+ Version: 0.7.6
4
4
  Summary: Lightweight Python SDK for building AI agents.
5
5
  Project-URL: Homepage, https://github.com/legibet/mycode
6
6
  Project-URL: Repository, https://github.com/legibet/mycode
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"
4
4
 
5
5
  [project]
6
6
  name = "mycode-sdk"
7
- version = "0.7.5"
7
+ version = "0.7.6"
8
8
  description = "Lightweight Python SDK for building AI agents."
9
9
  readme = "README.md"
10
10
  requires-python = ">=3.12"
@@ -631,9 +631,11 @@ class Agent:
631
631
  return
632
632
  if should_compact(total_tokens, self.context_window, self.compact_threshold):
633
633
  try:
634
- async for event in self._compact(adapter, persist):
634
+ async for event in self._compact(adapter, persist, continue_now=bool(tool_calls)):
635
635
  yield event
636
- except (Exception, asyncio.CancelledError):
636
+ except asyncio.CancelledError:
637
+ raise
638
+ except Exception:
637
639
  logger.warning(
638
640
  "Context compaction failed, continuing without compaction",
639
641
  exc_info=True,
@@ -682,6 +684,8 @@ class Agent:
682
684
  self,
683
685
  adapter: ProviderAdapter,
684
686
  persist: PersistCallback,
687
+ *,
688
+ continue_now: bool,
685
689
  ) -> AsyncIterator[Event]:
686
690
  """Generate a conversation summary and replace in-memory messages."""
687
691
 
@@ -711,13 +715,11 @@ class Agent:
711
715
  summary_message = msg
712
716
 
713
717
  if not summary_message:
714
- logger.warning("Compaction produced no response")
715
- return
718
+ raise ValueError("compaction produced no response")
716
719
 
717
720
  summary_text = flatten_message_text(summary_message, include_thinking=False)
718
721
  if not summary_text:
719
- logger.warning("Compaction produced empty summary")
720
- return
722
+ raise ValueError("compaction produced empty summary")
721
723
 
722
724
  summary_total_tokens = (summary_message.get("meta") or {}).get("total_tokens")
723
725
  compact_event = build_compact_event(
@@ -731,9 +733,12 @@ class Agent:
731
733
  # Persist the compact event (append-only — original messages stay in JSONL).
732
734
  await persist(compact_event)
733
735
 
734
- # Rebuild in-memory messages from the compact event.
735
736
  self.messages.append(compact_event)
736
- self.messages = apply_compact(self.messages)
737
+ self.messages = apply_compact(
738
+ self.messages,
739
+ transcript_path=str(self._store.messages_path(self.session_id)) if self._store else None,
740
+ continue_now=continue_now,
741
+ )
737
742
 
738
743
  yield Event(
739
744
  "compact",
@@ -794,6 +794,13 @@
794
794
  "supports_pdf_input": true,
795
795
  "supports_reasoning": true
796
796
  },
797
+ "gpt-5.5-pro": {
798
+ "context_window": 1050000,
799
+ "max_output_tokens": 128000,
800
+ "supports_image_input": true,
801
+ "supports_pdf_input": true,
802
+ "supports_reasoning": true
803
+ },
797
804
  "gpt-image-1": {
798
805
  "context_window": 0,
799
806
  "max_output_tokens": 0,
@@ -1545,6 +1552,13 @@
1545
1552
  "supports_pdf_input": false,
1546
1553
  "supports_reasoning": true
1547
1554
  },
1555
+ "nvidia/nemotron-3-nano-omni-30b-a3b-reasoning:free": {
1556
+ "context_window": 256000,
1557
+ "max_output_tokens": 65536,
1558
+ "supports_image_input": true,
1559
+ "supports_pdf_input": false,
1560
+ "supports_reasoning": true
1561
+ },
1548
1562
  "nvidia/nemotron-3-super-120b-a12b": {
1549
1563
  "context_window": 262144,
1550
1564
  "max_output_tokens": 262144,
@@ -1755,6 +1769,13 @@
1755
1769
  "supports_pdf_input": true,
1756
1770
  "supports_reasoning": true
1757
1771
  },
1772
+ "openai/gpt-5.5-pro": {
1773
+ "context_window": 1050000,
1774
+ "max_output_tokens": 128000,
1775
+ "supports_image_input": true,
1776
+ "supports_pdf_input": true,
1777
+ "supports_reasoning": true
1778
+ },
1758
1779
  "openai/gpt-oss-120b": {
1759
1780
  "context_window": 131072,
1760
1781
  "max_output_tokens": 32768,
@@ -35,26 +35,38 @@ capture everything needed to continue the work seamlessly.
35
35
 
36
36
  Include:
37
37
 
38
- 1. **User Requests**: Every distinct request or instruction the user gave, \
38
+ 1. **Task and Intent**: Describe the user's overall goal — what is being \
39
+ built, fixed, or investigated, and why.
40
+ 2. **Decisions and Constraints**: List the decisions made, constraints \
41
+ discovered, and approaches chosen or rejected, with the reasoning behind \
42
+ each.
43
+ 3. **User Requests**: Every distinct request or instruction the user gave, \
39
44
  in chronological order. Preserve the user's original wording for ambiguous \
40
45
  or nuanced requests.
41
- 2. **Completed Work**: What was accomplished — files created, modified, or \
42
- deleted; bugs fixed; features added. Include file paths and function names.
43
- 3. **Current State**: The exact state of the work right now — what is working, \
44
- what is broken, what is partially done.
45
- 4. **Key Decisions**: Important decisions made, constraints discovered, \
46
- approaches chosen or rejected, and why.
47
- 5. **Next Steps**: What remains to be done, any work that was in progress \
48
- when this summary was generated.
46
+ 4. **Files and Changes**: Enumerate every file read, modified, or created — \
47
+ paths, what changed, and any code snippets the next turn will need to \
48
+ reason about, quoted verbatim.
49
+ 5. **Errors and Fixes**: List errors encountered, with the original message \
50
+ verbatim, the cause if known, and the resolution — or that it remains open.
51
+ 6. **Current State**: What is verified working, what is known broken, what \
52
+ is in progress.
53
+ 7. **Next Step**: The next step to take, with a direct quote from the most \
54
+ recent conversation showing where the work left off.
49
55
 
50
56
  Rules:
51
- - Be specific: include file paths, function names, error messages, and \
52
- concrete details.
57
+ - Be specific: reproduce file paths, function names, error messages, and \
58
+ other identifiers verbatim — never paraphrase them.
53
59
  - Do not add suggestions or opinions — only summarize what happened.
54
60
  - Keep it concise but complete.\
55
61
  """
56
62
 
57
- _COMPACT_ACK = "Understood. I have the context from the conversation summary and will continue the work."
63
+ _CONTINUATION_HEADER = "This session is being continued from a previous conversation that was compacted to fit the context window. The summary below covers the earlier portion of the conversation."
64
+
65
+ _TRANSCRIPT_HINT = "For verbatim details not captured in this summary (exact code snippets, error messages, or earlier output), read the original conversation log at: {path}"
66
+
67
+ _CONTINUATION_FOOTER = 'Resume directly from where the work left off. Do not acknowledge this summary, do not recap, and do not preface with "I\'ll continue" or similar.'
68
+
69
+ _COMPACT_ACK = "Acknowledged."
58
70
 
59
71
 
60
72
  # ---------------------------------------------------------------------
@@ -103,8 +115,17 @@ def build_compact_event(
103
115
  return build_message("compact", [text_block(summary_text)], meta=meta)
104
116
 
105
117
 
106
- def apply_compact(messages: list[ConversationMessage]) -> list[ConversationMessage]:
107
- """Replace the latest compact event with a summary + synthetic ack."""
118
+ def apply_compact(
119
+ messages: list[ConversationMessage],
120
+ *,
121
+ transcript_path: str | None = None,
122
+ continue_now: bool | None = None,
123
+ ) -> list[ConversationMessage]:
124
+ """Replace the latest compact event with a synthetic summary view.
125
+
126
+ ``continue_now`` omits the ack and leaves a user instruction last so the
127
+ agent loop can immediately request the next assistant response.
128
+ """
108
129
 
109
130
  # Only the newest compact event matters. Older history before it is no
110
131
  # longer visible once the summary replaces that earlier conversation.
@@ -122,15 +143,23 @@ def apply_compact(messages: list[ConversationMessage]) -> list[ConversationMessa
122
143
  summary_text = str(block.get("text") or "")
123
144
  break
124
145
 
125
- return [
126
- build_message(
127
- "user",
128
- [text_block(f"[Conversation Summary]\n\n{summary_text}")],
129
- meta={"synthetic": True},
130
- ),
131
- build_message("assistant", [text_block(_COMPACT_ACK)], meta={"synthetic": True}),
132
- *messages[last_compact_index + 1 :],
133
- ]
146
+ tail = messages[last_compact_index + 1 :]
147
+ if continue_now is None:
148
+ # During live tool-loop compaction the next persisted message is the
149
+ # assistant continuation. Waiting compaction has no tail yet.
150
+ continue_now = bool(tail and tail[0].get("role") == "assistant")
151
+
152
+ parts = [_CONTINUATION_HEADER, summary_text]
153
+ if transcript_path:
154
+ parts.append(_TRANSCRIPT_HINT.format(path=transcript_path))
155
+ if continue_now:
156
+ parts.append(_CONTINUATION_FOOTER)
157
+
158
+ result = [build_message("user", [text_block("\n\n".join(parts))], meta={"synthetic": True})]
159
+ if not continue_now:
160
+ result.append(build_message("assistant", [text_block(_COMPACT_ACK)], meta={"synthetic": True}))
161
+ result.extend(tail)
162
+ return result
134
163
 
135
164
 
136
165
  def build_rewind_event(rewind_to: int) -> ConversationMessage:
@@ -317,7 +346,10 @@ class SessionStore:
317
346
  # 2) rewind truncates that visible list by message index
318
347
  # Orphan tool_use blocks (e.g. left open by a server crash) are
319
348
  # closed by the provider adapter at replay time, not here.
320
- visible_messages = apply_compact(raw_messages)
349
+ visible_messages = apply_compact(
350
+ raw_messages,
351
+ transcript_path=str(self.messages_path(session_id)),
352
+ )
321
353
  visible_messages = apply_rewind(visible_messages)
322
354
 
323
355
  return {"session": self._summary(session_id, meta), "messages": visible_messages}
File without changes
File without changes
File without changes