klaude-code 1.2.21__py3-none-any.whl → 1.2.23__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. klaude_code/cli/debug.py +8 -10
  2. klaude_code/command/__init__.py +0 -3
  3. klaude_code/command/status_cmd.py +1 -1
  4. klaude_code/const/__init__.py +10 -7
  5. klaude_code/core/manager/sub_agent_manager.py +1 -1
  6. klaude_code/core/prompt.py +5 -2
  7. klaude_code/core/prompts/prompt-codex-gpt-5-2-codex.md +117 -0
  8. klaude_code/core/prompts/{prompt-codex-gpt-5-1.md → prompt-codex.md} +9 -42
  9. klaude_code/core/reminders.py +87 -2
  10. klaude_code/core/task.py +37 -18
  11. klaude_code/core/tool/__init__.py +1 -9
  12. klaude_code/core/tool/file/_utils.py +6 -0
  13. klaude_code/core/tool/file/apply_patch_tool.py +30 -72
  14. klaude_code/core/tool/file/diff_builder.py +151 -0
  15. klaude_code/core/tool/file/edit_tool.py +35 -18
  16. klaude_code/core/tool/file/read_tool.py +45 -86
  17. klaude_code/core/tool/file/write_tool.py +40 -30
  18. klaude_code/core/tool/shell/bash_tool.py +147 -0
  19. klaude_code/core/tool/skill/__init__.py +0 -0
  20. klaude_code/core/tool/{memory → skill}/skill_tool.py +16 -39
  21. klaude_code/protocol/commands.py +0 -1
  22. klaude_code/protocol/model.py +31 -11
  23. klaude_code/protocol/tools.py +1 -2
  24. klaude_code/session/export.py +76 -21
  25. klaude_code/session/store.py +4 -2
  26. klaude_code/session/templates/export_session.html +28 -0
  27. klaude_code/skill/__init__.py +27 -0
  28. klaude_code/skill/assets/deslop/SKILL.md +17 -0
  29. klaude_code/skill/assets/dev-docs/SKILL.md +108 -0
  30. klaude_code/skill/assets/handoff/SKILL.md +39 -0
  31. klaude_code/skill/assets/jj-workspace/SKILL.md +20 -0
  32. klaude_code/skill/assets/skill-creator/SKILL.md +139 -0
  33. klaude_code/{core/tool/memory/skill_loader.py → skill/loader.py} +60 -24
  34. klaude_code/skill/manager.py +70 -0
  35. klaude_code/skill/system_skills.py +192 -0
  36. klaude_code/ui/modes/repl/completers.py +103 -3
  37. klaude_code/ui/modes/repl/event_handler.py +7 -3
  38. klaude_code/ui/modes/repl/input_prompt_toolkit.py +42 -3
  39. klaude_code/ui/renderers/assistant.py +7 -2
  40. klaude_code/ui/renderers/common.py +26 -11
  41. klaude_code/ui/renderers/developer.py +12 -5
  42. klaude_code/ui/renderers/diffs.py +85 -1
  43. klaude_code/ui/renderers/metadata.py +4 -2
  44. klaude_code/ui/renderers/thinking.py +1 -1
  45. klaude_code/ui/renderers/tools.py +75 -129
  46. klaude_code/ui/renderers/user_input.py +32 -2
  47. klaude_code/ui/rich/markdown.py +27 -12
  48. klaude_code/ui/rich/status.py +9 -24
  49. klaude_code/ui/rich/theme.py +17 -5
  50. {klaude_code-1.2.21.dist-info → klaude_code-1.2.23.dist-info}/METADATA +19 -13
  51. {klaude_code-1.2.21.dist-info → klaude_code-1.2.23.dist-info}/RECORD +54 -54
  52. klaude_code/command/diff_cmd.py +0 -136
  53. klaude_code/command/prompt-deslop.md +0 -14
  54. klaude_code/command/prompt-dev-docs-update.md +0 -56
  55. klaude_code/command/prompt-dev-docs.md +0 -46
  56. klaude_code/command/prompt-handoff.md +0 -33
  57. klaude_code/command/prompt-jj-workspace.md +0 -18
  58. klaude_code/core/tool/file/multi_edit_tool.md +0 -42
  59. klaude_code/core/tool/file/multi_edit_tool.py +0 -175
  60. klaude_code/core/tool/memory/__init__.py +0 -5
  61. klaude_code/core/tool/memory/memory_tool.md +0 -20
  62. klaude_code/core/tool/memory/memory_tool.py +0 -456
  63. klaude_code/core/tool/{memory → skill}/skill_tool.md +0 -0
  64. {klaude_code-1.2.21.dist-info → klaude_code-1.2.23.dist-info}/WHEEL +0 -0
  65. {klaude_code-1.2.21.dist-info → klaude_code-1.2.23.dist-info}/entry_points.txt +0 -0
klaude_code/core/task.py CHANGED
@@ -25,7 +25,7 @@ class MetadataAccumulator:
     """
 
     def __init__(self, model_name: str) -> None:
-        self._main = model.TaskMetadata(model_name=model_name)
+        self._main_agent = model.TaskMetadata(model_name=model_name)  # Main agent metadata
         self._sub_agent_metadata: list[model.TaskMetadata] = []
         self._throughput_weighted_sum: float = 0.0
         self._throughput_tracked_tokens: int = 0
@@ -36,13 +36,12 @@ class MetadataAccumulator:
     def add(self, turn_metadata: model.ResponseMetadataItem) -> None:
         """Merge a turn's metadata into the accumulated state."""
         self._turn_count += 1
-        main = self._main
         usage = turn_metadata.usage
 
         if usage is not None:
-            if main.usage is None:
-                main.usage = model.Usage()
-            acc_usage = main.usage
+            if self._main_agent.usage is None:
+                self._main_agent.usage = model.Usage()
+            acc_usage = self._main_agent.usage
 
             model.TaskMetadata.merge_usage(acc_usage, usage)
             acc_usage.currency = usage.currency
@@ -63,9 +62,9 @@ class MetadataAccumulator:
             self._throughput_tracked_tokens += current_output
 
         if turn_metadata.provider is not None:
-            main.provider = turn_metadata.provider
+            self._main_agent.provider = turn_metadata.provider
         if turn_metadata.model_name:
-            main.model_name = turn_metadata.model_name
+            self._main_agent.model_name = turn_metadata.model_name
 
     def add_sub_agent_metadata(self, sub_agent_metadata: model.TaskMetadata) -> None:
         """Add sub-agent task metadata to the accumulated state."""
@@ -73,21 +72,22 @@ class MetadataAccumulator:
 
     def finalize(self, task_duration_s: float) -> model.TaskMetadataItem:
         """Return the final accumulated metadata with computed throughput and duration."""
-        main = self._main
-        if main.usage is not None:
+        if self._main_agent.usage is not None:
             if self._throughput_tracked_tokens > 0:
-                main.usage.throughput_tps = self._throughput_weighted_sum / self._throughput_tracked_tokens
+                self._main_agent.usage.throughput_tps = self._throughput_weighted_sum / self._throughput_tracked_tokens
             else:
-                main.usage.throughput_tps = None
+                self._main_agent.usage.throughput_tps = None
 
             if self._first_token_latency_count > 0:
-                main.usage.first_token_latency_ms = self._first_token_latency_sum / self._first_token_latency_count
+                self._main_agent.usage.first_token_latency_ms = (
+                    self._first_token_latency_sum / self._first_token_latency_count
+                )
             else:
-                main.usage.first_token_latency_ms = None
+                self._main_agent.usage.first_token_latency_ms = None
 
-        main.task_duration_s = task_duration_s
-        main.turn_count = self._turn_count
-        return model.TaskMetadataItem(main=main, sub_agent_task_metadata=self._sub_agent_metadata)
+        self._main_agent.task_duration_s = task_duration_s
+        self._main_agent.turn_count = self._turn_count
+        return model.TaskMetadataItem(main_agent=self._main_agent, sub_agent_task_metadata=self._sub_agent_metadata)
 
 
 @dataclass
@@ -126,17 +126,28 @@ class TaskExecutor:
         self._context = context
         self._current_turn: TurnExecutor | None = None
         self._started_at: float = 0.0
+        self._metadata_accumulator: MetadataAccumulator | None = None
 
     @property
     def current_turn(self) -> TurnExecutor | None:
         return self._current_turn
 
     def cancel(self) -> list[events.Event]:
-        """Cancel the current turn and return any resulting events."""
+        """Cancel the current turn and return any resulting events including metadata."""
         ui_events: list[events.Event] = []
         if self._current_turn is not None:
             ui_events.extend(self._current_turn.cancel())
             self._current_turn = None
+
+        # Emit partial metadata on cancellation
+        if self._metadata_accumulator is not None and self._started_at > 0:
+            task_duration_s = time.perf_counter() - self._started_at
+            accumulated = self._metadata_accumulator.finalize(task_duration_s)
+            if accumulated.main_agent.usage is not None:
+                session_id = self._context.session_ctx.session_id
+                ui_events.append(events.TaskMetadataEvent(metadata=accumulated, session_id=session_id))
+                self._context.session_ctx.append_history([accumulated])
+
         return ui_events
 
     async def run(self, user_input: model.UserInputPayload) -> AsyncGenerator[events.Event]:
@@ -152,7 +163,8 @@ class TaskExecutor:
         del user_input  # Persisted by the operation handler before launching the task.
 
         profile = ctx.profile
-        metadata_accumulator = MetadataAccumulator(model_name=profile.llm_client.model_name)
+        self._metadata_accumulator = MetadataAccumulator(model_name=profile.llm_client.model_name)
+        metadata_accumulator = self._metadata_accumulator
 
         while True:
             # Process reminders at the start of each turn
@@ -226,6 +238,13 @@ class TaskExecutor:
                 return
 
             if turn is None or turn.task_finished:
+                # Sub-agent with empty result should retry instead of finishing
+                if ctx.sub_agent_state is not None and turn is not None and not turn.task_result.strip():
+                    yield events.ErrorEvent(
+                        error_message="Sub-agent returned empty result, retrying...",
+                        can_retry=True,
+                    )
+                    continue
                 break
 
         # Finalize metadata
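The new cancellation path finalizes whatever the accumulator has gathered so far and only emits an event if the task actually started and recorded any usage. A minimal, self-contained sketch of that pattern follows; Usage, Accumulated, and CancellableTask are simplified stand-ins, not the real MetadataAccumulator, TaskMetadataItem, or event types from klaude_code.

    import time
    from dataclasses import dataclass


    @dataclass
    class Usage:
        output_tokens: int = 0


    @dataclass
    class Accumulated:
        usage: Usage | None = None
        task_duration_s: float = 0.0


    class CancellableTask:
        def __init__(self, usage: Usage | None) -> None:
            self._started_at = time.perf_counter()
            self._accumulated = Accumulated(usage=usage)

        def cancel(self) -> list[str]:
            events: list[str] = []
            # Mirrors TaskExecutor.cancel(): finalize the duration, then emit only
            # if the task had started and some usage was recorded.
            if self._started_at > 0:
                self._accumulated.task_duration_s = time.perf_counter() - self._started_at
                if self._accumulated.usage is not None:
                    events.append(f"TaskMetadataEvent(duration_s={self._accumulated.task_duration_s:.3f})")
            return events


    print(CancellableTask(Usage(output_tokens=7)).cancel())  # one partial-metadata event
    print(CancellableTask(None).cancel())                    # nothing to report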
klaude_code/core/tool/__init__.py CHANGED
@@ -1,15 +1,12 @@
 from .file.apply_patch import DiffError, process_patch
 from .file.apply_patch_tool import ApplyPatchTool
 from .file.edit_tool import EditTool
-from .file.multi_edit_tool import MultiEditTool
 from .file.read_tool import ReadTool
 from .file.write_tool import WriteTool
-from .memory.memory_tool import MEMORY_DIR_NAME, MemoryTool
-from .memory.skill_loader import Skill, SkillLoader
-from .memory.skill_tool import SkillTool
 from .report_back_tool import ReportBackTool
 from .shell.bash_tool import BashTool
 from .shell.command_safety import SafetyCheckResult, is_safe_command
+from .skill.skill_tool import SkillTool
 from .sub_agent_tool import SubAgentTool
 from .todo.todo_write_tool import TodoWriteTool
 from .todo.update_plan_tool import UpdatePlanTool
@@ -32,21 +29,16 @@ from .web.web_fetch_tool import WebFetchTool
 from .web.web_search_tool import WebSearchTool
 
 __all__ = [
-    "MEMORY_DIR_NAME",
     "ApplyPatchTool",
     "BashTool",
     "DiffError",
     "EditTool",
     "FileTracker",
-    "MemoryTool",
     "MermaidTool",
-    "MultiEditTool",
     "ReadTool",
     "ReportBackTool",
     "SafetyCheckResult",
     "SimpleTruncationStrategy",
-    "Skill",
-    "SkillLoader",
    "SkillTool",
     "SubAgentTool",
     "TodoContext",
klaude_code/core/tool/file/_utils.py CHANGED
@@ -2,6 +2,7 @@
 
 from __future__ import annotations
 
+import hashlib
 import os
 from pathlib import Path
 
@@ -28,3 +29,8 @@ def write_text(path: str, content: str) -> None:
     parent.mkdir(parents=True, exist_ok=True)
     with open(path, "w", encoding="utf-8") as f:
         f.write(content)
+
+
+def hash_text_sha256(content: str) -> str:
+    """Return SHA-256 for the given text content encoded as UTF-8."""
+    return hashlib.sha256(content.encode("utf-8")).hexdigest()
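The new helper is a thin wrapper over hashlib; the point is that the FileTracker can now compare file content by hash instead of trusting mtimes. A small standalone equivalence check (the function body is copied from the helper above, so it runs without klaude_code installed):

    import hashlib


    def hash_text_sha256(content: str) -> str:
        # Same one-liner as klaude_code.core.tool.file._utils.hash_text_sha256.
        return hashlib.sha256(content.encode("utf-8")).hexdigest()


    text = "print('hello')\n"
    assert hash_text_sha256(text) == hashlib.sha256(text.encode("utf-8")).hexdigest()
    print(hash_text_sha256(text)[:16])  # stable across machines and mtime changes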
klaude_code/core/tool/file/apply_patch_tool.py CHANGED
@@ -2,13 +2,14 @@
 
 import asyncio
 import contextlib
-import difflib
 import os
 from pathlib import Path
 
 from pydantic import BaseModel
 
 from klaude_code.core.tool.file import apply_patch as apply_patch_module
+from klaude_code.core.tool.file._utils import hash_text_sha256
+from klaude_code.core.tool.file.diff_builder import build_structured_file_diff
 from klaude_code.core.tool.tool_abc import ToolABC, load_desc
 from klaude_code.core.tool.tool_context import get_current_file_tracker
 from klaude_code.core.tool.tool_registry import register
@@ -19,7 +20,7 @@ class ApplyPatchHandler:
     @classmethod
     async def handle_apply_patch(cls, patch_text: str) -> model.ToolResultItem:
         try:
-            output, diff_text = await asyncio.to_thread(cls._apply_patch_in_thread, patch_text)
+            output, diff_ui = await asyncio.to_thread(cls._apply_patch_in_thread, patch_text)
         except apply_patch_module.DiffError as error:
             return model.ToolResultItem(status="error", output=str(error))
         except Exception as error:  # pragma: no cover # unexpected errors bubbled to tool result
@@ -27,11 +28,11 @@ class ApplyPatchHandler:
         return model.ToolResultItem(
             status="success",
             output=output,
-            ui_extra=model.DiffTextUIExtra(diff_text=diff_text),
+            ui_extra=diff_ui,
         )
 
     @staticmethod
-    def _apply_patch_in_thread(patch_text: str) -> tuple[str, str]:
+    def _apply_patch_in_thread(patch_text: str) -> tuple[str, model.DiffUIExtra]:
         ap = apply_patch_module
         normalized_start = patch_text.lstrip()
         if not normalized_start.startswith("*** Begin Patch"):
@@ -66,7 +67,7 @@ class ApplyPatchHandler:
 
         patch, _ = ap.text_to_patch(patch_text, orig)
         commit = ap.patch_to_commit(patch, orig)
-        diff_text = ApplyPatchHandler._commit_to_diff(commit)
+        diff_ui = ApplyPatchHandler._commit_to_structured_diff(commit)
 
         def write_fn(path: str, content: str) -> None:
             resolved = resolve_path(path)
@@ -82,7 +83,11 @@ class ApplyPatchHandler:
                 with contextlib.suppress(Exception):  # pragma: no cover - file tracker best-effort
                     existing = file_tracker.get(resolved)
                     is_mem = existing.is_memory if existing else False
-                    file_tracker[resolved] = model.FileStatus(mtime=Path(resolved).stat().st_mtime, is_memory=is_mem)
+                    file_tracker[resolved] = model.FileStatus(
+                        mtime=Path(resolved).stat().st_mtime,
+                        content_sha256=hash_text_sha256(content),
+                        is_memory=is_mem,
+                    )
 
         def remove_fn(path: str) -> None:
             resolved = resolve_path(path)
@@ -97,74 +102,27 @@ class ApplyPatchHandler:
                 file_tracker.pop(resolved, None)
 
         ap.apply_commit(commit, write_fn, remove_fn)
-        return "Done!", diff_text
+        return "Done!", diff_ui
 
     @staticmethod
-    def _commit_to_diff(commit: apply_patch_module.Commit) -> str:
-        diff_chunks: list[str] = []
-        for path, change in commit.changes.items():
-            chunk = ApplyPatchHandler._render_change_diff(path, change)
-            if chunk:
-                if diff_chunks:
-                    diff_chunks.append("")
-                diff_chunks.extend(chunk)
-        return "\n".join(diff_chunks)
-
-    @staticmethod
-    def _render_change_diff(path: str, change: apply_patch_module.FileChange) -> list[str]:
-        lines: list[str] = []
-        if change.type == apply_patch_module.ActionType.ADD:
-            lines.append(f"diff --git a/{path} b/{path}")
-            lines.append("new file mode 100644")
-            new_lines = ApplyPatchHandler._split_lines(change.new_content)
-            lines.extend(ApplyPatchHandler._unified_diff([], new_lines, fromfile="/dev/null", tofile=f"b/{path}"))
-            return lines
-        if change.type == apply_patch_module.ActionType.DELETE:
-            lines.append(f"diff --git a/{path} b/{path}")
-            lines.append("deleted file mode 100644")
-            old_lines = ApplyPatchHandler._split_lines(change.old_content)
-            lines.extend(ApplyPatchHandler._unified_diff(old_lines, [], fromfile=f"a/{path}", tofile="/dev/null"))
-            return lines
-        if change.type == apply_patch_module.ActionType.UPDATE:
-            new_path = change.move_path or path
-            lines.append(f"diff --git a/{path} b/{new_path}")
-            if change.move_path and change.move_path != path:
-                lines.append(f"rename from {path}")
-                lines.append(f"rename to {new_path}")
-            old_lines = ApplyPatchHandler._split_lines(change.old_content)
-            new_lines = ApplyPatchHandler._split_lines(change.new_content)
-            lines.extend(
-                ApplyPatchHandler._unified_diff(old_lines, new_lines, fromfile=f"a/{path}", tofile=f"b/{new_path}")
-            )
-            return lines
-        return lines
-
-    @staticmethod
-    def _unified_diff(
-        old_lines: list[str],
-        new_lines: list[str],
-        *,
-        fromfile: str,
-        tofile: str,
-    ) -> list[str]:
-        diff_lines = list(
-            difflib.unified_diff(
-                old_lines,
-                new_lines,
-                fromfile=fromfile,
-                tofile=tofile,
-                lineterm="",
-            )
-        )
-        if not diff_lines:
-            diff_lines = [f"--- {fromfile}", f"+++ {tofile}"]
-        return diff_lines
-
-    @staticmethod
-    def _split_lines(text: str | None) -> list[str]:
-        if not text:
-            return []
-        return text.splitlines()
+    def _commit_to_structured_diff(commit: apply_patch_module.Commit) -> model.DiffUIExtra:
+        files: list[model.DiffFileDiff] = []
+        for path in sorted(commit.changes):
+            change = commit.changes[path]
+            if change.type == apply_patch_module.ActionType.ADD:
+                files.append(build_structured_file_diff("", change.new_content or "", file_path=path))
+            elif change.type == apply_patch_module.ActionType.DELETE:
+                files.append(build_structured_file_diff(change.old_content or "", "", file_path=path))
+            elif change.type == apply_patch_module.ActionType.UPDATE:
+                display_path = path
+                if change.move_path and change.move_path != path:
+                    display_path = f"{path} {change.move_path}"
+                files.append(
+                    build_structured_file_diff(
+                        change.old_content or "", change.new_content or "", file_path=display_path
                    )
+                )
        return model.DiffUIExtra(files=files)
 
 
 @register(tools.APPLY_PATCH)
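Instead of a pre-rendered unified-diff string (the old DiffTextUIExtra.diff_text), the tool result now carries a structured DiffUIExtra that renderers can walk. A hedged usage sketch, assuming klaude-code 1.2.23 is installed and using only the attribute names visible in the code above (the file path "example.py" is illustrative):

    from klaude_code.core.tool.file.diff_builder import build_structured_file_diff
    from klaude_code.protocol import model

    # Build the same payload shape _commit_to_structured_diff produces for one
    # updated file (old content vs. new content).
    ui_extra = model.DiffUIExtra(
        files=[build_structured_file_diff("a = 1\n", "a = 2\n", file_path="example.py")]
    )

    for file_diff in ui_extra.files:
        # stats_add / stats_remove replace the old "count the +/- lines" parsing.
        print(f"{file_diff.file_path}: +{file_diff.stats_add} -{file_diff.stats_remove}")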
klaude_code/core/tool/file/diff_builder.py CHANGED
@@ -0,0 +1,151 @@
+from __future__ import annotations
+
+import difflib
+from typing import cast
+
+from diff_match_patch import diff_match_patch  # type: ignore[import-untyped]
+
+from klaude_code.protocol import model
+
+_MAX_LINE_LENGTH_FOR_CHAR_DIFF = 2000
+_DEFAULT_CONTEXT_LINES = 3
+
+
+def build_structured_diff(before: str, after: str, *, file_path: str) -> model.DiffUIExtra:
+    """Build a structured diff with char-level spans for a single file."""
+    file_diff = _build_file_diff(before, after, file_path=file_path)
+    return model.DiffUIExtra(files=[file_diff])
+
+
+def build_structured_file_diff(before: str, after: str, *, file_path: str) -> model.DiffFileDiff:
+    """Build a structured diff for a single file."""
+    return _build_file_diff(before, after, file_path=file_path)
+
+
+def _build_file_diff(before: str, after: str, *, file_path: str) -> model.DiffFileDiff:
+    before_lines = _split_lines(before)
+    after_lines = _split_lines(after)
+
+    matcher = difflib.SequenceMatcher(None, before_lines, after_lines)
+    lines: list[model.DiffLine] = []
+    stats_add = 0
+    stats_remove = 0
+
+    grouped_opcodes = matcher.get_grouped_opcodes(n=_DEFAULT_CONTEXT_LINES)
+    for group_idx, group in enumerate(grouped_opcodes):
+        if group_idx > 0:
+            lines.append(_gap_line())
+
+        # Anchor line numbers to the actual start of the displayed hunk in the "after" file.
+        new_line_no = group[0][3] + 1
+
+        for tag, i1, i2, j1, j2 in group:
+            if tag == "equal":
+                for line in after_lines[j1:j2]:
+                    lines.append(_ctx_line(line, new_line_no))
+                    new_line_no += 1
+            elif tag == "delete":
+                for line in before_lines[i1:i2]:
+                    lines.append(_remove_line([model.DiffSpan(op="equal", text=line)]))
+                    stats_remove += 1
+            elif tag == "insert":
+                for line in after_lines[j1:j2]:
+                    lines.append(_add_line([model.DiffSpan(op="equal", text=line)], new_line_no))
+                    stats_add += 1
+                    new_line_no += 1
+            elif tag == "replace":
+                old_block = before_lines[i1:i2]
+                new_block = after_lines[j1:j2]
+                max_len = max(len(old_block), len(new_block))
+                for idx in range(max_len):
+                    old_line = old_block[idx] if idx < len(old_block) else None
+                    new_line = new_block[idx] if idx < len(new_block) else None
+                    if old_line is not None and new_line is not None:
+                        remove_spans, add_spans = _diff_line_spans(old_line, new_line)
+                        lines.append(_remove_line(remove_spans))
+                        lines.append(_add_line(add_spans, new_line_no))
+                        stats_remove += 1
+                        stats_add += 1
+                        new_line_no += 1
+                    elif old_line is not None:
+                        lines.append(_remove_line([model.DiffSpan(op="equal", text=old_line)]))
+                        stats_remove += 1
+                    elif new_line is not None:
+                        lines.append(_add_line([model.DiffSpan(op="equal", text=new_line)], new_line_no))
+                        stats_add += 1
+                        new_line_no += 1
+
+    return model.DiffFileDiff(
+        file_path=file_path,
+        lines=lines,
+        stats_add=stats_add,
+        stats_remove=stats_remove,
+    )
+
+
+def _split_lines(text: str) -> list[str]:
+    if not text:
+        return []
+    return text.splitlines()
+
+
+def _ctx_line(text: str, new_line_no: int) -> model.DiffLine:
+    return model.DiffLine(
+        kind="ctx",
+        new_line_no=new_line_no,
+        spans=[model.DiffSpan(op="equal", text=text)],
+    )
+
+
+def _gap_line() -> model.DiffLine:
+    return model.DiffLine(
+        kind="gap",
+        new_line_no=None,
+        spans=[model.DiffSpan(op="equal", text="")],
+    )
+
+
+def _add_line(spans: list[model.DiffSpan], new_line_no: int) -> model.DiffLine:
+    return model.DiffLine(kind="add", new_line_no=new_line_no, spans=_ensure_spans(spans))
+
+
+def _remove_line(spans: list[model.DiffSpan]) -> model.DiffLine:
+    return model.DiffLine(kind="remove", new_line_no=None, spans=_ensure_spans(spans))
+
+
+def _ensure_spans(spans: list[model.DiffSpan]) -> list[model.DiffSpan]:
+    if spans:
+        return spans
+    return [model.DiffSpan(op="equal", text="")]
+
+
+def _diff_line_spans(old_line: str, new_line: str) -> tuple[list[model.DiffSpan], list[model.DiffSpan]]:
+    if not _should_char_diff(old_line, new_line):
+        return (
+            [model.DiffSpan(op="equal", text=old_line)],
+            [model.DiffSpan(op="equal", text=new_line)],
+        )
+
+    differ = diff_match_patch()
+    diffs = cast(list[tuple[int, str]], differ.diff_main(old_line, new_line))  # type: ignore[no-untyped-call]
+    differ.diff_cleanupSemantic(diffs)  # type: ignore[no-untyped-call]
+
+    remove_spans: list[model.DiffSpan] = []
+    add_spans: list[model.DiffSpan] = []
+
+    for op, text in diffs:
+        if not text:
+            continue
+        if op == diff_match_patch.DIFF_EQUAL:  # type: ignore[no-untyped-call]
+            remove_spans.append(model.DiffSpan(op="equal", text=text))
+            add_spans.append(model.DiffSpan(op="equal", text=text))
+        elif op == diff_match_patch.DIFF_DELETE:  # type: ignore[no-untyped-call]
+            remove_spans.append(model.DiffSpan(op="delete", text=text))
+        elif op == diff_match_patch.DIFF_INSERT:  # type: ignore[no-untyped-call]
+            add_spans.append(model.DiffSpan(op="insert", text=text))
+
+    return _ensure_spans(remove_spans), _ensure_spans(add_spans)
+
+
+def _should_char_diff(old_line: str, new_line: str) -> bool:
+    return len(old_line) <= _MAX_LINE_LENGTH_FOR_CHAR_DIFF and len(new_line) <= _MAX_LINE_LENGTH_FOR_CHAR_DIFF
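A quick look at what the builder returns for a two-line change: line-level hunks from difflib plus char-level spans from diff-match-patch on replaced line pairs. This sketch assumes klaude-code 1.2.23 and its diff-match-patch dependency are installed; the file name "math_utils.py" is illustrative only.

    from klaude_code.core.tool.file.diff_builder import build_structured_diff

    before = "def add(a, b):\n    return a+b\n"
    after = "def add(a: int, b: int) -> int:\n    return a + b\n"

    file_diff = build_structured_diff(before, after, file_path="math_utils.py").files[0]
    print(file_diff.stats_add, file_diff.stats_remove)  # both lines changed: 2 2

    for line in file_diff.lines:
        # kind is "ctx", "gap", "add", or "remove"; spans mark equal/insert/delete
        # runs within a replaced line pair, which drives char-level highlighting.
        print(line.kind, line.new_line_no, [(span.op, span.text) for span in line.spans])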
klaude_code/core/tool/file/edit_tool.py CHANGED
@@ -8,7 +8,8 @@ from pathlib import Path
 
 from pydantic import BaseModel, Field
 
-from klaude_code.core.tool.file._utils import file_exists, is_directory, read_text, write_text
+from klaude_code.core.tool.file._utils import file_exists, hash_text_sha256, is_directory, read_text, write_text
+from klaude_code.core.tool.file.diff_builder import build_structured_diff
 from klaude_code.core.tool.tool_abc import ToolABC, load_desc
 from klaude_code.core.tool.tool_context import get_current_file_tracker
 from klaude_code.core.tool.tool_registry import register
@@ -55,7 +56,6 @@ class EditTool(ToolABC):
         },
     )
 
-    # Validation utility for MultiEdit integration
     @classmethod
     def valid(
         cls, *, content: str, old_string: str, new_string: str, replace_all: bool
@@ -74,7 +74,6 @@ class EditTool(ToolABC):
             )
         return None
 
-    # Execute utility for MultiEdit integration
     @classmethod
     def execute(cls, *, content: str, old_string: str, new_string: str, replace_all: bool) -> str:
         if old_string == "":
@@ -112,6 +111,7 @@ class EditTool(ToolABC):
 
         # FileTracker checks (only for editing existing files)
         file_tracker = get_current_file_tracker()
+        tracked_status: model.FileStatus | None = None
         if not file_exists(file_path):
             # We require reading before editing
             return model.ToolResultItem(
@@ -125,17 +125,6 @@ class EditTool(ToolABC):
                 status="error",
                 output=("File has not been read yet. Read it first before writing to it."),
             )
-        try:
-            current_mtime = Path(file_path).stat().st_mtime
-        except Exception:
-            current_mtime = tracked_status.mtime
-        if current_mtime != tracked_status.mtime:
-            return model.ToolResultItem(
-                status="error",
-                output=(
-                    "File has been modified externally. Either by user or a linter. Read it first before writing to it."
-                ),
-            )
 
         # Edit existing file: validate and apply
         try:
@@ -146,6 +135,31 @@ class EditTool(ToolABC):
                 output="File has not been read yet. Read it first before writing to it.",
             )
 
+        # Re-check external modifications using content hash when available.
+        if tracked_status is not None:
+            if tracked_status.content_sha256 is not None:
+                current_sha256 = hash_text_sha256(before)
+                if current_sha256 != tracked_status.content_sha256:
+                    return model.ToolResultItem(
+                        status="error",
+                        output=(
+                            "File has been modified externally. Either by user or a linter. Read it first before writing to it."
+                        ),
+                    )
+            else:
+                # Backward-compat: old sessions only stored mtime.
+                try:
+                    current_mtime = Path(file_path).stat().st_mtime
+                except Exception:
+                    current_mtime = tracked_status.mtime
+                if current_mtime != tracked_status.mtime:
+                    return model.ToolResultItem(
+                        status="error",
+                        output=(
+                            "File has been modified externally. Either by user or a linter. Read it first before writing to it."
+                        ),
+                    )
+
         err = cls.valid(
             content=before,
             old_string=args.old_string,
@@ -187,15 +201,18 @@ class EditTool(ToolABC):
                 n=3,
             )
         )
-        diff_text = "\n".join(diff_lines)
-        ui_extra = model.DiffTextUIExtra(diff_text=diff_text)
+        ui_extra = build_structured_diff(before, after, file_path=file_path)
 
-        # Update tracker with new mtime
+        # Update tracker with new mtime and content hash
        if file_tracker is not None:
             with contextlib.suppress(Exception):
                 existing = file_tracker.get(file_path)
                 is_mem = existing.is_memory if existing else False
-                file_tracker[file_path] = model.FileStatus(mtime=Path(file_path).stat().st_mtime, is_memory=is_mem)
+                file_tracker[file_path] = model.FileStatus(
+                    mtime=Path(file_path).stat().st_mtime,
+                    content_sha256=hash_text_sha256(after),
+                    is_memory=is_mem,
+                )
 
         # Build output message
         if args.replace_all:
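The staleness check now prefers the stored content hash and only falls back to mtime for tracker entries written by older sessions. A stdlib-only sketch of that decision; FileStatus and is_externally_modified here are stand-ins for the klaude_code model and the inline check above, not the library's API.

    import hashlib
    import os
    from dataclasses import dataclass


    @dataclass
    class FileStatus:
        mtime: float
        content_sha256: str | None = None  # absent in entries from older sessions


    def is_externally_modified(path: str, tracked: FileStatus) -> bool:
        with open(path, encoding="utf-8") as f:
            current = f.read()
        if tracked.content_sha256 is not None:
            # Comparing content hashes avoids false positives when a tool rewrites
            # identical bytes or merely touches the mtime.
            return hashlib.sha256(current.encode("utf-8")).hexdigest() != tracked.content_sha256
        # Backward-compat path: only mtime was recorded.
        try:
            current_mtime = os.stat(path).st_mtime
        except OSError:
            current_mtime = tracked.mtime
        return current_mtime != tracked.mtime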