wcgw 3.0.7__py3-none-any.whl → 4.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of wcgw might be problematic.
- wcgw/client/bash_state/bash_state.py +184 -13
- wcgw/client/diff-instructions.txt +29 -15
- wcgw/client/file_ops/diff_edit.py +44 -47
- wcgw/client/file_ops/search_replace.py +107 -72
- wcgw/client/mcp_server/server.py +7 -3
- wcgw/client/memory.py +5 -2
- wcgw/client/modes.py +19 -10
- wcgw/client/repo_ops/display_tree.py +3 -3
- wcgw/client/repo_ops/file_stats.py +152 -0
- wcgw/client/repo_ops/repo_context.py +147 -6
- wcgw/client/tool_prompts.py +14 -16
- wcgw/client/tools.py +496 -81
- wcgw/relay/serve.py +8 -53
- wcgw/types_.py +103 -16
- {wcgw-3.0.7.dist-info → wcgw-4.1.0.dist-info}/METADATA +39 -20
- {wcgw-3.0.7.dist-info → wcgw-4.1.0.dist-info}/RECORD +21 -20
- wcgw_cli/anthropic_client.py +1 -1
- wcgw_cli/openai_client.py +1 -1
- {wcgw-3.0.7.dist-info → wcgw-4.1.0.dist-info}/WHEEL +0 -0
- {wcgw-3.0.7.dist-info → wcgw-4.1.0.dist-info}/entry_points.txt +0 -0
- {wcgw-3.0.7.dist-info → wcgw-4.1.0.dist-info}/licenses/LICENSE +0 -0
wcgw/client/file_ops/search_replace.py
CHANGED
@@ -1,71 +1,83 @@
 import re
-from typing import Callable
+from typing import Callable, Optional
 
 from .diff_edit import FileEditInput, FileEditOutput, SearchReplaceMatchError
 
 # Global regex patterns
 SEARCH_MARKER = re.compile(r"^<<<<<<+\s*SEARCH\s*$")
-DIVIDER_MARKER = re.compile(r"^======*\s*$")
+DIVIDER_MARKER = re.compile(r"^======*\s*$")
 REPLACE_MARKER = re.compile(r"^>>>>>>+\s*REPLACE\s*$")
 
+
 class SearchReplaceSyntaxError(Exception):
     def __init__(self, message: str):
-        message =f"""Got syntax error while parsing search replace blocks:
+        message = f"""Got syntax error while parsing search replace blocks:
 {message}
 ---
 
 Make sure blocks are in correct sequence, and the markers are in separate lines:
 
-<{
+<{"<<<<<< SEARCH"}
 example old
 =======
 example new
->{
+>{">>>>>> REPLACE"}
 
 """
         super().__init__(message)
 
+
 def search_replace_edit(
     lines: list[str], original_content: str, logger: Callable[[str], object]
 ) -> tuple[str, str]:
     if not lines:
         raise SearchReplaceSyntaxError("Error: No input to search replace edit")
-
+
     original_lines = original_content.split("\n")
     n_lines = len(lines)
     i = 0
     search_replace_blocks = list[tuple[list[str], list[str]]]()
-
+
     while i < n_lines:
         if SEARCH_MARKER.match(lines[i]):
             line_num = i + 1
             search_block = []
             i += 1
-
+
             while i < n_lines and not DIVIDER_MARKER.match(lines[i]):
                 if SEARCH_MARKER.match(lines[i]) or REPLACE_MARKER.match(lines[i]):
-                    raise SearchReplaceSyntaxError(
+                    raise SearchReplaceSyntaxError(
+                        f"Line {i + 1}: Found stray marker in SEARCH block: {lines[i]}"
+                    )
                 search_block.append(lines[i])
                 i += 1
-
+
             if i >= n_lines:
-                raise SearchReplaceSyntaxError(
-
+                raise SearchReplaceSyntaxError(
+                    f"Line {line_num}: Unclosed SEARCH block - missing ======= marker"
+                )
+
             if not search_block:
-                raise SearchReplaceSyntaxError(
-
+                raise SearchReplaceSyntaxError(
+                    f"Line {line_num}: SEARCH block cannot be empty"
+                )
+
             i += 1
             replace_block = []
-
+
             while i < n_lines and not REPLACE_MARKER.match(lines[i]):
                 if SEARCH_MARKER.match(lines[i]) or DIVIDER_MARKER.match(lines[i]):
-                    raise SearchReplaceSyntaxError(
+                    raise SearchReplaceSyntaxError(
+                        f"Line {i + 1}: Found stray marker in REPLACE block: {lines[i]}"
+                    )
                 replace_block.append(lines[i])
                 i += 1
-
+
             if i >= n_lines:
-                raise SearchReplaceSyntaxError(
-
+                raise SearchReplaceSyntaxError(
+                    f"Line {line_num}: Unclosed block - missing REPLACE marker"
+                )
+
             i += 1
 
             for line in search_block:
@@ -78,7 +90,9 @@ def search_replace_edit(
             search_replace_blocks.append((search_block, replace_block))
         else:
             if REPLACE_MARKER.match(lines[i]) or DIVIDER_MARKER.match(lines[i]):
-                raise SearchReplaceSyntaxError(
+                raise SearchReplaceSyntaxError(
+                    f"Line {i + 1}: Found stray marker outside block: {lines[i]}"
+                )
             i += 1
 
     if not search_replace_blocks:
@@ -86,9 +100,10 @@ def search_replace_edit(
             "No valid search replace blocks found, ensure your SEARCH/REPLACE blocks are formatted correctly"
         )
 
-    edited_content, comments_ =
-        original_lines,
+    edited_content, comments_ = edit_with_individual_fallback(
+        original_lines, search_replace_blocks
     )
+
    edited_file = "\n".join(edited_content)
    if not comments_:
        comments = "Edited successfully"
@@ -100,61 +115,81 @@ def search_replace_edit(
     return edited_file, comments
 
 
-def
-
-
-
-
-
-
-    if
-        return
-
+def identify_first_differing_block(
+    best_matches: list[FileEditOutput],
+) -> Optional[list[str]]:
+    """
+    Identify the first search block that differs across multiple best matches.
+    Returns the search block content that first shows different matches.
+    """
+    if not best_matches or len(best_matches) <= 1:
+        return None
+
+    # First, check if the number of blocks differs (shouldn't happen, but let's be safe)
+    block_counts = [len(match.edited_with_tolerances) for match in best_matches]
+    if not all(count == block_counts[0] for count in block_counts):
+        # If block counts differ, just return the first search block as problematic
+        return (
+            best_matches[0].orig_search_blocks[0]
+            if best_matches[0].orig_search_blocks
+            else None
+        )
+
+    # Go through each block position and see if the slices differ
+    for i in range(min(block_counts)):
+        slices = [match.edited_with_tolerances[i][0] for match in best_matches]
 
-
+        # Check if we have different slices for this block across matches
+        if any(s.start != slices[0].start or s.stop != slices[0].stop for s in slices):
+            # We found our differing block - return the search block content
+            if i < len(best_matches[0].orig_search_blocks):
+                return best_matches[0].orig_search_blocks[i]
+            else:
+                return None
+
+    # If we get here, we couldn't identify a specific differing block
+    return None
+
+
+def edit_with_individual_fallback(
+    original_lines: list[str], search_replace_blocks: list[tuple[list[str], list[str]]]
+) -> tuple[list[str], set[str]]:
+    outputs = FileEditInput(original_lines, 0, search_replace_blocks, 0).edit_file()
     best_matches, is_error = FileEditOutput.get_best_match(outputs)
 
-
-    best_matches[0].replace_or_throw(3)
-
+    try:
+        edited_content, comments_ = best_matches[0].replace_or_throw(3)
+    except SearchReplaceMatchError:
+        if len(search_replace_blocks) > 1:
+            # Try one at a time
+            all_comments = set[str]()
+            running_lines = list(original_lines)
+            for block in search_replace_blocks:
+                running_lines, comments_ = edit_with_individual_fallback(
+                    running_lines, [block]
+                )
+                all_comments |= comments_
+            return running_lines, all_comments
+        raise
+    assert not is_error
 
     if len(best_matches) > 1:
-        #
-
+        # Find the first block that differs across matches
+        first_diff_block = identify_first_differing_block(best_matches)
+        if first_diff_block is not None:
+            block_content = "\n".join(first_diff_block)
             raise SearchReplaceMatchError(f"""
-
-
-
-
-
+The following block matched more than once:
+```
+{block_content}
+```
+Consider adding more context before and after this block to make the match unique.
 """)
-
         else:
-
-
-
-
-
-
-
-                original_lines, search_replace_blocks, original_lines, set(), 0
-            )
-        except Exception:
-            raise Exception(f"""
-The following block matched more than once:
----
-```
-{'\n'.join(current_blocks[-1][0])}
-```
-""")
-
-        best_match = best_matches[0]
-        running_lines, comments = best_match.replace_or_throw(3)
-        running_comments = running_comments | comments
-        return greedy_context_replace(
-            original_lines,
-            search_replace_blocks,
-            running_lines,
-            running_comments,
-            current_block_offset + 1,
-        )
+            raise SearchReplaceMatchError("""
+One of the blocks matched more than once
+
+Consider adding more context before and after all the blocks to make the match unique.
+""")
+
+    return edited_content, comments_
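For orientation, a minimal usage sketch (not part of the package) of the block syntax this parser accepts; the module path follows the file path above, the file content is invented, and only the function signature is taken from the diff:

# Hypothetical call to search_replace_edit with one SEARCH/REPLACE block.
from wcgw.client.file_ops.search_replace import search_replace_edit

original = "def greet():\n    print('hi')\n"
blocks = '''<<<<<<< SEARCH
    print('hi')
=======
    print('hello')
>>>>>>> REPLACE'''

edited, comments = search_replace_edit(blocks.split("\n"), original, print)
# edited holds the updated file text; comments is "Edited successfully"
# when the edit applies cleanly.

When the combined application raises SearchReplaceMatchError, the new edit_with_individual_fallback retries the blocks one at a time, and identify_first_differing_block is used to name the block whose match was ambiguous.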
wcgw/client/mcp_server/server.py
CHANGED
@@ -16,7 +16,7 @@ from wcgw.client.tool_prompts import TOOL_PROMPTS
 from ...types_ import (
     Initialize,
 )
-from ..bash_state.bash_state import CONFIG, BashState
+from ..bash_state.bash_state import CONFIG, BashState, get_tmpdir
 from ..tools import (
     Context,
     default_enc,
@@ -155,9 +155,13 @@ async def main() -> None:
     global BASH_STATE
     CONFIG.update(3, 55, 5)
     version = str(importlib.metadata.version("wcgw"))
-
+
+    # starting_dir is inside tmp dir
+    tmp_dir = get_tmpdir()
+    starting_dir = os.path.join(tmp_dir, "claude_playground")
+
     with BashState(
-        Console(),
+        Console(), starting_dir, None, None, None, None, True, None
     ) as BASH_STATE:
         BASH_STATE.console.log("wcgw version: " + version)
         # Run the server using stdin/stdout streams
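For context, a rough sketch of the startup directory the server now passes to BashState; get_tmpdir's exact behavior is not shown in this diff, so a plausible stand-in is used:

import os
import tempfile

def get_tmpdir_stub() -> str:
    # Stand-in for wcgw's get_tmpdir(); assumed to resolve to the system temp dir.
    return os.environ.get("TMPDIR", tempfile.gettempdir())

starting_dir = os.path.join(get_tmpdir_stub(), "claude_playground")
print(starting_dir)  # e.g. /tmp/claude_playground on Linux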
wcgw/client/memory.py
CHANGED
@@ -2,7 +2,7 @@ import json
 import os
 import re
 import shlex
-from typing import Any, Callable, Optional
+from typing import Any, Callable, Optional, TypeVar
 
 from ..types_ import ContextSave
 
@@ -59,7 +59,10 @@ def save_memory(
     return memory_file_full
 
 
-
+T = TypeVar("T")
+
+
+def load_memory(
     task_id: str,
     max_tokens: Optional[int],
     encoder: Callable[[str], list[T]],
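The module-level TypeVar makes load_memory generic over the token type T produced by the encoder. A toy encoder with the required Callable[[str], list[T]] shape (T = int here); the rest of load_memory's parameters are not shown in this hunk, so the call itself is only sketched in a comment:

def encode(text: str) -> list[int]:
    # Toy tokenizer standing in for a real encoder such as tiktoken's encode.
    return [ord(ch) for ch in text]

# load_memory("some-task-id", max_tokens=4000, encoder=encode, ...)  # shape only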
wcgw/client/modes.py
CHANGED
@@ -55,38 +55,38 @@ You are now running in "code_writer" mode.
 """
 
     path_prompt = """
-- You are allowed to
+- You are allowed to edit files in the provided repository only.
 """
 
     if allowed_file_edit_globs != "all":
         if allowed_file_edit_globs:
             path_prompt = f"""
-- You are allowed to
+- You are allowed to edit files for files matching only the following globs: {", ".join(allowed_file_edit_globs)}
 """
         else:
             path_prompt = """
-- You are not allowed to
+- You are not allowed to edit files.
 """
     base += path_prompt
 
     path_prompt = """
-- You are allowed to
+- You are allowed to write files in the provided repository only.
 """
 
     if all_write_new_globs != "all":
         if all_write_new_globs:
             path_prompt = f"""
-- You are allowed to
+- You are allowed to write files files matching only the following globs: {", ".join(allowed_file_edit_globs)}
 """
         else:
             path_prompt = """
-- You are not allowed to
+- You are not allowed to write files.
 """
     base += path_prompt
 
     run_command_common = """
 - Do not use Ctrl-c interrupt commands without asking the user, because often the programs don't show any update but they still are running.
-- Do not use echo to write multi-line files, always use
+- Do not use echo to write multi-line files, always use FileWriteOrEdit tool to update a code.
 - Do not provide code snippets unless asked by the user, instead directly add/edit the code.
 - You should use the provided bash execution, reading and writing file tools to complete objective.
 - First understand about the project by getting the folder structure (ignoring .git, node_modules, venv, etc.)
@@ -125,8 +125,9 @@ Instructions:
 - Do not install new tools/packages before ensuring no such tools/package or an alternative already exists.
 - Do not use artifacts if you have access to the repository and not asked by the user to provide artifacts/snippets. Directly create/update using wcgw tools
 - Do not use Ctrl-c or interrupt commands without asking the user, because often the programs don't show any update but they still are running.
-- Do not use echo to write multi-line files, always use
-
+- Do not use echo to write multi-line files, always use FileWriteOrEdit tool to update a code.
+- Provide as many file paths as you need in ReadFiles in one go.
+
 Additional instructions:
 Always run `pwd` if you get any file or directory not found error to make sure you're not lost, or to get absolute cwd.
 
@@ -134,18 +135,26 @@ Additional instructions:
 
 
 """
-ARCHITECT_PROMPT = """
+ARCHITECT_PROMPT = """
+# Instructions
+You are now running in "architect" mode. This means
 - You are not allowed to edit or update any file. You are not allowed to create any file.
 - You are not allowed to run any commands that may change disk, system configuration, packages or environment. Only read-only commands are allowed.
 - Only run commands that allows you to explore the repository, understand the system or read anything of relevance.
 - Do not use Ctrl-c or interrupt commands without asking the user, because often the programs don't show any update but they still are running.
 - You are not allowed to change directory (bash will run in -r mode)
 - Share only snippets when any implementation is requested.
+- Provide as many file paths as you need in ReadFiles in one go.
+
+# Disallowed tools (important!)
+- FileWriteOrEdit
 
+# Response instructions
 Respond only after doing the following:
 - Read as many relevant files as possible.
 - Be comprehensive in your understanding and search of relevant files.
 - First understand about the project by getting the folder structure (ignoring .git, node_modules, venv, etc.)
+- Share minimal snippets higlighting the changes (avoid large number of lines in the snippets, use ... comments)
 """
 
 
wcgw/client/repo_ops/display_tree.py
CHANGED
@@ -33,13 +33,13 @@ class DirectoryTree:
         abs_path = self.root / rel_path
 
         if not abs_path.exists():
-
+            return
 
         if not abs_path.is_file():
-
+            return
 
         if not str(abs_path).startswith(str(self.root)):
-
+            return
 
         self.expanded_files.add(abs_path)
 
wcgw/client/repo_ops/file_stats.py
ADDED
@@ -0,0 +1,152 @@
+import hashlib
+import json
+import os
+import sys
+from typing import Any, Callable, Dict, TypeVar, cast
+
+T = TypeVar("T")  # Type variable for generic functions
+F = TypeVar("F", bound=Callable[..., Any])  # Type variable for decorated functions
+
+
+class FileStats:
+    """Track read, edit, and write counts for a single file."""
+
+    def __init__(self) -> None:
+        self.read_count: int = 0
+        self.edit_count: int = 0
+        self.write_count: int = 0
+
+    def increment_read(self) -> None:
+        """Increment the read counter."""
+        self.read_count += 1
+
+    def increment_edit(self) -> None:
+        """Increment the edit counter."""
+        self.edit_count += 1
+
+    def increment_write(self) -> None:
+        """Increment the write counter."""
+        self.write_count += 1
+
+    def to_dict(self) -> Dict[str, int]:
+        """Convert to a dictionary for serialization."""
+        return {
+            "read_count": self.read_count,
+            "edit_count": self.edit_count,
+            "write_count": self.write_count,
+        }
+
+    @classmethod
+    def from_dict(cls, data: Dict[str, Any]) -> "FileStats":
+        """Create from a serialized dictionary."""
+        stats = cls()
+        stats.read_count = data.get("read_count", 0)
+        stats.edit_count = data.get("edit_count", 0)
+        stats.write_count = data.get("write_count", 0)
+        return stats
+
+
+class WorkspaceStats:
+    """Track file operations statistics for an entire workspace."""
+
+    def __init__(self) -> None:
+        self.files: Dict[str, FileStats] = {}  # filepath -> FileStats
+
+    def to_dict(self) -> Dict[str, Any]:
+        """Convert to a dictionary for serialization."""
+        return {"files": {k: v.to_dict() for k, v in self.files.items()}}
+
+    @classmethod
+    def from_dict(cls, data: Dict[str, Any]) -> "WorkspaceStats":
+        """Create from a serialized dictionary."""
+        stats = cls()
+        files_data = data.get("files", {})
+        stats.files = {k: FileStats.from_dict(v) for k, v in files_data.items()}
+        return stats
+
+
+def safe_stats_operation(func: F) -> F:
+    """
+    Decorator to safely perform stats operations without affecting core functionality.
+    If an exception occurs, it logs the error but allows the program to continue.
+    """
+
+    def wrapper(*args: Any, **kwargs: Any) -> Any:
+        try:
+            return func(*args, **kwargs)
+        except Exception as e:
+            # Log the error but continue with the operation
+            print(f"Warning: Stats tracking error - {e}", file=sys.stderr)
+            return None
+
+    # This is a workaround for proper typing with decorators
+    return cast(F, wrapper)
+
+
+def get_stats_path(workspace_path: str) -> str:
+    """
+    Get the path to the stats file for a workspace using a hash-based approach.
+
+    Args:
+        workspace_path: The full path of the workspace directory.
+
+    Returns:
+        The path to the stats file.
+    """
+    # Normalize the path
+    workspace_path = os.path.normpath(os.path.expanduser(workspace_path))
+
+    # Get the basename of the workspace path for readability
+    workspace_name = os.path.basename(workspace_path)
+    if not workspace_name:  # In case of root directory
+        workspace_name = "root"
+
+    # Create a hash of the full path
+    path_hash = hashlib.md5(workspace_path.encode()).hexdigest()
+
+    # Combine to create a unique identifier that's still somewhat readable
+    filename = f"{workspace_name}_{path_hash}.json"
+
+    # Create directory if it doesn't exist
+    xdg_data_dir = os.environ.get("XDG_DATA_HOME", os.path.expanduser("~/.local/share"))
+    stats_dir = os.path.join(xdg_data_dir, "wcgw/workspace_stats")
+    os.makedirs(stats_dir, exist_ok=True)
+
+    return os.path.join(stats_dir, filename)
+
+
+@safe_stats_operation
+def load_workspace_stats(workspace_path: str) -> WorkspaceStats:
+    """
+    Load the stats for a workspace, or create empty stats if not exists.
+
+    Args:
+        workspace_path: The full path of the workspace directory.
+
+    Returns:
+        WorkspaceStats object containing file operation statistics.
+    """
+    stats_path = get_stats_path(workspace_path)
+    if os.path.exists(stats_path):
+        try:
+            with open(stats_path, "r") as f:
+                return WorkspaceStats.from_dict(json.load(f))
+        except (json.JSONDecodeError, KeyError, ValueError):
+            # Handle corrupted file
+            return WorkspaceStats()
+    else:
+        return WorkspaceStats()
+
+
+@safe_stats_operation
+def save_workspace_stats(workspace_path: str, stats: WorkspaceStats) -> None:
+    """
+    Save the stats for a workspace.
+
+    Args:
+        workspace_path: The full path of the workspace directory.
+        stats: WorkspaceStats object to save.
+    """
+    stats_path = get_stats_path(workspace_path)
+    with open(stats_path, "w") as f:
+        json.dump(stats.to_dict(), f, indent=2)