codedthemes-cli 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- codedthemes/__init__.py +4 -0
- codedthemes/cli.py +372 -0
- codedthemes/config.py +26 -0
- codedthemes/mcp_client.py +64 -0
- codedthemes/patch_utils.py +18 -0
- codedthemes/repo_utils.py +40 -0
- codedthemes/sync_manager.py +73 -0
- codedthemes_cli-0.1.0.dist-info/METADATA +49 -0
- codedthemes_cli-0.1.0.dist-info/RECORD +12 -0
- codedthemes_cli-0.1.0.dist-info/WHEEL +5 -0
- codedthemes_cli-0.1.0.dist-info/entry_points.txt +2 -0
- codedthemes_cli-0.1.0.dist-info/top_level.txt +1 -0
codedthemes/__init__.py
ADDED
codedthemes/cli.py
ADDED
|
@@ -0,0 +1,372 @@
|
|
|
1
|
+
import argparse
|
|
2
|
+
import sys
|
|
3
|
+
import os
|
|
4
|
+
import jwt
|
|
5
|
+
import requests
|
|
6
|
+
import json
|
|
7
|
+
import shutil
|
|
8
|
+
from getpass import getpass
|
|
9
|
+
|
|
10
|
+
from .config import save_config, load_config
|
|
11
|
+
from .mcp_client import MCPClient
|
|
12
|
+
from .repo_utils import detect_repo_root, zip_repo
|
|
13
|
+
from .patch_utils import apply_patch
|
|
14
|
+
from .sync_manager import SyncManager
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
def handle_login(server_url: str | None = None):
    """Interactively authenticate against the MCP server.

    Prompts for email and license key, calls the server's ``login`` tool,
    mirrors the returned access token to ``~/.mcp_token``, persists it in the
    CLI config, and exits with status 1 on any failure.

    Args:
        server_url: Optional MCP server URL override; when omitted the URL
            resolved by ``MCPClient`` (env var / config / default) is used.
    """
    email = input("Email: ").strip()
    # Use getpass (already imported at module level) so the license key is
    # not echoed to the terminal.
    license_key = getpass("License key: ").strip()

    client = MCPClient()
    if server_url:
        client.server_url = server_url

    try:
        result = client.call("login", {
            "email": email,
            "license_key": license_key
        })

        if isinstance(result, dict):
            # The server might have returned {"status": "error", "message": ...}
            if result.get("status") == "error":
                print(f"✖ Login failed: {result.get('message', 'Unknown error')}")
                sys.exit(1)
            # The app.py bridge wraps string returns in {"message": "..."}
            elif "message" in result and "Login failed" in result["message"]:
                print(f"✖ {result['message']}")
                sys.exit(1)

        # 1. Try to get the token directly from the API response (JSON case).
        token = result.get("access_token") if isinstance(result, dict) else None

        # 2. Fallback: the server-side flow may have written the token to a
        # local file instead of returning it in the response body.
        token_file = os.path.expanduser("~/.mcp_token")
        if not token and os.path.exists(token_file):
            with open(token_file, 'r') as f:
                token = f.read().strip()

        # 3. Final verification: fail loudly if we STILL don't have a token.
        if not token:
            print("✖ Login failed: No access token returned. Please check your credentials.")
            sys.exit(1)

        # Write the .mcp_token file natively so the CLI acts like local MCP.
        try:
            with open(token_file, 'w') as f:
                f.write(token)
        except OSError as e:
            # Best-effort mirror; the config copy below is authoritative.
            print(f"Warning: Could not write {token_file}: {e}")

        save_config({
            "server_url": client.server_url,
            "access_token": token
        })

        print("✔ Login successful.")
    except Exception as e:
        err_msg = str(e)
        if "timed out" in err_msg.lower():
            # Plain string: the original used an f-string with no placeholders.
            print("✖ Login failed: License server connection timed out. Please try again.")
        else:
            print(f"✖ Login failed: {err_msg}")
        sys.exit(1)
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
def handle_init():
    """Upload the current repository to the cloud and record local sync state.

    Detects the repo root, requires a prior login, uploads a zip of the repo,
    stores the returned workspace id in the CLI config, and snapshots file
    hashes so later ``apply`` runs can detect manual edits. Exits with status
    1 on any failure.
    """
    try:
        repo_root = detect_repo_root()
        print(f"🔍 Found repository at {repo_root}")

        client = MCPClient()
        if not client.token:
            print("✖ Not logged in. Please run 'codedthemes login' first.")
            sys.exit(1)

        # 0. Best-effort: extract the user email from the JWT for registry
        # profiling. Signature is deliberately not verified client-side.
        user_email = "unknown_user"
        try:
            decoded = jwt.decode(client.token, options={"verify_signature": False})
            user_email = decoded.get("email", "unknown_user")
        except Exception:
            # Malformed token: fall back to the placeholder identity.
            pass

        repo_abs_path = str(repo_root)
        print("📦 Zipping and uploading repository (this may take a moment)...")
        upload_result = client.upload_workspace(repo_abs_path, user_email)
        workspace_id = upload_result.get("workspace_id")

        if not workspace_id:
            print("✖ Upload failed: No workspace ID returned.")
            sys.exit(1)

        # 1. Save the path -> workspace mapping in config.json.
        config = load_config()
        last_workspaces = config.get("workspaces", {})
        last_workspaces[repo_abs_path] = workspace_id
        config["workspaces"] = last_workspaces
        save_config(config)

        # 2. Initialize sync state (file hashes) in workspace.json.
        sync_manager = SyncManager()
        sync_manager.update_sync_state(repo_abs_path, workspace_id, user_email)

        # Plain strings: the originals were f-strings with no placeholders.
        print("✔ Workspace initialized successfully.")
        print(f"✔ Workspace ID: {workspace_id}")
        print("\nYour repository is now synced to the cloud. You can now use the IDE to plan and apply changes.")

    except Exception as e:
        print(f"✖ Error during initialization: {e}")
        sys.exit(1)
|
|
125
|
+
|
|
126
|
+
|
|
127
|
+
def _print_integration_report(res_data: dict, header: str):
    """Print the per-file integration report embedded in a tool response, if any.

    The server may return ``details`` either as a list of report items or as a
    dict wrapping the list under ``"report"``; both shapes are handled.
    """
    details_raw = res_data.get("details", [])
    if isinstance(details_raw, dict):
        details_list = details_raw.get("report", [])
    elif isinstance(details_raw, list):
        details_list = details_raw
    else:
        details_list = []

    if details_list:
        print(header)
        for item in details_list:
            status_icon = "✔" if item.get("success") else "✖"
            f_name = item.get("file_path")
            msg = item.get("message", "")
            print(f" [{status_icon}] {f_name}: {msg}")


def handle_apply(query: str):
    """Run the analyze -> plan -> execute pipeline for a natural-language query.

    Reuses (or re-uploads) the cloud workspace, asks the server to plan
    changes, confirms the plan interactively, executes it remotely, and then
    mirrors the returned file updates/deletions into the local working tree.

    Args:
        query: Natural-language description of the changes to make.
    """
    try:
        repo_root = detect_repo_root()
        print(f"🔍 Found repository at {repo_root}")

        client = MCPClient()

        # 0. Best-effort: extract the user email from the JWT for registry
        # profiling. Signature is deliberately not verified client-side.
        user_email = "unknown_user"
        if client.token:
            try:
                decoded = jwt.decode(client.token, options={"verify_signature": False})
                user_email = decoded.get("email", "unknown_user")
            except Exception:
                pass

        sync_manager = SyncManager()

        config = load_config()
        last_workspaces = config.get("workspaces", {})
        repo_abs_path = str(repo_root)
        repo_abs = os.path.abspath(repo_abs_path)
        workspace_id = last_workspaces.get(repo_abs_path)

        # 1. Reuse the cached workspace if the server still has it; otherwise
        # fall through to a fresh upload.
        if workspace_id:
            try:
                check = client.call("check_workspace", {"workspace_id": workspace_id})
                if isinstance(check, dict) and check.get("status") == "ok":
                    print(f"✔ Using active workspace: {workspace_id}")
                else:
                    workspace_id = None
            except Exception:
                # Network/server failure: treat the cached workspace as expired.
                workspace_id = None

        if not workspace_id:
            print("📦 Zipping and uploading repository (this may take a moment)...")
            upload_result = client.upload_workspace(repo_abs_path, user_email)
            workspace_id = upload_result.get("workspace_id")

            # Save workspace_id for future reuse.
            last_workspaces[repo_abs_path] = workspace_id
            config["workspaces"] = last_workspaces
            save_config(config)

            # Initialize the local sync state (hash baseline).
            sync_manager.update_sync_state(repo_abs_path, workspace_id, user_email)

        # Step 1: repo analyzer.
        print("🚀 Analyzing repository...")
        analysis = client.call("repo_analyzer", {
            "repo_path": repo_abs_path,
            "workspace_id": workspace_id
        })

        if isinstance(analysis, dict) and analysis.get("status") == "error":
            print(f"✖ Analysis failed: {analysis.get('message', 'Unknown error')}")
            return

        if isinstance(analysis, str):
            # The tool might return a plain string message to show in the CLI.
            if "Analysis Skipped" in analysis:
                print("✔ Repository is up-to-date. Skipping upload.")
            else:
                print(f"✔ {analysis}")

        # Step 2: plan changes.
        print("🔍 Detecting local changes...")
        local_changes = sync_manager.get_changed_files(repo_abs_path, user_email)
        if local_changes:
            print(f" Detected {len(local_changes)} files changed locally since last apply.")

        print("🧠 Planning changes...")
        plan = client.call("plan_changes", {
            "query": query,
            "workspace_id": workspace_id,
            "repo_path": str(repo_root),
            "changes_from_cli": local_changes
        })

        # Guard: other tools return plain strings on some paths; .get() on a
        # non-dict would raise AttributeError here.
        if not isinstance(plan, dict):
            print(f"✖ Planning failed: unexpected response: {plan}")
            return
        if plan.get("status") == "error":
            print(f"✖ Planning failed: {plan.get('message', 'Unknown error')}")
            return

        target_files = plan.get("files_to_modify", [])
        plan_instructions = plan.get("plan_instructions", [])
        files_to_create = plan.get("files_to_create", [])
        files_to_delete = plan.get("files_to_delete", [])

        if not target_files and not files_to_create and not files_to_delete and not plan_instructions:
            print("Information: No changes planned.")
            return

        print("\n📝 Execution Plan:")
        if plan_instructions:
            print("Instructions:")
            for idx, inst in enumerate(plan_instructions, 1):
                print(f" {idx}. {inst}")

        if files_to_create:
            print("\nFiles to create:")
            for f in files_to_create:
                print(f" + {f}")

        print("\nFiles to modify:")
        for f in target_files:
            print(f" ~ {f}")

        if files_to_delete:
            print("\nFiles to delete:")
            for f in files_to_delete:
                print(f" - {f}")

        # Interactive confirmation gate before anything is executed.
        while True:
            choice = input("\nDo you want to proceed with this plan? (y/n): ").strip().lower()
            if choice in ['y', 'yes']:
                break
            elif choice in ['n', 'no']:
                print("✖ Pipeline terminated by user. Please provide a new query.")
                return
            else:
                print("Please enter 'y' for yes or 'n' for no.")

        # Step 3: execute plan.
        print("⚙ Executing plan...")
        result = client.call("execute_plan", {
            "query": query,
            "plan_instructions": plan_instructions,
            "files_to_modify": target_files,
            "files_to_create": files_to_create,
            "files_to_delete": files_to_delete,
            "workspace_id": workspace_id,
            "repo_path": str(repo_root)
        })

        if not result or (isinstance(result, dict) and (result.get("status") == "error" or "detail" in result)):
            error_msg = result.get("message") or result.get("detail") if isinstance(result, dict) else "Unknown execution error"
            print(f"✖ Execution failed: {error_msg}")
            return

        # --- Local patching: mirror server-side edits into the working tree ---
        # The bridge sometimes returns the payload as a JSON string.
        res_data = result
        if isinstance(result, str):
            try:
                res_data = json.loads(result)
            except json.JSONDecodeError:
                res_data = {}

        updates = res_data.get("updates_for_local", [])
        deletes = res_data.get("deleted_files", [])

        if not updates:
            print(" (!) No file updates received in the response payload.")

        if updates or deletes:
            # 1. Update/create files, confined to the repository root.
            for item in updates:
                rel_path, code = item.get("path"), item.get("code")
                if not rel_path or code is None:
                    continue

                # Normalize path separators for the host OS (e.g. Windows).
                clean_rel_path = rel_path.replace('/', os.sep)
                abs_path = os.path.abspath(os.path.join(repo_abs, clean_rel_path))

                # Security: server-supplied paths must not escape the repo
                # (e.g. via "../"); skip anything that resolves outside it.
                if not abs_path.startswith(repo_abs + os.sep):
                    print(f"✖ Skipping unsafe path outside repository: {rel_path}")
                    continue

                try:
                    os.makedirs(os.path.dirname(abs_path), exist_ok=True)
                    with open(abs_path, 'w', encoding='utf-8') as f:
                        f.write(code)
                    print(f"✔ Updated local file: {rel_path}")
                except OSError as write_err:
                    print(f"✖ Error writing {rel_path}: {write_err}")

            # 2. Delete files, with the same containment check.
            for rel_path in (deletes or []):
                clean_rel_path = rel_path.replace('/', os.sep)
                abs_path = os.path.normpath(os.path.join(repo_abs, clean_rel_path))
                if os.path.exists(abs_path) and abs_path.startswith(repo_abs + os.sep):
                    try:
                        if os.path.isfile(abs_path):
                            os.remove(abs_path)
                        elif os.path.isdir(abs_path):
                            shutil.rmtree(abs_path)
                        print(f"✔ Deleted local file/folder: {rel_path}")
                    except OSError:
                        # Best-effort deletion; leave stragglers in place.
                        pass

            # Refresh the sync state so these server-applied edits aren't
            # treated as local user changes on the next run.
            sync_manager.update_sync_state(repo_abs_path, workspace_id, user_email)
            print("✔ Local sync state updated.")

            _print_integration_report(res_data, "\n📊 Integration Report:")
        else:
            # Fallback display when nothing needed changing locally.
            msg = res_data.get("message", "Changes applied successfully (no local updates needed).")
            print(f"✔ {msg}")

            # There may still be a report explaining why files were skipped.
            _print_integration_report(res_data, "\n📊 Integration Report (Files already up-to-date):")
    except Exception as e:
        print(f"✖ Error: {e}")
        sys.exit(1)
|
|
345
|
+
|
|
346
|
+
|
|
347
|
+
def main():
    """CLI entry point: parse arguments and dispatch to the subcommand handler."""
    parser = argparse.ArgumentParser(prog="codedthemes", description="CodedThemes CLI client")
    subparsers = parser.add_subparsers(dest="command")

    login_parser = subparsers.add_parser("login", help="Login to MCP server")
    login_parser.add_argument("--server", help="MCP server URL")

    apply_parser = subparsers.add_parser("apply", help="Apply changes based on a query")
    apply_parser.add_argument("query", help="The description of changes you want to make")

    # "init" takes no extra arguments, so the parser object need not be kept.
    subparsers.add_parser("init", help="Initialize and upload repository to the cloud")

    args = parser.parse_args()

    if args.command == "login":
        handle_login(args.server)
    elif args.command == "init":
        handle_init()
    elif args.command == "apply":
        handle_apply(args.query)
    else:
        # No subcommand given: show usage instead of failing.
        parser.print_help()
|
|
369
|
+
|
|
370
|
+
|
|
371
|
+
# Script entry point: delegate to the argparse-driven CLI dispatcher.
if __name__ == "__main__":
    main()
|
codedthemes/config.py
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
import json
|
|
2
|
+
from pathlib import Path
|
|
3
|
+
|
|
4
|
+
# Stores token at ~/.codedthemes/config.json
|
|
5
|
+
CONFIG_DIR = Path.home() / ".codedthemes"
|
|
6
|
+
CONFIG_FILE = CONFIG_DIR / "config.json"
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
def ensure_config_dir():
    """Create ``~/.codedthemes`` (including missing parents) if absent.

    ``parents=True`` makes this robust when intermediate directories are
    missing; without it ``mkdir`` raises FileNotFoundError in that case.
    """
    CONFIG_DIR.mkdir(parents=True, exist_ok=True)
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def save_config(data: dict):
    """Serialize *data* as pretty-printed JSON to ``~/.codedthemes/config.json``."""
    ensure_config_dir()
    CONFIG_FILE.write_text(json.dumps(data, indent=2))
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def load_config():
    """Return the persisted config dict; {} when the file is missing or not valid JSON."""
    if not CONFIG_FILE.exists():
        return {}
    try:
        return json.loads(CONFIG_FILE.read_text())
    except json.JSONDecodeError:
        # Corrupt config is treated the same as no config at all.
        return {}
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import requests
|
|
3
|
+
from .config import load_config
|
|
4
|
+
|
|
5
|
+
class MCPClient:
    """Thin HTTP client for the CodedThemes MCP server.

    The server URL is resolved, in priority order, from the
    ``CODEDTHEMES_SERVER`` environment variable, the saved config file, and a
    built-in default. The stored bearer token (if any) is attached to every
    request.
    """

    # Default has NO trailing slash; URLs are built as f"{server_url}/path".
    DEFAULT_SERVER_URL = "https://mcp.codedthemes.com"

    def __init__(self):
        config = load_config()
        raw_url = (
            os.getenv("CODEDTHEMES_SERVER")
            or config.get("server_url")
            or self.DEFAULT_SERVER_URL
        )
        # Bug fix: the previous default ended in "/" which, combined with
        # f"{server_url}/workspaces", produced double-slash request URLs.
        self.server_url = raw_url.rstrip("/")
        self.token = config.get("access_token")

    def _auth_headers(self) -> dict:
        """Return the Authorization header dict (empty when not logged in)."""
        if self.token:
            return {"Authorization": f"Bearer {self.token}"}
        return {}

    def upload_workspace(self, repo_path: str, user_id: str):
        """Zip *repo_path* and POST it to ``/workspaces``.

        Args:
            repo_path: Absolute path of the repository to upload.
            user_id: Identifier (email) attached to the upload for profiling.

        Returns:
            The server's JSON response (expected to contain "workspace_id").

        Raises:
            Exception: on any non-200 HTTP response.

        The temporary zip file is always removed, even on failure.
        """
        from .repo_utils import zip_repo

        zip_path = zip_repo(repo_path)
        repo_name = os.path.basename(repo_path)

        try:
            with open(zip_path, 'rb') as f:
                files = {'file': (f"{repo_name}.zip", f, 'application/zip')}
                data = {'user_id': user_id, 'repo_name': repo_name}

                response = requests.post(
                    f"{self.server_url}/workspaces",
                    data=data,
                    files=files,
                    headers=self._auth_headers(),
                    timeout=300
                )

            if response.status_code != 200:
                raise Exception(f"Upload failed: {response.status_code} - {response.text}")

            return response.json()
        finally:
            # Clean up the temp archive whether or not the upload succeeded.
            if os.path.exists(zip_path):
                os.remove(zip_path)

    def call(self, tool_name: str, payload: dict):
        """POST *payload* to ``/tools/<tool_name>`` and return the parsed JSON.

        Raises:
            Exception: with a login hint on 401, or a generic server error
                message on any other non-200 status.
        """
        response = requests.post(
            f"{self.server_url}/tools/{tool_name}",
            json=payload,
            headers=self._auth_headers(),
            timeout=300
        )

        if response.status_code == 401:
            raise Exception("Unauthorized. Please run 'codedthemes login'.")

        if response.status_code != 200:
            raise Exception(f"Server error: {response.status_code} - {response.text}")

        return response.json()
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
import subprocess
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
def apply_patch(patch_text: str):
    """Apply a unified diff to the current git working tree via ``git apply -``.

    Args:
        patch_text: Patch content, fed to git on stdin.

    Raises:
        Exception: when git exits non-zero (patch did not apply cleanly),
            including git's stdout/stderr for diagnosis.
    """
    # subprocess.run replaces the manual Popen/communicate dance and cannot
    # leak a child process if an exception fires between spawn and wait.
    proc = subprocess.run(
        ["git", "apply", "-"],
        input=patch_text,
        capture_output=True,
        text=True,
    )

    if proc.returncode != 0:
        raise Exception(f"Patch failed to apply.\nStdout: {proc.stdout}\nStderr: {proc.stderr}")

    print("✔ Patch applied successfully.")
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import zipfile
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
import tempfile
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
def detect_repo_root(start_path=None):
    """Walk upward from *start_path* (default: CWD) to find the project root.

    A directory counts as the root when it contains package.json,
    pyproject.toml, ai.json, or a .git entry.

    Returns:
        pathlib.Path: the resolved root directory.

    Raises:
        Exception: when no marker is found before reaching the filesystem root.
    """
    markers = ("package.json", "pyproject.toml", "ai.json", ".git")
    candidate = Path(start_path or os.getcwd()).resolve()

    # Stop once we reach the filesystem root (whose parent is itself).
    while candidate != candidate.parent:
        if any((candidate / marker).exists() for marker in markers):
            return candidate
        candidate = candidate.parent

    raise Exception("No repository root found.")
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def zip_repo(repo_root):
    """Zip *repo_root* into a temporary file and return the archive's path.

    Heavy/build directories (node_modules, .git, venv, ...) are excluded.
    Entry names always use forward slashes so the archive unpacks correctly
    on Linux servers regardless of the client OS. The caller owns — and is
    responsible for deleting — the returned temporary file.
    """
    # Hoisted out of the walk loop: previously this set literal was rebuilt
    # on every visited directory.
    exclude_dirs = {
        "node_modules", ".git", "venv", ".next", "build", "dist",
        "out", ".cache", "target", ".idea", ".vscode", "__pycache__"
    }

    temp_zip = tempfile.NamedTemporaryFile(delete=False, suffix=".zip")
    temp_zip.close()  # Close so zipfile can reopen it by name (Windows-safe).

    with zipfile.ZipFile(temp_zip.name, "w", zipfile.ZIP_DEFLATED) as z:
        for root, dirs, files in os.walk(repo_root):
            # Prune in place so os.walk never descends into excluded dirs.
            dirs[:] = [d for d in dirs if d not in exclude_dirs]

            for file in files:
                full_path = os.path.join(root, file)
                # FORCE forward slashes in entry names for Linux compatibility.
                rel_path = os.path.relpath(full_path, repo_root).replace(os.sep, '/')
                z.write(full_path, rel_path)

    return temp_zip.name
|
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import json
|
|
3
|
+
import hashlib
|
|
4
|
+
from datetime import datetime
|
|
5
|
+
|
|
6
|
+
class SyncManager:
    """
    Manages local file hashes to identify what files the user changed manually.

    Ensures `~/.codedthemes/workspace.json` is accurately tracked across CLI
    runs. State is keyed by "user_id:absolute_repo_path" so multiple users and
    repositories can share one state file.
    """

    # Directories never hashed (VCS metadata, dependencies, build artifacts).
    EXCLUDED_DIRS = {
        '.git', 'node_modules', '__pycache__', 'venv', '.next', 'build',
        'dist', 'out', '.cache', 'target', '.idea', '.vscode'
    }

    def __init__(self):
        self.config_dir = os.path.expanduser("~/.codedthemes")
        self.config_file = os.path.join(self.config_dir, "workspace.json")
        os.makedirs(self.config_dir, exist_ok=True)
        self.workspaces = self.load()

    def load(self):
        """Load persisted workspace state; return {} if missing or corrupt."""
        if os.path.exists(self.config_file):
            try:
                with open(self.config_file, 'r') as f:
                    return json.load(f)
            except (OSError, json.JSONDecodeError):
                # Unreadable/corrupt state file: start fresh rather than crash.
                pass
        return {}

    def save(self):
        """Persist the in-memory workspace state to disk."""
        with open(self.config_file, 'w') as f:
            json.dump(self.workspaces, f, indent=2)

    def compute_hashes(self, repo_path):
        """Return {relative_path: md5_hexdigest} for every tracked file.

        MD5 is used purely for change detection, not security. Files that
        cannot be read are silently skipped.
        """
        hashes = {}
        for root, dirs, files in os.walk(repo_path):
            dirs[:] = [d for d in dirs if d not in self.EXCLUDED_DIRS]
            for file in files:
                abs_path = os.path.join(root, file)
                try:
                    hasher = hashlib.md5()
                    with open(abs_path, 'rb') as f:
                        while chunk := f.read(8192):
                            hasher.update(chunk)
                    hashes[os.path.relpath(abs_path, repo_path)] = hasher.hexdigest()
                except OSError:
                    # File vanished mid-walk or is unreadable; skip it.
                    pass
        return hashes

    def update_sync_state(self, repo_path, workspace_id, user_id):
        """Record the current file hashes as the new baseline for *repo_path*."""
        repo_key = f"{user_id}:{os.path.abspath(repo_path)}"
        self.workspaces[repo_key] = {
            "workspace_id": workspace_id,
            "repo_path": os.path.abspath(repo_path),
            # NOTE(review): datetime.utcnow() is deprecated since Python 3.12;
            # kept for timestamp-format compatibility with existing state files.
            "last_sync": datetime.utcnow().isoformat(),
            "file_hashes": self.compute_hashes(repo_path)
        }
        self.save()

    def get_changed_files(self, repo_path, user_id):
        """Return files changed, added, or deleted since the last baseline.

        Returns a list of dicts: {"path", "content"} for new/modified files,
        {"path", "deleted": True} for files removed locally; an empty list
        when the repository has never been synced for this user.
        """
        repo_key = f"{user_id}:{os.path.abspath(repo_path)}"
        if repo_key not in self.workspaces:
            return []

        current_hashes = self.compute_hashes(repo_path)
        last_hashes = self.workspaces[repo_key].get("file_hashes", {})
        changes = []

        for rel_path, fhash in current_hashes.items():
            if last_hashes.get(rel_path) != fhash:
                try:
                    with open(os.path.join(repo_path, rel_path), 'r', encoding='utf-8', errors='ignore') as f:
                        changes.append({"path": rel_path, "content": f.read()})
                except OSError:
                    # Unreadable file: leave it out of the change set.
                    pass

        for rel_path in last_hashes:
            if rel_path not in current_hashes:
                changes.append({"path": rel_path, "deleted": True})

        return changes
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: codedthemes-cli
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: CLI tool for Code Theme and Integration
|
|
5
|
+
Author: codedthemes
|
|
6
|
+
Requires-Python: >=3.10
|
|
7
|
+
Description-Content-Type: text/markdown
|
|
8
|
+
Requires-Dist: requests
|
|
9
|
+
Requires-Dist: pyjwt
|
|
10
|
+
|
|
11
|
+
# CodedThemes CLI
|
|
12
|
+
|
|
13
|
+
The `codedthemes` CLI is a powerful command-line interface that allows you to interact with the CodeThemeMCP server. It enables you to analyze and modify your repositories using AI, with a focus on theme and style management.
|
|
14
|
+
|
|
15
|
+
## Installation
|
|
16
|
+
|
|
17
|
+
Install the CLI globally using pip:
|
|
18
|
+
|
|
19
|
+
```bash
|
|
20
|
+
pip install codedthemes-cli
|
|
21
|
+
```
|
|
22
|
+
|
|
23
|
+
## Getting Started
|
|
24
|
+
|
|
25
|
+
### 1. Login
|
|
26
|
+
|
|
27
|
+
Before applying any changes, you must authenticate your session with the MCP Server.
|
|
28
|
+
|
|
29
|
+
```bash
|
|
30
|
+
codedthemes login
|
|
31
|
+
```
|
|
32
|
+
*You will be prompted to enter your Email and License key.*
|
|
33
|
+
|
|
34
|
+
### 2. Apply Changes
|
|
35
|
+
|
|
36
|
+
Navigate to your target project repository and run the `apply` command with your desired changes in quotes:
|
|
37
|
+
|
|
38
|
+
```bash
|
|
39
|
+
codedthemes apply "Update the theme branding from Mantis to Berry"
|
|
40
|
+
```
|
|
41
|
+
|
|
42
|
+
The CLI will automatically:
|
|
43
|
+
1. Package and securely send your repository to the remote server for analysis.
|
|
44
|
+
2. Plan the necessary code modifications using AI.
|
|
45
|
+
3. Automatically apply the patches back to your local files.
|
|
46
|
+
|
|
47
|
+
## Support
|
|
48
|
+
|
|
49
|
+
For issues or feature requests, please contact the CodedThemes support team.
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
codedthemes/__init__.py,sha256=Ht0XM-dU2-qaLlJ5fPdYEPT1MazvLjbLrdhaM3MSg3k,171
|
|
2
|
+
codedthemes/cli.py,sha256=n0apwHfD8YFEM7NsUYdr4l5U3Yk7jJKxREId-lQPGTw,15134
|
|
3
|
+
codedthemes/config.py,sha256=HWPNq36a2CLsKCBOJdVDzbv5p4bkqPMUW0ESUIqQZPU,599
|
|
4
|
+
codedthemes/mcp_client.py,sha256=kd3Lg6uDDzpYTzkuYTsER4WB29puHH-PUIRz33YP5nA,2115
|
|
5
|
+
codedthemes/patch_utils.py,sha256=fDHMH4HOjqXyA48B2gJ84dxlmXw5rT9uFpOPDy902J0,525
|
|
6
|
+
codedthemes/repo_utils.py,sha256=MrWd5SMZ2tbbsngbxYhRlk8Gp5HA3gIv3HN0IKyZ30s,1463
|
|
7
|
+
codedthemes/sync_manager.py,sha256=S8bFhvTLyLrC5c3FATtObmQxPO-Uyj-ToXaG7w962aw,3012
|
|
8
|
+
codedthemes_cli-0.1.0.dist-info/METADATA,sha256=5LGID9iggnPj99ZxTZ9Yp04VX6lGNytpgrL-pu6a_Oc,1363
|
|
9
|
+
codedthemes_cli-0.1.0.dist-info/WHEEL,sha256=YCfwYGOYMi5Jhw2fU4yNgwErybb2IX5PEwBKV4ZbdBo,91
|
|
10
|
+
codedthemes_cli-0.1.0.dist-info/entry_points.txt,sha256=HufkjM_3xdzJ9qOFVi3MgHz6ixTYQzZFduqSlowHbuw,53
|
|
11
|
+
codedthemes_cli-0.1.0.dist-info/top_level.txt,sha256=up4mYBnkWC1EGPSzOIsG8vNRGXqU8gJCLkgd6Ua4D2k,12
|
|
12
|
+
codedthemes_cli-0.1.0.dist-info/RECORD,,
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
codedthemes
|