devsync 0.5.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aiconfigkit/__init__.py +0 -0
- aiconfigkit/__main__.py +6 -0
- aiconfigkit/ai_tools/__init__.py +0 -0
- aiconfigkit/ai_tools/base.py +236 -0
- aiconfigkit/ai_tools/capability_registry.py +262 -0
- aiconfigkit/ai_tools/claude.py +91 -0
- aiconfigkit/ai_tools/claude_desktop.py +97 -0
- aiconfigkit/ai_tools/cline.py +92 -0
- aiconfigkit/ai_tools/copilot.py +92 -0
- aiconfigkit/ai_tools/cursor.py +109 -0
- aiconfigkit/ai_tools/detector.py +169 -0
- aiconfigkit/ai_tools/kiro.py +85 -0
- aiconfigkit/ai_tools/mcp_syncer.py +291 -0
- aiconfigkit/ai_tools/roo.py +110 -0
- aiconfigkit/ai_tools/translator.py +390 -0
- aiconfigkit/ai_tools/winsurf.py +102 -0
- aiconfigkit/cli/__init__.py +0 -0
- aiconfigkit/cli/delete.py +118 -0
- aiconfigkit/cli/download.py +274 -0
- aiconfigkit/cli/install.py +237 -0
- aiconfigkit/cli/install_new.py +937 -0
- aiconfigkit/cli/list.py +275 -0
- aiconfigkit/cli/main.py +454 -0
- aiconfigkit/cli/mcp_configure.py +232 -0
- aiconfigkit/cli/mcp_install.py +166 -0
- aiconfigkit/cli/mcp_sync.py +165 -0
- aiconfigkit/cli/package.py +383 -0
- aiconfigkit/cli/package_create.py +323 -0
- aiconfigkit/cli/package_install.py +472 -0
- aiconfigkit/cli/template.py +19 -0
- aiconfigkit/cli/template_backup.py +261 -0
- aiconfigkit/cli/template_init.py +499 -0
- aiconfigkit/cli/template_install.py +261 -0
- aiconfigkit/cli/template_list.py +172 -0
- aiconfigkit/cli/template_uninstall.py +146 -0
- aiconfigkit/cli/template_update.py +225 -0
- aiconfigkit/cli/template_validate.py +234 -0
- aiconfigkit/cli/tools.py +47 -0
- aiconfigkit/cli/uninstall.py +125 -0
- aiconfigkit/cli/update.py +309 -0
- aiconfigkit/core/__init__.py +0 -0
- aiconfigkit/core/checksum.py +211 -0
- aiconfigkit/core/component_detector.py +905 -0
- aiconfigkit/core/conflict_resolution.py +329 -0
- aiconfigkit/core/git_operations.py +539 -0
- aiconfigkit/core/mcp/__init__.py +1 -0
- aiconfigkit/core/mcp/credentials.py +279 -0
- aiconfigkit/core/mcp/manager.py +308 -0
- aiconfigkit/core/mcp/set_manager.py +1 -0
- aiconfigkit/core/mcp/validator.py +1 -0
- aiconfigkit/core/models.py +1661 -0
- aiconfigkit/core/package_creator.py +743 -0
- aiconfigkit/core/package_manifest.py +248 -0
- aiconfigkit/core/repository.py +298 -0
- aiconfigkit/core/secret_detector.py +438 -0
- aiconfigkit/core/template_manifest.py +283 -0
- aiconfigkit/core/version.py +201 -0
- aiconfigkit/storage/__init__.py +0 -0
- aiconfigkit/storage/library.py +429 -0
- aiconfigkit/storage/mcp_tracker.py +1 -0
- aiconfigkit/storage/package_tracker.py +234 -0
- aiconfigkit/storage/template_library.py +229 -0
- aiconfigkit/storage/template_tracker.py +296 -0
- aiconfigkit/storage/tracker.py +416 -0
- aiconfigkit/tui/__init__.py +5 -0
- aiconfigkit/tui/installer.py +511 -0
- aiconfigkit/utils/__init__.py +0 -0
- aiconfigkit/utils/atomic_write.py +90 -0
- aiconfigkit/utils/backup.py +169 -0
- aiconfigkit/utils/dotenv.py +128 -0
- aiconfigkit/utils/git_helpers.py +187 -0
- aiconfigkit/utils/logging.py +60 -0
- aiconfigkit/utils/namespace.py +134 -0
- aiconfigkit/utils/paths.py +205 -0
- aiconfigkit/utils/project.py +109 -0
- aiconfigkit/utils/streaming.py +216 -0
- aiconfigkit/utils/ui.py +194 -0
- aiconfigkit/utils/validation.py +187 -0
- devsync-0.5.5.dist-info/LICENSE +21 -0
- devsync-0.5.5.dist-info/METADATA +477 -0
- devsync-0.5.5.dist-info/RECORD +84 -0
- devsync-0.5.5.dist-info/WHEEL +5 -0
- devsync-0.5.5.dist-info/entry_points.txt +2 -0
- devsync-0.5.5.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,905 @@
|
|
|
1
|
+
"""Component detection for scanning project directories to find packageable components."""
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import logging
|
|
5
|
+
from dataclasses import dataclass, field
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
from typing import Optional
|
|
8
|
+
|
|
9
|
+
from aiconfigkit.core.checksum import calculate_file_checksum
|
|
10
|
+
from aiconfigkit.core.models import (
|
|
11
|
+
CommandComponent,
|
|
12
|
+
HookComponent,
|
|
13
|
+
InstructionComponent,
|
|
14
|
+
MCPServerComponent,
|
|
15
|
+
MemoryFileComponent,
|
|
16
|
+
PackageComponents,
|
|
17
|
+
ResourceComponent,
|
|
18
|
+
SkillComponent,
|
|
19
|
+
WorkflowComponent,
|
|
20
|
+
)
|
|
21
|
+
|
|
22
|
+
logger = logging.getLogger(__name__)
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
@dataclass
class DetectedInstruction:
    """Instruction file found during a project scan.

    Records where the file lives, which IDE convention it was found
    under, and a short preview of its contents.
    """

    name: str  # derived instruction name
    file_path: Path  # absolute path to the file
    relative_path: str  # path relative to the project root
    source_ide: str  # IDE directory the file was found in
    content_preview: str = ""  # first 100 characters of content
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
@dataclass
class DetectedMCPServer:
    """MCP server configuration found during a project scan.

    A server may come from a shared settings file (file-based) or from a
    standalone per-server JSON config.
    """

    name: str  # server name from the config
    file_path: Optional[Path]  # config file path, if file-based
    config: dict  # the raw MCP server configuration
    source: str  # where the config was found (e.g. ".claude/settings.local.json")
    env_vars: list[str] = field(default_factory=list)  # env var names the server uses
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
@dataclass
class DetectedHook:
    """Hook script found during a project scan."""

    name: str  # hook name (file stem)
    file_path: Path  # absolute path to the script
    relative_path: str  # path relative to the project root
    hook_type: str  # e.g. PreToolUse, PostToolUse, Notification, Stop
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
@dataclass
class DetectedCommand:
    """Command script found during a project scan."""

    name: str  # command name (file stem)
    file_path: Path  # absolute path to the script
    relative_path: str  # path relative to the project root
    command_type: str  # "slash" or "shell"
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
@dataclass
class DetectedResource:
    """Resource file found during a project scan."""

    name: str  # resource name (file stem)
    file_path: Path  # absolute path to the file
    relative_path: str  # path relative to the project root
    size: int  # file size in bytes
    checksum: str  # SHA256 checksum of the file contents
|
|
114
|
+
|
|
115
|
+
|
|
116
|
+
@dataclass
class DetectedSkill:
    """Claude skill found during a project scan.

    A skill is a directory containing SKILL.md plus optional supporting
    files (scripts, templates, etc.).
    """

    name: str  # skill name (directory name)
    dir_path: Path  # absolute path to the skill directory
    relative_path: str  # path relative to the project root
    description: str = ""  # description pulled from SKILL.md frontmatter
    has_scripts: bool = False  # whether supporting scripts were found
|
|
135
|
+
|
|
136
|
+
|
|
137
|
+
@dataclass
class DetectedWorkflow:
    """Windsurf workflow found during a project scan."""

    name: str  # workflow name (file stem)
    file_path: Path  # absolute path to the workflow file
    relative_path: str  # path relative to the project root
    description: str = ""  # description pulled from the workflow file
|
|
152
|
+
|
|
153
|
+
|
|
154
|
+
@dataclass
class DetectedMemoryFile:
    """CLAUDE.md memory file found during a project scan.

    Memory files persist context across Claude Code sessions; one may
    exist at the project root and others in subdirectories.
    """

    name: str  # identifier (path-based for subdirectory files)
    file_path: Path  # absolute path to the file
    relative_path: str  # path relative to the project root
    is_root: bool = False  # True only for the root-level CLAUDE.md
    content_preview: str = ""  # first 100 characters of content
|
|
173
|
+
|
|
174
|
+
|
|
175
|
+
@dataclass
class DetectionResult:
    """Aggregate outcome of a component detection scan.

    Holds one list per component category plus any non-fatal warnings
    collected along the way.
    """

    instructions: list[DetectedInstruction] = field(default_factory=list)
    mcp_servers: list[DetectedMCPServer] = field(default_factory=list)
    hooks: list[DetectedHook] = field(default_factory=list)
    commands: list[DetectedCommand] = field(default_factory=list)
    skills: list[DetectedSkill] = field(default_factory=list)
    workflows: list[DetectedWorkflow] = field(default_factory=list)
    memory_files: list[DetectedMemoryFile] = field(default_factory=list)
    resources: list[DetectedResource] = field(default_factory=list)
    warnings: list[str] = field(default_factory=list)

    @property
    def total_count(self) -> int:
        """Total number of detected components (warnings excluded)."""
        component_lists = (
            self.instructions,
            self.mcp_servers,
            self.hooks,
            self.commands,
            self.skills,
            self.workflows,
            self.memory_files,
            self.resources,
        )
        return sum(len(group) for group in component_lists)
|
|
214
|
+
|
|
215
|
+
|
|
216
|
+
class ComponentDetector:
    """Scans project directories to detect packageable components.

    Detection locations:
    - Instructions: .claude/rules/, .cursor/rules/, .windsurf/rules/, .github/instructions/**/*
    - Main Copilot instructions: .github/copilot-instructions.md
    - MCP servers: .claude/settings.local.json (mcpServers section), .ai-config-kit/mcp/
    - Hooks: .claude/hooks/
    - Commands: .claude/commands/ (legacy)
    - Skills: .claude/skills/ (directories with SKILL.md)
    - Workflows: .windsurf/workflows/
    - Memory files: CLAUDE.md at root and subdirectories
    - Resources: .ai-config-kit/resources/
    """

    # Directory-based instruction locations, mapped to the owning IDE name.
    INSTRUCTION_LOCATIONS: dict[str, str] = {
        ".claude/rules": "claude",
        ".cursor/rules": "cursor",
        ".windsurf/rules": "windsurf",
        ".kiro/steering": "kiro",
        ".clinerules": "cline",
        ".roo/rules": "roo",
        ".github/instructions": "copilot",
    }

    # Single-file instruction locations (not directories)
    SINGLE_INSTRUCTION_FILES: dict[str, str] = {
        ".github/copilot-instructions.md": "copilot",
    }

    # NOTE(review): Path.suffix only ever yields the final extension (".md"),
    # so the ".instructions.md" entry can never match a suffix comparison on
    # its own; ".md" already covers those files — confirm before relying on it.
    INSTRUCTION_EXTENSIONS = {".md", ".mdc", ".instructions.md"}

    # Settings files whose "mcpServers" section is scanned for servers.
    MCP_CONFIG_LOCATIONS = [
        ".claude/settings.local.json",
    ]

    # Directories scanned (non-recursively) for hook scripts.
    HOOK_LOCATIONS = [
        ".claude/hooks",
    ]

    # Directories scanned (non-recursively) for command scripts (legacy).
    COMMAND_LOCATIONS = [
        ".claude/commands",
    ]

    # Directories whose immediate subdirectories are treated as skills.
    SKILL_LOCATIONS = [
        ".claude/skills",
    ]

    # Directories scanned recursively for workflow files.
    WORKFLOW_LOCATIONS = [
        ".windsurf/workflows",
    ]

    # Memory file name searched for at the root and in subdirectories.
    MEMORY_FILE_NAME = "CLAUDE.md"

    # Directories scanned recursively for resource files.
    RESOURCE_LOCATIONS = [
        ".ai-config-kit/resources",
    ]

    # Resources over MAX are skipped entirely; over WARN they only log a warning.
    MAX_RESOURCE_SIZE = 200 * 1024 * 1024  # 200 MB
    WARN_RESOURCE_SIZE = 50 * 1024 * 1024  # 50 MB

    def __init__(self, project_root: Path):
        """Initialize detector with project root.

        Args:
            project_root: Path to project root directory
        """
        # resolve() normalizes symlinks/".." so relative_to() calls in the
        # _detect_* methods are computed against a canonical base.
        self.project_root = project_root.resolve()
|
|
284
|
+
|
|
285
|
+
def detect_all(self) -> DetectionResult:
    """Detect all packageable components in the project.

    Runs every category-specific scan and bundles the results.

    Returns:
        DetectionResult with all detected components
    """
    # Keyword arguments are evaluated left to right, so the scans run in
    # the same order as before: instructions, MCP, hooks, commands,
    # skills, workflows, memory files, resources.
    return DetectionResult(
        instructions=self._detect_instructions(),
        mcp_servers=self._detect_mcp_servers(),
        hooks=self._detect_hooks(),
        commands=self._detect_commands(),
        skills=self._detect_skills(),
        workflows=self._detect_workflows(),
        memory_files=self._detect_memory_files(),
        resources=self._detect_resources(),
    )
|
|
318
|
+
|
|
319
|
+
def _detect_instructions(self) -> list[DetectedInstruction]:
    """Detect instruction files in IDE-specific directories.

    Supports:
    - Directory-based: .claude/rules/, .cursor/rules/, .windsurf/rules/
    - Recursive: .github/instructions/**/*.instructions.md
    - Single-file: .github/copilot-instructions.md

    Returns:
        List of detected instructions
    """
    instructions: list[DetectedInstruction] = []

    # Directory-based instruction locations.
    for location, ide_name in self.INSTRUCTION_LOCATIONS.items():
        dir_path = self.project_root / location
        if not dir_path.is_dir():
            continue

        # Copilot instructions may live in subdirectories; other IDEs are flat.
        if ide_name == "copilot":
            file_iter = dir_path.rglob("*")
        else:
            file_iter = dir_path.iterdir()

        for file_path in file_iter:
            if not file_path.is_file():
                continue
            if file_path.suffix.lower() not in self.INSTRUCTION_EXTENSIONS:
                continue

            if ide_name == "copilot" and file_path.name.endswith(".instructions.md"):
                # Include the subdirectory in the name so nested files stay
                # unique. Use as_posix() so the separator is always "/"
                # (str(Path) yields "\\" on Windows, which the previous
                # replace("/", "-") silently missed), and removesuffix()
                # so only the trailing ".instructions.md" is stripped
                # rather than any mid-string occurrence.
                rel = file_path.relative_to(dir_path).as_posix()
                name = rel.removesuffix(".instructions.md").replace("/", "-")
            else:
                name = file_path.stem

            detected = self._read_instruction_file(file_path, name, ide_name)
            if detected is not None:
                instructions.append(detected)

    # Single-file instructions (e.g. .github/copilot-instructions.md).
    for file_location, ide_name in self.SINGLE_INSTRUCTION_FILES.items():
        file_path = self.project_root / file_location
        if not file_path.is_file():
            continue
        detected = self._read_instruction_file(file_path, file_path.stem, ide_name)
        if detected is not None:
            instructions.append(detected)

    return instructions

def _read_instruction_file(
    self, file_path: Path, name: str, ide_name: str
) -> Optional[DetectedInstruction]:
    """Build a DetectedInstruction from a file, or None if it is unreadable.

    Args:
        file_path: Absolute path to the instruction file
        name: Derived instruction name
        ide_name: IDE the file belongs to

    Returns:
        DetectedInstruction, or None when reading fails (a warning is logged)
    """
    try:
        content = file_path.read_text(encoding="utf-8")
        return DetectedInstruction(
            name=name,
            file_path=file_path,
            relative_path=str(file_path.relative_to(self.project_root)),
            source_ide=ide_name,
            content_preview=content[:100] if content else "",
        )
    except Exception as e:
        logger.warning(f"Failed to read instruction {file_path}: {e}")
        return None
|
|
404
|
+
|
|
405
|
+
def _detect_mcp_servers(self) -> list[DetectedMCPServer]:
    """Detect MCP server configurations.

    Scans the "mcpServers" section of known settings files, then the
    standalone per-server JSON files under .ai-config-kit/mcp/.

    Returns:
        List of detected MCP servers
    """
    servers: list[DetectedMCPServer] = []

    # Settings files carrying an "mcpServers" section.
    for config_location in self.MCP_CONFIG_LOCATIONS:
        config_path = self.project_root / config_location
        if not config_path.exists():
            continue

        # The whole parse-and-collect step stays inside one try so a
        # malformed entry aborts this file (with a warning) exactly as
        # a malformed JSON document would.
        try:
            with config_path.open("r", encoding="utf-8") as handle:
                config_data = json.load(handle)

            for server_name, server_config in config_data.get("mcpServers", {}).items():
                servers.append(
                    DetectedMCPServer(
                        name=server_name,
                        file_path=config_path,
                        config=server_config,
                        source=config_location,
                        env_vars=list(server_config.get("env", {}).keys()),
                    )
                )
        except json.JSONDecodeError as e:
            logger.warning(f"Invalid JSON in {config_path}: {e}")
        except Exception as e:
            logger.warning(f"Failed to read MCP config {config_path}: {e}")

    # Standalone per-server JSON configs: one file == one server.
    mcp_dir = self.project_root / ".ai-config-kit" / "mcp"
    if mcp_dir.is_dir():
        for file_path in mcp_dir.glob("*.json"):
            try:
                with file_path.open("r", encoding="utf-8") as handle:
                    server_config = json.load(handle)
                servers.append(
                    DetectedMCPServer(
                        name=file_path.stem,
                        file_path=file_path,
                        config=server_config,
                        source=str(file_path.relative_to(self.project_root)),
                        env_vars=list(server_config.get("env", {}).keys()),
                    )
                )
            except Exception as e:
                logger.warning(f"Failed to read MCP config {file_path}: {e}")

    return servers
|
|
459
|
+
|
|
460
|
+
def _detect_hooks(self) -> list[DetectedHook]:
    """Detect hook scripts.

    Scans each configured hook directory (non-recursively) and infers
    the hook type from the filename.

    Returns:
        List of detected hooks
    """
    detected: list[DetectedHook] = []

    for location in self.HOOK_LOCATIONS:
        hook_dir = self.project_root / location
        if not hook_dir.is_dir():
            continue

        detected.extend(
            DetectedHook(
                name=entry.stem,
                file_path=entry,
                relative_path=str(entry.relative_to(self.project_root)),
                hook_type=self._infer_hook_type(entry.name),
            )
            for entry in hook_dir.iterdir()
            if entry.is_file()
        )

    return detected
|
|
490
|
+
|
|
491
|
+
def _infer_hook_type(self, filename: str) -> str:
|
|
492
|
+
"""Infer hook type from filename.
|
|
493
|
+
|
|
494
|
+
Args:
|
|
495
|
+
filename: Name of the hook file
|
|
496
|
+
|
|
497
|
+
Returns:
|
|
498
|
+
Hook type string
|
|
499
|
+
"""
|
|
500
|
+
filename_lower = filename.lower()
|
|
501
|
+
if "pretooluse" in filename_lower or "pre-tool" in filename_lower:
|
|
502
|
+
return "PreToolUse"
|
|
503
|
+
elif "posttooluse" in filename_lower or "post-tool" in filename_lower:
|
|
504
|
+
return "PostToolUse"
|
|
505
|
+
elif "notification" in filename_lower:
|
|
506
|
+
return "Notification"
|
|
507
|
+
elif "stop" in filename_lower:
|
|
508
|
+
return "Stop"
|
|
509
|
+
return "Unknown"
|
|
510
|
+
|
|
511
|
+
def _detect_commands(self) -> list[DetectedCommand]:
    """Detect command scripts.

    Scans each configured command directory (non-recursively) and infers
    the command type from the file extension.

    Returns:
        List of detected commands
    """
    detected: list[DetectedCommand] = []

    for location in self.COMMAND_LOCATIONS:
        cmd_dir = self.project_root / location
        if not cmd_dir.is_dir():
            continue

        detected.extend(
            DetectedCommand(
                name=entry.stem,
                file_path=entry,
                relative_path=str(entry.relative_to(self.project_root)),
                command_type=self._infer_command_type(entry),
            )
            for entry in cmd_dir.iterdir()
            if entry.is_file()
        )

    return detected
|
|
541
|
+
|
|
542
|
+
def _infer_command_type(self, file_path: Path) -> str:
|
|
543
|
+
"""Infer command type from file extension.
|
|
544
|
+
|
|
545
|
+
Args:
|
|
546
|
+
file_path: Path to command file
|
|
547
|
+
|
|
548
|
+
Returns:
|
|
549
|
+
Command type string
|
|
550
|
+
"""
|
|
551
|
+
suffix = file_path.suffix.lower()
|
|
552
|
+
if suffix in (".sh", ".bash"):
|
|
553
|
+
return "shell"
|
|
554
|
+
elif suffix in (".md", ".txt"):
|
|
555
|
+
return "slash"
|
|
556
|
+
return "shell"
|
|
557
|
+
|
|
558
|
+
def _detect_resources(self) -> list[DetectedResource]:
    """Detect resource files.

    Walks each resource directory recursively, skipping files above the
    hard size limit and warning about merely-large ones.

    Returns:
        List of detected resources
    """
    found: list[DetectedResource] = []

    for location in self.RESOURCE_LOCATIONS:
        res_dir = self.project_root / location
        if not res_dir.is_dir():
            continue

        for entry in res_dir.rglob("*"):
            if not entry.is_file():
                continue

            try:
                size = entry.stat().st_size

                # Hard cap: refuse the file entirely.
                if size > self.MAX_RESOURCE_SIZE:
                    logger.warning(f"Resource {entry} exceeds max size ({size} > {self.MAX_RESOURCE_SIZE})")
                    continue
                # Soft cap: keep the file, but flag it.
                if size > self.WARN_RESOURCE_SIZE:
                    logger.warning(f"Resource {entry} is large ({size} bytes)")

                found.append(
                    DetectedResource(
                        name=entry.stem,
                        file_path=entry,
                        relative_path=str(entry.relative_to(self.project_root)),
                        size=size,
                        checksum=calculate_file_checksum(str(entry)),
                    )
                )
            except Exception as e:
                logger.warning(f"Failed to process resource {entry}: {e}")

    return found
|
|
601
|
+
|
|
602
|
+
def _detect_skills(self) -> list[DetectedSkill]:
    """Detect Claude skill directories.

    Skills are directories containing SKILL.md with optional supporting files.
    The manifest filename is matched case-insensitively (SKILL.md, Skill.md,
    skill.md, ...); the previous code only checked two exact spellings while
    claiming case-insensitivity.

    Returns:
        List of detected skills
    """
    skills: list[DetectedSkill] = []

    for location in self.SKILL_LOCATIONS:
        skill_dir = self.project_root / location
        if not skill_dir.is_dir():
            continue

        for item in skill_dir.iterdir():
            if not item.is_dir():
                continue

            # Prefer the canonical spelling, then fall back to any
            # case-insensitive match inside the skill directory.
            skill_md = item / "SKILL.md"
            if not skill_md.is_file():
                skill_md = next(
                    (
                        entry
                        for entry in item.iterdir()
                        if entry.is_file() and entry.name.lower() == "skill.md"
                    ),
                    None,
                )
                if skill_md is None:
                    continue

            try:
                skills.append(
                    DetectedSkill(
                        name=item.name,
                        dir_path=item,
                        relative_path=str(item.relative_to(self.project_root)),
                        description=self._extract_skill_description(skill_md),
                        # Heuristic: supporting code under scripts/ or
                        # loose shell scripts mark the skill as scripted.
                        has_scripts=any(item.glob("scripts/*")) or any(item.glob("*.sh")),
                    )
                )
            except Exception as e:
                logger.warning(f"Failed to process skill {item}: {e}")

    return skills
|
|
649
|
+
|
|
650
|
+
def _extract_skill_description(self, skill_md_path: Path) -> str:
|
|
651
|
+
"""Extract description from SKILL.md frontmatter.
|
|
652
|
+
|
|
653
|
+
Args:
|
|
654
|
+
skill_md_path: Path to SKILL.md file
|
|
655
|
+
|
|
656
|
+
Returns:
|
|
657
|
+
Description string or empty string if not found
|
|
658
|
+
"""
|
|
659
|
+
try:
|
|
660
|
+
content = skill_md_path.read_text(encoding="utf-8")
|
|
661
|
+
if content.startswith("---"):
|
|
662
|
+
# Parse YAML frontmatter
|
|
663
|
+
end_idx = content.find("---", 3)
|
|
664
|
+
if end_idx != -1:
|
|
665
|
+
frontmatter = content[3:end_idx].strip()
|
|
666
|
+
for line in frontmatter.split("\n"):
|
|
667
|
+
if line.startswith("description:"):
|
|
668
|
+
return line.split(":", 1)[1].strip().strip("\"'")
|
|
669
|
+
except Exception:
|
|
670
|
+
pass
|
|
671
|
+
return ""
|
|
672
|
+
|
|
673
|
+
def _detect_workflows(self) -> list[DetectedWorkflow]:
    """Detect Windsurf workflow files.

    Walks each workflow directory recursively, keeping markdown and
    YAML files only.

    Returns:
        List of detected workflows
    """
    found: list[DetectedWorkflow] = []
    accepted_suffixes = {".md", ".yaml", ".yml"}

    for location in self.WORKFLOW_LOCATIONS:
        workflow_dir = self.project_root / location
        if not workflow_dir.is_dir():
            continue

        for entry in workflow_dir.rglob("*"):
            if not entry.is_file():
                continue
            if entry.suffix.lower() not in accepted_suffixes:
                continue

            try:
                found.append(
                    DetectedWorkflow(
                        name=entry.stem,
                        file_path=entry,
                        relative_path=str(entry.relative_to(self.project_root)),
                        description=self._extract_workflow_description(entry),
                    )
                )
            except Exception as e:
                logger.warning(f"Failed to process workflow {entry}: {e}")

    return found
|
|
709
|
+
|
|
710
|
+
def _extract_workflow_description(self, workflow_path: Path) -> str:
|
|
711
|
+
"""Extract description from workflow file.
|
|
712
|
+
|
|
713
|
+
Args:
|
|
714
|
+
workflow_path: Path to workflow file
|
|
715
|
+
|
|
716
|
+
Returns:
|
|
717
|
+
Description string or empty string if not found
|
|
718
|
+
"""
|
|
719
|
+
try:
|
|
720
|
+
content = workflow_path.read_text(encoding="utf-8")
|
|
721
|
+
if content.startswith("---"):
|
|
722
|
+
end_idx = content.find("---", 3)
|
|
723
|
+
if end_idx != -1:
|
|
724
|
+
frontmatter = content[3:end_idx].strip()
|
|
725
|
+
for line in frontmatter.split("\n"):
|
|
726
|
+
if line.startswith("description:"):
|
|
727
|
+
return line.split(":", 1)[1].strip().strip("\"'")
|
|
728
|
+
except Exception:
|
|
729
|
+
pass
|
|
730
|
+
return ""
|
|
731
|
+
|
|
732
|
+
def _detect_memory_files(self) -> list[DetectedMemoryFile]:
    """Detect CLAUDE.md memory files.

    Detects CLAUDE.md at project root and in subdirectories.
    The root file is handled first (is_root=True, fixed name "CLAUDE");
    subdirectory files get a path-derived name like "docs-api-CLAUDE".

    Returns:
        List of detected memory files
    """
    memory_files: list[DetectedMemoryFile] = []

    # Check root CLAUDE.md
    root_memory = self.project_root / self.MEMORY_FILE_NAME
    if root_memory.exists() and root_memory.is_file():
        try:
            content = root_memory.read_text(encoding="utf-8")
            content_preview = content[:100] if content else ""
            memory_files.append(
                DetectedMemoryFile(
                    name="CLAUDE",
                    file_path=root_memory,
                    relative_path=self.MEMORY_FILE_NAME,
                    is_root=True,
                    content_preview=content_preview,
                )
            )
        except Exception as e:
            logger.warning(f"Failed to read memory file {root_memory}: {e}")

    # Find CLAUDE.md in subdirectories (not too deep)
    # NOTE(review): rglob walks excluded trees (node_modules, .venv, ...)
    # before the filters below discard their hits — could be slow on very
    # large projects; confirm whether a pruning walk is warranted.
    for file_path in self.project_root.rglob(self.MEMORY_FILE_NAME):
        # Root file was already handled above; skip it here.
        if file_path == root_memory:
            continue
        if not file_path.is_file():
            continue

        # Skip common non-project directories
        rel_path = file_path.relative_to(self.project_root)
        parts = rel_path.parts
        # Hidden ancestor dirs are excluded, except the IDE dirs that
        # legitimately hold project config (parts[:-1] skips the filename).
        if any(p.startswith(".") and p not in {".claude", ".github"} for p in parts[:-1]):
            continue
        # Dependency/build output trees never carry project memory files.
        if any(p in {"node_modules", "venv", ".venv", "__pycache__", "dist", "build"} for p in parts):
            continue

        try:
            content = file_path.read_text(encoding="utf-8")
            content_preview = content[:100] if content else ""
            # Create name from directory path
            parent_parts = parts[:-1]
            # Conditional binds looser than "+": this is
            # ("-".join(...) + "-CLAUDE") if parent_parts else "CLAUDE".
            name = "-".join(parent_parts) + "-CLAUDE" if parent_parts else "CLAUDE"

            memory_files.append(
                DetectedMemoryFile(
                    name=name,
                    file_path=file_path,
                    relative_path=str(rel_path),
                    is_root=False,
                    content_preview=content_preview,
                )
            )
        except Exception as e:
            logger.warning(f"Failed to read memory file {file_path}: {e}")

    return memory_files
|
|
795
|
+
|
|
796
|
+
def to_package_components(
    self,
    detection_result: DetectionResult,
    include_descriptions: bool = True,
) -> PackageComponents:
    """Convert detection results to PackageComponents for manifest generation.

    Args:
        detection_result: Detection scan result
        include_descriptions: Whether to generate placeholder descriptions

    Returns:
        PackageComponents ready for manifest
    """
    instructions = [
        InstructionComponent(
            name=inst.name,
            file=inst.relative_path,
            description=f"Instruction from {inst.source_ide}" if include_descriptions else "",
            tags=[inst.source_ide],
        )
        for inst in detection_result.instructions
    ]

    # Built as a comprehension for consistency with every other
    # component list in this method (was a loop + append).
    # Note: file points at the manifest layout (mcp/<name>.json), not the
    # original source location.
    mcp_servers = [
        MCPServerComponent(
            name=mcp.name,
            file=f"mcp/{mcp.name}.json",
            description=f"MCP server from {mcp.source}" if include_descriptions else "",
            credentials=[],
            ide_support=["claude"],
        )
        for mcp in detection_result.mcp_servers
    ]

    hooks = [
        HookComponent(
            name=hook.name,
            file=hook.relative_path,
            description=f"{hook.hook_type} hook" if include_descriptions else "",
            hook_type=hook.hook_type,
            ide_support=["claude"],
        )
        for hook in detection_result.hooks
    ]

    commands = [
        CommandComponent(
            name=cmd.name,
            file=cmd.relative_path,
            description=f"{cmd.command_type} command" if include_descriptions else "",
            command_type=cmd.command_type,
            ide_support=["claude"],
        )
        for cmd in detection_result.commands
    ]

    resources = [
        ResourceComponent(
            name=res.name,
            file=res.relative_path,
            description="Resource file" if include_descriptions else "",
            install_path=res.relative_path,
            checksum=f"sha256:{res.checksum}",
            size=res.size,
        )
        for res in detection_result.resources
    ]

    # For skills/workflows the detected description (from frontmatter)
    # wins over the placeholder.
    skills = [
        SkillComponent(
            name=skill.name,
            file=skill.relative_path,
            description=skill.description or ("Claude skill" if include_descriptions else ""),
            ide_support=["claude"],
        )
        for skill in detection_result.skills
    ]

    workflows = [
        WorkflowComponent(
            name=wf.name,
            file=wf.relative_path,
            description=wf.description or ("Windsurf workflow" if include_descriptions else ""),
            ide_support=["windsurf"],
        )
        for wf in detection_result.workflows
    ]

    memory_files = [
        MemoryFileComponent(
            name=mem.name,
            file=mem.relative_path,
            description=("Root memory file" if mem.is_root else "Memory file") if include_descriptions else "",
            ide_support=["claude"],
        )
        for mem in detection_result.memory_files
    ]

    return PackageComponents(
        instructions=instructions,
        mcp_servers=mcp_servers,
        hooks=hooks,
        commands=commands,
        skills=skills,
        workflows=workflows,
        memory_files=memory_files,
        resources=resources,
    )
|