@adonis0123/weekly-report 1.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude-skill.json +46 -0
- package/README.md +63 -0
- package/SKILL.md +174 -0
- package/install-skill.js +315 -0
- package/package.json +35 -0
- package/references/WEEKLY_REPORT_FORMAT.md +116 -0
- package/src/__init__.py +3 -0
- package/src/config_manager.py +171 -0
- package/src/date_utils.py +272 -0
- package/src/git_analyzer.py +342 -0
- package/src/report_generator.py +257 -0
- package/src/storage.py +491 -0
- package/uninstall-skill.js +191 -0
package/src/storage.py
ADDED
@@ -0,0 +1,491 @@
"""Storage management module

Manages the storage and retrieval of weekly reports.
"""

from __future__ import annotations

from dataclasses import dataclass
from datetime import date
from pathlib import Path
from typing import Any, Dict, List, Optional


@dataclass
class ReportEntry:
    summary: str
    details: List[str]


def _parse_report_markdown(content: str) -> tuple[list[str], dict[str, list[ReportEntry]]]:
    preamble: list[str] = []
    sections: dict[str, list[ReportEntry]] = {}

    current_section: Optional[str] = None
    current_entry: Optional[ReportEntry] = None
    started_sections = False

    for raw_line in content.splitlines():
        line = raw_line.rstrip("\n")
        if not line.strip():
            if not started_sections:
                preamble.append(line)
            continue

        if line.startswith("#"):
            if not started_sections:
                preamble.append(line)
            continue

        if not line.startswith(" "):
            started_sections = True
            current_section = line.strip()
            sections.setdefault(current_section, [])
            current_entry = None
            continue

        if line.startswith("  - "):
            if current_section is None:
                started_sections = True
                current_section = "其他"
                sections.setdefault(current_section, [])

            current_entry = ReportEntry(summary=line[4:].strip(), details=[])
            sections[current_section].append(current_entry)
            continue

        if line.startswith("    - ") and current_entry is not None:
            current_entry.details.append(line[6:].strip())
            continue

        # Tolerate unexpected formats: treat any other indented line as a detail of the current entry
        if current_entry is not None and line.startswith(" "):
            current_entry.details.append(line.strip())

    return preamble, sections
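For orientation, here is a minimal, hedged sketch of what the parser above returns for a tiny report. The `storage` import path, the section title, and the bullet text are assumptions for illustration only.

```python
# Hypothetical usage of _parse_report_markdown; assumes the module is importable
# as `storage` and that entries use a two-space indent and details a four-space indent.
from storage import _parse_report_markdown

sample = """# 2025 W03

本周工作
  - 完成登录模块
    - 修复 token 过期问题
"""

preamble, sections = _parse_report_markdown(sample)
# preamble -> ["# 2025 W03", ""]
# sections -> {"本周工作": [ReportEntry(summary="完成登录模块",
#                                       details=["修复 token 过期问题"])]}
```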


def _dedupe_preserve_order(items: List[str]) -> List[str]:
    seen: set[str] = set()
    result: List[str] = []
    for item in items:
        key = item.strip()
        if not key or key in seen:
            continue
        seen.add(key)
        result.append(item.strip())
    return result


def _merge_sections(
    existing: dict[str, list[ReportEntry]],
    new: dict[str, list[ReportEntry]],
) -> dict[str, list[ReportEntry]]:
    merged: dict[str, list[ReportEntry]] = {}

    # Preserve the order of existing sections
    for section, entries in existing.items():
        merged[section] = [ReportEntry(e.summary, list(e.details)) for e in entries]

    # Merge in the new content
    for section, entries in new.items():
        if section not in merged:
            merged[section] = [ReportEntry(e.summary, list(e.details)) for e in entries]
            continue

        by_summary: dict[str, ReportEntry] = {e.summary: e for e in merged[section]}
        for entry in entries:
            if entry.summary in by_summary:
                target = by_summary[entry.summary]
                target.details = _dedupe_preserve_order(target.details + entry.details)
            else:
                merged[section].append(ReportEntry(entry.summary, list(entry.details)))
                by_summary[entry.summary] = merged[section][-1]

    # Final dedupe pass
    for section, entries in merged.items():
        for entry in entries:
            entry.details = _dedupe_preserve_order(entry.details)

    return merged


def _render_report_markdown(
    preamble: list[str],
    sections: dict[str, list[ReportEntry]],
) -> str:
    lines: list[str] = []
    if preamble:
        lines.extend(preamble)
        if lines and lines[-1].strip():
            lines.append("")

    for section, entries in sections.items():
        lines.append(section)
        for entry in entries:
            lines.append(f"  - {entry.summary}")
            for detail in entry.details:
                lines.append(f"    - {detail}")
        lines.append("")

    while lines and not lines[-1].strip():
        lines.pop()

    return "\n".join(lines) + "\n"


def merge_report_content(existing: str, new: str) -> str:
    existing_preamble, existing_sections = _parse_report_markdown(existing)
    new_preamble, new_sections = _parse_report_markdown(new)

    preamble = existing_preamble or new_preamble
    merged_sections = _merge_sections(existing_sections, new_sections)
    return _render_report_markdown(preamble, merged_sections)
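A hedged sketch of the resulting merge behavior; the section name and bullets are invented sample data, and the `storage` import path is an assumption.

```python
# Hypothetical merge example; all report text is sample data.
from storage import merge_report_content

existing = "本周工作\n  - 完成登录模块\n    - 修复 token 过期问题\n"
new = "本周工作\n  - 完成登录模块\n    - 补充单元测试\n  - 优化构建脚本\n"

print(merge_report_content(existing, new), end="")
# 本周工作
#   - 完成登录模块
#     - 修复 token 过期问题
#     - 补充单元测试
#   - 优化构建脚本
```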


def get_storage_dir(base_dir: Optional[Path] = None) -> Path:
    """Get the storage directory.

    Args:
        base_dir: Base directory; defaults to ~/.weekly-reports

    Returns:
        The storage directory path
    """
    if base_dir is not None:
        return base_dir

    return Path.home() / ".weekly-reports"


def get_report_path(
    year: int,
    week: int,
    base_dir: Optional[Path] = None,
) -> Path:
    """Get the path of a weekly report file.

    Args:
        year: Year
        week: Week number
        base_dir: Storage base directory

    Returns:
        The weekly report file path
    """
    storage_dir = get_storage_dir(base_dir)
    return storage_dir / str(year) / f"week-{week:02d}.md"


def save_report(
    content: str,
    year: int,
    week: int,
    base_dir: Optional[Path] = None,
) -> Path:
    """Save a weekly report.

    Args:
        content: Report content
        year: Year
        week: Week number
        base_dir: Storage base directory

    Returns:
        The path of the saved file
    """
    path = get_report_path(year, week, base_dir)

    # Ensure the directory exists
    path.parent.mkdir(parents=True, exist_ok=True)

    # Merge content when the same week is generated more than once
    if path.exists():
        existing = path.read_text(encoding="utf-8")
        merged = merge_report_content(existing, content)
        path.write_text(merged, encoding="utf-8")
    else:
        path.write_text(content if content.endswith("\n") else content + "\n", encoding="utf-8")

    return path
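A minimal usage sketch of save_report. The base_dir is redirected to a temporary directory so nothing is written under ~/.weekly-reports; the year, week, and content are sample values, and the `storage` import path is an assumption.

```python
# Hypothetical save_report usage with sample data under a temporary base_dir.
import tempfile
from pathlib import Path

from storage import save_report

with tempfile.TemporaryDirectory() as tmp:
    base = Path(tmp)
    path = save_report("本周工作\n  - 完成登录模块\n", year=2025, week=3, base_dir=base)
    print(path.relative_to(base))  # 2025/week-03.md
    # Saving the same week again merges instead of overwriting:
    save_report("本周工作\n  - 优化构建脚本\n", year=2025, week=3, base_dir=base)
    print(path.read_text(encoding="utf-8"), end="")
    # 本周工作
    #   - 完成登录模块
    #   - 优化构建脚本
```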


def list_reports(base_dir: Optional[Path] = None) -> List[Dict[str, Any]]:
    """List all weekly reports.

    Args:
        base_dir: Storage base directory

    Returns:
        A list of reports; each item contains year, week, path
    """
    storage_dir = get_storage_dir(base_dir)

    if not storage_dir.exists():
        return []

    reports = []

    # Walk the year directories
    for year_dir in sorted(storage_dir.iterdir(), reverse=True):
        if not year_dir.is_dir() or not year_dir.name.isdigit():
            continue

        year = int(year_dir.name)

        # Walk the weekly report files
        for report_file in sorted(year_dir.glob("week-*.md"), reverse=True):
            # Extract the week number from the filename
            week_str = report_file.stem.replace("week-", "")
            try:
                week = int(week_str)
            except ValueError:
                continue

            reports.append({
                "year": year,
                "week": week,
                "path": report_file,
                "filename": report_file.name,
            })

    return reports


def get_report_by_week(
    year: int,
    week: int,
    base_dir: Optional[Path] = None,
) -> Optional[Dict[str, Any]]:
    """Get a weekly report by week.

    Args:
        year: Year
        week: Week number
        base_dir: Storage base directory

    Returns:
        A dict with the report info, or None if it does not exist
    """
    path = get_report_path(year, week, base_dir)

    if not path.exists():
        return None

    return {
        "year": year,
        "week": week,
        "path": path,
        "content": path.read_text(encoding="utf-8"),
    }


def update_index(base_dir: Optional[Path] = None) -> None:
    """Update the weekly report index file.

    Args:
        base_dir: Storage base directory
    """
    storage_dir = get_storage_dir(base_dir)
    storage_dir.mkdir(parents=True, exist_ok=True)

    reports = list_reports(base_dir)

    # Group by year
    by_year: Dict[int, List[Dict[str, Any]]] = {}
    for report in reports:
        year = report["year"]
        if year not in by_year:
            by_year[year] = []
        by_year[year].append(report)

    # Build the index content
    lines = ["# 周报索引\n"]

    for year in sorted(by_year.keys(), reverse=True):
        lines.append(f"\n## {year} 年\n")
        for report in by_year[year]:
            week = report["week"]
            filename = report["filename"]
            lines.append(f"- [第 {week} 周](./{year}/{filename})")

    # If there are no reports yet
    if not reports:
        lines.append("\n暂无周报记录。\n")

    # Write the index file
    index_path = storage_dir / "index.md"
    index_path.write_text("\n".join(lines), encoding="utf-8")
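Tracing the string building above, a rough sketch of the index that update_index writes; the weeks and year are sample values, the `storage` import path is an assumption, and the blank-line layout follows from the "\n" characters embedded in the appended strings.

```python
# Hypothetical index sketch: save two sample weeks, then rebuild index.md.
import tempfile
from pathlib import Path

from storage import save_report, update_index

with tempfile.TemporaryDirectory() as tmp:
    base = Path(tmp)
    save_report("本周工作\n  - 完成登录模块\n", year=2025, week=2, base_dir=base)
    save_report("本周工作\n  - 优化构建脚本\n", year=2025, week=3, base_dir=base)
    update_index(base_dir=base)
    print((base / "index.md").read_text(encoding="utf-8"))
    # # 周报索引
    #
    #
    # ## 2025 年
    #
    # - [第 3 周](./2025/week-03.md)
    # - [第 2 周](./2025/week-02.md)
```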


def delete_report(
    year: int,
    week: int,
    base_dir: Optional[Path] = None,
) -> bool:
    """Delete a weekly report.

    Args:
        year: Year
        week: Week number
        base_dir: Storage base directory

    Returns:
        Whether the deletion succeeded
    """
    path = get_report_path(year, week, base_dir)

    if not path.exists():
        return False

    path.unlink()
    return True


# ==================== Period report helpers ====================


def get_period_report_path(
    start_date: date,
    end_date: date,
    base_dir: Optional[Path] = None,
) -> Path:
    """Get the path of a period report file.

    Args:
        start_date: Start date
        end_date: End date
        base_dir: Storage base directory

    Returns:
        The period report file path, in the form periods/YYYY-MM-DD_to_YYYY-MM-DD.md
    """
    storage_dir = get_storage_dir(base_dir)
    filename = f"{start_date.isoformat()}_to_{end_date.isoformat()}.md"
    return storage_dir / "periods" / filename
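For example, a period report path is derived purely from the two dates; the dates below are sample values and the `storage` import path is an assumption.

```python
# Hypothetical period-report path; dates are sample values.
from datetime import date

from storage import get_period_report_path

print(get_period_report_path(date(2025, 7, 13), date(2026, 1, 13)))
# <home>/.weekly-reports/periods/2025-07-13_to_2026-01-13.md
```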


def save_period_report(
    content: str,
    start_date: date,
    end_date: date,
    base_dir: Optional[Path] = None,
) -> Path:
    """Save a period report.

    Args:
        content: Report content
        start_date: Start date
        end_date: End date
        base_dir: Storage base directory

    Returns:
        The path of the saved file
    """
    path = get_period_report_path(start_date, end_date, base_dir)

    # Ensure the directory exists
    path.parent.mkdir(parents=True, exist_ok=True)

    # Merge content when the same period is generated more than once
    if path.exists():
        existing = path.read_text(encoding="utf-8")
        merged = merge_report_content(existing, content)
        path.write_text(merged, encoding="utf-8")
    else:
        path.write_text(content if content.endswith("\n") else content + "\n", encoding="utf-8")

    return path


def list_period_reports(base_dir: Optional[Path] = None) -> List[Dict[str, Any]]:
    """List all period reports.

    Args:
        base_dir: Storage base directory

    Returns:
        A list of period reports; each item contains start_date, end_date, path, filename
    """
    storage_dir = get_storage_dir(base_dir)
    periods_dir = storage_dir / "periods"

    if not periods_dir.exists():
        return []

    reports = []

    # Walk the period report files
    for report_file in sorted(periods_dir.glob("*_to_*.md"), reverse=True):
        # Extract the date range from the filename
        stem = report_file.stem  # e.g., "2025-07-13_to_2026-01-13"
        parts = stem.split("_to_")
        if len(parts) != 2:
            continue

        try:
            start_date = date.fromisoformat(parts[0])
            end_date = date.fromisoformat(parts[1])
        except ValueError:
            continue

        reports.append({
            "start_date": start_date,
            "end_date": end_date,
            "path": report_file,
            "filename": report_file.name,
        })

    return reports
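A short round-trip sketch combining the two functions above, again under a temporary base_dir; the content and dates are sample assumptions, as is the `storage` import path.

```python
# Hypothetical period-report round trip under a temporary base_dir.
import tempfile
from datetime import date
from pathlib import Path

from storage import list_period_reports, save_period_report

with tempfile.TemporaryDirectory() as tmp:
    base = Path(tmp)
    save_period_report("项目进展\n  - 完成里程碑 1\n",
                       date(2025, 7, 13), date(2026, 1, 13), base_dir=base)
    for r in list_period_reports(base_dir=base):
        print(r["start_date"], r["end_date"], r["filename"])
    # 2025-07-13 2026-01-13 2025-07-13_to_2026-01-13.md
```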


def get_period_report(
    start_date: date,
    end_date: date,
    base_dir: Optional[Path] = None,
) -> Optional[Dict[str, Any]]:
    """Get a period report by date range.

    Args:
        start_date: Start date
        end_date: End date
        base_dir: Storage base directory

    Returns:
        A dict with the report info, or None if it does not exist
    """
    path = get_period_report_path(start_date, end_date, base_dir)

    if not path.exists():
        return None

    return {
        "start_date": start_date,
        "end_date": end_date,
        "path": path,
        "content": path.read_text(encoding="utf-8"),
    }


def delete_period_report(
    start_date: date,
    end_date: date,
    base_dir: Optional[Path] = None,
) -> bool:
    """Delete a period report.

    Args:
        start_date: Start date
        end_date: End date
        base_dir: Storage base directory

    Returns:
        Whether the deletion succeeded
    """
    path = get_period_report_path(start_date, end_date, base_dir)

    if not path.exists():
        return False

    path.unlink()
    return True

package/uninstall-skill.js
ADDED
@@ -0,0 +1,191 @@
#!/usr/bin/env node
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));

// shared/src/uninstall-skill.ts
var import_fs2 = __toESM(require("fs"));
var import_path2 = __toESM(require("path"));

// shared/src/utils.ts
var import_fs = __toESM(require("fs"));
var import_path = __toESM(require("path"));
var import_os = __toESM(require("os"));
var CWD = process.env.INIT_CWD || process.cwd();
var DEFAULT_TARGET = {
  name: "claude-code",
  paths: {
    global: ".claude/skills",
    project: ".claude/skills"
  }
};
function getEnabledTargets(config) {
  if (!config.targets) {
    return [DEFAULT_TARGET];
  }
  return Object.entries(config.targets).filter(([_, target]) => target.enabled).map(([name, target]) => ({
    name,
    paths: target.paths
  }));
}
function extractSkillName(packageName) {
  if (packageName.startsWith("@")) {
    return packageName.split("/")[1] || packageName;
  }
  return packageName;
}
function detectInstallLocation(targetPaths, isGlobal) {
  if (isGlobal) {
    return {
      type: "personal",
      base: import_path.default.join(import_os.default.homedir(), targetPaths.global)
    };
  }
  let projectRoot = CWD;
  while (projectRoot !== import_path.default.dirname(projectRoot)) {
    const hasPackageJson = import_fs.default.existsSync(import_path.default.join(projectRoot, "package.json"));
    const hasGit = import_fs.default.existsSync(import_path.default.join(projectRoot, ".git"));
    const isInNodeModules = projectRoot.includes("/node_modules/") || import_path.default.basename(projectRoot) === "node_modules";
    if ((hasPackageJson || hasGit) && !isInNodeModules) {
      break;
    }
    projectRoot = import_path.default.dirname(projectRoot);
  }
  const finalIsInNodeModules = projectRoot.includes("/node_modules/") || import_path.default.basename(projectRoot) === "node_modules";
  if (finalIsInNodeModules) {
    console.warn("\u26A0 Warning: Could not find project root directory, using current directory");
    projectRoot = CWD;
  }
  return {
    type: "project",
    base: import_path.default.join(projectRoot, targetPaths.project)
  };
}
function isGlobalInstall() {
  return process.env.npm_config_global === "true";
}
function removeDir(dir) {
  if (import_fs.default.existsSync(dir)) {
    import_fs.default.rmSync(dir, { recursive: true, force: true });
  }
}
function readSkillConfig(dir) {
  const configPath = import_path.default.join(dir, ".claude-skill.json");
  if (!import_fs.default.existsSync(configPath)) {
    throw new Error(".claude-skill.json not found");
  }
  return JSON.parse(import_fs.default.readFileSync(configPath, "utf-8"));
}

// shared/src/uninstall-skill.ts
function updateManifest(skillsDir, config) {
  const manifestPath = import_path2.default.join(skillsDir, ".skills-manifest.json");
  if (!import_fs2.default.existsSync(manifestPath)) {
    return;
  }
  try {
    const manifest = JSON.parse(import_fs2.default.readFileSync(manifestPath, "utf-8"));
    if (manifest.skills && manifest.skills[config.name]) {
      delete manifest.skills[config.name];
      import_fs2.default.writeFileSync(manifestPath, JSON.stringify(manifest, null, 2));
      console.log(" \u2713 Updated manifest");
    }
  } catch (error) {
    const message = error instanceof Error ? error.message : String(error);
    console.warn(" Warning: Could not update manifest:", message);
  }
}
function uninstallFromTarget(target, config) {
  console.log(`
\u{1F5D1}\uFE0F Uninstalling from ${target.name}...`);
  const isGlobal = isGlobalInstall();
  const location = detectInstallLocation(target.paths, isGlobal);
  const skillName = extractSkillName(config.name);
  const skillNameTargetDir = import_path2.default.join(location.base, skillName);
  const fullPackageNameTargetDir = import_path2.default.join(location.base, config.name);
  let removed = false;
  if (import_fs2.default.existsSync(skillNameTargetDir)) {
    removeDir(skillNameTargetDir);
    console.log(` \u2713 Removed skill directory: ${skillName}`);
    removed = true;
  }
  if (import_fs2.default.existsSync(fullPackageNameTargetDir) && fullPackageNameTargetDir !== skillNameTargetDir) {
    removeDir(fullPackageNameTargetDir);
    console.log(` \u2713 Removed skill directory: ${config.name}`);
    removed = true;
  }
  updateManifest(location.base, config);
  if (removed) {
    console.log(` \u2705 Uninstalled from ${target.name}`);
    return true;
  } else {
    console.log(` \u2139\uFE0F Skill was not installed in ${target.name}`);
    return false;
  }
}
function uninstallSkill() {
  console.log("\u{1F5D1}\uFE0F Uninstalling AI Coding Skill...\n");
  const packageDir = __dirname;
  let config;
  try {
    config = readSkillConfig(packageDir);
  } catch {
    console.warn("Warning: .claude-skill.json not found, skipping cleanup");
    return;
  }
  const enabledTargets = getEnabledTargets(config);
  console.log(`Uninstalling skill "${config.name}" from ${enabledTargets.length} target(s):`);
  enabledTargets.forEach((target) => {
    console.log(` \u2022 ${target.name}`);
  });
  const uninstalledFrom = [];
  for (const target of enabledTargets) {
    try {
      const success = uninstallFromTarget(target, config);
      if (success) {
        uninstalledFrom.push(target.name);
      }
    } catch (error) {
      const message = error instanceof Error ? error.message : String(error);
      console.error(`
\u274C Failed to uninstall from ${target.name}:`, message);
    }
  }
  console.log("\n" + "=".repeat(60));
  if (uninstalledFrom.length > 0) {
    console.log("\u2705 Uninstallation Complete!");
    console.log("=".repeat(60));
    console.log("\nUninstalled from:");
    uninstalledFrom.forEach((target) => {
      console.log(` \u2022 ${target}`);
    });
  } else {
    console.log("\u2139\uFE0F Skill was not installed");
    console.log("=".repeat(60));
  }
}
try {
  uninstallSkill();
} catch (error) {
  const message = error instanceof Error ? error.message : String(error);
  console.error("\n\u26A0\uFE0F Warning during uninstall:", message);
}