@giwonn/claude-daily-review 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.ko.md +143 -0
- package/README.md +143 -0
- package/dist/on-stop.js +274 -0
- package/dist/on-stop.js.map +1 -0
- package/dist/storage-cli.js +267 -0
- package/dist/storage-cli.js.map +1 -0
- package/hooks/hooks.json +38 -0
- package/package.json +31 -0
- package/prompts/session-end.md +233 -0
- package/prompts/session-start.md +358 -0
- package/skills/daily-review-setup.md +178 -0
|
@@ -0,0 +1,267 @@
|
|
|
1
|
+
// esbuild bundler runtime helpers (generated code — do not hand-edit).
var __defProp = Object.defineProperty;
var __getOwnPropNames = Object.getOwnPropertyNames;
// __esm wraps a module body so it runs lazily and exactly once: the first
// __init() call invokes the single function stored in `fn`, clears `fn` to 0
// so the body can never run again, and caches the result in `res`.
var __esm = (fn, res) => function __init() {
  return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res;
};
// __export installs live, enumerable getter bindings on `target` for every
// export accessor in `all` (mirrors ESM live-binding semantics).
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
|
|
10
|
+
|
|
11
|
+
// src/core/github-storage.ts
// Lazily-initialized module (esbuild __esm shim): nothing below executes
// until init_github_storage() is first called (see createStorageAdapter).
var github_storage_exports = {};
__export(github_storage_exports, {
  GitHubStorageAdapter: () => GitHubStorageAdapter
});
var GitHubStorageAdapter;
var init_github_storage = __esm({
  "src/core/github-storage.ts"() {
    "use strict";
    // Storage adapter backed by the GitHub Contents API. Every path passed
    // to a method is relative to `basePath` inside the target repository.
    GitHubStorageAdapter = class {
      constructor(owner, repo, token, basePath) {
        this.owner = owner;
        this.repo = repo;
        this.token = token;
        this.basePath = basePath;
        this.baseUrl = `https://api.github.com/repos/${owner}/${repo}/contents`;
        this.headers = {
          Authorization: `Bearer ${token}`,
          Accept: "application/vnd.github.v3+json",
          "Content-Type": "application/json"
        };
      }
      baseUrl;
      headers;
      // Build the full Contents-API URL for a repo-relative path.
      // NOTE(review): path segments are not URL-encoded — confirm callers
      // never pass names with characters that need escaping.
      getUrl(path) {
        return `${this.baseUrl}/${this.basePath}/${path}`;
      }
      // Fetch the current blob SHA for `path`, or null when the file is
      // absent. The API requires this SHA when updating an existing file.
      async getSha(path) {
        const res = await fetch(this.getUrl(path), { method: "GET", headers: this.headers });
        if (res.status === 404) return null;
        const data = await res.json();
        return data.sha || null;
      }
      // Read a file's text content; null when it does not exist.
      async read(path) {
        const res = await fetch(this.getUrl(path), { method: "GET", headers: this.headers });
        if (res.status === 404) return null;
        const data = await res.json();
        // The Contents API returns the blob base64-encoded.
        const content = data.content;
        return Buffer.from(content, "base64").toString("utf-8");
      }
      // Create or update a file. On a 409 (SHA conflict from a concurrent
      // update) the PUT is retried once with a freshly fetched SHA.
      // NOTE(review): other non-ok statuses (401/403/422, and a failing
      // retry) are not surfaced — those failures are silent.
      async write(path, content) {
        const sha = await this.getSha(path);
        const body = {
          message: `update ${path}`,
          content: Buffer.from(content).toString("base64")
        };
        if (sha) body.sha = sha;
        const res = await fetch(this.getUrl(path), {
          method: "PUT",
          headers: this.headers,
          body: JSON.stringify(body)
        });
        if (!res.ok && res.status === 409) {
          const freshSha = await this.getSha(path);
          if (freshSha) body.sha = freshSha;
          await fetch(this.getUrl(path), {
            method: "PUT",
            headers: this.headers,
            body: JSON.stringify(body)
          });
        }
      }
      // Append by read-modify-write; not atomic against concurrent writers
      // beyond the single 409 retry inside write().
      async append(path, content) {
        const existing = await this.read(path);
        const newContent = existing ? existing + content : content;
        await this.write(path, newContent);
      }
      // NOTE(review): treats any non-404 status as "exists", including auth
      // or rate-limit errors — verify that is acceptable for callers.
      async exists(path) {
        const res = await fetch(this.getUrl(path), { method: "GET", headers: this.headers });
        return res.status !== 404;
      }
      // List entry names in a directory; empty array when missing or when
      // the path resolves to a single file (non-array response).
      async list(dir) {
        const res = await fetch(this.getUrl(dir), { method: "GET", headers: this.headers });
        if (res.status === 404) return [];
        const data = await res.json();
        if (!Array.isArray(data)) return [];
        return data.map((entry) => entry.name);
      }
      // No-op: GitHub creates directories implicitly when files are written.
      async mkdir(_dir) {
      }
      // A Contents-API GET returns an array for directories, an object for
      // files; absence of the path yields false.
      async isDirectory(path) {
        const res = await fetch(this.getUrl(path), { method: "GET", headers: this.headers });
        if (res.status === 404) return false;
        const data = await res.json();
        return Array.isArray(data);
      }
    };
  }
});
|
|
100
|
+
|
|
101
|
+
// src/core/config.ts
|
|
102
|
+
import { readFileSync as readFileSync2, writeFileSync as writeFileSync2, existsSync as existsSync2, mkdirSync as mkdirSync2 } from "fs";
|
|
103
|
+
import { dirname as dirname2, join as join2 } from "path";
|
|
104
|
+
|
|
105
|
+
// src/core/local-storage.ts
|
|
106
|
+
import {
|
|
107
|
+
readFileSync,
|
|
108
|
+
writeFileSync,
|
|
109
|
+
appendFileSync,
|
|
110
|
+
existsSync,
|
|
111
|
+
mkdirSync,
|
|
112
|
+
readdirSync,
|
|
113
|
+
statSync
|
|
114
|
+
} from "fs";
|
|
115
|
+
import { dirname, join } from "path";
|
|
116
|
+
// Storage adapter backed by the local filesystem; every path handed to a
// method is interpreted relative to `basePath`.
var LocalStorageAdapter = class {
  constructor(basePath) {
    this.basePath = basePath;
  }
  // Map a storage-relative path onto the real filesystem.
  resolve(path) {
    return join(this.basePath, path);
  }
  // Return the file's text, or null when it does not exist.
  async read(path) {
    const target = this.resolve(path);
    return existsSync(target) ? readFileSync(target, "utf-8") : null;
  }
  // Write `content`, creating any missing parent directories first.
  async write(path, content) {
    const target = this.resolve(path);
    mkdirSync(dirname(target), { recursive: true });
    writeFileSync(target, content, "utf-8");
  }
  // Append `content`, creating any missing parent directories first.
  async append(path, content) {
    const target = this.resolve(path);
    mkdirSync(dirname(target), { recursive: true });
    appendFileSync(target, content, "utf-8");
  }
  async exists(path) {
    return existsSync(this.resolve(path));
  }
  // List directory entry names; empty array when the directory is absent.
  async list(dir) {
    const target = this.resolve(dir);
    return existsSync(target) ? readdirSync(target) : [];
  }
  async mkdir(dir) {
    mkdirSync(this.resolve(dir), { recursive: true });
  }
  // True only when the path exists and is a directory; statSync throws on
  // missing paths, which is reported as false.
  async isDirectory(path) {
    try {
      return statSync(this.resolve(path)).isDirectory();
    } catch {
      return false;
    }
  }
};
|
|
157
|
+
|
|
158
|
+
// src/core/config.ts
|
|
159
|
+
// Resolve the plugin config file: <CLAUDE_PLUGIN_DATA>/config.json.
// Throws when the CLAUDE_PLUGIN_DATA environment variable is unset/empty.
function getConfigPath() {
  const dataDir = process.env.CLAUDE_PLUGIN_DATA;
  if (dataDir) {
    return join2(dataDir, "config.json");
  }
  throw new Error("CLAUDE_PLUGIN_DATA environment variable is not set");
}
|
|
166
|
+
// Detect a pre-0.2 config: a plain object keyed by vaultPath/reviewFolder
// instead of the current storage-adapter shape.
function isOldConfig(raw) {
  if (typeof raw !== "object" || raw === null) return false;
  return "vaultPath" in raw && "reviewFolder" in raw;
}
|
|
170
|
+
// Upgrade a pre-0.2 config to the current shape: the old vaultPath +
// reviewFolder pair collapses into a single local-storage basePath;
// language/periods/profile carry over unchanged.
function migrateOldConfig(old) {
  const basePath = join2(old.vaultPath, old.reviewFolder);
  return {
    storage: { type: "local", local: { basePath } },
    language: old.language,
    periods: old.periods,
    profile: old.profile
  };
}
|
|
183
|
+
// Load and parse the plugin config. A pre-0.2 config is transparently
// migrated and persisted back on first read; returns null when no config
// file exists yet.
function loadConfig() {
  const configPath = getConfigPath();
  if (!existsSync2(configPath)) return null;
  const parsed = JSON.parse(readFileSync2(configPath, "utf-8"));
  if (!isOldConfig(parsed)) return parsed;
  const migrated = migrateOldConfig(parsed);
  saveConfig(migrated);
  return migrated;
}
|
|
194
|
+
// Persist the config as pretty-printed JSON, creating the data directory
// on demand.
function saveConfig(config) {
  const target = getConfigPath();
  mkdirSync2(dirname2(target), { recursive: true });
  writeFileSync2(target, JSON.stringify(config, null, 2), "utf-8");
}
|
|
199
|
+
// Instantiate the storage backend selected by config.storage.type.
// The GitHub adapter is pulled in lazily through the bundler's deferred
// module shim so local-only setups never touch that code.
async function createStorageAdapter(config) {
  const { type } = config.storage;
  switch (type) {
    case "local":
      return new LocalStorageAdapter(config.storage.local.basePath);
    case "github": {
      const { GitHubStorageAdapter: GitHubStorageAdapter2 } = await Promise.resolve().then(() => (init_github_storage(), github_storage_exports));
      const g = config.storage.github;
      return new GitHubStorageAdapter2(g.owner, g.repo, g.token, g.basePath);
    }
    default:
      throw new Error(`Unknown storage type: ${type}`);
  }
}
|
|
210
|
+
|
|
211
|
+
// src/cli/storage-cli.ts
|
|
212
|
+
// src/cli/storage-cli.ts
// CLI entry point: storage-cli <read|write|append|list|exists> <path>.
// write/append consume their payload from stdin; exists signals its result
// through the exit code as well as stdout.
async function main() {
  const [command, ...args] = process.argv.slice(2);
  const config = loadConfig();
  if (!config) {
    process.stderr.write("config not found\n");
    process.exit(1);
  }
  const storage = await createStorageAdapter(config);
  // Drain stdin to a string (used by the write and append commands).
  const readStdin = async () => {
    process.stdin.setEncoding("utf-8");
    let buffered = "";
    for await (const chunk of process.stdin) {
      buffered += chunk;
    }
    return buffered;
  };
  switch (command) {
    case "read": {
      const content = await storage.read(args[0]);
      if (content !== null) process.stdout.write(content);
      break;
    }
    case "write":
      await storage.write(args[0], await readStdin());
      break;
    case "append":
      await storage.append(args[0], await readStdin());
      break;
    case "list": {
      const entries = await storage.list(args[0]);
      process.stdout.write(entries.join("\n") + "\n");
      break;
    }
    case "exists": {
      const found = await storage.exists(args[0]);
      process.stdout.write(found ? "true\n" : "false\n");
      process.exit(found ? 0 : 1);
      break;
    }
    default:
      process.stderr.write(`Unknown command: ${command}\nUsage: storage-cli <read|write|append|list|exists> <path>\n`);
      process.exit(1);
  }
}
main().catch((err) => {
  process.stderr.write(`Error: ${err.message}\n`);
  process.exit(1);
});
|
|
267
|
+
//# sourceMappingURL=storage-cli.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/core/github-storage.ts","../src/core/config.ts","../src/core/local-storage.ts","../src/cli/storage-cli.ts"],"sourcesContent":["import type { StorageAdapter } from \"./storage.js\";\n\nexport class GitHubStorageAdapter implements StorageAdapter {\n private baseUrl: string;\n private headers: Record<string, string>;\n\n constructor(\n private owner: string,\n private repo: string,\n private token: string,\n private basePath: string,\n ) {\n this.baseUrl = `https://api.github.com/repos/${owner}/${repo}/contents`;\n this.headers = {\n Authorization: `Bearer ${token}`,\n Accept: \"application/vnd.github.v3+json\",\n \"Content-Type\": \"application/json\",\n };\n }\n\n private getUrl(path: string): string {\n return `${this.baseUrl}/${this.basePath}/${path}`;\n }\n\n private async getSha(path: string): Promise<string | null> {\n const res = await fetch(this.getUrl(path), { method: \"GET\", headers: this.headers });\n if (res.status === 404) return null;\n const data = await res.json() as Record<string, unknown>;\n return (data.sha as string) || null;\n }\n\n async read(path: string): Promise<string | null> {\n const res = await fetch(this.getUrl(path), { method: \"GET\", headers: this.headers });\n if (res.status === 404) return null;\n const data = await res.json() as Record<string, unknown>;\n const content = data.content as string;\n return Buffer.from(content, \"base64\").toString(\"utf-8\");\n }\n\n async write(path: string, content: string): Promise<void> {\n const sha = await this.getSha(path);\n const body: Record<string, unknown> = {\n message: `update ${path}`,\n content: Buffer.from(content).toString(\"base64\"),\n };\n if (sha) body.sha = sha;\n\n const res = await fetch(this.getUrl(path), {\n method: \"PUT\",\n headers: this.headers,\n body: JSON.stringify(body),\n });\n\n if (!res.ok && res.status === 409) {\n const freshSha = await this.getSha(path);\n if (freshSha) body.sha = freshSha;\n await fetch(this.getUrl(path), {\n 
method: \"PUT\",\n headers: this.headers,\n body: JSON.stringify(body),\n });\n }\n }\n\n async append(path: string, content: string): Promise<void> {\n const existing = await this.read(path);\n const newContent = existing ? existing + content : content;\n await this.write(path, newContent);\n }\n\n async exists(path: string): Promise<boolean> {\n const res = await fetch(this.getUrl(path), { method: \"GET\", headers: this.headers });\n return res.status !== 404;\n }\n\n async list(dir: string): Promise<string[]> {\n const res = await fetch(this.getUrl(dir), { method: \"GET\", headers: this.headers });\n if (res.status === 404) return [];\n const data = await res.json() as Array<{ name: string }>;\n if (!Array.isArray(data)) return [];\n return data.map((entry) => entry.name);\n }\n\n async mkdir(_dir: string): Promise<void> {\n // GitHub creates directories implicitly when files are created\n }\n\n async isDirectory(path: string): Promise<boolean> {\n const res = await fetch(this.getUrl(path), { method: \"GET\", headers: this.headers });\n if (res.status === 404) return false;\n const data = await res.json();\n return Array.isArray(data);\n }\n}\n","import { readFileSync, writeFileSync, existsSync, mkdirSync } from \"fs\";\nimport { dirname, join } from \"path\";\nimport type { StorageAdapter } from \"./storage.js\";\nimport { LocalStorageAdapter } from \"./local-storage.js\";\n\nexport interface Profile {\n company: string;\n role: string;\n team: string;\n context: string;\n}\n\nexport interface Periods {\n daily: true;\n weekly: boolean;\n monthly: boolean;\n quarterly: boolean;\n yearly: boolean;\n}\n\nexport interface LocalStorageConfig {\n basePath: string;\n}\n\nexport interface GitHubStorageConfig {\n owner: string;\n repo: string;\n token: string;\n basePath: string;\n}\n\nexport interface StorageConfig {\n type: \"local\" | \"github\";\n local?: LocalStorageConfig;\n github?: GitHubStorageConfig;\n}\n\nexport interface Config {\n storage: StorageConfig;\n 
language: string;\n periods: Periods;\n profile: Profile;\n}\n\ninterface OldConfig {\n vaultPath: string;\n reviewFolder: string;\n language: string;\n periods: Periods;\n profile: Profile;\n}\n\nconst DEFAULT_PERIODS: Periods = {\n daily: true,\n weekly: true,\n monthly: true,\n quarterly: true,\n yearly: false,\n};\n\nconst DEFAULT_PROFILE: Profile = {\n company: \"\",\n role: \"\",\n team: \"\",\n context: \"\",\n};\n\nexport function getConfigPath(): string {\n const dataDir = process.env.CLAUDE_PLUGIN_DATA;\n if (!dataDir) {\n throw new Error(\"CLAUDE_PLUGIN_DATA environment variable is not set\");\n }\n return join(dataDir, \"config.json\");\n}\n\nfunction isOldConfig(raw: unknown): raw is OldConfig {\n if (!raw || typeof raw !== \"object\") return false;\n return \"vaultPath\" in raw && \"reviewFolder\" in raw;\n}\n\nfunction migrateOldConfig(old: OldConfig): Config {\n return {\n storage: {\n type: \"local\",\n local: {\n basePath: join(old.vaultPath, old.reviewFolder),\n },\n },\n language: old.language,\n periods: old.periods,\n profile: old.profile,\n };\n}\n\nexport function loadConfig(): Config | null {\n const configPath = getConfigPath();\n if (!existsSync(configPath)) return null;\n const raw = JSON.parse(readFileSync(configPath, \"utf-8\"));\n if (isOldConfig(raw)) {\n const migrated = migrateOldConfig(raw);\n saveConfig(migrated);\n return migrated;\n }\n return raw as Config;\n}\n\nexport function saveConfig(config: Config): void {\n const configPath = getConfigPath();\n mkdirSync(dirname(configPath), { recursive: true });\n writeFileSync(configPath, JSON.stringify(config, null, 2), \"utf-8\");\n}\n\nexport function validateConfig(config: unknown): config is Config {\n if (!config || typeof config !== \"object\") return false;\n const c = config as Record<string, unknown>;\n if (!c.storage || typeof c.storage !== \"object\") return false;\n const s = c.storage as Record<string, unknown>;\n if (s.type !== \"local\" && s.type !== \"github\") 
return false;\n if (s.type === \"local\") {\n if (!s.local || typeof s.local !== \"object\") return false;\n const l = s.local as Record<string, unknown>;\n if (typeof l.basePath !== \"string\" || l.basePath === \"\") return false;\n }\n if (s.type === \"github\") {\n if (!s.github || typeof s.github !== \"object\") return false;\n const g = s.github as Record<string, unknown>;\n if (typeof g.owner !== \"string\" || !g.owner) return false;\n if (typeof g.repo !== \"string\" || !g.repo) return false;\n if (typeof g.token !== \"string\" || !g.token) return false;\n }\n return true;\n}\n\nexport function createDefaultLocalConfig(basePath: string): Config {\n return {\n storage: { type: \"local\", local: { basePath } },\n language: \"ko\",\n periods: { ...DEFAULT_PERIODS },\n profile: { ...DEFAULT_PROFILE },\n };\n}\n\nexport function createDefaultGitHubConfig(owner: string, repo: string, token: string): Config {\n return {\n storage: { type: \"github\", github: { owner, repo, token, basePath: \"daily-review\" } },\n language: \"ko\",\n periods: { ...DEFAULT_PERIODS },\n profile: { ...DEFAULT_PROFILE },\n };\n}\n\nexport async function createStorageAdapter(config: Config): Promise<StorageAdapter> {\n if (config.storage.type === \"local\") {\n return new LocalStorageAdapter(config.storage.local!.basePath);\n }\n if (config.storage.type === \"github\") {\n const { GitHubStorageAdapter } = await import(\"./github-storage.js\");\n const g = config.storage.github!;\n return new GitHubStorageAdapter(g.owner, g.repo, g.token, g.basePath);\n }\n throw new Error(`Unknown storage type: ${(config.storage as any).type}`);\n}\n","import {\n readFileSync,\n writeFileSync,\n appendFileSync,\n existsSync,\n mkdirSync,\n readdirSync,\n statSync,\n} from \"fs\";\nimport { dirname, join } from \"path\";\nimport type { StorageAdapter } from \"./storage.js\";\n\nexport class LocalStorageAdapter implements StorageAdapter {\n constructor(private basePath: string) {}\n\n private resolve(path: 
string): string {\n return join(this.basePath, path);\n }\n\n async read(path: string): Promise<string | null> {\n const full = this.resolve(path);\n if (!existsSync(full)) return null;\n return readFileSync(full, \"utf-8\");\n }\n\n async write(path: string, content: string): Promise<void> {\n const full = this.resolve(path);\n mkdirSync(dirname(full), { recursive: true });\n writeFileSync(full, content, \"utf-8\");\n }\n\n async append(path: string, content: string): Promise<void> {\n const full = this.resolve(path);\n mkdirSync(dirname(full), { recursive: true });\n appendFileSync(full, content, \"utf-8\");\n }\n\n async exists(path: string): Promise<boolean> {\n return existsSync(this.resolve(path));\n }\n\n async list(dir: string): Promise<string[]> {\n const full = this.resolve(dir);\n if (!existsSync(full)) return [];\n return readdirSync(full);\n }\n\n async mkdir(dir: string): Promise<void> {\n mkdirSync(this.resolve(dir), { recursive: true });\n }\n\n async isDirectory(path: string): Promise<boolean> {\n try {\n return statSync(this.resolve(path)).isDirectory();\n } catch {\n return false;\n }\n }\n}\n","// src/cli/storage-cli.ts\nimport { loadConfig, createStorageAdapter } from \"../core/config.js\";\n\nasync function main() {\n const [command, ...args] = process.argv.slice(2);\n const config = loadConfig();\n if (!config) {\n process.stderr.write(\"config not found\\n\");\n process.exit(1);\n }\n\n const storage = await createStorageAdapter(config);\n\n switch (command) {\n case \"read\": {\n const content = await storage.read(args[0]);\n if (content !== null) process.stdout.write(content);\n break;\n }\n case \"write\": {\n let data = \"\";\n process.stdin.setEncoding(\"utf-8\");\n for await (const chunk of process.stdin) {\n data += chunk;\n }\n await storage.write(args[0], data);\n break;\n }\n case \"append\": {\n let data = \"\";\n process.stdin.setEncoding(\"utf-8\");\n for await (const chunk of process.stdin) {\n data += chunk;\n }\n await 
storage.append(args[0], data);\n break;\n }\n case \"list\": {\n const entries = await storage.list(args[0]);\n process.stdout.write(entries.join(\"\\n\") + \"\\n\");\n break;\n }\n case \"exists\": {\n const exists = await storage.exists(args[0]);\n process.stdout.write(exists ? \"true\\n\" : \"false\\n\");\n process.exit(exists ? 0 : 1);\n break;\n }\n default:\n process.stderr.write(`Unknown command: ${command}\\nUsage: storage-cli <read|write|append|list|exists> <path>\\n`);\n process.exit(1);\n }\n}\n\nmain().catch((err) => {\n process.stderr.write(`Error: ${err.message}\\n`);\n process.exit(1);\n});\n"],"mappings":";;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA,IAEa;AAFb;AAAA;AAAA;AAEO,IAAM,uBAAN,MAAqD;AAAA,MAI1D,YACU,OACA,MACA,OACA,UACR;AAJQ;AACA;AACA;AACA;AAER,aAAK,UAAU,gCAAgC,KAAK,IAAI,IAAI;AAC5D,aAAK,UAAU;AAAA,UACb,eAAe,UAAU,KAAK;AAAA,UAC9B,QAAQ;AAAA,UACR,gBAAgB;AAAA,QAClB;AAAA,MACF;AAAA,MAfQ;AAAA,MACA;AAAA,MAgBA,OAAO,MAAsB;AACnC,eAAO,GAAG,KAAK,OAAO,IAAI,KAAK,QAAQ,IAAI,IAAI;AAAA,MACjD;AAAA,MAEA,MAAc,OAAO,MAAsC;AACzD,cAAM,MAAM,MAAM,MAAM,KAAK,OAAO,IAAI,GAAG,EAAE,QAAQ,OAAO,SAAS,KAAK,QAAQ,CAAC;AACnF,YAAI,IAAI,WAAW,IAAK,QAAO;AAC/B,cAAM,OAAO,MAAM,IAAI,KAAK;AAC5B,eAAQ,KAAK,OAAkB;AAAA,MACjC;AAAA,MAEA,MAAM,KAAK,MAAsC;AAC/C,cAAM,MAAM,MAAM,MAAM,KAAK,OAAO,IAAI,GAAG,EAAE,QAAQ,OAAO,SAAS,KAAK,QAAQ,CAAC;AACnF,YAAI,IAAI,WAAW,IAAK,QAAO;AAC/B,cAAM,OAAO,MAAM,IAAI,KAAK;AAC5B,cAAM,UAAU,KAAK;AACrB,eAAO,OAAO,KAAK,SAAS,QAAQ,EAAE,SAAS,OAAO;AAAA,MACxD;AAAA,MAEA,MAAM,MAAM,MAAc,SAAgC;AACxD,cAAM,MAAM,MAAM,KAAK,OAAO,IAAI;AAClC,cAAM,OAAgC;AAAA,UACpC,SAAS,UAAU,IAAI;AAAA,UACvB,SAAS,OAAO,KAAK,OAAO,EAAE,SAAS,QAAQ;AAAA,QACjD;AACA,YAAI,IAAK,MAAK,MAAM;AAEpB,cAAM,MAAM,MAAM,MAAM,KAAK,OAAO,IAAI,GAAG;AAAA,UACzC,QAAQ;AAAA,UACR,SAAS,KAAK;AAAA,UACd,MAAM,KAAK,UAAU,IAAI;AAAA,QAC3B,CAAC;AAED,YAAI,CAAC,IAAI,MAAM,IAAI,WAAW,KAAK;AACjC,gBAAM,WAAW,MAAM,KAAK,OAAO,IAAI;AACvC,cAAI,SAAU,MAAK,MAAM;AACzB,gBAAM,MAAM,KAAK,OAAO,IAAI,GAAG;AAAA,YAC7B,QAAQ;AAAA,YACR,SAAS,KAAK;AAAA,YACd,MAAM,KAAK,UAAU,IAAI;AAAA,UAC3B,CAAC;AAAA,QACH;
AAAA,MACF;AAAA,MAEA,MAAM,OAAO,MAAc,SAAgC;AACzD,cAAM,WAAW,MAAM,KAAK,KAAK,IAAI;AACrC,cAAM,aAAa,WAAW,WAAW,UAAU;AACnD,cAAM,KAAK,MAAM,MAAM,UAAU;AAAA,MACnC;AAAA,MAEA,MAAM,OAAO,MAAgC;AAC3C,cAAM,MAAM,MAAM,MAAM,KAAK,OAAO,IAAI,GAAG,EAAE,QAAQ,OAAO,SAAS,KAAK,QAAQ,CAAC;AACnF,eAAO,IAAI,WAAW;AAAA,MACxB;AAAA,MAEA,MAAM,KAAK,KAAgC;AACzC,cAAM,MAAM,MAAM,MAAM,KAAK,OAAO,GAAG,GAAG,EAAE,QAAQ,OAAO,SAAS,KAAK,QAAQ,CAAC;AAClF,YAAI,IAAI,WAAW,IAAK,QAAO,CAAC;AAChC,cAAM,OAAO,MAAM,IAAI,KAAK;AAC5B,YAAI,CAAC,MAAM,QAAQ,IAAI,EAAG,QAAO,CAAC;AAClC,eAAO,KAAK,IAAI,CAAC,UAAU,MAAM,IAAI;AAAA,MACvC;AAAA,MAEA,MAAM,MAAM,MAA6B;AAAA,MAEzC;AAAA,MAEA,MAAM,YAAY,MAAgC;AAChD,cAAM,MAAM,MAAM,MAAM,KAAK,OAAO,IAAI,GAAG,EAAE,QAAQ,OAAO,SAAS,KAAK,QAAQ,CAAC;AACnF,YAAI,IAAI,WAAW,IAAK,QAAO;AAC/B,cAAM,OAAO,MAAM,IAAI,KAAK;AAC5B,eAAO,MAAM,QAAQ,IAAI;AAAA,MAC3B;AAAA,IACF;AAAA;AAAA;;;AC7FA,SAAS,gBAAAA,eAAc,iBAAAC,gBAAe,cAAAC,aAAY,aAAAC,kBAAiB;AACnE,SAAS,WAAAC,UAAS,QAAAC,aAAY;;;ACD9B;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP,SAAS,SAAS,YAAY;AAGvB,IAAM,sBAAN,MAAoD;AAAA,EACzD,YAAoB,UAAkB;AAAlB;AAAA,EAAmB;AAAA,EAE/B,QAAQ,MAAsB;AACpC,WAAO,KAAK,KAAK,UAAU,IAAI;AAAA,EACjC;AAAA,EAEA,MAAM,KAAK,MAAsC;AAC/C,UAAM,OAAO,KAAK,QAAQ,IAAI;AAC9B,QAAI,CAAC,WAAW,IAAI,EAAG,QAAO;AAC9B,WAAO,aAAa,MAAM,OAAO;AAAA,EACnC;AAAA,EAEA,MAAM,MAAM,MAAc,SAAgC;AACxD,UAAM,OAAO,KAAK,QAAQ,IAAI;AAC9B,cAAU,QAAQ,IAAI,GAAG,EAAE,WAAW,KAAK,CAAC;AAC5C,kBAAc,MAAM,SAAS,OAAO;AAAA,EACtC;AAAA,EAEA,MAAM,OAAO,MAAc,SAAgC;AACzD,UAAM,OAAO,KAAK,QAAQ,IAAI;AAC9B,cAAU,QAAQ,IAAI,GAAG,EAAE,WAAW,KAAK,CAAC;AAC5C,mBAAe,MAAM,SAAS,OAAO;AAAA,EACvC;AAAA,EAEA,MAAM,OAAO,MAAgC;AAC3C,WAAO,WAAW,KAAK,QAAQ,IAAI,CAAC;AAAA,EACtC;AAAA,EAEA,MAAM,KAAK,KAAgC;AACzC,UAAM,OAAO,KAAK,QAAQ,GAAG;AAC7B,QAAI,CAAC,WAAW,IAAI,EAAG,QAAO,CAAC;AAC/B,WAAO,YAAY,IAAI;AAAA,EACzB;AAAA,EAEA,MAAM,MAAM,KAA4B;AACtC,cAAU,KAAK,QAAQ,GAAG,GAAG,EAAE,WAAW,KAAK,CAAC;AAAA,EAClD;AAAA,EAEA,MAAM,YAAY,MAAgC;AAChD,QAAI;AACF,aAAO,SAAS,KAAK,QAAQ,IAAI,CAAC,EAAE,YAAY;AAAA,IAClD,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AACF;
;;ADSO,SAAS,gBAAwB;AACtC,QAAM,UAAU,QAAQ,IAAI;AAC5B,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI,MAAM,oDAAoD;AAAA,EACtE;AACA,SAAOC,MAAK,SAAS,aAAa;AACpC;AAEA,SAAS,YAAY,KAAgC;AACnD,MAAI,CAAC,OAAO,OAAO,QAAQ,SAAU,QAAO;AAC5C,SAAO,eAAe,OAAO,kBAAkB;AACjD;AAEA,SAAS,iBAAiB,KAAwB;AAChD,SAAO;AAAA,IACL,SAAS;AAAA,MACP,MAAM;AAAA,MACN,OAAO;AAAA,QACL,UAAUA,MAAK,IAAI,WAAW,IAAI,YAAY;AAAA,MAChD;AAAA,IACF;AAAA,IACA,UAAU,IAAI;AAAA,IACd,SAAS,IAAI;AAAA,IACb,SAAS,IAAI;AAAA,EACf;AACF;AAEO,SAAS,aAA4B;AAC1C,QAAM,aAAa,cAAc;AACjC,MAAI,CAACC,YAAW,UAAU,EAAG,QAAO;AACpC,QAAM,MAAM,KAAK,MAAMC,cAAa,YAAY,OAAO,CAAC;AACxD,MAAI,YAAY,GAAG,GAAG;AACpB,UAAM,WAAW,iBAAiB,GAAG;AACrC,eAAW,QAAQ;AACnB,WAAO;AAAA,EACT;AACA,SAAO;AACT;AAEO,SAAS,WAAW,QAAsB;AAC/C,QAAM,aAAa,cAAc;AACjC,EAAAC,WAAUC,SAAQ,UAAU,GAAG,EAAE,WAAW,KAAK,CAAC;AAClD,EAAAC,eAAc,YAAY,KAAK,UAAU,QAAQ,MAAM,CAAC,GAAG,OAAO;AACpE;AAyCA,eAAsB,qBAAqB,QAAyC;AAClF,MAAI,OAAO,QAAQ,SAAS,SAAS;AACnC,WAAO,IAAI,oBAAoB,OAAO,QAAQ,MAAO,QAAQ;AAAA,EAC/D;AACA,MAAI,OAAO,QAAQ,SAAS,UAAU;AACpC,UAAM,EAAE,sBAAAC,sBAAqB,IAAI,MAAM;AACvC,UAAM,IAAI,OAAO,QAAQ;AACzB,WAAO,IAAIA,sBAAqB,EAAE,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,QAAQ;AAAA,EACtE;AACA,QAAM,IAAI,MAAM,yBAA0B,OAAO,QAAgB,IAAI,EAAE;AACzE;;;AE9JA,eAAe,OAAO;AACpB,QAAM,CAAC,SAAS,GAAG,IAAI,IAAI,QAAQ,KAAK,MAAM,CAAC;AAC/C,QAAM,SAAS,WAAW;AAC1B,MAAI,CAAC,QAAQ;AACX,YAAQ,OAAO,MAAM,oBAAoB;AACzC,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,QAAM,UAAU,MAAM,qBAAqB,MAAM;AAEjD,UAAQ,SAAS;AAAA,IACf,KAAK,QAAQ;AACX,YAAM,UAAU,MAAM,QAAQ,KAAK,KAAK,CAAC,CAAC;AAC1C,UAAI,YAAY,KAAM,SAAQ,OAAO,MAAM,OAAO;AAClD;AAAA,IACF;AAAA,IACA,KAAK,SAAS;AACZ,UAAI,OAAO;AACX,cAAQ,MAAM,YAAY,OAAO;AACjC,uBAAiB,SAAS,QAAQ,OAAO;AACvC,gBAAQ;AAAA,MACV;AACA,YAAM,QAAQ,MAAM,KAAK,CAAC,GAAG,IAAI;AACjC;AAAA,IACF;AAAA,IACA,KAAK,UAAU;AACb,UAAI,OAAO;AACX,cAAQ,MAAM,YAAY,OAAO;AACjC,uBAAiB,SAAS,QAAQ,OAAO;AACvC,gBAAQ;AAAA,MACV;AACA,YAAM,QAAQ,OAAO,KAAK,CAAC,GAAG,IAAI;AAClC;AAAA,IACF;AAAA,IACA,KAAK,QAAQ;AACX,YAAM,UAAU,MAAM,QAAQ,KAAK,KAAK,CAAC,CAAC;AAC1C,cAAQ,OAAO,MAAM,QAAQ,KAAK,IAAI,IAAI,IAAI;AAC9C;AAAA,IACF;AAAA,IACA,KAAK,UAAU;AACb,YAA
M,SAAS,MAAM,QAAQ,OAAO,KAAK,CAAC,CAAC;AAC3C,cAAQ,OAAO,MAAM,SAAS,WAAW,SAAS;AAClD,cAAQ,KAAK,SAAS,IAAI,CAAC;AAC3B;AAAA,IACF;AAAA,IACA;AACE,cAAQ,OAAO,MAAM,oBAAoB,OAAO;AAAA;AAAA,CAA+D;AAC/G,cAAQ,KAAK,CAAC;AAAA,EAClB;AACF;AAEA,KAAK,EAAE,MAAM,CAAC,QAAQ;AACpB,UAAQ,OAAO,MAAM,UAAU,IAAI,OAAO;AAAA,CAAI;AAC9C,UAAQ,KAAK,CAAC;AAChB,CAAC;","names":["readFileSync","writeFileSync","existsSync","mkdirSync","dirname","join","join","existsSync","readFileSync","mkdirSync","dirname","writeFileSync","GitHubStorageAdapter"]}
|
package/hooks/hooks.json
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
{
|
|
2
|
+
"hooks": {
|
|
3
|
+
"Stop": [
|
|
4
|
+
{
|
|
5
|
+
"hooks": [
|
|
6
|
+
{
|
|
7
|
+
"type": "command",
|
|
8
|
+
"command": "node \"${CLAUDE_PLUGIN_ROOT}/dist/on-stop.js\"",
|
|
9
|
+
"async": true,
|
|
10
|
+
"timeout": 10
|
|
11
|
+
}
|
|
12
|
+
]
|
|
13
|
+
}
|
|
14
|
+
],
|
|
15
|
+
"SessionEnd": [
|
|
16
|
+
{
|
|
17
|
+
"hooks": [
|
|
18
|
+
{
|
|
19
|
+
"type": "agent",
|
|
20
|
+
"prompt": "Follow the instructions in the file at ${CLAUDE_PLUGIN_ROOT}/prompts/session-end.md exactly. The CLAUDE_PLUGIN_DATA directory is: ${CLAUDE_PLUGIN_DATA}. The plugin root is: ${CLAUDE_PLUGIN_ROOT}",
|
|
21
|
+
"timeout": 120
|
|
22
|
+
}
|
|
23
|
+
]
|
|
24
|
+
}
|
|
25
|
+
],
|
|
26
|
+
"SessionStart": [
|
|
27
|
+
{
|
|
28
|
+
"hooks": [
|
|
29
|
+
{
|
|
30
|
+
"type": "agent",
|
|
31
|
+
"prompt": "Follow the instructions in the file at ${CLAUDE_PLUGIN_ROOT}/prompts/session-start.md exactly. The CLAUDE_PLUGIN_DATA directory is: ${CLAUDE_PLUGIN_DATA}. The plugin root is: ${CLAUDE_PLUGIN_ROOT}",
|
|
32
|
+
"timeout": 180
|
|
33
|
+
}
|
|
34
|
+
]
|
|
35
|
+
}
|
|
36
|
+
]
|
|
37
|
+
}
|
|
38
|
+
}
|
package/package.json
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@giwonn/claude-daily-review",
|
|
3
|
+
"version": "0.2.0",
|
|
4
|
+
"description": "Claude Code plugin that auto-captures conversations for daily review and career documentation",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"main": "dist/on-stop.js",
|
|
7
|
+
"scripts": {
|
|
8
|
+
"build": "tsup",
|
|
9
|
+
"test": "vitest run",
|
|
10
|
+
"test:watch": "vitest"
|
|
11
|
+
},
|
|
12
|
+
"keywords": ["claude-code", "plugin", "daily-review", "obsidian"],
|
|
13
|
+
"repository": {
|
|
14
|
+
"type": "git",
|
|
15
|
+
"url": "https://github.com/giwonn/claude-daily-review"
|
|
16
|
+
},
|
|
17
|
+
"license": "MIT",
|
|
18
|
+
"files": [
|
|
19
|
+
"dist",
|
|
20
|
+
"hooks",
|
|
21
|
+
"prompts",
|
|
22
|
+
"skills",
|
|
23
|
+
"README.md"
|
|
24
|
+
],
|
|
25
|
+
"devDependencies": {
|
|
26
|
+
"typescript": "^5.4.0",
|
|
27
|
+
"vitest": "^3.0.0",
|
|
28
|
+
"tsup": "^8.0.0",
|
|
29
|
+
"@types/node": "^20.0.0"
|
|
30
|
+
}
|
|
31
|
+
}
|
|
@@ -0,0 +1,233 @@
|
|
|
1
|
+
# SessionEnd Agent Prompt — claude-daily-review
|
|
2
|
+
|
|
3
|
+
You are a session review generator for the claude-daily-review plugin. Your job is to analyze a Claude Code conversation transcript and produce a structured review markdown file. This review will later be merged into a daily review document.
|
|
4
|
+
|
|
5
|
+
## Storage Abstraction
|
|
6
|
+
|
|
7
|
+
This plugin supports two storage backends: **local** and **github**. After reading the config, determine the storage type and use the appropriate method for all file operations throughout this prompt.
|
|
8
|
+
|
|
9
|
+
- **If `storage.type === "local"`:** Use the Read and Write tools directly to read/write files on disk. Paths are relative to `storage.local.basePath`.
|
|
10
|
+
- **If `storage.type === "github"`:** Use the storage-cli tool via Bash for all file operations. The CLI commands are:
|
|
11
|
+
```bash
|
|
12
|
+
node "${CLAUDE_PLUGIN_ROOT}/dist/storage-cli.js" read <path>
|
|
13
|
+
echo "<content>" | node "${CLAUDE_PLUGIN_ROOT}/dist/storage-cli.js" write <path>
|
|
14
|
+
echo "<content>" | node "${CLAUDE_PLUGIN_ROOT}/dist/storage-cli.js" append <path>
|
|
15
|
+
node "${CLAUDE_PLUGIN_ROOT}/dist/storage-cli.js" list <dir>
|
|
16
|
+
node "${CLAUDE_PLUGIN_ROOT}/dist/storage-cli.js" exists <path>
|
|
17
|
+
```
|
|
18
|
+
All `<path>` arguments are relative to the configured `storage.github.basePath` (e.g., `daily-review`). The CLI handles GitHub API calls internally.
|
|
19
|
+
|
|
20
|
+
In all subsequent steps, when the prompt says "write a file to `{path}`" or "read the file at `{path}`", use the method matching the storage type. For local storage, the full path is `{storage.local.basePath}/{path}`. For GitHub storage, pass `{path}` directly to the storage-cli.
|
|
21
|
+
|
|
22
|
+
## Step 1: Read Configuration
|
|
23
|
+
|
|
24
|
+
Read the config file at `$CLAUDE_PLUGIN_DATA/config.json`. Parse it as JSON. The structure is:
|
|
25
|
+
|
|
26
|
+
```json
|
|
27
|
+
{
|
|
28
|
+
"storage": {
|
|
29
|
+
"type": "local",
|
|
30
|
+
"local": { "basePath": "/path/to/vault/daily-review" }
|
|
31
|
+
},
|
|
32
|
+
"language": "ko",
|
|
33
|
+
"periods": { "daily": true, "weekly": true, "monthly": true, "quarterly": true, "yearly": false },
|
|
34
|
+
"profile": {
|
|
35
|
+
"company": "ABC Corp",
|
|
36
|
+
"role": "프론트엔드 개발자",
|
|
37
|
+
"team": "결제플랫폼팀",
|
|
38
|
+
"context": "B2B SaaS 결제 시스템 개발 및 운영"
|
|
39
|
+
}
|
|
40
|
+
}
|
|
41
|
+
```
|
|
42
|
+
|
|
43
|
+
Or for GitHub storage:
|
|
44
|
+
```json
|
|
45
|
+
{
|
|
46
|
+
"storage": {
|
|
47
|
+
"type": "github",
|
|
48
|
+
"github": { "owner": "user", "repo": "repo", "token": "tok", "basePath": "daily-review" }
|
|
49
|
+
},
|
|
50
|
+
"language": "ko",
|
|
51
|
+
"periods": { ... },
|
|
52
|
+
"profile": { ... }
|
|
53
|
+
}
|
|
54
|
+
```
|
|
55
|
+
|
|
56
|
+
If the config file does not exist or cannot be read, write an error to stderr and exit with code 2.
|
|
57
|
+
|
|
58
|
+
Determine the storage type from `config.storage.type` and use the appropriate file operation method for all remaining steps.
|
|
59
|
+
|
|
60
|
+
## Step 2: Read Hook Input
|
|
61
|
+
|
|
62
|
+
Read stdin as JSON. It contains:
|
|
63
|
+
|
|
64
|
+
```json
|
|
65
|
+
{
|
|
66
|
+
"session_id": "abc123",
|
|
67
|
+
"transcript_path": "/path/to/transcript.jsonl",
|
|
68
|
+
"cwd": "/home/user/projects/my-app"
|
|
69
|
+
}
|
|
70
|
+
```
|
|
71
|
+
|
|
72
|
+
- `session_id`: Unique identifier for this session
|
|
73
|
+
- `transcript_path`: Path to the JSONL transcript file containing the full conversation
|
|
74
|
+
- `cwd`: Working directory where Claude Code was running
|
|
75
|
+
|
|
76
|
+
## Step 3: Read and Parse Transcript
|
|
77
|
+
|
|
78
|
+
Read the file at `transcript_path`. It is in JSONL format (one JSON object per line). Each line represents a conversation turn. Parse all lines and reconstruct the conversation flow.
|
|
79
|
+
|
|
80
|
+
If the transcript file is empty or cannot be read, write `.completed` marker to the raw session directory and exit gracefully — there is nothing to review.
|
|
81
|
+
|
|
82
|
+
## Step 4: Analyze and Classify
|
|
83
|
+
|
|
84
|
+
Classify the conversation content into groups:
|
|
85
|
+
|
|
86
|
+
### Project Detection
|
|
87
|
+
|
|
88
|
+
Derive the project name from `cwd`:
|
|
89
|
+
- Extract the last path component as the project name (e.g., `/home/user/projects/my-app` -> `my-app`)
|
|
90
|
+
- If cwd is a home directory or root, classify as "uncategorized"
|
|
91
|
+
|
|
92
|
+
### Content Grouping
|
|
93
|
+
|
|
94
|
+
Within each project group, identify distinct work topics. A topic is a coherent unit of work (e.g., "authentication refactoring", "bug fix in payment module"). Group related Q&A and discussion under the same topic.
|
|
95
|
+
|
|
96
|
+
### What to Extract
|
|
97
|
+
|
|
98
|
+
For each topic, extract:
|
|
99
|
+
1. **작업 요약 (Work Summary):** What was done, in 1-2 concise sentences. Frame with business context using the profile information.
|
|
100
|
+
2. **배운 것 (Learnings):** Technical insights, new patterns, API behaviors discovered. Bullet points.
|
|
101
|
+
3. **고민한 포인트 (Decision Points):** Decisions made and their reasoning. Format: "X vs Y -> chose X (reason)"
|
|
102
|
+
4. **질문과 답변 (Q&A):** Key questions asked and their answers. Not raw conversation — distill to the essential knowledge.
|
|
103
|
+
|
|
104
|
+
If profile information is available, use it to frame summaries with business context. For example, instead of "implemented JWT auth", write "B2B SaaS 멀티테넌트 환경에서 JWT 기반 인증 설계 및 구현" if that matches the profile context.
|
|
105
|
+
|
|
106
|
+
### What to Skip
|
|
107
|
+
|
|
108
|
+
- Trivial exchanges ("thanks", "ok", small talk)
|
|
109
|
+
- Repeated or redundant information
|
|
110
|
+
- Raw code dumps — summarize what the code does instead
|
|
111
|
+
- Debugging back-and-forth — summarize the root cause and fix
|
|
112
|
+
|
|
113
|
+
## Step 5: Generate Review Markdown
|
|
114
|
+
|
|
115
|
+
Write the review to: `.reviews/{session_id}.md` (using the appropriate storage method)
|
|
116
|
+
|
|
117
|
+
Use the configured `language` for all generated text. If language is "ko", write in Korean. If "en", write in English. Etc.
|
|
118
|
+
|
|
119
|
+
### Review File Format
|
|
120
|
+
|
|
121
|
+
```markdown
|
|
122
|
+
---
|
|
123
|
+
date: {YYYY-MM-DD}
|
|
124
|
+
type: session-review
|
|
125
|
+
session_id: {session_id}
|
|
126
|
+
projects: [{project-names}]
|
|
127
|
+
tags: [{technology-tags}]
|
|
128
|
+
---
|
|
129
|
+
|
|
130
|
+
## [{project-name}] {topic-title}
|
|
131
|
+
**작업 요약:** {concise summary with business context}
|
|
132
|
+
**배운 것:**
|
|
133
|
+
- {learning 1}
|
|
134
|
+
- {learning 2}
|
|
135
|
+
**고민한 포인트:**
|
|
136
|
+
- {decision}: {option A} vs {option B} → {chosen} ({reason})
|
|
137
|
+
**질문과 답변:**
|
|
138
|
+
- Q: {distilled question}
|
|
139
|
+
→ A: {concise answer}
|
|
140
|
+
|
|
141
|
+
## [{project-name}] {another-topic}
|
|
142
|
+
...
|
|
143
|
+
|
|
144
|
+
## 미분류
|
|
145
|
+
**질문과 답변:**
|
|
146
|
+
- Q: {question}
|
|
147
|
+
→ A: {answer}
|
|
148
|
+
|
|
149
|
+
## Tags
|
|
150
|
+
#{project-name} #{technology1} #{technology2}
|
|
151
|
+
```
|
|
152
|
+
|
|
153
|
+
### Important Formatting Rules
|
|
154
|
+
|
|
155
|
+
- Use Obsidian-compatible tags: `#project-name #technology`
|
|
156
|
+
- Project names in square brackets: `[my-app]`
|
|
157
|
+
- Use `→` for answer indicators in Q&A
|
|
158
|
+
- Omit sections that have no content (e.g., if there are no decision points, skip 고민한 포인트)
|
|
159
|
+
- If the entire session is uncategorized, omit project sections and only include the 미분류 section
|
|
160
|
+
- Include the YAML frontmatter with date, type, session_id, projects list, and tags list
|
|
161
|
+
|
|
162
|
+
### Section Labels by Language
|
|
163
|
+
|
|
164
|
+
| Section | ko | en |
|
|
165
|
+
|---------|----|----|
|
|
166
|
+
| Work Summary | 작업 요약 | Work Summary |
|
|
167
|
+
| Learnings | 배운 것 | Learnings |
|
|
168
|
+
| Decision Points | 고민한 포인트 | Decision Points |
|
|
169
|
+
| Q&A | 질문과 답변 | Q&A |
|
|
170
|
+
| Uncategorized | 미분류 | Uncategorized |
|
|
171
|
+
| Tags | Tags | Tags |
|
|
172
|
+
|
|
173
|
+
## Step 6: Update Project Summary (if applicable)
|
|
174
|
+
|
|
175
|
+
If the session involved project work (not just uncategorized), update the project summary file at:
|
|
176
|
+
`projects/{project-name}/summary.md` (using the appropriate storage method)
|
|
177
|
+
|
|
178
|
+
- If the file exists, read it and append/update relevant sections
|
|
179
|
+
- If the file does not exist, create it with this template:
|
|
180
|
+
|
|
181
|
+
```markdown
|
|
182
|
+
---
|
|
183
|
+
project: {project-name}
|
|
184
|
+
type: project-summary
|
|
185
|
+
started: {today's date}
|
|
186
|
+
last-updated: {today's date}
|
|
187
|
+
tags: [{technology-tags}]
|
|
188
|
+
---
|
|
189
|
+
|
|
190
|
+
# {project-name} 프로젝트 요약
|
|
191
|
+
|
|
192
|
+
## 프로젝트 개요
|
|
193
|
+
{inferred from conversation context and profile}
|
|
194
|
+
|
|
195
|
+
## 기술 스택
|
|
196
|
+
- {technologies used}
|
|
197
|
+
|
|
198
|
+
## 주요 구현 사항
|
|
199
|
+
- {what was implemented today}
|
|
200
|
+
|
|
201
|
+
## 핵심 의사결정 로그
|
|
202
|
+
- {date}: {decision} → {choice} ({reason})
|
|
203
|
+
|
|
204
|
+
## 배운 것 (누적)
|
|
205
|
+
- {learnings from this session}
|
|
206
|
+
```
|
|
207
|
+
|
|
208
|
+
When updating an existing summary:
|
|
209
|
+
- Update `last-updated` in frontmatter
|
|
210
|
+
- Add new tags if any
|
|
211
|
+
- Append new implementation items to 주요 구현 사항
|
|
212
|
+
- Append new decisions to 핵심 의사결정 로그
|
|
213
|
+
- Append new learnings to 배운 것 (avoid duplicates)
|
|
214
|
+
|
|
215
|
+
## Step 7: Mark Session as Completed
|
|
216
|
+
|
|
217
|
+
Write a `.completed` marker file to the raw session directory:
|
|
218
|
+
`.raw/{session_id}/.completed` (using the appropriate storage method)
|
|
219
|
+
|
|
220
|
+
The content of the marker file should be the current ISO timestamp (e.g., `2026-03-28T15:30:00.000Z`).
|
|
221
|
+
|
|
222
|
+
This marker prevents the session from being reprocessed during recovery.
|
|
223
|
+
|
|
224
|
+
## Critical Rules
|
|
225
|
+
|
|
226
|
+
1. **Be concise but meaningful.** These reviews serve as career documentation. Every line should be worth reading months later.
|
|
227
|
+
2. **Extract insights, not conversations.** Transform raw dialogue into structured knowledge.
|
|
228
|
+
3. **Use profile context.** If the user works on "B2B SaaS 결제 시스템", frame summaries in that context.
|
|
229
|
+
4. **Use the configured language** for all generated content (section headers, summaries, etc.).
|
|
230
|
+
5. **Create directories as needed.** Use `mkdir -p` equivalent before writing any file.
|
|
231
|
+
6. **Never delete raw data.** Only add the `.completed` marker — never remove or modify `.raw/` files.
|
|
232
|
+
7. **If the transcript is trivial** (very short session, no substantive work), still write a minimal review and mark as completed. Do not skip the session.
|
|
233
|
+
8. **Obsidian compatibility.** Use valid markdown, proper YAML frontmatter, and Obsidian-style tags.
|