ai-cli-log 1.0.3 → 1.0.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. package/.ai-cli-log/0001.txt +360 -0
  2. package/.ai-cli-log/0002.txt +1791 -0
  3. package/.ai-cli-log/0003.txt +338 -0
  4. package/.ai-cli-log/0004.txt +116 -0
  5. package/.ai-cli-log/0005.txt +105 -0
  6. package/.ai-cli-log/config.json +32 -0
  7. package/.ai-cli-log/gemini-2025-07-13T13-33-13-a-quick-gemini-test.txt +174 -0
  8. package/.ai-cli-log/gemini-2025-07-13T13-37-00-typescript-check-pass.txt +105 -0
  9. package/.ai-cli-log/gemini-2025-07-13T13-44-15-rename-ai-cli-logs-to-log.txt +164 -0
  10. package/.ai-cli-log/gemini-20250705-154601.txt +1320 -0
  11. package/.ai-cli-log/gemini-20250705-155547.txt +726 -0
  12. package/.ai-cli-log/gemini-20250705-165038.txt +66 -0
  13. package/.ai-cli-log/gemini-20250705-171429.txt +216 -0
  14. package/.ai-cli-log/gemini-20250705-191202.txt +448 -0
  15. package/.ai-cli-log/gemini-20250705-193741.txt +901 -0
  16. package/.ai-cli-log/gemini-20250705-194435.txt +110 -0
  17. package/.ai-cli-log/gemini-20250705-195926.txt +415 -0
  18. package/.ai-cli-log/gemini-20250705-201738.txt +246 -0
  19. package/.ai-cli-log/gemini-20250713-204921.txt +3036 -0
  20. package/.ai-cli-log/gemini-20250713-215941-update-ai-cli-log-documentation.txt +400 -0
  21. package/.ai-cli-log/gemini-20250713-220544-removed-debug-logs-successfully.txt +258 -0
  22. package/.ai-cli-log/gemini-20250713-221128-sessionsummarytxt.txt +112 -0
  23. package/.ai-cli-log/gemini-20250714-084659.txt +86 -0
  24. package/.ai-cli-log/gemini-20250714-085847-update-prompt-configuration.txt +189 -0
  25. package/.ai-cli-log/gemini-20250714-090905-add-sgpt-custom-summarizer.txt +284 -0
  26. package/.ai-cli-log/gemini-20250714-092329.txt +110 -0
  27. package/.ai-cli-log/gemini-20250714-092935.txt +183 -0
  28. package/.ai-cli-log/gemini-20250714-093205-heres-a-concise-summary-of-the-terminal-session-fix-sgpt-option-error-this-captures-1-the-action-fix-2-the-tool-involved-sgpt-3-the-issue-option-error-4-follows-the-requested-lowercase-hyphenated-format.txt +140 -0
  29. package/.ai-cli-log/gemini-20250714-094141-heres-the-concise-summary-build-check-clean-this-captures-1.txt +111 -0
  30. package/.ai-cli-log/gemini-20250714-094405-heres-the-concise-summary-clean-build-check-this-captures-1.txt +111 -0
  31. package/.ai-cli-log/gemini-20250714-094816-json-summary-format.txt +132 -0
  32. package/.ai-cli-log/gemini-20250714-094833-optimize-summary-logic.txt +342 -0
  33. package/.ai-cli-log/gemini-20250714-133202-refactor-config-initialization.txt +1729 -0
  34. package/.ai-cli-log/gemini-20250714-134138-update-summary-logic.txt +153 -0
  35. package/.ai-cli-log/gemini-20250714-134749-json-summary-format.txt +214 -0
  36. package/.ai-cli-log/gemini-20250714-140527.txt +715 -0
  37. package/.ai-cli-log/gemini-20250714-142018.txt +86 -0
  38. package/.ai-cli-log/gemini-20250714-142027-update-summary-format.txt +86 -0
  39. package/.ai-cli-log/gemini-20250714-142100-session-complete.txt +86 -0
  40. package/.ai-cli-log/gemini-20250714-142129-refactor-readme-structure.txt +584 -0
  41. package/.ai-cli-log/gemini-20250714-213153.txt +1195 -0
  42. package/.ai-cli-log/session-20250705-150655.txt +174 -0
  43. package/.ai-cli-log/session-20250705-151726.txt +313 -0
  44. package/.github/workflows/node.js.yml +30 -0
  45. package/GEMINI.md +5 -4
  46. package/README.md +230 -45
  47. package/dist/index.js +368 -71
  48. package/package.json +1 -1
  49. package/src/index.ts +419 -89
package/dist/index.js CHANGED
@@ -1,3 +1,4 @@
1
+ #!/usr/bin/env node
1
2
  "use strict";
2
3
  var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
3
4
  if (k2 === undefined) k2 = k;
@@ -32,87 +33,383 @@ var __importStar = (this && this.__importStar) || (function () {
32
33
  return result;
33
34
  };
34
35
  })();
36
+ var __importDefault = (this && this.__importDefault) || function (mod) {
37
+ return (mod && mod.__esModule) ? mod : { "default": mod };
38
+ };
35
39
  Object.defineProperty(exports, "__esModule", { value: true });
36
40
  const pty = __importStar(require("node-pty"));
37
41
  const fs = __importStar(require("fs"));
38
42
  const path = __importStar(require("path"));
43
+ const os = __importStar(require("os"));
44
+ const child_process_1 = require("child_process");
39
45
  const headless_1 = require("@xterm/headless");
40
- const args = process.argv.slice(2);
41
- const command = args[0];
42
- const commandArgs = args.slice(1);
43
- if (!command) {
44
- console.error('Usage: ai-cli-log <command> [args...]');
45
- process.exit(1);
46
+ const readline_1 = __importDefault(require("readline"));
47
+ // --- 1. CONFIGURATION & TYPE DEFINITIONS ---
48
+ const GLOBAL_CONFIG_DIR = path.join(os.homedir(), '.config', 'ai-cli-log');
49
+ const GLOBAL_CONFIG_PATH = path.join(GLOBAL_CONFIG_DIR, 'config.json');
50
+ const LOCAL_CONFIG_PATH = path.join(process.cwd(), '.ai-cli-log', 'config.json');
51
+ function findConfigPath() {
52
+ if (fs.existsSync(LOCAL_CONFIG_PATH))
53
+ return LOCAL_CONFIG_PATH;
54
+ if (fs.existsSync(GLOBAL_CONFIG_PATH))
55
+ return GLOBAL_CONFIG_PATH;
56
+ return null;
46
57
  }
47
- const logsDir = path.join(process.cwd(), '.ai-cli-logs');
48
- if (!fs.existsSync(logsDir)) {
49
- fs.mkdirSync(logsDir);
58
+ function readConfig() {
59
+ const configPath = findConfigPath();
60
+ if (!configPath)
61
+ return { summarizer: { summarizers: [] } };
62
+ try {
63
+ const content = fs.readFileSync(configPath, 'utf-8');
64
+ return JSON.parse(content);
65
+ }
66
+ catch (error) {
67
+ console.error(`Error reading or parsing config file at ${configPath}:`, error);
68
+ return { summarizer: { summarizers: [] } };
69
+ }
50
70
  }
51
- // Initialize xterm.js in headless mode
52
- const xterm = new headless_1.Terminal({
53
- rows: process.stdout.rows,
54
- cols: process.stdout.columns,
55
- scrollback: Infinity, // Set scrollback to Infinity for unlimited buffer
56
- allowProposedApi: true,
57
- });
58
- const term = pty.spawn(command, commandArgs, {
59
- name: 'xterm-color',
60
- cols: process.stdout.columns,
61
- rows: process.stdout.rows,
62
- cwd: process.cwd(),
63
- env: process.env,
64
- });
65
- // Pipe pty output to xterm.js and also to stdout
66
- term.onData((data) => {
67
- process.stdout.write(data);
68
- xterm.write(data);
69
- });
70
- // Pipe stdin to pty
71
- process.stdin.on('data', (data) => {
72
- term.write(data.toString());
73
- });
74
- process.stdin.setRawMode(true);
75
- process.stdin.resume();
76
- term.onExit(({ exitCode, signal }) => {
77
- // Add a small delay to ensure xterm.js has processed all output
78
- setTimeout(() => {
79
- // Extract rendered text from xterm.js buffer
80
- let renderedOutput = '';
81
- // Iterate over the entire buffer, including scrollback.
82
- // The total number of lines is the sum of lines in scrollback (baseY) and visible rows.
83
- for (let i = 0; i < xterm.buffer.active.baseY + xterm.rows; i++) {
84
- const line = xterm.buffer.active.getLine(i);
85
- if (line) {
86
- // translateToString(true) gets the line content, and we trim trailing whitespace.
87
- const lineText = line.translateToString(true).replace(/\s+$/, '');
88
- renderedOutput += lineText + '\n';
71
+ function writeConfig(config, isLocal) {
72
+ const targetPath = isLocal ? LOCAL_CONFIG_PATH : GLOBAL_CONFIG_PATH;
73
+ const targetDir = path.dirname(targetPath);
74
+ try {
75
+ if (!fs.existsSync(targetDir)) {
76
+ fs.mkdirSync(targetDir, { recursive: true });
77
+ }
78
+ fs.writeFileSync(targetPath, JSON.stringify(config, null, 2));
79
+ console.log(`✔ Configuration successfully saved to ${targetPath}`);
80
+ }
81
+ catch (error) {
82
+ console.error(`Error writing config file to ${targetPath}:`, error);
83
+ }
84
+ }
85
+ // --- 2. COMMAND IMPLEMENTATIONS ---
86
+ async function handleInitCommand(isLocal) {
87
+ const targetPath = isLocal ? LOCAL_CONFIG_PATH : GLOBAL_CONFIG_PATH;
88
+ const rl = readline_1.default.createInterface({ input: process.stdin, output: process.stdout });
89
+ const ask = (question) => new Promise(resolve => rl.question(question, resolve));
90
+ try {
91
+ if (fs.existsSync(targetPath)) {
92
+ const warning = `Configuration file already exists. Continuing will allow you to add or update default summarizers (gemini-pro, ollama, sgpt) with the latest recommended settings, while preserving any other custom summarizers you have added.`;
93
+ console.log(warning);
94
+ const answer = await ask('Do you want to continue? (y/N): ');
95
+ if (answer.toLowerCase() !== 'y') {
96
+ console.log('Initialization cancelled.');
97
+ return;
89
98
  }
90
99
  }
91
- const now = new Date();
92
- const year = now.getFullYear();
93
- const month = (now.getMonth() + 1).toString().padStart(2, '0');
94
- const day = now.getDate().toString().padStart(2, '0');
95
- const hours = now.getHours().toString().padStart(2, '0');
96
- const minutes = now.getMinutes().toString().padStart(2, '0');
97
- const seconds = now.getSeconds().toString().padStart(2, '0');
98
- const prefix = command || 'session';
99
- const logFileName = `${prefix}-${year}${month}${day}-${hours}${minutes}${seconds}.md`;
100
- const logFilePath = path.join(logsDir, logFileName);
101
- fs.writeFile(logFilePath, renderedOutput, (err) => {
102
- if (err) {
103
- console.error('Error writing log file:', err);
100
+ console.log('\nScanning for available AI tools...');
101
+ const availableTools = [];
102
+ const checkTool = (tool) => new Promise(resolve => {
103
+ const proc = (0, child_process_1.spawn)('which', [tool], { stdio: 'ignore' });
104
+ proc.on('close', code => {
105
+ if (code === 0) {
106
+ console.log(` - Found ${tool}!`);
107
+ availableTools.push(tool);
108
+ }
109
+ resolve();
110
+ });
111
+ proc.on('error', () => resolve());
112
+ });
113
+ await Promise.all([checkTool('gemini'), checkTool('ollama'), checkTool('sgpt'), checkTool('claude')]);
114
+ if (availableTools.length === 0) {
115
+ console.log('No supported AI tools (gemini, ollama, sgpt, claude) found in your PATH.');
116
+ const createEmpty = await ask('\n> Would you like to create an empty configuration file to manually add a custom summarizer? (y/N): ');
117
+ if (createEmpty.toLowerCase() === 'y') {
118
+ const config = readConfig(); // Read to not overwrite existing unrelated config
119
+ writeConfig(config, isLocal);
104
120
  }
105
121
  else {
106
- console.log(`Session logged to ${path.relative(process.cwd(), logFilePath)}`);
122
+ console.log('Initialization cancelled.');
123
+ }
124
+ return;
125
+ }
126
+ const config = readConfig();
127
+ const summarizersToUpdate = [];
128
+ const newPrompt = 'You are a log summarizer. Your response MUST be a valid JSON object with one key: "summary" (a 3-5 word, lowercase, filename-friendly phrase). Example: {"summary": "refactor-database-schema"}. The session content is:';
129
+ if (availableTools.includes('gemini')) {
130
+ const add = await ask('\n> Found Gemini. Add/update the \'gemini-pro\' summarizer? (Y/n): ');
131
+ if (add.toLowerCase() !== 'n') {
132
+ summarizersToUpdate.push({
133
+ name: 'gemini-pro',
134
+ tool: 'gemini',
135
+ prompt: newPrompt,
136
+ maxLines: 100,
137
+ });
138
+ }
139
+ }
140
+ if (availableTools.includes('ollama')) {
141
+ const add = await ask('\n> Found Ollama. Add/update the \'ollama\' summarizer? (Y/n): ');
142
+ if (add.toLowerCase() !== 'n') {
143
+ const modelInput = await ask(' - Which Ollama model to use? (press Enter for \'llama3\'): ');
144
+ const model = modelInput || 'llama3';
145
+ summarizersToUpdate.push({
146
+ name: 'ollama',
147
+ tool: 'ollama',
148
+ model: model,
149
+ prompt: newPrompt,
150
+ maxLines: 50,
151
+ });
152
+ }
153
+ }
154
+ if (availableTools.includes('claude')) {
155
+ const add = await ask('\n> Found Claude. Add/update the \'claude-opus\' summarizer? (Y/n): ');
156
+ if (add.toLowerCase() !== 'n') {
157
+ summarizersToUpdate.push({
158
+ name: 'claude-opus',
159
+ tool: 'claude',
160
+ prompt: newPrompt,
161
+ maxLines: 100,
162
+ });
163
+ }
164
+ }
165
+ if (availableTools.includes('sgpt')) {
166
+ const add = await ask('\n> Found ShellGPT. Add/update the \'sgpt\' summarizer? (Y/n): ');
167
+ if (add.toLowerCase() !== 'n') {
168
+ summarizersToUpdate.push({
169
+ name: 'sgpt',
170
+ tool: 'custom',
171
+ command: ['sgpt', '--chat', 'session-summary', '"{{prompt}}"'],
172
+ prompt: newPrompt,
173
+ maxLines: 100,
174
+ });
175
+ }
176
+ }
177
+ if (summarizersToUpdate.length === 0) {
178
+ console.log('\nNo configurations were added or updated.');
179
+ return;
180
+ }
181
+ // "Update-or-add" logic
182
+ summarizersToUpdate.forEach(newS => {
183
+ const existingIndex = config.summarizer.summarizers.findIndex(s => s.name === newS.name);
184
+ if (existingIndex !== -1) {
185
+ config.summarizer.summarizers[existingIndex] = newS; // Update
186
+ console.log(` - Updated existing summarizer: "${newS.name}"`);
187
+ }
188
+ else {
189
+ config.summarizer.summarizers.push(newS); // Add
190
+ console.log(` - Added new summarizer: "${newS.name}"`);
107
191
  }
108
- process.exit(exitCode);
109
192
  });
110
- }, 500); // 500ms delay
111
- });
112
- process.on('SIGINT', () => {
113
- term.kill('SIGINT');
114
- });
115
- process.on('resize', () => {
116
- term.resize(process.stdout.columns, process.stdout.rows);
117
- xterm.resize(process.stdout.columns, process.stdout.rows);
118
- });
193
+ // Set default only if it wasn't set before
194
+ if (!config.summarizer.default && config.summarizer.summarizers.length > 0) {
195
+ const priority = ['gemini-pro', 'claude-opus', 'ollama', 'sgpt'];
196
+ for (const name of priority) {
197
+ if (config.summarizer.summarizers.some(s => s.name === name)) {
198
+ config.summarizer.default = name;
199
+ console.log(`\n✔ Set "${config.summarizer.default}" as the default summarizer.`);
200
+ break;
201
+ }
202
+ }
203
+ }
204
+ writeConfig(config, isLocal);
205
+ }
206
+ finally {
207
+ rl.close();
208
+ }
209
+ }
210
+ async function getAiSummary(content, summarizerName) {
211
+ const config = readConfig();
212
+ const name = summarizerName || config.summarizer.default;
213
+ if (!name) {
214
+ console.warn(`\nWarning: No default summarizer set. Please run 'ai-cli-log --init'.`);
215
+ return null;
216
+ }
217
+ const summarizer = config.summarizer.summarizers.find(s => s.name === name);
218
+ if (!summarizer) {
219
+ console.warn(`\nWarning: No summarizer named "${name}" found. Please check your configuration.`);
220
+ return null;
221
+ }
222
+ const { tool, model, prompt, maxLines = 0, command: customCommand } = summarizer;
223
+ let sampledContent = content;
224
+ const lines = content.split('\n');
225
+ if (maxLines > 0 && lines.length > maxLines * 2) {
226
+ const head = lines.slice(0, maxLines).join('\n');
227
+ const tail = lines.slice(-maxLines).join('\n');
228
+ sampledContent = `${head}\n\n[... Session content truncated ...]\n\n${tail}`;
229
+ console.log(`\n(Session content long, sampling first and last ${maxLines} lines for summary)`);
230
+ }
231
+ let command;
232
+ let inputForStdin;
233
+ switch (tool) {
234
+ case 'ollama':
235
+ command = ['ollama', 'run', model || ''];
236
+ inputForStdin = `${prompt}\n\n${sampledContent}`;
237
+ break;
238
+ case 'gemini':
239
+ command = ['gemini', '-p', prompt];
240
+ inputForStdin = sampledContent;
241
+ break;
242
+ case 'claude':
243
+ command = ['claude', '-p', prompt];
244
+ inputForStdin = sampledContent;
245
+ break;
246
+ case 'custom':
247
+ if (!customCommand) {
248
+ console.error(`Custom summarizer "${name}" is missing the "command" definition.`);
249
+ return null;
250
+ }
251
+ command = customCommand.map(arg => arg.replace('{{prompt}}', prompt));
252
+ inputForStdin = sampledContent;
253
+ break;
254
+ default:
255
+ console.error(`Tool "${tool}" is not directly supported yet.`);
256
+ return null;
257
+ }
258
+ command = command.filter(Boolean);
259
+ const [cmd, ...args] = command;
260
+ return new Promise((resolve) => {
261
+ const proc = (0, child_process_1.spawn)(cmd, args, { stdio: ['pipe', 'pipe', 'pipe'] });
262
+ let summary = '', errorOutput = '';
263
+ proc.stdout.on('data', data => {
264
+ const output = data.toString();
265
+ summary += output;
266
+ });
267
+ proc.stderr.on('data', data => {
268
+ const error = data.toString();
269
+ errorOutput += error;
270
+ });
271
+ proc.on('close', code => {
272
+ if (code !== 0) {
273
+ console.error(`\nSummarizer command exited with code ${code}. Stderr: ${errorOutput}`);
274
+ resolve(null);
275
+ }
276
+ else {
277
+ resolve(summary.trim());
278
+ }
279
+ });
280
+ proc.on('error', err => {
281
+ console.error(`\nFailed to start summarizer command "${cmd}". Is it in your PATH?`, err);
282
+ resolve(null);
283
+ });
284
+ proc.stdin.write(inputForStdin);
285
+ proc.stdin.end();
286
+ });
287
+ }
288
+ function runLoggingSession(command, commandArgs, summaryArg) {
289
+ const logsDir = path.dirname(LOCAL_CONFIG_PATH);
290
+ if (!fs.existsSync(logsDir))
291
+ fs.mkdirSync(logsDir, { recursive: true });
292
+ const xterm = new headless_1.Terminal({
293
+ rows: process.stdout.rows || 24,
294
+ cols: process.stdout.columns || 80,
295
+ scrollback: Infinity,
296
+ allowProposedApi: true,
297
+ });
298
+ const term = pty.spawn(command, commandArgs, {
299
+ name: 'xterm-color',
300
+ cols: process.stdout.columns || 80,
301
+ rows: process.stdout.rows || 24,
302
+ cwd: process.cwd(),
303
+ env: process.env,
304
+ });
305
+ const onData = (data) => {
306
+ process.stdout.write(data);
307
+ xterm.write(data);
308
+ };
309
+ term.onData(onData);
310
+ const onStdin = (data) => term.write(data.toString());
311
+ if (process.stdin.isTTY) {
312
+ process.stdin.setRawMode(true);
313
+ process.stdin.resume();
314
+ process.stdin.on('data', onStdin);
315
+ }
316
+ const onExit = async ({ exitCode }) => {
317
+ term.kill();
318
+ if (process.stdin.isTTY) {
319
+ process.stdin.removeListener('data', onStdin);
320
+ process.stdin.setRawMode(false);
321
+ process.stdin.pause();
322
+ }
323
+ setTimeout(async () => {
324
+ var _a;
325
+ let renderedOutput = '';
326
+ for (let i = 0; i < xterm.buffer.active.baseY + xterm.rows; i++) {
327
+ renderedOutput += ((_a = xterm.buffer.active.getLine(i)) === null || _a === void 0 ? void 0 : _a.translateToString(true)) + '\n';
328
+ }
329
+ renderedOutput = renderedOutput.trim();
330
+ if (renderedOutput.trim().length === 0) {
331
+ console.log('\nSession had no output, not saving log file.');
332
+ process.exit(exitCode);
333
+ }
334
+ const now = new Date();
335
+ const pad = (n) => n.toString().padStart(2, '0');
336
+ const timestamp = `${now.getFullYear()}${pad(now.getMonth() + 1)}${pad(now.getDate())}-${pad(now.getHours())}${pad(now.getMinutes())}${pad(now.getSeconds())}`;
337
+ const prefix = command || 'session';
338
+ let logFileName = `${prefix}-${timestamp}.txt`;
339
+ if (summaryArg) {
340
+ const startTime = Date.now();
341
+ const rawSummaryJson = await getAiSummary(renderedOutput, typeof summaryArg === 'string' ? summaryArg : undefined);
342
+ const endTime = Date.now();
343
+ if (rawSummaryJson) {
344
+ const duration = (endTime - startTime) / 1000;
345
+ const config = readConfig();
346
+ const summarizerName = (typeof summaryArg === 'string' ? summaryArg : config.summarizer.default) || 'default';
347
+ try {
348
+ const summaryData = JSON.parse(rawSummaryJson);
349
+ const slug = summaryData.summary;
350
+ if (typeof slug !== 'string') {
351
+ throw new Error('Invalid JSON structure from summarizer: "summary" key is missing or not a string.');
352
+ }
353
+ console.log(`\nSummary by ${summarizerName} (took ${duration.toFixed(1)}s): "${slug}"`);
354
+ logFileName = `${prefix}-${timestamp}-${slug}.txt`;
355
+ }
356
+ catch (e) {
357
+ console.error(`\nError parsing summary JSON from ${summarizerName}. Using raw output as fallback.`);
358
+ console.error(`Raw output: ${rawSummaryJson}`);
359
+ const slugify = (text) => text.toLowerCase().replace(/\s+/g, '-').replace(/[^a-z0-9-]/g, '');
360
+ const slug = slugify(rawSummaryJson).split('-').slice(0, 10).join('-');
361
+ logFileName = `${prefix}-${timestamp}-${slug}.txt`;
362
+ }
363
+ }
364
+ }
365
+ const logFilePath = path.join(logsDir, logFileName);
366
+ fs.writeFile(logFilePath, renderedOutput, (err) => {
367
+ if (err) {
368
+ console.error('\nError writing log file:', err);
369
+ }
370
+ else {
371
+ console.log(`\nSession logged to ${path.relative(process.cwd(), logFilePath)}`);
372
+ }
373
+ process.exit(exitCode);
374
+ });
375
+ }, 200);
376
+ };
377
+ term.onExit(onExit);
378
+ process.on('resize', () => {
379
+ term.resize(process.stdout.columns, process.stdout.rows);
380
+ xterm.resize(process.stdout.columns, process.stdout.rows);
381
+ });
382
+ }
383
+ // --- 3. MAIN ENTRY POINT & ARGUMENT PARSER ---
384
+ function main() {
385
+ const args = process.argv.slice(2);
386
+ if (args.includes('--version') || args.includes('-v')) {
387
+ // eslint-disable-next-line @typescript-eslint/no-var-requires
388
+ const pkg = require('../package.json');
389
+ console.log(pkg.version);
390
+ return;
391
+ }
392
+ if (args.includes('--init')) {
393
+ const isLocal = args.includes('--local');
394
+ handleInitCommand(isLocal);
395
+ return;
396
+ }
397
+ const summaryArgRaw = args.find(arg => arg.startsWith('--with-summary') || arg.startsWith('-s'));
398
+ const otherArgs = args.filter(arg => !arg.startsWith('--with-summary') &&
399
+ !arg.startsWith('-s') &&
400
+ arg !== '--init' &&
401
+ arg !== '--local');
402
+ const command = otherArgs[0];
403
+ const commandArgs = otherArgs.slice(1);
404
+ if (!command) {
405
+ console.error('Usage: ai-cli-log [-s[=<summarizer>]] <command> [args...]');
406
+ console.error(' ai-cli-log --init [--local]');
407
+ process.exit(1);
408
+ }
409
+ let summaryArg = false;
410
+ if (summaryArgRaw) {
411
+ summaryArg = summaryArgRaw.includes('=') ? summaryArgRaw.split('=')[1] : true;
412
+ }
413
+ runLoggingSession(command, commandArgs, summaryArg);
414
+ }
415
+ main();
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "ai-cli-log",
3
- "version": "1.0.3",
3
+ "version": "1.0.5",
4
4
  "description": "Seamlessly log your AI-powered coding conversations. This command-line interface (CLI) tool captures your terminal interactions with AI models like Gemini and Claude, saving entire sessions as clean plain text documents for easy review and documentation.",
5
5
  "main": "dist/index.js",
6
6
  "bin": {