scai 0.1.55 → 0.1.57

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/config.js CHANGED
@@ -1,79 +1,199 @@
  import fs from 'fs';
- import { CONFIG_PATH, SCAI_HOME, INDEX_DIR } from './constants.js'; // Correctly import INDEX_DIR from constants
- // Default configuration values
+ import path from 'path';
+ import { CONFIG_PATH, SCAI_HOME, SCAI_REPOS } from './constants.js';
+ import { getDbForRepo } from './db/client.js';
+ import { getRepoKeyForPath, normalizePath } from './utils/normalizePath.js';
+ import chalk from 'chalk';
  const defaultConfig = {
  model: 'llama3',
  language: 'ts',
- indexDir: INDEX_DIR, // Default index directory from constants
- githubToken: '', // Add githubToken to default config
+ indexDir: '',
+ githubToken: '',
+ repos: {},
+ activeRepo: undefined,
  };
- // Function to ensure the configuration directory exists
  function ensureConfigDir() {
  if (!fs.existsSync(SCAI_HOME)) {
  fs.mkdirSync(SCAI_HOME, { recursive: true });
  }
  }
- // Function to read the configuration file
  function readConfig() {
  try {
  const content = fs.readFileSync(CONFIG_PATH, 'utf-8');
  return { ...defaultConfig, ...JSON.parse(content) };
  }
  catch {
- return defaultConfig; // Return default config if read fails
+ return defaultConfig;
  }
  }
- // Function to write the configuration to the config file
- function writeConfig(newConfig) {
+ export function writeConfig(newCfg) {
  ensureConfigDir();
  const current = readConfig();
- const merged = { ...current, ...newConfig };
+ const merged = {
+ ...current,
+ ...newCfg,
+ repos: {
+ ...current.repos,
+ ...(newCfg.repos || {}),
+ },
+ };
  fs.writeFileSync(CONFIG_PATH, JSON.stringify(merged, null, 2));
  }
  export const Config = {
- // Get the current model from the config
  getModel() {
- return readConfig().model;
+ const cfg = readConfig();
+ const repoCfg = cfg.repos?.[cfg.activeRepo ?? ''];
+ return repoCfg?.model || cfg.model;
  },
- // Set a new model in the config
  setModel(model) {
- writeConfig({ model });
- console.log(`📦 Model set to: ${model}`);
+ const cfg = readConfig();
+ const active = cfg.activeRepo;
+ if (active) {
+ cfg.repos[active] = { ...cfg.repos[active], model };
+ writeConfig(cfg);
+ console.log(`📦 Model set to: ${model}`);
+ }
+ else {
+ writeConfig({ model });
+ console.log(`📦 Default model set to: ${model}`);
+ }
  },
- // Get the current language from the config
  getLanguage() {
- return readConfig().language;
+ const cfg = readConfig();
+ const repoCfg = cfg.repos?.[cfg.activeRepo ?? ''];
+ return repoCfg?.language || cfg.language;
  },
- // Set a new language in the config
  setLanguage(language) {
- writeConfig({ language });
- console.log(`🗣️ Language set to: ${language}`);
+ const cfg = readConfig();
+ const active = cfg.activeRepo;
+ if (active) {
+ cfg.repos[active] = { ...cfg.repos[active], language };
+ writeConfig(cfg);
+ console.log(`🗣️ Language set to: ${language}`);
+ }
+ else {
+ writeConfig({ language });
+ console.log(`🗣️ Default language set to: ${language}`);
+ }
  },
- // Get the index directory from the config
  getIndexDir() {
- return readConfig().indexDir;
+ const config = readConfig();
+ const activeRepo = config.activeRepo;
+ if (activeRepo) {
+ const normalized = normalizePath(activeRepo);
+ return normalizePath(config.repos[normalized]?.indexDir ?? '');
+ }
+ return '';
+ },
+ async setIndexDir(indexDir) {
+ const absPath = path.resolve(indexDir); // Resolve the index directory to an absolute path
+ const repoKey = normalizePath(absPath); // Normalize path for the repo (get repo name, not full path)
+ // Ensure repoKey doesn't contain an absolute path, only the repo name or a relative path
+ const scaiRepoRoot = path.join(SCAI_REPOS, path.basename(repoKey)); // Use repo name as key to avoid double paths
+ // Set the active repo to the provided indexDir
+ const cfg = readConfig();
+ cfg.activeRepo = repoKey;
+ await writeConfig(cfg); // Persist the change in activeRepo
+ // Call setRepoIndexDir to update the repo's indexDir and other settings
+ await this.setRepoIndexDir(scaiRepoRoot, absPath); // Set the indexDir for the repo
+ // Ensure base folders exist
+ fs.mkdirSync(scaiRepoRoot, { recursive: true });
+ fs.mkdirSync(path.join(scaiRepoRoot, 'summaries'), { recursive: true });
+ fs.mkdirSync(path.join(scaiRepoRoot, 'metadata'), { recursive: true });
+ // Init DB if not exists
+ const dbPath = path.join(scaiRepoRoot, 'db.sqlite');
+ if (!fs.existsSync(dbPath)) {
+ console.log(`Database not found. Initializing DB at ${normalizePath(dbPath)}`);
+ getDbForRepo(); // Now DB creation works after config update
+ }
+ console.log(`✅ Index directory set to: ${normalizePath(absPath)}`);
+ },
+ /**
+ * Set both the scaiRepoRoot for the config and the indexDir (the actual repo root path)
+ * @param scaiRepoRoot
+ * @param indexDir
+ */
+ async setRepoIndexDir(scaiRepoRoot, indexDir) {
+ const normalizedRepoPath = normalizePath(scaiRepoRoot);
+ const normalizedIndexDir = normalizePath(indexDir);
+ const cfg = readConfig();
+ if (!cfg.repos[normalizedRepoPath]) {
+ cfg.repos[normalizedRepoPath] = {};
+ }
+ cfg.repos[normalizedRepoPath] = {
+ ...cfg.repos[normalizedRepoPath],
+ indexDir: normalizedIndexDir, // Ensure the indexDir is always normalized
+ };
+ await writeConfig(cfg); // Persist the config update
+ console.log(`✅ Repo index directory set for ${normalizedRepoPath} : ${normalizedIndexDir}`);
  },
- // Set a new index directory in the config
- setIndexDir(indexDir) {
- writeConfig({ indexDir });
- console.log(`📁 Index directory set to: ${indexDir}`);
+ setActiveRepo(repoKey) {
+ const cfg = readConfig();
+ cfg.activeRepo = repoKey;
+ writeConfig(cfg);
+ console.log(`✅ Active repo switched to: ${repoKey}`);
+ },
+ printAllRepos() {
+ const cfg = readConfig();
+ const keys = Object.keys(cfg.repos || {});
+ if (!keys.length) {
+ console.log('ℹ️ No repositories configured yet.');
+ return;
+ }
+ console.log('📁 Configured repositories:\n');
+ for (const key of keys) {
+ const r = cfg.repos[key];
+ const isActive = cfg.activeRepo === key;
+ // Use chalk to ensure proper coloring
+ const label = isActive
+ ? chalk.green(`✅ ${key} (active)`) // Active repo in green
+ : chalk.white(` ${key}`); // Inactive repos in white
+ console.log(`- ${label}`);
+ console.log(` ↳ indexDir: ${r.indexDir}`);
+ }
  },
- // Get the GitHub token from the config
+ // Method to get GitHub token for the active repo
  getGitHubToken() {
- return readConfig().githubToken || null;
+ const cfg = readConfig();
+ const active = cfg.activeRepo;
+ if (active) {
+ // Normalize the active repo path and fetch token from repos[activeRepo]
+ const normalizedActiveRepo = normalizePath(active);
+ return cfg.repos[normalizedActiveRepo]?.githubToken || null;
+ }
+ // If no activeRepo, fall back to the global githubToken field
+ return cfg.githubToken || null;
  },
- // Set the GitHub token in the config
  setGitHubToken(token) {
- writeConfig({ githubToken: token });
- console.log("✅ GitHub token updated");
+ const cfg = readConfig();
+ const active = cfg.activeRepo;
+ if (active) {
+ const repoKey = getRepoKeyForPath(active, cfg) ?? normalizePath(active);
+ if (!cfg.repos[repoKey]) {
+ cfg.repos[repoKey] = {};
+ }
+ cfg.repos[repoKey] = {
+ ...cfg.repos[repoKey],
+ githubToken: token,
+ };
+ }
+ else {
+ cfg.githubToken = token;
+ }
+ writeConfig(cfg);
+ console.log('✅ GitHub token updated');
  },
- // Show the current configuration
  show() {
  const cfg = readConfig();
+ const active = cfg.activeRepo;
  console.log(`🔧 Current configuration:`);
- console.log(` Model : ${cfg.model}`);
- console.log(` Language : ${cfg.language}`);
- console.log(` Index dir : ${cfg.indexDir}`);
- console.log(` GitHub Token: ${cfg.githubToken ? '*****' : 'Not Set'}`);
- }
+ console.log(` Active index dir: ${active || 'Not Set'}`);
+ const repoCfg = active ? cfg.repos?.[active] : {};
+ console.log(` Model : ${repoCfg?.model || cfg.model}`);
+ console.log(` Language : ${repoCfg?.language || cfg.language}`);
+ console.log(` GitHub Token : ${cfg.githubToken ? '*****' : 'Not Set'}`);
+ },
+ getRaw() {
+ return readConfig();
+ },
  };
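The configuration now tracks an `activeRepo` plus a `repos` map instead of a single global `indexDir`. A minimal sketch of how the updated `Config` API might be driven, assuming the dist build is imported directly; the deep import path and the project path below are illustrative assumptions, not taken from this diff:

```js
// Sketch only: 'scai/dist/config.js' and '/home/alice/projects/my-app' are assumptions.
import { Config } from 'scai/dist/config.js';

// Registers the project under ~/.scai/repos/<basename>, marks it as the active repo,
// and creates its per-repo SQLite database if it does not exist yet.
await Config.setIndexDir('/home/alice/projects/my-app');

// With an active repo set, these write to that repo's entry in the repos map
// rather than the global defaults.
Config.setModel('llama3');
Config.setLanguage('ts');

Config.printAllRepos();            // lists configured repos, active one highlighted
console.log(Config.getIndexDir()); // normalized indexDir of the active repo
console.log(Config.getRaw());      // raw config, including repos and activeRepo
```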
package/dist/constants.js CHANGED
@@ -7,10 +7,9 @@ import fs from 'fs';
  */
  export const SCAI_HOME = path.join(os.homedir(), '.scai');
  /**
- * Full path to the SQLite database used by SCAI:
- * ~/.scai/db.sqlite
+ * Repos dir for multi-repo setup
  */
- export const DB_PATH = path.join(SCAI_HOME, 'db.sqlite');
+ export const SCAI_REPOS = path.join(SCAI_HOME, 'repos');
  /**
  * Path to the daemon process ID file (if running in background mode):
  * ~/.scai/daemon.pid
@@ -32,26 +31,24 @@ export const LOG_PATH = path.join(SCAI_HOME, 'daemon.log');
  */
  export const PROMPT_LOG_PATH = path.join(SCAI_HOME, 'prompt.log');
  /**
- * Get the active index directory.
+ * Get the active index directory based on the active repo.
  *
- * - If the user has configured an `indexDir`, use it.
- * - If not, default to the users home directory (`~`), not `.scai`.
+ * - If there is an active repository, return its `indexDir` from the config.
+ * - If no active repo is set, default to the user's home directory (`~`).
  */
  export function getIndexDir() {
  try {
  const config = JSON.parse(fs.readFileSync(CONFIG_PATH, 'utf-8'));
- return config.indexDir || os.homedir(); // 🏠 Default: ~
+ const activeRepo = config.activeRepo;
+ if (activeRepo && config.repos[activeRepo]) {
+ return config.repos[activeRepo].indexDir || os.homedir(); // Repo-specific indexDir or default to home
+ }
+ return os.homedir(); // Fallback to home if no active repo
  }
  catch (e) {
- return os.homedir(); // 🏠 Fallback if config file is missing or invalid
+ return os.homedir(); // Fallback if config file is missing or invalid
  }
  }
- /**
- * On-demand index directory to scan for files.
- *
- * Used by indexing logic (`scai index`) to determine what folder to scan.
- */
- export const INDEX_DIR = getIndexDir();
  /**
  * Limit for number of related files included in model prompt.
  */
@@ -61,6 +58,6 @@ export const RELATED_FILES_LIMIT = 3;
  */
  export const CANDIDATE_LIMIT = 100;
  /**
- * Limit number of summery lines
+ * Limit number of summary lines
  */
  export const MAX_SUMMARY_LINES = 12;
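With the module-level `INDEX_DIR` constant removed, callers resolve the directory at call time via `getIndexDir()`. A small sketch of the fallback behaviour described in the docstring above; the deep import path is an assumption:

```js
// Sketch only: the import path is an assumption.
import os from 'os';
import { getIndexDir } from 'scai/dist/constants.js';

const dir = getIndexDir();
// If an active repo with an indexDir is configured, `dir` is that directory;
// otherwise (no active repo, or a missing/invalid config file) it is os.homedir().
console.log(dir === os.homedir() ? 'no active repo, indexing ~' : `indexing ${dir}`);
```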
@@ -1,13 +1,12 @@
  import { indexFunctionsForFile } from '../db/functionIndex.js';
- import { db } from '../db/client.js';
  import fs from 'fs/promises';
  import fsSync from 'fs';
  import { generateEmbedding } from '../lib/generateEmbedding.js';
- import { DB_PATH } from '../constants.js';
  import { log } from '../utils/log.js';
  import lockfile from 'proper-lockfile';
  import { summaryModule } from '../pipeline/modules/summaryModule.js';
  import { classifyFile } from '../fileRules/classifyFile.js';
+ import { getDbForRepo, getDbPathForRepo } from '../db/client.js';
  import { markFileAsSkippedByPath, selectUnprocessedFiles, updateFileWithSummaryAndEmbedding, } from '../db/sqlTemplates.js';
  const MAX_FILES_PER_BATCH = 5;
  /**
@@ -16,7 +15,7 @@ const MAX_FILES_PER_BATCH = 5;
  */
  async function lockDb() {
  try {
- return await lockfile.lock(DB_PATH);
+ return await lockfile.lock(getDbPathForRepo());
  }
  catch (err) {
  log('❌ Failed to acquire DB lock: ' + err);
@@ -34,6 +33,7 @@ async function lockDb() {
  export async function runDaemonBatch() {
  log('🟡 Starting daemon batch...');
  // Selects up to MAX_FILES_PER_BATCH files that haven't been processed yet
+ const db = getDbForRepo();
  const rows = db.prepare(selectUnprocessedFiles).all(MAX_FILES_PER_BATCH);
  if (rows.length === 0) {
  log('✅ No files left to process.');
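The daemon now locks the per-repo database file instead of the old global `DB_PATH`. A sketch of the lock/release pattern this implies around a batch, assuming proper-lockfile's promise API and the same deep import path convention; the batch body is a placeholder:

```js
// Sketch only: mirrors lockDb() above; import paths and the batch body are assumptions.
import lockfile from 'proper-lockfile';
import { getDbForRepo, getDbPathForRepo } from 'scai/dist/db/client.js';

const release = await lockfile.lock(getDbPathForRepo()); // resolves to a release function
try {
  const db = getDbForRepo();
  // ... select unprocessed files, summarize and embed them ...
} finally {
  await release(); // always release so other daemon workers can acquire the lock
}
```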
@@ -1,4 +1,4 @@
- import { db } from '../db/client.js';
+ import { getDbForRepo } from '../db/client.js';
  import { runDaemonBatch } from './daemonBatch.js';
  import { log } from '../utils/log.js';
  const SLEEP_MS = 2000;
@@ -7,6 +7,7 @@ const IDLE_SLEEP_MS = 5000;
  log('🛠️ daemonWorker.js loaded');
  async function isQueueEmpty() {
  try {
+ const db = getDbForRepo();
  const row = db.prepare(`
  SELECT COUNT(*) AS count
  FROM files
package/dist/db/client.js CHANGED
@@ -1,9 +1,31 @@
- import Database from 'better-sqlite3';
  import fs from 'fs';
- import { DB_PATH, SCAI_HOME } from '../constants.js';
- // Ensure the directory exists
- fs.mkdirSync(SCAI_HOME, { recursive: true });
- // Open the database connection
- export const db = new Database(DB_PATH);
- // Set journal_mode to WAL for better concurrency
- db.pragma('journal_mode = WAL');
+ import path from 'path';
+ import { SCAI_HOME } from '../constants.js';
+ import { Config } from '../config.js';
+ import Database from 'better-sqlite3';
+ /**
+ * Returns a per-repo SQLite database instance.
+ * Ensures the directory and file are created.
+ */
+ export function getDbForRepo() {
+ const repoRoot = Config.getIndexDir();
+ if (!repoRoot) {
+ throw new Error('No index directory set. Please set an index directory first.');
+ }
+ fs.mkdirSync(SCAI_HOME, { recursive: true });
+ const dbPath = getDbPathForRepo();
+ fs.mkdirSync(path.dirname(dbPath), { recursive: true });
+ const db = new Database(dbPath);
+ db.pragma('journal_mode = WAL');
+ return db;
+ }
+ export function getDbPathForRepo() {
+ const repoRoot = Config.getIndexDir();
+ if (!repoRoot) {
+ throw new Error('No index directory set. Please set an index directory first.');
+ }
+ // Use path.basename to get the repo name from the full path
+ const repoName = path.basename(repoRoot); // Get the last part of the path (the repo name)
+ const scaiRepoPath = path.join(SCAI_HOME, 'repos', repoName, 'db.sqlite');
+ return scaiRepoPath;
+ }
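The single shared `db` export is replaced by per-repo factory functions, and the database path is derived from the basename of the active repo's indexDir. A sketch of that mapping for illustration; the paths are hypothetical:

```js
// Sketch only: reproduces the path derivation in getDbPathForRepo() with example values.
import path from 'path';
import os from 'os';

function sketchDbPath(indexDir) {
  const repoName = path.basename(indexDir); // e.g. 'my-app'
  return path.join(os.homedir(), '.scai', 'repos', repoName, 'db.sqlite');
}

console.log(sketchDbPath('/home/alice/projects/my-app'));
// → /home/alice/.scai/repos/my-app/db.sqlite
// Note: two repos that share a basename would map to the same database file.
```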
@@ -1,4 +1,3 @@
- import { db } from './client.js';
  import fs from 'fs';
  import path from 'path';
  import { generateEmbedding } from '../lib/generateEmbedding.js';
@@ -6,6 +5,7 @@ import { sanitizeQueryForFts } from '../utils/sanitizeQuery.js';
  import * as sqlTemplates from './sqlTemplates.js';
  import { stringSimilarity } from 'string-similarity-js';
  import { CANDIDATE_LIMIT } from '../constants.js';
+ import { getDbForRepo } from './client.js';
  /**
  * 📄 Index a single file into the database.
  *
@@ -20,6 +20,7 @@ export function indexFile(filePath, summary, type) {
  const normalizedPath = path.normalize(filePath).replace(/\\/g, '/');
  const fileName = path.basename(normalizedPath); // Extracting the filename
  // Insert into files table
+ const db = getDbForRepo();
  db.prepare(sqlTemplates.upsertFileTemplate).run({
  path: normalizedPath,
  filename: fileName, // Pass filename
@@ -51,6 +52,7 @@ export function indexFile(filePath, summary, type) {
  */
  export function queryFiles(safeQuery, limit = 10) {
  console.log(`Executing search query: ${safeQuery}`);
+ const db = getDbForRepo();
  const results = db.prepare(`
  SELECT f.id, f.path, f.filename, f.summary, f.type, f.last_modified, f.indexed_at
  FROM files f
@@ -81,6 +83,7 @@ export async function searchFiles(query, topK = 5) {
  }
  const safeQuery = sanitizeQueryForFts(query);
  console.log(`Executing search query in FTS5: ${safeQuery}`);
+ const db = getDbForRepo();
  const ftsResults = db.prepare(`
  SELECT fts.rowid AS id, f.path, f.filename, f.summary, f.type, bm25(files_fts) AS bm25Score, f.embedding
  FROM files f
@@ -158,6 +161,7 @@ export function getFunctionsForFiles(fileIds) {
  if (!fileIds.length)
  return {};
  const placeholders = fileIds.map(() => '?').join(',');
+ const db = getDbForRepo();
  const stmt = db.prepare(`
  SELECT f.file_id, f.name, f.start_line, f.end_line, f.content
  FROM functions f
@@ -1,8 +1,9 @@
- import { db } from '../client.js';
+ import { getDbForRepo } from '../client.js';
  import { markFileAsSkippedTemplate } from '../sqlTemplates.js';
  export async function extractFromJava(filePath, _content, fileId) {
  console.warn(`⛔️ Java extraction not implemented: ${filePath}`);
  // Mark the file as skipped with the relevant status update
+ const db = getDbForRepo();
  db.prepare(markFileAsSkippedTemplate).run({ id: fileId });
  return false;
  }
@@ -1,11 +1,11 @@
  import { parse } from 'acorn';
  import { ancestor as walkAncestor } from 'acorn-walk';
  import { generateEmbedding } from '../../lib/generateEmbedding.js';
- import { db } from '../client.js';
  import path from 'path';
  import { log } from '../../utils/log.js';
  import fs from 'fs';
  import { markFileAsSkippedTemplate, markFileAsExtractedTemplate, markFileAsFailedTemplate } from '../sqlTemplates.js';
+ import { getDbForRepo } from '../client.js';
  function getFunctionName(node, parent, fileName) {
  if (node.id?.name)
  return node.id.name;
@@ -20,6 +20,7 @@ function getFunctionName(node, parent, fileName) {
  return `${fileName}:<anon>`;
  }
  export async function extractFromJS(filePath, content, fileId) {
+ const db = getDbForRepo();
  try {
  const code = fs.readFileSync(filePath, 'utf-8');
  console.log(`[Debug] Attempting to parse: ${filePath}`);
@@ -1,8 +1,9 @@
- import { db } from '../client.js';
+ import { getDbForRepo } from '../client.js';
  import { markFileAsSkippedTemplate } from '../sqlTemplates.js';
  export async function extractFromXML(filePath, _content, fileId) {
  console.warn(`⛔️ XML extraction not implemented: ${filePath}`);
  // Mark the file as skipped with the relevant status update
+ const db = getDbForRepo();
  db.prepare(markFileAsSkippedTemplate).run({ id: fileId });
  return false;
  }
@@ -3,13 +3,14 @@ import { detectFileType } from '../../fileRules/detectFileType.js';
  import { extractFromJava } from './extractFromJava.js';
  import { extractFromJS } from './extractFromJs.js';
  import { extractFromXML } from './extractFromXML.js';
- import { db } from '../client.js';
+ import { getDbForRepo } from '../client.js';
  import { markFileAsFailedTemplate, markFileAsSkippedByPath } from '../sqlTemplates.js';
  /**
  * Detects file type and delegates to the appropriate extractor.
  */
  export async function extractFunctionsFromFile(filePath, content, fileId) {
  const type = detectFileType(filePath).trim().toLowerCase();
+ const db = getDbForRepo();
  try {
  if (type === 'js' || type === 'ts' || type === 'javascript' || type === 'typescript') {
  log(`✅ Attempting to extract JS functions from ${filePath}`);
package/dist/db/schema.js CHANGED
@@ -1,5 +1,6 @@
- import { db } from "./client.js";
+ import { getDbForRepo } from "./client.js";
  export function initSchema() {
+ const db = getDbForRepo();
  db.exec(`
  -- Create the files table
  CREATE TABLE IF NOT EXISTS files (
@@ -0,0 +1,69 @@
+ import { Octokit } from '@octokit/rest';
+ /**
+ * Parses the PR diff to determine the correct position for inline comments.
+ * The position is the "index" of the changed line in the diff.
+ * @param diff The diff content of the PR
+ * @param lineNumber The line number to convert to a position in the diff
+ */
+ function getLinePositionFromDiff(diff, lineNumber) {
+ const lines = diff.split('\n');
+ let currentLine = 0;
+ // Iterate through the lines and determine the correct position for the lineNumber
+ for (let i = 0; i < lines.length; i++) {
+ // Only count the lines that are part of a diff chunk
+ if (lines[i].startsWith('+') || lines[i].startsWith('-')) {
+ currentLine++;
+ if (currentLine === lineNumber) {
+ return i; // Position is the index of the changed line in the diff
+ }
+ }
+ }
+ return null; // Return null if lineNumber is not found in the diff
+ }
+ /**
+ * Posts an inline review comment on a specific line of a PR.
+ *
+ * @param token GitHub personal access token
+ * @param owner Repository owner (e.g. 'my-org')
+ * @param repo Repository name (e.g. 'my-repo')
+ * @param prNumber Pull Request number
+ * @param fileName Path to the file in the PR (relative to repo root)
+ * @param lineNumber Line number to comment on in the file (not in the diff)
+ * @param comment Text of the comment
+ */
+ export async function postReviewComment(token, owner, repo, prNumber, fileName, lineNumber, comment) {
+ const octokit = new Octokit({ auth: `token ${token}` });
+ // First, get PR details so we can retrieve the head commit SHA
+ const pr = await octokit.pulls.get({
+ owner,
+ repo,
+ pull_number: prNumber,
+ });
+ const commitId = pr.data.head.sha;
+ // Fetch the PR diff by getting the full diff URL from the PR
+ const diffUrl = pr.data.diff_url;
+ const diffRes = await fetch(diffUrl);
+ const diff = await diffRes.text();
+ // Get the position of the line in the diff
+ const position = getLinePositionFromDiff(diff, lineNumber);
+ if (position === null) {
+ console.error(`❌ Unable to find line ${lineNumber} in the diff for PR #${prNumber}.`);
+ return;
+ }
+ // Now, post the inline comment
+ try {
+ await octokit.pulls.createReviewComment({
+ owner,
+ repo,
+ pull_number: prNumber,
+ commit_id: commitId,
+ path: fileName,
+ body: comment,
+ position: position, // Use the position calculated from the diff
+ });
+ console.log(`✅ Inline comment posted to ${fileName} at diff position ${position}.`);
+ }
+ catch (err) {
+ console.error(`❌ Error posting inline comment: ${err.message}`);
+ }
+ }
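This new module posts inline PR review comments via Octokit, translating a file line number into a diff position first. A usage sketch with placeholder values; the module's path inside the package is not shown in this diff, so no import statement is given and every argument below is hypothetical:

```js
// Sketch only: all values are placeholders for illustration.
await postReviewComment(
  process.env.GITHUB_TOKEN, // personal access token
  'my-org',                 // repository owner
  'my-repo',                // repository name
  42,                       // pull request number
  'src/index.js',           // file path relative to the repo root
  10,                       // line number in the file (not in the diff)
  'Consider extracting this into a helper.'
);
```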