@grec0/memory-bank-mcp 0.0.2 → 0.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,54 @@
+ /**
+  * @fileoverview Structured logger for Memory Bank MCP
+  * Ensures all logs are written to stderr to avoid breaking JSON-RPC on stdout
+  */
+ export var LogLevel;
+ (function (LogLevel) {
+     LogLevel[LogLevel["DEBUG"] = 0] = "DEBUG";
+     LogLevel[LogLevel["INFO"] = 1] = "INFO";
+     LogLevel[LogLevel["WARN"] = 2] = "WARN";
+     LogLevel[LogLevel["ERROR"] = 3] = "ERROR";
+ })(LogLevel || (LogLevel = {}));
+ export class Logger {
+     static instance;
+     level = LogLevel.INFO;
+     constructor() { }
+     static getInstance() {
+         if (!Logger.instance) {
+             Logger.instance = new Logger();
+         }
+         return Logger.instance;
+     }
+     setLevel(level) {
+         this.level = level;
+     }
+     formatMessage(level, message) {
+         const timestamp = new Date().toISOString();
+         return `[${timestamp}] [${level}] ${message}`;
+     }
+     debug(message) {
+         if (this.level <= LogLevel.DEBUG) {
+             console.error(this.formatMessage("DEBUG", message));
+         }
+     }
+     info(message) {
+         if (this.level <= LogLevel.INFO) {
+             console.error(this.formatMessage("INFO", message));
+         }
+     }
+     warn(message) {
+         if (this.level <= LogLevel.WARN) {
+             console.error(this.formatMessage("WARN", message));
+         }
+     }
+     error(message, error) {
+         if (this.level <= LogLevel.ERROR) {
+             const errorMsg = error ? ` ${error instanceof Error ? error.message : String(error)}` : "";
+             console.error(this.formatMessage("ERROR", message + errorMsg));
+             if (error instanceof Error && error.stack) {
+                 console.error(error.stack);
+             }
+         }
+     }
+ }
+ export const logger = Logger.getInstance();
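The logger is a lazy singleton, and every level writes through `console.error` because an MCP stdio server owns stdout for the JSON-RPC transport: a stray `console.log` would corrupt the protocol stream. A minimal usage sketch (the import path is an assumption; the diff does not show the new file's name):

```js
import { logger, LogLevel } from "./logger.js"; // assumed path

logger.setLevel(LogLevel.DEBUG);                 // default level is INFO
logger.debug("scanning workspace...");           // -> stderr: "[<ISO timestamp>] [DEBUG] scanning workspace..."
logger.error("indexing failed", new Error("boom")); // message plus stack trace, both on stderr

// stdout stays reserved for the JSON-RPC frames the MCP client parses.
```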
@@ -122,16 +122,16 @@ export class VectorStore {
          }
      }
      /**
-     * Deletes all chunks from a specific file
+     * Deletes all chunks from a specific file in a specific project
      */
-     async deleteChunksByFile(filePath) {
+     async deleteChunksByFile(filePath, projectId) {
          await this.ensureInitialized();
          if (!this.table) {
              return;
          }
          try {
-             await this.table.delete(`filePath = '${filePath}'`);
-             console.error(`Deleted all chunks from file: ${filePath}`);
+             await this.table.delete(`"filePath" = '${filePath}' AND "projectId" = '${projectId}'`);
+             console.error(`Deleted all chunks from file: ${filePath} (project: ${projectId.substring(0, 8)}...)`);
          }
          catch (error) {
              console.error(`Error deleting chunks by file: ${error}`);
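Two things change in the delete filter: the predicate is now scoped to one project, and the camelCase column names are double-quoted, presumably so the SQL-style filter parser treats them case-sensitively instead of normalizing them to lowercase. A sketch of the before/after filter strings, with hypothetical values:

```js
// Hypothetical values for illustration only:
const filePath = "src/indexer/chunker.js";
const projectId = "a1b2c3d4-e5f6-7890-abcd-ef1234567890";

// 0.0.2: matched the file in every project sharing the table
const before = `filePath = '${filePath}'`;

// 0.0.3: scoped to a single project, with quoted identifiers
const after = `"filePath" = '${filePath}' AND "projectId" = '${projectId}'`;
```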
@@ -184,6 +184,7 @@ export class VectorStore {
                  fileHash: result.fileHash,
                  timestamp: result.timestamp,
                  context: result.context,
+                 projectId: result.projectId,
              },
              score,
              distance,
@@ -222,6 +223,7 @@ export class VectorStore {
              fileHash: r.fileHash,
              timestamp: r.timestamp,
              context: r.context,
+             projectId: r.projectId,
          }));
      }
      catch (error) {
@@ -322,6 +324,47 @@ export class VectorStore {
              return new Map();
          }
      }
+     /**
+      * Gets aggregated statistics for all indexed files in a single query
+      * Returns a map of filePath -> { lastIndexed, chunkCount, fileHash }
+      */
+     async getIndexedFileStats() {
+         await this.ensureInitialized();
+         if (!this.table) {
+             return new Map();
+         }
+         try {
+             // Fetch all chunks in one go - much faster than N queries
+             // querying only necessary columns to reduce memory usage
+             const allChunks = await this.table.query()
+                 .select(['filePath', 'timestamp', 'fileHash'])
+                 .toArray();
+             const stats = new Map();
+             for (const chunk of allChunks) {
+                 const current = stats.get(chunk.filePath);
+                 if (!current) {
+                     stats.set(chunk.filePath, {
+                         lastIndexed: chunk.timestamp,
+                         chunkCount: 1,
+                         fileHash: chunk.fileHash
+                     });
+                 }
+                 else {
+                     // Update stats
+                     current.chunkCount++;
+                     // Keep the latest timestamp
+                     if (chunk.timestamp > current.lastIndexed) {
+                         current.lastIndexed = chunk.timestamp;
+                     }
+                 }
+             }
+             return stats;
+         }
+         catch (error) {
+             console.error(`Error getting indexed file stats: ${error}`);
+             return new Map();
+         }
+     }
  }
  /**
   * Creates a vector store from environment variables
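This replaces a per-file query pattern with one projection scan plus an in-memory fold: O(chunks) work in a single round trip instead of one database call per indexed file. A sketch of what a caller sees, with field semantics as documented in the method comment:

```js
// Hypothetical consumer: summarize index state from the aggregated map.
async function summarizeIndex(vectorStore) {
  const stats = await vectorStore.getIndexedFileStats(); // one table scan
  for (const [filePath, s] of stats) {
    // s aggregates the file's chunks: { lastIndexed, chunkCount, fileHash }
    console.error(`${filePath}: ${s.chunkCount} chunks, hash ${s.fileHash}`);
  }
}
```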
package/dist/index.js CHANGED
@@ -216,7 +216,7 @@ async function validateEnvironment() {
          vectorStore = createVectorStore();
          await vectorStore.initialize();
          console.error("✓ Vector store initialized");
-         indexManager = createIndexManager(embeddingService, vectorStore);
+         indexManager = createIndexManager(embeddingService, vectorStore, workspaceRoot);
          console.error("✓ Index manager initialized");
      }
      catch (error) {
@@ -21,18 +21,22 @@ function buildDirectoryTree(files, indexedFiles, pendingFiles, rootPath) {
      // Build tree structure
      const dirMap = new Map();
      dirMap.set("", root);
+     // Helper to normalize paths to forward slashes
+     const normalize = (p) => p.replace(/\\/g, "/");
      // Sort files by path for consistent tree building
-     const sortedFiles = [...files].sort((a, b) => a.path.localeCompare(b.path));
+     const sortedFiles = [...files].sort((a, b) => normalize(a.path).localeCompare(normalize(b.path)));
      for (const file of sortedFiles) {
-         const parts = file.path.split(path.sep);
+         const normalizedPath = normalize(file.path);
+         const parts = normalizedPath.split("/");
          let currentPath = "";
          // Create directory nodes
          for (let i = 0; i < parts.length - 1; i++) {
+             const part = parts[i];
              const parentPath = currentPath;
-             currentPath = currentPath ? path.join(currentPath, parts[i]) : parts[i];
+             currentPath = currentPath ? `${currentPath}/${part}` : part;
              if (!dirMap.has(currentPath)) {
                  const dirNode = {
-                     name: parts[i],
+                     name: part,
                      path: currentPath,
                      type: "directory",
                      status: "indexed",
@@ -50,10 +54,10 @@ function buildDirectoryTree(files, indexedFiles, pendingFiles, rootPath) {
          }
          // Add file node
          const fileName = parts[parts.length - 1];
-         const fileDir = parts.length > 1 ? path.dirname(file.path) : "";
+         const fileDir = parts.length > 1 ? parts.slice(0, -1).join("/") : "";
          const parentDir = dirMap.get(fileDir);
          if (parentDir && parentDir.children) {
-             const indexed = indexedFiles.has(file.path);
+             const indexed = indexedFiles.has(normalize(file.path)); // Use normalized path for lookup
              const pending = pendingFiles.has(file.path);
              const fileNode = {
                  name: fileName,
@@ -66,33 +70,43 @@ function buildDirectoryTree(files, indexedFiles, pendingFiles, rootPath) {
                  chunkCount: indexed ? indexedFiles.get(file.path).chunks : 0,
              };
              parentDir.children.push(fileNode);
-             // Update parent stats
-             let current = parentDir;
-             while (current) {
-                 current.fileCount = (current.fileCount || 0) + 1;
-                 if (indexed)
-                     current.indexedCount = (current.indexedCount || 0) + 1;
-                 if (pending)
-                     current.pendingCount = (current.pendingCount || 0) + 1;
-                 // Find parent
-                 const parentPath = path.dirname(current.path);
-                 current = parentPath !== current.path ? dirMap.get(parentPath === "." ? "" : parentPath) : null;
-             }
          }
      }
-     // Sort children (directories first, then files)
-     const sortChildren = (node) => {
+     // Calculate stats bottom-up
+     const calculateNodeStats = (node) => {
+         if (node.type === "file") {
+             return;
+         }
+         let fileCount = 0;
+         let indexedCount = 0;
+         let pendingCount = 0;
          if (node.children) {
              node.children.sort((a, b) => {
-                 if (a.type !== b.type) {
+                 if (a.type !== b.type)
                      return a.type === "directory" ? -1 : 1;
-                 }
                  return a.name.localeCompare(b.name);
              });
-             node.children.forEach(sortChildren);
+             for (const child of node.children) {
+                 if (child.type === "directory") {
+                     calculateNodeStats(child);
+                     fileCount += child.fileCount || 0;
+                     indexedCount += child.indexedCount || 0;
+                     pendingCount += child.pendingCount || 0;
+                 }
+                 else {
+                     fileCount++;
+                     if (child.status === "indexed")
+                         indexedCount++;
+                     if (child.status === "pending_reindex")
+                         pendingCount++;
+                 }
+             }
          }
+         node.fileCount = fileCount;
+         node.indexedCount = indexedCount;
+         node.pendingCount = pendingCount;
      };
-     sortChildren(root);
+     calculateNodeStats(root);
      return root;
  }
  /**
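The removed per-file ancestor walk was both slow (every file touched every ancestor) and, after the switch to `/`-separated keys, fragile: `path.dirname` would not reliably resolve the normalized parent paths stored in `dirMap`. The replacement is a single post-order traversal that sorts each directory's children, recurses into subdirectories first, then folds their counts together with direct files. A sketch of the resulting counts on a tiny hypothetical tree:

```js
// Hypothetical input, shaped like the nodes buildDirectoryTree produces:
const root = {
  name: "", path: "", type: "directory",
  children: [{
    name: "src", path: "src", type: "directory",
    children: [
      { name: "a.js", path: "src/a.js", type: "file", status: "indexed" },
      { name: "b.js", path: "src/b.js", type: "file", status: "pending_reindex" },
    ],
  }],
};

// After calculateNodeStats(root):
//   root.children[0] -> { fileCount: 2, indexedCount: 1, pendingCount: 1 }
//   root             -> the same totals, folded up from "src" in one pass
//   instead of one ancestor walk per file.
```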
@@ -112,9 +126,11 @@ function calculateStats(files, indexedFiles, pendingFiles, totalChunks) {
          languageBreakdown: {},
          directoryBreakdown: {},
      };
+     // Helper to normalize paths
+     const normalize = (p) => p.replace(/\\/g, "/");
      for (const file of files) {
          stats.totalSize += file.size;
-         const indexed = indexedFiles.has(file.path);
+         const indexed = indexedFiles.has(normalize(file.path));
          const pending = pendingFiles.has(file.path);
          if (pending) {
              stats.pendingReindexFiles++;
@@ -137,7 +153,7 @@ function calculateStats(files, indexedFiles, pendingFiles, totalChunks) {
          stats.languageBreakdown[file.language].total++;
          if (indexed) {
              stats.languageBreakdown[file.language].indexed++;
-             stats.languageBreakdown[file.language].chunks += indexedFiles.get(file.path).chunks;
+             stats.languageBreakdown[file.language].chunks += indexedFiles.get(normalize(file.path)).chunks;
          }
          // Directory breakdown
          const dir = path.dirname(file.path);
@@ -214,7 +230,7 @@ export async function analyzeCoverage(indexManager, vectorStore, workspaceRoot)
      const maxScanTime = 10000; // 10 seconds max
      let allFiles = [];
      try {
-         allFiles = scanFiles({
+         allFiles = await scanFiles({
              rootPath: workspaceRoot,
              recursive: true
          });
@@ -231,33 +247,37 @@ export async function analyzeCoverage(indexManager, vectorStore, workspaceRoot)
          console.error(`Error escaneando archivos: ${error}`);
          throw error;
      }
-     // 2. Get indexed files from vector store
-     console.error("Obteniendo archivos indexados...");
+     // 2. Get indexed file stats in ONE batch query (optimized)
+     console.error("Obteniendo estadísticas de archivos indexados...");
      await vectorStore.initialize();
-     const fileHashes = await vectorStore.getFileHashes();
+     // This single call replaces thousands of potential DB queries
+     // It returns Map<filePath, { lastIndexed, chunkCount, fileHash }>
+     const indexedFileStats = await vectorStore.getIndexedFileStats();
      // 3. Get index metadata
      const indexStats = await indexManager.getStats();
-     // 4. Build indexed files map with chunk counts
+     // 4. Adapt to expected format for efficient lookups
+     // Helper to normalize paths to forward slashes
+     const normalize = (p) => p.replace(/\\/g, "/");
+     // 4. Adapt to expected format for efficient lookups
      const indexedFiles = new Map();
-     // Get chunks grouped by file from vector store
-     for (const [filePath, hash] of fileHashes) {
-         const chunks = await vectorStore.getChunksByFile(filePath);
-         if (chunks.length > 0) {
-             indexedFiles.set(filePath, {
-                 lastIndexed: chunks[0].timestamp,
-                 chunks: chunks.length,
-             });
-         }
+     const normalizedStats = new Map();
+     for (const [path, stats] of indexedFileStats) {
+         const normPath = normalize(path);
+         normalizedStats.set(normPath, stats);
+         indexedFiles.set(normPath, {
+             lastIndexed: stats.lastIndexed,
+             chunks: stats.chunkCount
+         });
      }
      // 5. Identify pending files (files that changed)
      const pendingFiles = new Set();
      for (const file of allFiles) {
-         const indexed = indexedFiles.get(file.path);
-         if (indexed) {
-             // Check if file hash matches
-             const chunks = await vectorStore.getChunksByFile(file.path);
-             if (chunks.length > 0 && chunks[0].fileHash !== file.hash) {
-                 pendingFiles.add(file.path);
+         const normPath = normalize(file.path);
+         const stats = normalizedStats.get(normPath);
+         if (stats) {
+             // Check if file hash matches the one in DB
+             if (stats.fileHash !== file.hash) {
+                 pendingFiles.add(file.path); // keep original path for file system ops if needed
              }
          }
      }
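Pending detection now compares the hash recorded at index time against the hash the current scan produced, with no database reads inside the loop. The diff doesn't show how `file.hash` is computed; a content digest such as the following is one plausible implementation:

```js
import { createHash } from "crypto";
import { readFileSync } from "fs";

// Hypothetical helper: scanFiles' real hashing scheme is not shown in the diff.
const hashFile = (p) => createHash("sha256").update(readFileSync(p)).digest("hex");

// A file needs reindexing when its stored hash diverges from disk:
//   stats.fileHash !== hashFile(file.path)  =>  pendingFiles.add(file.path)
```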
@@ -21,6 +21,7 @@ export async function indexCode(params, indexManager, workspaceRoot) {
      // Run indexing
      const result = await indexManager.indexFiles({
          rootPath: targetPath,
+         projectRoot: workspaceRoot,
          recursive: params.recursive !== false,
          forceReindex: params.forceReindex || false,
      });
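`projectRoot` is threaded from the tool layer down to the index manager, which presumably derives the `projectId` stamped on every chunk (and used by `deleteChunksByFile` above). The derivation itself isn't in the diff; a stable hash of the resolved root is one way it could work:

```js
import { createHash } from "crypto";
import path from "path";

// Hypothetical: a deterministic projectId per workspace root. The diff only
// shows the parameter being passed through, not how the ID is computed.
const projectIdFor = (workspaceRoot) =>
  createHash("sha256").update(path.resolve(workspaceRoot)).digest("hex");

// Consistent with log output that shortens it via projectId.substring(0, 8).
```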
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@grec0/memory-bank-mcp",
-   "version": "0.0.2",
+   "version": "0.0.3",
    "description": "MCP server for semantic code indexing with Memory Bank - AI-powered codebase understanding",
    "license": "MIT",
    "author": "@grec0",
@@ -41,6 +41,7 @@
    "@modelcontextprotocol/sdk": "1.6.1",
    "@types/node": "^22",
    "ignore": "^5.3.0",
+   "js-tiktoken": "^1.0.21",
    "openai": "^4.0.0",
    "zod": "^3.22.4",
    "zod-to-json-schema": "^3.23.5"
@@ -1,49 +0,0 @@
- // Global variables to store user IDs
- let adminUserId = null;
- import { getUserIdByEmail, getUserIdByUsername } from "./utils.js";
- /**
-  * Gets the admin user ID by looking up the user by email or username
-  *
-  * This function will try the following methods in order:
-  * 1. Use the cached admin user ID if available
-  * 2. Use the PLANKA_ADMIN_ID environment variable if set (for backwards compatibility)
-  * 3. Look up the admin user ID by email using PLANKA_ADMIN_EMAIL
-  * 4. Look up the admin user ID by username using PLANKA_ADMIN_USERNAME
-  */
- export async function getAdminUserId() {
-     if (adminUserId) {
-         return adminUserId;
-     }
-     try {
-         // Check for direct admin ID (for backwards compatibility)
-         const directAdminId = process.env.PLANKA_ADMIN_ID;
-         if (directAdminId) {
-             adminUserId = directAdminId;
-             return adminUserId;
-         }
-         // Try to get the admin ID by email
-         const adminEmail = process.env.PLANKA_ADMIN_EMAIL;
-         if (adminEmail) {
-             const id = await getUserIdByEmail(adminEmail);
-             if (id) {
-                 adminUserId = id;
-                 return adminUserId;
-             }
-         }
-         // If that fails, try to get the admin ID by username
-         const adminUsername = process.env.PLANKA_ADMIN_USERNAME;
-         if (adminUsername) {
-             const id = await getUserIdByUsername(adminUsername);
-             if (id) {
-                 adminUserId = id;
-                 return adminUserId;
-             }
-         }
-         console.error("Could not determine admin user ID. Please set PLANKA_ADMIN_ID, PLANKA_ADMIN_EMAIL, or PLANKA_ADMIN_USERNAME.");
-         return null;
-     }
-     catch (error) {
-         console.error("Failed to get admin user ID:", error);
-         return null;
-     }
- }
@@ -1,215 +0,0 @@
- import { getUserAgent } from "universal-user-agent";
- import { createPlankaError } from "./errors.js";
- import { VERSION } from "./version.js";
- import https from "https";
- import nodeFetch from "node-fetch";
- // Global variables to store tokens
- let agentToken = null;
- // Create HTTPS agent for insecure connections if needed
- let httpsAgent;
- if (process.env.PLANKA_ALLOW_INSECURE === "true") {
-     console.error("[DEBUG] Allowing insecure HTTPS connections (certificate validation disabled)");
-     httpsAgent = new https.Agent({
-         rejectUnauthorized: false,
-     });
- }
- // Custom fetch function that uses the agent
- const customFetch = httpsAgent
-     ? (url, options) => {
-         return nodeFetch(url, { ...options, agent: httpsAgent });
-     }
-     : fetch;
- async function parseResponseBody(response) {
-     const contentType = response.headers.get("content-type");
-     if (contentType?.includes("application/json")) {
-         return response.json();
-     }
-     return response.text();
- }
- export function buildUrl(baseUrl, params) {
-     const url = new URL(baseUrl);
-     Object.entries(params).forEach(([key, value]) => {
-         if (value !== undefined) {
-             url.searchParams.append(key, value.toString());
-         }
-     });
-     return url.toString();
- }
- const USER_AGENT = `modelcontextprotocol/servers/planka/v${VERSION} ${getUserAgent()}`;
- async function authenticateAgent() {
-     const email = process.env.PLANKA_AGENT_EMAIL;
-     const password = process.env.PLANKA_AGENT_PASSWORD;
-     console.error(`[DEBUG] Attempting authentication with email: ${email}`);
-     if (!email || !password) {
-         throw new Error("PLANKA_AGENT_EMAIL and PLANKA_AGENT_PASSWORD environment variables are required");
-     }
-     const baseUrl = process.env.PLANKA_BASE_URL || "http://localhost:3000";
-     // Build the URL correctly for the tokens endpoint
-     const url = baseUrl.endsWith('/')
-         ? `${baseUrl}api/access-tokens`
-         : `${baseUrl}/api/access-tokens`;
-     console.error(`[DEBUG] Authentication URL: ${url}`);
-     console.error(`[DEBUG] Base URL: ${baseUrl}`);
-     try {
-         const requestBody = JSON.stringify({
-             emailOrUsername: email,
-             password: password,
-         });
-         console.error(`[DEBUG] Request body: ${requestBody}`);
-         const response = await customFetch(url, {
-             method: "POST",
-             headers: {
-                 "Accept": "application/json",
-                 "Content-Type": "application/json",
-                 "User-Agent": USER_AGENT,
-             },
-             body: requestBody,
-             credentials: "include",
-         });
-         console.error(`[DEBUG] Response status: ${response.status}`);
-         console.error(`[DEBUG] Response headers:`, Object.fromEntries(response.headers.entries()));
-         const responseBody = await parseResponseBody(response);
-         console.error(`[DEBUG] Response body:`, responseBody);
-         if (!response.ok) {
-             throw createPlankaError(response.status, responseBody);
-         }
-         // The token is directly in the item field
-         const { item } = responseBody;
-         agentToken = item;
-         console.error(`[DEBUG] Authentication successful, token length: ${item?.length}`);
-         return item;
-     }
-     catch (error) {
-         console.error(`[DEBUG] Authentication error:`, error);
-         // Rethrow with more context
-         const errorMessage = error instanceof Error ? error.message : String(error);
-         throw new Error(`Failed to authenticate agent with Planka: ${errorMessage}`);
-     }
- }
- async function getAuthToken() {
-     if (agentToken) {
-         return agentToken;
-     }
-     return authenticateAgent();
- }
- export async function plankaRequest(path, options = {}) {
-     const baseUrl = process.env.PLANKA_BASE_URL || "http://localhost:3000";
-     // Ensure path starts with /api/ if not already present
-     const normalizedPath = path.startsWith("/api/") ? path : `/api/${path}`;
-     // Build the URL correctly
-     const url = baseUrl.endsWith('/')
-         ? `${baseUrl}${normalizedPath.substring(1)}` // Remove leading slash if baseUrl ends with /
-         : `${baseUrl}${normalizedPath}`;
-     const headers = {
-         "Accept": "application/json",
-         "Content-Type": "application/json",
-         "User-Agent": USER_AGENT,
-         ...options.headers,
-     };
-     // Remove Content-Type header for FormData
-     if (options.body instanceof FormData) {
-         delete headers["Content-Type"];
-     }
-     // Add authentication token if not skipped
-     if (!options.skipAuth) {
-         try {
-             const token = await getAuthToken();
-             headers["Authorization"] = `Bearer ${token}`;
-         }
-         catch (error) {
-             const errorMessage = error instanceof Error
-                 ? error.message
-                 : String(error);
-             throw new Error(`Failed to get authentication token: ${errorMessage}`);
-         }
-     }
-     try {
-         const response = await customFetch(url, {
-             method: options.method || "GET",
-             headers,
-             body: options.body instanceof FormData
-                 ? options.body
-                 : options.body
-                     ? JSON.stringify(options.body)
-                     : undefined,
-             credentials: "include", // Include cookies for Planka authentication
-         });
-         const responseBody = await parseResponseBody(response);
-         if (!response.ok) {
-             throw createPlankaError(response.status, responseBody);
-         }
-         return responseBody;
-     }
-     catch (error) {
-         const errorMessage = error instanceof Error ? error.message : String(error);
-         throw new Error(`Failed to make Planka request to ${url}: ${errorMessage}`);
-     }
- }
- export function validateProjectName(name) {
-     const sanitized = name.trim();
-     if (!sanitized) {
-         throw new Error("Project name cannot be empty");
-     }
-     return sanitized;
- }
- export function validateBoardName(name) {
-     const sanitized = name.trim();
-     if (!sanitized) {
-         throw new Error("Board name cannot be empty");
-     }
-     return sanitized;
- }
- export function validateListName(name) {
-     const sanitized = name.trim();
-     if (!sanitized) {
-         throw new Error("List name cannot be empty");
-     }
-     return sanitized;
- }
- export function validateCardName(name) {
-     const sanitized = name.trim();
-     if (!sanitized) {
-         throw new Error("Card name cannot be empty");
-     }
-     return sanitized;
- }
- /**
-  * Looks up a user ID by email
-  *
-  * @param {string} email - The email of the user to look up
-  * @returns {Promise<string | null>} The user ID if found, null otherwise
-  */
- export async function getUserIdByEmail(email) {
-     try {
-         // Get all users
-         const response = await plankaRequest("/api/users");
-         const { items } = response;
-         // Find the user with the matching email
-         const user = items.find((user) => user.email === email);
-         return user ? user.id : null;
-     }
-     catch (error) {
-         console.error(`Failed to get user ID by email: ${error instanceof Error ? error.message : String(error)}`);
-         return null;
-     }
- }
- /**
-  * Looks up a user ID by username
-  *
-  * @param {string} username - The username of the user to look up
-  * @returns {Promise<string | null>} The user ID if found, null otherwise
-  */
- export async function getUserIdByUsername(username) {
-     try {
-         // Get all users
-         const response = await plankaRequest("/api/users");
-         const { items } = response;
-         // Find the user with the matching username
-         const user = items.find((user) => user.username === username);
-         return user ? user.id : null;
-     }
-     catch (error) {
-         console.error(`Failed to get user ID by username: ${error instanceof Error ? error.message : String(error)}`);
-         return null;
-     }
- }