@vheins/local-memory-mcp 0.7.1 → 0.7.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -8,7 +8,7 @@
8
8
  <link rel="preconnect" href="https://fonts.googleapis.com">
9
9
  <link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
10
10
  <link href="https://fonts.googleapis.com/css2?family=Inter:wght@300;400;500;600;700;800&family=JetBrains+Mono:wght@400;500&display=swap" rel="stylesheet">
11
- <script type="module" crossorigin src="/assets/index-CRhOgOlp.js"></script>
11
+ <script type="module" crossorigin src="/assets/index-Bd8FKzVy.js"></script>
12
12
  <link rel="stylesheet" crossorigin href="/assets/index-Bd7v94SO.css">
13
13
  </head>
14
14
  <body>
@@ -6,18 +6,19 @@ import {
6
6
  TOOL_DEFINITIONS,
7
7
  listResources,
8
8
  logger
9
- } from "../chunk-J4O2HJ2K.js";
9
+ } from "../chunk-ZCK6RZFX.js";
10
10
 
11
11
  // src/dashboard/server.ts
12
12
  import express from "express";
13
13
  import path3 from "path";
14
- import fs2 from "fs";
14
+ import fs3 from "fs";
15
15
  import { fileURLToPath as fileURLToPath3 } from "url";
16
16
 
17
17
  // src/mcp/client.ts
18
18
  import { spawn } from "child_process";
19
19
  import { createInterface } from "readline";
20
20
  import path from "path";
21
+ import fs from "fs";
21
22
  import { fileURLToPath } from "url";
22
23
  var __dirname = path.dirname(fileURLToPath(import.meta.url));
23
24
  var RETRY_DELAYS = [1e3, 2e3, 4e3];
@@ -36,7 +37,7 @@ var MCPClient = class {
36
37
  }
37
38
  async start() {
38
39
  if (this.process) return;
39
- const serverPath = this.serverPathOverride || path.join(__dirname, "./server.js");
40
+ const serverPath = this.serverPathOverride || (fs.existsSync(path.join(__dirname, "../mcp/server.js")) ? path.join(__dirname, "../mcp/server.js") : path.join(__dirname, "./server.js"));
40
41
  this.process = spawn("node", [serverPath], {
41
42
  stdio: ["pipe", "pipe", "inherit"]
42
43
  });
@@ -78,7 +79,7 @@ var MCPClient = class {
78
79
  }
79
80
  }
80
81
  } catch (err) {
81
- logger.error("Failed to parse MCP response", { error: String(err) });
82
+ logger.error("Failed to parse MCP response", { error: String(err), rawLine: line });
82
83
  }
83
84
  });
84
85
  await this.callWithRetry("initialize", {
@@ -200,7 +201,7 @@ import { Router as Router4 } from "express";
200
201
  import { Router } from "express";
201
202
 
202
203
  // src/dashboard/controllers/SystemController.ts
203
- import fs from "fs";
204
+ import fs2 from "fs";
204
205
  import path2 from "path";
205
206
  import { fileURLToPath as fileURLToPath2 } from "url";
206
207
 
@@ -251,15 +252,25 @@ function condenseRecentActions(actions, limit) {
251
252
  var __dirname2 = path2.dirname(fileURLToPath2(import.meta.url));
252
253
  var pkg = { version: "0.0.0" };
253
254
  try {
254
- const pkgPath = path2.join(__dirname2, "../../../package.json");
255
- if (fs.existsSync(pkgPath)) {
256
- const data = JSON.parse(fs.readFileSync(pkgPath, "utf8"));
255
+ let currentDir = __dirname2;
256
+ let pkgPath = "";
257
+ while (currentDir !== path2.parse(currentDir).root) {
258
+ const checkPath = path2.join(currentDir, "package.json");
259
+ if (fs2.existsSync(checkPath)) {
260
+ pkgPath = checkPath;
261
+ break;
262
+ }
263
+ currentDir = path2.dirname(currentDir);
264
+ }
265
+ if (pkgPath) {
266
+ const data = JSON.parse(fs2.readFileSync(pkgPath, "utf8"));
257
267
  if (data.version) pkg.version = data.version;
258
268
  }
259
269
  } catch {
260
270
  }
261
271
  var SystemController = class {
262
- static getHealth(req, res) {
272
+ static async getHealth(req, res) {
273
+ await db.refresh();
263
274
  const stats = db.system.getGlobalStats();
264
275
  const health = {
265
276
  connected: mcpClient.isConnected(),
@@ -271,8 +282,9 @@ var SystemController = class {
271
282
  };
272
283
  res.json(jsonApiRes(health, "health"));
273
284
  }
274
- static getRepos(req, res) {
285
+ static async getRepos(req, res) {
275
286
  try {
287
+ await db.refresh();
276
288
  const repos = db.system.listRepoNavigation();
277
289
  res.json(
278
290
  jsonApiRes(
@@ -285,8 +297,9 @@ var SystemController = class {
285
297
  res.status(500).json(jsonApiError(message));
286
298
  }
287
299
  }
288
- static getStats(req, res) {
300
+ static async getStats(req, res) {
289
301
  try {
302
+ await db.refresh();
290
303
  const repo = req.query.repo;
291
304
  if (!repo) return res.status(400).json(jsonApiError("repo is required", 400));
292
305
  const stats = db.system.getDashboardStats(repo);
@@ -296,8 +309,9 @@ var SystemController = class {
296
309
  res.status(500).json(jsonApiError(message));
297
310
  }
298
311
  }
299
- static getRecentActions(req, res) {
312
+ static async getRecentActions(req, res) {
300
313
  try {
314
+ await db.refresh();
301
315
  const repo = req.query.repo;
302
316
  const pageSize = Math.min(50, Math.max(1, parseInt(req.query.pageSize) || 10));
303
317
  const page = Math.max(1, parseInt(req.query.page) || 1);
@@ -343,8 +357,9 @@ var SystemController = class {
343
357
  const caps = { tools, resources, prompts };
344
358
  res.json(jsonApiRes(caps, "capability"));
345
359
  }
346
- static getExport(req, res) {
360
+ static async getExport(req, res) {
347
361
  try {
362
+ await db.refresh();
348
363
  const { repo } = req.query;
349
364
  if (!repo) return res.status(400).json(jsonApiError("repo is required", 400));
350
365
  const memories = db.memories.getAllMemoriesWithStats(repo);
@@ -405,8 +420,9 @@ import { Router as Router2 } from "express";
405
420
  // src/dashboard/controllers/MemoriesController.ts
406
421
  import { randomUUID } from "crypto";
407
422
  var MemoriesController = class {
408
- static list(req, res) {
423
+ static async list(req, res) {
409
424
  try {
425
+ await db.refresh();
410
426
  const query = req.query;
411
427
  const { repo, type, search, minImportance, maxImportance, sortBy, sortOrder } = query;
412
428
  const page = Math.max(1, parseInt(query.page || "1", 10));
@@ -438,8 +454,9 @@ var MemoriesController = class {
438
454
  res.status(500).json(jsonApiError(message));
439
455
  }
440
456
  }
441
- static get(req, res) {
457
+ static async get(req, res) {
442
458
  try {
459
+ await db.refresh();
443
460
  const memory = db.memories.getByIdWithStats(req.params.id);
444
461
  if (!memory) throw new Error("Memory not found");
445
462
  db.actions.logAction("read", memory.scope.repo, { memoryId: memory.id, resultCount: 1 });
@@ -449,8 +466,9 @@ var MemoriesController = class {
449
466
  res.status(404).json(jsonApiError(message, 404));
450
467
  }
451
468
  }
452
- static create(req, res) {
469
+ static async create(req, res) {
453
470
  try {
471
+ await db.refresh();
454
472
  const attributes = getAttributes(req);
455
473
  const { repo, type, content } = attributes;
456
474
  if (!repo || !type || !content) return res.status(400).json(jsonApiError("Required fields missing", 400));
@@ -469,8 +487,9 @@ var MemoriesController = class {
469
487
  res.status(500).json(jsonApiError(message));
470
488
  }
471
489
  }
472
- static update(req, res) {
490
+ static async update(req, res) {
473
491
  try {
492
+ await db.refresh();
474
493
  const { id } = req.params;
475
494
  const existing = db.memories.getByIdWithStats ? db.memories.getByIdWithStats(id) : db.memories.getById(id);
476
495
  if (!existing) return res.status(404).json(jsonApiError("Memory not found", 404));
@@ -495,8 +514,9 @@ var MemoriesController = class {
495
514
  res.status(500).json(jsonApiError(message));
496
515
  }
497
516
  }
498
- static delete(req, res) {
517
+ static async delete(req, res) {
499
518
  try {
519
+ await db.refresh();
500
520
  const { id } = req.params;
501
521
  const existing = db.memories.getByIdWithStats ? db.memories.getByIdWithStats(id) : db.memories.getById(id);
502
522
  if (!existing) return res.status(404).json(jsonApiError("Memory not found", 404));
@@ -508,8 +528,9 @@ var MemoriesController = class {
508
528
  res.status(500).json(jsonApiError(message));
509
529
  }
510
530
  }
511
- static bulkCreate(req, res) {
531
+ static async bulkCreate(req, res) {
512
532
  try {
533
+ await db.refresh();
513
534
  const { items, repo } = getAttributes(req);
514
535
  if (!Array.isArray(items) || !repo)
515
536
  return res.status(400).json(jsonApiError("Invalid payload: requires 'items' array and 'repo'", 400));
@@ -528,8 +549,9 @@ var MemoriesController = class {
528
549
  res.status(500).json(jsonApiError(message));
529
550
  }
530
551
  }
531
- static bulkAction(req, res) {
552
+ static async bulkAction(req, res) {
532
553
  try {
554
+ await db.refresh();
533
555
  const { action, ids, updates } = getAttributes(req);
534
556
  if (!Array.isArray(ids) || !action)
535
557
  return res.status(400).json(jsonApiError("Invalid payload: requires 'ids' array and 'action'", 400));
@@ -575,8 +597,9 @@ import { Router as Router3 } from "express";
575
597
  // src/dashboard/controllers/TasksController.ts
576
598
  import { randomUUID as randomUUID2 } from "crypto";
577
599
  var TasksController = class {
578
- static list(req, res) {
600
+ static async list(req, res) {
579
601
  try {
602
+ await db.refresh();
580
603
  const query = req.query;
581
604
  const { repo, status, search } = query;
582
605
  const page = Math.max(1, parseInt(req.query.page || "1", 10));
@@ -606,8 +629,9 @@ var TasksController = class {
606
629
  res.status(500).json(jsonApiError(message));
607
630
  }
608
631
  }
609
- static get(req, res) {
632
+ static async get(req, res) {
610
633
  try {
634
+ await db.refresh();
611
635
  const task = db.tasks.getTaskById(req.params.id);
612
636
  if (!task) throw new Error("Task not found");
613
637
  db.actions.logAction("read", task.repo, { taskId: task.id });
@@ -617,8 +641,9 @@ var TasksController = class {
617
641
  res.status(404).json(jsonApiError(message, 404));
618
642
  }
619
643
  }
620
- static create(req, res) {
644
+ static async create(req, res) {
621
645
  try {
646
+ await db.refresh();
622
647
  const attributes = getAttributes(req);
623
648
  const { repo, task_code, title } = attributes;
624
649
  if (!repo || !task_code || !title) return res.status(400).json(jsonApiError("Required fields missing", 400));
@@ -638,8 +663,9 @@ var TasksController = class {
638
663
  res.status(500).json(jsonApiError(message));
639
664
  }
640
665
  }
641
- static update(req, res) {
666
+ static async update(req, res) {
642
667
  try {
668
+ await db.refresh();
643
669
  const { id } = req.params;
644
670
  const attributes = getAttributes(req);
645
671
  const existingTask = db.tasks.getTaskById(id);
@@ -682,8 +708,9 @@ var TasksController = class {
682
708
  res.status(500).json(jsonApiError(message));
683
709
  }
684
710
  }
685
- static delete(req, res) {
711
+ static async delete(req, res) {
686
712
  try {
713
+ await db.refresh();
687
714
  const { id } = req.params;
688
715
  const task = db.tasks.getTaskById(id);
689
716
  if (!task) return res.status(404).json(jsonApiError("Task not found", 404));
@@ -695,8 +722,9 @@ var TasksController = class {
695
722
  res.status(500).json(jsonApiError(message));
696
723
  }
697
724
  }
698
- static bulkCreate(req, res) {
725
+ static async bulkCreate(req, res) {
699
726
  try {
727
+ await db.refresh();
700
728
  const { items, repo } = getAttributes(req);
701
729
  if (!Array.isArray(items) || !repo)
702
730
  return res.status(400).json(jsonApiError("Invalid payload: requires 'items' array and 'repo'", 400));
@@ -716,8 +744,9 @@ var TasksController = class {
716
744
  res.status(500).json(jsonApiError(message));
717
745
  }
718
746
  }
719
- static getTimeStats(req, res) {
747
+ static async getTimeStats(req, res) {
720
748
  try {
749
+ await db.refresh();
721
750
  const { repo } = req.query;
722
751
  if (!repo) return res.status(400).json(jsonApiError("repo is required", 400));
723
752
  const stats = {
@@ -744,8 +773,9 @@ var TasksController = class {
744
773
  res.status(500).json(jsonApiError(message));
745
774
  }
746
775
  }
747
- static updateComment(req, res) {
776
+ static async updateComment(req, res) {
748
777
  try {
778
+ await db.refresh();
749
779
  const { id } = req.params;
750
780
  const { comment } = getAttributes(req);
751
781
  const existingComment = db.tasks.getTaskCommentById(id);
@@ -757,8 +787,9 @@ var TasksController = class {
757
787
  res.status(500).json(jsonApiError(message));
758
788
  }
759
789
  }
760
- static deleteComment(req, res) {
790
+ static async deleteComment(req, res) {
761
791
  try {
792
+ await db.refresh();
762
793
  const { id } = req.params;
763
794
  db.tasks.deleteTaskComment(id);
764
795
  res.json(jsonApiRes({ message: "Deleted" }, "status"));
@@ -794,8 +825,8 @@ var __dirname3 = path3.dirname(fileURLToPath3(import.meta.url));
794
825
  var pkg2 = { version: "0.0.0" };
795
826
  try {
796
827
  const pkgPath = path3.join(__dirname3, "../../package.json");
797
- if (fs2.existsSync(pkgPath)) {
798
- const data = JSON.parse(fs2.readFileSync(pkgPath, "utf8"));
828
+ if (fs3.existsSync(pkgPath)) {
829
+ const data = JSON.parse(fs3.readFileSync(pkgPath, "utf8"));
799
830
  if (data.version) pkg2.version = data.version;
800
831
  }
801
832
  } catch {
@@ -814,7 +845,7 @@ app.use((req, res, next) => {
814
845
  next();
815
846
  });
816
847
  app.use("/api", routes_default);
817
- var staticRoot = fs2.existsSync(path3.join(__dirname3, "public")) ? path3.join(__dirname3, "public") : path3.join(process.cwd(), "src", "dashboard", "public");
848
+ var staticRoot = fs3.existsSync(path3.join(__dirname3, "public")) ? path3.join(__dirname3, "public") : path3.join(process.cwd(), "src", "dashboard", "public");
818
849
  app.use(express.static(staticRoot));
819
850
  app.use((req, res, next) => {
820
851
  if (req.path.startsWith("/api")) return next();
@@ -42,7 +42,7 @@ import {
42
42
  setLogLevel,
43
43
  updateSessionFromInitialize,
44
44
  updateSessionRoots
45
- } from "../chunk-J4O2HJ2K.js";
45
+ } from "../chunk-ZCK6RZFX.js";
46
46
 
47
47
  // src/mcp/server.ts
48
48
  import readline from "readline";
@@ -214,7 +214,7 @@ function createMcpResponse(data, summary, options) {
214
214
  resourceLinks,
215
215
  structuredContentPathHint,
216
216
  contentSummary,
217
- includeSerializedStructuredContent = "auto"
217
+ includeSerializedStructuredContent = true
218
218
  } = options || {};
219
219
  void includeSerializedStructuredContent;
220
220
  let finalData = data;
@@ -239,12 +239,12 @@ function createMcpResponse(data, summary, options) {
239
239
  }
240
240
  }
241
241
  const content = [];
242
- if (contentSummary?.trim().length) {
242
+ if (contentSummary && contentSummary.trim().length > 0) {
243
243
  content.push({
244
244
  type: "text",
245
245
  text: contentSummary.trim()
246
246
  });
247
- } else if (summary.trim().length > 0) {
247
+ } else if (summary && summary.trim().length > 0) {
248
248
  const pointerText = structuredContentPathHint ? `Read structuredContent.${structuredContentPathHint} for details.` : `Read structuredContent for machine-readable results.`;
249
249
  content.push({
250
250
  type: "text",
@@ -264,6 +264,9 @@ function createMcpResponse(data, summary, options) {
264
264
  structuredContent: finalData,
265
265
  isError: false
266
266
  };
267
+ if (includeSerializedStructuredContent === false) {
268
+ delete response.structuredContent;
269
+ }
267
270
  response.content = content;
268
271
  return response;
269
272
  }
@@ -622,23 +625,21 @@ async function handleMemorySearch(params, db2, vectors2) {
622
625
  memoriesByType[typeLabel].push(m);
623
626
  }
624
627
  let contentSummary;
625
- if (!validated.structured) {
626
- if (paginatedResults.length > 0) {
627
- const parts = [];
628
- for (const [memType, items] of Object.entries(memoriesByType)) {
629
- parts.push(`${capitalize(memType)}:`);
630
- parts.push("- code|importance|title");
631
- for (const m of items) {
632
- const code = m.code || "-";
633
- parts.push(`- ${code}|${m.importance}|${m.title}`);
634
- }
635
- parts.push("");
628
+ if (paginatedResults.length > 0) {
629
+ const parts = [];
630
+ for (const [memType, items] of Object.entries(memoriesByType)) {
631
+ parts.push(`${capitalize(memType)}:`);
632
+ parts.push("- code|importance|title");
633
+ for (const m of items) {
634
+ const code = m.code || "-";
635
+ parts.push(`- ${code}|${m.importance}|${m.title}`);
636
636
  }
637
- parts.push("Use memory-detail with memory_id (or code) for full content.");
638
- contentSummary = parts.join("\n").trim();
639
- } else {
640
- contentSummary = `No memories found for "${validated.query}" in repo "${validated.repo}".`;
637
+ parts.push("");
641
638
  }
639
+ parts.push("Use memory-detail with memory_id (or code) for full content.");
640
+ contentSummary = parts.join("\n").trim();
641
+ } else {
642
+ contentSummary = `No memories found for "${validated.query}" in repo "${validated.repo}".`;
642
643
  }
643
644
  const structuredData = {
644
645
  schema: "memory-search",
@@ -652,9 +653,10 @@ async function handleMemorySearch(params, db2, vectors2) {
652
653
  rows
653
654
  }
654
655
  };
655
- return createMcpResponse(structuredData, contentSummary || "", {
656
+ return createMcpResponse(structuredData, contentSummary || `Found ${total} memories for "${validated.query}".`, {
656
657
  contentSummary,
657
- includeSerializedStructuredContent: false
658
+ structuredContentPathHint: "results",
659
+ includeSerializedStructuredContent: validated.structured
658
660
  });
659
661
  }
660
662
  function capitalize(str) {
@@ -769,7 +771,7 @@ async function handleMemoryRecap(params, db2) {
769
771
  };
770
772
  return createMcpResponse(structuredData, contentSummary || "", {
771
773
  contentSummary,
772
- includeSerializedStructuredContent: false
774
+ includeSerializedStructuredContent: validated.structured
773
775
  });
774
776
  }
775
777
  function capitalize2(str) {
@@ -940,7 +942,7 @@ async function handleTaskList(args, storage) {
940
942
  }
941
943
  return createMcpResponse(structuredData, contentSummary || "", {
942
944
  contentSummary,
943
- includeSerializedStructuredContent: false
945
+ includeSerializedStructuredContent: isStructuredRequest
944
946
  });
945
947
  }
946
948
  async function handleTaskCreate(args, storage) {
@@ -977,6 +979,11 @@ async function handleTaskCreate(args, storage) {
977
979
  }
978
980
  }
979
981
  const statusTimestamps2 = deriveTaskStatusTimestamps(normalizedStatus, now2);
982
+ const tags2 = [...taskData.tags || []];
983
+ const phaseTag2 = `phase:${taskData.phase}`;
984
+ if (!tags2.includes(phaseTag2)) {
985
+ tags2.push(phaseTag2);
986
+ }
980
987
  const task2 = {
981
988
  id: randomUUID2(),
982
989
  repo,
@@ -995,7 +1002,7 @@ async function handleTaskCreate(args, storage) {
995
1002
  finished_at: statusTimestamps2.finished_at,
996
1003
  canceled_at: statusTimestamps2.canceled_at,
997
1004
  est_tokens: taskData.est_tokens ?? 0,
998
- tags: taskData.tags || [],
1005
+ tags: tags2,
999
1006
  metadata: taskData.metadata || {},
1000
1007
  parent_id: taskData.parent_id || null,
1001
1008
  depends_on: taskData.depends_on || null
@@ -1042,6 +1049,11 @@ async function handleTaskCreate(args, storage) {
1042
1049
  const taskId = randomUUID2();
1043
1050
  const now = (/* @__PURE__ */ new Date()).toISOString();
1044
1051
  const statusTimestamps = deriveTaskStatusTimestamps(status || "backlog", now);
1052
+ const finalTags = [...singleTask.tags || []];
1053
+ const phaseTag = `phase:${phase}`;
1054
+ if (!finalTags.includes(phaseTag)) {
1055
+ finalTags.push(phaseTag);
1056
+ }
1045
1057
  const task = {
1046
1058
  id: taskId,
1047
1059
  repo,
@@ -1060,7 +1072,7 @@ async function handleTaskCreate(args, storage) {
1060
1072
  finished_at: statusTimestamps.finished_at,
1061
1073
  canceled_at: statusTimestamps.canceled_at,
1062
1074
  est_tokens: est_tokens ?? 0,
1063
- tags: tags || [],
1075
+ tags: finalTags,
1064
1076
  metadata: metadata || {},
1065
1077
  parent_id: parent_id || null,
1066
1078
  depends_on: depends_on || null
@@ -1200,7 +1212,8 @@ async function handleTaskUpdate(args, storage, vectors2) {
1200
1212
  if (!comment || comment.trim() === "") {
1201
1213
  throw new Error("comment is required when changing task status");
1202
1214
  }
1203
- if ((existingTask.status === "backlog" || existingTask.status === "pending" || existingTask.status === "blocked") && updates.status === "completed") {
1215
+ const isStartable = existingTask.status === "backlog" || existingTask.status === "pending" || existingTask.status === "blocked";
1216
+ if (isStartable && updates.status === "completed") {
1204
1217
  throw new Error(
1205
1218
  `Cannot transition task ${targetId} from '${existingTask.status}' directly to 'completed'. Must be 'in_progress' first.`
1206
1219
  );
@@ -1213,6 +1226,18 @@ async function handleTaskUpdate(args, storage, vectors2) {
1213
1226
  throw new Error(`Duplicate task_code: '${updates.task_code}' already exists`);
1214
1227
  }
1215
1228
  const finalUpdates = { ...updates };
1229
+ if (updates.phase !== void 0 || updates.tags !== void 0) {
1230
+ let currentTags = updates.tags || existingTask.tags || [];
1231
+ currentTags = currentTags.filter((t) => !t.startsWith("phase:"));
1232
+ const finalPhase = updates.phase !== void 0 ? updates.phase : existingTask.phase;
1233
+ if (finalPhase) {
1234
+ const phaseTag = `phase:${finalPhase}`;
1235
+ if (!currentTags.includes(phaseTag)) {
1236
+ currentTags.push(phaseTag);
1237
+ }
1238
+ }
1239
+ finalUpdates.tags = currentTags;
1240
+ }
1216
1241
  if (updates.status === "completed") finalUpdates.finished_at = now;
1217
1242
  else if (updates.status === "canceled") finalUpdates.canceled_at = now;
1218
1243
  else if (updates.status === "in_progress" && existingTask.status !== "in_progress")
@@ -1591,7 +1616,8 @@ async function handleMemoryAcknowledge(params, db2) {
1591
1616
 
1592
1617
  // src/mcp/tools/memory.detail.ts
1593
1618
  async function handleMemoryDetail(args, storage) {
1594
- const { id, code } = MemoryDetailSchema.parse(args);
1619
+ const validated = MemoryDetailSchema.parse(args);
1620
+ const { id, code } = validated;
1595
1621
  let memory;
1596
1622
  if (id) {
1597
1623
  memory = storage.memories.getById(id);
@@ -1618,7 +1644,7 @@ async function handleMemoryDetail(args, storage) {
1618
1644
  const content = lines.join("\n");
1619
1645
  return createMcpResponse(memory, content, {
1620
1646
  contentSummary: content,
1621
- includeSerializedStructuredContent: false
1647
+ includeSerializedStructuredContent: validated.structured
1622
1648
  });
1623
1649
  }
1624
1650
 
@@ -1670,7 +1696,7 @@ async function handleTaskGet(args, storage) {
1670
1696
  };
1671
1697
  return createMcpResponse(structuredData, contentSummary || "", {
1672
1698
  contentSummary,
1673
- includeSerializedStructuredContent: false
1699
+ includeSerializedStructuredContent: isStructuredRequest
1674
1700
  });
1675
1701
  }
1676
1702
 
@@ -0,0 +1,61 @@
1
+ ---
2
+ name: export-task-to-github
3
+ description: Guide for exporting local tasks from Local Memory MCP to GitHub Issues
4
+ arguments:
5
+ - name: owner
6
+ description: GitHub repository owner (e.g., 'vheins')
7
+ required: true
8
+ - name: repo
9
+ description: GitHub repository name (e.g., 'local-memory-mcp')
10
+ required: true
11
+ - name: task_id
12
+ description: Unique ID of the local task to export
13
+ required: true
14
+ agent: Integration Architect
15
+ ---
16
+ # Skill: export-task-to-github
17
+
18
+ ## Purpose
19
+ You are an **Integration Architect**. Your goal is to export a specific local task from our `local-memory-mcp` system into a high-quality **GitHub Issue**.
20
+
21
+ ## Instructions
22
+
23
+ ### 1. Task Retrieval (MANDATORY)
24
+ 1. **Fetch Task Details**: Call `local-memory-mcp` MCP tool `task-detail` using the provided `task_id`.
25
+ 2. **Verify Content**: Ensure the task has a clear title and description. If information is missing, use `memory-search` to find related context.
26
+
27
+ ### 2. GitHub Sync & Conflict Check
28
+ 1. **Search Existing Issues**: Use `github-mcp-server`'s `search_issues` tool. Search for the local `task_code` (e.g., "FEAT-123") or similar keywords in the target repository.
29
+ 2. **Avoid Duplicates**: If a GitHub issue for this task already exists, do NOT create a new one. Instead, update the local task with the existing GitHub issue URL in metadata.
30
+
31
+ ### 3. Issue Creation
32
+ If no duplicate is found, create the GitHub issue using `github-mcp-server`'s `issue_write` (method: 'create'):
33
+
34
+ - **Title**: Use the local task title exactly.
35
+ - **Body**: Use the local task description exactly.
36
+ - **Metadata**: Include the local `task_code` and `task_id` at the bottom of the body for traceability.
37
+ - **Initial Comment**: If the local task has existing comments, post them as the first comment on the newly created GitHub issue using `add_issue_comment`.
38
+
39
+ ### 4. Linkage & Cleanup
40
+ 1. **Update Local Task**: Once the GitHub issue is created, get the Issue Number and URL.
41
+ 2. **Task Update**: Use `local-memory-mcp` tool `task-update` to:
42
+ - Add the GitHub URL to `metadata`.
43
+ - Add a comment stating "Exported to GitHub Issue #{{number}}".
44
+
45
+ ### 5. Confirmation
46
+ Provide the link to the newly created (or existing) GitHub issue.
47
+
48
+ ---
49
+
50
+ ### ✅ ALLOWED OUTPUT (STRICT)
51
+ Your output MUST ONLY consist of calls to:
52
+ - `mcp_local-memory_task-detail`
53
+ - `mcp_local-memory_task-update`
54
+ - `mcp_github-mcp-server_search_issues`
55
+ - `mcp_github-mcp-server_issue_write`
56
+ - `mcp_github-mcp-server_add_issue_comment`
57
+
58
+ **❌ DO NOT:**
59
+ - Output explanations or narrative text during execution.
60
+ - Modify the original title or description of the task when exporting.
61
+ - Create duplicate issues.
@@ -13,11 +13,16 @@ Please follow these steps:
13
13
  2. **Review Existing Tasks**: Call `local-memory-mcp` MCP tools `task-list` for the current repository to identify tasks already imported.
14
14
  3. **Map and Create**: For each relevant issue that hasn't been imported yet:
15
15
  - Use 'task-manage' with action='create'.
16
+ - **MANDATORY**: Keep the original GitHub **title** and **description** exactly as they are. Do NOT summarize or modify them.
16
17
  - Set 'task_code' to 'GH-{{issue_number}}' (e.g., GH-123).
17
18
  - Set 'title' to the issue title.
18
- - Set 'description' to the issue body (abbreviate if extremely long).
19
+ - Set 'description' to the issue body.
19
20
  - Map GitHub labels to 'tags' if applicable.
20
21
  - Default 'phase' to 'backlog' or 'triage'.
21
22
  - Set 'metadata' to include the original GitHub URL.
22
- 4. **Avoid Duplicates**: Do not import issues that already have a corresponding 'GH-{{number}}' task code in our system.
23
- 5. **Confirmation**: Provide a summary of how many tasks were successfully created.
23
+ 4. **Import Comments**: If the issue has comments:
24
+ - Use `github-mcp-server`'s `issue_read` tool with `method='get_comments'` to fetch all comments.
25
+ - For each comment, add it to the created task using the `task-update` tool, appending it to the `comments` array or adding it as dedicated comment metadata.
26
+ 5. **Avoid Duplicates**: Do not import issues that already have a corresponding 'GH-{{number}}' task code in our system.
27
+ 6. **Confirmation**: Provide a summary of how many tasks were successfully created.
28
+
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@vheins/local-memory-mcp",
3
- "version": "0.7.1",
3
+ "version": "0.7.3",
4
4
  "description": "MCP Local Memory Service for coding copilot agents",
5
5
  "mcpName": "io.github.vheins/local-memory-mcp",
6
6
  "type": "module",
@@ -38,6 +38,7 @@
38
38
  "both": "npm run start & npm run dashboard",
39
39
  "test": "vitest --run",
40
40
  "test:watch": "vitest",
41
+ "type-check": "tsc --noEmit && svelte-check --tsconfig ./src/dashboard/ui/tsconfig.json",
41
42
  "lint": "eslint . --ext .ts,.svelte",
42
43
  "lint:fix": "eslint . --ext .ts,.svelte --fix",
43
44
  "format": "prettier --write \"src/**/*.{ts,js,svelte}\""