@iloom/cli 0.7.4 → 0.7.6

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
Files changed (94)
  1. package/LICENSE +1 -1
  2. package/README.md +2 -4
  3. package/dist/{BranchNamingService-UB2EJGFQ.js → BranchNamingService-AO7BPIUJ.js} +2 -2
  4. package/dist/{ClaudeContextManager-M57BQUMY.js → ClaudeContextManager-Y2YJC6BU.js} +4 -4
  5. package/dist/{ClaudeService-FLZ2IXAO.js → ClaudeService-NDVFQRKC.js} +3 -3
  6. package/dist/{LoomLauncher-5PPVFTFN.js → LoomLauncher-U2B3VHPC.js} +4 -4
  7. package/dist/{PRManager-YTG6XPMG.js → PRManager-7F3AAY66.js} +4 -4
  8. package/dist/README.md +2 -4
  9. package/dist/agents/iloom-issue-analyze-and-plan.md +1 -1
  10. package/dist/agents/iloom-issue-analyzer.md +1 -1
  11. package/dist/agents/iloom-issue-complexity-evaluator.md +1 -1
  12. package/dist/agents/iloom-issue-enhancer.md +1 -1
  13. package/dist/agents/iloom-issue-implementer.md +1 -1
  14. package/dist/agents/iloom-issue-planner.md +1 -1
  15. package/dist/agents/iloom-issue-reviewer.md +1 -1
  16. package/dist/{chunk-7GKMQJGQ.js → chunk-64HCHVJM.js} +2 -2
  17. package/dist/{chunk-JWUYPJ7K.js → chunk-6YAMWLCP.js} +3 -3
  18. package/dist/{chunk-33P5VSKS.js → chunk-C7YW5IMS.js} +2 -2
  19. package/dist/{chunk-37V2NBYR.js → chunk-CAXFWFV6.js} +2 -2
  20. package/dist/chunk-CFQVOTHO.js +111 -0
  21. package/dist/chunk-CFQVOTHO.js.map +1 -0
  22. package/dist/{chunk-AFRICMSW.js → chunk-ENMTWE74.js} +2 -2
  23. package/dist/{chunk-RVLRPQU4.js → chunk-ETY2SBW5.js} +21 -18
  24. package/dist/chunk-ETY2SBW5.js.map +1 -0
  25. package/dist/{chunk-ITIXKM24.js → chunk-IGKPPACU.js} +2 -2
  26. package/dist/chunk-IGKPPACU.js.map +1 -0
  27. package/dist/{chunk-GH4FLYV5.js → chunk-NEPH2O4C.js} +2 -2
  28. package/dist/{chunk-GJMEKEI5.js → chunk-NPEMVE27.js} +342 -6
  29. package/dist/chunk-NPEMVE27.js.map +1 -0
  30. package/dist/{chunk-XAHE76RL.js → chunk-O36JLYNW.js} +2 -2
  31. package/dist/{chunk-6VQNF44G.js → chunk-Q457PKGH.js} +2 -2
  32. package/dist/{chunk-7FM7AL7S.js → chunk-VYKKWU36.js} +2 -2
  33. package/dist/{chunk-EDDIAWVM.js → chunk-WT4UGBE2.js} +8 -7
  34. package/dist/chunk-WT4UGBE2.js.map +1 -0
  35. package/dist/{chunk-453NC377.js → chunk-WZYBHD7P.js} +3 -106
  36. package/dist/chunk-WZYBHD7P.js.map +1 -0
  37. package/dist/{claude-SNWHWWWM.js → claude-V4HRPR4Z.js} +2 -2
  38. package/dist/{cleanup-PLMS2KWF.js → cleanup-IO4KV2DL.js} +9 -6
  39. package/dist/{cleanup-PLMS2KWF.js.map → cleanup-IO4KV2DL.js.map} +1 -1
  40. package/dist/cli.js +84 -63
  41. package/dist/cli.js.map +1 -1
  42. package/dist/{commit-NAGJH4J4.js → commit-3ULFKXNB.js} +4 -4
  43. package/dist/{dev-server-UKAPBGUR.js → dev-server-OAP3RZC6.js} +4 -3
  44. package/dist/{dev-server-UKAPBGUR.js.map → dev-server-OAP3RZC6.js.map} +1 -1
  45. package/dist/{feedback-ICJ44XGB.js → feedback-ZLAX3BVL.js} +3 -3
  46. package/dist/{ignite-U2JSVOEZ.js → ignite-HA2OJF6Z.js} +20 -36
  47. package/dist/ignite-HA2OJF6Z.js.map +1 -0
  48. package/dist/index.js +1 -1
  49. package/dist/index.js.map +1 -1
  50. package/dist/{init-YDKOPB54.js → init-S6IEGRSX.js} +3 -3
  51. package/dist/mcp/issue-management-server.js +420 -14
  52. package/dist/mcp/issue-management-server.js.map +1 -1
  53. package/dist/{open-QI63XQ4F.js → open-IN3LUZXX.js} +4 -3
  54. package/dist/{open-QI63XQ4F.js.map → open-IN3LUZXX.js.map} +1 -1
  55. package/dist/{projects-TWY4RT2Z.js → projects-CTRTTMSK.js} +25 -9
  56. package/dist/projects-CTRTTMSK.js.map +1 -0
  57. package/dist/prompts/issue-prompt.txt +16 -0
  58. package/dist/prompts/pr-prompt.txt +33 -13
  59. package/dist/prompts/regular-prompt.txt +7 -0
  60. package/dist/{rebase-AONLKM2V.js → rebase-RLEVFHWN.js} +3 -3
  61. package/dist/{run-YDVYORT2.js → run-QEIS2EH2.js} +4 -3
  62. package/dist/{run-YDVYORT2.js.map → run-QEIS2EH2.js.map} +1 -1
  63. package/dist/{summary-7KYFRAIM.js → summary-MPOOQIOX.js} +38 -7
  64. package/dist/summary-MPOOQIOX.js.map +1 -0
  65. package/dist/{test-webserver-NRMGT2HB.js → test-webserver-J6SMNLU2.js} +3 -2
  66. package/dist/{test-webserver-NRMGT2HB.js.map → test-webserver-J6SMNLU2.js.map} +1 -1
  67. package/package.json +1 -1
  68. package/dist/chunk-453NC377.js.map +0 -1
  69. package/dist/chunk-EDDIAWVM.js.map +0 -1
  70. package/dist/chunk-GJMEKEI5.js.map +0 -1
  71. package/dist/chunk-ITIXKM24.js.map +0 -1
  72. package/dist/chunk-RVLRPQU4.js.map +0 -1
  73. package/dist/ignite-U2JSVOEZ.js.map +0 -1
  74. package/dist/projects-TWY4RT2Z.js.map +0 -1
  75. package/dist/summary-7KYFRAIM.js.map +0 -1
  76. package/dist/{BranchNamingService-UB2EJGFQ.js.map → BranchNamingService-AO7BPIUJ.js.map} +0 -0
  77. package/dist/{ClaudeContextManager-M57BQUMY.js.map → ClaudeContextManager-Y2YJC6BU.js.map} +0 -0
  78. package/dist/{ClaudeService-FLZ2IXAO.js.map → ClaudeService-NDVFQRKC.js.map} +0 -0
  79. package/dist/{LoomLauncher-5PPVFTFN.js.map → LoomLauncher-U2B3VHPC.js.map} +0 -0
  80. package/dist/{PRManager-YTG6XPMG.js.map → PRManager-7F3AAY66.js.map} +0 -0
  81. package/dist/{chunk-7GKMQJGQ.js.map → chunk-64HCHVJM.js.map} +0 -0
  82. package/dist/{chunk-JWUYPJ7K.js.map → chunk-6YAMWLCP.js.map} +0 -0
  83. package/dist/{chunk-33P5VSKS.js.map → chunk-C7YW5IMS.js.map} +0 -0
  84. package/dist/{chunk-37V2NBYR.js.map → chunk-CAXFWFV6.js.map} +0 -0
  85. package/dist/{chunk-AFRICMSW.js.map → chunk-ENMTWE74.js.map} +0 -0
  86. package/dist/{chunk-GH4FLYV5.js.map → chunk-NEPH2O4C.js.map} +0 -0
  87. package/dist/{chunk-XAHE76RL.js.map → chunk-O36JLYNW.js.map} +0 -0
  88. package/dist/{chunk-6VQNF44G.js.map → chunk-Q457PKGH.js.map} +0 -0
  89. package/dist/{chunk-7FM7AL7S.js.map → chunk-VYKKWU36.js.map} +0 -0
  90. package/dist/{claude-SNWHWWWM.js.map → claude-V4HRPR4Z.js.map} +0 -0
  91. package/dist/{commit-NAGJH4J4.js.map → commit-3ULFKXNB.js.map} +0 -0
  92. package/dist/{feedback-ICJ44XGB.js.map → feedback-ZLAX3BVL.js.map} +0 -0
  93. package/dist/{init-YDKOPB54.js.map → init-S6IEGRSX.js.map} +0 -0
  94. package/dist/{rebase-AONLKM2V.js.map → rebase-RLEVFHWN.js.map} +0 -0
@@ -17,6 +17,247 @@ import {
   getIssueNodeId,
   updateIssueComment
 } from "./chunk-GCPAZSGV.js";
+import {
+  logger
+} from "./chunk-VT4PDUYT.js";
+
+// src/utils/image-processor.ts
+import { tmpdir } from "os";
+import { join, extname } from "path";
+import { existsSync, mkdirSync, createWriteStream, unlinkSync } from "fs";
+import { pipeline } from "stream/promises";
+import { Readable } from "stream";
+import { createHash } from "crypto";
+import { execa } from "execa";
+var SUPPORTED_EXTENSIONS = [".png", ".jpg", ".jpeg", ".gif", ".webp", ".svg"];
+var MAX_IMAGE_SIZE = 10 * 1024 * 1024;
+var REQUEST_TIMEOUT_MS = 3e4;
+var CACHE_DIR = join(tmpdir(), "iloom-images");
+var cachedGitHubToken;
+function extractMarkdownImageUrls(content) {
+  if (!content) {
+    return [];
+  }
+  const matches = [];
+  const markdownRegex = /!\[([^\]]*)\]\(((?:[^()\s]|\((?:[^()\s]|\([^()]*\))*\))+)\)/g;
+  let match;
+  while ((match = markdownRegex.exec(content)) !== null) {
+    const url = match[2];
+    if (url) {
+      matches.push({
+        fullMatch: match[0],
+        url,
+        isMarkdown: true
+      });
+    }
+  }
+  const htmlImgRegex = /<img\s+[^>]*src=["']([^"']+)["'][^>]*\/?>/gi;
+  while ((match = htmlImgRegex.exec(content)) !== null) {
+    const url = match[1];
+    if (url) {
+      matches.push({
+        fullMatch: match[0],
+        url,
+        isMarkdown: false
+      });
+    }
+  }
+  return matches;
+}
+function isAuthenticatedImageUrl(url) {
+  try {
+    const parsedUrl = new URL(url);
+    const hostname = parsedUrl.hostname.toLowerCase();
+    if (hostname === "uploads.linear.app") {
+      return true;
+    }
+    if (hostname === "private-user-images.githubusercontent.com") {
+      return true;
+    }
+    if (hostname === "github.com" && parsedUrl.pathname.startsWith("/user-attachments/assets/")) {
+      return true;
+    }
+    return false;
+  } catch {
+    return false;
+  }
+}
+function getExtensionFromUrl(url) {
+  try {
+    const parsedUrl = new URL(url);
+    const pathname = parsedUrl.pathname;
+    const ext = extname(pathname).toLowerCase();
+    if (SUPPORTED_EXTENSIONS.includes(ext)) {
+      return ext;
+    }
+    return null;
+  } catch {
+    return null;
+  }
+}
+function getCacheKey(url) {
+  const parsedUrl = new URL(url);
+  if (parsedUrl.hostname === "private-user-images.githubusercontent.com") {
+    parsedUrl.searchParams.delete("jwt");
+  }
+  const stableUrl = parsedUrl.toString();
+  const hash = createHash("sha256").update(stableUrl).digest("hex").slice(0, 16);
+  const ext = getExtensionFromUrl(url) ?? ".png";
+  return `${hash}${ext}`;
+}
+function getCachedImagePath(url) {
+  const cacheKey = getCacheKey(url);
+  const cachedPath = join(CACHE_DIR, cacheKey);
+  if (existsSync(cachedPath)) {
+    return cachedPath;
+  }
+  return void 0;
+}
+async function getAuthToken(provider) {
+  if (provider === "github") {
+    if (cachedGitHubToken !== void 0) {
+      return cachedGitHubToken;
+    }
+    try {
+      const result = await execa("gh", ["auth", "token"]);
+      cachedGitHubToken = result.stdout.trim();
+      return cachedGitHubToken;
+    } catch (error) {
+      const message = error instanceof Error ? error.message : String(error);
+      logger.warn(`Failed to get GitHub auth token via gh CLI: ${message}`);
+      return void 0;
+    }
+  }
+  if (provider === "linear") {
+    return process.env.LINEAR_API_TOKEN;
+  }
+  return void 0;
+}
+async function downloadAndSaveImage(url, destPath, authHeader) {
+  const headers = {};
+  if (authHeader) {
+    headers["Authorization"] = authHeader;
+  }
+  const controller = new AbortController();
+  const timeoutId = setTimeout(() => controller.abort(), REQUEST_TIMEOUT_MS);
+  try {
+    const response = await fetch(url, { headers, signal: controller.signal });
+    if (!response.ok) {
+      throw new Error(`Failed to download image: ${response.status} ${response.statusText}`);
+    }
+    const contentLength = response.headers.get("Content-Length");
+    if (contentLength && parseInt(contentLength, 10) > MAX_IMAGE_SIZE) {
+      throw new Error(`Image too large: ${contentLength} bytes exceeds ${MAX_IMAGE_SIZE} byte limit`);
+    }
+    if (!response.body) {
+      throw new Error("Response body is null");
+    }
+    const reader = response.body.getReader();
+    let bytesWritten = 0;
+    const nodeReadable = new Readable({
+      async read() {
+        try {
+          const { done, value } = await reader.read();
+          if (done) {
+            this.push(null);
+            return;
+          }
+          bytesWritten += value.byteLength;
+          if (bytesWritten > MAX_IMAGE_SIZE) {
+            reader.cancel();
+            this.destroy(new Error(`Image too large: ${bytesWritten} bytes exceeds ${MAX_IMAGE_SIZE} byte limit`));
+            return;
+          }
+          this.push(Buffer.from(value));
+        } catch (err) {
+          this.destroy(err instanceof Error ? err : new Error(String(err)));
+        }
+      }
+    });
+    if (!existsSync(CACHE_DIR)) {
+      mkdirSync(CACHE_DIR, { recursive: true });
+    }
+    const writeStream = createWriteStream(destPath);
+    try {
+      await pipeline(nodeReadable, writeStream);
+    } catch (pipelineError) {
+      try {
+        if (existsSync(destPath)) {
+          unlinkSync(destPath);
+        }
+      } catch {
+      }
+      throw pipelineError;
+    }
+  } catch (error) {
+    if (error instanceof Error && error.name === "AbortError") {
+      throw new Error(`Image download timed out after ${REQUEST_TIMEOUT_MS}ms`);
+    }
+    throw error;
+  } finally {
+    clearTimeout(timeoutId);
+  }
+}
+function getCacheDestPath(url) {
+  if (!existsSync(CACHE_DIR)) {
+    mkdirSync(CACHE_DIR, { recursive: true });
+  }
+  const cacheKey = getCacheKey(url);
+  return join(CACHE_DIR, cacheKey);
+}
+function rewriteMarkdownUrls(content, urlMap) {
+  let result = content;
+  for (const [originalUrl, localPath] of urlMap) {
+    const escapedUrl = originalUrl.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
+    const urlRegex = new RegExp(escapedUrl, "g");
+    result = result.replace(urlRegex, localPath);
+  }
+  return result;
+}
+async function processMarkdownImages(content, provider) {
+  if (!content) {
+    return "";
+  }
+  const images = extractMarkdownImageUrls(content);
+  if (images.length === 0) {
+    return content;
+  }
+  const authImages = images.filter((img) => isAuthenticatedImageUrl(img.url));
+  if (authImages.length === 0) {
+    return content;
+  }
+  const authToken = await getAuthToken(provider);
+  const uniqueUrls = [...new Set(authImages.map((img) => img.url))];
+  const urlMap = /* @__PURE__ */ new Map();
+  const downloadPromises = uniqueUrls.map(async (url) => {
+    try {
+      const cachedPath = getCachedImagePath(url);
+      if (cachedPath) {
+        logger.debug(`Using cached image: ${cachedPath}`);
+        return { url, localPath: cachedPath };
+      }
+      logger.debug(`Downloading image: ${url}`);
+      const destPath = getCacheDestPath(url);
+      await downloadAndSaveImage(
+        url,
+        destPath,
+        authToken ? `Bearer ${authToken}` : void 0
+      );
+      return { url, localPath: destPath };
+    } catch (error) {
+      const message = error instanceof Error ? error.message : String(error);
+      logger.warn(`Failed to download image ${url}: ${message}`);
+      return null;
+    }
+  });
+  const results = await Promise.all(downloadPromises);
+  for (const result of results) {
+    if (result !== null) {
+      urlMap.set(result.url, result.localPath);
+    }
+  }
+  return rewriteMarkdownUrls(content, urlMap);
+}
 
 // src/mcp/GitHubIssueManagementProvider.ts
 function normalizeAuthor(author) {
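The hunk above adds a complete image-processing module (src/utils/image-processor.ts) to the bundle: it scans markdown for image URLs, downloads the ones that require authentication (Linear uploads, GitHub private and user-attachment images) into a tmpdir cache, and rewrites the URLs to local file paths. A minimal usage sketch of that flow, assuming the functions were directly importable (in the published bundle they are module-private to chunk-NPEMVE27.js and only reachable through the providers shown below):

// Sketch only - the import path is hypothetical.
import { processMarkdownImages } from "./image-processor"

const body =
  "Repro: ![crash](https://github.com/user-attachments/assets/abc123)"

// Fetches the attachment with `Bearer <gh auth token>`, streams it to
// <tmpdir>/iloom-images/<first 16 hex chars of sha256>.png (10 MB cap,
// 30 s timeout), then rewrites the URL in the returned markdown to that
// local path. Failed downloads log a warning and keep the original URL.
const processed = await processMarkdownImages(body, "github")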
@@ -96,6 +337,86 @@ var GitHubIssueManagementProvider = class {
         ...comment.updatedAt && { updatedAt: comment.updatedAt }
       }));
     }
+    result.body = await processMarkdownImages(result.body, "github");
+    if (result.comments) {
+      for (const comment of result.comments) {
+        comment.body = await processMarkdownImages(comment.body, "github");
+      }
+    }
+    return result;
+  }
+  /**
+   * Fetch pull request details using gh CLI
+   * Normalizes GitHub-specific fields to provider-agnostic format
+   */
+  async getPR(input) {
+    const { number, includeComments = true, repo } = input;
+    const prNumber = parseInt(number, 10);
+    if (isNaN(prNumber)) {
+      throw new Error(`Invalid GitHub PR number: ${number}. GitHub PR IDs must be numeric.`);
+    }
+    const baseFields = "number,title,body,state,url,author,headRefName,baseRefName,files,commits";
+    const fields = includeComments ? `${baseFields},comments` : baseFields;
+    const args = [
+      "pr",
+      "view",
+      String(prNumber),
+      "--json",
+      fields
+    ];
+    if (repo) {
+      args.push("--repo", repo);
+    }
+    const raw = await executeGhCommand(args);
+    const result = {
+      // Core fields
+      id: String(raw.number),
+      number: raw.number,
+      title: raw.title,
+      body: raw.body,
+      state: raw.state,
+      url: raw.url,
+      // Normalized author
+      author: normalizeAuthor(raw.author),
+      // PR-specific fields
+      headRefName: raw.headRefName,
+      baseRefName: raw.baseRefName,
+      // Optional files
+      ...raw.files && {
+        files: raw.files
+      },
+      // Optional commits - normalize author
+      ...raw.commits && {
+        commits: raw.commits.map((commit) => {
+          var _a;
+          return {
+            oid: commit.oid,
+            messageHeadline: commit.messageHeadline,
+            author: ((_a = commit.authors) == null ? void 0 : _a[0]) ? {
+              id: commit.authors[0].email,
+              displayName: commit.authors[0].name,
+              name: commit.authors[0].name,
+              email: commit.authors[0].email
+            } : null
+          };
+        })
+      }
+    };
+    if (raw.comments !== void 0) {
+      result.comments = raw.comments.map((comment) => ({
+        id: extractNumericIdFromUrl(comment.url),
+        body: comment.body,
+        createdAt: comment.createdAt,
+        author: normalizeAuthor(comment.author),
+        ...comment.updatedAt && { updatedAt: comment.updatedAt }
+      }));
+    }
+    result.body = await processMarkdownImages(result.body, "github");
+    if (result.comments) {
+      for (const comment of result.comments) {
+        comment.body = await processMarkdownImages(comment.body, "github");
+      }
+    }
     return result;
   }
   /**
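The new getPR mirrors the existing getIssue: it validates the numeric ID, shells out to `gh pr view <n> --json <fields>` (adding `--repo` when given) via executeGhCommand, and normalizes authors and comment IDs. A hedged call-site sketch against the factory this chunk exports (the PR number and repo are illustrative):

import { IssueManagementProviderFactory } from "./chunk-NPEMVE27.js"

const github = IssueManagementProviderFactory.create("github")

// includeComments defaults to true and appends `comments` to the --json
// field list; comment bodies, like the PR body, are run through
// processMarkdownImages before being returned.
const pr = await github.getPR({
  number: "42",        // illustrative; must parse as an integer
  repo: "owner/repo"   // optional; forwarded as `gh pr view --repo owner/repo`
})
console.log(`${pr.headRefName} -> ${pr.baseRefName} (${pr.state})`)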
@@ -115,9 +436,10 @@ var GitHubIssueManagementProvider = class {
       "--jq",
       "{id: .id, body: .body, user: .user, created_at: .created_at, updated_at: .updated_at, html_url: .html_url, reactions: .reactions}"
     ]);
+    const processedBody = await processMarkdownImages(raw.body, "github");
     return {
       id: String(raw.id),
-      body: raw.body,
+      body: processedBody,
       author: normalizeAuthor(raw.user),
       created_at: raw.created_at,
       ...raw.updated_at && { updated_at: raw.updated_at },
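getComment now runs the fetched body through processMarkdownImages on every call; what keeps that cheap is the cache-key scheme from the first hunk. GitHub's private-image URLs carry a signed `jwt` query parameter that changes on every API response, so the key is computed from the URL with that parameter stripped. A standalone restatement of the derivation (simplified: the real getCacheKey also validates the extension against SUPPORTED_EXTENSIONS):

import { createHash } from "node:crypto"
import { extname } from "node:path"

// Same idea as getCacheKey in the diff: drop the volatile `jwt` param,
// hash the stable URL, keep the file extension (defaulting to .png).
function cacheKey(url: string): string {
  const u = new URL(url)
  if (u.hostname === "private-user-images.githubusercontent.com") {
    u.searchParams.delete("jwt")
  }
  const hash = createHash("sha256").update(u.toString()).digest("hex").slice(0, 16)
  return `${hash}${extname(u.pathname).toLowerCase() || ".png"}`
}

// Two responses for the same image, different JWTs, one cache entry:
const a = "https://private-user-images.githubusercontent.com/1/shot.png?jwt=AAA"
const b = "https://private-user-images.githubusercontent.com/1/shot.png?jwt=BBB"
console.log(cacheKey(a) === cacheKey(b)) // true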
@@ -194,7 +516,7 @@ var GitHubIssueManagementProvider = class {
 
 // src/utils/linear-markup-converter.ts
 import { appendFileSync } from "fs";
-import { join, dirname, basename, extname } from "path";
+import { join as join2, dirname, basename, extname as extname2 } from "path";
 var LinearMarkupConverter = class {
   /**
    * Convert HTML details/summary blocks to Linear's collapsible format
@@ -330,7 +652,7 @@ ${content}
    */
   static getTimestampedLogPath(logFilePath) {
     const dir = dirname(logFilePath);
-    const ext = extname(logFilePath);
+    const ext = extname2(logFilePath);
     const base = basename(logFilePath, ext);
     const now = /* @__PURE__ */ new Date();
     const timestamp = [
@@ -342,7 +664,7 @@ ${content}
       String(now.getMinutes()).padStart(2, "0"),
       String(now.getSeconds()).padStart(2, "0")
     ].join("");
-    return join(dir, `${base}-${timestamp}${ext}`);
+    return join2(dir, `${base}-${timestamp}${ext}`);
   }
 };
 
@@ -391,8 +713,21 @@ var LinearIssueManagementProvider = class {
       } catch {
       }
     }
+    result.body = await processMarkdownImages(result.body, "linear");
+    if (result.comments) {
+      for (const comment of result.comments) {
+        comment.body = await processMarkdownImages(comment.body, "linear");
+      }
+    }
     return result;
   }
+  /**
+   * Fetch pull request details
+   * Linear does not support PRs - this throws an error directing to use GitHub
+   */
+  async getPR(_input) {
+    throw new Error("Linear does not support pull requests. PRs exist only on GitHub. Use the GitHub provider for PR operations.");
+  }
   /**
    * Fetch comments for an issue
    */
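The Linear provider gets the same image post-processing for issue bodies and comments (authenticated with LINEAR_API_TOKEN from the environment instead of the gh CLI), plus a getPR stub that fails fast, since pull requests are a GitHub-only concept. A sketch of the resulting behavior (the issue identifier is illustrative):

import { IssueManagementProviderFactory } from "./chunk-NPEMVE27.js"

const linear = IssueManagementProviderFactory.create("linear")

// uploads.linear.app image URLs in the body and comments come back
// rewritten to local cache paths when LINEAR_API_TOKEN is set.
const issue = await linear.getIssue({ number: "ENG-123" })

// PRs don't exist on Linear; getPR always throws.
await linear.getPR({ number: "1" }).catch((err: Error) => {
  console.error(err.message) // "Linear does not support pull requests. ..."
})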
@@ -417,9 +752,10 @@ var LinearIssueManagementProvider = class {
   async getComment(input) {
     const { commentId } = input;
     const raw = await getLinearComment(commentId);
+    const processedBody = await processMarkdownImages(raw.body, "linear");
     return {
       id: raw.id,
-      body: raw.body,
+      body: processedBody,
       author: null,
       // Linear SDK doesn't return comment author info in basic fetch
       created_at: raw.createdAt
@@ -514,4 +850,4 @@ var IssueManagementProviderFactory = class {
 
 export {
   IssueManagementProviderFactory
 };
-//# sourceMappingURL=chunk-GJMEKEI5.js.map
+//# sourceMappingURL=chunk-NPEMVE27.js.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/utils/image-processor.ts","../src/mcp/GitHubIssueManagementProvider.ts","../src/utils/linear-markup-converter.ts","../src/mcp/LinearIssueManagementProvider.ts","../src/mcp/IssueManagementProviderFactory.ts"],"sourcesContent":["/* global fetch, AbortController, setTimeout, clearTimeout */\nimport { tmpdir } from 'node:os'\nimport { join, extname } from 'node:path'\nimport { existsSync, mkdirSync, createWriteStream, unlinkSync } from 'node:fs'\nimport { pipeline } from 'node:stream/promises'\nimport { Readable } from 'node:stream'\nimport { createHash } from 'node:crypto'\nimport { execa } from 'execa'\nimport { logger } from './logger.js'\nimport type { IssueProvider } from '../mcp/types.js'\n\n/**\n * Represents a matched image in markdown content\n */\nexport interface ImageMatch {\n fullMatch: string\n url: string\n isMarkdown: boolean // true for ![](url), false for <img>\n}\n\n/**\n * Supported image extensions\n */\nconst SUPPORTED_EXTENSIONS = ['.png', '.jpg', '.jpeg', '.gif', '.webp', '.svg']\n\n/**\n * Maximum allowed image size (10MB)\n */\nconst MAX_IMAGE_SIZE = 10 * 1024 * 1024\n\n/**\n * Request timeout in milliseconds (30 seconds)\n */\nconst REQUEST_TIMEOUT_MS = 30000\n\n/**\n * Cache directory path for downloaded images\n */\nexport const CACHE_DIR = join(tmpdir(), 'iloom-images')\n\n/**\n * Cached GitHub auth token (module-level to avoid repeated `gh auth token` calls)\n */\nlet cachedGitHubToken: string | undefined\n\n/**\n * Extract all image URLs from markdown content\n * Handles both ![alt](url) and <img src=\"url\"> formats\n *\n * @param content - Markdown content to parse\n * @returns Array of image matches with full match string and URL\n */\nexport function extractMarkdownImageUrls(content: string): ImageMatch[] {\n if (!content) {\n return []\n }\n\n const matches: ImageMatch[] = []\n\n // Regex for markdown images: ![alt](url)\n // Captures the entire match and the URL separately\n // Handles parentheses in URLs by matching balanced parens\n // The URL part matches: non-paren chars OR (balanced paren group)*, followed by non-paren/non-space chars\n const markdownRegex = /!\\[([^\\]]*)\\]\\(((?:[^()\\s]|\\((?:[^()\\s]|\\([^()]*\\))*\\))+)\\)/g\n let match: RegExpExecArray | null\n\n while ((match = markdownRegex.exec(content)) !== null) {\n const url = match[2]\n if (url) {\n matches.push({\n fullMatch: match[0],\n url,\n isMarkdown: true\n })\n }\n }\n\n // Regex for HTML img tags: <img ... 
src=\"url\" ...>\n // Handles both double and single quotes, and self-closing tags\n const htmlImgRegex = /<img\\s+[^>]*src=[\"']([^\"']+)[\"'][^>]*\\/?>/gi\n\n while ((match = htmlImgRegex.exec(content)) !== null) {\n const url = match[1]\n if (url) {\n matches.push({\n fullMatch: match[0],\n url,\n isMarkdown: false\n })\n }\n }\n\n return matches\n}\n\n/**\n * Check if URL requires authentication to download\n * - Linear: uploads.linear.app\n * - GitHub: private-user-images.githubusercontent.com\n *\n * @param url - Image URL to check\n * @returns true if URL requires authentication\n */\nexport function isAuthenticatedImageUrl(url: string): boolean {\n try {\n const parsedUrl = new URL(url)\n const hostname = parsedUrl.hostname.toLowerCase()\n\n // Linear uploads require authentication\n if (hostname === 'uploads.linear.app') {\n return true\n }\n\n // GitHub private user images require authentication\n if (hostname === 'private-user-images.githubusercontent.com') {\n return true\n }\n\n // GitHub user-attachments (uploaded images in issues/PRs) require authentication\n if (hostname === 'github.com' && parsedUrl.pathname.startsWith('/user-attachments/assets/')) {\n return true\n }\n\n return false\n } catch {\n // Invalid URL - treat as not authenticated\n return false\n }\n}\n\n/**\n * Get extension from URL pathname\n *\n * @param url - URL to extract extension from\n * @returns Extension including dot (e.g., '.png') or null if not found\n */\nfunction getExtensionFromUrl(url: string): string | null {\n try {\n const parsedUrl = new URL(url)\n const pathname = parsedUrl.pathname\n const ext = extname(pathname).toLowerCase()\n\n if (SUPPORTED_EXTENSIONS.includes(ext)) {\n return ext\n }\n return null\n } catch {\n return null\n }\n}\n\n/**\n * Generate cache key from URL\n * For GitHub URLs, strips JWT query params to ensure consistent caching\n * Returns hash + original extension\n *\n * @param url - Image URL to generate cache key for\n * @returns Cache key (hash + extension)\n */\nexport function getCacheKey(url: string): string {\n const parsedUrl = new URL(url)\n\n // For GitHub private images, remove jwt query param to get stable cache key\n // The jwt changes each fetch but the base URL is the same for the same image\n if (parsedUrl.hostname === 'private-user-images.githubusercontent.com') {\n parsedUrl.searchParams.delete('jwt')\n }\n\n // Get URL without volatile params for hashing\n const stableUrl = parsedUrl.toString()\n\n // Generate SHA256 hash of the stable URL (first 16 chars for brevity)\n const hash = createHash('sha256').update(stableUrl).digest('hex').slice(0, 16)\n\n // Extract extension from URL pathname, default to .png\n const ext = getExtensionFromUrl(url) ?? 
'.png'\n\n return `${hash}${ext}`\n}\n\n/**\n * Check if image is already cached\n * Returns file path if exists, undefined otherwise\n *\n * @param url - Image URL to check cache for\n * @returns Cached file path or undefined\n */\nexport function getCachedImagePath(url: string): string | undefined {\n const cacheKey = getCacheKey(url)\n const cachedPath = join(CACHE_DIR, cacheKey)\n\n if (existsSync(cachedPath)) {\n return cachedPath\n }\n return undefined\n}\n\n/**\n * Get authentication token for the given provider\n *\n * @param provider - Provider type ('github' or 'linear')\n * @returns Authentication token or undefined\n */\nasync function getAuthToken(provider: IssueProvider): Promise<string | undefined> {\n if (provider === 'github') {\n // Return cached token if available\n if (cachedGitHubToken !== undefined) {\n return cachedGitHubToken\n }\n\n try {\n // Execute `gh auth token` to get GitHub token\n const result = await execa('gh', ['auth', 'token'])\n cachedGitHubToken = result.stdout.trim()\n return cachedGitHubToken\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error)\n logger.warn(`Failed to get GitHub auth token via gh CLI: ${message}`)\n return undefined\n }\n }\n\n if (provider === 'linear') {\n // Linear token from environment variable\n return process.env.LINEAR_API_TOKEN\n }\n\n return undefined\n}\n\n/**\n * Clear the cached GitHub auth token (for testing purposes)\n */\nexport function clearCachedGitHubToken(): void {\n cachedGitHubToken = undefined\n}\n\n/**\n * Download image from URL and stream it directly to a file\n *\n * @param url - Image URL to download\n * @param destPath - Destination file path\n * @param authHeader - Optional Authorization header value\n * @throws Error if download fails, times out, or exceeds size limit\n */\nexport async function downloadAndSaveImage(\n url: string,\n destPath: string,\n authHeader?: string\n): Promise<void> {\n const headers: Record<string, string> = {}\n if (authHeader) {\n headers['Authorization'] = authHeader\n }\n\n // Set up abort controller for timeout\n const controller = new AbortController()\n const timeoutId = setTimeout(() => controller.abort(), REQUEST_TIMEOUT_MS)\n\n try {\n const response = await fetch(url, { headers, signal: controller.signal })\n\n if (!response.ok) {\n throw new Error(`Failed to download image: ${response.status} ${response.statusText}`)\n }\n\n // Check Content-Length header if available\n const contentLength = response.headers.get('Content-Length')\n if (contentLength && parseInt(contentLength, 10) > MAX_IMAGE_SIZE) {\n throw new Error(`Image too large: ${contentLength} bytes exceeds ${MAX_IMAGE_SIZE} byte limit`)\n }\n\n if (!response.body) {\n throw new Error('Response body is null')\n }\n\n // Convert ReadableStream to Node.js Readable\n const reader = response.body.getReader()\n let bytesWritten = 0\n\n const nodeReadable = new Readable({\n async read(): Promise<void> {\n try {\n const { done, value } = await reader.read()\n if (done) {\n this.push(null)\n return\n }\n\n bytesWritten += value.byteLength\n if (bytesWritten > MAX_IMAGE_SIZE) {\n reader.cancel()\n this.destroy(new Error(`Image too large: ${bytesWritten} bytes exceeds ${MAX_IMAGE_SIZE} byte limit`))\n return\n }\n\n this.push(Buffer.from(value))\n } catch (err) {\n this.destroy(err instanceof Error ? 
err : new Error(String(err)))\n }\n }\n })\n\n // Ensure cache directory exists\n if (!existsSync(CACHE_DIR)) {\n mkdirSync(CACHE_DIR, { recursive: true })\n }\n\n // Stream to file\n const writeStream = createWriteStream(destPath)\n\n try {\n await pipeline(nodeReadable, writeStream)\n } catch (pipelineError) {\n // Clean up partial file on error\n try {\n if (existsSync(destPath)) {\n unlinkSync(destPath)\n }\n } catch {\n // Ignore cleanup errors\n }\n throw pipelineError\n }\n } catch (error) {\n if (error instanceof Error && error.name === 'AbortError') {\n throw new Error(`Image download timed out after ${REQUEST_TIMEOUT_MS}ms`)\n }\n throw error\n } finally {\n clearTimeout(timeoutId)\n }\n}\n\n/**\n * Get the destination path for caching an image\n *\n * @param url - Original image URL (used to generate cache key)\n * @returns Local file path where image should be saved\n */\nexport function getCacheDestPath(url: string): string {\n // Ensure cache directory exists\n if (!existsSync(CACHE_DIR)) {\n mkdirSync(CACHE_DIR, { recursive: true })\n }\n\n // Generate cache key from URL\n const cacheKey = getCacheKey(url)\n return join(CACHE_DIR, cacheKey)\n}\n\n/**\n * Rewrite image URLs in markdown content\n *\n * @param content - Original markdown content\n * @param urlMap - Map of original URLs to local file paths\n * @returns Content with URLs replaced\n */\nexport function rewriteMarkdownUrls(\n content: string,\n urlMap: Map<string, string>\n): string {\n let result = content\n\n for (const [originalUrl, localPath] of urlMap) {\n // Escape special regex characters in the URL\n const escapedUrl = originalUrl.replace(/[.*+?^${}()|[\\]\\\\]/g, '\\\\$&')\n const urlRegex = new RegExp(escapedUrl, 'g')\n result = result.replace(urlRegex, localPath)\n }\n\n return result\n}\n\n/**\n * Main entry point: process all images in markdown content\n * Downloads authenticated images (with caching), saves locally, rewrites URLs\n *\n * @param content - Markdown content to process\n * @param provider - Image provider for authentication ('github' or 'linear')\n * @returns Content with authenticated image URLs replaced with local file paths\n */\nexport async function processMarkdownImages(\n content: string,\n provider: IssueProvider\n): Promise<string> {\n // Early return if empty\n if (!content) {\n return ''\n }\n\n // Extract all image URLs\n const images = extractMarkdownImageUrls(content)\n if (images.length === 0) {\n return content\n }\n\n // Filter to only authenticated URLs\n const authImages = images.filter(img => isAuthenticatedImageUrl(img.url))\n if (authImages.length === 0) {\n return content\n }\n\n // Get auth token for provider\n const authToken = await getAuthToken(provider)\n\n // Deduplicate URLs (same image might appear multiple times)\n const uniqueUrls = [...new Set(authImages.map(img => img.url))]\n\n // Build URL map - process all unique URLs in parallel\n const urlMap = new Map<string, string>()\n\n // Download/cache images in parallel\n const downloadPromises = uniqueUrls.map(async (url) => {\n try {\n // Check cache first\n const cachedPath = getCachedImagePath(url)\n if (cachedPath) {\n logger.debug(`Using cached image: ${cachedPath}`)\n return { url, localPath: cachedPath }\n }\n\n // Cache miss - download and stream directly to file\n logger.debug(`Downloading image: ${url}`)\n const destPath = getCacheDestPath(url)\n await downloadAndSaveImage(\n url,\n destPath,\n authToken ? 
`Bearer ${authToken}` : undefined\n )\n return { url, localPath: destPath }\n } catch (error) {\n // Graceful degradation - log warning, return null to keep original URL\n const message = error instanceof Error ? error.message : String(error)\n logger.warn(`Failed to download image ${url}: ${message}`)\n return null\n }\n })\n\n const results = await Promise.all(downloadPromises)\n\n // Build URL map from results\n for (const result of results) {\n if (result !== null) {\n urlMap.set(result.url, result.localPath)\n }\n }\n\n // Rewrite and return\n return rewriteMarkdownUrls(content, urlMap)\n}\n","/**\n * GitHub implementation of Issue Management Provider\n * Uses GitHub CLI for all operations\n * Normalizes GitHub-specific fields (login) to provider-agnostic core fields (id, displayName)\n */\n\nimport type {\n\tIssueManagementProvider,\n\tGetIssueInput,\n\tGetPRInput,\n\tGetCommentInput,\n\tCreateCommentInput,\n\tUpdateCommentInput,\n\tCreateIssueInput,\n\tCreateChildIssueInput,\n\tCreateIssueResult,\n\tIssueResult,\n\tPRResult,\n\tCommentDetailResult,\n\tCommentResult,\n\tFlexibleAuthor,\n} from './types.js'\nimport {\n\texecuteGhCommand,\n\tcreateIssueComment,\n\tupdateIssueComment,\n\tcreatePRComment,\n\tcreateIssue,\n\tgetIssueNodeId,\n\taddSubIssue,\n} from '../utils/github.js'\nimport { processMarkdownImages } from '../utils/image-processor.js'\n\n/**\n * GitHub-specific author structure from API\n */\ninterface GitHubAuthor {\n\tlogin: string\n\tid?: number\n\tavatarUrl?: string\n\turl?: string\n}\n\n/**\n * Normalize GitHub author to FlexibleAuthor format\n */\nfunction normalizeAuthor(author: GitHubAuthor | null | undefined): FlexibleAuthor | null {\n\tif (!author) return null\n\n\treturn {\n\t\tid: author.id ? String(author.id) : author.login,\n\t\tdisplayName: author.login, // GitHub uses login as primary identifier\n\t\tlogin: author.login, // Preserve original GitHub field\n\t\t...(author.avatarUrl && { avatarUrl: author.avatarUrl }),\n\t\t...(author.url && { url: author.url }),\n\t}\n}\n\n/**\n * Extract numeric comment ID from GitHub comment URL\n * URL format: https://github.com/owner/repo/issues/123#issuecomment-3615239386\n */\nexport function extractNumericIdFromUrl(url: string): string {\n\tconst match = url.match(/#issuecomment-(\\d+)$/)\n\tif (!match?.[1]) {\n\t\tthrow new Error(`Cannot extract comment ID from URL: ${url}`)\n\t}\n\treturn match[1]\n}\n\n/**\n * GitHub-specific implementation of IssueManagementProvider\n */\nexport class GitHubIssueManagementProvider implements IssueManagementProvider {\n\treadonly providerName = 'github'\n\treadonly issuePrefix = '#'\n\n\t/**\n\t * Fetch issue details using gh CLI\n\t * Normalizes GitHub-specific fields to provider-agnostic format\n\t */\n\tasync getIssue(input: GetIssueInput): Promise<IssueResult> {\n\t\tconst { number, includeComments = true, repo } = input\n\n\t\t// Convert string ID to number for GitHub CLI\n\t\tconst issueNumber = parseInt(number, 10)\n\t\tif (isNaN(issueNumber)) {\n\t\t\tthrow new Error(`Invalid GitHub issue number: ${number}. GitHub issue IDs must be numeric.`)\n\t\t}\n\n\t\t// Build fields list based on whether we need comments\n\t\tconst fields = includeComments\n\t\t\t? 
'body,title,comments,labels,assignees,milestone,author,state,number,url'\n\t\t\t: 'body,title,labels,assignees,milestone,author,state,number,url'\n\n\t\t// Use gh issue view to fetch issue details\n\t\tinterface GitHubIssueResponse {\n\t\t\tnumber: number\n\t\t\ttitle: string\n\t\t\tbody: string\n\t\t\tstate: string\n\t\t\turl: string\n\t\t\tauthor?: GitHubAuthor\n\t\t\tlabels?: Array<{ name: string; color?: string; description?: string }>\n\t\t\tassignees?: Array<GitHubAuthor>\n\t\t\tmilestone?: { title: string; number?: number; state?: string }\n\t\t\tcomments?: Array<{\n\t\t\t\tid: number\n\t\t\t\tauthor: GitHubAuthor\n\t\t\t\tbody: string\n\t\t\t\tcreatedAt: string\n\t\t\t\tupdatedAt?: string\n\t\t\t\turl: string\n\t\t\t}>\n\t\t}\n\n\t\tconst args = [\n\t\t\t'issue',\n\t\t\t'view',\n\t\t\tString(issueNumber),\n\t\t\t'--json',\n\t\t\tfields,\n\t\t]\n\n\t\t// Add --repo flag if repo is provided (gh CLI handles both owner/repo and URL formats)\n\t\tif (repo) {\n\t\t\targs.push('--repo', repo)\n\t\t}\n\n\t\tconst raw = await executeGhCommand<GitHubIssueResponse>(args)\n\n\t\t// Normalize to IssueResult with core fields + passthrough\n\t\tconst result: IssueResult = {\n\t\t\t// Core fields\n\t\t\tid: String(raw.number),\n\t\t\ttitle: raw.title,\n\t\t\tbody: raw.body,\n\t\t\tstate: raw.state,\n\t\t\turl: raw.url,\n\t\t\tprovider: 'github',\n\n\t\t\t// Normalized author\n\t\t\tauthor: normalizeAuthor(raw.author),\n\n\t\t\t// Optional flexible fields\n\t\t\t...(raw.assignees && {\n\t\t\t\tassignees: raw.assignees.map(a => normalizeAuthor(a)).filter((a): a is FlexibleAuthor => a !== null),\n\t\t\t}),\n\t\t\t...(raw.labels && {\n\t\t\t\tlabels: raw.labels,\n\t\t\t}),\n\n\t\t\t// GitHub-specific passthrough fields\n\t\t\t...(raw.milestone && {\n\t\t\t\tmilestone: raw.milestone,\n\t\t\t}),\n\t\t}\n\n\t\t// Handle comments with normalized authors\n\t\t// Use extractNumericIdFromUrl to get REST API-compatible numeric IDs from comment URLs\n\t\t// (GitHub CLI returns GraphQL node IDs in the id field, but REST API expects numeric IDs)\n\t\tif (raw.comments !== undefined) {\n\t\t\tresult.comments = raw.comments.map(comment => ({\n\t\t\t\tid: extractNumericIdFromUrl(comment.url),\n\t\t\t\tbody: comment.body,\n\t\t\t\tcreatedAt: comment.createdAt,\n\t\t\t\tauthor: normalizeAuthor(comment.author),\n\t\t\t\t...(comment.updatedAt && { updatedAt: comment.updatedAt }),\n\t\t\t}))\n\t\t}\n\n\t\t// Process authenticated images in body and comments\n\t\tresult.body = await processMarkdownImages(result.body, 'github')\n\t\tif (result.comments) {\n\t\t\tfor (const comment of result.comments) {\n\t\t\t\tcomment.body = await processMarkdownImages(comment.body, 'github')\n\t\t\t}\n\t\t}\n\n\t\treturn result\n\t}\n\n\t/**\n\t * Fetch pull request details using gh CLI\n\t * Normalizes GitHub-specific fields to provider-agnostic format\n\t */\n\tasync getPR(input: GetPRInput): Promise<PRResult> {\n\t\tconst { number, includeComments = true, repo } = input\n\n\t\t// Convert string ID to number for GitHub CLI\n\t\tconst prNumber = parseInt(number, 10)\n\t\tif (isNaN(prNumber)) {\n\t\t\tthrow new Error(`Invalid GitHub PR number: ${number}. GitHub PR IDs must be numeric.`)\n\t\t}\n\n\t\t// Build fields list based on whether we need comments\n\t\tconst baseFields = 'number,title,body,state,url,author,headRefName,baseRefName,files,commits'\n\t\tconst fields = includeComments\n\t\t\t? 
`${baseFields},comments`\n\t\t\t: baseFields\n\n\t\t// GitHub PR response structure\n\t\tinterface GitHubPRResponse {\n\t\t\tnumber: number\n\t\t\ttitle: string\n\t\t\tbody: string\n\t\t\tstate: string\n\t\t\turl: string\n\t\t\tauthor?: GitHubAuthor\n\t\t\theadRefName: string\n\t\t\tbaseRefName: string\n\t\t\tfiles?: Array<{\n\t\t\t\tpath: string\n\t\t\t\tadditions: number\n\t\t\t\tdeletions: number\n\t\t\t}>\n\t\t\tcommits?: Array<{\n\t\t\t\toid: string\n\t\t\t\tmessageHeadline: string\n\t\t\t\tauthors: Array<{ name: string; email: string }>\n\t\t\t}>\n\t\t\tcomments?: Array<{\n\t\t\t\tid: number\n\t\t\t\tauthor: GitHubAuthor\n\t\t\t\tbody: string\n\t\t\t\tcreatedAt: string\n\t\t\t\tupdatedAt?: string\n\t\t\t\turl: string\n\t\t\t}>\n\t\t}\n\n\t\tconst args = [\n\t\t\t'pr',\n\t\t\t'view',\n\t\t\tString(prNumber),\n\t\t\t'--json',\n\t\t\tfields,\n\t\t]\n\n\t\t// Add --repo flag if repo is provided\n\t\tif (repo) {\n\t\t\targs.push('--repo', repo)\n\t\t}\n\n\t\tconst raw = await executeGhCommand<GitHubPRResponse>(args)\n\n\t\t// Normalize to PRResult with core fields + passthrough\n\t\tconst result: PRResult = {\n\t\t\t// Core fields\n\t\t\tid: String(raw.number),\n\t\t\tnumber: raw.number,\n\t\t\ttitle: raw.title,\n\t\t\tbody: raw.body,\n\t\t\tstate: raw.state,\n\t\t\turl: raw.url,\n\n\t\t\t// Normalized author\n\t\t\tauthor: normalizeAuthor(raw.author),\n\n\t\t\t// PR-specific fields\n\t\t\theadRefName: raw.headRefName,\n\t\t\tbaseRefName: raw.baseRefName,\n\n\t\t\t// Optional files\n\t\t\t...(raw.files && {\n\t\t\t\tfiles: raw.files,\n\t\t\t}),\n\n\t\t\t// Optional commits - normalize author\n\t\t\t...(raw.commits && {\n\t\t\t\tcommits: raw.commits.map(commit => ({\n\t\t\t\t\toid: commit.oid,\n\t\t\t\t\tmessageHeadline: commit.messageHeadline,\n\t\t\t\t\tauthor: commit.authors?.[0]\n\t\t\t\t\t\t? {\n\t\t\t\t\t\t\tid: commit.authors[0].email,\n\t\t\t\t\t\t\tdisplayName: commit.authors[0].name,\n\t\t\t\t\t\t\tname: commit.authors[0].name,\n\t\t\t\t\t\t\temail: commit.authors[0].email,\n\t\t\t\t\t\t}\n\t\t\t\t\t\t: null,\n\t\t\t\t})),\n\t\t\t}),\n\t\t}\n\n\t\t// Handle comments with normalized authors\n\t\t// Use extractNumericIdFromUrl to get REST API-compatible numeric IDs from comment URLs\n\t\tif (raw.comments !== undefined) {\n\t\t\tresult.comments = raw.comments.map(comment => ({\n\t\t\t\tid: extractNumericIdFromUrl(comment.url),\n\t\t\t\tbody: comment.body,\n\t\t\t\tcreatedAt: comment.createdAt,\n\t\t\t\tauthor: normalizeAuthor(comment.author),\n\t\t\t\t...(comment.updatedAt && { updatedAt: comment.updatedAt }),\n\t\t\t}))\n\t\t}\n\n\t\t// Process authenticated images in body and comments\n\t\tresult.body = await processMarkdownImages(result.body, 'github')\n\t\tif (result.comments) {\n\t\t\tfor (const comment of result.comments) {\n\t\t\t\tcomment.body = await processMarkdownImages(comment.body, 'github')\n\t\t\t}\n\t\t}\n\n\t\treturn result\n\t}\n\n\t/**\n\t * Fetch a specific comment by ID using gh API\n\t * Normalizes author to FlexibleAuthor format\n\t */\n\tasync getComment(input: GetCommentInput): Promise<CommentDetailResult> {\n\t\tconst { commentId, repo } = input\n\t\t// Note: GitHub doesn't need the issue number parameter - comment IDs are globally unique\n\t\t// But we accept it for interface compatibility with other providers\n\n\t\t// Convert string ID to number for GitHub API\n\t\tconst numericCommentId = parseInt(commentId, 10)\n\t\tif (isNaN(numericCommentId)) {\n\t\t\tthrow new Error(`Invalid GitHub comment ID: ${commentId}. 
GitHub comment IDs must be numeric.`)\n\t\t}\n\n\t\t// GitHub API response structure\n\t\tinterface GitHubCommentResponse {\n\t\t\tid: number\n\t\t\tbody: string\n\t\t\tuser: GitHubAuthor\n\t\t\tcreated_at: string\n\t\t\tupdated_at?: string\n\t\t\thtml_url?: string\n\t\t\treactions?: Record<string, unknown>\n\t\t}\n\n\t\t// Use explicit repo path if provided, otherwise use :owner/:repo placeholder\n\t\tconst apiPath = repo\n\t\t\t? `repos/${repo}/issues/comments/${numericCommentId}`\n\t\t\t: `repos/:owner/:repo/issues/comments/${numericCommentId}`\n\n\t\t// Use gh api to fetch specific comment\n\t\tconst raw = await executeGhCommand<GitHubCommentResponse>([\n\t\t\t'api',\n\t\t\tapiPath,\n\t\t\t'--jq',\n\t\t\t'{id: .id, body: .body, user: .user, created_at: .created_at, updated_at: .updated_at, html_url: .html_url, reactions: .reactions}',\n\t\t])\n\n\t\t// Process authenticated images in comment body\n\t\tconst processedBody = await processMarkdownImages(raw.body, 'github')\n\n\t\t// Normalize to CommentDetailResult\n\t\treturn {\n\t\t\tid: String(raw.id),\n\t\t\tbody: processedBody,\n\t\t\tauthor: normalizeAuthor(raw.user),\n\t\t\tcreated_at: raw.created_at,\n\t\t\t...(raw.updated_at && { updated_at: raw.updated_at }),\n\t\t\t// Passthrough GitHub-specific fields\n\t\t\t...(raw.html_url && { html_url: raw.html_url }),\n\t\t\t...(raw.reactions && { reactions: raw.reactions }),\n\t\t}\n\t}\n\n\t/**\n\t * Create a new comment on an issue or PR\n\t */\n\tasync createComment(input: CreateCommentInput): Promise<CommentResult> {\n\t\tconst { number, body, type } = input\n\n\t\t// Convert string ID to number for GitHub utilities\n\t\tconst numericId = parseInt(number, 10)\n\t\tif (isNaN(numericId)) {\n\t\t\tthrow new Error(`Invalid GitHub ${type} number: ${number}. GitHub IDs must be numeric.`)\n\t\t}\n\n\t\t// Delegate to existing GitHub utilities\n\t\tconst result =\n\t\t\ttype === 'issue'\n\t\t\t\t? await createIssueComment(numericId, body)\n\t\t\t\t: await createPRComment(numericId, body)\n\n\t\t// Convert numeric ID to string for the interface\n\t\treturn {\n\t\t\t...result,\n\t\t\tid: String(result.id),\n\t\t}\n\t}\n\n\t/**\n\t * Update an existing comment\n\t */\n\tasync updateComment(input: UpdateCommentInput): Promise<CommentResult> {\n\t\tconst { commentId, body } = input\n\t\t// Note: GitHub doesn't need the issue number parameter - comment IDs are globally unique\n\t\t// But we accept it for interface compatibility with other providers\n\n\t\t// Convert string ID to number for GitHub utility\n\t\tconst numericCommentId = parseInt(commentId, 10)\n\t\tif (isNaN(numericCommentId)) {\n\t\t\tthrow new Error(`Invalid GitHub comment ID: ${commentId}. GitHub comment IDs must be numeric.`)\n\t\t}\n\n\t\t// Delegate to existing GitHub utility\n\t\tconst result = await updateIssueComment(numericCommentId, body)\n\n\t\t// Convert numeric ID to string for the interface\n\t\treturn {\n\t\t\t...result,\n\t\t\tid: String(result.id),\n\t\t}\n\t}\n\n\t/**\n\t * Create a new issue\n\t */\n\tasync createIssue(input: CreateIssueInput): Promise<CreateIssueResult> {\n\t\tconst { title, body, labels, repo } = input\n\t\t// teamKey is ignored for GitHub\n\n\t\tconst result = await createIssue(title, body, { labels, repo })\n\n\t\t// Ensure number is numeric\n\t\tconst issueNumber = typeof result.number === 'number'\n\t\t\t? 
result.number\n\t\t\t: parseInt(String(result.number), 10)\n\n\t\treturn {\n\t\t\tid: String(issueNumber),\n\t\t\turl: result.url,\n\t\t\tnumber: issueNumber,\n\t\t}\n\t}\n\n\t/**\n\t * Create a child issue linked to a parent issue\n\t * GitHub requires two-step process: create issue, then link via GraphQL\n\t */\n\tasync createChildIssue(input: CreateChildIssueInput): Promise<CreateIssueResult> {\n\t\tconst { parentId, title, body, labels, repo } = input\n\t\t// teamKey is ignored for GitHub\n\n\t\t// Convert parent identifier to number\n\t\tconst parentNumber = parseInt(parentId, 10)\n\t\tif (isNaN(parentNumber)) {\n\t\t\tthrow new Error(`Invalid GitHub parent issue number: ${parentId}. GitHub issue IDs must be numeric.`)\n\t\t}\n\n\t\t// Step 1: Get parent issue's GraphQL node ID\n\t\tconst parentNodeId = await getIssueNodeId(parentNumber, repo)\n\n\t\t// Step 2: Create the child issue\n\t\tconst childResult = await createIssue(title, body, { labels, repo })\n\t\tconst childNumber = typeof childResult.number === 'number'\n\t\t\t? childResult.number\n\t\t\t: parseInt(String(childResult.number), 10)\n\n\t\t// Step 3: Get child issue's GraphQL node ID\n\t\tconst childNodeId = await getIssueNodeId(childNumber, repo)\n\n\t\t// Step 4: Link child to parent via GraphQL mutation\n\t\tawait addSubIssue(parentNodeId, childNodeId)\n\n\t\treturn {\n\t\t\tid: String(childNumber),\n\t\t\turl: childResult.url,\n\t\t\tnumber: childNumber,\n\t\t}\n\t}\n}\n","import { appendFileSync } from 'node:fs'\nimport { join, dirname, basename, extname } from 'node:path'\n\n/**\n * Utility class for converting HTML details/summary format to Linear's collapsible format\n *\n * Converts:\n * <details>\n * <summary>Header</summary>\n * CONTENT\n * </details>\n *\n * Into Linear format:\n * +++ Header\n *\n * CONTENT\n *\n * +++\n */\nexport class LinearMarkupConverter {\n\t/**\n\t * Convert HTML details/summary blocks to Linear's collapsible format\n\t * Handles nested details blocks recursively\n\t *\n\t * @param text - Text containing HTML details/summary blocks\n\t * @returns Text with details/summary converted to Linear format\n\t */\n\tstatic convertDetailsToLinear(text: string): string {\n\t\tif (!text) {\n\t\t\treturn text\n\t\t}\n\n\t\t// Process from innermost to outermost to handle nesting correctly\n\t\t// Keep converting until no more details blocks are found\n\t\tlet previousText = ''\n\t\tlet currentText = text\n\n\t\twhile (previousText !== currentText) {\n\t\t\tpreviousText = currentText\n\t\t\tcurrentText = this.convertSinglePass(currentText)\n\t\t}\n\n\t\treturn currentText\n\t}\n\n\t/**\n\t * Perform a single pass of details block conversion\n\t * Converts the innermost details blocks first\n\t */\n\tprivate static convertSinglePass(text: string): string {\n\t\t// Match <details> blocks with optional attributes on the details tag\n\t\t// Supports multiline content between tags\n\t\tconst detailsRegex = /<details[^>]*>\\s*<summary[^>]*>(.*?)<\\/summary>\\s*(.*?)\\s*<\\/details>/gis\n\n\t\treturn text.replace(detailsRegex, (_match, summary, content) => {\n\t\t\t// Clean up the summary - trim whitespace and decode HTML entities\n\t\t\tconst cleanSummary = this.cleanText(summary)\n\n\t\t\t// Clean up the content - preserve internal structure but normalize outer whitespace\n\t\t\t// Note: Don't recursively convert here - the while loop handles that\n\t\t\tconst cleanContent = this.cleanContent(content)\n\n\t\t\t// Build Linear collapsible format\n\t\t\t// Always include blank lines around content for 
readability\n\t\t\tif (cleanContent) {\n\t\t\t\treturn `+++ ${cleanSummary}\\n\\n${cleanContent}\\n\\n+++`\n\t\t\t} else {\n\t\t\t\t// Empty content - use minimal format\n\t\t\t\treturn `+++ ${cleanSummary}\\n\\n+++`\n\t\t\t}\n\t\t})\n\t}\n\n\t/**\n\t * Clean text by trimming whitespace and decoding common HTML entities\n\t */\n\tprivate static cleanText(text: string): string {\n\t\treturn text\n\t\t\t.trim()\n\t\t\t.replace(/&lt;/g, '<')\n\t\t\t.replace(/&gt;/g, '>')\n\t\t\t.replace(/&amp;/g, '&')\n\t\t\t.replace(/&quot;/g, '\"')\n\t\t\t.replace(/&#39;/g, \"'\")\n\t}\n\n\t/**\n\t * Clean content while preserving internal structure\n\t * - Removes leading/trailing whitespace\n\t * - Normalizes internal blank lines (max 2 consecutive newlines)\n\t * - Preserves code blocks and other formatting\n\t */\n\tprivate static cleanContent(content: string): string {\n\t\tif (!content) {\n\t\t\treturn ''\n\t\t}\n\n\t\t// Trim outer whitespace\n\t\tlet cleaned = content.trim()\n\n\t\t// Normalize excessive blank lines (3+ newlines -> 2 newlines)\n\t\tcleaned = cleaned.replace(/\\n{3,}/g, '\\n\\n')\n\n\t\treturn cleaned\n\t}\n\n\t/**\n\t * Check if text contains HTML details/summary blocks\n\t * Useful for conditional conversion\n\t */\n\tstatic hasDetailsBlocks(text: string): boolean {\n\t\tif (!text) {\n\t\t\treturn false\n\t\t}\n\n\t\tconst detailsRegex = /<details[^>]*>.*?<summary[^>]*>.*?<\\/summary>.*?<\\/details>/is\n\t\treturn detailsRegex.test(text)\n\t}\n\n\t/**\n\t * Remove wrapper tags from code sample details blocks\n\t * Identifies details blocks where summary contains \"X lines\" pattern\n\t * and removes the details/summary tags while preserving the content\n\t *\n\t * @param text - Text containing potential code sample details blocks\n\t * @returns Text with code sample wrappers removed\n\t */\n\tstatic removeCodeSampleWrappers(text: string): string {\n\t\tif (!text) {\n\t\t\treturn text\n\t\t}\n\n\t\t// Match details blocks where summary contains \"X lines\" (e.g., \"45 lines\", \"120 lines\")\n\t\t// Pattern: <details><summary>...N lines...</summary>CONTENT</details>\n\t\t// Use [^<]* to match summary content without allowing nested tags to interfere\n\t\t// Then use [\\s\\S]*? 
for the content to allow any characters including newlines\n\t\tconst codeSampleRegex = /<details[^>]*>\\s*<summary[^>]*>([^<]*\\d+\\s+lines[^<]*)<\\/summary>\\s*([\\s\\S]*?)<\\/details>/gi\n\n\t\treturn text.replace(codeSampleRegex, (_match, _summary, content) => {\n\t\t\t// Return just the content, without any wrapper tags\n\t\t\t// Preserve the content exactly as-is\n\t\t\treturn content.trim()\n\t\t})\n\t}\n\n\t/**\n\t * Convert text for Linear - applies all necessary conversions\n\t * Currently only converts details/summary blocks, but can be extended\n\t * for other HTML to Linear markdown conversions\n\t */\n\tstatic convertToLinear(text: string): string {\n\t\tif (!text) {\n\t\t\treturn text\n\t\t}\n\n\t\t// Log input if logging is enabled\n\t\tthis.logConversion('INPUT', text)\n\n\t\t// Apply all conversions\n\t\tlet converted = text\n\n\t\t// First, remove code sample wrappers (details blocks with \"X lines\" pattern)\n\t\t// This prevents them from being converted to Linear's +++ format\n\t\tconverted = this.removeCodeSampleWrappers(converted)\n\n\t\t// Then convert remaining details/summary blocks to Linear format\n\t\tconverted = this.convertDetailsToLinear(converted)\n\n\t\t// Log output if logging is enabled\n\t\tthis.logConversion('OUTPUT', converted)\n\n\t\treturn converted\n\t}\n\n\t/**\n\t * Log conversion input/output if LINEAR_MARKDOWN_LOG_FILE is set\n\t */\n\tprivate static logConversion(label: string, content: string): void {\n\t\tconst logFilePath = process.env.LINEAR_MARKDOWN_LOG_FILE\n\t\tif (!logFilePath) {\n\t\t\treturn\n\t\t}\n\n\t\ttry {\n\t\t\tconst timestampedPath = this.getTimestampedLogPath(logFilePath)\n\t\t\tconst timestamp = new Date().toISOString()\n\t\t\tconst separator = '================================'\n\n\t\t\tconst logEntry = `${separator}\\n[${timestamp}] CONVERSION ${label}\\n${separator}\\n${label}:\\n${content}\\n\\n`\n\n\t\t\tappendFileSync(timestampedPath, logEntry, 'utf-8')\n\t\t} catch {\n\t\t\t// Silently fail - don't crash if logging fails\n\t\t\t// This is a debug feature and shouldn't break the conversion\n\t\t}\n\t}\n\n\t/**\n\t * Generate timestamped log file path\n\t * Example: debug.log -> debug-20231202-161234.log\n\t */\n\tprivate static getTimestampedLogPath(logFilePath: string): string {\n\t\tconst dir = dirname(logFilePath)\n\t\tconst ext = extname(logFilePath)\n\t\tconst base = basename(logFilePath, ext)\n\n\t\t// Generate timestamp: YYYYMMDD-HHMMSS\n\t\tconst now = new Date()\n\t\tconst timestamp = [\n\t\t\tnow.getFullYear(),\n\t\t\tString(now.getMonth() + 1).padStart(2, '0'),\n\t\t\tString(now.getDate()).padStart(2, '0'),\n\t\t].join('') + '-' + [\n\t\t\tString(now.getHours()).padStart(2, '0'),\n\t\t\tString(now.getMinutes()).padStart(2, '0'),\n\t\t\tString(now.getSeconds()).padStart(2, '0'),\n\t\t].join('')\n\n\t\treturn join(dir, `${base}-${timestamp}${ext}`)\n\t}\n}\n","/**\n * Linear implementation of Issue Management Provider\n * Uses @linear/sdk for all operations\n */\n\nimport type {\n\tIssueManagementProvider,\n\tGetIssueInput,\n\tGetPRInput,\n\tGetCommentInput,\n\tCreateCommentInput,\n\tUpdateCommentInput,\n\tCreateIssueInput,\n\tCreateChildIssueInput,\n\tCreateIssueResult,\n\tIssueResult,\n\tPRResult,\n\tCommentDetailResult,\n\tCommentResult,\n} from './types.js'\nimport {\n\tfetchLinearIssue,\n\tcreateLinearComment,\n\tgetLinearComment,\n\tupdateLinearComment,\n\tfetchLinearIssueComments,\n\tcreateLinearIssue,\n\tcreateLinearChildIssue,\n} from '../utils/linear.js'\nimport { LinearMarkupConverter } from 
'../utils/linear-markup-converter.js'\nimport { processMarkdownImages } from '../utils/image-processor.js'\n\n/**\n * Linear-specific implementation of IssueManagementProvider\n */\nexport class LinearIssueManagementProvider implements IssueManagementProvider {\n\treadonly providerName = 'linear'\n\treadonly issuePrefix = ''\n\n\t/**\n\t * Cached team key extracted from issue identifiers (e.g., \"ENG-123\" -> \"ENG\")\n\t * Used as fallback when teamKey is not explicitly provided to createIssue()\n\t */\n\tprivate cachedTeamKey: string | undefined = undefined\n\n\t/**\n\t * Fetch issue details using Linear SDK\n\t */\n\tasync getIssue(input: GetIssueInput): Promise<IssueResult> {\n\t\tconst { number, includeComments = true } = input\n\n\t\t// Extract and cache team key from identifier (e.g., \"ENG-123\" -> \"ENG\")\n\t\t// This enables createIssue() to use the team key as a fallback\n\t\tconst match = number.match(/^([A-Z]{2,})-\\d+$/i)\n\t\tif (match?.[1]) {\n\t\t\tthis.cachedTeamKey = match[1].toUpperCase()\n\t\t}\n\n\t\t// Fetch issue - Linear uses alphanumeric identifiers like \"ENG-123\"\n\t\tconst raw = await fetchLinearIssue(number)\n\n\t\t// Map Linear state name to open/closed\n\t\tconst state = raw.state && (raw.state.toLowerCase().includes('done') || raw.state.toLowerCase().includes('completed') || raw.state.toLowerCase().includes('canceled'))\n\t\t\t? 'closed'\n\t\t\t: 'open'\n\n\t\t// Build result\n\t\tconst result: IssueResult = {\n\t\t\tid: raw.identifier,\n\t\t\ttitle: raw.title,\n\t\t\tbody: raw.description ?? '',\n\t\t\tstate,\n\t\t\turl: raw.url,\n\t\t\tprovider: 'linear',\n\t\t\tauthor: null, // Linear SDK doesn't return author in basic fetch\n\n\t\t\t// Linear-specific fields\n\t\t\tlinearState: raw.state,\n\t\t\tcreatedAt: raw.createdAt,\n\t\t\tupdatedAt: raw.updatedAt,\n\t\t}\n\n\t\t// Fetch comments if requested\n\t\tif (includeComments) {\n\t\t\ttry {\n\t\t\t\tconst comments = await this.fetchIssueComments(number)\n\t\t\t\tif (comments) {\n\t\t\t\t\tresult.comments = comments\n\t\t\t\t}\n\t\t\t} catch {\n\t\t\t\t// If comments fail, continue without them\n\t\t\t}\n\t\t}\n\n\t\t// Process images in body and comments to make them accessible\n\t\tresult.body = await processMarkdownImages(result.body, 'linear')\n\t\tif (result.comments) {\n\t\t\tfor (const comment of result.comments) {\n\t\t\t\tcomment.body = await processMarkdownImages(comment.body, 'linear')\n\t\t\t}\n\t\t}\n\n\t\treturn result\n\t}\n\n\t/**\n\t * Fetch pull request details\n\t * Linear does not support PRs - this throws an error directing to use GitHub\n\t */\n\tasync getPR(_input: GetPRInput): Promise<PRResult> {\n\t\tthrow new Error('Linear does not support pull requests. PRs exist only on GitHub. 
Use the GitHub provider for PR operations.')\n\t}\n\n\t/**\n\t * Fetch comments for an issue\n\t */\n\tprivate async fetchIssueComments(identifier: string): Promise<IssueResult['comments']> {\n\t\ttry {\n\t\t\tconst comments = await fetchLinearIssueComments(identifier)\n\n\t\t\treturn comments.map(comment => ({\n\t\t\t\tid: comment.id,\n\t\t\t\tbody: comment.body,\n\t\t\t\tcreatedAt: comment.createdAt,\n\t\t\t\tauthor: null, // Linear SDK doesn't return comment author info in basic fetch\n\t\t\t\t...(comment.updatedAt && { updatedAt: comment.updatedAt }),\n\t\t\t}))\n\t\t} catch {\n\t\t\treturn []\n\t\t}\n\t}\n\n\t/**\n\t * Fetch a specific comment by ID\n\t */\n\tasync getComment(input: GetCommentInput): Promise<CommentDetailResult> {\n\t\tconst { commentId } = input\n\n\t\tconst raw = await getLinearComment(commentId)\n\n\t\t// Process images to make them accessible\n\t\tconst processedBody = await processMarkdownImages(raw.body, 'linear')\n\n\t\treturn {\n\t\t\tid: raw.id,\n\t\t\tbody: processedBody,\n\t\t\tauthor: null, // Linear SDK doesn't return comment author info in basic fetch\n\t\t\tcreated_at: raw.createdAt,\n\t\t}\n\t}\n\n\t/**\n\t * Create a new comment on an issue\n\t */\n\tasync createComment(input: CreateCommentInput): Promise<CommentResult> {\n\t\tconst { number, body } = input\n\t\t// Note: Linear doesn't distinguish between issue and PR comments\n\t\t// (Linear doesn't have PRs - that's GitHub-specific)\n\n\t\t// Convert HTML details/summary blocks to Linear's collapsible format\n\t\tconst convertedBody = LinearMarkupConverter.convertToLinear(body)\n\n\t\tconst result = await createLinearComment(number, convertedBody)\n\n\t\treturn {\n\t\t\tid: result.id,\n\t\t\turl: result.url,\n\t\t\tcreated_at: result.createdAt,\n\t\t}\n\t}\n\n\t/**\n\t * Update an existing comment\n\t */\n\tasync updateComment(input: UpdateCommentInput): Promise<CommentResult> {\n\t\tconst { commentId, body } = input\n\n\t\t// Convert HTML details/summary blocks to Linear's collapsible format\n\t\tconst convertedBody = LinearMarkupConverter.convertToLinear(body)\n\n\t\tconst result = await updateLinearComment(commentId, convertedBody)\n\n\t\treturn {\n\t\t\tid: result.id,\n\t\t\turl: result.url,\n\t\t\tupdated_at: result.updatedAt,\n\t\t}\n\t}\n\n\t/**\n\t * Create a new issue\n\t */\n\tasync createIssue(input: CreateIssueInput): Promise<CreateIssueResult> {\n\t\tconst { title, body, labels, teamKey } = input\n\n\t\t// Fallback chain: explicit param > settings (via env) > cached key from getIssue()\n\t\tconst effectiveTeamKey = teamKey ?? process.env.LINEAR_TEAM_KEY ?? this.cachedTeamKey\n\n\t\tif (!effectiveTeamKey) {\n\t\t\tthrow new Error('teamKey is required for Linear issue creation. 
Configure issueManagement.linear.teamId in settings, or call getIssue first to extract the team from an issue identifier.')\n\t\t}\n\n\t\tconst result = await createLinearIssue(title, body, effectiveTeamKey, labels)\n\n\t\treturn {\n\t\t\tid: result.identifier,\n\t\t\turl: result.url,\n\t\t}\n\t}\n\n\t/**\n\t * Create a child issue linked to a parent issue\n\t * Linear supports atomic creation with parentId field\n\t */\n\tasync createChildIssue(input: CreateChildIssueInput): Promise<CreateIssueResult> {\n\t\tconst { parentId, title, body, labels, teamKey } = input\n\n\t\t// Fetch parent issue to get UUID (parentId in input is identifier like \"ENG-123\")\n\t\tconst parentIssue = await fetchLinearIssue(parentId)\n\n\t\t// Extract team key from parent identifier if not provided\n\t\tconst match = parentId.match(/^([A-Z]{2,})-\\d+$/i)\n\t\tconst effectiveTeamKey = teamKey ?? match?.[1]?.toUpperCase() ?? process.env.LINEAR_TEAM_KEY ?? this.cachedTeamKey\n\n\t\tif (!effectiveTeamKey) {\n\t\t\tthrow new Error('teamKey is required for Linear child issue creation. Provide teamKey parameter or use a parent identifier with team prefix.')\n\t\t}\n\n\t\t// Create child issue with parent's UUID\n\t\tconst result = await createLinearChildIssue(\n\t\t\ttitle,\n\t\t\tbody,\n\t\t\teffectiveTeamKey,\n\t\t\tparentIssue.id, // UUID, not identifier\n\t\t\tlabels\n\t\t)\n\n\t\treturn {\n\t\t\tid: result.identifier,\n\t\t\turl: result.url,\n\t\t}\n\t}\n}\n","/**\n * Factory for creating issue management providers\n */\n\nimport type { IssueManagementProvider, IssueProvider } from './types.js'\nimport { GitHubIssueManagementProvider } from './GitHubIssueManagementProvider.js'\nimport { LinearIssueManagementProvider } from './LinearIssueManagementProvider.js'\n\n/**\n * Factory class for creating issue management providers\n */\nexport class IssueManagementProviderFactory {\n\t/**\n\t * Create an issue management provider based on the provider type\n\t */\n\tstatic create(provider: IssueProvider): IssueManagementProvider {\n\t\tswitch (provider) {\n\t\t\tcase 'github':\n\t\t\t\treturn new GitHubIssueManagementProvider()\n\t\t\tcase 'linear':\n\t\t\t\treturn new LinearIssueManagementProvider()\n\t\t\tdefault:\n\t\t\t\tthrow new Error(`Unsupported issue management provider: 
${provider}`)\n\t\t}\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AACA,SAAS,cAAc;AACvB,SAAS,MAAM,eAAe;AAC9B,SAAS,YAAY,WAAW,mBAAmB,kBAAkB;AACrE,SAAS,gBAAgB;AACzB,SAAS,gBAAgB;AACzB,SAAS,kBAAkB;AAC3B,SAAS,aAAa;AAgBtB,IAAM,uBAAuB,CAAC,QAAQ,QAAQ,SAAS,QAAQ,SAAS,MAAM;AAK9E,IAAM,iBAAiB,KAAK,OAAO;AAKnC,IAAM,qBAAqB;AAKpB,IAAM,YAAY,KAAK,OAAO,GAAG,cAAc;AAKtD,IAAI;AASG,SAAS,yBAAyB,SAA+B;AACtE,MAAI,CAAC,SAAS;AACZ,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,UAAwB,CAAC;AAM/B,QAAM,gBAAgB;AACtB,MAAI;AAEJ,UAAQ,QAAQ,cAAc,KAAK,OAAO,OAAO,MAAM;AACrD,UAAM,MAAM,MAAM,CAAC;AACnB,QAAI,KAAK;AACP,cAAQ,KAAK;AAAA,QACX,WAAW,MAAM,CAAC;AAAA,QAClB;AAAA,QACA,YAAY;AAAA,MACd,CAAC;AAAA,IACH;AAAA,EACF;AAIA,QAAM,eAAe;AAErB,UAAQ,QAAQ,aAAa,KAAK,OAAO,OAAO,MAAM;AACpD,UAAM,MAAM,MAAM,CAAC;AACnB,QAAI,KAAK;AACP,cAAQ,KAAK;AAAA,QACX,WAAW,MAAM,CAAC;AAAA,QAClB;AAAA,QACA,YAAY;AAAA,MACd,CAAC;AAAA,IACH;AAAA,EACF;AAEA,SAAO;AACT;AAUO,SAAS,wBAAwB,KAAsB;AAC5D,MAAI;AACF,UAAM,YAAY,IAAI,IAAI,GAAG;AAC7B,UAAM,WAAW,UAAU,SAAS,YAAY;AAGhD,QAAI,aAAa,sBAAsB;AACrC,aAAO;AAAA,IACT;AAGA,QAAI,aAAa,6CAA6C;AAC5D,aAAO;AAAA,IACT;AAGA,QAAI,aAAa,gBAAgB,UAAU,SAAS,WAAW,2BAA2B,GAAG;AAC3F,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,EACT,QAAQ;AAEN,WAAO;AAAA,EACT;AACF;AAQA,SAAS,oBAAoB,KAA4B;AACvD,MAAI;AACF,UAAM,YAAY,IAAI,IAAI,GAAG;AAC7B,UAAM,WAAW,UAAU;AAC3B,UAAM,MAAM,QAAQ,QAAQ,EAAE,YAAY;AAE1C,QAAI,qBAAqB,SAAS,GAAG,GAAG;AACtC,aAAO;AAAA,IACT;AACA,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAUO,SAAS,YAAY,KAAqB;AAC/C,QAAM,YAAY,IAAI,IAAI,GAAG;AAI7B,MAAI,UAAU,aAAa,6CAA6C;AACtE,cAAU,aAAa,OAAO,KAAK;AAAA,EACrC;AAGA,QAAM,YAAY,UAAU,SAAS;AAGrC,QAAM,OAAO,WAAW,QAAQ,EAAE,OAAO,SAAS,EAAE,OAAO,KAAK,EAAE,MAAM,GAAG,EAAE;AAG7E,QAAM,MAAM,oBAAoB,GAAG,KAAK;AAExC,SAAO,GAAG,IAAI,GAAG,GAAG;AACtB;AASO,SAAS,mBAAmB,KAAiC;AAClE,QAAM,WAAW,YAAY,GAAG;AAChC,QAAM,aAAa,KAAK,WAAW,QAAQ;AAE3C,MAAI,WAAW,UAAU,GAAG;AAC1B,WAAO;AAAA,EACT;AACA,SAAO;AACT;AAQA,eAAe,aAAa,UAAsD;AAChF,MAAI,aAAa,UAAU;AAEzB,QAAI,sBAAsB,QAAW;AACnC,aAAO;AAAA,IACT;AAEA,QAAI;AAEF,YAAM,SAAS,MAAM,MAAM,MAAM,CAAC,QAAQ,OAAO,CAAC;AAClD,0BAAoB,OAAO,OAAO,KAAK;AACvC,aAAO;AAAA,IACT,SAAS,OAAO;AACd,YAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACrE,aAAO,KAAK,+CAA+C,OAAO,EAAE;AACpE,aAAO;AAAA,IACT;AAAA,EACF;AAEA,MAAI,aAAa,UAAU;AAEzB,WAAO,QAAQ,IAAI;AAAA,EACrB;AAEA,SAAO;AACT;AAiBA,eAAsB,qBACpB,KACA,UACA,YACe;AACf,QAAM,UAAkC,CAAC;AACzC,MAAI,YAAY;AACd,YAAQ,eAAe,IAAI;AAAA,EAC7B;AAGA,QAAM,aAAa,IAAI,gBAAgB;AACvC,QAAM,YAAY,WAAW,MAAM,WAAW,MAAM,GAAG,kBAAkB;AAEzE,MAAI;AACF,UAAM,WAAW,MAAM,MAAM,KAAK,EAAE,SAAS,QAAQ,WAAW,OAAO,CAAC;AAExE,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,IAAI,MAAM,6BAA6B,SAAS,MAAM,IAAI,SAAS,UAAU,EAAE;AAAA,IACvF;AAGA,UAAM,gBAAgB,SAAS,QAAQ,IAAI,gBAAgB;AAC3D,QAAI,iBAAiB,SAAS,eAAe,EAAE,IAAI,gBAAgB;AACjE,YAAM,IAAI,MAAM,oBAAoB,aAAa,kBAAkB,cAAc,aAAa;AAAA,IAChG;AAEA,QAAI,CAAC,SAAS,MAAM;AAClB,YAAM,IAAI,MAAM,uBAAuB;AAAA,IACzC;AAGA,UAAM,SAAS,SAAS,KAAK,UAAU;AACvC,QAAI,eAAe;AAEnB,UAAM,eAAe,IAAI,SAAS;AAAA,MAChC,MAAM,OAAsB;AAC1B,YAAI;AACF,gBAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,cAAI,MAAM;AACR,iBAAK,KAAK,IAAI;AACd;AAAA,UACF;AAEA,0BAAgB,MAAM;AACtB,cAAI,eAAe,gBAAgB;AACjC,mBAAO,OAAO;AACd,iBAAK,QAAQ,IAAI,MAAM,oBAAoB,YAAY,kBAAkB,cAAc,aAAa,CAAC;AACrG;AAAA,UACF;AAEA,eAAK,KAAK,OAAO,KAAK,KAAK,CAAC;AAAA,QAC9B,SAAS,KAAK;AACZ,eAAK,QAAQ,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC,CAAC;AAAA,QAClE;AAAA,MACF;AAAA,IACF,CAAC;AAGD,QAAI,CAAC,WAAW,SAAS,GAAG;AAC1B,gBAAU,WAAW,EAAE,WAAW,KAAK,CAAC;AAAA,IAC1C;AAGA,UAAM,cAAc,kBAAkB,QAAQ;AAE9C,QAAI;AACF,YAAM,SAAS,cAAc,WAAW;AAAA,IAC1C,SAAS,eAAe;AAEtB,UAAI;AACF,YAAI,WAAW,QAAQ,GAAG;AACxB,qBAAW,QAAQ;AAAA,QACrB;AAAA,MACF,QAAQ;AAAA,MAER;AACA,YAAM;AAAA,IACR;AAAA,EACF,SAAS,OAAO;AACd,QAAI,i
BAAiB,SAAS,MAAM,SAAS,cAAc;AACzD,YAAM,IAAI,MAAM,kCAAkC,kBAAkB,IAAI;AAAA,IAC1E;AACA,UAAM;AAAA,EACR,UAAE;AACA,iBAAa,SAAS;AAAA,EACxB;AACF;AAQO,SAAS,iBAAiB,KAAqB;AAEpD,MAAI,CAAC,WAAW,SAAS,GAAG;AAC1B,cAAU,WAAW,EAAE,WAAW,KAAK,CAAC;AAAA,EAC1C;AAGA,QAAM,WAAW,YAAY,GAAG;AAChC,SAAO,KAAK,WAAW,QAAQ;AACjC;AASO,SAAS,oBACd,SACA,QACQ;AACR,MAAI,SAAS;AAEb,aAAW,CAAC,aAAa,SAAS,KAAK,QAAQ;AAE7C,UAAM,aAAa,YAAY,QAAQ,uBAAuB,MAAM;AACpE,UAAM,WAAW,IAAI,OAAO,YAAY,GAAG;AAC3C,aAAS,OAAO,QAAQ,UAAU,SAAS;AAAA,EAC7C;AAEA,SAAO;AACT;AAUA,eAAsB,sBACpB,SACA,UACiB;AAEjB,MAAI,CAAC,SAAS;AACZ,WAAO;AAAA,EACT;AAGA,QAAM,SAAS,yBAAyB,OAAO;AAC/C,MAAI,OAAO,WAAW,GAAG;AACvB,WAAO;AAAA,EACT;AAGA,QAAM,aAAa,OAAO,OAAO,SAAO,wBAAwB,IAAI,GAAG,CAAC;AACxE,MAAI,WAAW,WAAW,GAAG;AAC3B,WAAO;AAAA,EACT;AAGA,QAAM,YAAY,MAAM,aAAa,QAAQ;AAG7C,QAAM,aAAa,CAAC,GAAG,IAAI,IAAI,WAAW,IAAI,SAAO,IAAI,GAAG,CAAC,CAAC;AAG9D,QAAM,SAAS,oBAAI,IAAoB;AAGvC,QAAM,mBAAmB,WAAW,IAAI,OAAO,QAAQ;AACrD,QAAI;AAEF,YAAM,aAAa,mBAAmB,GAAG;AACzC,UAAI,YAAY;AACd,eAAO,MAAM,uBAAuB,UAAU,EAAE;AAChD,eAAO,EAAE,KAAK,WAAW,WAAW;AAAA,MACtC;AAGA,aAAO,MAAM,sBAAsB,GAAG,EAAE;AACxC,YAAM,WAAW,iBAAiB,GAAG;AACrC,YAAM;AAAA,QACJ;AAAA,QACA;AAAA,QACA,YAAY,UAAU,SAAS,KAAK;AAAA,MACtC;AACA,aAAO,EAAE,KAAK,WAAW,SAAS;AAAA,IACpC,SAAS,OAAO;AAEd,YAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACrE,aAAO,KAAK,4BAA4B,GAAG,KAAK,OAAO,EAAE;AACzD,aAAO;AAAA,IACT;AAAA,EACF,CAAC;AAED,QAAM,UAAU,MAAM,QAAQ,IAAI,gBAAgB;AAGlD,aAAW,UAAU,SAAS;AAC5B,QAAI,WAAW,MAAM;AACnB,aAAO,IAAI,OAAO,KAAK,OAAO,SAAS;AAAA,IACzC;AAAA,EACF;AAGA,SAAO,oBAAoB,SAAS,MAAM;AAC5C;;;ACpZA,SAAS,gBAAgB,QAAgE;AACxF,MAAI,CAAC,OAAQ,QAAO;AAEpB,SAAO;AAAA,IACN,IAAI,OAAO,KAAK,OAAO,OAAO,EAAE,IAAI,OAAO;AAAA,IAC3C,aAAa,OAAO;AAAA;AAAA,IACpB,OAAO,OAAO;AAAA;AAAA,IACd,GAAI,OAAO,aAAa,EAAE,WAAW,OAAO,UAAU;AAAA,IACtD,GAAI,OAAO,OAAO,EAAE,KAAK,OAAO,IAAI;AAAA,EACrC;AACD;AAMO,SAAS,wBAAwB,KAAqB;AAC5D,QAAM,QAAQ,IAAI,MAAM,sBAAsB;AAC9C,MAAI,EAAC,+BAAQ,KAAI;AAChB,UAAM,IAAI,MAAM,uCAAuC,GAAG,EAAE;AAAA,EAC7D;AACA,SAAO,MAAM,CAAC;AACf;AAKO,IAAM,gCAAN,MAAuE;AAAA,EAAvE;AACN,SAAS,eAAe;AACxB,SAAS,cAAc;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMvB,MAAM,SAAS,OAA4C;AAC1D,UAAM,EAAE,QAAQ,kBAAkB,MAAM,KAAK,IAAI;AAGjD,UAAM,cAAc,SAAS,QAAQ,EAAE;AACvC,QAAI,MAAM,WAAW,GAAG;AACvB,YAAM,IAAI,MAAM,gCAAgC,MAAM,qCAAqC;AAAA,IAC5F;AAGA,UAAM,SAAS,kBACZ,2EACA;AAuBH,UAAM,OAAO;AAAA,MACZ;AAAA,MACA;AAAA,MACA,OAAO,WAAW;AAAA,MAClB;AAAA,MACA;AAAA,IACD;AAGA,QAAI,MAAM;AACT,WAAK,KAAK,UAAU,IAAI;AAAA,IACzB;AAEA,UAAM,MAAM,MAAM,iBAAsC,IAAI;AAG5D,UAAM,SAAsB;AAAA;AAAA,MAE3B,IAAI,OAAO,IAAI,MAAM;AAAA,MACrB,OAAO,IAAI;AAAA,MACX,MAAM,IAAI;AAAA,MACV,OAAO,IAAI;AAAA,MACX,KAAK,IAAI;AAAA,MACT,UAAU;AAAA;AAAA,MAGV,QAAQ,gBAAgB,IAAI,MAAM;AAAA;AAAA,MAGlC,GAAI,IAAI,aAAa;AAAA,QACpB,WAAW,IAAI,UAAU,IAAI,OAAK,gBAAgB,CAAC,CAAC,EAAE,OAAO,CAAC,MAA2B,MAAM,IAAI;AAAA,MACpG;AAAA,MACA,GAAI,IAAI,UAAU;AAAA,QACjB,QAAQ,IAAI;AAAA,MACb;AAAA;AAAA,MAGA,GAAI,IAAI,aAAa;AAAA,QACpB,WAAW,IAAI;AAAA,MAChB;AAAA,IACD;AAKA,QAAI,IAAI,aAAa,QAAW;AAC/B,aAAO,WAAW,IAAI,SAAS,IAAI,cAAY;AAAA,QAC9C,IAAI,wBAAwB,QAAQ,GAAG;AAAA,QACvC,MAAM,QAAQ;AAAA,QACd,WAAW,QAAQ;AAAA,QACnB,QAAQ,gBAAgB,QAAQ,MAAM;AAAA,QACtC,GAAI,QAAQ,aAAa,EAAE,WAAW,QAAQ,UAAU;AAAA,MACzD,EAAE;AAAA,IACH;AAGA,WAAO,OAAO,MAAM,sBAAsB,OAAO,MAAM,QAAQ;AAC/D,QAAI,OAAO,UAAU;AACpB,iBAAW,WAAW,OAAO,UAAU;AACtC,gBAAQ,OAAO,MAAM,sBAAsB,QAAQ,MAAM,QAAQ;AAAA,MAClE;AAAA,IACD;AAEA,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,MAAM,OAAsC;AACjD,UAAM,EAAE,QAAQ,kBAAkB,MAAM,KAAK,IAAI;AAGjD,UAAM,WAAW,SAAS,QAAQ,EAAE;AACpC,QAAI,MAAM,QAAQ,GAAG;AACpB,YAAM,IAAI,MAAM,6BAA6B,MAAM,kCAAkC;AAAA,IACtF;AAGA,UAAM,aAAa;AACnB,UAAM,SAAS,kBACZ,GAAG,UAAU,cACb;AAgCH,UAAM,OAAO;AAAA,MACZ;AAAA,MACA;AAAA,MA
CA,OAAO,QAAQ;AAAA,MACf;AAAA,MACA;AAAA,IACD;AAGA,QAAI,MAAM;AACT,WAAK,KAAK,UAAU,IAAI;AAAA,IACzB;AAEA,UAAM,MAAM,MAAM,iBAAmC,IAAI;AAGzD,UAAM,SAAmB;AAAA;AAAA,MAExB,IAAI,OAAO,IAAI,MAAM;AAAA,MACrB,QAAQ,IAAI;AAAA,MACZ,OAAO,IAAI;AAAA,MACX,MAAM,IAAI;AAAA,MACV,OAAO,IAAI;AAAA,MACX,KAAK,IAAI;AAAA;AAAA,MAGT,QAAQ,gBAAgB,IAAI,MAAM;AAAA;AAAA,MAGlC,aAAa,IAAI;AAAA,MACjB,aAAa,IAAI;AAAA;AAAA,MAGjB,GAAI,IAAI,SAAS;AAAA,QAChB,OAAO,IAAI;AAAA,MACZ;AAAA;AAAA,MAGA,GAAI,IAAI,WAAW;AAAA,QAClB,SAAS,IAAI,QAAQ,IAAI,YAAO;AA9QpC;AA8QwC;AAAA,YACnC,KAAK,OAAO;AAAA,YACZ,iBAAiB,OAAO;AAAA,YACxB,UAAQ,YAAO,YAAP,mBAAiB,MACtB;AAAA,cACD,IAAI,OAAO,QAAQ,CAAC,EAAE;AAAA,cACtB,aAAa,OAAO,QAAQ,CAAC,EAAE;AAAA,cAC/B,MAAM,OAAO,QAAQ,CAAC,EAAE;AAAA,cACxB,OAAO,OAAO,QAAQ,CAAC,EAAE;AAAA,YAC1B,IACE;AAAA,UACJ;AAAA,SAAE;AAAA,MACH;AAAA,IACD;AAIA,QAAI,IAAI,aAAa,QAAW;AAC/B,aAAO,WAAW,IAAI,SAAS,IAAI,cAAY;AAAA,QAC9C,IAAI,wBAAwB,QAAQ,GAAG;AAAA,QACvC,MAAM,QAAQ;AAAA,QACd,WAAW,QAAQ;AAAA,QACnB,QAAQ,gBAAgB,QAAQ,MAAM;AAAA,QACtC,GAAI,QAAQ,aAAa,EAAE,WAAW,QAAQ,UAAU;AAAA,MACzD,EAAE;AAAA,IACH;AAGA,WAAO,OAAO,MAAM,sBAAsB,OAAO,MAAM,QAAQ;AAC/D,QAAI,OAAO,UAAU;AACpB,iBAAW,WAAW,OAAO,UAAU;AACtC,gBAAQ,OAAO,MAAM,sBAAsB,QAAQ,MAAM,QAAQ;AAAA,MAClE;AAAA,IACD;AAEA,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,WAAW,OAAsD;AACtE,UAAM,EAAE,WAAW,KAAK,IAAI;AAK5B,UAAM,mBAAmB,SAAS,WAAW,EAAE;AAC/C,QAAI,MAAM,gBAAgB,GAAG;AAC5B,YAAM,IAAI,MAAM,8BAA8B,SAAS,uCAAuC;AAAA,IAC/F;AAcA,UAAM,UAAU,OACb,SAAS,IAAI,oBAAoB,gBAAgB,KACjD,sCAAsC,gBAAgB;AAGzD,UAAM,MAAM,MAAM,iBAAwC;AAAA,MACzD;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACD,CAAC;AAGD,UAAM,gBAAgB,MAAM,sBAAsB,IAAI,MAAM,QAAQ;AAGpE,WAAO;AAAA,MACN,IAAI,OAAO,IAAI,EAAE;AAAA,MACjB,MAAM;AAAA,MACN,QAAQ,gBAAgB,IAAI,IAAI;AAAA,MAChC,YAAY,IAAI;AAAA,MAChB,GAAI,IAAI,cAAc,EAAE,YAAY,IAAI,WAAW;AAAA;AAAA,MAEnD,GAAI,IAAI,YAAY,EAAE,UAAU,IAAI,SAAS;AAAA,MAC7C,GAAI,IAAI,aAAa,EAAE,WAAW,IAAI,UAAU;AAAA,IACjD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,OAAmD;AACtE,UAAM,EAAE,QAAQ,MAAM,KAAK,IAAI;AAG/B,UAAM,YAAY,SAAS,QAAQ,EAAE;AACrC,QAAI,MAAM,SAAS,GAAG;AACrB,YAAM,IAAI,MAAM,kBAAkB,IAAI,YAAY,MAAM,+BAA+B;AAAA,IACxF;AAGA,UAAM,SACL,SAAS,UACN,MAAM,mBAAmB,WAAW,IAAI,IACxC,MAAM,gBAAgB,WAAW,IAAI;AAGzC,WAAO;AAAA,MACN,GAAG;AAAA,MACH,IAAI,OAAO,OAAO,EAAE;AAAA,IACrB;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,OAAmD;AACtE,UAAM,EAAE,WAAW,KAAK,IAAI;AAK5B,UAAM,mBAAmB,SAAS,WAAW,EAAE;AAC/C,QAAI,MAAM,gBAAgB,GAAG;AAC5B,YAAM,IAAI,MAAM,8BAA8B,SAAS,uCAAuC;AAAA,IAC/F;AAGA,UAAM,SAAS,MAAM,mBAAmB,kBAAkB,IAAI;AAG9D,WAAO;AAAA,MACN,GAAG;AAAA,MACH,IAAI,OAAO,OAAO,EAAE;AAAA,IACrB;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,YAAY,OAAqD;AACtE,UAAM,EAAE,OAAO,MAAM,QAAQ,KAAK,IAAI;AAGtC,UAAM,SAAS,MAAM,YAAY,OAAO,MAAM,EAAE,QAAQ,KAAK,CAAC;AAG9D,UAAM,cAAc,OAAO,OAAO,WAAW,WAC1C,OAAO,SACP,SAAS,OAAO,OAAO,MAAM,GAAG,EAAE;AAErC,WAAO;AAAA,MACN,IAAI,OAAO,WAAW;AAAA,MACtB,KAAK,OAAO;AAAA,MACZ,QAAQ;AAAA,IACT;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,iBAAiB,OAA0D;AAChF,UAAM,EAAE,UAAU,OAAO,MAAM,QAAQ,KAAK,IAAI;AAIhD,UAAM,eAAe,SAAS,UAAU,EAAE;AAC1C,QAAI,MAAM,YAAY,GAAG;AACxB,YAAM,IAAI,MAAM,uCAAuC,QAAQ,qCAAqC;AAAA,IACrG;AAGA,UAAM,eAAe,MAAM,eAAe,cAAc,IAAI;AAG5D,UAAM,cAAc,MAAM,YAAY,OAAO,MAAM,EAAE,QAAQ,KAAK,CAAC;AACnE,UAAM,cAAc,OAAO,YAAY,WAAW,WAC/C,YAAY,SACZ,SAAS,OAAO,YAAY,MAAM,GAAG,EAAE;AAG1C,UAAM,cAAc,MAAM,eAAe,aAAa,IAAI;AAG1D,UAAM,YAAY,cAAc,WAAW;AAE3C,WAAO;AAAA,MACN,IAAI,OAAO,WAAW;AAAA,MACtB,KAAK,YAAY;AAAA,MACjB,QAAQ;AAAA,IACT;AAAA,EACD;AACD;;;ACpdA,SAAS,sBAAsB;AAC/B,SAAS,QAAAA,OAAM,SAAS,UAAU,WAAAC,gBAAe;AAkB1C,IAAM,wBAAN,MAA4B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQlC,OAAO,uBAAuB,MAAsB;AACnD,QAAI,CAAC,MAAM;AACV,aAAO;AAAA,IACR;AAIA,QAAI,eAAe;AACnB,
QAAI,cAAc;AAElB,WAAO,iBAAiB,aAAa;AACpC,qBAAe;AACf,oBAAc,KAAK,kBAAkB,WAAW;AAAA,IACjD;AAEA,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAAe,kBAAkB,MAAsB;AAGtD,UAAM,eAAe;AAErB,WAAO,KAAK,QAAQ,cAAc,CAAC,QAAQ,SAAS,YAAY;AAE/D,YAAM,eAAe,KAAK,UAAU,OAAO;AAI3C,YAAM,eAAe,KAAK,aAAa,OAAO;AAI9C,UAAI,cAAc;AACjB,eAAO,OAAO,YAAY;AAAA;AAAA,EAAO,YAAY;AAAA;AAAA;AAAA,MAC9C,OAAO;AAEN,eAAO,OAAO,YAAY;AAAA;AAAA;AAAA,MAC3B;AAAA,IACD,CAAC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,OAAe,UAAU,MAAsB;AAC9C,WAAO,KACL,KAAK,EACL,QAAQ,SAAS,GAAG,EACpB,QAAQ,SAAS,GAAG,EACpB,QAAQ,UAAU,GAAG,EACrB,QAAQ,WAAW,GAAG,EACtB,QAAQ,UAAU,GAAG;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,OAAe,aAAa,SAAyB;AACpD,QAAI,CAAC,SAAS;AACb,aAAO;AAAA,IACR;AAGA,QAAI,UAAU,QAAQ,KAAK;AAG3B,cAAU,QAAQ,QAAQ,WAAW,MAAM;AAE3C,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAAO,iBAAiB,MAAuB;AAC9C,QAAI,CAAC,MAAM;AACV,aAAO;AAAA,IACR;AAEA,UAAM,eAAe;AACrB,WAAO,aAAa,KAAK,IAAI;AAAA,EAC9B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,OAAO,yBAAyB,MAAsB;AACrD,QAAI,CAAC,MAAM;AACV,aAAO;AAAA,IACR;AAMA,UAAM,kBAAkB;AAExB,WAAO,KAAK,QAAQ,iBAAiB,CAAC,QAAQ,UAAU,YAAY;AAGnE,aAAO,QAAQ,KAAK;AAAA,IACrB,CAAC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,OAAO,gBAAgB,MAAsB;AAC5C,QAAI,CAAC,MAAM;AACV,aAAO;AAAA,IACR;AAGA,SAAK,cAAc,SAAS,IAAI;AAGhC,QAAI,YAAY;AAIhB,gBAAY,KAAK,yBAAyB,SAAS;AAGnD,gBAAY,KAAK,uBAAuB,SAAS;AAGjD,SAAK,cAAc,UAAU,SAAS;AAEtC,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA,EAKA,OAAe,cAAc,OAAe,SAAuB;AAClE,UAAM,cAAc,QAAQ,IAAI;AAChC,QAAI,CAAC,aAAa;AACjB;AAAA,IACD;AAEA,QAAI;AACH,YAAM,kBAAkB,KAAK,sBAAsB,WAAW;AAC9D,YAAM,aAAY,oBAAI,KAAK,GAAE,YAAY;AACzC,YAAM,YAAY;AAElB,YAAM,WAAW,GAAG,SAAS;AAAA,GAAM,SAAS,gBAAgB,KAAK;AAAA,EAAK,SAAS;AAAA,EAAK,KAAK;AAAA,EAAM,OAAO;AAAA;AAAA;AAEtG,qBAAe,iBAAiB,UAAU,OAAO;AAAA,IAClD,QAAQ;AAAA,IAGR;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAAe,sBAAsB,aAA6B;AACjE,UAAM,MAAM,QAAQ,WAAW;AAC/B,UAAM,MAAMA,SAAQ,WAAW;AAC/B,UAAM,OAAO,SAAS,aAAa,GAAG;AAGtC,UAAM,MAAM,oBAAI,KAAK;AACrB,UAAM,YAAY;AAAA,MACjB,IAAI,YAAY;AAAA,MAChB,OAAO,IAAI,SAAS,IAAI,CAAC,EAAE,SAAS,GAAG,GAAG;AAAA,MAC1C,OAAO,IAAI,QAAQ,CAAC,EAAE,SAAS,GAAG,GAAG;AAAA,IACtC,EAAE,KAAK,EAAE,IAAI,MAAM;AAAA,MAClB,OAAO,IAAI,SAAS,CAAC,EAAE,SAAS,GAAG,GAAG;AAAA,MACtC,OAAO,IAAI,WAAW,CAAC,EAAE,SAAS,GAAG,GAAG;AAAA,MACxC,OAAO,IAAI,WAAW,CAAC,EAAE,SAAS,GAAG,GAAG;AAAA,IACzC,EAAE,KAAK,EAAE;AAET,WAAOD,MAAK,KAAK,GAAG,IAAI,IAAI,SAAS,GAAG,GAAG,EAAE;AAAA,EAC9C;AACD;;;ACzLO,IAAM,gCAAN,MAAuE;AAAA,EAAvE;AACN,SAAS,eAAe;AACxB,SAAS,cAAc;AAMvB;AAAA;AAAA;AAAA;AAAA,SAAQ,gBAAoC;AAAA;AAAA;AAAA;AAAA;AAAA,EAK5C,MAAM,SAAS,OAA4C;AAC1D,UAAM,EAAE,QAAQ,kBAAkB,KAAK,IAAI;AAI3C,UAAM,QAAQ,OAAO,MAAM,oBAAoB;AAC/C,QAAI,+BAAQ,IAAI;AACf,WAAK,gBAAgB,MAAM,CAAC,EAAE,YAAY;AAAA,IAC3C;AAGA,UAAM,MAAM,MAAM,iBAAiB,MAAM;AAGzC,UAAM,QAAQ,IAAI,UAAU,IAAI,MAAM,YAAY,EAAE,SAAS,MAAM,KAAK,IAAI,MAAM,YAAY,EAAE,SAAS,WAAW,KAAK,IAAI,MAAM,YAAY,EAAE,SAAS,UAAU,KACjK,WACA;AAGH,UAAM,SAAsB;AAAA,MAC3B,IAAI,IAAI;AAAA,MACR,OAAO,IAAI;AAAA,MACX,MAAM,IAAI,eAAe;AAAA,MACzB;AAAA,MACA,KAAK,IAAI;AAAA,MACT,UAAU;AAAA,MACV,QAAQ;AAAA;AAAA;AAAA,MAGR,aAAa,IAAI;AAAA,MACjB,WAAW,IAAI;AAAA,MACf,WAAW,IAAI;AAAA,IAChB;AAGA,QAAI,iBAAiB;AACpB,UAAI;AACH,cAAM,WAAW,MAAM,KAAK,mBAAmB,MAAM;AACrD,YAAI,UAAU;AACb,iBAAO,WAAW;AAAA,QACnB;AAAA,MACD,QAAQ;AAAA,MAER;AAAA,IACD;AAGA,WAAO,OAAO,MAAM,sBAAsB,OAAO,MAAM,QAAQ;AAC/D,QAAI,OAAO,UAAU;AACpB,iBAAW,WAAW,OAAO,UAAU;AACtC,gBAAQ,OAAO,MAAM,sBAAsB,QAAQ,MAAM,QAAQ;AAAA,MAClE;AAAA,IACD;AAEA,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,MAAM,QAAuC;AAClD,UAAM,IAAI,MAAM,6GAA6G;AAAA,EAC9H;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,mBAAmB,YAAsD;AACtF,QAAI;AACH,YAAM,WAAW,MAAM,yBAAyB,UAAU;AAE1D,aAAO,SAAS,IAAI,cAAY;AAAA,Q
AC/B,IAAI,QAAQ;AAAA,QACZ,MAAM,QAAQ;AAAA,QACd,WAAW,QAAQ;AAAA,QACnB,QAAQ;AAAA;AAAA,QACR,GAAI,QAAQ,aAAa,EAAE,WAAW,QAAQ,UAAU;AAAA,MACzD,EAAE;AAAA,IACH,QAAQ;AACP,aAAO,CAAC;AAAA,IACT;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,WAAW,OAAsD;AACtE,UAAM,EAAE,UAAU,IAAI;AAEtB,UAAM,MAAM,MAAM,iBAAiB,SAAS;AAG5C,UAAM,gBAAgB,MAAM,sBAAsB,IAAI,MAAM,QAAQ;AAEpE,WAAO;AAAA,MACN,IAAI,IAAI;AAAA,MACR,MAAM;AAAA,MACN,QAAQ;AAAA;AAAA,MACR,YAAY,IAAI;AAAA,IACjB;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,OAAmD;AACtE,UAAM,EAAE,QAAQ,KAAK,IAAI;AAKzB,UAAM,gBAAgB,sBAAsB,gBAAgB,IAAI;AAEhE,UAAM,SAAS,MAAM,oBAAoB,QAAQ,aAAa;AAE9D,WAAO;AAAA,MACN,IAAI,OAAO;AAAA,MACX,KAAK,OAAO;AAAA,MACZ,YAAY,OAAO;AAAA,IACpB;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,OAAmD;AACtE,UAAM,EAAE,WAAW,KAAK,IAAI;AAG5B,UAAM,gBAAgB,sBAAsB,gBAAgB,IAAI;AAEhE,UAAM,SAAS,MAAM,oBAAoB,WAAW,aAAa;AAEjE,WAAO;AAAA,MACN,IAAI,OAAO;AAAA,MACX,KAAK,OAAO;AAAA,MACZ,YAAY,OAAO;AAAA,IACpB;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,YAAY,OAAqD;AACtE,UAAM,EAAE,OAAO,MAAM,QAAQ,QAAQ,IAAI;AAGzC,UAAM,mBAAmB,WAAW,QAAQ,IAAI,mBAAmB,KAAK;AAExE,QAAI,CAAC,kBAAkB;AACtB,YAAM,IAAI,MAAM,0KAA0K;AAAA,IAC3L;AAEA,UAAM,SAAS,MAAM,kBAAkB,OAAO,MAAM,kBAAkB,MAAM;AAE5E,WAAO;AAAA,MACN,IAAI,OAAO;AAAA,MACX,KAAK,OAAO;AAAA,IACb;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,iBAAiB,OAA0D;AAtNlF;AAuNE,UAAM,EAAE,UAAU,OAAO,MAAM,QAAQ,QAAQ,IAAI;AAGnD,UAAM,cAAc,MAAM,iBAAiB,QAAQ;AAGnD,UAAM,QAAQ,SAAS,MAAM,oBAAoB;AACjD,UAAM,mBAAmB,aAAW,oCAAQ,OAAR,mBAAY,kBAAiB,QAAQ,IAAI,mBAAmB,KAAK;AAErG,QAAI,CAAC,kBAAkB;AACtB,YAAM,IAAI,MAAM,6HAA6H;AAAA,IAC9I;AAGA,UAAM,SAAS,MAAM;AAAA,MACpB;AAAA,MACA;AAAA,MACA;AAAA,MACA,YAAY;AAAA;AAAA,MACZ;AAAA,IACD;AAEA,WAAO;AAAA,MACN,IAAI,OAAO;AAAA,MACX,KAAK,OAAO;AAAA,IACb;AAAA,EACD;AACD;;;ACvOO,IAAM,iCAAN,MAAqC;AAAA;AAAA;AAAA;AAAA,EAI3C,OAAO,OAAO,UAAkD;AAC/D,YAAQ,UAAU;AAAA,MACjB,KAAK;AACJ,eAAO,IAAI,8BAA8B;AAAA,MAC1C,KAAK;AACJ,eAAO,IAAI,8BAA8B;AAAA,MAC1C;AACC,cAAM,IAAI,MAAM,0CAA0C,QAAQ,EAAE;AAAA,IACtE;AAAA,EACD;AACD;","names":["join","extname"]}
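The bulk of the issue-management-server change lands in the regenerated source map above, whose embedded `sourcesContent` carries the new `LinearMarkupConverter` and `LinearIssueManagementProvider`. As a rough sketch of the unwrap step that embedded source describes — reusing the regex visible in the source; the standalone function and the sample input are ours, not the package's exports:

```ts
// Unwrap "<details><summary>N lines</summary>…</details>" code-sample blocks,
// keeping only the inner content so it is not later rewritten into Linear's
// +++ collapsible syntax. Regex copied from the embedded source above.
const codeSampleRegex =
	/<details[^>]*>\s*<summary[^>]*>([^<]*\d+\s+lines[^<]*)<\/summary>\s*([\s\S]*?)<\/details>/gi

function removeCodeSampleWrappers(text: string): string {
	if (!text) return text
	return text.replace(codeSampleRegex, (_match, _summary, content: string) => content.trim())
}

// Hypothetical input: the wrapper disappears, the inner sample survives as-is.
const sample = '<details><summary>3 lines</summary>\nconst x = 1\n</details>'
console.log(removeCodeSampleWrappers(sample)) // -> "const x = 1"
```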
@@ -9,7 +9,7 @@ import {
  detectClaudeCli,
  launchClaude,
  launchClaudeInNewTerminalWindow
- } from "./chunk-ITIXKM24.js";
+ } from "./chunk-IGKPPACU.js";
  import {
  logger
  } from "./chunk-VT4PDUYT.js";
@@ -122,4 +122,4 @@ var ClaudeService = class {
  export {
  ClaudeService
  };
- //# sourceMappingURL=chunk-XAHE76RL.js.map
+ //# sourceMappingURL=chunk-O36JLYNW.js.map
@@ -15,7 +15,7 @@ var ClaudeBranchNameStrategy = class {
  this.claudeModel = claudeModel;
  }
  async generate(issueNumber, title) {
- const { generateBranchName } = await import("./claude-SNWHWWWM.js");
+ const { generateBranchName } = await import("./claude-V4HRPR4Z.js");
  return generateBranchName(title, issueNumber, this.claudeModel);
  }
  };
@@ -52,4 +52,4 @@ export {
  ClaudeBranchNameStrategy,
  DefaultBranchNamingService
  };
- //# sourceMappingURL=chunk-6VQNF44G.js.map
+ //# sourceMappingURL=chunk-Q457PKGH.js.map
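The branch-naming chunk changes only its dynamic-import specifier. For orientation, a sketch of the pattern this hunk shows — the specifier and the call come from the diff, while the parameter and return types are assumptions:

```ts
// ClaudeBranchNameStrategy defers loading the Claude helper chunk until a
// branch name is actually requested, so a rebuild only rewrites this one
// import specifier when the chunk hash changes.
class ClaudeBranchNameStrategy {
	constructor(private readonly claudeModel?: string) {}

	async generate(issueNumber: string, title: string): Promise<string> {
		// Lazy load: nothing Claude-related is pulled in at startup.
		const { generateBranchName } = await import("./claude-V4HRPR4Z.js")
		return generateBranchName(title, issueNumber, this.claudeModel)
	}
}
```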
@@ -10,7 +10,7 @@ import {
  } from "./chunk-ZX3GTM7O.js";
  import {
  launchClaude
- } from "./chunk-ITIXKM24.js";
+ } from "./chunk-IGKPPACU.js";
  import {
  getLogger
  } from "./chunk-6MLEBAYZ.js";
@@ -244,4 +244,4 @@ export {
  IssueEnhancementService,
  capitalizeFirstLetter
  };
- //# sourceMappingURL=chunk-7FM7AL7S.js.map
+ //# sourceMappingURL=chunk-VYKKWU36.js.map
@@ -4,7 +4,7 @@ import {
  } from "./chunk-NXMDEL3F.js";
  import {
  IssueManagementProviderFactory
- } from "./chunk-GJMEKEI5.js";
+ } from "./chunk-NPEMVE27.js";
  import {
  hasMultipleRemotes
  } from "./chunk-FXDYIV3K.js";
@@ -20,7 +20,7 @@ import {
  import {
  generateDeterministicSessionId,
  launchClaude
- } from "./chunk-ITIXKM24.js";
+ } from "./chunk-IGKPPACU.js";
  import {
  logger
  } from "./chunk-VT4PDUYT.js";
@@ -278,10 +278,11 @@ var SessionSummaryService = class {
  * @param summary - The summary text to post
  * @param worktreePath - Path to worktree for loading settings (optional)
  */
- async postSummary(issueNumber, summary, worktreePath) {
+ async postSummary(issueNumber, summary, worktreePath, prNumber) {
  const settings = await this.settingsManager.loadSettings(worktreePath);
- await this.postSummaryToIssue(issueNumber, summary, settings, worktreePath ?? process.cwd());
- logger.success("Session summary posted to issue");
+ await this.postSummaryToIssue(issueNumber, summary, settings, worktreePath ?? process.cwd(), prNumber);
+ const target = prNumber ? `PR #${prNumber}` : "issue";
+ logger.success(`Session summary posted to ${target}`);
  }
  /**
  * Determine if summary should be generated based on loom type and settings
@@ -352,7 +353,7 @@ var SessionSummaryService = class {
  */
  async postSummaryToIssue(issueNumber, summary, settings, worktreePath, prNumber) {
  var _a;
- const providerType = ((_a = settings.issueManagement) == null ? void 0 : _a.provider) ?? "github";
+ const providerType = prNumber !== void 0 ? "github" : ((_a = settings.issueManagement) == null ? void 0 : _a.provider) ?? "github";
  const provider = IssueManagementProviderFactory.create(providerType);
  const finalSummary = await this.applyAttributionWithSettings(summary, settings, worktreePath);
  const targetNumber = prNumber ?? issueNumber;
@@ -368,4 +369,4 @@ export {
  export {
  SessionSummaryService
  };
- //# sourceMappingURL=chunk-EDDIAWVM.js.map
+ //# sourceMappingURL=chunk-WT4UGBE2.js.map
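The substantive change in this chunk is that session summaries can now target a pull request: `postSummary` accepts an optional `prNumber`, and `postSummaryToIssue` forces the GitHub provider whenever one is given, since PRs exist only on GitHub. A self-contained sketch of that routing — the decision logic mirrors the hunks above, while the `Settings` shape and the helper name are our assumptions:

```ts
type IssueProvider = "github" | "linear"

interface Settings {
	issueManagement?: { provider?: IssueProvider }
}

// Decide where a session summary is posted. An explicit prNumber overrides
// the configured tracker (which may be Linear) and redirects to the GitHub PR.
function resolveSummaryTarget(
	settings: Settings,
	issueNumber: string,
	prNumber?: number,
): { providerType: IssueProvider; targetNumber: string | number; label: string } {
	const providerType: IssueProvider =
		prNumber !== undefined ? "github" : settings.issueManagement?.provider ?? "github"
	const targetNumber = prNumber ?? issueNumber
	const label = prNumber ? `PR #${prNumber}` : "issue"
	return { providerType, targetNumber, label }
}

// e.g. a Linear-configured project still posts a PR summary to GitHub:
// resolveSummaryTarget({ issueManagement: { provider: "linear" } }, "ENG-42", 17)
// -> { providerType: "github", targetNumber: 17, label: "PR #17" }
```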