@iloom/cli 0.7.5 → 0.7.6
- package/dist/{PRManager-6ZJZRG5Z.js → PRManager-7F3AAY66.js} +3 -3
- package/dist/{chunk-TB6475EW.js → chunk-6YAMWLCP.js} +2 -2
- package/dist/{chunk-77VLG2KP.js → chunk-ETY2SBW5.js} +3 -3
- package/dist/chunk-ETY2SBW5.js.map +1 -0
- package/dist/{chunk-LZBSLO6S.js → chunk-NPEMVE27.js} +267 -6
- package/dist/chunk-NPEMVE27.js.map +1 -0
- package/dist/{chunk-KSXA2NOJ.js → chunk-WT4UGBE2.js} +7 -6
- package/dist/chunk-WT4UGBE2.js.map +1 -0
- package/dist/{cleanup-DB7EFBF3.js → cleanup-IO4KV2DL.js} +2 -2
- package/dist/cli.js +7 -7
- package/dist/{commit-NGMDWWAP.js → commit-3ULFKXNB.js} +2 -2
- package/dist/mcp/issue-management-server.js +263 -5
- package/dist/mcp/issue-management-server.js.map +1 -1
- package/dist/{summary-2KLNHVTN.js → summary-MPOOQIOX.js} +37 -6
- package/dist/summary-MPOOQIOX.js.map +1 -0
- package/package.json +1 -1
- package/dist/chunk-77VLG2KP.js.map +0 -1
- package/dist/chunk-KSXA2NOJ.js.map +0 -1
- package/dist/chunk-LZBSLO6S.js.map +0 -1
- package/dist/summary-2KLNHVTN.js.map +0 -1
- /package/dist/{PRManager-6ZJZRG5Z.js.map → PRManager-7F3AAY66.js.map} +0 -0
- /package/dist/{chunk-TB6475EW.js.map → chunk-6YAMWLCP.js.map} +0 -0
- /package/dist/{cleanup-DB7EFBF3.js.map → cleanup-IO4KV2DL.js.map} +0 -0
- /package/dist/{commit-NGMDWWAP.js.map → commit-3ULFKXNB.js.map} +0 -0
package/dist/{chunk-LZBSLO6S.js → chunk-NPEMVE27.js}

@@ -17,6 +17,247 @@ import {
   getIssueNodeId,
   updateIssueComment
 } from "./chunk-GCPAZSGV.js";
+import {
+  logger
+} from "./chunk-VT4PDUYT.js";
+
+// src/utils/image-processor.ts
+import { tmpdir } from "os";
+import { join, extname } from "path";
+import { existsSync, mkdirSync, createWriteStream, unlinkSync } from "fs";
+import { pipeline } from "stream/promises";
+import { Readable } from "stream";
+import { createHash } from "crypto";
+import { execa } from "execa";
+var SUPPORTED_EXTENSIONS = [".png", ".jpg", ".jpeg", ".gif", ".webp", ".svg"];
+var MAX_IMAGE_SIZE = 10 * 1024 * 1024;
+var REQUEST_TIMEOUT_MS = 3e4;
+var CACHE_DIR = join(tmpdir(), "iloom-images");
+var cachedGitHubToken;
+function extractMarkdownImageUrls(content) {
+  if (!content) {
+    return [];
+  }
+  const matches = [];
+  const markdownRegex = /!\[([^\]]*)\]\(((?:[^()\s]|\((?:[^()\s]|\([^()]*\))*\))+)\)/g;
+  let match;
+  while ((match = markdownRegex.exec(content)) !== null) {
+    const url = match[2];
+    if (url) {
+      matches.push({
+        fullMatch: match[0],
+        url,
+        isMarkdown: true
+      });
+    }
+  }
+  const htmlImgRegex = /<img\s+[^>]*src=["']([^"']+)["'][^>]*\/?>/gi;
+  while ((match = htmlImgRegex.exec(content)) !== null) {
+    const url = match[1];
+    if (url) {
+      matches.push({
+        fullMatch: match[0],
+        url,
+        isMarkdown: false
+      });
+    }
+  }
+  return matches;
+}
+function isAuthenticatedImageUrl(url) {
+  try {
+    const parsedUrl = new URL(url);
+    const hostname = parsedUrl.hostname.toLowerCase();
+    if (hostname === "uploads.linear.app") {
+      return true;
+    }
+    if (hostname === "private-user-images.githubusercontent.com") {
+      return true;
+    }
+    if (hostname === "github.com" && parsedUrl.pathname.startsWith("/user-attachments/assets/")) {
+      return true;
+    }
+    return false;
+  } catch {
+    return false;
+  }
+}
+function getExtensionFromUrl(url) {
+  try {
+    const parsedUrl = new URL(url);
+    const pathname = parsedUrl.pathname;
+    const ext = extname(pathname).toLowerCase();
+    if (SUPPORTED_EXTENSIONS.includes(ext)) {
+      return ext;
+    }
+    return null;
+  } catch {
+    return null;
+  }
+}
+function getCacheKey(url) {
+  const parsedUrl = new URL(url);
+  if (parsedUrl.hostname === "private-user-images.githubusercontent.com") {
+    parsedUrl.searchParams.delete("jwt");
+  }
+  const stableUrl = parsedUrl.toString();
+  const hash = createHash("sha256").update(stableUrl).digest("hex").slice(0, 16);
+  const ext = getExtensionFromUrl(url) ?? ".png";
+  return `${hash}${ext}`;
+}
+function getCachedImagePath(url) {
+  const cacheKey = getCacheKey(url);
+  const cachedPath = join(CACHE_DIR, cacheKey);
+  if (existsSync(cachedPath)) {
+    return cachedPath;
+  }
+  return void 0;
+}
+async function getAuthToken(provider) {
+  if (provider === "github") {
+    if (cachedGitHubToken !== void 0) {
+      return cachedGitHubToken;
+    }
+    try {
+      const result = await execa("gh", ["auth", "token"]);
+      cachedGitHubToken = result.stdout.trim();
+      return cachedGitHubToken;
+    } catch (error) {
+      const message = error instanceof Error ? error.message : String(error);
+      logger.warn(`Failed to get GitHub auth token via gh CLI: ${message}`);
+      return void 0;
+    }
+  }
+  if (provider === "linear") {
+    return process.env.LINEAR_API_TOKEN;
+  }
+  return void 0;
+}
+async function downloadAndSaveImage(url, destPath, authHeader) {
+  const headers = {};
+  if (authHeader) {
+    headers["Authorization"] = authHeader;
+  }
+  const controller = new AbortController();
+  const timeoutId = setTimeout(() => controller.abort(), REQUEST_TIMEOUT_MS);
+  try {
+    const response = await fetch(url, { headers, signal: controller.signal });
+    if (!response.ok) {
+      throw new Error(`Failed to download image: ${response.status} ${response.statusText}`);
+    }
+    const contentLength = response.headers.get("Content-Length");
+    if (contentLength && parseInt(contentLength, 10) > MAX_IMAGE_SIZE) {
+      throw new Error(`Image too large: ${contentLength} bytes exceeds ${MAX_IMAGE_SIZE} byte limit`);
+    }
+    if (!response.body) {
+      throw new Error("Response body is null");
+    }
+    const reader = response.body.getReader();
+    let bytesWritten = 0;
+    const nodeReadable = new Readable({
+      async read() {
+        try {
+          const { done, value } = await reader.read();
+          if (done) {
+            this.push(null);
+            return;
+          }
+          bytesWritten += value.byteLength;
+          if (bytesWritten > MAX_IMAGE_SIZE) {
+            reader.cancel();
+            this.destroy(new Error(`Image too large: ${bytesWritten} bytes exceeds ${MAX_IMAGE_SIZE} byte limit`));
+            return;
+          }
+          this.push(Buffer.from(value));
+        } catch (err) {
+          this.destroy(err instanceof Error ? err : new Error(String(err)));
+        }
+      }
+    });
+    if (!existsSync(CACHE_DIR)) {
+      mkdirSync(CACHE_DIR, { recursive: true });
+    }
+    const writeStream = createWriteStream(destPath);
+    try {
+      await pipeline(nodeReadable, writeStream);
+    } catch (pipelineError) {
+      try {
+        if (existsSync(destPath)) {
+          unlinkSync(destPath);
+        }
+      } catch {
+      }
+      throw pipelineError;
+    }
+  } catch (error) {
+    if (error instanceof Error && error.name === "AbortError") {
+      throw new Error(`Image download timed out after ${REQUEST_TIMEOUT_MS}ms`);
+    }
+    throw error;
+  } finally {
+    clearTimeout(timeoutId);
+  }
+}
+function getCacheDestPath(url) {
+  if (!existsSync(CACHE_DIR)) {
+    mkdirSync(CACHE_DIR, { recursive: true });
+  }
+  const cacheKey = getCacheKey(url);
+  return join(CACHE_DIR, cacheKey);
+}
+function rewriteMarkdownUrls(content, urlMap) {
+  let result = content;
+  for (const [originalUrl, localPath] of urlMap) {
+    const escapedUrl = originalUrl.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
+    const urlRegex = new RegExp(escapedUrl, "g");
+    result = result.replace(urlRegex, localPath);
+  }
+  return result;
+}
+async function processMarkdownImages(content, provider) {
+  if (!content) {
+    return "";
+  }
+  const images = extractMarkdownImageUrls(content);
+  if (images.length === 0) {
+    return content;
+  }
+  const authImages = images.filter((img) => isAuthenticatedImageUrl(img.url));
+  if (authImages.length === 0) {
+    return content;
+  }
+  const authToken = await getAuthToken(provider);
+  const uniqueUrls = [...new Set(authImages.map((img) => img.url))];
+  const urlMap = /* @__PURE__ */ new Map();
+  const downloadPromises = uniqueUrls.map(async (url) => {
+    try {
+      const cachedPath = getCachedImagePath(url);
+      if (cachedPath) {
+        logger.debug(`Using cached image: ${cachedPath}`);
+        return { url, localPath: cachedPath };
+      }
+      logger.debug(`Downloading image: ${url}`);
+      const destPath = getCacheDestPath(url);
+      await downloadAndSaveImage(
+        url,
+        destPath,
+        authToken ? `Bearer ${authToken}` : void 0
+      );
+      return { url, localPath: destPath };
+    } catch (error) {
+      const message = error instanceof Error ? error.message : String(error);
+      logger.warn(`Failed to download image ${url}: ${message}`);
+      return null;
+    }
+  });
+  const results = await Promise.all(downloadPromises);
+  for (const result of results) {
+    if (result !== null) {
+      urlMap.set(result.url, result.localPath);
+    }
+  }
+  return rewriteMarkdownUrls(content, urlMap);
+}
 
 // src/mcp/GitHubIssueManagementProvider.ts
 function normalizeAuthor(author) {
@@ -96,6 +337,12 @@ var GitHubIssueManagementProvider = class {
         ...comment.updatedAt && { updatedAt: comment.updatedAt }
       }));
     }
+    result.body = await processMarkdownImages(result.body, "github");
+    if (result.comments) {
+      for (const comment of result.comments) {
+        comment.body = await processMarkdownImages(comment.body, "github");
+      }
+    }
     return result;
   }
   /**
@@ -164,6 +411,12 @@ var GitHubIssueManagementProvider = class {
         ...comment.updatedAt && { updatedAt: comment.updatedAt }
       }));
     }
+    result.body = await processMarkdownImages(result.body, "github");
+    if (result.comments) {
+      for (const comment of result.comments) {
+        comment.body = await processMarkdownImages(comment.body, "github");
+      }
+    }
     return result;
   }
   /**
@@ -183,9 +436,10 @@ var GitHubIssueManagementProvider = class {
       "--jq",
       "{id: .id, body: .body, user: .user, created_at: .created_at, updated_at: .updated_at, html_url: .html_url, reactions: .reactions}"
     ]);
+    const processedBody = await processMarkdownImages(raw.body, "github");
     return {
       id: String(raw.id),
-      body: raw.body,
+      body: processedBody,
       author: normalizeAuthor(raw.user),
       created_at: raw.created_at,
       ...raw.updated_at && { updated_at: raw.updated_at },
@@ -262,7 +516,7 @@ var GitHubIssueManagementProvider = class {
 
 // src/utils/linear-markup-converter.ts
 import { appendFileSync } from "fs";
-import { join, dirname, basename, extname } from "path";
+import { join as join2, dirname, basename, extname as extname2 } from "path";
 var LinearMarkupConverter = class {
   /**
    * Convert HTML details/summary blocks to Linear's collapsible format
@@ -398,7 +652,7 @@ ${content}
    */
   static getTimestampedLogPath(logFilePath) {
     const dir = dirname(logFilePath);
-    const ext = extname(logFilePath);
+    const ext = extname2(logFilePath);
     const base = basename(logFilePath, ext);
     const now = /* @__PURE__ */ new Date();
     const timestamp = [
@@ -410,7 +664,7 @@ ${content}
       String(now.getMinutes()).padStart(2, "0"),
       String(now.getSeconds()).padStart(2, "0")
     ].join("");
-    return join(dir, `${base}-${timestamp}${ext}`);
+    return join2(dir, `${base}-${timestamp}${ext}`);
   }
 };
 
@@ -459,6 +713,12 @@ var LinearIssueManagementProvider = class {
       } catch {
       }
     }
+    result.body = await processMarkdownImages(result.body, "linear");
+    if (result.comments) {
+      for (const comment of result.comments) {
+        comment.body = await processMarkdownImages(comment.body, "linear");
+      }
+    }
     return result;
   }
   /**
@@ -492,9 +752,10 @@ var LinearIssueManagementProvider = class {
   async getComment(input) {
     const { commentId } = input;
     const raw = await getLinearComment(commentId);
+    const processedBody = await processMarkdownImages(raw.body, "linear");
     return {
       id: raw.id,
-      body: raw.body,
+      body: processedBody,
       author: null,
       // Linear SDK doesn't return comment author info in basic fetch
       created_at: raw.createdAt
@@ -589,4 +850,4 @@ var IssueManagementProviderFactory = class {
 export {
   IssueManagementProviderFactory
 };
-//# sourceMappingURL=chunk-LZBSLO6S.js.map
+//# sourceMappingURL=chunk-NPEMVE27.js.map
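One detail in the chunk above that is easy to miss: getCacheKey hashes a "stable" form of the URL, stripping GitHub's rotating jwt query parameter so repeated fetches of the same private image map to the same cache file. A standalone sketch of that scheme (the sample URL and jwt value are made up):

```ts
import { createHash } from "node:crypto";

// Mirrors getCacheKey from the compiled chunk above: drop the volatile `jwt`
// param, hash the remaining URL, keep the first 16 hex chars + extension.
const url = new URL("https://private-user-images.githubusercontent.com/1/2.png?jwt=abc123");
url.searchParams.delete("jwt");
const hash = createHash("sha256").update(url.toString()).digest("hex").slice(0, 16);
console.log(`${hash}.png`); // same cache key no matter which jwt was attached
```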
package/dist/chunk-NPEMVE27.js.map

@@ -0,0 +1 @@
+{"version":3,"sources":["../src/utils/image-processor.ts","../src/mcp/GitHubIssueManagementProvider.ts","../src/utils/linear-markup-converter.ts","../src/mcp/LinearIssueManagementProvider.ts","../src/mcp/IssueManagementProviderFactory.ts"],"sourcesContent":[…],"mappings":"…"}
BAAiB,SAAS,MAAM,SAAS,cAAc;AACzD,YAAM,IAAI,MAAM,kCAAkC,kBAAkB,IAAI;AAAA,IAC1E;AACA,UAAM;AAAA,EACR,UAAE;AACA,iBAAa,SAAS;AAAA,EACxB;AACF;AAQO,SAAS,iBAAiB,KAAqB;AAEpD,MAAI,CAAC,WAAW,SAAS,GAAG;AAC1B,cAAU,WAAW,EAAE,WAAW,KAAK,CAAC;AAAA,EAC1C;AAGA,QAAM,WAAW,YAAY,GAAG;AAChC,SAAO,KAAK,WAAW,QAAQ;AACjC;AASO,SAAS,oBACd,SACA,QACQ;AACR,MAAI,SAAS;AAEb,aAAW,CAAC,aAAa,SAAS,KAAK,QAAQ;AAE7C,UAAM,aAAa,YAAY,QAAQ,uBAAuB,MAAM;AACpE,UAAM,WAAW,IAAI,OAAO,YAAY,GAAG;AAC3C,aAAS,OAAO,QAAQ,UAAU,SAAS;AAAA,EAC7C;AAEA,SAAO;AACT;AAUA,eAAsB,sBACpB,SACA,UACiB;AAEjB,MAAI,CAAC,SAAS;AACZ,WAAO;AAAA,EACT;AAGA,QAAM,SAAS,yBAAyB,OAAO;AAC/C,MAAI,OAAO,WAAW,GAAG;AACvB,WAAO;AAAA,EACT;AAGA,QAAM,aAAa,OAAO,OAAO,SAAO,wBAAwB,IAAI,GAAG,CAAC;AACxE,MAAI,WAAW,WAAW,GAAG;AAC3B,WAAO;AAAA,EACT;AAGA,QAAM,YAAY,MAAM,aAAa,QAAQ;AAG7C,QAAM,aAAa,CAAC,GAAG,IAAI,IAAI,WAAW,IAAI,SAAO,IAAI,GAAG,CAAC,CAAC;AAG9D,QAAM,SAAS,oBAAI,IAAoB;AAGvC,QAAM,mBAAmB,WAAW,IAAI,OAAO,QAAQ;AACrD,QAAI;AAEF,YAAM,aAAa,mBAAmB,GAAG;AACzC,UAAI,YAAY;AACd,eAAO,MAAM,uBAAuB,UAAU,EAAE;AAChD,eAAO,EAAE,KAAK,WAAW,WAAW;AAAA,MACtC;AAGA,aAAO,MAAM,sBAAsB,GAAG,EAAE;AACxC,YAAM,WAAW,iBAAiB,GAAG;AACrC,YAAM;AAAA,QACJ;AAAA,QACA;AAAA,QACA,YAAY,UAAU,SAAS,KAAK;AAAA,MACtC;AACA,aAAO,EAAE,KAAK,WAAW,SAAS;AAAA,IACpC,SAAS,OAAO;AAEd,YAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACrE,aAAO,KAAK,4BAA4B,GAAG,KAAK,OAAO,EAAE;AACzD,aAAO;AAAA,IACT;AAAA,EACF,CAAC;AAED,QAAM,UAAU,MAAM,QAAQ,IAAI,gBAAgB;AAGlD,aAAW,UAAU,SAAS;AAC5B,QAAI,WAAW,MAAM;AACnB,aAAO,IAAI,OAAO,KAAK,OAAO,SAAS;AAAA,IACzC;AAAA,EACF;AAGA,SAAO,oBAAoB,SAAS,MAAM;AAC5C;;;ACpZA,SAAS,gBAAgB,QAAgE;AACxF,MAAI,CAAC,OAAQ,QAAO;AAEpB,SAAO;AAAA,IACN,IAAI,OAAO,KAAK,OAAO,OAAO,EAAE,IAAI,OAAO;AAAA,IAC3C,aAAa,OAAO;AAAA;AAAA,IACpB,OAAO,OAAO;AAAA;AAAA,IACd,GAAI,OAAO,aAAa,EAAE,WAAW,OAAO,UAAU;AAAA,IACtD,GAAI,OAAO,OAAO,EAAE,KAAK,OAAO,IAAI;AAAA,EACrC;AACD;AAMO,SAAS,wBAAwB,KAAqB;AAC5D,QAAM,QAAQ,IAAI,MAAM,sBAAsB;AAC9C,MAAI,EAAC,+BAAQ,KAAI;AAChB,UAAM,IAAI,MAAM,uCAAuC,GAAG,EAAE;AAAA,EAC7D;AACA,SAAO,MAAM,CAAC;AACf;AAKO,IAAM,gCAAN,MAAuE;AAAA,EAAvE;AACN,SAAS,eAAe;AACxB,SAAS,cAAc;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMvB,MAAM,SAAS,OAA4C;AAC1D,UAAM,EAAE,QAAQ,kBAAkB,MAAM,KAAK,IAAI;AAGjD,UAAM,cAAc,SAAS,QAAQ,EAAE;AACvC,QAAI,MAAM,WAAW,GAAG;AACvB,YAAM,IAAI,MAAM,gCAAgC,MAAM,qCAAqC;AAAA,IAC5F;AAGA,UAAM,SAAS,kBACZ,2EACA;AAuBH,UAAM,OAAO;AAAA,MACZ;AAAA,MACA;AAAA,MACA,OAAO,WAAW;AAAA,MAClB;AAAA,MACA;AAAA,IACD;AAGA,QAAI,MAAM;AACT,WAAK,KAAK,UAAU,IAAI;AAAA,IACzB;AAEA,UAAM,MAAM,MAAM,iBAAsC,IAAI;AAG5D,UAAM,SAAsB;AAAA;AAAA,MAE3B,IAAI,OAAO,IAAI,MAAM;AAAA,MACrB,OAAO,IAAI;AAAA,MACX,MAAM,IAAI;AAAA,MACV,OAAO,IAAI;AAAA,MACX,KAAK,IAAI;AAAA,MACT,UAAU;AAAA;AAAA,MAGV,QAAQ,gBAAgB,IAAI,MAAM;AAAA;AAAA,MAGlC,GAAI,IAAI,aAAa;AAAA,QACpB,WAAW,IAAI,UAAU,IAAI,OAAK,gBAAgB,CAAC,CAAC,EAAE,OAAO,CAAC,MAA2B,MAAM,IAAI;AAAA,MACpG;AAAA,MACA,GAAI,IAAI,UAAU;AAAA,QACjB,QAAQ,IAAI;AAAA,MACb;AAAA;AAAA,MAGA,GAAI,IAAI,aAAa;AAAA,QACpB,WAAW,IAAI;AAAA,MAChB;AAAA,IACD;AAKA,QAAI,IAAI,aAAa,QAAW;AAC/B,aAAO,WAAW,IAAI,SAAS,IAAI,cAAY;AAAA,QAC9C,IAAI,wBAAwB,QAAQ,GAAG;AAAA,QACvC,MAAM,QAAQ;AAAA,QACd,WAAW,QAAQ;AAAA,QACnB,QAAQ,gBAAgB,QAAQ,MAAM;AAAA,QACtC,GAAI,QAAQ,aAAa,EAAE,WAAW,QAAQ,UAAU;AAAA,MACzD,EAAE;AAAA,IACH;AAGA,WAAO,OAAO,MAAM,sBAAsB,OAAO,MAAM,QAAQ;AAC/D,QAAI,OAAO,UAAU;AACpB,iBAAW,WAAW,OAAO,UAAU;AACtC,gBAAQ,OAAO,MAAM,sBAAsB,QAAQ,MAAM,QAAQ;AAAA,MAClE;AAAA,IACD;AAEA,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,MAAM,OAAsC;AACjD,UAAM,EAAE,QAAQ,kBAAkB,MAAM,KAAK,IAAI;AAGjD,UAAM,WAAW,SAAS,QAAQ,EAAE;AACpC,QAAI,MAAM,QAAQ,GAAG;AACpB,YAAM,IAAI,MAAM,6BAA6B,MAAM,kCAAkC;AAAA,IACtF;AAGA,UAAM,aAAa;AACnB,UAAM,SAAS,kBACZ,GAAG,UAAU,cACb;AAgCH,UAAM,OAAO;AAAA,MACZ;AAAA,MACA;AAAA,MA
CA,OAAO,QAAQ;AAAA,MACf;AAAA,MACA;AAAA,IACD;AAGA,QAAI,MAAM;AACT,WAAK,KAAK,UAAU,IAAI;AAAA,IACzB;AAEA,UAAM,MAAM,MAAM,iBAAmC,IAAI;AAGzD,UAAM,SAAmB;AAAA;AAAA,MAExB,IAAI,OAAO,IAAI,MAAM;AAAA,MACrB,QAAQ,IAAI;AAAA,MACZ,OAAO,IAAI;AAAA,MACX,MAAM,IAAI;AAAA,MACV,OAAO,IAAI;AAAA,MACX,KAAK,IAAI;AAAA;AAAA,MAGT,QAAQ,gBAAgB,IAAI,MAAM;AAAA;AAAA,MAGlC,aAAa,IAAI;AAAA,MACjB,aAAa,IAAI;AAAA;AAAA,MAGjB,GAAI,IAAI,SAAS;AAAA,QAChB,OAAO,IAAI;AAAA,MACZ;AAAA;AAAA,MAGA,GAAI,IAAI,WAAW;AAAA,QAClB,SAAS,IAAI,QAAQ,IAAI,YAAO;AA9QpC;AA8QwC;AAAA,YACnC,KAAK,OAAO;AAAA,YACZ,iBAAiB,OAAO;AAAA,YACxB,UAAQ,YAAO,YAAP,mBAAiB,MACtB;AAAA,cACD,IAAI,OAAO,QAAQ,CAAC,EAAE;AAAA,cACtB,aAAa,OAAO,QAAQ,CAAC,EAAE;AAAA,cAC/B,MAAM,OAAO,QAAQ,CAAC,EAAE;AAAA,cACxB,OAAO,OAAO,QAAQ,CAAC,EAAE;AAAA,YAC1B,IACE;AAAA,UACJ;AAAA,SAAE;AAAA,MACH;AAAA,IACD;AAIA,QAAI,IAAI,aAAa,QAAW;AAC/B,aAAO,WAAW,IAAI,SAAS,IAAI,cAAY;AAAA,QAC9C,IAAI,wBAAwB,QAAQ,GAAG;AAAA,QACvC,MAAM,QAAQ;AAAA,QACd,WAAW,QAAQ;AAAA,QACnB,QAAQ,gBAAgB,QAAQ,MAAM;AAAA,QACtC,GAAI,QAAQ,aAAa,EAAE,WAAW,QAAQ,UAAU;AAAA,MACzD,EAAE;AAAA,IACH;AAGA,WAAO,OAAO,MAAM,sBAAsB,OAAO,MAAM,QAAQ;AAC/D,QAAI,OAAO,UAAU;AACpB,iBAAW,WAAW,OAAO,UAAU;AACtC,gBAAQ,OAAO,MAAM,sBAAsB,QAAQ,MAAM,QAAQ;AAAA,MAClE;AAAA,IACD;AAEA,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,WAAW,OAAsD;AACtE,UAAM,EAAE,WAAW,KAAK,IAAI;AAK5B,UAAM,mBAAmB,SAAS,WAAW,EAAE;AAC/C,QAAI,MAAM,gBAAgB,GAAG;AAC5B,YAAM,IAAI,MAAM,8BAA8B,SAAS,uCAAuC;AAAA,IAC/F;AAcA,UAAM,UAAU,OACb,SAAS,IAAI,oBAAoB,gBAAgB,KACjD,sCAAsC,gBAAgB;AAGzD,UAAM,MAAM,MAAM,iBAAwC;AAAA,MACzD;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACD,CAAC;AAGD,UAAM,gBAAgB,MAAM,sBAAsB,IAAI,MAAM,QAAQ;AAGpE,WAAO;AAAA,MACN,IAAI,OAAO,IAAI,EAAE;AAAA,MACjB,MAAM;AAAA,MACN,QAAQ,gBAAgB,IAAI,IAAI;AAAA,MAChC,YAAY,IAAI;AAAA,MAChB,GAAI,IAAI,cAAc,EAAE,YAAY,IAAI,WAAW;AAAA;AAAA,MAEnD,GAAI,IAAI,YAAY,EAAE,UAAU,IAAI,SAAS;AAAA,MAC7C,GAAI,IAAI,aAAa,EAAE,WAAW,IAAI,UAAU;AAAA,IACjD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,OAAmD;AACtE,UAAM,EAAE,QAAQ,MAAM,KAAK,IAAI;AAG/B,UAAM,YAAY,SAAS,QAAQ,EAAE;AACrC,QAAI,MAAM,SAAS,GAAG;AACrB,YAAM,IAAI,MAAM,kBAAkB,IAAI,YAAY,MAAM,+BAA+B;AAAA,IACxF;AAGA,UAAM,SACL,SAAS,UACN,MAAM,mBAAmB,WAAW,IAAI,IACxC,MAAM,gBAAgB,WAAW,IAAI;AAGzC,WAAO;AAAA,MACN,GAAG;AAAA,MACH,IAAI,OAAO,OAAO,EAAE;AAAA,IACrB;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,OAAmD;AACtE,UAAM,EAAE,WAAW,KAAK,IAAI;AAK5B,UAAM,mBAAmB,SAAS,WAAW,EAAE;AAC/C,QAAI,MAAM,gBAAgB,GAAG;AAC5B,YAAM,IAAI,MAAM,8BAA8B,SAAS,uCAAuC;AAAA,IAC/F;AAGA,UAAM,SAAS,MAAM,mBAAmB,kBAAkB,IAAI;AAG9D,WAAO;AAAA,MACN,GAAG;AAAA,MACH,IAAI,OAAO,OAAO,EAAE;AAAA,IACrB;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,YAAY,OAAqD;AACtE,UAAM,EAAE,OAAO,MAAM,QAAQ,KAAK,IAAI;AAGtC,UAAM,SAAS,MAAM,YAAY,OAAO,MAAM,EAAE,QAAQ,KAAK,CAAC;AAG9D,UAAM,cAAc,OAAO,OAAO,WAAW,WAC1C,OAAO,SACP,SAAS,OAAO,OAAO,MAAM,GAAG,EAAE;AAErC,WAAO;AAAA,MACN,IAAI,OAAO,WAAW;AAAA,MACtB,KAAK,OAAO;AAAA,MACZ,QAAQ;AAAA,IACT;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,iBAAiB,OAA0D;AAChF,UAAM,EAAE,UAAU,OAAO,MAAM,QAAQ,KAAK,IAAI;AAIhD,UAAM,eAAe,SAAS,UAAU,EAAE;AAC1C,QAAI,MAAM,YAAY,GAAG;AACxB,YAAM,IAAI,MAAM,uCAAuC,QAAQ,qCAAqC;AAAA,IACrG;AAGA,UAAM,eAAe,MAAM,eAAe,cAAc,IAAI;AAG5D,UAAM,cAAc,MAAM,YAAY,OAAO,MAAM,EAAE,QAAQ,KAAK,CAAC;AACnE,UAAM,cAAc,OAAO,YAAY,WAAW,WAC/C,YAAY,SACZ,SAAS,OAAO,YAAY,MAAM,GAAG,EAAE;AAG1C,UAAM,cAAc,MAAM,eAAe,aAAa,IAAI;AAG1D,UAAM,YAAY,cAAc,WAAW;AAE3C,WAAO;AAAA,MACN,IAAI,OAAO,WAAW;AAAA,MACtB,KAAK,YAAY;AAAA,MACjB,QAAQ;AAAA,IACT;AAAA,EACD;AACD;;;ACpdA,SAAS,sBAAsB;AAC/B,SAAS,QAAAA,OAAM,SAAS,UAAU,WAAAC,gBAAe;AAkB1C,IAAM,wBAAN,MAA4B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQlC,OAAO,uBAAuB,MAAsB;AACnD,QAAI,CAAC,MAAM;AACV,aAAO;AAAA,IACR;AAIA,QAAI,eAAe;AACnB,
QAAI,cAAc;AAElB,WAAO,iBAAiB,aAAa;AACpC,qBAAe;AACf,oBAAc,KAAK,kBAAkB,WAAW;AAAA,IACjD;AAEA,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAAe,kBAAkB,MAAsB;AAGtD,UAAM,eAAe;AAErB,WAAO,KAAK,QAAQ,cAAc,CAAC,QAAQ,SAAS,YAAY;AAE/D,YAAM,eAAe,KAAK,UAAU,OAAO;AAI3C,YAAM,eAAe,KAAK,aAAa,OAAO;AAI9C,UAAI,cAAc;AACjB,eAAO,OAAO,YAAY;AAAA;AAAA,EAAO,YAAY;AAAA;AAAA;AAAA,MAC9C,OAAO;AAEN,eAAO,OAAO,YAAY;AAAA;AAAA;AAAA,MAC3B;AAAA,IACD,CAAC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,OAAe,UAAU,MAAsB;AAC9C,WAAO,KACL,KAAK,EACL,QAAQ,SAAS,GAAG,EACpB,QAAQ,SAAS,GAAG,EACpB,QAAQ,UAAU,GAAG,EACrB,QAAQ,WAAW,GAAG,EACtB,QAAQ,UAAU,GAAG;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,OAAe,aAAa,SAAyB;AACpD,QAAI,CAAC,SAAS;AACb,aAAO;AAAA,IACR;AAGA,QAAI,UAAU,QAAQ,KAAK;AAG3B,cAAU,QAAQ,QAAQ,WAAW,MAAM;AAE3C,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAAO,iBAAiB,MAAuB;AAC9C,QAAI,CAAC,MAAM;AACV,aAAO;AAAA,IACR;AAEA,UAAM,eAAe;AACrB,WAAO,aAAa,KAAK,IAAI;AAAA,EAC9B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,OAAO,yBAAyB,MAAsB;AACrD,QAAI,CAAC,MAAM;AACV,aAAO;AAAA,IACR;AAMA,UAAM,kBAAkB;AAExB,WAAO,KAAK,QAAQ,iBAAiB,CAAC,QAAQ,UAAU,YAAY;AAGnE,aAAO,QAAQ,KAAK;AAAA,IACrB,CAAC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,OAAO,gBAAgB,MAAsB;AAC5C,QAAI,CAAC,MAAM;AACV,aAAO;AAAA,IACR;AAGA,SAAK,cAAc,SAAS,IAAI;AAGhC,QAAI,YAAY;AAIhB,gBAAY,KAAK,yBAAyB,SAAS;AAGnD,gBAAY,KAAK,uBAAuB,SAAS;AAGjD,SAAK,cAAc,UAAU,SAAS;AAEtC,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA,EAKA,OAAe,cAAc,OAAe,SAAuB;AAClE,UAAM,cAAc,QAAQ,IAAI;AAChC,QAAI,CAAC,aAAa;AACjB;AAAA,IACD;AAEA,QAAI;AACH,YAAM,kBAAkB,KAAK,sBAAsB,WAAW;AAC9D,YAAM,aAAY,oBAAI,KAAK,GAAE,YAAY;AACzC,YAAM,YAAY;AAElB,YAAM,WAAW,GAAG,SAAS;AAAA,GAAM,SAAS,gBAAgB,KAAK;AAAA,EAAK,SAAS;AAAA,EAAK,KAAK;AAAA,EAAM,OAAO;AAAA;AAAA;AAEtG,qBAAe,iBAAiB,UAAU,OAAO;AAAA,IAClD,QAAQ;AAAA,IAGR;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAAe,sBAAsB,aAA6B;AACjE,UAAM,MAAM,QAAQ,WAAW;AAC/B,UAAM,MAAMA,SAAQ,WAAW;AAC/B,UAAM,OAAO,SAAS,aAAa,GAAG;AAGtC,UAAM,MAAM,oBAAI,KAAK;AACrB,UAAM,YAAY;AAAA,MACjB,IAAI,YAAY;AAAA,MAChB,OAAO,IAAI,SAAS,IAAI,CAAC,EAAE,SAAS,GAAG,GAAG;AAAA,MAC1C,OAAO,IAAI,QAAQ,CAAC,EAAE,SAAS,GAAG,GAAG;AAAA,IACtC,EAAE,KAAK,EAAE,IAAI,MAAM;AAAA,MAClB,OAAO,IAAI,SAAS,CAAC,EAAE,SAAS,GAAG,GAAG;AAAA,MACtC,OAAO,IAAI,WAAW,CAAC,EAAE,SAAS,GAAG,GAAG;AAAA,MACxC,OAAO,IAAI,WAAW,CAAC,EAAE,SAAS,GAAG,GAAG;AAAA,IACzC,EAAE,KAAK,EAAE;AAET,WAAOD,MAAK,KAAK,GAAG,IAAI,IAAI,SAAS,GAAG,GAAG,EAAE;AAAA,EAC9C;AACD;;;ACzLO,IAAM,gCAAN,MAAuE;AAAA,EAAvE;AACN,SAAS,eAAe;AACxB,SAAS,cAAc;AAMvB;AAAA;AAAA;AAAA;AAAA,SAAQ,gBAAoC;AAAA;AAAA;AAAA;AAAA;AAAA,EAK5C,MAAM,SAAS,OAA4C;AAC1D,UAAM,EAAE,QAAQ,kBAAkB,KAAK,IAAI;AAI3C,UAAM,QAAQ,OAAO,MAAM,oBAAoB;AAC/C,QAAI,+BAAQ,IAAI;AACf,WAAK,gBAAgB,MAAM,CAAC,EAAE,YAAY;AAAA,IAC3C;AAGA,UAAM,MAAM,MAAM,iBAAiB,MAAM;AAGzC,UAAM,QAAQ,IAAI,UAAU,IAAI,MAAM,YAAY,EAAE,SAAS,MAAM,KAAK,IAAI,MAAM,YAAY,EAAE,SAAS,WAAW,KAAK,IAAI,MAAM,YAAY,EAAE,SAAS,UAAU,KACjK,WACA;AAGH,UAAM,SAAsB;AAAA,MAC3B,IAAI,IAAI;AAAA,MACR,OAAO,IAAI;AAAA,MACX,MAAM,IAAI,eAAe;AAAA,MACzB;AAAA,MACA,KAAK,IAAI;AAAA,MACT,UAAU;AAAA,MACV,QAAQ;AAAA;AAAA;AAAA,MAGR,aAAa,IAAI;AAAA,MACjB,WAAW,IAAI;AAAA,MACf,WAAW,IAAI;AAAA,IAChB;AAGA,QAAI,iBAAiB;AACpB,UAAI;AACH,cAAM,WAAW,MAAM,KAAK,mBAAmB,MAAM;AACrD,YAAI,UAAU;AACb,iBAAO,WAAW;AAAA,QACnB;AAAA,MACD,QAAQ;AAAA,MAER;AAAA,IACD;AAGA,WAAO,OAAO,MAAM,sBAAsB,OAAO,MAAM,QAAQ;AAC/D,QAAI,OAAO,UAAU;AACpB,iBAAW,WAAW,OAAO,UAAU;AACtC,gBAAQ,OAAO,MAAM,sBAAsB,QAAQ,MAAM,QAAQ;AAAA,MAClE;AAAA,IACD;AAEA,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,MAAM,QAAuC;AAClD,UAAM,IAAI,MAAM,6GAA6G;AAAA,EAC9H;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,mBAAmB,YAAsD;AACtF,QAAI;AACH,YAAM,WAAW,MAAM,yBAAyB,UAAU;AAE1D,aAAO,SAAS,IAAI,cAAY;AAAA,Q
AC/B,IAAI,QAAQ;AAAA,QACZ,MAAM,QAAQ;AAAA,QACd,WAAW,QAAQ;AAAA,QACnB,QAAQ;AAAA;AAAA,QACR,GAAI,QAAQ,aAAa,EAAE,WAAW,QAAQ,UAAU;AAAA,MACzD,EAAE;AAAA,IACH,QAAQ;AACP,aAAO,CAAC;AAAA,IACT;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,WAAW,OAAsD;AACtE,UAAM,EAAE,UAAU,IAAI;AAEtB,UAAM,MAAM,MAAM,iBAAiB,SAAS;AAG5C,UAAM,gBAAgB,MAAM,sBAAsB,IAAI,MAAM,QAAQ;AAEpE,WAAO;AAAA,MACN,IAAI,IAAI;AAAA,MACR,MAAM;AAAA,MACN,QAAQ;AAAA;AAAA,MACR,YAAY,IAAI;AAAA,IACjB;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,OAAmD;AACtE,UAAM,EAAE,QAAQ,KAAK,IAAI;AAKzB,UAAM,gBAAgB,sBAAsB,gBAAgB,IAAI;AAEhE,UAAM,SAAS,MAAM,oBAAoB,QAAQ,aAAa;AAE9D,WAAO;AAAA,MACN,IAAI,OAAO;AAAA,MACX,KAAK,OAAO;AAAA,MACZ,YAAY,OAAO;AAAA,IACpB;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,OAAmD;AACtE,UAAM,EAAE,WAAW,KAAK,IAAI;AAG5B,UAAM,gBAAgB,sBAAsB,gBAAgB,IAAI;AAEhE,UAAM,SAAS,MAAM,oBAAoB,WAAW,aAAa;AAEjE,WAAO;AAAA,MACN,IAAI,OAAO;AAAA,MACX,KAAK,OAAO;AAAA,MACZ,YAAY,OAAO;AAAA,IACpB;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,YAAY,OAAqD;AACtE,UAAM,EAAE,OAAO,MAAM,QAAQ,QAAQ,IAAI;AAGzC,UAAM,mBAAmB,WAAW,QAAQ,IAAI,mBAAmB,KAAK;AAExE,QAAI,CAAC,kBAAkB;AACtB,YAAM,IAAI,MAAM,0KAA0K;AAAA,IAC3L;AAEA,UAAM,SAAS,MAAM,kBAAkB,OAAO,MAAM,kBAAkB,MAAM;AAE5E,WAAO;AAAA,MACN,IAAI,OAAO;AAAA,MACX,KAAK,OAAO;AAAA,IACb;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,iBAAiB,OAA0D;AAtNlF;AAuNE,UAAM,EAAE,UAAU,OAAO,MAAM,QAAQ,QAAQ,IAAI;AAGnD,UAAM,cAAc,MAAM,iBAAiB,QAAQ;AAGnD,UAAM,QAAQ,SAAS,MAAM,oBAAoB;AACjD,UAAM,mBAAmB,aAAW,oCAAQ,OAAR,mBAAY,kBAAiB,QAAQ,IAAI,mBAAmB,KAAK;AAErG,QAAI,CAAC,kBAAkB;AACtB,YAAM,IAAI,MAAM,6HAA6H;AAAA,IAC9I;AAGA,UAAM,SAAS,MAAM;AAAA,MACpB;AAAA,MACA;AAAA,MACA;AAAA,MACA,YAAY;AAAA;AAAA,MACZ;AAAA,IACD;AAEA,WAAO;AAAA,MACN,IAAI,OAAO;AAAA,MACX,KAAK,OAAO;AAAA,IACb;AAAA,EACD;AACD;;;ACvOO,IAAM,iCAAN,MAAqC;AAAA;AAAA;AAAA;AAAA,EAI3C,OAAO,OAAO,UAAkD;AAC/D,YAAQ,UAAU;AAAA,MACjB,KAAK;AACJ,eAAO,IAAI,8BAA8B;AAAA,MAC1C,KAAK;AACJ,eAAO,IAAI,8BAA8B;AAAA,MAC1C;AACC,cAAM,IAAI,MAAM,0CAA0C,QAAQ,EAAE;AAAA,IACtE;AAAA,EACD;AACD;","names":["join","extname"]}
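
The map above embeds the TypeScript for the Linear provider and the provider factory. A minimal sketch of the team-key resolution it describes, assuming a standalone helper (`resolveLinearTeamKey` is a hypothetical name that merges the `createIssue` and `createChildIssue` fallback chains visible in the sourcesContent):

```ts
// Sketch only — resolveLinearTeamKey is a hypothetical standalone helper.
// Fallback chain from the embedded source: explicit param > team prefix of a
// parent identifier like "ENG-123" > LINEAR_TEAM_KEY env var > key cached
// from a prior getIssue() call.
function resolveLinearTeamKey(
	teamKey?: string,
	cachedTeamKey?: string,
	parentIdentifier?: string
): string {
	const fromParent = parentIdentifier
		?.match(/^([A-Z]{2,})-\d+$/i)?.[1]
		?.toUpperCase()
	const effective = teamKey ?? fromParent ?? process.env.LINEAR_TEAM_KEY ?? cachedTeamKey
	if (!effective) {
		throw new Error('teamKey is required for Linear issue creation.')
	}
	return effective
}
```
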
@@ -4,7 +4,7 @@ import {
 } from "./chunk-NXMDEL3F.js";
 import {
   IssueManagementProviderFactory
-} from "./chunk-
+} from "./chunk-NPEMVE27.js";
 import {
   hasMultipleRemotes
 } from "./chunk-FXDYIV3K.js";
@@ -278,10 +278,11 @@ var SessionSummaryService = class {
    * @param summary - The summary text to post
    * @param worktreePath - Path to worktree for loading settings (optional)
    */
-  async postSummary(issueNumber, summary, worktreePath) {
+  async postSummary(issueNumber, summary, worktreePath, prNumber) {
     const settings = await this.settingsManager.loadSettings(worktreePath);
-    await this.postSummaryToIssue(issueNumber, summary, settings, worktreePath ?? process.cwd());
-
+    await this.postSummaryToIssue(issueNumber, summary, settings, worktreePath ?? process.cwd(), prNumber);
+    const target = prNumber ? `PR #${prNumber}` : "issue";
+    logger.success(`Session summary posted to ${target}`);
   }
   /**
    * Determine if summary should be generated based on loom type and settings
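
The hunk above threads a new optional `prNumber` through `postSummary`. A usage sketch against the typed signature embedded in the new source map below; the import path and the `summaryText`/`worktreePath` values are illustrative, since the class is internal to the package rather than a public entry point:

```ts
// Illustrative import path; SessionSummaryService is an internal module.
import { SessionSummaryService } from './lib/SessionSummaryService.js'

const service = new SessionSummaryService()
const summaryText = '...'      // placeholder summary body
const worktreePath = '/tmp/wt' // placeholder worktree path

// Previous behavior, unchanged: post to issue #42.
await service.postSummary(42, summaryText, worktreePath)

// New in 0.7.6: a PR number routes the comment to PR #7 instead, and the
// success log reads "Session summary posted to PR #7".
await service.postSummary(42, summaryText, worktreePath, 7)
```
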
@@ -352,7 +353,7 @@ var SessionSummaryService = class {
    */
   async postSummaryToIssue(issueNumber, summary, settings, worktreePath, prNumber) {
     var _a;
-    const providerType = ((_a = settings.issueManagement) == null ? void 0 : _a.provider) ?? "github";
+    const providerType = prNumber !== void 0 ? "github" : ((_a = settings.issueManagement) == null ? void 0 : _a.provider) ?? "github";
     const provider = IssueManagementProviderFactory.create(providerType);
     const finalSummary = await this.applyAttributionWithSettings(summary, settings, worktreePath);
     const targetNumber = prNumber ?? issueNumber;
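
The provider selection above is the crux of the change: PRs only exist on GitHub, so a PR target overrides a Linear configuration (the de-minified source in the map below carries exactly this comment). A distilled sketch, where `resolveSummaryTarget` is a hypothetical helper name:

```ts
type IssueProvider = 'github' | 'linear'

interface Settings {
	issueManagement?: { provider?: IssueProvider }
}

// Distilled from postSummaryToIssue: a defined prNumber forces the GitHub
// provider and becomes the comment target; otherwise the configured provider
// wins (defaulting to GitHub) and the issue number is the target.
function resolveSummaryTarget(
	issueNumber: string | number,
	settings: Settings,
	prNumber?: number
): { providerType: IssueProvider; targetNumber: string | number } {
	const providerType: IssueProvider =
		prNumber !== undefined ? 'github' : settings.issueManagement?.provider ?? 'github'
	return { providerType, targetNumber: prNumber ?? issueNumber }
}
```
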
@@ -368,4 +369,4 @@ var SessionSummaryService = class {
 export {
   SessionSummaryService
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-WT4UGBE2.js.map
@@ -0,0 +1 @@
+
{"version":3,"sources":["../src/lib/SessionSummaryService.ts","../src/utils/claude-transcript.ts"],"sourcesContent":["/**\n * SessionSummaryService: Generates and posts Claude session summaries\n *\n * This service orchestrates:\n * 1. Reading session metadata to get session ID\n * 2. Loading and processing the session-summary prompt template\n * 3. Invoking Claude headless to generate the summary\n * 4. Posting the summary as a comment to the issue/PR\n */\n\nimport path from 'path'\nimport os from 'os'\nimport fs from 'fs-extra'\nimport { logger } from '../utils/logger.js'\nimport { launchClaude, generateDeterministicSessionId } from '../utils/claude.js'\nimport { readSessionContext } from '../utils/claude-transcript.js'\nimport { PromptTemplateManager } from './PromptTemplateManager.js'\nimport { MetadataManager } from './MetadataManager.js'\nimport { SettingsManager, type IloomSettings } from './SettingsManager.js'\nimport { IssueManagementProviderFactory } from '../mcp/IssueManagementProviderFactory.js'\nimport type { IssueProvider } from '../mcp/types.js'\nimport { hasMultipleRemotes } from '../utils/remote.js'\nimport type { RecapFile, RecapOutput } from '../mcp/recap-types.js'\nimport { formatRecapMarkdown } from '../utils/recap-formatter.js'\n\nconst RECAPS_DIR = path.join(os.homedir(), '.config', 'iloom-ai', 'recaps')\n\n/**\n * Slugify path to recap filename (matches MetadataManager/RecapCommand algorithm)\n *\n * Algorithm:\n * 1. Trim trailing slashes\n * 2. Replace all path separators (/ or \\) with ___ (triple underscore)\n * 3. Replace any other non-alphanumeric characters (except _ and -) with -\n * 4. Append .json\n */\nfunction slugifyPath(loomPath: string): string {\n\tlet slug = loomPath.replace(/[/\\\\]+$/, '')\n\tslug = slug.replace(/[/\\\\]/g, '___')\n\tslug = slug.replace(/[^a-zA-Z0-9_-]/g, '-')\n\treturn `${slug}.json`\n}\n\n/**\n * Read recap file for a worktree path with graceful degradation\n * Returns formatted recap string or null if not found/error\n */\nasync function readRecapFile(worktreePath: string): Promise<string | null> {\n\ttry {\n\t\tconst filePath = path.join(RECAPS_DIR, slugifyPath(worktreePath))\n\t\tif (await fs.pathExists(filePath)) {\n\t\t\tconst content = await fs.readFile(filePath, 'utf8')\n\t\t\tconst recap = JSON.parse(content) as RecapFile\n\n\t\t\t// Check if recap has any meaningful content\n\t\t\tconst hasGoal = recap.goal !== null && recap.goal !== undefined\n\t\t\tconst hasComplexity = recap.complexity !== null && recap.complexity !== undefined\n\t\t\tconst hasEntries = Array.isArray(recap.entries) && recap.entries.length > 0\n\t\t\tconst hasArtifacts = Array.isArray(recap.artifacts) && recap.artifacts.length > 0\n\t\t\tconst hasContent = hasGoal || hasComplexity || hasEntries || hasArtifacts\n\n\t\t\tif (hasContent) {\n\t\t\t\t// Convert RecapFile (optional fields) to RecapOutput (required fields)\n\t\t\t\t// Same pattern as RecapCommand.ts:61-66\n\t\t\t\tconst recapOutput: RecapOutput = {\n\t\t\t\t\tfilePath,\n\t\t\t\t\tgoal: recap.goal ?? null,\n\t\t\t\t\tcomplexity: recap.complexity ?? null,\n\t\t\t\t\tentries: recap.entries ?? [],\n\t\t\t\t\tartifacts: recap.artifacts ?? 
[],\n\t\t\t\t}\n\t\t\t\treturn formatRecapMarkdown(recapOutput)\n\t\t\t}\n\t\t}\n\t\treturn null\n\t} catch {\n\t\t// Graceful degradation - return null on any error\n\t\treturn null\n\t}\n}\n\n/**\n * Input for generating and posting a session summary\n */\nexport interface SessionSummaryInput {\n\tworktreePath: string\n\tissueNumber: string | number\n\tbranchName: string\n\tloomType: 'issue' | 'pr' | 'branch'\n\t/** Optional PR number - when provided, summary is posted to the PR instead of the issue */\n\tprNumber?: number\n}\n\n/**\n * Result from generating a session summary\n */\nexport interface SessionSummaryResult {\n\tsummary: string\n\tsessionId: string\n}\n\n/**\n * Service that generates and posts Claude session summaries to issues\n */\nexport class SessionSummaryService {\n\tprivate templateManager: PromptTemplateManager\n\tprivate metadataManager: MetadataManager\n\tprivate settingsManager: SettingsManager\n\n\tconstructor(\n\t\ttemplateManager?: PromptTemplateManager,\n\t\tmetadataManager?: MetadataManager,\n\t\tsettingsManager?: SettingsManager\n\t) {\n\t\tthis.templateManager = templateManager ?? new PromptTemplateManager()\n\t\tthis.metadataManager = metadataManager ?? new MetadataManager()\n\t\tthis.settingsManager = settingsManager ?? new SettingsManager()\n\t}\n\n\t/**\n\t * Generate and post a session summary to the issue\n\t *\n\t * Non-blocking: Catches all errors and logs warnings instead of throwing\n\t * This ensures the finish workflow continues even if summary generation fails\n\t */\n\tasync generateAndPostSummary(input: SessionSummaryInput): Promise<void> {\n\t\ttry {\n\t\t\t// 1. Skip for branch type (no issue to comment on)\n\t\t\tif (input.loomType === 'branch') {\n\t\t\t\tlogger.debug('Skipping session summary: branch type has no associated issue')\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\t// 2. Read metadata to get sessionId, or generate deterministically\n\t\t\tconst metadata = await this.metadataManager.readMetadata(input.worktreePath)\n\t\t\tconst sessionId = metadata?.sessionId ?? generateDeterministicSessionId(input.worktreePath)\n\n\t\t\t// 3. Load settings to check generateSummary config\n\t\t\tconst settings = await this.settingsManager.loadSettings(input.worktreePath)\n\t\t\tif (!this.shouldGenerateSummary(input.loomType, settings)) {\n\t\t\t\tlogger.debug(`Skipping session summary: generateSummary is disabled for ${input.loomType} workflow`)\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tlogger.info('Generating session summary...')\n\n\t\t\t// 4. Try to read compact summaries from session transcript for additional context\n\t\t\tlogger.debug(`Looking for session transcript with sessionId: ${sessionId}`)\n\t\t\tconst compactSummaries = await readSessionContext(input.worktreePath, sessionId)\n\t\t\tif (compactSummaries) {\n\t\t\t\tlogger.debug(`Found compact summaries (${compactSummaries.length} chars)`)\n\t\t\t} else {\n\t\t\t\tlogger.debug('No compact summaries found in session transcript')\n\t\t\t}\n\n\t\t\t// 5. Try to read recap data for high-signal context\n\t\t\tconst recapData = await readRecapFile(input.worktreePath)\n\t\t\tif (recapData) {\n\t\t\t\tlogger.debug(`Found recap data (${recapData.length} chars)`)\n\t\t\t} else {\n\t\t\t\tlogger.debug('No recap data found')\n\t\t\t}\n\n\t\t\t// 6. 
Load and process the session-summary template\n\t\t\tconst prompt = await this.templateManager.getPrompt('session-summary', {\n\t\t\t\tISSUE_NUMBER: String(input.issueNumber),\n\t\t\t\tBRANCH_NAME: input.branchName,\n\t\t\t\tLOOM_TYPE: input.loomType,\n\t\t\t\tCOMPACT_SUMMARIES: compactSummaries ?? '',\n\t\t\t\tRECAP_DATA: recapData ?? '',\n\t\t\t})\n\n\t\t\tlogger.debug('Session summary prompt:\\n' + prompt)\n\n\t\t\t// 7. Invoke Claude headless to generate summary\n\t\t\t// Use --resume with session ID so Claude knows which conversation to summarize\n\t\t\tconst summaryModel = this.settingsManager.getSummaryModel(settings)\n\t\t\tconst summaryResult = await launchClaude(prompt, {\n\t\t\t\theadless: true,\n\t\t\t\tmodel: summaryModel,\n\t\t\t\tsessionId: sessionId, // Resume this session so Claude has conversation context\n\t\t\t\tnoSessionPersistence: true, // Don't persist new data after generating summary\n\t\t\t})\n\n\t\t\tif (!summaryResult || typeof summaryResult !== 'string' || summaryResult.trim() === '') {\n\t\t\t\tlogger.warn('Session summary generation returned empty result')\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tconst summary = summaryResult.trim()\n\n\t\t\t// 8. Skip posting if summary is too short (likely failed generation)\n\t\t\tif (summary.length < 100) {\n\t\t\t\tlogger.warn('Session summary too short, skipping post')\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\t// 9. Post summary to issue or PR (PR takes priority when prNumber is provided)\n\t\t\tawait this.postSummaryToIssue(input.issueNumber, summary, settings, input.worktreePath, input.prNumber)\n\n\t\t\tconst targetDescription = input.prNumber ? `PR #${input.prNumber}` : 'issue'\n\t\t\tlogger.success(`Session summary posted to ${targetDescription}`)\n\t\t} catch (error) {\n\t\t\t// Non-blocking: Log warning but don't throw\n\t\t\tconst errorMessage = error instanceof Error ? error.message : String(error)\n\t\t\tlogger.warn(`Failed to generate session summary: ${errorMessage}`)\n\t\t\tlogger.debug('Session summary generation error details:', { error })\n\t\t}\n\t}\n\n\t/**\n\t * Generate a session summary without posting it\n\t *\n\t * This method is useful for previewing the summary or for use by CLI commands\n\t * that want to display the summary before optionally posting it.\n\t *\n\t * @param worktreePath - Path to the worktree\n\t * @param branchName - Name of the branch\n\t * @param loomType - Type of loom ('issue' | 'pr' | 'branch')\n\t * @param issueNumber - Issue or PR number (optional, for template variables)\n\t * @returns The generated summary and session ID\n\t * @throws Error if Claude invocation fails\n\t */\n\tasync generateSummary(\n\t\tworktreePath: string,\n\t\tbranchName: string,\n\t\tloomType: 'issue' | 'pr' | 'branch',\n\t\tissueNumber?: string | number\n\t): Promise<SessionSummaryResult> {\n\t\t// 1. Read metadata or generate deterministic session ID\n\t\tconst metadata = await this.metadataManager.readMetadata(worktreePath)\n\t\tconst sessionId = metadata?.sessionId ?? generateDeterministicSessionId(worktreePath)\n\n\t\t// 2. Load settings for model configuration\n\t\tconst settings = await this.settingsManager.loadSettings(worktreePath)\n\n\t\tlogger.info('Generating session summary...')\n\n\t\t// 3. 
Try to read compact summaries from session transcript for additional context\n\t\tlogger.debug(`Looking for session transcript with sessionId: ${sessionId}`)\n\t\tconst compactSummaries = await readSessionContext(worktreePath, sessionId)\n\t\tif (compactSummaries) {\n\t\t\tlogger.debug(`Found compact summaries (${compactSummaries.length} chars)`)\n\t\t} else {\n\t\t\tlogger.debug('No compact summaries found in session transcript')\n\t\t}\n\n\t\t// 4. Try to read recap data for high-signal context\n\t\tconst recapData = await readRecapFile(worktreePath)\n\t\tif (recapData) {\n\t\t\tlogger.debug(`Found recap data (${recapData.length} chars)`)\n\t\t} else {\n\t\t\tlogger.debug('No recap data found')\n\t\t}\n\n\t\t// 5. Load and process the session-summary template\n\t\tconst prompt = await this.templateManager.getPrompt('session-summary', {\n\t\t\tISSUE_NUMBER: issueNumber !== undefined ? String(issueNumber) : '',\n\t\t\tBRANCH_NAME: branchName,\n\t\t\tLOOM_TYPE: loomType,\n\t\t\tCOMPACT_SUMMARIES: compactSummaries ?? '',\n\t\t\tRECAP_DATA: recapData ?? '',\n\t\t})\n\n\t\tlogger.debug('Session summary prompt:\\n' + prompt)\n\n\t\t// 6. Invoke Claude headless to generate summary\n\t\tconst summaryModel = this.settingsManager.getSummaryModel(settings)\n\t\tconst summaryResult = await launchClaude(prompt, {\n\t\t\theadless: true,\n\t\t\tmodel: summaryModel,\n\t\t\tsessionId: sessionId,\n\t\t\tnoSessionPersistence: true, // Don't persist new data after generating summary\n\t\t})\n\n\t\tif (!summaryResult || typeof summaryResult !== 'string' || summaryResult.trim() === '') {\n\t\t\tthrow new Error('Session summary generation returned empty result')\n\t\t}\n\n\t\tconst summary = summaryResult.trim()\n\n\t\t// 7. Check if summary is too short (likely failed generation)\n\t\tif (summary.length < 100) {\n\t\t\tthrow new Error('Session summary too short - generation may have failed')\n\t\t}\n\n\t\treturn {\n\t\t\tsummary,\n\t\t\tsessionId: sessionId,\n\t\t}\n\t}\n\n\t/**\n\t * Post a summary to an issue (used by both generateAndPostSummary and CLI commands)\n\t *\n\t * @param issueNumber - Issue or PR number to post to\n\t * @param summary - The summary text to post\n\t * @param worktreePath - Path to worktree for loading settings (optional)\n\t */\n\tasync postSummary(\n\t\tissueNumber: string | number,\n\t\tsummary: string,\n\t\tworktreePath?: string,\n\t\tprNumber?: number\n\t): Promise<void> {\n\t\tconst settings = await this.settingsManager.loadSettings(worktreePath)\n\t\tawait this.postSummaryToIssue(issueNumber, summary, settings, worktreePath ?? process.cwd(), prNumber)\n\t\tconst target = prNumber ? `PR #${prNumber}` : 'issue'\n\t\tlogger.success(`Session summary posted to ${target}`)\n\t}\n\n\t/**\n\t * Determine if summary should be generated based on loom type and settings\n\t *\n\t * @param loomType - The type of loom being finished\n\t * @param settings - The loaded iloom settings\n\t * @returns true if summary should be generated\n\t */\n\tshouldGenerateSummary(\n\t\tloomType: 'issue' | 'pr' | 'branch',\n\t\tsettings: IloomSettings\n\t): boolean {\n\t\t// Branch type never generates summaries (no issue to comment on)\n\t\tif (loomType === 'branch') {\n\t\t\treturn false\n\t\t}\n\n\t\t// Get workflow-specific config\n\t\tconst workflowConfig =\n\t\t\tloomType === 'issue'\n\t\t\t\t? settings.workflows?.issue\n\t\t\t\t: settings.workflows?.pr\n\n\t\t// Default to true if not explicitly set (for issue and pr types)\n\t\treturn workflowConfig?.generateSummary ?? 
true\n\t}\n\n\t/**\n\t * Apply attribution footer to summary based on settings\n\t *\n\t * @param summary - The summary text\n\t * @param worktreePath - Path to worktree for loading settings and detecting remotes\n\t * @returns Summary with attribution footer if applicable\n\t */\n\tasync applyAttribution(summary: string, worktreePath: string): Promise<string> {\n\t\tconst settings = await this.settingsManager.loadSettings(worktreePath)\n\t\treturn this.applyAttributionWithSettings(summary, settings, worktreePath)\n\t}\n\n\t/**\n\t * Apply attribution footer to summary based on provided settings\n\t *\n\t * @param summary - The summary text\n\t * @param settings - The loaded iloom settings\n\t * @param worktreePath - Path to worktree for detecting remotes\n\t * @returns Summary with attribution footer if applicable\n\t */\n\tasync applyAttributionWithSettings(\n\t\tsummary: string,\n\t\tsettings: IloomSettings,\n\t\tworktreePath: string\n\t): Promise<string> {\n\t\tconst attributionSetting = settings.attribution ?? 'upstreamOnly'\n\t\tlogger.debug(`Attribution setting from config: ${settings.attribution}`)\n\t\tlogger.debug(`Attribution setting (with default): ${attributionSetting}`)\n\n\t\tlet shouldShowAttribution = false\n\t\tif (attributionSetting === 'on') {\n\t\t\tshouldShowAttribution = true\n\t\t\tlogger.debug('Attribution: always on')\n\t\t} else if (attributionSetting === 'upstreamOnly') {\n\t\t\t// Only show attribution when contributing to external repos (multiple remotes)\n\t\t\tshouldShowAttribution = await hasMultipleRemotes(worktreePath)\n\t\t\tlogger.debug(`Attribution: upstreamOnly, hasMultipleRemotes=${shouldShowAttribution}`)\n\t\t} else {\n\t\t\tlogger.debug('Attribution: off')\n\t\t}\n\t\t// 'off' keeps shouldShowAttribution = false\n\n\t\tlogger.debug(`Should show attribution: ${shouldShowAttribution}`)\n\t\tif (shouldShowAttribution) {\n\t\t\tlogger.debug('Attribution footer appended to summary')\n\t\t\treturn `${summary}\\n\\n---\\n*Generated with 🤖❤️ by [iloom.ai](https://iloom.ai)*`\n\t\t}\n\n\t\treturn summary\n\t}\n\n\t/**\n\t * Post the summary as a comment to the issue or PR\n\t *\n\t * @param issueNumber - The issue number (used when prNumber is not provided)\n\t * @param summary - The summary text to post\n\t * @param settings - The loaded iloom settings\n\t * @param worktreePath - Path to worktree for attribution detection\n\t * @param prNumber - Optional PR number - when provided, posts to the PR instead\n\t */\n\tprivate async postSummaryToIssue(\n\t\tissueNumber: string | number,\n\t\tsummary: string,\n\t\tsettings: IloomSettings,\n\t\tworktreePath: string,\n\t\tprNumber?: number\n\t): Promise<void> {\n\t\t// Get the issue management provider from settings\n\t\t// PRs only exist on GitHub, so always use 'github' provider when prNumber is provided\n\t\t// (see types.ts:32-33 and LinearIssueManagementProvider.getPR())\n\t\tconst providerType = prNumber !== undefined\n\t\t\t? 'github'\n\t\t\t: (settings.issueManagement?.provider ?? 'github') as IssueProvider\n\t\tconst provider = IssueManagementProviderFactory.create(providerType)\n\n\t\t// Apply attribution if configured\n\t\tconst finalSummary = await this.applyAttributionWithSettings(summary, settings, worktreePath)\n\n\t\t// When prNumber is provided, post to the PR instead of the issue\n\t\tconst targetNumber = prNumber ?? issueNumber\n\t\tconst targetType = prNumber !== undefined ? 
'pr' : 'issue'\n\n\t\t// Create the comment\n\t\tawait provider.createComment({\n\t\t\tnumber: String(targetNumber),\n\t\t\tbody: finalSummary,\n\t\t\ttype: targetType,\n\t\t})\n\t}\n}\n","/**\n * Claude Transcript Utilities\n *\n * Provides functions to read and parse Claude Code session transcript files\n * stored in ~/.claude/projects/. These transcripts contain the full conversation\n * history including compact summaries from when conversations were compacted.\n */\n\nimport { readFile } from 'fs/promises'\nimport { homedir } from 'os'\nimport { join } from 'path'\nimport { logger } from './logger.js'\n\n/**\n * Entry in a Claude Code JSONL transcript file\n */\nexport interface TranscriptEntry {\n\ttype: 'user' | 'assistant' | 'system' | 'file-history-snapshot' | 'queue-operation'\n\tsessionId?: string\n\tmessage?: { role: string; content: string | Array<{ type: string; text?: string }> }\n\tisCompactSummary?: boolean\n\tisVisibleInTranscriptOnly?: boolean\n\tsubtype?: string // 'compact_boundary' for compaction markers\n\tcontent?: string\n\ttimestamp?: string\n\tuuid?: string\n\tparentUuid?: string\n}\n\n/**\n * Get the Claude projects directory path encoding for a worktree path\n * Encoding: /Users/adam/Projects/foo_bar -> -Users-adam-Projects-foo-bar\n *\n * Claude Code encodes paths by replacing both '/' and '_' with '-'\n *\n * @param worktreePath - Absolute path to the worktree\n * @returns Encoded directory name for Claude projects\n */\nexport function getClaudeProjectPath(worktreePath: string): string {\n\t// Replace all '/' and '_' with '-' (matching Claude Code's encoding)\n\treturn worktreePath.replace(/[/_]/g, '-')\n}\n\n/**\n * Get the full path to the Claude projects directory\n * @returns Path to ~/.claude/projects/\n */\nexport function getClaudeProjectsDir(): string {\n\treturn join(homedir(), '.claude', 'projects')\n}\n\n/**\n * Find the session transcript file for a given worktree and session ID\n *\n * @param worktreePath - Absolute path to the worktree\n * @param sessionId - Session ID to find transcript for\n * @returns Full path to the transcript file, or null if not found\n */\nexport function findSessionTranscript(worktreePath: string, sessionId: string): string | null {\n\tconst projectsDir = getClaudeProjectsDir()\n\tconst projectDirName = getClaudeProjectPath(worktreePath)\n\tconst transcriptPath = join(projectsDir, projectDirName, `${sessionId}.jsonl`)\n\treturn transcriptPath\n}\n\n/**\n * Extract the content from a compact summary message\n * Handles both string content and array content formats\n */\nfunction extractMessageContent(message: TranscriptEntry['message']): string | null {\n\tif (!message) return null\n\n\tif (typeof message.content === 'string') {\n\t\treturn message.content\n\t}\n\n\tif (Array.isArray(message.content)) {\n\t\t// Concatenate all text elements\n\t\treturn message.content\n\t\t\t.filter((item) => item.type === 'text' && item.text)\n\t\t\t.map((item) => item.text)\n\t\t\t.join('\\n')\n\t}\n\n\treturn null\n}\n\n/**\n * Extract compact summaries from a session transcript file\n *\n * Returns empty array if file doesn't exist or no summaries found.\n * Each compact summary contains structured history of pre-compaction conversation.\n *\n * @param transcriptPath - Full path to the transcript JSONL file\n * @param maxSummaries - Maximum number of summaries to return (default 3)\n * @returns Array of compact summary content strings, newest first\n */\nexport async function extractCompactSummaries(\n\ttranscriptPath: 
string,\n\tmaxSummaries = 3\n): Promise<string[]> {\n\ttry {\n\t\tconst content = await readFile(transcriptPath, 'utf-8')\n\t\tconst lines = content.split('\\n').filter((line) => line.trim())\n\n\t\tconst summaries: string[] = []\n\n\t\tfor (const line of lines) {\n\t\t\ttry {\n\t\t\t\tconst entry = JSON.parse(line) as TranscriptEntry\n\n\t\t\t\t// Look for compact summary entries\n\t\t\t\tif (entry.isCompactSummary === true && entry.message) {\n\t\t\t\t\tconst summaryContent = extractMessageContent(entry.message)\n\t\t\t\t\tif (summaryContent) {\n\t\t\t\t\t\tsummaries.push(summaryContent)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t} catch {\n\t\t\t\t// Skip malformed JSON lines\n\t\t\t\tlogger.debug('Skipping malformed JSONL line in transcript')\n\t\t\t}\n\t\t}\n\n\t\t// Return most recent summaries (they appear in order in the file)\n\t\t// Limit to maxSummaries\n\t\treturn summaries.slice(-maxSummaries)\n\t} catch (error) {\n\t\t// File not found or permission error - return empty array (graceful degradation)\n\t\tif (error instanceof Error && 'code' in error && error.code === 'ENOENT') {\n\t\t\tlogger.debug('Transcript file not found:', transcriptPath)\n\t\t} else {\n\t\t\tlogger.debug('Error reading transcript file:', error)\n\t\t}\n\t\treturn []\n\t}\n}\n\n/**\n * Read session transcript and extract compact summaries for summary generation\n *\n * This is the main entry point for SessionSummaryService to get pre-compaction\n * conversation context. It gracefully handles all error cases.\n *\n * @param worktreePath - Absolute path to the worktree\n * @param sessionId - Session ID to find transcript for\n * @param maxSummaries - Maximum number of summaries to return (default 3)\n * @returns Formatted string of compact summaries, or null if none found\n */\nexport async function readSessionContext(\n\tworktreePath: string,\n\tsessionId: string,\n\tmaxSummaries = 3\n): Promise<string | null> {\n\tconst transcriptPath = findSessionTranscript(worktreePath, sessionId)\n\tif (!transcriptPath) {\n\t\treturn null\n\t}\n\n\tlogger.debug(`Checking transcript at: ${transcriptPath}`)\n\n\tconst summaries = await extractCompactSummaries(transcriptPath, maxSummaries)\n\n\tif (summaries.length === 0) {\n\t\treturn null\n\t}\n\n\t// Format summaries with separators\n\t// Newest summaries are at the end, so we reverse to show newest first\n\tconst formattedSummaries = summaries\n\t\t.reverse()\n\t\t.map((summary, index) => {\n\t\t\tconst header =\n\t\t\t\tsummaries.length > 1\n\t\t\t\t\t? 
`### Compact Summary ${index + 1} of ${summaries.length}\\n\\n`\n\t\t\t\t\t: ''\n\t\t\treturn `${header}${summary}`\n\t\t})\n\t\t.join('\\n\\n---\\n\\n')\n\n\treturn formattedSummaries\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAUA,OAAO,UAAU;AACjB,OAAO,QAAQ;AACf,OAAO,QAAQ;;;ACJf,SAAS,gBAAgB;AACzB,SAAS,eAAe;AACxB,SAAS,YAAY;AA4Bd,SAAS,qBAAqB,cAA8B;AAElE,SAAO,aAAa,QAAQ,SAAS,GAAG;AACzC;AAMO,SAAS,uBAA+B;AAC9C,SAAO,KAAK,QAAQ,GAAG,WAAW,UAAU;AAC7C;AASO,SAAS,sBAAsB,cAAsB,WAAkC;AAC7F,QAAM,cAAc,qBAAqB;AACzC,QAAM,iBAAiB,qBAAqB,YAAY;AACxD,QAAM,iBAAiB,KAAK,aAAa,gBAAgB,GAAG,SAAS,QAAQ;AAC7E,SAAO;AACR;AAMA,SAAS,sBAAsB,SAAoD;AAClF,MAAI,CAAC,QAAS,QAAO;AAErB,MAAI,OAAO,QAAQ,YAAY,UAAU;AACxC,WAAO,QAAQ;AAAA,EAChB;AAEA,MAAI,MAAM,QAAQ,QAAQ,OAAO,GAAG;AAEnC,WAAO,QAAQ,QACb,OAAO,CAAC,SAAS,KAAK,SAAS,UAAU,KAAK,IAAI,EAClD,IAAI,CAAC,SAAS,KAAK,IAAI,EACvB,KAAK,IAAI;AAAA,EACZ;AAEA,SAAO;AACR;AAYA,eAAsB,wBACrB,gBACA,eAAe,GACK;AACpB,MAAI;AACH,UAAM,UAAU,MAAM,SAAS,gBAAgB,OAAO;AACtD,UAAM,QAAQ,QAAQ,MAAM,IAAI,EAAE,OAAO,CAAC,SAAS,KAAK,KAAK,CAAC;AAE9D,UAAM,YAAsB,CAAC;AAE7B,eAAW,QAAQ,OAAO;AACzB,UAAI;AACH,cAAM,QAAQ,KAAK,MAAM,IAAI;AAG7B,YAAI,MAAM,qBAAqB,QAAQ,MAAM,SAAS;AACrD,gBAAM,iBAAiB,sBAAsB,MAAM,OAAO;AAC1D,cAAI,gBAAgB;AACnB,sBAAU,KAAK,cAAc;AAAA,UAC9B;AAAA,QACD;AAAA,MACD,QAAQ;AAEP,eAAO,MAAM,6CAA6C;AAAA,MAC3D;AAAA,IACD;AAIA,WAAO,UAAU,MAAM,CAAC,YAAY;AAAA,EACrC,SAAS,OAAO;AAEf,QAAI,iBAAiB,SAAS,UAAU,SAAS,MAAM,SAAS,UAAU;AACzE,aAAO,MAAM,8BAA8B,cAAc;AAAA,IAC1D,OAAO;AACN,aAAO,MAAM,kCAAkC,KAAK;AAAA,IACrD;AACA,WAAO,CAAC;AAAA,EACT;AACD;AAaA,eAAsB,mBACrB,cACA,WACA,eAAe,GACU;AACzB,QAAM,iBAAiB,sBAAsB,cAAc,SAAS;AACpE,MAAI,CAAC,gBAAgB;AACpB,WAAO;AAAA,EACR;AAEA,SAAO,MAAM,2BAA2B,cAAc,EAAE;AAExD,QAAM,YAAY,MAAM,wBAAwB,gBAAgB,YAAY;AAE5E,MAAI,UAAU,WAAW,GAAG;AAC3B,WAAO;AAAA,EACR;AAIA,QAAM,qBAAqB,UACzB,QAAQ,EACR,IAAI,CAAC,SAAS,UAAU;AACxB,UAAM,SACL,UAAU,SAAS,IAChB,uBAAuB,QAAQ,CAAC,OAAO,UAAU,MAAM;AAAA;AAAA,IACvD;AACJ,WAAO,GAAG,MAAM,GAAG,OAAO;AAAA,EAC3B,CAAC,EACA,KAAK,aAAa;AAEpB,SAAO;AACR;;;AD5JA,IAAM,aAAa,KAAK,KAAK,GAAG,QAAQ,GAAG,WAAW,YAAY,QAAQ;AAW1E,SAAS,YAAY,UAA0B;AAC9C,MAAI,OAAO,SAAS,QAAQ,WAAW,EAAE;AACzC,SAAO,KAAK,QAAQ,UAAU,KAAK;AACnC,SAAO,KAAK,QAAQ,mBAAmB,GAAG;AAC1C,SAAO,GAAG,IAAI;AACf;AAMA,eAAe,cAAc,cAA8C;AAC1E,MAAI;AACH,UAAM,WAAW,KAAK,KAAK,YAAY,YAAY,YAAY,CAAC;AAChE,QAAI,MAAM,GAAG,WAAW,QAAQ,GAAG;AAClC,YAAM,UAAU,MAAM,GAAG,SAAS,UAAU,MAAM;AAClD,YAAM,QAAQ,KAAK,MAAM,OAAO;AAGhC,YAAM,UAAU,MAAM,SAAS,QAAQ,MAAM,SAAS;AACtD,YAAM,gBAAgB,MAAM,eAAe,QAAQ,MAAM,eAAe;AACxE,YAAM,aAAa,MAAM,QAAQ,MAAM,OAAO,KAAK,MAAM,QAAQ,SAAS;AAC1E,YAAM,eAAe,MAAM,QAAQ,MAAM,SAAS,KAAK,MAAM,UAAU,SAAS;AAChF,YAAM,aAAa,WAAW,iBAAiB,cAAc;AAE7D,UAAI,YAAY;AAGf,cAAM,cAA2B;AAAA,UAChC;AAAA,UACA,MAAM,MAAM,QAAQ;AAAA,UACpB,YAAY,MAAM,cAAc;AAAA,UAChC,SAAS,MAAM,WAAW,CAAC;AAAA,UAC3B,WAAW,MAAM,aAAa,CAAC;AAAA,QAChC;AACA,eAAO,oBAAoB,WAAW;AAAA,MACvC;AAAA,IACD;AACA,WAAO;AAAA,EACR,QAAQ;AAEP,WAAO;AAAA,EACR;AACD;AAyBO,IAAM,wBAAN,MAA4B;AAAA,EAKlC,YACC,iBACA,iBACA,iBACC;AACD,SAAK,kBAAkB,mBAAmB,IAAI,sBAAsB;AACpE,SAAK,kBAAkB,mBAAmB,IAAI,gBAAgB;AAC9D,SAAK,kBAAkB,mBAAmB,IAAI,gBAAgB;AAAA,EAC/D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,uBAAuB,OAA2C;AACvE,QAAI;AAEH,UAAI,MAAM,aAAa,UAAU;AAChC,eAAO,MAAM,+DAA+D;AAC5E;AAAA,MACD;AAGA,YAAM,WAAW,MAAM,KAAK,gBAAgB,aAAa,MAAM,YAAY;AAC3E,YAAM,aAAY,qCAAU,cAAa,+BAA+B,MAAM,YAAY;AAG1F,YAAM,WAAW,MAAM,KAAK,gBAAgB,aAAa,MAAM,YAAY;AAC3E,UAAI,CAAC,KAAK,sBAAsB,MAAM,UAAU,QAAQ,GAAG;AAC1D,eAAO,MAAM,6DAA6D,MAAM,QAAQ,WAAW;AACnG;AAAA,MACD;AAEA,aAAO,KAAK,+BAA+B;AAG3C,aAAO,MAAM,kDAAkD,SAAS,EAAE;AAC1E,YAAM,mBAAmB,MAAM,mBAAmB,MAAM,cAAc,SAAS;AAC/E,UAAI,kBAAkB;AACrB,eAAO,MAAM,4BAA4B,iBAAiB,MAAM,SAAS;AAAA,MAC1E,O
AAO;AACN,eAAO,MAAM,kDAAkD;AAAA,MAChE;AAGA,YAAM,YAAY,MAAM,cAAc,MAAM,YAAY;AACxD,UAAI,WAAW;AACd,eAAO,MAAM,qBAAqB,UAAU,MAAM,SAAS;AAAA,MAC5D,OAAO;AACN,eAAO,MAAM,qBAAqB;AAAA,MACnC;AAGA,YAAM,SAAS,MAAM,KAAK,gBAAgB,UAAU,mBAAmB;AAAA,QACtE,cAAc,OAAO,MAAM,WAAW;AAAA,QACtC,aAAa,MAAM;AAAA,QACnB,WAAW,MAAM;AAAA,QACjB,mBAAmB,oBAAoB;AAAA,QACvC,YAAY,aAAa;AAAA,MAC1B,CAAC;AAED,aAAO,MAAM,8BAA8B,MAAM;AAIjD,YAAM,eAAe,KAAK,gBAAgB,gBAAgB,QAAQ;AAClE,YAAM,gBAAgB,MAAM,aAAa,QAAQ;AAAA,QAChD,UAAU;AAAA,QACV,OAAO;AAAA,QACP;AAAA;AAAA,QACA,sBAAsB;AAAA;AAAA,MACvB,CAAC;AAED,UAAI,CAAC,iBAAiB,OAAO,kBAAkB,YAAY,cAAc,KAAK,MAAM,IAAI;AACvF,eAAO,KAAK,kDAAkD;AAC9D;AAAA,MACD;AAEA,YAAM,UAAU,cAAc,KAAK;AAGnC,UAAI,QAAQ,SAAS,KAAK;AACzB,eAAO,KAAK,0CAA0C;AACtD;AAAA,MACD;AAGA,YAAM,KAAK,mBAAmB,MAAM,aAAa,SAAS,UAAU,MAAM,cAAc,MAAM,QAAQ;AAEtG,YAAM,oBAAoB,MAAM,WAAW,OAAO,MAAM,QAAQ,KAAK;AACrE,aAAO,QAAQ,6BAA6B,iBAAiB,EAAE;AAAA,IAChE,SAAS,OAAO;AAEf,YAAM,eAAe,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAC1E,aAAO,KAAK,uCAAuC,YAAY,EAAE;AACjE,aAAO,MAAM,6CAA6C,EAAE,MAAM,CAAC;AAAA,IACpE;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,MAAM,gBACL,cACA,YACA,UACA,aACgC;AAEhC,UAAM,WAAW,MAAM,KAAK,gBAAgB,aAAa,YAAY;AACrE,UAAM,aAAY,qCAAU,cAAa,+BAA+B,YAAY;AAGpF,UAAM,WAAW,MAAM,KAAK,gBAAgB,aAAa,YAAY;AAErE,WAAO,KAAK,+BAA+B;AAG3C,WAAO,MAAM,kDAAkD,SAAS,EAAE;AAC1E,UAAM,mBAAmB,MAAM,mBAAmB,cAAc,SAAS;AACzE,QAAI,kBAAkB;AACrB,aAAO,MAAM,4BAA4B,iBAAiB,MAAM,SAAS;AAAA,IAC1E,OAAO;AACN,aAAO,MAAM,kDAAkD;AAAA,IAChE;AAGA,UAAM,YAAY,MAAM,cAAc,YAAY;AAClD,QAAI,WAAW;AACd,aAAO,MAAM,qBAAqB,UAAU,MAAM,SAAS;AAAA,IAC5D,OAAO;AACN,aAAO,MAAM,qBAAqB;AAAA,IACnC;AAGA,UAAM,SAAS,MAAM,KAAK,gBAAgB,UAAU,mBAAmB;AAAA,MACtE,cAAc,gBAAgB,SAAY,OAAO,WAAW,IAAI;AAAA,MAChE,aAAa;AAAA,MACb,WAAW;AAAA,MACX,mBAAmB,oBAAoB;AAAA,MACvC,YAAY,aAAa;AAAA,IAC1B,CAAC;AAED,WAAO,MAAM,8BAA8B,MAAM;AAGjD,UAAM,eAAe,KAAK,gBAAgB,gBAAgB,QAAQ;AAClE,UAAM,gBAAgB,MAAM,aAAa,QAAQ;AAAA,MAChD,UAAU;AAAA,MACV,OAAO;AAAA,MACP;AAAA,MACA,sBAAsB;AAAA;AAAA,IACvB,CAAC;AAED,QAAI,CAAC,iBAAiB,OAAO,kBAAkB,YAAY,cAAc,KAAK,MAAM,IAAI;AACvF,YAAM,IAAI,MAAM,kDAAkD;AAAA,IACnE;AAEA,UAAM,UAAU,cAAc,KAAK;AAGnC,QAAI,QAAQ,SAAS,KAAK;AACzB,YAAM,IAAI,MAAM,wDAAwD;AAAA,IACzE;AAEA,WAAO;AAAA,MACN;AAAA,MACA;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAM,YACL,aACA,SACA,cACA,UACgB;AAChB,UAAM,WAAW,MAAM,KAAK,gBAAgB,aAAa,YAAY;AACrE,UAAM,KAAK,mBAAmB,aAAa,SAAS,UAAU,gBAAgB,QAAQ,IAAI,GAAG,QAAQ;AACrG,UAAM,SAAS,WAAW,OAAO,QAAQ,KAAK;AAC9C,WAAO,QAAQ,6BAA6B,MAAM,EAAE;AAAA,EACrD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,sBACC,UACA,UACU;AAjUZ;AAmUE,QAAI,aAAa,UAAU;AAC1B,aAAO;AAAA,IACR;AAGA,UAAM,iBACL,aAAa,WACV,cAAS,cAAT,mBAAoB,SACpB,cAAS,cAAT,mBAAoB;AAGxB,YAAO,iDAAgB,oBAAmB;AAAA,EAC3C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAM,iBAAiB,SAAiB,cAAuC;AAC9E,UAAM,WAAW,MAAM,KAAK,gBAAgB,aAAa,YAAY;AACrE,WAAO,KAAK,6BAA6B,SAAS,UAAU,YAAY;AAAA,EACzE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,6BACL,SACA,UACA,cACkB;AAClB,UAAM,qBAAqB,SAAS,eAAe;AACnD,WAAO,MAAM,oCAAoC,SAAS,WAAW,EAAE;AACvE,WAAO,MAAM,uCAAuC,kBAAkB,EAAE;AAExE,QAAI,wBAAwB;AAC5B,QAAI,uBAAuB,MAAM;AAChC,8BAAwB;AACxB,aAAO,MAAM,wBAAwB;AAAA,IACtC,WAAW,uBAAuB,gBAAgB;AAEjD,8BAAwB,MAAM,mBAAmB,YAAY;AAC7D,aAAO,MAAM,iDAAiD,qBAAqB,EAAE;AAAA,IACtF,OAAO;AACN,aAAO,MAAM,kBAAkB;AAAA,IAChC;AAGA,WAAO,MAAM,4BAA4B,qBAAqB,EAAE;AAChE,QAAI,uBAAuB;AAC1B,aAAO,MAAM,wCAAwC;AACrD,aAAO,GAAG,OAAO;AAAA;AAAA;AAAA;AAAA,IAClB;AAEA,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAc,mBACb,aACA,SACA,UACA,cACA,UACgB;AAnZlB;AAuZE,UAAM,eAAe,aAAa,SAC/B,aACC,cAAS,oBAAT,mBAA0B,aAAY;AAC1C,UAAM,WAAW,+BAA+B,OAAO,YAA
Y;AAGnE,UAAM,eAAe,MAAM,KAAK,6BAA6B,SAAS,UAAU,YAAY;AAG5F,UAAM,eAAe,YAAY;AACjC,UAAM,aAAa,aAAa,SAAY,OAAO;AAGnD,UAAM,SAAS,cAAc;AAAA,MAC5B,QAAQ,OAAO,YAAY;AAAA,MAC3B,MAAM;AAAA,MACN,MAAM;AAAA,IACP,CAAC;AAAA,EACF;AACD;","names":[]}
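
Beyond the summary routing, the new map documents two path-encoding helpers worth noting. Both snippets below are lifted from the embedded claude-transcript.ts and SessionSummaryService.ts sources, with worked examples added:

```ts
// Claude Code encodes a worktree path into a ~/.claude/projects directory
// name by replacing both '/' and '_' with '-'.
export function getClaudeProjectPath(worktreePath: string): string {
	return worktreePath.replace(/[/_]/g, '-')
}
getClaudeProjectPath('/Users/adam/Projects/foo_bar')
// => '-Users-adam-Projects-foo-bar'

// Recap filenames use a different scheme: path separators become a triple
// underscore, and any other unsafe character becomes '-'.
function slugifyPath(loomPath: string): string {
	let slug = loomPath.replace(/[/\\]+$/, '')  // trim trailing slashes
	slug = slug.replace(/[/\\]/g, '___')        // separators -> ___
	slug = slug.replace(/[^a-zA-Z0-9_-]/g, '-') // everything else -> -
	return `${slug}.json`
}
slugifyPath('/Users/adam/Projects/foo_bar')
// => '___Users___adam___Projects___foo_bar.json'
```
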
@@ -5,7 +5,7 @@ import {
   EnvironmentManager,
   LoomManager,
   ResourceCleanup
-} from "./chunk-
+} from "./chunk-ETY2SBW5.js";
 import {
   ProcessManager
 } from "./chunk-WZYBHD7P.js";
@@ -484,4 +484,4 @@ var CleanupCommand = class {
 export {
   CleanupCommand
 };
-//# sourceMappingURL=cleanup-
+//# sourceMappingURL=cleanup-IO4KV2DL.js.map