@glasstrace/sdk 0.15.1 → 0.17.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (78) hide show
  1. package/README.md +148 -1
  2. package/dist/adapters/drizzle.js +2 -5
  3. package/dist/adapters/drizzle.js.map +1 -1
  4. package/dist/{chunk-PD2SKFQQ.js → chunk-55FBXXER.js} +4 -8
  5. package/dist/{chunk-PD2SKFQQ.js.map → chunk-55FBXXER.js.map} +1 -1
  6. package/dist/{chunk-ZNOD6FC7.js → chunk-CTJI2YKA.js} +8 -15
  7. package/dist/{chunk-ZNOD6FC7.js.map → chunk-CTJI2YKA.js.map} +1 -1
  8. package/dist/{chunk-WK7MPK2T.js → chunk-DQ25VOKK.js} +1 -89
  9. package/dist/chunk-DQ25VOKK.js.map +1 -0
  10. package/dist/{chunk-BL3YDC6V.js → chunk-DXRZKKSO.js} +1 -6
  11. package/dist/{chunk-BL3YDC6V.js.map → chunk-DXRZKKSO.js.map} +1 -1
  12. package/dist/{chunk-2LDBR3F3.js → chunk-E33Y7BQH.js} +36 -74
  13. package/dist/chunk-E33Y7BQH.js.map +1 -0
  14. package/dist/{chunk-YMEXDDTA.js → chunk-GSGX76Q5.js} +4 -99
  15. package/dist/chunk-GSGX76Q5.js.map +1 -0
  16. package/dist/{chunk-ECEN724Y.js → chunk-J5BW7V2D.js} +4 -8
  17. package/dist/{chunk-ECEN724Y.js.map → chunk-J5BW7V2D.js.map} +1 -1
  18. package/dist/{chunk-BGZ7J74D.js → chunk-NSBPE2FW.js} +2 -16
  19. package/dist/{chunk-A2AZL6MZ.js → chunk-O63DJKIJ.js} +169 -18
  20. package/dist/chunk-O63DJKIJ.js.map +1 -0
  21. package/dist/chunk-UUUKI65I.js +851 -0
  22. package/dist/chunk-UUUKI65I.js.map +1 -0
  23. package/dist/chunk-VUZCLMIX.js +57 -0
  24. package/dist/chunk-VUZCLMIX.js.map +1 -0
  25. package/dist/{chunk-OSXIUKD5.js → chunk-WZXVS2EO.js} +1 -6
  26. package/dist/{chunk-OSXIUKD5.js.map → chunk-WZXVS2EO.js.map} +1 -1
  27. package/dist/{chunk-ROFOJQWN.js → chunk-XNDHQN4S.js} +7 -11
  28. package/dist/{chunk-ROFOJQWN.js.map → chunk-XNDHQN4S.js.map} +1 -1
  29. package/dist/cli/init.cjs +673 -161
  30. package/dist/cli/init.cjs.map +1 -1
  31. package/dist/cli/init.d.cts +54 -1
  32. package/dist/cli/init.d.ts +54 -1
  33. package/dist/cli/init.js +146 -37
  34. package/dist/cli/init.js.map +1 -1
  35. package/dist/cli/mcp-add.cjs +16 -16
  36. package/dist/cli/mcp-add.cjs.map +1 -1
  37. package/dist/cli/mcp-add.js +10 -13
  38. package/dist/cli/mcp-add.js.map +1 -1
  39. package/dist/cli/status.cjs +2 -2
  40. package/dist/cli/status.js +4 -7
  41. package/dist/cli/status.js.map +1 -1
  42. package/dist/cli/uninit.cjs +56 -59
  43. package/dist/cli/uninit.cjs.map +1 -1
  44. package/dist/cli/uninit.js +4 -4
  45. package/dist/cli/validate.cjs +2 -2
  46. package/dist/cli/validate.js +3 -6
  47. package/dist/cli/validate.js.map +1 -1
  48. package/dist/{esm-MDK7CZID.js → esm-KBPHCVB4.js} +3 -3
  49. package/dist/{getMachineId-bsd-4NIRBWME.js → getMachineId-bsd-345PYXFX.js} +4 -7
  50. package/dist/{getMachineId-bsd-4NIRBWME.js.map → getMachineId-bsd-345PYXFX.js.map} +1 -1
  51. package/dist/{getMachineId-darwin-2XNOCCJQ.js → getMachineId-darwin-5L2D25AD.js} +4 -7
  52. package/dist/{getMachineId-darwin-2XNOCCJQ.js.map → getMachineId-darwin-5L2D25AD.js.map} +1 -1
  53. package/dist/{getMachineId-linux-V6YSQEY7.js → getMachineId-linux-KJR4P5HN.js} +3 -6
  54. package/dist/{getMachineId-linux-V6YSQEY7.js.map → getMachineId-linux-KJR4P5HN.js.map} +1 -1
  55. package/dist/{getMachineId-unsupported-4FKBJNVO.js → getMachineId-unsupported-NDNXDYDY.js} +3 -6
  56. package/dist/{getMachineId-unsupported-4FKBJNVO.js.map → getMachineId-unsupported-NDNXDYDY.js.map} +1 -1
  57. package/dist/{getMachineId-win-WLRZBKVG.js → getMachineId-win-T7PJNJXG.js} +4 -7
  58. package/dist/{getMachineId-win-WLRZBKVG.js.map → getMachineId-win-T7PJNJXG.js.map} +1 -1
  59. package/dist/index.cjs +565 -463
  60. package/dist/index.cjs.map +1 -1
  61. package/dist/index.d.cts +93 -4
  62. package/dist/index.d.ts +93 -4
  63. package/dist/index.js +286 -702
  64. package/dist/index.js.map +1 -1
  65. package/dist/{monorepo-YILKGQXQ.js → monorepo-N5Z63XP7.js} +4 -4
  66. package/dist/{source-map-uploader-3GWUQDTS.js → source-map-uploader-26QPRSCG.js} +5 -4
  67. package/dist/source-map-uploader-26QPRSCG.js.map +1 -0
  68. package/package.json +1 -1
  69. package/dist/chunk-2LDBR3F3.js.map +0 -1
  70. package/dist/chunk-A2AZL6MZ.js.map +0 -1
  71. package/dist/chunk-BGZ7J74D.js.map +0 -1
  72. package/dist/chunk-UPS5BGER.js +0 -182
  73. package/dist/chunk-UPS5BGER.js.map +0 -1
  74. package/dist/chunk-WK7MPK2T.js.map +0 -1
  75. package/dist/chunk-YMEXDDTA.js.map +0 -1
  76. /package/dist/{esm-MDK7CZID.js.map → chunk-NSBPE2FW.js.map} +0 -0
  77. /package/dist/{monorepo-YILKGQXQ.js.map → esm-KBPHCVB4.js.map} +0 -0
  78. /package/dist/{source-map-uploader-3GWUQDTS.js.map → monorepo-N5Z63XP7.js.map} +0 -0
package/README.md CHANGED
@@ -6,7 +6,154 @@ them to coding agents through an MCP server and live dashboard.
6
6
 
7
7
  > **Status: Pre-release** -- not yet published to npm.
8
8
 
9
- See the [monorepo README](../../README.md) for the planned API.
9
+ See the [monorepo README](../../README.md) for the full API overview,
10
+ including the [Coexistence with Other OTel Tools](../../README.md#coexistence-with-other-otel-tools)
11
+ section which documents automatic span-processor attachment onto a
12
+ pre-registered OTel provider (Sentry, Datadog, Next.js 16 production)
13
+ and manual integration via `createGlasstraceSpanProcessor()`.
14
+
15
+ ## Initialize
16
+
17
+ ```bash
18
+ npx glasstrace init
19
+ ```
20
+
21
+ The `init` command scaffolds the files Glasstrace needs and merges into
22
+ your existing setup rather than overwriting it.
23
+
24
+ ### Instrumentation file precedence
25
+
26
+ Init picks the first matching location:
27
+
28
+ 1. An existing `src/instrumentation.{ts,js,mjs}` — the user has already
29
+ committed to this location, so merge there.
30
+ 2. An existing `instrumentation.{ts,js,mjs}` at the project root — same
31
+ rationale.
32
+ 3. A new `src/instrumentation.ts` when the project contains a `src/`
33
+ directory at its root (the common Next.js convention).
34
+ 4. A new `instrumentation.ts` at the project root.
35
+
36
+ Next.js only loads instrumentation from one of the two locations —
37
+ scaffolding to the wrong one silently prevents the SDK from starting,
38
+ so the layout is resolved automatically.
39
+
40
+ ### Merge into existing instrumentation
41
+
42
+ When an instrumentation file already exists, init merges instead of
43
+ overwriting:
44
+
45
+ - If the file exports a `register()` function, init inserts
46
+ `registerGlasstrace()` as the first statement of the existing body
47
+ and imports `registerGlasstrace` at the top of the file.
48
+ - If the file has no `register()` function (for example, it only
49
+ contains a top-level Sentry import), init appends a new
50
+ `export async function register()` that calls `registerGlasstrace()`.
51
+ - If `registerGlasstrace()` is already present, init is a no-op.
52
+
53
+ Before modifying an existing file, init prompts for confirmation. Pass
54
+ `--force` (or `--yes`) to skip the prompt in automated environments.
55
+
56
+ ### Both-layout conflict
57
+
58
+ If both `instrumentation.ts` (root) and `src/instrumentation.ts` exist,
59
+ init exits non-zero without modifying either file. Next.js's loader
60
+ behavior is undefined when both are present — it loads one and ignores
61
+ the other. Merge your code into `src/instrumentation.ts`, delete the
62
+ root file, then re-run init.
63
+
64
+ ## Init Verification
65
+
66
+ ```bash
67
+ npx glasstrace init
68
+ ```
69
+
70
+ `glasstrace init` scaffolds instrumentation, configures MCP, and
71
+ verifies server-side registration of the anonymous key before
72
+ reporting success. The verification step uses `node:https` directly —
73
+ bypassing any `fetch` patching introduced by Next.js 16 — so a silent
74
+ init-hang cannot leave your installation in a broken state.
75
+
76
+ | Exit code | Meaning |
77
+ |-----------|---------|
78
+ | `0` | Scaffolding succeeded AND the server confirmed the anon key. |
79
+ | `1` | Scaffolding failed. No verification attempted. |
80
+ | `2` | Scaffolding succeeded but server verification failed. Safe to re-run. |
81
+
82
+ On a non-zero verification exit, the error message distinguishes three
83
+ classes so you can act on them:
84
+
85
+ - `fetch failed: <reason>` — transport error (DNS, TCP, TLS, timeout).
86
+ - `server rejected the key (HTTP <status>)` — 4xx/5xx status.
87
+ - `server returned malformed response` — 2xx with unparseable body.
88
+
89
+ Transport errors are retried twice (500 ms + 1500 ms backoff, 20-second
90
+ total cap). HTTP 4xx/5xx and malformed responses are surfaced
91
+ immediately. Set `GLASSTRACE_SKIP_INIT_VERIFY=1` to skip verification
92
+ for offline installs.
93
+
94
+ ## Server Action detection (Next.js)
95
+
96
+ Next.js does not emit a dedicated OTel span for Server Actions. The SDK
97
+ applies a post-hoc heuristic at enrichment time: any `POST` to a page
98
+ route (not `/api/*`, not `/_next/*`) is almost always a Server Action
99
+ invocation in idiomatic App Router code. When the heuristic matches,
100
+ the SDK adds the attribute:
101
+
102
+ ```
103
+ glasstrace.next.action.detected = true
104
+ ```
105
+
106
+ The attribute is labeled `detected` rather than `confirmed` because rare
107
+ false positives are possible (legacy form POSTs, hand-rolled page-route
108
+ POST handlers). The heuristic cannot identify *which* Server Action
109
+ ran — that requires the `Next-Action` request header, which the
110
+ Glasstrace browser extension captures.
111
+
112
+ ### Correlating a trace with browser extension data
113
+
114
+ To correlate a server-captured trace with extension-side action data,
115
+ call `captureCorrelationId` from a Next.js `middleware.ts` (or any
116
+ custom server request hook that runs inside the request's OTel context):
117
+
118
+ ```ts
119
+ // middleware.ts
120
+ import { captureCorrelationId } from "@glasstrace/sdk";
121
+ import { NextResponse } from "next/server";
122
+
123
+ export function middleware(req: Request) {
124
+ captureCorrelationId(req);
125
+ return NextResponse.next();
126
+ }
127
+ ```
128
+
129
+ `captureCorrelationId` reads the `x-gt-cid` header from an incoming
130
+ request and sets it as `glasstrace.correlation.id` on the currently
131
+ active span. It accepts either a Fetch-API `Request` / `NextRequest`
132
+ or a Node `IncomingMessage`. The helper is defensive: a missing active
133
+ span, a missing header, and malformed input are all silent no-ops — it never
134
+ throws from a request hook.
135
+
136
+ ### Installation nudge
137
+
138
+ When the heuristic fires and the span has no
139
+ `glasstrace.correlation.id` attribute (i.e. the extension was not
140
+ active for that request), the SDK writes a single stderr nudge per
141
+ process recommending the browser extension:
142
+
143
+ ```
144
+ [glasstrace] Detected a Next.js Server Action trace. Install the
145
+ Glasstrace browser extension to capture the Server Action identifier
146
+ for precise action-level debugging. https://glasstrace.dev/ext
147
+ ```
148
+
149
+ Silence the nudge by setting:
150
+
151
+ ```
152
+ GLASSTRACE_SUPPRESS_ACTION_NUDGE=1
153
+ ```
154
+
155
+ The nudge never fires in production (detected via `NODE_ENV` or
156
+ `VERCEL_ENV`) unless `GLASSTRACE_FORCE_ENABLE=true` is also set.
10
157
 
11
158
  ## License
12
159
 
@@ -1,13 +1,10 @@
1
1
  import {
2
2
  SpanKind,
3
3
  trace
4
- } from "../chunk-WK7MPK2T.js";
5
- import {
6
- init_esm_shims
7
- } from "../chunk-BGZ7J74D.js";
4
+ } from "../chunk-DQ25VOKK.js";
5
+ import "../chunk-NSBPE2FW.js";
8
6
 
9
7
  // src/adapters/drizzle.ts
10
- init_esm_shims();
11
8
  function extractOperation(query) {
12
9
  const trimmed = query.trimStart().toUpperCase();
13
10
  if (trimmed.startsWith("SELECT")) return "SELECT";
@@ -1 +1 @@
1
- {"version":3,"sources":["../../src/adapters/drizzle.ts"],"sourcesContent":["import { trace, SpanKind, type Tracer } from \"@opentelemetry/api\";\n\n/**\n * Options for the Glasstrace Drizzle logger.\n */\nexport interface GlasstraceDrizzleLoggerOptions {\n /** Whether to capture query parameters. Defaults to false (safe default). */\n captureParams?: boolean;\n}\n\n/**\n * Extracts the SQL operation (SELECT, INSERT, UPDATE, DELETE) from a query.\n * Returns 'unknown' if the operation cannot be determined.\n */\nfunction extractOperation(query: string): string {\n const trimmed = query.trimStart().toUpperCase();\n if (trimmed.startsWith(\"SELECT\")) return \"SELECT\";\n if (trimmed.startsWith(\"INSERT\")) return \"INSERT\";\n if (trimmed.startsWith(\"UPDATE\")) return \"UPDATE\";\n if (trimmed.startsWith(\"DELETE\")) return \"DELETE\";\n return \"unknown\";\n}\n\n/**\n * Extracts the table name from a SQL query using best-effort regex.\n * Returns undefined if the table cannot be determined.\n */\nfunction extractTable(query: string): string | undefined {\n // FROM table_name (SELECT, DELETE)\n const fromMatch = /\\bFROM\\s+[\"'`]?(\\w+)[\"'`]?/i.exec(query);\n if (fromMatch) return fromMatch[1];\n\n // INSERT INTO table_name\n const insertMatch = /\\bINSERT\\s+INTO\\s+[\"'`]?(\\w+)[\"'`]?/i.exec(query);\n if (insertMatch) return insertMatch[1];\n\n // UPDATE table_name\n const updateMatch = /\\bUPDATE\\s+[\"'`]?(\\w+)[\"'`]?/i.exec(query);\n if (updateMatch) return updateMatch[1];\n\n return undefined;\n}\n\n/**\n * Implements Drizzle's Logger interface to create OTel spans for Drizzle queries.\n *\n * Exported via `@glasstrace/sdk/drizzle` subpath to avoid bundling Drizzle\n * for Prisma-only users.\n *\n * When OTel is not initialized, tracer.startSpan() returns a no-op span\n * and the logger still executes without errors.\n */\nexport class GlasstraceDrizzleLogger {\n private readonly tracer: Tracer;\n private readonly captureParams: boolean;\n\n 
constructor(options?: GlasstraceDrizzleLoggerOptions) {\n this.tracer = trace.getTracer(\"glasstrace-drizzle\");\n this.captureParams = options?.captureParams ?? false;\n }\n\n /**\n * Called by Drizzle ORM for each query execution.\n * Creates an OTel span with query metadata.\n */\n logQuery(query: string, params: unknown[]): void {\n const operation = extractOperation(query);\n const spanName =\n operation === \"unknown\" ? \"drizzle.query\" : `drizzle.${operation}`;\n\n const span = this.tracer.startSpan(spanName, {\n kind: SpanKind.CLIENT,\n attributes: {\n \"db.system\": \"drizzle\",\n \"db.statement\": query,\n \"db.operation\": operation,\n \"glasstrace.orm.provider\": \"drizzle\",\n },\n });\n\n // Table extraction\n const table = extractTable(query);\n if (table !== undefined) {\n span.setAttribute(\"db.sql.table\", table);\n }\n\n // Param handling\n if (this.captureParams) {\n try {\n span.setAttribute(\"db.sql.params\", JSON.stringify(params));\n } catch {\n span.setAttribute(\"db.sql.params\", \"[serialization_error]\");\n }\n } else {\n span.setAttribute(\"db.sql.params\", \"[REDACTED]\");\n }\n\n span.end();\n 
}\n}\n"],"mappings":";;;;;;;;;AAAA;AAcA,SAAS,iBAAiB,OAAuB;AAC/C,QAAM,UAAU,MAAM,UAAU,EAAE,YAAY;AAC9C,MAAI,QAAQ,WAAW,QAAQ,EAAG,QAAO;AACzC,MAAI,QAAQ,WAAW,QAAQ,EAAG,QAAO;AACzC,MAAI,QAAQ,WAAW,QAAQ,EAAG,QAAO;AACzC,MAAI,QAAQ,WAAW,QAAQ,EAAG,QAAO;AACzC,SAAO;AACT;AAMA,SAAS,aAAa,OAAmC;AAEvD,QAAM,YAAY,8BAA8B,KAAK,KAAK;AAC1D,MAAI,UAAW,QAAO,UAAU,CAAC;AAGjC,QAAM,cAAc,uCAAuC,KAAK,KAAK;AACrE,MAAI,YAAa,QAAO,YAAY,CAAC;AAGrC,QAAM,cAAc,gCAAgC,KAAK,KAAK;AAC9D,MAAI,YAAa,QAAO,YAAY,CAAC;AAErC,SAAO;AACT;AAWO,IAAM,0BAAN,MAA8B;AAAA,EAClB;AAAA,EACA;AAAA,EAEjB,YAAY,SAA0C;AACpD,SAAK,SAAS,MAAM,UAAU,oBAAoB;AAClD,SAAK,gBAAgB,SAAS,iBAAiB;AAAA,EACjD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,SAAS,OAAe,QAAyB;AAC/C,UAAM,YAAY,iBAAiB,KAAK;AACxC,UAAM,WACJ,cAAc,YAAY,kBAAkB,WAAW,SAAS;AAElE,UAAM,OAAO,KAAK,OAAO,UAAU,UAAU;AAAA,MAC3C,MAAM,SAAS;AAAA,MACf,YAAY;AAAA,QACV,aAAa;AAAA,QACb,gBAAgB;AAAA,QAChB,gBAAgB;AAAA,QAChB,2BAA2B;AAAA,MAC7B;AAAA,IACF,CAAC;AAGD,UAAM,QAAQ,aAAa,KAAK;AAChC,QAAI,UAAU,QAAW;AACvB,WAAK,aAAa,gBAAgB,KAAK;AAAA,IACzC;AAGA,QAAI,KAAK,eAAe;AACtB,UAAI;AACF,aAAK,aAAa,iBAAiB,KAAK,UAAU,MAAM,CAAC;AAAA,MAC3D,QAAQ;AACN,aAAK,aAAa,iBAAiB,uBAAuB;AAAA,MAC5D;AAAA,IACF,OAAO;AACL,WAAK,aAAa,iBAAiB,YAAY;AAAA,IACjD;AAEA,SAAK,IAAI;AAAA,EACX;AACF;","names":[]}
1
+ {"version":3,"sources":["../../src/adapters/drizzle.ts"],"sourcesContent":["import { trace, SpanKind, type Tracer } from \"@opentelemetry/api\";\n\n/**\n * Options for the Glasstrace Drizzle logger.\n */\nexport interface GlasstraceDrizzleLoggerOptions {\n /** Whether to capture query parameters. Defaults to false (safe default). */\n captureParams?: boolean;\n}\n\n/**\n * Extracts the SQL operation (SELECT, INSERT, UPDATE, DELETE) from a query.\n * Returns 'unknown' if the operation cannot be determined.\n */\nfunction extractOperation(query: string): string {\n const trimmed = query.trimStart().toUpperCase();\n if (trimmed.startsWith(\"SELECT\")) return \"SELECT\";\n if (trimmed.startsWith(\"INSERT\")) return \"INSERT\";\n if (trimmed.startsWith(\"UPDATE\")) return \"UPDATE\";\n if (trimmed.startsWith(\"DELETE\")) return \"DELETE\";\n return \"unknown\";\n}\n\n/**\n * Extracts the table name from a SQL query using best-effort regex.\n * Returns undefined if the table cannot be determined.\n */\nfunction extractTable(query: string): string | undefined {\n // FROM table_name (SELECT, DELETE)\n const fromMatch = /\\bFROM\\s+[\"'`]?(\\w+)[\"'`]?/i.exec(query);\n if (fromMatch) return fromMatch[1];\n\n // INSERT INTO table_name\n const insertMatch = /\\bINSERT\\s+INTO\\s+[\"'`]?(\\w+)[\"'`]?/i.exec(query);\n if (insertMatch) return insertMatch[1];\n\n // UPDATE table_name\n const updateMatch = /\\bUPDATE\\s+[\"'`]?(\\w+)[\"'`]?/i.exec(query);\n if (updateMatch) return updateMatch[1];\n\n return undefined;\n}\n\n/**\n * Implements Drizzle's Logger interface to create OTel spans for Drizzle queries.\n *\n * Exported via `@glasstrace/sdk/drizzle` subpath to avoid bundling Drizzle\n * for Prisma-only users.\n *\n * When OTel is not initialized, tracer.startSpan() returns a no-op span\n * and the logger still executes without errors.\n */\nexport class GlasstraceDrizzleLogger {\n private readonly tracer: Tracer;\n private readonly captureParams: boolean;\n\n 
constructor(options?: GlasstraceDrizzleLoggerOptions) {\n this.tracer = trace.getTracer(\"glasstrace-drizzle\");\n this.captureParams = options?.captureParams ?? false;\n }\n\n /**\n * Called by Drizzle ORM for each query execution.\n * Creates an OTel span with query metadata.\n */\n logQuery(query: string, params: unknown[]): void {\n const operation = extractOperation(query);\n const spanName =\n operation === \"unknown\" ? \"drizzle.query\" : `drizzle.${operation}`;\n\n const span = this.tracer.startSpan(spanName, {\n kind: SpanKind.CLIENT,\n attributes: {\n \"db.system\": \"drizzle\",\n \"db.statement\": query,\n \"db.operation\": operation,\n \"glasstrace.orm.provider\": \"drizzle\",\n },\n });\n\n // Table extraction\n const table = extractTable(query);\n if (table !== undefined) {\n span.setAttribute(\"db.sql.table\", table);\n }\n\n // Param handling\n if (this.captureParams) {\n try {\n span.setAttribute(\"db.sql.params\", JSON.stringify(params));\n } catch {\n span.setAttribute(\"db.sql.params\", \"[serialization_error]\");\n }\n } else {\n span.setAttribute(\"db.sql.params\", \"[REDACTED]\");\n }\n\n span.end();\n 
}\n}\n"],"mappings":";;;;;;;AAcA,SAAS,iBAAiB,OAAuB;AAC/C,QAAM,UAAU,MAAM,UAAU,EAAE,YAAY;AAC9C,MAAI,QAAQ,WAAW,QAAQ,EAAG,QAAO;AACzC,MAAI,QAAQ,WAAW,QAAQ,EAAG,QAAO;AACzC,MAAI,QAAQ,WAAW,QAAQ,EAAG,QAAO;AACzC,MAAI,QAAQ,WAAW,QAAQ,EAAG,QAAO;AACzC,SAAO;AACT;AAMA,SAAS,aAAa,OAAmC;AAEvD,QAAM,YAAY,8BAA8B,KAAK,KAAK;AAC1D,MAAI,UAAW,QAAO,UAAU,CAAC;AAGjC,QAAM,cAAc,uCAAuC,KAAK,KAAK;AACrE,MAAI,YAAa,QAAO,YAAY,CAAC;AAGrC,QAAM,cAAc,gCAAgC,KAAK,KAAK;AAC9D,MAAI,YAAa,QAAO,YAAY,CAAC;AAErC,SAAO;AACT;AAWO,IAAM,0BAAN,MAA8B;AAAA,EAClB;AAAA,EACA;AAAA,EAEjB,YAAY,SAA0C;AACpD,SAAK,SAAS,MAAM,UAAU,oBAAoB;AAClD,SAAK,gBAAgB,SAAS,iBAAiB;AAAA,EACjD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,SAAS,OAAe,QAAyB;AAC/C,UAAM,YAAY,iBAAiB,KAAK;AACxC,UAAM,WACJ,cAAc,YAAY,kBAAkB,WAAW,SAAS;AAElE,UAAM,OAAO,KAAK,OAAO,UAAU,UAAU;AAAA,MAC3C,MAAM,SAAS;AAAA,MACf,YAAY;AAAA,QACV,aAAa;AAAA,QACb,gBAAgB;AAAA,QAChB,gBAAgB;AAAA,QAChB,2BAA2B;AAAA,MAC7B;AAAA,IACF,CAAC;AAGD,UAAM,QAAQ,aAAa,KAAK;AAChC,QAAI,UAAU,QAAW;AACvB,WAAK,aAAa,gBAAgB,KAAK;AAAA,IACzC;AAGA,QAAI,KAAK,eAAe;AACtB,UAAI;AACF,aAAK,aAAa,iBAAiB,KAAK,UAAU,MAAM,CAAC;AAAA,MAC3D,QAAQ;AACN,aAAK,aAAa,iBAAiB,uBAAuB;AAAA,MAC5D;AAAA,IACF,OAAO;AACL,WAAK,aAAa,iBAAiB,YAAY;AAAA,IACjD;AAEA,SAAK,IAAI;AAAA,EACX;AACF;","names":[]}
@@ -1,14 +1,10 @@
1
1
  import {
2
2
  NEXT_CONFIG_NAMES
3
- } from "./chunk-BL3YDC6V.js";
4
- import {
5
- init_esm_shims
6
- } from "./chunk-BGZ7J74D.js";
3
+ } from "./chunk-DXRZKKSO.js";
7
4
 
8
5
  // src/cli/monorepo.ts
9
- init_esm_shims();
10
- import * as fs from "fs";
11
- import * as path from "path";
6
+ import * as fs from "node:fs";
7
+ import * as path from "node:path";
12
8
  function resolveProjectRoot(cwd) {
13
9
  if (hasNextConfig(cwd)) {
14
10
  return { projectRoot: cwd, isMonorepo: false };
@@ -243,4 +239,4 @@ export {
243
239
  findNextJsApps,
244
240
  parsePnpmWorkspaceYaml
245
241
  };
246
- //# sourceMappingURL=chunk-PD2SKFQQ.js.map
242
+ //# sourceMappingURL=chunk-55FBXXER.js.map
@@ -1 +1 @@
1
- {"version":3,"sources":["../src/cli/monorepo.ts"],"sourcesContent":["import * as fs from \"node:fs\";\nimport * as path from \"node:path\";\nimport { NEXT_CONFIG_NAMES } from \"./constants.js\";\n\n/** Result of classifying the project root directory. */\nexport interface ProjectClassification {\n /** The directory to scaffold into (may differ from cwd for monorepos). */\n projectRoot: string;\n /** Whether this was auto-resolved from a monorepo root. */\n isMonorepo: boolean;\n /** If monorepo, the relative path from cwd to the resolved app. */\n appRelativePath?: string;\n}\n\n/**\n * Classifies the current directory and resolves the target project root.\n *\n * Classification logic:\n * 1. If the directory contains a Next.js config file, it is a Next.js app\n * directory. Returns it directly.\n * 1b. If no config file exists but package.json lists \"next\" as a dependency,\n * it is still a Next.js app (config files are optional since Next.js 12).\n * 2. If the directory contains monorepo markers (pnpm-workspace.yaml,\n * turbo.json, lerna.json, or a workspaces field in package.json),\n * scans workspace packages for Next.js apps.\n * 3. 
Otherwise, fails with a user-facing error.\n *\n * @param cwd - The current working directory\n * @returns The resolved project classification\n * @throws Error with a user-facing message if the location is invalid\n */\nexport function resolveProjectRoot(cwd: string): ProjectClassification {\n // Step 1: Check if cwd is a Next.js app directory (config file)\n if (hasNextConfig(cwd)) {\n return { projectRoot: cwd, isMonorepo: false };\n }\n\n // Step 1b: Check if cwd has \"next\" as a dependency (config is optional)\n if (hasNextDependency(cwd)) {\n return { projectRoot: cwd, isMonorepo: false };\n }\n\n // Step 2: Check for monorepo markers\n if (isMonorepoRoot(cwd)) {\n // findNextJsApps throws if no workspace globs are found (e.g., turbo.json\n // exists but no pnpm-workspace.yaml or workspaces in package.json)\n const apps = findNextJsApps(cwd);\n\n if (apps.length === 0) {\n throw new Error(\n \"This is a monorepo but no Next.js apps were found in workspace packages.\",\n );\n }\n\n if (apps.length === 1) {\n const appDir = apps[0];\n const relativePath = path.relative(cwd, appDir);\n return {\n projectRoot: appDir,\n isMonorepo: true,\n appRelativePath: relativePath,\n };\n }\n\n // Multiple apps found — cannot auto-resolve\n const appList = apps\n .map((app) => ` - ${path.relative(cwd, app)}`)\n .join(\"\\n\");\n throw new Error(\n `Found multiple Next.js apps:\\n${appList}\\nRun init from the specific app directory you want to instrument.`,\n );\n }\n\n // Step 3: Neither Next.js app nor monorepo\n throw new Error(\n \"No Next.js project found in the current directory.\\n\" +\n \"Run this command from your Next.js app directory, or from a monorepo root.\",\n );\n}\n\n/**\n * Checks whether the given directory contains a Next.js config file.\n */\nfunction hasNextConfig(dir: string): boolean {\n return NEXT_CONFIG_NAMES.some((name) =>\n fs.existsSync(path.join(dir, name)),\n );\n}\n\n/**\n * Checks whether the given directory's package.json lists \"next\" as 
a\n * dependency or devDependency. This handles the case where a Next.js app\n * has no explicit config file (config files are optional since Next.js 12).\n */\nfunction hasNextDependency(dir: string): boolean {\n const packageJsonPath = path.join(dir, \"package.json\");\n if (!fs.existsSync(packageJsonPath)) return false;\n\n try {\n const content = fs.readFileSync(packageJsonPath, \"utf-8\");\n const pkg = JSON.parse(content) as Record<string, unknown>;\n const deps = pkg[\"dependencies\"];\n const devDeps = pkg[\"devDependencies\"];\n\n if (typeof deps === \"object\" && deps !== null && \"next\" in deps) return true;\n if (typeof devDeps === \"object\" && devDeps !== null && \"next\" in devDeps) return true;\n } catch {\n // Invalid JSON — not a Next.js indicator\n }\n\n return false;\n}\n\n/**\n * Detects monorepo markers in the given directory.\n *\n * Checks for:\n * - pnpm-workspace.yaml\n * - turbo.json\n * - lerna.json\n * - \"workspaces\" field in package.json\n */\nexport function isMonorepoRoot(dir: string): boolean {\n // Check for standalone monorepo marker files\n if (fs.existsSync(path.join(dir, \"pnpm-workspace.yaml\"))) return true;\n if (fs.existsSync(path.join(dir, \"turbo.json\"))) return true;\n if (fs.existsSync(path.join(dir, \"lerna.json\"))) return true;\n\n // Check for \"workspaces\" field in package.json\n const packageJsonPath = path.join(dir, \"package.json\");\n if (fs.existsSync(packageJsonPath)) {\n try {\n const content = fs.readFileSync(packageJsonPath, \"utf-8\");\n const pkg = JSON.parse(content) as Record<string, unknown>;\n if (pkg[\"workspaces\"] !== undefined) return true;\n } catch {\n // Invalid JSON — not a monorepo indicator\n }\n }\n\n return false;\n}\n\n/**\n * Finds Next.js apps in workspace packages.\n *\n * Parses workspace globs from:\n * - pnpm-workspace.yaml (packages array)\n * - package.json workspaces field (string[] or { packages: string[] })\n * - lerna.json packages field (string[])\n *\n * Expands the 
workspace globs using filesystem traversal and returns\n * absolute paths of directories that contain a Next.js config file or\n * have \"next\" as a dependency in package.json.\n *\n * @param monorepoRoot - Absolute path to the monorepo root directory\n * @returns Sorted array of absolute paths to Next.js app directories\n */\nexport function findNextJsApps(monorepoRoot: string): string[] {\n const { includeGlobs, negationPatterns } = collectWorkspaceGlobs(monorepoRoot);\n\n if (includeGlobs.length === 0) {\n throw new Error(\n \"Monorepo detected but no workspace configuration found.\\n\" +\n 'Add a \"workspaces\" field to package.json or create pnpm-workspace.yaml.',\n );\n }\n\n const workspaceDirs = expandGlobs(monorepoRoot, includeGlobs);\n\n // Apply negation patterns: filter out directories matching any exclusion\n const excludedDirs = expandGlobs(monorepoRoot, negationPatterns);\n const excludedSet = new Set(excludedDirs);\n\n // Deduplicate and filter for Next.js apps\n const seen = new Set<string>();\n const nextApps: string[] = [];\n\n for (const dir of workspaceDirs) {\n if (seen.has(dir)) continue;\n seen.add(dir);\n if (excludedSet.has(dir)) continue;\n if (hasNextConfig(dir) || hasNextDependency(dir)) {\n nextApps.push(dir);\n }\n }\n\n return nextApps.sort();\n}\n\n/** Workspace globs split into include and negation patterns. */\nexport interface WorkspaceGlobs {\n includeGlobs: string[];\n negationPatterns: string[];\n}\n\n/**\n * Collects workspace globs from all supported monorepo config sources.\n * Returns deduplicated include globs and negation patterns separately.\n */\nfunction collectWorkspaceGlobs(root: string): WorkspaceGlobs {\n const globs: string[] = [];\n const negations: string[] = [];\n\n // 1. 
pnpm-workspace.yaml\n const pnpmPath = path.join(root, \"pnpm-workspace.yaml\");\n if (fs.existsSync(pnpmPath)) {\n const content = fs.readFileSync(pnpmPath, \"utf-8\");\n const parsed = parsePnpmWorkspaceYaml(content);\n globs.push(...parsed.includeGlobs);\n negations.push(...parsed.negationPatterns);\n }\n\n // 2. package.json workspaces\n const packageJsonPath = path.join(root, \"package.json\");\n if (fs.existsSync(packageJsonPath)) {\n try {\n const content = fs.readFileSync(packageJsonPath, \"utf-8\");\n const pkg = JSON.parse(content) as Record<string, unknown>;\n globs.push(...parsePackageJsonWorkspaces(pkg));\n } catch {\n // Invalid JSON — skip\n }\n }\n\n // 3. lerna.json packages\n const lernaPath = path.join(root, \"lerna.json\");\n if (fs.existsSync(lernaPath)) {\n try {\n const content = fs.readFileSync(lernaPath, \"utf-8\");\n const lerna = JSON.parse(content) as Record<string, unknown>;\n const packages = lerna[\"packages\"];\n if (Array.isArray(packages)) {\n for (const pkg of packages) {\n if (typeof pkg === \"string\") {\n globs.push(pkg);\n }\n }\n }\n } catch {\n // Invalid JSON — skip\n }\n }\n\n // Deduplicate\n return {\n includeGlobs: [...new Set(globs)],\n negationPatterns: [...new Set(negations)],\n };\n}\n\n/**\n * Parses pnpm-workspace.yaml to extract workspace package globs.\n *\n * The format is simple enough to parse with string processing:\n * ```yaml\n * packages:\n * - \"apps/*\"\n * - packages/*\n * - '!packages/internal'\n * ```\n *\n * Handles both quoted and unquoted values. Negation patterns (lines\n * starting with !) 
are returned separately so callers can apply them\n * as exclusions after expanding include globs.\n *\n * @internal Exported for unit testing only.\n */\nexport function parsePnpmWorkspaceYaml(content: string): WorkspaceGlobs {\n const lines = content.split(\"\\n\");\n const includeGlobs: string[] = [];\n const negationPatterns: string[] = [];\n let inPackages = false;\n\n for (const rawLine of lines) {\n const trimmed = rawLine.trim();\n\n // Detect the `packages:` key\n if (/^packages\\s*:/.test(trimmed)) {\n inPackages = true;\n continue;\n }\n\n // Stop when we hit another top-level key (no leading whitespace before key)\n if (inPackages && trimmed.length > 0 && !trimmed.startsWith(\"-\") && !rawLine.startsWith(\" \") && !rawLine.startsWith(\"\\t\")) {\n inPackages = false;\n continue;\n }\n\n if (!inPackages) continue;\n\n // Parse list items: ` - \"glob\"` or ` - glob` or ` - 'glob'`\n const itemMatch = /^\\s*-\\s+(.+)$/.exec(rawLine);\n if (!itemMatch) continue;\n\n // Strip surrounding quotes (single or double)\n const value = itemMatch[1].trim().replace(/^[\"']|[\"']$/g, \"\");\n\n // Skip empty values\n if (value.length === 0) continue;\n\n // Collect negation patterns separately (strip the leading !)\n if (value.startsWith(\"!\")) {\n negationPatterns.push(value.slice(1));\n continue;\n }\n\n includeGlobs.push(value);\n }\n\n return { includeGlobs, negationPatterns };\n}\n\n/**\n * Extracts workspace globs from a parsed package.json object.\n *\n * Handles both forms:\n * - `\"workspaces\": [\"packages/*\", \"apps/*\"]`\n * - `\"workspaces\": { \"packages\": [\"packages/*\", \"apps/*\"] }`\n */\nfunction parsePackageJsonWorkspaces(pkg: Record<string, unknown>): string[] {\n const workspaces = pkg[\"workspaces\"];\n if (workspaces === undefined || workspaces === null) return [];\n\n // Array form: string[]\n if (Array.isArray(workspaces)) {\n return workspaces.filter((w): w is string => typeof w === \"string\");\n }\n\n // Object form: { packages: 
string[] }\n if (typeof workspaces === \"object\") {\n const obj = workspaces as Record<string, unknown>;\n const packages = obj[\"packages\"];\n if (Array.isArray(packages)) {\n return packages.filter((p): p is string => typeof p === \"string\");\n }\n }\n\n return [];\n}\n\n/**\n * Expands workspace globs into actual directory paths.\n *\n * Supports:\n * - `packages/*` — matches one level of directories under packages/\n * - `apps/*` — matches one level of directories under apps/\n * - `packages/foo` — matches a specific directory (literal path)\n * - `packages/**` — recursively walks for directories with package.json\n *\n * @param root - The monorepo root directory\n * @param globs - Workspace glob patterns to expand\n * @returns Array of absolute paths to matched directories\n */\nfunction expandGlobs(root: string, globs: string[]): string[] {\n const dirs: string[] = [];\n\n for (const glob of globs) {\n // Remove trailing slash if present\n const cleanGlob = glob.replace(/\\/+$/, \"\");\n\n if (cleanGlob.includes(\"**\")) {\n // Recursive glob — walk the directory tree\n const prefix = cleanGlob.split(\"**\")[0].replace(/\\/+$/, \"\");\n const baseDir = path.join(root, prefix);\n if (fs.existsSync(baseDir)) {\n dirs.push(...walkDirectories(baseDir));\n }\n } else if (cleanGlob.includes(\"*\")) {\n // Single-level wildcard — expand one directory level\n const parts = cleanGlob.split(\"*\");\n // For \"packages/*\", parts = [\"packages/\", \"\"]\n const baseDir = path.join(root, parts[0].replace(/\\/+$/, \"\"));\n const suffix = parts.slice(1).join(\"*\"); // Anything after the wildcard\n\n if (!fs.existsSync(baseDir)) continue;\n\n let entries: fs.Dirent[];\n try {\n entries = fs.readdirSync(baseDir, { withFileTypes: true });\n } catch {\n continue;\n }\n\n for (const entry of entries) {\n if (!entry.isDirectory()) continue;\n // If there is a suffix pattern, the entry name must end with it\n if (suffix && !entry.name.endsWith(suffix)) continue;\n 
dirs.push(path.join(baseDir, entry.name));\n }\n } else {\n // Literal path — no wildcards\n const targetDir = path.join(root, cleanGlob);\n if (fs.existsSync(targetDir) && fs.statSync(targetDir).isDirectory()) {\n dirs.push(targetDir);\n }\n }\n }\n\n return dirs;\n}\n\n/**\n * Recursively walks a directory tree and returns all subdirectories\n * that contain a package.json (indicating they are workspace packages).\n * Skips node_modules and hidden directories.\n */\nfunction walkDirectories(baseDir: string): string[] {\n const result: string[] = [];\n\n let entries: fs.Dirent[];\n try {\n entries = fs.readdirSync(baseDir, { withFileTypes: true });\n } catch {\n return result;\n }\n\n for (const entry of entries) {\n if (!entry.isDirectory()) continue;\n // Skip node_modules and hidden directories\n if (entry.name === \"node_modules\" || entry.name.startsWith(\".\")) continue;\n\n const fullPath = path.join(baseDir, entry.name);\n\n // A workspace package should have a package.json\n if (fs.existsSync(path.join(fullPath, \"package.json\"))) {\n result.push(fullPath);\n }\n\n // Continue recursing for nested workspaces\n result.push(...walkDirectories(fullPath));\n }\n\n return 
result;\n}\n"],"mappings":";;;;;;;;AAAA;AAAA,YAAY,QAAQ;AACpB,YAAY,UAAU;AA8Bf,SAAS,mBAAmB,KAAoC;AAErE,MAAI,cAAc,GAAG,GAAG;AACtB,WAAO,EAAE,aAAa,KAAK,YAAY,MAAM;AAAA,EAC/C;AAGA,MAAI,kBAAkB,GAAG,GAAG;AAC1B,WAAO,EAAE,aAAa,KAAK,YAAY,MAAM;AAAA,EAC/C;AAGA,MAAI,eAAe,GAAG,GAAG;AAGvB,UAAM,OAAO,eAAe,GAAG;AAE/B,QAAI,KAAK,WAAW,GAAG;AACrB,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,QAAI,KAAK,WAAW,GAAG;AACrB,YAAM,SAAS,KAAK,CAAC;AACrB,YAAM,eAAoB,cAAS,KAAK,MAAM;AAC9C,aAAO;AAAA,QACL,aAAa;AAAA,QACb,YAAY;AAAA,QACZ,iBAAiB;AAAA,MACnB;AAAA,IACF;AAGA,UAAM,UAAU,KACb,IAAI,CAAC,QAAQ,OAAY,cAAS,KAAK,GAAG,CAAC,EAAE,EAC7C,KAAK,IAAI;AACZ,UAAM,IAAI;AAAA,MACR;AAAA,EAAiC,OAAO;AAAA;AAAA,IAC1C;AAAA,EACF;AAGA,QAAM,IAAI;AAAA,IACR;AAAA,EAEF;AACF;AAKA,SAAS,cAAc,KAAsB;AAC3C,SAAO,kBAAkB;AAAA,IAAK,CAAC,SAC1B,cAAgB,UAAK,KAAK,IAAI,CAAC;AAAA,EACpC;AACF;AAOA,SAAS,kBAAkB,KAAsB;AAC/C,QAAM,kBAAuB,UAAK,KAAK,cAAc;AACrD,MAAI,CAAI,cAAW,eAAe,EAAG,QAAO;AAE5C,MAAI;AACF,UAAM,UAAa,gBAAa,iBAAiB,OAAO;AACxD,UAAM,MAAM,KAAK,MAAM,OAAO;AAC9B,UAAM,OAAO,IAAI,cAAc;AAC/B,UAAM,UAAU,IAAI,iBAAiB;AAErC,QAAI,OAAO,SAAS,YAAY,SAAS,QAAQ,UAAU,KAAM,QAAO;AACxE,QAAI,OAAO,YAAY,YAAY,YAAY,QAAQ,UAAU,QAAS,QAAO;AAAA,EACnF,QAAQ;AAAA,EAER;AAEA,SAAO;AACT;AAWO,SAAS,eAAe,KAAsB;AAEnD,MAAO,cAAgB,UAAK,KAAK,qBAAqB,CAAC,EAAG,QAAO;AACjE,MAAO,cAAgB,UAAK,KAAK,YAAY,CAAC,EAAG,QAAO;AACxD,MAAO,cAAgB,UAAK,KAAK,YAAY,CAAC,EAAG,QAAO;AAGxD,QAAM,kBAAuB,UAAK,KAAK,cAAc;AACrD,MAAO,cAAW,eAAe,GAAG;AAClC,QAAI;AACF,YAAM,UAAa,gBAAa,iBAAiB,OAAO;AACxD,YAAM,MAAM,KAAK,MAAM,OAAO;AAC9B,UAAI,IAAI,YAAY,MAAM,OAAW,QAAO;AAAA,IAC9C,QAAQ;AAAA,IAER;AAAA,EACF;AAEA,SAAO;AACT;AAiBO,SAAS,eAAe,cAAgC;AAC7D,QAAM,EAAE,cAAc,iBAAiB,IAAI,sBAAsB,YAAY;AAE7E,MAAI,aAAa,WAAW,GAAG;AAC7B,UAAM,IAAI;AAAA,MACR;AAAA,IAEF;AAAA,EACF;AAEA,QAAM,gBAAgB,YAAY,cAAc,YAAY;AAG5D,QAAM,eAAe,YAAY,cAAc,gBAAgB;AAC/D,QAAM,cAAc,IAAI,IAAI,YAAY;AAGxC,QAAM,OAAO,oBAAI,IAAY;AAC7B,QAAM,WAAqB,CAAC;AAE5B,aAAW,OAAO,eAAe;AAC/B,QAAI,KAAK,IAAI,GAAG,EAAG;AACnB,SAAK,IAAI,GAAG;AACZ,QAAI,YAAY,IAAI,GAAG,EAAG;AAC1B,QAAI,cAAc,GAAG,KAAK,kBAAkB,GAAG,GAAG;AACh
D,eAAS,KAAK,GAAG;AAAA,IACnB;AAAA,EACF;AAEA,SAAO,SAAS,KAAK;AACvB;AAYA,SAAS,sBAAsB,MAA8B;AAC3D,QAAM,QAAkB,CAAC;AACzB,QAAM,YAAsB,CAAC;AAG7B,QAAM,WAAgB,UAAK,MAAM,qBAAqB;AACtD,MAAO,cAAW,QAAQ,GAAG;AAC3B,UAAM,UAAa,gBAAa,UAAU,OAAO;AACjD,UAAM,SAAS,uBAAuB,OAAO;AAC7C,UAAM,KAAK,GAAG,OAAO,YAAY;AACjC,cAAU,KAAK,GAAG,OAAO,gBAAgB;AAAA,EAC3C;AAGA,QAAM,kBAAuB,UAAK,MAAM,cAAc;AACtD,MAAO,cAAW,eAAe,GAAG;AAClC,QAAI;AACF,YAAM,UAAa,gBAAa,iBAAiB,OAAO;AACxD,YAAM,MAAM,KAAK,MAAM,OAAO;AAC9B,YAAM,KAAK,GAAG,2BAA2B,GAAG,CAAC;AAAA,IAC/C,QAAQ;AAAA,IAER;AAAA,EACF;AAGA,QAAM,YAAiB,UAAK,MAAM,YAAY;AAC9C,MAAO,cAAW,SAAS,GAAG;AAC5B,QAAI;AACF,YAAM,UAAa,gBAAa,WAAW,OAAO;AAClD,YAAM,QAAQ,KAAK,MAAM,OAAO;AAChC,YAAM,WAAW,MAAM,UAAU;AACjC,UAAI,MAAM,QAAQ,QAAQ,GAAG;AAC3B,mBAAW,OAAO,UAAU;AAC1B,cAAI,OAAO,QAAQ,UAAU;AAC3B,kBAAM,KAAK,GAAG;AAAA,UAChB;AAAA,QACF;AAAA,MACF;AAAA,IACF,QAAQ;AAAA,IAER;AAAA,EACF;AAGA,SAAO;AAAA,IACL,cAAc,CAAC,GAAG,IAAI,IAAI,KAAK,CAAC;AAAA,IAChC,kBAAkB,CAAC,GAAG,IAAI,IAAI,SAAS,CAAC;AAAA,EAC1C;AACF;AAmBO,SAAS,uBAAuB,SAAiC;AACtE,QAAM,QAAQ,QAAQ,MAAM,IAAI;AAChC,QAAM,eAAyB,CAAC;AAChC,QAAM,mBAA6B,CAAC;AACpC,MAAI,aAAa;AAEjB,aAAW,WAAW,OAAO;AAC3B,UAAM,UAAU,QAAQ,KAAK;AAG7B,QAAI,gBAAgB,KAAK,OAAO,GAAG;AACjC,mBAAa;AACb;AAAA,IACF;AAGA,QAAI,cAAc,QAAQ,SAAS,KAAK,CAAC,QAAQ,WAAW,GAAG,KAAK,CAAC,QAAQ,WAAW,GAAG,KAAK,CAAC,QAAQ,WAAW,GAAI,GAAG;AACzH,mBAAa;AACb;AAAA,IACF;AAEA,QAAI,CAAC,WAAY;AAGjB,UAAM,YAAY,gBAAgB,KAAK,OAAO;AAC9C,QAAI,CAAC,UAAW;AAGhB,UAAM,QAAQ,UAAU,CAAC,EAAE,KAAK,EAAE,QAAQ,gBAAgB,EAAE;AAG5D,QAAI,MAAM,WAAW,EAAG;AAGxB,QAAI,MAAM,WAAW,GAAG,GAAG;AACzB,uBAAiB,KAAK,MAAM,MAAM,CAAC,CAAC;AACpC;AAAA,IACF;AAEA,iBAAa,KAAK,KAAK;AAAA,EACzB;AAEA,SAAO,EAAE,cAAc,iBAAiB;AAC1C;AASA,SAAS,2BAA2B,KAAwC;AAC1E,QAAM,aAAa,IAAI,YAAY;AACnC,MAAI,eAAe,UAAa,eAAe,KAAM,QAAO,CAAC;AAG7D,MAAI,MAAM,QAAQ,UAAU,GAAG;AAC7B,WAAO,WAAW,OAAO,CAAC,MAAmB,OAAO,MAAM,QAAQ;AAAA,EACpE;AAGA,MAAI,OAAO,eAAe,UAAU;AAClC,UAAM,MAAM;AACZ,UAAM,WAAW,IAAI,UAAU;AAC/B,QAAI,MAAM,QAAQ,QAAQ,GAAG;AAC3B,aAAO,SAAS,OAAO,CAAC,MAAmB,OAAO,MAAM,QAAQ;AAAA,IAClE;AAAA,EACF;AAEA,SAAO,CAAC;AAC
V;AAeA,SAAS,YAAY,MAAc,OAA2B;AAC5D,QAAM,OAAiB,CAAC;AAExB,aAAW,QAAQ,OAAO;AAExB,UAAM,YAAY,KAAK,QAAQ,QAAQ,EAAE;AAEzC,QAAI,UAAU,SAAS,IAAI,GAAG;AAE5B,YAAM,SAAS,UAAU,MAAM,IAAI,EAAE,CAAC,EAAE,QAAQ,QAAQ,EAAE;AAC1D,YAAM,UAAe,UAAK,MAAM,MAAM;AACtC,UAAO,cAAW,OAAO,GAAG;AAC1B,aAAK,KAAK,GAAG,gBAAgB,OAAO,CAAC;AAAA,MACvC;AAAA,IACF,WAAW,UAAU,SAAS,GAAG,GAAG;AAElC,YAAM,QAAQ,UAAU,MAAM,GAAG;AAEjC,YAAM,UAAe,UAAK,MAAM,MAAM,CAAC,EAAE,QAAQ,QAAQ,EAAE,CAAC;AAC5D,YAAM,SAAS,MAAM,MAAM,CAAC,EAAE,KAAK,GAAG;AAEtC,UAAI,CAAI,cAAW,OAAO,EAAG;AAE7B,UAAI;AACJ,UAAI;AACF,kBAAa,eAAY,SAAS,EAAE,eAAe,KAAK,CAAC;AAAA,MAC3D,QAAQ;AACN;AAAA,MACF;AAEA,iBAAW,SAAS,SAAS;AAC3B,YAAI,CAAC,MAAM,YAAY,EAAG;AAE1B,YAAI,UAAU,CAAC,MAAM,KAAK,SAAS,MAAM,EAAG;AAC5C,aAAK,KAAU,UAAK,SAAS,MAAM,IAAI,CAAC;AAAA,MAC1C;AAAA,IACF,OAAO;AAEL,YAAM,YAAiB,UAAK,MAAM,SAAS;AAC3C,UAAO,cAAW,SAAS,KAAQ,YAAS,SAAS,EAAE,YAAY,GAAG;AACpE,aAAK,KAAK,SAAS;AAAA,MACrB;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAOA,SAAS,gBAAgB,SAA2B;AAClD,QAAM,SAAmB,CAAC;AAE1B,MAAI;AACJ,MAAI;AACF,cAAa,eAAY,SAAS,EAAE,eAAe,KAAK,CAAC;AAAA,EAC3D,QAAQ;AACN,WAAO;AAAA,EACT;AAEA,aAAW,SAAS,SAAS;AAC3B,QAAI,CAAC,MAAM,YAAY,EAAG;AAE1B,QAAI,MAAM,SAAS,kBAAkB,MAAM,KAAK,WAAW,GAAG,EAAG;AAEjE,UAAM,WAAgB,UAAK,SAAS,MAAM,IAAI;AAG9C,QAAO,cAAgB,UAAK,UAAU,cAAc,CAAC,GAAG;AACtD,aAAO,KAAK,QAAQ;AAAA,IACtB;AAGA,WAAO,KAAK,GAAG,gBAAgB,QAAQ,CAAC;AAAA,EAC1C;AAEA,SAAO;AACT;","names":[]}
1
+ {"version":3,"sources":["../src/cli/monorepo.ts"],"sourcesContent":["import * as fs from \"node:fs\";\nimport * as path from \"node:path\";\nimport { NEXT_CONFIG_NAMES } from \"./constants.js\";\n\n/** Result of classifying the project root directory. */\nexport interface ProjectClassification {\n /** The directory to scaffold into (may differ from cwd for monorepos). */\n projectRoot: string;\n /** Whether this was auto-resolved from a monorepo root. */\n isMonorepo: boolean;\n /** If monorepo, the relative path from cwd to the resolved app. */\n appRelativePath?: string;\n}\n\n/**\n * Classifies the current directory and resolves the target project root.\n *\n * Classification logic:\n * 1. If the directory contains a Next.js config file, it is a Next.js app\n * directory. Returns it directly.\n * 1b. If no config file exists but package.json lists \"next\" as a dependency,\n * it is still a Next.js app (config files are optional since Next.js 12).\n * 2. If the directory contains monorepo markers (pnpm-workspace.yaml,\n * turbo.json, lerna.json, or a workspaces field in package.json),\n * scans workspace packages for Next.js apps.\n * 3. 
Otherwise, fails with a user-facing error.\n *\n * @param cwd - The current working directory\n * @returns The resolved project classification\n * @throws Error with a user-facing message if the location is invalid\n */\nexport function resolveProjectRoot(cwd: string): ProjectClassification {\n // Step 1: Check if cwd is a Next.js app directory (config file)\n if (hasNextConfig(cwd)) {\n return { projectRoot: cwd, isMonorepo: false };\n }\n\n // Step 1b: Check if cwd has \"next\" as a dependency (config is optional)\n if (hasNextDependency(cwd)) {\n return { projectRoot: cwd, isMonorepo: false };\n }\n\n // Step 2: Check for monorepo markers\n if (isMonorepoRoot(cwd)) {\n // findNextJsApps throws if no workspace globs are found (e.g., turbo.json\n // exists but no pnpm-workspace.yaml or workspaces in package.json)\n const apps = findNextJsApps(cwd);\n\n if (apps.length === 0) {\n throw new Error(\n \"This is a monorepo but no Next.js apps were found in workspace packages.\",\n );\n }\n\n if (apps.length === 1) {\n const appDir = apps[0];\n const relativePath = path.relative(cwd, appDir);\n return {\n projectRoot: appDir,\n isMonorepo: true,\n appRelativePath: relativePath,\n };\n }\n\n // Multiple apps found — cannot auto-resolve\n const appList = apps\n .map((app) => ` - ${path.relative(cwd, app)}`)\n .join(\"\\n\");\n throw new Error(\n `Found multiple Next.js apps:\\n${appList}\\nRun init from the specific app directory you want to instrument.`,\n );\n }\n\n // Step 3: Neither Next.js app nor monorepo\n throw new Error(\n \"No Next.js project found in the current directory.\\n\" +\n \"Run this command from your Next.js app directory, or from a monorepo root.\",\n );\n}\n\n/**\n * Checks whether the given directory contains a Next.js config file.\n */\nfunction hasNextConfig(dir: string): boolean {\n return NEXT_CONFIG_NAMES.some((name) =>\n fs.existsSync(path.join(dir, name)),\n );\n}\n\n/**\n * Checks whether the given directory's package.json lists \"next\" as 
a\n * dependency or devDependency. This handles the case where a Next.js app\n * has no explicit config file (config files are optional since Next.js 12).\n */\nfunction hasNextDependency(dir: string): boolean {\n const packageJsonPath = path.join(dir, \"package.json\");\n if (!fs.existsSync(packageJsonPath)) return false;\n\n try {\n const content = fs.readFileSync(packageJsonPath, \"utf-8\");\n const pkg = JSON.parse(content) as Record<string, unknown>;\n const deps = pkg[\"dependencies\"];\n const devDeps = pkg[\"devDependencies\"];\n\n if (typeof deps === \"object\" && deps !== null && \"next\" in deps) return true;\n if (typeof devDeps === \"object\" && devDeps !== null && \"next\" in devDeps) return true;\n } catch {\n // Invalid JSON — not a Next.js indicator\n }\n\n return false;\n}\n\n/**\n * Detects monorepo markers in the given directory.\n *\n * Checks for:\n * - pnpm-workspace.yaml\n * - turbo.json\n * - lerna.json\n * - \"workspaces\" field in package.json\n */\nexport function isMonorepoRoot(dir: string): boolean {\n // Check for standalone monorepo marker files\n if (fs.existsSync(path.join(dir, \"pnpm-workspace.yaml\"))) return true;\n if (fs.existsSync(path.join(dir, \"turbo.json\"))) return true;\n if (fs.existsSync(path.join(dir, \"lerna.json\"))) return true;\n\n // Check for \"workspaces\" field in package.json\n const packageJsonPath = path.join(dir, \"package.json\");\n if (fs.existsSync(packageJsonPath)) {\n try {\n const content = fs.readFileSync(packageJsonPath, \"utf-8\");\n const pkg = JSON.parse(content) as Record<string, unknown>;\n if (pkg[\"workspaces\"] !== undefined) return true;\n } catch {\n // Invalid JSON — not a monorepo indicator\n }\n }\n\n return false;\n}\n\n/**\n * Finds Next.js apps in workspace packages.\n *\n * Parses workspace globs from:\n * - pnpm-workspace.yaml (packages array)\n * - package.json workspaces field (string[] or { packages: string[] })\n * - lerna.json packages field (string[])\n *\n * Expands the 
workspace globs using filesystem traversal and returns\n * absolute paths of directories that contain a Next.js config file or\n * have \"next\" as a dependency in package.json.\n *\n * @param monorepoRoot - Absolute path to the monorepo root directory\n * @returns Sorted array of absolute paths to Next.js app directories\n */\nexport function findNextJsApps(monorepoRoot: string): string[] {\n const { includeGlobs, negationPatterns } = collectWorkspaceGlobs(monorepoRoot);\n\n if (includeGlobs.length === 0) {\n throw new Error(\n \"Monorepo detected but no workspace configuration found.\\n\" +\n 'Add a \"workspaces\" field to package.json or create pnpm-workspace.yaml.',\n );\n }\n\n const workspaceDirs = expandGlobs(monorepoRoot, includeGlobs);\n\n // Apply negation patterns: filter out directories matching any exclusion\n const excludedDirs = expandGlobs(monorepoRoot, negationPatterns);\n const excludedSet = new Set(excludedDirs);\n\n // Deduplicate and filter for Next.js apps\n const seen = new Set<string>();\n const nextApps: string[] = [];\n\n for (const dir of workspaceDirs) {\n if (seen.has(dir)) continue;\n seen.add(dir);\n if (excludedSet.has(dir)) continue;\n if (hasNextConfig(dir) || hasNextDependency(dir)) {\n nextApps.push(dir);\n }\n }\n\n return nextApps.sort();\n}\n\n/** Workspace globs split into include and negation patterns. */\nexport interface WorkspaceGlobs {\n includeGlobs: string[];\n negationPatterns: string[];\n}\n\n/**\n * Collects workspace globs from all supported monorepo config sources.\n * Returns deduplicated include globs and negation patterns separately.\n */\nfunction collectWorkspaceGlobs(root: string): WorkspaceGlobs {\n const globs: string[] = [];\n const negations: string[] = [];\n\n // 1. 
pnpm-workspace.yaml\n const pnpmPath = path.join(root, \"pnpm-workspace.yaml\");\n if (fs.existsSync(pnpmPath)) {\n const content = fs.readFileSync(pnpmPath, \"utf-8\");\n const parsed = parsePnpmWorkspaceYaml(content);\n globs.push(...parsed.includeGlobs);\n negations.push(...parsed.negationPatterns);\n }\n\n // 2. package.json workspaces\n const packageJsonPath = path.join(root, \"package.json\");\n if (fs.existsSync(packageJsonPath)) {\n try {\n const content = fs.readFileSync(packageJsonPath, \"utf-8\");\n const pkg = JSON.parse(content) as Record<string, unknown>;\n globs.push(...parsePackageJsonWorkspaces(pkg));\n } catch {\n // Invalid JSON — skip\n }\n }\n\n // 3. lerna.json packages\n const lernaPath = path.join(root, \"lerna.json\");\n if (fs.existsSync(lernaPath)) {\n try {\n const content = fs.readFileSync(lernaPath, \"utf-8\");\n const lerna = JSON.parse(content) as Record<string, unknown>;\n const packages = lerna[\"packages\"];\n if (Array.isArray(packages)) {\n for (const pkg of packages) {\n if (typeof pkg === \"string\") {\n globs.push(pkg);\n }\n }\n }\n } catch {\n // Invalid JSON — skip\n }\n }\n\n // Deduplicate\n return {\n includeGlobs: [...new Set(globs)],\n negationPatterns: [...new Set(negations)],\n };\n}\n\n/**\n * Parses pnpm-workspace.yaml to extract workspace package globs.\n *\n * The format is simple enough to parse with string processing:\n * ```yaml\n * packages:\n * - \"apps/*\"\n * - packages/*\n * - '!packages/internal'\n * ```\n *\n * Handles both quoted and unquoted values. Negation patterns (lines\n * starting with !) 
are returned separately so callers can apply them\n * as exclusions after expanding include globs.\n *\n * @internal Exported for unit testing only.\n */\nexport function parsePnpmWorkspaceYaml(content: string): WorkspaceGlobs {\n const lines = content.split(\"\\n\");\n const includeGlobs: string[] = [];\n const negationPatterns: string[] = [];\n let inPackages = false;\n\n for (const rawLine of lines) {\n const trimmed = rawLine.trim();\n\n // Detect the `packages:` key\n if (/^packages\\s*:/.test(trimmed)) {\n inPackages = true;\n continue;\n }\n\n // Stop when we hit another top-level key (no leading whitespace before key)\n if (inPackages && trimmed.length > 0 && !trimmed.startsWith(\"-\") && !rawLine.startsWith(\" \") && !rawLine.startsWith(\"\\t\")) {\n inPackages = false;\n continue;\n }\n\n if (!inPackages) continue;\n\n // Parse list items: ` - \"glob\"` or ` - glob` or ` - 'glob'`\n const itemMatch = /^\\s*-\\s+(.+)$/.exec(rawLine);\n if (!itemMatch) continue;\n\n // Strip surrounding quotes (single or double)\n const value = itemMatch[1].trim().replace(/^[\"']|[\"']$/g, \"\");\n\n // Skip empty values\n if (value.length === 0) continue;\n\n // Collect negation patterns separately (strip the leading !)\n if (value.startsWith(\"!\")) {\n negationPatterns.push(value.slice(1));\n continue;\n }\n\n includeGlobs.push(value);\n }\n\n return { includeGlobs, negationPatterns };\n}\n\n/**\n * Extracts workspace globs from a parsed package.json object.\n *\n * Handles both forms:\n * - `\"workspaces\": [\"packages/*\", \"apps/*\"]`\n * - `\"workspaces\": { \"packages\": [\"packages/*\", \"apps/*\"] }`\n */\nfunction parsePackageJsonWorkspaces(pkg: Record<string, unknown>): string[] {\n const workspaces = pkg[\"workspaces\"];\n if (workspaces === undefined || workspaces === null) return [];\n\n // Array form: string[]\n if (Array.isArray(workspaces)) {\n return workspaces.filter((w): w is string => typeof w === \"string\");\n }\n\n // Object form: { packages: 
string[] }\n if (typeof workspaces === \"object\") {\n const obj = workspaces as Record<string, unknown>;\n const packages = obj[\"packages\"];\n if (Array.isArray(packages)) {\n return packages.filter((p): p is string => typeof p === \"string\");\n }\n }\n\n return [];\n}\n\n/**\n * Expands workspace globs into actual directory paths.\n *\n * Supports:\n * - `packages/*` — matches one level of directories under packages/\n * - `apps/*` — matches one level of directories under apps/\n * - `packages/foo` — matches a specific directory (literal path)\n * - `packages/**` — recursively walks for directories with package.json\n *\n * @param root - The monorepo root directory\n * @param globs - Workspace glob patterns to expand\n * @returns Array of absolute paths to matched directories\n */\nfunction expandGlobs(root: string, globs: string[]): string[] {\n const dirs: string[] = [];\n\n for (const glob of globs) {\n // Remove trailing slash if present\n const cleanGlob = glob.replace(/\\/+$/, \"\");\n\n if (cleanGlob.includes(\"**\")) {\n // Recursive glob — walk the directory tree\n const prefix = cleanGlob.split(\"**\")[0].replace(/\\/+$/, \"\");\n const baseDir = path.join(root, prefix);\n if (fs.existsSync(baseDir)) {\n dirs.push(...walkDirectories(baseDir));\n }\n } else if (cleanGlob.includes(\"*\")) {\n // Single-level wildcard — expand one directory level\n const parts = cleanGlob.split(\"*\");\n // For \"packages/*\", parts = [\"packages/\", \"\"]\n const baseDir = path.join(root, parts[0].replace(/\\/+$/, \"\"));\n const suffix = parts.slice(1).join(\"*\"); // Anything after the wildcard\n\n if (!fs.existsSync(baseDir)) continue;\n\n let entries: fs.Dirent[];\n try {\n entries = fs.readdirSync(baseDir, { withFileTypes: true });\n } catch {\n continue;\n }\n\n for (const entry of entries) {\n if (!entry.isDirectory()) continue;\n // If there is a suffix pattern, the entry name must end with it\n if (suffix && !entry.name.endsWith(suffix)) continue;\n 
dirs.push(path.join(baseDir, entry.name));\n }\n } else {\n // Literal path — no wildcards\n const targetDir = path.join(root, cleanGlob);\n if (fs.existsSync(targetDir) && fs.statSync(targetDir).isDirectory()) {\n dirs.push(targetDir);\n }\n }\n }\n\n return dirs;\n}\n\n/**\n * Recursively walks a directory tree and returns all subdirectories\n * that contain a package.json (indicating they are workspace packages).\n * Skips node_modules and hidden directories.\n */\nfunction walkDirectories(baseDir: string): string[] {\n const result: string[] = [];\n\n let entries: fs.Dirent[];\n try {\n entries = fs.readdirSync(baseDir, { withFileTypes: true });\n } catch {\n return result;\n }\n\n for (const entry of entries) {\n if (!entry.isDirectory()) continue;\n // Skip node_modules and hidden directories\n if (entry.name === \"node_modules\" || entry.name.startsWith(\".\")) continue;\n\n const fullPath = path.join(baseDir, entry.name);\n\n // A workspace package should have a package.json\n if (fs.existsSync(path.join(fullPath, \"package.json\"))) {\n result.push(fullPath);\n }\n\n // Continue recursing for nested workspaces\n result.push(...walkDirectories(fullPath));\n }\n\n return 
result;\n}\n"],"mappings":";;;;;AAAA,YAAY,QAAQ;AACpB,YAAY,UAAU;AA8Bf,SAAS,mBAAmB,KAAoC;AAErE,MAAI,cAAc,GAAG,GAAG;AACtB,WAAO,EAAE,aAAa,KAAK,YAAY,MAAM;AAAA,EAC/C;AAGA,MAAI,kBAAkB,GAAG,GAAG;AAC1B,WAAO,EAAE,aAAa,KAAK,YAAY,MAAM;AAAA,EAC/C;AAGA,MAAI,eAAe,GAAG,GAAG;AAGvB,UAAM,OAAO,eAAe,GAAG;AAE/B,QAAI,KAAK,WAAW,GAAG;AACrB,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,QAAI,KAAK,WAAW,GAAG;AACrB,YAAM,SAAS,KAAK,CAAC;AACrB,YAAM,eAAoB,cAAS,KAAK,MAAM;AAC9C,aAAO;AAAA,QACL,aAAa;AAAA,QACb,YAAY;AAAA,QACZ,iBAAiB;AAAA,MACnB;AAAA,IACF;AAGA,UAAM,UAAU,KACb,IAAI,CAAC,QAAQ,OAAY,cAAS,KAAK,GAAG,CAAC,EAAE,EAC7C,KAAK,IAAI;AACZ,UAAM,IAAI;AAAA,MACR;AAAA,EAAiC,OAAO;AAAA;AAAA,IAC1C;AAAA,EACF;AAGA,QAAM,IAAI;AAAA,IACR;AAAA,EAEF;AACF;AAKA,SAAS,cAAc,KAAsB;AAC3C,SAAO,kBAAkB;AAAA,IAAK,CAAC,SAC1B,cAAgB,UAAK,KAAK,IAAI,CAAC;AAAA,EACpC;AACF;AAOA,SAAS,kBAAkB,KAAsB;AAC/C,QAAM,kBAAuB,UAAK,KAAK,cAAc;AACrD,MAAI,CAAI,cAAW,eAAe,EAAG,QAAO;AAE5C,MAAI;AACF,UAAM,UAAa,gBAAa,iBAAiB,OAAO;AACxD,UAAM,MAAM,KAAK,MAAM,OAAO;AAC9B,UAAM,OAAO,IAAI,cAAc;AAC/B,UAAM,UAAU,IAAI,iBAAiB;AAErC,QAAI,OAAO,SAAS,YAAY,SAAS,QAAQ,UAAU,KAAM,QAAO;AACxE,QAAI,OAAO,YAAY,YAAY,YAAY,QAAQ,UAAU,QAAS,QAAO;AAAA,EACnF,QAAQ;AAAA,EAER;AAEA,SAAO;AACT;AAWO,SAAS,eAAe,KAAsB;AAEnD,MAAO,cAAgB,UAAK,KAAK,qBAAqB,CAAC,EAAG,QAAO;AACjE,MAAO,cAAgB,UAAK,KAAK,YAAY,CAAC,EAAG,QAAO;AACxD,MAAO,cAAgB,UAAK,KAAK,YAAY,CAAC,EAAG,QAAO;AAGxD,QAAM,kBAAuB,UAAK,KAAK,cAAc;AACrD,MAAO,cAAW,eAAe,GAAG;AAClC,QAAI;AACF,YAAM,UAAa,gBAAa,iBAAiB,OAAO;AACxD,YAAM,MAAM,KAAK,MAAM,OAAO;AAC9B,UAAI,IAAI,YAAY,MAAM,OAAW,QAAO;AAAA,IAC9C,QAAQ;AAAA,IAER;AAAA,EACF;AAEA,SAAO;AACT;AAiBO,SAAS,eAAe,cAAgC;AAC7D,QAAM,EAAE,cAAc,iBAAiB,IAAI,sBAAsB,YAAY;AAE7E,MAAI,aAAa,WAAW,GAAG;AAC7B,UAAM,IAAI;AAAA,MACR;AAAA,IAEF;AAAA,EACF;AAEA,QAAM,gBAAgB,YAAY,cAAc,YAAY;AAG5D,QAAM,eAAe,YAAY,cAAc,gBAAgB;AAC/D,QAAM,cAAc,IAAI,IAAI,YAAY;AAGxC,QAAM,OAAO,oBAAI,IAAY;AAC7B,QAAM,WAAqB,CAAC;AAE5B,aAAW,OAAO,eAAe;AAC/B,QAAI,KAAK,IAAI,GAAG,EAAG;AACnB,SAAK,IAAI,GAAG;AACZ,QAAI,YAAY,IAAI,GAAG,EAAG;AAC1B,QAAI,cAAc,GAAG,KAAK,kBAAkB,GAAG,GAAG;AAChD,eAAS,K
AAK,GAAG;AAAA,IACnB;AAAA,EACF;AAEA,SAAO,SAAS,KAAK;AACvB;AAYA,SAAS,sBAAsB,MAA8B;AAC3D,QAAM,QAAkB,CAAC;AACzB,QAAM,YAAsB,CAAC;AAG7B,QAAM,WAAgB,UAAK,MAAM,qBAAqB;AACtD,MAAO,cAAW,QAAQ,GAAG;AAC3B,UAAM,UAAa,gBAAa,UAAU,OAAO;AACjD,UAAM,SAAS,uBAAuB,OAAO;AAC7C,UAAM,KAAK,GAAG,OAAO,YAAY;AACjC,cAAU,KAAK,GAAG,OAAO,gBAAgB;AAAA,EAC3C;AAGA,QAAM,kBAAuB,UAAK,MAAM,cAAc;AACtD,MAAO,cAAW,eAAe,GAAG;AAClC,QAAI;AACF,YAAM,UAAa,gBAAa,iBAAiB,OAAO;AACxD,YAAM,MAAM,KAAK,MAAM,OAAO;AAC9B,YAAM,KAAK,GAAG,2BAA2B,GAAG,CAAC;AAAA,IAC/C,QAAQ;AAAA,IAER;AAAA,EACF;AAGA,QAAM,YAAiB,UAAK,MAAM,YAAY;AAC9C,MAAO,cAAW,SAAS,GAAG;AAC5B,QAAI;AACF,YAAM,UAAa,gBAAa,WAAW,OAAO;AAClD,YAAM,QAAQ,KAAK,MAAM,OAAO;AAChC,YAAM,WAAW,MAAM,UAAU;AACjC,UAAI,MAAM,QAAQ,QAAQ,GAAG;AAC3B,mBAAW,OAAO,UAAU;AAC1B,cAAI,OAAO,QAAQ,UAAU;AAC3B,kBAAM,KAAK,GAAG;AAAA,UAChB;AAAA,QACF;AAAA,MACF;AAAA,IACF,QAAQ;AAAA,IAER;AAAA,EACF;AAGA,SAAO;AAAA,IACL,cAAc,CAAC,GAAG,IAAI,IAAI,KAAK,CAAC;AAAA,IAChC,kBAAkB,CAAC,GAAG,IAAI,IAAI,SAAS,CAAC;AAAA,EAC1C;AACF;AAmBO,SAAS,uBAAuB,SAAiC;AACtE,QAAM,QAAQ,QAAQ,MAAM,IAAI;AAChC,QAAM,eAAyB,CAAC;AAChC,QAAM,mBAA6B,CAAC;AACpC,MAAI,aAAa;AAEjB,aAAW,WAAW,OAAO;AAC3B,UAAM,UAAU,QAAQ,KAAK;AAG7B,QAAI,gBAAgB,KAAK,OAAO,GAAG;AACjC,mBAAa;AACb;AAAA,IACF;AAGA,QAAI,cAAc,QAAQ,SAAS,KAAK,CAAC,QAAQ,WAAW,GAAG,KAAK,CAAC,QAAQ,WAAW,GAAG,KAAK,CAAC,QAAQ,WAAW,GAAI,GAAG;AACzH,mBAAa;AACb;AAAA,IACF;AAEA,QAAI,CAAC,WAAY;AAGjB,UAAM,YAAY,gBAAgB,KAAK,OAAO;AAC9C,QAAI,CAAC,UAAW;AAGhB,UAAM,QAAQ,UAAU,CAAC,EAAE,KAAK,EAAE,QAAQ,gBAAgB,EAAE;AAG5D,QAAI,MAAM,WAAW,EAAG;AAGxB,QAAI,MAAM,WAAW,GAAG,GAAG;AACzB,uBAAiB,KAAK,MAAM,MAAM,CAAC,CAAC;AACpC;AAAA,IACF;AAEA,iBAAa,KAAK,KAAK;AAAA,EACzB;AAEA,SAAO,EAAE,cAAc,iBAAiB;AAC1C;AASA,SAAS,2BAA2B,KAAwC;AAC1E,QAAM,aAAa,IAAI,YAAY;AACnC,MAAI,eAAe,UAAa,eAAe,KAAM,QAAO,CAAC;AAG7D,MAAI,MAAM,QAAQ,UAAU,GAAG;AAC7B,WAAO,WAAW,OAAO,CAAC,MAAmB,OAAO,MAAM,QAAQ;AAAA,EACpE;AAGA,MAAI,OAAO,eAAe,UAAU;AAClC,UAAM,MAAM;AACZ,UAAM,WAAW,IAAI,UAAU;AAC/B,QAAI,MAAM,QAAQ,QAAQ,GAAG;AAC3B,aAAO,SAAS,OAAO,CAAC,MAAmB,OAAO,MAAM,QAAQ;AAAA,IAClE;AAAA,EACF;AAEA,SAAO,CAAC;AACV;AAeA,S
AAS,YAAY,MAAc,OAA2B;AAC5D,QAAM,OAAiB,CAAC;AAExB,aAAW,QAAQ,OAAO;AAExB,UAAM,YAAY,KAAK,QAAQ,QAAQ,EAAE;AAEzC,QAAI,UAAU,SAAS,IAAI,GAAG;AAE5B,YAAM,SAAS,UAAU,MAAM,IAAI,EAAE,CAAC,EAAE,QAAQ,QAAQ,EAAE;AAC1D,YAAM,UAAe,UAAK,MAAM,MAAM;AACtC,UAAO,cAAW,OAAO,GAAG;AAC1B,aAAK,KAAK,GAAG,gBAAgB,OAAO,CAAC;AAAA,MACvC;AAAA,IACF,WAAW,UAAU,SAAS,GAAG,GAAG;AAElC,YAAM,QAAQ,UAAU,MAAM,GAAG;AAEjC,YAAM,UAAe,UAAK,MAAM,MAAM,CAAC,EAAE,QAAQ,QAAQ,EAAE,CAAC;AAC5D,YAAM,SAAS,MAAM,MAAM,CAAC,EAAE,KAAK,GAAG;AAEtC,UAAI,CAAI,cAAW,OAAO,EAAG;AAE7B,UAAI;AACJ,UAAI;AACF,kBAAa,eAAY,SAAS,EAAE,eAAe,KAAK,CAAC;AAAA,MAC3D,QAAQ;AACN;AAAA,MACF;AAEA,iBAAW,SAAS,SAAS;AAC3B,YAAI,CAAC,MAAM,YAAY,EAAG;AAE1B,YAAI,UAAU,CAAC,MAAM,KAAK,SAAS,MAAM,EAAG;AAC5C,aAAK,KAAU,UAAK,SAAS,MAAM,IAAI,CAAC;AAAA,MAC1C;AAAA,IACF,OAAO;AAEL,YAAM,YAAiB,UAAK,MAAM,SAAS;AAC3C,UAAO,cAAW,SAAS,KAAQ,YAAS,SAAS,EAAE,YAAY,GAAG;AACpE,aAAK,KAAK,SAAS;AAAA,MACrB;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAOA,SAAS,gBAAgB,SAA2B;AAClD,QAAM,SAAmB,CAAC;AAE1B,MAAI;AACJ,MAAI;AACF,cAAa,eAAY,SAAS,EAAE,eAAe,KAAK,CAAC;AAAA,EAC3D,QAAQ;AACN,WAAO;AAAA,EACT;AAEA,aAAW,SAAS,SAAS;AAC3B,QAAI,CAAC,MAAM,YAAY,EAAG;AAE1B,QAAI,MAAM,SAAS,kBAAkB,MAAM,KAAK,WAAW,GAAG,EAAG;AAEjE,UAAM,WAAgB,UAAK,SAAS,MAAM,IAAI;AAG9C,QAAO,cAAgB,UAAK,UAAU,cAAc,CAAC,GAAG;AACtD,aAAO,KAAK,QAAQ;AAAA,IACtB;AAGA,WAAO,KAAK,GAAG,gBAAgB,QAAQ,CAAC;AAAA,EAC1C;AAEA,SAAO;AACT;","names":[]}
@@ -1,14 +1,9 @@
1
- import {
2
- init_esm_shims
3
- } from "./chunk-BGZ7J74D.js";
4
-
5
1
  // src/agent-detection/detect.ts
6
- init_esm_shims();
7
- import { execFile } from "child_process";
8
- import { access, stat } from "fs/promises";
9
- import { dirname, join, resolve } from "path";
10
- import { homedir } from "os";
11
- import { constants } from "fs";
2
+ import { execFile } from "node:child_process";
3
+ import { access, stat } from "node:fs/promises";
4
+ import { dirname, join, resolve } from "node:path";
5
+ import { homedir } from "node:os";
6
+ import { constants } from "node:fs";
12
7
  var AGENT_RULES = [
13
8
  {
14
9
  name: "claude",
@@ -157,7 +152,6 @@ async function detectAgents(projectRoot) {
157
152
  }
158
153
 
159
154
  // src/agent-detection/configs.ts
160
- init_esm_shims();
161
155
  function generateMcpConfig(agent, endpoint, anonKey) {
162
156
  if (!endpoint || endpoint.trim() === "") {
163
157
  throw new Error("endpoint must not be empty");
@@ -320,9 +314,8 @@ ${content}${m.end}
320
314
  }
321
315
 
322
316
  // src/agent-detection/inject.ts
323
- init_esm_shims();
324
- import { chmod, mkdir, readFile, writeFile } from "fs/promises";
325
- import { dirname as dirname2, isAbsolute, join as join2 } from "path";
317
+ import { chmod, mkdir, readFile, writeFile } from "node:fs/promises";
318
+ import { dirname as dirname2, isAbsolute, join as join2 } from "node:path";
326
319
  var HTML_START = "<!-- glasstrace:mcp:start -->";
327
320
  var HTML_END = "<!-- glasstrace:mcp:end -->";
328
321
  var HASH_START = "# glasstrace:mcp:start";
@@ -505,4 +498,4 @@ export {
505
498
  injectInfoSection,
506
499
  updateGitignore
507
500
  };
508
- //# sourceMappingURL=chunk-ZNOD6FC7.js.map
501
+ //# sourceMappingURL=chunk-CTJI2YKA.js.map
@@ -1 +1 @@
1
- {"version":3,"sources":["../src/agent-detection/detect.ts","../src/agent-detection/configs.ts","../src/agent-detection/inject.ts"],"sourcesContent":["import { execFile } from \"node:child_process\";\nimport { access, stat } from \"node:fs/promises\";\nimport { dirname, join, resolve } from \"node:path\";\nimport { homedir } from \"node:os\";\nimport { constants } from \"node:fs\";\n\n/**\n * Describes an AI coding agent detected in a project.\n */\nexport interface DetectedAgent {\n name: \"claude\" | \"codex\" | \"gemini\" | \"cursor\" | \"windsurf\" | \"generic\";\n mcpConfigPath: string | null;\n infoFilePath: string | null;\n cliAvailable: boolean;\n registrationCommand: string | null;\n}\n\ntype AgentName = DetectedAgent[\"name\"];\n\ninterface AgentRule {\n name: AgentName;\n /** Paths relative to a search directory that indicate this agent is present. */\n markers: string[];\n /** Function to compute the MCP config path given the directory where markers were found. */\n mcpConfigPath: (markerDir: string) => string;\n /** Function to compute the info file path, or null. */\n infoFilePath: (markerDir: string) => string | null;\n /** CLI binary name to check in PATH, or null if no CLI exists. */\n cliBinary: string | null;\n /** Registration command template, or null. 
*/\n registrationCommand: string | null;\n}\n\nconst AGENT_RULES: AgentRule[] = [\n {\n name: \"claude\",\n markers: [\".claude\", \"CLAUDE.md\"],\n mcpConfigPath: (dir) => join(dir, \".mcp.json\"),\n infoFilePath: (dir) => join(dir, \"CLAUDE.md\"),\n cliBinary: \"claude\",\n registrationCommand: \"npx glasstrace mcp add --agent claude\",\n },\n {\n name: \"codex\",\n markers: [\"codex.md\", \".codex\"],\n mcpConfigPath: (dir) => join(dir, \".codex\", \"config.toml\"),\n infoFilePath: (dir) => join(dir, \"codex.md\"),\n cliBinary: \"codex\",\n registrationCommand: \"npx glasstrace mcp add --agent codex\",\n },\n {\n name: \"gemini\",\n markers: [\".gemini\"],\n mcpConfigPath: (dir) => join(dir, \".gemini\", \"settings.json\"),\n infoFilePath: () => null,\n cliBinary: \"gemini\",\n registrationCommand: \"npx glasstrace mcp add --agent gemini\",\n },\n {\n name: \"cursor\",\n markers: [\".cursor\", \".cursorrules\"],\n mcpConfigPath: (dir) => join(dir, \".cursor\", \"mcp.json\"),\n infoFilePath: (dir) => join(dir, \".cursorrules\"),\n cliBinary: null,\n registrationCommand: \"npx glasstrace mcp add --agent cursor\",\n },\n {\n name: \"windsurf\",\n markers: [\".windsurfrules\", \".windsurf\"],\n mcpConfigPath: () =>\n join(homedir(), \".codeium\", \"windsurf\", \"mcp_config.json\"),\n infoFilePath: (dir) => join(dir, \".windsurfrules\"),\n cliBinary: null,\n registrationCommand: \"npx glasstrace mcp add --agent windsurf\",\n },\n];\n\n/**\n * Checks whether a path exists and is accessible, following symlinks.\n * Returns false on permission errors or missing paths.\n *\n * @param mode - The access mode to check (defaults to R_OK for marker detection).\n */\nasync function pathExists(\n path: string,\n mode: number = constants.R_OK,\n): Promise<boolean> {\n try {\n await access(path, mode);\n return true;\n } catch {\n return false;\n }\n}\n\n/**\n * Finds the git root directory by walking up from the given path.\n * Returns the starting directory if no `.git` is 
found.\n */\nasync function findGitRoot(startDir: string): Promise<string> {\n let current = resolve(startDir);\n\n while (true) {\n if (await pathExists(join(current, \".git\"), constants.F_OK)) {\n return current;\n }\n const parent = dirname(current);\n if (parent === current) {\n // Reached filesystem root without finding .git\n break;\n }\n current = parent;\n }\n\n return resolve(startDir);\n}\n\n/**\n * Returns true if a CLI binary is available on PATH.\n * Uses `which` on Unix and `where` on Windows, via execFile (no shell injection).\n */\nfunction isCliAvailable(binary: string): Promise<boolean> {\n return new Promise((resolve) => {\n const command = process.platform === \"win32\" ? \"where\" : \"which\";\n execFile(command, [binary], (error) => {\n resolve(error === null);\n });\n });\n}\n\n/**\n * Detects AI coding agents present in a project by scanning for marker\n * files and directories. Walks up from projectRoot to the git root to\n * support monorepo layouts.\n *\n * Always includes a \"generic\" fallback entry.\n *\n * @param projectRoot - Absolute or relative path to the project directory.\n * @returns Array of detected agents, with generic always last.\n * @throws If projectRoot does not exist or is not a directory.\n */\nexport async function detectAgents(\n projectRoot: string,\n): Promise<DetectedAgent[]> {\n const resolvedRoot = resolve(projectRoot);\n\n // Validate projectRoot exists and is a directory\n let rootStat;\n try {\n rootStat = await stat(resolvedRoot);\n } catch (err) {\n const code = (err as NodeJS.ErrnoException).code;\n throw new Error(\n `projectRoot does not exist: ${resolvedRoot}` +\n (code ? 
` (${code})` : \"\"),\n );\n }\n\n if (!rootStat.isDirectory()) {\n throw new Error(`projectRoot is not a directory: ${resolvedRoot}`);\n }\n\n const gitRoot = await findGitRoot(resolvedRoot);\n\n // Collect unique directories to search: projectRoot and every ancestor up to gitRoot\n const searchDirs: string[] = [];\n let current = resolvedRoot;\n while (true) {\n searchDirs.push(current);\n if (current === gitRoot) {\n break;\n }\n const parent = dirname(current);\n if (parent === current) {\n break;\n }\n current = parent;\n }\n\n const detected: DetectedAgent[] = [];\n const seenAgents = new Set<AgentName>();\n\n for (const rule of AGENT_RULES) {\n let foundDir: string | null = null;\n\n // Check each search directory for markers\n for (const dir of searchDirs) {\n let markerFound = false;\n for (const marker of rule.markers) {\n if (await pathExists(join(dir, marker))) {\n markerFound = true;\n break;\n }\n }\n if (markerFound) {\n foundDir = dir;\n break;\n }\n }\n\n if (foundDir === null) {\n continue;\n }\n\n if (seenAgents.has(rule.name)) {\n continue;\n }\n seenAgents.add(rule.name);\n\n // Determine info file path — only include if the file actually exists\n let infoFilePath = rule.infoFilePath(foundDir);\n if (infoFilePath !== null && !(await pathExists(infoFilePath))) {\n infoFilePath = null;\n }\n\n const cliAvailable = rule.cliBinary\n ? 
await isCliAvailable(rule.cliBinary)\n : false;\n\n detected.push({\n name: rule.name,\n mcpConfigPath: rule.mcpConfigPath(foundDir),\n infoFilePath,\n cliAvailable,\n registrationCommand: rule.registrationCommand,\n });\n }\n\n // Always include generic fallback\n detected.push({\n name: \"generic\",\n mcpConfigPath: join(resolvedRoot, \".glasstrace\", \"mcp.json\"),\n infoFilePath: null,\n cliAvailable: false,\n registrationCommand: null,\n });\n\n return detected;\n}\n","import type { DetectedAgent } from \"./detect.js\";\n\n/**\n * Generates the MCP server configuration content for a given agent.\n *\n * The output is the full file content suitable for writing to the agent's\n * MCP config file. Auth tokens are intentionally included here because\n * MCP config files are local-only and required for server authentication.\n *\n * @param agent - The detected agent to generate config for.\n * @param endpoint - The Glasstrace MCP endpoint URL.\n * @param anonKey - The anonymous API key for authentication.\n * @returns The formatted configuration string.\n * @throws If endpoint or anonKey is empty.\n */\nexport function generateMcpConfig(\n agent: DetectedAgent,\n endpoint: string,\n anonKey: string,\n): string {\n if (!endpoint || endpoint.trim() === \"\") {\n throw new Error(\"endpoint must not be empty\");\n }\n if (!anonKey || anonKey.trim() === \"\") {\n throw new Error(\"anonKey must not be empty\");\n }\n\n switch (agent.name) {\n case \"claude\":\n return JSON.stringify(\n {\n mcpServers: {\n glasstrace: {\n type: \"http\",\n url: endpoint,\n headers: {\n Authorization: `Bearer ${anonKey}`,\n },\n },\n },\n },\n null,\n 2,\n );\n\n case \"codex\": {\n // Escape TOML basic string special characters in the endpoint value.\n // TOML requires backslashes, quotes, and control characters to be escaped.\n const safeEndpoint = endpoint\n .replace(/\\\\/g, \"\\\\\\\\\")\n .replace(/\"/g, '\\\\\"')\n .replace(/\\n/g, \"\\\\n\")\n .replace(/\\r/g, \"\\\\r\")\n 
.replace(/\\t/g, \"\\\\t\");\n return [\n \"[mcp_servers.glasstrace]\",\n `url = \"${safeEndpoint}\"`,\n `bearer_token_env_var = \"GLASSTRACE_API_KEY\"`,\n \"\",\n ].join(\"\\n\");\n }\n\n case \"gemini\":\n return JSON.stringify(\n {\n mcpServers: {\n glasstrace: {\n httpUrl: endpoint,\n headers: {\n Authorization: `Bearer ${anonKey}`,\n },\n },\n },\n },\n null,\n 2,\n );\n\n case \"cursor\":\n return JSON.stringify(\n {\n mcpServers: {\n glasstrace: {\n url: endpoint,\n headers: {\n Authorization: `Bearer ${anonKey}`,\n },\n },\n },\n },\n null,\n 2,\n );\n\n case \"windsurf\":\n return JSON.stringify(\n {\n mcpServers: {\n glasstrace: {\n serverUrl: endpoint,\n headers: {\n Authorization: `Bearer ${anonKey}`,\n },\n },\n },\n },\n null,\n 2,\n );\n\n case \"generic\":\n return JSON.stringify(\n {\n mcpServers: {\n glasstrace: {\n url: endpoint,\n headers: {\n Authorization: `Bearer ${anonKey}`,\n },\n },\n },\n },\n null,\n 2,\n );\n\n default: {\n const _exhaustive: never = agent.name;\n throw new Error(`Unknown agent: ${_exhaustive}`);\n }\n }\n}\n\n/**\n * Marker pair used to delimit the Glasstrace section in agent info files.\n */\ninterface MarkerPair {\n start: string;\n end: string;\n}\n\nfunction htmlMarkers(): MarkerPair {\n return {\n start: \"<!-- glasstrace:mcp:start -->\",\n end: \"<!-- glasstrace:mcp:end -->\",\n };\n}\n\nfunction hashMarkers(): MarkerPair {\n return {\n start: \"# glasstrace:mcp:start\",\n end: \"# glasstrace:mcp:end\",\n };\n}\n\n/**\n * Generates informational content for an agent's instruction file.\n *\n * This content is designed to be appended to or inserted into agent-specific\n * instruction files (CLAUDE.md, .cursorrules, codex.md). It contains ONLY\n * the endpoint URL, tool descriptions, and setup instructions. 
Auth tokens\n * are NEVER included in this output.\n *\n * @param agent - The detected agent to generate info for.\n * @param endpoint - The Glasstrace MCP endpoint URL.\n * @returns The formatted info section string, or empty string for agents without a supported info file format.\n * @throws If endpoint is empty.\n */\nexport function generateInfoSection(\n agent: DetectedAgent,\n endpoint: string,\n): string {\n if (!endpoint || endpoint.trim() === \"\") {\n throw new Error(\"endpoint must not be empty\");\n }\n\n const content = [\n \"\",\n \"## Glasstrace MCP Integration\",\n \"\",\n `Glasstrace is configured as an MCP server at: ${endpoint}`,\n \"\",\n \"Available tools:\",\n \"- `get_latest_error` - Get the most recent error trace from the current session\",\n \"- `get_trace` - Get a specific trace by ID or URL pattern\",\n \"- `get_root_cause` - Get the full span tree and root cause analysis for an error\",\n \"- `get_test_suggestions` - Get test suggestions based on recent errors\",\n \"- `get_session_timeline` - Get the timeline of all traces in the current session\",\n \"\",\n \"To reconfigure, run: `npx glasstrace mcp add`\",\n \"\",\n ].join(\"\\n\");\n\n switch (agent.name) {\n case \"claude\": {\n const m = htmlMarkers();\n return `${m.start}\\n${content}${m.end}\\n`;\n }\n\n case \"codex\": {\n const m = htmlMarkers();\n return `${m.start}\\n${content}${m.end}\\n`;\n }\n\n case \"cursor\": {\n const m = hashMarkers();\n return `${m.start}\\n${content}${m.end}\\n`;\n }\n\n case \"gemini\":\n case \"windsurf\":\n case \"generic\":\n return \"\";\n\n default: {\n const _exhaustive: never = agent.name;\n throw new Error(`Unknown agent: ${_exhaustive}`);\n }\n }\n}\n","import { chmod, mkdir, readFile, writeFile } from \"node:fs/promises\";\nimport { dirname, isAbsolute, join } from \"node:path\";\nimport type { DetectedAgent } from \"./detect.js\";\n\n/** HTML comment markers used in markdown files (.md). 
*/\nconst HTML_START = \"<!-- glasstrace:mcp:start -->\";\nconst HTML_END = \"<!-- glasstrace:mcp:end -->\";\n\n/** Hash-prefixed markers used in plain text files (.cursorrules). */\nconst HASH_START = \"# glasstrace:mcp:start\";\nconst HASH_END = \"# glasstrace:mcp:end\";\n\n/**\n * Determines whether an error is a filesystem permission or read-only error.\n * Covers EACCES (permission denied), EPERM (operation not permitted), and\n * EROFS (read-only filesystem) to handle containerized/mounted environments.\n */\nfunction isPermissionError(err: unknown): boolean {\n const code = (err as NodeJS.ErrnoException).code;\n return code === \"EACCES\" || code === \"EPERM\" || code === \"EROFS\";\n}\n\n/**\n * Writes MCP configuration content to an agent's config file path.\n *\n * Creates parent directories as needed and sets file permissions to 0o600\n * (owner read/write only) since config files may contain auth tokens.\n *\n * Fails gracefully: logs a warning to stderr on permission errors instead\n * of throwing.\n *\n * @param agent - The detected agent whose config path to write to.\n * @param content - The full configuration file content.\n * @param projectRoot - The project root (reserved for future use).\n */\nexport async function writeMcpConfig(\n agent: DetectedAgent,\n content: string,\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n projectRoot: string,\n): Promise<void> {\n if (agent.mcpConfigPath === null) {\n return;\n }\n\n const configPath = agent.mcpConfigPath;\n const parentDir = dirname(configPath);\n\n try {\n await mkdir(parentDir, { recursive: true });\n } catch (err: unknown) {\n if (isPermissionError(err)) {\n process.stderr.write(\n `Warning: cannot create directory ${parentDir}: permission denied\\n`,\n );\n return;\n }\n throw err;\n }\n\n try {\n await writeFile(configPath, content, { mode: 0o600 });\n } catch (err: unknown) {\n if (isPermissionError(err)) {\n process.stderr.write(\n `Warning: cannot write config file 
${configPath}: permission denied\\n`,\n );\n return;\n }\n throw err;\n }\n\n // Ensure permissions are set even if the file already existed\n // (writeFile mode only applies to newly created files on some platforms)\n try {\n await chmod(configPath, 0o600);\n } catch {\n // Best-effort; the writeFile mode should have handled this\n }\n}\n\n/**\n * Finds existing marker boundaries in file content.\n *\n * Searches for both HTML comment and hash-prefixed marker formats,\n * since an existing file might use either convention.\n *\n * @returns The start and end indices (line-level) and the matched markers,\n * or null if no complete marker pair is found.\n */\nfunction findMarkerBoundaries(\n lines: string[],\n): { startIdx: number; endIdx: number } | null {\n let startIdx = -1;\n let endIdx = -1;\n\n for (let i = 0; i < lines.length; i++) {\n const trimmed = lines[i].trim();\n if (trimmed === HTML_START || trimmed === HASH_START) {\n startIdx = i;\n } else if (trimmed === HTML_END || trimmed === HASH_END) {\n if (startIdx !== -1) {\n endIdx = i;\n break;\n }\n }\n }\n\n if (startIdx === -1 || endIdx === -1) {\n return null;\n }\n\n return { startIdx, endIdx };\n}\n\n/**\n * Injects an informational section into an agent's instruction file.\n *\n * Uses marker comments to enable idempotent updates:\n * - If the file contains marker pairs, replaces content between them.\n * - If the file exists but has no markers, appends the section.\n * - If the file does not exist, creates it with the section content.\n *\n * Fails gracefully: logs a warning to stderr on read-only files instead\n * of throwing.\n *\n * @param agent - The detected agent whose info file to update.\n * @param content - The section content (including markers).\n * @param projectRoot - The project root (reserved for future use).\n */\nexport async function injectInfoSection(\n agent: DetectedAgent,\n content: string,\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n projectRoot: 
string,\n): Promise<void> {\n if (agent.infoFilePath === null) {\n return;\n }\n\n // Empty content means nothing to inject (e.g., agents without info sections)\n if (content === \"\") {\n return;\n }\n\n const filePath = agent.infoFilePath;\n\n let existingContent: string | null = null;\n try {\n existingContent = await readFile(filePath, \"utf-8\");\n } catch (err: unknown) {\n const code = (err as NodeJS.ErrnoException).code;\n if (code !== \"ENOENT\") {\n if (isPermissionError(err)) {\n process.stderr.write(\n `Warning: cannot read info file ${filePath}: permission denied\\n`,\n );\n return;\n }\n throw err;\n }\n }\n\n // File does not exist — create with section content\n if (existingContent === null) {\n try {\n await mkdir(dirname(filePath), { recursive: true });\n await writeFile(filePath, content, \"utf-8\");\n } catch (err: unknown) {\n if (isPermissionError(err)) {\n process.stderr.write(\n `Warning: cannot write info file ${filePath}: permission denied\\n`,\n );\n return;\n }\n throw err;\n }\n return;\n }\n\n // File exists — check for markers\n const lines = existingContent.split(\"\\n\");\n const boundaries = findMarkerBoundaries(lines);\n\n let newContent: string;\n if (boundaries !== null) {\n // Replace everything from start marker through end marker (inclusive)\n const before = lines.slice(0, boundaries.startIdx);\n const after = lines.slice(boundaries.endIdx + 1);\n // content already includes markers and trailing newline\n const contentWithoutTrailingNewline = content.endsWith(\"\\n\")\n ? content.slice(0, -1)\n : content;\n newContent = [...before, contentWithoutTrailingNewline, ...after].join(\"\\n\");\n } else {\n // No markers found — append with a blank line separator\n const separator = existingContent.endsWith(\"\\n\") ? 
\"\\n\" : \"\\n\\n\";\n newContent = existingContent + separator + content;\n }\n\n try {\n await writeFile(filePath, newContent, \"utf-8\");\n } catch (err: unknown) {\n if (isPermissionError(err)) {\n process.stderr.write(\n `Warning: cannot write info file ${filePath}: permission denied\\n`,\n );\n return;\n }\n throw err;\n }\n}\n\n/**\n * Ensures that the given paths are listed in the project's `.gitignore`.\n *\n * Only adds entries for paths that are not already present. Creates the\n * `.gitignore` file if it does not exist. Skips absolute paths (e.g.,\n * Windsurf's global config) since those are outside the project tree.\n *\n * Fails gracefully: logs a warning to stderr on permission errors.\n *\n * @param paths - Relative paths to ensure are gitignored.\n * @param projectRoot - The project root directory.\n */\nexport async function updateGitignore(\n paths: string[],\n projectRoot: string,\n): Promise<void> {\n const gitignorePath = join(projectRoot, \".gitignore\");\n\n // Filter out absolute paths — they reference locations outside the project\n // Uses isAbsolute() to handle both POSIX and Windows path formats\n const relativePaths = paths.filter((p) => !isAbsolute(p));\n\n if (relativePaths.length === 0) {\n return;\n }\n\n let existingContent = \"\";\n try {\n existingContent = await readFile(gitignorePath, \"utf-8\");\n } catch (err: unknown) {\n const code = (err as NodeJS.ErrnoException).code;\n if (code !== \"ENOENT\") {\n if (isPermissionError(err)) {\n process.stderr.write(\n `Warning: cannot read .gitignore: permission denied\\n`,\n );\n return;\n }\n throw err;\n }\n }\n\n // Parse existing entries, trimming whitespace for comparison\n const existingLines = existingContent\n .split(\"\\n\")\n .map((line) => line.trim())\n .filter((line) => line !== \"\");\n\n const existingSet = new Set(existingLines);\n\n // Normalize entries: trim whitespace, convert backslashes to forward slashes\n // (git ignore patterns use / as separator; backslash 
is an escape character),\n // drop empties, and deduplicate against existing entries.\n const toAdd = relativePaths\n .map((p) => p.trim().replace(/\\\\/g, \"/\"))\n .filter((p) => p !== \"\" && !existingSet.has(p));\n\n if (toAdd.length === 0) {\n return;\n }\n\n // Ensure file ends with newline before appending\n let updatedContent = existingContent;\n if (updatedContent.length > 0 && !updatedContent.endsWith(\"\\n\")) {\n updatedContent += \"\\n\";\n }\n\n updatedContent += toAdd.join(\"\\n\") + \"\\n\";\n\n try {\n await writeFile(gitignorePath, updatedContent, \"utf-8\");\n } catch (err: unknown) {\n if (isPermissionError(err)) {\n process.stderr.write(\n `Warning: cannot write .gitignore: permission denied\\n`,\n );\n return;\n }\n throw err;\n }\n}\n"],"mappings":";;;;;AAAA;AAAA,SAAS,gBAAgB;AACzB,SAAS,QAAQ,YAAY;AAC7B,SAAS,SAAS,MAAM,eAAe;AACvC,SAAS,eAAe;AACxB,SAAS,iBAAiB;AA6B1B,IAAM,cAA2B;AAAA,EAC/B;AAAA,IACE,MAAM;AAAA,IACN,SAAS,CAAC,WAAW,WAAW;AAAA,IAChC,eAAe,CAAC,QAAQ,KAAK,KAAK,WAAW;AAAA,IAC7C,cAAc,CAAC,QAAQ,KAAK,KAAK,WAAW;AAAA,IAC5C,WAAW;AAAA,IACX,qBAAqB;AAAA,EACvB;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,SAAS,CAAC,YAAY,QAAQ;AAAA,IAC9B,eAAe,CAAC,QAAQ,KAAK,KAAK,UAAU,aAAa;AAAA,IACzD,cAAc,CAAC,QAAQ,KAAK,KAAK,UAAU;AAAA,IAC3C,WAAW;AAAA,IACX,qBAAqB;AAAA,EACvB;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,SAAS,CAAC,SAAS;AAAA,IACnB,eAAe,CAAC,QAAQ,KAAK,KAAK,WAAW,eAAe;AAAA,IAC5D,cAAc,MAAM;AAAA,IACpB,WAAW;AAAA,IACX,qBAAqB;AAAA,EACvB;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,SAAS,CAAC,WAAW,cAAc;AAAA,IACnC,eAAe,CAAC,QAAQ,KAAK,KAAK,WAAW,UAAU;AAAA,IACvD,cAAc,CAAC,QAAQ,KAAK,KAAK,cAAc;AAAA,IAC/C,WAAW;AAAA,IACX,qBAAqB;AAAA,EACvB;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,SAAS,CAAC,kBAAkB,WAAW;AAAA,IACvC,eAAe,MACb,KAAK,QAAQ,GAAG,YAAY,YAAY,iBAAiB;AAAA,IAC3D,cAAc,CAAC,QAAQ,KAAK,KAAK,gBAAgB;AAAA,IACjD,WAAW;AAAA,IACX,qBAAqB;AAAA,EACvB;AACF;AAQA,eAAe,WACb,MACA,OAAe,UAAU,MACP;AAClB,MAAI;AACF,UAAM,OAAO,MAAM,IAAI;AACvB,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAMA,eAAe,YAAY,UAAmC;AAC5D,MAAI,UAAU,QAAQ,QAAQ;AAE9B,
SAAO,MAAM;AACX,QAAI,MAAM,WAAW,KAAK,SAAS,MAAM,GAAG,UAAU,IAAI,GAAG;AAC3D,aAAO;AAAA,IACT;AACA,UAAM,SAAS,QAAQ,OAAO;AAC9B,QAAI,WAAW,SAAS;AAEtB;AAAA,IACF;AACA,cAAU;AAAA,EACZ;AAEA,SAAO,QAAQ,QAAQ;AACzB;AAMA,SAAS,eAAe,QAAkC;AACxD,SAAO,IAAI,QAAQ,CAACA,aAAY;AAC9B,UAAM,UAAU,QAAQ,aAAa,UAAU,UAAU;AACzD,aAAS,SAAS,CAAC,MAAM,GAAG,CAAC,UAAU;AACrC,MAAAA,SAAQ,UAAU,IAAI;AAAA,IACxB,CAAC;AAAA,EACH,CAAC;AACH;AAaA,eAAsB,aACpB,aAC0B;AAC1B,QAAM,eAAe,QAAQ,WAAW;AAGxC,MAAI;AACJ,MAAI;AACF,eAAW,MAAM,KAAK,YAAY;AAAA,EACpC,SAAS,KAAK;AACZ,UAAM,OAAQ,IAA8B;AAC5C,UAAM,IAAI;AAAA,MACR,+BAA+B,YAAY,MACxC,OAAO,KAAK,IAAI,MAAM;AAAA,IAC3B;AAAA,EACF;AAEA,MAAI,CAAC,SAAS,YAAY,GAAG;AAC3B,UAAM,IAAI,MAAM,mCAAmC,YAAY,EAAE;AAAA,EACnE;AAEA,QAAM,UAAU,MAAM,YAAY,YAAY;AAG9C,QAAM,aAAuB,CAAC;AAC9B,MAAI,UAAU;AACd,SAAO,MAAM;AACX,eAAW,KAAK,OAAO;AACvB,QAAI,YAAY,SAAS;AACvB;AAAA,IACF;AACA,UAAM,SAAS,QAAQ,OAAO;AAC9B,QAAI,WAAW,SAAS;AACtB;AAAA,IACF;AACA,cAAU;AAAA,EACZ;AAEA,QAAM,WAA4B,CAAC;AACnC,QAAM,aAAa,oBAAI,IAAe;AAEtC,aAAW,QAAQ,aAAa;AAC9B,QAAI,WAA0B;AAG9B,eAAW,OAAO,YAAY;AAC5B,UAAI,cAAc;AAClB,iBAAW,UAAU,KAAK,SAAS;AACjC,YAAI,MAAM,WAAW,KAAK,KAAK,MAAM,CAAC,GAAG;AACvC,wBAAc;AACd;AAAA,QACF;AAAA,MACF;AACA,UAAI,aAAa;AACf,mBAAW;AACX;AAAA,MACF;AAAA,IACF;AAEA,QAAI,aAAa,MAAM;AACrB;AAAA,IACF;AAEA,QAAI,WAAW,IAAI,KAAK,IAAI,GAAG;AAC7B;AAAA,IACF;AACA,eAAW,IAAI,KAAK,IAAI;AAGxB,QAAI,eAAe,KAAK,aAAa,QAAQ;AAC7C,QAAI,iBAAiB,QAAQ,CAAE,MAAM,WAAW,YAAY,GAAI;AAC9D,qBAAe;AAAA,IACjB;AAEA,UAAM,eAAe,KAAK,YACtB,MAAM,eAAe,KAAK,SAAS,IACnC;AAEJ,aAAS,KAAK;AAAA,MACZ,MAAM,KAAK;AAAA,MACX,eAAe,KAAK,cAAc,QAAQ;AAAA,MAC1C;AAAA,MACA;AAAA,MACA,qBAAqB,KAAK;AAAA,IAC5B,CAAC;AAAA,EACH;AAGA,WAAS,KAAK;AAAA,IACZ,MAAM;AAAA,IACN,eAAe,KAAK,cAAc,eAAe,UAAU;AAAA,IAC3D,cAAc;AAAA,IACd,cAAc;AAAA,IACd,qBAAqB;AAAA,EACvB,CAAC;AAED,SAAO;AACT;;;AC9OA;AAeO,SAAS,kBACd,OACA,UACA,SACQ;AACR,MAAI,CAAC,YAAY,SAAS,KAAK,MAAM,IAAI;AACvC,UAAM,IAAI,MAAM,4BAA4B;AAAA,EAC9C;AACA,MAAI,CAAC,WAAW,QAAQ,KAAK,MAAM,IAAI;AACrC,UAAM,IAAI,MAAM,2BAA2B;AAAA,EAC7C;AAEA,UAAQ,MAAM,MAAM;AAAA,IAClB,KAAK;AACH,aAAO,KAAK;AAAA,QACV;AAAA
,UACE,YAAY;AAAA,YACV,YAAY;AAAA,cACV,MAAM;AAAA,cACN,KAAK;AAAA,cACL,SAAS;AAAA,gBACP,eAAe,UAAU,OAAO;AAAA,cAClC;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IAEF,KAAK,SAAS;AAGZ,YAAM,eAAe,SAClB,QAAQ,OAAO,MAAM,EACrB,QAAQ,MAAM,KAAK,EACnB,QAAQ,OAAO,KAAK,EACpB,QAAQ,OAAO,KAAK,EACpB,QAAQ,OAAO,KAAK;AACvB,aAAO;AAAA,QACL;AAAA,QACA,UAAU,YAAY;AAAA,QACtB;AAAA,QACA;AAAA,MACF,EAAE,KAAK,IAAI;AAAA,IACb;AAAA,IAEA,KAAK;AACH,aAAO,KAAK;AAAA,QACV;AAAA,UACE,YAAY;AAAA,YACV,YAAY;AAAA,cACV,SAAS;AAAA,cACT,SAAS;AAAA,gBACP,eAAe,UAAU,OAAO;AAAA,cAClC;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IAEF,KAAK;AACH,aAAO,KAAK;AAAA,QACV;AAAA,UACE,YAAY;AAAA,YACV,YAAY;AAAA,cACV,KAAK;AAAA,cACL,SAAS;AAAA,gBACP,eAAe,UAAU,OAAO;AAAA,cAClC;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IAEF,KAAK;AACH,aAAO,KAAK;AAAA,QACV;AAAA,UACE,YAAY;AAAA,YACV,YAAY;AAAA,cACV,WAAW;AAAA,cACX,SAAS;AAAA,gBACP,eAAe,UAAU,OAAO;AAAA,cAClC;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IAEF,KAAK;AACH,aAAO,KAAK;AAAA,QACV;AAAA,UACE,YAAY;AAAA,YACV,YAAY;AAAA,cACV,KAAK;AAAA,cACL,SAAS;AAAA,gBACP,eAAe,UAAU,OAAO;AAAA,cAClC;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IAEF,SAAS;AACP,YAAM,cAAqB,MAAM;AACjC,YAAM,IAAI,MAAM,kBAAkB,WAAW,EAAE;AAAA,IACjD;AAAA,EACF;AACF;AAUA,SAAS,cAA0B;AACjC,SAAO;AAAA,IACL,OAAO;AAAA,IACP,KAAK;AAAA,EACP;AACF;AAEA,SAAS,cAA0B;AACjC,SAAO;AAAA,IACL,OAAO;AAAA,IACP,KAAK;AAAA,EACP;AACF;AAeO,SAAS,oBACd,OACA,UACQ;AACR,MAAI,CAAC,YAAY,SAAS,KAAK,MAAM,IAAI;AACvC,UAAM,IAAI,MAAM,4BAA4B;AAAA,EAC9C;AAEA,QAAM,UAAU;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA,iDAAiD,QAAQ;AAAA,IACzD;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,EAAE,KAAK,IAAI;AAEX,UAAQ,MAAM,MAAM;AAAA,IAClB,KAAK,UAAU;AACb,YAAM,IAAI,YAAY;AACtB,aAAO,GAAG,EAAE,KAAK;AAAA,EAAK,OAAO,GAAG,EAAE,GAAG;AAAA;AAAA,IACvC;AAAA,IAEA,KAAK,SAAS;AACZ,YAAM,IAAI,YAAY;AACtB,aAAO,GAAG,EAAE,KAAK;AAAA,EAAK,OAAO,GAAG,EAAE,GAAG;AAAA;AAAA,IACvC;AAAA,IAEA,KAAK,UAAU;
AACb,YAAM,IAAI,YAAY;AACtB,aAAO,GAAG,EAAE,KAAK;AAAA,EAAK,OAAO,GAAG,EAAE,GAAG;AAAA;AAAA,IACvC;AAAA,IAEA,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IAET,SAAS;AACP,YAAM,cAAqB,MAAM;AACjC,YAAM,IAAI,MAAM,kBAAkB,WAAW,EAAE;AAAA,IACjD;AAAA,EACF;AACF;;;AC3NA;AAAA,SAAS,OAAO,OAAO,UAAU,iBAAiB;AAClD,SAAS,WAAAC,UAAS,YAAY,QAAAC,aAAY;AAI1C,IAAM,aAAa;AACnB,IAAM,WAAW;AAGjB,IAAM,aAAa;AACnB,IAAM,WAAW;AAOjB,SAAS,kBAAkB,KAAuB;AAChD,QAAM,OAAQ,IAA8B;AAC5C,SAAO,SAAS,YAAY,SAAS,WAAW,SAAS;AAC3D;AAeA,eAAsB,eACpB,OACA,SAEA,aACe;AACf,MAAI,MAAM,kBAAkB,MAAM;AAChC;AAAA,EACF;AAEA,QAAM,aAAa,MAAM;AACzB,QAAM,YAAYD,SAAQ,UAAU;AAEpC,MAAI;AACF,UAAM,MAAM,WAAW,EAAE,WAAW,KAAK,CAAC;AAAA,EAC5C,SAAS,KAAc;AACrB,QAAI,kBAAkB,GAAG,GAAG;AAC1B,cAAQ,OAAO;AAAA,QACb,oCAAoC,SAAS;AAAA;AAAA,MAC/C;AACA;AAAA,IACF;AACA,UAAM;AAAA,EACR;AAEA,MAAI;AACF,UAAM,UAAU,YAAY,SAAS,EAAE,MAAM,IAAM,CAAC;AAAA,EACtD,SAAS,KAAc;AACrB,QAAI,kBAAkB,GAAG,GAAG;AAC1B,cAAQ,OAAO;AAAA,QACb,qCAAqC,UAAU;AAAA;AAAA,MACjD;AACA;AAAA,IACF;AACA,UAAM;AAAA,EACR;AAIA,MAAI;AACF,UAAM,MAAM,YAAY,GAAK;AAAA,EAC/B,QAAQ;AAAA,EAER;AACF;AAWA,SAAS,qBACP,OAC6C;AAC7C,MAAI,WAAW;AACf,MAAI,SAAS;AAEb,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,UAAM,UAAU,MAAM,CAAC,EAAE,KAAK;AAC9B,QAAI,YAAY,cAAc,YAAY,YAAY;AACpD,iBAAW;AAAA,IACb,WAAW,YAAY,YAAY,YAAY,UAAU;AACvD,UAAI,aAAa,IAAI;AACnB,iBAAS;AACT;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,MAAI,aAAa,MAAM,WAAW,IAAI;AACpC,WAAO;AAAA,EACT;AAEA,SAAO,EAAE,UAAU,OAAO;AAC5B;AAiBA,eAAsB,kBACpB,OACA,SAEA,aACe;AACf,MAAI,MAAM,iBAAiB,MAAM;AAC/B;AAAA,EACF;AAGA,MAAI,YAAY,IAAI;AAClB;AAAA,EACF;AAEA,QAAM,WAAW,MAAM;AAEvB,MAAI,kBAAiC;AACrC,MAAI;AACF,sBAAkB,MAAM,SAAS,UAAU,OAAO;AAAA,EACpD,SAAS,KAAc;AACrB,UAAM,OAAQ,IAA8B;AAC5C,QAAI,SAAS,UAAU;AACrB,UAAI,kBAAkB,GAAG,GAAG;AAC1B,gBAAQ,OAAO;AAAA,UACb,kCAAkC,QAAQ;AAAA;AAAA,QAC5C;AACA;AAAA,MACF;AACA,YAAM;AAAA,IACR;AAAA,EACF;AAGA,MAAI,oBAAoB,MAAM;AAC5B,QAAI;AACF,YAAM,MAAMA,SAAQ,QAAQ,GAAG,EAAE,WAAW,KAAK,CAAC;AAClD,YAAM,UAAU,UAAU,SAAS,OAAO;AAAA,IAC5C,SAAS,KAAc;AACrB,UAAI,kBAAkB,GAAG,GAAG;AAC1B,gBAAQ,OAAO;AAAA,UACb,mCAAmC,QAAQ;AAAA;AA
AA,QAC7C;AACA;AAAA,MACF;AACA,YAAM;AAAA,IACR;AACA;AAAA,EACF;AAGA,QAAM,QAAQ,gBAAgB,MAAM,IAAI;AACxC,QAAM,aAAa,qBAAqB,KAAK;AAE7C,MAAI;AACJ,MAAI,eAAe,MAAM;AAEvB,UAAM,SAAS,MAAM,MAAM,GAAG,WAAW,QAAQ;AACjD,UAAM,QAAQ,MAAM,MAAM,WAAW,SAAS,CAAC;AAE/C,UAAM,gCAAgC,QAAQ,SAAS,IAAI,IACvD,QAAQ,MAAM,GAAG,EAAE,IACnB;AACJ,iBAAa,CAAC,GAAG,QAAQ,+BAA+B,GAAG,KAAK,EAAE,KAAK,IAAI;AAAA,EAC7E,OAAO;AAEL,UAAM,YAAY,gBAAgB,SAAS,IAAI,IAAI,OAAO;AAC1D,iBAAa,kBAAkB,YAAY;AAAA,EAC7C;AAEA,MAAI;AACF,UAAM,UAAU,UAAU,YAAY,OAAO;AAAA,EAC/C,SAAS,KAAc;AACrB,QAAI,kBAAkB,GAAG,GAAG;AAC1B,cAAQ,OAAO;AAAA,QACb,mCAAmC,QAAQ;AAAA;AAAA,MAC7C;AACA;AAAA,IACF;AACA,UAAM;AAAA,EACR;AACF;AAcA,eAAsB,gBACpB,OACA,aACe;AACf,QAAM,gBAAgBC,MAAK,aAAa,YAAY;AAIpD,QAAM,gBAAgB,MAAM,OAAO,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC;AAExD,MAAI,cAAc,WAAW,GAAG;AAC9B;AAAA,EACF;AAEA,MAAI,kBAAkB;AACtB,MAAI;AACF,sBAAkB,MAAM,SAAS,eAAe,OAAO;AAAA,EACzD,SAAS,KAAc;AACrB,UAAM,OAAQ,IAA8B;AAC5C,QAAI,SAAS,UAAU;AACrB,UAAI,kBAAkB,GAAG,GAAG;AAC1B,gBAAQ,OAAO;AAAA,UACb;AAAA;AAAA,QACF;AACA;AAAA,MACF;AACA,YAAM;AAAA,IACR;AAAA,EACF;AAGA,QAAM,gBAAgB,gBACnB,MAAM,IAAI,EACV,IAAI,CAAC,SAAS,KAAK,KAAK,CAAC,EACzB,OAAO,CAAC,SAAS,SAAS,EAAE;AAE/B,QAAM,cAAc,IAAI,IAAI,aAAa;AAKzC,QAAM,QAAQ,cACX,IAAI,CAAC,MAAM,EAAE,KAAK,EAAE,QAAQ,OAAO,GAAG,CAAC,EACvC,OAAO,CAAC,MAAM,MAAM,MAAM,CAAC,YAAY,IAAI,CAAC,CAAC;AAEhD,MAAI,MAAM,WAAW,GAAG;AACtB;AAAA,EACF;AAGA,MAAI,iBAAiB;AACrB,MAAI,eAAe,SAAS,KAAK,CAAC,eAAe,SAAS,IAAI,GAAG;AAC/D,sBAAkB;AAAA,EACpB;AAEA,oBAAkB,MAAM,KAAK,IAAI,IAAI;AAErC,MAAI;AACF,UAAM,UAAU,eAAe,gBAAgB,OAAO;AAAA,EACxD,SAAS,KAAc;AACrB,QAAI,kBAAkB,GAAG,GAAG;AAC1B,cAAQ,OAAO;AAAA,QACb;AAAA;AAAA,MACF;AACA;AAAA,IACF;AACA,UAAM;AAAA,EACR;AACF;","names":["resolve","dirname","join"]}
1
+ {"version":3,"sources":["../src/agent-detection/detect.ts","../src/agent-detection/configs.ts","../src/agent-detection/inject.ts"],"sourcesContent":["import { execFile } from \"node:child_process\";\nimport { access, stat } from \"node:fs/promises\";\nimport { dirname, join, resolve } from \"node:path\";\nimport { homedir } from \"node:os\";\nimport { constants } from \"node:fs\";\n\n/**\n * Describes an AI coding agent detected in a project.\n */\nexport interface DetectedAgent {\n name: \"claude\" | \"codex\" | \"gemini\" | \"cursor\" | \"windsurf\" | \"generic\";\n mcpConfigPath: string | null;\n infoFilePath: string | null;\n cliAvailable: boolean;\n registrationCommand: string | null;\n}\n\ntype AgentName = DetectedAgent[\"name\"];\n\ninterface AgentRule {\n name: AgentName;\n /** Paths relative to a search directory that indicate this agent is present. */\n markers: string[];\n /** Function to compute the MCP config path given the directory where markers were found. */\n mcpConfigPath: (markerDir: string) => string;\n /** Function to compute the info file path, or null. */\n infoFilePath: (markerDir: string) => string | null;\n /** CLI binary name to check in PATH, or null if no CLI exists. */\n cliBinary: string | null;\n /** Registration command template, or null. 
*/\n registrationCommand: string | null;\n}\n\nconst AGENT_RULES: AgentRule[] = [\n {\n name: \"claude\",\n markers: [\".claude\", \"CLAUDE.md\"],\n mcpConfigPath: (dir) => join(dir, \".mcp.json\"),\n infoFilePath: (dir) => join(dir, \"CLAUDE.md\"),\n cliBinary: \"claude\",\n registrationCommand: \"npx glasstrace mcp add --agent claude\",\n },\n {\n name: \"codex\",\n markers: [\"codex.md\", \".codex\"],\n mcpConfigPath: (dir) => join(dir, \".codex\", \"config.toml\"),\n infoFilePath: (dir) => join(dir, \"codex.md\"),\n cliBinary: \"codex\",\n registrationCommand: \"npx glasstrace mcp add --agent codex\",\n },\n {\n name: \"gemini\",\n markers: [\".gemini\"],\n mcpConfigPath: (dir) => join(dir, \".gemini\", \"settings.json\"),\n infoFilePath: () => null,\n cliBinary: \"gemini\",\n registrationCommand: \"npx glasstrace mcp add --agent gemini\",\n },\n {\n name: \"cursor\",\n markers: [\".cursor\", \".cursorrules\"],\n mcpConfigPath: (dir) => join(dir, \".cursor\", \"mcp.json\"),\n infoFilePath: (dir) => join(dir, \".cursorrules\"),\n cliBinary: null,\n registrationCommand: \"npx glasstrace mcp add --agent cursor\",\n },\n {\n name: \"windsurf\",\n markers: [\".windsurfrules\", \".windsurf\"],\n mcpConfigPath: () =>\n join(homedir(), \".codeium\", \"windsurf\", \"mcp_config.json\"),\n infoFilePath: (dir) => join(dir, \".windsurfrules\"),\n cliBinary: null,\n registrationCommand: \"npx glasstrace mcp add --agent windsurf\",\n },\n];\n\n/**\n * Checks whether a path exists and is accessible, following symlinks.\n * Returns false on permission errors or missing paths.\n *\n * @param mode - The access mode to check (defaults to R_OK for marker detection).\n */\nasync function pathExists(\n path: string,\n mode: number = constants.R_OK,\n): Promise<boolean> {\n try {\n await access(path, mode);\n return true;\n } catch {\n return false;\n }\n}\n\n/**\n * Finds the git root directory by walking up from the given path.\n * Returns the starting directory if no `.git` is 
found.\n */\nasync function findGitRoot(startDir: string): Promise<string> {\n let current = resolve(startDir);\n\n while (true) {\n if (await pathExists(join(current, \".git\"), constants.F_OK)) {\n return current;\n }\n const parent = dirname(current);\n if (parent === current) {\n // Reached filesystem root without finding .git\n break;\n }\n current = parent;\n }\n\n return resolve(startDir);\n}\n\n/**\n * Returns true if a CLI binary is available on PATH.\n * Uses `which` on Unix and `where` on Windows, via execFile (no shell injection).\n */\nfunction isCliAvailable(binary: string): Promise<boolean> {\n return new Promise((resolve) => {\n const command = process.platform === \"win32\" ? \"where\" : \"which\";\n execFile(command, [binary], (error) => {\n resolve(error === null);\n });\n });\n}\n\n/**\n * Detects AI coding agents present in a project by scanning for marker\n * files and directories. Walks up from projectRoot to the git root to\n * support monorepo layouts.\n *\n * Always includes a \"generic\" fallback entry.\n *\n * @param projectRoot - Absolute or relative path to the project directory.\n * @returns Array of detected agents, with generic always last.\n * @throws If projectRoot does not exist or is not a directory.\n */\nexport async function detectAgents(\n projectRoot: string,\n): Promise<DetectedAgent[]> {\n const resolvedRoot = resolve(projectRoot);\n\n // Validate projectRoot exists and is a directory\n let rootStat;\n try {\n rootStat = await stat(resolvedRoot);\n } catch (err) {\n const code = (err as NodeJS.ErrnoException).code;\n throw new Error(\n `projectRoot does not exist: ${resolvedRoot}` +\n (code ? 
` (${code})` : \"\"),\n );\n }\n\n if (!rootStat.isDirectory()) {\n throw new Error(`projectRoot is not a directory: ${resolvedRoot}`);\n }\n\n const gitRoot = await findGitRoot(resolvedRoot);\n\n // Collect unique directories to search: projectRoot and every ancestor up to gitRoot\n const searchDirs: string[] = [];\n let current = resolvedRoot;\n while (true) {\n searchDirs.push(current);\n if (current === gitRoot) {\n break;\n }\n const parent = dirname(current);\n if (parent === current) {\n break;\n }\n current = parent;\n }\n\n const detected: DetectedAgent[] = [];\n const seenAgents = new Set<AgentName>();\n\n for (const rule of AGENT_RULES) {\n let foundDir: string | null = null;\n\n // Check each search directory for markers\n for (const dir of searchDirs) {\n let markerFound = false;\n for (const marker of rule.markers) {\n if (await pathExists(join(dir, marker))) {\n markerFound = true;\n break;\n }\n }\n if (markerFound) {\n foundDir = dir;\n break;\n }\n }\n\n if (foundDir === null) {\n continue;\n }\n\n if (seenAgents.has(rule.name)) {\n continue;\n }\n seenAgents.add(rule.name);\n\n // Determine info file path — only include if the file actually exists\n let infoFilePath = rule.infoFilePath(foundDir);\n if (infoFilePath !== null && !(await pathExists(infoFilePath))) {\n infoFilePath = null;\n }\n\n const cliAvailable = rule.cliBinary\n ? 
await isCliAvailable(rule.cliBinary)\n : false;\n\n detected.push({\n name: rule.name,\n mcpConfigPath: rule.mcpConfigPath(foundDir),\n infoFilePath,\n cliAvailable,\n registrationCommand: rule.registrationCommand,\n });\n }\n\n // Always include generic fallback\n detected.push({\n name: \"generic\",\n mcpConfigPath: join(resolvedRoot, \".glasstrace\", \"mcp.json\"),\n infoFilePath: null,\n cliAvailable: false,\n registrationCommand: null,\n });\n\n return detected;\n}\n","import type { DetectedAgent } from \"./detect.js\";\n\n/**\n * Generates the MCP server configuration content for a given agent.\n *\n * The output is the full file content suitable for writing to the agent's\n * MCP config file. Auth tokens are intentionally included here because\n * MCP config files are local-only and required for server authentication.\n *\n * @param agent - The detected agent to generate config for.\n * @param endpoint - The Glasstrace MCP endpoint URL.\n * @param anonKey - The anonymous API key for authentication.\n * @returns The formatted configuration string.\n * @throws If endpoint or anonKey is empty.\n */\nexport function generateMcpConfig(\n agent: DetectedAgent,\n endpoint: string,\n anonKey: string,\n): string {\n if (!endpoint || endpoint.trim() === \"\") {\n throw new Error(\"endpoint must not be empty\");\n }\n if (!anonKey || anonKey.trim() === \"\") {\n throw new Error(\"anonKey must not be empty\");\n }\n\n switch (agent.name) {\n case \"claude\":\n return JSON.stringify(\n {\n mcpServers: {\n glasstrace: {\n type: \"http\",\n url: endpoint,\n headers: {\n Authorization: `Bearer ${anonKey}`,\n },\n },\n },\n },\n null,\n 2,\n );\n\n case \"codex\": {\n // Escape TOML basic string special characters in the endpoint value.\n // TOML requires backslashes, quotes, and control characters to be escaped.\n const safeEndpoint = endpoint\n .replace(/\\\\/g, \"\\\\\\\\\")\n .replace(/\"/g, '\\\\\"')\n .replace(/\\n/g, \"\\\\n\")\n .replace(/\\r/g, \"\\\\r\")\n 
.replace(/\\t/g, \"\\\\t\");\n return [\n \"[mcp_servers.glasstrace]\",\n `url = \"${safeEndpoint}\"`,\n `bearer_token_env_var = \"GLASSTRACE_API_KEY\"`,\n \"\",\n ].join(\"\\n\");\n }\n\n case \"gemini\":\n return JSON.stringify(\n {\n mcpServers: {\n glasstrace: {\n httpUrl: endpoint,\n headers: {\n Authorization: `Bearer ${anonKey}`,\n },\n },\n },\n },\n null,\n 2,\n );\n\n case \"cursor\":\n return JSON.stringify(\n {\n mcpServers: {\n glasstrace: {\n url: endpoint,\n headers: {\n Authorization: `Bearer ${anonKey}`,\n },\n },\n },\n },\n null,\n 2,\n );\n\n case \"windsurf\":\n return JSON.stringify(\n {\n mcpServers: {\n glasstrace: {\n serverUrl: endpoint,\n headers: {\n Authorization: `Bearer ${anonKey}`,\n },\n },\n },\n },\n null,\n 2,\n );\n\n case \"generic\":\n return JSON.stringify(\n {\n mcpServers: {\n glasstrace: {\n url: endpoint,\n headers: {\n Authorization: `Bearer ${anonKey}`,\n },\n },\n },\n },\n null,\n 2,\n );\n\n default: {\n const _exhaustive: never = agent.name;\n throw new Error(`Unknown agent: ${_exhaustive}`);\n }\n }\n}\n\n/**\n * Marker pair used to delimit the Glasstrace section in agent info files.\n */\ninterface MarkerPair {\n start: string;\n end: string;\n}\n\nfunction htmlMarkers(): MarkerPair {\n return {\n start: \"<!-- glasstrace:mcp:start -->\",\n end: \"<!-- glasstrace:mcp:end -->\",\n };\n}\n\nfunction hashMarkers(): MarkerPair {\n return {\n start: \"# glasstrace:mcp:start\",\n end: \"# glasstrace:mcp:end\",\n };\n}\n\n/**\n * Generates informational content for an agent's instruction file.\n *\n * This content is designed to be appended to or inserted into agent-specific\n * instruction files (CLAUDE.md, .cursorrules, codex.md). It contains ONLY\n * the endpoint URL, tool descriptions, and setup instructions. 
Auth tokens\n * are NEVER included in this output.\n *\n * @param agent - The detected agent to generate info for.\n * @param endpoint - The Glasstrace MCP endpoint URL.\n * @returns The formatted info section string, or empty string for agents without a supported info file format.\n * @throws If endpoint is empty.\n */\nexport function generateInfoSection(\n agent: DetectedAgent,\n endpoint: string,\n): string {\n if (!endpoint || endpoint.trim() === \"\") {\n throw new Error(\"endpoint must not be empty\");\n }\n\n const content = [\n \"\",\n \"## Glasstrace MCP Integration\",\n \"\",\n `Glasstrace is configured as an MCP server at: ${endpoint}`,\n \"\",\n \"Available tools:\",\n \"- `get_latest_error` - Get the most recent error trace from the current session\",\n \"- `get_trace` - Get a specific trace by ID or URL pattern\",\n \"- `get_root_cause` - Get the full span tree and root cause analysis for an error\",\n \"- `get_test_suggestions` - Get test suggestions based on recent errors\",\n \"- `get_session_timeline` - Get the timeline of all traces in the current session\",\n \"\",\n \"To reconfigure, run: `npx glasstrace mcp add`\",\n \"\",\n ].join(\"\\n\");\n\n switch (agent.name) {\n case \"claude\": {\n const m = htmlMarkers();\n return `${m.start}\\n${content}${m.end}\\n`;\n }\n\n case \"codex\": {\n const m = htmlMarkers();\n return `${m.start}\\n${content}${m.end}\\n`;\n }\n\n case \"cursor\": {\n const m = hashMarkers();\n return `${m.start}\\n${content}${m.end}\\n`;\n }\n\n case \"gemini\":\n case \"windsurf\":\n case \"generic\":\n return \"\";\n\n default: {\n const _exhaustive: never = agent.name;\n throw new Error(`Unknown agent: ${_exhaustive}`);\n }\n }\n}\n","import { chmod, mkdir, readFile, writeFile } from \"node:fs/promises\";\nimport { dirname, isAbsolute, join } from \"node:path\";\nimport type { DetectedAgent } from \"./detect.js\";\n\n/** HTML comment markers used in markdown files (.md). 
*/\nconst HTML_START = \"<!-- glasstrace:mcp:start -->\";\nconst HTML_END = \"<!-- glasstrace:mcp:end -->\";\n\n/** Hash-prefixed markers used in plain text files (.cursorrules). */\nconst HASH_START = \"# glasstrace:mcp:start\";\nconst HASH_END = \"# glasstrace:mcp:end\";\n\n/**\n * Determines whether an error is a filesystem permission or read-only error.\n * Covers EACCES (permission denied), EPERM (operation not permitted), and\n * EROFS (read-only filesystem) to handle containerized/mounted environments.\n */\nfunction isPermissionError(err: unknown): boolean {\n const code = (err as NodeJS.ErrnoException).code;\n return code === \"EACCES\" || code === \"EPERM\" || code === \"EROFS\";\n}\n\n/**\n * Writes MCP configuration content to an agent's config file path.\n *\n * Creates parent directories as needed and sets file permissions to 0o600\n * (owner read/write only) since config files may contain auth tokens.\n *\n * Fails gracefully: logs a warning to stderr on permission errors instead\n * of throwing.\n *\n * @param agent - The detected agent whose config path to write to.\n * @param content - The full configuration file content.\n * @param projectRoot - The project root (reserved for future use).\n */\nexport async function writeMcpConfig(\n agent: DetectedAgent,\n content: string,\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n projectRoot: string,\n): Promise<void> {\n if (agent.mcpConfigPath === null) {\n return;\n }\n\n const configPath = agent.mcpConfigPath;\n const parentDir = dirname(configPath);\n\n try {\n await mkdir(parentDir, { recursive: true });\n } catch (err: unknown) {\n if (isPermissionError(err)) {\n process.stderr.write(\n `Warning: cannot create directory ${parentDir}: permission denied\\n`,\n );\n return;\n }\n throw err;\n }\n\n try {\n await writeFile(configPath, content, { mode: 0o600 });\n } catch (err: unknown) {\n if (isPermissionError(err)) {\n process.stderr.write(\n `Warning: cannot write config file 
${configPath}: permission denied\\n`,\n );\n return;\n }\n throw err;\n }\n\n // Ensure permissions are set even if the file already existed\n // (writeFile mode only applies to newly created files on some platforms)\n try {\n await chmod(configPath, 0o600);\n } catch {\n // Best-effort; the writeFile mode should have handled this\n }\n}\n\n/**\n * Finds existing marker boundaries in file content.\n *\n * Searches for both HTML comment and hash-prefixed marker formats,\n * since an existing file might use either convention.\n *\n * @returns The start and end indices (line-level) and the matched markers,\n * or null if no complete marker pair is found.\n */\nfunction findMarkerBoundaries(\n lines: string[],\n): { startIdx: number; endIdx: number } | null {\n let startIdx = -1;\n let endIdx = -1;\n\n for (let i = 0; i < lines.length; i++) {\n const trimmed = lines[i].trim();\n if (trimmed === HTML_START || trimmed === HASH_START) {\n startIdx = i;\n } else if (trimmed === HTML_END || trimmed === HASH_END) {\n if (startIdx !== -1) {\n endIdx = i;\n break;\n }\n }\n }\n\n if (startIdx === -1 || endIdx === -1) {\n return null;\n }\n\n return { startIdx, endIdx };\n}\n\n/**\n * Injects an informational section into an agent's instruction file.\n *\n * Uses marker comments to enable idempotent updates:\n * - If the file contains marker pairs, replaces content between them.\n * - If the file exists but has no markers, appends the section.\n * - If the file does not exist, creates it with the section content.\n *\n * Fails gracefully: logs a warning to stderr on read-only files instead\n * of throwing.\n *\n * @param agent - The detected agent whose info file to update.\n * @param content - The section content (including markers).\n * @param projectRoot - The project root (reserved for future use).\n */\nexport async function injectInfoSection(\n agent: DetectedAgent,\n content: string,\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n projectRoot: 
string,\n): Promise<void> {\n if (agent.infoFilePath === null) {\n return;\n }\n\n // Empty content means nothing to inject (e.g., agents without info sections)\n if (content === \"\") {\n return;\n }\n\n const filePath = agent.infoFilePath;\n\n let existingContent: string | null = null;\n try {\n existingContent = await readFile(filePath, \"utf-8\");\n } catch (err: unknown) {\n const code = (err as NodeJS.ErrnoException).code;\n if (code !== \"ENOENT\") {\n if (isPermissionError(err)) {\n process.stderr.write(\n `Warning: cannot read info file ${filePath}: permission denied\\n`,\n );\n return;\n }\n throw err;\n }\n }\n\n // File does not exist — create with section content\n if (existingContent === null) {\n try {\n await mkdir(dirname(filePath), { recursive: true });\n await writeFile(filePath, content, \"utf-8\");\n } catch (err: unknown) {\n if (isPermissionError(err)) {\n process.stderr.write(\n `Warning: cannot write info file ${filePath}: permission denied\\n`,\n );\n return;\n }\n throw err;\n }\n return;\n }\n\n // File exists — check for markers\n const lines = existingContent.split(\"\\n\");\n const boundaries = findMarkerBoundaries(lines);\n\n let newContent: string;\n if (boundaries !== null) {\n // Replace everything from start marker through end marker (inclusive)\n const before = lines.slice(0, boundaries.startIdx);\n const after = lines.slice(boundaries.endIdx + 1);\n // content already includes markers and trailing newline\n const contentWithoutTrailingNewline = content.endsWith(\"\\n\")\n ? content.slice(0, -1)\n : content;\n newContent = [...before, contentWithoutTrailingNewline, ...after].join(\"\\n\");\n } else {\n // No markers found — append with a blank line separator\n const separator = existingContent.endsWith(\"\\n\") ? 
\"\\n\" : \"\\n\\n\";\n newContent = existingContent + separator + content;\n }\n\n try {\n await writeFile(filePath, newContent, \"utf-8\");\n } catch (err: unknown) {\n if (isPermissionError(err)) {\n process.stderr.write(\n `Warning: cannot write info file ${filePath}: permission denied\\n`,\n );\n return;\n }\n throw err;\n }\n}\n\n/**\n * Ensures that the given paths are listed in the project's `.gitignore`.\n *\n * Only adds entries for paths that are not already present. Creates the\n * `.gitignore` file if it does not exist. Skips absolute paths (e.g.,\n * Windsurf's global config) since those are outside the project tree.\n *\n * Fails gracefully: logs a warning to stderr on permission errors.\n *\n * @param paths - Relative paths to ensure are gitignored.\n * @param projectRoot - The project root directory.\n */\nexport async function updateGitignore(\n paths: string[],\n projectRoot: string,\n): Promise<void> {\n const gitignorePath = join(projectRoot, \".gitignore\");\n\n // Filter out absolute paths — they reference locations outside the project\n // Uses isAbsolute() to handle both POSIX and Windows path formats\n const relativePaths = paths.filter((p) => !isAbsolute(p));\n\n if (relativePaths.length === 0) {\n return;\n }\n\n let existingContent = \"\";\n try {\n existingContent = await readFile(gitignorePath, \"utf-8\");\n } catch (err: unknown) {\n const code = (err as NodeJS.ErrnoException).code;\n if (code !== \"ENOENT\") {\n if (isPermissionError(err)) {\n process.stderr.write(\n `Warning: cannot read .gitignore: permission denied\\n`,\n );\n return;\n }\n throw err;\n }\n }\n\n // Parse existing entries, trimming whitespace for comparison\n const existingLines = existingContent\n .split(\"\\n\")\n .map((line) => line.trim())\n .filter((line) => line !== \"\");\n\n const existingSet = new Set(existingLines);\n\n // Normalize entries: trim whitespace, convert backslashes to forward slashes\n // (git ignore patterns use / as separator; backslash 
is an escape character),\n // drop empties, and deduplicate against existing entries.\n const toAdd = relativePaths\n .map((p) => p.trim().replace(/\\\\/g, \"/\"))\n .filter((p) => p !== \"\" && !existingSet.has(p));\n\n if (toAdd.length === 0) {\n return;\n }\n\n // Ensure file ends with newline before appending\n let updatedContent = existingContent;\n if (updatedContent.length > 0 && !updatedContent.endsWith(\"\\n\")) {\n updatedContent += \"\\n\";\n }\n\n updatedContent += toAdd.join(\"\\n\") + \"\\n\";\n\n try {\n await writeFile(gitignorePath, updatedContent, \"utf-8\");\n } catch (err: unknown) {\n if (isPermissionError(err)) {\n process.stderr.write(\n `Warning: cannot write .gitignore: permission denied\\n`,\n );\n return;\n }\n throw err;\n }\n}\n"],"mappings":";AAAA,SAAS,gBAAgB;AACzB,SAAS,QAAQ,YAAY;AAC7B,SAAS,SAAS,MAAM,eAAe;AACvC,SAAS,eAAe;AACxB,SAAS,iBAAiB;AA6B1B,IAAM,cAA2B;AAAA,EAC/B;AAAA,IACE,MAAM;AAAA,IACN,SAAS,CAAC,WAAW,WAAW;AAAA,IAChC,eAAe,CAAC,QAAQ,KAAK,KAAK,WAAW;AAAA,IAC7C,cAAc,CAAC,QAAQ,KAAK,KAAK,WAAW;AAAA,IAC5C,WAAW;AAAA,IACX,qBAAqB;AAAA,EACvB;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,SAAS,CAAC,YAAY,QAAQ;AAAA,IAC9B,eAAe,CAAC,QAAQ,KAAK,KAAK,UAAU,aAAa;AAAA,IACzD,cAAc,CAAC,QAAQ,KAAK,KAAK,UAAU;AAAA,IAC3C,WAAW;AAAA,IACX,qBAAqB;AAAA,EACvB;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,SAAS,CAAC,SAAS;AAAA,IACnB,eAAe,CAAC,QAAQ,KAAK,KAAK,WAAW,eAAe;AAAA,IAC5D,cAAc,MAAM;AAAA,IACpB,WAAW;AAAA,IACX,qBAAqB;AAAA,EACvB;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,SAAS,CAAC,WAAW,cAAc;AAAA,IACnC,eAAe,CAAC,QAAQ,KAAK,KAAK,WAAW,UAAU;AAAA,IACvD,cAAc,CAAC,QAAQ,KAAK,KAAK,cAAc;AAAA,IAC/C,WAAW;AAAA,IACX,qBAAqB;AAAA,EACvB;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,SAAS,CAAC,kBAAkB,WAAW;AAAA,IACvC,eAAe,MACb,KAAK,QAAQ,GAAG,YAAY,YAAY,iBAAiB;AAAA,IAC3D,cAAc,CAAC,QAAQ,KAAK,KAAK,gBAAgB;AAAA,IACjD,WAAW;AAAA,IACX,qBAAqB;AAAA,EACvB;AACF;AAQA,eAAe,WACb,MACA,OAAe,UAAU,MACP;AAClB,MAAI;AACF,UAAM,OAAO,MAAM,IAAI;AACvB,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAMA,eAAe,YAAY,UAAmC;AAC5D,MAAI,UAAU,QAAQ,QAAQ;AAE9B,SAAO,MAAM
;AACX,QAAI,MAAM,WAAW,KAAK,SAAS,MAAM,GAAG,UAAU,IAAI,GAAG;AAC3D,aAAO;AAAA,IACT;AACA,UAAM,SAAS,QAAQ,OAAO;AAC9B,QAAI,WAAW,SAAS;AAEtB;AAAA,IACF;AACA,cAAU;AAAA,EACZ;AAEA,SAAO,QAAQ,QAAQ;AACzB;AAMA,SAAS,eAAe,QAAkC;AACxD,SAAO,IAAI,QAAQ,CAACA,aAAY;AAC9B,UAAM,UAAU,QAAQ,aAAa,UAAU,UAAU;AACzD,aAAS,SAAS,CAAC,MAAM,GAAG,CAAC,UAAU;AACrC,MAAAA,SAAQ,UAAU,IAAI;AAAA,IACxB,CAAC;AAAA,EACH,CAAC;AACH;AAaA,eAAsB,aACpB,aAC0B;AAC1B,QAAM,eAAe,QAAQ,WAAW;AAGxC,MAAI;AACJ,MAAI;AACF,eAAW,MAAM,KAAK,YAAY;AAAA,EACpC,SAAS,KAAK;AACZ,UAAM,OAAQ,IAA8B;AAC5C,UAAM,IAAI;AAAA,MACR,+BAA+B,YAAY,MACxC,OAAO,KAAK,IAAI,MAAM;AAAA,IAC3B;AAAA,EACF;AAEA,MAAI,CAAC,SAAS,YAAY,GAAG;AAC3B,UAAM,IAAI,MAAM,mCAAmC,YAAY,EAAE;AAAA,EACnE;AAEA,QAAM,UAAU,MAAM,YAAY,YAAY;AAG9C,QAAM,aAAuB,CAAC;AAC9B,MAAI,UAAU;AACd,SAAO,MAAM;AACX,eAAW,KAAK,OAAO;AACvB,QAAI,YAAY,SAAS;AACvB;AAAA,IACF;AACA,UAAM,SAAS,QAAQ,OAAO;AAC9B,QAAI,WAAW,SAAS;AACtB;AAAA,IACF;AACA,cAAU;AAAA,EACZ;AAEA,QAAM,WAA4B,CAAC;AACnC,QAAM,aAAa,oBAAI,IAAe;AAEtC,aAAW,QAAQ,aAAa;AAC9B,QAAI,WAA0B;AAG9B,eAAW,OAAO,YAAY;AAC5B,UAAI,cAAc;AAClB,iBAAW,UAAU,KAAK,SAAS;AACjC,YAAI,MAAM,WAAW,KAAK,KAAK,MAAM,CAAC,GAAG;AACvC,wBAAc;AACd;AAAA,QACF;AAAA,MACF;AACA,UAAI,aAAa;AACf,mBAAW;AACX;AAAA,MACF;AAAA,IACF;AAEA,QAAI,aAAa,MAAM;AACrB;AAAA,IACF;AAEA,QAAI,WAAW,IAAI,KAAK,IAAI,GAAG;AAC7B;AAAA,IACF;AACA,eAAW,IAAI,KAAK,IAAI;AAGxB,QAAI,eAAe,KAAK,aAAa,QAAQ;AAC7C,QAAI,iBAAiB,QAAQ,CAAE,MAAM,WAAW,YAAY,GAAI;AAC9D,qBAAe;AAAA,IACjB;AAEA,UAAM,eAAe,KAAK,YACtB,MAAM,eAAe,KAAK,SAAS,IACnC;AAEJ,aAAS,KAAK;AAAA,MACZ,MAAM,KAAK;AAAA,MACX,eAAe,KAAK,cAAc,QAAQ;AAAA,MAC1C;AAAA,MACA;AAAA,MACA,qBAAqB,KAAK;AAAA,IAC5B,CAAC;AAAA,EACH;AAGA,WAAS,KAAK;AAAA,IACZ,MAAM;AAAA,IACN,eAAe,KAAK,cAAc,eAAe,UAAU;AAAA,IAC3D,cAAc;AAAA,IACd,cAAc;AAAA,IACd,qBAAqB;AAAA,EACvB,CAAC;AAED,SAAO;AACT;;;AC/NO,SAAS,kBACd,OACA,UACA,SACQ;AACR,MAAI,CAAC,YAAY,SAAS,KAAK,MAAM,IAAI;AACvC,UAAM,IAAI,MAAM,4BAA4B;AAAA,EAC9C;AACA,MAAI,CAAC,WAAW,QAAQ,KAAK,MAAM,IAAI;AACrC,UAAM,IAAI,MAAM,2BAA2B;AAAA,EAC7C;AAEA,UAAQ,MAAM,MAAM;AAAA,IAClB,KAAK;AACH,aAAO,KAAK;AAAA,QACV;AAAA,UACE,YAAY;AAA
A,YACV,YAAY;AAAA,cACV,MAAM;AAAA,cACN,KAAK;AAAA,cACL,SAAS;AAAA,gBACP,eAAe,UAAU,OAAO;AAAA,cAClC;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IAEF,KAAK,SAAS;AAGZ,YAAM,eAAe,SAClB,QAAQ,OAAO,MAAM,EACrB,QAAQ,MAAM,KAAK,EACnB,QAAQ,OAAO,KAAK,EACpB,QAAQ,OAAO,KAAK,EACpB,QAAQ,OAAO,KAAK;AACvB,aAAO;AAAA,QACL;AAAA,QACA,UAAU,YAAY;AAAA,QACtB;AAAA,QACA;AAAA,MACF,EAAE,KAAK,IAAI;AAAA,IACb;AAAA,IAEA,KAAK;AACH,aAAO,KAAK;AAAA,QACV;AAAA,UACE,YAAY;AAAA,YACV,YAAY;AAAA,cACV,SAAS;AAAA,cACT,SAAS;AAAA,gBACP,eAAe,UAAU,OAAO;AAAA,cAClC;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IAEF,KAAK;AACH,aAAO,KAAK;AAAA,QACV;AAAA,UACE,YAAY;AAAA,YACV,YAAY;AAAA,cACV,KAAK;AAAA,cACL,SAAS;AAAA,gBACP,eAAe,UAAU,OAAO;AAAA,cAClC;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IAEF,KAAK;AACH,aAAO,KAAK;AAAA,QACV;AAAA,UACE,YAAY;AAAA,YACV,YAAY;AAAA,cACV,WAAW;AAAA,cACX,SAAS;AAAA,gBACP,eAAe,UAAU,OAAO;AAAA,cAClC;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IAEF,KAAK;AACH,aAAO,KAAK;AAAA,QACV;AAAA,UACE,YAAY;AAAA,YACV,YAAY;AAAA,cACV,KAAK;AAAA,cACL,SAAS;AAAA,gBACP,eAAe,UAAU,OAAO;AAAA,cAClC;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IAEF,SAAS;AACP,YAAM,cAAqB,MAAM;AACjC,YAAM,IAAI,MAAM,kBAAkB,WAAW,EAAE;AAAA,IACjD;AAAA,EACF;AACF;AAUA,SAAS,cAA0B;AACjC,SAAO;AAAA,IACL,OAAO;AAAA,IACP,KAAK;AAAA,EACP;AACF;AAEA,SAAS,cAA0B;AACjC,SAAO;AAAA,IACL,OAAO;AAAA,IACP,KAAK;AAAA,EACP;AACF;AAeO,SAAS,oBACd,OACA,UACQ;AACR,MAAI,CAAC,YAAY,SAAS,KAAK,MAAM,IAAI;AACvC,UAAM,IAAI,MAAM,4BAA4B;AAAA,EAC9C;AAEA,QAAM,UAAU;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA,iDAAiD,QAAQ;AAAA,IACzD;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,EAAE,KAAK,IAAI;AAEX,UAAQ,MAAM,MAAM;AAAA,IAClB,KAAK,UAAU;AACb,YAAM,IAAI,YAAY;AACtB,aAAO,GAAG,EAAE,KAAK;AAAA,EAAK,OAAO,GAAG,EAAE,GAAG;AAAA;AAAA,IACvC;AAAA,IAEA,KAAK,SAAS;AACZ,YAAM,IAAI,YAAY;AACtB,aAAO,GAAG,EAAE,KAAK;AAAA,EAAK,OAAO,GAAG,EAAE,GAAG;AAAA;AAAA,IACvC;AAAA,IAEA,KAAK,UAAU;AACb,YAAM,IAAI
,YAAY;AACtB,aAAO,GAAG,EAAE,KAAK;AAAA,EAAK,OAAO,GAAG,EAAE,GAAG;AAAA;AAAA,IACvC;AAAA,IAEA,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IAET,SAAS;AACP,YAAM,cAAqB,MAAM;AACjC,YAAM,IAAI,MAAM,kBAAkB,WAAW,EAAE;AAAA,IACjD;AAAA,EACF;AACF;;;AC3NA,SAAS,OAAO,OAAO,UAAU,iBAAiB;AAClD,SAAS,WAAAC,UAAS,YAAY,QAAAC,aAAY;AAI1C,IAAM,aAAa;AACnB,IAAM,WAAW;AAGjB,IAAM,aAAa;AACnB,IAAM,WAAW;AAOjB,SAAS,kBAAkB,KAAuB;AAChD,QAAM,OAAQ,IAA8B;AAC5C,SAAO,SAAS,YAAY,SAAS,WAAW,SAAS;AAC3D;AAeA,eAAsB,eACpB,OACA,SAEA,aACe;AACf,MAAI,MAAM,kBAAkB,MAAM;AAChC;AAAA,EACF;AAEA,QAAM,aAAa,MAAM;AACzB,QAAM,YAAYD,SAAQ,UAAU;AAEpC,MAAI;AACF,UAAM,MAAM,WAAW,EAAE,WAAW,KAAK,CAAC;AAAA,EAC5C,SAAS,KAAc;AACrB,QAAI,kBAAkB,GAAG,GAAG;AAC1B,cAAQ,OAAO;AAAA,QACb,oCAAoC,SAAS;AAAA;AAAA,MAC/C;AACA;AAAA,IACF;AACA,UAAM;AAAA,EACR;AAEA,MAAI;AACF,UAAM,UAAU,YAAY,SAAS,EAAE,MAAM,IAAM,CAAC;AAAA,EACtD,SAAS,KAAc;AACrB,QAAI,kBAAkB,GAAG,GAAG;AAC1B,cAAQ,OAAO;AAAA,QACb,qCAAqC,UAAU;AAAA;AAAA,MACjD;AACA;AAAA,IACF;AACA,UAAM;AAAA,EACR;AAIA,MAAI;AACF,UAAM,MAAM,YAAY,GAAK;AAAA,EAC/B,QAAQ;AAAA,EAER;AACF;AAWA,SAAS,qBACP,OAC6C;AAC7C,MAAI,WAAW;AACf,MAAI,SAAS;AAEb,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,UAAM,UAAU,MAAM,CAAC,EAAE,KAAK;AAC9B,QAAI,YAAY,cAAc,YAAY,YAAY;AACpD,iBAAW;AAAA,IACb,WAAW,YAAY,YAAY,YAAY,UAAU;AACvD,UAAI,aAAa,IAAI;AACnB,iBAAS;AACT;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,MAAI,aAAa,MAAM,WAAW,IAAI;AACpC,WAAO;AAAA,EACT;AAEA,SAAO,EAAE,UAAU,OAAO;AAC5B;AAiBA,eAAsB,kBACpB,OACA,SAEA,aACe;AACf,MAAI,MAAM,iBAAiB,MAAM;AAC/B;AAAA,EACF;AAGA,MAAI,YAAY,IAAI;AAClB;AAAA,EACF;AAEA,QAAM,WAAW,MAAM;AAEvB,MAAI,kBAAiC;AACrC,MAAI;AACF,sBAAkB,MAAM,SAAS,UAAU,OAAO;AAAA,EACpD,SAAS,KAAc;AACrB,UAAM,OAAQ,IAA8B;AAC5C,QAAI,SAAS,UAAU;AACrB,UAAI,kBAAkB,GAAG,GAAG;AAC1B,gBAAQ,OAAO;AAAA,UACb,kCAAkC,QAAQ;AAAA;AAAA,QAC5C;AACA;AAAA,MACF;AACA,YAAM;AAAA,IACR;AAAA,EACF;AAGA,MAAI,oBAAoB,MAAM;AAC5B,QAAI;AACF,YAAM,MAAMA,SAAQ,QAAQ,GAAG,EAAE,WAAW,KAAK,CAAC;AAClD,YAAM,UAAU,UAAU,SAAS,OAAO;AAAA,IAC5C,SAAS,KAAc;AACrB,UAAI,kBAAkB,GAAG,GAAG;AAC1B,gBAAQ,OAAO;AAAA,UACb,mCAAmC,QAAQ;AAAA;AAAA,QAC7C;AACA;AAAA,
MACF;AACA,YAAM;AAAA,IACR;AACA;AAAA,EACF;AAGA,QAAM,QAAQ,gBAAgB,MAAM,IAAI;AACxC,QAAM,aAAa,qBAAqB,KAAK;AAE7C,MAAI;AACJ,MAAI,eAAe,MAAM;AAEvB,UAAM,SAAS,MAAM,MAAM,GAAG,WAAW,QAAQ;AACjD,UAAM,QAAQ,MAAM,MAAM,WAAW,SAAS,CAAC;AAE/C,UAAM,gCAAgC,QAAQ,SAAS,IAAI,IACvD,QAAQ,MAAM,GAAG,EAAE,IACnB;AACJ,iBAAa,CAAC,GAAG,QAAQ,+BAA+B,GAAG,KAAK,EAAE,KAAK,IAAI;AAAA,EAC7E,OAAO;AAEL,UAAM,YAAY,gBAAgB,SAAS,IAAI,IAAI,OAAO;AAC1D,iBAAa,kBAAkB,YAAY;AAAA,EAC7C;AAEA,MAAI;AACF,UAAM,UAAU,UAAU,YAAY,OAAO;AAAA,EAC/C,SAAS,KAAc;AACrB,QAAI,kBAAkB,GAAG,GAAG;AAC1B,cAAQ,OAAO;AAAA,QACb,mCAAmC,QAAQ;AAAA;AAAA,MAC7C;AACA;AAAA,IACF;AACA,UAAM;AAAA,EACR;AACF;AAcA,eAAsB,gBACpB,OACA,aACe;AACf,QAAM,gBAAgBC,MAAK,aAAa,YAAY;AAIpD,QAAM,gBAAgB,MAAM,OAAO,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC;AAExD,MAAI,cAAc,WAAW,GAAG;AAC9B;AAAA,EACF;AAEA,MAAI,kBAAkB;AACtB,MAAI;AACF,sBAAkB,MAAM,SAAS,eAAe,OAAO;AAAA,EACzD,SAAS,KAAc;AACrB,UAAM,OAAQ,IAA8B;AAC5C,QAAI,SAAS,UAAU;AACrB,UAAI,kBAAkB,GAAG,GAAG;AAC1B,gBAAQ,OAAO;AAAA,UACb;AAAA;AAAA,QACF;AACA;AAAA,MACF;AACA,YAAM;AAAA,IACR;AAAA,EACF;AAGA,QAAM,gBAAgB,gBACnB,MAAM,IAAI,EACV,IAAI,CAAC,SAAS,KAAK,KAAK,CAAC,EACzB,OAAO,CAAC,SAAS,SAAS,EAAE;AAE/B,QAAM,cAAc,IAAI,IAAI,aAAa;AAKzC,QAAM,QAAQ,cACX,IAAI,CAAC,MAAM,EAAE,KAAK,EAAE,QAAQ,OAAO,GAAG,CAAC,EACvC,OAAO,CAAC,MAAM,MAAM,MAAM,CAAC,YAAY,IAAI,CAAC,CAAC;AAEhD,MAAI,MAAM,WAAW,GAAG;AACtB;AAAA,EACF;AAGA,MAAI,iBAAiB;AACrB,MAAI,eAAe,SAAS,KAAK,CAAC,eAAe,SAAS,IAAI,GAAG;AAC/D,sBAAkB;AAAA,EACpB;AAEA,oBAAkB,MAAM,KAAK,IAAI,IAAI;AAErC,MAAI;AACF,UAAM,UAAU,eAAe,gBAAgB,OAAO;AAAA,EACxD,SAAS,KAAc;AACrB,QAAI,kBAAkB,GAAG,GAAG;AAC1B,cAAQ,OAAO;AAAA,QACb;AAAA;AAAA,MACF;AACA;AAAA,IACF;AACA,UAAM;AAAA,EACR;AACF;","names":["resolve","dirname","join"]}