@morphllm/morphsdk 0.2.67 → 0.2.68

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (106)
  1. package/dist/{chunk-ZO4PPFCZ.js → chunk-223ZMZP6.js} +30 -5
  2. package/dist/chunk-223ZMZP6.js.map +1 -0
  3. package/dist/{chunk-GGYMQZXI.js → chunk-6RFT7K7F.js} +23 -3
  4. package/dist/chunk-6RFT7K7F.js.map +1 -0
  5. package/dist/{chunk-X4WPMVCM.js → chunk-AAIUUX4Y.js} +2 -2
  6. package/dist/{chunk-7CJPKJEE.js → chunk-BYNVJ4ON.js} +2 -2
  7. package/dist/{chunk-SQHZPUCS.js → chunk-FURFQDXF.js} +3 -3
  8. package/dist/{chunk-QZNGKOCZ.js → chunk-HBWJLKNM.js} +2 -2
  9. package/dist/chunk-HKJ2B2AA.js +15 -0
  10. package/dist/{chunk-XLSODV6H.js → chunk-JPGX6WEV.js} +3 -3
  11. package/dist/{chunk-AQD2JCLI.js → chunk-K3EHH3C4.js} +3 -3
  12. package/dist/{chunk-7V2KIZT5.js → chunk-QEGH3CWQ.js} +3 -3
  13. package/dist/{chunk-5QIWYEHJ.js → chunk-RTDKF6NS.js} +2 -2
  14. package/dist/chunk-S6KU22MU.js +396 -0
  15. package/dist/chunk-S6KU22MU.js.map +1 -0
  16. package/dist/{chunk-IVT6YMFA.js → chunk-SXE54MIC.js} +8 -8
  17. package/dist/{chunk-BGSHC2UK.js → chunk-VDJGZIQ5.js} +2 -2
  18. package/dist/{chunk-7JEL2VZO.js → chunk-VLZEBK7S.js} +3 -3
  19. package/dist/{chunk-IUG2FHNN.js → chunk-W76ICQKY.js} +2 -2
  20. package/dist/client.cjs +434 -5
  21. package/dist/client.cjs.map +1 -1
  22. package/dist/client.js +12 -11
  23. package/dist/git/client.js +1 -1
  24. package/dist/git/config.js +1 -1
  25. package/dist/git/index.js +1 -1
  26. package/dist/index.cjs +434 -5
  27. package/dist/index.cjs.map +1 -1
  28. package/dist/index.js +13 -12
  29. package/dist/modelrouter/core.js +1 -1
  30. package/dist/modelrouter/index.js +1 -1
  31. package/dist/tools/browser/anthropic.js +2 -2
  32. package/dist/tools/browser/core.js +1 -1
  33. package/dist/tools/browser/index.js +4 -4
  34. package/dist/tools/browser/live.js +1 -1
  35. package/dist/tools/browser/openai.js +2 -2
  36. package/dist/tools/browser/prompts.js +1 -1
  37. package/dist/tools/browser/vercel.js +2 -2
  38. package/dist/tools/codebase_search/anthropic.js +1 -1
  39. package/dist/tools/codebase_search/core.js +1 -1
  40. package/dist/tools/codebase_search/index.js +1 -1
  41. package/dist/tools/codebase_search/openai.js +1 -1
  42. package/dist/tools/codebase_search/prompts.js +1 -1
  43. package/dist/tools/codebase_search/vercel.js +1 -1
  44. package/dist/tools/fastapply/anthropic.js +2 -2
  45. package/dist/tools/fastapply/core.js +1 -1
  46. package/dist/tools/fastapply/index.js +4 -4
  47. package/dist/tools/fastapply/openai.js +2 -2
  48. package/dist/tools/fastapply/prompts.js +1 -1
  49. package/dist/tools/fastapply/vercel.js +2 -2
  50. package/dist/tools/index.js +4 -4
  51. package/dist/tools/utils/resilience.js +1 -1
  52. package/dist/tools/warp_grep/agent/config.js +1 -1
  53. package/dist/tools/warp_grep/agent/formatter.js +1 -1
  54. package/dist/tools/warp_grep/agent/parser.js +1 -1
  55. package/dist/tools/warp_grep/agent/prompt.js +1 -1
  56. package/dist/tools/warp_grep/agent/runner.js +1 -1
  57. package/dist/tools/warp_grep/anthropic.cjs +434 -5
  58. package/dist/tools/warp_grep/anthropic.cjs.map +1 -1
  59. package/dist/tools/warp_grep/anthropic.js +6 -5
  60. package/dist/tools/warp_grep/client.cjs +434 -5
  61. package/dist/tools/warp_grep/client.cjs.map +1 -1
  62. package/dist/tools/warp_grep/client.js +5 -4
  63. package/dist/tools/warp_grep/gemini.cjs +434 -5
  64. package/dist/tools/warp_grep/gemini.cjs.map +1 -1
  65. package/dist/tools/warp_grep/gemini.js +6 -5
  66. package/dist/tools/warp_grep/harness.cjs +417 -4
  67. package/dist/tools/warp_grep/harness.cjs.map +1 -1
  68. package/dist/tools/warp_grep/harness.js +3 -2
  69. package/dist/tools/warp_grep/harness.js.map +1 -1
  70. package/dist/tools/warp_grep/index.cjs +434 -5
  71. package/dist/tools/warp_grep/index.cjs.map +1 -1
  72. package/dist/tools/warp_grep/index.js +9 -8
  73. package/dist/tools/warp_grep/openai.cjs +434 -5
  74. package/dist/tools/warp_grep/openai.cjs.map +1 -1
  75. package/dist/tools/warp_grep/openai.js +6 -5
  76. package/dist/tools/warp_grep/providers/local.cjs +417 -4
  77. package/dist/tools/warp_grep/providers/local.cjs.map +1 -1
  78. package/dist/tools/warp_grep/providers/local.d.ts +6 -0
  79. package/dist/tools/warp_grep/providers/local.js +3 -2
  80. package/dist/tools/warp_grep/providers/remote.cjs +408 -1
  81. package/dist/tools/warp_grep/providers/remote.cjs.map +1 -1
  82. package/dist/tools/warp_grep/providers/remote.d.ts +7 -0
  83. package/dist/tools/warp_grep/providers/remote.js +3 -2
  84. package/dist/tools/warp_grep/utils/files.js +1 -1
  85. package/dist/tools/warp_grep/utils/paths.js +1 -1
  86. package/dist/tools/warp_grep/utils/ripgrep.js +1 -1
  87. package/dist/tools/warp_grep/vercel.cjs +434 -5
  88. package/dist/tools/warp_grep/vercel.cjs.map +1 -1
  89. package/dist/tools/warp_grep/vercel.js +6 -5
  90. package/package.json +1 -1
  91. package/dist/chunk-GGYMQZXI.js.map +0 -1
  92. package/dist/chunk-PZ5AY32C.js +0 -10
  93. package/dist/chunk-ZO4PPFCZ.js.map +0 -1
  94. /package/dist/{chunk-X4WPMVCM.js.map → chunk-AAIUUX4Y.js.map} +0 -0
  95. /package/dist/{chunk-7CJPKJEE.js.map → chunk-BYNVJ4ON.js.map} +0 -0
  96. /package/dist/{chunk-SQHZPUCS.js.map → chunk-FURFQDXF.js.map} +0 -0
  97. /package/dist/{chunk-QZNGKOCZ.js.map → chunk-HBWJLKNM.js.map} +0 -0
  98. /package/dist/{chunk-PZ5AY32C.js.map → chunk-HKJ2B2AA.js.map} +0 -0
  99. /package/dist/{chunk-XLSODV6H.js.map → chunk-JPGX6WEV.js.map} +0 -0
  100. /package/dist/{chunk-AQD2JCLI.js.map → chunk-K3EHH3C4.js.map} +0 -0
  101. /package/dist/{chunk-7V2KIZT5.js.map → chunk-QEGH3CWQ.js.map} +0 -0
  102. /package/dist/{chunk-5QIWYEHJ.js.map → chunk-RTDKF6NS.js.map} +0 -0
  103. /package/dist/{chunk-IVT6YMFA.js.map → chunk-SXE54MIC.js.map} +0 -0
  104. /package/dist/{chunk-BGSHC2UK.js.map → chunk-VDJGZIQ5.js.map} +0 -0
  105. /package/dist/{chunk-7JEL2VZO.js.map → chunk-VLZEBK7S.js.map} +0 -0
  106. /package/dist/{chunk-IUG2FHNN.js.map → chunk-W76ICQKY.js.map} +0 -0
@@ -10,6 +10,9 @@ import {
 import {
   runRipgrep
 } from "./chunk-TPP2UGQP.js";
+import {
+  require_ignore
+} from "./chunk-S6KU22MU.js";
 import {
   AGENT_CONFIG,
   DEFAULT_EXCLUDES
@@ -18,11 +21,30 @@ import {
 // tools/warp_grep/providers/local.ts
 import fs from "fs/promises";
 import path from "path";
+var ignore = require_ignore().default || require_ignore();
 var LocalRipgrepProvider = class {
   constructor(repoRoot, excludes = DEFAULT_EXCLUDES) {
     this.repoRoot = repoRoot;
     this.excludes = excludes;
   }
+  ignoreFilter = null;
+  /**
+   * Build an ignore filter that combines DEFAULT_EXCLUDES and .gitignore patterns.
+   * Cached after first call.
+   */
+  async getIgnoreFilter() {
+    if (this.ignoreFilter) return this.ignoreFilter;
+    const ig = ignore();
+    ig.add(this.excludes);
+    try {
+      const gitignorePath = path.join(this.repoRoot, ".gitignore");
+      const gitignoreContent = await fs.readFile(gitignorePath, "utf-8");
+      ig.add(gitignoreContent);
+    } catch {
+    }
+    this.ignoreFilter = ig;
+    return ig;
+  }
   async grep(params) {
     let abs;
     try {
@@ -150,9 +172,11 @@ Details: ${res.stderr}` : ""}`
     const maxResults = params.maxResults ?? AGENT_CONFIG.MAX_OUTPUT_LINES;
     const maxDepth = params.maxDepth ?? AGENT_CONFIG.MAX_LIST_DEPTH;
     const regex = params.pattern ? new RegExp(params.pattern) : null;
+    const ig = await this.getIgnoreFilter();
     const results = [];
     let timedOut = false;
     const startTime = Date.now();
+    const repoRoot = this.repoRoot;
     async function walk(dir, depth) {
       if (Date.now() - startTime > AGENT_CONFIG.LIST_TIMEOUT_MS) {
         timedOut = true;
@@ -163,17 +187,18 @@ Details: ${res.stderr}` : ""}`
       for (const entry of entries) {
         if (timedOut || results.length >= maxResults) break;
         const full = path.join(dir, entry.name);
-        const rel = toRepoRelative(abs, full).replace(/^[.][/\\]?/, "");
-        if (DEFAULT_EXCLUDES.some((ex) => rel.split(path.sep).includes(ex))) continue;
+        const relToRepo = toRepoRelative(repoRoot, full).replace(/^[.][/\\]?/, "");
+        const isDir = entry.isDirectory();
+        if (ig.ignores(relToRepo) || isDir && ig.ignores(relToRepo + "/")) continue;
         if (regex && !regex.test(entry.name)) continue;
         results.push({
           name: entry.name,
           path: toRepoRelative(path.resolve(""), full),
           // relative display
-          type: entry.isDirectory() ? "dir" : "file",
+          type: isDir ? "dir" : "file",
           depth
         });
-        if (entry.isDirectory()) {
+        if (isDir) {
           await walk(full, depth + 1);
         }
       }
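
The new filtering above is built on the ignore npm package (bundled into chunk-S6KU22MU.js and loaded via require_ignore). A minimal TypeScript sketch of how that filter behaves, using illustrative patterns in place of the real DEFAULT_EXCLUDES list and .gitignore contents:

import ignore from "ignore";

// Build the filter the way getIgnoreFilter() does: built-in excludes first,
// then the raw .gitignore file content appended as-is.
const ig = ignore();
ig.add(["node_modules", ".git", "dist"]); // stand-in for DEFAULT_EXCLUDES
ig.add("*.log\ncoverage/\n");             // stand-in for .gitignore content

// Paths must be repo-relative. Directories are additionally checked with a
// trailing slash so directory-only rules like "coverage/" match them.
ig.ignores("src/index.ts");   // false - kept
ig.ignores("node_modules");   // true  - skipped
ig.ignores("coverage/");      // true  - skipped
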
@@ -186,4 +211,4 @@ Details: ${res.stderr}` : ""}`
 export {
   LocalRipgrepProvider
 };
-//# sourceMappingURL=chunk-ZO4PPFCZ.js.map
+//# sourceMappingURL=chunk-223ZMZP6.js.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../tools/warp_grep/providers/local.ts"],"sourcesContent":["import fs from 'fs/promises';\nimport fssync from 'fs';\nimport path from 'path';\n// eslint-disable-next-line @typescript-eslint/no-require-imports\nconst ignore = require('ignore').default || require('ignore');\ntype Ignore = ReturnType<typeof ignore>;\nimport { runRipgrep } from '../utils/ripgrep.js';\nimport { ensureWithinRepo, resolveUnderRepo, toRepoRelative, isSymlink, isTextualFile } from '../utils/paths.js';\nimport type { WarpGrepProvider, GrepResult, ReadResult, ListDirectoryEntry } from './types.js';\nimport { readAllLines } from '../utils/files.js';\nimport { DEFAULT_EXCLUDES, AGENT_CONFIG } from '../agent/config.js';\n\nexport class LocalRipgrepProvider implements WarpGrepProvider {\n private ignoreFilter: Ignore | null = null;\n\n constructor(private readonly repoRoot: string, private readonly excludes: string[] = DEFAULT_EXCLUDES) {}\n\n /**\n * Build an ignore filter that combines DEFAULT_EXCLUDES and .gitignore patterns.\n * Cached after first call.\n */\n private async getIgnoreFilter(): Promise<Ignore> {\n if (this.ignoreFilter) return this.ignoreFilter;\n\n const ig = ignore();\n \n // Add default excludes (node_modules, .git, build outputs, etc.)\n ig.add(this.excludes);\n\n // Try to read .gitignore from repo root\n try {\n const gitignorePath = path.join(this.repoRoot, '.gitignore');\n const gitignoreContent = await fs.readFile(gitignorePath, 'utf-8');\n ig.add(gitignoreContent);\n } catch {\n // No .gitignore or unreadable - that's fine\n }\n\n this.ignoreFilter = ig;\n return ig;\n }\n\n async grep(params: { pattern: string; path: string; glob?: string }): Promise<GrepResult> {\n let abs: string;\n try {\n abs = resolveUnderRepo(this.repoRoot, params.path);\n } catch (err) {\n return {\n lines: [],\n error: `[PATH ERROR] ${err instanceof Error ? err.message : String(err)}`,\n };\n }\n const stat = await fs.stat(abs).catch(() => null);\n if (!stat) return { lines: [] };\n const targetArg = abs === path.resolve(this.repoRoot) ? '.' : toRepoRelative(this.repoRoot, abs);\n const args = [\n '--no-config',\n '--no-heading',\n '--with-filename',\n '--line-number',\n '--color=never',\n '--trim',\n '--max-columns=400',\n '-C', '1',\n ...(params.glob ? ['--glob', params.glob] : []),\n ...this.excludes.flatMap((e) => ['-g', `!${e}`]),\n params.pattern,\n targetArg || '.',\n ];\n const res = await runRipgrep(args, { cwd: this.repoRoot });\n \n // Gracefully handle ripgrep not being available\n if (res.exitCode === -1) {\n return {\n lines: [],\n error: `[RIPGREP NOT AVAILABLE] ripgrep (rg) is required but failed to execute. Please install it:\\n` +\n ` • macOS: brew install ripgrep\\n` +\n ` • Ubuntu/Debian: apt install ripgrep\\n` +\n ` • Windows: choco install ripgrep\\n` +\n ` • Or visit: https://github.com/BurntSushi/ripgrep#installation\\n` +\n `Exit code: ${res.exitCode}${res.stderr ? `\\nDetails: ${res.stderr}` : ''}`,\n };\n }\n \n // Handle other ripgrep errors gracefully\n if (res.exitCode !== 0 && res.exitCode !== 1) {\n return {\n lines: [],\n error: `[RIPGREP ERROR] grep failed with exit code ${res.exitCode}${res.stderr ? 
`: ${res.stderr}` : ''}`,\n };\n }\n \n const lines = (res.stdout || '')\n .trim()\n .split(/\\r?\\n/)\n .filter((l) => l.length > 0);\n if (lines.length > AGENT_CONFIG.MAX_OUTPUT_LINES) {\n return {\n lines: [],\n error: 'query not specific enough, tool tried to return too much context and failed',\n };\n }\n \n return { lines };\n }\n\n async read(params: { path: string; start?: number; end?: number }): Promise<ReadResult> {\n let abs: string;\n try {\n abs = resolveUnderRepo(this.repoRoot, params.path);\n } catch (err) {\n return {\n lines: [],\n error: `[PATH ERROR] ${err instanceof Error ? err.message : String(err)}`,\n };\n }\n const stat = await fs.stat(abs).catch(() => null);\n \n // Gracefully handle file not found / not a file\n if (!stat || !stat.isFile()) {\n return {\n lines: [],\n error: `[FILE NOT FOUND] You tried to read \"${params.path}\" but there is no file at this path. ` +\n `Double-check the path exists and is spelled correctly.`,\n };\n }\n \n // Gracefully handle symlinks\n if (isSymlink(abs)) {\n return {\n lines: [],\n error: `[SYMLINK] You tried to read \"${params.path}\" but this is a symlink. ` +\n `Try reading the actual file it points to instead.`,\n };\n }\n \n // Gracefully handle non-text or too-large files\n if (!isTextualFile(abs)) {\n return {\n lines: [],\n error: `[UNREADABLE FILE] You tried to read \"${params.path}\" but this file is either too large ` +\n `or not a text file, so it cannot be read. Try a different file.`,\n };\n }\n \n let lines: string[];\n try {\n lines = await readAllLines(abs);\n } catch (err) {\n return {\n lines: [],\n error: `[READ ERROR] Failed to read \"${params.path}\": ${err instanceof Error ? err.message : String(err)}`,\n };\n }\n const total = lines.length;\n let s = params.start ?? 1;\n let e = Math.min(params.end ?? total, total);\n if (s > total && total > 0) {\n // Model hallucinated range - fallback to full file\n s = 1;\n e = total;\n }\n const out: string[] = [];\n for (let i = s; i <= e; i += 1) {\n const content = lines[i - 1] ?? '';\n out.push(`${i}|${content}`);\n }\n if (out.length > AGENT_CONFIG.MAX_READ_LINES) {\n const truncated = out.slice(0, AGENT_CONFIG.MAX_READ_LINES);\n truncated.push(`... [truncated: showing ${AGENT_CONFIG.MAX_READ_LINES} of ${out.length} lines]`);\n return { lines: truncated };\n }\n \n return { lines: out };\n }\n\n async listDirectory(params: { path: string; pattern?: string | null; maxResults?: number; maxDepth?: number }): Promise<ListDirectoryEntry[]> {\n let abs: string;\n try {\n abs = resolveUnderRepo(this.repoRoot, params.path);\n } catch {\n // Path outside repo - return empty (graceful failure)\n return [];\n }\n const stat = await fs.stat(abs).catch(() => null);\n if (!stat || !stat.isDirectory()) {\n return [];\n }\n const maxResults = params.maxResults ?? AGENT_CONFIG.MAX_OUTPUT_LINES;\n const maxDepth = params.maxDepth ?? AGENT_CONFIG.MAX_LIST_DEPTH;\n const regex = params.pattern ? 
new RegExp(params.pattern) : null;\n\n // Get the ignore filter (combines DEFAULT_EXCLUDES + .gitignore)\n const ig = await this.getIgnoreFilter();\n\n const results: ListDirectoryEntry[] = [];\n let timedOut = false;\n const startTime = Date.now();\n const repoRoot = this.repoRoot;\n \n async function walk(dir: string, depth: number) {\n if (Date.now() - startTime > AGENT_CONFIG.LIST_TIMEOUT_MS) {\n timedOut = true;\n return;\n }\n if (depth > maxDepth || results.length >= maxResults) return;\n const entries = await fs.readdir(dir, { withFileTypes: true });\n for (const entry of entries) {\n if (timedOut || results.length >= maxResults) break;\n const full = path.join(dir, entry.name);\n \n // Get path relative to repo root for ignore matching\n const relToRepo = toRepoRelative(repoRoot, full).replace(/^[.][/\\\\]?/, '');\n \n // Use ignore filter - handles globs, .gitignore patterns, etc.\n // For directories, also check with trailing slash for proper gitignore semantics\n const isDir = entry.isDirectory();\n if (ig.ignores(relToRepo) || (isDir && ig.ignores(relToRepo + '/'))) continue;\n \n if (regex && !regex.test(entry.name)) continue;\n results.push({\n name: entry.name,\n path: toRepoRelative(path.resolve(''), full), // relative display\n type: isDir ? 'dir' : 'file',\n depth,\n });\n if (isDir) {\n await walk(full, depth + 1);\n }\n }\n }\n await walk(abs, 0);\n return results;\n }\n}\n\n\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,OAAO,QAAQ;AAEf,OAAO,UAAU;AAEjB,IAAM,SAAS,iBAAkB,WAAW;AAQrC,IAAM,uBAAN,MAAuD;AAAA,EAG5D,YAA6B,UAAmC,WAAqB,kBAAkB;AAA1E;AAAmC;AAAA,EAAwC;AAAA,EAFhG,eAA8B;AAAA;AAAA;AAAA;AAAA;AAAA,EAQtC,MAAc,kBAAmC;AAC/C,QAAI,KAAK,aAAc,QAAO,KAAK;AAEnC,UAAM,KAAK,OAAO;AAGlB,OAAG,IAAI,KAAK,QAAQ;AAGpB,QAAI;AACF,YAAM,gBAAgB,KAAK,KAAK,KAAK,UAAU,YAAY;AAC3D,YAAM,mBAAmB,MAAM,GAAG,SAAS,eAAe,OAAO;AACjE,SAAG,IAAI,gBAAgB;AAAA,IACzB,QAAQ;AAAA,IAER;AAEA,SAAK,eAAe;AACpB,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,KAAK,QAA+E;AACxF,QAAI;AACJ,QAAI;AACF,YAAM,iBAAiB,KAAK,UAAU,OAAO,IAAI;AAAA,IACnD,SAAS,KAAK;AACZ,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO,gBAAgB,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MACzE;AAAA,IACF;AACA,UAAM,OAAO,MAAM,GAAG,KAAK,GAAG,EAAE,MAAM,MAAM,IAAI;AAChD,QAAI,CAAC,KAAM,QAAO,EAAE,OAAO,CAAC,EAAE;AAC9B,UAAM,YAAY,QAAQ,KAAK,QAAQ,KAAK,QAAQ,IAAI,MAAM,eAAe,KAAK,UAAU,GAAG;AAC/F,UAAM,OAAO;AAAA,MACX;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MAAM;AAAA,MACN,GAAI,OAAO,OAAO,CAAC,UAAU,OAAO,IAAI,IAAI,CAAC;AAAA,MAC7C,GAAG,KAAK,SAAS,QAAQ,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,EAAE,CAAC;AAAA,MAC/C,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AACA,UAAM,MAAM,MAAM,WAAW,MAAM,EAAE,KAAK,KAAK,SAAS,CAAC;AAGzD,QAAI,IAAI,aAAa,IAAI;AACvB,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA,aAKc,IAAI,QAAQ,GAAG,IAAI,SAAS;AAAA,WAAc,IAAI,MAAM,KAAK,EAAE;AAAA,MAClF;AAAA,IACF;AAGA,QAAI,IAAI,aAAa,KAAK,IAAI,aAAa,GAAG;AAC5C,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO,8CAA8C,IAAI,QAAQ,GAAG,IAAI,SAAS,KAAK,IAAI,MAAM,KAAK,EAAE;AAAA,MACzG;AAAA,IACF;AAEA,UAAM,SAAS,IAAI,UAAU,IAC1B,KAAK,EACL,MAAM,OAAO,EACb,OAAO,CAAC,MAAM,EAAE,SAAS,CAAC;AAC7B,QAAI,MAAM,SAAS,aAAa,kBAAkB;AAChD,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO;AAAA,MACT;AAAA,IACF;AAEA,WAAO,EAAE,MAAM;AAAA,EACjB;AAAA,EAEA,MAAM,KAAK,QAA6E;AACtF,QAAI;AACJ,QAAI;AACF,YAAM,iBAAiB,KAAK,UAAU,OAAO,IAAI;AAAA,IACnD,SAAS,KAAK;AACZ,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO,gBAAgB,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MACzE;AAAA,IACF;AACA,UAAM,OAAO,MAAM,GAAG,KAAK,GAAG,EAAE,MAAM,MAAM,IAAI;AAGhD,QAAI,CAAC,QAAQ,CAAC,KAAK,OAAO,GAAG;AAC3B,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO,uCAAuC,OAAO,I
AAI;AAAA,MAE3D;AAAA,IACF;AAGA,QAAI,UAAU,GAAG,GAAG;AAClB,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO,gCAAgC,OAAO,IAAI;AAAA,MAEpD;AAAA,IACF;AAGA,QAAI,CAAC,cAAc,GAAG,GAAG;AACvB,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO,wCAAwC,OAAO,IAAI;AAAA,MAE5D;AAAA,IACF;AAEA,QAAI;AACJ,QAAI;AACF,cAAQ,MAAM,aAAa,GAAG;AAAA,IAChC,SAAS,KAAK;AACZ,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO,gCAAgC,OAAO,IAAI,MAAM,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MAC1G;AAAA,IACF;AACA,UAAM,QAAQ,MAAM;AACpB,QAAI,IAAI,OAAO,SAAS;AACxB,QAAI,IAAI,KAAK,IAAI,OAAO,OAAO,OAAO,KAAK;AAC3C,QAAI,IAAI,SAAS,QAAQ,GAAG;AAE1B,UAAI;AACJ,UAAI;AAAA,IACN;AACA,UAAM,MAAgB,CAAC;AACvB,aAAS,IAAI,GAAG,KAAK,GAAG,KAAK,GAAG;AAC9B,YAAM,UAAU,MAAM,IAAI,CAAC,KAAK;AAChC,UAAI,KAAK,GAAG,CAAC,IAAI,OAAO,EAAE;AAAA,IAC5B;AACA,QAAI,IAAI,SAAS,aAAa,gBAAgB;AAC5C,YAAM,YAAY,IAAI,MAAM,GAAG,aAAa,cAAc;AAC1D,gBAAU,KAAK,2BAA2B,aAAa,cAAc,OAAO,IAAI,MAAM,SAAS;AAC/F,aAAO,EAAE,OAAO,UAAU;AAAA,IAC5B;AAEA,WAAO,EAAE,OAAO,IAAI;AAAA,EACtB;AAAA,EAEA,MAAM,cAAc,QAA0H;AAC5I,QAAI;AACJ,QAAI;AACF,YAAM,iBAAiB,KAAK,UAAU,OAAO,IAAI;AAAA,IACnD,QAAQ;AAEN,aAAO,CAAC;AAAA,IACV;AACA,UAAM,OAAO,MAAM,GAAG,KAAK,GAAG,EAAE,MAAM,MAAM,IAAI;AAChD,QAAI,CAAC,QAAQ,CAAC,KAAK,YAAY,GAAG;AAChC,aAAO,CAAC;AAAA,IACV;AACA,UAAM,aAAa,OAAO,cAAc,aAAa;AACrD,UAAM,WAAW,OAAO,YAAY,aAAa;AACjD,UAAM,QAAQ,OAAO,UAAU,IAAI,OAAO,OAAO,OAAO,IAAI;AAG5D,UAAM,KAAK,MAAM,KAAK,gBAAgB;AAEtC,UAAM,UAAgC,CAAC;AACvC,QAAI,WAAW;AACf,UAAM,YAAY,KAAK,IAAI;AAC3B,UAAM,WAAW,KAAK;AAEtB,mBAAe,KAAK,KAAa,OAAe;AAC9C,UAAI,KAAK,IAAI,IAAI,YAAY,aAAa,iBAAiB;AACzD,mBAAW;AACX;AAAA,MACF;AACA,UAAI,QAAQ,YAAY,QAAQ,UAAU,WAAY;AACtD,YAAM,UAAU,MAAM,GAAG,QAAQ,KAAK,EAAE,eAAe,KAAK,CAAC;AAC7D,iBAAW,SAAS,SAAS;AAC3B,YAAI,YAAY,QAAQ,UAAU,WAAY;AAC9C,cAAM,OAAO,KAAK,KAAK,KAAK,MAAM,IAAI;AAGtC,cAAM,YAAY,eAAe,UAAU,IAAI,EAAE,QAAQ,cAAc,EAAE;AAIzE,cAAM,QAAQ,MAAM,YAAY;AAChC,YAAI,GAAG,QAAQ,SAAS,KAAM,SAAS,GAAG,QAAQ,YAAY,GAAG,EAAI;AAErE,YAAI,SAAS,CAAC,MAAM,KAAK,MAAM,IAAI,EAAG;AACtC,gBAAQ,KAAK;AAAA,UACX,MAAM,MAAM;AAAA,UACZ,MAAM,eAAe,KAAK,QAAQ,EAAE,GAAG,IAAI;AAAA;AAAA,UAC3C,MAAM,QAAQ,QAAQ;AAAA,UACtB;AAAA,QACF,CAAC;AACD,YAAI,OAAO;AACT,gBAAM,KAAK,MAAM,QAAQ,CAAC;AAAA,QAC5B;AAAA,MACF;AAAA,IACF;AACA,UAAM,KAAK,KAAK,CAAC;AACjB,WAAO;AAAA,EACT;AACF;","names":[]}
@@ -1,13 +1,31 @@
 import {
-  AGENT_CONFIG
+  require_ignore
+} from "./chunk-S6KU22MU.js";
+import {
+  AGENT_CONFIG,
+  DEFAULT_EXCLUDES
 } from "./chunk-XT5ZO6ES.js";

 // tools/warp_grep/providers/remote.ts
+var ignore = require_ignore().default || require_ignore();
 var RemoteCommandsProvider = class {
   constructor(repoRoot, commands) {
     this.repoRoot = repoRoot;
     this.commands = commands;
   }
+  ignoreFilter = null;
+  /**
+   * Build an ignore filter from DEFAULT_EXCLUDES.
+   * Note: For remote providers, we can't read .gitignore from the remote sandbox,
+   * so we only use the built-in excludes.
+   */
+  getIgnoreFilter() {
+    if (this.ignoreFilter) return this.ignoreFilter;
+    const ig = ignore();
+    ig.add(DEFAULT_EXCLUDES);
+    this.ignoreFilter = ig;
+    return ig;
+  }
   /**
    * Run grep command and parse ripgrep output
    */
@@ -61,6 +79,7 @@ var RemoteCommandsProvider = class {
   async listDirectory(params) {
     const maxDepth = params.maxDepth ?? AGENT_CONFIG.MAX_LIST_DEPTH;
     const maxResults = params.maxResults ?? AGENT_CONFIG.MAX_OUTPUT_LINES;
+    const ig = this.getIgnoreFilter();
     try {
       const stdout = await this.commands.listDir(params.path, maxDepth);
       const paths = (stdout || "").trim().split(/\r?\n/).filter((p) => p.length > 0);
@@ -69,11 +88,12 @@ var RemoteCommandsProvider = class {
       for (const fullPath of paths) {
         if (fullPath === params.path || fullPath === this.repoRoot) continue;
         const name = fullPath.split("/").pop() || "";
-        if (regex && !regex.test(name)) continue;
         let relativePath = fullPath;
         if (fullPath.startsWith(this.repoRoot)) {
           relativePath = fullPath.slice(this.repoRoot.length).replace(/^\//, "");
         }
+        if (ig.ignores(relativePath)) continue;
+        if (regex && !regex.test(name)) continue;
         const depth = relativePath.split("/").filter(Boolean).length - 1;
         const hasExtension = name.includes(".") && !name.startsWith(".");
         const type = hasExtension ? "file" : "dir";
@@ -95,4 +115,4 @@ var RemoteCommandsProvider = class {
 export {
   RemoteCommandsProvider
 };
-//# sourceMappingURL=chunk-GGYMQZXI.js.map
+//# sourceMappingURL=chunk-6RFT7K7F.js.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../tools/warp_grep/providers/remote.ts"],"sourcesContent":["/**\n * RemoteCommandsProvider - wraps simple RemoteCommands into WarpGrepProvider\n * \n * Handles parsing of raw stdout from grep/read/listDir commands.\n * Users just return stdout, SDK handles all format conversion.\n */\n\nimport type { WarpGrepProvider, GrepResult, ReadResult, ListDirectoryEntry } from './types.js';\nimport type { RemoteCommands } from '../types.js';\nimport { AGENT_CONFIG, DEFAULT_EXCLUDES } from '../agent/config.js';\n\n// eslint-disable-next-line @typescript-eslint/no-require-imports\nconst ignore = require('ignore').default || require('ignore');\ntype Ignore = ReturnType<typeof ignore>;\n\n/**\n * Wraps simple RemoteCommands functions into a full WarpGrepProvider.\n * \n * This allows users to provide three simple functions that return raw stdout,\n * and the SDK handles all parsing internally.\n * \n * @example\n * ```typescript\n * const provider = new RemoteCommandsProvider('/home/repo', {\n * grep: async (pattern, path) => {\n * const r = await sandbox.run(`rg '${pattern}' '${path}'`);\n * return r.stdout;\n * },\n * read: async (path, start, end) => {\n * const r = await sandbox.run(`sed -n '${start},${end}p' '${path}'`);\n * return r.stdout;\n * },\n * listDir: async (path, maxDepth) => {\n * const r = await sandbox.run(`find '${path}' -maxdepth ${maxDepth}`);\n * return r.stdout;\n * },\n * });\n * ```\n */\nexport class RemoteCommandsProvider implements WarpGrepProvider {\n private ignoreFilter: Ignore | null = null;\n\n constructor(\n private readonly repoRoot: string,\n private readonly commands: RemoteCommands\n ) {}\n\n /**\n * Build an ignore filter from DEFAULT_EXCLUDES.\n * Note: For remote providers, we can't read .gitignore from the remote sandbox,\n * so we only use the built-in excludes.\n */\n private getIgnoreFilter(): Ignore {\n if (this.ignoreFilter) return this.ignoreFilter;\n const ig = ignore();\n ig.add(DEFAULT_EXCLUDES);\n this.ignoreFilter = ig;\n return ig;\n }\n\n /**\n * Run grep command and parse ripgrep output\n */\n async grep(params: { pattern: string; path: string; glob?: string }): Promise<GrepResult> {\n try {\n const stdout = await this.commands.grep(params.pattern, params.path, params.glob);\n \n // Parse ripgrep output: each line is \"path:line:content\" or \"path-line-content\" for context\n const lines = (stdout || '')\n .trim()\n .split(/\\r?\\n/)\n .filter((l) => l.length > 0);\n \n // Check if output is too large\n if (lines.length > AGENT_CONFIG.MAX_OUTPUT_LINES) {\n return {\n lines: [],\n error: 'Query not specific enough - too many results returned. Try a more specific pattern.',\n };\n }\n \n return { lines };\n } catch (error) {\n return {\n lines: [],\n error: `[GREP ERROR] ${error instanceof Error ? error.message : String(error)}`,\n };\n }\n }\n\n /**\n * Read file and add line numbers\n */\n async read(params: { path: string; start?: number; end?: number }): Promise<ReadResult> {\n const start = params.start ?? 1;\n const end = params.end ?? 
1_000_000;\n \n try {\n const stdout = await this.commands.read(params.path, start, end);\n \n // Split content into lines and add line numbers\n const contentLines = (stdout || '').split('\\n');\n \n // Remove trailing empty line if present (common with sed output)\n if (contentLines.length > 0 && contentLines[contentLines.length - 1] === '') {\n contentLines.pop();\n }\n \n // Format as \"lineNumber|content\"\n const lines = contentLines.map((content, idx) => `${start + idx}|${content}`);\n \n // Check if output is too large\n if (lines.length > AGENT_CONFIG.MAX_READ_LINES) {\n const truncated = lines.slice(0, AGENT_CONFIG.MAX_READ_LINES);\n truncated.push(`... [truncated: showing ${AGENT_CONFIG.MAX_READ_LINES} of ${lines.length} lines]`);\n return { lines: truncated };\n }\n \n return { lines };\n } catch (error) {\n return {\n lines: [],\n error: `[READ ERROR] ${error instanceof Error ? error.message : String(error)}`,\n };\n }\n }\n\n /**\n * List directory and parse find output\n */\n async listDirectory(params: { \n path: string; \n pattern?: string | null; \n maxResults?: number; \n maxDepth?: number;\n }): Promise<ListDirectoryEntry[]> {\n const maxDepth = params.maxDepth ?? AGENT_CONFIG.MAX_LIST_DEPTH;\n const maxResults = params.maxResults ?? AGENT_CONFIG.MAX_OUTPUT_LINES;\n \n // Get the ignore filter (DEFAULT_EXCLUDES)\n const ig = this.getIgnoreFilter();\n \n try {\n const stdout = await this.commands.listDir(params.path, maxDepth);\n \n // Parse find output: one path per line\n const paths = (stdout || '')\n .trim()\n .split(/\\r?\\n/)\n .filter((p) => p.length > 0);\n \n const regex = params.pattern ? new RegExp(params.pattern) : null;\n const entries: ListDirectoryEntry[] = [];\n \n for (const fullPath of paths) {\n // Skip the root path itself\n if (fullPath === params.path || fullPath === this.repoRoot) continue;\n \n const name = fullPath.split('/').pop() || '';\n \n // Determine relative path\n let relativePath = fullPath;\n if (fullPath.startsWith(this.repoRoot)) {\n relativePath = fullPath.slice(this.repoRoot.length).replace(/^\\//, '');\n }\n \n // Filter using ignore rules (DEFAULT_EXCLUDES)\n if (ig.ignores(relativePath)) continue;\n \n // Apply pattern filter if provided\n if (regex && !regex.test(name)) continue;\n \n // Calculate depth based on path separators\n const depth = relativePath.split('/').filter(Boolean).length - 1;\n \n // Determine if it's a directory (ends with / or infer from path structure)\n // Note: Most sandbox find commands don't indicate type, so we guess based on extension\n const hasExtension = name.includes('.') && !name.startsWith('.');\n const type: 'file' | 'dir' = hasExtension ? 
'file' : 'dir';\n \n entries.push({\n name,\n path: relativePath,\n type,\n depth: Math.max(0, depth),\n });\n \n if (entries.length >= maxResults) break;\n }\n \n return entries;\n } catch (error) {\n // Return empty array on error (consistent with LocalRipgrepProvider)\n return [];\n }\n }\n}\n\n\n\n\n"],"mappings":";;;;;;;;;AAYA,IAAM,SAAS,iBAAkB,WAAW;AA2BrC,IAAM,yBAAN,MAAyD;AAAA,EAG9D,YACmB,UACA,UACjB;AAFiB;AACA;AAAA,EAChB;AAAA,EALK,eAA8B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAY9B,kBAA0B;AAChC,QAAI,KAAK,aAAc,QAAO,KAAK;AACnC,UAAM,KAAK,OAAO;AAClB,OAAG,IAAI,gBAAgB;AACvB,SAAK,eAAe;AACpB,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,KAAK,QAA+E;AACxF,QAAI;AACF,YAAM,SAAS,MAAM,KAAK,SAAS,KAAK,OAAO,SAAS,OAAO,MAAM,OAAO,IAAI;AAGhF,YAAM,SAAS,UAAU,IACtB,KAAK,EACL,MAAM,OAAO,EACb,OAAO,CAAC,MAAM,EAAE,SAAS,CAAC;AAG7B,UAAI,MAAM,SAAS,aAAa,kBAAkB;AAChD,eAAO;AAAA,UACL,OAAO,CAAC;AAAA,UACR,OAAO;AAAA,QACT;AAAA,MACF;AAEA,aAAO,EAAE,MAAM;AAAA,IACjB,SAAS,OAAO;AACd,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO,gBAAgB,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,MAC/E;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,KAAK,QAA6E;AACtF,UAAM,QAAQ,OAAO,SAAS;AAC9B,UAAM,MAAM,OAAO,OAAO;AAE1B,QAAI;AACF,YAAM,SAAS,MAAM,KAAK,SAAS,KAAK,OAAO,MAAM,OAAO,GAAG;AAG/D,YAAM,gBAAgB,UAAU,IAAI,MAAM,IAAI;AAG9C,UAAI,aAAa,SAAS,KAAK,aAAa,aAAa,SAAS,CAAC,MAAM,IAAI;AAC3E,qBAAa,IAAI;AAAA,MACnB;AAGA,YAAM,QAAQ,aAAa,IAAI,CAAC,SAAS,QAAQ,GAAG,QAAQ,GAAG,IAAI,OAAO,EAAE;AAG5E,UAAI,MAAM,SAAS,aAAa,gBAAgB;AAC9C,cAAM,YAAY,MAAM,MAAM,GAAG,aAAa,cAAc;AAC5D,kBAAU,KAAK,2BAA2B,aAAa,cAAc,OAAO,MAAM,MAAM,SAAS;AACjG,eAAO,EAAE,OAAO,UAAU;AAAA,MAC5B;AAEA,aAAO,EAAE,MAAM;AAAA,IACjB,SAAS,OAAO;AACd,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO,gBAAgB,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,MAC/E;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,QAKc;AAChC,UAAM,WAAW,OAAO,YAAY,aAAa;AACjD,UAAM,aAAa,OAAO,cAAc,aAAa;AAGrD,UAAM,KAAK,KAAK,gBAAgB;AAEhC,QAAI;AACF,YAAM,SAAS,MAAM,KAAK,SAAS,QAAQ,OAAO,MAAM,QAAQ;AAGhE,YAAM,SAAS,UAAU,IACtB,KAAK,EACL,MAAM,OAAO,EACb,OAAO,CAAC,MAAM,EAAE,SAAS,CAAC;AAE7B,YAAM,QAAQ,OAAO,UAAU,IAAI,OAAO,OAAO,OAAO,IAAI;AAC5D,YAAM,UAAgC,CAAC;AAEvC,iBAAW,YAAY,OAAO;AAE5B,YAAI,aAAa,OAAO,QAAQ,aAAa,KAAK,SAAU;AAE5D,cAAM,OAAO,SAAS,MAAM,GAAG,EAAE,IAAI,KAAK;AAG1C,YAAI,eAAe;AACnB,YAAI,SAAS,WAAW,KAAK,QAAQ,GAAG;AACtC,yBAAe,SAAS,MAAM,KAAK,SAAS,MAAM,EAAE,QAAQ,OAAO,EAAE;AAAA,QACvE;AAGA,YAAI,GAAG,QAAQ,YAAY,EAAG;AAG9B,YAAI,SAAS,CAAC,MAAM,KAAK,IAAI,EAAG;AAGhC,cAAM,QAAQ,aAAa,MAAM,GAAG,EAAE,OAAO,OAAO,EAAE,SAAS;AAI/D,cAAM,eAAe,KAAK,SAAS,GAAG,KAAK,CAAC,KAAK,WAAW,GAAG;AAC/D,cAAM,OAAuB,eAAe,SAAS;AAErD,gBAAQ,KAAK;AAAA,UACX;AAAA,UACA,MAAM;AAAA,UACN;AAAA,UACA,OAAO,KAAK,IAAI,GAAG,KAAK;AAAA,QAC1B,CAAC;AAED,YAAI,QAAQ,UAAU,WAAY;AAAA,MACpC;AAEA,aAAO;AAAA,IACT,SAAS,OAAO;AAEd,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AACF;","names":[]}
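
The JSDoc embedded in the source above also sketches how RemoteCommandsProvider is meant to be wired up. Restated as a standalone TypeScript snippet; the sandbox.run helper and the deep import path are assumptions for illustration, not part of the package's documented API:

// Import path is assumed; adjust to however the SDK exposes the provider.
import { RemoteCommandsProvider } from "@morphllm/morphsdk/tools/warp_grep/providers/remote";

// Hypothetical command runner: anything that executes a shell command in the
// remote environment and resolves with its stdout.
declare const sandbox: { run(cmd: string): Promise<{ stdout: string }> };

const provider = new RemoteCommandsProvider("/home/repo", {
  grep: async (pattern, path) => {
    const r = await sandbox.run(`rg '${pattern}' '${path}'`);
    return r.stdout;
  },
  read: async (path, start, end) => {
    const r = await sandbox.run(`sed -n '${start},${end}p' '${path}'`);
    return r.stdout;
  },
  listDir: async (path, maxDepth) => {
    const r = await sandbox.run(`find '${path}' -maxdepth ${maxDepth}`);
    return r.stdout;
  },
});
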
@@ -7,7 +7,7 @@ import {
 } from "./chunk-MFZP347Z.js";
 import {
   __export
-} from "./chunk-PZ5AY32C.js";
+} from "./chunk-HKJ2B2AA.js";

 // tools/browser/anthropic.ts
 var anthropic_exports = {};
@@ -77,4 +77,4 @@ export {
   createBrowserTool,
   anthropic_exports
 };
-//# sourceMappingURL=chunk-X4WPMVCM.js.map
+//# sourceMappingURL=chunk-AAIUUX4Y.js.map
@@ -6,7 +6,7 @@ import {
 } from "./chunk-MFZP347Z.js";
 import {
   __export
-} from "./chunk-PZ5AY32C.js";
+} from "./chunk-HKJ2B2AA.js";

 // tools/browser/vercel.ts
 var vercel_exports = {};
@@ -54,4 +54,4 @@ export {
   browserTool,
   vercel_exports
 };
-//# sourceMappingURL=chunk-7CJPKJEE.js.map
+//# sourceMappingURL=chunk-BYNVJ4ON.js.map
@@ -5,13 +5,13 @@ import {
 import {
   executeToolCall,
   formatResult
-} from "./chunk-7JEL2VZO.js";
+} from "./chunk-VLZEBK7S.js";
 import {
   getSystemPrompt
 } from "./chunk-Q5AHGIQO.js";
 import {
   __export
-} from "./chunk-PZ5AY32C.js";
+} from "./chunk-HKJ2B2AA.js";

 // tools/warp_grep/gemini.ts
 var gemini_exports = {};
@@ -72,4 +72,4 @@ export {
   gemini_default,
   gemini_exports
 };
-//# sourceMappingURL=chunk-SQHZPUCS.js.map
+//# sourceMappingURL=chunk-FURFQDXF.js.map
@@ -7,7 +7,7 @@ import {
 } from "./chunk-63WE2C5R.js";
 import {
   __export
-} from "./chunk-PZ5AY32C.js";
+} from "./chunk-HKJ2B2AA.js";

 // tools/fastapply/vercel.ts
 var vercel_exports = {};
@@ -86,4 +86,4 @@ export {
   vercel_default,
   vercel_exports
 };
-//# sourceMappingURL=chunk-QZNGKOCZ.js.map
+//# sourceMappingURL=chunk-HBWJLKNM.js.map
@@ -0,0 +1,15 @@
+var __defProp = Object.defineProperty;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __commonJS = (cb, mod) => function __require() {
+  return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
+};
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+
+export {
+  __commonJS,
+  __export
+};
+//# sourceMappingURL=chunk-HKJ2B2AA.js.map
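
These are esbuild's standard interop helpers; __commonJS is what lets the newly bundled ignore package (the 396-line chunk-S6KU22MU.js) be registered once and lazily evaluated as require_ignore() from the ESM chunks above. A rough sketch of the pattern, with a toy module body standing in for the real bundled code:

var __getOwnPropNames = Object.getOwnPropertyNames;
var __commonJS = (cb, mod) => function __require() {
  // Evaluate the wrapped CommonJS module once, cache it, and return its exports.
  return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
};

// Toy stand-in for how chunk-S6KU22MU.js wraps the bundled "ignore" module.
var require_ignore = __commonJS({
  "node_modules/ignore/index.js"(exports, module) {
    module.exports = () => ({ add() {}, ignores() { return false; } });
  }
});

// Consumer chunks then resolve the default/CJS export at load time:
var ignore = require_ignore().default || require_ignore();
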
@@ -5,13 +5,13 @@ import {
 import {
   executeToolCall,
   formatResult
-} from "./chunk-7JEL2VZO.js";
+} from "./chunk-VLZEBK7S.js";
 import {
   getSystemPrompt
 } from "./chunk-Q5AHGIQO.js";
 import {
   __export
-} from "./chunk-PZ5AY32C.js";
+} from "./chunk-HKJ2B2AA.js";

 // tools/warp_grep/openai.ts
 var openai_exports = {};
@@ -71,4 +71,4 @@ export {
   openai_default,
   openai_exports
 };
-//# sourceMappingURL=chunk-XLSODV6H.js.map
+//# sourceMappingURL=chunk-JPGX6WEV.js.map
@@ -5,13 +5,13 @@ import {
 import {
   executeToolCall,
   formatResult
-} from "./chunk-7JEL2VZO.js";
+} from "./chunk-VLZEBK7S.js";
 import {
   getSystemPrompt
 } from "./chunk-Q5AHGIQO.js";
 import {
   __export
-} from "./chunk-PZ5AY32C.js";
+} from "./chunk-HKJ2B2AA.js";

 // tools/warp_grep/anthropic.ts
 var anthropic_exports = {};
@@ -62,4 +62,4 @@ export {
   createWarpGrepTool,
   anthropic_exports
 };
-//# sourceMappingURL=chunk-AQD2JCLI.js.map
+//# sourceMappingURL=chunk-K3EHH3C4.js.map
@@ -4,13 +4,13 @@ import {
 import {
   executeToolCall,
   formatResult
-} from "./chunk-7JEL2VZO.js";
+} from "./chunk-VLZEBK7S.js";
 import {
   getSystemPrompt
 } from "./chunk-Q5AHGIQO.js";
 import {
   __export
-} from "./chunk-PZ5AY32C.js";
+} from "./chunk-HKJ2B2AA.js";

 // tools/warp_grep/vercel.ts
 var vercel_exports = {};
@@ -54,4 +54,4 @@ export {
   vercel_default,
   vercel_exports
 };
-//# sourceMappingURL=chunk-7V2KIZT5.js.map
+//# sourceMappingURL=chunk-QEGH3CWQ.js.map
@@ -7,7 +7,7 @@ import {
 } from "./chunk-63WE2C5R.js";
 import {
   __export
-} from "./chunk-PZ5AY32C.js";
+} from "./chunk-HKJ2B2AA.js";

 // tools/fastapply/openai.ts
 var openai_exports = {};
@@ -103,4 +103,4 @@ export {
   openai_default,
   openai_exports
 };
-//# sourceMappingURL=chunk-5QIWYEHJ.js.map
+//# sourceMappingURL=chunk-RTDKF6NS.js.map