@agi-cli/sdk 0.1.80 → 0.1.82

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@agi-cli/sdk",
-  "version": "0.1.80",
+  "version": "0.1.82",
   "description": "AI agent SDK for building intelligent assistants - tree-shakable and comprehensive",
   "author": "ntishxyz",
   "license": "MIT",
@@ -4,7 +4,8 @@
 - Returns a compact patch artifact summarizing the change
 
 Usage tips:
-- Only use for creating new files or completely replacing file content
+- Use for creating NEW files
+- Use when replacing >70% of a file's content (almost complete rewrite)
 - NEVER use for partial/targeted edits - use apply_patch or edit instead
 - Using write for partial edits wastes output tokens and risks hallucinating unchanged parts
 - Prefer idempotent writes by providing the full intended content when you do use write
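To make the revised guidance concrete: write emits the entire file, so it suits new files and near-total rewrites, while apply_patch/edit only transmit the changed lines. A minimal sketch, assuming a hypothetical tool-call shape (the actual write/edit parameter names are not part of this diff):

// Hypothetical call shapes, for illustration only.
// New file or >70% rewrite: send the FULL intended content once.
await tools.write({
  path: 'src/config.ts',
  content: 'export const config = { retries: 3 };\n',
});

// Small targeted change: prefer edit/apply_patch so unchanged lines
// are never re-emitted (saves output tokens, avoids hallucinated drift).
await tools.edit({
  path: 'src/config.ts',
  oldText: 'retries: 3',
  newText: 'retries: 5',
});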
@@ -0,0 +1,562 @@
+import { mkdir, readFile, unlink, writeFile } from 'node:fs/promises';
+import { dirname, isAbsolute, relative, resolve } from 'node:path';
+
+import { NORMALIZATION_LEVELS, normalizeWhitespace } from './normalize.ts';
+import {
+  PATCH_ADD_PREFIX,
+  PATCH_DELETE_PREFIX,
+  PATCH_UPDATE_PREFIX,
+  PATCH_BEGIN_MARKER,
+  PATCH_END_MARKER,
+} from './constants.ts';
+import type {
+  AppliedPatchHunk,
+  AppliedPatchOperation,
+  PatchAddOperation,
+  PatchApplicationResult,
+  PatchDeleteOperation,
+  PatchHunk,
+  PatchHunkLine,
+  PatchOperation,
+  PatchSummary,
+  PatchUpdateOperation,
+  RejectedPatch,
+} from './types.ts';
+import { ensureTrailingNewline, joinLines, splitLines } from './text.ts';
+
+export function resolveProjectPath(
+  projectRoot: string,
+  filePath: string,
+): string {
+  const fullPath = resolve(projectRoot, filePath);
+  const relativePath = relative(projectRoot, fullPath);
+  if (relativePath.startsWith('..') || isAbsolute(relativePath)) {
+    throw new Error(`Patch path escapes project root: ${filePath}`);
+  }
+  return fullPath;
+}
+
+function makeAppliedRecord(
+  kind: AppliedPatchOperation['kind'],
+  filePath: string,
+  hunks: AppliedPatchHunk[],
+): AppliedPatchOperation {
+  const stats = hunks.reduce(
+    (acc, hunk) => ({
+      additions: acc.additions + hunk.additions,
+      deletions: acc.deletions + hunk.deletions,
+    }),
+    { additions: 0, deletions: 0 },
+  );
+  return {
+    kind,
+    filePath,
+    stats,
+    hunks,
+  };
+}
+
+function makeSummary(operations: AppliedPatchOperation[]): PatchSummary {
+  return operations.reduce<PatchSummary>(
+    (acc, op) => ({
+      files: acc.files + 1,
+      additions: acc.additions + op.stats.additions,
+      deletions: acc.deletions + op.stats.deletions,
+    }),
+    { files: 0, additions: 0, deletions: 0 },
+  );
+}
+
+function serializePatchLine(line: PatchHunkLine): string {
+  switch (line.kind) {
+    case 'add':
+      return `+${line.content}`;
+    case 'remove':
+      return `-${line.content}`;
+    default:
+      return ` ${line.content}`;
+  }
+}
+
+function formatRange(start: number, count: number) {
+  const normalizedStart = Math.max(0, start);
+  if (count === 0) return `${normalizedStart},0`;
+  if (count === 1) return `${normalizedStart}`;
+  return `${normalizedStart},${count}`;
+}
+
+function formatHunkHeader(hunk: AppliedPatchHunk) {
+  const oldRange = formatRange(hunk.oldStart, hunk.oldLines);
+  const newRange = formatRange(hunk.newStart, hunk.newLines);
+  const context = hunk.header.context?.trim();
+  return context
+    ? `@@ -${oldRange} +${newRange} @@ ${context}`
+    : `@@ -${oldRange} +${newRange} @@`;
+}
+
+function formatNormalizedPatch(operations: AppliedPatchOperation[]): string {
+  const lines: string[] = [PATCH_BEGIN_MARKER];
+  for (const op of operations) {
+    switch (op.kind) {
+      case 'add':
+        lines.push(`${PATCH_ADD_PREFIX} ${op.filePath}`);
+        break;
+      case 'delete':
+        lines.push(`${PATCH_DELETE_PREFIX} ${op.filePath}`);
+        break;
+      case 'update':
+        lines.push(`${PATCH_UPDATE_PREFIX} ${op.filePath}`);
+        break;
+    }
+
+    if (op.kind === 'add' || op.kind === 'delete') {
+      for (const hunk of op.hunks) {
+        lines.push(formatHunkHeader(hunk));
+        for (const line of hunk.lines) {
+          lines.push(serializePatchLine(line));
+        }
+      }
+      continue;
+    }
+
+    for (const hunk of op.hunks) {
+      lines.push(formatHunkHeader(hunk));
+      for (const line of hunk.lines) {
+        lines.push(serializePatchLine(line));
+      }
+    }
+  }
+  lines.push(PATCH_END_MARKER);
+  return lines.join('\n');
+}
+
+function findLineIndex(
+  lines: string[],
+  pattern: string,
+  start: number,
+  useFuzzy: boolean,
+): number {
+  for (let i = Math.max(0, start); i < lines.length; i++) {
+    if (lines[i] === pattern) return i;
+    if (!useFuzzy) continue;
+    for (const level of NORMALIZATION_LEVELS.slice(1)) {
+      if (
+        normalizeWhitespace(lines[i], level) ===
+        normalizeWhitespace(pattern, level)
+      ) {
+        return i;
+      }
+    }
+  }
+  return -1;
+}
+
+function findSubsequence(
+  lines: string[],
+  pattern: string[],
+  startIndex: number,
+  useFuzzy: boolean,
+): number {
+  if (pattern.length === 0) return -1;
+  const start = Math.max(0, startIndex);
+  for (let i = start; i <= lines.length - pattern.length; i++) {
+    let matches = true;
+    for (let j = 0; j < pattern.length; j++) {
+      const line = lines[i + j];
+      const target = pattern[j];
+      if (line === target) continue;
+      if (!useFuzzy) {
+        matches = false;
+        break;
+      }
+      let matched = false;
+      for (const level of NORMALIZATION_LEVELS.slice(1)) {
+        if (
+          normalizeWhitespace(line, level) ===
+          normalizeWhitespace(target, level)
+        ) {
+          matched = true;
+          break;
+        }
+      }
+      if (!matched) {
+        matches = false;
+        break;
+      }
+    }
+    if (matches) return i;
+  }
+  return -1;
+}
+
+function computeInsertionIndex(
+  lines: string[],
+  header: PatchHunk['header'],
+  hint: number,
+): number {
+  if (header.context) {
+    const contextIndex = findLineIndex(lines, header.context, 0, true);
+    if (contextIndex !== -1) return contextIndex + 1;
+  }
+
+  if (typeof header.oldStart === 'number') {
+    const zeroBased = Math.max(0, header.oldStart - 1);
+    return Math.min(lines.length, zeroBased);
+  }
+
+  if (typeof header.newStart === 'number') {
+    const zeroBased = Math.max(0, header.newStart - 1);
+    return Math.min(lines.length, zeroBased);
+  }
+
+  return Math.min(lines.length, Math.max(0, hint));
+}
+
+function lineExists(
+  lines: string[],
+  target: string,
+  useFuzzy: boolean,
+): boolean {
+  return findLineIndex(lines, target, 0, useFuzzy) !== -1;
+}
+
+function isHunkAlreadyApplied(
+  lines: string[],
+  hunk: PatchHunk,
+  useFuzzy: boolean,
+): boolean {
+  const replacement = hunk.lines
+    .filter((line) => line.kind !== 'remove')
+    .map((line) => line.content);
+
+  if (replacement.length > 0) {
+    return findSubsequence(lines, replacement, 0, useFuzzy) !== -1;
+  }
+
+  const removals = hunk.lines.filter((line) => line.kind === 'remove');
+  if (removals.length === 0) return false;
+  return removals.every((line) => !lineExists(lines, line.content, useFuzzy));
+}
+
+function applyHunkToLines(
+  lines: string[],
+  originalLines: string[],
+  hunk: PatchHunk,
+  hint: number,
+  useFuzzy: boolean,
+): AppliedPatchHunk | null {
+  const expected = hunk.lines
+    .filter((line) => line.kind !== 'add')
+    .map((line) => line.content);
+  const replacement = hunk.lines
+    .filter((line) => line.kind !== 'remove')
+    .map((line) => line.content);
+
+  const removals = hunk.lines.filter((line) => line.kind === 'remove');
+  const additions = hunk.lines
+    .filter((line) => line.kind === 'add')
+    .map((line) => line.content);
+  const contextLines = hunk.lines
+    .filter((line) => line.kind === 'context')
+    .map((line) => line.content);
+
+  const hasExpected = expected.length > 0;
+  const initialHint =
+    typeof hunk.header.oldStart === 'number'
+      ? Math.max(0, hunk.header.oldStart - 1)
+      : hint;
+
+  let matchIndex = hasExpected
+    ? findSubsequence(lines, expected, Math.max(0, initialHint - 3), useFuzzy)
+    : -1;
+
+  if (hasExpected && matchIndex === -1) {
+    matchIndex = findSubsequence(lines, expected, 0, useFuzzy);
+  }
+
+  if (matchIndex === -1 && removals.length > 0) {
+    const expectedWithoutMissingRemovals = expected.filter((line) =>
+      lineExists(lines, line, useFuzzy),
+    );
+    if (expectedWithoutMissingRemovals.length > 0) {
+      matchIndex = findSubsequence(
+        lines,
+        expectedWithoutMissingRemovals,
+        Math.max(0, initialHint - 3),
+        useFuzzy,
+      );
+      if (matchIndex === -1) {
+        matchIndex = findSubsequence(
+          lines,
+          expectedWithoutMissingRemovals,
+          0,
+          useFuzzy,
+        );
+      }
+    }
+  }
+
+  if (matchIndex === -1 && isHunkAlreadyApplied(lines, hunk, useFuzzy)) {
+    const skipStart =
+      initialHint >= 0 && initialHint < lines.length ? initialHint + 1 : 1;
+    return {
+      header: { ...hunk.header },
+      lines: hunk.lines.map((line) => ({ ...line })),
+      oldStart: skipStart,
+      oldLines: 0,
+      newStart: skipStart,
+      newLines: replacement.length,
+      additions: hunk.lines.filter((l) => l.kind === 'add').length,
+      deletions: hunk.lines.filter((l) => l.kind === 'remove').length,
+    };
+  }
+
+  if (matchIndex === -1 && !hasExpected) {
+    matchIndex = computeInsertionIndex(lines, hunk.header, initialHint);
+  }
+
+  if (matchIndex === -1) {
+    const contextInfo = hunk.header.context
+      ? ` near context '${hunk.header.context}'`
+      : '';
+
+    if (additions.length > 0) {
+      const anchorContext =
+        contextLines.length > 0
+          ? contextLines[contextLines.length - 1]
+          : undefined;
+      const anchorIndex =
+        anchorContext !== undefined
+          ? findLineIndex(lines, anchorContext, 0, useFuzzy)
+          : -1;
+
+      const insertionIndex =
+        anchorIndex !== -1
+          ? anchorIndex + 1
+          : computeInsertionIndex(lines, hunk.header, initialHint);
+
+      if (
+        findSubsequence(
+          lines,
+          additions,
+          Math.max(0, insertionIndex - additions.length),
+          useFuzzy,
+        ) !== -1
+      ) {
+        const skipStart =
+          insertionIndex >= 0 && insertionIndex < lines.length
+            ? insertionIndex + 1
+            : lines.length + 1;
+        return {
+          header: { ...hunk.header },
+          lines: hunk.lines.map((line) => ({ ...line })),
+          oldStart: skipStart,
+          oldLines: 0,
+          newStart: skipStart,
+          newLines: additions.length,
+          additions: additions.length,
+          deletions: 0,
+        };
+      }
+
+      const anchorInOriginal =
+        anchorContext !== undefined
+          ? findLineIndex(originalLines, anchorContext, 0, useFuzzy)
+          : -1;
+      const oldStart =
+        anchorInOriginal !== -1
+          ? anchorInOriginal + 1
+          : Math.min(originalLines.length + 1, insertionIndex + 1);
+
+      lines.splice(insertionIndex, 0, ...additions);
+
+      return {
+        header: { ...hunk.header },
+        lines: hunk.lines.map((line) => ({ ...line })),
+        oldStart,
+        oldLines: 0,
+        newStart: insertionIndex + 1,
+        newLines: additions.length,
+        additions: additions.length,
+        deletions: 0,
+      };
+    }
+
+    let errorMsg = `Failed to apply patch hunk${contextInfo}.`;
+    if (expected.length > 0) {
+      errorMsg += `\nExpected to find:\n${expected
+        .map((l) => ` ${l}`)
+        .join('\n')}`;
+    }
+    if (removals.length > 0) {
+      const missing = removals
+        .filter((line) => !lineExists(lines, line.content, useFuzzy))
+        .map((line) => line.content);
+      if (missing.length === removals.length) {
+        errorMsg +=
+          '\nAll removal lines already absent; consider reading the file again to capture current state.';
+      }
+    }
+    throw new Error(errorMsg);
+  }
+
+  const deleteCount = hasExpected ? expected.length : 0;
+  const originalIndex = matchIndex;
+  const oldStart = Math.min(
+    originalLines.length,
+    Math.max(0, originalIndex) + 1,
+  );
+  const newStart = matchIndex + 1;
+
+  lines.splice(matchIndex, deleteCount, ...replacement);
+
+  return {
+    header: { ...hunk.header },
+    lines: hunk.lines.map((line) => ({ ...line })),
+    oldStart,
+    oldLines: deleteCount,
+    newStart,
+    newLines: replacement.length,
+    additions: hunk.lines.filter((l) => l.kind === 'add').length,
+    deletions: hunk.lines.filter((l) => l.kind === 'remove').length,
+  };
+}
+
+async function applyAddOperation(
+  projectRoot: string,
+  operation: PatchAddOperation,
+): Promise<AppliedPatchOperation> {
+  const target = resolveProjectPath(projectRoot, operation.filePath);
+  await mkdir(dirname(target), { recursive: true });
+  const lines = [...operation.lines];
+  ensureTrailingNewline(lines);
+  await writeFile(target, joinLines(lines, '\n'), 'utf-8');
+
+  const appliedHunk: AppliedPatchHunk = {
+    header: {},
+    lines: operation.lines.map((line) => ({ kind: 'add', content: line })),
+    oldStart: 0,
+    oldLines: 0,
+    newStart: 1,
+    newLines: lines.length,
+    additions: lines.length,
+    deletions: 0,
+  };
+
+  return makeAppliedRecord('add', operation.filePath, [appliedHunk]);
+}
+
+async function applyDeleteOperation(
+  projectRoot: string,
+  operation: PatchDeleteOperation,
+): Promise<AppliedPatchOperation> {
+  const target = resolveProjectPath(projectRoot, operation.filePath);
+  let existing = '';
+  try {
+    existing = await readFile(target, 'utf-8');
+  } catch (error) {
+    if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
+      throw new Error(`File not found for deletion: ${operation.filePath}`);
+    }
+    throw error;
+  }
+
+  const { lines } = splitLines(existing);
+  await unlink(target);
+
+  const appliedHunk: AppliedPatchHunk = {
+    header: {},
+    lines: lines.map((line) => ({ kind: 'remove', content: line })),
+    oldStart: 1,
+    oldLines: lines.length,
+    newStart: 0,
+    newLines: 0,
+    additions: 0,
+    deletions: lines.length,
+  };
+
+  return makeAppliedRecord('delete', operation.filePath, [appliedHunk]);
+}
+
+async function applyUpdateOperation(
+  projectRoot: string,
+  operation: PatchUpdateOperation,
+  useFuzzy: boolean,
+): Promise<AppliedPatchOperation> {
+  const target = resolveProjectPath(projectRoot, operation.filePath);
+  let original: string;
+  try {
+    original = await readFile(target, 'utf-8');
+  } catch (error) {
+    if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
+      throw new Error(`File not found: ${operation.filePath}`);
+    }
+    throw error;
+  }
+
+  const { lines: originalLines, newline } = splitLines(original);
+  const workingLines = [...originalLines];
+  const appliedHunks: AppliedPatchHunk[] = [];
+  let hint = 0;
+
+  for (const hunk of operation.hunks) {
+    const applied = applyHunkToLines(
+      workingLines,
+      originalLines,
+      hunk,
+      hint,
+      useFuzzy,
+    );
+    if (!applied) continue;
+    appliedHunks.push(applied);
+    hint = applied.newStart + applied.newLines - 1;
+  }
+
+  ensureTrailingNewline(workingLines);
+  await writeFile(target, joinLines(workingLines, newline), 'utf-8');
+
+  return makeAppliedRecord('update', operation.filePath, appliedHunks);
+}
+
+export async function applyPatchOperations(
+  projectRoot: string,
+  operations: PatchOperation[],
+  options: { useFuzzy: boolean; allowRejects: boolean },
+): Promise<PatchApplicationResult> {
+  const applied: AppliedPatchOperation[] = [];
+  const rejected: RejectedPatch[] = [];
+
+  for (const operation of operations) {
+    try {
+      if (operation.kind === 'add') {
+        applied.push(await applyAddOperation(projectRoot, operation));
+      } else if (operation.kind === 'delete') {
+        applied.push(await applyDeleteOperation(projectRoot, operation));
+      } else {
+        applied.push(
+          await applyUpdateOperation(projectRoot, operation, options.useFuzzy),
+        );
+      }
+    } catch (error) {
+      if (options.allowRejects) {
+        rejected.push({
+          kind: operation.kind,
+          filePath: operation.filePath,
+          reason: error instanceof Error ? error.message : String(error),
+          operation,
+        });
+        continue;
+      }
+      throw error;
+    }
+  }
+
+  const summary = makeSummary(applied);
+
+  return {
+    operations: applied,
+    summary,
+    normalizedPatch: formatNormalizedPatch(applied),
+    rejected,
+  };
+}
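For orientation, here is a minimal usage sketch of the applyPatchOperations entry point added above. The operation and hunk field names are inferred from how this file consumes them (types.ts is not part of this diff), and the import path is an assumption:

// Sketch only: module path and exact operation shapes are assumptions.
import { applyPatchOperations } from './patch.ts';

const result = await applyPatchOperations(
  '/path/to/project', // projectRoot; every filePath is resolved inside it
  [
    {
      kind: 'update',
      filePath: 'src/index.ts',
      hunks: [
        {
          header: { context: 'export function main() {' },
          lines: [
            { kind: 'context', content: '  const answer = 41;' },
            { kind: 'remove', content: '  return answer;' },
            { kind: 'add', content: '  return answer + 1;' },
          ],
        },
      ],
    },
  ],
  { useFuzzy: true, allowRejects: true }, // tolerate whitespace drift; collect failures instead of throwing
);

// When the hunk applies cleanly, e.g. { files: 1, additions: 1, deletions: 1 }
console.log(result.summary);
// Compact '*** Begin Patch' ... '*** End Patch' artifact describing the change
console.log(result.normalizedPatch);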
@@ -0,0 +1,5 @@
+export const PATCH_BEGIN_MARKER = '*** Begin Patch';
+export const PATCH_END_MARKER = '*** End Patch';
+export const PATCH_ADD_PREFIX = '*** Add File:';
+export const PATCH_UPDATE_PREFIX = '*** Update File:';
+export const PATCH_DELETE_PREFIX = '*** Delete File:';
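These markers frame the normalized patch artifact assembled by formatNormalizedPatch above: one file-prefix line per operation, then @@ headers and +/-/space lines per hunk. A representative artifact (file name and content are illustrative) looks like:

*** Begin Patch
*** Update File: src/index.ts
@@ -12,3 +12,3 @@ export function main() {
   const answer = 41;
-  return answer;
+  return answer + 1;
*** End Patch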
@@ -0,0 +1,31 @@
+enum NormalizationLevel {
+  EXACT = 'exact',
+  TABS_ONLY = 'tabs',
+  WHITESPACE = 'whitespace',
+  AGGRESSIVE = 'aggressive',
+}
+
+export function normalizeWhitespace(
+  line: string,
+  level: NormalizationLevel,
+): string {
+  switch (level) {
+    case NormalizationLevel.EXACT:
+      return line;
+    case NormalizationLevel.TABS_ONLY:
+      return line.replace(/\t/g, '  ');
+    case NormalizationLevel.WHITESPACE:
+      return line.replace(/\t/g, '  ').replace(/\s+$/, '');
+    case NormalizationLevel.AGGRESSIVE:
+      return line.replace(/\t/g, '  ').trim();
+    default:
+      return line;
+  }
+}
+
+export const NORMALIZATION_LEVELS: NormalizationLevel[] = [
+  NormalizationLevel.EXACT,
+  NormalizationLevel.TABS_ONLY,
+  NormalizationLevel.WHITESPACE,
+  NormalizationLevel.AGGRESSIVE,
+];
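patch.ts walks this ladder when fuzzy matching is enabled: lines are compared exactly first, then through the looser levels (NORMALIZATION_LEVELS.slice(1)) until one of them makes the lines agree. A small sketch of that comparison, with an assumed relative import path:

// Mirrors the comparison ladder used by findLineIndex/findSubsequence in patch.ts.
import { NORMALIZATION_LEVELS, normalizeWhitespace } from './normalize.ts';

function linesMatch(a: string, b: string): boolean {
  if (a === b) return true; // exact match first
  // then tabs-only, trailing-whitespace, and aggressive (trim) levels, in order
  return NORMALIZATION_LEVELS.slice(1).some(
    (level) => normalizeWhitespace(a, level) === normalizeWhitespace(b, level),
  );
}

// Matches at the 'whitespace' level: tabs become spaces, trailing blanks are stripped.
linesMatch('\treturn answer;', '  return answer;  '); // true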