@limrun/api 0.18.3 → 0.19.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,569 @@
+ import fs from 'fs';
+ import path from 'path';
+ import os from 'os';
+ import crypto from 'crypto';
+ import { spawn } from 'child_process';
+ import { watchFolderTree } from './folder-sync-watcher';
+ import { Readable } from 'stream';
+ import * as zlib from 'zlib';
+
+ // =============================================================================
+ // Folder Sync (HTTP batch)
+ // =============================================================================
+
+ export type FolderSyncOptions = {
+   apiUrl: string;
+   token: string;
+   udid: string; // used only for local cache scoping
+   /**
+    * Directory for the client-side folder-sync cache.
+    * Used to store the last-synced “basis” copies of files (and related sync metadata) so we can compute xdelta patches
+    * on subsequent syncs without re-downloading server state.
+    *
+    * Can be absolute or relative to process.cwd(). Defaults to `.lim-metadata-cache/`.
+    */
+   basisCacheDir?: string;
+   install?: boolean;
+   launchMode?: 'ForegroundIfRunning' | 'RelaunchIfRunning' | 'FailIfRunning';
+   /** If true, watch the folder and re-sync on any changes (debounced, single-flight). */
+   watch?: boolean;
+   /** Max patch size (bytes) to send as delta before falling back to full upload. */
+   maxPatchBytes?: number;
+   /** Controls logging verbosity */
+   log?: (level: 'debug' | 'info' | 'warn' | 'error', msg: string) => void;
+ };
+
+ export type SyncFolderResult = {
+   installedAppPath?: string;
+   installedBundleId?: string;
+   /** Present only when watch=true; call to stop watching. */
+   stopWatching?: () => void;
+ };
+
+ type FileEntry = { path: string; size: number; sha256: string; absPath: string; mode: number };
+
+ type FolderSyncHttpPayload = {
+   kind: 'delta' | 'full';
+   path: string;
+   /** Required for delta. Must match server's current sha for this path. */
+   basisSha256?: string;
+   /** Expected target sha after apply (also must match manifest's sha for path). */
+   targetSha256: string;
+   /** Number of bytes that will follow for this payload in the request body. */
+   length: number;
+ };
+ type FolderSyncHttpMeta = {
+   id: string;
+   rootName: string;
+   install?: boolean;
+   launchMode?: 'ForegroundIfRunning' | 'RelaunchIfRunning' | 'FailIfRunning';
+   files: { path: string; size: number; sha256: string; mode: number }[];
+   payloads: FolderSyncHttpPayload[];
+ };
+ type FolderSyncHttpResponse = {
+   ok: boolean;
+   needFull?: string[];
+   installedAppPath?: string;
+   bundleId?: string;
+   error?: string;
+ };
+
+ const noopLogger = (_level: 'debug' | 'info' | 'warn' | 'error', _msg: string) => {
+   // Intentionally empty: callers (e.g. ios-client.ts) should provide their own logger
+   // to control verbosity and integrate with the SDK's logging setup.
+ };
+
+ function nowMs(): number {
+   return Date.now();
+ }
+
+ function fmtMs(ms: number): string {
+   if (ms < 1000) return `${ms.toFixed(0)}ms`;
+   return `${(ms / 1000).toFixed(2)}s`;
+ }
+
+ function fmtBytes(bytes: number): string {
+   if (bytes < 1024) return `${bytes}B`;
+   const kib = bytes / 1024;
+   if (kib < 1024) return `${kib.toFixed(1)}KiB`;
+   const mib = kib / 1024;
+   if (mib < 1024) return `${mib.toFixed(1)}MiB`;
+   const gib = mib / 1024;
+   return `${gib.toFixed(2)}GiB`;
+ }
+
+ function genId(prefix: string): string {
+   return `${prefix}-${Date.now()}-${Math.random().toString(36).slice(2)}`;
+ }
+
+ function isENOENT(err: unknown): boolean {
+   const e = err as { code?: string; cause?: { code?: string } };
+   return e?.code === 'ENOENT' || e?.cause?.code === 'ENOENT';
+ }
+
+ function concurrencyLimit(): number {
+   // min(4, max(1, cpuCount-1))
+   const cpu = os.cpus()?.length ?? 1;
+   return Math.min(4, Math.max(1, cpu - 1));
+ }
+
+ async function mapLimit<T, R>(items: T[], limit: number, fn: (item: T) => Promise<R>): Promise<R[]> {
+   const results: R[] = new Array(items.length);
+   let idx = 0;
+   const workers = new Array(Math.min(limit, items.length)).fill(0).map(async () => {
+     while (true) {
+       const my = idx++;
+       if (my >= items.length) return;
+       const item = items[my]!;
+       results[my] = await fn(item);
+     }
+   });
+   await Promise.all(workers);
+   return results;
+ }
+
+ function folderSyncHttpUrl(apiUrl: string): string {
+   return `${apiUrl}/folder-sync`;
+ }
+
+ function u32be(n: number): Buffer {
+   const b = Buffer.allocUnsafe(4);
+   b.writeUInt32BE(n >>> 0, 0);
+   return b;
+ }
+
+ async function httpFolderSyncBatch(
+   opts: FolderSyncOptions,
+   meta: FolderSyncHttpMeta,
+   payloadFiles: { filePath: string }[],
+   compression: 'zstd' | 'gzip' | 'identity',
+ ): Promise<FolderSyncHttpResponse> {
+   const url = folderSyncHttpUrl(opts.apiUrl);
+   const headers: Record<string, string> = {
+     // OpenAPI route expects application/octet-stream.
+     'Content-Type': 'application/octet-stream',
+     Authorization: `Bearer ${opts.token}`,
+   };
+
+   const metaBytes = Buffer.from(JSON.stringify(meta), 'utf-8');
+   const head = Buffer.concat([u32be(metaBytes.length), metaBytes]);
+
+   async function* gen(): AsyncGenerator<Buffer> {
+     yield head;
+     for (const p of payloadFiles) {
+       const fd = await fs.promises.open(p.filePath, 'r');
+       try {
+         const st = await fd.stat();
+         let offset = 0;
+         while (offset < st.size) {
+           const len = Math.min(256 * 1024, st.size - offset);
+           const buf = Buffer.allocUnsafe(len);
+           const { bytesRead } = await fd.read(buf, 0, len, offset);
+           if (bytesRead <= 0) break;
+           offset += bytesRead;
+           yield buf.subarray(0, bytesRead);
+         }
+       } finally {
+         await fd.close();
+       }
+     }
+   }
+
+   const sourceStream = Readable.from(gen());
+   let bodyStream: Readable | NodeJS.ReadWriteStream;
+   if (compression === 'zstd') {
+     const createZstd = (zlib as any).createZstdCompress as
+       | ((opts?: { level?: number }) => NodeJS.ReadWriteStream)
+       | undefined;
+     if (!createZstd) {
+       throw new Error('zstd compression not available in this Node.js version');
+     }
+     bodyStream = sourceStream.pipe(createZstd({ level: 3 }));
+     headers['Content-Encoding'] = 'zstd';
+   } else if (compression === 'gzip') {
+     const createGzip = zlib.createGzip as ((opts?: zlib.ZlibOptions) => NodeJS.ReadWriteStream) | undefined;
+     if (!createGzip) {
+       throw new Error('gzip compression not available in this Node.js version');
+     }
+     bodyStream = sourceStream.pipe(createGzip({ level: 6 }));
+     headers['Content-Encoding'] = 'gzip';
+   } else {
+     bodyStream = sourceStream;
+   }
+   const controller = new AbortController();
+   let streamError: unknown;
+   const onStreamError = (err: unknown) => {
+     streamError = err;
+     controller.abort();
+   };
+   sourceStream.on('error', onStreamError);
+   bodyStream.on('error', onStreamError);
+   const res = await fetch(url, {
+     method: 'POST',
+     headers,
+     body: bodyStream as any,
+     duplex: 'half' as any,
+     signal: controller.signal,
+   } as any).catch((err) => {
+     if (streamError) {
+       throw streamError;
+     }
+     throw err;
+   });
+   const text = await res.text();
+   if (!res.ok) {
+     throw new Error(`folder-sync http failed: ${res.status} ${text}`);
+   }
+   return JSON.parse(text) as FolderSyncHttpResponse;
+ }
+ async function sha256FileHex(filePath: string): Promise<string> {
+   return await new Promise((resolve, reject) => {
+     const hash = crypto.createHash('sha256');
+     const stream = fs.createReadStream(filePath);
+     stream.on('data', (chunk) => hash.update(chunk));
+     stream.on('error', reject);
+     stream.on('end', () => resolve(hash.digest('hex')));
+   });
+ }
+
+ async function walkFiles(root: string): Promise<FileEntry[]> {
+   const out: FileEntry[] = [];
+   const stack: string[] = [root];
+   const rootResolved = path.resolve(root);
+   while (stack.length) {
+     const dir = stack.pop()!;
+     const entries = await fs.promises.readdir(dir, { withFileTypes: true });
+     for (const ent of entries) {
+       if (ent.name === '.DS_Store') continue;
+       const abs = path.join(dir, ent.name);
+       if (ent.isDirectory()) {
+         stack.push(abs);
+         continue;
+       }
+       if (!ent.isFile()) continue;
+       const st = await fs.promises.stat(abs);
+       const rel = path.relative(rootResolved, abs).split(path.sep).join('/');
+       const sha256 = await sha256FileHex(abs);
+       // Preserve POSIX permission bits (including +x). Mask out file-type bits.
+       const mode = st.mode & 0o7777;
+       out.push({ path: rel, size: st.size, sha256, absPath: abs, mode });
+     }
+   }
+   out.sort((a, b) => a.path.localeCompare(b.path));
+   return out;
+ }
+
+ let xdelta3Ready: Promise<void> | null = null;
+ async function ensureXdelta3(): Promise<void> {
+   if (!xdelta3Ready) {
+     xdelta3Ready = new Promise<void>((resolve, reject) => {
+       const p = spawn('xdelta3', ['-V']);
+       p.on('error', reject);
+       p.on('exit', (code) => {
+         if (code === 0) resolve();
+         else reject(new Error(`xdelta3 not available (exit=${code})`));
+       });
+     });
+   }
+   return await xdelta3Ready;
+ }
+
+ async function runXdelta3Encode(basis: string, target: string, outPatch: string): Promise<void> {
+   await new Promise<void>((resolve, reject) => {
+     const p = spawn('xdelta3', ['-e', '-s', basis, target, outPatch], {
+       stdio: ['ignore', 'ignore', 'pipe'],
+     });
+     let stderr = '';
+     p.stderr.on('data', (d) => (stderr += d.toString()));
+     p.on('error', reject);
+     p.on('exit', (code) => {
+       if (code === 0) resolve();
+       else reject(new Error(`xdelta3 encode failed (exit=${code}): ${stderr.trim()}`));
+     });
+   });
+ }
+
+ function localBasisCacheRoot(opts: FolderSyncOptions, localFolderPath: string): string {
+   const hostKey = opts.apiUrl.replace(/[:/]+/g, '_');
+   const resolved = path.resolve(localFolderPath);
+   const base = path.basename(resolved);
+   const hash = crypto.createHash('sha1').update(resolved).digest('hex').slice(0, 8);
+   const rootOverride =
+     opts.basisCacheDir ?
+       path.resolve(process.cwd(), opts.basisCacheDir)
+     : path.join(process.cwd(), '.lim-metadata-cache');
+   // Include folder identity to avoid collisions between different roots.
+   return path.join(rootOverride, 'folder-sync', hostKey, opts.udid, `${base}-${hash}`);
+ }
+
+ async function cachePut(cacheRoot: string, relPath: string, srcFile: string): Promise<void> {
+   const dst = path.join(cacheRoot, relPath.split('/').join(path.sep));
+   await fs.promises.mkdir(path.dirname(dst), { recursive: true });
+   await fs.promises.copyFile(srcFile, dst);
+ }
+
+ function cacheGet(cacheRoot: string, relPath: string): string {
+   return path.join(cacheRoot, relPath.split('/').join(path.sep));
+ }
+
+ export type SyncAppResult = SyncFolderResult;
+
+ export async function syncApp(localFolderPath: string, opts: FolderSyncOptions): Promise<SyncFolderResult> {
+   if (!opts.watch) {
+     return await syncFolderOnce(localFolderPath, opts);
+   }
+   // Initial sync, then watch for changes and re-run sync in the background.
+   const first = await syncFolderOnce(localFolderPath, opts, 'startup');
+   let inFlight = false;
+   let queued = false;
+
+   const run = async (reason: string) => {
+     if (inFlight) {
+       queued = true;
+       return;
+     }
+     inFlight = true;
+     try {
+       await syncFolderOnce(localFolderPath, opts, reason);
+     } finally {
+       inFlight = false;
+       if (queued) {
+         queued = false;
+         void run('queued-changes');
+       }
+     }
+   };
+
+   const watcherLog = (level: 'debug' | 'info' | 'warn' | 'error', msg: string) => {
+     (opts.log ?? noopLogger)(level, `syncApp: ${msg}`);
+   };
+   const watcher = await watchFolderTree({
+     rootPath: localFolderPath,
+     log: watcherLog,
+     onChange: (reason) => {
+       void run(reason);
+     },
+   });
+
+   return {
+     ...first,
+     stopWatching: () => {
+       watcher.close();
+     },
+   };
+ }
+
+ // Back-compat alias (older callers)
+ export async function syncFolder(
+   localFolderPath: string,
+   opts: FolderSyncOptions,
+ ): Promise<SyncFolderResult> {
+   return await syncApp(localFolderPath, opts);
+ }
+
+ async function syncFolderOnce(
+   localFolderPath: string,
+   opts: FolderSyncOptions,
+   reason?: string,
+   attempt = 0,
+ ): Promise<SyncFolderResult> {
+   const totalStart = nowMs();
+   const log = opts.log ?? noopLogger;
+   const slog = (level: 'debug' | 'info' | 'warn' | 'error', msg: string) => log(level, `syncApp: ${msg}`);
+   const maxPatchBytes = opts.maxPatchBytes ?? 4 * 1024 * 1024;
+
+   const tEnsureStart = nowMs();
+   await ensureXdelta3();
+   const tEnsureMs = nowMs() - tEnsureStart;
+
+   const tWalkStart = nowMs();
+   const files = await walkFiles(localFolderPath);
+   const tWalkMs = nowMs() - tWalkStart;
+   const fileMap = new Map(files.map((f) => [f.path, f]));
+
+   const syncId = genId('sync');
+   const rootName = path.basename(path.resolve(localFolderPath));
+   const preferredCompression = (zlib as any).createZstdCompress ? 'zstd' : 'gzip';
+
+   const cacheRoot = localBasisCacheRoot(opts, localFolderPath);
+   await fs.promises.mkdir(cacheRoot, { recursive: true });
+
+   // Track how many bytes we actually transmit to the server (single HTTP request).
+   let bytesSentFull = 0;
+   let bytesSentDelta = 0;
+   let httpSendMsTotal = 0;
+   let deltaEncodeMsTotal = 0;
+   type EncodedPayload = { payload: FolderSyncHttpPayload; filePath: string; cleanupDir?: string };
+
+   // Build payload list by comparing against local basis cache (single-flight/watch assumes server matches cache).
+   const encodeLimit = concurrencyLimit();
+   const changed: FileEntry[] = [];
+   for (const f of files) {
+     const basisPath = cacheGet(cacheRoot, f.path);
+     if (!fs.existsSync(basisPath)) {
+       changed.push(f);
+       continue;
+     }
+     const basisSha = await sha256FileHex(basisPath);
+     if (basisSha !== f.sha256.toLowerCase()) {
+       changed.push(f);
+     }
+   }
+
+   const encodedPayloads = await mapLimit(changed, encodeLimit, async (f): Promise<EncodedPayload> => {
+     const basisPath = cacheGet(cacheRoot, f.path);
+     if (fs.existsSync(basisPath)) {
+       const basisSha = await sha256FileHex(basisPath);
+       const tmpDir = await fs.promises.mkdtemp(path.join(os.tmpdir(), 'limulator-xdelta3-'));
+       const patchPath = path.join(tmpDir, 'patch.xdelta3');
+       const encodeStart = nowMs();
+       await runXdelta3Encode(basisPath, f.absPath, patchPath);
+       const encodeMs = nowMs() - encodeStart;
+       deltaEncodeMsTotal += encodeMs;
+       const st = await fs.promises.stat(patchPath);
+       if (st.size <= maxPatchBytes) {
+         slog(
+           'debug',
+           `delta(file): ${path.posix.basename(f.path)} patchSize=${st.size} encode=${fmtMs(encodeMs)}`,
+         );
+         bytesSentDelta += st.size;
+         return {
+           payload: {
+             kind: 'delta',
+             path: f.path,
+             basisSha256: basisSha.toLowerCase(),
+             targetSha256: f.sha256.toLowerCase(),
+             length: st.size,
+           },
+           filePath: patchPath,
+           cleanupDir: tmpDir,
+         };
+       }
+       // Patch too big, fall back to full
+       try {
+         await fs.promises.rm(tmpDir, { recursive: true, force: true });
+       } catch {
+         // ignore
+       }
+     }
+     slog('debug', `full(file): ${f.path} size=${f.size}`);
+     bytesSentFull += f.size;
+     return {
+       payload: { kind: 'full', path: f.path, targetSha256: f.sha256.toLowerCase(), length: f.size },
+       filePath: f.absPath,
+     };
+   });
+
+   const meta: FolderSyncHttpMeta = {
+     id: syncId,
+     rootName,
+     install: opts.install ?? true,
+     ...(opts.launchMode ? { launchMode: opts.launchMode } : {}),
+     files: files.map((f) => ({ path: f.path, size: f.size, sha256: f.sha256.toLowerCase(), mode: f.mode })),
+     payloads: encodedPayloads.map((p) => p.payload),
+   };
+   const hasDelta = encodedPayloads.some((p) => p.payload.kind === 'delta');
+   const compression: 'zstd' | 'gzip' | 'identity' = hasDelta ? 'identity' : preferredCompression;
+   slog(
+     'info',
+     `sync started files=${files.length}${reason ? ` reason=${reason}` : ''} compression=${compression}`,
+   );
+
+   const sendStart = nowMs();
+   let resp: FolderSyncHttpResponse;
+   try {
+     resp = await httpFolderSyncBatch(
+       opts,
+       meta,
+       encodedPayloads.map((p) => ({ filePath: p.filePath })),
+       compression,
+     );
+   } catch (err) {
+     if (attempt < 1 && isENOENT(err)) {
+       slog('warn', `sync retrying after missing file during upload (ENOENT)`);
+       return await syncFolderOnce(localFolderPath, opts, reason, attempt + 1);
+     }
+     throw err;
+   }
+   httpSendMsTotal += nowMs() - sendStart;
+
+   // Retry once if server needs full for some paths (basis mismatch).
+   if (!resp.ok && resp.needFull && resp.needFull.length > 0) {
+     const need = new Set(resp.needFull);
+     const retryPayloads: EncodedPayload[] = [];
+     for (const p of need) {
+       const entry = fileMap.get(p);
+       if (!entry) continue;
+       retryPayloads.push({
+         payload: {
+           kind: 'full',
+           path: entry.path,
+           targetSha256: entry.sha256.toLowerCase(),
+           length: entry.size,
+         },
+         filePath: entry.absPath,
+       });
+     }
+     if (retryPayloads.length > 0) {
+       slog('warn', `server requested full for ${retryPayloads.length} files; retrying once`);
+       const retryMeta: FolderSyncHttpMeta = {
+         ...meta,
+         id: genId('sync'),
+         payloads: retryPayloads.map((p) => p.payload),
+       };
+       const retryStart = nowMs();
+       resp = await httpFolderSyncBatch(
+         opts,
+         retryMeta,
+         retryPayloads.map((p) => ({ filePath: p.filePath })),
+         preferredCompression,
+       );
+       httpSendMsTotal += nowMs() - retryStart;
+     }
+   }
+
+   // Cleanup patch temp dirs
+   await Promise.all(
+     encodedPayloads.map(async (p) => {
+       if (!p.cleanupDir) return;
+       try {
+         await fs.promises.rm(p.cleanupDir, { recursive: true, force: true });
+       } catch {
+         // ignore
+       }
+     }),
+   );
+
+   // Sync work includes: local hashing + planning + transfers (but excludes finalize/install wait).
+   const syncWorkMs = nowMs() - totalStart;
+   if (!resp.ok) {
+     throw new Error(resp.error ?? 'sync failed');
+   }
+   const tookMs = nowMs() - totalStart;
+   const totalBytes = bytesSentFull + bytesSentDelta;
+   slog(
+     'info',
+     `sync finished files=${files.length} sent=${fmtBytes(totalBytes)} syncWork=${fmtMs(
+       syncWorkMs,
+     )} total=${fmtMs(tookMs)}`,
+   );
+   slog('debug', `sync bytes full=${fmtBytes(bytesSentFull)} delta=${fmtBytes(bytesSentDelta)}`);
+   slog(
+     'debug',
+     `timing ensureXdelta3=${fmtMs(tEnsureMs)} walk=${fmtMs(tWalkMs)} httpSend=${fmtMs(
+       httpSendMsTotal,
+     )} deltaEncode=${fmtMs(deltaEncodeMsTotal)}`,
+   );
+   const out: SyncFolderResult = {};
+   if (resp.installedAppPath) {
+     out.installedAppPath = resp.installedAppPath;
+   }
+   if (resp.bundleId) {
+     out.installedBundleId = resp.bundleId;
+   }
+   // Update local cache optimistically: after a successful sync, cache reflects current local tree.
+   for (const f of files) {
+     await cachePut(cacheRoot, f.path, f.absPath);
+   }
+   return out;
+ }
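
The request body built by `httpFolderSyncBatch` uses a small framing scheme: a 4-byte big-endian length, the UTF-8 JSON `FolderSyncHttpMeta`, then the raw bytes of each payload concatenated in `meta.payloads` order, with the whole stream optionally zstd- or gzip-compressed and announced via `Content-Encoding`. As a sketch of that framing only (not the server's actual handler), a receiver could split a fully buffered, already-decompressed body like this:

```ts
// Sketch: split a buffered, decompressed folder-sync body into meta + per-payload bytes.
function splitFolderSyncBody(body: Buffer) {
  const metaLen = body.readUInt32BE(0); // u32be length prefix written by u32be()
  const meta = JSON.parse(body.subarray(4, 4 + metaLen).toString('utf-8')) as FolderSyncHttpMeta;
  let offset = 4 + metaLen;
  const payloads = meta.payloads.map((p) => {
    const bytes = body.subarray(offset, offset + p.length); // exactly p.length bytes per payload, in order
    offset += p.length;
    return { ...p, bytes };
  });
  return { meta, payloads };
}
```

On the consumer side, the `ios-client.ts` change below wraps this module's `syncApp` in a new `InstanceClient.syncApp` method. A minimal usage sketch, assuming an already-connected `client` and a placeholder bundle path; note that the sync shells out to `xdelta3` (probed with `xdelta3 -V`), so it must be on the local PATH:

```ts
const result = await client.syncApp('./build/MyApp.app', {
  install: true, // defaults to true
  launchMode: 'RelaunchIfRunning',
  watch: true, // debounced, single-flight re-sync on local changes
  maxPatchBytes: 4 * 1024 * 1024, // patches above this size fall back to a full upload
});
console.log(result.installedBundleId, result.installedAppPath);
// With watch: true, a stop handle is returned; call it to end the background watcher.
result.stopWatching?.();
```
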
package/src/ios-client.ts CHANGED
@@ -2,6 +2,7 @@ import { WebSocket, Data } from 'ws';
  import fs from 'fs';
  import { EventEmitter } from 'events';
  import { isNonRetryableError } from './tunnel';
+ import { syncApp as syncAppImpl, type SyncFolderResult, type FolderSyncOptions } from './folder-sync';

  /**
   * Connection state of the instance client
@@ -249,6 +250,19 @@ export type InstanceClient = {
      options?: { coordinate?: [number, number]; momentum?: number },
    ) => Promise<void>;

+   /**
+    * Sync an iOS app bundle folder to the server and (optionally) install/launch it.
+    */
+   syncApp: (
+     localAppBundlePath: string,
+     opts?: {
+       install?: boolean;
+       maxPatchBytes?: number;
+       launchMode?: 'ForegroundIfRunning' | 'RelaunchIfRunning' | 'FailIfRunning';
+       watch?: boolean;
+     },
+   ) => Promise<SyncFolderResult>;
+
    /**
     * Disconnect from the Limrun instance
     */
@@ -349,9 +363,9 @@ export type InstanceClient = {
     * Run `xcodebuild` command with the given arguments.
     * Returns the complete output once the command finishes (non-streaming).
     *
-    * Only `-version` is allowed.
+    * Only `-version` is allowed (validated server-side).
     *
-    * @param args Arguments to pass to xcodebuild (must be `['-version']`)
+    * @param args Arguments to pass to xcodebuild
     * @returns A promise that resolves to the command result with stdout, stderr, and exit code
     *
     * @example
@@ -363,7 +377,7 @@ export type InstanceClient = {
     * // Build version 16A242d
     * ```
     */
-   xcodebuild: (args: ['-version']) => Promise<CommandResult>;
+   xcodebuild: (args: string[]) => Promise<CommandResult>;

    /**
     * List all open files on the instance. Useful to start tunnel to the
@@ -1001,6 +1015,7 @@ export async function createInstanceClient(options: InstanceClientOptions): Prom
      installApp,
      setOrientation,
      scroll,
+     syncApp,
      disconnect,
      getConnectionState,
      onConnectionStateChange,
@@ -1092,6 +1107,49 @@ export async function createInstanceClient(options: InstanceClientOptions): Prom
      });
    };

+   const syncApp = async (
+     localAppBundlePath: string,
+     opts?: {
+       install?: boolean;
+       maxPatchBytes?: number;
+       launchMode?: 'ForegroundIfRunning' | 'RelaunchIfRunning' | 'FailIfRunning';
+       watch?: boolean;
+     },
+   ): Promise<SyncFolderResult> => {
+     if (!cachedDeviceInfo) {
+       throw new Error('Device info not available yet; wait for client connection to be established.');
+     }
+     const appSyncOpts: FolderSyncOptions = {
+       apiUrl: options.apiUrl,
+       token: options.token,
+       udid: cachedDeviceInfo.udid,
+       log: (level, msg) => {
+         switch (level) {
+           case 'debug':
+             logger.debug(msg);
+             break;
+           case 'info':
+             logger.info(msg);
+             break;
+           case 'warn':
+             logger.warn(msg);
+             break;
+           case 'error':
+             logger.error(msg);
+             break;
+           default:
+             logger.info(msg);
+             break;
+         }
+       },
+       ...(opts?.install !== undefined ? { install: opts.install } : {}),
+       ...(opts?.maxPatchBytes !== undefined ? { maxPatchBytes: opts.maxPatchBytes } : {}),
+       ...(opts?.launchMode !== undefined ? { launchMode: opts.launchMode } : {}),
+       ...(opts?.watch !== undefined ? { watch: opts.watch } : {}),
+     };
+     return await syncAppImpl(localAppBundlePath, appSyncOpts);
+   };
+
    const lsof = (): Promise<LsofEntry[]> => {
      return sendRequest<LsofEntry[]>('listOpenFiles', { kind: 'unix' });
    };
@@ -1100,7 +1158,7 @@ export async function createInstanceClient(options: InstanceClientOptions): Prom
      return sendRequest<CommandResult>('xcrun', { args });
    };

-   const xcodebuild = (args: ['-version']): Promise<CommandResult> => {
+   const xcodebuild = (args: string[]): Promise<CommandResult> => {
      return sendRequest<CommandResult>('xcodebuild', { args });
    };

@@ -1171,6 +1229,8 @@ export async function createInstanceClient(options: InstanceClientOptions): Prom
      const fileStream = fs.createReadStream(filePath);
      const uploadUrl = `${options.apiUrl}/files?name=${encodeURIComponent(name)}`;
      try {
+       // Node's fetch (undici) supports streaming request bodies but TS DOM types may not include
+       // `duplex` and may not accept Node ReadStreams as BodyInit in some configs.
        const response = await fetch(uploadUrl, {
          method: 'PUT',
          headers: {
@@ -1178,9 +1238,9 @@ export async function createInstanceClient(options: InstanceClientOptions): Prom
            'Content-Length': fs.statSync(filePath).size.toString(),
            Authorization: `Bearer ${options.token}`,
          },
-         body: fileStream,
-         duplex: 'half',
-       });
+         body: fileStream as any,
+         duplex: 'half' as any,
+       } as any);
        if (!response.ok) {
          const errorBody = await response.text();
          logger.debug(`Upload failed: ${response.status} ${errorBody}`);