@highstate/common 0.9.15 → 0.9.18

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35) hide show
  1. package/dist/chunk-YYNV3MVT.js +1141 -0
  2. package/dist/chunk-YYNV3MVT.js.map +1 -0
  3. package/dist/highstate.manifest.json +9 -9
  4. package/dist/index.js +2 -50
  5. package/dist/index.js.map +1 -1
  6. package/dist/units/dns/record-set/index.js +4 -6
  7. package/dist/units/dns/record-set/index.js.map +1 -1
  8. package/dist/units/existing-server/index.js +16 -22
  9. package/dist/units/existing-server/index.js.map +1 -1
  10. package/dist/units/network/l3-endpoint/index.js +6 -9
  11. package/dist/units/network/l3-endpoint/index.js.map +1 -1
  12. package/dist/units/network/l4-endpoint/index.js +6 -9
  13. package/dist/units/network/l4-endpoint/index.js.map +1 -1
  14. package/dist/units/script/index.js +6 -9
  15. package/dist/units/script/index.js.map +1 -1
  16. package/dist/units/server-dns/index.js +7 -11
  17. package/dist/units/server-dns/index.js.map +1 -1
  18. package/dist/units/server-patch/index.js +7 -11
  19. package/dist/units/server-patch/index.js.map +1 -1
  20. package/dist/units/ssh/key-pair/index.js +20 -15
  21. package/dist/units/ssh/key-pair/index.js.map +1 -1
  22. package/package.json +20 -6
  23. package/src/shared/command.ts +257 -73
  24. package/src/shared/files.ts +725 -0
  25. package/src/shared/index.ts +1 -0
  26. package/src/shared/network.ts +90 -3
  27. package/src/shared/passwords.ts +38 -2
  28. package/src/shared/ssh.ts +249 -81
  29. package/src/units/existing-server/index.ts +12 -11
  30. package/src/units/remote-folder/index.ts +0 -0
  31. package/src/units/server-dns/index.ts +1 -1
  32. package/src/units/server-patch/index.ts +1 -1
  33. package/src/units/ssh/key-pair/index.ts +16 -7
  34. package/dist/chunk-NISDP46H.js +0 -546
  35. package/dist/chunk-NISDP46H.js.map +0 -1
@@ -0,0 +1,725 @@
1
+ import type { common, network } from "@highstate/library"
2
+ import { tmpdir } from "node:os"
3
+ import { cp, mkdir, mkdtemp, rm, writeFile, rename, stat } from "node:fs/promises"
4
+ import { join, extname, basename, dirname } from "node:path"
5
+ import { createReadStream } from "node:fs"
6
+ import { pipeline } from "node:stream/promises"
7
+ import { Readable } from "node:stream"
8
+ import { createHash } from "node:crypto"
9
+ import { minimatch } from "minimatch"
10
+
11
+ import { HighstateSignature, type File } from "@highstate/contract"
12
+ import { asset, toPromise, type ObjectMeta } from "@highstate/pulumi"
13
+ import * as tar from "tar"
14
+ import unzipper from "unzipper"
15
+ import { l7EndpointToString, parseL7Endpoint, type InputL7Endpoint } from "./network"
16
+
17
/**
 * Options controlling which entries are written into the archive produced by
 * `MaterializedFolder.pack()`.
 */
export type FolderPackOptions = {
  /**
   * The glob patterns (matched with `minimatch`) to include in the packed archive.
   * If not provided, all files and folders will be included.
   */
  include?: string[]

  /**
   * The glob patterns (matched with `minimatch`) to exclude from the packed archive.
   * Applied after include patterns.
   * If not provided, no files or folders will be excluded.
   */
  exclude?: string[]
}
31
+
32
+ /**
33
+ * Creates Pulumi asset from Highstate file.
34
+ *
35
+ * @param file The file entity to create the asset from.
36
+ * @returns The created asset.
37
+ */
38
+ export function assetFromFile(file: common.File): asset.Asset {
39
+ if (file.content.type === "remote") {
40
+ return new asset.RemoteAsset(l7EndpointToString(file.content.endpoint))
41
+ }
42
+
43
+ if (file.content.type === "local") {
44
+ return new asset.FileAsset(file.content.path)
45
+ }
46
+
47
+ if (file.content.type === "artifact") {
48
+ throw new Error(
49
+ "Artifact-based files cannot be converted to Pulumi assets directly. Use MaterializedFile instead.",
50
+ )
51
+ }
52
+
53
+ if (file.meta.isBinary) {
54
+ throw new Error(
55
+ "Cannot create asset from inline binary file content. Please open an issue if you need this feature.",
56
+ )
57
+ }
58
+
59
+ return new asset.StringAsset(file.content.value)
60
+ }
61
+
62
+ /**
63
+ * Creates Pulumi archive from Highstate folder.
64
+ *
65
+ * @param folder The folder entity to create the asset archive from.
66
+ * @returns The created asset archive.
67
+ */
68
+ export function archiveFromFolder(folder: common.Folder): asset.Archive {
69
+ if (folder.content.type === "remote") {
70
+ return new asset.RemoteArchive(l7EndpointToString(folder.content.endpoint))
71
+ }
72
+
73
+ if (folder.content.type === "local") {
74
+ return new asset.FileArchive(folder.content.path)
75
+ }
76
+
77
+ if (folder.content.type === "artifact") {
78
+ throw new Error(
79
+ "Artifact-based folders cannot be converted to Pulumi assets directly. Use MaterializedFolder instead.",
80
+ )
81
+ }
82
+
83
+ const files: Record<string, asset.Asset> = {}
84
+
85
+ for (const file of folder.content.files) {
86
+ files[file.meta.name] = assetFromFile(file)
87
+ }
88
+
89
+ for (const subfolder of folder.content.folders) {
90
+ files[subfolder.meta.name] = archiveFromFolder(subfolder)
91
+ }
92
+
93
+ return new asset.AssetArchive(files)
94
+ }
95
+
96
+ /**
97
+ * Extracts a tar or zip archive from a stream to a destination directory.
98
+ *
99
+ * @param stream The stream containing the archive data
100
+ * @param destinationPath The path where to extract the archive
101
+ * @param archiveType The type of archive ('tar' or 'zip')
102
+ */
103
+ async function unarchiveFromStream(
104
+ stream: Readable,
105
+ destinationPath: string,
106
+ archiveType: "tar" | "zip",
107
+ ): Promise<void> {
108
+ await mkdir(destinationPath, { recursive: true })
109
+
110
+ switch (archiveType) {
111
+ case "tar": {
112
+ const extractStream = tar.extract({
113
+ cwd: destinationPath,
114
+ strict: true,
115
+ })
116
+
117
+ await pipeline(stream, extractStream)
118
+ return
119
+ }
120
+ case "zip": {
121
+ // Extract directly from stream using unzipper
122
+ await pipeline(stream, unzipper.Extract({ path: destinationPath }))
123
+ return
124
+ }
125
+ }
126
+ }
127
+
128
+ /**
129
+ * Determines the archive type based on file extension or content type.
130
+ *
131
+ * @param fileName The name of the file
132
+ * @param contentType Optional content type from HTTP headers
133
+ * @returns The detected archive type or null if not an archive
134
+ */
135
+ function detectArchiveType(fileName: string, contentType?: string): "tar" | "zip" | null {
136
+ const ext = extname(fileName).toLowerCase()
137
+
138
+ if (ext === ".tar" || ext === ".tgz" || ext === ".tar.gz") {
139
+ return "tar"
140
+ }
141
+
142
+ if (ext === ".zip") {
143
+ return "zip"
144
+ }
145
+
146
+ // Fallback to content type
147
+ if (contentType) {
148
+ if (contentType.includes("tar") || contentType.includes("gzip")) {
149
+ return "tar"
150
+ }
151
+ if (contentType.includes("zip")) {
152
+ return "zip"
153
+ }
154
+ }
155
+
156
+ return null
157
+ }
158
+
159
/**
 * The `MaterializedFile` class represents a file entity that has been materialized
 * to a local filesystem path.
 *
 * It handles creating a temporary directory, writing the file content to that directory,
 * and cleaning up the temporary files when disposed.
 *
 * For improved cleanup reliability, the class will use HIGHSTATE_TEMP_PATH as the base
 * directory for temporary files if available, allowing for centralized cleanup by the runner.
 */
export class MaterializedFile implements AsyncDisposable {
  // temp directory created for standalone files; undefined when nested in a parent folder
  private _tmpPath?: string
  // definite-assignment: set by _open() before any public use
  private _path!: string
  private _disposed = false

  // extra metadata merged into the artifact produced by pack()
  readonly artifactMeta: ObjectMeta = {}

  constructor(
    readonly entity: common.File,
    readonly parent?: MaterializedFolder,
  ) {}

  /** The local filesystem path of the materialized file (valid after open()/create()). */
  get path(): string {
    return this._path
  }

  /** Resolves the target path and writes the entity content to disk. */
  private async _open(): Promise<void> {
    if (this.parent) {
      // if the parent folder is provided, the file path is relative to the parent folder
      this._path = join(this.parent.path, this.entity.meta.name)
    } else {
      // otherwise, the file path is in a temporary directory
      // use HIGHSTATE_TEMP_PATH as base if available for better cleanup reliability
      const tempBase = process.env.HIGHSTATE_TEMP_PATH || tmpdir()
      this._tmpPath = await mkdtemp(join(tempBase, "highstate-file-"))
      this._path = join(this._tmpPath, this.entity.meta.name)
    }

    switch (this.entity.content.type) {
      case "embedded": {
        // binary embedded content is carried base64-encoded in the entity
        const content = this.entity.meta.isBinary
          ? Buffer.from(this.entity.content.value, "base64")
          : this.entity.content.value

        await writeFile(this._path, content, { mode: this.entity.meta.mode })
        break
      }
      case "local": {
        await cp(this.entity.content.path, this._path, { mode: this.entity.meta.mode })
        break
      }
      case "remote": {
        const response = await fetch(l7EndpointToString(this.entity.content.endpoint))
        if (!response.ok) throw new Error(`Failed to fetch: ${response.statusText}`)

        // NOTE(review): the entire response is buffered in memory before writing —
        // may be expensive for large remote files
        const arrayBuffer = await response.arrayBuffer()
        await writeFile(this._path, Buffer.from(arrayBuffer), { mode: this.entity.meta.mode })

        break
      }
      case "artifact": {
        const artifactPath = process.env.HIGHSTATE_ARTIFACT_READ_PATH
        if (!artifactPath) {
          throw new Error(
            "HIGHSTATE_ARTIFACT_READ_PATH environment variable is not set but required for artifact content",
          )
        }

        // artifacts are stored as content-addressed tgz files named by their hash
        const tgzPath = join(artifactPath, `${this.entity.content.hash}.tgz`)

        // extract the tgz file directly to the target path
        // (assumes the archive contains an entry named entity.meta.name, as produced by pack())
        const readStream = createReadStream(tgzPath)
        await unarchiveFromStream(readStream, dirname(this._path), "tar")
        break
      }
    }
  }

  /** Removes the materialized file (or its whole temp directory). Idempotent. */
  async [Symbol.asyncDispose](): Promise<void> {
    if (this._disposed) return
    this._disposed = true

    try {
      if (this._tmpPath) {
        // clear the whole temporary directory if it was created
        await rm(this._tmpPath, { recursive: true, force: true })
      } else {
        // otherwise, just remove the file
        await rm(this._path, { force: true })
      }
    } catch (error) {
      // ignore errors during cleanup, as the file might have been already removed
      // or the temporary directory might not exist
      // TODO: centralized logging for unit code
      console.warn("failed to clean up materialized file:", error)
    }
  }

  /**
   * Packs the materialized file into an artifact and returns the file entity with artifact content.
   *
   * Creates a tgz archive of the file and stores it in HIGHSTATE_ARTIFACT_WRITE_PATH where it will be collected by Highstate.
   *
   * @returns The file entity with content-addressed artifact content.
   * @throws If HIGHSTATE_ARTIFACT_WRITE_PATH is not set.
   */
  async pack(): Promise<File> {
    const writeDir = process.env.HIGHSTATE_ARTIFACT_WRITE_PATH
    if (!writeDir) {
      throw new Error("HIGHSTATE_ARTIFACT_WRITE_PATH environment variable is not set")
    }

    // read actual file stats from filesystem
    const fileStats = await stat(this._path)

    // create tgz archive of the file
    const tempBase = process.env.HIGHSTATE_TEMP_PATH || tmpdir()
    // NOTE(review): Date.now()-based temp name could collide under concurrent packs
    const tempArchivePath = join(tempBase, `highstate-pack-${Date.now()}.tgz`)

    try {
      await tar.create(
        {
          gzip: true,
          file: tempArchivePath,
          cwd: dirname(this._path),
          noMtime: true, // to reproduce the same archive every time
        },
        [basename(this._path)],
      )

      // calculate hash of the archive (streamed, so large archives are not buffered whole)
      const fileContent = createReadStream(tempArchivePath)
      const hash = createHash("sha256")

      for await (const chunk of fileContent) {
        hash.update(chunk as Buffer)
      }

      const hashValue = hash.digest("hex")

      // move archive to write directory with hash name
      const finalArchivePath = join(writeDir, `${hashValue}.tgz`)
      await rename(tempArchivePath, finalArchivePath)

      const newMeta = {
        name: this.entity.meta.name,
        mode: fileStats.mode & 0o777, // extract only permission bits
        size: fileStats.size,
        isBinary: this.entity.meta.isBinary, // keep original binary flag as we can't reliably detect this from filesystem
      }

      // return file entity with artifact content using actual filesystem stats
      return {
        meta: newMeta,
        content: {
          type: "artifact",
          [HighstateSignature.Artifact]: true,
          hash: hashValue,
          meta: await toPromise(this.artifactMeta),
        },
      }
    } finally {
      // clean up temporary archive
      try {
        await rm(tempArchivePath, { force: true })
      } catch {
        // ignore cleanup errors
      }
    }
  }

  /**
   * Creates an empty materialized file with the given name.
   *
   * @param name The name of the file to create
   * @param content Optional initial content of the file (default is empty string)
   * @param mode Optional file mode (permissions)
   * @returns A new MaterializedFile instance representing an empty file
   */
  static async create(name: string, content = "", mode?: number): Promise<MaterializedFile> {
    const entity: common.File = {
      meta: {
        name,
        mode,
        size: 0,
        isBinary: false,
      },
      content: {
        type: "embedded",
        value: content,
      },
    }

    const materializedFile = new MaterializedFile(entity)

    try {
      await materializedFile._open()
    } catch (error) {
      // dispose on failure so a half-created temp directory is not leaked
      await materializedFile[Symbol.asyncDispose]()
      throw error
    }

    return materializedFile
  }

  /**
   * Materializes the given file entity to the local filesystem.
   *
   * @param file The file entity to materialize
   * @param parent Optional parent folder to materialize the file into
   * @returns A new MaterializedFile backed by a local path
   */
  static async open(file: common.File, parent?: MaterializedFolder): Promise<MaterializedFile> {
    const materializedFile = new MaterializedFile(file, parent)

    try {
      await materializedFile._open()
    } catch (error) {
      // dispose on failure so partially-written content is cleaned up
      await materializedFile[Symbol.asyncDispose]()
      throw error
    }

    return materializedFile
  }
}
374
+
375
/**
 * The `MaterializedFolder` class represents a folder entity that has been materialized
 * to a local filesystem path.
 *
 * It handles creating a temporary directory, copying the folder content to that directory,
 * and cleaning up the temporary files when disposed.
 *
 * For improved cleanup reliability, the class will use HIGHSTATE_TEMP_PATH as the base
 * directory for temporary files if available, allowing for centralized cleanup by the runner.
 */
export class MaterializedFolder implements AsyncDisposable {
  // temp directory created for standalone folders; undefined when nested in a parent folder
  private _tmpPath?: string
  // definite-assignment: set by _open() before any public use
  private _path!: string
  private _disposed = false

  // children materialized for "embedded" content; disposed together with this folder
  private readonly _disposables: AsyncDisposable[] = []

  // extra metadata merged into the artifact produced by pack()
  readonly artifactMeta: ObjectMeta = {}

  constructor(
    readonly entity: common.Folder,
    readonly parent?: MaterializedFolder,
  ) {}

  /** The local filesystem path of the materialized folder (valid after open()/create()). */
  get path(): string {
    return this._path
  }

  /** Resolves the target path and materializes the folder content to disk. */
  private async _open(): Promise<void> {
    if (this.parent) {
      // if the parent folder is provided, the folder path is relative to the parent folder
      this._path = join(this.parent.path, this.entity.meta.name)
    } else {
      // otherwise, the folder path is in a temporary directory
      // use HIGHSTATE_TEMP_PATH as base if available for better cleanup reliability
      const tempBase = process.env.HIGHSTATE_TEMP_PATH || tmpdir()
      this._tmpPath = await mkdtemp(join(tempBase, "highstate-folder-"))
      this._path = join(this._tmpPath, this.entity.meta.name)
    }

    switch (this.entity.content.type) {
      case "embedded": {
        // create the folder itself
        await mkdir(this._path, { mode: this.entity.meta.mode })

        // materialize children into this folder; track them for disposal
        for (const file of this.entity.content.files) {
          const materializedFile = await MaterializedFile.open(file, this)
          this._disposables.push(materializedFile)
        }

        for (const subfolder of this.entity.content.folders) {
          const materializedFolder = await MaterializedFolder.open(subfolder, this)
          this._disposables.push(materializedFolder)
        }

        break
      }
      case "local": {
        // Check if the local path is an archive file that needs extraction
        const archiveType = detectArchiveType(this.entity.content.path)

        if (archiveType) {
          // Extract archive to the destination path
          const readStream = createReadStream(this.entity.content.path)
          await unarchiveFromStream(readStream, this._path, archiveType)
        } else {
          // Regular directory copy
          await cp(this.entity.content.path, this._path, {
            recursive: true,
            mode: this.entity.meta.mode,
          })
        }

        break
      }
      case "remote": {
        const response = await fetch(l7EndpointToString(this.entity.content.endpoint))
        if (!response.ok) throw new Error(`Failed to fetch: ${response.statusText}`)
        if (!response.body) throw new Error("Response body is empty")

        // Try to detect archive type from URL or content type
        const url = new URL(l7EndpointToString(this.entity.content.endpoint))
        const archiveType = detectArchiveType(
          url.pathname,
          response.headers.get("content-type") || undefined,
        )

        if (!archiveType) {
          throw new Error("Remote folder content must be an archive (tar, tar.gz, tgz, or zip)")
        }

        // NOTE(review): redundant — response.body was already checked above
        if (!response.body) {
          throw new Error("Response body is empty")
        }

        // bridge the WHATWG ReadableStream from fetch() into a Node Readable
        const reader = response.body.getReader()
        const stream = new Readable({
          async read() {
            try {
              const { done, value } = await reader.read()
              if (done) {
                this.push(null)
              } else {
                this.push(Buffer.from(value))
              }
            } catch (error) {
              this.destroy(error instanceof Error ? error : new Error(String(error)))
            }
          },
        })

        await unarchiveFromStream(stream, this._path, archiveType)

        break
      }
      case "artifact": {
        const artifactPath = process.env.HIGHSTATE_ARTIFACT_READ_PATH

        if (!artifactPath) {
          throw new Error(
            "HIGHSTATE_ARTIFACT_READ_PATH environment variable is not set but required for artifact content",
          )
        }

        // artifacts are stored as content-addressed tgz files named by their hash
        const tgzPath = join(artifactPath, `${this.entity.content.hash}.tgz`)

        // extract the tgz file directly to the target path
        // (assumes the archive's top-level entry is named entity.meta.name, as produced by pack())
        const readStream = createReadStream(tgzPath)
        await unarchiveFromStream(readStream, dirname(this._path), "tar")

        break
      }
    }
  }

  /** Removes the materialized folder (or its whole temp directory), then disposes children. Idempotent. */
  async [Symbol.asyncDispose](): Promise<void> {
    if (this._disposed) return
    this._disposed = true

    try {
      if (this._tmpPath) {
        // clear the whole temporary directory if it was created
        await rm(this._tmpPath, { recursive: true, force: true })
      } else {
        // otherwise, just remove the folder
        await rm(this._path, { recursive: true, force: true })
      }
    } catch (error) {
      // ignore errors during cleanup, as the folder might have been already removed
      // or the temporary directory might not exist
      // TODO: centralized logging for unit code
      console.warn("failed to clean up materialized folder:", error)
    }

    // dispose all materialized children
    // NOTE(review): children live inside the folder removed above; their rm(..., { force: true })
    // calls tolerate the already-deleted paths, so this is effectively state bookkeeping
    for (const disposable of this._disposables) {
      await disposable[Symbol.asyncDispose]()
    }
  }

  /**
   * Packs the materialized folder into an artifact and returns the folder entity with artifact content.
   *
   * Creates a tgz archive of the entire folder and stores it in HIGHSTATE_ARTIFACT_WRITE_PATH where it will be collected by Highstate.
   *
   * @param options Optional include/exclude glob patterns filtering the packed entries.
   * @returns The folder entity with content-addressed artifact content.
   * @throws If HIGHSTATE_ARTIFACT_WRITE_PATH is not set.
   */
  async pack({ include, exclude }: FolderPackOptions = {}): Promise<common.Folder> {
    const writeDir = process.env.HIGHSTATE_ARTIFACT_WRITE_PATH
    if (!writeDir) {
      throw new Error("HIGHSTATE_ARTIFACT_WRITE_PATH environment variable is not set")
    }

    // read actual folder stats from filesystem
    const folderStats = await stat(this._path)

    // create tgz archive of the folder
    const tempBase = process.env.HIGHSTATE_TEMP_PATH || tmpdir()
    // NOTE(review): Date.now()-based temp name could collide under concurrent packs
    const tempArchivePath = join(tempBase, `highstate-pack-${Date.now()}.tgz`)

    // captured for use inside the tar filter callback (plain function, no `this`)
    const entity = this.entity

    try {
      await tar.create(
        {
          gzip: true,
          file: tempArchivePath,
          cwd: dirname(this._path),

          filter(path) {
            // match without the folder name prefix
            // (tar paths are relative to cwd and start with "<folder name>/")
            path = path.slice(entity.meta.name.length + 1)

            // handle explicit excludes
            for (const pattern of exclude ?? []) {
              if (minimatch(path, pattern)) {
                return false
              }
            }

            // try to match include patterns
            for (const pattern of include ?? []) {
              if (minimatch(path, pattern)) {
                return true
              }
            }

            // include all files if no include patterns are specified
            return !include || include.length === 0
          },

          // to reproduce the same archive every time
          portable: true,
          noMtime: true,
        },
        [basename(this._path)],
      )

      // calculate hash of the archive (streamed, so large archives are not buffered whole)
      const fileContent = createReadStream(tempArchivePath)
      const hash = createHash("sha256")

      for await (const chunk of fileContent) {
        hash.update(chunk as Buffer)
      }

      const hashValue = hash.digest("hex")

      // move archive to write directory with hash name
      const finalArchivePath = join(writeDir, `${hashValue}.tgz`)
      await rename(tempArchivePath, finalArchivePath)

      const newMeta = {
        name: this.entity.meta.name,
        mode: folderStats.mode & 0o777, // extract only permission bits
      }

      // return folder entity with artifact content using actual filesystem stats
      return {
        meta: newMeta,
        content: {
          [HighstateSignature.Artifact]: true,
          type: "artifact",
          hash: hashValue,
          meta: await toPromise(this.artifactMeta),
        },
      }
    } finally {
      // clean up temporary archive
      try {
        await rm(tempArchivePath, { force: true })
      } catch {
        // ignore cleanup errors
      }
    }
  }

  /**
   * Creates an empty materialized folder with the given name.
   *
   * @param name The name of the folder to create
   * @param mode Optional folder mode (permissions)
   * @param parent Optional parent folder to create the folder in
   * @returns A new MaterializedFolder instance representing an empty folder
   */
  static async create(
    name: string,
    mode?: number,
    parent?: MaterializedFolder,
  ): Promise<MaterializedFolder> {
    const entity: common.Folder = {
      meta: {
        name,
        mode,
      },
      content: {
        type: "embedded",
        files: [],
        folders: [],
      },
    }

    const materializedFolder = new MaterializedFolder(entity, parent)

    try {
      await materializedFolder._open()
    } catch (error) {
      // dispose on failure so a half-created temp directory is not leaked
      await materializedFolder[Symbol.asyncDispose]()
      throw error
    }

    return materializedFolder
  }

  /**
   * Materializes the given folder entity to the local filesystem.
   *
   * @param folder The folder entity to materialize
   * @param parent Optional parent folder to materialize into
   * @returns A new MaterializedFolder backed by a local path
   */
  static async open(
    folder: common.Folder,
    parent?: MaterializedFolder,
  ): Promise<MaterializedFolder> {
    const materializedFolder = new MaterializedFolder(folder, parent)

    try {
      await materializedFolder._open()
    } catch (error) {
      // dispose on failure so partially-written content is cleaned up
      await materializedFolder[Symbol.asyncDispose]()
      throw error
    }

    return materializedFolder
  }
}
683
+
684
+ /**
685
+ * Fetches the size of a file from a given L7 endpoint.
686
+ *
687
+ * @param endpoint The L7 endpoint to fetch the file size from.
688
+ * @returns The size of the file in bytes.
689
+ * @throws If the protocol is not HTTP/HTTPS or if the request fails.
690
+ */
691
+ export async function fetchFileSize(endpoint: network.L7Endpoint): Promise<number> {
692
+ if (endpoint.appProtocol !== "http" && endpoint.appProtocol !== "https") {
693
+ throw new Error(
694
+ `Unsupported protocol: ${endpoint.appProtocol}. Only HTTP and HTTPS are supported.`,
695
+ )
696
+ }
697
+
698
+ const url = l7EndpointToString(endpoint)
699
+ const response = await fetch(url, { method: "HEAD" })
700
+
701
+ if (!response.ok) {
702
+ throw new Error(`Failed to fetch file size: ${response.statusText}`)
703
+ }
704
+
705
+ const contentLength = response.headers.get("content-length")
706
+ if (!contentLength) {
707
+ throw new Error("Content-Length header is missing in the response")
708
+ }
709
+
710
+ const size = parseInt(contentLength, 10)
711
+ if (isNaN(size)) {
712
+ throw new Error(`Invalid Content-Length value: ${contentLength}`)
713
+ }
714
+
715
+ return size
716
+ }
717
+
718
/**
 * Extracts the last path segment of an L7 endpoint's resource as a name.
 *
 * NOTE(review): an earlier doc claimed the file extension is stripped, but
 * `basename(resource)` keeps it (e.g. "a/b/file.zip" -> "file.zip") — confirm
 * which behavior callers expect.
 *
 * @param endpoint The endpoint to derive the name from.
 * @returns The final segment of the resource path, or "" when the endpoint has no resource.
 */
export function getNameByEndpoint(endpoint: InputL7Endpoint): string {
  const parsedEndpoint = parseL7Endpoint(endpoint)

  return parsedEndpoint.resource ? basename(parsedEndpoint.resource) : ""
}
+ }