@highstate/common 0.9.15 → 0.9.16

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. package/dist/chunk-HZBJ6LLS.js +1057 -0
  2. package/dist/chunk-HZBJ6LLS.js.map +1 -0
  3. package/dist/highstate.manifest.json +9 -9
  4. package/dist/index.js +2 -50
  5. package/dist/index.js.map +1 -1
  6. package/dist/units/dns/record-set/index.js +4 -6
  7. package/dist/units/dns/record-set/index.js.map +1 -1
  8. package/dist/units/existing-server/index.js +7 -13
  9. package/dist/units/existing-server/index.js.map +1 -1
  10. package/dist/units/network/l3-endpoint/index.js +6 -9
  11. package/dist/units/network/l3-endpoint/index.js.map +1 -1
  12. package/dist/units/network/l4-endpoint/index.js +6 -9
  13. package/dist/units/network/l4-endpoint/index.js.map +1 -1
  14. package/dist/units/script/index.js +6 -9
  15. package/dist/units/script/index.js.map +1 -1
  16. package/dist/units/server-dns/index.js +7 -11
  17. package/dist/units/server-dns/index.js.map +1 -1
  18. package/dist/units/server-patch/index.js +7 -11
  19. package/dist/units/server-patch/index.js.map +1 -1
  20. package/dist/units/ssh/key-pair/index.js +18 -12
  21. package/dist/units/ssh/key-pair/index.js.map +1 -1
  22. package/package.json +20 -6
  23. package/src/shared/command.ts +19 -9
  24. package/src/shared/files.ts +730 -0
  25. package/src/shared/index.ts +1 -0
  26. package/src/shared/network.ts +88 -1
  27. package/src/shared/ssh.ts +36 -18
  28. package/src/units/existing-server/index.ts +1 -1
  29. package/src/units/remote-folder/index.ts +0 -0
  30. package/src/units/server-dns/index.ts +1 -1
  31. package/src/units/server-patch/index.ts +1 -1
  32. package/src/units/ssh/key-pair/index.ts +12 -2
  33. package/dist/chunk-NISDP46H.js +0 -546
  34. package/dist/chunk-NISDP46H.js.map +0 -1
package/src/shared/files.ts
@@ -0,0 +1,730 @@
1
+ import type { common, network } from "@highstate/library"
2
+ import { tmpdir } from "node:os"
3
+ import { cp, mkdir, mkdtemp, rm, writeFile, rename, stat } from "node:fs/promises"
4
+ import { join, extname, basename, dirname } from "node:path"
5
+ import { createReadStream } from "node:fs"
6
+ import { pipeline } from "node:stream/promises"
7
+ import { Readable } from "node:stream"
8
+ import { createHash } from "node:crypto"
9
+ import { minimatch } from "minimatch"
10
+
11
+ import { HighstateSignature, type File } from "@highstate/contract"
12
+ import { asset, toPromise, type ObjectMeta } from "@highstate/pulumi"
13
+ import * as tar from "tar"
14
+ import unzipper from "unzipper"
15
+ import { l7EndpointToString, parseL7Endpoint, type InputL7Endpoint } from "./network"
16
+
17
+ export type FolderPackOptions = {
18
+ /**
19
+ * The patterns to include in the packed archive.
20
+ * If not provided, all files and folders will be included.
21
+ */
22
+ include?: string[]
23
+
24
+ /**
25
+ * The patterns to exclude from the packed archive.
26
+ * Applied after include patterns.
27
+ * If not provided, no files or folders will be excluded.
28
+ */
29
+ exclude?: string[]
30
+ }
31
+
32
+ /**
33
+ * Creates a Pulumi asset from a Highstate file.
34
+ *
35
+ * @param file The file entity to create the asset from.
36
+ * @returns The created asset.
37
+ */
38
+ export function assetFromFile(file: common.File): asset.Asset {
39
+ if (file.content.type === "remote") {
40
+ return new asset.RemoteAsset(l7EndpointToString(file.content.endpoint))
41
+ }
42
+
43
+ if (file.content.type === "local") {
44
+ return new asset.FileAsset(file.content.path)
45
+ }
46
+
47
+ if (file.content.type === "artifact") {
48
+ throw new Error(
49
+ "Artifact-based files cannot be converted to Pulumi assets directly. Use MaterializedFile instead.",
50
+ )
51
+ }
52
+
53
+ if (file.meta.isBinary) {
54
+ throw new Error(
55
+ "Cannot create asset from inline binary file content. Please open an issue if you need this feature.",
56
+ )
57
+ }
58
+
59
+ return new asset.StringAsset(file.content.value)
60
+ }
61
+
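// --- editor's usage sketch (not part of the package source or this diff) ---
// A minimal example of assetFromFile with an embedded file entity. The entity
// shape is inferred from this file, and importing from "@highstate/common"
// assumes these helpers are re-exported through src/shared/index.ts.
import type { common } from "@highstate/library"
import { assetFromFile } from "@highstate/common"

const configFile: common.File = {
  meta: { name: "app.conf", mode: 0o644, size: 12, isBinary: false },
  content: { type: "embedded", value: "key = value\n" },
}

// embedded text becomes a StringAsset; "local" content becomes a FileAsset,
// "remote" content becomes a RemoteAsset, and "artifact" content throws
const configAsset = assetFromFile(configFile)
// --- end of editor's sketch ---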
62
+ /**
63
+ * Creates a Pulumi archive from a Highstate folder.
64
+ *
65
+ * @param folder The folder entity to create the asset archive from.
66
+ * @returns The created asset archive.
67
+ */
68
+ export function archiveFromFolder(folder: common.Folder): asset.Archive {
69
+ if (folder.content.type === "remote") {
70
+ return new asset.RemoteArchive(l7EndpointToString(folder.content.endpoint))
71
+ }
72
+
73
+ if (folder.content.type === "local") {
74
+ return new asset.FileArchive(folder.content.path)
75
+ }
76
+
77
+ if (folder.content.type === "artifact") {
78
+ throw new Error(
79
+ "Artifact-based folders cannot be converted to Pulumi assets directly. Use MaterializedFolder instead.",
80
+ )
81
+ }
82
+
83
+ const files: Record<string, asset.Asset> = {}
84
+
85
+ for (const file of folder.content.files) {
86
+ files[file.meta.name] = assetFromFile(file)
87
+ }
88
+
89
+ for (const subfolder of folder.content.folders) {
90
+ files[subfolder.meta.name] = archiveFromFolder(subfolder)
91
+ }
92
+
93
+ return new asset.AssetArchive(files)
94
+ }
95
+
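// --- editor's usage sketch (not part of the package source or this diff) ---
// archiveFromFolder maps files to assets and subfolders to nested archives,
// keyed by meta.name. A hypothetical embedded folder with a single file:
import type { common } from "@highstate/library"
import { archiveFromFolder } from "@highstate/common"

const siteFolder: common.Folder = {
  meta: { name: "site" },
  content: {
    type: "embedded",
    files: [
      {
        meta: { name: "index.html", size: 15, isBinary: false },
        content: { type: "embedded", value: "<h1>hello</h1>\n" },
      },
    ],
    folders: [],
  },
}

// produces an AssetArchive with one entry: { "index.html": StringAsset }
const siteArchive = archiveFromFolder(siteFolder)
// --- end of editor's sketch ---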
96
+ /**
97
+ * Extracts a tar or zip archive from a stream to a destination directory.
98
+ *
99
+ * @param stream The stream containing the archive data
100
+ * @param destinationPath The directory to extract the archive into
101
+ * @param archiveType The type of archive ('tar' or 'zip')
102
+ */
103
+ async function unarchiveFromStream(
104
+ stream: Readable,
105
+ destinationPath: string,
106
+ archiveType: "tar" | "zip",
107
+ ): Promise<void> {
108
+ await mkdir(destinationPath, { recursive: true })
109
+
110
+ switch (archiveType) {
111
+ case "tar": {
112
+ const extractStream = tar.extract({
113
+ cwd: destinationPath,
114
+ strict: true,
115
+ })
116
+
117
+ await pipeline(stream, extractStream)
118
+ return
119
+ }
120
+ case "zip": {
121
+ // Extract directly from stream using unzipper
122
+ await pipeline(stream, unzipper.Extract({ path: destinationPath }))
123
+ return
124
+ }
125
+ }
126
+ }
127
+
128
+ /**
129
+ * Determines the archive type based on file extension or content type.
130
+ *
131
+ * @param fileName The name of the file
132
+ * @param contentType Optional content type from HTTP headers
133
+ * @returns The detected archive type or null if not an archive
134
+ */
135
+ function detectArchiveType(fileName: string, contentType?: string): "tar" | "zip" | null {
136
+ const ext = extname(fileName).toLowerCase()
137
+
138
+ if (ext === ".tar" || ext === ".tgz" || ext === ".tar.gz") {
139
+ return "tar"
140
+ }
141
+
142
+ if (ext === ".zip") {
143
+ return "zip"
144
+ }
145
+
146
+ // Fallback to content type
147
+ if (contentType) {
148
+ if (contentType.includes("tar") || contentType.includes("gzip")) {
149
+ return "tar"
150
+ }
151
+ if (contentType.includes("zip")) {
152
+ return "zip"
153
+ }
154
+ }
155
+
156
+ return null
157
+ }
158
+
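// --- editor's note (not part of the package source or this diff) ---
// Expected behavior of the two module-private helpers above, based on the
// code as written (including the full-name check for ".tar.gz"):
//   detectArchiveType("backup.tar")                  -> "tar"
//   detectArchiveType("backup.tgz")                  -> "tar"
//   detectArchiveType("backup.tar.gz")               -> "tar"
//   detectArchiveType("site.zip")                    -> "zip"
//   detectArchiveType("data.bin", "application/zip") -> "zip"  (content-type fallback)
//   detectArchiveType("notes.txt")                   -> null   (local folders fall back to a
//                                                               plain copy; remote folders
//                                                               require an archive and throw)
// --- end of editor's note ---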
159
+ /**
160
+ * The `MaterializedFile` class represents a file entity that has been materialized
161
+ * to a local filesystem path.
162
+ *
163
+ * It handles creating a temporary directory, writing the file content to that directory,
164
+ * and cleaning up the temporary files when disposed.
165
+ *
166
+ * For improved cleanup reliability, the class will use HIGHSTATE_TEMP_PATH as the base
167
+ * directory for temporary files if available, allowing for centralized cleanup by the runner.
168
+ */
169
+ export class MaterializedFile implements AsyncDisposable {
170
+ private _tmpPath?: string
171
+ private _path!: string
172
+ private _disposed = false
173
+
174
+ readonly artifactMeta: ObjectMeta = {}
175
+
176
+ constructor(
177
+ readonly entity: common.File,
178
+ readonly parent?: MaterializedFolder,
179
+ ) {}
180
+
181
+ get path(): string {
182
+ return this._path
183
+ }
184
+
185
+ private async _open(): Promise<void> {
186
+ if (this.parent) {
187
+ // if the parent folder is provided, the file path is relative to the parent folder
188
+ this._path = join(this.parent.path, this.entity.meta.name)
189
+ } else {
190
+ // otherwise, the file path is in a temporary directory
191
+ // use HIGHSTATE_TEMP_PATH as base if available for better cleanup reliability
192
+ const tempBase = process.env.HIGHSTATE_TEMP_PATH || tmpdir()
193
+ this._tmpPath = await mkdtemp(join(tempBase, "highstate-file-"))
194
+ this._path = join(this._tmpPath, this.entity.meta.name)
195
+ }
196
+
197
+ switch (this.entity.content.type) {
198
+ case "embedded": {
199
+ const content = this.entity.meta.isBinary
200
+ ? Buffer.from(this.entity.content.value, "base64")
201
+ : this.entity.content.value
202
+
203
+ await writeFile(this._path, content, { mode: this.entity.meta.mode })
204
+ break
205
+ }
206
+ case "local": {
207
+ await cp(this.entity.content.path, this._path, { mode: this.entity.meta.mode })
208
+ break
209
+ }
210
+ case "remote": {
211
+ const response = await fetch(l7EndpointToString(this.entity.content.endpoint))
212
+ if (!response.ok) throw new Error(`Failed to fetch: ${response.statusText}`)
213
+
214
+ const arrayBuffer = await response.arrayBuffer()
215
+ await writeFile(this._path, Buffer.from(arrayBuffer), { mode: this.entity.meta.mode })
216
+
217
+ break
218
+ }
219
+ case "artifact": {
220
+ const artifactData = this.entity.content[HighstateSignature.Artifact]
221
+ const artifactPath = process.env.HIGHSTATE_ARTIFACT_READ_PATH
222
+
223
+ if (!artifactPath) {
224
+ throw new Error(
225
+ "HIGHSTATE_ARTIFACT_READ_PATH environment variable is not set but required for artifact content",
226
+ )
227
+ }
228
+
229
+ const tgzPath = join(artifactPath, `${artifactData.hash}.tgz`)
230
+
231
+ // extract the tgz file directly to the target path
232
+ const readStream = createReadStream(tgzPath)
233
+ await unarchiveFromStream(readStream, dirname(this._path), "tar")
234
+ break
235
+ }
236
+ }
237
+ }
238
+
239
+ async [Symbol.asyncDispose](): Promise<void> {
240
+ if (this._disposed) return
241
+ this._disposed = true
242
+
243
+ try {
244
+ if (this._tmpPath) {
245
+ // clear the whole temporary directory if it was created
246
+ await rm(this._tmpPath, { recursive: true, force: true })
247
+ } else {
248
+ // otherwise, just remove the file
249
+ await rm(this._path, { force: true })
250
+ }
251
+ } catch (error) {
252
+ // ignore errors during cleanup, as the file might have been already removed
253
+ // or the temporary directory might not exist
254
+ // TODO: centralized logging for unit code
255
+ console.warn("failed to clean up materialized file:", error)
256
+ }
257
+ }
258
+
259
+ /**
260
+ * Packs the materialized file into an artifact and returns the file entity with artifact content.
261
+ *
262
+ * Creates a tgz archive of the file and stores it in HIGHSTATE_ARTIFACT_WRITE_PATH where it will be collected by Highstate.
263
+ */
264
+ async pack(): Promise<File> {
265
+ const writeDir = process.env.HIGHSTATE_ARTIFACT_WRITE_PATH
266
+ if (!writeDir) {
267
+ throw new Error("HIGHSTATE_ARTIFACT_WRITE_PATH environment variable is not set")
268
+ }
269
+
270
+ // read actual file stats from filesystem
271
+ const fileStats = await stat(this._path)
272
+
273
+ // create tgz archive of the file
274
+ const tempBase = process.env.HIGHSTATE_TEMP_PATH || tmpdir()
275
+ const tempArchivePath = join(tempBase, `highstate-pack-${Date.now()}.tgz`)
276
+
277
+ try {
278
+ await tar.create(
279
+ {
280
+ gzip: true,
281
+ file: tempArchivePath,
282
+ cwd: dirname(this._path),
283
+ noMtime: true, // to reproduce the same archive every time
284
+ },
285
+ [basename(this._path)],
286
+ )
287
+
288
+ // calculate hash of the archive
289
+ const fileContent = createReadStream(tempArchivePath)
290
+ const hash = createHash("sha256")
291
+
292
+ for await (const chunk of fileContent) {
293
+ hash.update(chunk as Buffer)
294
+ }
295
+
296
+ const hashValue = hash.digest("hex")
297
+
298
+ // move archive to write directory with hash name
299
+ const finalArchivePath = join(writeDir, `${hashValue}.tgz`)
300
+ await rename(tempArchivePath, finalArchivePath)
301
+
302
+ const newMeta = {
303
+ name: this.entity.meta.name,
304
+ mode: fileStats.mode & 0o777, // extract only permission bits
305
+ size: fileStats.size,
306
+ isBinary: this.entity.meta.isBinary, // keep original binary flag as we can't reliably detect this from filesystem
307
+ }
308
+
309
+ // return file entity with artifact content using actual filesystem stats
310
+ return {
311
+ meta: newMeta,
312
+ content: {
313
+ type: "artifact",
314
+ [HighstateSignature.Artifact]: {
315
+ hash: hashValue,
316
+ meta: await toPromise(this.artifactMeta),
317
+ },
318
+ },
319
+ }
320
+ } finally {
321
+ // clean up temporary archive
322
+ try {
323
+ await rm(tempArchivePath, { force: true })
324
+ } catch {
325
+ // ignore cleanup errors
326
+ }
327
+ }
328
+ }
329
+
330
+ /**
331
+ * Creates an empty materialized file with the given name.
332
+ *
333
+ * @param name The name of the file to create
334
+ * @param content Optional initial content of the file (default is empty string)
335
+ * @param mode Optional file mode (permissions)
336
+ * @returns A new MaterializedFile instance representing an empty file
337
+ */
338
+ static async create(name: string, content = "", mode?: number): Promise<MaterializedFile> {
339
+ const entity: common.File = {
340
+ meta: {
341
+ name,
342
+ mode,
343
+ size: 0,
344
+ isBinary: false,
345
+ },
346
+ content: {
347
+ type: "embedded",
348
+ value: content,
349
+ },
350
+ }
351
+
352
+ const materializedFile = new MaterializedFile(entity)
353
+
354
+ try {
355
+ await materializedFile._open()
356
+ } catch (error) {
357
+ await materializedFile[Symbol.asyncDispose]()
358
+ throw error
359
+ }
360
+
361
+ return materializedFile
362
+ }
363
+
364
+ static async open(file: common.File, parent?: MaterializedFolder): Promise<MaterializedFile> {
365
+ const materializedFile = new MaterializedFile(file, parent)
366
+
367
+ try {
368
+ await materializedFile._open()
369
+ } catch (error) {
370
+ await materializedFile[Symbol.asyncDispose]()
371
+ throw error
372
+ }
373
+
374
+ return materializedFile
375
+ }
376
+ }
377
+
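// --- editor's usage sketch (not part of the package source or this diff) ---
// Creating a file, mutating it on disk, then packing it into an artifact.
// `await using` relies on the AsyncDisposable implementation above and needs
// TypeScript 5.2+ with Symbol.asyncDispose available; the "@highstate/common"
// import path is an assumption, as in the earlier sketches.
import { writeFile } from "node:fs/promises"
import { MaterializedFile } from "@highstate/common"

async function renderConfigArtifact() {
  // creates the file under HIGHSTATE_TEMP_PATH (or os.tmpdir()) and removes it on dispose
  await using file = await MaterializedFile.create("generated.conf", "", 0o600)

  // work with the real path on disk
  await writeFile(file.path, "key = value\n")

  // requires HIGHSTATE_ARTIFACT_WRITE_PATH; returns a File entity whose content
  // references the produced <sha256>.tgz artifact
  return await file.pack()
}
// --- end of editor's sketch ---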
378
+ /**
379
+ * The `MaterializedFolder` class represents a folder entity that has been materialized
380
+ * to a local filesystem path.
381
+ *
382
+ * It handles creating a temporary directory, copying the folder content to that directory,
383
+ * and cleaning up the temporary files when disposed.
384
+ *
385
+ * For improved cleanup reliability, the class will use HIGHSTATE_TEMP_PATH as the base
386
+ * directory for temporary files if available, allowing for centralized cleanup by the runner.
387
+ */
388
+ export class MaterializedFolder implements AsyncDisposable {
389
+ private _tmpPath?: string
390
+ private _path!: string
391
+ private _disposed = false
392
+
393
+ private readonly _disposables: AsyncDisposable[] = []
394
+
395
+ readonly artifactMeta: ObjectMeta = {}
396
+
397
+ constructor(
398
+ readonly entity: common.Folder,
399
+ readonly parent?: MaterializedFolder,
400
+ ) {}
401
+
402
+ get path(): string {
403
+ return this._path
404
+ }
405
+
406
+ private async _open(): Promise<void> {
407
+ if (this.parent) {
408
+ // if the parent folder is provided, the folder path is relative to the parent folder
409
+ this._path = join(this.parent.path, this.entity.meta.name)
410
+ } else {
411
+ // otherwise, the folder path is in a temporary directory
412
+ // use HIGHSTATE_TEMP_PATH as base if available for better cleanup reliability
413
+ const tempBase = process.env.HIGHSTATE_TEMP_PATH || tmpdir()
414
+ this._tmpPath = await mkdtemp(join(tempBase, "highstate-folder-"))
415
+ this._path = join(this._tmpPath, this.entity.meta.name)
416
+ }
417
+
418
+ switch (this.entity.content.type) {
419
+ case "embedded": {
420
+ // create the folder itself
421
+ await mkdir(this._path, { mode: this.entity.meta.mode })
422
+
423
+ for (const file of this.entity.content.files) {
424
+ const materializedFile = await MaterializedFile.open(file, this)
425
+ this._disposables.push(materializedFile)
426
+ }
427
+
428
+ for (const subfolder of this.entity.content.folders) {
429
+ const materializedFolder = await MaterializedFolder.open(subfolder, this)
430
+ this._disposables.push(materializedFolder)
431
+ }
432
+
433
+ break
434
+ }
435
+ case "local": {
436
+ // Check if the local path is an archive file that needs extraction
437
+ const archiveType = detectArchiveType(this.entity.content.path)
438
+
439
+ if (archiveType) {
440
+ // Extract archive to the destination path
441
+ const readStream = createReadStream(this.entity.content.path)
442
+ await unarchiveFromStream(readStream, this._path, archiveType)
443
+ } else {
444
+ // Regular directory copy
445
+ await cp(this.entity.content.path, this._path, {
446
+ recursive: true,
447
+ mode: this.entity.meta.mode,
448
+ })
449
+ }
450
+
451
+ break
452
+ }
453
+ case "remote": {
454
+ const response = await fetch(l7EndpointToString(this.entity.content.endpoint))
455
+ if (!response.ok) throw new Error(`Failed to fetch: ${response.statusText}`)
456
+ if (!response.body) throw new Error("Response body is empty")
457
+
458
+ // Try to detect archive type from URL or content type
459
+ const url = new URL(l7EndpointToString(this.entity.content.endpoint))
460
+ const archiveType = detectArchiveType(
461
+ url.pathname,
462
+ response.headers.get("content-type") || undefined,
463
+ )
464
+
465
+ if (!archiveType) {
466
+ throw new Error("Remote folder content must be an archive (tar, tar.gz, tgz, or zip)")
467
+ }
468
+
469
+ if (!response.body) {
470
+ throw new Error("Response body is empty")
471
+ }
472
+
473
+ const reader = response.body.getReader()
474
+ const stream = new Readable({
475
+ async read() {
476
+ try {
477
+ const { done, value } = await reader.read()
478
+ if (done) {
479
+ this.push(null)
480
+ } else {
481
+ this.push(Buffer.from(value))
482
+ }
483
+ } catch (error) {
484
+ this.destroy(error instanceof Error ? error : new Error(String(error)))
485
+ }
486
+ },
487
+ })
488
+
489
+ await unarchiveFromStream(stream, this._path, archiveType)
490
+
491
+ break
492
+ }
493
+ case "artifact": {
494
+ const artifactData = this.entity.content[HighstateSignature.Artifact]
495
+ const artifactPath = process.env.HIGHSTATE_ARTIFACT_READ_PATH
496
+
497
+ if (!artifactPath) {
498
+ throw new Error(
499
+ "HIGHSTATE_ARTIFACT_READ_PATH environment variable is not set but required for artifact content",
500
+ )
501
+ }
502
+
503
+ const tgzPath = join(artifactPath, `${artifactData.hash}.tgz`)
504
+
505
+ // extract the tgz file directly to the target path
506
+ const readStream = createReadStream(tgzPath)
507
+ await unarchiveFromStream(readStream, dirname(this._path), "tar")
508
+
509
+ break
510
+ }
511
+ }
512
+ }
513
+
514
+ async [Symbol.asyncDispose](): Promise<void> {
515
+ if (this._disposed) return
516
+ this._disposed = true
517
+
518
+ try {
519
+ if (this._tmpPath) {
520
+ // clear the whole temporary directory if it was created
521
+ await rm(this._tmpPath, { recursive: true, force: true })
522
+ } else {
523
+ // otherwise, just remove the folder
524
+ await rm(this._path, { recursive: true, force: true })
525
+ }
526
+ } catch (error) {
527
+ // ignore errors during cleanup, as the folder might have been already removed
528
+ // or the temporary directory might not exist
529
+ // TODO: centralized logging for unit code
530
+ console.warn("failed to clean up materialized folder:", error)
531
+ }
532
+
533
+ // dispose all materialized children
534
+ for (const disposable of this._disposables) {
535
+ await disposable[Symbol.asyncDispose]()
536
+ }
537
+ }
538
+
539
+ /**
540
+ * Packs the materialized folder into an artifact and returns the folder entity with artifact content.
541
+ *
542
+ * Creates a tgz archive of the entire folder and stores it in HIGHSTATE_ARTIFACT_WRITE_PATH where it will be collected by Highstate.
543
+ */
544
+ async pack({ include, exclude }: FolderPackOptions = {}): Promise<common.Folder> {
545
+ const writeDir = process.env.HIGHSTATE_ARTIFACT_WRITE_PATH
546
+ if (!writeDir) {
547
+ throw new Error("HIGHSTATE_ARTIFACT_WRITE_PATH environment variable is not set")
548
+ }
549
+
550
+ // read actual folder stats from filesystem
551
+ const folderStats = await stat(this._path)
552
+
553
+ // create tgz archive of the folder
554
+ const tempBase = process.env.HIGHSTATE_TEMP_PATH || tmpdir()
555
+ const tempArchivePath = join(tempBase, `highstate-pack-${Date.now()}.tgz`)
556
+
557
+ const entity = this.entity
558
+
559
+ try {
560
+ await tar.create(
561
+ {
562
+ gzip: true,
563
+ file: tempArchivePath,
564
+ cwd: dirname(this._path),
565
+
566
+ filter(path) {
567
+ // match without the folder name prefix
568
+ path = path.slice(entity.meta.name.length + 1)
569
+
570
+ // handle explicit excludes
571
+ for (const pattern of exclude ?? []) {
572
+ if (minimatch(path, pattern)) {
573
+ return false
574
+ }
575
+ }
576
+
577
+ // try to match include patterns
578
+ for (const pattern of include ?? []) {
579
+ if (minimatch(path, pattern)) {
580
+ return true
581
+ }
582
+ }
583
+
584
+ // include all files if no include patterns are specified
585
+ return !include || include.length === 0
586
+ },
587
+
588
+ // to reproduce the same archive every time
589
+ portable: true,
590
+ noMtime: true,
591
+ },
592
+ [basename(this._path)],
593
+ )
594
+
595
+ // calculate hash of the archive
596
+ const fileContent = createReadStream(tempArchivePath)
597
+ const hash = createHash("sha256")
598
+
599
+ for await (const chunk of fileContent) {
600
+ hash.update(chunk as Buffer)
601
+ }
602
+
603
+ const hashValue = hash.digest("hex")
604
+
605
+ // move archive to write directory with hash name
606
+ const finalArchivePath = join(writeDir, `${hashValue}.tgz`)
607
+ await rename(tempArchivePath, finalArchivePath)
608
+
609
+ const newMeta = {
610
+ name: this.entity.meta.name,
611
+ mode: folderStats.mode & 0o777, // extract only permission bits
612
+ }
613
+
614
+ // return folder entity with artifact content using actual filesystem stats
615
+ return {
616
+ meta: newMeta,
617
+ content: {
618
+ type: "artifact",
619
+ [HighstateSignature.Artifact]: {
620
+ hash: hashValue,
621
+ meta: await toPromise(this.artifactMeta),
622
+ },
623
+ },
624
+ }
625
+ } finally {
626
+ // clean up temporary archive
627
+ try {
628
+ await rm(tempArchivePath, { force: true })
629
+ } catch {
630
+ // ignore cleanup errors
631
+ }
632
+ }
633
+ }
634
+
635
+ /**
636
+ * Creates an empty materialized folder with the given name.
637
+ *
638
+ * @param name The name of the folder to create
639
+ * @param mode Optional folder mode (permissions)
640
+ * @param parent Optional parent folder to create the folder in
641
+ * @returns A new MaterializedFolder instance representing an empty folder
642
+ */
643
+ static async create(
644
+ name: string,
645
+ mode?: number,
646
+ parent?: MaterializedFolder,
647
+ ): Promise<MaterializedFolder> {
648
+ const entity: common.Folder = {
649
+ meta: {
650
+ name,
651
+ mode,
652
+ },
653
+ content: {
654
+ type: "embedded",
655
+ files: [],
656
+ folders: [],
657
+ },
658
+ }
659
+
660
+ const materializedFolder = new MaterializedFolder(entity, parent)
661
+
662
+ try {
663
+ await materializedFolder._open()
664
+ } catch (error) {
665
+ await materializedFolder[Symbol.asyncDispose]()
666
+ throw error
667
+ }
668
+
669
+ return materializedFolder
670
+ }
671
+
672
+ static async open(
673
+ folder: common.Folder,
674
+ parent?: MaterializedFolder,
675
+ ): Promise<MaterializedFolder> {
676
+ const materializedFolder = new MaterializedFolder(folder, parent)
677
+
678
+ try {
679
+ await materializedFolder._open()
680
+ } catch (error) {
681
+ await materializedFolder[Symbol.asyncDispose]()
682
+ throw error
683
+ }
684
+
685
+ return materializedFolder
686
+ }
687
+ }
688
+
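// --- editor's usage sketch (not part of the package source or this diff) ---
// Materializing a local folder entity and packing it with FolderPackOptions.
// Include/exclude patterns are matched with minimatch against paths relative
// to the folder root (see the filter above). The entity shape and the
// "@highstate/common" import path are assumptions of this sketch.
import type { common } from "@highstate/library"
import { MaterializedFolder } from "@highstate/common"

const sourceFolder: common.Folder = {
  meta: { name: "app" },
  // a "local" path pointing at a .tar/.tgz/.tar.gz/.zip archive would be
  // extracted instead of copied
  content: { type: "local", path: "/path/to/app" },
}

async function packApp() {
  await using folder = await MaterializedFolder.open(sourceFolder)

  // include only sources, but drop tests even when they match an include pattern;
  // requires HIGHSTATE_ARTIFACT_WRITE_PATH, as with MaterializedFile.pack
  return await folder.pack({
    include: ["src/**", "package.json"],
    exclude: ["**/*.test.ts"],
  })
}
// --- end of editor's sketch ---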
689
+ /**
690
+ * Fetches the size of a file from a given L7 endpoint.
691
+ *
692
+ * @param endpoint The L7 endpoint to fetch the file size from.
693
+ * @returns The size of the file in bytes.
694
+ * @throws If the protocol is not HTTP/HTTPS or if the request fails.
695
+ */
696
+ export async function fetchFileSize(endpoint: network.L7Endpoint): Promise<number> {
697
+ if (endpoint.appProtocol !== "http" && endpoint.appProtocol !== "https") {
698
+ throw new Error(
699
+ `Unsupported protocol: ${endpoint.appProtocol}. Only HTTP and HTTPS are supported.`,
700
+ )
701
+ }
702
+
703
+ const url = l7EndpointToString(endpoint)
704
+ const response = await fetch(url, { method: "HEAD" })
705
+
706
+ if (!response.ok) {
707
+ throw new Error(`Failed to fetch file size: ${response.statusText}`)
708
+ }
709
+
710
+ const contentLength = response.headers.get("content-length")
711
+ if (!contentLength) {
712
+ throw new Error("Content-Length header is missing in the response")
713
+ }
714
+
715
+ const size = parseInt(contentLength, 10)
716
+ if (isNaN(size)) {
717
+ throw new Error(`Invalid Content-Length value: ${contentLength}`)
718
+ }
719
+
720
+ return size
721
+ }
722
+
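// --- editor's usage sketch (not part of the package source or this diff) ---
// fetchFileSize issues a HEAD request and reads Content-Length, so it only
// works for HTTP(S) endpoints whose servers report a length. That
// parseL7Endpoint accepts a plain URL string and that both helpers are
// exported from "@highstate/common" are assumptions of this sketch.
import { fetchFileSize, parseL7Endpoint } from "@highstate/common"

const endpoint = parseL7Endpoint("https://example.com/images/disk.img")
const sizeInBytes = await fetchFileSize(endpoint)
// --- end of editor's sketch ---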
723
+ /**
724
+ * Extracts the file name (including its extension) from the resource path of an L7 endpoint, or returns an empty string if the endpoint has no resource.
725
+ */
726
+ export function getNameByEndpoint(endpoint: InputL7Endpoint): string {
727
+ const parsedEndpoint = parseL7Endpoint(endpoint)
728
+
729
+ return parsedEndpoint.resource ? basename(parsedEndpoint.resource) : ""
730
+ }
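// --- editor's usage sketch (not part of the package source or this diff) ---
// getNameByEndpoint returns the last segment of the endpoint's resource path
// (extension included), or "" when there is no resource. Passing a plain URL
// string as InputL7Endpoint is an assumption of this sketch.
import { getNameByEndpoint } from "@highstate/common"

const name = getNameByEndpoint("https://example.com/releases/tool-1.2.3.tar.gz")
// expected: "tool-1.2.3.tar.gz"
// --- end of editor's sketch ---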