@highstate/common 0.9.15 → 0.9.18

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. package/dist/chunk-YYNV3MVT.js +1141 -0
  2. package/dist/chunk-YYNV3MVT.js.map +1 -0
  3. package/dist/highstate.manifest.json +9 -9
  4. package/dist/index.js +2 -50
  5. package/dist/index.js.map +1 -1
  6. package/dist/units/dns/record-set/index.js +4 -6
  7. package/dist/units/dns/record-set/index.js.map +1 -1
  8. package/dist/units/existing-server/index.js +16 -22
  9. package/dist/units/existing-server/index.js.map +1 -1
  10. package/dist/units/network/l3-endpoint/index.js +6 -9
  11. package/dist/units/network/l3-endpoint/index.js.map +1 -1
  12. package/dist/units/network/l4-endpoint/index.js +6 -9
  13. package/dist/units/network/l4-endpoint/index.js.map +1 -1
  14. package/dist/units/script/index.js +6 -9
  15. package/dist/units/script/index.js.map +1 -1
  16. package/dist/units/server-dns/index.js +7 -11
  17. package/dist/units/server-dns/index.js.map +1 -1
  18. package/dist/units/server-patch/index.js +7 -11
  19. package/dist/units/server-patch/index.js.map +1 -1
  20. package/dist/units/ssh/key-pair/index.js +20 -15
  21. package/dist/units/ssh/key-pair/index.js.map +1 -1
  22. package/package.json +20 -6
  23. package/src/shared/command.ts +257 -73
  24. package/src/shared/files.ts +725 -0
  25. package/src/shared/index.ts +1 -0
  26. package/src/shared/network.ts +90 -3
  27. package/src/shared/passwords.ts +38 -2
  28. package/src/shared/ssh.ts +249 -81
  29. package/src/units/existing-server/index.ts +12 -11
  30. package/src/units/remote-folder/index.ts +0 -0
  31. package/src/units/server-dns/index.ts +1 -1
  32. package/src/units/server-patch/index.ts +1 -1
  33. package/src/units/ssh/key-pair/index.ts +16 -7
  34. package/dist/chunk-NISDP46H.js +0 -546
  35. package/dist/chunk-NISDP46H.js.map +0 -1
package/dist/chunk-YYNV3MVT.js
@@ -0,0 +1,1141 @@
1
+ import { toPromise, output, ComponentResource, interpolate, normalize, secret, ensureSecretValue, asset } from '@highstate/pulumi';
2
+ import { uniqueBy, flat, capitalize, groupBy } from 'remeda';
3
+ import { homedir, tmpdir } from 'node:os';
4
+ import { local, remote } from '@pulumi/command';
5
+ import '@highstate/library';
6
+ import { randomBytes, bytesToHex } from '@noble/hashes/utils';
7
+ import { secureMask } from 'micro-key-producer/password.js';
8
+ import getKeys, { PrivateExport } from 'micro-key-producer/ssh.js';
9
+ import { randomBytes as randomBytes$1 } from 'micro-key-producer/utils.js';
10
+ import { mkdtemp, writeFile, cp, rm, stat, rename, mkdir } from 'node:fs/promises';
11
+ import { join, dirname, basename, extname } from 'node:path';
12
+ import { createReadStream } from 'node:fs';
13
+ import { pipeline } from 'node:stream/promises';
14
+ import { Readable } from 'node:stream';
15
+ import { createHash } from 'node:crypto';
16
+ import { minimatch } from 'minimatch';
17
+ import { HighstateSignature } from '@highstate/contract';
18
+ import * as tar from 'tar';
19
+ import unzipper from 'unzipper';
20
+
21
+ // src/shared/network.ts
22
+ function l3EndpointToString(l3Endpoint) {
23
+ switch (l3Endpoint.type) {
24
+ case "ipv4":
25
+ return l3Endpoint.address;
26
+ case "ipv6":
27
+ return l3Endpoint.address;
28
+ case "hostname":
29
+ return l3Endpoint.hostname;
30
+ }
31
+ }
32
+ function l4EndpointToString(l4Endpoint) {
33
+ if (l4Endpoint.type === "ipv6") {
34
+ return `[${l4Endpoint.address}]:${l4Endpoint.port}`;
35
+ }
36
+ return `${l3EndpointToString(l4Endpoint)}:${l4Endpoint.port}`;
37
+ }
38
+ function l4EndpointWithProtocolToString(l4Endpoint) {
39
+ const protocol = `${l4Endpoint.protocol}://`;
40
+ return `${protocol}${l4EndpointToString(l4Endpoint)}`;
41
+ }
42
+ function l7EndpointToString(l7Endpoint) {
43
+ const protocol = `${l7Endpoint.appProtocol}://`;
44
+ let endpoint = l4EndpointToString(l7Endpoint);
45
+ if (l7Endpoint.resource) {
46
+ endpoint += `/${l7Endpoint.resource}`;
47
+ }
48
+ return `${protocol}${endpoint}`;
49
+ }
50
+ function l34EndpointToString(l34Endpoint) {
51
+ if (l34Endpoint.port) {
52
+ return l4EndpointToString(l34Endpoint);
53
+ }
54
+ return l3EndpointToString(l34Endpoint);
55
+ }
56
+ var L34_ENDPOINT_RE = /^(?:(?<protocol>[a-z]+):\/\/)?(?:(?:\[?(?<ipv6>[0-9A-Fa-f:]+)\]?)|(?<ipv4>(?:\d{1,3}\.){3}\d{1,3})|(?<hostname>[a-zA-Z0-9-*]+(?:\.[a-zA-Z0-9-*]+)*))(?::(?<port>\d{1,5}))?$/;
57
+ var L7_ENDPOINT_RE = /^(?<appProtocol>[a-z]+):\/\/(?:(?:\[?(?<ipv6>[0-9A-Fa-f:]+)\]?)|(?<ipv4>(?:\d{1,3}\.){3}\d{1,3})|(?<hostname>[a-zA-Z0-9-*]+(?:\.[a-zA-Z0-9-*]+)*))(?::(?<port>\d{1,5}))?(?:\/(?<resource>.*))?$/;
58
+ function parseL34Endpoint(l34Endpoint) {
59
+ if (typeof l34Endpoint === "object") {
60
+ return l34Endpoint;
61
+ }
62
+ const match = l34Endpoint.match(L34_ENDPOINT_RE);
63
+ if (!match) {
64
+ throw new Error(`Invalid L3/L4 endpoint: "${l34Endpoint}"`);
65
+ }
66
+ const { protocol, ipv6, ipv4, hostname, port } = match.groups;
67
+ if (protocol && protocol !== "tcp" && protocol !== "udp") {
68
+ throw new Error(`Invalid L4 endpoint protocol: "${protocol}"`);
69
+ }
70
+ let visibility = "public";
71
+ if (ipv4 && IPV4_PRIVATE_REGEX.test(ipv4)) {
72
+ visibility = "external";
73
+ } else if (ipv6 && IPV6_PRIVATE_REGEX.test(ipv6)) {
74
+ visibility = "external";
75
+ }
76
+ const fallbackProtocol = port ? "tcp" : void 0;
77
+ return {
78
+ type: ipv6 ? "ipv6" : ipv4 ? "ipv4" : "hostname",
79
+ visibility,
80
+ address: ipv6 || ipv4,
81
+ hostname,
82
+ port: port ? parseInt(port, 10) : void 0,
83
+ protocol: protocol ? protocol : fallbackProtocol
84
+ };
85
+ }
86
+ function parseL3Endpoint(l3Endpoint) {
87
+ if (typeof l3Endpoint === "object") {
88
+ return l3Endpoint;
89
+ }
90
+ const parsed = parseL34Endpoint(l3Endpoint);
91
+ if (parsed.port) {
92
+ throw new Error(`Port cannot be specified in L3 endpoint: "${l3Endpoint}"`);
93
+ }
94
+ return parsed;
95
+ }
96
+ function parseL4Endpoint(l4Endpoint) {
97
+ if (typeof l4Endpoint === "object") {
98
+ return l4Endpoint;
99
+ }
100
+ const parsed = parseL34Endpoint(l4Endpoint);
101
+ if (!parsed.port) {
102
+ throw new Error(`No port found in L4 endpoint: "${l4Endpoint}"`);
103
+ }
104
+ return parsed;
105
+ }
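For context, a minimal usage sketch of the endpoint helpers above (assuming they are re-exported from the @highstate/common entry point; the addresses are made-up examples):

import { parseL4Endpoint, parseL3Endpoint, l4EndpointToString, l3EndpointToCidr } from "@highstate/common"

// "10.0.0.5:8080" carries a port, so it parses as an L4 endpoint; "tcp" is the fallback protocol
const api = parseL4Endpoint("10.0.0.5:8080")
console.log(api.type, api.visibility)   // "ipv4" "external" (private 10.0.0.0/8 range)
console.log(l4EndpointToString(api))    // "10.0.0.5:8080"

// an L3 endpoint must not carry a port; hostnames cannot be converted to CIDR
const host = parseL3Endpoint("203.0.113.7")
console.log(l3EndpointToCidr(host))     // "203.0.113.7/32"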
106
+ var IPV4_PRIVATE_REGEX = /^(?:10|127)(?:\.\d{1,3}){3}$|^(?:172\.1[6-9]|172\.2[0-9]|172\.3[0-1])(?:\.\d{1,3}){2}$|^(?:192\.168)(?:\.\d{1,3}){2}$/;
107
+ var IPV6_PRIVATE_REGEX = /^(?:fc|fd)(?:[0-9a-f]{2}){0,2}::(?:[0-9a-f]{1,4}:){7}[0-9a-f]{1,4}$|^::(?:ffff:(?:10|127)(?:\.\d{1,3}){3}|(?:172\.1[6-9]|172\.2[0-9]|172\.3[0-1])(?:\.\d{1,3}){2}|(?:192\.168)(?:\.\d{1,3}){2})$/;
108
+ async function requireInputL3Endpoint(rawEndpoint, inputEndpoint) {
109
+ if (rawEndpoint) {
110
+ return parseL3Endpoint(rawEndpoint);
111
+ }
112
+ if (inputEndpoint) {
113
+ return toPromise(inputEndpoint);
114
+ }
115
+ throw new Error("No endpoint provided");
116
+ }
117
+ async function requireInputL4Endpoint(rawEndpoint, inputEndpoint) {
118
+ if (rawEndpoint) {
119
+ return parseL4Endpoint(rawEndpoint);
120
+ }
121
+ if (inputEndpoint) {
122
+ return toPromise(inputEndpoint);
123
+ }
124
+ throw new Error("No endpoint provided");
125
+ }
126
+ function l3EndpointToL4(l3Endpoint, port, protocol = "tcp") {
127
+ return {
128
+ ...parseL3Endpoint(l3Endpoint),
129
+ port,
130
+ protocol
131
+ };
132
+ }
133
+ function filterEndpoints(endpoints, filter, types) {
134
+ if (filter?.length) {
135
+ endpoints = endpoints.filter((endpoint) => filter.includes(endpoint.visibility));
136
+ } else if (endpoints.some((endpoint) => endpoint.visibility === "public")) {
137
+ endpoints = endpoints.filter((endpoint) => endpoint.visibility === "public");
138
+ } else if (endpoints.some((endpoint) => endpoint.visibility === "external")) {
139
+ endpoints = endpoints.filter((endpoint) => endpoint.visibility === "external");
140
+ }
141
+ if (types && types.length) {
142
+ endpoints = endpoints.filter((endpoint) => types.includes(endpoint.type));
143
+ }
144
+ return endpoints;
145
+ }
146
+ function l3EndpointToCidr(l3Endpoint) {
147
+ switch (l3Endpoint.type) {
148
+ case "ipv4":
149
+ return `${l3Endpoint.address}/32`;
150
+ case "ipv6":
151
+ return `${l3Endpoint.address}/128`;
152
+ case "hostname":
153
+ throw new Error("Cannot convert hostname to CIDR");
154
+ }
155
+ }
156
+ var udpAppProtocols = ["dns", "dhcp"];
157
+ function parseL7Endpoint(l7Endpoint) {
158
+ if (typeof l7Endpoint === "object") {
159
+ return l7Endpoint;
160
+ }
161
+ const match = l7Endpoint.match(L7_ENDPOINT_RE);
162
+ if (!match) {
163
+ throw new Error(`Invalid L7 endpoint: "${l7Endpoint}"`);
164
+ }
165
+ const { appProtocol, ipv6, ipv4, hostname, port, resource } = match.groups;
166
+ let visibility = "public";
167
+ if (ipv4 && IPV4_PRIVATE_REGEX.test(ipv4)) {
168
+ visibility = "external";
169
+ } else if (ipv6 && IPV6_PRIVATE_REGEX.test(ipv6)) {
170
+ visibility = "external";
171
+ }
172
+ return {
173
+ type: ipv6 ? "ipv6" : ipv4 ? "ipv4" : "hostname",
174
+ visibility,
175
+ address: ipv6 || ipv4,
176
+ hostname,
177
+ // Default port for L7 endpoints (TODO: add more specific defaults for common protocols)
178
+ port: port ? parseInt(port, 10) : 443,
179
+ // L7 endpoints typically use TCP, but can also use UDP for specific protocols
180
+ protocol: udpAppProtocols.includes(appProtocol) ? "udp" : "tcp",
181
+ appProtocol,
182
+ resource: resource || ""
183
+ };
184
+ }
185
+ async function updateEndpoints(currentEndpoints, endpoints, inputEndpoints, mode = "prepend") {
186
+ const resolvedCurrentEndpoints = await toPromise(currentEndpoints);
187
+ const resolvedInputEndpoints = await toPromise(inputEndpoints);
188
+ const newEndpoints = uniqueBy(
189
+ //
190
+ [...endpoints.map(parseL34Endpoint), ...resolvedInputEndpoints],
191
+ (endpoint) => l34EndpointToString(endpoint)
192
+ );
193
+ if (mode === "replace") {
194
+ return newEndpoints;
195
+ }
196
+ return uniqueBy(
197
+ //
198
+ [...newEndpoints, ...resolvedCurrentEndpoints],
199
+ (endpoint) => l34EndpointToString(endpoint)
200
+ );
201
+ }
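A small sketch of how updateEndpoints merges endpoint lists (assumed re-export; the values are illustrative):

import { updateEndpoints, parseL34Endpoint } from "@highstate/common"

// prepend newly supplied endpoints to the ones already stored on the entity,
// de-duplicating by their string representation; "replace" would discard the current list
const endpoints = await updateEndpoints(
  [parseL34Endpoint("203.0.113.7:443")],    // current endpoints
  ["203.0.113.7:443", "198.51.100.2:443"],  // raw endpoints passed as strings
  [],                                       // endpoints coming from unit inputs
  "prepend",
)
console.log(endpoints.length)               // 2, the duplicate is collapsed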
202
+ function getServerConnection(ssh2) {
203
+ return output(ssh2).apply((ssh3) => ({
204
+ host: l3EndpointToString(ssh3.endpoints[0]),
205
+ port: ssh3.endpoints[0].port,
206
+ user: ssh3.user,
207
+ password: ssh3.password,
208
+ privateKey: ssh3.keyPair?.privateKey,
209
+ dialErrorLimit: 3,
210
+ hostKey: ssh3.hostKey
211
+ }));
212
+ }
213
+ function createCommand(command) {
214
+ if (Array.isArray(command)) {
215
+ return command.join(" ");
216
+ }
217
+ return command;
218
+ }
219
+ function wrapWithWorkDir(dir) {
220
+ if (!dir) {
221
+ return (command) => output(command);
222
+ }
223
+ return (command) => interpolate`cd "${dir}" && ${command}`;
224
+ }
225
+ function wrapWithWaitFor(timeout = 300, interval = 5) {
226
+ return (command) => (
227
+ // TODO: escape the command
228
+ interpolate`timeout ${timeout} bash -c 'while ! ${createCommand(command)}; do sleep ${interval}; done'`
229
+ );
230
+ }
231
+ var Command = class _Command extends ComponentResource {
232
+ stdout;
233
+ stderr;
234
+ constructor(name, args, opts) {
235
+ super("highstate:common:Command", name, args, opts);
236
+ const command = args.host === "local" ? new local.Command(
237
+ name,
238
+ {
239
+ create: output(args.create).apply(createCommand),
240
+ update: args.update ? output(args.update).apply(createCommand) : void 0,
241
+ delete: args.delete ? output(args.delete).apply(createCommand) : void 0,
242
+ logging: args.logging,
243
+ triggers: args.triggers ? output(args.triggers).apply(flat) : void 0,
244
+ dir: args.cwd ?? homedir(),
245
+ environment: args.environment,
246
+ stdin: args.stdin
247
+ },
248
+ { ...opts, parent: this }
249
+ ) : new remote.Command(
250
+ name,
251
+ {
252
+ connection: output(args.host).apply((server) => {
253
+ if ("host" in server) {
254
+ return output(server);
255
+ }
256
+ if (!server.ssh) {
257
+ throw new Error(`The server "${server.hostname}" has no SSH credentials`);
258
+ }
259
+ return getServerConnection(server.ssh);
260
+ }),
261
+ create: output(args.create).apply(createCommand).apply(wrapWithWorkDir(args.cwd)),
262
+ update: args.update ? output(args.update).apply(createCommand).apply(wrapWithWorkDir(args.cwd)) : void 0,
263
+ delete: args.delete ? output(args.delete).apply(createCommand).apply(wrapWithWorkDir(args.cwd)) : void 0,
264
+ logging: args.logging,
265
+ triggers: args.triggers ? output(args.triggers).apply(flat) : void 0,
266
+ stdin: args.stdin,
267
+ environment: args.environment
268
+ },
269
+ { ...opts, parent: this }
270
+ );
271
+ this.stdout = command.stdout;
272
+ this.stderr = command.stderr;
273
+ }
274
+ /**
275
+ * Waits for the command to complete and returns its output.
276
+ * The standard output will be returned.
277
+ */
278
+ async wait() {
279
+ return await toPromise(this.stdout);
280
+ }
281
+ /**
282
+ * Creates a command that writes the given content to a file on the host.
283
+ * The file will be created if it does not exist, and overwritten if it does.
284
+ *
285
+ * Use for small text files like configuration files.
286
+ */
287
+ static createTextFile(name, options, opts) {
288
+ return new _Command(
289
+ name,
290
+ {
291
+ host: options.host,
292
+ create: interpolate`mkdir -p $(dirname "${options.path}") && cat > ${options.path}`,
293
+ delete: interpolate`rm -rf ${options.path}`,
294
+ stdin: options.content
295
+ },
296
+ opts
297
+ );
298
+ }
299
+ /**
300
+ * Creates a command that waits for a file to be created and then reads its content.
301
+ * This is useful for waiting for a file to be generated by another process.
302
+ *
303
+ * Use for small text files like configuration files.
304
+ */
305
+ static receiveTextFile(name, options, opts) {
306
+ return new _Command(
307
+ name,
308
+ {
309
+ host: options.host,
310
+ create: interpolate`while ! test -f "${options.path}"; do sleep 1; done; cat "${options.path}"`,
311
+ logging: "stderr"
312
+ },
313
+ opts
314
+ );
315
+ }
316
+ /**
317
+ * Creates a command that waits for a condition to be met.
318
+ * The command will run until the condition is met or the timeout is reached.
319
+ *
320
+ * The condition is considered met if the command returns a zero exit code.
321
+ *
322
+ * @param name The name of the command resource.
323
+ * @param args The arguments for the command, including the condition to check.
324
+ * @param opts Optional resource options.
325
+ */
326
+ static waitFor(name, args, opts) {
327
+ return new _Command(
328
+ name,
329
+ {
330
+ ...args,
331
+ create: output(args.create).apply(wrapWithWaitFor(args.timeout, args.interval)),
332
+ update: args.update ? output(args.update).apply(wrapWithWaitFor(args.timeout, args.interval)) : void 0,
333
+ delete: args.delete ? output(args.delete).apply(wrapWithWaitFor(args.timeout, args.interval)) : void 0
334
+ },
335
+ opts
336
+ );
337
+ }
338
+ };
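A short sketch of the Command helpers above (assumed re-export; the server entity and paths are hypothetical):

import { Command } from "@highstate/common"

// hypothetical server entity with SSH credentials, e.g. returned by createServerEntity
declare const server: any

// write a small config file on the remote host (the content is piped via stdin)
const config = Command.createTextFile("app-config", {
  host: server,
  path: "/etc/myapp/config.yaml",
  content: "listen: 0.0.0.0:8080\n",
})

// poll locally until a TCP port answers; the condition is met on a zero exit code
const ready = Command.waitFor("wait-port", {
  host: "local",
  create: "nc -z 127.0.0.1 8080",
  timeout: 120,
  interval: 5,
})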
339
+ function getTypeByEndpoint(endpoint) {
340
+ switch (endpoint.type) {
341
+ case "ipv4":
342
+ return "A";
343
+ case "ipv6":
344
+ return "AAAA";
345
+ case "hostname":
346
+ return "CNAME";
347
+ }
348
+ }
349
+ var DnsRecord = class extends ComponentResource {
350
+ /**
351
+ * The underlying dns record resource.
352
+ */
353
+ dnsRecord;
354
+ /**
355
+ * The wait commands to be executed after the DNS record is created/updated.
356
+ *
357
+ * Use this field as a dependency for other resources.
358
+ */
359
+ waitCommands;
360
+ constructor(name, args, opts) {
361
+ super("highstate:common:DnsRecord", name, args, opts);
362
+ this.dnsRecord = output(args).apply((args2) => {
363
+ const l3Endpoint = parseL3Endpoint(args2.value);
364
+ const type = args2.type ?? getTypeByEndpoint(l3Endpoint);
365
+ return output(
366
+ this.create(
367
+ name,
368
+ {
369
+ ...args2,
370
+ type,
371
+ value: l3EndpointToString(l3Endpoint)
372
+ },
373
+ { ...opts, parent: this }
374
+ )
375
+ );
376
+ });
377
+ this.waitCommands = output(args).apply((args2) => {
378
+ const waitAt = args2.waitAt ? Array.isArray(args2.waitAt) ? args2.waitAt : [args2.waitAt] : [];
379
+ return waitAt.map((host) => {
380
+ const hostname = host === "local" ? "local" : host.hostname;
381
+ return new Command(
382
+ `${name}-wait-${hostname}`,
383
+ {
384
+ host,
385
+ create: `while ! getent hosts ${args2.name} >/dev/null; do echo "Waiting for DNS record ${args2.name} to be created"; sleep 5; done`,
386
+ triggers: [args2.type, args2.ttl, args2.priority, args2.proxied]
387
+ },
388
+ { parent: this }
389
+ );
390
+ });
391
+ });
392
+ }
393
+ static create(name, args, opts) {
394
+ return output(args).apply(async (args2) => {
395
+ const providerType = args2.provider.type;
396
+ const implName = `${capitalize(providerType)}DnsRecord`;
397
+ const implModule = await import(`@highstate/${providerType}`);
398
+ const implClass = implModule[implName];
399
+ return new implClass(name, args2, opts);
400
+ });
401
+ }
402
+ };
403
+ var DnsRecordSet = class _DnsRecordSet extends ComponentResource {
404
+ /**
405
+ * The underlying dns record resources.
406
+ */
407
+ dnsRecords;
408
+ /**
409
+ * The wait commands to be executed after the DNS records are created/updated.
410
+ */
411
+ waitCommands;
412
+ constructor(name, records, opts) {
413
+ super("highstate:common:DnsRecordSet", name, records, opts);
414
+ this.dnsRecords = records;
415
+ this.waitCommands = records.apply(
416
+ (records2) => records2.flatMap((record) => record.waitCommands)
417
+ );
418
+ }
419
+ static create(name, args, opts) {
420
+ const records = output(args).apply((args2) => {
421
+ const recordName = args2.name ?? name;
422
+ const values = normalize(args2.value, args2.values);
423
+ return output(
424
+ args2.providers.filter((provider) => recordName.endsWith(provider.domain)).flatMap((provider) => {
425
+ return values.map((value) => {
426
+ const l3Endpoint = parseL3Endpoint(value);
427
+ return DnsRecord.create(
428
+ `${provider.type}-from-${recordName}-to-${l3EndpointToString(l3Endpoint)}`,
429
+ { name: recordName, ...args2, value: l3Endpoint, provider },
430
+ opts
431
+ );
432
+ });
433
+ })
434
+ );
435
+ });
436
+ return new _DnsRecordSet(name, records, opts);
437
+ }
438
+ };
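A sketch of DnsRecordSet.create (assumed re-export; the provider entity is hypothetical and must expose the domain and type fields used above):

import { DnsRecordSet } from "@highstate/common"

// hypothetical DNS provider entity, e.g. produced by a @highstate/cloudflare unit,
// with provider.domain === "example.com" so the record name matches it
declare const provider: any

const records = DnsRecordSet.create("app.example.com", {
  providers: [provider],
  values: ["203.0.113.7", "2001:db8::1"], // record types A and AAAA are derived from the endpoint types
  waitAt: "local",                        // poll `getent hosts` locally until the name resolves
})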
439
+ async function updateEndpointsWithFqdn(endpoints, fqdn, fqdnEndpointFilter, patchMode, dnsProviders) {
440
+ const resolvedEndpoints = await toPromise(endpoints);
441
+ if (!fqdn) {
442
+ return {
443
+ endpoints: resolvedEndpoints,
444
+ dnsRecordSet: void 0
445
+ };
446
+ }
447
+ const filteredEndpoints = filterEndpoints(resolvedEndpoints, fqdnEndpointFilter);
448
+ const dnsRecordSet = DnsRecordSet.create(fqdn, {
449
+ providers: dnsProviders,
450
+ values: filteredEndpoints,
451
+ waitAt: "local"
452
+ });
453
+ const portProtocolGroups = groupBy(
454
+ filteredEndpoints,
455
+ (endpoint) => endpoint.port ? `${endpoint.port}-${endpoint.protocol}` : ""
456
+ );
457
+ const newEndpoints = [];
458
+ for (const group of Object.values(portProtocolGroups)) {
459
+ newEndpoints.unshift({
460
+ type: "hostname",
461
+ hostname: fqdn,
462
+ visibility: group[0].visibility,
463
+ port: group[0].port,
464
+ protocol: group[0].protocol
465
+ });
466
+ }
467
+ await toPromise(
468
+ dnsRecordSet.waitCommands.apply((waitCommands) => waitCommands.map((command) => command.stdout))
469
+ );
470
+ if (patchMode === "prepend") {
471
+ return {
472
+ endpoints: uniqueBy(
473
+ //
474
+ [...newEndpoints, ...resolvedEndpoints],
475
+ (endpoint) => l34EndpointToString(endpoint)
476
+ ),
477
+ dnsRecordSet
478
+ };
479
+ }
480
+ return {
481
+ endpoints: newEndpoints,
482
+ dnsRecordSet
483
+ };
484
+ }
485
+ function generatePassword() {
486
+ return secureMask.apply(randomBytes(32)).password;
487
+ }
488
+ function generateKey(format = "hex") {
489
+ const bytes = randomBytes(32);
490
+ if (format === "raw") {
491
+ return bytes;
492
+ }
493
+ if (format === "base64") {
494
+ return Buffer.from(bytes).toString("base64");
495
+ }
496
+ return bytesToHex(bytes);
497
+ }
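The generators above in a nutshell (assumed re-export):

import { generatePassword, generateKey } from "@highstate/common"

const password = generatePassword()   // human-typeable password derived from 32 random bytes
const hexKey = generateKey()          // 64 hex characters (32 random bytes)
const b64Key = generateKey("base64")  // same entropy, base64-encoded
const rawKey = generateKey("raw")     // raw Uint8Array of 32 bytes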
498
+
499
+ // assets/images.json
500
+ var terminal_ssh = {
501
+ image: "ghcr.io/exeteres/highstate/terminal-ssh:latest@sha256:99380e0405522afa0058eedce124c1970a87408663365b2dbce737801a7cd5d1"
502
+ };
503
+
504
+ // src/shared/ssh.ts
505
+ function createSshTerminal(credentials) {
506
+ return output(credentials).apply((credentials2) => {
507
+ if (!credentials2) {
508
+ return void 0;
509
+ }
510
+ const command = ["ssh", "-tt", "-o", "UserKnownHostsFile=/known_hosts"];
511
+ const endpoint = credentials2.endpoints[0];
512
+ command.push("-p", endpoint.port.toString());
513
+ if (credentials2.keyPair) {
514
+ command.push("-i", "/private_key");
515
+ }
516
+ command.push(`${credentials2.user}@${l3EndpointToString(endpoint)}`);
517
+ if (credentials2.password) {
518
+ command.unshift("sshpass", "-f", "/password");
519
+ }
520
+ return {
521
+ name: "ssh",
522
+ meta: {
523
+ title: "Shell",
524
+ description: "Connect to the server via SSH",
525
+ icon: "gg:remote"
526
+ },
527
+ spec: {
528
+ image: terminal_ssh.image,
529
+ command,
530
+ files: {
531
+ "/password": credentials2.password,
532
+ "/private_key": credentials2.keyPair?.privateKey && {
533
+ content: {
534
+ type: "embedded",
535
+ value: credentials2.keyPair?.privateKey
536
+ },
537
+ meta: {
538
+ name: "private_key",
539
+ mode: 384
540
+ }
541
+ },
542
+ "/known_hosts": {
543
+ content: {
544
+ type: "embedded",
545
+ value: `${l3EndpointToString(endpoint)} ${credentials2.hostKey}`
546
+ },
547
+ meta: {
548
+ name: "known_hosts",
549
+ mode: 420
550
+ }
551
+ }
552
+ }
553
+ }
554
+ };
555
+ });
556
+ }
557
+ function generateSshPrivateKey() {
558
+ const seed = randomBytes$1(32);
559
+ return getKeys(seed).privateKey;
560
+ }
561
+ function sshPrivateKeyToKeyPair(privateKeyString) {
562
+ return output(privateKeyString).apply((privateKeyString2) => {
563
+ const privateKeyStruct = PrivateExport.decode(privateKeyString2);
564
+ const privKey = privateKeyStruct.keys[0].privKey.privKey;
565
+ const { fingerprint, publicKey } = getKeys(privKey.slice(0, 32));
566
+ return output({
567
+ type: "ed25519",
568
+ fingerprint,
569
+ publicKey,
570
+ privateKey: secret(privateKeyString2)
571
+ });
572
+ });
573
+ }
574
+ function ensureSshKeyPair(privateKey, existingKeyPair) {
575
+ if (existingKeyPair) {
576
+ return output(existingKeyPair);
577
+ }
578
+ return ensureSecretValue(privateKey, generateSshPrivateKey).value.apply(sshPrivateKeyToKeyPair);
579
+ }
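A sketch of the SSH key helpers (assumed re-export); the resulting key pair is a Pulumi output wrapping the ed25519 key material:

import { generateSshPrivateKey, sshPrivateKeyToKeyPair } from "@highstate/common"

// generate a fresh ed25519 private key in OpenSSH format and derive the public parts from it
const privateKey = generateSshPrivateKey()
const keyPair = sshPrivateKeyToKeyPair(privateKey)

keyPair.apply(kp => console.log(kp.type, kp.fingerprint)) // "ed25519" plus the key fingerprint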
580
+ async function createServerEntity({
581
+ name,
582
+ fallbackHostname,
583
+ endpoints,
584
+ sshEndpoint,
585
+ sshPort = 22,
586
+ sshUser = "root",
587
+ sshPassword,
588
+ sshPrivateKey,
589
+ hasSsh = true,
590
+ pingInterval,
591
+ pingTimeout,
592
+ waitForPing,
593
+ waitForSsh,
594
+ sshCheckInterval,
595
+ sshCheckTimeout
596
+ }) {
597
+ if (endpoints.length === 0) {
598
+ throw new Error("At least one L3 endpoint is required to create a server entity");
599
+ }
600
+ fallbackHostname ??= name;
601
+ waitForSsh ??= hasSsh;
602
+ waitForPing ??= !waitForSsh;
603
+ if (waitForPing) {
604
+ await Command.waitFor(`${name}.ping`, {
605
+ host: "local",
606
+ create: `ping -c 1 ${l3EndpointToString(endpoints[0])}`,
607
+ timeout: pingTimeout ?? 300,
608
+ interval: pingInterval ?? 5,
609
+ triggers: [Date.now()]
610
+ }).wait();
611
+ }
612
+ if (!hasSsh) {
613
+ return {
614
+ hostname: name,
615
+ endpoints
616
+ };
617
+ }
618
+ sshEndpoint ??= l3EndpointToL4(endpoints[0], sshPort);
619
+ if (waitForSsh) {
620
+ await Command.waitFor(`${name}.ssh`, {
621
+ host: "local",
622
+ create: `nc -zv ${l3EndpointToString(sshEndpoint)} ${sshPort}`,
623
+ timeout: sshCheckTimeout ?? 300,
624
+ interval: sshCheckInterval ?? 5,
625
+ triggers: [Date.now()]
626
+ }).wait();
627
+ }
628
+ const connection = output({
629
+ host: l3EndpointToString(sshEndpoint),
630
+ port: sshEndpoint.port,
631
+ user: sshUser,
632
+ password: sshPassword,
633
+ privateKey: sshPrivateKey,
634
+ dialErrorLimit: 3
635
+ });
636
+ const hostnameResult = new remote.Command("hostname", {
637
+ connection,
638
+ create: "hostname",
639
+ triggers: [Date.now()]
640
+ });
641
+ const hostKeyResult = new remote.Command("host-key", {
642
+ connection,
643
+ create: "cat /etc/ssh/ssh_host_ed25519_key.pub",
644
+ triggers: [Date.now()]
645
+ });
646
+ return await toPromise({
647
+ endpoints,
648
+ hostname: hostnameResult.stdout.apply((x) => x.trim()),
649
+ ssh: {
650
+ endpoints: [sshEndpoint],
651
+ user: sshUser,
652
+ hostKey: hostKeyResult.stdout.apply((x) => x.trim()),
653
+ password: sshPassword,
654
+ keyPair: sshPrivateKey ? sshPrivateKeyToKeyPair(sshPrivateKey) : void 0
655
+ }
656
+ });
657
+ }
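A sketch of createServerEntity (assumed re-export; the address and key are placeholders, and in a real unit the private key would come from an argument or input):

import { createServerEntity, parseL3Endpoint } from "@highstate/common"

const server = await createServerEntity({
  name: "my-server",
  endpoints: [parseL3Endpoint("203.0.113.7")],
  sshUser: "root",
  sshPrivateKey: process.env.SSH_PRIVATE_KEY,  // placeholder source for the key
  waitForSsh: true,  // probe the SSH port with nc, then read the hostname and host key over SSH
})
console.log(server.hostname, server.ssh?.user)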
658
+ function assetFromFile(file) {
659
+ if (file.content.type === "remote") {
660
+ return new asset.RemoteAsset(l7EndpointToString(file.content.endpoint));
661
+ }
662
+ if (file.content.type === "local") {
663
+ return new asset.FileAsset(file.content.path);
664
+ }
665
+ if (file.content.type === "artifact") {
666
+ throw new Error(
667
+ "Artifact-based files cannot be converted to Pulumi assets directly. Use MaterializedFile instead."
668
+ );
669
+ }
670
+ if (file.meta.isBinary) {
671
+ throw new Error(
672
+ "Cannot create asset from inline binary file content. Please open an issue if you need this feature."
673
+ );
674
+ }
675
+ return new asset.StringAsset(file.content.value);
676
+ }
677
+ function archiveFromFolder(folder) {
678
+ if (folder.content.type === "remote") {
679
+ return new asset.RemoteArchive(l7EndpointToString(folder.content.endpoint));
680
+ }
681
+ if (folder.content.type === "local") {
682
+ return new asset.FileArchive(folder.content.path);
683
+ }
684
+ if (folder.content.type === "artifact") {
685
+ throw new Error(
686
+ "Artifact-based folders cannot be converted to Pulumi assets directly. Use MaterializedFolder instead."
687
+ );
688
+ }
689
+ const files = {};
690
+ for (const file of folder.content.files) {
691
+ files[file.meta.name] = assetFromFile(file);
692
+ }
693
+ for (const subfolder of folder.content.folders) {
694
+ files[subfolder.meta.name] = archiveFromFolder(subfolder);
695
+ }
696
+ return new asset.AssetArchive(files);
697
+ }
698
+ async function unarchiveFromStream(stream, destinationPath, archiveType) {
699
+ await mkdir(destinationPath, { recursive: true });
700
+ switch (archiveType) {
701
+ case "tar": {
702
+ const extractStream = tar.extract({
703
+ cwd: destinationPath,
704
+ strict: true
705
+ });
706
+ await pipeline(stream, extractStream);
707
+ return;
708
+ }
709
+ case "zip": {
710
+ await pipeline(stream, unzipper.Extract({ path: destinationPath }));
711
+ return;
712
+ }
713
+ }
714
+ }
715
+ function detectArchiveType(fileName, contentType) {
716
+ const ext = extname(fileName).toLowerCase();
717
+ if (ext === ".tar" || ext === ".tgz" || ext === ".tar.gz") {
718
+ return "tar";
719
+ }
720
+ if (ext === ".zip") {
721
+ return "zip";
722
+ }
723
+ if (contentType) {
724
+ if (contentType.includes("tar") || contentType.includes("gzip")) {
725
+ return "tar";
726
+ }
727
+ if (contentType.includes("zip")) {
728
+ return "zip";
729
+ }
730
+ }
731
+ return null;
732
+ }
733
+ var MaterializedFile = class _MaterializedFile {
734
+ constructor(entity, parent) {
735
+ this.entity = entity;
736
+ this.parent = parent;
737
+ }
738
+ _tmpPath;
739
+ _path;
740
+ _disposed = false;
741
+ artifactMeta = {};
742
+ get path() {
743
+ return this._path;
744
+ }
745
+ async _open() {
746
+ if (this.parent) {
747
+ this._path = join(this.parent.path, this.entity.meta.name);
748
+ } else {
749
+ const tempBase = process.env.HIGHSTATE_TEMP_PATH || tmpdir();
750
+ this._tmpPath = await mkdtemp(join(tempBase, "highstate-file-"));
751
+ this._path = join(this._tmpPath, this.entity.meta.name);
752
+ }
753
+ switch (this.entity.content.type) {
754
+ case "embedded": {
755
+ const content = this.entity.meta.isBinary ? Buffer.from(this.entity.content.value, "base64") : this.entity.content.value;
756
+ await writeFile(this._path, content, { mode: this.entity.meta.mode });
757
+ break;
758
+ }
759
+ case "local": {
760
+ await cp(this.entity.content.path, this._path, { mode: this.entity.meta.mode });
761
+ break;
762
+ }
763
+ case "remote": {
764
+ const response = await fetch(l7EndpointToString(this.entity.content.endpoint));
765
+ if (!response.ok) throw new Error(`Failed to fetch: ${response.statusText}`);
766
+ const arrayBuffer = await response.arrayBuffer();
767
+ await writeFile(this._path, Buffer.from(arrayBuffer), { mode: this.entity.meta.mode });
768
+ break;
769
+ }
770
+ case "artifact": {
771
+ const artifactPath = process.env.HIGHSTATE_ARTIFACT_READ_PATH;
772
+ if (!artifactPath) {
773
+ throw new Error(
774
+ "HIGHSTATE_ARTIFACT_READ_PATH environment variable is not set but required for artifact content"
775
+ );
776
+ }
777
+ const tgzPath = join(artifactPath, `${this.entity.content.hash}.tgz`);
778
+ const readStream = createReadStream(tgzPath);
779
+ await unarchiveFromStream(readStream, dirname(this._path), "tar");
780
+ break;
781
+ }
782
+ }
783
+ }
784
+ async [Symbol.asyncDispose]() {
785
+ if (this._disposed) return;
786
+ this._disposed = true;
787
+ try {
788
+ if (this._tmpPath) {
789
+ await rm(this._tmpPath, { recursive: true, force: true });
790
+ } else {
791
+ await rm(this._path, { force: true });
792
+ }
793
+ } catch (error) {
794
+ console.warn("failed to clean up materialized file:", error);
795
+ }
796
+ }
797
+ /**
798
+ * Packs the materialized file into an artifact and returns the file entity with artifact content.
799
+ *
800
+ * Creates a tgz archive of the file and stores it in HIGHSTATE_ARTIFACT_WRITE_PATH where it will be collected by Highstate.
801
+ */
802
+ async pack() {
803
+ const writeDir = process.env.HIGHSTATE_ARTIFACT_WRITE_PATH;
804
+ if (!writeDir) {
805
+ throw new Error("HIGHSTATE_ARTIFACT_WRITE_PATH environment variable is not set");
806
+ }
807
+ const fileStats = await stat(this._path);
808
+ const tempBase = process.env.HIGHSTATE_TEMP_PATH || tmpdir();
809
+ const tempArchivePath = join(tempBase, `highstate-pack-${Date.now()}.tgz`);
810
+ try {
811
+ await tar.create(
812
+ {
813
+ gzip: true,
814
+ file: tempArchivePath,
815
+ cwd: dirname(this._path),
816
+ noMtime: true
817
+ // to reproduce the same archive every time
818
+ },
819
+ [basename(this._path)]
820
+ );
821
+ const fileContent = createReadStream(tempArchivePath);
822
+ const hash = createHash("sha256");
823
+ for await (const chunk of fileContent) {
824
+ hash.update(chunk);
825
+ }
826
+ const hashValue = hash.digest("hex");
827
+ const finalArchivePath = join(writeDir, `${hashValue}.tgz`);
828
+ await rename(tempArchivePath, finalArchivePath);
829
+ const newMeta = {
830
+ name: this.entity.meta.name,
831
+ mode: fileStats.mode & 511,
832
+ // extract only permission bits
833
+ size: fileStats.size,
834
+ isBinary: this.entity.meta.isBinary
835
+ // keep original binary flag as we can't reliably detect this from filesystem
836
+ };
837
+ return {
838
+ meta: newMeta,
839
+ content: {
840
+ type: "artifact",
841
+ [HighstateSignature.Artifact]: true,
842
+ hash: hashValue,
843
+ meta: await toPromise(this.artifactMeta)
844
+ }
845
+ };
846
+ } finally {
847
+ try {
848
+ await rm(tempArchivePath, { force: true });
849
+ } catch {
850
+ }
851
+ }
852
+ }
853
+ /**
854
+ * Creates an empty materialized file with the given name.
855
+ *
856
+ * @param name The name of the file to create
857
+ * @param content Optional initial content of the file (default is empty string)
858
+ * @param mode Optional file mode (permissions)
859
+ * @returns A new MaterializedFile instance representing an empty file
860
+ */
861
+ static async create(name, content = "", mode) {
862
+ const entity = {
863
+ meta: {
864
+ name,
865
+ mode,
866
+ size: 0,
867
+ isBinary: false
868
+ },
869
+ content: {
870
+ type: "embedded",
871
+ value: content
872
+ }
873
+ };
874
+ const materializedFile = new _MaterializedFile(entity);
875
+ try {
876
+ await materializedFile._open();
877
+ } catch (error) {
878
+ await materializedFile[Symbol.asyncDispose]();
879
+ throw error;
880
+ }
881
+ return materializedFile;
882
+ }
883
+ static async open(file, parent) {
884
+ const materializedFile = new _MaterializedFile(file, parent);
885
+ try {
886
+ await materializedFile._open();
887
+ } catch (error) {
888
+ await materializedFile[Symbol.asyncDispose]();
889
+ throw error;
890
+ }
891
+ return materializedFile;
892
+ }
893
+ };
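A sketch of MaterializedFile (assumed re-export); `await using` relies on the Symbol.asyncDispose support implemented above, and pack() requires the HIGHSTATE_ARTIFACT_WRITE_PATH variable set by the Highstate runtime:

import { MaterializedFile } from "@highstate/common"

// materialize an in-memory file entity on disk, then pack it into a content-addressed artifact
await using file = await MaterializedFile.create("kubeconfig.yaml", "apiVersion: v1\n")
console.log(file.path)             // <tmp>/highstate-file-XXXXXX/kubeconfig.yaml

const packed = await file.pack()
console.log(packed.content.hash)   // sha256 of the reproducible .tgz archive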
894
+ var MaterializedFolder = class _MaterializedFolder {
895
+ constructor(entity, parent) {
896
+ this.entity = entity;
897
+ this.parent = parent;
898
+ }
899
+ _tmpPath;
900
+ _path;
901
+ _disposed = false;
902
+ _disposables = [];
903
+ artifactMeta = {};
904
+ get path() {
905
+ return this._path;
906
+ }
907
+ async _open() {
908
+ if (this.parent) {
909
+ this._path = join(this.parent.path, this.entity.meta.name);
910
+ } else {
911
+ const tempBase = process.env.HIGHSTATE_TEMP_PATH || tmpdir();
912
+ this._tmpPath = await mkdtemp(join(tempBase, "highstate-folder-"));
913
+ this._path = join(this._tmpPath, this.entity.meta.name);
914
+ }
915
+ switch (this.entity.content.type) {
916
+ case "embedded": {
917
+ await mkdir(this._path, { mode: this.entity.meta.mode });
918
+ for (const file of this.entity.content.files) {
919
+ const materializedFile = await MaterializedFile.open(file, this);
920
+ this._disposables.push(materializedFile);
921
+ }
922
+ for (const subfolder of this.entity.content.folders) {
923
+ const materializedFolder = await _MaterializedFolder.open(subfolder, this);
924
+ this._disposables.push(materializedFolder);
925
+ }
926
+ break;
927
+ }
928
+ case "local": {
929
+ const archiveType = detectArchiveType(this.entity.content.path);
930
+ if (archiveType) {
931
+ const readStream = createReadStream(this.entity.content.path);
932
+ await unarchiveFromStream(readStream, this._path, archiveType);
933
+ } else {
934
+ await cp(this.entity.content.path, this._path, {
935
+ recursive: true,
936
+ mode: this.entity.meta.mode
937
+ });
938
+ }
939
+ break;
940
+ }
941
+ case "remote": {
942
+ const response = await fetch(l7EndpointToString(this.entity.content.endpoint));
943
+ if (!response.ok) throw new Error(`Failed to fetch: ${response.statusText}`);
944
+ if (!response.body) throw new Error("Response body is empty");
945
+ const url = new URL(l7EndpointToString(this.entity.content.endpoint));
946
+ const archiveType = detectArchiveType(
947
+ url.pathname,
948
+ response.headers.get("content-type") || void 0
949
+ );
950
+ if (!archiveType) {
951
+ throw new Error("Remote folder content must be an archive (tar, tar.gz, tgz, or zip)");
952
+ }
953
+ if (!response.body) {
954
+ throw new Error("Response body is empty");
955
+ }
956
+ const reader = response.body.getReader();
957
+ const stream = new Readable({
958
+ async read() {
959
+ try {
960
+ const { done, value } = await reader.read();
961
+ if (done) {
962
+ this.push(null);
963
+ } else {
964
+ this.push(Buffer.from(value));
965
+ }
966
+ } catch (error) {
967
+ this.destroy(error instanceof Error ? error : new Error(String(error)));
968
+ }
969
+ }
970
+ });
971
+ await unarchiveFromStream(stream, this._path, archiveType);
972
+ break;
973
+ }
974
+ case "artifact": {
975
+ const artifactPath = process.env.HIGHSTATE_ARTIFACT_READ_PATH;
976
+ if (!artifactPath) {
977
+ throw new Error(
978
+ "HIGHSTATE_ARTIFACT_READ_PATH environment variable is not set but required for artifact content"
979
+ );
980
+ }
981
+ const tgzPath = join(artifactPath, `${this.entity.content.hash}.tgz`);
982
+ const readStream = createReadStream(tgzPath);
983
+ await unarchiveFromStream(readStream, dirname(this._path), "tar");
984
+ break;
985
+ }
986
+ }
987
+ }
988
+ async [Symbol.asyncDispose]() {
989
+ if (this._disposed) return;
990
+ this._disposed = true;
991
+ try {
992
+ if (this._tmpPath) {
993
+ await rm(this._tmpPath, { recursive: true, force: true });
994
+ } else {
995
+ await rm(this._path, { recursive: true, force: true });
996
+ }
997
+ } catch (error) {
998
+ console.warn("failed to clean up materialized folder:", error);
999
+ }
1000
+ for (const disposable of this._disposables) {
1001
+ await disposable[Symbol.asyncDispose]();
1002
+ }
1003
+ }
1004
+ /**
1005
+ * Packs the materialized folder into an artifact and returns the folder entity with artifact content.
1006
+ *
1007
+ * Creates a tgz archive of the entire folder and stores it in HIGHSTATE_ARTIFACT_WRITE_PATH where it will be collected by Highstate.
1008
+ */
1009
+ async pack({ include, exclude } = {}) {
1010
+ const writeDir = process.env.HIGHSTATE_ARTIFACT_WRITE_PATH;
1011
+ if (!writeDir) {
1012
+ throw new Error("HIGHSTATE_ARTIFACT_WRITE_PATH environment variable is not set");
1013
+ }
1014
+ const folderStats = await stat(this._path);
1015
+ const tempBase = process.env.HIGHSTATE_TEMP_PATH || tmpdir();
1016
+ const tempArchivePath = join(tempBase, `highstate-pack-${Date.now()}.tgz`);
1017
+ const entity = this.entity;
1018
+ try {
1019
+ await tar.create(
1020
+ {
1021
+ gzip: true,
1022
+ file: tempArchivePath,
1023
+ cwd: dirname(this._path),
1024
+ filter(path) {
1025
+ path = path.slice(entity.meta.name.length + 1);
1026
+ for (const pattern of exclude ?? []) {
1027
+ if (minimatch(path, pattern)) {
1028
+ return false;
1029
+ }
1030
+ }
1031
+ for (const pattern of include ?? []) {
1032
+ if (minimatch(path, pattern)) {
1033
+ return true;
1034
+ }
1035
+ }
1036
+ return !include || include.length === 0;
1037
+ },
1038
+ // to reproduce the same archive every time
1039
+ portable: true,
1040
+ noMtime: true
1041
+ },
1042
+ [basename(this._path)]
1043
+ );
1044
+ const fileContent = createReadStream(tempArchivePath);
1045
+ const hash = createHash("sha256");
1046
+ for await (const chunk of fileContent) {
1047
+ hash.update(chunk);
1048
+ }
1049
+ const hashValue = hash.digest("hex");
1050
+ const finalArchivePath = join(writeDir, `${hashValue}.tgz`);
1051
+ await rename(tempArchivePath, finalArchivePath);
1052
+ const newMeta = {
1053
+ name: this.entity.meta.name,
1054
+ mode: folderStats.mode & 511
1055
+ // extract only permission bits
1056
+ };
1057
+ return {
1058
+ meta: newMeta,
1059
+ content: {
1060
+ [HighstateSignature.Artifact]: true,
1061
+ type: "artifact",
1062
+ hash: hashValue,
1063
+ meta: await toPromise(this.artifactMeta)
1064
+ }
1065
+ };
1066
+ } finally {
1067
+ try {
1068
+ await rm(tempArchivePath, { force: true });
1069
+ } catch {
1070
+ }
1071
+ }
1072
+ }
1073
+ /**
1074
+ * Creates an empty materialized folder with the given name.
1075
+ *
1076
+ * @param name The name of the folder to create
1077
+ * @param mode Optional folder mode (permissions)
1078
+ * @param parent Optional parent folder to create the folder in
1079
+ * @returns A new MaterializedFolder instance representing an empty folder
1080
+ */
1081
+ static async create(name, mode, parent) {
1082
+ const entity = {
1083
+ meta: {
1084
+ name,
1085
+ mode
1086
+ },
1087
+ content: {
1088
+ type: "embedded",
1089
+ files: [],
1090
+ folders: []
1091
+ }
1092
+ };
1093
+ const materializedFolder = new _MaterializedFolder(entity, parent);
1094
+ try {
1095
+ await materializedFolder._open();
1096
+ } catch (error) {
1097
+ await materializedFolder[Symbol.asyncDispose]();
1098
+ throw error;
1099
+ }
1100
+ return materializedFolder;
1101
+ }
1102
+ static async open(folder, parent) {
1103
+ const materializedFolder = new _MaterializedFolder(folder, parent);
1104
+ try {
1105
+ await materializedFolder._open();
1106
+ } catch (error) {
1107
+ await materializedFolder[Symbol.asyncDispose]();
1108
+ throw error;
1109
+ }
1110
+ return materializedFolder;
1111
+ }
1112
+ };
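A sketch of MaterializedFolder.pack with include/exclude globs (assumed re-export; the folder entity is hypothetical):

import { MaterializedFolder } from "@highstate/common"

// hypothetical folder entity, e.g. the output of the remote-folder unit
declare const folder: any

await using materialized = await MaterializedFolder.open(folder)

// pack only the sources; globs are matched with minimatch against paths relative to the folder root
const artifact = await materialized.pack({
  include: ["src/**"],
  exclude: ["**/node_modules/**"],
})
console.log(artifact.content.hash)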
1113
+ async function fetchFileSize(endpoint) {
1114
+ if (endpoint.appProtocol !== "http" && endpoint.appProtocol !== "https") {
1115
+ throw new Error(
1116
+ `Unsupported protocol: ${endpoint.appProtocol}. Only HTTP and HTTPS are supported.`
1117
+ );
1118
+ }
1119
+ const url = l7EndpointToString(endpoint);
1120
+ const response = await fetch(url, { method: "HEAD" });
1121
+ if (!response.ok) {
1122
+ throw new Error(`Failed to fetch file size: ${response.statusText}`);
1123
+ }
1124
+ const contentLength = response.headers.get("content-length");
1125
+ if (!contentLength) {
1126
+ throw new Error("Content-Length header is missing in the response");
1127
+ }
1128
+ const size = parseInt(contentLength, 10);
1129
+ if (isNaN(size)) {
1130
+ throw new Error(`Invalid Content-Length value: ${contentLength}`);
1131
+ }
1132
+ return size;
1133
+ }
1134
+ function getNameByEndpoint(endpoint) {
1135
+ const parsedEndpoint = parseL7Endpoint(endpoint);
1136
+ return parsedEndpoint.resource ? basename(parsedEndpoint.resource) : "";
1137
+ }
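Finally, a sketch of the download helpers (assumed re-export; the URL is an example):

import { fetchFileSize, parseL7Endpoint, getNameByEndpoint } from "@highstate/common"

const endpoint = parseL7Endpoint("https://example.com/downloads/app.tar.gz")
console.log(getNameByEndpoint(endpoint))    // "app.tar.gz", the basename of the resource path
console.log(await fetchFileSize(endpoint))  // size from the Content-Length of a HEAD request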
1138
+
1139
+ export { Command, DnsRecord, DnsRecordSet, MaterializedFile, MaterializedFolder, archiveFromFolder, assetFromFile, createServerEntity, createSshTerminal, ensureSshKeyPair, fetchFileSize, filterEndpoints, generateKey, generatePassword, generateSshPrivateKey, getNameByEndpoint, getServerConnection, l34EndpointToString, l3EndpointToCidr, l3EndpointToL4, l3EndpointToString, l4EndpointToString, l4EndpointWithProtocolToString, l7EndpointToString, parseL34Endpoint, parseL3Endpoint, parseL4Endpoint, parseL7Endpoint, requireInputL3Endpoint, requireInputL4Endpoint, sshPrivateKeyToKeyPair, updateEndpoints, updateEndpointsWithFqdn };
1140
+ //# sourceMappingURL=chunk-YYNV3MVT.js.map
1141
+ //# sourceMappingURL=chunk-YYNV3MVT.js.map