@highstate/common 0.9.14 → 0.9.16
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/chunk-HZBJ6LLS.js +1057 -0
- package/dist/chunk-HZBJ6LLS.js.map +1 -0
- package/dist/highstate.manifest.json +9 -9
- package/dist/index.js +2 -50
- package/dist/index.js.map +1 -1
- package/dist/units/dns/record-set/index.js +4 -6
- package/dist/units/dns/record-set/index.js.map +1 -1
- package/dist/units/existing-server/index.js +7 -13
- package/dist/units/existing-server/index.js.map +1 -1
- package/dist/units/network/l3-endpoint/index.js +6 -9
- package/dist/units/network/l3-endpoint/index.js.map +1 -1
- package/dist/units/network/l4-endpoint/index.js +6 -9
- package/dist/units/network/l4-endpoint/index.js.map +1 -1
- package/dist/units/script/index.js +6 -9
- package/dist/units/script/index.js.map +1 -1
- package/dist/units/server-dns/index.js +7 -11
- package/dist/units/server-dns/index.js.map +1 -1
- package/dist/units/server-patch/index.js +7 -11
- package/dist/units/server-patch/index.js.map +1 -1
- package/dist/units/ssh/key-pair/index.js +18 -12
- package/dist/units/ssh/key-pair/index.js.map +1 -1
- package/package.json +22 -7
- package/src/shared/command.ts +19 -9
- package/src/shared/files.ts +730 -0
- package/src/shared/index.ts +1 -0
- package/src/shared/network.ts +88 -1
- package/src/shared/ssh.ts +47 -19
- package/src/units/existing-server/index.ts +1 -1
- package/src/units/remote-folder/index.ts +0 -0
- package/src/units/server-dns/index.ts +1 -1
- package/src/units/server-patch/index.ts +1 -1
- package/src/units/ssh/key-pair/index.ts +12 -2
- package/dist/chunk-2JB6FMYR.js +0 -531
- package/dist/chunk-2JB6FMYR.js.map +0 -1
@@ -0,0 +1,1057 @@
|
|
1
|
+
import { toPromise, output, ComponentResource, interpolate, normalize, secret, getOrCreateSecret, asset } from '@highstate/pulumi';
|
2
|
+
import { uniqueBy, capitalize, groupBy } from 'remeda';
|
3
|
+
import { local, remote } from '@pulumi/command';
|
4
|
+
import '@highstate/library';
|
5
|
+
import { randomBytes } from '@noble/hashes/utils';
|
6
|
+
import { secureMask } from 'micro-key-producer/password.js';
|
7
|
+
import getKeys, { PrivateExport } from 'micro-key-producer/ssh.js';
|
8
|
+
import { randomBytes as randomBytes$1 } from 'micro-key-producer/utils.js';
|
9
|
+
import { tmpdir } from 'node:os';
|
10
|
+
import { mkdtemp, writeFile, cp, rm, stat, rename, mkdir } from 'node:fs/promises';
|
11
|
+
import { join, dirname, basename, extname } from 'node:path';
|
12
|
+
import { createReadStream } from 'node:fs';
|
13
|
+
import { pipeline } from 'node:stream/promises';
|
14
|
+
import { Readable } from 'node:stream';
|
15
|
+
import { createHash } from 'node:crypto';
|
16
|
+
import { minimatch } from 'minimatch';
|
17
|
+
import { HighstateSignature } from '@highstate/contract';
|
18
|
+
import * as tar from 'tar';
|
19
|
+
import unzipper from 'unzipper';
|
20
|
+
|
21
|
+
// src/shared/network.ts
|
22
|
+
/**
 * Renders an L3 endpoint as a bare host string: the IP address for
 * "ipv4"/"ipv6" endpoints, the hostname for "hostname" endpoints.
 */
function l3EndpointToString(l3Endpoint) {
  return l3Endpoint.type === "hostname" ? l3Endpoint.hostname : l3Endpoint.address;
}
|
32
|
+
/**
 * Renders an L4 endpoint as "host:port"; IPv6 addresses are wrapped in
 * square brackets so the port separator stays unambiguous.
 */
function l4EndpointToString(l4Endpoint) {
  const { type, port } = l4Endpoint;
  if (type === "ipv6") {
    return `[${l4Endpoint.address}]:${port}`;
  }
  return `${l3EndpointToString(l4Endpoint)}:${port}`;
}
|
38
|
+
/**
 * Renders an L4 endpoint prefixed with its transport protocol,
 * e.g. "tcp://1.2.3.4:80".
 */
function l4EndpointWithProtocolToString(l4Endpoint) {
  return `${l4Endpoint.protocol}://${l4EndpointToString(l4Endpoint)}`;
}
|
42
|
+
/**
 * Renders an L7 endpoint as a URL-like string:
 * "<appProtocol>://<host>:<port>[/<resource>]".
 */
function l7EndpointToString(l7Endpoint) {
  const hostPort = l4EndpointToString(l7Endpoint);
  const suffix = l7Endpoint.resource ? `/${l7Endpoint.resource}` : "";
  return `${l7Endpoint.appProtocol}://${hostPort}${suffix}`;
}
|
50
|
+
/**
 * Renders an endpoint that may or may not carry a port, falling back to the
 * bare L3 form when no port is present.
 */
function l34EndpointToString(l34Endpoint) {
  return l34Endpoint.port ? l4EndpointToString(l34Endpoint) : l3EndpointToString(l34Endpoint);
}
|
56
|
+
// Matches an optional transport protocol prefix ("tcp://"/"udp://" — validated
// later in parseL34Endpoint) followed by an IPv6 address (optionally bracketed),
// a dotted-quad IPv4 address, or a hostname (wildcard "*" labels allowed),
// with an optional ":port" suffix of up to 5 digits.
var L34_ENDPOINT_RE = /^(?:(?<protocol>[a-z]+):\/\/)?(?:(?:\[?(?<ipv6>[0-9A-Fa-f:]+)\]?)|(?<ipv4>(?:\d{1,3}\.){3}\d{1,3})|(?<hostname>[a-zA-Z0-9-*]+(?:\.[a-zA-Z0-9-*]+)*))(?::(?<port>\d{1,5}))?$/;
// Same host/port shape as L34_ENDPOINT_RE, but the application protocol prefix
// is mandatory and an optional "/<resource>" path suffix is also captured.
var L7_ENDPOINT_RE = /^(?<appProtocol>[a-z]+):\/\/(?:(?:\[?(?<ipv6>[0-9A-Fa-f:]+)\]?)|(?<ipv4>(?:\d{1,3}\.){3}\d{1,3})|(?<hostname>[a-zA-Z0-9-*]+(?:\.[a-zA-Z0-9-*]+)*))(?::(?<port>\d{1,5}))?(?:\/(?<resource>.*))?$/;
|
58
|
+
/**
 * Parses an L3/L4 endpoint from a string such as "tcp://1.2.3.4:80",
 * "[::1]:443" or "example.com". Objects are assumed to be already parsed
 * and are returned unchanged.
 *
 * Visibility is inferred from the address: private-range IPv4/IPv6
 * addresses yield "external", everything else "public".
 */
function parseL34Endpoint(l34Endpoint) {
  if (typeof l34Endpoint === "object") {
    return l34Endpoint;
  }
  const match = l34Endpoint.match(L34_ENDPOINT_RE);
  if (!match) {
    throw new Error(`Invalid L3/L4 endpoint: "${l34Endpoint}"`);
  }
  const { protocol, ipv6, ipv4, hostname, port } = match.groups;
  // only transport protocols are valid here; app protocols go through parseL7Endpoint
  if (protocol && protocol !== "tcp" && protocol !== "udp") {
    throw new Error(`Invalid L4 endpoint protocol: "${protocol}"`);
  }
  let visibility = "public";
  if (ipv4 && IPV4_PRIVATE_REGEX.test(ipv4)) {
    visibility = "external";
  } else if (ipv6 && IPV6_PRIVATE_REGEX.test(ipv6)) {
    visibility = "external";
  }
  // a port without an explicit protocol defaults to TCP; no port means no protocol
  const fallbackProtocol = port ? "tcp" : void 0;
  return {
    type: ipv6 ? "ipv6" : ipv4 ? "ipv4" : "hostname",
    visibility,
    address: ipv6 || ipv4,
    hostname,
    port: port ? parseInt(port, 10) : void 0,
    protocol: protocol ? protocol : fallbackProtocol
  };
}
|
86
|
+
/**
 * Parses an L3 endpoint (host only). Throws when the input carries a port.
 */
function parseL3Endpoint(l3Endpoint) {
  if (typeof l3Endpoint === "object") {
    return l3Endpoint;
  }
  const endpoint = parseL34Endpoint(l3Endpoint);
  if (!endpoint.port) {
    return endpoint;
  }
  throw new Error(`Port cannot be specified in L3 endpoint: "${l3Endpoint}"`);
}
|
96
|
+
/**
 * Parses an L4 endpoint (host + port). Throws when no port is present.
 */
function parseL4Endpoint(l4Endpoint) {
  if (typeof l4Endpoint === "object") {
    return l4Endpoint;
  }
  const endpoint = parseL34Endpoint(l4Endpoint);
  if (!endpoint.port) {
    throw new Error(`No port found in L4 endpoint: "${l4Endpoint}"`);
  }
  return endpoint;
}
|
106
|
+
// Private/loopback IPv4 ranges: 10.0.0.0/8 and 127.0.0.0/8, 172.16.0.0/12, 192.168.0.0/16.
var IPV4_PRIVATE_REGEX = /^(?:10|127)(?:\.\d{1,3}){3}$|^(?:172\.1[6-9]|172\.2[0-9]|172\.3[0-1])(?:\.\d{1,3}){2}$|^(?:192\.168)(?:\.\d{1,3}){2}$/;
// IPv6 unique-local (fc00::/7) addresses plus IPv4-mapped forms of the private IPv4 ranges above.
var IPV6_PRIVATE_REGEX = /^(?:fc|fd)(?:[0-9a-f]{2}){0,2}::(?:[0-9a-f]{1,4}:){7}[0-9a-f]{1,4}$|^::(?:ffff:(?:10|127)(?:\.\d{1,3}){3}|(?:172\.1[6-9]|172\.2[0-9]|172\.3[0-1])(?:\.\d{1,3}){2}|(?:192\.168)(?:\.\d{1,3}){2})$/;
|
108
|
+
/**
 * Resolves an L3 endpoint from either a raw string/object argument or a
 * Pulumi input, preferring the raw argument. Throws when neither is given.
 */
async function requireInputL3Endpoint(rawEndpoint, inputEndpoint) {
  if (rawEndpoint) {
    return parseL3Endpoint(rawEndpoint);
  }
  if (!inputEndpoint) {
    throw new Error("No endpoint provided");
  }
  return toPromise(inputEndpoint);
}
|
117
|
+
/**
 * Resolves an L4 endpoint from either a raw string/object argument or a
 * Pulumi input, preferring the raw argument. Throws when neither is given.
 */
async function requireInputL4Endpoint(rawEndpoint, inputEndpoint) {
  if (rawEndpoint) {
    return parseL4Endpoint(rawEndpoint);
  }
  if (!inputEndpoint) {
    throw new Error("No endpoint provided");
  }
  return toPromise(inputEndpoint);
}
|
126
|
+
/**
 * Upgrades an L3 endpoint to an L4 endpoint by attaching a port and a
 * transport protocol (TCP by default).
 */
function l3ToL4Endpoint(l3Endpoint, port, protocol = "tcp") {
  const base = parseL3Endpoint(l3Endpoint);
  return { ...base, port, protocol };
}
|
133
|
+
/**
 * Narrows a list of endpoints by visibility and type.
 *
 * When an explicit visibility filter is given, only matching endpoints are
 * kept. Otherwise the most visible tier present wins: public endpoints if
 * any exist, else external ones, else the list stays untouched.
 * An optional list of endpoint types further narrows the result.
 */
function filterEndpoints(endpoints, filter, types) {
  let result = endpoints;
  if (filter?.length) {
    result = result.filter((endpoint) => filter.includes(endpoint.visibility));
  } else {
    const tier = ["public", "external"].find(
      (visibility) => result.some((endpoint) => endpoint.visibility === visibility)
    );
    if (tier) {
      result = result.filter((endpoint) => endpoint.visibility === tier);
    }
  }
  if (types?.length) {
    result = result.filter((endpoint) => types.includes(endpoint.type));
  }
  return result;
}
|
146
|
+
/**
 * Converts an L3 endpoint to a single-host CIDR block (/32 for IPv4,
 * /128 for IPv6). Hostname endpoints carry no address and are rejected.
 */
function l3EndpointToCidr(l3Endpoint) {
  const { type, address } = l3Endpoint;
  if (type === "ipv4") {
    return `${address}/32`;
  }
  if (type === "ipv6") {
    return `${address}/128`;
  }
  if (type === "hostname") {
    throw new Error("Cannot convert hostname to CIDR");
  }
}
|
156
|
+
// Application protocols that ride on UDP rather than TCP.
var udpAppProtocols = ["dns", "dhcp"];
/**
 * Parses an L7 endpoint from a URL-like string such as
 * "https://example.com:8443/path". Objects are assumed to be already
 * parsed and are returned unchanged.
 *
 * Visibility is inferred the same way as in parseL34Endpoint: private
 * IPv4/IPv6 addresses yield "external", everything else "public".
 */
function parseL7Endpoint(l7Endpoint) {
  if (typeof l7Endpoint === "object") {
    return l7Endpoint;
  }
  const match = l7Endpoint.match(L7_ENDPOINT_RE);
  if (!match) {
    throw new Error(`Invalid L7 endpoint: "${l7Endpoint}"`);
  }
  const { appProtocol, ipv6, ipv4, hostname, port, resource } = match.groups;
  let visibility = "public";
  if (ipv4 && IPV4_PRIVATE_REGEX.test(ipv4)) {
    visibility = "external";
  } else if (ipv6 && IPV6_PRIVATE_REGEX.test(ipv6)) {
    visibility = "external";
  }
  return {
    type: ipv6 ? "ipv6" : ipv4 ? "ipv4" : "hostname",
    visibility,
    address: ipv6 || ipv4,
    hostname,
    // Default port for L7 endpoints (TODO: add more specific defaults for common protocols)
    port: port ? parseInt(port, 10) : 443,
    // L7 endpoints typically use TCP, but can also use UDP for specific protocols
    protocol: udpAppProtocols.includes(appProtocol) ? "udp" : "tcp",
    appProtocol,
    resource: resource || ""
  };
}
|
185
|
+
/**
 * Merges statically-declared endpoints with endpoints coming from inputs
 * and (unless mode is "replace") the currently stored endpoints.
 *
 * Duplicates are removed by their canonical string form, with earlier
 * entries winning — so in the default "prepend" mode newly supplied
 * endpoints take precedence over the current ones.
 */
async function updateEndpoints(currentEndpoints, endpoints, inputEndpoints, mode = "prepend") {
  const resolvedCurrentEndpoints = await toPromise(currentEndpoints);
  const resolvedInputEndpoints = await toPromise(inputEndpoints);
  // static (string) endpoints are parsed first, then combined with input endpoints
  const newEndpoints = uniqueBy(
    //
    [...endpoints.map(parseL34Endpoint), ...resolvedInputEndpoints],
    (endpoint) => l34EndpointToString(endpoint)
  );
  if (mode === "replace") {
    return newEndpoints;
  }
  return uniqueBy(
    //
    [...newEndpoints, ...resolvedCurrentEndpoints],
    (endpoint) => l34EndpointToString(endpoint)
  );
}
|
202
|
+
/**
 * Builds a Pulumi command-provider connection object from SSH credentials,
 * targeting the first SSH endpoint.
 */
function getServerConnection(ssh2) {
  return output(ssh2).apply((ssh3) => ({
    host: l3EndpointToString(ssh3.endpoints[0]),
    port: ssh3.endpoints[0].port,
    user: ssh3.user,
    password: ssh3.password,
    privateKey: ssh3.keyPair?.privateKey,
    // give up after 3 failed dial attempts instead of retrying indefinitely
    dialErrorLimit: 3,
    hostKey: ssh3.hostKey
  }));
}
|
213
|
+
/**
 * Normalizes a command given as either a string or an argv-style array
 * into a single shell command string.
 */
function createCommand(command) {
  return Array.isArray(command) ? command.join(" ") : command;
}
|
219
|
+
/**
 * Runs a shell command either on the local machine (when `host` resolves to
 * the string "local") or on a remote server over SSH.
 */
var Command = class _Command extends ComponentResource {
  // Underlying local.Command or remote.Command (wrapped in an Output because
  // the host input must resolve before the resource type can be chosen).
  command;
  // Standard output of the underlying command.
  stdout;
  // Standard error of the underlying command.
  stderr;
  constructor(name, args, opts) {
    super("highstate:common:Command", name, args, opts);
    this.command = output(args).apply((args2) => {
      if (args2.host === "local") {
        return new local.Command(
          name,
          {
            create: createCommand(args2.create),
            update: args2.update ? createCommand(args2.update) : void 0,
            delete: args2.delete ? createCommand(args2.delete) : void 0,
            logging: args2.logging,
            triggers: args2.triggers,
            dir: args2.cwd
          },
          { ...opts, parent: this }
        );
      }
      if (!args2.host.ssh) {
        throw new Error(`The host "${args2.host.hostname}" has no SSH credentials`);
      }
      return new remote.Command(
        name,
        {
          connection: getServerConnection(args2.host.ssh),
          create: createCommand(args2.create),
          update: args2.update ? createCommand(args2.update) : void 0,
          delete: args2.delete ? createCommand(args2.delete) : void 0,
          logging: args2.logging,
          triggers: args2.triggers
          // NOTE(review): unlike the local branch, `cwd` is not forwarded here — confirm intentional
        },
        { ...opts, parent: this }
      );
    });
    this.stdout = this.command.stdout;
    this.stderr = this.command.stderr;
  }
  /**
   * Creates a text file at `options.path` on the target host by echoing the
   * content through the shell (parent directories are created first); the
   * file is removed when the resource is deleted.
   */
  static createTextFile(name, options, opts) {
    return output(options).apply((options2) => {
      // escape double quotes so the content survives the double-quoted echo
      const escapedContent = options2.content.replace(/"/g, '\\"');
      const command = new _Command(
        name,
        {
          host: options2.host,
          create: interpolate`mkdir -p $(dirname ${options2.path}) && echo "${escapedContent}" > ${options2.path}`,
          delete: interpolate`rm -rf ${options2.path}`
        },
        opts
      );
      return command;
    });
  }
  /**
   * Polls until a file appears at `options.path` on the target host, then
   * returns its content via the command's stdout. Logging is routed to
   * stderr so stdout carries only the file content.
   */
  static receiveTextFile(name, options, opts) {
    return output(options).apply((options2) => {
      const command = new _Command(
        name,
        {
          host: options2.host,
          create: interpolate`while ! test -f ${options2.path}; do sleep 1; done; cat ${options2.path}`,
          logging: "stderr"
        },
        opts
      );
      return command;
    });
  }
};
|
289
|
+
/**
 * Maps an endpoint type to the DNS record type that can point at it:
 * A for IPv4, AAAA for IPv6, CNAME for hostnames.
 */
function getTypeByEndpoint(endpoint) {
  const recordTypes = new Map([
    ["ipv4", "A"],
    ["ipv6", "AAAA"],
    ["hostname", "CNAME"]
  ]);
  return recordTypes.get(endpoint.type);
}
|
299
|
+
/**
 * Base component for a single DNS record pointing at an L3 endpoint.
 * Concrete provider packages supply the actual implementation.
 */
var DnsRecord = class extends ComponentResource {
  /**
   * The underlying dns record resource.
   */
  dnsRecord;
  /**
   * The wait commands to be executed after the DNS record is created/updated.
   *
   * Use this field as a dependency for other resources.
   */
  waitCommands;
  constructor(name, args, opts) {
    super("highstate:common:DnsRecord", name, args, opts);
    this.dnsRecord = output(args).apply((args2) => {
      const l3Endpoint = parseL3Endpoint(args2.value);
      // derive A/AAAA/CNAME from the endpoint type unless explicitly given
      const type = args2.type ?? getTypeByEndpoint(l3Endpoint);
      return output(
        // NOTE(review): `this.create` is an instance-level call — presumably a
        // template method implemented by provider subclasses (the static
        // `create` below is a separate factory); confirm against the subclasses.
        this.create(
          name,
          {
            ...args2,
            type,
            value: l3EndpointToString(l3Endpoint)
          },
          { ...opts, parent: this }
        )
      );
    });
    this.waitCommands = output(args).apply((args2) => {
      // waitAt accepts a single host or a list; normalize to a list
      const waitAt = args2.waitAt ? Array.isArray(args2.waitAt) ? args2.waitAt : [args2.waitAt] : [];
      return waitAt.map((host) => {
        const hostname = host === "local" ? "local" : host.hostname;
        return new Command(
          `${name}-wait-${hostname}`,
          {
            host,
            // poll the resolver every 5s until the record becomes visible
            create: `while ! getent hosts ${args2.name} >/dev/null; do echo "Waiting for DNS record ${args2.name} to be created"; sleep 5; done`,
            triggers: [args2.type, args2.ttl, args2.priority, args2.proxied]
          },
          { parent: this }
        );
      });
    });
  }
  /**
   * Factory: dynamically imports the provider package
   * ("@highstate/<providerType>") and instantiates its
   * "<ProviderType>DnsRecord" class.
   */
  static create(name, args, opts) {
    return output(args).apply(async (args2) => {
      const providerType = args2.provider.type;
      const implName = `${capitalize(providerType)}DnsRecord`;
      const implModule = await import(`@highstate/${providerType}`);
      const implClass = implModule[implName];
      return new implClass(name, args2, opts);
    });
  }
};
|
353
|
+
/**
 * A set of DNS records created across all matching providers for one name.
 */
var DnsRecordSet = class _DnsRecordSet extends ComponentResource {
  /**
   * The underlying dns record resources.
   */
  dnsRecords;
  /**
   * The wait commands to be executed after the DNS records are created/updated.
   */
  waitCommands;
  constructor(name, records, opts) {
    super("highstate:common:DnsRecordSet", name, records, opts);
    this.dnsRecords = records;
    // aggregate the per-record wait commands for use as a single dependency
    this.waitCommands = records.apply(
      (records2) => records2.flatMap((record) => record.waitCommands)
    );
  }
  /**
   * Creates one DNS record per (matching provider, value) pair. A provider
   * matches when the record name falls under its domain suffix.
   */
  static create(name, args, opts) {
    const records = output(args).apply((args2) => {
      const recordName = args2.name ?? name;
      // accept either a single `value` or a `values` list
      const values = normalize(args2.value, args2.values);
      return output(
        args2.providers.filter((provider) => recordName.endsWith(provider.domain)).flatMap((provider) => {
          return values.map((value) => {
            const l3Endpoint = parseL3Endpoint(value);
            return DnsRecord.create(
              `${provider.type}-from-${recordName}-to-${l3EndpointToString(l3Endpoint)}`,
              { name: recordName, ...args2, value: l3Endpoint, provider },
              opts
            );
          });
        })
      );
    });
    return new _DnsRecordSet(name, records, opts);
  }
};
|
389
|
+
/**
 * Points `fqdn` at the (filtered) endpoints via DNS and augments the
 * endpoint list with hostname endpoints for the FQDN — one per distinct
 * port/protocol pair found among the filtered endpoints.
 *
 * Returns the endpoints unchanged (with no record set) when no FQDN is
 * given. Blocks until the created records resolve locally. In "prepend"
 * patch mode the hostname endpoints are merged in front of the originals;
 * in any other mode only the hostname endpoints are returned.
 */
async function updateEndpointsWithFqdn(endpoints, fqdn, fqdnEndpointFilter, patchMode, dnsProviders) {
  const resolvedEndpoints = await toPromise(endpoints);
  if (!fqdn) {
    return {
      endpoints: resolvedEndpoints,
      dnsRecordSet: void 0
    };
  }
  const filteredEndpoints = filterEndpoints(resolvedEndpoints, fqdnEndpointFilter);
  const dnsRecordSet = DnsRecordSet.create(fqdn, {
    providers: dnsProviders,
    values: filteredEndpoints,
    waitAt: "local"
  });
  // group by port/protocol so the FQDN gets exactly one endpoint per pair
  const portProtocolGroups = groupBy(
    filteredEndpoints,
    (endpoint) => endpoint.port ? `${endpoint.port}-${endpoint.protocol}` : ""
  );
  const newEndpoints = [];
  for (const group of Object.values(portProtocolGroups)) {
    newEndpoints.unshift({
      type: "hostname",
      hostname: fqdn,
      visibility: group[0].visibility,
      port: group[0].port,
      protocol: group[0].protocol
    });
  }
  // await the wait commands so callers see resolvable records
  await toPromise(
    dnsRecordSet.waitCommands.apply((waitCommands) => waitCommands.map((command) => command.stdout))
  );
  if (patchMode === "prepend") {
    return {
      endpoints: uniqueBy(
        //
        [...newEndpoints, ...resolvedEndpoints],
        (endpoint) => l34EndpointToString(endpoint)
      ),
      dnsRecordSet
    };
  }
  return {
    endpoints: newEndpoints,
    dnsRecordSet
  };
}
|
435
|
+
/**
 * Generates a random password by applying the secureMask template to
 * 32 random bytes.
 */
function generatePassword() {
  return secureMask.apply(randomBytes(32)).password;
}
|
438
|
+
|
439
|
+
// assets/images.json
|
440
|
+
// Terminal image used by createSshTerminal, pinned by digest for reproducibility.
var terminal_ssh = {
  image: "ghcr.io/exeteres/highstate/terminal-ssh:latest@sha256:99380e0405522afa0058eedce124c1970a87408663365b2dbce737801a7cd5d1"
};
|
443
|
+
|
444
|
+
// src/shared/ssh.ts
|
445
|
+
/**
 * Builds a terminal specification that opens an interactive SSH session to
 * the first SSH endpoint of the given credentials, or undefined when no
 * credentials are provided.
 *
 * Password and private key are mounted as files inside the terminal image;
 * the host key is pinned via a generated known_hosts file.
 */
function createSshTerminal(credentials) {
  return output(credentials).apply((credentials2) => {
    if (!credentials2) {
      return void 0;
    }
    // -tt forces a TTY; host keys are checked only against the mounted /known_hosts
    const command = ["ssh", "-tt", "-o", "UserKnownHostsFile=/known_hosts"];
    const endpoint = credentials2.endpoints[0];
    command.push("-p", endpoint.port.toString());
    if (credentials2.keyPair) {
      command.push("-i", "/private_key");
    }
    command.push(`${credentials2.user}@${l3EndpointToString(endpoint)}`);
    if (credentials2.password) {
      // sshpass feeds the password from the mounted file non-interactively
      command.unshift("sshpass", "-f", "/password");
    }
    return {
      name: "ssh",
      meta: {
        title: "Shell",
        description: "Connect to the server via SSH",
        icon: "gg:remote"
      },
      spec: {
        image: terminal_ssh.image,
        command,
        files: {
          "/password": credentials2.password,
          "/private_key": credentials2.keyPair?.privateKey && {
            content: {
              type: "embedded",
              value: credentials2.keyPair?.privateKey
            },
            meta: {
              name: "private_key",
              mode: 384
              // 384 === 0o600: the private key must be owner-only
            }
          },
          "/known_hosts": {
            content: {
              type: "embedded",
              value: `${l3EndpointToString(endpoint)} ${credentials2.hostKey}`
            },
            meta: {
              name: "known_hosts",
              mode: 420
              // 420 === 0o644: world-readable is fine for known_hosts
            }
          }
        }
      }
    };
  });
}
|
497
|
+
/**
 * Generates a fresh SSH private key (OpenSSH export format) from a random
 * 32-byte seed.
 */
function generatePrivateKey() {
  const seed = randomBytes$1(32);
  return getKeys(seed).privateKey;
}
|
501
|
+
/**
 * Derives the full key-pair entity (public key + fingerprint) from an
 * OpenSSH-encoded ed25519 private key string. The private key itself is
 * wrapped as a Pulumi secret in the result.
 */
function privateKeyToKeyPair(privateKeyString) {
  return output(privateKeyString).apply((privateKeyString2) => {
    const privateKeyStruct = PrivateExport.decode(privateKeyString2);
    const privKey = privateKeyStruct.keys[0].privKey.privKey;
    // the first 32 bytes of the decoded private key are the ed25519 seed,
    // from which the public key and fingerprint are re-derived
    const { fingerprint, publicKey } = getKeys(privKey.slice(0, 32));
    return output({
      type: "ed25519",
      fingerprint,
      publicKey,
      privateKey: secret(privateKeyString2)
    });
  });
}
|
514
|
+
/**
 * Returns the key pair supplied via inputs when present; otherwise derives
 * one from the "sshPrivateKey" secret, generating that secret on first use.
 */
function getOrCreateSshKeyPair(inputs, secrets) {
  if (inputs.sshKeyPair) {
    return output(inputs.sshKeyPair);
  }
  const privateKey = getOrCreateSecret(secrets, "sshPrivateKey", generatePrivateKey);
  return privateKey.apply(privateKeyToKeyPair);
}
|
521
|
+
/**
 * Builds a server entity for the given endpoint.
 *
 * When SSH is disabled, or the SSH port turns out to be unreachable, a
 * minimal entity with just the fallback hostname and the endpoint is
 * returned. Otherwise the server is probed over SSH for its hostname and
 * ed25519 host key, and a full entity with SSH credentials is returned.
 */
function createServerEntity(fallbackHostname, endpoint, sshPort = 22, sshUser = "root", sshPassword, sshPrivateKey, hasSsh = true) {
  const connection = output({
    host: l3EndpointToString(endpoint),
    port: sshPort,
    user: sshUser,
    password: sshPassword,
    privateKey: sshPrivateKey,
    dialErrorLimit: 3
  });
  if (!hasSsh) {
    return output({
      hostname: fallbackHostname,
      endpoints: [endpoint]
    });
  }
  // probe the SSH port with netcat; Date.now() trigger forces a re-check on every run
  const command = new local.Command("check-ssh", {
    create: `nc -zv ${l3EndpointToString(endpoint)} ${sshPort} && echo "up" || echo "down"`,
    triggers: [Date.now()]
  });
  return command.stdout.apply((result) => {
    if (result === "down") {
      return output({
        hostname: fallbackHostname,
        endpoints: [endpoint]
      });
    }
    const hostnameResult = new remote.Command("hostname", {
      connection,
      create: "hostname",
      triggers: [Date.now()]
    });
    const hostKeyResult = new remote.Command("host-key", {
      connection,
      create: "cat /etc/ssh/ssh_host_ed25519_key.pub",
      triggers: [Date.now()]
    });
    return output({
      endpoints: [endpoint],
      hostname: hostnameResult.stdout.apply((x) => x.trim()),
      ssh: {
        endpoints: [l3ToL4Endpoint(endpoint, sshPort)],
        user: sshUser,
        hostKey: hostKeyResult.stdout.apply((x) => x.trim()),
        password: sshPassword,
        keyPair: sshPrivateKey ? privateKeyToKeyPair(sshPrivateKey) : void 0
      }
    });
  });
}
|
570
|
+
/**
 * Converts a file entity into a Pulumi asset:
 * "remote" content -> RemoteAsset, "local" content -> FileAsset,
 * embedded text -> StringAsset.
 *
 * Artifact-backed files and embedded binary content cannot be represented
 * as Pulumi assets and throw instead.
 */
function assetFromFile(file) {
  if (file.content.type === "remote") {
    return new asset.RemoteAsset(l7EndpointToString(file.content.endpoint));
  }
  if (file.content.type === "local") {
    return new asset.FileAsset(file.content.path);
  }
  if (file.content.type === "artifact") {
    throw new Error(
      "Artifact-based files cannot be converted to Pulumi assets directly. Use MaterializedFile instead."
    );
  }
  if (file.meta.isBinary) {
    throw new Error(
      "Cannot create asset from inline binary file content. Please open an issue if you need this feature."
    );
  }
  return new asset.StringAsset(file.content.value);
}
|
589
|
+
/**
 * Converts a folder entity into a Pulumi archive:
 * "remote" -> RemoteArchive, "local" -> FileArchive, and embedded folders
 * are built recursively into an AssetArchive of their files and subfolders.
 *
 * Artifact-backed folders cannot be represented and throw instead.
 */
function archiveFromFolder(folder) {
  if (folder.content.type === "remote") {
    return new asset.RemoteArchive(l7EndpointToString(folder.content.endpoint));
  }
  if (folder.content.type === "local") {
    return new asset.FileArchive(folder.content.path);
  }
  if (folder.content.type === "artifact") {
    throw new Error(
      "Artifact-based folders cannot be converted to Pulumi assets directly. Use MaterializedFolder instead."
    );
  }
  const files = {};
  for (const file of folder.content.files) {
    files[file.meta.name] = assetFromFile(file);
  }
  for (const subfolder of folder.content.folders) {
    // recurse into nested folders
    files[subfolder.meta.name] = archiveFromFolder(subfolder);
  }
  return new asset.AssetArchive(files);
}
|
610
|
+
/**
 * Extracts a tar or zip archive from a readable stream into
 * `destinationPath`, creating the destination directory first.
 * Unknown archive types are silently ignored.
 */
async function unarchiveFromStream(stream, destinationPath, archiveType) {
  await mkdir(destinationPath, { recursive: true });
  switch (archiveType) {
    case "tar": {
      const extractStream = tar.extract({
        cwd: destinationPath,
        // strict: fail on any entry error instead of skipping it
        strict: true
      });
      await pipeline(stream, extractStream);
      return;
    }
    case "zip": {
      await pipeline(stream, unzipper.Extract({ path: destinationPath }));
      return;
    }
  }
}
|
627
|
+
/**
 * Infers the archive format ("tar" or "zip") from a file name and, as a
 * fallback, an HTTP content type. Returns null when neither is conclusive.
 *
 * @param {string} fileName - File name whose extension hints at the format.
 * @param {string} [contentType] - Optional MIME type (e.g. "application/zip").
 * @returns {"tar" | "zip" | null}
 */
function detectArchiveType(fileName, contentType) {
  const lowerName = fileName.toLowerCase();
  // extname() only yields the last extension (".gz" for "x.tar.gz"), so a
  // comparison against ".tar.gz" could never match; test the full name for
  // the compound suffix instead.
  if (lowerName.endsWith(".tar.gz")) {
    return "tar";
  }
  const ext = extname(lowerName);
  if (ext === ".tar" || ext === ".tgz") {
    return "tar";
  }
  if (ext === ".zip") {
    return "zip";
  }
  if (contentType) {
    if (contentType.includes("tar") || contentType.includes("gzip")) {
      return "tar";
    }
    if (contentType.includes("zip")) {
      return "zip";
    }
  }
  return null;
}
|
645
|
+
/**
 * Materializes a file entity onto the local filesystem so tools that need a
 * real path can consume it. Dispose via Symbol.asyncDispose to clean up.
 */
var MaterializedFile = class _MaterializedFile {
  constructor(entity, parent) {
    this.entity = entity;
    this.parent = parent;
  }
  // temp directory created for root-level files (absent when nested in a parent folder)
  _tmpPath;
  // absolute path of the materialized file on disk
  _path;
  _disposed = false;
  // extra metadata attached to the artifact produced by pack()
  artifactMeta = {};
  get path() {
    return this._path;
  }
  // Writes the entity's content to disk, choosing the strategy by content type.
  async _open() {
    if (this.parent) {
      this._path = join(this.parent.path, this.entity.meta.name);
    } else {
      const tempBase = process.env.HIGHSTATE_TEMP_PATH || tmpdir();
      this._tmpPath = await mkdtemp(join(tempBase, "highstate-file-"));
      this._path = join(this._tmpPath, this.entity.meta.name);
    }
    switch (this.entity.content.type) {
      case "embedded": {
        // embedded binary content is stored base64-encoded
        const content = this.entity.meta.isBinary ? Buffer.from(this.entity.content.value, "base64") : this.entity.content.value;
        await writeFile(this._path, content, { mode: this.entity.meta.mode });
        break;
      }
      case "local": {
        await cp(this.entity.content.path, this._path, { mode: this.entity.meta.mode });
        break;
      }
      case "remote": {
        // NOTE(review): `load` is not defined in the visible part of this chunk —
        // presumably a fetch-like helper defined elsewhere in the bundle; confirm.
        const response = await load(l7EndpointToString(this.entity.content.endpoint));
        if (!response.ok) throw new Error(`Failed to fetch: ${response.statusText}`);
        const arrayBuffer = await response.arrayBuffer();
        await writeFile(this._path, Buffer.from(arrayBuffer), { mode: this.entity.meta.mode });
        break;
      }
      case "artifact": {
        const artifactData = this.entity.content[HighstateSignature.Artifact];
        const artifactPath = process.env.HIGHSTATE_ARTIFACT_READ_PATH;
        if (!artifactPath) {
          throw new Error(
            "HIGHSTATE_ARTIFACT_READ_PATH environment variable is not set but required for artifact content"
          );
        }
        // artifacts are stored as content-addressed tgz archives named by hash
        const tgzPath = join(artifactPath, `${artifactData.hash}.tgz`);
        const readStream = createReadStream(tgzPath);
        await unarchiveFromStream(readStream, dirname(this._path), "tar");
        break;
      }
    }
  }
  // Removes the materialized file (and the owned temp dir, if any); idempotent.
  async [Symbol.asyncDispose]() {
    if (this._disposed) return;
    this._disposed = true;
    try {
      if (this._tmpPath) {
        await rm(this._tmpPath, { recursive: true, force: true });
      } else {
        await rm(this._path, { force: true });
      }
    } catch (error) {
      // best-effort cleanup: never throw from dispose
      console.warn("failed to clean up materialized file:", error);
    }
  }
  /**
   * Packs the materialized file into an artifact and returns the file entity with artifact content.
   *
   * Creates a tgz archive of the file and stores it in HIGHSTATE_ARTIFACT_WRITE_PATH where it will be collected by Highstate.
   */
  async pack() {
    const writeDir = process.env.HIGHSTATE_ARTIFACT_WRITE_PATH;
    if (!writeDir) {
      throw new Error("HIGHSTATE_ARTIFACT_WRITE_PATH environment variable is not set");
    }
    const fileStats = await stat(this._path);
    const tempBase = process.env.HIGHSTATE_TEMP_PATH || tmpdir();
    const tempArchivePath = join(tempBase, `highstate-pack-${Date.now()}.tgz`);
    try {
      await tar.create(
        {
          gzip: true,
          file: tempArchivePath,
          cwd: dirname(this._path),
          noMtime: true
          // to reproduce the same archive every time
        },
        [basename(this._path)]
      );
      // hash the archive so the artifact is content-addressed
      const fileContent = createReadStream(tempArchivePath);
      const hash = createHash("sha256");
      for await (const chunk of fileContent) {
        hash.update(chunk);
      }
      const hashValue = hash.digest("hex");
      const finalArchivePath = join(writeDir, `${hashValue}.tgz`);
      await rename(tempArchivePath, finalArchivePath);
      const newMeta = {
        name: this.entity.meta.name,
        mode: fileStats.mode & 511,
        // extract only permission bits
        size: fileStats.size,
        isBinary: this.entity.meta.isBinary
        // keep original binary flag as we can't reliably detect this from filesystem
      };
      return {
        meta: newMeta,
        content: {
          type: "artifact",
          [HighstateSignature.Artifact]: {
            hash: hashValue,
            meta: await toPromise(this.artifactMeta)
          }
        }
      };
    } finally {
      try {
        // the temp archive is normally renamed away; remove it if it remains
        await rm(tempArchivePath, { force: true });
      } catch {
      }
    }
  }
  /**
   * Creates an empty materialized file with the given name.
   *
   * @param name The name of the file to create
   * @param content Optional initial content of the file (default is empty string)
   * @param mode Optional file mode (permissions)
   * @returns A new MaterializedFile instance representing an empty file
   */
  static async create(name, content = "", mode) {
    const entity = {
      meta: {
        name,
        mode,
        size: 0,
        isBinary: false
      },
      content: {
        type: "embedded",
        value: content
      }
    };
    const materializedFile = new _MaterializedFile(entity);
    try {
      await materializedFile._open();
    } catch (error) {
      // clean up the temp dir before propagating the failure
      await materializedFile[Symbol.asyncDispose]();
      throw error;
    }
    return materializedFile;
  }
  /**
   * Materializes an existing file entity, optionally nested inside a parent
   * materialized folder (whose path then hosts the file).
   */
  static async open(file, parent) {
    const materializedFile = new _MaterializedFile(file, parent);
    try {
      await materializedFile._open();
    } catch (error) {
      await materializedFile[Symbol.asyncDispose]();
      throw error;
    }
    return materializedFile;
  }
};
|
808
|
+
/**
 * Materializes a folder entity onto the local filesystem and cleans it up on
 * async disposal (`await using` / `Symbol.asyncDispose`).
 *
 * Supported content types (see `_open`): "embedded" (inline file/folder
 * entities), "local" (a path to a directory or archive), "remote" (an HTTP(S)
 * endpoint serving an archive), and "artifact" (a content-addressed .tgz read
 * from HIGHSTATE_ARTIFACT_READ_PATH).
 */
var MaterializedFolder = class _MaterializedFolder {
  constructor(entity, parent) {
    this.entity = entity;
    this.parent = parent;
  }
  // Temp directory created for root folders (no parent); removed on dispose.
  _tmpPath;
  // Absolute path where the folder is materialized.
  _path;
  _disposed = false;
  // Child MaterializedFile/MaterializedFolder instances to dispose with us.
  _disposables = [];
  // Extra metadata attached to the artifact produced by `pack()`.
  artifactMeta = {};
  /** The local filesystem path of the materialized folder. */
  get path() {
    return this._path;
  }
  /** Materializes the entity's content at `this._path` based on its type. */
  async _open() {
    if (this.parent) {
      // Nested folder: live inside the parent's materialized directory.
      this._path = join(this.parent.path, this.entity.meta.name);
    } else {
      // Root folder: create a private temp directory to hold it.
      const tempBase = process.env.HIGHSTATE_TEMP_PATH || tmpdir();
      this._tmpPath = await mkdtemp(join(tempBase, "highstate-folder-"));
      this._path = join(this._tmpPath, this.entity.meta.name);
    }
    switch (this.entity.content.type) {
      case "embedded": {
        // Create the directory, then recursively materialize children.
        await mkdir(this._path, { mode: this.entity.meta.mode });
        for (const file of this.entity.content.files) {
          const materializedFile = await MaterializedFile.open(file, this);
          this._disposables.push(materializedFile);
        }
        for (const subfolder of this.entity.content.folders) {
          const materializedFolder = await _MaterializedFolder.open(subfolder, this);
          this._disposables.push(materializedFolder);
        }
        break;
      }
      case "local": {
        const archiveType = detectArchiveType(this.entity.content.path);
        if (archiveType) {
          // Local path is an archive: extract it into place.
          const readStream = createReadStream(this.entity.content.path);
          await unarchiveFromStream(readStream, this._path, archiveType);
        } else {
          // Plain directory: copy it recursively.
          await cp(this.entity.content.path, this._path, {
            recursive: true,
            mode: this.entity.meta.mode
          });
        }
        break;
      }
      case "remote": {
        const response = await load(l7EndpointToString(this.entity.content.endpoint));
        if (!response.ok) throw new Error(`Failed to fetch: ${response.statusText}`);
        if (!response.body) throw new Error("Response body is empty");
        // Detect the archive type from the URL path and/or Content-Type header.
        const url = new URL(l7EndpointToString(this.entity.content.endpoint));
        const archiveType = detectArchiveType(
          url.pathname,
          response.headers.get("content-type") || void 0
        );
        if (!archiveType) {
          throw new Error("Remote folder content must be an archive (tar, tar.gz, tgz, or zip)");
        }
        // NOTE(review): this body check duplicates the one above; harmless but redundant.
        if (!response.body) {
          throw new Error("Response body is empty");
        }
        // Bridge the WHATWG ReadableStream into a Node Readable for the unarchiver.
        const reader = response.body.getReader();
        const stream = new Readable({
          async read() {
            try {
              const { done, value } = await reader.read();
              if (done) {
                this.push(null);
              } else {
                this.push(Buffer.from(value));
              }
            } catch (error) {
              this.destroy(error instanceof Error ? error : new Error(String(error)));
            }
          }
        });
        await unarchiveFromStream(stream, this._path, archiveType);
        break;
      }
      case "artifact": {
        const artifactData = this.entity.content[HighstateSignature.Artifact];
        const artifactPath = process.env.HIGHSTATE_ARTIFACT_READ_PATH;
        if (!artifactPath) {
          throw new Error(
            "HIGHSTATE_ARTIFACT_READ_PATH environment variable is not set but required for artifact content"
          );
        }
        // Artifacts are content-addressed: <hash>.tgz under the artifact read path.
        const tgzPath = join(artifactPath, `${artifactData.hash}.tgz`);
        const readStream = createReadStream(tgzPath);
        // The archive contains the folder itself, so extract into the parent dir.
        await unarchiveFromStream(readStream, dirname(this._path), "tar");
        break;
      }
    }
  }
  /** Removes the materialized directory tree and disposes all children. Idempotent. */
  async [Symbol.asyncDispose]() {
    if (this._disposed) return;
    this._disposed = true;
    try {
      if (this._tmpPath) {
        // Root folder: remove the whole private temp directory.
        await rm(this._tmpPath, { recursive: true, force: true });
      } else {
        await rm(this._path, { recursive: true, force: true });
      }
    } catch (error) {
      // Best-effort cleanup: log and continue so child disposal still runs.
      console.warn("failed to clean up materialized folder:", error);
    }
    // Children are disposed after the tree removal; their own rm calls use
    // force:true, so already-deleted paths are tolerated.
    for (const disposable of this._disposables) {
      await disposable[Symbol.asyncDispose]();
    }
  }
  /**
   * Packs the materialized folder into an artifact and returns the folder entity with artifact content.
   *
   * Creates a tgz archive of the entire folder and stores it in HIGHSTATE_ARTIFACT_WRITE_PATH where it will be collected by Highstate.
   *
   * @param include Optional glob patterns; when non-empty, only matching paths are kept
   * @param exclude Optional glob patterns; matching paths are always dropped (checked before include)
   * @returns A folder entity whose content references the produced artifact by hash
   */
  async pack({ include, exclude } = {}) {
    const writeDir = process.env.HIGHSTATE_ARTIFACT_WRITE_PATH;
    if (!writeDir) {
      throw new Error("HIGHSTATE_ARTIFACT_WRITE_PATH environment variable is not set");
    }
    const folderStats = await stat(this._path);
    // Archive to a temp file first; it is renamed to its hash once computed.
    const tempBase = process.env.HIGHSTATE_TEMP_PATH || tmpdir();
    const tempArchivePath = join(tempBase, `highstate-pack-${Date.now()}.tgz`);
    const entity = this.entity;
    try {
      await tar.create(
        {
          gzip: true,
          file: tempArchivePath,
          cwd: dirname(this._path),
          filter(path) {
            // Strip the leading "<folderName>/" so patterns match relative paths.
            path = path.slice(entity.meta.name.length + 1);
            for (const pattern of exclude ?? []) {
              if (minimatch(path, pattern)) {
                return false;
              }
            }
            for (const pattern of include ?? []) {
              if (minimatch(path, pattern)) {
                return true;
              }
            }
            // No include list means "keep everything not excluded".
            return !include || include.length === 0;
          },
          // to reproduce the same archive every time
          portable: true,
          noMtime: true
        },
        [basename(this._path)]
      );
      // Hash the finished archive (sha256) to derive its content address.
      const fileContent = createReadStream(tempArchivePath);
      const hash = createHash("sha256");
      for await (const chunk of fileContent) {
        hash.update(chunk);
      }
      const hashValue = hash.digest("hex");
      const finalArchivePath = join(writeDir, `${hashValue}.tgz`);
      await rename(tempArchivePath, finalArchivePath);
      const newMeta = {
        name: this.entity.meta.name,
        mode: folderStats.mode & 511
        // extract only permission bits
      };
      return {
        meta: newMeta,
        content: {
          type: "artifact",
          [HighstateSignature.Artifact]: {
            hash: hashValue,
            meta: await toPromise(this.artifactMeta)
          }
        }
      };
    } finally {
      // Remove the temp archive if the rename did not happen (best-effort).
      try {
        await rm(tempArchivePath, { force: true });
      } catch {
      }
    }
  }
  /**
   * Creates an empty materialized folder with the given name.
   *
   * @param name The name of the folder to create
   * @param mode Optional folder mode (permissions)
   * @param parent Optional parent folder to create the folder in
   * @returns A new MaterializedFolder instance representing an empty folder
   */
  static async create(name, mode, parent) {
    const entity = {
      meta: {
        name,
        mode
      },
      content: {
        type: "embedded",
        files: [],
        folders: []
      }
    };
    const materializedFolder = new _MaterializedFolder(entity, parent);
    try {
      await materializedFolder._open();
    } catch (error) {
      // Dispose partially-materialized state before rethrowing.
      await materializedFolder[Symbol.asyncDispose]();
      throw error;
    }
    return materializedFolder;
  }
  /**
   * Materializes an existing folder entity on the local filesystem.
   *
   * @param folder The folder entity to materialize
   * @param parent Optional parent folder to materialize into
   * @returns A new MaterializedFolder instance backed by the entity
   */
  static async open(folder, parent) {
    const materializedFolder = new _MaterializedFolder(folder, parent);
    try {
      await materializedFolder._open();
    } catch (error) {
      await materializedFolder[Symbol.asyncDispose]();
      throw error;
    }
    return materializedFolder;
  }
};
|
1029
|
+
/**
 * Resolves the size in bytes of a remote file via an HTTP HEAD request.
 *
 * @param endpoint The L7 endpoint of the file; its appProtocol must be
 *   "http" or "https".
 * @returns The size reported by the Content-Length response header.
 * @throws If the protocol is unsupported, the request fails, or the
 *   Content-Length header is missing or not a valid number.
 */
async function fetchFileSize(endpoint) {
  if (endpoint.appProtocol !== "http" && endpoint.appProtocol !== "https") {
    throw new Error(
      `Unsupported protocol: ${endpoint.appProtocol}. Only HTTP and HTTPS are supported.`
    );
  }
  const url = l7EndpointToString(endpoint);
  // HEAD fetches only the headers, avoiding a download of the whole body.
  const response = await load(url, { method: "HEAD" });
  if (!response.ok) {
    throw new Error(`Failed to fetch file size: ${response.statusText}`);
  }
  const contentLength = response.headers.get("content-length");
  if (!contentLength) {
    throw new Error("Content-Length header is missing in the response");
  }
  // Number.parseInt/Number.isNaN instead of the coercing globals.
  const size = Number.parseInt(contentLength, 10);
  if (Number.isNaN(size)) {
    throw new Error(`Invalid Content-Length value: ${contentLength}`);
  }
  return size;
}
|
1050
|
+
/**
 * Derives a file name from an L7 endpoint by taking the basename of its
 * resource path; returns an empty string when the endpoint has no resource.
 *
 * @param endpoint The endpoint (string or object) to derive the name from
 * @returns The basename of the endpoint's resource, or "" if absent
 */
function getNameByEndpoint(endpoint) {
  const { resource } = parseL7Endpoint(endpoint);
  if (!resource) {
    return "";
  }
  return basename(resource);
}
|
1054
|
+
|
1055
|
+
export { Command, DnsRecord, DnsRecordSet, MaterializedFile, MaterializedFolder, archiveFromFolder, assetFromFile, createServerEntity, createSshTerminal, fetchFileSize, filterEndpoints, generatePassword, generatePrivateKey, getNameByEndpoint, getOrCreateSshKeyPair, getServerConnection, l34EndpointToString, l3EndpointToCidr, l3EndpointToString, l3ToL4Endpoint, l4EndpointToString, l4EndpointWithProtocolToString, l7EndpointToString, parseL34Endpoint, parseL3Endpoint, parseL4Endpoint, parseL7Endpoint, privateKeyToKeyPair, requireInputL3Endpoint, requireInputL4Endpoint, updateEndpoints, updateEndpointsWithFqdn };
|
1056
|
+
//# sourceMappingURL=chunk-HZBJ6LLS.js.map
|