dxfl 0.4.2-alpha → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,6 +1,11 @@
1
- # v0.4.2-alpha
1
+ # v0.5.0
2
2
 
3
- - Temporarily disabling CORS configuration
3
+ - `deuxfleurs.toml`: Add support for setting headers
4
+ - `deuxfleurs.toml`/`deploy`: Add support for compressing files at upload time
5
+
6
+ # v0.4.2
7
+
8
+ `deploy`: Temporarily disable cors to workaround Garage bug
4
9
 
5
10
  # v0.4.1
6
11
 
package/README.md CHANGED
@@ -101,6 +101,38 @@ status = 301
101
101
  # Default value: false
102
102
  force = true
103
103
 
104
+ # Rules for configuring headers.
105
+ # Multiple rules can be specified. In case of several rules matching
106
+ # the same file, the first rule is applied.
107
+ # Each rule is defined in its own `[[headers]]` block.
108
+ [[headers]]
109
+ # Files that are matched by the rule, using "glob" syntax.
110
+ # (see https://github.com/micromatch/picomatch#globbing-features)
111
+ # This pattern matches all files ending in .jpg in any subdirectory.
112
+ #
113
+ # It is also possible to specify several glob patterns, in this case
114
+ # only one of the patterns needs to match. For example, to match .jpg
115
+ # or .png files:
116
+ # for = ["**/*.jpg", "**/*.png"]
117
+ for = "**/*.jpg"
118
+ # (Optional) Compress uploaded files.
119
+ # Setting this option will compress files matched by the rule when
120
+ # uploading them, and will set the Content-Encoding header to the
121
+ # corresponding value.
122
+ # Currently, the only supported compression setting is "gzip".
123
+ compress = "gzip"
124
+ #
125
+ # Headers to set for files that match this rule.
126
+ #
127
+ # Only the following headers can be configured:
128
+ # Content-Type, Cache-Control, Content-Disposition, Content-Encoding,
129
+ # Content-Language, and Expires.
130
+ [headers.values]
131
+ Cache-Control = "max-age=31536000, no-transform, public"
132
+ # Note: if no Content-Type is manually specified, a default value is
133
+ # computed from the file extension. In most cases this is good enough.
134
+ Content-Type = "image/jpeg"
135
+
104
136
  # Configuration to allow cross-origin requests.
105
137
  # Multiple rules can be specified and will be matched in order.
106
138
  # Each rule is defined in its own `[[cors]]` block
package/dist/bucket.js CHANGED
@@ -8,11 +8,11 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
8
8
  });
9
9
  };
10
10
  import fs from "fs/promises";
11
- import mime from "mime";
12
11
  import { DeleteObjectCommand, DeleteObjectsCommand, HeadObjectCommand, ListObjectsV2Command, PutObjectCommand, S3Client, } from "@aws-sdk/client-s3";
13
12
  import { PromisePool } from "@supercharge/promise-pool";
14
13
  import { apiConfExists, openApiConf } from "./auth.js";
15
14
  import { ErrorMsg, wrapS3Call } from "./error.js";
15
+ import { headersFromS3, headersToS3 } from "./headers.js";
16
16
  import { GuichetApi } from "./guichet.js";
17
17
  import { parseEtag, toChunks, formatBytesHuman } from "./utils.js";
18
18
  export function getBucketCredentials(name) {
@@ -100,6 +100,7 @@ export function getBucketFilesDetails(bucket, files) {
100
100
  const resp = yield wrapS3Call(`read metadata of file "${file}"`, [200], () => bucket.client.send(new HeadObjectCommand({ Bucket: bucket.name, Key: file })));
101
101
  res.set(file, {
102
102
  redirect: resp.WebsiteRedirectLocation,
103
+ headers: headersFromS3(resp),
103
104
  });
104
105
  });
105
106
  }
@@ -143,22 +144,15 @@ export function deleteBucketFiles(bucket, files) {
143
144
  }));
144
145
  });
145
146
  }
146
- export function uploadFile(bucket, s3Path, localPath) {
147
+ export function uploadFile(bucket, s3Path, localPath, headers) {
147
148
  return __awaiter(this, void 0, void 0, function* () {
148
- var _a;
149
- // use `path.posix` because `Key` is a path in a bucket that uses `/` as separator.
150
- let ContentType = (_a = mime.getType(localPath)) !== null && _a !== void 0 ? _a : undefined;
151
- // add charset=utf-8 by default on text files (TODO: allow the user to override this)
152
- if (ContentType && ContentType.startsWith("text/")) {
153
- ContentType = ContentType + "; charset=utf-8";
154
- }
155
149
  // NB: we read the entire file into memory instead of creating a stream from
156
150
  // the file (which would allow streaming the data). Indeed, using a stream
157
151
  // results in transient network errors being thrown instead of being
158
152
  // retried by the AWS SDK: https://github.com/aws/aws-sdk-js-v3/issues/6770
159
153
  // which we want to avoid...
160
154
  const Body = yield fs.readFile(localPath);
161
- const params = { Bucket: bucket.name, Key: s3Path, Body, ContentType };
155
+ const params = Object.assign({ Bucket: bucket.name, Key: s3Path, Body }, headersToS3(headers));
162
156
  yield wrapS3Call(`upload "${s3Path}"`, [200], () => bucket.client.send(new PutObjectCommand(params)));
163
157
  });
164
158
  }
@@ -178,3 +172,10 @@ export function putEmptyObjectRedirect(bucket, source, target) {
178
172
  yield wrapS3Call(`create redirection source object ${source}`, [200], () => bucket.client.send(new PutObjectCommand(params)));
179
173
  });
180
174
  }
175
+ export function setObjectHeaders(bucket, s3Path, localPath, headers) {
176
+ return __awaiter(this, void 0, void 0, function* () {
177
+ // FIXME: could we be more efficient and use CopyObject to avoid
178
+ // re-uploading the object?
179
+ yield uploadFile(bucket, s3Path, localPath, headers);
180
+ });
181
+ }
package/dist/deploy.js CHANGED
@@ -10,10 +10,11 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
10
10
  import fs from "fs";
11
11
  import path from "path";
12
12
  import { PromisePool } from "@supercharge/promise-pool";
13
- import { deleteBucketFile, deleteBucketFiles, getBucketCredentials, getBucket, getBucketFiles, putEmptyObjectRedirect, uploadFile, } from "./bucket.js";
13
+ import { deleteBucketFile, deleteBucketFiles, getBucketCredentials, getBucket, getBucketFiles, putEmptyObjectRedirect, uploadFile, setObjectHeaders, } from "./bucket.js";
14
14
  import { ErrorMsg } from "./error.js";
15
- import { confirmationPrompt, filterMap, formatBytesHuman, formatCount, getFileMd5, sum, } from "./utils.js";
16
- import { equalBucketRedirect, getBucketConfig, putBucketWebsiteConfig, readConfigFile, } from "./website_config.js";
15
+ import { fileContentType, supportedHeaders, } from "./headers.js";
16
+ import { confirmationPrompt, filterMap, formatBytesHuman, formatCount, getFileMd5, gzipFile, mapEq, mkTmpDir, sum, } from "./utils.js";
17
+ import { evalHeadersRules, equalBucketRedirect, getBucketConfig, putBucketWebsiteConfig, readConfigFile, } from "./website_config.js";
17
18
  // Walks through the local directory at path `dir`, and for each file it contains, returns :
18
19
  // - `localPath`: its path on the local filesystem (includes `dir`). On windows, this path
19
20
  // will typically use `\` as separator.
@@ -60,6 +61,58 @@ function getLocalFilesWithInfo(localFolder) {
60
61
  })));
61
62
  });
62
63
  }
64
+ // Compute headers-related settings for local files and compress them if required.
65
+ export function compressSelectedLocalFiles(localFiles, localCfg) {
66
+ return __awaiter(this, void 0, void 0, function* () {
67
+ const tmpDir = yield mkTmpDir();
68
+ const localFilesHeaders = yield Promise.all(localFiles.map((f) => __awaiter(this, void 0, void 0, function* () {
69
+ // evaluate headers rules for this file
70
+ let headers = evalHeadersRules(localCfg.headers_rules, f.s3Path);
71
+ // copy headers.values (we may edit it afterwards)
72
+ headers = { values: new Map(headers.values), compress: headers.compress };
73
+ // compute a best-effort content type if not explicitly specified in the config
74
+ if (!headers.values.has("Content-Type")) {
75
+ headers.values.set("Content-Type", fileContentType(f.localPath));
76
+ }
77
+ // if we need compression, compute the compressed file, store it in tmpDir,
78
+ // and set the localPath, md5, and size to those of the compressed file
79
+ let localPath = f.localPath;
80
+ let md5 = f.md5;
81
+ let size = f.size;
82
+ let compressed = false;
83
+ if (headers.compress) {
84
+ compressed = true;
85
+ if (headers.compress == "gzip") {
86
+ const compressedPath = path.join(tmpDir, f.s3Path);
87
+ yield gzipFile(localPath, compressedPath);
88
+ const [stat, compressedMd5] = yield Promise.all([
89
+ fs.promises.stat(compressedPath),
90
+ getFileMd5(compressedPath),
91
+ ]);
92
+ localPath = compressedPath;
93
+ md5 = compressedMd5;
94
+ size = stat.size;
95
+ }
96
+ else {
97
+ headers.compress;
98
+ }
99
+ // set Content-Encoding, unless manually specified by the user
100
+ if (!headers.values.has("Content-Encoding")) {
101
+ headers.values.set("Content-Encoding", headers.compress);
102
+ }
103
+ }
104
+ return {
105
+ localPath,
106
+ s3Path: f.s3Path,
107
+ size,
108
+ md5,
109
+ headers: headers.values,
110
+ compressed,
111
+ };
112
+ })));
113
+ return { localFilesHeaders, tmpDir };
114
+ });
115
+ }
63
116
  function computeDeployPlan(localFiles, remoteFiles, localCfg, remoteCfg) {
64
117
  // We raise an error if a file is both present locally and is the source of a
65
118
  // redirection.
@@ -118,10 +171,23 @@ function computeDeployPlan(localFiles, remoteFiles, localCfg, remoteCfg) {
118
171
  // - are missing on the remote
119
172
  // - have a md5 that differs from their remote ETag,
120
173
  // - have a local size that differs from its remote size
121
- let filesToUpload = localFiles.filter(({ s3Path, size, md5 }) => {
122
- const remoteFile = remoteFiles.get(s3Path);
123
- return !remoteFile || remoteFile.etag != md5 || remoteFile.size != size;
174
+ const filesToUpload = localFiles.filter(f => {
175
+ const remoteFile = remoteFiles.get(f.s3Path);
176
+ return !remoteFile || remoteFile.etag != f.md5 || remoteFile.size != f.size;
124
177
  });
178
+ // Compute remote files for which headers need to be updated.
179
+ let modifyHeaders = [];
180
+ for (const { localPath, s3Path, headers } of localFiles) {
181
+ const remoteHeaders = remoteCfg.headers.get(s3Path);
182
+ if (remoteHeaders && !mapEq(headers, remoteHeaders)) {
183
+ modifyHeaders.push({
184
+ localPath,
185
+ s3Path,
186
+ before: remoteHeaders,
187
+ after: headers,
188
+ });
189
+ }
190
+ }
125
191
  return {
126
192
  localFiles,
127
193
  remoteFiles,
@@ -138,6 +204,7 @@ function computeDeployPlan(localFiles, remoteFiles, localCfg, remoteCfg) {
138
204
  from: remoteCfg.cors_rules,
139
205
  to: localCfg.cors_rules,
140
206
  },
207
+ modifyHeaders,
141
208
  };
142
209
  }
143
210
  function diffBucketRedirects(from, to) {
@@ -162,6 +229,7 @@ function diffBucketRedirects(from, to) {
162
229
  return { added, updated, deleted };
163
230
  }
164
231
  function printPlan(plan, details) {
232
+ var _a, _b;
165
233
  function showBucketRedirectTarget(r) {
166
234
  const to = r.to.kind == "replace" ? `${r.to.target}` : `${r.to.prefix}*`;
167
235
  const proto = r.protocol ? `${r.protocol}://` : "";
@@ -173,12 +241,15 @@ function printPlan(plan, details) {
173
241
  function showBucketRedirect(r) {
174
242
  return `${r.prefix}* -> ${showBucketRedirectTarget(r)}`;
175
243
  }
176
- function printSummary(nb, action, bytes) {
244
+ function printSummary(nb, action, bytes, compressed) {
177
245
  if (nb > 0) {
178
246
  process.stdout.write(` ${formatCount(nb, "file")} ${action}`);
179
247
  if (bytes) {
180
248
  process.stdout.write(` (${formatBytesHuman(bytes)})`);
181
249
  }
250
+ if (compressed && compressed > 0) {
251
+ process.stdout.write(` (${compressed} compressed)`);
252
+ }
182
253
  process.stdout.write("\n");
183
254
  }
184
255
  }
@@ -205,9 +276,11 @@ function printPlan(plan, details) {
205
276
  filesUnchanged.delete(f.s3Path);
206
277
  }
207
278
  const sizeUnchanged = sum([...filesUnchanged.values()]);
208
- printSummary(plan.filesToUpload.length, "uploaded", sizeSent);
279
+ const nbUploadCompressed = plan.filesToUpload.filter(f => f.compressed).length;
280
+ printSummary(plan.filesToUpload.length, "uploaded", sizeSent, nbUploadCompressed);
209
281
  printSummary(plan.filesToDelete.length, "deleted", sizeDeleted);
210
282
  printSummary(filesUnchanged.size, "unchanged", sizeUnchanged);
283
+ printSummary(plan.modifyHeaders.length, "with modified headers", undefined);
211
284
  const items = [
212
285
  [bredirects.added.size + oredirects_added.length, "added"],
213
286
  [bredirects.updated.size + oredirects_updated.length, "modified"],
@@ -231,7 +304,20 @@ function printPlan(plan, details) {
231
304
  process.stdout.write(` Delete ${file.name} (${file.size ? formatBytesHuman(file.size) : "?B"})\n`);
232
305
  }
233
306
  for (const file of plan.filesToUpload) {
234
- process.stdout.write(` Send ${file.s3Path} (${formatBytesHuman(file.size)})\n`);
307
+ process.stdout.write(` Send ${file.s3Path} (${formatBytesHuman(file.size)}${file.compressed ? " compressed" : ""})\n`);
308
+ // TODO: should we also print the file headers? but this is quite noisy...
309
+ // (it will print at least Content-Type for each file)
310
+ }
311
+ // print modified headers
312
+ for (const change of plan.modifyHeaders) {
313
+ process.stdout.write(` Update headers for ${change.s3Path}:\n`);
314
+ for (const hdr of supportedHeaders) {
315
+ if (change.before.get(hdr) !== change.after.get(hdr)) {
316
+ const before = (_a = change.before.get(hdr)) !== null && _a !== void 0 ? _a : "<undefined>";
317
+ const after = (_b = change.after.get(hdr)) !== null && _b !== void 0 ? _b : "<undefined>";
318
+ process.stdout.write(` - ${hdr}: ${after} (was: ${before})\n`);
319
+ }
320
+ }
235
321
  }
236
322
  // print redirects
237
323
  for (const [_, r] of bredirects.added) {
@@ -252,6 +338,7 @@ function printPlan(plan, details) {
252
338
  for (const { source, prev_target } of oredirects_deleted) {
253
339
  process.stdout.write(` Delete redirect ${source} -> ${prev_target}\n`);
254
340
  }
341
+ // print new CORS rules
255
342
  if (cors_changed) {
256
343
  process.stdout.write(" Set CORS rules:\n");
257
344
  for (const rule of plan.cors_rules.to) {
@@ -312,7 +399,7 @@ function applyDeployPlan(bucket, plan) {
312
399
  })
313
400
  .process((f) => __awaiter(this, void 0, void 0, function* () {
314
401
  process.stdout.write(` Send ${f.s3Path} (${formatBytesHuman(f.size)})\n`);
315
- yield uploadFile(bucket, f.s3Path, f.localPath);
402
+ yield uploadFile(bucket, f.s3Path, f.localPath, f.headers);
316
403
  }));
317
404
  // Apply bucket redirects & global config
318
405
  yield putBucketWebsiteConfig(bucket, plan.index_page.to, plan.error_page.to, plan.bucket_redirects.to);
@@ -320,6 +407,16 @@ function applyDeployPlan(bucket, plan) {
320
407
  // if (!equalCorsRules(plan.cors_rules.from, plan.cors_rules.to)) {
321
408
  // await putCorsRules(bucket, plan.cors_rules.to);
322
409
  // }
410
+ // Modify headers
411
+ yield PromisePool.for(plan.modifyHeaders)
412
+ .withConcurrency(50)
413
+ .handleError(err => {
414
+ throw err;
415
+ })
416
+ .process((_a) => __awaiter(this, [_a], void 0, function* ({ localPath, s3Path, after }) {
417
+ process.stdout.write(` Update headers of ${s3Path}\n`);
418
+ yield setObjectHeaders(bucket, s3Path, localPath, after);
419
+ }));
323
420
  });
324
421
  }
325
422
  export function deploy(website, localFolder, options) {
@@ -344,13 +441,18 @@ export function deploy(website, localFolder, options) {
344
441
  return [bucket, remoteFiles, remoteWebsiteConfig];
345
442
  }))(),
346
443
  ]);
444
+ // Compress local files when required by the local configuration.
445
+ // The temporary directory needs to be removed before exiting.
446
+ // FIXME: the directory should ideally also be removed if we throw an exception...
447
+ const { localFilesHeaders, tmpDir } = yield compressSelectedLocalFiles(localFiles, localWebsiteConfig);
347
448
  // Compute the deploy plan
348
- const plan = computeDeployPlan(localFiles, remoteFiles, localWebsiteConfig, remoteWebsiteConfig);
449
+ const plan = computeDeployPlan(localFilesHeaders, remoteFiles, localWebsiteConfig, remoteWebsiteConfig);
349
450
  // If --dry-run: display the plan and return
350
451
  if (options.dryRun) {
351
452
  printPlan(plan, "full");
352
453
  process.stdout.write("\nSummary:\n");
353
454
  printPlan(plan, "summary");
455
+ fs.promises.rm(tmpDir, { recursive: true });
354
456
  return;
355
457
  }
356
458
  // If not --yes: show the plan summary, ask for confirmation before proceeding
@@ -363,6 +465,7 @@ export function deploy(website, localFolder, options) {
363
465
  printPlan(plan, "full");
364
466
  });
365
467
  if (!ok) {
468
+ fs.promises.rm(tmpDir, { recursive: true });
366
469
  return;
367
470
  }
368
471
  }
@@ -373,5 +476,6 @@ export function deploy(website, localFolder, options) {
373
476
  process.stdout.write("\nSummary:\n");
374
477
  printPlan(plan, "summary");
375
478
  }
479
+ fs.promises.rm(tmpDir, { recursive: true });
376
480
  });
377
481
  }
@@ -0,0 +1,49 @@
1
+ import mime from "mime";
2
+ export const supportedHeaders = [
3
+ "Content-Type",
4
+ "Cache-Control",
5
+ "Content-Disposition",
6
+ "Content-Encoding",
7
+ "Content-Language",
8
+ "Expires",
9
+ ];
10
+ export function headersFromS3(s3hdr) {
11
+ let h = new Map();
12
+ function setIf(k, v) {
13
+ if (v !== undefined) {
14
+ h.set(k, v);
15
+ }
16
+ }
17
+ setIf("Content-Type", s3hdr.ContentType);
18
+ setIf("Cache-Control", s3hdr.CacheControl);
19
+ setIf("Content-Disposition", s3hdr.ContentDisposition);
20
+ setIf("Content-Encoding", s3hdr.ContentEncoding);
21
+ setIf("Content-Language", s3hdr.ContentLanguage);
22
+ setIf("Expires", s3hdr.ExpiresString);
23
+ return h;
24
+ }
25
+ export function headersToS3(m) {
26
+ return {
27
+ ContentType: m.get("Content-Type"),
28
+ CacheControl: m.get("Cache-Control"),
29
+ ContentDisposition: m.get("Content-Disposition"),
30
+ ContentEncoding: m.get("Content-Encoding"),
31
+ ContentLanguage: m.get("Content-Language"),
32
+ ExpiresString: m.get("Expires"),
33
+ };
34
+ }
35
+ // best-effort content-type computed from the file's extension
36
+ export function fileContentType(localPath) {
37
+ var _a;
38
+ let ContentType = (_a = mime.getType(localPath)) !== null && _a !== void 0 ? _a : undefined;
39
+ // add charset=utf-8 by default on text files
40
+ if (ContentType && ContentType.startsWith("text/")) {
41
+ ContentType = ContentType + "; charset=utf-8";
42
+ }
43
+ // if no content-type was computed, default to application/octet-stream,
44
+ // which matches what Garage does if no Content-Type is provided
45
+ if (ContentType == undefined) {
46
+ ContentType = "application/octet-stream";
47
+ }
48
+ return ContentType;
49
+ }
package/dist/index.js CHANGED
@@ -6,7 +6,7 @@ import { deploy } from "./deploy.js";
6
6
  import { empty } from "./empty.js";
7
7
  import { vhostsList } from "./vhosts.js";
8
8
  import { inspect } from "./inspect.js";
9
- program.name("dxfl").description("Deuxfleurs CLI tool").version("0.4.2-alpha");
9
+ program.name("dxfl").description("Deuxfleurs CLI tool").version("0.5.0");
10
10
  program
11
11
  .command("login")
12
12
  .description("Link your Deuxfleurs account with this tool.")
package/dist/utils.js CHANGED
@@ -14,12 +14,16 @@ var __asyncValues = (this && this.__asyncValues) || function (o) {
14
14
  function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
15
15
  function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
16
16
  };
17
- import fs from "fs";
18
- import crypto from "crypto";
19
- import { stdin, stdout } from "process";
17
+ import crypto from "node:crypto";
18
+ import fs from "node:fs";
19
+ import os from "node:os";
20
+ import path from "node:path";
21
+ import { stdin, stdout } from "node:process";
22
+ import { pipeline } from "node:stream/promises";
23
+ import { styleText } from "node:util";
24
+ import zlib from "node:zlib";
20
25
  import readline from "readline/promises";
21
26
  import { ErrorMsg } from "./error.js";
22
- import { styleText } from "node:util";
23
27
  export function getFileMd5(file) {
24
28
  return __awaiter(this, void 0, void 0, function* () {
25
29
  var _a, e_1, _b, _c;
@@ -42,6 +46,18 @@ export function getFileMd5(file) {
42
46
  return hash.digest("hex");
43
47
  });
44
48
  }
49
+ export function gzipFile(inPath, outPath) {
50
+ return __awaiter(this, void 0, void 0, function* () {
51
+ const source = fs.createReadStream(inPath);
52
+ const dest = fs.createWriteStream(outPath);
53
+ yield pipeline(source, zlib.createGzip(), dest);
54
+ });
55
+ }
56
+ export function mkTmpDir() {
57
+ return __awaiter(this, void 0, void 0, function* () {
58
+ return yield fs.promises.mkdtemp(path.join(os.tmpdir(), "dxfl-"));
59
+ });
60
+ }
45
61
  export function formatBytesHuman(bytes) {
46
62
  if (bytes < 1000) {
47
63
  return `${bytes}B`;
@@ -130,3 +146,7 @@ export function separator(size = 42) {
130
146
  }
131
147
  return styleText("gray", dashes);
132
148
  }
149
+ export function mapEq(m1, m2) {
150
+ return (m1.size === m2.size &&
151
+ Array.from(m1.keys()).every(key => m1.get(key) == m2.get(key)));
152
+ }
@@ -10,10 +10,12 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
10
10
  import fs from "fs";
11
11
  import TOML from "smol-toml";
12
12
  import URI from "fast-uri";
13
+ import picomatch from "picomatch";
13
14
  import { z as zod } from "zod";
14
15
  import { fromError as zodError } from "zod-validation-error";
15
- import { GetBucketWebsiteCommand, PutBucketWebsiteCommand, PutBucketCorsCommand, } from "@aws-sdk/client-s3";
16
+ import { DeleteBucketCorsCommand, GetBucketWebsiteCommand, PutBucketWebsiteCommand, PutBucketCorsCommand, } from "@aws-sdk/client-s3";
16
17
  import { ErrorMsg, withErrorMsg, wrapS3Call } from "./error.js";
18
+ import { supportedHeaders } from "./headers.js";
17
19
  import { getBucketFilesDetails } from "./bucket.js";
18
20
  ////////////// Utilities
19
21
  export function equalBucketRedirect(b1, b2) {
@@ -50,6 +52,16 @@ export function equalCorsRules(c1, c2) {
50
52
  }
51
53
  return eqArr(eqRule, c1, c2);
52
54
  }
55
+ export function evalHeadersRules(rules, path) {
56
+ for (const rule of rules) {
57
+ if (rule.for(path)) {
58
+ // take the first rule that matches
59
+ return rule.params;
60
+ }
61
+ }
62
+ // return default params
63
+ return { values: new Map() };
64
+ }
53
65
  ////////////// Parsing from a TOML config file
54
66
  // Parsing: TOML -> untyped object
55
67
  function readConfigFileObject(filename) {
@@ -91,24 +103,39 @@ function readConfigFileObject(filename) {
91
103
  });
92
104
  }
93
105
  // Parsing: untyped object -> RawConfig
94
- const RawRedirectSchema = zod.object({
106
+ const RawRedirectSchema = zod
107
+ .object({
95
108
  from: zod.string(),
96
109
  to: zod.string(),
97
110
  force: zod.boolean().optional(),
98
111
  status: zod.number().int().positive().optional(),
99
- });
100
- const RawCorsSchema = zod.object({
112
+ })
113
+ .strict();
114
+ const RawCorsSchema = zod
115
+ .object({
101
116
  allowed_origins: zod.union([zod.string(), zod.string().array()]),
102
117
  allowed_methods: zod.union([zod.string(), zod.string().array()]).optional(),
103
118
  allowed_headers: zod.union([zod.string(), zod.string().array()]).optional(),
104
119
  expose_headers: zod.union([zod.string(), zod.string().array()]).optional(),
105
- });
106
- const RawConfigSchema = zod.object({
120
+ })
121
+ .strict();
122
+ const RawHeadersSchema = zod
123
+ .object({
124
+ for: zod.union([zod.string(), zod.string().array()]),
125
+ compress: zod.enum(["gzip"]).optional(),
126
+ // loose validation; the fields are validated against supportedHeaders later
127
+ values: zod.object({}).catchall(zod.string()).passthrough().optional(),
128
+ })
129
+ .strict();
130
+ const RawConfigSchema = zod
131
+ .object({
107
132
  index_page: zod.string().optional(),
108
133
  error_page: zod.string().optional(),
109
134
  redirects: zod.array(RawRedirectSchema).optional(),
110
135
  cors: zod.array(RawCorsSchema).optional(),
111
- });
136
+ headers: zod.array(RawHeadersSchema).optional(),
137
+ })
138
+ .strict();
112
139
  function interpRawConfig(cfg) {
113
140
  try {
114
141
  return RawConfigSchema.parse(cfg);
@@ -118,7 +145,7 @@ function interpRawConfig(cfg) {
118
145
  throw new ErrorMsg(validationError.toString());
119
146
  }
120
147
  }
121
- // Parsing: RawConfig -> WebsiteConfig
148
+ // Parsing: RawConfig -> LocalWebsiteConfig
122
149
  // Parses the escaping scheme used for 'from' and 'to' paths
123
150
  // in redirects. These fields may specify a '*' at the end
124
151
  // (in that case they indicate a prefix) or not (then they are
@@ -151,7 +178,7 @@ function unescape(s) {
151
178
  }
152
179
  }
153
180
  function interpConfig(rawcfg) {
154
- var _a, _b, _c, _d, _e, _f;
181
+ var _a, _b, _c, _d, _e, _f, _g, _h;
155
182
  function interpRedirect(r) {
156
183
  var _a, _b;
157
184
  const rfrom = withErrorMsg(() => unescape(r.from), msg => `from: ${msg}`);
@@ -281,6 +308,7 @@ function interpConfig(rawcfg) {
281
308
  bucket_redirects: [],
282
309
  object_redirects: new Map(),
283
310
  cors_rules: [],
311
+ headers_rules: [],
284
312
  };
285
313
  for (const [i, raw] of ((_b = rawcfg.redirects) !== null && _b !== void 0 ? _b : []).entries()) {
286
314
  // `i+1` is only used for display: start counting redirects from 1 instead of 0
@@ -306,6 +334,29 @@ function interpConfig(rawcfg) {
306
334
  expose_headers: interpRawArray((_f = raw.expose_headers) !== null && _f !== void 0 ? _f : []),
307
335
  });
308
336
  }
337
+ for (const [_, raw] of ((_g = rawcfg.headers) !== null && _g !== void 0 ? _g : []).entries()) {
338
+ const glob = picomatch(raw.for, {
339
+ dot: true, // don't treat dotfiles differently
340
+ contains: false,
341
+ });
342
+ let values = new Map();
343
+ for (const [k, v] of Object.entries((_h = raw.values) !== null && _h !== void 0 ? _h : {})) {
344
+ if (supportedHeaders.includes(k)) {
345
+ values.set(k, v);
346
+ }
347
+ else {
348
+ const hdrs = supportedHeaders.join(", ");
349
+ throw new ErrorMsg(`Cannot set value for header "${k}" in configuration.\nSupported headers are: ${hdrs}.`);
350
+ }
351
+ }
352
+ cfg.headers_rules.push({
353
+ for: glob,
354
+ params: {
355
+ compress: raw.compress,
356
+ values,
357
+ },
358
+ });
359
+ }
309
360
  return cfg;
310
361
  }
311
362
  export function readConfigFile(filename) {
@@ -427,12 +478,18 @@ export function getBucketConfig(bucket, files) {
427
478
  // });
428
479
  // }
429
480
  // }
481
+ // Interpret headers
482
+ let headers = new Map();
483
+ for (const [file, { headers: file_headers }] of details) {
484
+ headers.set(file, file_headers);
485
+ }
430
486
  return {
431
487
  index_page,
432
488
  error_page,
433
489
  bucket_redirects,
434
490
  object_redirects,
435
491
  cors_rules,
492
+ headers,
436
493
  };
437
494
  });
438
495
  }
@@ -496,9 +553,19 @@ export function putCorsRules(bucket, cors_rules) {
496
553
  ExposeHeaders: rule.expose_headers,
497
554
  });
498
555
  }
499
- yield wrapS3Call(`write the bucket CORS config`, [200], () => bucket.client.send(new PutBucketCorsCommand({
500
- Bucket: bucket.name,
501
- CORSConfiguration: { CORSRules },
502
- })));
556
+ if (CORSRules.length == 0) {
557
+ // work around a bug in garage when using PutBucketCorsCommand with
558
+ // empty rules (https://git.deuxfleurs.fr/Deuxfleurs/garage/pulls/1320)
559
+ // Instead, use DeleteBucketCorsCommand which is equivalent in that case.
560
+ yield wrapS3Call(`empty the bucket CORS config`, [204], () => bucket.client.send(new DeleteBucketCorsCommand({
561
+ Bucket: bucket.name,
562
+ })));
563
+ }
564
+ else {
565
+ yield wrapS3Call(`write the bucket CORS config`, [200], () => bucket.client.send(new PutBucketCorsCommand({
566
+ Bucket: bucket.name,
567
+ CORSConfiguration: { CORSRules },
568
+ })));
569
+ }
503
570
  });
504
571
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "dxfl",
3
- "version": "0.4.2-alpha",
3
+ "version": "0.5.0",
4
4
  "description": "",
5
5
  "license": "EUPL-1.2",
6
6
  "author": "Deuxfleurs Team <coucou@deuxfleurs.fr>",
@@ -23,14 +23,16 @@
23
23
  "prettier-check": "npx prettier . --check"
24
24
  },
25
25
  "dependencies": {
26
- "@aws-sdk/client-s3": "^3.1004.0",
26
+ "@aws-sdk/client-s3": "^3.1011.0",
27
27
  "@commander-js/extra-typings": "^14.0.0",
28
28
  "@supercharge/promise-pool": "^3.2.0",
29
29
  "@types/node": "^25.2.1",
30
+ "@types/picomatch": "^4.0.2",
30
31
  "commander": "^14.0.3",
31
32
  "fast-uri": "^3.1.0",
32
33
  "guichet-sdk-ts": "^0.1.0",
33
34
  "mime": "^4.1.0",
35
+ "picomatch": "^4.0.3",
34
36
  "read": "^5.0.1",
35
37
  "smol-toml": "^1.6.0",
36
38
  "typescript": "^5.9.3",