@projectdochelp/s3te 3.1.4 → 3.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -80,7 +80,7 @@ This section is only about the AWS things you need before you touch S3TE. The ac
80
80
  | Daily-work AWS access | `s3te deploy` needs credentials that can create CloudFormation stacks and related resources. | [Create an IAM user](https://docs.aws.amazon.com/console/iam/add-users), [Manage access keys](https://docs.aws.amazon.com/IAM/latest/UserGuide/access-keys-admin-managed.html) |
81
81
  | AWS CLI v2 | The S3TE CLI shells out to the official `aws` CLI. | [Install AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html), [Get started with AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-getting-started.html) |
82
82
  | Domain name you control | CloudFront and TLS only make sense for domains you can point to AWS. | Use your registrar of choice |
83
- | ACM certificate in `us-east-1` | CloudFront requires its public certificate in `us-east-1`. | [Public certificates in ACM](https://docs.aws.amazon.com/acm/latest/userguide/acm-public-certificates.html) |
83
+ | ACM certificate in `us-east-1` | CloudFront requires its public certificate in `us-east-1`, and the certificate must cover every alias S3TE will derive for that environment. | [Public certificates in ACM](https://docs.aws.amazon.com/acm/latest/userguide/acm-public-certificates.html) |
84
84
  | Optional Route53 hosted zone | Needed only if S3TE should create DNS alias records automatically. | [Create a public hosted zone](https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/CreatingHostedZone.html) |
85
85
 
86
86
  </details>
@@ -192,7 +192,7 @@ mywebsite/
192
192
  extensions.json
193
193
  ```
194
194
 
195
- The generated `.github/workflows/s3te-sync.yml` is the default CI path for GitHub-based source publishing into the S3TE code bucket. It is scaffolded once and then left alone on later `s3te init` runs unless you use `--force`.
195
+ The generated `.github/workflows/s3te-sync.yml` is the default CI path for GitHub-based source publishing into the S3TE code buckets. It is scaffolded once and then left alone on later `s3te init` runs unless you use `--force`.
196
196
 
197
197
  </details>
198
198
 
@@ -228,6 +228,12 @@ The most important fields for a first deployment are:
228
228
 
229
229
  Use plain hostnames in `baseUrl` and `cloudFrontAliases`, not full URLs. If your config contains a `prod` environment plus additional environments such as `test` or `stage`, S3TE keeps the `prod` hostname unchanged and derives non-production hostnames automatically by prepending `<env>.`.
230
230
 
231
+ Your ACM certificate must cover the final derived aliases of the environment you deploy. Example:
232
+
233
+ - `*.example.com` covers `test.example.com`
234
+ - `*.example.com` does not cover `test.app.example.com`
235
+ - for nested aliases like `test.app.example.com`, either add a SAN that covers it (for example `*.app.example.com` or the exact hostname) or use a different `certificateArn` for that environment
236
+
231
237
  </details>
232
238
 
233
239
  <details>
@@ -243,6 +249,8 @@ npx s3te deploy --env dev
243
249
 
244
250
  `render` writes the local preview into `offline/S3TELocal/preview/dev/...`.
245
251
 
252
+ `doctor --env <name>` now also checks whether the configured ACM certificate covers the CloudFront aliases that S3TE derives for that environment. For that check, the AWS identity running `doctor` needs permission to call `acm:DescribeCertificate` for the configured certificate ARN.
253
+
246
254
  `deploy` creates or updates the persistent environment stack, uses a temporary deploy stack for packaged Lambda artifacts, synchronizes the source project into the code bucket, and removes the temporary stack again when the deploy finishes.
247
255
 
248
256
  After the first successful deploy, use `s3te sync --env dev` for regular template, partial, asset and source updates when the infrastructure itself did not change.
@@ -251,6 +259,175 @@ If you left `route53HostedZoneId` out of the config, the last DNS step stays man
251
259
 
252
260
  </details>
253
261
 
262
+ <details>
263
+ <summary>6. Prepare GitHub Actions for code-bucket publishing</summary>
264
+
265
+ Use this step if your team wants GitHub pushes to publish project sources into the S3TE code buckets instead of running `s3te sync` locally.
266
+
267
+ `s3te init` already scaffolded `.github/workflows/s3te-sync.yml` for that path.
268
+
269
+ That workflow is meant for source publishing only:
270
+
271
+ - it validates the project
272
+ - it reads the selected environment from GitHub and resolves the matching AWS region from `s3te.config.json`
273
+ - it uploads every configured variant into its own S3TE code bucket
274
+ - the resulting S3 events trigger the deployed Lambda pipeline in AWS
275
+
276
+ Use a full `deploy` only when the infrastructure, environment config, or runtime package changes.
277
+
278
+ GitHub preparation checklist:
279
+
280
+ 1. Push the project to GitHub together with `.github/workflows/s3te-sync.yml`.
281
+ 2. Make sure GitHub Actions are allowed for the repository or organization.
282
+ 3. Run the first real `npx s3te deploy --env <name>` so the code buckets already exist.
283
+ 4. In AWS IAM, create an access key for a CI user that may sync only the S3TE code buckets for that environment.
284
+ 5. In GitHub open `Settings -> Secrets and variables -> Actions -> Variables`.
285
+ 6. Add these repository variables:
286
+ - `S3TE_ENVIRONMENT`
287
+ Use the exact environment name from `s3te.config.json`, for example `dev`, `test`, or `prod`.
288
+ - `S3TE_GIT_BRANCH` (optional)
289
+ Use the branch that should trigger the sync job, for example `main`.
290
+ 7. In GitHub open `Settings -> Secrets and variables -> Actions -> Secrets`.
291
+ 8. Add these repository secrets:
292
+ - `AWS_ACCESS_KEY_ID`
293
+ - `AWS_SECRET_ACCESS_KEY`
294
+ 9. Leave `.github/workflows/s3te-sync.yml` unchanged unless you want a custom CI flow. The scaffolded workflow already reads:
295
+ - the environment from `S3TE_ENVIRONMENT`
296
+ - the branch from `S3TE_GIT_BRANCH`, defaulting to `main`
297
+ - the AWS region from `s3te.config.json`
298
+
299
+ You do not have to store bucket names, source folders, part folders, or AWS regions in GitHub variables. `s3te sync` resolves all of that from `s3te.config.json`.
300
+
301
+ For projects with multiple environments such as `test` and `prod`, the simplest setup is usually one workflow file per target environment, for example:
302
+
303
+ - `.github/workflows/s3te-sync-test.yml` with `npx s3te sync --env test`
304
+ - `.github/workflows/s3te-sync-prod.yml` with `npx s3te sync --env prod`
305
+
306
+ First verification in GitHub:
307
+
308
+ 1. Open the `Actions` tab in the repository.
309
+ 2. Select `S3TE Sync`.
310
+ 3. Start it once manually with `Run workflow`.
311
+ 4. Check that the run reaches the `Configure AWS credentials`, `Validate project`, and `Sync project sources to the S3TE code buckets` steps without error.
312
+
313
+ Where to get the AWS values:
314
+
315
+ - `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY`
316
+ In the AWS console open `IAM -> Users -> <your-ci-user> -> Security credentials -> Create access key`.
317
+ Save both values immediately. The secret access key is shown only once. AWS documents the credential options and access-key handling here:
318
+ [AWS security credentials](https://docs.aws.amazon.com/IAM/latest/UserGuide/security-creds.html),
319
+ [Manage access keys for IAM users](https://docs.aws.amazon.com/IAM/latest/UserGuide/access-keys-admin-managed.html).
320
+ - `S3TE_ENVIRONMENT`
321
+ This is the environment key from your `s3te.config.json`, for example `test` or `prod`.
322
+ - AWS region
323
+ You do not need to copy this into GitHub. The workflow reads `environments.<name>.awsRegion` directly from `s3te.config.json`.
324
+
325
+ What gets uploaded where:
326
+
327
+ - For each variant, S3TE stages `partDir` into `part/` and `sourceDir` into `<variant>/`.
328
+ - Then S3TE syncs that staged tree into the resolved code bucket for that variant and environment.
329
+
330
+ With your example config this means:
331
+
332
+ - `test` + `website`: `app/part` and `app/website` go to `test-website-code-sop`
333
+ - `test` + `app`: `app/part-app` and `app/app` go to `test-app-code-sop`
334
+ - `prod` + `website`: `app/part` and `app/website` go to `website-code-sop`
335
+ - `prod` + `app`: `app/part-app` and `app/app` go to `app-code-sop`
336
+
337
+ Minimal IAM policy example for the `test` environment and both variants:
338
+
339
+ ```json
340
+ {
341
+ "Version": "2012-10-17",
342
+ "Statement": [
343
+ {
344
+ "Effect": "Allow",
345
+ "Action": ["s3:ListBucket"],
346
+ "Resource": [
347
+ "arn:aws:s3:::test-website-code-sop",
348
+ "arn:aws:s3:::test-app-code-sop"
349
+ ]
350
+ },
351
+ {
352
+ "Effect": "Allow",
353
+ "Action": ["s3:GetObject", "s3:PutObject", "s3:DeleteObject"],
354
+ "Resource": [
355
+ "arn:aws:s3:::test-website-code-sop/*",
356
+ "arn:aws:s3:::test-app-code-sop/*"
357
+ ]
358
+ }
359
+ ]
360
+ }
361
+ ```
362
+
363
+ For different environments or additional variants, use the derived code bucket names from your config.
364
+
365
+ The scaffolded workflow looks like this:
366
+
367
+ ```yaml
368
+ # Required GitHub repository secrets:
369
+ # - AWS_ACCESS_KEY_ID
370
+ # - AWS_SECRET_ACCESS_KEY
371
+ # Required GitHub repository variable:
372
+ # - S3TE_ENVIRONMENT (for example dev, test, or prod)
373
+ # Optional GitHub repository variable:
374
+ # - S3TE_GIT_BRANCH (defaults to main)
375
+ # This workflow reads s3te.config.json at runtime and syncs all variants into their own code buckets.
376
+ name: S3TE Sync
377
+ on:
378
+ workflow_dispatch:
379
+ inputs:
380
+ environment:
381
+ description: Optional S3TE environment override from s3te.config.json
382
+ required: false
383
+ type: string
384
+ push:
385
+ paths:
386
+ - "app/**"
387
+ - "package.json"
388
+ - "package-lock.json"
389
+ - ".github/workflows/s3te-sync.yml"
390
+
391
+ jobs:
392
+ sync:
393
+ if: github.event_name == 'workflow_dispatch' || github.ref_name == (vars.S3TE_GIT_BRANCH || 'main')
394
+ runs-on: ubuntu-latest
395
+ permissions:
396
+ contents: read
397
+ steps:
398
+ - uses: actions/checkout@v4
399
+ - uses: actions/setup-node@v4
400
+ with:
401
+ node-version: 22
402
+ cache: npm
403
+ - name: Install dependencies
404
+ shell: bash
405
+ run: |
406
+ if [ -f package-lock.json ]; then
407
+ npm ci
408
+ else
409
+ npm install
410
+ fi
411
+ - name: Resolve S3TE environment and AWS region from s3te.config.json
412
+ id: s3te-config
413
+ shell: bash
414
+ env:
415
+ WORKFLOW_INPUT_ENVIRONMENT: ${{ inputs.environment }}
416
+ REPOSITORY_S3TE_ENVIRONMENT: ${{ vars.S3TE_ENVIRONMENT }}
417
+ run: |
418
+ node -e "const fs=require('node:fs'); const requested=(process.env.WORKFLOW_INPUT_ENVIRONMENT || process.env.REPOSITORY_S3TE_ENVIRONMENT || '').trim(); const config=JSON.parse(fs.readFileSync('s3te.config.json','utf8')); const known=Object.keys(config.environments ?? {}); if(!requested){ console.error('Missing GitHub repository variable S3TE_ENVIRONMENT.'); process.exit(1);} const environmentConfig=config.environments?.[requested]; if(!environmentConfig){ console.error('Unknown environment ' + requested + '. Known environments: ' + (known.length > 0 ? known.join(', ') : '(none)') + '.'); process.exit(1);} fs.appendFileSync(process.env.GITHUB_OUTPUT, 'environment=' + requested + '\n'); fs.appendFileSync(process.env.GITHUB_OUTPUT, 'aws_region=' + environmentConfig.awsRegion + '\n');"
419
+ - name: Configure AWS credentials
420
+ uses: aws-actions/configure-aws-credentials@v4
421
+ with:
422
+ aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
423
+ aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
424
+ aws-region: ${{ steps.s3te-config.outputs.aws_region }}
425
+ - run: npx s3te validate --env ${{ steps.s3te-config.outputs.environment }}
426
+ - run: npx s3te sync --env ${{ steps.s3te-config.outputs.environment }}
427
+ ```
428
+
429
+ </details>
430
+
254
431
  ## Usage
255
432
 
256
433
  Once the project is installed, your everyday loop splits into two paths: deploy when infrastructure changes, sync when only project sources changed.
@@ -265,7 +442,7 @@ Once the project is installed, your everyday loop splits into two paths: deploy
265
442
  3. Validate and render locally.
266
443
  4. Run your tests.
267
444
  5. Use `deploy` for the first installation or after infrastructure/config/runtime changes.
268
- 6. Use `sync` for day-to-day source publishing into the code bucket.
445
+ 6. Use `sync` for day-to-day source publishing into the code buckets.
269
446
 
270
447
  ```bash
271
448
  npx s3te validate
@@ -417,115 +594,6 @@ That deploy updates the existing environment stack and adds the Webiny mirror re
417
594
 
418
595
  </details>
419
596
 
420
- <details>
421
- <summary>GitHub Actions source publishing</summary>
422
-
423
- If your team works through GitHub instead of running `s3te sync` locally, the scaffold already includes `.github/workflows/s3te-sync.yml`.
424
-
425
- That workflow is meant for source publishing only:
426
-
427
- - it validates the project
428
- - it uploads `app/...` and `part/...` into the S3TE code bucket
429
- - the resulting S3 events trigger the deployed Lambda pipeline in AWS
430
-
431
- Use a full `deploy` only when the infrastructure, environment config, or runtime package changes.
432
-
433
- GitHub preparation checklist:
434
-
435
- 1. Push the project to GitHub together with `.github/workflows/s3te-sync.yml`.
436
- 2. Make sure GitHub Actions are allowed for the repository or organization.
437
- 3. Run the first real `npx s3te deploy --env <name>` so the code bucket already exists.
438
- 4. In AWS IAM, create an access key for a CI user that may sync only the S3TE code bucket for that environment.
439
- 5. In GitHub open `Settings -> Secrets and variables -> Actions -> Secrets`.
440
- 6. Add these repository secrets:
441
- - `AWS_ACCESS_KEY_ID`
442
- - `AWS_SECRET_ACCESS_KEY`
443
- 7. Open `.github/workflows/s3te-sync.yml` and adjust:
444
- - the branch under `on.push.branches`
445
- - `aws-region`
446
- - `npx s3te sync --env dev` to your target environment such as `prod` or `test`
447
-
448
- No GitHub variables are required by the scaffolded workflow. The code bucket name is resolved by S3TE from `s3te.config.json`, so you do not have to store bucket names in GitHub.
449
-
450
- For projects with multiple environments such as `test` and `prod`, the simplest setup is usually one workflow file per target environment, for example:
451
-
452
- - `.github/workflows/s3te-sync-test.yml` with `npx s3te sync --env test`
453
- - `.github/workflows/s3te-sync-prod.yml` with `npx s3te sync --env prod`
454
-
455
- First verification in GitHub:
456
-
457
- 1. Open the `Actions` tab in the repository.
458
- 2. Select `S3TE Sync`.
459
- 3. Start it once manually with `Run workflow`.
460
- 4. Check that the run reaches the `Configure AWS credentials`, `Validate project`, and `Sync project sources to the S3TE code bucket` steps without error.
461
-
462
- Minimal IAM policy example for one code bucket:
463
-
464
- ```json
465
- {
466
- "Version": "2012-10-17",
467
- "Statement": [
468
- {
469
- "Effect": "Allow",
470
- "Action": ["s3:ListBucket"],
471
- "Resource": ["arn:aws:s3:::dev-website-code-mywebsite"]
472
- },
473
- {
474
- "Effect": "Allow",
475
- "Action": ["s3:GetObject", "s3:PutObject", "s3:DeleteObject"],
476
- "Resource": ["arn:aws:s3:::dev-website-code-mywebsite/*"]
477
- }
478
- ]
479
- }
480
- ```
481
-
482
- For non-production environments or additional variants, use the derived code bucket names from your config, for example `test-website-code-mywebsite` or `app-code-mywebsite`.
483
-
484
- The scaffolded workflow looks like this:
485
-
486
- ```yaml
487
- name: S3TE Sync
488
- on:
489
- workflow_dispatch:
490
- push:
491
- branches: ["main"]
492
- paths:
493
- - "app/**"
494
- - "package.json"
495
- - "package-lock.json"
496
- - ".github/workflows/s3te-sync.yml"
497
-
498
- jobs:
499
- sync:
500
- runs-on: ubuntu-latest
501
- permissions:
502
- contents: read
503
- steps:
504
- - uses: actions/checkout@v4
505
- - uses: actions/setup-node@v4
506
- with:
507
- node-version: 22
508
- cache: npm
509
- - name: Install dependencies
510
- shell: bash
511
- run: |
512
- if [ -f package-lock.json ]; then
513
- npm ci
514
- else
515
- npm install
516
- fi
517
- - name: Configure AWS credentials
518
- uses: aws-actions/configure-aws-credentials@v4
519
- with:
520
- aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
521
- aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
522
- aws-region: eu-central-1
523
- - run: npx s3te validate
524
- - run: npx s3te sync --env dev
525
- ```
526
-
527
- </details>
528
-
529
597
  <details>
530
598
  <summary>What the migration command changes</summary>
531
599
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@projectdochelp/s3te",
3
- "version": "3.1.4",
3
+ "version": "3.2.1",
4
4
  "description": "CLI, render core, AWS adapter, and testkit for S3TemplateEngine projects",
5
5
  "repository": {
6
6
  "type": "git",
@@ -53,6 +53,52 @@ async function describeStack({ stackName, region, profile, cwd }) {
53
53
  return JSON.parse(describedStack.stdout).Stacks?.[0];
54
54
  }
55
55
 
56
+ async function describeStackEvents({ stackName, region, profile, cwd }) {
57
+ const describedEvents = await runAwsCli(["cloudformation", "describe-stack-events", "--stack-name", stackName, "--output", "json"], {
58
+ region,
59
+ profile,
60
+ cwd,
61
+ errorCode: "ADAPTER_ERROR"
62
+ });
63
+ return JSON.parse(describedEvents.stdout).StackEvents ?? [];
64
+ }
65
+
66
+ export function summarizeStackFailureEvents(stackEvents = [], limit = 8) {
67
+ return stackEvents
68
+ .filter((event) => (
69
+ String(event.ResourceStatus ?? "").includes("FAILED")
70
+ || String(event.ResourceStatus ?? "").includes("ROLLBACK")
71
+ ))
72
+ .map((event) => ({
73
+ timestamp: event.Timestamp,
74
+ logicalResourceId: event.LogicalResourceId,
75
+ resourceType: event.ResourceType,
76
+ resourceStatus: event.ResourceStatus,
77
+ resourceStatusReason: event.ResourceStatusReason
78
+ }))
79
+ .slice(0, limit);
80
+ }
81
+
82
+ async function attachStackFailureDetails(error, { stackName, region, profile, cwd }) {
83
+ try {
84
+ const stackEvents = await describeStackEvents({ stackName, region, profile, cwd });
85
+ const summarizedEvents = summarizeStackFailureEvents(stackEvents);
86
+ if (summarizedEvents.length > 0) {
87
+ error.details = {
88
+ ...(error.details ?? {}),
89
+ stackFailureEvents: summarizedEvents
90
+ };
91
+ }
92
+ } catch (stackEventsError) {
93
+ error.details = {
94
+ ...(error.details ?? {}),
95
+ stackFailureEventsError: stackEventsError.message
96
+ };
97
+ }
98
+
99
+ return error;
100
+ }
101
+
56
102
  async function deployCloudFormationStack({
57
103
  stackName,
58
104
  templatePath,
@@ -85,13 +131,22 @@ async function deployCloudFormationStack({
85
131
  args.push("--no-execute-changeset");
86
132
  }
87
133
 
88
- await runAwsCli(args, {
89
- region,
90
- profile,
91
- cwd,
92
- stdio,
93
- errorCode: "ADAPTER_ERROR"
94
- });
134
+ try {
135
+ await runAwsCli(args, {
136
+ region,
137
+ profile,
138
+ cwd,
139
+ stdio,
140
+ errorCode: "ADAPTER_ERROR"
141
+ });
142
+ } catch (error) {
143
+ throw await attachStackFailureDetails(error, {
144
+ stackName,
145
+ region,
146
+ profile,
147
+ cwd
148
+ });
149
+ }
95
150
  }
96
151
 
97
152
  async function resolveWebinyStreamArn({ runtimeConfig, region, profile, cwd }) {
@@ -176,6 +231,63 @@ async function deployTemporaryArtifactsStack({
176
231
  };
177
232
  }
178
233
 
234
+ export function collectBucketObjectVersions(payload = {}) {
235
+ return [
236
+ ...(payload.Versions ?? []),
237
+ ...(payload.DeleteMarkers ?? [])
238
+ ].map((entry) => ({
239
+ Key: entry.Key,
240
+ VersionId: entry.VersionId
241
+ })).filter((entry) => entry.Key && entry.VersionId);
242
+ }
243
+
244
+ function chunkItems(items, chunkSize) {
245
+ const chunks = [];
246
+ for (let index = 0; index < items.length; index += chunkSize) {
247
+ chunks.push(items.slice(index, index + chunkSize));
248
+ }
249
+ return chunks;
250
+ }
251
+
252
+ async function deleteBucketObjectVersions({
253
+ bucketName,
254
+ region,
255
+ profile,
256
+ cwd
257
+ }) {
258
+ while (true) {
259
+ const listedVersions = await runAwsCli(["s3api", "list-object-versions", "--bucket", bucketName, "--output", "json"], {
260
+ region,
261
+ profile,
262
+ cwd,
263
+ errorCode: "ADAPTER_ERROR"
264
+ });
265
+ const objects = collectBucketObjectVersions(JSON.parse(listedVersions.stdout || "{}"));
266
+ if (objects.length === 0) {
267
+ return;
268
+ }
269
+
270
+ for (const batch of chunkItems(objects, 250)) {
271
+ await runAwsCli([
272
+ "s3api",
273
+ "delete-objects",
274
+ "--bucket",
275
+ bucketName,
276
+ "--delete",
277
+ JSON.stringify({
278
+ Objects: batch,
279
+ Quiet: true
280
+ })
281
+ ], {
282
+ region,
283
+ profile,
284
+ cwd,
285
+ errorCode: "ADAPTER_ERROR"
286
+ });
287
+ }
288
+ }
289
+ }
290
+
179
291
  async function cleanupTemporaryArtifactsStack({
180
292
  stackName,
181
293
  artifactBucket,
@@ -191,6 +303,12 @@ async function cleanupTemporaryArtifactsStack({
191
303
  cwd,
192
304
  errorCode: "ADAPTER_ERROR"
193
305
  });
306
+ await deleteBucketObjectVersions({
307
+ bucketName: artifactBucket,
308
+ region,
309
+ profile,
310
+ cwd
311
+ });
194
312
  } catch (error) {
195
313
  if (!String(error.message).includes("NoSuchBucket")) {
196
314
  throw error;
@@ -4,6 +4,7 @@ import { spawn } from "node:child_process";
4
4
 
5
5
  import {
6
6
  S3teError,
7
+ buildEnvironmentRuntimeConfig,
7
8
  createManualRenderTargets,
8
9
  isRenderableKey,
9
10
  loadProjectConfig,
@@ -16,6 +17,7 @@ import {
16
17
  ensureAwsCliAvailable,
17
18
  ensureAwsCredentials,
18
19
  packageAwsProject,
20
+ runAwsCli,
19
21
  syncAwsProject
20
22
  } from "../../aws-adapter/src/index.mjs";
21
23
 
@@ -32,11 +34,102 @@ function normalizePath(value) {
32
34
  return String(value).replace(/\\/g, "/");
33
35
  }
34
36
 
37
+ function isProjectTestFile(filename) {
38
+ return /(?:^test-.*|.*\.(?:test|spec))\.(?:cjs|mjs|js)$/i.test(filename);
39
+ }
40
+
41
+ async function listProjectTestFiles(rootDir, currentDir = rootDir) {
42
+ const entries = await fs.readdir(currentDir, { withFileTypes: true });
43
+ const files = [];
44
+
45
+ for (const entry of entries) {
46
+ const fullPath = path.join(currentDir, entry.name);
47
+ if (entry.isDirectory()) {
48
+ files.push(...await listProjectTestFiles(rootDir, fullPath));
49
+ continue;
50
+ }
51
+
52
+ if (entry.isFile() && isProjectTestFile(entry.name)) {
53
+ files.push(normalizePath(path.relative(rootDir, fullPath)));
54
+ }
55
+ }
56
+
57
+ return files.sort();
58
+ }
59
+
35
60
  function unknownEnvironmentMessage(config, environmentName) {
36
61
  const knownEnvironments = Object.keys(config?.environments ?? {});
37
62
  return `Unknown environment ${environmentName}. Known environments: ${knownEnvironments.length > 0 ? knownEnvironments.join(", ") : "(none)"}.`;
38
63
  }
39
64
 
65
+ function normalizeHostname(value) {
66
+ return String(value ?? "").trim().toLowerCase().replace(/\.+$/, "");
67
+ }
68
+
69
+ function certificatePatternMatchesHost(pattern, hostname) {
70
+ const normalizedPattern = normalizeHostname(pattern);
71
+ const normalizedHostname = normalizeHostname(hostname);
72
+
73
+ if (!normalizedPattern || !normalizedHostname) {
74
+ return false;
75
+ }
76
+
77
+ if (!normalizedPattern.includes("*")) {
78
+ return normalizedPattern === normalizedHostname;
79
+ }
80
+
81
+ const patternLabels = normalizedPattern.split(".");
82
+ const hostnameLabels = normalizedHostname.split(".");
83
+
84
+ if (patternLabels[0] !== "*" || patternLabels.slice(1).some((label) => label.includes("*"))) {
85
+ return false;
86
+ }
87
+
88
+ if (patternLabels.length !== hostnameLabels.length) {
89
+ return false;
90
+ }
91
+
92
+ return patternLabels.slice(1).join(".") === hostnameLabels.slice(1).join(".");
93
+ }
94
+
95
+ function findUncoveredCertificateHosts(hostnames, certificateDomains) {
96
+ const normalizedCertificateDomains = [...new Set(
97
+ certificateDomains
98
+ .map((value) => normalizeHostname(value))
99
+ .filter(Boolean)
100
+ )];
101
+
102
+ return [...new Set(
103
+ hostnames
104
+ .map((value) => normalizeHostname(value))
105
+ .filter(Boolean)
106
+ .filter((hostname) => !normalizedCertificateDomains.some((pattern) => certificatePatternMatchesHost(pattern, hostname)))
107
+ )].sort();
108
+ }
109
+
110
+ function collectEnvironmentCloudFrontAliases(config, environmentName) {
111
+ const runtimeConfig = buildEnvironmentRuntimeConfig(config, environmentName);
112
+ const aliases = [];
113
+
114
+ for (const variantConfig of Object.values(runtimeConfig.variants)) {
115
+ for (const languageConfig of Object.values(variantConfig.languages)) {
116
+ aliases.push(...(languageConfig.cloudFrontAliases ?? []));
117
+ }
118
+ }
119
+
120
+ return [...new Set(aliases.map((value) => normalizeHostname(value)).filter(Boolean))].sort();
121
+ }
122
+
123
+ async function describeAcmCertificate({ certificateArn, profile, cwd, runAwsCliFn }) {
124
+ const response = await runAwsCliFn(["acm", "describe-certificate", "--certificate-arn", certificateArn, "--output", "json"], {
125
+ region: "us-east-1",
126
+ profile,
127
+ cwd,
128
+ errorCode: "AWS_AUTH_ERROR"
129
+ });
130
+ return JSON.parse(response.stdout || "{}").Certificate ?? {};
131
+ }
132
+
40
133
  function assertKnownEnvironment(config, environmentName) {
41
134
  if (!environmentName) {
42
135
  return;
@@ -250,17 +343,24 @@ function schemaTemplate() {
250
343
  }
251
344
 
252
345
  function githubSyncWorkflowTemplate() {
253
- return `# Before first use:
254
- # 1. Run "npx s3te deploy --env dev" once so the S3TE code bucket already exists.
255
- # 2. Add GitHub Actions secrets AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY.
256
- # 3. Adjust branch, aws-region, and the target environment below.
346
+ return `# Required GitHub repository secrets:
347
+ # - AWS_ACCESS_KEY_ID
348
+ # - AWS_SECRET_ACCESS_KEY
349
+ # Required GitHub repository variable:
350
+ # - S3TE_ENVIRONMENT (for example dev, test, or prod)
351
+ # Optional GitHub repository variable:
352
+ # - S3TE_GIT_BRANCH (defaults to main)
353
+ # This workflow reads s3te.config.json at runtime and syncs all variants into their own code buckets.
257
354
  name: S3TE Sync
258
355
 
259
356
  on:
260
357
  workflow_dispatch:
358
+ inputs:
359
+ environment:
360
+ description: Optional S3TE environment override from s3te.config.json
361
+ required: false
362
+ type: string
261
363
  push:
262
- branches:
263
- - main
264
364
  paths:
265
365
  - "app/**"
266
366
  - "package.json"
@@ -269,6 +369,7 @@ on:
269
369
 
270
370
  jobs:
271
371
  sync:
372
+ if: github.event_name == 'workflow_dispatch' || github.ref_name == (vars.S3TE_GIT_BRANCH || 'main')
272
373
  runs-on: ubuntu-latest
273
374
  permissions:
274
375
  contents: read
@@ -286,16 +387,24 @@ jobs:
286
387
  else
287
388
  npm install
288
389
  fi
390
+ - name: Resolve S3TE environment and AWS region from s3te.config.json
391
+ id: s3te-config
392
+ shell: bash
393
+ env:
394
+ WORKFLOW_INPUT_ENVIRONMENT: \${{ inputs.environment }}
395
+ REPOSITORY_S3TE_ENVIRONMENT: \${{ vars.S3TE_ENVIRONMENT }}
396
+ run: |
397
+ node -e "const fs=require('node:fs'); const requested=(process.env.WORKFLOW_INPUT_ENVIRONMENT || process.env.REPOSITORY_S3TE_ENVIRONMENT || '').trim(); const config=JSON.parse(fs.readFileSync('s3te.config.json','utf8')); const known=Object.keys(config.environments ?? {}); if(!requested){ console.error('Missing GitHub repository variable S3TE_ENVIRONMENT.'); process.exit(1);} const environmentConfig=config.environments?.[requested]; if(!environmentConfig){ console.error('Unknown environment ' + requested + '. Known environments: ' + (known.length > 0 ? known.join(', ') : '(none)') + '.'); process.exit(1);} fs.appendFileSync(process.env.GITHUB_OUTPUT, 'environment=' + requested + '\\n'); fs.appendFileSync(process.env.GITHUB_OUTPUT, 'aws_region=' + environmentConfig.awsRegion + '\\n');"
289
398
  - name: Configure AWS credentials
290
399
  uses: aws-actions/configure-aws-credentials@v4
291
400
  with:
292
401
  aws-access-key-id: \${{ secrets.AWS_ACCESS_KEY_ID }}
293
402
  aws-secret-access-key: \${{ secrets.AWS_SECRET_ACCESS_KEY }}
294
- aws-region: eu-central-1
403
+ aws-region: \${{ steps.s3te-config.outputs.aws_region }}
295
404
  - name: Validate project
296
- run: npx s3te validate
297
- - name: Sync project sources to the S3TE code bucket
298
- run: npx s3te sync --env dev
405
+ run: npx s3te validate --env \${{ steps.s3te-config.outputs.environment }}
406
+ - name: Sync project sources to the S3TE code buckets
407
+ run: npx s3te sync --env \${{ steps.s3te-config.outputs.environment }}
299
408
  `;
300
409
  }
301
410
 
@@ -720,8 +829,12 @@ export async function runProjectTests(projectDir) {
720
829
  const testsDir = await fileExists(path.join(projectDir, "offline", "tests"))
721
830
  ? "offline/tests"
722
831
  : "tests";
832
+ const testFiles = await listProjectTestFiles(path.join(projectDir, testsDir));
833
+ const testArgs = testFiles.length > 0
834
+ ? testFiles.map((relativePath) => normalizePath(path.join(testsDir, relativePath)))
835
+ : [testsDir];
723
836
  return new Promise((resolve) => {
724
- const child = spawn(process.execPath, ["--test", testsDir], {
837
+ const child = spawn(process.execPath, ["--test", ...testArgs], {
725
838
  cwd: projectDir,
726
839
  stdio: "inherit"
727
840
  });
@@ -771,6 +884,9 @@ export async function syncProject(projectDir, config, options = {}) {
771
884
  }
772
885
 
773
886
  export async function doctorProject(projectDir, configPath, options = {}) {
887
+ const ensureAwsCliAvailableFn = options.ensureAwsCliAvailableFn ?? ensureAwsCliAvailable;
888
+ const ensureAwsCredentialsFn = options.ensureAwsCredentialsFn ?? ensureAwsCredentials;
889
+ const runAwsCliFn = options.runAwsCliFn ?? runAwsCli;
774
890
  const checks = [];
775
891
  const majorVersion = Number(process.versions.node.split(".")[0]);
776
892
 
@@ -795,7 +911,7 @@ export async function doctorProject(projectDir, configPath, options = {}) {
795
911
  }
796
912
 
797
913
  try {
798
- await ensureAwsCliAvailable({ cwd: projectDir });
914
+ await ensureAwsCliAvailableFn({ cwd: projectDir });
799
915
  checks.push({ name: "aws-cli", ok: true, message: "AWS CLI available" });
800
916
  } catch (error) {
801
917
  checks.push({ name: "aws-cli", ok: false, message: error.message });
@@ -812,7 +928,7 @@ export async function doctorProject(projectDir, configPath, options = {}) {
812
928
  }
813
929
 
814
930
  try {
815
- await ensureAwsCredentials({
931
+ await ensureAwsCredentialsFn({
816
932
  region: options.config.environments[options.environment].awsRegion,
817
933
  profile: options.profile,
818
934
  cwd: projectDir
@@ -821,6 +937,38 @@ export async function doctorProject(projectDir, configPath, options = {}) {
821
937
  } catch (error) {
822
938
  checks.push({ name: "aws-auth", ok: false, message: error.message });
823
939
  }
940
+
941
+ const environmentConfig = options.config.environments[options.environment];
942
+ const awsAuthCheck = checks.at(-1);
943
+ if (awsAuthCheck?.name === "aws-auth" && awsAuthCheck.ok) {
944
+ try {
945
+ const cloudFrontAliases = collectEnvironmentCloudFrontAliases(options.config, options.environment);
946
+ const certificate = await describeAcmCertificate({
947
+ certificateArn: environmentConfig.certificateArn,
948
+ profile: options.profile,
949
+ cwd: projectDir,
950
+ runAwsCliFn
951
+ });
952
+ const certificateDomains = [
953
+ certificate.DomainName,
954
+ ...(certificate.SubjectAlternativeNames ?? [])
955
+ ];
956
+ const uncoveredAliases = findUncoveredCertificateHosts(cloudFrontAliases, certificateDomains);
957
+ checks.push({
958
+ name: "acm-certificate",
959
+ ok: uncoveredAliases.length === 0,
960
+ message: uncoveredAliases.length === 0
961
+ ? `ACM certificate covers ${cloudFrontAliases.length} CloudFront alias(es) for ${options.environment}`
962
+ : `ACM certificate ${environmentConfig.certificateArn} does not cover these CloudFront aliases for ${options.environment}: ${uncoveredAliases.join(", ")}.`
963
+ });
964
+ } catch (error) {
965
+ checks.push({
966
+ name: "acm-certificate",
967
+ ok: false,
968
+ message: `Could not inspect ACM certificate ${environmentConfig.certificateArn}: ${error.message}`
969
+ });
970
+ }
971
+ }
824
972
  }
825
973
 
826
974
  return checks;