@lsts_tech/infra 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +158 -0
- package/dist/bin/init.d.ts +9 -0
- package/dist/bin/init.d.ts.map +1 -0
- package/dist/bin/init.js +315 -0
- package/dist/bin/init.js.map +1 -0
- package/dist/stacks/Dns.d.ts +69 -0
- package/dist/stacks/Dns.d.ts.map +1 -0
- package/dist/stacks/Dns.js +57 -0
- package/dist/stacks/Dns.js.map +1 -0
- package/dist/stacks/ExpoSite.d.ts +72 -0
- package/dist/stacks/ExpoSite.d.ts.map +1 -0
- package/dist/stacks/ExpoSite.js +49 -0
- package/dist/stacks/ExpoSite.js.map +1 -0
- package/dist/stacks/NextSite.d.ts +86 -0
- package/dist/stacks/NextSite.d.ts.map +1 -0
- package/dist/stacks/NextSite.js +60 -0
- package/dist/stacks/NextSite.js.map +1 -0
- package/dist/stacks/Pipeline.d.ts +128 -0
- package/dist/stacks/Pipeline.d.ts.map +1 -0
- package/dist/stacks/Pipeline.js +311 -0
- package/dist/stacks/Pipeline.js.map +1 -0
- package/dist/stacks/index.d.ts +41 -0
- package/dist/stacks/index.d.ts.map +1 -0
- package/dist/stacks/index.js +38 -0
- package/dist/stacks/index.js.map +1 -0
- package/docs/CLI.md +59 -0
- package/docs/CONFIGURATION.md +78 -0
- package/docs/EXAMPLES.md +9 -0
- package/examples/next-and-expo/infra.config.ts +104 -0
- package/examples/next-only/infra.config.ts +60 -0
- package/package.json +102 -0
- package/schemas/pipeline.schema.json +25 -0
- package/scripts/cleanup-orphan-lambdas.sh +102 -0
- package/scripts/delete-amplify-app.sh +50 -0
- package/scripts/ensure-pipelines.sh +144 -0
- package/scripts/ensure-secrets.sh +58 -0
- package/scripts/postdeploy-update-dns.sh +158 -0
- package/scripts/predeploy-checks.sh +192 -0
- package/scripts/pulumi-deploy.sh +29 -0
- package/scripts/sst-deploy.sh +79 -0
- package/templates/buildspec.yml +77 -0
- package/templates/ensure-pipelines.sh +117 -0
- package/templates/env.example +38 -0
- package/templates/infra.config.ts +199 -0
- package/templates/secrets.schema.json +20 -0
- package/templates/sst-env.d.ts +50 -0
- package/templates/sst.config.ts +28 -0
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
import { resolveDomain, createNextSite, createPipeline } from "@lsts_tech/infra";
|
|
2
|
+
|
|
3
|
+
const secrets = {
|
|
4
|
+
DatabaseUrl: new sst.Secret("DatabaseUrl"),
|
|
5
|
+
AuthSecret: new sst.Secret("AuthSecret"),
|
|
6
|
+
};
|
|
7
|
+
|
|
8
|
+
export function createInfrastructure() {
|
|
9
|
+
const stage = $app.stage;
|
|
10
|
+
const rootDomain = process.env.INFRA_ROOT_DOMAIN ?? "example.com";
|
|
11
|
+
|
|
12
|
+
const { domain, domainName } = resolveDomain({
|
|
13
|
+
rootDomain,
|
|
14
|
+
stage,
|
|
15
|
+
});
|
|
16
|
+
|
|
17
|
+
const { url } = createNextSite({
|
|
18
|
+
appPath: "../../apps/web",
|
|
19
|
+
id: `web-${stage}`,
|
|
20
|
+
domain,
|
|
21
|
+
environment: {
|
|
22
|
+
NEXT_PUBLIC_APP_URL: `https://${domainName}`,
|
|
23
|
+
DATABASE_URL: secrets.DatabaseUrl.value,
|
|
24
|
+
AUTH_SECRET: secrets.AuthSecret.value,
|
|
25
|
+
},
|
|
26
|
+
});
|
|
27
|
+
|
|
28
|
+
const outputs: Record<string, unknown> = {
|
|
29
|
+
siteUrl: url,
|
|
30
|
+
domain: domainName,
|
|
31
|
+
};
|
|
32
|
+
|
|
33
|
+
if (stage === "production") {
|
|
34
|
+
const repo = process.env.INFRA_PIPELINE_REPO ?? "myorg/myrepo";
|
|
35
|
+
const prefix = process.env.INFRA_PIPELINE_PREFIX ?? "myapp";
|
|
36
|
+
|
|
37
|
+
const prod = createPipeline({
|
|
38
|
+
name: `${prefix}-prod`,
|
|
39
|
+
repo,
|
|
40
|
+
branch: process.env.INFRA_PIPELINE_BRANCH_PROD ?? "main",
|
|
41
|
+
stage: "production",
|
|
42
|
+
projectTag: process.env.INFRA_PROJECT_TAG ?? prefix,
|
|
43
|
+
});
|
|
44
|
+
|
|
45
|
+
const dev = createPipeline({
|
|
46
|
+
name: `${prefix}-dev`,
|
|
47
|
+
repo,
|
|
48
|
+
branch: process.env.INFRA_PIPELINE_BRANCH_DEV ?? "develop",
|
|
49
|
+
stage: "dev",
|
|
50
|
+
projectTag: process.env.INFRA_PROJECT_TAG ?? prefix,
|
|
51
|
+
});
|
|
52
|
+
|
|
53
|
+
outputs.pipelines = {
|
|
54
|
+
production: prod.pipelineName,
|
|
55
|
+
dev: dev.pipelineName,
|
|
56
|
+
};
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
return outputs;
|
|
60
|
+
}
|
package/package.json
ADDED
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@lsts_tech/infra",
|
|
3
|
+
"version": "1.0.0",
|
|
4
|
+
"description": "Reusable SST v3 infrastructure constructs for deploying Next.js and Expo web apps from monorepos.",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"license": "MIT",
|
|
7
|
+
"author": "LSTS Solutions",
|
|
8
|
+
"repository": {
|
|
9
|
+
"type": "git",
|
|
10
|
+
"url": "git+https://github.com/lstechnologysolutions/infra.git"
|
|
11
|
+
},
|
|
12
|
+
"homepage": "https://github.com/lstechnologysolutions/infra#readme",
|
|
13
|
+
"bugs": {
|
|
14
|
+
"url": "https://github.com/lstechnologysolutions/infra/issues"
|
|
15
|
+
},
|
|
16
|
+
"keywords": [
|
|
17
|
+
"sst",
|
|
18
|
+
"infrastructure",
|
|
19
|
+
"aws",
|
|
20
|
+
"nextjs",
|
|
21
|
+
"codepipeline",
|
|
22
|
+
"monorepo",
|
|
23
|
+
"turborepo",
|
|
24
|
+
"iac",
|
|
25
|
+
"cloudfront",
|
|
26
|
+
"route53",
|
|
27
|
+
"lambda",
|
|
28
|
+
"opennext",
|
|
29
|
+
"expo",
|
|
30
|
+
"expo-web",
|
|
31
|
+
"static-site",
|
|
32
|
+
"white-label"
|
|
33
|
+
],
|
|
34
|
+
"main": "./dist/stacks/index.js",
|
|
35
|
+
"types": "./dist/stacks/index.d.ts",
|
|
36
|
+
"exports": {
|
|
37
|
+
".": {
|
|
38
|
+
"types": "./dist/stacks/index.d.ts",
|
|
39
|
+
"import": "./dist/stacks/index.js"
|
|
40
|
+
},
|
|
41
|
+
"./stacks/Dns": {
|
|
42
|
+
"types": "./dist/stacks/Dns.d.ts",
|
|
43
|
+
"import": "./dist/stacks/Dns.js"
|
|
44
|
+
},
|
|
45
|
+
"./stacks/NextSite": {
|
|
46
|
+
"types": "./dist/stacks/NextSite.d.ts",
|
|
47
|
+
"import": "./dist/stacks/NextSite.js"
|
|
48
|
+
},
|
|
49
|
+
"./stacks/ExpoSite": {
|
|
50
|
+
"types": "./dist/stacks/ExpoSite.d.ts",
|
|
51
|
+
"import": "./dist/stacks/ExpoSite.js"
|
|
52
|
+
},
|
|
53
|
+
"./stacks/Pipeline": {
|
|
54
|
+
"types": "./dist/stacks/Pipeline.d.ts",
|
|
55
|
+
"import": "./dist/stacks/Pipeline.js"
|
|
56
|
+
}
|
|
57
|
+
},
|
|
58
|
+
"bin": {
|
|
59
|
+
"lsts-infra": "dist/bin/init.js"
|
|
60
|
+
},
|
|
61
|
+
"files": [
|
|
62
|
+
"dist/",
|
|
63
|
+
"scripts/",
|
|
64
|
+
"schemas/",
|
|
65
|
+
"templates/",
|
|
66
|
+
"docs/",
|
|
67
|
+
"examples/",
|
|
68
|
+
"README.md",
|
|
69
|
+
"LICENSE"
|
|
70
|
+
],
|
|
71
|
+
"scripts": {
|
|
72
|
+
"build": "rm -rf dist && tsc",
|
|
73
|
+
"prepublishOnly": "npm run build",
|
|
74
|
+
"dev": "sst dev",
|
|
75
|
+
"deploy": "sst deploy",
|
|
76
|
+
"deploy:dev": "sst deploy --stage dev",
|
|
77
|
+
"deploy:prod": "sst deploy --stage production",
|
|
78
|
+
"remove": "sst remove",
|
|
79
|
+
"console": "sst console",
|
|
80
|
+
"secrets": "sst secrets",
|
|
81
|
+
"ensure-secrets": "./scripts/ensure-secrets.sh",
|
|
82
|
+
"lint": "echo 'no lint configured'",
|
|
83
|
+
"check-types": "tsc --noEmit",
|
|
84
|
+
"clean": "rm -rf .sst dist node_modules"
|
|
85
|
+
},
|
|
86
|
+
"publishConfig": {
|
|
87
|
+
"access": "public",
|
|
88
|
+
"registry": "https://registry.npmjs.org/"
|
|
89
|
+
},
|
|
90
|
+
"dependencies": {
|
|
91
|
+
"sst": "^3.7.0",
|
|
92
|
+
"@pulumi/aws": "^6.66.0",
|
|
93
|
+
"@pulumi/pulumi": "^3.145.0"
|
|
94
|
+
},
|
|
95
|
+
"devDependencies": {
|
|
96
|
+
"@types/node": "^22.13.9",
|
|
97
|
+
"typescript": "^5"
|
|
98
|
+
},
|
|
99
|
+
"engines": {
|
|
100
|
+
"node": ">=20"
|
|
101
|
+
}
|
|
102
|
+
}
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
{
|
|
2
|
+
"$schema": "http://json-schema.org/draft-07/schema#",
|
|
3
|
+
"title": "Pipeline Schema",
|
|
4
|
+
"description": "Defines a minimal pipeline configuration model that the infra package expects for creating CodePipeline/CodeBuild resources.",
|
|
5
|
+
"type": "object",
|
|
6
|
+
"properties": {
|
|
7
|
+
"name": {
|
|
8
|
+
"type": "string",
|
|
9
|
+
"description": "Logical name for the pipeline (eg. web-cicd)"
|
|
10
|
+
},
|
|
11
|
+
"branch": {
|
|
12
|
+
"type": "string",
|
|
13
|
+
"description": "Git branch the pipeline will build and deploy"
|
|
14
|
+
},
|
|
15
|
+
"stage": {
|
|
16
|
+
"type": "string",
|
|
17
|
+
"description": "SST stage to deploy (eg. dev, production)"
|
|
18
|
+
},
|
|
19
|
+
"buildSpecPath": {
|
|
20
|
+
"type": "string",
|
|
21
|
+
"description": "Path (in repo) to a buildspec file used by CodeBuild"
|
|
22
|
+
}
|
|
23
|
+
},
|
|
24
|
+
"required": ["name", "branch", "stage"]
|
|
25
|
+
}
|
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
#!/usr/bin/env bash
set -euo pipefail

# cleanup-orphan-lambdas.sh
# Finds Lambda functions that match a given prefix but are not owned by any
# CloudFormation stack (orphans). Checks recent usage and optionally deletes
# them when invoked with --delete.
#
# Required environment variables:
#   PREFIX — Lambda function name prefix to search for (e.g., "myapp")
# Optional:
#   AWS_REGION      — defaults to us-east-1 (or pass --region)
#   THRESHOLD_DAYS  — CloudWatch invocation lookback window, defaults to 30

AWS_REGION=${AWS_REGION:-us-east-1}
THRESHOLD_DAYS=${THRESHOLD_DAYS:-30}

if [ -z "${PREFIX:-}" ]; then
  echo "ERROR: PREFIX environment variable is required (e.g., PREFIX=myapp)"
  echo "This is the Lambda function name prefix to search for orphans."
  exit 1
fi

# jq is required to parse list-functions output below; fail early and clearly.
if ! command -v jq >/dev/null 2>&1; then
  echo "ERROR: jq is required by this script but was not found in PATH"
  exit 1
fi

usage() {
  echo "Usage: PREFIX=myapp $0 [--delete] [--region REGION]"
  echo "  --delete   Actually delete identified orphan functions"
  exit 1
}

DO_DELETE=0
while [ "$#" -gt 0 ]; do
  case "$1" in
    --delete) DO_DELETE=1; shift ;;
    --region) AWS_REGION="$2"; shift 2 ;;
    -h|--help) usage ;;
    *) echo "Unknown arg: $1"; usage ;;
  esac
done

echo "Region: $AWS_REGION Prefix: $PREFIX Threshold: $THRESHOLD_DAYS days"

echo "Listing CloudFormation stacks for project..."
stacks=$(aws cloudformation list-stacks --region "$AWS_REGION" \
  --query "StackSummaries[?contains(StackName,'$PREFIX') && StackStatus!='DELETE_COMPLETE'].StackName" --output text)

echo "Found stacks: $stacks"

owned_funcs_file=$(mktemp)
trap 'rm -f "$owned_funcs_file"' EXIT

if [ -n "$stacks" ]; then
  for s in $stacks; do
    echo "Collecting functions from stack: $s"
    # PhysicalResourceId for AWS::Lambda::Function resources is the function
    # NAME (not the ARN), so the ownership file contains function names.
    aws cloudformation list-stack-resources --stack-name "$s" --region "$AWS_REGION" \
      --query "StackResourceSummaries[?ResourceType=='AWS::Lambda::Function'].PhysicalResourceId" --output text >> "$owned_funcs_file" || true
  done
fi

echo "Listing all Lambda functions with prefix '$PREFIX'..."
all_funcs_json=$(aws lambda list-functions --region "$AWS_REGION" --query "Functions[?starts_with(FunctionName, '$PREFIX')].[FunctionName,FunctionArn,LastModified]" --output json)

ORPHAN_FILE="/tmp/${PREFIX}-orphan-lambdas.txt"
# BUGFIX: truncate any list left over from a previous dry run so results
# don't accumulate across invocations.
: > "$ORPHAN_FILE"

# The metric window is the same for every function; compute it once outside
# the loop. NOTE(review): `date -d "... days ago"` is GNU date — on BSD/macOS
# use `date -u -v-${THRESHOLD_DAYS}d` instead.
end_time=$(date -u +%Y-%m-%dT%H:%M:%SZ)
start_time=$(date -u -d "$THRESHOLD_DAYS days ago" +%Y-%m-%dT%H:%M:%SZ)

echo "$all_funcs_json" | jq -r '.[] | @base64' | while read -r item; do
  _jq() { echo "$item" | base64 --decode | jq -r "$1"; }
  name=$(_jq '.[0]')
  arn=$(_jq '.[1]')
  last_modified=$(_jq '.[2]')

  # Ownership check. BUGFIX: the owned file holds function NAMES
  # (PhysicalResourceId), so grepping for the full ARN never matched and
  # stack-owned functions were wrongly flagged as orphans. Match the name.
  if grep -qF "$name" "$owned_funcs_file"; then
    echo "SKIP (owned) $name"
    continue
  fi

  invocations=$(aws cloudwatch get-metric-statistics --region "$AWS_REGION" \
    --namespace AWS/Lambda --metric-name Invocations --statistics Sum \
    --dimensions Name=FunctionName,Value="$name" --start-time "$start_time" --end-time "$end_time" --period 86400 \
    --query "Datapoints[].Sum" --output text || echo "")

  # No datapoints at all means zero invocations in the window.
  if [ -z "$invocations" ] || [ "$invocations" = "None" ]; then
    inv_sum=0
  else
    inv_sum=$(echo "$invocations" | awk '{sum += $1} END {print sum+0}')
  fi

  if [ "$inv_sum" -eq 0 ]; then
    echo "ORPHAN CANDIDATE: $name (lastModified: $last_modified) — no invocations in last $THRESHOLD_DAYS days"
    if [ "$DO_DELETE" -eq 1 ]; then
      echo "Deleting $name..."
      aws lambda delete-function --function-name "$name" --region "$AWS_REGION"
      echo "Deleted $name"
    else
      echo "$name" >> "$ORPHAN_FILE"
    fi
  else
    echo "IN USE: $name (invocations last $THRESHOLD_DAYS days: $inv_sum)"
  fi
done

echo "Completed. Dry-run list at $ORPHAN_FILE (if any). Rerun with --delete to remove."
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
#!/usr/bin/env bash
set -euo pipefail

# delete-amplify-app.sh
# Delete an AWS Amplify app by app id or by matching a domain/branch.
# Usage:
#   ./delete-amplify-app.sh --app-id <appId>
#   ./delete-amplify-app.sh --domain example.com
# This is non-reversible; ensure you have backups before running.

if ! command -v aws >/dev/null 2>&1; then
  echo "aws CLI required"
  exit 1
fi

APP_ID=""
DOMAIN=""

while [[ $# -gt 0 ]]; do
  case "$1" in
    --app-id) APP_ID="$2"; shift 2 ;;
    --domain) DOMAIN="$2"; shift 2 ;;
    -h|--help) echo "Usage: $0 [--app-id <appId>] [--domain <domain>]"; exit 0 ;;
    *) echo "Unknown arg: $1"; exit 1 ;;
  esac
done

if [[ -z "$APP_ID" && -z "$DOMAIN" ]]; then
  echo "Either --app-id or --domain is required"
  exit 1
fi

if [[ -n "$DOMAIN" ]]; then
  # node is only needed on the --domain lookup path; check it here.
  if ! command -v node >/dev/null 2>&1; then
    echo "node is required for --domain lookup"
    exit 1
  fi

  echo "Searching Amplify apps for domain: $DOMAIN"
  # The --query projection renames appId -> id and defaultDomain -> domain,
  # so only `id`/`name`/`domain` exist on each entry below.
  apps=$(aws amplify list-apps --query "apps[].{id:appId,name:name,domain:defaultDomain}" --output json)
  # Pick the first app whose default domain or name contains the requested
  # domain. (The original fell back to `it.appId` three times, but that key
  # never exists after the --query rename — only `id` does.)
  APP_ID=$(echo "$apps" | node -e "
    const apps = JSON.parse(require('fs').readFileSync(0, 'utf8')) || [];
    const needle = process.argv[1];
    for (const app of apps) {
      if ((app.domain || '').includes(needle) || (app.name || '').includes(needle)) {
        console.log(app.id);
        process.exit(0);
      }
    }
  " "$DOMAIN" || true)
fi

if [[ -z "$APP_ID" ]]; then
  echo "Amplify app not found for domain; please specify --app-id"
  exit 1
fi

echo "Deleting Amplify app: $APP_ID"
aws amplify delete-app --app-id "$APP_ID"

echo "Deleted Amplify app $APP_ID (request submitted)."
echo "Note: You may want to remove associated DNS records and artifacts if needed."
|
|
@@ -0,0 +1,144 @@
|
|
|
1
|
+
#!/usr/bin/env bash
set -euo pipefail

# ensure-pipelines.sh
#
# AWS-only helper (v1.0.0) to ensure configured CodePipelines exist.
# Missing pipelines are created by triggering an SST deploy for their mapped stage.
#
# Usage:
#   APPROVE=true bash scripts/ensure-pipelines.sh

SCRIPT_DIR=$(cd "$(dirname "$0")" && pwd)
INFRA_DIR=$(cd "$SCRIPT_DIR/.." && pwd)

# Load consumer overrides from infra/.env; `set -a` exports every variable
# the file defines so child processes (aws, sst-deploy.sh) see them too.
if [ -f "$INFRA_DIR/.env" ]; then
  set -a
  # shellcheck disable=SC1091
  source "$INFRA_DIR/.env"
  set +a
fi

# DOMAIN_ROOT takes precedence; INFRA_ROOT_DOMAIN is the fallback spelling.
DOMAIN_ROOT=${DOMAIN_ROOT:-${INFRA_ROOT_DOMAIN:-}}
if [ -z "$DOMAIN_ROOT" ]; then
  echo "ERROR: Set DOMAIN_ROOT or INFRA_ROOT_DOMAIN (e.g., example.com)" >&2
  exit 1
fi

REGION=${AWS_REGION:-us-east-1}
PIPELINE_PREFIX=${INFRA_PIPELINE_PREFIX:-myapp}
PIPELINES_CSV=${INFRA_PIPELINES:-production,dev,mobile}
REPO_DEFAULT=${INFRA_PIPELINE_REPO:-myorg/myrepo}
BRANCH_PROD=${INFRA_PIPELINE_BRANCH_PROD:-main}
BRANCH_DEV=${INFRA_PIPELINE_BRANCH_DEV:-develop}
BRANCH_MOBILE=${INFRA_PIPELINE_BRANCH_MOBILE:-mobile}

# Pipeline-name -> stage/repo/branch lookup tables.
# NOTE(review): associative arrays require bash >= 4; macOS /bin/bash (3.2)
# will fail on these lines — run with a newer bash there.
declare -A PIPELINE_STAGE=()
declare -A PIPELINE_REPO=()
declare -A PIPELINE_BRANCH=()

# add_pipeline <stage> [branch]
# Registers one pipeline in the lookup tables above. The pipeline name is
# "<prefix>-<stage>", except production, which is shortened to "<prefix>-prod".
add_pipeline() {
  local stage=$1
  local name="${PIPELINE_PREFIX}-${stage}"
  local branch=${2:-main}

  if [ "$stage" = "production" ]; then
    name="${PIPELINE_PREFIX}-prod"
  fi

  PIPELINE_STAGE["$name"]="$stage"
  PIPELINE_REPO["$name"]="$REPO_DEFAULT"
  PIPELINE_BRANCH["$name"]="$branch"
}

# Parse the comma-separated stage list; `xargs` trims surrounding whitespace.
IFS=',' read -r -a STAGE_LIST <<< "$PIPELINES_CSV"
for raw in "${STAGE_LIST[@]}"; do
  stage=$(echo "$raw" | xargs)
  case "$stage" in
    production) add_pipeline "production" "$BRANCH_PROD" ;;
    dev) add_pipeline "dev" "$BRANCH_DEV" ;;
    mobile) add_pipeline "mobile" "$BRANCH_MOBILE" ;;
    ""|none) ;;
    *)
      # Allow custom stage names while defaulting to main branch.
      add_pipeline "$stage" "main"
      ;;
  esac
done

# Locates sst-deploy.sh, preferring a sibling copy, then node_modules copies
# (consumer install layouts). Prints the first hit; returns 1 if none found.
resolve_sst_deploy_script() {
  local candidates=(
    "$SCRIPT_DIR/sst-deploy.sh"
    "$INFRA_DIR/node_modules/@lsts_tech/infra/scripts/sst-deploy.sh"
    "$INFRA_DIR/../node_modules/@lsts_tech/infra/scripts/sst-deploy.sh"
  )

  for candidate in "${candidates[@]}"; do
    if [ -f "$candidate" ]; then
      echo "$candidate"
      return 0
    fi
  done

  return 1
}

if ! command -v aws >/dev/null 2>&1; then
  echo "aws CLI not found in PATH" >&2
  exit 1
fi

if [ ${#PIPELINE_STAGE[@]} -eq 0 ]; then
  echo "No pipelines configured in scripts/ensure-pipelines.sh. Nothing to do."
  exit 0
fi

# Explicit SST_DEPLOY_SCRIPT env var wins; otherwise auto-resolve.
SST_DEPLOY_SCRIPT=${SST_DEPLOY_SCRIPT:-}
if [ -z "$SST_DEPLOY_SCRIPT" ]; then
  SST_DEPLOY_SCRIPT=$(resolve_sst_deploy_script || true)
fi

if [ -z "$SST_DEPLOY_SCRIPT" ]; then
  echo "Could not locate sst-deploy.sh. Set SST_DEPLOY_SCRIPT explicitly." >&2
  exit 1
fi

cd "$INFRA_DIR"

# Probe both naming conventions: the bare name ("<prefix>-prod") and the
# "-pipeline"-suffixed variant some stacks create.
MISSING=()
for NAME in "${!PIPELINE_STAGE[@]}"; do
  PIPELINE_NAME="${NAME}-pipeline"
  echo "Checking for pipeline names: $NAME or $PIPELINE_NAME (region: $REGION)"

  if aws codepipeline get-pipeline --name "$NAME" --region "$REGION" >/dev/null 2>&1 || \
     aws codepipeline get-pipeline --name "$PIPELINE_NAME" --region "$REGION" >/dev/null 2>&1; then
    echo "Pipeline exists: $PIPELINE_NAME"
  else
    echo "Pipeline missing: $PIPELINE_NAME"
    MISSING+=("$NAME")
  fi
done

if [ ${#MISSING[@]} -eq 0 ]; then
  echo "All pipelines present; nothing to do."
  exit 0
fi

echo "Missing pipelines: ${MISSING[*]}"

# Safety gate: never create resources without an explicit APPROVE=true.
if [ "${APPROVE:-}" != "true" ]; then
  echo "To create missing pipelines, re-run with APPROVE=true."
  exit 1
fi

# Creation path: delegate to sst-deploy.sh per missing pipeline.
# NOTE(review): REPO/BRANCH are echoed but not passed to the deploy script —
# presumably sst-deploy.sh reads them from its own config/env; confirm.
for NAME in "${MISSING[@]}"; do
  STAGE=${PIPELINE_STAGE[$NAME]:-production}
  REPO=${PIPELINE_REPO[$NAME]:-$REPO_DEFAULT}
  BRANCH=${PIPELINE_BRANCH[$NAME]:-main}

  echo "Creating pipeline '$NAME' via SST deploy (stage: $STAGE, repo: $REPO, branch: $BRANCH)"
  APPROVE=true STACK="$STAGE" bash "$SST_DEPLOY_SCRIPT"
done

echo "Done. Current pipelines:"
aws codepipeline list-pipelines --region "$REGION" --query "pipelines[].name" --output table || true
|
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
#!/usr/bin/env bash
set -euo pipefail

# ensure-secrets.sh
# Idempotently ensure SST secrets for a given stage using the schema.
# Usage: ./ensure-secrets.sh <stage> [--values-file path/to/values.env]
#        ./ensure-secrets.sh <stage> [path/to/values.env]   (legacy positional)

STAGE=${1:-dev}
shift || true

# BUGFIX: the usage line documents a --values-file flag, but the original
# only read a positional $2. Accept both spellings, keeping the legacy
# positional form working.
VALUES_FILE=""
while [[ $# -gt 0 ]]; do
  case "$1" in
    --values-file) VALUES_FILE="$2"; shift 2 ;;
    *) VALUES_FILE="$1"; shift ;;
  esac
done

ROOT_DIR=$(cd "$(dirname "$0")/.." && pwd)
SCHEMA_FILE="$ROOT_DIR/schemas/secrets.schema.json"
# The published package ships the schema under templates/; fall back to it
# when the consumer has not copied one into schemas/.
if [[ ! -f "$SCHEMA_FILE" && -f "$ROOT_DIR/templates/secrets.schema.json" ]]; then
  SCHEMA_FILE="$ROOT_DIR/templates/secrets.schema.json"
fi
if [[ ! -f "$SCHEMA_FILE" ]]; then
  echo "ERROR: secrets schema not found at $SCHEMA_FILE"
  exit 1
fi

if ! command -v node >/dev/null 2>&1; then
  echo "node is required to run this script"
  exit 1
fi

if ! command -v npx >/dev/null 2>&1; then
  echo "npx is required to run this script"
  exit 1
fi

echo "Using secrets schema: $SCHEMA_FILE"

# Extract keys from schema using node (single-line invocation avoids heredoc issues)
KEYS=$(node -e "const fs=require('fs');const p=process.argv[1];const schema=JSON.parse(fs.readFileSync(p,'utf8'));console.log(Object.keys(schema.properties||{}).join(' '));" "$SCHEMA_FILE")

declare -A VALS

# Load values-file if provided (simple KEY=VALUE lines)
if [[ -n "$VALUES_FILE" && -f "$VALUES_FILE" ]]; then
  echo "Loading values from $VALUES_FILE"
  while IFS='=' read -r k v; do
    k=$(echo "$k" | tr -d ' \t\r\n')
    v=$(echo "$v" | sed -e 's/^\s*//;s/\s*$//')
    if [[ -n "$k" ]]; then
      VALS["$k"]="$v"
    fi
  done < <(grep -E '^[A-Za-z0-9_]+=.*' "$VALUES_FILE" || true)
fi

echo "Ensuring SST secrets for stage: $STAGE"
for key in $KEYS; do
  value="${VALS[$key]:-}"
  if [[ -z "$value" ]]; then
    if [[ "$key" == "AuthSecret" ]]; then
      # Generate a strong random value for AuthSecret when none was supplied.
      value=$(node -e "console.log(require('crypto').randomBytes(32).toString('hex'))")
    else
      # BUGFIX: the original stored the literal placeholder "__MISSING__" as
      # the secret value for any key without a supplied value. Skip instead,
      # so real secrets are never silently overwritten with junk.
      echo "WARNING: no value provided for $key; skipping (supply it via --values-file)"
      continue
    fi
  fi

  echo "Setting secret: $key"
  # Use `sst secret set` which is idempotent for values; it will create/update.
  npx --yes sst secret set "$key" "$value" --stage "$STAGE"
done

echo "All secrets ensured for stage: $STAGE"
|