@liflig/cdk 2.17.1 → 2.18.1
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- package/package.json +10 -10
- package/assets/cloudtrail-slack-integration-lambda/main.py +0 -267
- package/assets/pipeline-slack-notification-lambda/index.py +0 -300
- package/assets/prepare-cdk-source-lambda/index.py +0 -159
- package/assets/slack-alarm-lambda/index.py +0 -103
- package/lib/alarms/database-alarms.d.ts +0 -125
- package/lib/alarms/database-alarms.js +0 -171
- package/lib/alarms/index.d.ts +0 -3
- package/lib/alarms/index.js +0 -10
- package/lib/alarms/service-alarms.d.ts +0 -145
- package/lib/alarms/service-alarms.js +0 -148
- package/lib/alarms/ses-alarms.d.ts +0 -67
- package/lib/alarms/ses-alarms.js +0 -49
- package/lib/alarms/slack-alarm.d.ts +0 -25
- package/lib/alarms/slack-alarm.js +0 -47
- package/lib/bastion-host.d.ts +0 -41
- package/lib/bastion-host.js +0 -86
- package/lib/bin/cdk-create-snapshots.d.ts +0 -2
- package/lib/bin/fetch-pipeline-variables.d.ts +0 -2
- package/lib/build-artifacts/index.d.ts +0 -68
- package/lib/build-artifacts/index.js +0 -118
- package/lib/cdk-deploy/cdk-deploy.d.ts +0 -63
- package/lib/cdk-deploy/cdk-deploy.js +0 -175
- package/lib/cdk-deploy/index.d.ts +0 -1
- package/lib/cdk-deploy/index.js +0 -6
- package/lib/cdk-deploy/start-deploy-handler.d.ts +0 -8
- package/lib/cdk-deploy/start-deploy-handler.js +0 -72
- package/lib/cdk-deploy/status-handler.d.ts +0 -6
- package/lib/cdk-deploy/status-handler.js +0 -83
- package/lib/cdk-pipelines/cloud-assembly-lookup-handler.d.ts +0 -6
- package/lib/cdk-pipelines/cloud-assembly-lookup-handler.js +0 -63
- package/lib/cdk-pipelines/index.d.ts +0 -3
- package/lib/cdk-pipelines/index.js +0 -10
- package/lib/cdk-pipelines/liflig-cdk-pipeline.d.ts +0 -110
- package/lib/cdk-pipelines/liflig-cdk-pipeline.js +0 -232
- package/lib/cdk-pipelines/slack-notification.d.ts +0 -51
- package/lib/cdk-pipelines/slack-notification.js +0 -54
- package/lib/cdk-pipelines/variables.d.ts +0 -15
- package/lib/cdk-pipelines/variables.js +0 -80
- package/lib/cloudtrail-slack-integration/cloudtrail-slack-integration.d.ts +0 -47
- package/lib/cloudtrail-slack-integration/cloudtrail-slack-integration.js +0 -211
- package/lib/cloudtrail-slack-integration/index.d.ts +0 -1
- package/lib/cloudtrail-slack-integration/index.js +0 -6
- package/lib/configure-parameters/configure-parameters.d.ts +0 -61
- package/lib/configure-parameters/configure-parameters.js +0 -94
- package/lib/configure-parameters/index.d.ts +0 -1
- package/lib/configure-parameters/index.js +0 -6
- package/lib/cross-region-ssm-parameter.d.ts +0 -13
- package/lib/cross-region-ssm-parameter.js +0 -46
- package/lib/ecs/cluster.d.ts +0 -25
- package/lib/ecs/cluster.js +0 -70
- package/lib/ecs/fargate-service.d.ts +0 -62
- package/lib/ecs/fargate-service.js +0 -99
- package/lib/ecs/index.d.ts +0 -3
- package/lib/ecs/index.js +0 -10
- package/lib/ecs/listener-rule.d.ts +0 -25
- package/lib/ecs/listener-rule.js +0 -27
- package/lib/ecs-update-image/artifact-status.d.ts +0 -39
- package/lib/ecs-update-image/artifact-status.js +0 -41
- package/lib/ecs-update-image/ecs-update-image.d.ts +0 -41
- package/lib/ecs-update-image/ecs-update-image.js +0 -98
- package/lib/ecs-update-image/index.d.ts +0 -3
- package/lib/ecs-update-image/index.js +0 -10
- package/lib/ecs-update-image/start-deploy-handler.d.ts +0 -6
- package/lib/ecs-update-image/start-deploy-handler.js +0 -104
- package/lib/ecs-update-image/status-handler.d.ts +0 -11
- package/lib/ecs-update-image/status-handler.js +0 -74
- package/lib/ecs-update-image/tag.d.ts +0 -47
- package/lib/ecs-update-image/tag.js +0 -67
- package/lib/feature-flags.d.ts +0 -18
- package/lib/feature-flags.js +0 -48
- package/lib/griid/artefact-bucket.d.ts +0 -7
- package/lib/griid/artefact-bucket.js +0 -30
- package/lib/griid/index.d.ts +0 -4
- package/lib/griid/index.js +0 -18
- package/lib/hosted-zone-with-param.d.ts +0 -29
- package/lib/hosted-zone-with-param.js +0 -65
- package/lib/index.d.ts +0 -32
- package/lib/kinesis/index.d.ts +0 -1
- package/lib/kinesis/index.js +0 -6
- package/lib/kinesis/kinesis-to-datadog-stream.d.ts +0 -28
- package/lib/kinesis/kinesis-to-datadog-stream.js +0 -126
- package/lib/load-balancer/index.d.ts +0 -1
- package/lib/load-balancer/index.js +0 -6
- package/lib/load-balancer/load-balancer.d.ts +0 -16
- package/lib/load-balancer/load-balancer.js +0 -60
- package/lib/pipelines/conventions.d.ts +0 -14
- package/lib/pipelines/conventions.js +0 -24
- package/lib/pipelines/deploy-env.d.ts +0 -18
- package/lib/pipelines/deploy-env.js +0 -96
- package/lib/pipelines/index.d.ts +0 -2
- package/lib/pipelines/index.js +0 -8
- package/lib/pipelines/liflig-cdk-deployer-deps.d.ts +0 -13
- package/lib/pipelines/liflig-cdk-deployer-deps.js +0 -35
- package/lib/pipelines/pipeline.d.ts +0 -78
- package/lib/pipelines/pipeline.js +0 -224
- package/lib/platform/index.d.ts +0 -1
- package/lib/platform/index.js +0 -7
- package/lib/platform/platform.d.ts +0 -37
- package/lib/platform/platform.js +0 -57
- package/lib/rds/database.d.ts +0 -49
- package/lib/rds/database.js +0 -60
- package/lib/rds/index.d.ts +0 -1
- package/lib/rds/index.js +0 -6
- package/lib/ses/configurationsetdeliveryoptions/index.d.ts +0 -26
- package/lib/ses/configurationsetdeliveryoptions/index.js +0 -48
- package/lib/ses/configurationsetsnsdestination/handler.d.ts +0 -17
- package/lib/ses/configurationsetsnsdestination/handler.js +0 -75
- package/lib/ses/configurationsetsnsdestination/index.d.ts +0 -29
- package/lib/ses/configurationsetsnsdestination/index.js +0 -75
- package/lib/ses/index.d.ts +0 -4
- package/lib/ses/index.js +0 -12
- package/lib/ses/sesdomain/handler.d.ts +0 -10
- package/lib/ses/sesdomain/handler.js +0 -82
- package/lib/ses/sesdomain/index.d.ts +0 -57
- package/lib/ses/sesdomain/index.js +0 -94
- package/lib/ses/sesverifyemail/handler.d.ts +0 -9
- package/lib/ses/sesverifyemail/handler.js +0 -25
- package/lib/ses/sesverifyemail/index.d.ts +0 -13
- package/lib/ses/sesverifyemail/index.js +0 -51
- package/lib/snapshots.d.ts +0 -4
- package/lib/snapshots.js +0 -214
- package/lib/ssm-parameter-backed-resource.d.ts +0 -45
- package/lib/ssm-parameter-backed-resource.js +0 -67
- package/lib/ssm-parameter-reader.d.ts +0 -21
- package/lib/ssm-parameter-reader.js +0 -48
- package/lib/tags.d.ts +0 -8
- package/lib/tags.js +0 -36
- package/lib/utils.d.ts +0 -2
- package/lib/utils.js +0 -17
- package/lib/webapp/index.d.ts +0 -3
- package/lib/webapp/index.js +0 -10
- package/lib/webapp/monitor.d.ts +0 -187
- package/lib/webapp/monitor.js +0 -156
- package/lib/webapp/security-headers.d.ts +0 -38
- package/lib/webapp/security-headers.js +0 -129
- package/lib/webapp/webapp.d.ts +0 -116
- package/lib/webapp/webapp.js +0 -118
- package/lib/webapp-deploy-via-role.d.ts +0 -25
- package/lib/webapp-deploy-via-role.js +0 -32
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@liflig/cdk",
-  "version": "2.17.1",
+  "version": "2.18.1",
   "description": "CDK library for Liflig",
   "repository": {
     "type": "git",
@@ -37,25 +37,25 @@
     "@aws-cdk/assert": "2.68.0",
     "@commitlint/cli": "18.4.3",
     "@commitlint/config-conventional": "18.4.3",
-    "@types/aws-lambda": "8.10.
-    "@types/jest": "29.5.
-    "@types/node": "18.
+    "@types/aws-lambda": "8.10.130",
+    "@types/jest": "29.5.11",
+    "@types/node": "18.19.3",
     "@typescript-eslint/eslint-plugin": "5.62.0",
     "@typescript-eslint/parser": "5.62.0",
     "aws-cdk": "2.111.0",
     "aws-cdk-lib": "2.111.0",
     "constructs": "10.3.0",
-    "eslint": "8.
-    "eslint-config-prettier": "9.
+    "eslint": "8.55.0",
+    "eslint-config-prettier": "9.1.0",
     "eslint-plugin-prettier": "5.0.1",
     "husky": "8.0.3",
     "jest": "29.7.0",
     "jest-cdk-snapshot": "2.0.1",
-    "prettier": "3.1.
-    "semantic-release": "22.0.
+    "prettier": "3.1.1",
+    "semantic-release": "22.0.12",
     "ts-jest": "29.1.1",
-    "ts-node": "10.9.
-    "typescript": "5.3.
+    "ts-node": "10.9.2",
+    "typescript": "5.3.3"
   },
   "dependencies": {
     "@capraconsulting/webapp-deploy-lambda": "2.1.4",
package/assets/cloudtrail-slack-integration-lambda/main.py
REMOVED

@@ -1,267 +0,0 @@
-#!/usr/bin/env python
-
-"""
-Transform CloudTrail events to payloads formatted for Slack's API, and send them
-directly to Slack or through an SQS FIFO queue for deduplication.
-
-The code below contains entrypoints for two Lambda functions (prefixed with `handler_`).
-"""
-
-import os
-import logging
-import json
-import urllib.request
-import re
-import boto3
-
-logger = logging.getLogger()
-logger.setLevel(logging.INFO)
-
-def augment_strings_with_friendly_names(strings, friendly_names):
-    """A helper method for augmenting various values (e.g., AWS account ID) in
-    a list of strings with a more friendly name"""
-    # We avoid replacing values that are directly prefixed and/or suffixed with ':'
-    # as it is most likely an ARN or similiar. We don't want to replace account IDs
-    # inside ARNs as this would look messy.This is a quite basic heuristic, but it should allow
-    # us to easily replace most relevant values (e.g., principal ID, account ID, etc.) with
-    # friendly names without a complicated regex.
-    pattern = re.compile("|".join([f"(?<!:)({re.escape(key)})(?!:)" for key in friendly_names]))
-    return [pattern.sub(lambda m: m[0] + f" ({friendly_names[m.string[m.start():m.end()]]})", s) for s in strings]
-
-
-def get_slack_payload_for_assume_role_event(event, friendly_names):
-    """Parse a CloudTrail event related to the API call sts:AssumeRole,
-    and return a Slack-formatted attachment"""
-    event_detail = event["detail"]
-    recipient_account_id = event_detail["recipientAccountId"]
-    request_parameters = event_detail.get("requestParameters", {}) or {}
-
-    timestamp = event_detail["eventTime"]
-    user_identity = event_detail["userIdentity"]
-    principal_id = user_identity["principalId"]
-    principal_account_id = user_identity["accountId"]
-    source_identity = request_parameters.get("sourceIdentity", "")
-    source_ip = event_detail.get("sourceIPAddress", "")
-    role_arn = request_parameters.get("roleArn", "")
-
-    fallback = f"Sensitive role accessed in '{recipient_account_id}'"
-    pretext_messages = [f":warning: Sensitive role in `{recipient_account_id}` assumed by"]
-    if principal_id.startswith("AIDA"):
-        pretext_messages.append("IAM user")
-    elif principal_id.startswith("AROA"):
-        # The other part of the principal ID for a role is the name of the session
-        principal_id = principal_id.split(":")[0]
-        pretext_messages.append(f"IAM role")
-    else:
-        pretext_messages.append("principal")
-    pretext_messages.append(f"in `{principal_account_id}`")
-    pretext = " ".join(pretext_messages)
-
-    text = [
-        f"*Role ARN:* `{role_arn}`",
-        f"*Principal Account ID:* `{principal_account_id}`",
-        f"*Principal ID:* `{principal_id}`",
-        f"*Source IP:* `{source_ip}`",
-        f"*Source Identity:* `{source_identity}`" if source_identity else "",
-        f"*Timestamp:* `{timestamp}`",
-    ]
-    text = "\n".join(line for line in text if line)
-
-    try:
-        pretext, fallback, text = augment_strings_with_friendly_names([pretext, fallback, text], friendly_names)
-    except:
-        logger.exception("Failed to augment strings with friendly names")
-    return {
-        "attachments": [
-            {
-                "pretext": pretext,
-                "color": "warning",
-                "text": text,
-                "fallback": fallback,
-                "mrkdwn_in": ["pretext", "text"],
-            }
-        ]
-    }
-
-
-def get_fallback_slack_payload_for_event(
-    event, friendly_names, fallback_parse_behavior=""
-):
-    """Parse a generic CloudTrail event related to an API call
-    and return a Slack-formatted attachment"""
-    event_detail = event["detail"]
-    event_name = event_detail["eventName"]
-    event_type = event_detail["eventType"]
-    event_time = event_detail["eventTime"]
-    recipient_account_id = event_detail["recipientAccountId"]
-    pretext = f":warning: CloudTrail event in account `{recipient_account_id}`"
-    fallback = f"CloudTrail event in account '{recipient_account_id}'"
-    if fallback_parse_behavior == "DUMP_EVENT":
-        text = "\n".join(
-            ["*Event:*", "```", json.dumps(event, sort_keys=True, indent=2), "```"]
-        )
-    else:
-        error_message = event_detail.get("errorMessage", "")
-        # This may be None, in which case we force it to an empty dict instead
-        response_element = (event_detail.get("responseElements", {}) or {}).get(
-            event_name, ""
-        )
-        user_identity = event_detail["userIdentity"]
-        principal_id = user_identity.get("principalId", "")
-        principal_type = user_identity.get("type", "")
-        principal_account_id = user_identity.get("accountId", "")
-        principal_arn = user_identity.get("arn", "")
-        source_ip = event_detail.get("sourceIPAddress", "")
-        resources = event_detail.get("resources", []) or []
-        text = [
-            f"*Event Type:* `{event_type}`",
-            f"*Event Name:* `{event_name}`",
-            f"*Event Time:* `{event_time}`",
-            f"*Error Message:* `{error_message}`" if error_message else "",
-            f"*Response Code:* `{response_element}`" if response_element else "",
-            f"*Principal Type:* `{principal_type}`" if principal_type else "",
-            f"*Principal Account ID:* `{principal_account_id}`"
-            if principal_account_id
-            else "",
-            f"*Principal ARN:* `{principal_arn}`" if principal_arn else "",
-            f"*Principal ID:* `{principal_id}`" if principal_id else "",
-            f"*Source IP:* `{source_ip}`" if source_ip else "",
-            f"*Resources:*\n```{json.dumps(resources, indent=2, sort_keys=True)}\n```"
-            if len(resources)
-            else "",
-        ]
-        # Filter out empty strings
-        text = "\n".join(line for line in text if line)
-
-    try:
-        pretext, fallback, text = augment_strings_with_friendly_names([pretext, fallback, text], friendly_names)
-    except:
-        logger.exception("Failed to augment strings with friendly names")
-
-    return {
-        "attachments": [
-            {
-                "pretext": pretext,
-                "color": "warning",
-                "text": text,
-                "fallback": fallback,
-                "mrkdwn_in": ["pretext", "text"],
-            }
-        ]
-    }
-
-
-def get_augmented_friendly_names(event, friendly_names):
-    """Return an augmented dictionary containing the alias of the current
-    AWS account as a friendly name for the current account ID if relevant"""
-    augmented_friendly_names = {**friendly_names}
-    try:
-        event_account_id = event["account"]
-        event_detail = event["detail"]
-        recipient_account_id = event_detail["recipientAccountId"]
-        if (
-            not friendly_names.get(event_account_id, "")
-            and event_account_id == recipient_account_id
-        ):
-            logger.info(
-                "No friendly name was supplied for current account '%s', so looking up account alias",
-                event_account_id,
-            )
-            iam = boto3.client("iam")
-            aliases = iam.list_account_aliases()["AccountAliases"]
-            if len(aliases):
-                augmented_friendly_names[event_account_id] = aliases[0]
-    except:
-        logger.exception("Failed to look up alias of current AWS account")
-
-    return augmented_friendly_names
-
-
-def post_to_slack(slack_payload, slack_webhook_url):
-    """Post a payload to Slack's webhook API"""
-    encoded_slack_payload = json.dumps(slack_payload).encode("utf-8")
-    try:
-        slack_request = urllib.request.Request(
-            slack_webhook_url,
-            data=encoded_slack_payload,
-            headers={"Content-Type": "application/json"},
-        )
-        urllib.request.urlopen(slack_request)
-    except:
-        logger.exception("Failed to post to Slack")
-        raise
-
-
-def handler_event_transformer(event, context):
-    """Lambda handler for the event transformer Lambda"""
-    logger.info("Triggered with event: %s", json.dumps(event, indent=2))
-
-    friendly_names = json.loads(os.environ["FRIENDLY_NAMES"])
-    slack_webhook_url = os.environ["SLACK_WEBHOOK_URL"]
-    slack_channel = os.environ["SLACK_CHANNEL"]
-    sqs_queue_url = os.environ.get("SQS_QUEUE_URL", "")
-    fallback_parse_behavior = os.environ.get("FALLBACK_PARSE_BEHAVIOR", "")
-    deduplicate_events = os.environ.get("DEDUPLICATE_EVENTS", "false") == "true"
-
-    friendly_names = get_augmented_friendly_names(
-        event, friendly_names
-    )
-
-    if not event["detail-type"].endswith("via CloudTrail"):
-        logger.warn("Invalid event received")
-        return
-
-    slack_payload = {}
-    try:
-        if event["detail"]["eventName"] == "AssumeRole":
-            slack_payload = get_slack_payload_for_assume_role_event(
-                event, friendly_names
-            )
-    except:
-        logger.exception("Failed to parse event using predefined schema")
-    if not slack_payload:
-        logger.warn("Using a fallback schema to parse event")
-        slack_payload = get_fallback_slack_payload_for_event(
-            event,
-            friendly_names,
-            fallback_parse_behavior=fallback_parse_behavior,
-        )
-    slack_payload = {**slack_payload, "channel": slack_channel}
-
-    if deduplicate_events and sqs_queue_url:
-        logger.info("Sending message to SQS for deduplication")
-        deduplication_id = (
-            event["detail"].get("requestID", "")
-            or event["detail"].get("eventID", "")
-            or event["id"]
-        )
-        body = {
-            "slackWebhookUrl": slack_webhook_url,
-            "slackPayload": slack_payload,
-        }
-
-        sqs = boto3.client("sqs")
-        sqs.send_message(
-            QueueUrl=sqs_queue_url,
-            MessageBody=json.dumps(body),
-            MessageDeduplicationId=deduplication_id,
-            MessageGroupId=deduplication_id,
-        )
-    else:
-        logger.info("Sending message directly to Slack")
-        post_to_slack(slack_payload, slack_webhook_url)
-
-
-def handler_slack_forwarder(event, context):
-    """Lambda handler for the Slack forwarder Lambda"""
-    logger.info("Triggered with event: %s", json.dumps(event, indent=2))
-    records = event["Records"]
-    for record in records:
-        body = json.loads(record["body"])
-        slack_channel = body.get("slackChannel", "")
-        slack_webhook_url = body.get("slackWebhookUrl", "")
-        slack_payload = {
-            **body["slackPayload"],
-            **({"channel": slack_channel} if slack_channel else {}),
-        }
-        post_to_slack(slack_payload, slack_webhook_url)
package/assets/pipeline-slack-notification-lambda/index.py
REMOVED

@@ -1,300 +0,0 @@
-import json
-import logging
-import os
-import typing as t
-from urllib.error import HTTPError, URLError
-from urllib.parse import quote
-from urllib.request import Request, urlopen
-
-import boto3
-
-client = boto3.client("codepipeline")
-s3 = boto3.client("s3")
-secrets_manager = boto3.client("secretsmanager")
-
-ACCOUNT_FRIENDLY_NAME = os.getenv("ACCOUNT_FRIENDLY_NAME", None)
-SLACK_URL_SECRET_NAME = os.getenv("SLACK_URL_SECRET_NAME", None)
-NOTIFICATION_LEVEL = os.getenv("NOTIFICATION_LEVEL", "WARN")
-
-# Example event:
-#
-# {
-#   version: '0',
-#   id: '01896665-9ef2-b417-cccd-333acf6a9320',
-#   'detail-type': 'CodePipeline Pipeline Execution State Change',
-#   source: 'aws.codepipeline',
-#   account: '123456789123',
-#   time: '2021-06-11T23:02:20Z',
-#   region: 'eu-west-1',
-#   resources: [
-#     'arn:aws:codepipeline:eu-west-1:123456789123:hst-tester-pipeline-PipelineC660917D-OLEMKURBGPBG'
-#   ],
-#   detail: {
-#     pipeline: 'hst-tester-pipeline-PipelineC660917D-OLEMKURBGPBG',
-#     'execution-id': '91daefbf-658a-4c6f-ad9e-13de7df5eaeb',
-#     state: 'SUCCEEDED',
-#     version: 3
-#   }
-# }
-
-STYLES = {
-    "FAILED": {"emoji_prefix": ":x:", "message_color": "#ff0000"},
-    "SUCCEEDED": {"emoji_prefix": ":white_check_mark:", "message_color": "#008000"},
-    "STARTED": {"emoji_prefix": ":rocket:", "message_color": "#00bfff"},
-    "SUPERSEDED": {"emoji_prefix": ":arrow_heading_down:", "message_color": "#373737"},
-}
-
-
-class TriggerMetadataVcs(t.TypedDict):
-    branchName: str
-    commitAuthor: str
-    commitHash: str
-    repositoryName: str
-    repositoryOwner: str
-
-
-class TriggerMetadataCi(t.TypedDict):
-    type: t.Literal["JENKINS", "GITHUB_ACTIONS"]
-    triggeredBy: str
-
-
-class TriggerMetadata(t.TypedDict):
-    version: t.Literal["0.1"]
-    ci: TriggerMetadataCi
-    vcs: TriggerMetadataVcs
-
-
-def get_masked_slack_webhook_url(slack_webhook_url: str):
-    """
-    Return a string that masks the final path segment of a Slack webhook URL.
-    The URL is typically formatted as such: https://hooks.slack.com/services/T00000000/B00000000/XXXXXXXXXXXXXXXXXXXXXXXX
-    """
-    trimmed_url = slack_webhook_url.rstrip("/")
-    [*url, final_path_segment] = trimmed_url.split("/")
-    return "/".join(url + [len(final_path_segment) * "*"])
-
-
-def get_previous_pipeline_execution(
-    pipeline_name: str, execution_id: str
-) -> dict | None:
-    """Return the newest past execution that either succeeded or failed"""
-
-    pipeline_executions = client.list_pipeline_executions(
-        pipelineName=pipeline_name,
-    )["pipelineExecutionSummaries"]
-
-    is_next = False
-
-    for item in pipeline_executions:
-        # Only include succeeded and failed executions.
-        # This is needed to properly detect a recovered
-        # pipeline (failed -> succeeded, even if e.g. superseeded in between).
-        if is_next and item["status"] in ["Succeeded", "Failed"]:
-            return item
-        if item["pipelineExecutionId"] == execution_id:
-            is_next = True
-
-    return None
-
-
-def get_text_for_failed(pipeline_name: str, execution_id: str, state: str) -> str:
-    """Return a Slack-formatted string that describes failed pipeline execution actions,
-    if any, in a failed execution"""
-
-    # We only show details if the pipeline has completed with failed state.
-    # If we were to process this for other events such as started events,
-    # we would include details from after the event took place.
-    if state != "FAILED":
-        return ""
-
-    action_executions = client.list_action_executions(
-        pipelineName=pipeline_name,
-        filter={
-            "pipelineExecutionId": execution_id,
-        },
-    )["actionExecutionDetails"]
-
-    failures = []
-
-    for action_execution in action_executions:
-        if action_execution["status"] == "Failed":
-            stage = action_execution["stageName"]
-            action = action_execution["actionName"]
-            summary = action_execution["output"]["executionResult"][
-                "externalExecutionSummary"
-            ]
-            failures.append(f"{stage}.{action} failed:\n{summary}")
-
-    result = ""
-
-    if len(failures):
-        result = "```\n" + "\n\n".join(failures) + "\n```"
-
-    return result
-
-
-def get_metadata_from_trigger(
-    pipeline_name: str, execution_id: str
-) -> TriggerMetadata | None:
-    """Returns a dictionary containing the metadata, if any, stored in the trigger file"""
-
-    action_response = client.list_action_executions(
-        pipelineName=pipeline_name, filter={"pipelineExecutionId": execution_id}
-    )
-
-    action = next(
-        (
-            action
-            for action in action_response["actionExecutionDetails"]
-            if action["input"]["actionTypeId"]["category"] == "Source"
-            and action["input"]["actionTypeId"]["provider"] == "S3"
-        ),
-        None,
-    )
-    if action:
-        s3_version_id = action["output"]["outputVariables"]["VersionId"]
-        artifacts_bucket = action["input"]["configuration"]["S3Bucket"]
-        trigger_file = action["input"]["configuration"]["S3ObjectKey"]
-
-        try:
-            response = s3.get_object(
-                Bucket=artifacts_bucket, Key=trigger_file, VersionId=s3_version_id
-            )
-            file_content = response["Body"].read().decode("utf-8")
-            ci_metadata = json.loads(file_content)
-            return ci_metadata
-        except Exception as e:
-            print(f"Could not obtain metadata from trigger file: {e}")
-
-    return None
-
-
-def get_footer_text(ci_metadata: TriggerMetadata) -> str:
-    """Returns the footer text for the Slack message if the metadata contains the required fields"""
-
-    footer_text = ""
-    if ci_metadata and ci_metadata.get("version", "") == "0.1":
-        ci = ci_metadata.get("ci", {})
-        vcs = ci_metadata.get("vcs", {})
-        triggering_actor = ci.get("triggeredBy", "")
-        repository_owner = vcs.get("repositoryOwner", "")
-        repository_name = vcs.get("repositoryName", "")
-        short_commit_hash = vcs.get("commitHash", "")[:8]
-        branch_name = vcs.get("branchName", "")
-        if (
-            triggering_actor
-            and repository_owner
-            and repository_name
-            and short_commit_hash
-            and branch_name
-        ):
-            commit_link_text = f"{repository_owner}/{repository_name} @ {branch_name} ({short_commit_hash})"
-            github_commit_link = f"https://github.com/{repository_owner}/{repository_name}/commit/{short_commit_hash}"
-            footer_text = f"Triggered by {triggering_actor} in <{github_commit_link}|{commit_link_text}>"
-
-    return footer_text
-
-
-def get_secret(secret):
-    try:
-        return secrets_manager.get_secret_value(SecretId=secret)["SecretString"]
-    except Exception as e:
-        raise Exception(f"Error retrieving secret: {e}")
-
-
-def handler(event, context):
-
-    print("Event: " + json.dumps(event))
-
-    region = event["region"]
-    account_id = event["account"]
-    pipeline_name = event["detail"]["pipeline"]
-    state = event["detail"]["state"]
-    execution_id = event["detail"]["execution-id"]
-
-    if state in ("STARTED", "SUPERSEDED") and NOTIFICATION_LEVEL != "DEBUG":
-        return
-
-    if event["detail-type"] != "CodePipeline Pipeline Execution State Change":
-        print("Ignoring unknown event")
-        return
-
-    previous_pipeline_execution = get_previous_pipeline_execution(
-        pipeline_name, execution_id
-    )
-
-    previous_failed = (
-        previous_pipeline_execution is not None
-        and previous_pipeline_execution["status"] == "Failed"
-    )
-
-    # We still show succeeded for the first event or when
-    # the previous execution was not success.
-    if state == "SUCCEEDED" and (NOTIFICATION_LEVEL == "WARN"):
-        if previous_pipeline_execution is not None and not previous_failed:
-            print("Ignoring succeeded event")
-            return
-
-    pipeline_url = f"https://{region}.console.aws.amazon.com/codesuite/codepipeline/pipelines/{quote(pipeline_name, safe='')}/view"
-    execution_url = f"https://{region}.console.aws.amazon.com/codesuite/codepipeline/pipelines/{quote(pipeline_name, safe='')}/executions/{execution_id}/timeline"
-
-    account_friendly_name = f"in {ACCOUNT_FRIENDLY_NAME or account_id}"
-
-    state_text = state
-    if previous_failed and state == "SUCCEEDED":
-        state_text += " (previously failed)"
-
-    ci_metadata = get_metadata_from_trigger(pipeline_name, execution_id)
-
-    footer_text = get_footer_text(ci_metadata)
-
-    style = STYLES.get(
-        state, {"emoji_prefix": ":question:", "message_color": "#ffdf00"}
-    )
-
-    emoji_prefix = style["emoji_prefix"]
-    message_color = style["message_color"]
-
-    text_for_failed = get_text_for_failed(pipeline_name, execution_id, state)
-
-    text = "\n".join(
-        s
-        for s in [f"*Execution:* <{execution_url}|{execution_id}>", text_for_failed]
-        if s
-    )
-    pretext = " ".join(
-        s
-        for s in [
-            f"{emoji_prefix} Pipeline *<{pipeline_url}|{pipeline_name}>*",
-            f"*{state_text}*",
-            account_friendly_name,
-        ]
-        if s
-    )
-    fallback = f"Pipeline {pipeline_name} {state}"
-    attachments = [
-        {
-            "footer": footer_text,
-            "color": message_color,
-            "text": text,
-            "mrkdwn_in": ["text", "pretext"],
-            "pretext": pretext,
-            "fallback": fallback,
-        },
-    ]
-
-    slack_message = {
-        "attachments": attachments,
-    }
-
-    slack_url = get_secret(SLACK_URL_SECRET_NAME)
-
-    req = Request(slack_url, json.dumps(slack_message).encode("utf-8"))
-    print(f"Posting message to Slack URL {get_masked_slack_webhook_url(slack_url)}")
-    try:
-        response = urlopen(req)
-        response.read()
-    except HTTPError as e:
-        raise Exception(f"Request to slack failed: {e.code} {e.reason}")
-    except URLError as e:
-        raise Exception(f"Server connection to slack failed: {e.reason}")