cartography 0.104.0rc2__py3-none-any.whl → 0.105.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cartography/_version.py +2 -2
- cartography/cli.py +26 -1
- cartography/client/aws/__init__.py +19 -0
- cartography/client/aws/ecr.py +51 -0
- cartography/config.py +8 -0
- cartography/data/indexes.cypher +0 -3
- cartography/data/jobs/cleanup/aws_import_lambda_cleanup.json +1 -1
- cartography/graph/cleanupbuilder.py +151 -41
- cartography/intel/aws/acm.py +124 -0
- cartography/intel/aws/cloudtrail.py +3 -38
- cartography/intel/aws/ecr.py +8 -2
- cartography/intel/aws/iam.py +1 -1
- cartography/intel/aws/lambda_function.py +1 -1
- cartography/intel/aws/resources.py +2 -2
- cartography/intel/aws/s3.py +195 -4
- cartography/intel/aws/secretsmanager.py +19 -5
- cartography/intel/aws/sqs.py +36 -90
- cartography/intel/entra/__init__.py +11 -0
- cartography/intel/entra/groups.py +151 -0
- cartography/intel/entra/ou.py +21 -5
- cartography/intel/trivy/__init__.py +161 -0
- cartography/intel/trivy/scanner.py +363 -0
- cartography/models/aws/acm/certificate.py +75 -0
- cartography/models/aws/cloudtrail/trail.py +24 -0
- cartography/models/aws/s3/notification.py +24 -0
- cartography/models/aws/secretsmanager/secret_version.py +0 -2
- cartography/models/aws/sqs/__init__.py +0 -0
- cartography/models/aws/sqs/queue.py +89 -0
- cartography/models/core/nodes.py +15 -2
- cartography/models/entra/group.py +91 -0
- cartography/models/trivy/__init__.py +0 -0
- cartography/models/trivy/findings.py +66 -0
- cartography/models/trivy/fix.py +66 -0
- cartography/models/trivy/package.py +71 -0
- cartography/sync.py +2 -0
- {cartography-0.104.0rc2.dist-info → cartography-0.105.0.dist-info}/METADATA +3 -2
- {cartography-0.104.0rc2.dist-info → cartography-0.105.0.dist-info}/RECORD +42 -30
- cartography/intel/aws/efs.py +0 -93
- cartography/models/aws/efs/mount_target.py +0 -52
- /cartography/models/aws/{efs → acm}/__init__.py +0 -0
- {cartography-0.104.0rc2.dist-info → cartography-0.105.0.dist-info}/WHEEL +0 -0
- {cartography-0.104.0rc2.dist-info → cartography-0.105.0.dist-info}/entry_points.txt +0 -0
- {cartography-0.104.0rc2.dist-info → cartography-0.105.0.dist-info}/licenses/LICENSE +0 -0
- {cartography-0.104.0rc2.dist-info → cartography-0.105.0.dist-info}/top_level.txt +0 -0
cartography/intel/trivy/scanner.py
ADDED

@@ -0,0 +1,363 @@
+import json
+import logging
+from typing import Any
+
+import boto3
+from neo4j import Session
+
+from cartography.client.core.tx import load
+from cartography.graph.job import GraphJob
+from cartography.models.trivy.findings import TrivyImageFindingSchema
+from cartography.models.trivy.fix import TrivyFixSchema
+from cartography.models.trivy.package import TrivyPackageSchema
+from cartography.stats import get_stats_client
+from cartography.util import timeit
+
+logger = logging.getLogger(__name__)
+stat_handler = get_stats_client(__name__)
+
+
+def _validate_packages(package_list: list[dict]) -> list[dict]:
+    """
+    Validates that each package has the required fields.
+    Returns only packages that have both InstalledVersion and PkgName.
+    """
+    validated_packages: list[dict] = []
+    for pkg in package_list:
+        if (
+            "InstalledVersion" in pkg
+            and pkg["InstalledVersion"]
+            and "PkgName" in pkg
+            and pkg["PkgName"]
+        ):
+            validated_packages.append(pkg)
+        else:
+            logger.warning(
+                "Package object does not have required fields `InstalledVersion` or `PkgName` - skipping."
+            )
+    return validated_packages
+
+
+def transform_scan_results(
+    results: list[dict], image_digest: str
+) -> tuple[list[dict], list[dict], list[dict]]:
+    """
+    Transform raw Trivy scan results into a format suitable for loading into Neo4j.
+    Returns a tuple of (findings_list, packages_list, fixes_list).
+    """
+    findings_list = []
+    packages_list = []
+    fixes_list = []
+
+    for scan_class in results:
+        # Sometimes a scan class will have no vulns and Trivy will leave the key undefined instead of showing [].
+        if "Vulnerabilities" in scan_class and scan_class["Vulnerabilities"]:
+            for result in scan_class["Vulnerabilities"]:
+                # Transform finding data
+                finding = {
+                    "id": f'TIF|{result["VulnerabilityID"]}',
+                    "VulnerabilityID": result["VulnerabilityID"],
+                    "cve_id": result["VulnerabilityID"],
+                    "Description": result.get("Description"),
+                    "LastModifiedDate": result.get("LastModifiedDate"),
+                    "PrimaryURL": result.get("PrimaryURL"),
+                    "PublishedDate": result.get("PublishedDate"),
+                    "Severity": result["Severity"],
+                    "SeveritySource": result.get("SeveritySource"),
+                    "Title": result.get("Title"),
+                    "nvd_v2_score": None,
+                    "nvd_v2_vector": None,
+                    "nvd_v3_score": None,
+                    "nvd_v3_vector": None,
+                    "redhat_v3_score": None,
+                    "redhat_v3_vector": None,
+                    "ubuntu_v3_score": None,
+                    "ubuntu_v3_vector": None,
+                    "Class": scan_class["Class"],
+                    "Type": scan_class["Type"],
+                    "ImageDigest": image_digest,  # For AFFECTS relationship
+                }
+
+                # Add CVSS scores if available
+                if "CVSS" in result:
+                    if "nvd" in result["CVSS"]:
+                        nvd = result["CVSS"]["nvd"]
+                        finding["nvd_v2_score"] = nvd.get("V2Score")
+                        finding["nvd_v2_vector"] = nvd.get("V2Vector")
+                        finding["nvd_v3_score"] = nvd.get("V3Score")
+                        finding["nvd_v3_vector"] = nvd.get("V3Vector")
+                    if "redhat" in result["CVSS"]:
+                        redhat = result["CVSS"]["redhat"]
+                        finding["redhat_v3_score"] = redhat.get("V3Score")
+                        finding["redhat_v3_vector"] = redhat.get("V3Vector")
+                    if "ubuntu" in result["CVSS"]:
+                        ubuntu = result["CVSS"]["ubuntu"]
+                        finding["ubuntu_v3_score"] = ubuntu.get("V3Score")
+                        finding["ubuntu_v3_vector"] = ubuntu.get("V3Vector")
+
+                findings_list.append(finding)
+
+                # Transform package data
+                package_id = f"{result['InstalledVersion']}|{result['PkgName']}"
+                packages_list.append(
+                    {
+                        "id": package_id,
+                        "InstalledVersion": result["InstalledVersion"],
+                        "PkgName": result["PkgName"],
+                        "Class": scan_class["Class"],
+                        "Type": scan_class["Type"],
+                        "ImageDigest": image_digest,  # For DEPLOYED relationship
+                        "FindingId": finding["id"],  # For AFFECTS relationship
+                    }
+                )
+
+                # Transform fix data if available
+                if result.get("FixedVersion") is not None:
+                    fixes_list.append(
+                        {
+                            "id": f"{result['FixedVersion']}|{result['PkgName']}",
+                            "FixedVersion": result["FixedVersion"],
+                            "PackageId": package_id,
+                            "FindingId": finding["id"],
+                        }
+                    )
+
+    # Validate packages before returning
+    packages_list = _validate_packages(packages_list)
+    return findings_list, packages_list, fixes_list
+
+
+@timeit
+def get_json_files_in_s3(
+    s3_bucket: str, s3_prefix: str, boto3_session: boto3.Session
+) -> set[str]:
+    """
+    List S3 objects in the S3 prefix.
+
+    Args:
+        s3_bucket: S3 bucket name containing scan results
+        s3_prefix: S3 prefix path containing scan results
+        boto3_session: boto3 session for dependency injection
+
+    Returns:
+        Set of S3 object keys for JSON files in the S3 prefix
+    """
+    s3_client = boto3_session.client("s3")
+
+    try:
+        # List objects in the S3 prefix
+        paginator = s3_client.get_paginator("list_objects_v2")
+        page_iterator = paginator.paginate(Bucket=s3_bucket, Prefix=s3_prefix)
+        results = set()
+
+        for page in page_iterator:
+            if "Contents" not in page:
+                continue
+
+            for obj in page["Contents"]:
+                object_key = obj["Key"]
+
+                # Skip non-JSON files
+                if not object_key.endswith(".json"):
+                    continue
+
+                # Skip files that don't start with our prefix
+                if not object_key.startswith(s3_prefix):
+                    continue
+
+                results.add(object_key)
+
+    except Exception as e:
+        logger.error(
+            f"Error listing S3 objects in bucket {s3_bucket} with prefix {s3_prefix}: {e}"
+        )
+        raise
+
+    logger.info(f"Found {len(results)} json files in s3://{s3_bucket}/{s3_prefix}")
+    return results
+
+
+@timeit
+def cleanup(neo4j_session: Session, common_job_parameters: dict[str, Any]) -> None:
+    """
+    Run cleanup jobs for Trivy nodes.
+    """
+    logger.info("Running Trivy cleanup")
+    GraphJob.from_node_schema(TrivyImageFindingSchema(), common_job_parameters).run(
+        neo4j_session
+    )
+    GraphJob.from_node_schema(TrivyPackageSchema(), common_job_parameters).run(
+        neo4j_session
+    )
+    GraphJob.from_node_schema(TrivyFixSchema(), common_job_parameters).run(
+        neo4j_session
+    )
+
+
+@timeit
+def load_scan_vulns(
+    neo4j_session: Session,
+    findings_list: list[dict[str, Any]],
+    update_tag: int,
+) -> None:
+    """
+    Load TrivyImageFinding nodes into Neo4j.
+    """
+    load(
+        neo4j_session,
+        TrivyImageFindingSchema(),
+        findings_list,
+        lastupdated=update_tag,
+    )
+
+
+@timeit
+def load_scan_packages(
+    neo4j_session: Session,
+    packages_list: list[dict[str, Any]],
+    update_tag: int,
+) -> None:
+    """
+    Load TrivyPackage nodes into Neo4j.
+    """
+    load(
+        neo4j_session,
+        TrivyPackageSchema(),
+        packages_list,
+        lastupdated=update_tag,
+    )
+
+
+@timeit
+def load_scan_fixes(
+    neo4j_session: Session,
+    fixes_list: list[dict[str, Any]],
+    update_tag: int,
+) -> None:
+    """
+    Load TrivyFix nodes into Neo4j.
+    """
+    load(
+        neo4j_session,
+        TrivyFixSchema(),
+        fixes_list,
+        lastupdated=update_tag,
+    )
+
+
+@timeit
+def read_scan_results_from_s3(
+    boto3_session: boto3.Session,
+    s3_bucket: str,
+    s3_object_key: str,
+    image_uri: str,
+) -> tuple[list[dict], str | None]:
+    """
+    Read and parse Trivy scan results from S3.
+
+    Args:
+        boto3_session: boto3 session for S3 operations
+        s3_bucket: S3 bucket containing scan results
+        s3_object_key: S3 object key for the scan results
+        image_uri: ECR image URI (for logging purposes)
+
+    Returns:
+        Tuple of (list of scan result dictionaries from the "Results" key, image digest)
+    """
+    s3_client = boto3_session.client("s3")
+
+    # Read JSON scan results from S3
+    logger.debug(f"Reading scan results from S3: s3://{s3_bucket}/{s3_object_key}")
+    response = s3_client.get_object(Bucket=s3_bucket, Key=s3_object_key)
+    scan_data_json = response["Body"].read().decode("utf-8")
+
+    # Parse JSON data
+    trivy_data = json.loads(scan_data_json)
+
+    # Extract results using the same logic as binary scanning
+    if "Results" in trivy_data and trivy_data["Results"]:
+        results = trivy_data["Results"]
+    else:
+        stat_handler.incr("image_scan_no_results_count")
+        logger.warning(
+            f"S3 scan data did not contain a `Results` key for URI = {image_uri}; continuing."
+        )
+        results = []
+
+    image_digest = None
+    if "Metadata" in trivy_data and trivy_data["Metadata"]:
+        repo_digests = trivy_data["Metadata"].get("RepoDigests", [])
+        if repo_digests:
+            # Sample input: 000000000000.dkr.ecr.us-east-1.amazonaws.com/test-repository@sha256:88016
+            # Sample output: sha256:88016
+            repo_digest = repo_digests[0]
+            if "@" in repo_digest:
+                image_digest = repo_digest.split("@")[1]
+
+    return results, image_digest
+
+
+@timeit
+def sync_single_image_from_s3(
+    neo4j_session: Session,
+    image_uri: str,
+    update_tag: int,
+    s3_bucket: str,
+    s3_object_key: str,
+    boto3_session: boto3.Session,
+) -> None:
+    """
+    Read Trivy scan results from S3 and sync to Neo4j.
+
+    Args:
+        neo4j_session: Neo4j session for database operations
+        image_uri: ECR image URI
+        update_tag: Update tag for tracking
+        s3_bucket: S3 bucket containing scan results
+        s3_object_key: S3 object key for this image's scan results
+        boto3_session: boto3 session for S3 operations
+    """
+    try:
+        # Read and parse scan results from S3
+        results, image_digest = read_scan_results_from_s3(
+            boto3_session,
+            s3_bucket,
+            s3_object_key,
+            image_uri,
+        )
+        if not image_digest:
+            logger.warning(f"No image digest found for {image_uri}; skipping over.")
+            return
+
+        # Transform all data in one pass using existing function
+        findings_list, packages_list, fixes_list = transform_scan_results(
+            results,
+            image_digest,
+        )
+
+        num_findings = len(findings_list)
+        stat_handler.incr("image_scan_cve_count", num_findings)
+
+        # Load the transformed data using existing functions
+        load_scan_vulns(
+            neo4j_session,
+            findings_list,
+            update_tag=update_tag,
+        )
+        load_scan_packages(
+            neo4j_session,
+            packages_list,
+            update_tag=update_tag,
+        )
+        load_scan_fixes(
+            neo4j_session,
+            fixes_list,
+            update_tag=update_tag,
+        )
+        stat_handler.incr("images_processed_count")
+
+    except Exception as e:
+        logger.error(
+            f"Failed to process S3 scan results for {image_uri} from {s3_object_key}: {e}"
+        )
+        raise
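Not part of the diff: a minimal usage sketch of the new S3-based scanner functions above. The bucket, prefix, and the mapping from an S3 object key back to an image URI are illustrative assumptions; the real orchestration lives in cartography/intel/trivy/__init__.py, which is not shown in this section.

# Illustrative sketch only. Assumes reports are stored as <prefix>/<image_uri>.json;
# the actual key-to-URI convention may differ.
import boto3
from neo4j import Session

from cartography.intel.trivy.scanner import get_json_files_in_s3
from cartography.intel.trivy.scanner import sync_single_image_from_s3


def sync_trivy_results_sketch(
    neo4j_session: Session,
    update_tag: int,
    s3_bucket: str,
    s3_prefix: str,
    boto3_session: boto3.Session,
) -> None:
    # Find every Trivy JSON report under the prefix.
    object_keys = get_json_files_in_s3(s3_bucket, s3_prefix, boto3_session)
    for object_key in object_keys:
        # Hypothetical: recover the image URI from the object key.
        image_uri = object_key[len(s3_prefix):].removeprefix("/").removesuffix(".json")
        sync_single_image_from_s3(
            neo4j_session,
            image_uri,
            update_tag,
            s3_bucket,
            object_key,
            boto3_session,
        )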
cartography/models/aws/acm/certificate.py
ADDED

@@ -0,0 +1,75 @@
+from dataclasses import dataclass
+
+from cartography.models.core.common import PropertyRef
+from cartography.models.core.nodes import CartographyNodeProperties
+from cartography.models.core.nodes import CartographyNodeSchema
+from cartography.models.core.relationships import CartographyRelProperties
+from cartography.models.core.relationships import CartographyRelSchema
+from cartography.models.core.relationships import LinkDirection
+from cartography.models.core.relationships import make_target_node_matcher
+from cartography.models.core.relationships import OtherRelationships
+from cartography.models.core.relationships import TargetNodeMatcher
+
+
+@dataclass(frozen=True)
+class ACMCertificateNodeProperties(CartographyNodeProperties):
+    id: PropertyRef = PropertyRef("Arn")
+    arn: PropertyRef = PropertyRef("Arn", extra_index=True)
+    domainname: PropertyRef = PropertyRef("DomainName")
+    type: PropertyRef = PropertyRef("Type")
+    status: PropertyRef = PropertyRef("Status")
+    key_algorithm: PropertyRef = PropertyRef("KeyAlgorithm")
+    signature_algorithm: PropertyRef = PropertyRef("SignatureAlgorithm")
+    not_before: PropertyRef = PropertyRef("NotBefore")
+    not_after: PropertyRef = PropertyRef("NotAfter")
+    in_use_by: PropertyRef = PropertyRef("InUseBy")
+    region: PropertyRef = PropertyRef("Region", set_in_kwargs=True)
+    lastupdated: PropertyRef = PropertyRef("lastupdated", set_in_kwargs=True)
+
+
+@dataclass(frozen=True)
+class ACMCertificateToAWSAccountRelProperties(CartographyRelProperties):
+    lastupdated: PropertyRef = PropertyRef("lastupdated", set_in_kwargs=True)
+
+
+@dataclass(frozen=True)
+class ACMCertificateToAWSAccountRel(CartographyRelSchema):
+    target_node_label: str = "AWSAccount"
+    target_node_matcher: TargetNodeMatcher = make_target_node_matcher(
+        {"id": PropertyRef("AWS_ID", set_in_kwargs=True)}
+    )
+    direction: LinkDirection = LinkDirection.INWARD
+    rel_label: str = "RESOURCE"
+    properties: ACMCertificateToAWSAccountRelProperties = (
+        ACMCertificateToAWSAccountRelProperties()
+    )
+
+
+@dataclass(frozen=True)
+class ACMCertificateToELBV2ListenerRelProperties(CartographyRelProperties):
+    lastupdated: PropertyRef = PropertyRef("lastupdated", set_in_kwargs=True)
+
+
+@dataclass(frozen=True)
+class ACMCertificateToELBV2ListenerRel(CartographyRelSchema):
+    target_node_label: str = "ELBV2Listener"
+    target_node_matcher: TargetNodeMatcher = make_target_node_matcher(
+        {"id": PropertyRef("ELBV2ListenerArns", one_to_many=True)}
+    )
+    direction: LinkDirection = LinkDirection.OUTWARD
+    rel_label: str = "USED_BY"
+    properties: ACMCertificateToELBV2ListenerRelProperties = (
+        ACMCertificateToELBV2ListenerRelProperties()
+    )
+
+
+@dataclass(frozen=True)
+class ACMCertificateSchema(CartographyNodeSchema):
+    label: str = "ACMCertificate"
+    properties: ACMCertificateNodeProperties = ACMCertificateNodeProperties()
+    sub_resource_relationship: ACMCertificateToAWSAccountRel = (
+        ACMCertificateToAWSAccountRel()
+    )
+    other_relationships: OtherRelationships = OtherRelationships(
+        [ACMCertificateToELBV2ListenerRel()]
+    )
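Not part of the diff: a hedged sketch of the input shape the ACMCertificateSchema above consumes when passed to cartography's load(). Field values, the helper name, and the surrounding variables are placeholders; only the key names and kwargs mirror the PropertyRef declarations in the hunk.

from neo4j import Session

from cartography.client.core.tx import load
from cartography.models.aws.acm.certificate import ACMCertificateSchema


def load_acm_certificates_sketch(
    neo4j_session: Session,
    certificates: list[dict],
    region: str,
    current_aws_account_id: str,
    update_tag: int,
) -> None:
    # Keys in each certificate dict line up with the PropertyRef names above;
    # ELBV2ListenerArns is matched one_to_many against ELBV2Listener ids.
    load(
        neo4j_session,
        ACMCertificateSchema(),
        certificates,
        lastupdated=update_tag,
        Region=region,
        AWS_ID=current_aws_account_id,
    )


# Example input (placeholder values only):
example_certificate = {
    "Arn": "arn:aws:acm:us-east-1:000000000000:certificate/example",
    "DomainName": "example.com",
    "Type": "AMAZON_ISSUED",
    "Status": "ISSUED",
    "KeyAlgorithm": "RSA-2048",
    "SignatureAlgorithm": "SHA256WITHRSA",
    "NotBefore": "2025-01-01T00:00:00Z",
    "NotAfter": "2026-01-01T00:00:00Z",
    "InUseBy": [],
    "ELBV2ListenerArns": [],
}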
cartography/models/aws/cloudtrail/trail.py
CHANGED

@@ -7,6 +7,7 @@ from cartography.models.core.relationships import CartographyRelProperties
 from cartography.models.core.relationships import CartographyRelSchema
 from cartography.models.core.relationships import LinkDirection
 from cartography.models.core.relationships import make_target_node_matcher
+from cartography.models.core.relationships import OtherRelationships
 from cartography.models.core.relationships import TargetNodeMatcher
 
 

@@ -54,8 +55,31 @@ class CloudTrailToAWSAccountRel(CartographyRelSchema):
     )
 
 
+@dataclass(frozen=True)
+class CloudTrailTrailToS3BucketRelProperties(CartographyRelProperties):
+    lastupdated: PropertyRef = PropertyRef("lastupdated", set_in_kwargs=True)
+
+
+@dataclass(frozen=True)
+class CloudTrailTrailToS3BucketRel(CartographyRelSchema):
+    target_node_label: str = "S3Bucket"
+    target_node_matcher: TargetNodeMatcher = make_target_node_matcher(
+        {"name": PropertyRef("S3BucketName")},
+    )
+    direction: LinkDirection = LinkDirection.OUTWARD
+    rel_label: str = "LOGS_TO"
+    properties: CloudTrailTrailToS3BucketRelProperties = (
+        CloudTrailTrailToS3BucketRelProperties()
+    )
+
+
 @dataclass(frozen=True)
 class CloudTrailTrailSchema(CartographyNodeSchema):
     label: str = "CloudTrailTrail"
     properties: CloudTrailTrailNodeProperties = CloudTrailTrailNodeProperties()
     sub_resource_relationship: CloudTrailToAWSAccountRel = CloudTrailToAWSAccountRel()
+    other_relationships: OtherRelationships = OtherRelationships(
+        [
+            CloudTrailTrailToS3BucketRel(),
+        ]
+    )
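Not part of the diff: with the new LOGS_TO relationship above, a transformed trail dict only needs an "S3BucketName" key for the loader to connect the CloudTrailTrail node to an existing S3Bucket node matched by name. The other keys below are placeholders and not taken from the diff.

# Illustrative only: minimal trail dict shape consumed by the new relationship.
example_trail = {
    "TrailARN": "arn:aws:cloudtrail:us-east-1:000000000000:trail/example",  # placeholder
    "Name": "example",  # placeholder
    "S3BucketName": "example-cloudtrail-logs",  # matched against S3Bucket.name -> LOGS_TO
}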
cartography/models/aws/s3/notification.py
ADDED

@@ -0,0 +1,24 @@
+from dataclasses import dataclass
+
+from cartography.models.core.common import PropertyRef
+from cartography.models.core.relationships import CartographyRelProperties
+from cartography.models.core.relationships import CartographyRelSchema
+from cartography.models.core.relationships import LinkDirection
+from cartography.models.core.relationships import make_target_node_matcher
+from cartography.models.core.relationships import TargetNodeMatcher
+
+
+@dataclass(frozen=True)
+class S3BucketToSNSTopicRelProperties(CartographyRelProperties):
+    lastupdated: PropertyRef = PropertyRef("lastupdated", set_in_kwargs=True)
+
+
+@dataclass(frozen=True)
+class S3BucketToSNSTopicRel(CartographyRelSchema):
+    target_node_label: str = "SNSTopic"
+    target_node_matcher: TargetNodeMatcher = make_target_node_matcher(
+        {"arn": PropertyRef("TopicArn")},
+    )
+    direction: LinkDirection = LinkDirection.OUTWARD
+    rel_label: str = "NOTIFIES"
+    properties: S3BucketToSNSTopicRelProperties = S3BucketToSNSTopicRelProperties()
cartography/models/aws/secretsmanager/secret_version.py
CHANGED

@@ -91,8 +91,6 @@ class SecretsManagerSecretVersionToKMSKeyRel(CartographyRelSchema):
     properties: SecretsManagerSecretVersionRelProperties = (
         SecretsManagerSecretVersionRelProperties()
     )
-    # Only create this relationship if KmsKeyId exists
-    conditional_match_property: str = "KmsKeyId"
 
 
 @dataclass(frozen=True)
File without changes
cartography/models/aws/sqs/queue.py
ADDED

@@ -0,0 +1,89 @@
+from dataclasses import dataclass
+
+from cartography.models.core.common import PropertyRef
+from cartography.models.core.nodes import CartographyNodeProperties
+from cartography.models.core.nodes import CartographyNodeSchema
+from cartography.models.core.relationships import CartographyRelProperties
+from cartography.models.core.relationships import CartographyRelSchema
+from cartography.models.core.relationships import LinkDirection
+from cartography.models.core.relationships import make_target_node_matcher
+from cartography.models.core.relationships import OtherRelationships
+from cartography.models.core.relationships import TargetNodeMatcher
+
+
+@dataclass(frozen=True)
+class SQSQueueNodeProperties(CartographyNodeProperties):
+    id: PropertyRef = PropertyRef("QueueArn")
+    arn: PropertyRef = PropertyRef("QueueArn", extra_index=True)
+    name: PropertyRef = PropertyRef("name")
+    url: PropertyRef = PropertyRef("url")
+    created_timestamp: PropertyRef = PropertyRef("CreatedTimestamp")
+    delay_seconds: PropertyRef = PropertyRef("DelaySeconds")
+    last_modified_timestamp: PropertyRef = PropertyRef("LastModifiedTimestamp")
+    maximum_message_size: PropertyRef = PropertyRef("MaximumMessageSize")
+    message_retention_period: PropertyRef = PropertyRef("MessageRetentionPeriod")
+    policy: PropertyRef = PropertyRef("Policy")
+    receive_message_wait_time_seconds: PropertyRef = PropertyRef(
+        "ReceiveMessageWaitTimeSeconds"
+    )
+    redrive_policy_dead_letter_target_arn: PropertyRef = PropertyRef(
+        "redrive_policy_dead_letter_target_arn"
+    )
+    redrive_policy_max_receive_count: PropertyRef = PropertyRef(
+        "redrive_policy_max_receive_count"
+    )
+    visibility_timeout: PropertyRef = PropertyRef("VisibilityTimeout")
+    kms_master_key_id: PropertyRef = PropertyRef("KmsMasterKeyId")
+    kms_data_key_reuse_period_seconds: PropertyRef = PropertyRef(
+        "KmsDataKeyReusePeriodSeconds"
+    )
+    fifo_queue: PropertyRef = PropertyRef("FifoQueue")
+    content_based_deduplication: PropertyRef = PropertyRef("ContentBasedDeduplication")
+    deduplication_scope: PropertyRef = PropertyRef("DeduplicationScope")
+    fifo_throughput_limit: PropertyRef = PropertyRef("FifoThroughputLimit")
+    region: PropertyRef = PropertyRef("Region", set_in_kwargs=True)
+    lastupdated: PropertyRef = PropertyRef("lastupdated", set_in_kwargs=True)
+
+
+@dataclass(frozen=True)
+class SQSQueueToAWSAccountRelProperties(CartographyRelProperties):
+    lastupdated: PropertyRef = PropertyRef("lastupdated", set_in_kwargs=True)
+
+
+@dataclass(frozen=True)
+class SQSQueueToAWSAccountRel(CartographyRelSchema):
+    target_node_label: str = "AWSAccount"
+    target_node_matcher: TargetNodeMatcher = make_target_node_matcher(
+        {"id": PropertyRef("AWS_ID", set_in_kwargs=True)}
+    )
+    direction: LinkDirection = LinkDirection.INWARD
+    rel_label: str = "RESOURCE"
+    properties: SQSQueueToAWSAccountRelProperties = SQSQueueToAWSAccountRelProperties()
+
+
+@dataclass(frozen=True)
+class SQSQueueToDeadLetterQueueRelProperties(CartographyRelProperties):
+    lastupdated: PropertyRef = PropertyRef("lastupdated", set_in_kwargs=True)
+
+
+@dataclass(frozen=True)
+class SQSQueueToDeadLetterQueueRel(CartographyRelSchema):
+    target_node_label: str = "SQSQueue"
+    target_node_matcher: TargetNodeMatcher = make_target_node_matcher(
+        {"id": PropertyRef("redrive_policy_dead_letter_target_arn")}
+    )
+    direction: LinkDirection = LinkDirection.OUTWARD
+    rel_label: str = "HAS_DEADLETTER_QUEUE"
+    properties: SQSQueueToDeadLetterQueueRelProperties = (
+        SQSQueueToDeadLetterQueueRelProperties()
+    )
+
+
+@dataclass(frozen=True)
+class SQSQueueSchema(CartographyNodeSchema):
+    label: str = "SQSQueue"
+    properties: SQSQueueNodeProperties = SQSQueueNodeProperties()
+    sub_resource_relationship: SQSQueueToAWSAccountRel = SQSQueueToAWSAccountRel()
+    other_relationships: OtherRelationships = OtherRelationships(
+        [SQSQueueToDeadLetterQueueRel()]
+    )
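Not part of the diff: a hedged sketch of the queue dict shape the SQSQueueSchema above expects. Only the key names mirror the PropertyRef declarations in the hunk; the values and the flattened redrive-policy naming convention are illustrative assumptions.

# Illustrative only: the HAS_DEADLETTER_QUEUE relationship resolves
# redrive_policy_dead_letter_target_arn against another SQSQueue's id (its ARN).
example_queue = {
    "QueueArn": "arn:aws:sqs:us-east-1:000000000000:example-queue",  # placeholder
    "name": "example-queue",
    "url": "https://sqs.us-east-1.amazonaws.com/000000000000/example-queue",
    "CreatedTimestamp": 1700000000,
    "DelaySeconds": 0,
    "VisibilityTimeout": 30,
    # Assumed to be flattened from the queue's RedrivePolicy JSON attribute:
    "redrive_policy_dead_letter_target_arn": "arn:aws:sqs:us-east-1:000000000000:example-dlq",
    "redrive_policy_max_receive_count": 5,
}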
cartography/models/core/nodes.py
CHANGED
@@ -91,7 +91,7 @@ class CartographyNodeSchema(abc.ABC):
         """
         Optional.
         Allows subclasses to specify additional cartography relationships on the node.
-        :return: None if not
+        :return: None if not overridden. Else return the node's OtherRelationships.
         """
         return None
 

@@ -100,6 +100,19 @@ class CartographyNodeSchema(abc.ABC):
         """
         Optional.
         Allows specifying extra labels on the node.
-        :return: None if not
+        :return: None if not overridden. Else return the ExtraNodeLabels specified on the node.
         """
         return None
+
+    @property
+    def scoped_cleanup(self) -> bool:
+        """
+        Optional.
+        Allows specifying whether cleanups of this node must be scoped to the sub resource relationship.
+        If True (default), when we clean up nodes of this type, we will only delete stale nodes in the current sub
+        resource. This is how our AWS sync behaves.
+        If False, when we clean up node of this type, we will delete all stale nodes. This is designed for resource
+        types that don't have a "tenant"-like entity.
+        :return: True if not overridden. Else return the boolean value specified on the node.
+        """
+        return True