cartography 0.94.0rc2__py3-none-any.whl → 0.95.0rc1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of cartography might be problematic.
- cartography/cli.py +42 -24
- cartography/config.py +12 -8
- cartography/data/indexes.cypher +0 -2
- cartography/data/jobs/scoped_analysis/semgrep_sca_risk_analysis.json +13 -13
- cartography/driftdetect/cli.py +1 -1
- cartography/graph/job.py +8 -1
- cartography/intel/aws/permission_relationships.py +6 -2
- cartography/intel/gcp/__init__.py +110 -23
- cartography/intel/kandji/__init__.py +1 -1
- cartography/intel/semgrep/findings.py +106 -59
- cartography/intel/snipeit/__init__.py +30 -0
- cartography/intel/snipeit/asset.py +74 -0
- cartography/intel/snipeit/user.py +75 -0
- cartography/intel/snipeit/util.py +35 -0
- cartography/models/semgrep/findings.py +3 -1
- cartography/models/snipeit/__init__.py +0 -0
- cartography/models/snipeit/asset.py +81 -0
- cartography/models/snipeit/tenant.py +17 -0
- cartography/models/snipeit/user.py +49 -0
- cartography/sync.py +2 -2
- {cartography-0.94.0rc2.dist-info → cartography-0.95.0rc1.dist-info}/LICENSE +1 -1
- {cartography-0.94.0rc2.dist-info → cartography-0.95.0rc1.dist-info}/METADATA +3 -5
- {cartography-0.94.0rc2.dist-info → cartography-0.95.0rc1.dist-info}/RECORD +26 -22
- {cartography-0.94.0rc2.dist-info → cartography-0.95.0rc1.dist-info}/WHEEL +1 -1
- cartography/data/jobs/cleanup/crxcavator_import_cleanup.json +0 -18
- cartography/intel/crxcavator/__init__.py +0 -44
- cartography/intel/crxcavator/crxcavator.py +0 -329
- cartography-0.94.0rc2.dist-info/NOTICE +0 -4
- {cartography-0.94.0rc2.dist-info → cartography-0.95.0rc1.dist-info}/entry_points.txt +0 -0
- {cartography-0.94.0rc2.dist-info → cartography-0.95.0rc1.dist-info}/top_level.txt +0 -0

cartography/intel/semgrep/findings.py
CHANGED

@@ -3,10 +3,11 @@ from typing import Any
 from typing import Dict
 from typing import List
 from typing import Tuple
-from urllib.error import HTTPError
 
 import neo4j
 import requests
+from requests.exceptions import HTTPError
+from requests.exceptions import ReadTimeout
 
 from cartography.client.core.tx import load
 from cartography.graph.job import GraphJob
@@ -20,6 +21,7 @@ from cartography.util import timeit
 
 logger = logging.getLogger(__name__)
 stat_handler = get_stats_client(__name__)
+_PAGE_SIZE = 500
 _TIMEOUT = (60, 60)
 _MAX_RETRIES = 3
 
@@ -48,60 +50,91 @@ def get_deployment(semgrep_app_token: str) -> Dict[str, Any]:
 
 
 @timeit
-def get_sca_vulns(semgrep_app_token: str,
+def get_sca_vulns(semgrep_app_token: str, deployment_slug: str) -> List[Dict[str, Any]]:
     """
     Gets the SCA vulns associated with the passed Semgrep App token and deployment id.
     param: semgrep_app_token: The Semgrep App token to use for authentication.
-    param:
+    param: deployment_slug: The Semgrep deployment slug to use for retrieving SCA vulns.
     """
     all_vulns = []
-    sca_url = f"https://semgrep.dev/api/v1/deployments/{
+    sca_url = f"https://semgrep.dev/api/v1/deployments/{deployment_slug}/findings"
    has_more = True
-
-    page = 1
+    page = 0
     retries = 0
     headers = {
         "Content-Type": "application/json",
         "Authorization": f"Bearer {semgrep_app_token}",
     }
 
-    request_data = {
-        "
-        "
-        "
-        "
+    request_data: dict[str, Any] = {
+        "page": page,
+        "page_size": _PAGE_SIZE,
+        "issue_type": "sca",
+        "exposures": "reachable,always_reachable,conditionally_reachable,unreachable,unknown",
+        "ref": "_default",
+        "dedup": "true",
     }
-
+    logger.info(f"Retrieving Semgrep SCA vulns for deployment '{deployment_slug}'.")
     while has_more:
 
-        if cursor:
-            request_data.update({
-                "cursor": {
-                    "vulnOffset": cursor["vulnOffset"],
-                    "issueOffset": cursor["issueOffset"],
-                },
-            })
         try:
-            response = requests.
+            response = requests.get(sca_url, params=request_data, headers=headers, timeout=_TIMEOUT)
             response.raise_for_status()
             data = response.json()
-        except HTTPError as e:
+        except (ReadTimeout, HTTPError) as e:
             logger.warning(f"Failed to retrieve Semgrep SCA vulns for page {page}. Retrying...")
             retries += 1
             if retries >= _MAX_RETRIES:
                 raise e
             continue
-        vulns = data["
-
-        has_more = data.get("hasMore", False)
+        vulns = data["findings"]
+        has_more = len(vulns) > 0
         if page % 10 == 0:
-            logger.info(f"Processed {page}
+            logger.info(f"Processed page {page} of Semgrep SCA vulnerabilities.")
         all_vulns.extend(vulns)
         retries = 0
+        page += 1
+        request_data["page"] = page
 
+    logger.info(f"Retrieved {len(all_vulns)} Semgrep SCA vulns in {page} pages.")
     return all_vulns
 
 
+def _get_vuln_class(vuln: Dict) -> str:
+    vulnerability_classes = vuln["rule"].get("vulnerability_classes", [])
+    if vulnerability_classes:
+        return vulnerability_classes[0]
+    return "Other"
+
+
+def _determine_exposure(vuln: Dict[str, Any]) -> str | None:
+    # See Semgrep reachability types:
+    # https://semgrep.dev/docs/semgrep-supply-chain/overview#types-of-semgrep-supply-chain-findings
+    reachability_types = {
+        "NO REACHABILITY ANALYSIS": 2,
+        "UNREACHABLE": 2,
+        "REACHABLE": 0,
+        "ALWAYS REACHABLE": 0,
+        "CONDITIONALLY REACHABLE": 1,
+    }
+    reachable_flag = vuln["reachability"]
+    if reachable_flag and reachable_flag.upper() in reachability_types:
+        reach_score = reachability_types[reachable_flag.upper()]
+        if reach_score < reachability_types["UNREACHABLE"]:
+            return "REACHABLE"
+        else:
+            return "UNREACHABLE"
+    return None
+
+
+def _build_vuln_url(vuln: str) -> str | None:
+    if 'CVE' in vuln:
+        return f"https://nvd.nist.gov/vuln/detail/{vuln}"
+    if 'GHSA' in vuln:
+        return f"https://github.com/advisories/{vuln}"
+    return None
+
+
 def transform_sca_vulns(raw_vulns: List[Dict[str, Any]]) -> Tuple[List[Dict[str, Any]], List[Dict[str, str]]]:
     """
     Transforms the raw SCA vulns response from Semgrep API into a list of dicts
@@ -112,46 +145,59 @@ def transform_sca_vulns(raw_vulns: List[Dict[str, Any]]) -> Tuple[List[Dict[str,
     for vuln in raw_vulns:
         sca_vuln: Dict[str, Any] = {}
         # Mandatory fields
-
-
-
-
-        sca_vuln["
-        sca_vuln["
-        sca_vuln["
-        sca_vuln["
-        sca_vuln["
-        sca_vuln["
-
+        repository_name = vuln["repository"]["name"]
+        rule_id = vuln["rule"]["name"]
+        vulnerability_class = _get_vuln_class(vuln)
+        package = vuln['found_dependency']['package']
+        sca_vuln["id"] = vuln["id"]
+        sca_vuln["repositoryName"] = repository_name
+        sca_vuln["branch"] = vuln["ref"]
+        sca_vuln["ruleId"] = rule_id
+        sca_vuln["title"] = package + ":" + vulnerability_class
+        sca_vuln["description"] = vuln["rule"]["message"]
+        sca_vuln["ecosystem"] = vuln["found_dependency"]["ecosystem"]
+        sca_vuln["severity"] = vuln["severity"].upper()
+        sca_vuln["reachability"] = vuln["reachability"].upper()  # Check done to determine rechabilitity
+        sca_vuln["reachableIf"] = vuln["reachable_condition"].upper() if vuln["reachable_condition"] else None
+        sca_vuln["exposureType"] = _determine_exposure(vuln)  # Determintes if reachable or unreachable
+        dependency = f"{package}|{vuln['found_dependency']['version']}"
         sca_vuln["matchedDependency"] = dependency
-
-
-
-
-
-
-
-
-
-
+        dep_url = vuln["found_dependency"]["lockfile_line_url"]
+        if dep_url:  # Lock file can be null, need to set
+            dep_file = dep_url.split("/")[-1].split("#")[0]
+            sca_vuln["dependencyFileLocation_path"] = dep_file
+            sca_vuln["dependencyFileLocation_url"] = dep_url
+        else:
+            if sca_vuln.get("location"):
+                sca_vuln["dependencyFileLocation_path"] = sca_vuln["location"]["file_path"]
+        sca_vuln["transitivity"] = vuln["found_dependency"]["transitivity"].upper()
+        if vuln.get("vulnerability_identifier"):
+            vuln_id = vuln["vulnerability_identifier"].upper()
+            sca_vuln["cveId"] = vuln_id
+            sca_vuln["ref_urls"] = [_build_vuln_url(vuln_id)]
+        if vuln.get('fix_recommendations') and len(vuln['fix_recommendations']) > 0:
+            fix = vuln['fix_recommendations'][0]
+            dep_fix = f"{fix['package']}|{fix['version']}"
             sca_vuln["closestSafeDependency"] = dep_fix
-
-
-        sca_vuln["
-        sca_vuln["
-
-
+        sca_vuln["openedAt"] = vuln["created_at"]
+        sca_vuln["fixStatus"] = vuln["status"]
+        sca_vuln["triageStatus"] = vuln["triage_state"]
+        sca_vuln["confidence"] = vuln["confidence"]
+        usage = vuln.get("usage")
+        if usage:
             usage_dict = {}
+            url = usage["location"]["url"]
             usage_dict["SCA_ID"] = sca_vuln["id"]
-            usage_dict["findingId"] =
+            usage_dict["findingId"] = hash(url.split("github.com/")[-1])
             usage_dict["path"] = usage["location"]["path"]
-            usage_dict["startLine"] = usage["location"]["
-            usage_dict["startCol"] = usage["location"]["
-            usage_dict["endLine"] = usage["location"]["
-            usage_dict["endCol"] = usage["location"]["
-            usage_dict["url"] =
+            usage_dict["startLine"] = usage["location"]["start_line"]
+            usage_dict["startCol"] = usage["location"]["start_col"]
+            usage_dict["endLine"] = usage["location"]["end_line"]
+            usage_dict["endCol"] = usage["location"]["end_col"]
+            usage_dict["url"] = url
             usages.append(usage_dict)
         vulns.append(sca_vuln)
+
     return vulns, usages
 
 
@@ -228,9 +274,10 @@ def sync(
     logger.info("Running Semgrep SCA findings sync job.")
     semgrep_deployment = get_deployment(semgrep_app_token)
     deployment_id = semgrep_deployment["id"]
+    deployment_slug = semgrep_deployment["slug"]
     load_semgrep_deployment(neo4j_sesion, semgrep_deployment, update_tag)
     common_job_parameters["DEPLOYMENT_ID"] = deployment_id
-    raw_vulns = get_sca_vulns(semgrep_app_token,
+    raw_vulns = get_sca_vulns(semgrep_app_token, deployment_slug)
     vulns, usages = transform_sca_vulns(raw_vulns)
     load_semgrep_sca_vulns(neo4j_sesion, vulns, deployment_id, update_tag)
    load_semgrep_sca_usages(neo4j_sesion, usages, deployment_id, update_tag)
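
For reference, a minimal sketch of the request the rewritten get_sca_vulns() issues for its first page, assembled from the values in the diff above; the App token and deployment slug below are placeholders.

import requests

SEMGREP_APP_TOKEN = "<semgrep-app-token>"   # placeholder
DEPLOYMENT_SLUG = "example-org"             # placeholder

response = requests.get(
    f"https://semgrep.dev/api/v1/deployments/{DEPLOYMENT_SLUG}/findings",
    params={
        "page": 0,
        "page_size": 500,  # _PAGE_SIZE in the new module
        "issue_type": "sca",
        "exposures": "reachable,always_reachable,conditionally_reachable,unreachable,unknown",
        "ref": "_default",
        "dedup": "true",
    },
    headers={
        "Content-Type": "application/json",
        "Authorization": f"Bearer {SEMGREP_APP_TOKEN}",
    },
    timeout=(60, 60),  # _TIMEOUT
)
response.raise_for_status()
findings = response.json()["findings"]  # paging stops once this list comes back empty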

cartography/intel/snipeit/__init__.py

@@ -0,0 +1,30 @@
+import logging
+
+import neo4j
+
+from cartography.config import Config
+from cartography.intel.snipeit import asset
+from cartography.intel.snipeit import user
+from cartography.stats import get_stats_client
+from cartography.util import timeit
+
+logger = logging.getLogger(__name__)
+stat_handler = get_stats_client(__name__)
+
+
+@timeit
+def start_snipeit_ingestion(neo4j_session: neo4j.Session, config: Config) -> None:
+    if config.snipeit_base_uri is None or config.snipeit_token is None or config.snipeit_tenant_id is None:
+        logger.warning(
+            "Required parameter(s) missing. Skipping sync.",
+        )
+        return
+
+    common_job_parameters = {
+        "UPDATE_TAG": config.update_tag,
+        "TENANT_ID": config.snipeit_tenant_id,
+    }
+
+    # Ingest SnipeIT users and assets
+    user.sync(neo4j_session, common_job_parameters, config.snipeit_base_uri, config.snipeit_token)
+    asset.sync(neo4j_session, common_job_parameters, config.snipeit_base_uri, config.snipeit_token)
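
For orientation, a minimal sketch of driving the new module's per-resource sync functions directly, using only the signatures shown above; the Neo4j connection details, SnipeIT base URI, API token, and tenant id are placeholders.

import neo4j

from cartography.intel.snipeit import asset
from cartography.intel.snipeit import user

# Placeholder Neo4j connection; adjust for your environment.
driver = neo4j.GraphDatabase.driver("bolt://localhost:7687", auth=("neo4j", "password"))
common_job_parameters = {
    "UPDATE_TAG": 1700000000,       # integer tag marking this sync run
    "TENANT_ID": "example-tenant",  # placeholder tenant identifier
}

with driver.session() as neo4j_session:
    # Same order as start_snipeit_ingestion(): users first, then assets.
    user.sync(neo4j_session, common_job_parameters, "https://snipeit.example.com", "<api-token>")
    asset.sync(neo4j_session, common_job_parameters, "https://snipeit.example.com", "<api-token>")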

cartography/intel/snipeit/asset.py

@@ -0,0 +1,74 @@
+import logging
+from typing import Any
+from typing import Dict
+from typing import List
+
+import neo4j
+
+from .util import call_snipeit_api
+from cartography.client.core.tx import load
+from cartography.graph.job import GraphJob
+from cartography.models.snipeit.asset import SnipeitAssetSchema
+from cartography.models.snipeit.tenant import SnipeitTenantSchema
+from cartography.util import timeit
+
+
+logger = logging.getLogger(__name__)
+
+
+@timeit
+def get(base_uri: str, token: str) -> List[Dict]:
+    api_endpoint = "/api/v1/hardware"
+    results: List[Dict[str, Any]] = []
+    while True:
+        offset = len(results)
+        api_endpoint = f"{api_endpoint}?order='asc'&offset={offset}"
+        response = call_snipeit_api(api_endpoint, base_uri, token)
+        results.extend(response['rows'])
+
+        total = response['total']
+        results_count = len(results)
+        if results_count >= total:
+            break
+
+    return results
+
+
+@timeit
+def load_assets(
+    neo4j_session: neo4j.Session,
+    common_job_parameters: Dict,
+    data: List[Dict[str, Any]],
+) -> None:
+    # Create the SnipeIT Tenant
+    load(
+        neo4j_session,
+        SnipeitTenantSchema(),
+        [{'id': common_job_parameters["TENANT_ID"]}],
+        lastupdated=common_job_parameters["UPDATE_TAG"],
+    )
+
+    load(
+        neo4j_session,
+        SnipeitAssetSchema(),
+        data,
+        lastupdated=common_job_parameters["UPDATE_TAG"],
+        TENANT_ID=common_job_parameters["TENANT_ID"],
+    )
+
+
+@timeit
+def cleanup(neo4j_session: neo4j.Session, common_job_parameters: Dict) -> None:
+    GraphJob.from_node_schema(SnipeitAssetSchema(), common_job_parameters).run(neo4j_session)
+
+
+@timeit
+def sync(
+    neo4j_session: neo4j.Session,
+    common_job_parameters: Dict,
+    base_uri: str,
+    token: str,
+) -> None:
+    assets = get(base_uri=base_uri, token=token)
+    load_assets(neo4j_session=neo4j_session, common_job_parameters=common_job_parameters, data=assets)
+    cleanup(neo4j_session, common_job_parameters)

cartography/intel/snipeit/user.py

@@ -0,0 +1,75 @@
+import logging
+from typing import Any
+from typing import Dict
+from typing import List
+
+import neo4j
+
+from .util import call_snipeit_api
+from cartography.client.core.tx import load
+from cartography.graph.job import GraphJob
+from cartography.models.snipeit.tenant import SnipeitTenantSchema
+from cartography.models.snipeit.user import SnipeitUserSchema
+from cartography.util import timeit
+
+logger = logging.getLogger(__name__)
+
+
+@timeit
+def get(base_uri: str, token: str) -> List[Dict]:
+    api_endpoint = "/api/v1/users"
+    results: List[Dict[str, Any]] = []
+    while True:
+        offset = len(results)
+        api_endpoint = f"{api_endpoint}?order='asc'&offset={offset}"
+        response = call_snipeit_api(api_endpoint, base_uri, token)
+        results.extend(response['rows'])
+
+        total = response['total']
+        results_count = len(results)
+        if results_count >= total:
+            break
+
+    return results
+
+
+@timeit
+def load_users(
+    neo4j_session: neo4j.Session,
+    common_job_parameters: Dict,
+    data: List[Dict[str, Any]],
+) -> None:
+    logger.debug(data[0])
+
+    # Create the SnipeIT Tenant
+    load(
+        neo4j_session,
+        SnipeitTenantSchema(),
+        [{'id': common_job_parameters["TENANT_ID"]}],
+        lastupdated=common_job_parameters["UPDATE_TAG"],
+    )
+
+    load(
+        neo4j_session,
+        SnipeitUserSchema(),
+        data,
+        lastupdated=common_job_parameters["UPDATE_TAG"],
+        TENANT_ID=common_job_parameters["TENANT_ID"],
+    )
+
+
+@timeit
+def cleanup(neo4j_session: neo4j.Session, common_job_parameters: Dict) -> None:
+    GraphJob.from_node_schema(SnipeitUserSchema(), common_job_parameters).run(neo4j_session)
+
+
+@timeit
+def sync(
+    neo4j_session: neo4j.Session,
+    common_job_parameters: Dict,
+    base_uri: str,
+    token: str,
+) -> None:
+    users = get(base_uri=base_uri, token=token)
+    load_users(neo4j_session, common_job_parameters, users)
+    cleanup(neo4j_session, common_job_parameters)

cartography/intel/snipeit/util.py

@@ -0,0 +1,35 @@
+import logging
+from typing import Any
+from typing import Dict
+
+import requests
+
+from cartography.util import timeit
+
+logger = logging.getLogger(__name__)
+# Connect and read timeouts of 60 seconds each; see https://requests.readthedocs.io/en/master/user/advanced/#timeouts
+_TIMEOUT = (60, 60)
+
+
+@timeit
+def call_snipeit_api(api_and_parameters: str, base_uri: str, token: str) -> Dict[str, Any]:
+    uri = base_uri + api_and_parameters
+    try:
+        logger.debug(
+            "SnipeIT: Get %s", uri,
+        )
+        response = requests.get(
+            uri,
+            headers={
+                'Accept': 'application/json',
+                'Authorization': f'Bearer {token}',
+            },
+            timeout=_TIMEOUT,
+        )
+    except requests.exceptions.Timeout:
+        # Add context and re-raise for callers to handle
+        logger.warning(f"SnipeIT: requests.get('{uri}') timed out.")
+        raise
+    # if call failed, use requests library to raise an exception
+    response.raise_for_status()
+    return response.json()
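
A short illustrative call to this helper; the SnipeIT host and token are placeholders, the endpoint string mirrors the one built by asset.get() above, and 'total' and 'rows' are the response fields the callers page through.

from cartography.intel.snipeit.util import call_snipeit_api

base_uri = "https://snipeit.example.com"  # placeholder
token = "<api-token>"                     # placeholder

page = call_snipeit_api("/api/v1/hardware?order='asc'&offset=0", base_uri, token)
print(page["total"], len(page["rows"]))   # total asset count and rows returned for this page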

cartography/models/semgrep/findings.py
CHANGED

@@ -17,6 +17,7 @@ class SemgrepSCAFindingNodeProperties(CartographyNodeProperties):
     lastupdated: PropertyRef = PropertyRef('lastupdated', set_in_kwargs=True)
     rule_id: PropertyRef = PropertyRef('ruleId', extra_index=True)
     repository: PropertyRef = PropertyRef('repositoryName', extra_index=True)
+    branch: PropertyRef = PropertyRef('branch')
     summary: PropertyRef = PropertyRef('title', extra_index=True)
     description: PropertyRef = PropertyRef('description')
     package_manager: PropertyRef = PropertyRef('ecosystem')
@@ -32,8 +33,9 @@ class SemgrepSCAFindingNodeProperties(CartographyNodeProperties):
     dependency_file: PropertyRef = PropertyRef('dependencyFileLocation_path', extra_index=True)
     dependency_file_url: PropertyRef = PropertyRef('dependencyFileLocation_url', extra_index=True)
     scan_time: PropertyRef = PropertyRef('openedAt')
-    published_time: PropertyRef = PropertyRef('announcedAt')
     fix_status: PropertyRef = PropertyRef('fixStatus')
+    triage_status: PropertyRef = PropertyRef('triageStatus')
+    confidence: PropertyRef = PropertyRef('confidence')
 
 
 @dataclass(frozen=True)

cartography/models/snipeit/__init__.py

File without changes

cartography/models/snipeit/asset.py

@@ -0,0 +1,81 @@
+from dataclasses import dataclass
+
+from cartography.models.core.common import PropertyRef
+from cartography.models.core.nodes import CartographyNodeProperties
+from cartography.models.core.nodes import CartographyNodeSchema
+from cartography.models.core.relationships import CartographyRelProperties
+from cartography.models.core.relationships import CartographyRelSchema
+from cartography.models.core.relationships import LinkDirection
+from cartography.models.core.relationships import make_target_node_matcher
+from cartography.models.core.relationships import OtherRelationships
+from cartography.models.core.relationships import TargetNodeMatcher
+
+
+@dataclass(frozen=True)
+class SnipeitAssetNodeProperties(CartographyNodeProperties):
+    """
+    https://snipe-it.readme.io/reference/hardware-list
+    """
+    # Common properties
+    id: PropertyRef = PropertyRef('id')
+    lastupdated: PropertyRef = PropertyRef('lastupdated', set_in_kwargs=True)
+
+    # SnipeIT specific properties
+    asset_tag: PropertyRef = PropertyRef('asset_tag')
+    assigned_to: PropertyRef = PropertyRef('assigned_to.email')
+    category: PropertyRef = PropertyRef('category.name')
+    company: PropertyRef = PropertyRef('company.name')
+    manufacturer: PropertyRef = PropertyRef('manufacturer.name')
+    model: PropertyRef = PropertyRef('model.name')
+    serial: PropertyRef = PropertyRef('serial', extra_index=True)
+
+
+###
+# (:SnipeitAsset)<-[:ASSET]-(:SnipeitTenant)
+###
+@dataclass(frozen=True)
+class SnipeitTenantToSnipeitAssetRelProperties(CartographyRelProperties):
+    lastupdated: PropertyRef = PropertyRef('lastupdated', set_in_kwargs=True)
+
+
+@dataclass(frozen=True)
+class SnipeitTenantToSnipeitAssetRel(CartographyRelSchema):
+    target_node_label: str = 'SnipeitTenant'
+    target_node_matcher: TargetNodeMatcher = make_target_node_matcher(
+        {'id': PropertyRef('TENANT_ID', set_in_kwargs=True)},
+    )
+    direction: LinkDirection = LinkDirection.INWARD
+    rel_label: str = "HAS_ASSET"
+    properties: SnipeitTenantToSnipeitAssetRelProperties = SnipeitTenantToSnipeitAssetRelProperties()
+
+
+###
+# (:SnipeitUser)-[:HAS_CHECKED_OUT]->(:SnipeitAsset)
+###
+@dataclass(frozen=True)
+class SnipeitUserToSnipeitAssetProperties(CartographyRelProperties):
+    lastupdated: PropertyRef = PropertyRef('lastupdated', set_in_kwargs=True)
+
+
+@dataclass(frozen=True)
+class SnipeitUserToSnipeitAssetRel(CartographyRelSchema):
+    target_node_label: str = 'SnipeitUser'
+    target_node_matcher: TargetNodeMatcher = make_target_node_matcher(
+        {'email': PropertyRef('assigned_to.email')},
+    )
+    direction: LinkDirection = LinkDirection.INWARD
+    rel_label: str = "HAS_CHECKED_OUT"
+    properties: SnipeitUserToSnipeitAssetProperties = SnipeitUserToSnipeitAssetProperties()
+
+
+###
+@dataclass(frozen=True)
+class SnipeitAssetSchema(CartographyNodeSchema):
+    label: str = 'SnipeitAsset'  # The label of the node
+    properties: SnipeitAssetNodeProperties = SnipeitAssetNodeProperties()  # An object representing all properties
+    sub_resource_relationship: SnipeitTenantToSnipeitAssetRel = SnipeitTenantToSnipeitAssetRel()
+    other_relationships: OtherRelationships = OtherRelationships(
+        [
+            SnipeitUserToSnipeitAssetRel(),
+        ],
+    )

cartography/models/snipeit/tenant.py

@@ -0,0 +1,17 @@
+from dataclasses import dataclass
+
+from cartography.models.core.common import PropertyRef
+from cartography.models.core.nodes import CartographyNodeProperties
+from cartography.models.core.nodes import CartographyNodeSchema
+
+
+@dataclass(frozen=True)
+class SnipeitTenantNodeProperties(CartographyNodeProperties):
+    id: PropertyRef = PropertyRef('id')
+    lastupdated: PropertyRef = PropertyRef('lastupdated', set_in_kwargs=True)
+
+
+@dataclass(frozen=True)
+class SnipeitTenantSchema(CartographyNodeSchema):
+    label: str = 'SnipeitTenant'  # The label of the node
+    properties: SnipeitTenantNodeProperties = SnipeitTenantNodeProperties()  # An object representing all properties

cartography/models/snipeit/user.py

@@ -0,0 +1,49 @@
+from dataclasses import dataclass
+
+from cartography.models.core.common import PropertyRef
+from cartography.models.core.nodes import CartographyNodeProperties
+from cartography.models.core.nodes import CartographyNodeSchema
+from cartography.models.core.relationships import CartographyRelProperties
+from cartography.models.core.relationships import CartographyRelSchema
+from cartography.models.core.relationships import LinkDirection
+from cartography.models.core.relationships import make_target_node_matcher
+from cartography.models.core.relationships import TargetNodeMatcher
+
+
+@dataclass(frozen=True)
+class SnipeitUserNodeProperties(CartographyNodeProperties):
+    """
+    Ref: https://snipe-it.readme.io/reference/users
+    """
+    # Common properties
+    id: PropertyRef = PropertyRef('id')
+    lastupdated: PropertyRef = PropertyRef('lastupdated', set_in_kwargs=True)
+
+    # SnipeIT specific properties
+    company: PropertyRef = PropertyRef('company_id.name', extra_index=True)
+    email: PropertyRef = PropertyRef('email', extra_index=True)
+    username: PropertyRef = PropertyRef('username')
+
+
+@dataclass(frozen=True)
+class SnipeitTenantToSnipeitUserRelProperties(CartographyRelProperties):
+    lastupdated: PropertyRef = PropertyRef('lastupdated', set_in_kwargs=True)
+
+
+@dataclass(frozen=True)
+# (:SnipeitTenant)-[:HAS_USER]->(:SnipeitUser)
+class SnipeitTenantToSnipeitUserRel(CartographyRelSchema):
+    target_node_label: str = 'SnipeitTenant'
+    target_node_matcher: TargetNodeMatcher = make_target_node_matcher(
+        {'id': PropertyRef('TENANT_ID', set_in_kwargs=True)},
+    )
+    direction: LinkDirection = LinkDirection.INWARD
+    rel_label: str = "HAS_USER"
+    properties: SnipeitTenantToSnipeitUserRelProperties = SnipeitTenantToSnipeitUserRelProperties()
+
+
+@dataclass(frozen=True)
+class SnipeitUserSchema(CartographyNodeSchema):
+    label: str = 'SnipeitUser'  # The label of the node
+    properties: SnipeitUserNodeProperties = SnipeitUserNodeProperties()  # An object representing all properties
+    sub_resource_relationship: SnipeitTenantToSnipeitUserRel = SnipeitTenantToSnipeitUserRel()

cartography/sync.py
CHANGED

@@ -17,7 +17,6 @@ import cartography.intel.azure
 import cartography.intel.bigfix
 import cartography.intel.create_indexes
 import cartography.intel.crowdstrike
-import cartography.intel.crxcavator.crxcavator
 import cartography.intel.cve
 import cartography.intel.digitalocean
 import cartography.intel.duo
@@ -30,6 +29,7 @@ import cartography.intel.lastpass
 import cartography.intel.oci
 import cartography.intel.okta
 import cartography.intel.semgrep
+import cartography.intel.snipeit
 from cartography.config import Config
 from cartography.stats import set_stats_client
 from cartography.util import STATUS_FAILURE
@@ -45,7 +45,6 @@ TOP_LEVEL_MODULES = OrderedDict({  # preserve order so that the default sync alw
     'crowdstrike': cartography.intel.crowdstrike.start_crowdstrike_ingestion,
     'gcp': cartography.intel.gcp.start_gcp_ingestion,
     'gsuite': cartography.intel.gsuite.start_gsuite_ingestion,
-    'crxcavator': cartography.intel.crxcavator.start_extension_ingestion,
     'cve': cartography.intel.cve.start_cve_ingestion,
     'oci': cartography.intel.oci.start_oci_ingestion,
     'okta': cartography.intel.okta.start_okta_ingestion,
@@ -57,6 +56,7 @@ TOP_LEVEL_MODULES = OrderedDict({  # preserve order so that the default sync alw
     'bigfix': cartography.intel.bigfix.start_bigfix_ingestion,
     'duo': cartography.intel.duo.start_duo_ingestion,
     'semgrep': cartography.intel.semgrep.start_semgrep_ingestion,
+    'snipeit': cartography.intel.snipeit.start_snipeit_ingestion,
     'analysis': cartography.intel.analysis.run,
 })
 
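
Net effect of these hunks: TOP_LEVEL_MODULES, the module-level registry whose order the source comment ties to the default sync, now contains 'snipeit' and no longer contains 'crxcavator'. A small illustrative check:

from cartography.sync import TOP_LEVEL_MODULES

print("snipeit" in TOP_LEVEL_MODULES)     # True on 0.95.0rc1
print("crxcavator" in TOP_LEVEL_MODULES)  # False: the CRXcavator intel module was removed
start_snipeit = TOP_LEVEL_MODULES["snipeit"]  # cartography.intel.snipeit.start_snipeit_ingestion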
|