cartography 0.93.0rc1__py3-none-any.whl → 0.94.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of cartography might be problematic. Click here for more details.

cartography/cli.py CHANGED
@@ -541,6 +541,28 @@ class CLI:
541
541
  'Required if you are using the Semgrep intel module. Ignored otherwise.'
542
542
  ),
543
543
  )
544
+ parser.add_argument(
545
+ '--snipeit-base-uri',
546
+ type=str,
547
+ default=None,
548
+ help=(
549
+ 'Your SnipeIT base URI'
550
+ 'Required if you are using the SnipeIT intel module. Ignored otherwise.'
551
+ ),
552
+ )
553
+ parser.add_argument(
554
+ '--snipeit-token-env-var',
555
+ type=str,
556
+ default=None,
557
+ help='The name of an environment variable containing token with which to authenticate to SnipeIT.',
558
+ )
559
+ parser.add_argument(
560
+ '--snipeit-tenant-id',
561
+ type=str,
562
+ default=None,
563
+ help='An ID for the SnipeIT tenant.',
564
+ )
565
+
544
566
  return parser
545
567
 
546
568
  def main(self, argv: str) -> int:
@@ -744,6 +766,26 @@ class CLI:
744
766
  else:
745
767
  config.cve_api_key = None
746
768
 
769
+ # SnipeIT config
770
+ if config.snipeit_base_uri:
771
+ if config.snipeit_token_env_var:
772
+ logger.debug(
773
+ "Reading SnipeIT API token from environment variable '%s'.",
774
+ config.snipeit_token_env_var,
775
+ )
776
+ config.snipeit_token = os.environ.get(config.snipeit_token_env_var)
777
+ elif os.environ.get('SNIPEIT_TOKEN'):
778
+ logger.debug(
779
+ "Reading SnipeIT API token from environment variable 'SNIPEIT_TOKEN'.",
780
+ )
781
+ config.snipeit_token = os.environ.get('SNIPEIT_TOKEN')
782
+ else:
783
+ logger.warning("A SnipeIT base URI was provided but a token was not.")
784
+ config.kandji_token = None
785
+ else:
786
+ logger.warning("A SnipeIT base URI was not provided.")
787
+ config.snipeit_base_uri = None
788
+
747
789
  # Run cartography
748
790
  try:
749
791
  return cartography.sync.run_with_config(self.sync, config)
cartography/config.py CHANGED
@@ -111,6 +111,12 @@ class Config:
111
111
  :param duo_api_hostname: The Duo api hostname, e.g. "api-abc123.duosecurity.com". Optional.
112
112
  :param semgrep_app_token: The Semgrep api token. Optional.
113
113
  :type semgrep_app_token: str
114
+ :type snipeit_base_uri: string
115
+ :param snipeit_base_uri: SnipeIT data provider base URI. Optional.
116
+ :type snipeit_token: string
117
+ :param snipeit_token: Token used to authenticate to the SnipeIT data provider. Optional.
118
+ :type snipeit_tenant_id: string
119
+ :param snipeit_tenant_id: An ID for the SnipeIT tenant. Optional.
114
120
  """
115
121
 
116
122
  def __init__(
@@ -170,6 +176,9 @@ class Config:
170
176
  duo_api_secret=None,
171
177
  duo_api_hostname=None,
172
178
  semgrep_app_token=None,
179
+ snipeit_base_uri=None,
180
+ snipeit_token=None,
181
+ snipeit_tenant_id=None,
173
182
  ):
174
183
  self.neo4j_uri = neo4j_uri
175
184
  self.neo4j_user = neo4j_user
@@ -226,3 +235,6 @@ class Config:
226
235
  self.duo_api_secret = duo_api_secret
227
236
  self.duo_api_hostname = duo_api_hostname
228
237
  self.semgrep_app_token = semgrep_app_token
238
+ self.snipeit_base_uri = snipeit_base_uri
239
+ self.snipeit_token = snipeit_token
240
+ self.snipeit_tenant_id = snipeit_tenant_id
@@ -13,47 +13,47 @@
13
13
  },
14
14
  {
15
15
  "__comment__": "not possible to identify if reachable && version specifier is the only flag of the vulnerability (likelihood = rare) && severity in [low, medium, high] -> Risk = Info",
16
- "query": "MATCH (g:GitHubRepository{archived:false})<-[:FOUND_IN]-(s:SemgrepSCAFinding{reachability:'UNKNOWN_EXPOSURE', reachability_check:'VERSION_SPECIFIER', lastupdated:$UPDATE_TAG})<-[:RESOURCE]-(:SemgrepDeployment{id:$DEPLOYMENT_ID}) WHERE s.severity IN ['LOW', 'MEDIUM', 'HIGH'] SET s.reachability_risk = 'INFO' return COUNT(*) as TotalCompleted",
16
+ "query": "MATCH (g:GitHubRepository{archived:false})<-[:FOUND_IN]-(s:SemgrepSCAFinding{reachability:'UNREACHABLE', reachability_check:'NO REACHABILITY ANALYSIS', lastupdated:$UPDATE_TAG})<-[:RESOURCE]-(:SemgrepDeployment{id:$DEPLOYMENT_ID}) WHERE s.severity IN ['LOW', 'MEDIUM', 'HIGH'] SET s.reachability_risk = 'INFO' return COUNT(*) as TotalCompleted",
17
17
  "iterative": false
18
18
  },
19
19
  {
20
20
  "__comment__": "not possible to identify if reachable && version specifier is the only flag of the vulnerability (likelihood = rare) && severity = critical -> Risk = Low",
21
- "query": "MATCH (g:GitHubRepository{archived:false})<-[:FOUND_IN]-(s:SemgrepSCAFinding{reachability:'UNKNOWN_EXPOSURE', reachability_check:'VERSION_SPECIFIER', lastupdated:$UPDATE_TAG})<-[:RESOURCE]-(:SemgrepDeployment{id:$DEPLOYMENT_ID}) WHERE s.severity = 'CRITICAL' SET s.reachability_risk = 'LOW' return COUNT(*) as TotalCompleted",
21
+ "query": "MATCH (g:GitHubRepository{archived:false})<-[:FOUND_IN]-(s:SemgrepSCAFinding{reachability:'UNREACHABLE', reachability_check:'NO REACHABILITY ANALYSIS', lastupdated:$UPDATE_TAG})<-[:RESOURCE]-(:SemgrepDeployment{id:$DEPLOYMENT_ID}) WHERE s.severity = 'CRITICAL' SET s.reachability_risk = 'LOW' return COUNT(*) as TotalCompleted",
22
22
  "iterative": false
23
23
  },
24
24
  {
25
- "__comment__": "manual review required to confirm && version specifier is the only flag of the vulnerability (likelihood = possible) && severity in [low, medium] -> Risk = Low",
26
- "query": "MATCH (g:GitHubRepository{archived:false})<-[:FOUND_IN]-(s:SemgrepSCAFinding{reachability:'REACHABLE', reachability_check:'MANUAL_REVIEW_REACHABLE', lastupdated:$UPDATE_TAG})<-[:RESOURCE]-(:SemgrepDeployment{id:$DEPLOYMENT_ID}) WHERE s.severity IN ['LOW', 'MEDIUM'] SET s.reachability_risk = 'LOW' return COUNT(*) as TotalCompleted",
25
+ "__comment__": "manual review required to confirm exploitation when conditions met && identified version is vulnerable (likelihood = possible) && severity in [low, medium] -> Risk = Low",
26
+ "query": "MATCH (g:GitHubRepository{archived:false})<-[:FOUND_IN]-(s:SemgrepSCAFinding{reachability:'REACHABLE', reachability_check:'CONDITIONALLY REACHABLE', lastupdated:$UPDATE_TAG})<-[:RESOURCE]-(:SemgrepDeployment{id:$DEPLOYMENT_ID}) WHERE s.severity IN ['LOW', 'MEDIUM'] SET s.reachability_risk = 'LOW' return COUNT(*) as TotalCompleted",
27
27
  "iterative": false
28
28
  },
29
29
  {
30
- "__comment__": "manual review required to confirm && version specifier is the only flag of the vulnerability (likelihood = possible) && severity = high -> Risk = Medium",
31
- "query": "MATCH (g:GitHubRepository{archived:false})<-[:FOUND_IN]-(s:SemgrepSCAFinding{reachability:'REACHABLE', reachability_check:'MANUAL_REVIEW_REACHABLE', lastupdated:$UPDATE_TAG})<-[:RESOURCE]-(:SemgrepDeployment{id:$DEPLOYMENT_ID}) WHERE s.severity = 'HIGH' SET s.reachability_risk = 'MEDIUM' return COUNT(*) as TotalCompleted",
30
+ "__comment__": "manual review required to confirm exploitation when conditions met && identified version is vulnerable (likelihood = possible) && severity = high -> Risk = Medium",
31
+ "query": "MATCH (g:GitHubRepository{archived:false})<-[:FOUND_IN]-(s:SemgrepSCAFinding{reachability:'REACHABLE', reachability_check:'CONDITIONALLY REACHABLE', lastupdated:$UPDATE_TAG})<-[:RESOURCE]-(:SemgrepDeployment{id:$DEPLOYMENT_ID}) WHERE s.severity = 'HIGH' SET s.reachability_risk = 'MEDIUM' return COUNT(*) as TotalCompleted",
32
32
  "iterative": false
33
33
  },
34
34
  {
35
- "__comment__": "manual review required to confirm && version specifier is the only flag of the vulnerability (likelihood = possible) && severity = critical -> Risk = High",
36
- "query": "MATCH (g:GitHubRepository{archived:false})<-[:FOUND_IN]-(s:SemgrepSCAFinding{reachability:'REACHABLE', reachability_check:'MANUAL_REVIEW_REACHABLE', lastupdated:$UPDATE_TAG})<-[:RESOURCE]-(:SemgrepDeployment{id:$DEPLOYMENT_ID}) WHERE s.severity = 'CRITICAL' SET s.reachability_risk = 'HIGH' return COUNT(*) as TotalCompleted",
35
+ "__comment__": "manual review required to confirm exploitation when conditions met && identified version is vulnerable (likelihood = possible) && severity = critical -> Risk = High",
36
+ "query": "MATCH (g:GitHubRepository{archived:false})<-[:FOUND_IN]-(s:SemgrepSCAFinding{reachability:'REACHABLE', reachability_check:'CONDITIONALLY REACHABLE', lastupdated:$UPDATE_TAG})<-[:RESOURCE]-(:SemgrepDeployment{id:$DEPLOYMENT_ID}) WHERE s.severity = 'CRITICAL' SET s.reachability_risk = 'HIGH' return COUNT(*) as TotalCompleted",
37
37
  "iterative": false
38
38
  },
39
39
  {
40
40
  "__comment__": "adding the vulnerable version flags it reachable (likelihood = likely) && severity in [low, medium] -> Risk = Low",
41
- "query": "MATCH (g:GitHubRepository{archived:false})<-[:FOUND_IN]-(s:SemgrepSCAFinding{reachability:'REACHABLE', reachability_check:'ALWAYS_REACHABLE', lastupdated:$UPDATE_TAG})<-[:RESOURCE]-(:SemgrepDeployment{id:$DEPLOYMENT_ID}) WHERE s.severity IN ['LOW','MEDIUM'] SET s.reachability_risk = 'LOW' return COUNT(*) as TotalCompleted",
41
+ "query": "MATCH (g:GitHubRepository{archived:false})<-[:FOUND_IN]-(s:SemgrepSCAFinding{reachability:'REACHABLE', reachability_check:'ALWAYS REACHABLE', lastupdated:$UPDATE_TAG})<-[:RESOURCE]-(:SemgrepDeployment{id:$DEPLOYMENT_ID}) WHERE s.severity IN ['LOW','MEDIUM'] SET s.reachability_risk = 'LOW' return COUNT(*) as TotalCompleted",
42
42
  "iterative": false
43
43
  },
44
44
  {
45
- "__comment__": "adding the vulnerable version flags it reachable (likelihood = likely) && severity = high -> Risk = Low",
46
- "query": "MATCH (g:GitHubRepository{archived:false})<-[:FOUND_IN]-(s:SemgrepSCAFinding{reachability:'REACHABLE', reachability_check:'ALWAYS_REACHABLE', lastupdated:$UPDATE_TAG})<-[:RESOURCE]-(:SemgrepDeployment{id:$DEPLOYMENT_ID}) WHERE s.severity = 'HIGH' SET s.reachability_risk = 'MEDIUM' return COUNT(*) as TotalCompleted",
45
+ "__comment__": "adding the vulnerable version flags it reachable (likelihood = likely) && severity = high -> Risk = Medium",
46
+ "query": "MATCH (g:GitHubRepository{archived:false})<-[:FOUND_IN]-(s:SemgrepSCAFinding{reachability:'REACHABLE', reachability_check:'ALWAYS REACHABLE', lastupdated:$UPDATE_TAG})<-[:RESOURCE]-(:SemgrepDeployment{id:$DEPLOYMENT_ID}) WHERE s.severity = 'HIGH' SET s.reachability_risk = 'MEDIUM' return COUNT(*) as TotalCompleted",
47
47
  "iterative": false
48
48
  },
49
49
  {
50
50
  "__comment__": "adding the vulnerable version flags it reachable (special case for critical, if something is so critical that needs to be fixed, likelihood = likely)) && severity = critical -> Risk = Critical",
51
- "query": "MATCH (g:GitHubRepository{archived:false})<-[:FOUND_IN]-(s:SemgrepSCAFinding{reachability:'REACHABLE', reachability_check:'ALWAYS_REACHABLE', lastupdated:$UPDATE_TAG})<-[:RESOURCE]-(:SemgrepDeployment{id:$DEPLOYMENT_ID}) WHERE s.severity = 'CRITICAL' SET s.reachability_risk = 'CRITICAL' return COUNT(*) as TotalCompleted",
51
+ "query": "MATCH (g:GitHubRepository{archived:false})<-[:FOUND_IN]-(s:SemgrepSCAFinding{reachability:'REACHABLE', reachability_check:'ALWAYS REACHABLE', lastupdated:$UPDATE_TAG})<-[:RESOURCE]-(:SemgrepDeployment{id:$DEPLOYMENT_ID}) WHERE s.severity = 'CRITICAL' SET s.reachability_risk = 'CRITICAL' return COUNT(*) as TotalCompleted",
52
52
  "iterative": false
53
53
  },
54
54
  {
55
55
  "__comment__": "if reachability analysis confirmed that it is reachable (likelihood = certain) -> Risk = Severity",
56
- "query": "MATCH (g:GitHubRepository{archived:false})<-[:FOUND_IN]-(s:SemgrepSCAFinding{reachability:'REACHABLE', reachability_check:'REACHABILITY', lastupdated:$UPDATE_TAG})<-[:RESOURCE]-(:SemgrepDeployment{id:$DEPLOYMENT_ID}) SET s.reachability_risk = s.severity return COUNT(*) as TotalCompleted",
56
+ "query": "MATCH (g:GitHubRepository{archived:false})<-[:FOUND_IN]-(s:SemgrepSCAFinding{reachability:'REACHABLE', reachability_check:'REACHABLE', lastupdated:$UPDATE_TAG})<-[:RESOURCE]-(:SemgrepDeployment{id:$DEPLOYMENT_ID}) SET s.reachability_risk = s.severity return COUNT(*) as TotalCompleted",
57
57
  "iterative": false
58
58
  },
59
59
  {
cartography/graph/job.py CHANGED
@@ -150,7 +150,14 @@ class GraphJob:
150
150
  )
151
151
 
152
152
  statements: List[GraphStatement] = [
153
- GraphStatement(query, parameters=parameters, iterative=True, iterationsize=100) for query in queries
153
+ GraphStatement(
154
+ query,
155
+ parameters=parameters,
156
+ iterative=True,
157
+ iterationsize=100,
158
+ parent_job_name=node_schema.label,
159
+ parent_job_sequence_num=idx,
160
+ ) for idx, query in enumerate(queries, start=1)
154
161
  ]
155
162
 
156
163
  return cls(
@@ -3,6 +3,7 @@ from typing import Any
3
3
 
4
4
  import boto3
5
5
  import neo4j
6
+ from botocore.exceptions import ClientError
6
7
 
7
8
  from .util import get_botocore_config
8
9
  from cartography.client.core.tx import load
@@ -17,13 +18,30 @@ logger = logging.getLogger(__name__)
17
18
 
18
19
  @timeit
19
20
  @aws_handle_regions
20
- def get_launch_templates(boto3_session: boto3.session.Session, region: str) -> list[dict[str, Any]]:
21
+ def get_launch_templates(
22
+ boto3_session: boto3.session.Session,
23
+ region: str,
24
+ ) -> tuple[list[dict[str, Any]], list[dict[str, Any]]]:
21
25
  client = boto3_session.client('ec2', region_name=region, config=get_botocore_config())
22
26
  paginator = client.get_paginator('describe_launch_templates')
23
27
  templates: list[dict[str, Any]] = []
28
+ template_versions: list[dict[str, Any]] = []
24
29
  for page in paginator.paginate():
25
- templates.extend(page['LaunchTemplates'])
26
- return templates
30
+ paginated_templates = page['LaunchTemplates']
31
+ for template in paginated_templates:
32
+ template_id = template['LaunchTemplateId']
33
+ try:
34
+ versions = get_launch_template_versions_by_template(boto3_session, template_id, region)
35
+ except ClientError as e:
36
+ logger.warning(
37
+ f"Failed to get launch template versions for {template_id}: {e}",
38
+ exc_info=True,
39
+ )
40
+ versions = []
41
+ # Using a key not defined in latest boto3 documentation
42
+ template_versions.extend(versions)
43
+ templates.extend(paginated_templates)
44
+ return templates, template_versions
27
45
 
28
46
 
29
47
  def transform_launch_templates(templates: list[dict[str, Any]]) -> list[dict[str, Any]]:
@@ -55,17 +73,16 @@ def load_launch_templates(
55
73
 
56
74
  @timeit
57
75
  @aws_handle_regions
58
- def get_launch_template_versions(
76
+ def get_launch_template_versions_by_template(
59
77
  boto3_session: boto3.session.Session,
60
- templates: list[dict[str, Any]],
78
+ template: str,
61
79
  region: str,
62
80
  ) -> list[dict[str, Any]]:
63
81
  client = boto3_session.client('ec2', region_name=region, config=get_botocore_config())
64
82
  v_paginator = client.get_paginator('describe_launch_template_versions')
65
83
  template_versions = []
66
- for template in templates:
67
- for versions in v_paginator.paginate(LaunchTemplateId=template['LaunchTemplateId']):
68
- template_versions.extend(versions['LaunchTemplateVersions'])
84
+ for versions in v_paginator.paginate(LaunchTemplateId=template):
85
+ template_versions.extend(versions['LaunchTemplateVersions'])
69
86
  return template_versions
70
87
 
71
88
 
@@ -136,11 +153,9 @@ def sync_ec2_launch_templates(
136
153
  ) -> None:
137
154
  for region in regions:
138
155
  logger.info(f"Syncing launch templates for region '{region}' in account '{current_aws_account_id}'.")
139
- templates = get_launch_templates(boto3_session, region)
156
+ templates, versions = get_launch_templates(boto3_session, region)
140
157
  templates = transform_launch_templates(templates)
141
158
  load_launch_templates(neo4j_session, templates, region, current_aws_account_id, update_tag)
142
-
143
- versions = get_launch_template_versions(boto3_session, templates, region)
144
159
  versions = transform_launch_template_versions(versions)
145
160
  load_launch_template_versions(neo4j_session, versions, region, current_aws_account_id, update_tag)
146
161
 
@@ -18,6 +18,15 @@ from cartography.util import timeit
18
18
 
19
19
  logger = logging.getLogger(__name__)
20
20
 
21
+ # As of 7/22/24, Inspector is only available in the below regions. We will need to update this hardcoded list here over
22
+ # time. :\ https://docs.aws.amazon.com/general/latest/gr/inspector2.html
23
+ AWS_INSPECTOR_REGIONS = {
24
+ "us-east-2", "us-east-1", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-southeast-3", "ap-south-1",
25
+ "ap-northeast-3", "ap-northeast-2", "ap-southeast-1", "ap-southeast-2", "ap-northeast-1", "ca-central-1",
26
+ "eu-central-1", "eu-west-1", "eu-west-2", "eu-south-1", "eu-west-3", "eu-north-1", "eu-central-2", "me-south-1",
27
+ "sa-east-1",
28
+ }
29
+
21
30
 
22
31
  @timeit
23
32
  @aws_handle_regions
@@ -206,7 +215,9 @@ def sync(
206
215
  update_tag: int,
207
216
  common_job_parameters: Dict[str, Any],
208
217
  ) -> None:
209
- for region in regions:
218
+ inspector_regions = [region for region in regions if region in AWS_INSPECTOR_REGIONS]
219
+
220
+ for region in inspector_regions:
210
221
  logger.info(f"Syncing AWS Inspector findings for account {current_aws_account_id} and region {region}")
211
222
  findings = get_inspector_findings(boto3_session, region, current_aws_account_id)
212
223
  finding_data, package_data = transform_inspector_findings(findings)
@@ -322,8 +322,12 @@ def cleanup_rpr(
322
322
  )
323
323
 
324
324
  statement = GraphStatement(
325
- cleanup_rpr_query_template, {'UPDATE_TAG': update_tag, 'AWS_ID': current_aws_id},
326
- True, 1000,
325
+ cleanup_rpr_query_template,
326
+ {'UPDATE_TAG': update_tag, 'AWS_ID': current_aws_id},
327
+ True,
328
+ 1000,
329
+ parent_job_name=f"{relationship_name}:{node_label}",
330
+ parent_job_sequence_num=1,
327
331
  )
328
332
  statement.run(neo4j_session)
329
333
 
@@ -1,4 +1,6 @@
1
1
  import logging
2
+ from collections import namedtuple
3
+ from time import sleep
2
4
  from typing import Any
3
5
  from typing import Dict
4
6
  from typing import List
@@ -15,6 +17,8 @@ from cartography.util import timeit
15
17
 
16
18
  logger = logging.getLogger(__name__)
17
19
 
20
+ RepoPermission = namedtuple('RepoPermission', ['repo_url', 'permission'])
21
+
18
22
 
19
23
  @timeit
20
24
  def get_teams(org: str, api_url: str, token: str) -> Tuple[PaginatedGraphqlData, Dict[str, Any]]:
@@ -45,26 +49,53 @@ def get_teams(org: str, api_url: str, token: str) -> Tuple[PaginatedGraphqlData,
45
49
 
46
50
  @timeit
47
51
  def _get_team_repos_for_multiple_teams(
48
- team_raw_data: List[Dict[str, Any]],
52
+ team_raw_data: list[dict[str, Any]],
49
53
  org: str,
50
54
  api_url: str,
51
55
  token: str,
52
- ) -> Dict[str, Any]:
53
- result = {}
56
+ ) -> dict[str, list[RepoPermission]]:
57
+ result: dict[str, list[RepoPermission]] = {}
54
58
  for team in team_raw_data:
55
59
  team_name = team['slug']
56
60
  repo_count = team['repositories']['totalCount']
57
61
 
58
- team_repos = _get_team_repos(org, api_url, token, team_name) if repo_count > 0 else None
62
+ if repo_count == 0:
63
+ # This team has access to no repos so let's move on
64
+ result[team_name] = []
65
+ continue
59
66
 
60
67
  repo_urls = []
61
68
  repo_permissions = []
62
- if team_repos:
63
- repo_urls = [t['url'] for t in team_repos.nodes] if team_repos.nodes else []
64
- repo_permissions = [t['permission'] for t in team_repos.edges] if team_repos.edges else []
69
+
70
+ max_tries = 5
71
+
72
+ for current_try in range(1, max_tries + 1):
73
+ team_repos = _get_team_repos(org, api_url, token, team_name)
74
+
75
+ try:
76
+ # The `or []` is because `.nodes` can be None. See:
77
+ # https://docs.github.com/en/graphql/reference/objects#teamrepositoryconnection
78
+ for repo in team_repos.nodes or []:
79
+ repo_urls.append(repo['url'])
80
+
81
+ # The `or []` is because `.edges` can be None.
82
+ for edge in team_repos.edges or []:
83
+ repo_permissions.append(edge['permission'])
84
+ # We're done! Break out of the retry loop.
85
+ break
86
+
87
+ except TypeError:
88
+ # Handles issue #1334
89
+ logger.warning(
90
+ f"GitHub returned None when trying to find repo or permission data for team {team_name}.",
91
+ exc_info=True,
92
+ )
93
+ if current_try == max_tries:
94
+ raise RuntimeError(f"GitHub returned a None repo url for team {team_name}, retries exhausted.")
95
+ sleep(current_try ** 2)
65
96
 
66
97
  # Shape = [(repo_url, 'WRITE'), ...]]
67
- result[team_name] = list(zip(repo_urls, repo_permissions))
98
+ result[team_name] = [RepoPermission(url, perm) for url, perm in zip(repo_urls, repo_permissions)]
68
99
  return result
69
100
 
70
101
 
@@ -114,8 +145,8 @@ def _get_team_repos(org: str, api_url: str, token: str, team: str) -> PaginatedG
114
145
  def transform_teams(
115
146
  team_paginated_data: PaginatedGraphqlData,
116
147
  org_data: Dict[str, Any],
117
- team_repo_data: Dict[str, Any],
118
- ) -> List[Dict[str, Any]]:
148
+ team_repo_data: dict[str, list[RepoPermission]],
149
+ ) -> list[dict[str, Any]]:
119
150
  result = []
120
151
  for team in team_paginated_data.nodes:
121
152
  team_name = team['slug']
@@ -21,7 +21,7 @@ def start_kandji_ingestion(neo4j_session: neo4j.Session, config: Config) -> None
21
21
  """
22
22
  if config.kandji_base_uri is None or config.kandji_token is None or config.kandji_tenant_id is None:
23
23
  logger.warning(
24
- 'Required parameter(s) missing. Skipping sync.',
24
+ 'Required parameter missing. Skipping sync. '
25
25
  'See docs to configure.',
26
26
  )
27
27
  return
@@ -3,10 +3,11 @@ from typing import Any
3
3
  from typing import Dict
4
4
  from typing import List
5
5
  from typing import Tuple
6
- from urllib.error import HTTPError
7
6
 
8
7
  import neo4j
9
8
  import requests
9
+ from requests.exceptions import HTTPError
10
+ from requests.exceptions import ReadTimeout
10
11
 
11
12
  from cartography.client.core.tx import load
12
13
  from cartography.graph.job import GraphJob
@@ -20,6 +21,7 @@ from cartography.util import timeit
20
21
 
21
22
  logger = logging.getLogger(__name__)
22
23
  stat_handler = get_stats_client(__name__)
24
+ _PAGE_SIZE = 500
23
25
  _TIMEOUT = (60, 60)
24
26
  _MAX_RETRIES = 3
25
27
 
@@ -48,60 +50,91 @@ def get_deployment(semgrep_app_token: str) -> Dict[str, Any]:
48
50
 
49
51
 
50
52
  @timeit
51
- def get_sca_vulns(semgrep_app_token: str, deployment_id: str) -> List[Dict[str, Any]]:
53
+ def get_sca_vulns(semgrep_app_token: str, deployment_slug: str) -> List[Dict[str, Any]]:
52
54
  """
53
55
  Gets the SCA vulns associated with the passed Semgrep App token and deployment id.
54
56
  param: semgrep_app_token: The Semgrep App token to use for authentication.
55
- param: deployment_id: The Semgrep deployment id to use for retrieving SCA vulns.
57
+ param: deployment_slug: The Semgrep deployment slug to use for retrieving SCA vulns.
56
58
  """
57
59
  all_vulns = []
58
- sca_url = f"https://semgrep.dev/api/v1/deployments/{deployment_id}/ssc-vulns"
60
+ sca_url = f"https://semgrep.dev/api/v1/deployments/{deployment_slug}/findings"
59
61
  has_more = True
60
- cursor: Dict[str, str] = {}
61
- page = 1
62
+ page = 0
62
63
  retries = 0
63
64
  headers = {
64
65
  "Content-Type": "application/json",
65
66
  "Authorization": f"Bearer {semgrep_app_token}",
66
67
  }
67
68
 
68
- request_data = {
69
- "deploymentId": deployment_id,
70
- "pageSize": 100,
71
- "exposure": ["UNREACHABLE", "REACHABLE", "UNKNOWN_EXPOSURE"],
72
- "refs": ["_default"],
69
+ request_data: dict[str, Any] = {
70
+ "page": page,
71
+ "page_size": _PAGE_SIZE,
72
+ "issue_type": "sca",
73
+ "exposures": "reachable,always_reachable,conditionally_reachable,unreachable,unknown",
74
+ "ref": "_default",
75
+ "dedup": "true",
73
76
  }
74
-
77
+ logger.info(f"Retrieving Semgrep SCA vulns for deployment '{deployment_slug}'.")
75
78
  while has_more:
76
79
 
77
- if cursor:
78
- request_data.update({
79
- "cursor": {
80
- "vulnOffset": cursor["vulnOffset"],
81
- "issueOffset": cursor["issueOffset"],
82
- },
83
- })
84
80
  try:
85
- response = requests.post(sca_url, json=request_data, headers=headers, timeout=_TIMEOUT)
81
+ response = requests.get(sca_url, params=request_data, headers=headers, timeout=_TIMEOUT)
86
82
  response.raise_for_status()
87
83
  data = response.json()
88
- except HTTPError as e:
84
+ except (ReadTimeout, HTTPError) as e:
89
85
  logger.warning(f"Failed to retrieve Semgrep SCA vulns for page {page}. Retrying...")
90
86
  retries += 1
91
87
  if retries >= _MAX_RETRIES:
92
88
  raise e
93
89
  continue
94
- vulns = data["vulns"]
95
- cursor = data.get("cursor")
96
- has_more = data.get("hasMore", False)
90
+ vulns = data["findings"]
91
+ has_more = len(vulns) > 0
97
92
  if page % 10 == 0:
98
- logger.info(f"Processed {page} pages of Semgrep SCA vulnerabilities so far.")
93
+ logger.info(f"Processed page {page} of Semgrep SCA vulnerabilities.")
99
94
  all_vulns.extend(vulns)
100
95
  retries = 0
96
+ page += 1
97
+ request_data["page"] = page
101
98
 
99
+ logger.info(f"Retrieved {len(all_vulns)} Semgrep SCA vulns in {page} pages.")
102
100
  return all_vulns
103
101
 
104
102
 
103
+ def _get_vuln_class(vuln: Dict) -> str:
104
+ vulnerability_classes = vuln["rule"].get("vulnerability_classes", [])
105
+ if vulnerability_classes:
106
+ return vulnerability_classes[0]
107
+ return "Other"
108
+
109
+
110
+ def _determine_exposure(vuln: Dict[str, Any]) -> str | None:
111
+ # See Semgrep reachability types:
112
+ # https://semgrep.dev/docs/semgrep-supply-chain/overview#types-of-semgrep-supply-chain-findings
113
+ reachability_types = {
114
+ "NO REACHABILITY ANALYSIS": 2,
115
+ "UNREACHABLE": 2,
116
+ "REACHABLE": 0,
117
+ "ALWAYS REACHABLE": 0,
118
+ "CONDITIONALLY REACHABLE": 1,
119
+ }
120
+ reachable_flag = vuln["reachability"]
121
+ if reachable_flag and reachable_flag.upper() in reachability_types:
122
+ reach_score = reachability_types[reachable_flag.upper()]
123
+ if reach_score < reachability_types["UNREACHABLE"]:
124
+ return "REACHABLE"
125
+ else:
126
+ return "UNREACHABLE"
127
+ return None
128
+
129
+
130
+ def _build_vuln_url(vuln: str) -> str | None:
131
+ if 'CVE' in vuln:
132
+ return f"https://nvd.nist.gov/vuln/detail/{vuln}"
133
+ if 'GHSA' in vuln:
134
+ return f"https://github.com/advisories/{vuln}"
135
+ return None
136
+
137
+
105
138
  def transform_sca_vulns(raw_vulns: List[Dict[str, Any]]) -> Tuple[List[Dict[str, Any]], List[Dict[str, str]]]:
106
139
  """
107
140
  Transforms the raw SCA vulns response from Semgrep API into a list of dicts
@@ -112,46 +145,59 @@ def transform_sca_vulns(raw_vulns: List[Dict[str, Any]]) -> Tuple[List[Dict[str,
112
145
  for vuln in raw_vulns:
113
146
  sca_vuln: Dict[str, Any] = {}
114
147
  # Mandatory fields
115
- sca_vuln["id"] = vuln["groupKey"]
116
- sca_vuln["repositoryName"] = vuln["repositoryName"]
117
- sca_vuln["ruleId"] = vuln["advisory"]["ruleId"]
118
- sca_vuln["title"] = vuln["advisory"]["title"]
119
- sca_vuln["description"] = vuln["advisory"]["description"]
120
- sca_vuln["ecosystem"] = vuln["advisory"]["ecosystem"]
121
- sca_vuln["severity"] = vuln["advisory"]["severity"]
122
- sca_vuln["reachability"] = vuln["advisory"]["reachability"]
123
- sca_vuln["reachableIf"] = vuln["advisory"]["reachableIf"]
124
- sca_vuln["exposureType"] = vuln["exposureType"]
125
- dependency = f"{vuln['matchedDependency']['name']}|{vuln['matchedDependency']['versionSpecifier']}"
148
+ repository_name = vuln["repository"]["name"]
149
+ rule_id = vuln["rule"]["name"]
150
+ vulnerability_class = _get_vuln_class(vuln)
151
+ package = vuln['found_dependency']['package']
152
+ sca_vuln["id"] = vuln["id"]
153
+ sca_vuln["repositoryName"] = repository_name
154
+ sca_vuln["branch"] = vuln["ref"]
155
+ sca_vuln["ruleId"] = rule_id
156
+ sca_vuln["title"] = package + ":" + vulnerability_class
157
+ sca_vuln["description"] = vuln["rule"]["message"]
158
+ sca_vuln["ecosystem"] = vuln["found_dependency"]["ecosystem"]
159
+ sca_vuln["severity"] = vuln["severity"].upper()
160
+ sca_vuln["reachability"] = vuln["reachability"].upper()  # Check done to determine reachability
161
+ sca_vuln["reachableIf"] = vuln["reachable_condition"].upper() if vuln["reachable_condition"] else None
162
+ sca_vuln["exposureType"] = _determine_exposure(vuln)  # Determines if reachable or unreachable
163
+ dependency = f"{package}|{vuln['found_dependency']['version']}"
126
164
  sca_vuln["matchedDependency"] = dependency
127
- sca_vuln["dependencyFileLocation_path"] = vuln["dependencyFileLocation"]["path"]
128
- sca_vuln["dependencyFileLocation_url"] = vuln["dependencyFileLocation"]["url"]
129
- # Optional fields
130
- sca_vuln["transitivity"] = vuln.get("transitivity", None)
131
- cves = vuln.get("advisory", {}).get("references", {}).get("cveIds")
132
- if len(cves) > 0:
133
- # Take the first CVE
134
- sca_vuln["cveId"] = vuln["advisory"]["references"]["cveIds"][0]
135
- if vuln.get('closestSafeDependency'):
136
- dep_fix = f"{vuln['closestSafeDependency']['name']}|{vuln['closestSafeDependency']['versionSpecifier']}"
165
+ dep_url = vuln["found_dependency"]["lockfile_line_url"]
166
+ if dep_url: # Lock file can be null, need to set
167
+ dep_file = dep_url.split("/")[-1].split("#")[0]
168
+ sca_vuln["dependencyFileLocation_path"] = dep_file
169
+ sca_vuln["dependencyFileLocation_url"] = dep_url
170
+ else:
171
+ if sca_vuln.get("location"):
172
+ sca_vuln["dependencyFileLocation_path"] = sca_vuln["location"]["file_path"]
173
+ sca_vuln["transitivity"] = vuln["found_dependency"]["transitivity"].upper()
174
+ if vuln.get("vulnerability_identifier"):
175
+ vuln_id = vuln["vulnerability_identifier"].upper()
176
+ sca_vuln["cveId"] = vuln_id
177
+ sca_vuln["ref_urls"] = [_build_vuln_url(vuln_id)]
178
+ if vuln.get('fix_recommendations') and len(vuln['fix_recommendations']) > 0:
179
+ fix = vuln['fix_recommendations'][0]
180
+ dep_fix = f"{fix['package']}|{fix['version']}"
137
181
  sca_vuln["closestSafeDependency"] = dep_fix
138
- if vuln["advisory"].get("references", {}).get("urls", []):
139
- sca_vuln["ref_urls"] = vuln["advisory"].get("references", {}).get("urls", [])
140
- sca_vuln["openedAt"] = vuln.get("openedAt", None)
141
- sca_vuln["announcedAt"] = vuln.get("announcedAt", None)
142
- sca_vuln["fixStatus"] = vuln["triage"]["status"]
143
- for usage in vuln.get("usages", []):
182
+ sca_vuln["openedAt"] = vuln["created_at"]
183
+ sca_vuln["fixStatus"] = vuln["status"]
184
+ sca_vuln["triageStatus"] = vuln["triage_state"]
185
+ sca_vuln["confidence"] = vuln["confidence"]
186
+ usage = vuln.get("usage")
187
+ if usage:
144
188
  usage_dict = {}
189
+ url = usage["location"]["url"]
145
190
  usage_dict["SCA_ID"] = sca_vuln["id"]
146
- usage_dict["findingId"] = usage["findingId"]
191
+ usage_dict["findingId"] = hash(url.split("github.com/")[-1])
147
192
  usage_dict["path"] = usage["location"]["path"]
148
- usage_dict["startLine"] = usage["location"]["startLine"]
149
- usage_dict["startCol"] = usage["location"]["startCol"]
150
- usage_dict["endLine"] = usage["location"]["endLine"]
151
- usage_dict["endCol"] = usage["location"]["endCol"]
152
- usage_dict["url"] = usage["location"]["url"]
193
+ usage_dict["startLine"] = usage["location"]["start_line"]
194
+ usage_dict["startCol"] = usage["location"]["start_col"]
195
+ usage_dict["endLine"] = usage["location"]["end_line"]
196
+ usage_dict["endCol"] = usage["location"]["end_col"]
197
+ usage_dict["url"] = url
153
198
  usages.append(usage_dict)
154
199
  vulns.append(sca_vuln)
200
+
155
201
  return vulns, usages
156
202
 
157
203
 
@@ -228,9 +274,10 @@ def sync(
228
274
  logger.info("Running Semgrep SCA findings sync job.")
229
275
  semgrep_deployment = get_deployment(semgrep_app_token)
230
276
  deployment_id = semgrep_deployment["id"]
277
+ deployment_slug = semgrep_deployment["slug"]
231
278
  load_semgrep_deployment(neo4j_sesion, semgrep_deployment, update_tag)
232
279
  common_job_parameters["DEPLOYMENT_ID"] = deployment_id
233
- raw_vulns = get_sca_vulns(semgrep_app_token, deployment_id)
280
+ raw_vulns = get_sca_vulns(semgrep_app_token, deployment_slug)
234
281
  vulns, usages = transform_sca_vulns(raw_vulns)
235
282
  load_semgrep_sca_vulns(neo4j_sesion, vulns, deployment_id, update_tag)
236
283
  load_semgrep_sca_usages(neo4j_sesion, usages, deployment_id, update_tag)
@@ -0,0 +1,30 @@
1
+ import logging
2
+
3
+ import neo4j
4
+
5
+ from cartography.config import Config
6
+ from cartography.intel.snipeit import asset
7
+ from cartography.intel.snipeit import user
8
+ from cartography.stats import get_stats_client
9
+ from cartography.util import timeit
10
+
11
+ logger = logging.getLogger(__name__)
12
+ stat_handler = get_stats_client(__name__)
13
+
14
+
15
@timeit
def start_snipeit_ingestion(neo4j_session: neo4j.Session, config: Config) -> None:
    """Entry point for the SnipeIT sync: ingest users and assets into Neo4j.

    Skips the whole sync (with a warning) unless the base URI, API token and
    tenant id are all present on the config.

    :param neo4j_session: Neo4j session to write to.
    :param config: Cartography config carrying snipeit_base_uri,
        snipeit_token and snipeit_tenant_id.
    """
    required_settings = (
        config.snipeit_base_uri,
        config.snipeit_token,
        config.snipeit_tenant_id,
    )
    if any(setting is None for setting in required_settings):
        logger.warning(
            "Required parameter(s) missing. Skipping sync.",
        )
        return

    common_job_parameters = {
        "UPDATE_TAG": config.update_tag,
        "TENANT_ID": config.snipeit_tenant_id,
    }

    # Ingest SnipeIT users and assets
    user.sync(neo4j_session, common_job_parameters, config.snipeit_base_uri, config.snipeit_token)
    asset.sync(neo4j_session, common_job_parameters, config.snipeit_base_uri, config.snipeit_token)
@@ -0,0 +1,74 @@
1
+ import logging
2
+ from typing import Any
3
+ from typing import Dict
4
+ from typing import List
5
+
6
+ import neo4j
7
+
8
+ from .util import call_snipeit_api
9
+ from cartography.client.core.tx import load
10
+ from cartography.graph.job import GraphJob
11
+ from cartography.models.snipeit.asset import SnipeitAssetSchema
12
+ from cartography.models.snipeit.tenant import SnipeitTenantSchema
13
+ from cartography.util import timeit
14
+
15
+
16
+ logger = logging.getLogger(__name__)
17
+
18
+
19
@timeit
def get(base_uri: str, token: str) -> List[Dict]:
    """Fetch every hardware asset from the SnipeIT API, following pagination.

    :param base_uri: Base URI of the SnipeIT instance.
    :param token: SnipeIT API bearer token.
    :return: List of raw asset dicts as returned by the API.
    """
    api_endpoint = "/api/v1/hardware"
    results: List[Dict[str, Any]] = []
    while True:
        offset = len(results)
        # Build each page URL from the constant endpoint path. The previous
        # code reassigned api_endpoint itself, so every request after the
        # first page appended a second "?order=...&offset=..." query string
        # to the already-built URL, corrupting the request.
        paginated_endpoint = f"{api_endpoint}?order='asc'&offset={offset}"
        response = call_snipeit_api(paginated_endpoint, base_uri, token)

        rows = response['rows']
        if not rows:
            # Defensive: if the API reports a 'total' larger than the rows it
            # actually returns, stop instead of looping forever.
            break
        results.extend(rows)

        total = response['total']
        if len(results) >= total:
            break

    return results
35
+
36
+
37
@timeit
def load_assets(
    neo4j_session: neo4j.Session,
    common_job_parameters: Dict,
    data: List[Dict[str, Any]],
) -> None:
    """Load SnipeIT assets into the graph, attached to their tenant node.

    :param neo4j_session: Neo4j session to write to.
    :param common_job_parameters: Must contain 'UPDATE_TAG' and 'TENANT_ID'.
    :param data: Raw asset dicts from the SnipeIT API.
    """
    update_tag = common_job_parameters["UPDATE_TAG"]
    tenant_id = common_job_parameters["TENANT_ID"]

    # Create the SnipeIT Tenant
    load(
        neo4j_session,
        SnipeitTenantSchema(),
        [{'id': tenant_id}],
        lastupdated=update_tag,
    )

    # Load the assets themselves, scoped to the tenant.
    load(
        neo4j_session,
        SnipeitAssetSchema(),
        data,
        lastupdated=update_tag,
        TENANT_ID=tenant_id,
    )
58
+
59
+
60
@timeit
def cleanup(neo4j_session: neo4j.Session, common_job_parameters: Dict) -> None:
    """Remove SnipeitAsset nodes/relationships not touched by the current sync."""
    cleanup_job = GraphJob.from_node_schema(SnipeitAssetSchema(), common_job_parameters)
    cleanup_job.run(neo4j_session)
63
+
64
+
65
@timeit
def sync(
    neo4j_session: neo4j.Session,
    common_job_parameters: Dict,
    base_uri: str,
    token: str,
) -> None:
    """Fetch, load and clean up SnipeIT assets: the full asset sync pipeline.

    :param neo4j_session: Neo4j session to write to.
    :param common_job_parameters: Must contain 'UPDATE_TAG' and 'TENANT_ID'.
    :param base_uri: Base URI of the SnipeIT instance.
    :param token: SnipeIT API bearer token.
    """
    assets = get(base_uri, token)
    load_assets(neo4j_session, common_job_parameters, assets)
    cleanup(neo4j_session, common_job_parameters)
@@ -0,0 +1,75 @@
1
+ import logging
2
+ from typing import Any
3
+ from typing import Dict
4
+ from typing import List
5
+
6
+ import neo4j
7
+
8
+ from .util import call_snipeit_api
9
+ from cartography.client.core.tx import load
10
+ from cartography.graph.job import GraphJob
11
+ from cartography.models.snipeit.tenant import SnipeitTenantSchema
12
+ from cartography.models.snipeit.user import SnipeitUserSchema
13
+ from cartography.util import timeit
14
+
15
+ logger = logging.getLogger(__name__)
16
+
17
+
18
@timeit
def get(base_uri: str, token: str) -> List[Dict]:
    """Fetch every user from the SnipeIT API, following pagination.

    :param base_uri: Base URI of the SnipeIT instance.
    :param token: SnipeIT API bearer token.
    :return: List of raw user dicts as returned by the API.
    """
    api_endpoint = "/api/v1/users"
    results: List[Dict[str, Any]] = []
    while True:
        offset = len(results)
        # Build each page URL from the constant endpoint path. The previous
        # code reassigned api_endpoint itself, so every request after the
        # first page appended a second "?order=...&offset=..." query string
        # to the already-built URL, corrupting the request.
        paginated_endpoint = f"{api_endpoint}?order='asc'&offset={offset}"
        response = call_snipeit_api(paginated_endpoint, base_uri, token)

        rows = response['rows']
        if not rows:
            # Defensive: if the API reports a 'total' larger than the rows it
            # actually returns, stop instead of looping forever.
            break
        results.extend(rows)

        total = response['total']
        if len(results) >= total:
            break

    return results
34
+
35
+
36
@timeit
def load_users(
    neo4j_session: neo4j.Session,
    common_job_parameters: Dict,
    data: List[Dict[str, Any]],
) -> None:
    """Load SnipeIT users into the graph, attached to their tenant node.

    :param neo4j_session: Neo4j session to write to.
    :param common_job_parameters: Must contain 'UPDATE_TAG' and 'TENANT_ID'.
    :param data: Raw user dicts from the SnipeIT API.
    """
    if data:
        # Log a sample record to aid debugging. Guarded so that an empty
        # result set no longer raises IndexError (the previous code indexed
        # data[0] unconditionally).
        logger.debug(data[0])

    # Create the SnipeIT Tenant
    load(
        neo4j_session,
        SnipeitTenantSchema(),
        [{'id': common_job_parameters["TENANT_ID"]}],
        lastupdated=common_job_parameters["UPDATE_TAG"],
    )

    # Load the users themselves, scoped to the tenant.
    load(
        neo4j_session,
        SnipeitUserSchema(),
        data,
        lastupdated=common_job_parameters["UPDATE_TAG"],
        TENANT_ID=common_job_parameters["TENANT_ID"],
    )
59
+
60
+
61
@timeit
def cleanup(neo4j_session: neo4j.Session, common_job_parameters: Dict) -> None:
    """Remove SnipeitUser nodes/relationships not touched by the current sync."""
    cleanup_job = GraphJob.from_node_schema(SnipeitUserSchema(), common_job_parameters)
    cleanup_job.run(neo4j_session)
64
+
65
+
66
@timeit
def sync(
    neo4j_session: neo4j.Session,
    common_job_parameters: Dict,
    base_uri: str,
    token: str,
) -> None:
    """Fetch, load and clean up SnipeIT users: the full user sync pipeline.

    :param neo4j_session: Neo4j session to write to.
    :param common_job_parameters: Must contain 'UPDATE_TAG' and 'TENANT_ID'.
    :param base_uri: Base URI of the SnipeIT instance.
    :param token: SnipeIT API bearer token.
    """
    users = get(base_uri, token)
    load_users(neo4j_session, common_job_parameters, users)
    cleanup(neo4j_session, common_job_parameters)
@@ -0,0 +1,35 @@
1
+ import logging
2
+ from typing import Any
3
+ from typing import Dict
4
+
5
+ import requests
6
+
7
+ from cartography.util import timeit
8
+
9
+ logger = logging.getLogger(__name__)
10
+ # Connect and read timeouts of 60 seconds each; see https://requests.readthedocs.io/en/master/user/advanced/#timeouts
11
+ _TIMEOUT = (60, 60)
12
+
13
+
14
@timeit
def call_snipeit_api(api_and_parameters: str, base_uri: str, token: str) -> Dict[str, Any]:
    """GET a SnipeIT API endpoint and return the decoded JSON body.

    :param api_and_parameters: Endpoint path plus query string, e.g.
        "/api/v1/hardware?offset=0".
    :param base_uri: Base URI of the SnipeIT instance.
    :param token: SnipeIT API bearer token.
    :raises requests.exceptions.Timeout: re-raised after logging a warning.
    :raises requests.exceptions.HTTPError: via raise_for_status() on non-2xx.
    """
    uri = base_uri + api_and_parameters
    request_headers = {
        'Accept': 'application/json',
        'Authorization': f'Bearer {token}',
    }
    try:
        logger.debug("SnipeIT: Get %s", uri)
        response = requests.get(uri, headers=request_headers, timeout=_TIMEOUT)
    except requests.exceptions.Timeout:
        # Add context and re-raise for callers to handle
        logger.warning(f"SnipeIT: requests.get('{uri}') timed out.")
        raise
    # if call failed, use requests library to raise an exception
    response.raise_for_status()
    return response.json()
@@ -17,6 +17,7 @@ class SemgrepSCAFindingNodeProperties(CartographyNodeProperties):
17
17
  lastupdated: PropertyRef = PropertyRef('lastupdated', set_in_kwargs=True)
18
18
  rule_id: PropertyRef = PropertyRef('ruleId', extra_index=True)
19
19
  repository: PropertyRef = PropertyRef('repositoryName', extra_index=True)
20
+ branch: PropertyRef = PropertyRef('branch')
20
21
  summary: PropertyRef = PropertyRef('title', extra_index=True)
21
22
  description: PropertyRef = PropertyRef('description')
22
23
  package_manager: PropertyRef = PropertyRef('ecosystem')
@@ -32,8 +33,9 @@ class SemgrepSCAFindingNodeProperties(CartographyNodeProperties):
32
33
  dependency_file: PropertyRef = PropertyRef('dependencyFileLocation_path', extra_index=True)
33
34
  dependency_file_url: PropertyRef = PropertyRef('dependencyFileLocation_url', extra_index=True)
34
35
  scan_time: PropertyRef = PropertyRef('openedAt')
35
- published_time: PropertyRef = PropertyRef('announcedAt')
36
36
  fix_status: PropertyRef = PropertyRef('fixStatus')
37
+ triage_status: PropertyRef = PropertyRef('triageStatus')
38
+ confidence: PropertyRef = PropertyRef('confidence')
37
39
 
38
40
 
39
41
  @dataclass(frozen=True)
File without changes
@@ -0,0 +1,81 @@
1
+ from dataclasses import dataclass
2
+
3
+ from cartography.models.core.common import PropertyRef
4
+ from cartography.models.core.nodes import CartographyNodeProperties
5
+ from cartography.models.core.nodes import CartographyNodeSchema
6
+ from cartography.models.core.relationships import CartographyRelProperties
7
+ from cartography.models.core.relationships import CartographyRelSchema
8
+ from cartography.models.core.relationships import LinkDirection
9
+ from cartography.models.core.relationships import make_target_node_matcher
10
+ from cartography.models.core.relationships import OtherRelationships
11
+ from cartography.models.core.relationships import TargetNodeMatcher
12
+
13
+
14
@dataclass(frozen=True)
class SnipeitAssetNodeProperties(CartographyNodeProperties):
    """
    Node properties for a SnipeIT hardware asset.
    https://snipe-it.readme.io/reference/hardware-list
    """
    # Common properties
    id: PropertyRef = PropertyRef('id')
    lastupdated: PropertyRef = PropertyRef('lastupdated', set_in_kwargs=True)

    # SnipeIT specific properties
    asset_tag: PropertyRef = PropertyRef('asset_tag')
    # NOTE(review): dotted paths ('assigned_to.email', '<obj>.name') assume the
    # loader resolves nested keys in the raw API dicts -- TODO confirm.
    assigned_to: PropertyRef = PropertyRef('assigned_to.email')
    category: PropertyRef = PropertyRef('category.name')
    company: PropertyRef = PropertyRef('company.name')
    manufacturer: PropertyRef = PropertyRef('manufacturer.name')
    model: PropertyRef = PropertyRef('model.name')
    # Extra index: assets are commonly looked up by serial number.
    serial: PropertyRef = PropertyRef('serial', extra_index=True)
31
+
32
+
33
+ ###
34
+ # (:SnipeitAsset)<-[:ASSET]-(:SnipeitTenant)
35
+ ###
36
+ @dataclass(frozen=True)
37
+ class SnipeitTenantToSnipeitAssetRelProperties(CartographyRelProperties):
38
+ lastupdated: PropertyRef = PropertyRef('lastupdated', set_in_kwargs=True)
39
+
40
+
41
+ @dataclass(frozen=True)
42
+ class SnipeitTenantToSnipeitAssetRel(CartographyRelSchema):
43
+ target_node_label: str = 'SnipeitTenant'
44
+ target_node_matcher: TargetNodeMatcher = make_target_node_matcher(
45
+ {'id': PropertyRef('TENANT_ID', set_in_kwargs=True)},
46
+ )
47
+ direction: LinkDirection = LinkDirection.INWARD
48
+ rel_label: str = "HAS_ASSET"
49
+ properties: SnipeitTenantToSnipeitAssetRelProperties = SnipeitTenantToSnipeitAssetRelProperties()
50
+
51
+
52
+ ###
53
+ # (:SnipeitUser)-[:HAS_CHECKED_OUT]->(:SnipeitAsset)
54
+ ###
55
@dataclass(frozen=True)
class SnipeitUserToSnipeitAssetProperties(CartographyRelProperties):
    # Audit stamp on the HAS_CHECKED_OUT relationship; supplied via kwargs at load time.
    lastupdated: PropertyRef = PropertyRef('lastupdated', set_in_kwargs=True)
58
+
59
+
60
@dataclass(frozen=True)
class SnipeitUserToSnipeitAssetRel(CartographyRelSchema):
    # (:SnipeitUser)-[:HAS_CHECKED_OUT]->(:SnipeitAsset)
    target_node_label: str = 'SnipeitUser'
    # Match the user by the asset's 'assigned_to.email' field; assets with no
    # assignee simply won't match a user node.
    target_node_matcher: TargetNodeMatcher = make_target_node_matcher(
        {'email': PropertyRef('assigned_to.email')},
    )
    direction: LinkDirection = LinkDirection.INWARD
    rel_label: str = "HAS_CHECKED_OUT"
    properties: SnipeitUserToSnipeitAssetProperties = SnipeitUserToSnipeitAssetProperties()
69
+
70
+
71
+ ###
72
@dataclass(frozen=True)
class SnipeitAssetSchema(CartographyNodeSchema):
    label: str = 'SnipeitAsset'  # The label of the node
    properties: SnipeitAssetNodeProperties = SnipeitAssetNodeProperties()  # An object representing all properties
    # Tenant ownership scopes cleanup: stale assets are removed per-tenant.
    sub_resource_relationship: SnipeitTenantToSnipeitAssetRel = SnipeitTenantToSnipeitAssetRel()
    other_relationships: OtherRelationships = OtherRelationships(
        [
            SnipeitUserToSnipeitAssetRel(),
        ],
    )
@@ -0,0 +1,17 @@
1
+ from dataclasses import dataclass
2
+
3
+ from cartography.models.core.common import PropertyRef
4
+ from cartography.models.core.nodes import CartographyNodeProperties
5
+ from cartography.models.core.nodes import CartographyNodeSchema
6
+
7
+
8
@dataclass(frozen=True)
class SnipeitTenantNodeProperties(CartographyNodeProperties):
    # Tenant id comes from the operator-supplied --snipeit-tenant-id value.
    id: PropertyRef = PropertyRef('id')
    lastupdated: PropertyRef = PropertyRef('lastupdated', set_in_kwargs=True)
12
+
13
+
14
@dataclass(frozen=True)
class SnipeitTenantSchema(CartographyNodeSchema):
    """Top-level tenant node that owns all SnipeIT users and assets."""
    label: str = 'SnipeitTenant'  # The label of the node
    properties: SnipeitTenantNodeProperties = SnipeitTenantNodeProperties()  # An object representing all properties
@@ -0,0 +1,49 @@
1
+ from dataclasses import dataclass
2
+
3
+ from cartography.models.core.common import PropertyRef
4
+ from cartography.models.core.nodes import CartographyNodeProperties
5
+ from cartography.models.core.nodes import CartographyNodeSchema
6
+ from cartography.models.core.relationships import CartographyRelProperties
7
+ from cartography.models.core.relationships import CartographyRelSchema
8
+ from cartography.models.core.relationships import LinkDirection
9
+ from cartography.models.core.relationships import make_target_node_matcher
10
+ from cartography.models.core.relationships import TargetNodeMatcher
11
+
12
+
13
@dataclass(frozen=True)
class SnipeitUserNodeProperties(CartographyNodeProperties):
    """
    Node properties for a SnipeIT user.
    Ref: https://snipe-it.readme.io/reference/users
    """
    # Common properties
    id: PropertyRef = PropertyRef('id')
    lastupdated: PropertyRef = PropertyRef('lastupdated', set_in_kwargs=True)

    # SnipeIT specific properties
    # NOTE(review): the asset schema reads 'company.name' while this reads
    # 'company_id.name' -- verify which key the users API actually returns.
    company: PropertyRef = PropertyRef('company_id.name', extra_index=True)
    # Extra index: SnipeitUserToSnipeitAssetRel matches users by email.
    email: PropertyRef = PropertyRef('email', extra_index=True)
    username: PropertyRef = PropertyRef('username')
26
+
27
+
28
@dataclass(frozen=True)
class SnipeitTenantToSnipeitUserRelProperties(CartographyRelProperties):
    # Audit stamp on the HAS_USER relationship; supplied via kwargs at load time.
    lastupdated: PropertyRef = PropertyRef('lastupdated', set_in_kwargs=True)
31
+
32
+
33
# (:SnipeitTenant)-[:HAS_USER]->(:SnipeitUser)
@dataclass(frozen=True)
class SnipeitTenantToSnipeitUserRel(CartographyRelSchema):
    target_node_label: str = 'SnipeitTenant'
    # Match the tenant by the TENANT_ID supplied in common job parameters.
    target_node_matcher: TargetNodeMatcher = make_target_node_matcher(
        {'id': PropertyRef('TENANT_ID', set_in_kwargs=True)},
    )
    direction: LinkDirection = LinkDirection.INWARD
    rel_label: str = "HAS_USER"
    properties: SnipeitTenantToSnipeitUserRelProperties = SnipeitTenantToSnipeitUserRelProperties()
43
+
44
+
45
@dataclass(frozen=True)
class SnipeitUserSchema(CartographyNodeSchema):
    label: str = 'SnipeitUser'  # The label of the node
    properties: SnipeitUserNodeProperties = SnipeitUserNodeProperties()  # An object representing all properties
    # Tenant ownership scopes cleanup: stale users are removed per-tenant.
    sub_resource_relationship: SnipeitTenantToSnipeitUserRel = SnipeitTenantToSnipeitUserRel()
cartography/sync.py CHANGED
@@ -30,6 +30,7 @@ import cartography.intel.lastpass
30
30
  import cartography.intel.oci
31
31
  import cartography.intel.okta
32
32
  import cartography.intel.semgrep
33
+ import cartography.intel.snipeit
33
34
  from cartography.config import Config
34
35
  from cartography.stats import set_stats_client
35
36
  from cartography.util import STATUS_FAILURE
@@ -57,6 +58,7 @@ TOP_LEVEL_MODULES = OrderedDict({ # preserve order so that the default sync alw
57
58
  'bigfix': cartography.intel.bigfix.start_bigfix_ingestion,
58
59
  'duo': cartography.intel.duo.start_duo_ingestion,
59
60
  'semgrep': cartography.intel.semgrep.start_semgrep_ingestion,
61
+ 'snipeit': cartography.intel.snipeit.start_snipeit_ingestion,
60
62
  'analysis': cartography.intel.analysis.run,
61
63
  })
62
64
 
cartography/util.py CHANGED
@@ -225,7 +225,7 @@ If not, then the AWS datatype somehow does not have this key.''',
225
225
  return items
226
226
 
227
227
 
228
- AWSGetFunc = TypeVar('AWSGetFunc', bound=Callable[..., List])
228
+ AWSGetFunc = TypeVar('AWSGetFunc', bound=Callable[..., Iterable])
229
229
 
230
230
  # fix for AWS TooManyRequestsException
231
231
  # https://github.com/lyft/cartography/issues/297
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: cartography
3
- Version: 0.93.0rc1
3
+ Version: 0.94.0
4
4
  Summary: Explore assets and their relationships across your technical infrastructure.
5
5
  Home-page: https://www.github.com/lyft/cartography
6
6
  Maintainer: Lyft
@@ -1,11 +1,11 @@
1
1
  cartography/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
2
  cartography/__main__.py,sha256=JftXT_nUPkqcEh8uxCCT4n-OyHYqbldEgrDS-4ygy0U,101
3
- cartography/cli.py,sha256=ot9_gMxw5_irVS7KYfWf5HIr2Xkb10RDEbOTY1nzUcw,31787
4
- cartography/config.py,sha256=rL1zgxZO47_R7S6E9e0CwxmhzRSN0X_q93NtcPR1G00,11368
3
+ cartography/cli.py,sha256=MpS6JwnRZKPfXIxD-cV--bDLjgLqQjFzmIWNzX--ZcE,33385
4
+ cartography/config.py,sha256=5E_YlWUdqBg94cUcugsRkihjEn-RxagG8M3lBliTiQA,11966
5
5
  cartography/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
6
6
  cartography/stats.py,sha256=dbybb9V2FuvSuHjjNwz6Vjwnd1hap2C7h960rLoKcl8,4406
7
- cartography/sync.py,sha256=a80r_IzrZcWGSmRDRrxkesNYPiOuLte5YHvDQT3L-Lw,9730
8
- cartography/util.py,sha256=F3FPMJl1KDW0x_5cvt2ZGI0Dv1LVrHU7Az4OleAANBI,14474
7
+ cartography/sync.py,sha256=5mUuo1Kr1_yVFSikWYY8sxXk-Ii5k1e8eqivMFdnkks,9829
8
+ cartography/util.py,sha256=umfnjX8jVLu0rpYA75X-WvRpYzHQxns9qZiPwfyAlwQ,14478
9
9
  cartography/client/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
10
10
  cartography/client/aws/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
11
11
  cartography/client/aws/iam.py,sha256=dYsGikc36DEsSeR2XVOVFFUDwuU9yWj_EVkpgVYCFgM,1293
@@ -118,7 +118,7 @@ cartography/data/jobs/cleanup/okta_groups_cleanup.json,sha256=cBI3f_okl4pnVH48L1
118
118
  cartography/data/jobs/cleanup/okta_import_cleanup.json,sha256=4XQwYpY9vITLhnLpijMVa5PxO0Tm38CcMydnbPdQPm0,3798
119
119
  cartography/data/jobs/cleanup/pagerduty_import_cleanup.json,sha256=RJqG_Uw_QEGTer_-s2IuZ3a2kykhUcCdDNZu0S7SEB4,4457
120
120
  cartography/data/jobs/scoped_analysis/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
121
- cartography/data/jobs/scoped_analysis/semgrep_sca_risk_analysis.json,sha256=Nlx4xRISmn_RQjVoRO1qAc2KtkiGy8i4mUB1NBPjCVc,6451
121
+ cartography/data/jobs/scoped_analysis/semgrep_sca_risk_analysis.json,sha256=eIYxbl5TdgVzN8En2JozWoyKAiIh3Dp8wUMkTDPGZY0,6485
122
122
  cartography/driftdetect/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
123
123
  cartography/driftdetect/__main__.py,sha256=Sz24Kxy5x6RC3GQEkuUDXzjOV3SvlHVkZdvPl1GLl5E,125
124
124
  cartography/driftdetect/add_shortcut.py,sha256=COtcCW9T0ss-bP1B2y9gEk3kN6HA01kkurSiDBNLzco,2377
@@ -135,7 +135,7 @@ cartography/driftdetect/util.py,sha256=Lqxv8QoFn3_3Fz18qCOjkjJ6yBwgrHjrxXmArBAEd
135
135
  cartography/graph/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
136
136
  cartography/graph/cleanupbuilder.py,sha256=87vFrOJo66hOrrqeNwXp18WrNQEheHTlZko9KUkXWhY,8021
137
137
  cartography/graph/context.py,sha256=RGxGb8EnxowcqjR0nFF86baNhgRHeUF9wjIoFUoG8LU,1230
138
- cartography/graph/job.py,sha256=VBKc0VLbDz1zm5jslF49nbPbQS7DkdQwfPG7rdLSc1w,7288
138
+ cartography/graph/job.py,sha256=RZWsbNhHuJlcSpw4C73ZuovRTp7kGrcm3X9yUH8vT1Q,7488
139
139
  cartography/graph/querybuilder.py,sha256=MMXzUEg4td-YmHMNM97KAqDZ6-1wNClO2jmJoG47BTY,20108
140
140
  cartography/graph/statement.py,sha256=VsqG46ty_Mm87fr8YdIwfr6a82OUXU7yZe6S-Py9hZg,5345
141
141
  cartography/intel/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -153,11 +153,11 @@ cartography/intel/aws/elasticache.py,sha256=fCI47aDFmIDyE26GiReKYb6XIZUwrzcvsXBQ
153
153
  cartography/intel/aws/elasticsearch.py,sha256=ZL7MkXF_bXRSoXuDSI1dwGckRLG2zDB8LuAD07vSLnE,8374
154
154
  cartography/intel/aws/emr.py,sha256=xhWBVZngxJRFjMEDxwq3G6SgytRGLq0v2a_CeDvByR0,3372
155
155
  cartography/intel/aws/iam.py,sha256=eLw0NkBGKzCI_tQ3wmrx3aUibQerrsxKJd3d0RCKcKQ,32374
156
- cartography/intel/aws/inspector.py,sha256=6enCu2USefuGT3FqA0Vto6i-z4BrL2HC_clbiXSLIlo,8654
156
+ cartography/intel/aws/inspector.py,sha256=S22ZgRKEnmnBTJ-u0rodqRPB7_LkSIek47NeBxN4XJw,9336
157
157
  cartography/intel/aws/kms.py,sha256=bZUzMxAH_DsAcGTJBs08gg2tLKYu-QWjvMvV9C-6v50,11731
158
158
  cartography/intel/aws/lambda_function.py,sha256=KKTyn53xpaMI9WvIqxmsOASFwflHt-2_5ow-zUFc2wg,9890
159
159
  cartography/intel/aws/organizations.py,sha256=HaQZ3J5XF15BuykuDypqFORDYpnoHuRRr4DuceewH4s,4485
160
- cartography/intel/aws/permission_relationships.py,sha256=PhOnag0a1gZHtUg82546MKhj-8IcGJ7wLbvPASUBXlg,14792
160
+ cartography/intel/aws/permission_relationships.py,sha256=IarV9gt5BaplZ5TPo_mfypt9bTKfT9qDtqC3Ob89qGI,14904
161
161
  cartography/intel/aws/rds.py,sha256=vnlNYmrO2Cc0PNn31CeG2QwYhwjVosbQFE9Ol1vQyLE,25252
162
162
  cartography/intel/aws/redshift.py,sha256=KOqiXIllHmtPTeaNGl-cX4srY5pFE6o12j8MQ5-zWpc,6694
163
163
  cartography/intel/aws/resourcegroupstaggingapi.py,sha256=aq4kPF6t8QZZoTxdkQVLXH65Di41CDJVM9llJNe6iaY,10278
@@ -175,7 +175,7 @@ cartography/intel/aws/ec2/images.py,sha256=heElwHJGqVD3iUJjxwA_Sdc3CmE4HPs00CTMH
175
175
  cartography/intel/aws/ec2/instances.py,sha256=mnTjdBY-4D-TGhH29UrSaLUW0Uft0JApDIJkkLz4zPc,12170
176
176
  cartography/intel/aws/ec2/internet_gateways.py,sha256=dI-4-85_3DGGZZBcY_DN6XqESx9P26S6jKok314lcnQ,2883
177
177
  cartography/intel/aws/ec2/key_pairs.py,sha256=SvRgd56vE4eouvTSNoFK8PP8HYoECO91goxc36oq_FY,2508
178
- cartography/intel/aws/ec2/launch_templates.py,sha256=YhLh_O2cZbeoKXA6MXmwkxJJi0ubZb5FyouTYykuq1k,5372
178
+ cartography/intel/aws/ec2/launch_templates.py,sha256=aeqaL8On38ET8nM8bISsIXLy6PkZoV-tqSWG38YXgkI,6010
179
179
  cartography/intel/aws/ec2/load_balancer_v2s.py,sha256=95FfQQn740gexINIHDJizOM4OKzRtQT_y2XQMipQ5Dg,8661
180
180
  cartography/intel/aws/ec2/load_balancers.py,sha256=1GwErzGqi3BKCARqfGJcD_r_D84rFKVy5kNMas9jAok,6756
181
181
  cartography/intel/aws/ec2/network_interfaces.py,sha256=CzF8PooCYUQ2pk8DR8JDAhkWRUQSBj_27OsIfkL_-Cs,9199
@@ -230,7 +230,7 @@ cartography/intel/gcp/gke.py,sha256=qaTwsVaxkwNhW5_Mw4bedOk7fgJK8y0LwwcYlUABXDg,
230
230
  cartography/intel/gcp/storage.py,sha256=oO_ayEhkXlj2Gn7T5MU41ZXiqwRwe6Ud4wzqyRTsyf4,9075
231
231
  cartography/intel/github/__init__.py,sha256=y876JJGTDJZEOFCDiNCJfcLNxN24pVj4s2N0YmuuoaE,1914
232
232
  cartography/intel/github/repos.py,sha256=YPDdBMk6NkZjwPcqPW5LlCy_OS9tKcrZD6ygiUG93J0,21766
233
- cartography/intel/github/teams.py,sha256=mofyJeJVOD7Umh9Rq6QnAwom9bBHBx18kyvFMvQX5YE,5383
233
+ cartography/intel/github/teams.py,sha256=aXI-XbxlA1IDaAUX0XSdEt6pA2n4ew5j_doj1iNYCDM,6618
234
234
  cartography/intel/github/users.py,sha256=kQp0dxzP08DVrdvfVeCciQbrKPbbFvwbR_p_I_XGt7s,3826
235
235
  cartography/intel/github/util.py,sha256=K6hbxypy4luKhIE1Uh5VWZc9OyjMK2OuO00vBAQfloA,8049
236
236
  cartography/intel/gsuite/__init__.py,sha256=AGIUskGlLCVGHbnQicNpNWi9AvmV7_7hUKTt-hsB2J8,4306
@@ -238,7 +238,7 @@ cartography/intel/gsuite/api.py,sha256=J0dkNdfBVMrEv8vvStQu7YKVxXSyV45WueFhUS4aO
238
238
  cartography/intel/jamf/__init__.py,sha256=Nof-LrUeevoieo6oP2GyfTwx8k5TUIgreW6hSj53YjQ,419
239
239
  cartography/intel/jamf/computers.py,sha256=EfjlupQ-9HYTjOrmuwrGuJDy9ApAnJvk8WrYcp6_Jkk,1673
240
240
  cartography/intel/jamf/util.py,sha256=EAyP8VpOY2uAvW3HtX6r7qORNjGa1Tr3fuqezuLQ0j4,1017
241
- cartography/intel/kandji/__init__.py,sha256=OHZJNzuNibIfJ51OkL3XL2EdA_ZmvPHPeWCQUld4J64,1079
241
+ cartography/intel/kandji/__init__.py,sha256=Y38bVRmrGVJRy0mSof8xU-cuEyJ7N_oI7KekYjYyuiQ,1076
242
242
  cartography/intel/kandji/devices.py,sha256=j_rP6rQ5VPT_XEcGXx7Yt6eCOm1Oe3I2qWIxXODXEcA,2224
243
243
  cartography/intel/kubernetes/__init__.py,sha256=jaOTEanWnTrYvcBN1XUC5oqBhz1AJbFmzoT9uu_VBSg,1481
244
244
  cartography/intel/kubernetes/namespaces.py,sha256=6o-FgAX_Ai5NCj2xOWM-RNWEvn0gZjVQnZSGCJlcIhw,2710
@@ -271,7 +271,11 @@ cartography/intel/pagerduty/teams.py,sha256=aRubUXgEVVReyLrXAX_be1E_QBJv3Qlr4n77
271
271
  cartography/intel/pagerduty/users.py,sha256=oltGssxrnzYsV6QTGP1SsPoA1rCUDStj6vGlGWY695g,1623
272
272
  cartography/intel/pagerduty/vendors.py,sha256=WlDHExrWRBegDQKtxBV5nJiYgwoTLxNee4HrQDJ-Pdg,1559
273
273
  cartography/intel/semgrep/__init__.py,sha256=94vjdszGEosvXiKtYWKD34BRKwRbJxlBO1PZcKdxnFA,619
274
- cartography/intel/semgrep/findings.py,sha256=hbH_wL1XJDZDDrbIV_FjPv4A7oS2xM_hhMAbZlRm9po,9025
274
+ cartography/intel/semgrep/findings.py,sha256=9MSbDFrRUqb5nkEWN0R9Fx57RJMt27-9obpIHXNd45Y,10836
275
+ cartography/intel/snipeit/__init__.py,sha256=0uIh8NbuI7IbfgaOrPHg4Nfm1yO6mTRC_qaFiIjR2FA,992
276
+ cartography/intel/snipeit/asset.py,sha256=KkGRUgIydvf_6SHtgpVLT-TjtEGz029SrOaoh0qDW6E,1997
277
+ cartography/intel/snipeit/user.py,sha256=hm9v_p29bphHtGe9LKVo1FD_rQcbCigrCRf8YsmteXA,1971
278
+ cartography/intel/snipeit/util.py,sha256=fXlzdFQXm01Oaa2REYNN7x3y3k2l3zCVhf_BxcRUELY,1040
275
279
  cartography/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
276
280
  cartography/models/aws/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
277
281
  cartography/models/aws/emr.py,sha256=TkuwoZnw_VHbJ5bwkac7-ZfwSLe_TeK3gxkuwGQOUk4,3037
@@ -330,12 +334,16 @@ cartography/models/lastpass/tenant.py,sha256=TG-9LFo9Sfzb9UgcTt_gFVTKocLItbgQMMP
330
334
  cartography/models/lastpass/user.py,sha256=SMTTYN6jgccc9k76hY3rVImElJOhHhZ9f1aZ6JzcrHw,3487
331
335
  cartography/models/semgrep/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
332
336
  cartography/models/semgrep/deployment.py,sha256=or5qZDuR51MXzINpH15jZrqmSUvXQevCNYWJ7D6v-JI,745
333
- cartography/models/semgrep/findings.py,sha256=xrn8sgXpNMrNJbKQagaAVxaCG9bVjTATSRR2XRBR4rg,5386
337
+ cartography/models/semgrep/findings.py,sha256=RPd-QzvP38fbTIqFARx6XpcZSsd5JM3KIg-ZlJA7NlE,5490
334
338
  cartography/models/semgrep/locations.py,sha256=kSk7Nn5Mn4Ob84MVZOo2GR0YFi-9Okq9pgA3FfC6_bk,3061
335
- cartography-0.93.0rc1.dist-info/LICENSE,sha256=489ZXeW9G90up6ep-D1n-lJgk9ciNT2yxXpFgRSidtk,11341
336
- cartography-0.93.0rc1.dist-info/METADATA,sha256=1r_kEpkQWTPR77ToFbTkU78iBSTCM9JuQdleKFo3dJE,1991
337
- cartography-0.93.0rc1.dist-info/NOTICE,sha256=YOGAsjFtbyKj5tslYIg6V5jEYRuEvnSsIuDOUKj0Qj4,97
338
- cartography-0.93.0rc1.dist-info/WHEEL,sha256=Z4pYXqR_rTB7OWNDYFOm1qRk0RX6GFP2o8LgvP453Hk,91
339
- cartography-0.93.0rc1.dist-info/entry_points.txt,sha256=GVIAWD0o0_K077qMA_k1oZU4v-M0a8GLKGJR8tZ-qH8,112
340
- cartography-0.93.0rc1.dist-info/top_level.txt,sha256=BHqsNJQiI6Q72DeypC1IINQJE59SLhU4nllbQjgJi9g,12
341
- cartography-0.93.0rc1.dist-info/RECORD,,
339
+ cartography/models/snipeit/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
340
+ cartography/models/snipeit/asset.py,sha256=FyRAaeXuZjMy0eUQcSDFcgEAF5lbLMlvqp1Tv9d3Lv4,3238
341
+ cartography/models/snipeit/tenant.py,sha256=p4rFnpNNuF1W5ilGBbexDaETWTwavfb38RcQGoImkQI,679
342
+ cartography/models/snipeit/user.py,sha256=MsB4MiCVNTH6JpESime7cOkB89autZOXQpL6Z0l7L6o,2113
343
+ cartography-0.94.0.dist-info/LICENSE,sha256=489ZXeW9G90up6ep-D1n-lJgk9ciNT2yxXpFgRSidtk,11341
344
+ cartography-0.94.0.dist-info/METADATA,sha256=9v4HeSznhEKST3TK3YKaipeJ75pxCunQLCTqqmerloI,1988
345
+ cartography-0.94.0.dist-info/NOTICE,sha256=YOGAsjFtbyKj5tslYIg6V5jEYRuEvnSsIuDOUKj0Qj4,97
346
+ cartography-0.94.0.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
347
+ cartography-0.94.0.dist-info/entry_points.txt,sha256=GVIAWD0o0_K077qMA_k1oZU4v-M0a8GLKGJR8tZ-qH8,112
348
+ cartography-0.94.0.dist-info/top_level.txt,sha256=BHqsNJQiI6Q72DeypC1IINQJE59SLhU4nllbQjgJi9g,12
349
+ cartography-0.94.0.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (70.3.0)
2
+ Generator: setuptools (75.1.0)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5