ethyca-fides 2.56.3b0__py2.py3-none-any.whl → 2.56.3b2__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (107)
  1. {ethyca_fides-2.56.3b0.dist-info → ethyca_fides-2.56.3b2.dist-info}/METADATA +1 -2
  2. {ethyca_fides-2.56.3b0.dist-info → ethyca_fides-2.56.3b2.dist-info}/RECORD +106 -102
  3. fides/_version.py +3 -3
  4. fides/api/alembic/migrations/versions/1152c1717849_fix_monitorexecution_started_default.py +57 -0
  5. fides/api/alembic/migrations/versions/69ad6d844e21_add_comments_and_comment_references.py +84 -0
  6. fides/api/alembic/migrations/versions/6ea2171c544f_change_attachment_storage_key_to_.py +77 -0
  7. fides/api/custom_types.py +24 -1
  8. fides/api/db/base.py +1 -0
  9. fides/api/models/attachment.py +109 -49
  10. fides/api/models/comment.py +109 -0
  11. fides/api/models/detection_discovery.py +4 -2
  12. fides/api/service/connectors/query_configs/saas_query_config.py +21 -15
  13. fides/api/service/storage/storage_uploader_service.py +4 -10
  14. fides/api/task/graph_task.py +84 -43
  15. fides/api/tasks/storage.py +106 -15
  16. fides/api/util/aws_util.py +19 -0
  17. fides/api/util/collection_util.py +117 -0
  18. fides/api/util/consent_util.py +20 -5
  19. fides/api/util/saas_util.py +32 -56
  20. fides/config/security_settings.py +7 -13
  21. fides/data/language/languages.yml +2 -0
  22. fides/ui-build/static/admin/404.html +1 -1
  23. fides/ui-build/static/admin/_next/static/chunks/pages/{_app-54e64129c5f3042a.js → _app-3b7bbcdb61d952e7.js} +1 -1
  24. fides/ui-build/static/admin/_next/static/chunks/pages/{index-c9fa68dc0fa42c81.js → index-94e6d589c4edf360.js} +1 -1
  25. fides/ui-build/static/admin/_next/static/chunks/pages/settings/consent-4769f55b138073f7.js +1 -0
  26. fides/ui-build/static/admin/_next/static/{o6oSu0mrMicc3b7f8nyq5 → n4uO6TqGfiKHQ-X5XYkoy}/_buildManifest.js +1 -1
  27. fides/ui-build/static/admin/add-systems/manual.html +1 -1
  28. fides/ui-build/static/admin/add-systems/multiple.html +1 -1
  29. fides/ui-build/static/admin/add-systems.html +1 -1
  30. fides/ui-build/static/admin/ant-poc.html +1 -1
  31. fides/ui-build/static/admin/consent/configure/add-vendors.html +1 -1
  32. fides/ui-build/static/admin/consent/configure.html +1 -1
  33. fides/ui-build/static/admin/consent/privacy-experience/[id].html +1 -1
  34. fides/ui-build/static/admin/consent/privacy-experience/new.html +1 -1
  35. fides/ui-build/static/admin/consent/privacy-experience.html +1 -1
  36. fides/ui-build/static/admin/consent/privacy-notices/[id].html +1 -1
  37. fides/ui-build/static/admin/consent/privacy-notices/new.html +1 -1
  38. fides/ui-build/static/admin/consent/privacy-notices.html +1 -1
  39. fides/ui-build/static/admin/consent/properties.html +1 -1
  40. fides/ui-build/static/admin/consent/reporting.html +1 -1
  41. fides/ui-build/static/admin/consent.html +1 -1
  42. fides/ui-build/static/admin/data-catalog/[systemId]/projects/[projectUrn]/[resourceUrn].html +1 -1
  43. fides/ui-build/static/admin/data-catalog/[systemId]/projects/[projectUrn].html +1 -1
  44. fides/ui-build/static/admin/data-catalog/[systemId]/projects.html +1 -1
  45. fides/ui-build/static/admin/data-catalog/[systemId]/resources/[resourceUrn].html +1 -1
  46. fides/ui-build/static/admin/data-catalog/[systemId]/resources.html +1 -1
  47. fides/ui-build/static/admin/data-catalog.html +1 -1
  48. fides/ui-build/static/admin/data-discovery/action-center/[monitorId]/[systemId].html +1 -1
  49. fides/ui-build/static/admin/data-discovery/action-center/[monitorId].html +1 -1
  50. fides/ui-build/static/admin/data-discovery/action-center.html +1 -1
  51. fides/ui-build/static/admin/data-discovery/activity.html +1 -1
  52. fides/ui-build/static/admin/data-discovery/detection/[resourceUrn].html +1 -1
  53. fides/ui-build/static/admin/data-discovery/detection.html +1 -1
  54. fides/ui-build/static/admin/data-discovery/discovery/[resourceUrn].html +1 -1
  55. fides/ui-build/static/admin/data-discovery/discovery.html +1 -1
  56. fides/ui-build/static/admin/datamap.html +1 -1
  57. fides/ui-build/static/admin/dataset/[datasetId]/[collectionName]/[...subfieldNames].html +1 -1
  58. fides/ui-build/static/admin/dataset/[datasetId]/[collectionName].html +1 -1
  59. fides/ui-build/static/admin/dataset/[datasetId].html +1 -1
  60. fides/ui-build/static/admin/dataset/new.html +1 -1
  61. fides/ui-build/static/admin/dataset.html +1 -1
  62. fides/ui-build/static/admin/datastore-connection/[id].html +1 -1
  63. fides/ui-build/static/admin/datastore-connection/new.html +1 -1
  64. fides/ui-build/static/admin/datastore-connection.html +1 -1
  65. fides/ui-build/static/admin/index.html +1 -1
  66. fides/ui-build/static/admin/integrations/[id].html +1 -1
  67. fides/ui-build/static/admin/integrations.html +1 -1
  68. fides/ui-build/static/admin/lib/fides-ext-gpp.js +1 -1
  69. fides/ui-build/static/admin/lib/fides-headless.js +1 -1
  70. fides/ui-build/static/admin/lib/fides-tcf.js +3 -3
  71. fides/ui-build/static/admin/lib/fides.js +2 -2
  72. fides/ui-build/static/admin/login/[provider].html +1 -1
  73. fides/ui-build/static/admin/login.html +1 -1
  74. fides/ui-build/static/admin/messaging/[id].html +1 -1
  75. fides/ui-build/static/admin/messaging/add-template.html +1 -1
  76. fides/ui-build/static/admin/messaging.html +1 -1
  77. fides/ui-build/static/admin/privacy-requests/[id].html +1 -1
  78. fides/ui-build/static/admin/privacy-requests/configure/messaging.html +1 -1
  79. fides/ui-build/static/admin/privacy-requests/configure/storage.html +1 -1
  80. fides/ui-build/static/admin/privacy-requests/configure.html +1 -1
  81. fides/ui-build/static/admin/privacy-requests.html +1 -1
  82. fides/ui-build/static/admin/properties/[id].html +1 -1
  83. fides/ui-build/static/admin/properties/add-property.html +1 -1
  84. fides/ui-build/static/admin/properties.html +1 -1
  85. fides/ui-build/static/admin/reporting/datamap.html +1 -1
  86. fides/ui-build/static/admin/settings/about.html +1 -1
  87. fides/ui-build/static/admin/settings/consent.html +1 -1
  88. fides/ui-build/static/admin/settings/custom-fields.html +1 -1
  89. fides/ui-build/static/admin/settings/domain-records.html +1 -1
  90. fides/ui-build/static/admin/settings/domains.html +1 -1
  91. fides/ui-build/static/admin/settings/email-templates.html +1 -1
  92. fides/ui-build/static/admin/settings/locations.html +1 -1
  93. fides/ui-build/static/admin/settings/organization.html +1 -1
  94. fides/ui-build/static/admin/settings/regulations.html +1 -1
  95. fides/ui-build/static/admin/systems/configure/[id]/test-datasets.html +1 -1
  96. fides/ui-build/static/admin/systems/configure/[id].html +1 -1
  97. fides/ui-build/static/admin/systems.html +1 -1
  98. fides/ui-build/static/admin/taxonomy.html +1 -1
  99. fides/ui-build/static/admin/user-management/new.html +1 -1
  100. fides/ui-build/static/admin/user-management/profile/[id].html +1 -1
  101. fides/ui-build/static/admin/user-management.html +1 -1
  102. fides/ui-build/static/admin/_next/static/chunks/pages/settings/consent-815497f4f12600ec.js +0 -1
  103. {ethyca_fides-2.56.3b0.dist-info → ethyca_fides-2.56.3b2.dist-info}/LICENSE +0 -0
  104. {ethyca_fides-2.56.3b0.dist-info → ethyca_fides-2.56.3b2.dist-info}/WHEEL +0 -0
  105. {ethyca_fides-2.56.3b0.dist-info → ethyca_fides-2.56.3b2.dist-info}/entry_points.txt +0 -0
  106. {ethyca_fides-2.56.3b0.dist-info → ethyca_fides-2.56.3b2.dist-info}/top_level.txt +0 -0
  107. /fides/ui-build/static/admin/_next/static/{o6oSu0mrMicc3b7f8nyq5 → n4uO6TqGfiKHQ-X5XYkoy}/_ssgManifest.js +0 -0
fides/api/task/graph_task.py
@@ -36,7 +36,8 @@ from fides.api.models.connectionconfig import (
  ConnectionType,
  )
  from fides.api.models.datasetconfig import DatasetConfig
- from fides.api.models.policy import Policy
+ from fides.api.models.policy import Policy, Rule
+ from fides.api.models.privacy_preference import PrivacyPreferenceHistory
  from fides.api.models.privacy_request import ExecutionLog, PrivacyRequest, RequestTask
  from fides.api.schemas.policy import ActionType, CurrentStep
  from fides.api.schemas.privacy_request import ExecutionLogStatus
@@ -54,7 +55,9 @@ from fides.api.util.collection_util import (
  make_immutable,
  make_mutable,
  )
- from fides.api.util.consent_util import add_errored_system_status_for_consent_reporting
+ from fides.api.util.consent_util import (
+ add_errored_system_status_for_consent_reporting_on_preferences,
+ )
  from fides.api.util.logger import Pii
  from fides.api.util.logger_context_utils import LoggerContextKeys
  from fides.api.util.saas_util import FIDESOPS_GROUPED_INPUTS
@@ -138,13 +141,7 @@ def retry(
  self.resources.request.id,
  )
  self.log_skipped(action_type, exc)
- for pref in self.resources.request.privacy_preferences:
- # For consent reporting, also caching the given system as skipped for all historical privacy preferences.
- pref.cache_system_status(
- self.resources.session,
- self.connector.configuration.system_key,
- ExecutionLogStatus.skipped,
- )
+ self.cache_system_status_for_preferences()
  return default_return
  except BaseException as ex: # pylint: disable=W0703
  traceback.print_exc()
@@ -164,11 +161,7 @@ def retry(
  action_type.value
  ] # Convert ActionType into a CurrentStep, no longer coerced with Pydantic V2
  )
- add_errored_system_status_for_consent_reporting(
- self.resources.session,
- self.resources.request,
- self.connector.configuration,
- )
+ self.add_error_status_for_consent_reporting()
  if not self.request_task.id:
  # TODO Remove when we stop support for DSR 2.0
  # Re-raise to stop privacy request execution on failure for
@@ -730,6 +723,48 @@ class GraphTask(ABC): # pylint: disable=too-many-instance-attributes
  self.log_end(ActionType.consent)
  return output

+ def cache_system_status_for_preferences(self) -> None:
+ """
+ Calls cache_system_status for all historical privacy preferences for the given request.
+
+ Purposely uses a new session.
+ """
+
+ privacy_request_id = self.resources.request.id
+
+ with get_db() as db:
+
+ privacy_preferences = db.query(PrivacyPreferenceHistory).filter(
+ PrivacyPreferenceHistory.privacy_request_id == privacy_request_id
+ )
+ for pref in privacy_preferences:
+ # For consent reporting, also caching the given system as skipped for all historical privacy preferences.
+ pref.cache_system_status(
+ db,
+ self.connector.configuration.system_key, # type: ignore[arg-type]
+ ExecutionLogStatus.skipped,
+ )
+
+ def add_error_status_for_consent_reporting(self) -> None:
+ """
+ Adds the errored system status for all historical privacy preferences for the given request that
+ are deemed relevant for the connector failure (i.e if they had a "pending" log added to them).
+
+ Purposely uses a new session.
+ """
+ privacy_request_id = self.resources.request.id
+ with get_db() as db:
+ privacy_preferences = (
+ db.query(PrivacyPreferenceHistory)
+ .filter(
+ PrivacyPreferenceHistory.privacy_request_id == privacy_request_id
+ )
+ .all()
+ )
+ add_errored_system_status_for_consent_reporting_on_preferences(
+ db, privacy_preferences, self.connector.configuration
+ )
+

  def collect_queries(
  traversal: Traversal, resources: TaskResources
@@ -816,39 +851,45 @@ def build_affected_field_logs(
  }]
  """

- targeted_field_paths: Dict[FieldAddress, str] = {}
+ policy_id = policy.id

- for rule in policy.rules: # type: ignore[attr-defined]
- if rule.action_type != action_type:
- continue
- rule_categories: List[str] = rule.get_target_data_categories()
- if not rule_categories:
- continue
+ with get_db() as db:

- collection_categories: Dict[
- str, List[FieldPath]
- ] = node.collection.field_paths_by_category # type: ignore
- for rule_cat in rule_categories:
- for collection_cat, field_paths in collection_categories.items():
- if collection_cat.startswith(rule_cat):
- targeted_field_paths.update(
- {
- node.address.field_address(field_path): collection_cat
- for field_path in field_paths
- }
- )
+ rules = db.query(Rule).filter(Rule.policy_id == policy_id)

- ret: List[Dict[str, Any]] = []
- for field_address, data_categories in targeted_field_paths.items():
- ret.append(
- {
- "path": field_address.value,
- "field_name": field_address.field_path.string_path,
- "data_categories": [data_categories],
- }
- )
+ targeted_field_paths: Dict[FieldAddress, str] = {}
+
+ for rule in rules: # type: ignore[attr-defined]
+ if rule.action_type != action_type:
+ continue
+ rule_categories: List[str] = rule.get_target_data_categories()
+ if not rule_categories:
+ continue
+
+ collection_categories: Dict[
+ str, List[FieldPath]
+ ] = node.collection.field_paths_by_category # type: ignore
+ for rule_cat in rule_categories:
+ for collection_cat, field_paths in collection_categories.items():
+ if collection_cat.startswith(rule_cat):
+ targeted_field_paths.update(
+ {
+ node.address.field_address(field_path): collection_cat
+ for field_path in field_paths
+ }
+ )
+
+ ret: List[Dict[str, Any]] = []
+ for field_address, data_categories in targeted_field_paths.items():
+ ret.append(
+ {
+ "path": field_address.value,
+ "field_name": field_address.field_path.string_path,
+ "data_categories": [data_categories],
+ }
+ )

- return ret
+ return ret


  def build_consent_dataset_graph(datasets: List[DatasetConfig]) -> DatasetGraph:
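Note on the graph_task.py hunks above: the new GraphTask helpers and the reworked build_affected_field_logs all open a fresh session via get_db() and run explicit queries (PrivacyPreferenceHistory by privacy_request_id, Rule by policy_id) instead of iterating relationship attributes such as request.privacy_preferences or policy.rules on objects tied to the task's existing session. The sketch below shows only that shape in isolation; the PreferenceHistory model, its system_status column, the cache_skipped_status helper, and the in-memory SQLite database are hypothetical stand-ins, not Fides code.

from contextlib import contextmanager
from typing import Iterator

from sqlalchemy import Column, String, create_engine
from sqlalchemy.orm import Session, declarative_base, sessionmaker

Base = declarative_base()
engine = create_engine("sqlite://")  # throwaway in-memory database for the sketch
SessionLocal = sessionmaker(bind=engine)


class PreferenceHistory(Base):
    """Hypothetical stand-in for PrivacyPreferenceHistory, reduced to the columns the sketch needs."""

    __tablename__ = "preference_history"
    id = Column(String, primary_key=True)
    privacy_request_id = Column(String, index=True)
    system_status = Column(String, default="pending")


@contextmanager
def get_db() -> Iterator[Session]:
    """Stand-in for the get_db() context manager used above: a fresh session, always closed."""
    db = SessionLocal()
    try:
        yield db
        db.commit()
    finally:
        db.close()


def cache_skipped_status(privacy_request_id: str, system_key: str) -> None:
    """Same shape as cache_system_status_for_preferences: new session, explicit query, per-row update."""
    with get_db() as db:
        preferences = db.query(PreferenceHistory).filter(
            PreferenceHistory.privacy_request_id == privacy_request_id
        )
        for preference in preferences:
            preference.system_status = f"{system_key}: skipped"


if __name__ == "__main__":
    Base.metadata.create_all(engine)
    with get_db() as db:
        db.add_all(
            [
                PreferenceHistory(id="1", privacy_request_id="pr_1"),
                PreferenceHistory(id="2", privacy_request_id="pr_2"),
            ]
        )
    cache_skipped_status("pr_1", "system_a")
    with get_db() as db:
        print([(p.id, p.system_status) for p in db.query(PreferenceHistory).all()])
    # expected roughly: [('1', 'system_a: skipped'), ('2', 'pending')]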
fides/api/tasks/storage.py
@@ -5,20 +5,20 @@ import os
  import secrets
  import zipfile
  from io import BytesIO
- from typing import Any, Dict, Optional, Set, Union
+ from typing import Any, Dict, Optional, Union

  import pandas as pd
  from botocore.exceptions import ClientError, ParamValidationError
+ from fideslang.validation import AnyHttpUrlString
  from loguru import logger

  from fides.api.cryptography.cryptographic_util import bytes_to_b64_str
- from fides.api.graph.graph import DataCategoryFieldMapping
  from fides.api.models.privacy_request import PrivacyRequest
  from fides.api.schemas.storage.storage import ResponseFormat, StorageSecrets
  from fides.api.service.privacy_request.dsr_package.dsr_report_builder import (
  DsrReportBuilder,
  )
- from fides.api.util.aws_util import get_aws_session
+ from fides.api.util.aws_util import get_s3_client
  from fides.api.util.cache import get_cache, get_encryption_cache_key
  from fides.api.util.encryption.aes_gcm_encryption_scheme import (
  encrypt_to_bytes_verify_secrets_length,
@@ -101,7 +101,9 @@ def write_to_in_memory_buffer(
  raise NotImplementedError(f"No handling for response format {resp_format}.")


- def create_presigned_url_for_s3(s3_client: Any, bucket_name: str, file_key: str) -> str:
+ def create_presigned_url_for_s3(
+ s3_client: Any, bucket_name: str, file_key: str
+ ) -> AnyHttpUrlString:
  """ "Generate a presigned URL to share an S3 object

  :param s3_client: s3 base client
@@ -119,23 +121,108 @@ def create_presigned_url_for_s3(s3_client: Any, bucket_name: str, file_key: str)
  return response


+ def generic_upload_to_s3( # pylint: disable=R0913
+ storage_secrets: Dict[StorageSecrets, Any],
+ bucket_name: str,
+ file_key: str,
+ auth_method: str,
+ document: bytes,
+ ) -> Optional[AnyHttpUrlString]:
+ """Uploads arbitrary data to s3 returned from an access request"""
+ logger.info("Starting S3 Upload of {}", file_key)
+
+ try:
+ s3_client = get_s3_client(auth_method, storage_secrets)
+ try:
+ s3_client.put_object(Bucket=bucket_name, Key=file_key, Body=document)
+ except Exception as e:
+ logger.error("Encountered error while uploading s3 object: {}", e)
+ raise e
+
+ presigned_url: AnyHttpUrlString = create_presigned_url_for_s3(
+ s3_client, bucket_name, file_key
+ )
+
+ return presigned_url
+ except ClientError as e:
+ logger.error(
+ "Encountered error while uploading and generating link for s3 object: {}", e
+ )
+ raise e
+ except ParamValidationError as e:
+ raise ValueError(f"The parameters you provided are incorrect: {e}")
+
+
+ def generic_retrieve_from_s3(
+ storage_secrets: Dict[StorageSecrets, Any],
+ bucket_name: str,
+ file_key: str,
+ auth_method: str,
+ ) -> Optional[bytes]:
+ """Retrieves arbitrary data from s3"""
+ logger.info("Starting S3 Retrieve of {}", file_key)
+
+ try:
+ s3_client = get_s3_client(auth_method, storage_secrets)
+ try:
+ response = s3_client.get_object(Bucket=bucket_name, Key=file_key)
+ return response["Body"].read()
+ except Exception as e:
+ logger.error("Encountered error while retrieving s3 object: {}", e)
+ raise e
+ except ClientError as e:
+ logger.error("Encountered error while retrieving s3 object: {}", e)
+ raise e
+ except ParamValidationError as e:
+ raise ValueError(f"The parameters you provided are incorrect: {e}")
+
+
+ def generic_delete_from_s3(
+ storage_secrets: Dict[StorageSecrets, Any],
+ bucket_name: str,
+ file_key: str,
+ auth_method: str,
+ ) -> None:
+ """Deletes arbitrary data from s3"""
+ logger.info("Starting S3 Delete of {}", file_key)
+
+ try:
+ s3_client = get_s3_client(auth_method, storage_secrets)
+ try:
+ s3_client.delete_object(Bucket=bucket_name, Key=file_key)
+ except Exception as e:
+ logger.error("Encountered error while deleting s3 object: {}", e)
+ raise e
+ except ClientError as e:
+ logger.error("Encountered error while deleting s3 object: {}", e)
+ raise e
+ except ParamValidationError as e:
+ raise ValueError(f"The parameters you provided are incorrect: {e}")
+
+
  def upload_to_s3( # pylint: disable=R0913
  storage_secrets: Dict[StorageSecrets, Any],
  data: Dict,
  bucket_name: str,
  file_key: str,
  resp_format: str,
- privacy_request: PrivacyRequest,
+ privacy_request: Optional[PrivacyRequest],
+ document: Optional[bytes],
  auth_method: str,
- data_category_field_mapping: Optional[DataCategoryFieldMapping] = None,
- data_use_map: Optional[Dict[str, Set[str]]] = None,
- ) -> str:
+ ) -> Optional[AnyHttpUrlString]:
  """Uploads arbitrary data to s3 returned from an access request"""
  logger.info("Starting S3 Upload of {}", file_key)

+ if privacy_request is None and document is not None:
+ return generic_upload_to_s3(
+ storage_secrets, bucket_name, file_key, auth_method, document
+ )
+
+ if privacy_request is None:
+ raise ValueError("Privacy request must be provided")
+
  try:
- my_session = get_aws_session(auth_method, storage_secrets)
- s3_client = my_session.client("s3")
+ s3_client = get_s3_client(auth_method, storage_secrets)

  # handles file chunking
  try:
@@ -148,7 +235,7 @@ def upload_to_s3( # pylint: disable=R0913
  logger.error("Encountered error while uploading s3 object: {}", e)
  raise e

- presigned_url: str = create_presigned_url_for_s3(
+ presigned_url: AnyHttpUrlString = create_presigned_url_for_s3(
  s3_client, bucket_name, file_key
  )

@@ -162,17 +249,21 @@ def upload_to_s3( # pylint: disable=R0913
  raise ValueError(f"The parameters you provided are incorrect: {e}")


+ def get_local_filename(file_key: str) -> str:
+ """Verifies that the local storage directory exists"""
+ if not os.path.exists(LOCAL_FIDES_UPLOAD_DIRECTORY):
+ os.makedirs(LOCAL_FIDES_UPLOAD_DIRECTORY)
+ return f"{LOCAL_FIDES_UPLOAD_DIRECTORY}/{file_key}"
+
+
  def upload_to_local(
  data: Dict,
  file_key: str,
  privacy_request: PrivacyRequest,
  resp_format: str = ResponseFormat.json.value,
- data_category_field_mapping: Optional[DataCategoryFieldMapping] = None,
- data_use_map: Optional[Dict[str, Set[str]]] = None,
  ) -> str:
  """Uploads access request data to a local folder - for testing/demo purposes only"""
- if not os.path.exists(LOCAL_FIDES_UPLOAD_DIRECTORY):
- os.makedirs(LOCAL_FIDES_UPLOAD_DIRECTORY)
+ get_local_filename(file_key)

  filename = f"{LOCAL_FIDES_UPLOAD_DIRECTORY}/{file_key}"
  in_memory_file = write_to_in_memory_buffer(resp_format, data, privacy_request)
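Note on the new generic_upload_to_s3 / generic_retrieve_from_s3 / generic_delete_from_s3 helpers above: they are thin wrappers around the standard boto3 S3 calls (put_object, get_object, delete_object), with the upload path also returning a presigned GET URL. The minimal stand-alone sketch below shows that flow only; the bucket name, key, and one-hour expiry are made up for illustration, credentials come from the environment, and Fides' auth_method and StorageSecrets plumbing is omitted.

import boto3
from botocore.exceptions import ClientError

BUCKET = "example-bucket"            # hypothetical bucket
FILE_KEY = "attachments/report.pdf"  # hypothetical key


def upload_and_link(document: bytes) -> str:
    """Upload raw bytes to S3 and return a time-limited presigned GET URL for them."""
    s3_client = boto3.client("s3")  # credentials/region resolved from the environment
    try:
        s3_client.put_object(Bucket=BUCKET, Key=FILE_KEY, Body=document)
        return s3_client.generate_presigned_url(
            "get_object",
            Params={"Bucket": BUCKET, "Key": FILE_KEY},
            ExpiresIn=3600,  # arbitrary one-hour expiry for the sketch
        )
    except ClientError as exc:
        raise RuntimeError(f"S3 upload failed: {exc}") from exc


def retrieve(file_key: str) -> bytes:
    """Mirror of the retrieve helper: read the object body back as bytes."""
    s3_client = boto3.client("s3")
    return s3_client.get_object(Bucket=BUCKET, Key=file_key)["Body"].read()


def delete(file_key: str) -> None:
    """Mirror of the delete helper."""
    s3_client = boto3.client("s3")
    s3_client.delete_object(Bucket=BUCKET, Key=file_key)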
fides/api/util/aws_util.py
@@ -70,3 +70,22 @@ def get_aws_session(
  raise
  else:
  return session
+
+
+ def get_s3_client(
+ auth_method: str,
+ storage_secrets: Optional[Dict[StorageSecrets, Any]],
+ assume_role_arn: Optional[str] = None,
+ ) -> Session:
+ """
+ Abstraction to retrieve an AWS S3 client using secrets.
+
+ If an `assume_role_arn` is provided, the secrets will be used to
+ assume that role and return a Session instantiated with that role.
+ """
+ session = get_aws_session(
+ auth_method=auth_method,
+ storage_secrets=storage_secrets,
+ assume_role_arn=assume_role_arn,
+ )
+ return session.client("s3")
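Note: get_s3_client simply builds a boto3 session via get_aws_session and hands back session.client("s3"). A reduced sketch of the same idea with static access keys is below; it skips Fides' auth_method and assume-role handling, and the function and argument names are illustrative only.

from typing import Optional

import boto3


def make_s3_client(
    aws_access_key_id: str,
    aws_secret_access_key: str,
    region_name: Optional[str] = None,
):
    """Build a boto3 Session from static credentials and return a low-level S3 client."""
    session = boto3.session.Session(
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key,
        region_name=region_name,
    )
    return session.client("s3")


# usage (placeholder credentials):
# s3 = make_s3_client("AKIA...", "...", region_name="us-east-1")
# s3.list_buckets()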
fides/api/util/collection_util.py
@@ -1,3 +1,4 @@
+ from collections import deque
  from functools import reduce
  from typing import Any, Callable, Dict, Iterable, List, Optional, TypeVar, Union

@@ -119,3 +120,119 @@ def extract_key_for_address(
  request_id_dataset, collection = full_request_id.split(":")
  dataset = request_id_dataset.split("__", number_of_leading_strings_to_exclude)[-1]
  return f"{dataset}:{collection}"
+
+
+ def unflatten_dict(flat_dict: Dict[str, Any], separator: str = ".") -> Dict[str, Any]:
+ """
+ Converts a dictionary of paths/values into a nested dictionary
+
+ example:
+
+ {"A.B": "1", "A.C": "2"}
+
+ becomes
+
+ {
+ "A": {
+ "B": "1",
+ "C": "2"
+ }
+ }
+ """
+ output: Dict[Any, Any] = {}
+ queue = deque(flat_dict.items())
+
+ while queue:
+ path, value = queue.popleft()
+ keys = path.split(separator)
+ target = output
+ for i, current_key in enumerate(keys[:-1]):
+ next_key = keys[i + 1]
+ if next_key.isdigit():
+ target = target.setdefault(current_key, [])
+ else:
+ if isinstance(target, dict):
+ target = target.setdefault(current_key, {})
+ elif isinstance(target, list):
+ while len(target) <= int(current_key):
+ target.append({})
+ target = target[int(current_key)]
+ try:
+ if isinstance(target, list):
+ target.append(value)
+ else:
+ # If the value is a dictionary, add its components to the queue for processing
+ if isinstance(value, dict):
+ target = target.setdefault(keys[-1], {})
+ for inner_key, inner_value in value.items():
+ new_key = f"{path}{separator}{inner_key}"
+ queue.append((new_key, inner_value))
+ else:
+ target[keys[-1]] = value
+ except TypeError as exc:
+ raise ValueError(
+ f"Error unflattening dictionary, conflicting levels detected: {exc}"
+ )
+ return output
+
+
+ def flatten_dict(data: Any, prefix: str = "", separator: str = ".") -> Dict[str, Any]:
+ """
+ Recursively flatten a dictionary or list into a flat dictionary with dot-notation keys.
+ Handles nested dictionaries and arrays with proper indices.
+
+ example:
+
+ {
+ "A": {
+ "B": "1",
+ "C": "2"
+ },
+ "D": [
+ {"E": "3"},
+ {"E": "4"}
+ ]
+ }
+
+ becomes
+
+ {
+ "A.B": "1",
+ "A.C": "2",
+ "D.0.E": "3",
+ "D.1.E": "4"
+ }
+
+ Args:
+ data: The data to flatten (must be a dict or list)
+ prefix: The current key prefix (used in recursion)
+ separator: The separator to use between key segments (default: ".")
+
+ Returns:
+ A flattened dictionary with dot-notation keys
+
+ Raises:
+ FidesopsException: If input is not a dict or list
+ """
+ items = {}
+
+ if isinstance(data, dict):
+ for k, v in data.items():
+ new_key = f"{prefix}{separator}{k}" if prefix else k
+ if isinstance(v, (dict, list)):
+ items.update(flatten_dict(v, new_key, separator))
+ else:
+ items[new_key] = v
+ elif isinstance(data, list):
+ for i, v in enumerate(data):
+ new_key = f"{prefix}{separator}{i}"
+ if isinstance(v, (dict, list)):
+ items.update(flatten_dict(v, new_key, separator))
+ else:
+ items[new_key] = v
+ else:
+ raise ValueError(
+ f"Input to flatten_dict must be a dict or list, got {type(data).__name__}"
+ )
+
+ return items
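Note: the docstrings above already spell out the intended behaviour of flatten_dict and unflatten_dict; the snippet below just runs those documented examples as a round trip, assuming the helpers are imported from fides.api.util.collection_util as added in this version.

from fides.api.util.collection_util import flatten_dict, unflatten_dict

nested = {
    "A": {"B": "1", "C": "2"},
    "D": [{"E": "3"}, {"E": "4"}],
}

flat = flatten_dict(nested)
assert flat == {"A.B": "1", "A.C": "2", "D.0.E": "3", "D.1.E": "4"}

# numeric path segments ("D.0", "D.1") are rebuilt as list indices on the way back
assert unflatten_dict(flat) == nested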
fides/api/util/consent_util.py
@@ -214,15 +214,30 @@ def add_errored_system_status_for_consent_reporting(

  Deeming them relevant if they already had a "pending" log added to them.
  """
- for pref in privacy_request.privacy_preferences: # type: ignore[attr-defined]
+ add_errored_system_status_for_consent_reporting_on_preferences(db, privacy_request.privacy_preferences, connection_config) # type: ignore[attr-defined]
+
+
+ def add_errored_system_status_for_consent_reporting_on_preferences(
+ db: Session,
+ privacy_preferences: List[PrivacyPreferenceHistory],
+ connection_config: ConnectionConfig,
+ ) -> None:
+ """
+ Cache an errored system status for consent reporting on just the subset
+ of preferences that were deemed relevant for the connector on failure,
+ from the provided list of preferences.
+
+ Deeming them relevant if they already had a "pending" log added to them.
+ """
+ for preference in privacy_preferences:
  if (
- pref.affected_system_status
- and pref.affected_system_status.get(connection_config.system_key)
+ preference.affected_system_status
+ and preference.affected_system_status.get(connection_config.system_key)
  == ExecutionLogStatus.pending.value
  ):
- pref.cache_system_status(
+ preference.cache_system_status(
  db,
- connection_config.system_key,
+ connection_config.system_key, # type: ignore[arg-type]
  ExecutionLogStatus.error,
  )

fides/api/util/saas_util.py
@@ -3,7 +3,7 @@ from __future__ import annotations
  import json
  import re
  import socket
- from collections import defaultdict, deque
+ from collections import defaultdict
  from ipaddress import IPv4Address, IPv6Address, ip_address
  from typing import Any, Dict, List, Optional, Set, Tuple, Union

@@ -256,60 +256,6 @@ def merge_datasets(dataset: GraphDataset, config_dataset: GraphDataset) -> Graph
  )


- def unflatten_dict(flat_dict: Dict[str, Any], separator: str = ".") -> Dict[str, Any]:
- """
- Converts a dictionary of paths/values into a nested dictionary
-
- example:
-
- {"A.B": "1", "A.C": "2"}
-
- becomes
-
- {
- "A": {
- "B": "1",
- "C": "2"
- }
- }
- """
- output: Dict[Any, Any] = {}
- queue = deque(flat_dict.items())
-
- while queue:
- path, value = queue.popleft()
- keys = path.split(separator)
- target = output
- for i, current_key in enumerate(keys[:-1]):
- next_key = keys[i + 1]
- if next_key.isdigit():
- target = target.setdefault(current_key, [])
- else:
- if isinstance(target, dict):
- target = target.setdefault(current_key, {})
- elif isinstance(target, list):
- while len(target) <= int(current_key):
- target.append({})
- target = target[int(current_key)]
- try:
- if isinstance(target, list):
- target.append(value)
- else:
- # If the value is a dictionary, add its components to the queue for processing
- if isinstance(value, dict):
- target = target.setdefault(keys[-1], {})
- for inner_key, inner_value in value.items():
- new_key = f"{path}{separator}{inner_key}"
- queue.append((new_key, inner_value))
- else:
- target[keys[-1]] = value
- except TypeError as exc:
- raise FidesopsException(
- f"Error unflattening dictionary, conflicting levels detected: {exc}"
- )
- return output
-
-
  def format_body(
  headers: Dict[str, Any],
  body: Optional[str],
@@ -339,7 +285,7 @@ def format_body(
  if content_type == "application/json":
  output = body
  elif content_type == "application/x-www-form-urlencoded":
- output = multidimensional_urlencode(json.loads(body))
+ output = nullsafe_urlencode(json.loads(body))
  elif content_type == "text/plain":
  output = body
  else:
@@ -470,3 +416,33 @@ def replace_version(saas_config: str, new_version: str) -> str:
  version_pattern, f"version: {new_version}", saas_config, count=1
  )
  return updated_config
+
+
+ def nullsafe_urlencode(data: Any) -> str:
+ """
+ Wrapper around multidimensional_urlencode that preserves null values as empty strings.
+
+ This is useful for APIs that expect keys with empty values (e.g., "name=") to represent
+ null values, rather than omitting the field entirely.
+
+ Args:
+ data: The data to encode (can be a dict, list, or other nested structure)
+
+ Returns:
+ URL-encoded string with null values properly handled
+ """
+
+ def prepare_null_values(data: Any) -> Any:
+ """
+ Recursively process data for URL encoding, converting None values to empty strings.
+ """
+ if data is None:
+ return ""
+ if isinstance(data, dict):
+ return {k: prepare_null_values(v) for k, v in data.items()}
+ if isinstance(data, list):
+ return [prepare_null_values(item) for item in data]
+ return data
+
+ processed_data = prepare_null_values(data)
+ return multidimensional_urlencode(processed_data)
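Note: nullsafe_urlencode only rewrites None values to empty strings before delegating to multidimensional_urlencode, so a null field is sent as an empty form value rather than being dropped or stringified. A small illustration, assuming the helper is importable from fides.api.util.saas_util as added here; the exact encoded output shown in the comment is approximate.

from fides.api.util.saas_util import nullsafe_urlencode

body = {"email": "jane@example.com", "name": None}
print(nullsafe_urlencode(body))
# expected output (roughly): email=jane%40example.com&name=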
fides/config/security_settings.py
@@ -3,7 +3,6 @@
  # pylint: disable=C0115,C0116, E0213
  from typing import List, Optional, Pattern, Tuple, Union

- import validators
  from pydantic import Field, SerializeAsAny, ValidationInfo, field_validator
  from pydantic_settings import SettingsConfigDict
  from slowapi.wrappers import parse_many # type: ignore
@@ -176,22 +175,17 @@ class SecuritySettings(FidesSettings):
  @field_validator("cors_origins", mode="before")
  @classmethod
  def assemble_cors_origins(cls, v: Union[str, List[str]]) -> Union[List[str], str]:
- """Return a list of valid origins for CORS requests"""
-
- def validate(values: List[str]) -> None:
- for value in values:
- if value != "*":
- if not validators.url(value):
- raise ValueError(f"{value} is not a valid url")
+ """
+ Return a list of origins for CORS requests.

+ This validator allows us to parse a comma-separated string of origins
+ into a list of origins, since the `cors_origins` field can be set
+ as a comma-separated string or a list of strings.
+ """
  if isinstance(v, str) and not v.startswith("["):
  values = [i.strip() for i in v.split(",")]
- validate(values)
-
  return values
- if isinstance(v, (list, str)):
- validate(v) # type: ignore
-
+ if isinstance(v, list):
  return v
  raise ValueError(v)
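Note: with the validators-based URL check removed, assemble_cors_origins now only normalises the input shape: comma-separated strings become lists, lists pass through unchanged, and anything else raises. A stand-alone sketch of that parsing behaviour (without the Pydantic wiring) is below.

from typing import List, Union


def assemble_cors_origins(v: Union[str, List[str]]) -> Union[List[str], str]:
    """Mirror of the simplified validator: split comma-separated strings, pass lists through."""
    if isinstance(v, str) and not v.startswith("["):
        return [i.strip() for i in v.split(",")]
    if isinstance(v, list):
        return v
    raise ValueError(v)


print(assemble_cors_origins("https://app.example.com, https://admin.example.com"))
# ['https://app.example.com', 'https://admin.example.com']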