ethyca-fides 2.56.3b1__py2.py3-none-any.whl → 2.56.3b2__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ethyca_fides-2.56.3b1.dist-info → ethyca_fides-2.56.3b2.dist-info}/METADATA +1 -1
- {ethyca_fides-2.56.3b1.dist-info → ethyca_fides-2.56.3b2.dist-info}/RECORD +94 -91
- fides/_version.py +3 -3
- fides/api/alembic/migrations/versions/69ad6d844e21_add_comments_and_comment_references.py +84 -0
- fides/api/alembic/migrations/versions/6ea2171c544f_change_attachment_storage_key_to_.py +77 -0
- fides/api/models/attachment.py +109 -49
- fides/api/models/comment.py +109 -0
- fides/api/service/connectors/query_configs/saas_query_config.py +21 -15
- fides/api/service/storage/storage_uploader_service.py +4 -10
- fides/api/tasks/storage.py +106 -15
- fides/api/util/aws_util.py +19 -0
- fides/api/util/collection_util.py +117 -0
- fides/api/util/saas_util.py +32 -56
- fides/data/language/languages.yml +2 -0
- fides/ui-build/static/admin/404.html +1 -1
- fides/ui-build/static/admin/_next/static/chunks/pages/{_app-3c1a7742661d3a9e.js → _app-3b7bbcdb61d952e7.js} +1 -1
- fides/ui-build/static/admin/_next/static/chunks/pages/{index-c9fa68dc0fa42c81.js → index-94e6d589c4edf360.js} +1 -1
- fides/ui-build/static/admin/_next/static/{LOp6RUpN795nyhXOv95wz → n4uO6TqGfiKHQ-X5XYkoy}/_buildManifest.js +1 -1
- fides/ui-build/static/admin/add-systems/manual.html +1 -1
- fides/ui-build/static/admin/add-systems/multiple.html +1 -1
- fides/ui-build/static/admin/add-systems.html +1 -1
- fides/ui-build/static/admin/ant-poc.html +1 -1
- fides/ui-build/static/admin/consent/configure/add-vendors.html +1 -1
- fides/ui-build/static/admin/consent/configure.html +1 -1
- fides/ui-build/static/admin/consent/privacy-experience/[id].html +1 -1
- fides/ui-build/static/admin/consent/privacy-experience/new.html +1 -1
- fides/ui-build/static/admin/consent/privacy-experience.html +1 -1
- fides/ui-build/static/admin/consent/privacy-notices/[id].html +1 -1
- fides/ui-build/static/admin/consent/privacy-notices/new.html +1 -1
- fides/ui-build/static/admin/consent/privacy-notices.html +1 -1
- fides/ui-build/static/admin/consent/properties.html +1 -1
- fides/ui-build/static/admin/consent/reporting.html +1 -1
- fides/ui-build/static/admin/consent.html +1 -1
- fides/ui-build/static/admin/data-catalog/[systemId]/projects/[projectUrn]/[resourceUrn].html +1 -1
- fides/ui-build/static/admin/data-catalog/[systemId]/projects/[projectUrn].html +1 -1
- fides/ui-build/static/admin/data-catalog/[systemId]/projects.html +1 -1
- fides/ui-build/static/admin/data-catalog/[systemId]/resources/[resourceUrn].html +1 -1
- fides/ui-build/static/admin/data-catalog/[systemId]/resources.html +1 -1
- fides/ui-build/static/admin/data-catalog.html +1 -1
- fides/ui-build/static/admin/data-discovery/action-center/[monitorId]/[systemId].html +1 -1
- fides/ui-build/static/admin/data-discovery/action-center/[monitorId].html +1 -1
- fides/ui-build/static/admin/data-discovery/action-center.html +1 -1
- fides/ui-build/static/admin/data-discovery/activity.html +1 -1
- fides/ui-build/static/admin/data-discovery/detection/[resourceUrn].html +1 -1
- fides/ui-build/static/admin/data-discovery/detection.html +1 -1
- fides/ui-build/static/admin/data-discovery/discovery/[resourceUrn].html +1 -1
- fides/ui-build/static/admin/data-discovery/discovery.html +1 -1
- fides/ui-build/static/admin/datamap.html +1 -1
- fides/ui-build/static/admin/dataset/[datasetId]/[collectionName]/[...subfieldNames].html +1 -1
- fides/ui-build/static/admin/dataset/[datasetId]/[collectionName].html +1 -1
- fides/ui-build/static/admin/dataset/[datasetId].html +1 -1
- fides/ui-build/static/admin/dataset/new.html +1 -1
- fides/ui-build/static/admin/dataset.html +1 -1
- fides/ui-build/static/admin/datastore-connection/[id].html +1 -1
- fides/ui-build/static/admin/datastore-connection/new.html +1 -1
- fides/ui-build/static/admin/datastore-connection.html +1 -1
- fides/ui-build/static/admin/index.html +1 -1
- fides/ui-build/static/admin/integrations/[id].html +1 -1
- fides/ui-build/static/admin/integrations.html +1 -1
- fides/ui-build/static/admin/login/[provider].html +1 -1
- fides/ui-build/static/admin/login.html +1 -1
- fides/ui-build/static/admin/messaging/[id].html +1 -1
- fides/ui-build/static/admin/messaging/add-template.html +1 -1
- fides/ui-build/static/admin/messaging.html +1 -1
- fides/ui-build/static/admin/privacy-requests/[id].html +1 -1
- fides/ui-build/static/admin/privacy-requests/configure/messaging.html +1 -1
- fides/ui-build/static/admin/privacy-requests/configure/storage.html +1 -1
- fides/ui-build/static/admin/privacy-requests/configure.html +1 -1
- fides/ui-build/static/admin/privacy-requests.html +1 -1
- fides/ui-build/static/admin/properties/[id].html +1 -1
- fides/ui-build/static/admin/properties/add-property.html +1 -1
- fides/ui-build/static/admin/properties.html +1 -1
- fides/ui-build/static/admin/reporting/datamap.html +1 -1
- fides/ui-build/static/admin/settings/about.html +1 -1
- fides/ui-build/static/admin/settings/consent.html +1 -1
- fides/ui-build/static/admin/settings/custom-fields.html +1 -1
- fides/ui-build/static/admin/settings/domain-records.html +1 -1
- fides/ui-build/static/admin/settings/domains.html +1 -1
- fides/ui-build/static/admin/settings/email-templates.html +1 -1
- fides/ui-build/static/admin/settings/locations.html +1 -1
- fides/ui-build/static/admin/settings/organization.html +1 -1
- fides/ui-build/static/admin/settings/regulations.html +1 -1
- fides/ui-build/static/admin/systems/configure/[id]/test-datasets.html +1 -1
- fides/ui-build/static/admin/systems/configure/[id].html +1 -1
- fides/ui-build/static/admin/systems.html +1 -1
- fides/ui-build/static/admin/taxonomy.html +1 -1
- fides/ui-build/static/admin/user-management/new.html +1 -1
- fides/ui-build/static/admin/user-management/profile/[id].html +1 -1
- fides/ui-build/static/admin/user-management.html +1 -1
- {ethyca_fides-2.56.3b1.dist-info → ethyca_fides-2.56.3b2.dist-info}/LICENSE +0 -0
- {ethyca_fides-2.56.3b1.dist-info → ethyca_fides-2.56.3b2.dist-info}/WHEEL +0 -0
- {ethyca_fides-2.56.3b1.dist-info → ethyca_fides-2.56.3b2.dist-info}/entry_points.txt +0 -0
- {ethyca_fides-2.56.3b1.dist-info → ethyca_fides-2.56.3b2.dist-info}/top_level.txt +0 -0
- /fides/ui-build/static/admin/_next/static/{LOp6RUpN795nyhXOv95wz → n4uO6TqGfiKHQ-X5XYkoy}/_ssgManifest.js +0 -0
fides/api/models/attachment.py
CHANGED
@@ -1,6 +1,8 @@
+import os
 from enum import Enum as EnumType
 from typing import Any, Optional
 
+from loguru import logger as log
 from sqlalchemy import Column
 from sqlalchemy import Enum as EnumColumn
 from sqlalchemy import ForeignKey, String, UniqueConstraint
@@ -9,6 +11,15 @@ from sqlalchemy.orm import Session, relationship
 
 from fides.api.db.base_class import Base
 from fides.api.models.fides_user import FidesUser  # pylint: disable=unused-import
+from fides.api.models.storage import StorageConfig  # pylint: disable=unused-import
+from fides.api.schemas.storage.storage import StorageDetails, StorageType
+from fides.api.tasks.storage import (
+    LOCAL_FIDES_UPLOAD_DIRECTORY,
+    generic_delete_from_s3,
+    generic_retrieve_from_s3,
+    get_local_filename,
+    upload_to_s3,
+)
 
 
 class AttachmentType(str, EnumType):
@@ -74,7 +85,9 @@ class Attachment(Base):
     )
     file_name = Column(String, nullable=False)
     attachment_type = Column(EnumColumn(AttachmentType), nullable=False)
-    storage_key = Column(
+    storage_key = Column(
+        String, ForeignKey("storageconfig.key", ondelete="CASCADE"), nullable=False
+    )
 
     user = relationship(
         "FidesUser",
@@ -90,64 +103,111 @@ class Attachment(Base):
         uselist=True,
     )
 
-    [old lines 93-130 removed: 38 lines whose content is not preserved in this rendering]
+    config = relationship(
+        "StorageConfig",
+        lazy="selectin",
+        uselist=False,
+    )
+
+    def upload(self, attachment: bytes) -> None:
+        """Uploads an attachment to S3 or local storage."""
+        if self.config.type == StorageType.s3:
+            bucket_name = f"{self.config.details[StorageDetails.BUCKET.value]}"
+            auth_method = self.config.details[StorageDetails.AUTH_METHOD.value]
+            upload_to_s3(
+                storage_secrets=self.config.secrets,
+                data={},
+                bucket_name=bucket_name,
+                file_key=self.id,
+                resp_format=self.config.format,
+                privacy_request=None,
+                document=attachment,
+                auth_method=auth_method,
+            )
+            log.info(f"Uploaded {self.file_name} to S3 bucket {bucket_name}/{self.id}")
+            return
+
+        if self.config.type == StorageType.local:
+            filename = get_local_filename(self.id)
+            with open(filename, "wb") as file:
+                file.write(attachment)
+            return
+
+        raise ValueError(f"Unsupported storage type: {self.config.type}")
+
+    def retrieve_attachment(self) -> Optional[bytes]:
+        """Returns the attachment from S3 in bytes form."""
+        if self.config.type == StorageType.s3:
+            bucket_name = f"{self.config.details[StorageDetails.BUCKET.value]}"
+            auth_method = self.config.details[StorageDetails.AUTH_METHOD.value]
+            return generic_retrieve_from_s3(
+                storage_secrets=self.config.secrets,
+                bucket_name=bucket_name,
+                file_key=self.id,
+                auth_method=auth_method,
+            )
+
+        if self.config.type == StorageType.local:
+            filename = f"{LOCAL_FIDES_UPLOAD_DIRECTORY}/{self.id}"
+            with open(filename, "rb") as file:
+                return file.read()
+
+        raise ValueError(f"Unsupported storage type: {self.config.type}")
+
+    def delete_attachment_from_storage(self) -> None:
+        """Deletes an attachment from S3 or local storage."""
+        if self.config.type == StorageType.s3:
+            bucket_name = f"{self.config.details[StorageDetails.BUCKET.value]}"
+            auth_method = self.config.details[StorageDetails.AUTH_METHOD.value]
+            generic_delete_from_s3(
+                storage_secrets=self.config.secrets,
+                bucket_name=bucket_name,
+                file_key=self.id,
+                auth_method=auth_method,
+            )
+            return
+
+        if self.config.type == StorageType.local:
+            filename = f"{LOCAL_FIDES_UPLOAD_DIRECTORY}/{self.id}"
+            os.remove(filename)
+            return
+
+        raise ValueError(f"Unsupported storage type: {self.config.type}")
 
     @classmethod
-    def
+    def create_and_upload(
         cls,
         db: Session,
         *,
         data: dict[str, Any],
+        attachment_file: bytes,
         check_name: bool = False,
-        attachment: Optional[
-            bytes
-        ] = None,  # This will not be optional once the upload method is implemented.
     ) -> "Attachment":
         """Creates a new attachment record in the database and uploads the attachment to S3."""
-
-
-
+        if attachment_file is None:
+            raise ValueError("Attachment is required")
+        attachment_model = super().create(db=db, data=data, check_name=check_name)
+
+        try:
+            attachment_model.upload(attachment_file)
+            return attachment_model
+        except Exception as e:
+            log.error(f"Failed to upload attachment: {e}")
+            attachment_model.delete(db)
+            raise e
+
+    @classmethod
+    def create(
+        cls,
+        db: Session,
+        *,
+        data: dict[str, Any],
+        check_name: bool = False,
+    ) -> "Attachment":
+        """Raises Error, provides information for user to create with upload instead."""
+        raise NotImplementedError("Please use create_and_upload method for Attachment")
 
     def delete(self, db: Session) -> None:
         """Deletes an attachment record from the database and deletes the attachment from S3."""
-
-        # attachment_record.delete_attachment_from_s3(db)
-        # log.info(f"Deleted attachment {attachment_record.id} from S3")
+        self.delete_attachment_from_storage()
         super().delete(db=db)
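For orientation, a minimal usage sketch (not part of the diff) of the new Attachment API above, assuming a SQLAlchemy session `db`, an existing `StorageConfig` whose `key` matches `storage_key`, and a placeholder `AttachmentType` member (the enum's members are not shown in this diff):

    from fides.api.models.attachment import Attachment, AttachmentType

    attachment = Attachment.create_and_upload(
        db=db,
        data={
            "user_id": user.id,                # hypothetical uploading user
            "file_name": "evidence.pdf",
            "attachment_type": AttachmentType.internal_use_only,  # assumed member name
            "storage_key": "default_storage",  # must reference an existing StorageConfig.key
        },
        attachment_file=b"%PDF-1.7 ...",       # raw bytes to store
    )
    content = attachment.retrieve_attachment()  # bytes back from S3 or local disk
    attachment.delete(db)                       # removes the row and the stored object

Calling the inherited `create` directly now raises `NotImplementedError`, so `create_and_upload` is the intended entry point.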
fides/api/models/comment.py
ADDED
@@ -0,0 +1,109 @@
+from enum import Enum as EnumType
+from typing import Any
+
+from sqlalchemy import Column
+from sqlalchemy import Enum as EnumColumn
+from sqlalchemy import ForeignKey, String, UniqueConstraint
+from sqlalchemy.ext.declarative import declared_attr
+from sqlalchemy.orm import Session, relationship
+
+from fides.api.db.base_class import Base
+from fides.api.models.attachment import Attachment, AttachmentReference
+from fides.api.models.fides_user import FidesUser  # pylint: disable=unused-import
+
+
+class CommentType(str, EnumType):
+    """
+    Enum for comment types. Indicates comment usage.
+
+    - notes are internal comments.
+    - reply comments are public and may cause an email or other communciation to be sent
+    """
+
+    note = "note"
+    reply = "reply"
+
+
+class CommentReferenceType(str, EnumType):
+    """
+    Enum for comment reference types. Indicates where the comment is referenced.
+    """
+
+    manual_step = "manual_step"
+    privacy_request = "privacy_request"
+
+
+class CommentReference(Base):
+    """
+    Stores information about a comment and any other element which may reference that comment.
+    """
+
+    @declared_attr
+    def __tablename__(cls) -> str:
+        """Overriding base class method to set the table name."""
+        return "comment_reference"
+
+    comment_id = Column(String, ForeignKey("comment.id"), nullable=False)
+    reference_id = Column(String, nullable=False)
+    reference_type = Column(EnumColumn(CommentReferenceType), nullable=False)
+
+    __table_args__ = (
+        UniqueConstraint("comment_id", "reference_id", name="comment_reference_uc"),
+    )
+
+    comment = relationship(
+        "Comment",
+        back_populates="references",
+        uselist=False,
+    )
+
+    @classmethod
+    def create(
+        cls, db: Session, *, data: dict[str, Any], check_name: bool = False
+    ) -> "CommentReference":
+        """Creates a new comment reference record in the database."""
+        return super().create(db=db, data=data, check_name=check_name)
+
+
+class Comment(Base):
+    """
+    Stores information about a Comment.
+    """
+
+    user_id = Column(
+        String, ForeignKey("fidesuser.id", ondelete="SET NULL"), nullable=True
+    )
+    comment_text = Column(String, nullable=False)
+    comment_type = Column(EnumColumn(CommentType), nullable=False)
+
+    user = relationship(
+        "FidesUser",
+        backref="comments",
+        lazy="selectin",
+        uselist=False,
+    )
+
+    references = relationship(
+        "CommentReference",
+        back_populates="comment",
+        cascade="all, delete",
+        uselist=True,
+    )
+
+    def get_attachments(self, db: Session) -> list[Attachment]:
+        """Retrieve all attachments associated with this comment."""
+        stmt = (
+            db.query(Attachment)
+            .join(
+                AttachmentReference, Attachment.id == AttachmentReference.attachment_id
+            )
+            .where(AttachmentReference.reference_id == self.id)
+        )
+        return db.execute(stmt).scalars().all()
+
+    def delete(self, db: Session) -> None:
+        """Delete the comment and all associated references."""
+        attachments = self.get_attachments(db)
+        for attachment in attachments:
+            attachment.delete(db)
+        db.delete(self)
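A brief sketch (not part of the diff) of how the new comment model might be used, assuming a SQLAlchemy session `db` and placeholder `user`/`privacy_request` objects:

    from fides.api.models.comment import (
        Comment,
        CommentReference,
        CommentReferenceType,
        CommentType,
    )

    comment = Comment.create(
        db=db,
        data={
            "user_id": user.id,
            "comment_text": "Verified the uploaded identity document.",
            "comment_type": CommentType.note,  # internal note; CommentType.reply is user-facing
        },
    )
    CommentReference.create(
        db=db,
        data={
            "comment_id": comment.id,
            "reference_id": privacy_request.id,
            "reference_type": CommentReferenceType.privacy_request,
        },
    )
    attachments = comment.get_attachments(db)  # Attachments linked via AttachmentReference

Deleting a comment with `comment.delete(db)` also deletes any attachments returned by `get_attachments`.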
fides/api/service/connectors/query_configs/saas_query_config.py
CHANGED
@@ -1,6 +1,7 @@
 # pylint: disable=too-many-instance-attributes
 from __future__ import annotations
 
+from copy import deepcopy
 from datetime import datetime
 from itertools import product
 from typing import Any, Dict, List, Literal, Optional, Tuple, TypeVar
@@ -12,7 +13,6 @@ from loguru import logger
 from sqlalchemy.orm import Session
 
 from fides.api.common_exceptions import FidesopsException
-from fides.api.graph.config import ScalarField
 from fides.api.graph.execution import ExecutionNode
 from fides.api.models.policy import Policy
 from fides.api.models.privacy_request import (
@@ -33,7 +33,12 @@ from fides.api.task.refine_target_path import (
     join_detailed_path,
 )
 from fides.api.util import saas_util
-from fides.api.util.collection_util import
+from fides.api.util.collection_util import (
+    Row,
+    flatten_dict,
+    merge_dicts,
+    unflatten_dict,
+)
 from fides.api.util.saas_util import (
     ALL_OBJECT_FIELDS,
     CUSTOM_PRIVACY_REQUEST_FIELDS,
@@ -45,7 +50,6 @@ from fides.api.util.saas_util import (
     REPLY_TO_TOKEN,
     UUID,
     get_identities,
-    unflatten_dict,
 )
 from fides.common.api.v1.urn_registry import REQUEST_TASK_CALLBACK, V1_URL_PREFIX
 from fides.config.config_proxy import ConfigProxy
@@ -510,22 +514,24 @@ class SaaSQueryConfig(QueryConfig[SaaSRequestParams]):
 
     def all_value_map(self, row: Row) -> Dict[str, Any]:
         """
-        Takes a row and preserves only the fields that are defined in the
+        Takes a row and preserves only the fields that are defined in the collection.
         Used for scenarios when an update endpoint has required fields other than
         just the fields being updated.
        """
+        flattened_row = flatten_dict(deepcopy(row))
 
-        [old lines 518-528 removed: 11 lines whose content is not preserved in this rendering]
+        # Get root field names defined in the collection
+        collection_fields = {
+            field_path.string_path.split(".")[0]
+            for field_path, _ in self.field_map().items()
+        }
+
+        # Only keep the field values defined in the collection
+        return {
+            path: value
+            for path, value in flattened_row.items()
+            if path.split(".")[0] in collection_fields
+        }
 
     def query_to_str(self, t: T, input_data: Dict[str, List[Any]]) -> str:
         """Convert query to string"""
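To illustrate the rewritten `all_value_map` (a sketch with plain dicts, not from the package): `flatten_dict` is now imported from `fides.api.util.collection_util` and produces dot-separated paths, which are then filtered against the collection's root field names:

    from fides.api.util.collection_util import flatten_dict

    row = {"id": 1, "name": {"first": "Ada", "last": "Lovelace"}, "extra": "dropped"}
    collection_fields = {"id", "name"}  # root names, as derived from self.field_map()

    flattened = flatten_dict(row)
    value_map = {
        path: value
        for path, value in flattened.items()
        if path.split(".")[0] in collection_fields
    }
    # value_map keeps "id", "name.first" and "name.last"; "extra" is dropped

Note that `unflatten_dict` was previously imported from `fides.api.util.saas_util` (see the removed import above) and now comes from `collection_util`, which is consistent with the `+117` lines added to `fides/api/util/collection_util.py` in the file list.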
fides/api/service/storage/storage_uploader_service.py
CHANGED
@@ -41,9 +41,7 @@ def upload(
         logger.warning("Storage type not found: {}", storage_key)
         raise StorageUploadError(f"Storage type not found: {storage_key}")
     uploader: Any = _get_uploader_from_config_type(config.type)  # type: ignore
-    return uploader(
-        db, config, data, privacy_request, data_category_field_mapping, data_use_map
-    )
+    return uploader(db, config, data, privacy_request)
 
 
 def get_extension(resp_format: ResponseFormat) -> str:
@@ -88,14 +86,13 @@ def _s3_uploader(
     config: StorageConfig,
     data: Dict,
     privacy_request: PrivacyRequest,
-    data_category_field_mapping: Optional[DataCategoryFieldMapping] = None,
-    data_use_map: Optional[Dict[str, Set[str]]] = None,
 ) -> str:
     """Constructs necessary info needed for s3 before calling upload"""
     file_key: str = _construct_file_key(privacy_request.id, config)
 
     bucket_name = config.details[StorageDetails.BUCKET.value]
     auth_method = config.details[StorageDetails.AUTH_METHOD.value]
+    document = None
 
     return upload_to_s3(
         config.secrets,  # type: ignore
@@ -104,9 +101,8 @@ def _s3_uploader(
         file_key,
         config.format.value,  # type: ignore
         privacy_request,
+        document,
         auth_method,
-        data_category_field_mapping,
-        data_use_map,
     )
 
 
@@ -115,9 +111,7 @@ def _local_uploader(
     config: StorageConfig,
     data: Dict,
     privacy_request: PrivacyRequest,
-    data_category_field_mapping: Optional[DataCategoryFieldMapping] = None,
-    data_use_map: Optional[Dict[str, Set[str]]] = None,
 ) -> str:
     """Uploads data to local storage, used for quick-start/demo purposes"""
     file_key: str = _construct_file_key(privacy_request.id, config)
-    return upload_to_local(data, file_key, privacy_request, config.format.value
+    return upload_to_local(data, file_key, privacy_request, config.format.value)  # type: ignore
fides/api/tasks/storage.py
CHANGED
@@ -5,20 +5,20 @@ import os
 import secrets
 import zipfile
 from io import BytesIO
-from typing import Any, Dict, Optional,
+from typing import Any, Dict, Optional, Union
 
 import pandas as pd
 from botocore.exceptions import ClientError, ParamValidationError
+from fideslang.validation import AnyHttpUrlString
 from loguru import logger
 
 from fides.api.cryptography.cryptographic_util import bytes_to_b64_str
-from fides.api.graph.graph import DataCategoryFieldMapping
 from fides.api.models.privacy_request import PrivacyRequest
 from fides.api.schemas.storage.storage import ResponseFormat, StorageSecrets
 from fides.api.service.privacy_request.dsr_package.dsr_report_builder import (
     DsrReportBuilder,
 )
-from fides.api.util.aws_util import
+from fides.api.util.aws_util import get_s3_client
 from fides.api.util.cache import get_cache, get_encryption_cache_key
 from fides.api.util.encryption.aes_gcm_encryption_scheme import (
     encrypt_to_bytes_verify_secrets_length,
@@ -101,7 +101,9 @@ def write_to_in_memory_buffer(
     raise NotImplementedError(f"No handling for response format {resp_format}.")
 
 
-def create_presigned_url_for_s3(
+def create_presigned_url_for_s3(
+    s3_client: Any, bucket_name: str, file_key: str
+) -> AnyHttpUrlString:
     """ "Generate a presigned URL to share an S3 object
 
     :param s3_client: s3 base client
@@ -119,23 +121,108 @@ def create_presigned_url_for_s3(s3_client: Any, bucket_name: str, file_key: str)
     return response
 
 
+def generic_upload_to_s3(  # pylint: disable=R0913
+    storage_secrets: Dict[StorageSecrets, Any],
+    bucket_name: str,
+    file_key: str,
+    auth_method: str,
+    document: bytes,
+) -> Optional[AnyHttpUrlString]:
+    """Uploads arbitrary data to s3 returned from an access request"""
+    logger.info("Starting S3 Upload of {}", file_key)
+
+    try:
+        s3_client = get_s3_client(auth_method, storage_secrets)
+        try:
+            s3_client.put_object(Bucket=bucket_name, Key=file_key, Body=document)
+        except Exception as e:
+            logger.error("Encountered error while uploading s3 object: {}", e)
+            raise e
+
+        presigned_url: AnyHttpUrlString = create_presigned_url_for_s3(
+            s3_client, bucket_name, file_key
+        )
+
+        return presigned_url
+    except ClientError as e:
+        logger.error(
+            "Encountered error while uploading and generating link for s3 object: {}", e
+        )
+        raise e
+    except ParamValidationError as e:
+        raise ValueError(f"The parameters you provided are incorrect: {e}")
+
+
+def generic_retrieve_from_s3(
+    storage_secrets: Dict[StorageSecrets, Any],
+    bucket_name: str,
+    file_key: str,
+    auth_method: str,
+) -> Optional[bytes]:
+    """Retrieves arbitrary data from s3"""
+    logger.info("Starting S3 Retrieve of {}", file_key)
+
+    try:
+        s3_client = get_s3_client(auth_method, storage_secrets)
+        try:
+            response = s3_client.get_object(Bucket=bucket_name, Key=file_key)
+            return response["Body"].read()
+        except Exception as e:
+            logger.error("Encountered error while retrieving s3 object: {}", e)
+            raise e
+    except ClientError as e:
+        logger.error("Encountered error while retrieving s3 object: {}", e)
+        raise e
+    except ParamValidationError as e:
+        raise ValueError(f"The parameters you provided are incorrect: {e}")
+
+
+def generic_delete_from_s3(
+    storage_secrets: Dict[StorageSecrets, Any],
+    bucket_name: str,
+    file_key: str,
+    auth_method: str,
+) -> None:
+    """Deletes arbitrary data from s3"""
+    logger.info("Starting S3 Delete of {}", file_key)
+
+    try:
+        s3_client = get_s3_client(auth_method, storage_secrets)
+        try:
+            s3_client.delete_object(Bucket=bucket_name, Key=file_key)
+        except Exception as e:
+            logger.error("Encountered error while deleting s3 object: {}", e)
+            raise e
+    except ClientError as e:
+        logger.error("Encountered error while deleting s3 object: {}", e)
+        raise e
+    except ParamValidationError as e:
+        raise ValueError(f"The parameters you provided are incorrect: {e}")
+
+
 def upload_to_s3(  # pylint: disable=R0913
     storage_secrets: Dict[StorageSecrets, Any],
     data: Dict,
     bucket_name: str,
     file_key: str,
     resp_format: str,
-    privacy_request: PrivacyRequest,
+    privacy_request: Optional[PrivacyRequest],
+    document: Optional[bytes],
     auth_method: str,
-
-    data_use_map: Optional[Dict[str, Set[str]]] = None,
-) -> str:
+) -> Optional[AnyHttpUrlString]:
     """Uploads arbitrary data to s3 returned from an access request"""
     logger.info("Starting S3 Upload of {}", file_key)
 
+    if privacy_request is None and document is not None:
+        return generic_upload_to_s3(
+            storage_secrets, bucket_name, file_key, auth_method, document
+        )
+
+    if privacy_request is None:
+        raise ValueError("Privacy request must be provided")
+
     try:
-
-        s3_client = my_session.client("s3")
+        s3_client = get_s3_client(auth_method, storage_secrets)
 
         # handles file chunking
         try:
@@ -148,7 +235,7 @@ def upload_to_s3(  # pylint: disable=R0913
             logger.error("Encountered error while uploading s3 object: {}", e)
             raise e
 
-        presigned_url:
+        presigned_url: AnyHttpUrlString = create_presigned_url_for_s3(
             s3_client, bucket_name, file_key
         )
 
@@ -162,17 +249,21 @@ def upload_to_s3(  # pylint: disable=R0913
         raise ValueError(f"The parameters you provided are incorrect: {e}")
 
 
+def get_local_filename(file_key: str) -> str:
+    """Verifies that the local storage directory exists"""
+    if not os.path.exists(LOCAL_FIDES_UPLOAD_DIRECTORY):
+        os.makedirs(LOCAL_FIDES_UPLOAD_DIRECTORY)
+    return f"{LOCAL_FIDES_UPLOAD_DIRECTORY}/{file_key}"
+
+
 def upload_to_local(
     data: Dict,
     file_key: str,
     privacy_request: PrivacyRequest,
     resp_format: str = ResponseFormat.json.value,
-    data_category_field_mapping: Optional[DataCategoryFieldMapping] = None,
-    data_use_map: Optional[Dict[str, Set[str]]] = None,
 ) -> str:
     """Uploads access request data to a local folder - for testing/demo purposes only"""
-
-        os.makedirs(LOCAL_FIDES_UPLOAD_DIRECTORY)
+    get_local_filename(file_key)
 
     filename = f"{LOCAL_FIDES_UPLOAD_DIRECTORY}/{file_key}"
     in_memory_file = write_to_in_memory_buffer(resp_format, data, privacy_request)
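A short sketch (not from the package) of calling the new generic helpers directly; the `StorageSecrets` keys and the "secret_keys" auth-method string are assumptions about the existing enums rather than something shown in this diff:

    from fides.api.schemas.storage.storage import StorageSecrets
    from fides.api.tasks.storage import (
        generic_delete_from_s3,
        generic_retrieve_from_s3,
        generic_upload_to_s3,
    )

    secrets = {
        StorageSecrets.AWS_ACCESS_KEY_ID: "...",      # placeholder credentials
        StorageSecrets.AWS_SECRET_ACCESS_KEY: "...",
    }
    url = generic_upload_to_s3(secrets, "my-bucket", "att_123", "secret_keys", b"hello")
    data = generic_retrieve_from_s3(secrets, "my-bucket", "att_123", "secret_keys")
    generic_delete_from_s3(secrets, "my-bucket", "att_123", "secret_keys")

`upload_to_s3` keeps its previous behavior for privacy-request packages and only delegates to `generic_upload_to_s3` when called with `privacy_request=None` and a `document`, which is how `Attachment.upload` uses it.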
fides/api/util/aws_util.py
CHANGED
@@ -70,3 +70,22 @@ def get_aws_session(
         raise
     else:
         return session
+
+
+def get_s3_client(
+    auth_method: str,
+    storage_secrets: Optional[Dict[StorageSecrets, Any]],
+    assume_role_arn: Optional[str] = None,
+) -> Session:
+    """
+    Abstraction to retrieve an AWS S3 client using secrets.
+
+    If an `assume_role_arn` is provided, the secrets will be used to
+    assume that role and return a Session instantiated with that role.
+    """
+    session = get_aws_session(
+        auth_method=auth_method,
+        storage_secrets=storage_secrets,
+        assume_role_arn=assume_role_arn,
+    )
+    return session.client("s3")