my-aws-helpers 1.6.0__tar.gz → 2.0.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {my_aws_helpers-1.6.0 → my_aws_helpers-2.0.0}/PKG-INFO +5 -2
- my_aws_helpers-2.0.0/my_aws_helpers/api.py +54 -0
- my_aws_helpers-2.0.0/my_aws_helpers/auth.py +16 -0
- {my_aws_helpers-1.6.0 → my_aws_helpers-2.0.0}/my_aws_helpers/cognito.py +6 -6
- my_aws_helpers-2.0.0/my_aws_helpers/dynamo.py +202 -0
- my_aws_helpers-2.0.0/my_aws_helpers/errors.py +14 -0
- my_aws_helpers-2.0.0/my_aws_helpers/logging.py +20 -0
- my_aws_helpers-2.0.0/my_aws_helpers/s3.py +207 -0
- my_aws_helpers-2.0.0/my_aws_helpers/sfn.py +44 -0
- {my_aws_helpers-1.6.0 → my_aws_helpers-2.0.0}/my_aws_helpers.egg-info/PKG-INFO +6 -3
- {my_aws_helpers-1.6.0 → my_aws_helpers-2.0.0}/my_aws_helpers.egg-info/SOURCES.txt +2 -0
- {my_aws_helpers-1.6.0 → my_aws_helpers-2.0.0}/setup.py +1 -1
- my_aws_helpers-1.6.0/my_aws_helpers/api.py +0 -65
- my_aws_helpers-1.6.0/my_aws_helpers/dynamo.py +0 -125
- my_aws_helpers-1.6.0/my_aws_helpers/errors.py +0 -8
- my_aws_helpers-1.6.0/my_aws_helpers/s3.py +0 -116
- my_aws_helpers-1.6.0/my_aws_helpers/sfn.py +0 -57
- {my_aws_helpers-1.6.0 → my_aws_helpers-2.0.0}/README.md +0 -0
- {my_aws_helpers-1.6.0 → my_aws_helpers-2.0.0}/my_aws_helpers.egg-info/dependency_links.txt +0 -0
- {my_aws_helpers-1.6.0 → my_aws_helpers-2.0.0}/my_aws_helpers.egg-info/requires.txt +0 -0
- {my_aws_helpers-1.6.0 → my_aws_helpers-2.0.0}/my_aws_helpers.egg-info/top_level.txt +0 -0
- {my_aws_helpers-1.6.0 → my_aws_helpers-2.0.0}/my_aws_helpers.egg-info/zip-safe +0 -0
- {my_aws_helpers-1.6.0 → my_aws_helpers-2.0.0}/setup.cfg +0 -0
|
@@ -1,13 +1,16 @@
|
|
|
1
1
|
Metadata-Version: 2.1
|
|
2
2
|
Name: my_aws_helpers
|
|
3
|
-
Version:
|
|
3
|
+
Version: 2.0.0
|
|
4
4
|
Summary: AWS Helpers
|
|
5
5
|
Home-page: https://github.com/JarrodMccarthy/aws_helpers.git
|
|
6
6
|
Author: Jarrod McCarthy
|
|
7
|
+
License: UNKNOWN
|
|
7
8
|
Platform: any
|
|
8
9
|
Classifier: License :: Other/Proprietary License
|
|
9
10
|
Classifier: Programming Language :: Python
|
|
10
11
|
Classifier: Programming Language :: Python :: 3.8
|
|
11
12
|
Classifier: Programming Language :: Python :: 3.9
|
|
12
13
|
Classifier: Programming Language :: Python :: 3.10
|
|
13
|
-
|
|
14
|
+
|
|
15
|
+
UNKNOWN
|
|
16
|
+
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
from typing import Optional, Dict, Any
|
|
2
|
+
import json
|
|
3
|
+
from my_aws_helpers.errors import *
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class API:
|
|
7
|
+
def response(code: int, body: Optional[str] = None):
|
|
8
|
+
return {
|
|
9
|
+
"statusCode": code,
|
|
10
|
+
"headers": {"Access-Control-Allow-Origin": "*"},
|
|
11
|
+
"body": body,
|
|
12
|
+
}
|
|
13
|
+
|
|
14
|
+
def get_optional_query_string_param(event: dict, param: str) -> Optional[Any]:
|
|
15
|
+
query_string_params = event.get("queryStringParameters")
|
|
16
|
+
if query_string_params is None:
|
|
17
|
+
return None
|
|
18
|
+
else:
|
|
19
|
+
return query_string_params.get(param)
|
|
20
|
+
|
|
21
|
+
def get_optional_body_param(event: dict, param: str):
|
|
22
|
+
body = event.get("body")
|
|
23
|
+
if body is None:
|
|
24
|
+
return None
|
|
25
|
+
else:
|
|
26
|
+
body = json.loads(body)
|
|
27
|
+
param_value = body.get(param)
|
|
28
|
+
return param_value
|
|
29
|
+
|
|
30
|
+
def parse_payload(event: Dict[str, Any]):
|
|
31
|
+
payload = {}
|
|
32
|
+
if event.get("queryStringParameters"):
|
|
33
|
+
payload["queryStringParameters"] = event["queryStringParameters"]
|
|
34
|
+
if event.get("pathParameters"):
|
|
35
|
+
payload["pathParameters"] = event["pathParameters"]
|
|
36
|
+
if event.get("body"):
|
|
37
|
+
payload["body"] = json.loads(event["body"])
|
|
38
|
+
return payload
|
|
39
|
+
|
|
40
|
+
def handle_error_response(func):
|
|
41
|
+
def wrapper(event, context):
|
|
42
|
+
try:
|
|
43
|
+
response = func(event, context)
|
|
44
|
+
return API.response(code=200, body=json.dumps(response))
|
|
45
|
+
except ClientError as e:
|
|
46
|
+
return API.response(code=400, body=json.dumps({"Error": f"{e}"}))
|
|
47
|
+
except NotFoundError as e:
|
|
48
|
+
return API.response(code=404, body=json.dumps({"Error": f"{e}"}))
|
|
49
|
+
except ServerError as e:
|
|
50
|
+
return API.response(code=500, body=json.dumps({"Error": f"{e}"}))
|
|
51
|
+
except Exception as e:
|
|
52
|
+
return API.response(code=500, body=json.dumps({"Error": f"{e}"}))
|
|
53
|
+
|
|
54
|
+
return wrapper
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
from my_aws_helpers.logging import select_powertools_logger
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
logger = select_powertools_logger("aws-helpers-s3")
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class Auth:
|
|
8
|
+
|
|
9
|
+
@staticmethod
|
|
10
|
+
def get_bearer_from_lambda_event(event: dict) -> str:
|
|
11
|
+
try:
|
|
12
|
+
bearer_token = event["headers"]["Authorization"]
|
|
13
|
+
return str(bearer_token).split("Bearer")[-1]
|
|
14
|
+
except Exception as e:
|
|
15
|
+
logger.exception(f"Failed to get bearer from lambda event due to {e}")
|
|
16
|
+
return None
|
|
@@ -51,7 +51,7 @@
|
|
|
51
51
|
# 'Name': 'Full Name',
|
|
52
52
|
# 'Value': user.fullname,
|
|
53
53
|
# 'Name': 'name',
|
|
54
|
-
# 'Value': user.fullname,
|
|
54
|
+
# 'Value': user.fullname,
|
|
55
55
|
# 'Name': 'nickname',
|
|
56
56
|
# 'Value': user.nickname,
|
|
57
57
|
# 'Name': 'email',
|
|
@@ -75,7 +75,7 @@
|
|
|
75
75
|
# def _init_auth(self, cognito_username: str, user: User):
|
|
76
76
|
# """Initiate Auth (SRP) -> Returns Challenge"""
|
|
77
77
|
# try:
|
|
78
|
-
# response = self.client.initiate_auth(
|
|
78
|
+
# response = self.client.initiate_auth(
|
|
79
79
|
# AuthFlow='USER_PASSWORD_AUTH', #'USER_SRP_AUTH'|'REFRESH_TOKEN_AUTH'|'REFRESH_TOKEN'|'CUSTOM_AUTH'|'ADMIN_NO_SRP_AUTH'| 'USER_PASSWORD_AUTH',
|
|
80
80
|
# AuthParameters={
|
|
81
81
|
# 'USERNAME': user.email,
|
|
@@ -89,7 +89,7 @@
|
|
|
89
89
|
# except Exception as e:
|
|
90
90
|
# raise Exception(f"Error {e}: Issue during initialising auth for user {cognito_username}")
|
|
91
91
|
|
|
92
|
-
# def _respond_to_auth_challenge(self, init_auth_response, user: User, cognito_username: str):
|
|
92
|
+
# def _respond_to_auth_challenge(self, init_auth_response, user: User, cognito_username: str):
|
|
93
93
|
# """Respond to Auth Challenge -> Cognito Tokens"""
|
|
94
94
|
# try:
|
|
95
95
|
# cognito_username = init_auth_response["ChallengeParameters"].get('USER_ID_FOR_SRP') #needs testing
|
|
@@ -110,13 +110,13 @@
|
|
|
110
110
|
# return response
|
|
111
111
|
# except Exception as e:
|
|
112
112
|
# raise Exception(f"Error {e}: Issue responding to auth challenge for user {user.email}")
|
|
113
|
-
|
|
113
|
+
|
|
114
114
|
# # def get_token(self, user: User, cognito_username: str):
|
|
115
115
|
# # init_auth_response = self._init_auth(cognito_username, user)
|
|
116
116
|
# # response = self._respond_to_auth_challenge(init_auth_response, user, cognito_username)
|
|
117
117
|
# # return response
|
|
118
118
|
|
|
119
|
-
# # def sign_up(self, user: User):
|
|
119
|
+
# # def sign_up(self, user: User):
|
|
120
120
|
# # response = self.create_user(user)
|
|
121
121
|
# # cognito_username = response['User'].get('Username')
|
|
122
122
|
# # get_token_response = self.get_token(user, cognito_username)
|
|
@@ -141,4 +141,4 @@
|
|
|
141
141
|
# # JWT = tokens['id_token']
|
|
142
142
|
# # response = requests.request("GET", self.cognito_known_tokens_url, headers={}, data={})
|
|
143
143
|
# # public = json.loads(response.text) #loads a json str into dict
|
|
144
|
-
# # return jwt.decode(JWT, public) #decode jwt
|
|
144
|
+
# # return jwt.decode(JWT, public) #decode jwt
|
|
@@ -0,0 +1,202 @@
|
|
|
1
|
+
from typing import List, Any, Optional
|
|
2
|
+
from datetime import datetime, date
|
|
3
|
+
import boto3
|
|
4
|
+
from abc import ABC, abstractmethod
|
|
5
|
+
from decimal import Decimal, Context
|
|
6
|
+
|
|
7
|
+
from my_aws_helpers.logging import select_powertools_logger
|
|
8
|
+
|
|
9
|
+
logger = select_powertools_logger("aws-helpers-dynamo")
|
|
10
|
+
|
|
11
|
+
class MetaData:
|
|
12
|
+
"""
|
|
13
|
+
This class is a convenience class,
|
|
14
|
+
each of its attributes will be attached to objects that inherit from `BaseTableObject`
|
|
15
|
+
"""
|
|
16
|
+
|
|
17
|
+
created_by: Optional[str]
|
|
18
|
+
created_on: Optional[datetime]
|
|
19
|
+
updated_by: Optional[str]
|
|
20
|
+
updated_on: Optional[datetime]
|
|
21
|
+
|
|
22
|
+
def set_timestamp(self, ts: Any) -> datetime:
|
|
23
|
+
"""Be absolutely sure timestamps are datetimes"""
|
|
24
|
+
if isinstance(ts, datetime):
|
|
25
|
+
return ts
|
|
26
|
+
else:
|
|
27
|
+
return datetime.now()
|
|
28
|
+
|
|
29
|
+
def __init__(self, **kwargs) -> None:
|
|
30
|
+
self.created_by = (
|
|
31
|
+
kwargs["created_by"] if kwargs.get("created_by") else self._get_user()
|
|
32
|
+
)
|
|
33
|
+
self.updated_by = (
|
|
34
|
+
kwargs["updated_by"] if kwargs.get("updated_by") else self._get_user()
|
|
35
|
+
)
|
|
36
|
+
self.created_on = self.set_timestamp(ts=kwargs.get("created_on"))
|
|
37
|
+
self.updated_on = self.set_timestamp(ts=kwargs.get("updated_on"))
|
|
38
|
+
|
|
39
|
+
def _get_user(self):
|
|
40
|
+
"""This should probably do some clever thing to get the actual user details from the token or something"""
|
|
41
|
+
return ""
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
class BaseTableObject(MetaData):
|
|
45
|
+
"""
|
|
46
|
+
An Abstract class that helps ensure your objects
|
|
47
|
+
conform to the AssetTable schema and
|
|
48
|
+
implement serialisation/deserialisation for Dynamo
|
|
49
|
+
"""
|
|
50
|
+
|
|
51
|
+
pk: str
|
|
52
|
+
sk: str
|
|
53
|
+
|
|
54
|
+
def _get_pk(self):
|
|
55
|
+
pass
|
|
56
|
+
|
|
57
|
+
def _get_sk(self):
|
|
58
|
+
pass
|
|
59
|
+
|
|
60
|
+
@abstractclassmethod
|
|
61
|
+
def _from_dynamo_representation():
|
|
62
|
+
"""
|
|
63
|
+
Deserialises this object from Dynamo Representation
|
|
64
|
+
"""
|
|
65
|
+
pass
|
|
66
|
+
|
|
67
|
+
@abstractmethod
|
|
68
|
+
def _to_dynamo_representation():
|
|
69
|
+
"""
|
|
70
|
+
Serialises this object to Dynamo Representation
|
|
71
|
+
"""
|
|
72
|
+
pass
|
|
73
|
+
|
|
74
|
+
def _optional_get(self, kwargs: dict, key: str, default: Any):
|
|
75
|
+
return kwargs.get(key) if kwargs.get(key) else default
|
|
76
|
+
|
|
77
|
+
def __init__(self, **kwargs) -> None:
|
|
78
|
+
super().__init__(**kwargs)
|
|
79
|
+
self.pk = self._optional_get(kwargs=kwargs, key="pk", default=self._get_pk())
|
|
80
|
+
self.sk = self._optional_get(kwargs=kwargs, key="sk", default=self._get_sk())
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
class DynamoSerialiser:
|
|
84
|
+
|
|
85
|
+
@staticmethod
|
|
86
|
+
def _serialise(obj: Any):
|
|
87
|
+
if isinstance(obj, datetime) or isinstance(obj, date):
|
|
88
|
+
return obj.isoformat()
|
|
89
|
+
if isinstance(obj, float):
|
|
90
|
+
ctx = Context(prec = 38)
|
|
91
|
+
return ctx.create_decimal_from_float(obj)
|
|
92
|
+
return obj
|
|
93
|
+
|
|
94
|
+
@staticmethod
|
|
95
|
+
def object_serialiser(obj: Any):
|
|
96
|
+
if isinstance(obj, list):
|
|
97
|
+
return [DynamoSerialiser.object_serialiser(obj=obj) for obj in obj]
|
|
98
|
+
if isinstance(obj, dict):
|
|
99
|
+
return {k: DynamoSerialiser.object_serialiser(v) for k, v in obj.items()}
|
|
100
|
+
return DynamoSerialiser._serialise(obj=obj)
|
|
101
|
+
|
|
102
|
+
class Dynamo:
|
|
103
|
+
table: boto3.resource
|
|
104
|
+
|
|
105
|
+
def __init__(self, table_name: str) -> None:
|
|
106
|
+
ddb = boto3.resource("dynamodb")
|
|
107
|
+
self.table = ddb.Table(table_name)
|
|
108
|
+
|
|
109
|
+
def put_item(self, item: dict):
|
|
110
|
+
return self.table.put_item(Item=item)
|
|
111
|
+
|
|
112
|
+
def get_item(self, item: dict):
|
|
113
|
+
return self.table.get_item(Item=item)
|
|
114
|
+
|
|
115
|
+
def delete_item(self, item: dict):
|
|
116
|
+
return self.table.delete_item(Item=item)
|
|
117
|
+
|
|
118
|
+
def batch_put(self, items: List[dict]) -> None:
|
|
119
|
+
with self.table.batch_writer() as batch:
|
|
120
|
+
for item in items:
|
|
121
|
+
batch.put_item(Item=item)
|
|
122
|
+
return
|
|
123
|
+
|
|
124
|
+
def batch_delete(self, items: List[dict]) -> None:
|
|
125
|
+
with self.table.batch_writer() as batch:
|
|
126
|
+
for item in items:
|
|
127
|
+
batch.delete_item(Key=item)
|
|
128
|
+
return
|
|
129
|
+
|
|
130
|
+
def _deep_scan(self):
|
|
131
|
+
response = self.table.scan()
|
|
132
|
+
items: List = response["Items"]
|
|
133
|
+
while response.get("LastEvaluatedKey") is not None:
|
|
134
|
+
response = self.table.scan(ExclusiveStartKey = response["LastEvaluatedKey"])
|
|
135
|
+
if response.get("Items") is not None:
|
|
136
|
+
items.extend(response["Items"])
|
|
137
|
+
if response.get("LastEvaluatedKey") is None:
|
|
138
|
+
break
|
|
139
|
+
return items
|
|
140
|
+
|
|
141
|
+
def delete_table_items(self, partition_key_name: str = "pk", sort_key_name: str = "sk") -> bool:
|
|
142
|
+
try:
|
|
143
|
+
items = self._deep_scan()
|
|
144
|
+
delete_repr_items = [
|
|
145
|
+
{
|
|
146
|
+
partition_key_name: item[partition_key_name],
|
|
147
|
+
sort_key_name: item[sort_key_name],
|
|
148
|
+
}
|
|
149
|
+
for item in items
|
|
150
|
+
]
|
|
151
|
+
self.batch_delete(items = delete_repr_items)
|
|
152
|
+
return True
|
|
153
|
+
except Exception as e:
|
|
154
|
+
logger.exception(f'Failed to delete table items due to {e}')
|
|
155
|
+
return False
|
|
156
|
+
|
|
157
|
+
def to_dynamo_representation(obj: dict):
|
|
158
|
+
"""
|
|
159
|
+
Attempts to put common datatype transformations in one spot
|
|
160
|
+
"""
|
|
161
|
+
new_obj = dict()
|
|
162
|
+
for key, value in obj.items():
|
|
163
|
+
new_obj[key] = _datatype_map(value=value)
|
|
164
|
+
return new_obj
|
|
165
|
+
|
|
166
|
+
|
|
167
|
+
def _datatype_map(value: Any):
|
|
168
|
+
if isinstance(value, float):
|
|
169
|
+
return Decimal(str(value))
|
|
170
|
+
if (isinstance(value, date)) or (isinstance(value, datetime)):
|
|
171
|
+
return value.isoformat()
|
|
172
|
+
if isinstance(value, list):
|
|
173
|
+
return [_datatype_map(value=item) for item in value]
|
|
174
|
+
if isinstance(value, dict):
|
|
175
|
+
new_obj = dict()
|
|
176
|
+
for k, v in value.items():
|
|
177
|
+
new_obj[k] = _datatype_map(value=v)
|
|
178
|
+
return new_obj
|
|
179
|
+
return value
|
|
180
|
+
|
|
181
|
+
class BaseQueries(ABC):
|
|
182
|
+
table_name: str
|
|
183
|
+
|
|
184
|
+
def __init__(self, table_name: str, client: Optional[Dynamo] = None) -> None:
|
|
185
|
+
self.table_name = table_name
|
|
186
|
+
self.client = self._get_client() if client is None else client
|
|
187
|
+
|
|
188
|
+
def _get_client(self):
|
|
189
|
+
return Dynamo(table_name=self.table_name)
|
|
190
|
+
|
|
191
|
+
def _iterative_query(self, query_kwargs: dict) -> List[dict]:
|
|
192
|
+
results = list()
|
|
193
|
+
last_evaluated_key = "not none"
|
|
194
|
+
exclusive_start_key = None
|
|
195
|
+
while last_evaluated_key is not None:
|
|
196
|
+
if exclusive_start_key is not None:
|
|
197
|
+
query_kwargs["ExclusiveStartKey"] = exclusive_start_key
|
|
198
|
+
result = self.client.table.query(**query_kwargs)
|
|
199
|
+
results += result["Items"]
|
|
200
|
+
last_evaluated_key = result.get("LastEvaluatedKey")
|
|
201
|
+
exclusive_start_key = last_evaluated_key
|
|
202
|
+
return results
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
import os
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
def select_powertools_logger(service_name: str) -> logging.Logger:
|
|
6
|
+
"""
|
|
7
|
+
Returns the powertools logger if it can be found,
|
|
8
|
+
Returns a Logger with name = service_name if powertools logger is not found
|
|
9
|
+
"""
|
|
10
|
+
existing_loggers = [name for name in logging.root.manager.loggerDict]
|
|
11
|
+
powertools_service_name = os.environ.get("POWERTOOLS_SERVICE_NAME")
|
|
12
|
+
if powertools_service_name is not None:
|
|
13
|
+
logger = (
|
|
14
|
+
logging.getLogger(powertools_service_name)
|
|
15
|
+
if powertools_service_name in existing_loggers
|
|
16
|
+
else None
|
|
17
|
+
)
|
|
18
|
+
if logger:
|
|
19
|
+
return logger
|
|
20
|
+
return logging.getLogger(service_name)
|
|
@@ -0,0 +1,207 @@
|
|
|
1
|
+
import boto3
|
|
2
|
+
import io
|
|
3
|
+
import json
|
|
4
|
+
from typing import Optional, Any, Dict
|
|
5
|
+
from copy import copy
|
|
6
|
+
import os
|
|
7
|
+
import gzip
|
|
8
|
+
from enum import Enum
|
|
9
|
+
from my_aws_helpers.logging import select_powertools_logger
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
logger = select_powertools_logger("aws-helpers-s3")
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class ContentType(str, Enum):
|
|
16
|
+
plain_text = "text/plain"
|
|
17
|
+
xml_content = "text/xml"
|
|
18
|
+
json_content = "application/json"
|
|
19
|
+
pdf_content = "application/pdf"
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class ContentEncoding(str, Enum):
|
|
23
|
+
gzip = "gzip"
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class S3Location:
|
|
27
|
+
bucket: str
|
|
28
|
+
file_name: str
|
|
29
|
+
location: str
|
|
30
|
+
|
|
31
|
+
@classmethod
|
|
32
|
+
def from_location(cls, location: str):
|
|
33
|
+
bucket, file_name = location.split("/")[0], "/".join(location.split("/")[1:])
|
|
34
|
+
return cls(bucket=bucket, file_name=file_name)
|
|
35
|
+
|
|
36
|
+
def __init__(self, bucket: str, file_name: str) -> None:
|
|
37
|
+
self.bucket = bucket
|
|
38
|
+
self.file_name = file_name
|
|
39
|
+
self.location = f"{self.bucket}/{self.file_name}"
|
|
40
|
+
|
|
41
|
+
def serialise(self):
|
|
42
|
+
return copy(vars(self))
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
class S3:
|
|
46
|
+
client: boto3.client
|
|
47
|
+
|
|
48
|
+
def __init__(self, client: Optional[boto3.client] = None) -> None:
|
|
49
|
+
self.client = client if client else self._get_client()
|
|
50
|
+
|
|
51
|
+
def _get_client(self) -> boto3.client:
|
|
52
|
+
region_name = os.environ["AWS_DEFAULT_REGION"]
|
|
53
|
+
s3_client = boto3.client("s3", region_name=region_name)
|
|
54
|
+
endpoint_url = s3_client.meta.endpoint_url
|
|
55
|
+
s3_client = boto3.client(
|
|
56
|
+
"s3", region_name=region_name, endpoint_url=endpoint_url
|
|
57
|
+
)
|
|
58
|
+
return s3_client
|
|
59
|
+
|
|
60
|
+
def _streaming_body_to_dict(self, payload):
|
|
61
|
+
file_like_obj = io.BytesIO(payload.read())
|
|
62
|
+
response = json.loads(file_like_obj.getvalue())
|
|
63
|
+
return response
|
|
64
|
+
|
|
65
|
+
def put_json_object(self, bucket_name: str, file_name: str, object: dict):
|
|
66
|
+
return self.client.put_object(
|
|
67
|
+
Body=json.dumps(object), Bucket=bucket_name, Key=file_name
|
|
68
|
+
)
|
|
69
|
+
|
|
70
|
+
def get_object(self, bucket_name: str, file_name: str):
|
|
71
|
+
response = self.client.get_object(Bucket=bucket_name, Key=file_name)
|
|
72
|
+
return self._streaming_body_to_dict(response["Body"])
|
|
73
|
+
|
|
74
|
+
def get_presigned_url(
|
|
75
|
+
self, bucket_name: str, file_name: str, expires_in: int = 3600
|
|
76
|
+
):
|
|
77
|
+
return self.client.generate_presigned_url(
|
|
78
|
+
"get_object",
|
|
79
|
+
Params={
|
|
80
|
+
"Bucket": bucket_name,
|
|
81
|
+
"Key": file_name,
|
|
82
|
+
},
|
|
83
|
+
ExpiresIn=expires_in,
|
|
84
|
+
)
|
|
85
|
+
|
|
86
|
+
def get_s3_location_from_bucket_file(
|
|
87
|
+
bucket_name: str, file_name: str
|
|
88
|
+
) -> S3Location:
|
|
89
|
+
return S3Location(bucket=bucket_name, file_name=file_name)
|
|
90
|
+
|
|
91
|
+
def get_bucket_file_from_s3_location(s3_location: S3Location) -> S3Location:
|
|
92
|
+
return S3Location.from_location(location=s3_location)
|
|
93
|
+
|
|
94
|
+
def save_document_content(
|
|
95
|
+
self,
|
|
96
|
+
file_contents: bytes,
|
|
97
|
+
s3_location: S3Location,
|
|
98
|
+
content_encoding: str = "",
|
|
99
|
+
content_type: str = "application/pdf",
|
|
100
|
+
compress: bool = True,
|
|
101
|
+
) -> Optional[S3Location]:
|
|
102
|
+
"""
|
|
103
|
+
saves document content to bucket, in file_name
|
|
104
|
+
Options for content_type:
|
|
105
|
+
"application/pdf"
|
|
106
|
+
"text/plain"
|
|
107
|
+
"application/json"
|
|
108
|
+
probably more
|
|
109
|
+
Options for content_encoding:
|
|
110
|
+
"": default encoding
|
|
111
|
+
"gzip": compressed contents
|
|
112
|
+
"""
|
|
113
|
+
try:
|
|
114
|
+
if compress or s3_location.file_name.endswith(".gz"):
|
|
115
|
+
file_contents = gzip.compress(file_contents)
|
|
116
|
+
content_encoding = ContentEncoding.gzip.value
|
|
117
|
+
obj = self.client.Object(s3_location.bucket, s3_location.file_name)
|
|
118
|
+
obj.put(
|
|
119
|
+
Body=file_contents,
|
|
120
|
+
ContentType=content_type,
|
|
121
|
+
ContentEncoding=content_encoding,
|
|
122
|
+
)
|
|
123
|
+
return s3_location
|
|
124
|
+
except Exception as e:
|
|
125
|
+
logger.exception(e)
|
|
126
|
+
return None
|
|
127
|
+
|
|
128
|
+
def read_binary_from_s3(self, s3_location: S3Location) -> Optional[bytes]:
|
|
129
|
+
try:
|
|
130
|
+
obj = self.client.Object(s3_location.bucket, s3_location.file_name)
|
|
131
|
+
d_bytes = io.BytesIO()
|
|
132
|
+
obj.download_fileobj(d_bytes)
|
|
133
|
+
d_bytes.seek(0)
|
|
134
|
+
if obj.content_encoding == ContentEncoding.gzip.value:
|
|
135
|
+
try:
|
|
136
|
+
with gzip.GzipFile(fileobj=d_bytes) as gz_file:
|
|
137
|
+
return gz_file.read()
|
|
138
|
+
except gzip.BadGzipFile:
|
|
139
|
+
d_bytes.seek(0)
|
|
140
|
+
return d_bytes.read()
|
|
141
|
+
except Exception as e:
|
|
142
|
+
logger.exception(f"Failed to read binary from s3 due to {e}")
|
|
143
|
+
return None
|
|
144
|
+
|
|
145
|
+
def save_text_to_s3(self, text: str, s3_location: S3Location):
|
|
146
|
+
try:
|
|
147
|
+
file_contents = bytes(text.encode("UTF-8"))
|
|
148
|
+
return self.save_document_content(
|
|
149
|
+
file_contents=file_contents,
|
|
150
|
+
s3_location=s3_location,
|
|
151
|
+
content_type=ContentType.plain_text.value,
|
|
152
|
+
compress=True,
|
|
153
|
+
content_encoding=ContentEncoding.gzip.value,
|
|
154
|
+
)
|
|
155
|
+
except Exception as e:
|
|
156
|
+
logger.exception(f"Failed to save text to s3 due to {e}")
|
|
157
|
+
return None
|
|
158
|
+
|
|
159
|
+
def save_xml_to_s3(self, xml_text: bytes, s3_location: S3Location):
|
|
160
|
+
"""
|
|
161
|
+
xml_text tends to come from:
|
|
162
|
+
root = lxml.etree.ElementTree().get_root()
|
|
163
|
+
xml_text = ET.tostring(root, encoding='utf-8')
|
|
164
|
+
"""
|
|
165
|
+
try:
|
|
166
|
+
return self.save_document_content(
|
|
167
|
+
file_contents=xml_text,
|
|
168
|
+
s3_location=s3_location,
|
|
169
|
+
content_type=ContentType.xml_content.value,
|
|
170
|
+
compress=True,
|
|
171
|
+
content_encoding=ContentEncoding.gzip.value,
|
|
172
|
+
)
|
|
173
|
+
except Exception as e:
|
|
174
|
+
logger.exception(f"Failed to save xml text to s3 due to {e}")
|
|
175
|
+
return None
|
|
176
|
+
|
|
177
|
+
def save_xml_to_s3(self, pdf_content: bytes, s3_location: S3Location):
|
|
178
|
+
"""
|
|
179
|
+
pdf_content tends to come from:
|
|
180
|
+
PyMuPdf.Document().write()
|
|
181
|
+
"""
|
|
182
|
+
try:
|
|
183
|
+
return self.save_document_content(
|
|
184
|
+
file_contents=pdf_content,
|
|
185
|
+
s3_location=s3_location,
|
|
186
|
+
content_type=ContentType.pdf_content.value,
|
|
187
|
+
compress=True,
|
|
188
|
+
content_encoding=ContentEncoding.gzip.value,
|
|
189
|
+
)
|
|
190
|
+
except Exception as e:
|
|
191
|
+
logger.exception(f"Failed to save pdf to s3 due to {e}")
|
|
192
|
+
return None
|
|
193
|
+
|
|
194
|
+
def save_dict_to_s3(self, content: Dict[str, Any], s3_location: S3Location):
|
|
195
|
+
""" """
|
|
196
|
+
try:
|
|
197
|
+
file_contents = bytes(json.dumps(content).encode("UTF-8"))
|
|
198
|
+
return self.save_document_content(
|
|
199
|
+
file_contents=file_contents,
|
|
200
|
+
s3_location=s3_location,
|
|
201
|
+
content_type=ContentType.json_content.value,
|
|
202
|
+
compress=True,
|
|
203
|
+
content_encoding=ContentEncoding.gzip.value,
|
|
204
|
+
)
|
|
205
|
+
except Exception as e:
|
|
206
|
+
logger.exception(f"Failed to save dict to s3 due to {e}")
|
|
207
|
+
return None
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
import boto3
|
|
2
|
+
from typing import Optional
|
|
3
|
+
import json
|
|
4
|
+
from uuid import uuid4
|
|
5
|
+
from my_aws_helpers.api import API
|
|
6
|
+
from my_aws_helpers.errors import *
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class SFN:
|
|
10
|
+
client: boto3.client
|
|
11
|
+
|
|
12
|
+
def __init__(self, client: Optional[boto3.client] = None) -> None:
|
|
13
|
+
self.client = client if client else boto3.client("stepfunctions")
|
|
14
|
+
|
|
15
|
+
def start_execution(
|
|
16
|
+
self,
|
|
17
|
+
sfn_arn: str,
|
|
18
|
+
event: Optional[dict] = None,
|
|
19
|
+
name: Optional[str] = uuid4().hex,
|
|
20
|
+
):
|
|
21
|
+
input_event = json.dumps(event) if event else event
|
|
22
|
+
return self.client.start_execution(
|
|
23
|
+
stateMachineArn=sfn_arn,
|
|
24
|
+
name=name,
|
|
25
|
+
input=input_event,
|
|
26
|
+
)
|
|
27
|
+
|
|
28
|
+
def handle_error_response(func):
|
|
29
|
+
def wrapper(event, context):
|
|
30
|
+
try:
|
|
31
|
+
response = func(event, context)
|
|
32
|
+
return API.response(code=200, body=json.dumps(response))
|
|
33
|
+
except AlreadyExists as e:
|
|
34
|
+
return API.response(code=201, body=json.dumps({"Success": f"{e}"}))
|
|
35
|
+
except ClientError as e:
|
|
36
|
+
return API.response(code=400, body=json.dumps({"Error": f"{e}"}))
|
|
37
|
+
except NotFoundError as e:
|
|
38
|
+
return API.response(code=404, body=json.dumps({"Error": f"{e}"}))
|
|
39
|
+
except ServerError as e:
|
|
40
|
+
return API.response(code=500, body=json.dumps({"Error": f"{e}"}))
|
|
41
|
+
except Exception as e:
|
|
42
|
+
return API.response(code=500, body=json.dumps({"Error": f"{e}"}))
|
|
43
|
+
|
|
44
|
+
return wrapper
|
|
@@ -1,13 +1,16 @@
|
|
|
1
1
|
Metadata-Version: 2.1
|
|
2
|
-
Name:
|
|
3
|
-
Version:
|
|
2
|
+
Name: my-aws-helpers
|
|
3
|
+
Version: 2.0.0
|
|
4
4
|
Summary: AWS Helpers
|
|
5
5
|
Home-page: https://github.com/JarrodMccarthy/aws_helpers.git
|
|
6
6
|
Author: Jarrod McCarthy
|
|
7
|
+
License: UNKNOWN
|
|
7
8
|
Platform: any
|
|
8
9
|
Classifier: License :: Other/Proprietary License
|
|
9
10
|
Classifier: Programming Language :: Python
|
|
10
11
|
Classifier: Programming Language :: Python :: 3.8
|
|
11
12
|
Classifier: Programming Language :: Python :: 3.9
|
|
12
13
|
Classifier: Programming Language :: Python :: 3.10
|
|
13
|
-
|
|
14
|
+
|
|
15
|
+
UNKNOWN
|
|
16
|
+
|
|
@@ -2,9 +2,11 @@ README.md
|
|
|
2
2
|
setup.cfg
|
|
3
3
|
setup.py
|
|
4
4
|
my_aws_helpers/api.py
|
|
5
|
+
my_aws_helpers/auth.py
|
|
5
6
|
my_aws_helpers/cognito.py
|
|
6
7
|
my_aws_helpers/dynamo.py
|
|
7
8
|
my_aws_helpers/errors.py
|
|
9
|
+
my_aws_helpers/logging.py
|
|
8
10
|
my_aws_helpers/s3.py
|
|
9
11
|
my_aws_helpers/sfn.py
|
|
10
12
|
my_aws_helpers.egg-info/PKG-INFO
|
|
@@ -1,65 +0,0 @@
|
|
|
1
|
-
from typing import Optional, Dict, Any
|
|
2
|
-
import json
|
|
3
|
-
from my_aws_helpers.errors import *
|
|
4
|
-
|
|
5
|
-
class API:
|
|
6
|
-
def response(code: int, body: Optional[str] = None):
|
|
7
|
-
return {
|
|
8
|
-
"statusCode": code,
|
|
9
|
-
"headers": {"Access-Control-Allow-Origin": "*"},
|
|
10
|
-
"body": body,
|
|
11
|
-
}
|
|
12
|
-
|
|
13
|
-
def get_optional_query_string_param(event: dict, param: str) -> Optional[Any]:
|
|
14
|
-
query_string_params = event.get("queryStringParameters")
|
|
15
|
-
if query_string_params is None:
|
|
16
|
-
return None
|
|
17
|
-
else:
|
|
18
|
-
return query_string_params.get(param)
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
def get_optional_body_param(event: dict, param: str):
|
|
22
|
-
body = event.get("body")
|
|
23
|
-
if body is None:
|
|
24
|
-
return None
|
|
25
|
-
else:
|
|
26
|
-
body = json.loads(body)
|
|
27
|
-
param_value = body.get(param)
|
|
28
|
-
return param_value
|
|
29
|
-
|
|
30
|
-
def parse_payload(event: Dict[str, Any]):
|
|
31
|
-
payload = {}
|
|
32
|
-
if event.get("queryStringParameters"): payload["queryStringParameters"] = event["queryStringParameters"]
|
|
33
|
-
if event.get("pathParameters"): payload["pathParameters"] = event["pathParameters"]
|
|
34
|
-
if event.get("body"): payload["body"] = json.loads(event["body"])
|
|
35
|
-
return payload
|
|
36
|
-
|
|
37
|
-
def handle_error_response(func):
|
|
38
|
-
def wrapper(event, context):
|
|
39
|
-
try:
|
|
40
|
-
response = func(event, context)
|
|
41
|
-
return API.response(
|
|
42
|
-
code = 200,
|
|
43
|
-
body = json.dumps(response)
|
|
44
|
-
)
|
|
45
|
-
except ClientError as e:
|
|
46
|
-
return API.response(
|
|
47
|
-
code = 400,
|
|
48
|
-
body = json.dumps({"Error": f"{e}"})
|
|
49
|
-
)
|
|
50
|
-
except NotFoundError as e:
|
|
51
|
-
return API.response(
|
|
52
|
-
code = 404,
|
|
53
|
-
body = json.dumps({"Error": f"{e}"})
|
|
54
|
-
)
|
|
55
|
-
except ServerError as e:
|
|
56
|
-
return API.response(
|
|
57
|
-
code = 500,
|
|
58
|
-
body = json.dumps({"Error": f"{e}"})
|
|
59
|
-
)
|
|
60
|
-
except Exception as e:
|
|
61
|
-
return API.response(
|
|
62
|
-
code = 500,
|
|
63
|
-
body = json.dumps({"Error": f"{e}"})
|
|
64
|
-
)
|
|
65
|
-
return wrapper
|
|
@@ -1,125 +0,0 @@
|
|
|
1
|
-
from typing import List, Any, Optional
|
|
2
|
-
from datetime import datetime, date
|
|
3
|
-
import boto3
|
|
4
|
-
from abc import ABC, abstractclassmethod, abstractmethod
|
|
5
|
-
from decimal import Decimal
|
|
6
|
-
|
|
7
|
-
class MetaData:
|
|
8
|
-
"""
|
|
9
|
-
This class is a convenience class,
|
|
10
|
-
each of its attributes will be attached to objects that inherit from `BaseTableObject`
|
|
11
|
-
"""
|
|
12
|
-
created_by: Optional[str]
|
|
13
|
-
created_on: Optional[datetime]
|
|
14
|
-
updated_by: Optional[str]
|
|
15
|
-
updated_on: Optional[datetime]
|
|
16
|
-
|
|
17
|
-
def set_timestamp(self, ts: Any) -> datetime:
|
|
18
|
-
"""Be absolutely sure timestamps are datetimes"""
|
|
19
|
-
if isinstance(ts, datetime):
|
|
20
|
-
return ts
|
|
21
|
-
else:
|
|
22
|
-
return datetime.now()
|
|
23
|
-
|
|
24
|
-
def __init__(self, **kwargs) -> None:
|
|
25
|
-
self.created_by = kwargs["created_by"] if kwargs.get("created_by") else self._get_user()
|
|
26
|
-
self.updated_by = kwargs["updated_by"] if kwargs.get("updated_by") else self._get_user()
|
|
27
|
-
self.created_on = self.set_timestamp(ts=kwargs.get("created_on"))
|
|
28
|
-
self.updated_on = self.set_timestamp(ts=kwargs.get("updated_on"))
|
|
29
|
-
|
|
30
|
-
def _get_user(self):
|
|
31
|
-
"""This should probably do some clever thing to get the actual user details from the token or something"""
|
|
32
|
-
return ""
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
class BaseTableObject(MetaData):
|
|
36
|
-
"""
|
|
37
|
-
An Abstract class that helps ensure your objects
|
|
38
|
-
conform to the AssetTable schema and
|
|
39
|
-
implement serialisation/deserialisation for Dynamo
|
|
40
|
-
"""
|
|
41
|
-
pk: str
|
|
42
|
-
sk: str
|
|
43
|
-
|
|
44
|
-
def _get_pk(self):
|
|
45
|
-
pass
|
|
46
|
-
|
|
47
|
-
def _get_sk(self):
|
|
48
|
-
pass
|
|
49
|
-
|
|
50
|
-
@abstractclassmethod
|
|
51
|
-
def _from_dynamo_representation():
|
|
52
|
-
"""
|
|
53
|
-
Deserialises this object from Dynamo Representation
|
|
54
|
-
"""
|
|
55
|
-
pass
|
|
56
|
-
|
|
57
|
-
@abstractmethod
|
|
58
|
-
def _to_dynamo_representation():
|
|
59
|
-
"""
|
|
60
|
-
Serialises this object to Dynamo Representation
|
|
61
|
-
"""
|
|
62
|
-
pass
|
|
63
|
-
|
|
64
|
-
def _optional_get(self, kwargs: dict, key: str, default: Any):
|
|
65
|
-
return kwargs.get(key) if kwargs.get(key) else default
|
|
66
|
-
|
|
67
|
-
def __init__(self, **kwargs) -> None:
|
|
68
|
-
super().__init__(**kwargs)
|
|
69
|
-
self.pk = self._optional_get(kwargs=kwargs, key='pk', default=self._get_pk())
|
|
70
|
-
self.sk = self._optional_get(kwargs=kwargs, key='sk', default=self._get_sk())
|
|
71
|
-
|
|
72
|
-
class BaseQueries(ABC):
|
|
73
|
-
table_name: str
|
|
74
|
-
|
|
75
|
-
def __init__(self, table_name: str) -> None:
|
|
76
|
-
self.table_name = table_name
|
|
77
|
-
|
|
78
|
-
class Dynamo:
    """Thin convenience wrapper around a boto3 DynamoDB Table resource."""

    def __init__(self, table_name: str) -> None:
        ddb = boto3.resource('dynamodb')
        self.table = ddb.Table(table_name)

    def put_item(self, item: dict):
        """Write `item` (a full attribute map) to the table."""
        return self.table.put_item(Item=item)

    def get_item(self, item: dict):
        """Fetch one item; `item` is the primary-key map.

        Fix: boto3's Table.get_item takes `Key=`; the original passed
        `Item=`, which always raised a parameter-validation error.
        """
        return self.table.get_item(Key=item)

    def delete_item(self, item: dict):
        """Delete one item; `item` is the primary-key map.

        Fix: Table.delete_item takes `Key=`, not `Item=`.
        """
        return self.table.delete_item(Key=item)

    def batch_put(self, items: List[dict]) -> None:
        """Put many items via an auto-flushing batch writer."""
        with self.table.batch_writer() as batch:
            for item in items:
                batch.put_item(Item=item)
        return

    def batch_delete(self, items: List[dict]) -> None:
        """Delete many items by key via a batch writer."""
        with self.table.batch_writer() as batch:
            for item in items:
                batch.delete_item(Key=item)
        return
|
104
|
-
def to_dynamo_representation(obj: dict):
    """
    Return a copy of `obj` with values converted to Dynamo-safe types.

    Centralises the common datatype transformations, applied per-value
    (and recursively, for containers) by `_datatype_map`.
    """
    return {key: _datatype_map(value=value) for key, value in obj.items()}
|
113
|
-
def _datatype_map(value: Any):
|
|
114
|
-
if (isinstance(value, float)):
|
|
115
|
-
return Decimal(str(value))
|
|
116
|
-
if (isinstance(value, date)) or (isinstance(value, datetime)):
|
|
117
|
-
return value.isoformat()
|
|
118
|
-
if (isinstance(value, list)):
|
|
119
|
-
return [_datatype_map(value = item) for item in value]
|
|
120
|
-
if (isinstance(value, dict)):
|
|
121
|
-
new_obj = dict()
|
|
122
|
-
for k, v in value.items():
|
|
123
|
-
new_obj[k] = _datatype_map(value=v)
|
|
124
|
-
return new_obj
|
|
125
|
-
return value
|
|
@@ -1,116 +0,0 @@
|
|
|
1
|
-
import boto3
|
|
2
|
-
import io
|
|
3
|
-
import json
|
|
4
|
-
from typing import Tuple, Optional
|
|
5
|
-
from copy import copy
|
|
6
|
-
import os
|
|
7
|
-
import gzip
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
class S3Location:
    """Value object for an S3 path of the form '<bucket>/<key>'."""

    bucket: str
    file_name: str
    location: str

    @classmethod
    def from_location(cls, location: str):
        """Alternate constructor: split a combined 'bucket/key' string."""
        # Everything before the first '/' is the bucket; the rest is the key.
        bucket, _, file_name = location.partition('/')
        return cls(bucket=bucket, file_name=file_name)

    def __init__(self, bucket: str, file_name: str) -> None:
        self.bucket = bucket
        self.file_name = file_name
        self.location = f"{self.bucket}/{self.file_name}"

    def serialise(self):
        """Return a shallow dict copy of this object's attributes."""
        return copy(vars(self))
|
30
|
-
class S3:
    """Helper wrapping a boto3 S3 client for common JSON/object operations."""

    client: Optional[boto3.client]

    def __init__(self, client: Optional[boto3.client] = None) -> None:
        """Use the supplied client, or lazily build one.

        Backward-compatible improvement: `client` now defaults to None so
        callers may write `S3()`; existing positional callers are unchanged
        (any falsy client already triggered construction).
        """
        self.client = client if client else self._get_client()

    def _get_client(self) -> boto3.client:
        """Build an S3 client pinned to AWS_DEFAULT_REGION.

        The client is created twice: the first instance only discovers the
        resolved endpoint URL, which is then passed explicitly.
        NOTE(review): this looks intended for emulator/endpoint-override
        setups — confirm; otherwise a single boto3.client call suffices.
        """
        region_name = os.environ["AWS_DEFAULT_REGION"]
        s3_client = boto3.client("s3", region_name=region_name)
        endpoint_url = s3_client.meta.endpoint_url
        return boto3.client("s3", region_name=region_name, endpoint_url=endpoint_url)

    def _streaming_body_to_dict(self, payload):
        """Read a botocore StreamingBody and parse its contents as JSON.

        Simplified: json.loads accepts bytes directly, so the intermediate
        BytesIO round-trip was removed (behavior unchanged).
        """
        return json.loads(payload.read())

    def put_json_object(self, bucket_name: str, file_name: str, object: dict):
        """Serialise `object` to JSON and store it at bucket/file_name.

        (`object` shadows the builtin, but renaming it would break keyword
        callers, so it is kept.)
        """
        return self.client.put_object(
            Body=json.dumps(object),
            Bucket=bucket_name,
            Key=file_name,
        )

    def get_object(self, bucket_name: str, file_name: str):
        """Fetch bucket/file_name and decode its body as JSON."""
        response = self.client.get_object(Bucket=bucket_name, Key=file_name)
        return self._streaming_body_to_dict(response["Body"])

    def get_presigned_url(self, bucket_name: str, file_name: str, expires_in: int = 3600):
        """Return a presigned GET URL valid for `expires_in` seconds (default 1h)."""
        return self.client.generate_presigned_url(
            'get_object',
            Params={
                "Bucket": bucket_name,
                "Key": file_name,
            },
            ExpiresIn=expires_in,
        )
|
72
|
-
def get_s3_location_from_bucket_file(bucket_name: str, file_name: str) -> S3Location:
    """Convenience constructor: wrap a bucket/key pair in an S3Location."""
    location = S3Location(bucket=bucket_name, file_name=file_name)
    return location
|
75
|
-
def get_bucket_file_from_s3_location(s3_location: S3Location) -> S3Location:
    """Parse a combined 'bucket/key' location into an S3Location.

    Fix: S3Location.from_location calls str.split on its argument, so
    passing an actual S3Location (as the annotation promises) always raised
    AttributeError. Both forms are now accepted: an S3Location (its
    .location string is used) or a plain 'bucket/key' string.
    """
    location = s3_location.location if isinstance(s3_location, S3Location) else s3_location
    return S3Location.from_location(location=location)
|
78
|
-
def save_document_content(
    self,
    file_contents: bytes,
    file_name: str,
    bucket_name: str,
    content_encoding: str = "",
    content_type: str = "application/pdf",
    compress: bool = True,
) -> S3Location:
    """
    saves document content to bucket, in file_name

    Returns an S3Location pointing at the stored object.

    Options for content_type:
        "application/pdf"
        "text/plain"
        "application/json"
        probably more
    Options for content_encoding:
        "": default encoding
        "gzip": compressed contents (set automatically when compressing)
    """
    # NOTE(review): a ".gz" file name forces gzip compression even when
    # compress=False -- confirm callers never pass already-compressed
    # bytes with a .gz name, or this double-compresses.
    if compress or file_name.endswith(".gz"):
        file_contents = gzip.compress(file_contents)
        content_encoding = "gzip"
    # NOTE(review): `.Object` is the boto3 *resource* API; a client built
    # via boto3.client() has no such attribute -- confirm self.client is a
    # resource wherever this method is attached.
    obj = self.client.Object(bucket_name, file_name)
    obj.put(Body = file_contents, ContentType = content_type, ContentEncoding = content_encoding)
    return S3Location(bucket=bucket_name, file_name=file_name)
|
105
|
-
def read_binary_from_s3(self, s3_location: S3Location) -> bytes:
    """
    Download the object at `s3_location` and return its raw bytes.

    Objects marked Content-Encoding: gzip are transparently decompressed;
    if the gzip header is invalid (mislabelled object) the raw bytes are
    returned instead.
    """
    # NOTE(review): `.Object` is the boto3 *resource* API; a client built
    # via boto3.client() has no such attribute -- confirm self.client is a
    # resource wherever this method is attached.
    obj = self.client.Object(s3_location.bucket, s3_location.file_name)
    d_bytes = io.BytesIO()
    obj.download_fileobj(d_bytes)
    # Rewind before reading/decompressing.
    d_bytes.seek(0)
    if obj.content_encoding == "gzip":
        try:
            with gzip.GzipFile(fileobj=d_bytes) as gz_file:
                return gz_file.read()
        except gzip.BadGzipFile:
            # Object claimed gzip but was not; fall through to raw bytes.
            d_bytes.seek(0)
    return d_bytes.read()
|
|
@@ -1,57 +0,0 @@
|
|
|
1
|
-
import boto3
|
|
2
|
-
from typing import Optional
|
|
3
|
-
import json
|
|
4
|
-
from uuid import uuid4
|
|
5
|
-
from my_aws_helpers.api import API
|
|
6
|
-
from my_aws_helpers.errors import *
|
|
7
|
-
|
|
8
|
-
class SFN:
    """Thin helper around the Step Functions boto3 client."""

    client: boto3.client

    def __init__(self) -> None:
        self.client = boto3.client('stepfunctions')

    def start_execution(self, sfn_arn: str, event: Optional[dict] = None, name: Optional[str] = None):
        """Start an execution of the state machine at `sfn_arn`.

        Fix: the original default was `name=uuid4().hex`, which Python
        evaluates ONCE at import time, so every unnamed call reused the
        same execution name (Step Functions requires execution names to be
        unique per state machine). A fresh id is now generated per call
        when `name` is not supplied.
        """
        if name is None:
            name = uuid4().hex
        # NOTE(review): when event is None, input=None is sent, which fails
        # boto3 parameter validation -- callers appear to always pass an
        # event; confirm before tightening.
        input_event = json.dumps(event) if event else event
        return self.client.start_execution(
            stateMachineArn=sfn_arn,
            name=name,
            input=input_event,
        )
|
22
|
-
def handle_error_response(func):
    """Decorator for Lambda handlers: map domain exceptions to API responses.

    200 on success (body is the JSON-encoded handler result), 201 for
    AlreadyExists (duplicate creation treated as a success ack), 400 for
    ClientError, 404 for NotFoundError, 500 for ServerError and any
    unexpected exception.

    NOTE(review): ClientError is checked before NotFoundError; if
    NotFoundError subclasses ClientError it can never match -- confirm the
    hierarchy in errors.py.
    """
    # Local import keeps this block self-contained in the file.
    from functools import wraps

    @wraps(func)  # fix: preserve the wrapped handler's name/docstring
    def wrapper(event, context):
        try:
            response = func(event, context)
            return API.response(
                code = 200,
                body = json.dumps(response)
            )
        except AlreadyExists as e:
            return API.response(
                code = 201,
                body = json.dumps({"Success": f"{e}"})
            )
        except ClientError as e:
            return API.response(
                code = 400,
                body = json.dumps({"Error": f"{e}"})
            )
        except NotFoundError as e:
            return API.response(
                code = 404,
                body = json.dumps({"Error": f"{e}"})
            )
        except ServerError as e:
            return API.response(
                code = 500,
                body = json.dumps({"Error": f"{e}"})
            )
        except Exception as e:
            # Catch-all boundary: surface unexpected failures as 500s.
            return API.response(
                code = 500,
                body = json.dumps({"Error": f"{e}"})
            )
    return wrapper
|
|
56
|
-
|
|
57
|
-
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|