boto3-assist 0.1.14__py3-none-any.whl → 0.2.1__py3-none-any.whl

This diff shows the changes between package versions as published to their respective public registries. It is provided for informational purposes only.
Files changed (30)
  1. boto3_assist/boto3session.py +3 -2
  2. boto3_assist/cloudwatch/cloudwatch_connection.py +84 -0
  3. boto3_assist/cloudwatch/cloudwatch_connection_tracker.py +17 -0
  4. boto3_assist/cloudwatch/cloudwatch_log_connection.py +62 -0
  5. boto3_assist/cloudwatch/cloudwatch_logs.py +39 -0
  6. boto3_assist/cloudwatch/cloudwatch_query.py +191 -0
  7. boto3_assist/connection.py +101 -0
  8. boto3_assist/connection_tracker.py +8 -8
  9. boto3_assist/dynamodb/dynamodb.py +30 -19
  10. boto3_assist/dynamodb/dynamodb_connection.py +31 -2
  11. boto3_assist/dynamodb/dynamodb_index.py +27 -0
  12. boto3_assist/dynamodb/dynamodb_iservice.py +4 -0
  13. boto3_assist/dynamodb/dynamodb_model_base.py +6 -7
  14. boto3_assist/dynamodb/dynamodb_reserved_words.py +6 -3
  15. boto3_assist/dynamodb/dynamodb_reserved_words.txt +0 -1
  16. boto3_assist/ec2/ec2_connection.py +3 -0
  17. boto3_assist/environment_services/environment_loader.py +67 -3
  18. boto3_assist/environment_services/environment_variables.py +4 -0
  19. boto3_assist/errors/custom_exceptions.py +34 -0
  20. boto3_assist/s3/s3.py +476 -0
  21. boto3_assist/s3/s3_connection.py +120 -0
  22. boto3_assist/utilities/file_operations.py +105 -0
  23. boto3_assist/utilities/http_utility.py +42 -0
  24. boto3_assist/version.py +1 -1
  25. {boto3_assist-0.1.14.dist-info → boto3_assist-0.2.1.dist-info}/METADATA +1 -3
  26. boto3_assist-0.2.1.dist-info/RECORD +43 -0
  27. {boto3_assist-0.1.14.dist-info → boto3_assist-0.2.1.dist-info}/WHEEL +1 -1
  28. boto3_assist-0.1.14.dist-info/RECORD +0 -32
  29. {boto3_assist-0.1.14.dist-info → boto3_assist-0.2.1.dist-info}/licenses/LICENSE-EXPLAINED.txt +0 -0
  30. {boto3_assist-0.1.14.dist-info → boto3_assist-0.2.1.dist-info}/licenses/LICENSE.txt +0 -0
boto3_assist/s3/s3.py ADDED
@@ -0,0 +1,476 @@
+ """
+ Geek Cafe, LLC
+ Maintainers: Eric Wilson
+ MIT License. See Project Root for the license information.
+ """
+
+ import os
+ import tempfile
+ import time
+ from typing import Any, Dict, List, Optional
+
+ from aws_lambda_powertools import Logger
+ from botocore.exceptions import ClientError
+
+ from boto3_assist.errors.custom_exceptions import InvalidHttpMethod
+ from boto3_assist.s3.s3_connection import S3Connection
+ from boto3_assist.utilities.datetime_utility import DatetimeUtility
+ from boto3_assist.utilities.file_operations import FileOperations
+ from boto3_assist.utilities.http_utility import HttpUtility
+
+ logger = Logger(child=True)
+
+
+ class S3(S3Connection):
+     """Common S3 Actions"""
+
+     def __init__(
+         self,
+         *,
+         aws_profile: Optional[str] = None,
+         aws_region: Optional[str] = None,
+         aws_end_point_url: Optional[str] = None,
+         aws_access_key_id: Optional[str] = None,
+         aws_secret_access_key: Optional[str] = None,
+     ) -> None:
+         """Create a new S3 helper, optionally overriding the connection settings.
+
+         Args:
+             aws_profile (Optional[str], optional): AWS profile name. Defaults to None.
+             aws_region (Optional[str], optional): AWS region. Defaults to None.
+             aws_end_point_url (Optional[str], optional): Custom endpoint URL. Defaults to None.
+             aws_access_key_id (Optional[str], optional): AWS access key id. Defaults to None.
+             aws_secret_access_key (Optional[str], optional): AWS secret access key. Defaults to None.
+         """
+         super().__init__(
+             aws_profile=aws_profile,
+             aws_region=aws_region,
+             aws_end_point_url=aws_end_point_url,
+             aws_access_key_id=aws_access_key_id,
+             aws_secret_access_key=aws_secret_access_key,
+         )
+
+     def generate_presigned_url(
+         self,
+         bucket_name: str,
+         key_path: str,
+         user_id: str,
+         file_name: str,
+         meta_data: dict | None = None,
+         expiration=3600,
+         method_type="POST",
+     ) -> Dict[str, Any]:
+         """
+         Create a signed URL for uploading a file to S3.
+         :param bucket_name: The name of the S3 bucket.
+         :param key_path: The object key to sign.
+         :param user_id: The user ID of the user uploading the file.
+         :param file_name: The file name of the file being uploaded.
+         :param meta_data: Optional metadata merged with the generated metadata.
+         :param expiration: The number of seconds the URL is valid for.
+         :param method_type: The HTTP method to sign for ("PUT", "POST", or "GET").
+         :return: A dict with the signed URL, object key, and metadata.
+         """
+         start = DatetimeUtility.get_utc_now()
+         logger.debug(
+             f"Creating signed URL for bucket {bucket_name} for user {user_id} and file {file_name} at {start} UTC"
+         )
+
+         file_extension = FileOperations.get_file_extension(file_name)
+
+         local_meta = {
+             "user_id": f"{user_id}",
+             "file_name": f"{file_name}",
+             "extension": f"{file_extension}",
+             "method": "pre-signed-upload",
+         }
+
+         if not meta_data:
+             meta_data = local_meta
+         else:
+             meta_data.update(local_meta)
+
+         object_key = key_path
+         method_type = method_type.upper()
+
+         signed_url: str | Dict[str, Any]
+         if method_type == "PUT":
+             signed_url = self.client.generate_presigned_url(
+                 "put_object",
+                 Params={
+                     "Bucket": f"{bucket_name}",
+                     "Key": f"{object_key}",
+                     # NOTE: if ContentType or Metadata is included here, the caller must
+                     # send the same values when uploading the file; otherwise S3 returns
+                     # a `SignatureDoesNotMatch` error. They are commented out for now.
+                     # 'ContentType': 'application/octet-stream',
+                     # 'ACL': 'private',
+                     # "Metadata": meta_data,
+                 },
+                 ExpiresIn=expiration,  # URL is valid for x seconds
+             )
+         elif method_type == "POST":
+             signed_url = self.client.generate_presigned_post(
+                 bucket_name,
+                 object_key,
+                 ExpiresIn=expiration,  # URL is valid for x seconds
+             )
+         elif method_type == "GET":
+             signed_url = self.client.generate_presigned_url(
+                 "get_object",
+                 Params={
+                     "Bucket": f"{bucket_name}",
+                     "Key": f"{object_key}",
+                 },
+                 ExpiresIn=expiration,  # URL is valid for x seconds
+             )
+         else:
+             raise InvalidHttpMethod(
+                 f'Unknown method type "{method_type}". Valid types are "PUT", "POST", and "GET".'
+             )
+
+         end = DatetimeUtility.get_utc_now()
+         logger.debug(f"Signed URL created in {end - start}")
+
+         response = {
+             "signed_url": signed_url,
+             "object_key": object_key,
+             "meta_data": meta_data,
+         }
+
+         return response
+
+     def upload_file(
+         self,
+         bucket: str,
+         key: str,
+         local_file_path: str,
+         throw_error_on_failure: bool = False,
+     ) -> str | None:
+         """
+         Uploads a file to S3. Returns the full S3 path: s3://<bucket>/<key>
+         """
+
+         if key.startswith("/"):
+             # remove the leading slash
+             key = key[1:]
+
+         # build the path
+         s3_path = f"s3://{bucket}/{key}"
+
+         logger.debug(
+             {
+                 "metric_filter": "upload_file_to_s3",
+                 "bucket": bucket,
+                 "key": key,
+                 "local_file_path": local_file_path,
+             }
+         )
+         try:
+             self.client.upload_file(local_file_path, bucket, key)
+
+         except ClientError as ce:
+             error = {
+                 "metric_filter": "upload_file_to_s3_failure",
+                 "s3 upload": "failure",
+                 "bucket": bucket,
+                 "key": key,
+                 "local_file_path": local_file_path,
+             }
+             logger.error(error)
+
+             if throw_error_on_failure:
+                 raise RuntimeError(error) from ce
+
+             return None
+
+         return s3_path
+
+     def download_file(
+         self,
+         bucket: str,
+         object_key: str,
+         local_directory: str | None = None,
+         local_file_path: str | None = None,
+         retry_attempts: int = 3,
+         retry_sleep: int = 5,
+     ) -> str:
+         """Download a file from S3, retrying on failure."""
+         exception: Exception | None = None
+
+         if retry_attempts == 0:
+             retry_attempts = 1
+
+         for i in range(retry_attempts):
+             exception = None
+             try:
+                 path = self.download_file_no_retries(
+                     bucket=bucket,
+                     object_key=object_key,
+                     local_directory=local_directory,
+                     local_file_path=local_file_path,
+                 )
+                 if path and os.path.exists(path):
+                     return path
+
+             except Exception as e:  # pylint: disable=w0718
+                 logger.warning(
+                     {
+                         "action": "download_file",
+                         "result": "failure",
+                         "exception": str(e),
+                         "attempt": i + 1,
+                         "retry_attempts": retry_attempts,
+                     }
+                 )
+
+                 exception = e
+
+                 # back off before the next attempt
+                 attempt = i + 1
+                 time.sleep(attempt * retry_sleep)
+
+         if exception:
+             logger.exception(
+                 {
+                     "action": "download_file",
+                     "result": "failure",
+                     "exception": str(exception),
+                     "retry_attempts": retry_attempts,
+                 }
+             )
+
+             raise exception
+
+         raise RuntimeError("Unable to download file")
+
+     def download_file_no_retries(
+         self,
+         bucket: str,
+         object_key: str,
+         local_directory: str | None = None,
+         local_file_path: str | None = None,
+     ) -> str:
+         """
+         Downloads a file from S3.
+
+         Args:
+             bucket (str): S3 bucket
+             object_key (str): the S3 object key
+             local_directory (str, optional): Local directory to download to. Defaults to None.
+                 If None, a local tmp directory is used.
+             local_file_path (str, optional): Explicit local file path. Defaults to None.
+
+         Raises:
+             Exception: re-raised if the download fails for any reason other than a
+                 FileNotFoundError that can be recovered by URL-decoding the object key.
+
+         Returns:
+             str: Path to the downloaded file.
+         """
+
+         # default to the raw key so the generic error log below always has a value
+         decoded_object_key: str = object_key
+         try:
+             logger.debug(
+                 {
+                     "action": "downloading file",
+                     "bucket": bucket,
+                     "object_key": object_key,
+                     "local_directory": local_directory,
+                 }
+             )
+             return self.__download_file(
+                 bucket, object_key, local_directory, local_file_path
+             )
+         except FileNotFoundError:
+             logger.warning(
+                 {
+                     "metric_filter": "download_file_error",
+                     "error": "FileNotFoundError",
+                     "message": "attempting to find it decoded",
+                     "bucket": bucket,
+                     "object_key": object_key,
+                 }
+             )
+
+             # attempt to decode the object_key
+             decoded_object_key = HttpUtility.decode_url(object_key)
+
+             logger.error(
+                 {
+                     "metric_filter": "download_file_error",
+                     "error": "FileNotFoundError",
+                     "message": "attempting to find it decoded",
+                     "bucket": bucket,
+                     "object_key": object_key,
+                     "decoded_object_key": decoded_object_key,
+                 }
+             )
+
+             return self.__download_file(bucket, decoded_object_key, local_directory)
+
+         except Exception as e:
+             logger.error(
+                 {
+                     "metric_filter": "download_file_error",
+                     "error": str(e),
+                     "bucket": bucket,
+                     "decoded_object_key": decoded_object_key,
+                 }
+             )
+             raise e
+
+     def stream_file(self, bucket_name: str, object_key: str) -> Dict[str, Any]:
+         """
+         Gets a file from S3 and returns the response.
+         The "Body" is a streaming body object. You can read it like a file.
+         For example:
+
+             with response["Body"] as f:
+                 data = f.read()
+                 print(data)
+
+         """
+
+         logger.debug(
+             {
+                 "source": "download_file",
+                 "action": "downloading a file from s3",
+                 "bucket": bucket_name,
+                 "key": object_key,
+             }
+         )
+
+         response: Dict[str, Any] = {}
+         error = None
+
+         try:
+             response = dict(self.client.get_object(Bucket=bucket_name, Key=object_key))
+
+             logger.debug(
+                 {"metric_filter": "s3_download_response", "response": str(response)}
+             )
+
+         except Exception as e:  # pylint: disable=W0718
+             error = str(e)
+             logger.error({"metric_filter": "s3_download_error", "error": str(e)})
+             raise RuntimeError(
+                 {
+                     "metric_filter": "s3_download_error",
+                     "error": str(e),
+                     "bucket": bucket_name,
+                     "key": object_key,
+                 }
+             ) from e
+
+         finally:
+             logger.debug(
+                 {
+                     "source": "download_file",
+                     "action": "downloading a file from s3",
+                     "bucket": bucket_name,
+                     "key": object_key,
+                     "response": response,
+                     "errors": error,
+                 }
+             )
+
+         return response
+
+     def __download_file(
+         self,
+         bucket: str,
+         key: str,
+         local_directory: str | None = None,
+         local_file_path: str | None = None,
+     ):
+         if local_directory and local_file_path:
+             raise ValueError(
+                 "Only one of local_directory or local_file_path can be provided"
+             )
+
+         if local_directory and not os.path.exists(local_directory):
+             FileOperations.makedirs(local_directory)
+
+         if local_file_path and not os.path.exists(os.path.dirname(local_file_path)):
+             FileOperations.makedirs(os.path.dirname(local_file_path))
+
+         file_name = self.__get_file_name_from_path(key)
+         if local_directory is None and local_file_path is None:
+             local_path = self.get_local_path_for_file(file_name)
+         elif local_directory:
+             local_path = os.path.join(local_directory, file_name)
+         else:
+             local_path = local_file_path
+
+         logger.debug(
+             {
+                 "source": "download_file",
+                 "action": "downloading a file from s3",
+                 "bucket": bucket,
+                 "key": key,
+                 "file_name": file_name,
+                 "local_path": local_path,
+             }
+         )
+
+         error: str | None = None
+         try:
+             self.client.download_file(bucket, key, local_path)
+
+         except Exception as e:  # pylint: disable=W0718
+             error = str(e)
+             logger.error({"metric_filter": "s3_download_error", "error": str(e)})
+
+         file_exist = os.path.exists(local_path)
+
+         logger.debug(
+             {
+                 "source": "download_file",
+                 "action": "downloading a file from s3",
+                 "bucket": bucket,
+                 "key": key,
+                 "file_name": file_name,
+                 "local_path": local_path,
+                 "file_downloaded": file_exist,
+                 "errors": error,
+             }
+         )
+
+         if not file_exist:
+             raise FileNotFoundError("File failed to download (does not exist) from S3.")
+
+         return local_path
+
+     def __get_file_name_from_path(self, path: str) -> str:
+         """
+         Get a file name from the path
+
+         Args:
+             path (str): a file path
+
+         Returns:
+             str: the file name
+         """
+         return path.rsplit("/")[-1]
+
+     def get_local_path_for_file(self, file_name: str):
+         """
+         Get a local temp location for a file.
+         This is designed to work with Lambda functions.
+         The /tmp directory is the only writeable location for Lambda functions.
+         """
+         temp_dir = self.get_temp_directory()
+         # use /tmp; it's the only writeable location for Lambda
+         local_path = os.path.join(temp_dir, file_name)
+         return local_path
+
+     def get_temp_directory(self):
+         """
+         Determines the appropriate temporary directory based on the environment.
+         If running in AWS Lambda, returns '/tmp'.
+         Otherwise, returns the system's standard temp directory.
+         """
+         if "AWS_LAMBDA_FUNCTION_NAME" in os.environ:
+             # In AWS Lambda environment
+             return "/tmp"
+         else:
+             # Not in AWS Lambda, use the system's default temp directory
+             return tempfile.gettempdir()
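For orientation, a minimal usage sketch of the class added above, based only on the signatures in this diff; the profile, region, bucket, and key names are placeholders, not values shipped with the package:

    from boto3_assist.s3.s3 import S3

    # placeholder profile/region/bucket values
    s3 = S3(aws_profile="dev", aws_region="us-east-1")

    # upload a local file, then hand out a time-limited GET link for it
    s3_path = s3.upload_file(
        bucket="example-bucket",
        key="uploads/user-123/report.pdf",
        local_file_path="/tmp/report.pdf",
    )

    presigned = s3.generate_presigned_url(
        bucket_name="example-bucket",
        key_path="uploads/user-123/report.pdf",
        user_id="user-123",
        file_name="report.pdf",
        method_type="GET",
        expiration=900,
    )
    print(s3_path, presigned["signed_url"])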
boto3_assist/s3/s3_connection.py ADDED
@@ -0,0 +1,120 @@
+ """
+ Geek Cafe, LLC
+ Maintainers: Eric Wilson
+ MIT License. See Project Root for the license information.
+ """
+
+ from typing import Optional
+ from typing import TYPE_CHECKING
+
+ from aws_lambda_powertools import Logger
+ from boto3_assist.boto3session import Boto3SessionManager
+ from boto3_assist.environment_services.environment_variables import (
+     EnvironmentVariables,
+ )
+ from boto3_assist.connection_tracker import ConnectionTracker
+
+ if TYPE_CHECKING:
+     from mypy_boto3_s3 import S3Client, S3ServiceResource
+ else:
+     S3Client = object
+     S3ServiceResource = object
+
+
+ logger = Logger()
+ tracker: ConnectionTracker = ConnectionTracker(service_name="s3")
+
+
+ class S3Connection:
+     """Connection"""
+
+     def __init__(
+         self,
+         *,
+         aws_profile: Optional[str] = None,
+         aws_region: Optional[str] = None,
+         aws_end_point_url: Optional[str] = None,
+         aws_access_key_id: Optional[str] = None,
+         aws_secret_access_key: Optional[str] = None,
+     ) -> None:
+         self.aws_profile = aws_profile or EnvironmentVariables.AWS.profile()
+         self.aws_region = aws_region or EnvironmentVariables.AWS.region()
+         self.end_point_url = (
+             aws_end_point_url or EnvironmentVariables.AWS.endpoint_url()
+         )
+         self.aws_access_key_id = (
+             aws_access_key_id or EnvironmentVariables.AWS.aws_access_key_id()
+         )
+         self.aws_secret_access_key = (
+             aws_secret_access_key or EnvironmentVariables.AWS.aws_secret_access_key()
+         )
+         self.__session: Boto3SessionManager | None = None
+         self.__client: S3Client | None = None
+         self.__resource: S3ServiceResource | None = None
+
+         self.raise_on_error: bool = True
+
+     def setup(self, setup_source: Optional[str] = None) -> None:
+         """
+         Set up the boto3 session. Called automatically the first time the session
+         is needed; you can also run setup at any time with new parameters.
+         Args:
+             setup_source: Optional[str] = None
+                 Defines the source of the setup. Useful for logging.
+         Returns: None
+         """
+
+         self.__session = Boto3SessionManager(
+             service_name="s3",
+             aws_profile=self.aws_profile,
+             aws_region=self.aws_region,
+             aws_access_key_id=self.aws_access_key_id,
+             aws_secret_access_key=self.aws_secret_access_key,
+             aws_endpoint_url=self.end_point_url,
+         )
+
+         tracker.increment_connection()
+
+         self.raise_on_error = False
+
+     @property
+     def session(self) -> Boto3SessionManager:
+         """Session"""
+         if self.__session is None:
+             self.setup(setup_source="session init")
+
+         if self.__session is None:
+             raise RuntimeError("Session is not available")
+         return self.__session
+
+     @property
+     def client(self) -> S3Client:
+         """Client Connection"""
+         if self.__client is None:
+             logger.info("Creating Client")
+             self.__client = self.session.client
+
+         if self.raise_on_error and self.__client is None:
+             raise RuntimeError("Client is not available")
+         return self.__client
+
+     @client.setter
+     def client(self, value: S3Client):
+         logger.info("Setting Client")
+         self.__client = value
+
+     @property
+     def resource(self) -> S3ServiceResource:
+         """Resource Connection"""
+         if self.__resource is None:
+             logger.info("Creating Resource")
+             self.__resource = self.session.resource
+
+         if self.raise_on_error and self.__resource is None:
+             raise RuntimeError("Resource is not available")
+
+         return self.__resource
+
+     @resource.setter
+     def resource(self, value: S3ServiceResource):
+         logger.info("Setting Resource")
+         self.__resource = value
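The connection layer above builds its boto3 session lazily: the first access to `.client` or `.resource` triggers `setup()`. A sketch of how this could be pointed at a local S3-compatible endpoint, or given a pre-built client for tests; the endpoint URL, credentials, and stub object below are placeholders, not part of this package:

    from boto3_assist.s3.s3 import S3

    # Placeholder endpoint/credentials for a local S3-compatible service.
    s3 = S3(
        aws_end_point_url="http://localhost:4566",
        aws_access_key_id="test",
        aws_secret_access_key="test",
        aws_region="us-east-1",
    )

    # The session and client are created on first use.
    buckets = s3.client.list_buckets()

    # Tests can also inject their own client via the setter added above:
    # s3.client = my_stubbed_client  # hypothetical stub object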
boto3_assist/utilities/file_operations.py ADDED
@@ -0,0 +1,105 @@
+ import os
+ import shutil
+ import json
+ import zipfile
+ from typing import List, Any, Dict
+ from pathlib import Path
+ import re
+
+ from aws_lambda_powertools import Logger
+
+
+ logger = Logger()
+
+
+ class FileOperations:
+     """
+     General File Operations
+     """
+
+     def __init__(self) -> None:
+         pass
+
+     @staticmethod
+     def makedirs(path):
+         """Create a directory and all subdirectories."""
+         abs_path = os.path.abspath(path)
+         os.makedirs(abs_path, exist_ok=True)
+
+     @staticmethod
+     def clean_directory(path: str):
+         """Clean / delete all files, directories, and subdirectories."""
+         if path is None:
+             return
+         if path == "/":
+             raise ValueError("Cannot delete root directory")
+
+         abs_path = os.path.abspath(path)
+         if os.path.exists(abs_path):
+             items = os.listdir(abs_path)
+             for item in items:
+                 path = os.path.join(abs_path, item)
+                 if os.path.exists(path):
+                     try:
+                         if os.path.isdir(path):
+                             shutil.rmtree(path)
+                         elif os.path.isfile(path):
+                             os.remove(path)
+
+                     except Exception as e:  # pylint: disable=W0718
+                         logger.exception(f"clean up error {str(e)}")
+
+     @staticmethod
+     def get_directory_name(path: str):
+         """
+         Get the directory path from a path that is either a directory
+         or a path to a file.
+         """
+         dirname = os.path.dirname(path)
+         return dirname
+
+     @staticmethod
+     def write_to_file(path: str, data: str, append: bool = False) -> str:
+         """
+         Write to a file
+         """
+         return FileOperations.write_file(path=path, output=data, append=append)
+
+     @staticmethod
+     def write_file(path: str, output: str, append: bool = False) -> str:
+         """
+         Writes to a file
+         Args:
+             path (str): path
+             output (str): text to write to the file
+             append (bool): if True, this operation appends to the file;
+                 otherwise it overwrites. The default is to overwrite.
+         Returns:
+             str: path to the file
+         """
+         dirname = FileOperations.get_directory_name(path)
+         FileOperations.makedirs(dirname)
+         mode = "a" if append else "w"
+
+         if output is None:
+             output = ""
+         with open(path, mode=mode, encoding="utf-8") as file:
+             file.write(output)
+
+         return path
+
+     @staticmethod
+     def get_file_extension(file_name: str, include_dot: bool = False):
+         """Get the extension of a file"""
+         logger.debug(f"getting extension for {file_name}")
+         # get the last part of the string after the final period
+         extension = os.path.splitext(file_name)[1]
+         logger.debug(f"extension is {extension}")
+
+         if not include_dot:
+             if str(extension).startswith("."):
+                 extension = str(extension).removeprefix(".")
+                 logger.debug(f"extension after prefix removal: {extension}")
+
+         return extension
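Finally, the `FileOperations` helpers used by the S3 module can also be called directly. A small sketch based on the signatures above; the paths are placeholders:

    from boto3_assist.utilities.file_operations import FileOperations

    # write_file creates any missing parent directories before writing
    path = FileOperations.write_file("/tmp/boto3-assist-demo/notes/readme.txt", "hello")

    FileOperations.get_file_extension("archive.tar.gz")                # -> "gz"
    FileOperations.get_file_extension("report.pdf", include_dot=True)  # -> ".pdf"

    # remove everything under the demo directory (refuses to clean "/")
    FileOperations.clean_directory("/tmp/boto3-assist-demo")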