kardioutils 1.0.10.tar.gz → 1.0.11.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. {kardioutils-1.0.10/kardioutils.egg-info → kardioutils-1.0.11}/PKG-INFO +1 -1
  2. kardioutils-1.0.11/dl2050utils/__version__.py +1 -0
  3. kardioutils-1.0.11/dl2050utils/df_utils.py +77 -0
  4. {kardioutils-1.0.10 → kardioutils-1.0.11}/dl2050utils/gs.py +250 -103
  5. {kardioutils-1.0.10 → kardioutils-1.0.11/kardioutils.egg-info}/PKG-INFO +1 -1
  6. {kardioutils-1.0.10 → kardioutils-1.0.11}/kardioutils.egg-info/SOURCES.txt +1 -0
  7. kardioutils-1.0.10/dl2050utils/__version__.py +0 -1
  8. {kardioutils-1.0.10 → kardioutils-1.0.11}/LICENSE.txt +0 -0
  9. {kardioutils-1.0.10 → kardioutils-1.0.11}/README.md +0 -0
  10. {kardioutils-1.0.10 → kardioutils-1.0.11}/dl2050utils/__config__.py +0 -0
  11. {kardioutils-1.0.10 → kardioutils-1.0.11}/dl2050utils/__init__.py +0 -0
  12. {kardioutils-1.0.10 → kardioutils-1.0.11}/dl2050utils/api.py +0 -0
  13. {kardioutils-1.0.10 → kardioutils-1.0.11}/dl2050utils/auth.py +0 -0
  14. {kardioutils-1.0.10 → kardioutils-1.0.11}/dl2050utils/com.py +0 -0
  15. {kardioutils-1.0.10 → kardioutils-1.0.11}/dl2050utils/common.py +0 -0
  16. {kardioutils-1.0.10 → kardioutils-1.0.11}/dl2050utils/core.py +0 -0
  17. {kardioutils-1.0.10 → kardioutils-1.0.11}/dl2050utils/db copy.py +0 -0
  18. {kardioutils-1.0.10 → kardioutils-1.0.11}/dl2050utils/db.py +0 -0
  19. {kardioutils-1.0.10 → kardioutils-1.0.11}/dl2050utils/dbdf.py +0 -0
  20. {kardioutils-1.0.10 → kardioutils-1.0.11}/dl2050utils/dbutils.py +0 -0
  21. {kardioutils-1.0.10 → kardioutils-1.0.11}/dl2050utils/df.py +0 -0
  22. {kardioutils-1.0.10 → kardioutils-1.0.11}/dl2050utils/env.py +0 -0
  23. {kardioutils-1.0.10 → kardioutils-1.0.11}/dl2050utils/etl.py +0 -0
  24. {kardioutils-1.0.10 → kardioutils-1.0.11}/dl2050utils/fdb.py +0 -0
  25. {kardioutils-1.0.10 → kardioutils-1.0.11}/dl2050utils/fs.py +0 -0
  26. {kardioutils-1.0.10 → kardioutils-1.0.11}/dl2050utils/graphql.py +0 -0
  27. {kardioutils-1.0.10 → kardioutils-1.0.11}/dl2050utils/ju.py +0 -0
  28. {kardioutils-1.0.10 → kardioutils-1.0.11}/dl2050utils/log.py +0 -0
  29. {kardioutils-1.0.10 → kardioutils-1.0.11}/dl2050utils/mq.py +0 -0
  30. {kardioutils-1.0.10 → kardioutils-1.0.11}/dl2050utils/rest.py +0 -0
  31. {kardioutils-1.0.10 → kardioutils-1.0.11}/dl2050utils/restapp.py +0 -0
  32. {kardioutils-1.0.10 → kardioutils-1.0.11}/dl2050utils/restutils.py +0 -0
  33. {kardioutils-1.0.10 → kardioutils-1.0.11}/dl2050utils/sqlite.py +0 -0
  34. {kardioutils-1.0.10 → kardioutils-1.0.11}/dl2050utils/ulists.py +0 -0
  35. {kardioutils-1.0.10 → kardioutils-1.0.11}/dl2050utils/wsgi.py +0 -0
  36. {kardioutils-1.0.10 → kardioutils-1.0.11}/kardioutils.egg-info/dependency_links.txt +0 -0
  37. {kardioutils-1.0.10 → kardioutils-1.0.11}/kardioutils.egg-info/top_level.txt +0 -0
  38. {kardioutils-1.0.10 → kardioutils-1.0.11}/setup.cfg +0 -0
  39. {kardioutils-1.0.10 → kardioutils-1.0.11}/setup.py +0 -0
  40. {kardioutils-1.0.10 → kardioutils-1.0.11}/test/test_core.py +0 -0
  41. {kardioutils-1.0.10 → kardioutils-1.0.11}/test/test_db.py +0 -0
  42. {kardioutils-1.0.10 → kardioutils-1.0.11}/test/test_env.py +0 -0
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: kardioutils
-Version: 1.0.10
+Version: 1.0.11
 Summary: Utils lib
 Author: João Neto
 Author-email: joao.filipe.neto@gmail.com
@@ -0,0 +1 @@
+version = "1.0.11"
@@ -0,0 +1,77 @@
+import pandas as pd
+import os
+
+def list_prefixes(df: pd.DataFrame) -> list:
+    """Return all distinct prefixes in the dataframe."""
+    return df["prefix"].dropna().unique().tolist()
+
+
+def filter_by_prefix(df: pd.DataFrame, prefix: str) -> pd.DataFrame:
+    """Return all rows that match a given prefix exactly."""
+    return df[df["prefix"] == prefix]
+
+
+def filter_prefix_contains(df: pd.DataFrame, text: str) -> pd.DataFrame:
+    """Return all rows where prefix contains the given text."""
+    return df[df["prefix"].str.contains(text, na=False)]
+
+
+def find_by_uid_suffix(df: pd.DataFrame, uid_suffix: str) -> pd.DataFrame:
+    """Return all rows that match a given uid_suffix."""
+    return df[df["uid_suffix"] == uid_suffix]
+
+
+def find_by_uid_full(df: pd.DataFrame, uid_full: str) -> pd.DataFrame:
+    """Return all rows that match a given uid_full."""
+    return df[df["uid_full"] == uid_full]
+
+
+def holter_only(df: pd.DataFrame) -> pd.DataFrame:
+    """Return only rows where holter == True."""
+    return df[df["holter"] == True]
+
+
+def non_holter_only(df: pd.DataFrame) -> pd.DataFrame:
+    """Return only rows where holter == False."""
+    return df[df["holter"] == False]
+
+
+def get_path_by_uid_suffix(df: pd.DataFrame, uid_suffix: str) -> str | None:
+    """
+    Return the path for a given uid_suffix.
+    If there are multiple rows, returns the first one.
+    If nothing is found, returns None.
+    """
+    rows = df[df["uid_suffix"] == uid_suffix]
+    if rows.empty:
+        return None
+    return rows.iloc[0]["path"]
+
+
+def get_paths_by_prefix(df: pd.DataFrame, prefix: str, holter_only_flag: bool | None = None) -> list:
+    """
+    Return a list of paths filtered by prefix and optionally by the holter flag.
+    - holter_only_flag = True → only holter rows
+    - holter_only_flag = False → only non-holter rows
+    - holter_only_flag = None → ignore the holter column
+    """
+    subset = df[df["prefix"] == prefix]
+    if holter_only_flag is not None:
+        subset = subset[subset["holter"] == holter_only_flag]
+    return subset["path"].dropna().tolist()
+
+
+def check_missing_files(df):
+    """
+    Return the subset of rows whose 'path' does not point to an existing file.
+    """
+    mask = ~df["path"].astype(str).apply(os.path.exists)
+    return df[mask]
+
+
+def check_existing_files(df):
+    """
+    Return the subset of rows whose 'path' exists.
+    """
+    mask = df["path"].astype(str).apply(os.path.exists)
+    return df[mask]
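As a usage note: a minimal sketch of how these new df_utils helpers compose, assuming a dataframe with the prefix, uid_suffix, uid_full, path, and holter columns the module expects (the data below is hypothetical):

import pandas as pd
from dl2050utils.df_utils import list_prefixes, get_paths_by_prefix, check_missing_files

# Hypothetical index dataframe in the shape df_utils expects
df = pd.DataFrame([
    {"prefix": "siteA", "uid_suffix": "001", "uid_full": "siteA-001", "path": "/data/siteA/001.ecg", "holter": True},
    {"prefix": "siteA", "uid_suffix": "002", "uid_full": "siteA-002", "path": "/data/siteA/002.ecg", "holter": False},
])

print(list_prefixes(df))                                        # ['siteA']
print(get_paths_by_prefix(df, "siteA", holter_only_flag=True))  # ['/data/siteA/001.ecg']
print(check_missing_files(df))                                  # rows whose path is absent on disk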
@@ -8,12 +8,45 @@ import datetime
 import re
 import pickle
 import mimetypes
+import requests
 from google.cloud import storage
 from dl2050utils.core import oget
 from dl2050utils.env import config_load
 from dl2050utils.fs import json_save
+import hashlib
+import hmac
+import urllib.parse
+from pathlib import Path
 
 
+class _URLSigner:
+    """Internal HMAC-based URL signer for local backend."""
+
+    def __init__(self, secret_key: str, base_url: str):
+        self.secret = secret_key.encode("utf-8")
+        self.base_url = base_url.rstrip("/")
+
+    def _make_signature(self, method: str, bucket: str, blob: str, exp: int, max_size: int | None):
+        payload = f"{method}\n{bucket}\n{blob}\n{exp}\n{max_size or ''}"
+        return hmac.new(self.secret, payload.encode("utf-8"), hashlib.sha256).hexdigest()
+
+    def generate_url(self, path: str, method: str, bucket: str, blob: str,
+                     timeout: int, max_size: int | None = None) -> str:
+        import time
+        exp = int(time.time()) + timeout
+        sig = self._make_signature(method, bucket, blob, exp, max_size)
+        query = {
+            "bucket": bucket,
+            "blob": blob,
+            "exp": exp,
+            "method": method,
+            "sig": sig,
+        }
+        if max_size is not None:
+            query["max_size"] = max_size
+        qs = urllib.parse.urlencode(query)
+        return f"{self.base_url}{path}?{qs}"
+
 class GS:
     """
     Google Cloud Storage helper class to manage buckets, files, and URLs.
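The serving side of this signer is not part of this diff (it lives in the local fs-server). As a sketch of the contract, a hypothetical verifier would rebuild the same payload, enforce the expiry, and compare digests in constant time; verify_url and its argument names are illustrative, not package API:

import hashlib
import hmac
import time

def verify_url(secret: str, method: str, query: dict) -> bool:
    # Reject expired URLs first
    exp = int(query["exp"])
    if time.time() > exp:
        return False
    # Recompute the HMAC over the exact payload _URLSigner signs
    payload = f"{method}\n{query['bucket']}\n{query['blob']}\n{exp}\n{query.get('max_size') or ''}"
    expected = hmac.new(secret.encode("utf-8"), payload.encode("utf-8"), hashlib.sha256).hexdigest()
    # Constant-time comparison avoids leaking signature bytes via timing
    return hmac.compare_digest(expected, query["sig"])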
@@ -27,23 +60,39 @@ class GS:
     """
 
     def __init__(self, service, default_location="europe-west1"):
-        """
-        Initializes the GS class with the specified Google Cloud service and location.
-        Args:
-            service (str): The Google Cloud service name.
-            default_location (str): Default location for bucket creation. Defaults to "europe-west1".
-        """
         cfg = config_load(service)
-        # Create credentials file from config yml
+        # Try Google Cloud first
         key_dict = oget(cfg, ["gcloud", "gs_key"])
-        assert key_dict["type"] == "service_account"
-        credentials_p = "./gs-keyfile.json"
-        json_save(credentials_p, key_dict)
-        # Set the GOOGLE_APPLICATION_CREDENTIALS env var to use the credentials file
-        os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = credentials_p
-        # Connect
-        self.default_location = default_location
-        self.gc = storage.Client()
+        fs_cfg = oget(cfg, ["fs"]) or {}
+
+        self.mode = None  # 'gcloud' or 'local'
+
+        if key_dict is not None:
+            # ---------- GCS MODE ----------
+            assert key_dict["type"] == "service_account"
+            credentials_p = "./gs-keyfile.json"
+            json_save(credentials_p, key_dict)
+            os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = credentials_p
+            self.default_location = default_location
+            self.gc = storage.Client()
+            self.mode = "gcloud"
+        elif fs_cfg.get("backend") == "local":
+            # ---------- LOCAL MODE ----------
+            self.mode = "local"
+            self.default_location = "local"
+            self.gc = None  # not used
+
+            self.root_dir = Path(fs_cfg.get("root_dir", f"/data/{service}/fs"))
+            self.root_dir.mkdir(parents=True, exist_ok=True)
+
+            base_url = fs_cfg.get("url", "http://localhost:8001")
+            secret = fs_cfg.get("secret")
+            if not secret:
+                raise RuntimeError("GS local backend enabled but fs.secret not configured")
+
+            self._signer = _URLSigner(secret_key=secret, base_url=base_url)
+        else:
+            raise RuntimeError("GS: neither gcloud.gs_key nor fs.backend=local configured")
 
     # ####################################################################################################
     # Admin
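The constructor therefore accepts one of two config shapes from config_load. A sketch of the keys it reads (the nesting follows the oget calls above; all values are placeholders):

# GCS mode: a service-account key dict under gcloud.gs_key
cfg_gcloud = {"gcloud": {"gs_key": {"type": "service_account", "project_id": "...", "private_key": "..."}}}

# Local mode: fs.backend selects the local fileserver; secret is mandatory
cfg_local = {"fs": {
    "backend": "local",
    "root_dir": "/data/myservice/fs",  # defaults to /data/<service>/fs
    "url": "http://localhost:8001",    # default shown in the code above
    "secret": "change-me",             # RuntimeError if missing
}}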
@@ -172,30 +221,56 @@ class GS:
     # Memory Download, Upload
     # ###################################################################################################################
 
-    def upload_mem(self, bucket_name, blob_name, data, content_type="application/octet-stream", use_pickle=True):
+    def upload_mem(self, bucket_name, blob_name, data,
+                   content_type="application/octet-stream",
+                   use_pickle=True):
         """
-        Uploads data from memory to a specified bucket and blob.
-        Args:
-            bucket_name (str): Name of the GCS bucket.
-            blob_name (str): Name of the blob to upload.
-            data (str or bytes): Data to upload. Can be a string or bytes.
-            content_type (str, optional): MIME type of the data. Defaults to 'application/octet-stream'.
-            use_pickle (bool, optional): If True, serializes the data using pickle before uploading. Defaults to False.
-        Returns:
-            int: 0 if upload is successful, 1 otherwise.
-        Examples:
-            gs.upload_mem(bucket_name, blob_name, data="Hello, world!", content_type='text/plain')
-            gs.upload_mem(bucket_name, blob_name, data=b'\x89PNG\r\n\x1a...', content_type='image/png')
+        Uploads data from memory to storage (GCS or local FS).
         """
         try:
             if use_pickle:
                 data = pickle.dumps(data)
             elif isinstance(data, str):
                 data = data.encode("utf-8")
-            bucket = self.gc.bucket(bucket_name)
-            blob = bucket.blob(blob_name)
-            blob.upload_from_string(data, content_type=content_type)
-            return 0
+
+            if self.mode == "gcloud":
+                # --------- Google Cloud ---------
+                bucket = self.gc.bucket(bucket_name)
+                blob = bucket.blob(blob_name)
+                blob.upload_from_string(data, content_type=content_type)
+                return 0
+
+            elif self.mode == "local":
+                # --------- Local fs-server (fs.py) via HTTP ---------
+                size = len(data)
+                upload_url = self.upload_url(
+                    bucket_name,
+                    blob_name,
+                    timeout=15 * 60,
+                    size=size,
+                )
+                if not upload_url:
+                    print("upload_mem (local) ERROR: could not generate upload_url")
+                    return 1
+
+                resp = requests.put(
+                    upload_url,
+                    data=data,
+                    headers={"Content-Type": content_type},
+                    timeout=60,
+                )
+                if resp.status_code not in (200, 201):
+                    print("upload_mem (local) ERROR:", resp.status_code, resp.text)
+                    return 1
+
+                return 0
+
+            else:
+                print("upload_mem ERROR: unknown mode", self.mode)
+                return 1
+
         except Exception as exc:
             print(f"upload_mem EXCEPTION: {str(exc)}")
             return 1
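A round-trip sketch of the memory API (upload_mem above, download_mem in the next hunk), which behaves the same in either mode; the service, bucket, and blob names are placeholders, and download_mem is assumed to expose the use_pickle/as_string flags used in its body:

gs = GS("myservice")

# Pickled round-trip: upload_mem pickles by default, download_mem unpickles
obj = {"uid": "001", "beats": [1, 2, 3]}
assert gs.upload_mem("my-bucket", "obj.pkl", obj) == 0
assert gs.download_mem("my-bucket", "obj.pkl", use_pickle=True) == obj

# Raw text round-trip: disable pickling and decode on the way back
gs.upload_mem("my-bucket", "note.txt", "hello", content_type="text/plain", use_pickle=False)
print(gs.download_mem("my-bucket", "note.txt", use_pickle=False, as_string=True))  # 'hello'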
@@ -215,21 +290,48 @@ class GS:
             Any: The data from the blob, possibly decoded or deserialized.
         """
         try:
-            bucket = self.gc.bucket(bucket_name)
-            blob = bucket.blob(blob_name)
-            data = blob.download_as_bytes()
+            if self.mode == "gcloud":
+                # --------- Google Cloud ---------
+                bucket = self.gc.bucket(bucket_name)
+                blob = bucket.blob(blob_name)
+                data = blob.download_as_bytes()
+
+            elif self.mode == "local":
+                # --------- Local fs-server (fs.py) via HTTP ---------
+                download_url = self.download_url(
+                    bucket_name,
+                    blob_name,
+                    timeout=24 * 3600,
+                )
+                if not download_url:
+                    print("download_mem (local) ERROR: could not generate download_url")
+                    return None
+                internal_token = os.environ.get("FS_INTERNAL_TOKEN")
+                headers = {}
+                if internal_token:
+                    headers["X-Internal-Token"] = internal_token
+
+                resp = requests.get(download_url, headers=headers, timeout=60)
+                if resp.status_code != 200:
+                    print("download_mem (local) ERROR:", resp.status_code, resp.text)
+                    return None
+
+                data = resp.content
+
+            else:
+                print("download_mem ERROR: unknown mode", self.mode)
+                return None
+
+            # Post-processing is the same for both modes
             if use_pickle:
-                # Deserialize the data using pickle
                 data = pickle.loads(data)
             elif as_string:
-                # Decode the data using the specified encoding
                 data = data.decode(encoding)
-            # If neither use_pickle nor as_string is True, return the raw bytes
             return data
         except Exception as exc:
             print(f"download_mem EXCEPTION: {str(exc)}")
             return None
-
     # ###################################################################################################################
     # File Download, Upload
     # ###################################################################################################################
@@ -243,18 +345,20 @@ class GS:
     ):
         """
         Uploads a local file to a specified bucket and blob.
-        Args:
-            bucket_name (str): Name of the GCS bucket.
-            blob_name (str): Name of the blob to upload.
-            local_file_path (str): Local path of the file to upload.
-            content_type (str, optional): MIME type of the data. Defaults to 'application/octet-stream'.
-        Returns:
-            int: 0 if upload is successful, 1 otherwise.
+        - In GCS mode: uploads to Google Cloud.
+        - In local mode: copies the file into root_dir / bucket / blob.
         """
         try:
-            bucket = self.gc.bucket(bucket_name)
-            blob = bucket.blob(blob_name)
-            blob.upload_from_filename(local_file_path, content_type=content_type)
+            if self.mode == "gcloud":
+                bucket = self.gc.bucket(bucket_name)
+                blob = bucket.blob(blob_name)
+                blob.upload_from_filename(local_file_path, content_type=content_type)
+                return 0
+            # LOCAL MODE
+            dst = self.root_dir / bucket_name / blob_name
+            dst.parent.mkdir(parents=True, exist_ok=True)
+            import shutil
+            shutil.copy2(local_file_path, dst)
             return 0
         except Exception as exc:
             print(f"upload_file EXCEPTION: {str(exc)}")
@@ -263,18 +367,50 @@ class GS:
     def download_file(self, bucket_name, blob_name, local_file_path):
         """
         Downloads a blob from the bucket to a local file.
-        Args:
-            bucket_name (str): Name of the GCS bucket.
-            blob_name (str): Name of the blob to download.
-            local_file_path (str): Local path to save the downloaded file.
-        Returns:
-            int: 0 if upload is successful, 1 otherwise.
+        - In GCS mode: downloads from Google Cloud.
+        - In local mode: copies from root_dir / bucket / blob.
         """
         try:
-            bucket = self.gc.bucket(bucket_name)
-            blob = bucket.blob(blob_name)
-            blob.download_to_filename(local_file_path)
-            return 0
+            if self.mode == "gcloud":
+                bucket = self.gc.bucket(bucket_name)
+                blob = bucket.blob(blob_name)
+                blob.download_to_filename(local_file_path)
+                return 0
+
+            # LOCAL MODE
+            elif self.mode == "local":
+                # --------- Local fs-server (fs.py) via HTTP ---------
+                download_url = self.download_url(
+                    bucket_name,
+                    blob_name,
+                    timeout=24 * 3600,
+                )
+                if not download_url:
+                    print("download_file (local) ERROR: could not generate download_url")
+                    return 1
+
+                internal_token = os.environ.get("FS_INTERNAL_TOKEN")
+                headers = {}
+                if internal_token:
+                    headers["X-Internal-Token"] = internal_token
+
+                # Stream the response so the whole file is never loaded into RAM
+                with requests.get(download_url, headers=headers, stream=True, timeout=60) as r:
+                    if r.status_code != 200:
+                        print("download_file (local) ERROR:", r.status_code, r.text)
+                        return 1
+
+                    Path(local_file_path).parent.mkdir(parents=True, exist_ok=True)
+                    with open(local_file_path, "wb") as f:
+                        for chunk in r.iter_content(chunk_size=1024 * 1024):
+                            if chunk:
+                                f.write(chunk)
+
+                return 0
+
+            else:
+                print("download_file ERROR: unknown mode", self.mode)
+                return 1
         except Exception as exc:
             print(f"download_file EXCEPTION: {str(exc)}")
             return 1
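Continuing the sketch above, the file API mirrors the memory API; paths are placeholders (in local mode, downloads stream through the fs-server in 1 MiB chunks):

gs.upload_file("my-bucket", "ecg/001.dat", "/tmp/001.dat")         # returns 0 on success
gs.download_file("my-bucket", "ecg/001.dat", "/tmp/001_copy.dat")  # returns 0 on success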
@@ -357,68 +493,79 @@ class GS:
     def upload_url(self, bucket_name, blob_name, timeout=15 * 60, size=None):
         """
         Generates a signed URL for uploading a blob.
-        Args:
-            bucket_name (str): Name of the GCS bucket.
-            blob_name (str): Name of the blob to upload.
-            timeout (int, optional): URL expiration time in seconds. Defaults to 15 minutes.
-            size (int, optional): Maximum allowed size of the upload in bytes.
-        Returns:
-            str or None: Signed URL for uploading or None if an error occurs.
+        - Local mode: signed URL for local fileserver (/upload).
         """
+        if self.mode == "gcloud":
+            try:
+                bucket = self.gc.bucket(bucket_name)
+                blob = bucket.blob(blob_name)
+                query_parameters = (
+                    None if size is None else {"x-goog-content-length-range": f"0,{size}"}
+                )
+                url = blob.generate_signed_url(
+                    version="v4",
+                    expiration=datetime.timedelta(seconds=timeout),
+                    method="PUT",
+                    content_type="application/octet-stream",
+                    query_parameters=query_parameters,
+                )
+                return url
+            except Exception as exc:
+                print(f"upload_url EXCEPTION: {str(exc)}")
+                return None
+
+        # LOCAL MODE
         try:
-            bucket = self.gc.bucket(bucket_name)
-            blob = bucket.blob(blob_name)
-            query_parameters = (
-                None if size is None else {"x-goog-content-length-range": f"0,{size}"}
-            )
-            url = blob.generate_signed_url(
-                version="v4",
-                expiration=datetime.timedelta(seconds=timeout),
-                method="PUT",
-                content_type="application/octet-stream",
-                query_parameters=query_parameters,
-            )
-            return url
+            return self._signer.generate_url(
+                path="/upload",
+                method="PUT",
+                bucket=bucket_name,
+                blob=blob_name,
+                timeout=timeout,
+                max_size=size,
+            )
         except Exception as exc:
-            print(f"upload_url EXCEPTION: {str(exc)}")
+            print(f"upload_url (local) EXCEPTION: {str(exc)}")
             return None
 
     def download_url(self, bucket_name, blob_name, timeout=24 * 3600):
         """
         Generates a signed URL for downloading a blob.
-        Args:
-            bucket_name (str): Name of the GCS bucket.
-            blob_name (str): Name of the blob to download.
-            timeout (int, optional): URL expiration time in seconds. Defaults to 24 hours.
-        Returns:
-            str or None: Signed URL for downloading or None if an error occurs.
+        - Local mode: signed URL for local fileserver (/download).
         """
+        if self.mode == "gcloud":
+            try:
+                bucket = self.gc.bucket(bucket_name)
+                blob = bucket.blob(blob_name)
+                url = blob.generate_signed_url(
+                    version="v4",
+                    expiration=datetime.timedelta(seconds=timeout),
+                    method="GET",
+                )
+                return url
+            except Exception as exc:
+                print(f"download_url EXCEPTION: {str(exc)}")
+                return None
+
+        # LOCAL MODE
         try:
-            bucket = self.gc.bucket(bucket_name)
-            blob = bucket.blob(blob_name)
-            url = blob.generate_signed_url(
-                version="v4",
-                expiration=datetime.timedelta(seconds=timeout),
-                method="GET",
-            )
-            # Append the blob_name for the download client to be able to recover the file name
-            # url = f'{url}&filename={blob_name}'
-            return url
+            return self._signer.generate_url(
+                path="/download",
+                method="GET",
+                bucket=bucket_name,
+                blob=blob_name,
+                timeout=timeout,
+                max_size=None,
+            )
         except Exception as exc:
-            print(f"download_url EXCEPTION: {str(exc)}")
+            print(f"download_url (local) EXCEPTION: {str(exc)}")
             return None
 
     def urls(self, bucket_name, blob_name, timeout=24 * 3600, size=None):
         """
         Generates both upload and download signed URLs for a blob.
-        Args:
-            bucket_name (str): Name of the GCS bucket.
-            blob_name (str): Name of the blob.
-            timeout (int, optional): URL expiration time in seconds. Defaults to 24 hours.
-            size (int, optional): Maximum allowed size of the upload in bytes.
-        Returns:
-            tuple: (upload_url, download_url)
         """
-        return self.upload_url(
-            bucket_name, blob_name, timeout=timeout, size=size
-        ), self.download_url(bucket_name, blob_name, timeout=timeout)
+        return (
+            self.upload_url(bucket_name, blob_name, timeout=timeout, size=size),
+            self.download_url(bucket_name, blob_name, timeout=timeout),
+        )
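In both modes the URLs returned by urls() work with a plain HTTP client; a sketch continuing the examples above, with placeholder names (in local mode the fs-server must be listening at the configured fs.url):

import requests

up_url, down_url = gs.urls("my-bucket", "report.pdf", timeout=3600, size=10_000_000)

# PUT with the size-limited upload URL; Content-Type must match what was signed
with open("report.pdf", "rb") as f:
    requests.put(up_url, data=f, headers={"Content-Type": "application/octet-stream"}, timeout=60)

# Anyone holding the download URL can GET the blob until it expires
pdf_bytes = requests.get(down_url, timeout=60).content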
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: kardioutils
-Version: 1.0.10
+Version: 1.0.11
 Summary: Utils lib
 Author: João Neto
 Author-email: joao.filipe.neto@gmail.com
@@ -15,6 +15,7 @@ dl2050utils/db.py
 dl2050utils/dbdf.py
 dl2050utils/dbutils.py
 dl2050utils/df.py
+dl2050utils/df_utils.py
 dl2050utils/env.py
 dl2050utils/etl.py
 dl2050utils/fdb.py
@@ -1 +0,0 @@
-version = "1.0.10"