kardioutils 1.0.19.tar.gz → 1.0.21.tar.gz
This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
- {kardioutils-1.0.19/kardioutils.egg-info → kardioutils-1.0.21}/PKG-INFO +1 -1
- kardioutils-1.0.21/dl2050utils/__version__.py +1 -0
- {kardioutils-1.0.19 → kardioutils-1.0.21}/dl2050utils/api.py +11 -2
- {kardioutils-1.0.19 → kardioutils-1.0.21}/dl2050utils/db.py +45 -0
- {kardioutils-1.0.19 → kardioutils-1.0.21}/dl2050utils/fdb.py +9 -6
- {kardioutils-1.0.19 → kardioutils-1.0.21}/dl2050utils/gs.py +14 -0
- {kardioutils-1.0.19 → kardioutils-1.0.21}/dl2050utils/restapp.py +6 -1
- {kardioutils-1.0.19 → kardioutils-1.0.21/kardioutils.egg-info}/PKG-INFO +1 -1
- kardioutils-1.0.19/dl2050utils/__version__.py +0 -1
- {kardioutils-1.0.19 → kardioutils-1.0.21}/LICENSE.txt +0 -0
- {kardioutils-1.0.19 → kardioutils-1.0.21}/README.md +0 -0
- {kardioutils-1.0.19 → kardioutils-1.0.21}/dl2050utils/__config__.py +0 -0
- {kardioutils-1.0.19 → kardioutils-1.0.21}/dl2050utils/__init__.py +0 -0
- {kardioutils-1.0.19 → kardioutils-1.0.21}/dl2050utils/auth.py +0 -0
- {kardioutils-1.0.19 → kardioutils-1.0.21}/dl2050utils/com.py +0 -0
- {kardioutils-1.0.19 → kardioutils-1.0.21}/dl2050utils/common.py +0 -0
- {kardioutils-1.0.19 → kardioutils-1.0.21}/dl2050utils/core.py +0 -0
- {kardioutils-1.0.19 → kardioutils-1.0.21}/dl2050utils/db copy.py +0 -0
- {kardioutils-1.0.19 → kardioutils-1.0.21}/dl2050utils/dbdf.py +0 -0
- {kardioutils-1.0.19 → kardioutils-1.0.21}/dl2050utils/dbutils.py +0 -0
- {kardioutils-1.0.19 → kardioutils-1.0.21}/dl2050utils/df.py +0 -0
- {kardioutils-1.0.19 → kardioutils-1.0.21}/dl2050utils/df_utils.py +0 -0
- {kardioutils-1.0.19 → kardioutils-1.0.21}/dl2050utils/env.py +0 -0
- {kardioutils-1.0.19 → kardioutils-1.0.21}/dl2050utils/etl.py +0 -0
- {kardioutils-1.0.19 → kardioutils-1.0.21}/dl2050utils/fs.py +0 -0
- {kardioutils-1.0.19 → kardioutils-1.0.21}/dl2050utils/graphql.py +0 -0
- {kardioutils-1.0.19 → kardioutils-1.0.21}/dl2050utils/ju.py +0 -0
- {kardioutils-1.0.19 → kardioutils-1.0.21}/dl2050utils/log.py +0 -0
- {kardioutils-1.0.19 → kardioutils-1.0.21}/dl2050utils/mq.py +0 -0
- {kardioutils-1.0.19 → kardioutils-1.0.21}/dl2050utils/rest.py +0 -0
- {kardioutils-1.0.19 → kardioutils-1.0.21}/dl2050utils/restutils.py +0 -0
- {kardioutils-1.0.19 → kardioutils-1.0.21}/dl2050utils/sqlite.py +0 -0
- {kardioutils-1.0.19 → kardioutils-1.0.21}/dl2050utils/ulists.py +0 -0
- {kardioutils-1.0.19 → kardioutils-1.0.21}/dl2050utils/wsgi.py +0 -0
- {kardioutils-1.0.19 → kardioutils-1.0.21}/kardioutils.egg-info/SOURCES.txt +0 -0
- {kardioutils-1.0.19 → kardioutils-1.0.21}/kardioutils.egg-info/dependency_links.txt +0 -0
- {kardioutils-1.0.19 → kardioutils-1.0.21}/kardioutils.egg-info/top_level.txt +0 -0
- {kardioutils-1.0.19 → kardioutils-1.0.21}/setup.cfg +0 -0
- {kardioutils-1.0.19 → kardioutils-1.0.21}/setup.py +0 -0
- {kardioutils-1.0.19 → kardioutils-1.0.21}/test/test_core.py +0 -0
- {kardioutils-1.0.19 → kardioutils-1.0.21}/test/test_db.py +0 -0
- {kardioutils-1.0.19 → kardioutils-1.0.21}/test/test_env.py +0 -0
kardioutils-1.0.21/dl2050utils/__version__.py (new file)
@@ -0,0 +1 @@
+version = "1.0.21"
dl2050utils/api.py
@@ -149,6 +149,7 @@ def db_create(db):
     key CHAR(256),
     kind CHAR(32),
     status CHAR(32),
+    bucket_key CHAR(64),
     urls TEXT, -- json stringyfied list of dicts with url and file name
     results_url TEXT,
     eta DATETIME,
@@ -342,8 +343,10 @@ class APIServer:
         data = await request.json()
         urls = get_param(request, 'urls', list, data=data, LOG=self.LOG)
         req_uid = get_uid()
+        bucket_key = get_param(request, 'bucket_key', str, required=False, data=data, LOG=self.LOG)
+
         task = {'req_uid':req_uid, 'created_at':now(), 'key':key, 'kind':'ASYNC', 'status':'REQUESTED',
-                'urls': json_dumps(urls)}
+                'urls': json_dumps(urls), 'bucket_key': bucket_key,}
         self.db.insert('api_tasks', task)
         self.LOG(2, 0, label='APIServer', label2='/apiserver/req', msg=f"req_uid={req_uid}")
         return rest_ok({'req_uid':req_uid})
@@ -456,7 +459,13 @@ class APIClient:
         return sync_request(f'{self.url}{route}', method=method, headers=headers, payload=payload)
     def get_signed_urls(self, fnames): return self.do_request('/apiserver/get_signed_urls', {'fnames':fnames})
     def proc(self, data): return self.do_request('/apiserver/proc', {'base64':base64_encode(data)})
-
+
+    def req(self, urls, bucket_key=None):
+        payload = {'urls': listify(urls)}
+        if bucket_key is not None:
+            payload['bucket_key'] = bucket_key
+        return self.do_request('/apiserver/req', payload)
+
     def check(self, req_uid): return self.do_request('/apiserver/check', {'req_uid':req_uid})

 # #################################################################################################################
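The new `APIClient.req` helper submits an async task in one call, mirroring the server-side `/apiserver/req` change above. A minimal usage sketch; the client constructor and the exact response shape are assumptions, since only `req` and `check` appear in this diff:

```python
from dl2050utils.api import APIClient  # import path assumed from the tarball layout

client = APIClient(url='https://apiserver.example.com')  # constructor shape assumed

# listify() lets req() accept a single URL or a list of URLs.
res = client.req('https://example.com/files/ecg1.bin', bucket_key='acme')
req_uid = res['req_uid']  # assumes the rest_ok() envelope exposes req_uid directly

status = client.check(req_uid)  # poll the ASYNC task recorded in api_tasks
```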
dl2050utils/db.py
@@ -785,3 +785,48 @@ def db_enable_serial(db, tbl, col):
     db.sync_execute(
         f"ALTER TABLE {tbl} ALTER COLUMN {col} SET DEFAULT nextval('{tbl}_{col}_seq')"
     )
+
+def row_exists_full(db, tbl, row: dict, cols=None):
+    cols = cols or list(row.keys())
+    where = " AND ".join([f"{c} IS NOT DISTINCT FROM ${i+1}" for i, c in enumerate(cols)])
+    q = f"SELECT 1 FROM {tbl} WHERE {where} LIMIT 1"
+    params = [convert_type(row.get(c)) for c in cols]
+    res = db.sync_execute(q, *params)
+    return bool(res)
+
+def db_import_tbl_full_compare(db, p, tbl, delete=False, cols=None):
+    from pathlib import Path
+    p = Path(p)
+    if not p.with_suffix(".pickle").is_file():
+        return log_and_return(f"Error importing {tbl}: file {p} not found")
+
+    rows = pickle_load(p)
+    if rows is None:
+        return log_and_return(f"Cant read {p}")
+    if not rows:
+        print("No rows to import.")
+        return 0
+
+    if delete:
+        res = db.sync_execute(f"DELETE FROM {tbl}")
+        if res is None:
+            return log_and_return(f"Error deleting tbl {tbl}")
+
+    cols = cols or list(rows[0].keys())
+    col_list = ", ".join(cols)
+    placeholders = ", ".join([f"${i}" for i in range(1, len(cols)+1)])
+    qins = f"INSERT INTO {tbl} ({col_list}) VALUES ({placeholders})"
+
+    n_new, n_skip = 0, 0
+    for r in rows:
+        r2 = {c: r.get(c) for c in cols}
+        if not delete and row_exists_full(db, tbl, r2, cols=cols):
+            n_skip += 1
+            continue
+        res = db.sync_execute(qins, *[convert_type(r2.get(c)) for c in cols])
+        if res is not None:
+            try: n_new += int(str(res).split()[-1])
+            except: pass
+
+    print(f"new={n_new} skipped={n_skip}")
+    return 0
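`row_exists_full` relies on PostgreSQL's `IS NOT DISTINCT FROM`, which treats two NULLs as equal, so rows with NULL columns are still matched by the duplicate check. A hedged usage sketch; `db` is assumed to be the package's own DB wrapper exposing `sync_execute()` as used above:

```python
from dl2050utils.db import db_import_tbl_full_compare  # import path assumed from the tarball layout

# 'db' is assumed to be the dl2050utils DB wrapper used in the diff.

# Insert rows from ./backup/orgs.pickle into orgs, skipping rows whose
# full column set already exists (NULL-safe comparison per column).
db_import_tbl_full_compare(db, './backup/orgs', 'orgs')

# delete=True empties the table first; the per-row existence check is
# then skipped, so every pickled row is (re)inserted.
db_import_tbl_full_compare(db, './backup/orgs', 'orgs', delete=True)

# Restrict both the comparison and the insert to a subset of columns.
db_import_tbl_full_compare(db, './backup/orgs', 'orgs', cols=['id', 'name'])
```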
dl2050utils/fdb.py
@@ -59,7 +59,7 @@ class FileDB():
         """
         return f'{pre}{key}{ext}'

-    def save(self, key, data, pre='', ext='', save_f=pickle_save, allow_pickle=False):
+    def save(self, key, data, pre='', ext='', save_f=pickle_save, allow_pickle=False, show=False):
         """
         Saves data to a file in the directory corresponding to the key.
         Args:
@@ -81,10 +81,11 @@ class FileDB():
             save_f(p, data)
             return 0
         except Exception as exc:
-
+            if show:
+                print(f'FileDB save Exception: {exc}')
             return 1

-    def load(self, key, pre='', ext='', load_f=pickle_load, allow_pickle=False):
+    def load(self, key, pre='', ext='', load_f=pickle_load, allow_pickle=False,show=False):
         """
         Loads data from a file based on the key, prefix, and extension.
         Args:
@@ -102,10 +103,11 @@ class FileDB():
             if ext=='.npy': return np.load(p, allow_pickle=allow_pickle)
             return load_f(p)
         except Exception as exc:
-
+            if show:
+                print(f'FileDB load Exception: {exc}')
             return None

-    def delete(self, key, pre='', ext=''):
+    def delete(self, key, pre='', ext='',show=False):
         """
         Deletes file based on the key, prefix, and extension.
         Args:
@@ -121,5 +123,6 @@ class FileDB():
             p.unlink()
             return 0
         except Exception as exc:
-
+            if show:
+                print(f'FileDB delete Exception: {exc}')
             return 1
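The new `show` flag on `save`, `load`, and `delete` surfaces the previously swallowed exception on stdout without changing the return convention (0/1 for `save` and `delete`, `None` for a failed `load`). A sketch under the assumption that `FileDB` is constructed with a base directory; the constructor is not part of this diff:

```python
from dl2050utils.fdb import FileDB  # import path assumed from the tarball layout

fdb = FileDB('/data/filedb')  # constructor signature assumed; not shown in this diff

# With show=True a failure prints 'FileDB save Exception: ...' instead of
# failing silently; the return code is still 0 on success, 1 on error.
if fdb.save('exam-42', {'hr': 61}, ext='.pickle', show=True) != 0:
    pass  # the exception text was printed above

data = fdb.load('exam-42', ext='.pickle', show=True)  # None on failure
fdb.delete('exam-42', ext='.pickle', show=True)       # 1 on failure
```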
dl2050utils/gs.py
@@ -64,6 +64,9 @@ class GS:
         # Try Google Cloud first
         key_dict = oget(cfg, ["gcloud", "gs_key"])
         fs_cfg = oget(cfg, ["fs"]) or {}
+        bucket_cfg = oget(cfg, ["bucket"]) or {}
+        self.bucket_map = bucket_cfg if isinstance(bucket_cfg, dict) else {}
+        self.default_bucket = self.bucket_map.get("general")
         self.internal_token = fs_cfg.get("internal_token")
         if self.internal_token:
             os.environ["FS_INTERNAL_TOKEN"] = self.internal_token
@@ -573,3 +576,14 @@ class GS:
             self.upload_url(bucket_name, blob_name, timeout=timeout, size=size),
             self.download_url(bucket_name, blob_name, timeout=timeout),
         )
+    def resolve_bucket(self, bucket_name=None, bucket_key=None):
+        if bucket_name:
+            return bucket_name
+        if bucket_key:
+            if bucket_key in self.bucket_map:
+                return self.bucket_map[bucket_key]
+            raise RuntimeError(f"GS: unknown bucket_key '{bucket_key}'")
+        if self.default_bucket:
+            return self.default_bucket
+        raise RuntimeError("GS: missing bucket.general in config")
+
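`resolve_bucket` reads the new `bucket` section of the config: a plain mapping from logical keys to bucket names, with `general` as the fallback. A hypothetical config fragment; the bucket names and surrounding values are illustrative, and only `bucket`, `fs`, and `gcloud.gs_key` are implied by this diff:

```python
from dl2050utils.gs import GS  # import path assumed from the tarball layout

# Hypothetical config; values are illustrative.
cfg = {
    'gcloud': {'gs_key': {}},            # service-account key dict (redacted)
    'fs': {'internal_token': 'secret'},
    'bucket': {
        'general': 'kardio-general',     # becomes default_bucket
        'acme': 'kardio-acme',           # selected with bucket_key='acme'
    },
}

gs = GS(cfg)                               # constructor shape assumed
gs.resolve_bucket(bucket_name='explicit')  # -> 'explicit' (explicit name wins)
gs.resolve_bucket(bucket_key='acme')       # -> 'kardio-acme'
gs.resolve_bucket()                        # -> 'kardio-general'
gs.resolve_bucket(bucket_key='missing')    # raises RuntimeError
```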
dl2050utils/restapp.py
@@ -173,7 +173,12 @@ class App():
         files = args['files']
         uid = get_uid()
         upload_urls,download_urls = [],[]
-
+        u = await self.auth.check_auth(request)
+        org_id= u.get("org")
+        org = await self.db.select_one("orgs", {"id": org_id})
+        bucket_key = org.get("bucket_key")
+        bucket = self.fs.resolve_bucket(bucket_key=bucket_key)
+        #bucket = f'{self.service}-apiserver'
         for file in files:
             upload_url,download_url = self.fs.urls(bucket, f'{uid}/{file}')
             if upload_url is None or download_url is None:
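With this change the presigned upload/download URLs are routed per organization: the handler resolves the authenticated user's org, reads its `bucket_key`, and lets `resolve_bucket` fall back to the `general` bucket when the org has none. The commented-out line apparently preserves the previous hard-coded `f'{self.service}-apiserver'` bucket name for reference. Note that the schema change above adds `bucket_key` to `api_tasks` only; the `orgs.bucket_key` column read here is assumed to exist already or to be managed outside this diff.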
kardioutils-1.0.19/dl2050utils/__version__.py (deleted)
@@ -1 +0,0 @@
-version = "1.0.19"