sapiopycommons 2025.8.14a703__py3-none-any.whl → 2026.1.22a847__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sapiopycommons/ai/agent_service_base.py +2051 -0
- sapiopycommons/ai/converter_service_base.py +163 -0
- sapiopycommons/ai/external_credentials.py +131 -0
- sapiopycommons/ai/protoapi/agent/agent_pb2.py +87 -0
- sapiopycommons/ai/protoapi/agent/agent_pb2.pyi +282 -0
- sapiopycommons/ai/protoapi/agent/agent_pb2_grpc.py +154 -0
- sapiopycommons/ai/protoapi/agent/entry_pb2.py +49 -0
- sapiopycommons/ai/protoapi/agent/entry_pb2.pyi +40 -0
- sapiopycommons/ai/protoapi/agent/entry_pb2_grpc.py +24 -0
- sapiopycommons/ai/protoapi/agent/item/item_container_pb2.py +61 -0
- sapiopycommons/ai/protoapi/agent/item/item_container_pb2.pyi +181 -0
- sapiopycommons/ai/protoapi/agent/item/item_container_pb2_grpc.py +24 -0
- sapiopycommons/ai/protoapi/externalcredentials/external_credentials_pb2.py +41 -0
- sapiopycommons/ai/protoapi/externalcredentials/external_credentials_pb2.pyi +36 -0
- sapiopycommons/ai/protoapi/externalcredentials/external_credentials_pb2_grpc.py +24 -0
- sapiopycommons/ai/protoapi/fielddefinitions/fields_pb2.py +51 -0
- sapiopycommons/ai/protoapi/fielddefinitions/fields_pb2.pyi +59 -0
- sapiopycommons/ai/protoapi/fielddefinitions/fields_pb2_grpc.py +24 -0
- sapiopycommons/ai/protoapi/fielddefinitions/velox_field_def_pb2.py +123 -0
- sapiopycommons/ai/protoapi/fielddefinitions/velox_field_def_pb2.pyi +599 -0
- sapiopycommons/ai/protoapi/fielddefinitions/velox_field_def_pb2_grpc.py +24 -0
- sapiopycommons/ai/protoapi/pipeline/converter/converter_pb2.py +59 -0
- sapiopycommons/ai/protoapi/pipeline/converter/converter_pb2.pyi +68 -0
- sapiopycommons/ai/protoapi/pipeline/converter/converter_pb2_grpc.py +149 -0
- sapiopycommons/ai/protoapi/pipeline/script/script_pb2.py +69 -0
- sapiopycommons/ai/protoapi/pipeline/script/script_pb2.pyi +109 -0
- sapiopycommons/ai/protoapi/pipeline/script/script_pb2_grpc.py +153 -0
- sapiopycommons/ai/protoapi/pipeline/step_output_pb2.py +49 -0
- sapiopycommons/ai/protoapi/pipeline/step_output_pb2.pyi +56 -0
- sapiopycommons/ai/protoapi/pipeline/step_output_pb2_grpc.py +24 -0
- sapiopycommons/ai/protoapi/pipeline/step_pb2.py +43 -0
- sapiopycommons/ai/protoapi/pipeline/step_pb2.pyi +44 -0
- sapiopycommons/ai/protoapi/pipeline/step_pb2_grpc.py +24 -0
- sapiopycommons/ai/protoapi/session/sapio_conn_info_pb2.py +39 -0
- sapiopycommons/ai/protoapi/session/sapio_conn_info_pb2.pyi +33 -0
- sapiopycommons/ai/protoapi/session/sapio_conn_info_pb2_grpc.py +24 -0
- sapiopycommons/ai/protobuf_utils.py +583 -0
- sapiopycommons/ai/request_validation.py +561 -0
- sapiopycommons/ai/server.py +152 -0
- sapiopycommons/ai/test_client.py +534 -0
- sapiopycommons/callbacks/callback_util.py +26 -7
- sapiopycommons/eln/experiment_handler.py +12 -5
- sapiopycommons/files/file_util.py +128 -1
- sapiopycommons/files/temp_files.py +82 -0
- sapiopycommons/general/aliases.py +4 -1
- sapiopycommons/general/macros.py +172 -0
- sapiopycommons/general/time_util.py +199 -4
- sapiopycommons/recordmodel/record_handler.py +47 -12
- sapiopycommons/rules/eln_rule_handler.py +3 -0
- sapiopycommons/rules/on_save_rule_handler.py +3 -0
- sapiopycommons/webhook/webservice_handlers.py +1 -1
- {sapiopycommons-2025.8.14a703.dist-info → sapiopycommons-2026.1.22a847.dist-info}/METADATA +2 -2
- sapiopycommons-2026.1.22a847.dist-info/RECORD +113 -0
- sapiopycommons/ai/tool_of_tools.py +0 -917
- sapiopycommons-2025.8.14a703.dist-info/RECORD +0 -72
- {sapiopycommons-2025.8.14a703.dist-info → sapiopycommons-2026.1.22a847.dist-info}/WHEEL +0 -0
- {sapiopycommons-2025.8.14a703.dist-info → sapiopycommons-2026.1.22a847.dist-info}/licenses/LICENSE +0 -0
@@ -858,9 +858,9 @@ class CallbackUtil:
             raise SapioException("No records provided.")
         data_type: str = AliasUtil.to_singular_data_type_name(records)
         if index_field is not None:
-            field_map_list: list[FieldMap] = self.__get_indexed_field_maps(records, index_field)
+            field_map_list: list[FieldMap] = self.__get_indexed_field_maps(records, index_field, True)
         else:
-            field_map_list: list[FieldMap] = AliasUtil.to_field_map_list(records)
+            field_map_list: list[FieldMap] = AliasUtil.to_field_map_list(records, True)

         # Convert the group_by parameter to a field name.
         if group_by is not None:
@@ -882,6 +882,18 @@ class CallbackUtil:
         temp_dt = self.__temp_dt_from_field_names(data_type, fields, None, default_modifier, field_modifiers)
         temp_dt.record_image_assignable = bool(image_data)

+        # PR-47894: If the RecordId field is not present in the layout, then it should not be included in the field
+        # maps, as otherwise selection list fields can break.
+        remove_record_id: bool = True
+        for field_def in temp_dt.get_field_def_list():
+            if field_def.data_field_name == "RecordId":
+                remove_record_id = False
+                break
+        if remove_record_id:
+            for field_map in field_map_list:
+                if "RecordId" in field_map:
+                    del field_map["RecordId"]
+
         # Send the request to the user.
         request = TableEntryDialogRequest(title, msg, temp_dt, field_map_list,
                                           record_image_data_list=image_data, group_by_field=group_by,
@@ -1928,7 +1940,8 @@ class CallbackUtil:
         self.write_file(zip_name, FileUtil.zip_files(files))

     @staticmethod
-    def __get_indexed_field_maps(records: Iterable[SapioRecord], index_field: str)
+    def __get_indexed_field_maps(records: Iterable[SapioRecord], index_field: str, include_record_id: bool = False) \
+            -> list[FieldMap]:
         """
         For dialogs that accept multiple records, we may want to be able to match the returned results back to the
         records that they're for. In this case, we need to add an index to each record so that we can match them back
@@ -1938,12 +1951,13 @@ class CallbackUtil:
         :param records: The records to return indexed field maps of.
         :param index_field: The name of the field to use as the index. Make sure that this field doesn't exist on the
             records, as then it will overwrite the existing value.
+        :param include_record_id: Whether to include the RecordId field in the field maps.
         :return: A list of field maps for the records, with an index field added to each. The value of the index on
             each field map is the record's record ID (even if it's a record model with a negative ID).
         """
         ret_val: list[FieldMap] = []
         for record in records:
-            field_map: FieldMap = AliasUtil.to_field_map(record)
+            field_map: FieldMap = AliasUtil.to_field_map(record, include_record_id)
             field_map[index_field] = AliasUtil.to_record_id(record)
             ret_val.append(field_map)
         return ret_val
@@ -1984,7 +1998,10 @@ class CallbackUtil:
             if field_def.key_field:
                 field_def = modifier.modify_field(field_def)
             builder.add_field(field_def, column, span)
-
+        # PR-47917: Set fill_view to false on the layout of temp data types created by CallbackUtil.
+        temp_dt = builder.get_temporary_data_type()
+        temp_dt.data_type_layout.fill_view = False
+        return temp_dt

     def __temp_dt_from_field_names(self, data_type: str, fields: Iterable[FieldIdentifier | FieldFilterCriteria],
                                    column_positions: dict[str, tuple[int, int]] | None,
@@ -2055,8 +2072,10 @@ class CallbackUtil:
             modifier: FieldModifier = field_modifiers.get(field_name, default_modifier)
             builder.add_field(modifier.modify_field(field_def), current_column, span)
             current_column += span
-
-
+        # PR-47917: Set fill_view to false on the layout of temp data types created by CallbackUtil.
+        temp_dt = builder.get_temporary_data_type()
+        temp_dt.data_type_layout.fill_view = False
+        return temp_dt

     # CR-47309: Allow layouts to be provided in place of field names for record dialogs.
     def __temp_dt_from_layout(self, data_type: str, layout: DataTypeLayoutIdentifier,
@@ -206,12 +206,11 @@ class ExperimentHandler:
         else:
             user = context
             context = None
-        if context is not None and context.eln_experiment is not None and experiment is None:
-            experiment = context.eln_experiment
         # FR-46495 - Allow the init function of ExperimentHandler to take in an ElnExperiment that is separate from the
         # context.
         # CR-37038 - Allow other experiment object types to be provided. Convert them all down to ElnExperiment.
-
+        # PR-47793 - Fix cases where both a SapioWebhookContext and an experiment parameter are provided.
+        if experiment is not None:
             eln_manager = DataMgmtServer.get_eln_manager(user)
             # If this object is already an ElnExperiment, do nothing.
             if isinstance(experiment, ElnExperiment):
@@ -227,13 +226,19 @@ class ExperimentHandler:
                    raise SapioException(f"No experiment with notebook ID {notebook_id} located in the system.")
            # If this object is a record, assume it is an experiment record that we can query the system with.
            else:
-                record_id: int = AliasUtil.
+                record_id: int = AliasUtil.to_record_id(experiment)
                experiment: ElnExperiment = eln_manager.get_eln_experiment_by_record_id(record_id)
                if not experiment:
                    raise SapioException(f"No experiment with record ID {record_id} located in the system.")
+        elif context is not None and context.eln_experiment is not None:
+            experiment = context.eln_experiment
+
        if experiment is None:
            raise SapioException("Cannot initialize ExperimentHandler. No ELN Experiment found in the provided "
                                 "parameters.")
+        elif not isinstance(experiment, ElnExperiment):
+            raise SapioException("Cannot initialize ExperimentHandler. The experiment variable is not an "
+                                 "ElnExperiment!")

        return user, context, experiment

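The hunks above change how ExperimentHandler resolves its experiment: an explicitly supplied experiment now takes precedence over the one attached to the webhook context, and a clear error is raised if nothing usable is found or the value cannot be converted to an ElnExperiment. A hypothetical usage sketch of the two call shapes the FR-46495/PR-47793 comments describe; the exact __init__ signature is not shown in this diff:

```python
from sapiopycommons.eln.experiment_handler import ExperimentHandler


def handle_webhook(context) -> None:  # context: a SapioWebhookContext
    # Use the experiment already attached to the webhook context.
    handler = ExperimentHandler(context)

    # Or pass an experiment explicitly (an ElnExperiment, notebook ID, or experiment record);
    # per PR-47793 it now takes precedence over context.eln_experiment.
    other = ...  # placeholder for an experiment obtained elsewhere
    handler_for_other = ExperimentHandler(context, other)
```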
@@ -1425,7 +1430,9 @@ class ExperimentHandler:
         :return: The map of options for the input step.
         """
         step: ElnEntryStep = self.get_step(step)
-
+        # PR-47796: Fix the get_step_options function making a webservice query every time it is called instead of
+        # properly checking its cache of entry options.
+        if step.get_id() not in self._step_options:
             self._step_options.update(ExperimentReportUtil.get_experiment_entry_options(self.user,
                                                                                         self.get_all_steps()))
         return self._step_options[step.get_id()]
@@ -1,4 +1,7 @@
+import gzip
 import io
+import tarfile
+import time
 import warnings
 import zipfile

@@ -322,7 +325,7 @@ class FileUtil:
     @staticmethod
     def zip_files(files: dict[str, str | bytes]) -> bytes:
         """
-        Create a zip file for a collection of files.
+        Create a .zip file for a collection of files.

         :param files: A dictionary of file name to file data as a string or bytes.
         :return: The bytes for a zip file containing the input files.
@@ -335,6 +338,130 @@ class FileUtil:
         # throws an I/O exception.
         return zip_buffer.getvalue()

+    # FR-47422: Add a function for unzipping files that may have been zipped by the above function.
+    @staticmethod
+    def unzip_files(zip_file: bytes) -> dict[str, bytes]:
+        """
+        Decompress a .zip file from an in-memory bytes object and extracts all files into a dictionary.
+
+        :param zip_file: The bytes of the zip file to be decompressed.
+        :return: A dictionary of file name to file bytes for each file in the zip.
+        """
+        extracted_files: dict[str, bytes] = {}
+        with io.BytesIO(zip_file) as zip_buffer:
+            with zipfile.ZipFile(zip_buffer, "r") as zip_file:
+                for file_name in zip_file.namelist():
+                    with zip_file.open(file_name) as file:
+                        extracted_files[file_name] = file.read()
+        return extracted_files
+
+    # FR-47422: Add functions for compressing and decompressing .gz, .tar, and .tar.gz files.
+    @staticmethod
+    def gzip_file(file_data: bytes | str) -> bytes:
+        """
+        Create a .gz file for a single file.
+
+        :param file_data: The file data to be compressed as bytes or a string.
+        :return: The bytes of the gzip-compressed file.
+        """
+        return gzip.compress(file_data.encode() if isinstance(file_data, str) else file_data)
+
+    @staticmethod
+    def ungzip_file(gzip_file: bytes) -> bytes:
+        """
+        Decompress a .gz file.
+
+        :param gzip_file: The bytes of the gzip-compressed file.
+        :return: The decompressed file data as bytes.
+        """
+        return gzip.decompress(gzip_file)
+
+    @staticmethod
+    def tar_files(files: dict[str, str | bytes]) -> bytes:
+        """
+        Create a .tar file for a collection of files.
+
+        :param files: A dictionary of file name to file data as a string or bytes.
+        :return: The bytes for a tar file containing the input files.
+        """
+        with io.BytesIO() as tar_buffer:
+            with tarfile.open(fileobj=tar_buffer, mode="w") as tar:
+                for name, data in files.items():
+                    if isinstance(data, str):
+                        data: bytes = data.encode('utf-8')
+
+                    tarinfo = tarfile.TarInfo(name=name)
+                    tarinfo.size = len(data)
+                    tarinfo.mtime = int(time.time())
+
+                    with io.BytesIO(data) as file:
+                        tar.addfile(tarinfo=tarinfo, fileobj=file)
+
+            tar_buffer.seek(0)
+            return tar_buffer.getvalue()
+
+    @staticmethod
+    def untar_files(tar_file: bytes) -> dict[str, bytes]:
+        """
+        Decompress a .tar file from an in-memory bytes object and extracts all files into a dictionary.
+
+        :param tar_file: The bytes of the tar file to be decompressed.
+        :return: A dictionary of file name to file bytes for each file in the tar.
+        """
+        extracted_files: dict[str, bytes] = {}
+        with io.BytesIO(tar_file) as tar_buffer:
+            with tarfile.open(fileobj=tar_buffer, mode="r") as tar:
+                for member in tar.getmembers():
+                    if member.isfile():
+                        file_obj = tar.extractfile(member)
+                        if file_obj:
+                            with file_obj:
+                                extracted_files[member.name] = file_obj.read()
+        return extracted_files
+
+    @staticmethod
+    def tar_gzip_files(files: dict[str, str | bytes]) -> bytes:
+        """
+        Create a .tar.gz file for a collection of files.
+
+        :param files: A dictionary of file name to file data as a string or bytes.
+        :return: The bytes for a tar.gz file containing the input files.
+        """
+        with io.BytesIO() as tar_buffer:
+            with tarfile.open(fileobj=tar_buffer, mode="w:gz") as tar:
+                for name, data in files.items():
+                    if isinstance(data, str):
+                        data: bytes = data.encode('utf-8')
+
+                    tarinfo = tarfile.TarInfo(name=name)
+                    tarinfo.size = len(data)
+                    tarinfo.mtime = int(time.time())
+
+                    with io.BytesIO(data) as file:
+                        tar.addfile(tarinfo=tarinfo, fileobj=file)
+
+            tar_buffer.seek(0)
+            return tar_buffer.getvalue()
+
+    @staticmethod
+    def untar_gzip_files(tar_gzip_file: bytes) -> dict[str, bytes]:
+        """
+        Decompress a .tar.gz file from an in-memory bytes object and extracts all files into a dictionary.
+
+        :param tar_gzip_file: The bytes of the tar.gz file to be decompressed.
+        :return: A dictionary of file name to file bytes for each file in the tar.gz
+        """
+        extracted_files: dict[str, bytes] = {}
+        with io.BytesIO(tar_gzip_file) as tar_buffer:
+            with tarfile.open(fileobj=tar_buffer, mode="r:gz") as tar:
+                for member in tar.getmembers():
+                    if member.isfile():
+                        file_obj = tar.extractfile(member)
+                        if file_obj:
+                            with file_obj:
+                                extracted_files[member.name] = file_obj.read()
+        return extracted_files
+
     # Deprecated functions:

     # FR-46097 - Add write file request shorthand functions to FileUtil.
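Taken together, the FR-47422 additions give FileUtil symmetric in-memory compress/decompress support for .zip, .gz, .tar, and .tar.gz data. A small usage sketch based on the signatures in the hunk above; note that decompressed file contents always come back as bytes:

```python
from sapiopycommons.files.file_util import FileUtil

# In-memory files: string values are encoded to bytes by the helpers.
files = {"report.txt": "hello world", "data.bin": b"\x00\x01\x02"}

zipped: bytes = FileUtil.zip_files(files)
assert FileUtil.unzip_files(zipped).keys() == files.keys()

tarred: bytes = FileUtil.tar_files(files)
assert FileUtil.untar_files(tarred)["data.bin"] == b"\x00\x01\x02"

targz: bytes = FileUtil.tar_gzip_files(files)
assert FileUtil.untar_gzip_files(targz)["report.txt"] == b"hello world"

# Single-file gzip round trip.
assert FileUtil.ungzip_file(FileUtil.gzip_file("hello world")) == b"hello world"
```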
@@ -0,0 +1,82 @@
+import os
+import shutil
+import tempfile
+from typing import Callable, Any
+
+
+# FR-47422: Created class.
+class TempFileHandler:
+    """
+    A utility class to manage temporary files and directories.
+    """
+    directories: list[str]
+    files: list[str]
+
+    def __init__(self):
+        self.directories = []
+        self.files = []
+
+    def create_temp_directory(self) -> str:
+        """
+        Create a temporary directory.
+
+        :return: The path to a newly created temporary directory.
+        """
+        directory: str = tempfile.mkdtemp()
+        self.directories.append(directory)
+        return directory
+
+    def create_temp_file(self, data: str | bytes, suffix: str = "") -> str:
+        """
+        Create a temporary file with the specified data and optional suffix.
+
+        :param data: The data to write to the temporary file.
+        :param suffix: An optional suffix for the temporary file.
+        :return: The path to a newly created temporary file containing the provided data.
+        """
+        mode: str = 'w' if isinstance(data, str) else 'wb'
+        with tempfile.NamedTemporaryFile(mode=mode, suffix=suffix, delete=False) as tmp_file:
+            tmp_file.write(data)
+            file_path: str = tmp_file.name
+        self.files.append(file_path)
+        return file_path
+
+    def create_temp_file_from_func(self, func: Callable, params: dict[str, Any], suffix: str = "",
+                                   is_binary: bool = True) -> str:
+        """
+        Create a temporary file and populate it using the provided function. The function should accept parameters as
+        specified in the `params` dictionary.
+
+        :param func: The function to call with the temporary file path that will populate the file.
+        :param params: Keyword arguments to pass to the function. If "<NEW_FILE>" is used as a value, it will be
+            replaced with the temporary file object. If "<NEW_FILE_PATH>" is used as a value, it will be replaced with
+            the temporary file path.
+        :param suffix: An optional suffix for the temporary file.
+        :param is_binary: Whether to open the temporary file in binary mode.
+        :return: The path to the newly created temporary file.
+        """
+        mode: str = 'wb' if is_binary else 'w'
+        with tempfile.NamedTemporaryFile(mode, suffix=suffix, delete=False) as tmp_file:
+            for key, value in params.items():
+                if value == "<NEW_FILE>":
+                    params[key] = tmp_file
+                elif value == "<NEW_FILE_PATH>":
+                    params[key] = tmp_file.name
+            func(**params)
+            file_path: str = tmp_file.name
+        self.files.append(file_path)
+        return file_path
+
+    def cleanup(self) -> None:
+        """
+        Delete all temporary files and directories created by this handler.
+        """
+        for directory in self.directories:
+            if os.path.exists(directory):
+                shutil.rmtree(directory)
+        self.directories.clear()
+
+        for file_path in self.files:
+            if os.path.exists(file_path):
+                os.remove(file_path)
+        self.files.clear()
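The new TempFileHandler tracks every path it hands out so that everything can be removed in a single cleanup() call. A brief usage sketch based on the class above, using json.dump as a stand-in for any callable that writes to a file object:

```python
import json

from sapiopycommons.files.temp_files import TempFileHandler

handler = TempFileHandler()
try:
    # Write raw data straight to a tracked temp file.
    csv_path: str = handler.create_temp_file("a,b\n1,2\n", suffix=".csv")

    # Let an arbitrary callable populate the file; "<NEW_FILE>" is swapped for the open file object.
    json_path: str = handler.create_temp_file_from_func(
        json.dump, {"obj": {"a": 1}, "fp": "<NEW_FILE>"}, suffix=".json", is_binary=False)

    work_dir: str = handler.create_temp_directory()
finally:
    # Every file and directory created above is deleted in one call.
    handler.cleanup()
```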
@@ -223,9 +223,12 @@ class AliasUtil:
         # macros get translated to valid field values.
         fields: FieldMap = {f: record.fields.get(f) for f in record.fields}
         # PR-47457: Only include the record ID if the caller requests it, since including the record ID can break
-        # callbacks in certain circumstances
+        # callbacks in certain circumstances.
+        # PR-47894: Also remove the RecordId key if it exists and the caller doesn't want it included.
         if include_record_id:
             fields["RecordId"] = AliasUtil.to_record_id(record)
+        elif "RecordId" in fields:
+            del fields["RecordId"]
         return fields

     @staticmethod
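With the PR-47894 follow-up, AliasUtil.to_field_map now actively strips a pre-existing RecordId key unless the caller opts in. A minimal sketch of the two call shapes used elsewhere in this diff:

```python
from sapiopycommons.general.aliases import AliasUtil


def example(record) -> None:  # record: any SapioRecord / record model
    # Default: any "RecordId" entry is dropped from the returned field map.
    fields = AliasUtil.to_field_map(record)
    assert "RecordId" not in fields

    # Opt in to include the record ID, as CallbackUtil's dialog helpers now do.
    fields_with_id = AliasUtil.to_field_map(record, True)
    assert fields_with_id["RecordId"] == AliasUtil.to_record_id(record)
```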
@@ -0,0 +1,172 @@
+
+import re
+from datetime import datetime, timedelta, timezone
+
+from sapiopycommons.general.exceptions import SapioException
+
+date_macro_pattern: str = (
+    r"@(?:today|yesterday|thisweek|"
+    r"nextmonth|thismonth|lastmonth|"
+    r"nextyear|thisyear|lastyear|"
+    r"month(?:january|february|march|april|may|june|july|august|september|october|november|december)|"
+    r"last\d+days|next\d+days)"
+)
+"""A regular expression that can be used to determine whether a given value matches one of the supported date macros."""
+
+date_macro_values: list[str] = [
+    "@today", "@yesterday", "@thisweek",
+    "@nextmonth", "@thismonth", "@lastmonth",
+    "@nextyear", "@thisyear", "@lastyear",
+    "@monthjanuary", "@monthfebruary", "@monthmarch", "@monthapril", "@monthmay", "@monthjune", "@monthjuly",
+    "@monthaugust", "@monthseptember", "@monthoctober", "@monthnovember", "@monthdecember",
+    "@next_days", "@last_days"
+]
+"""A list of the supported date macros. For @next_days and @last_days, the underscore is expected to be replaced with
+an integer."""
+
+
+class MacroParser:
+    """
+    A utility class for parsing macros used in the Sapio platform.
+    """
+    _reg_month = re.compile(r"@\w*month\w*")
+    _reg_digits = re.compile(r"@\w*(\d+)\w*")
+    _reg_last_days = re.compile(r"@last(\d+)days")
+    _reg_next_days = re.compile(r"@next(\d+)days")
+
+    @staticmethod
+    def _now() -> datetime:
+        """
+        :return: A datetime object for the current time in UTC.
+        """
+        return datetime.now(timezone.utc)
+
+    @staticmethod
+    def _dates_to_timestamps(a: datetime, b: datetime) -> tuple[int, int]:
+        """
+        Convert the given datetimes to epoch-millisecond timestamps on the start of the first date
+        and the end of the second date.
+
+        :param a: A datetime object.
+        :param b: A datetime object.
+        :return: A tuple containing the start and end timestamps in milliseconds since the epoch.
+        """
+        # The start of the date on the first datetime.
+        a = a.replace(hour=0, minute=0, second=0, microsecond=0)
+        # The end of the date on the second datetime.
+        b = b.replace(hour=23, minute=59, second=59, microsecond=999000)
+        return int(a.timestamp() * 1000), int(b.timestamp() * 1000)
+
+    @classmethod
+    def parse_date_macro(cls, macro: str) -> tuple[int, int]:
+        """
+        Convert a date macro string into a range of epoch millisecond timestamps.
+        All macros are considered from the current time in UTC.
+
+        :param macro: A valid date macro string. If an invalid macro is provided, an exception will be raised.
+        :return: A tuple containing the start and end timestamps in milliseconds since the epoch for the given macro.
+            The returned range is inclusive; the first value is the first millisecond of the starting date
+            (00:00:00.000) and the last value is the final millisecond of the ending date (23:59:59.999).
+        """
+        if macro is None or not macro.strip():
+            raise SapioException(f"Invalid macro. None or empty/blank string provided.")
+        macro = macro.strip().lower()
+
+        now: datetime = cls._now()
+
+        # --- @today: 00:00:00.000 to 23:59:59.999 today ---
+        if macro == "@today":
+            return cls._dates_to_timestamps(now, now)
+
+        # --- @yesterday: 00:00:00.000 to 23:59:59.999 yesterday ---
+        if macro == "@yesterday":
+            yesterday: datetime = now - timedelta(days=1)
+            return cls._dates_to_timestamps(yesterday, yesterday)
+
+        # --- @thisweek: Sunday -> Saturday (inclusive) ---
+        if macro == "@thisweek":
+            weekday: int = now.weekday()  # Monday=0 ... Sunday=6
+            # TODO: Some way to control what the first day of the week is considered?
+            # +2 = Saturday
+            # +1 = Sunday
+            # +0 = Monday
+            days_since_sunday: int = (weekday + 1) % 7
+            sunday: datetime = now - timedelta(days=days_since_sunday)
+            saturday: datetime = sunday + timedelta(days=6)
+            return cls._dates_to_timestamps(sunday, saturday)
+
+        # --- last/next N days ---
+        if cls._reg_digits.fullmatch(macro):
+            # --- @lastNdays ---
+            if m := cls._reg_last_days.fullmatch(macro):
+                days = int(m.group(1))
+                return cls._dates_to_timestamps(now - timedelta(days=days), now)
+
+            # --- @nextNdays ---
+            if m := cls._reg_next_days.fullmatch(macro):
+                days = int(m.group(1))
+                return cls._dates_to_timestamps(now, now + timedelta(days=days))
+
+            raise SapioException(f"Invalid macro: {macro}")
+
+        # --- Month macros ---
+        if cls._reg_month.fullmatch(macro):
+            year: int = now.year
+            month: int = now.month
+
+            if macro == "@lastmonth":
+                month -= 1
+                if month == 0:
+                    year -= 1
+                    month = 12
+            elif macro == "@nextmonth":
+                month += 1
+                if month == 13:
+                    year += 1
+                    month = 1
+            # @thismonth uses the current month and year, so no replacement needed.
+            elif macro != "@thismonth":
+                month_map: dict[str, int] = {
+                    "@monthjanuary": 1,
+                    "@monthfebruary": 2,
+                    "@monthmarch": 3,
+                    "@monthapril": 4,
+                    "@monthmay": 5,
+                    "@monthjune": 6,
+                    "@monthjuly": 7,
+                    "@monthaugust": 8,
+                    "@monthseptember": 9,
+                    "@monthoctober": 10,
+                    "@monthnovember": 11,
+                    "@monthdecember": 12,
+                }
+                if macro in month_map:
+                    month = month_map[macro]
+                else:
+                    raise SapioException(f"Invalid macro: {macro}")
+
+            month_start: datetime = now.replace(year=year, month=month, day=1)
+            # Find the first day of next month.
+            if month == 12:
+                next_month = datetime(year + 1, 1, 1, tzinfo=timezone.utc)
+            else:
+                next_month = datetime(year, month + 1, 1, tzinfo=timezone.utc)
+            # Then subtract one day to find the last day of the start month.
+            month_end: datetime = next_month - timedelta(days=1)
+
+            return cls._dates_to_timestamps(month_start, month_end)
+
+        # --- Year macros ---
+        if macro in ("@thisyear", "@lastyear", "@nextyear"):
+            year: int = now.year
+            if macro == "@lastyear":
+                year -= 1
+            elif macro == "@nextyear":
+                year += 1
+            # No change in year needed for @thisyear.
+
+            year_start: datetime = now.replace(year=year, month=1, day=1)
+            year_end: datetime = year_start.replace(year=year_start.year + 1) - timedelta(days=1)
+            return cls._dates_to_timestamps(year_start, year_end)
+
+        raise SapioException(f"Invalid macro: {macro}")
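Per the module above, a macro string resolves to an inclusive pair of epoch-millisecond timestamps, and date_macro_pattern can be used to pre-validate input before parsing. A short usage sketch:

```python
import re

from sapiopycommons.general.macros import MacroParser, date_macro_pattern

for macro in ("@today", "@last7days", "@monthmarch", "@thisyear"):
    # Validate against the supported-macro regex before parsing.
    if re.fullmatch(date_macro_pattern, macro):
        start_ms, end_ms = MacroParser.parse_date_macro(macro)
        print(f"{macro}: {start_ms} -> {end_ms}")

# Unsupported strings raise SapioException, e.g.:
# MacroParser.parse_date_macro("@sometime")  # -> SapioException: Invalid macro: @sometime
```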