sapiopycommons 2025.3.10a455__py3-none-any.whl → 2025.3.17a456__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of sapiopycommons might be problematic. Click here for more details.
- sapiopycommons/callbacks/callback_util.py +1220 -366
- sapiopycommons/chem/Molecules.py +0 -2
- sapiopycommons/customreport/auto_pagers.py +270 -0
- sapiopycommons/datatype/data_fields.py +1 -1
- sapiopycommons/eln/experiment_handler.py +2 -1
- sapiopycommons/eln/experiment_report_util.py +7 -7
- sapiopycommons/files/file_bridge.py +76 -0
- sapiopycommons/files/file_bridge_handler.py +325 -110
- sapiopycommons/files/file_data_handler.py +2 -2
- sapiopycommons/files/file_util.py +36 -11
- sapiopycommons/files/file_validator.py +6 -5
- sapiopycommons/files/file_writer.py +1 -1
- sapiopycommons/flowcyto/flow_cyto.py +1 -1
- sapiopycommons/general/accession_service.py +1 -1
- sapiopycommons/general/aliases.py +48 -28
- sapiopycommons/general/audit_log.py +2 -2
- sapiopycommons/general/custom_report_util.py +24 -1
- sapiopycommons/general/directive_util.py +86 -0
- sapiopycommons/general/exceptions.py +41 -2
- sapiopycommons/general/popup_util.py +2 -2
- sapiopycommons/multimodal/multimodal.py +1 -0
- sapiopycommons/processtracking/custom_workflow_handler.py +3 -3
- sapiopycommons/recordmodel/record_handler.py +5 -3
- sapiopycommons/samples/aliquot.py +48 -0
- sapiopycommons/webhook/webhook_handlers.py +445 -55
- {sapiopycommons-2025.3.10a455.dist-info → sapiopycommons-2025.3.17a456.dist-info}/METADATA +1 -1
- {sapiopycommons-2025.3.10a455.dist-info → sapiopycommons-2025.3.17a456.dist-info}/RECORD +29 -26
- {sapiopycommons-2025.3.10a455.dist-info → sapiopycommons-2025.3.17a456.dist-info}/WHEEL +0 -0
- {sapiopycommons-2025.3.10a455.dist-info → sapiopycommons-2025.3.17a456.dist-info}/licenses/LICENSE +0 -0
|
@@ -21,10 +21,14 @@ class FileUtil:
|
|
|
21
21
|
Utilities for the handling of files, including the requesting of files from the user and the parsing of files into
|
|
22
22
|
tokenized lists. Makes use of Pandas DataFrames for any file parsing purposes.
|
|
23
23
|
"""
|
|
24
|
+
# PR-47433: Add a keep_default_na argument to FileUtil.tokenize_csv and FileUtil.tokenize_xlsx so that N/A values
|
|
25
|
+
# don't get returned as NoneType, and add **kwargs in case any other Pandas input parameters need changed by the
|
|
26
|
+
# caller.
|
|
24
27
|
@staticmethod
|
|
25
28
|
def tokenize_csv(file_bytes: bytes, required_headers: list[str] | None = None, header_row_index: int | None = 0,
|
|
26
29
|
seperator: str = ",", *, encoding: str | None = None, encoding_error: str | None = "strict",
|
|
27
|
-
exception_on_empty: bool = True
|
|
30
|
+
exception_on_empty: bool = True, keep_default_na: bool = False, **kwargs) \
|
|
31
|
+
-> tuple[list[dict[str, str]], list[list[str]]]:
|
|
28
32
|
"""
|
|
29
33
|
Tokenize a CSV file. The provided file must be uniform. That is, if row 1 has 10 cells, all the rows in the file
|
|
30
34
|
must have 10 cells. Otherwise, the Pandas parser throws a tokenizer exception.
|
|
@@ -46,6 +50,9 @@ class FileUtil:
|
|
|
46
50
|
https://docs.python.org/3/library/codecs.html#error-handlers
|
|
47
51
|
:param exception_on_empty: Throw a user error exception if the provided file bytes result in an empty list in
|
|
48
52
|
the first element of the returned tuple.
|
|
53
|
+
:param keep_default_na: If False, values that are recognized as NaN (e.g. N/A, NA, NaN) will remain as strings.
|
|
54
|
+
If True, these values will be converted to a NoneType value.
|
|
55
|
+
:param kwargs: Additional arguments to be passed to the pandas read_csv function.
|
|
49
56
|
:return: The CSV parsed into a list of dicts where each dict is a row, mapping the headers to the cells for
|
|
50
57
|
that row. Also returns a list of each row above the headers (the metadata), parsed into a list of each cell.
|
|
51
58
|
If the header row index is 0 or None, this list will be empty.
|
|
@@ -53,7 +60,8 @@ class FileUtil:
|
|
|
53
60
|
# Parse the file bytes into two DataFrames. The first is metadata of the file located above the header row,
|
|
54
61
|
# while the second is the body of the file below the header row.
|
|
55
62
|
file_body, file_metadata = FileUtil.csv_to_data_frames(file_bytes, header_row_index, seperator,
|
|
56
|
-
encoding=encoding, encoding_error=encoding_error
|
|
63
|
+
encoding=encoding, encoding_error=encoding_error,
|
|
64
|
+
keep_default_na=keep_default_na, **kwargs)
|
|
57
65
|
# Parse the metadata from above the header row index into a list of lists.
|
|
58
66
|
metadata: list[list[str]] = FileUtil.data_frame_to_lists(file_metadata)
|
|
59
67
|
# Parse the data from the file body into a list of dicts.
|
|
@@ -64,7 +72,8 @@ class FileUtil:
|
|
|
64
72
|
|
|
65
73
|
@staticmethod
|
|
66
74
|
def tokenize_xlsx(file_bytes: bytes, required_headers: list[str] | None = None, header_row_index: int | None = 0,
|
|
67
|
-
*, exception_on_empty: bool = True
|
|
75
|
+
*, exception_on_empty: bool = True, keep_default_na: bool = False, **kwargs) \
|
|
76
|
+
-> tuple[list[dict[str, str]], list[list[str]]]:
|
|
68
77
|
"""
|
|
69
78
|
Tokenize an XLSX file row by row.
|
|
70
79
|
|
|
@@ -77,13 +86,17 @@ class FileUtil:
|
|
|
77
86
|
is assumed to be the header row.
|
|
78
87
|
:param exception_on_empty: Throw a user error exception if the provided file bytes result in an empty list in
|
|
79
88
|
the first element of the returned tuple.
|
|
89
|
+
:param keep_default_na: If False, values that are recognized as NaN (e.g. N/A, NA, NaN) will remain as strings.
|
|
90
|
+
If True, these values will be converted to a NoneType value.
|
|
91
|
+
:param kwargs: Additional arguments to be passed to the pandas read_excel function.
|
|
80
92
|
:return: The XLSX parsed into a list of dicts where each dict is a row, mapping the headers to the cells for
|
|
81
93
|
that row. Also returns a list of each row above the headers (the metadata), parsed into a list of each cell.
|
|
82
94
|
If the header row index is 0 or None, this list will be empty.
|
|
83
95
|
"""
|
|
84
96
|
# Parse the file bytes into two DataFrames. The first is metadata of the file located above the header row,
|
|
85
97
|
# while the second is the body of the file below the header row.
|
|
86
|
-
file_body, file_metadata = FileUtil.xlsx_to_data_frames(file_bytes, header_row_index
|
|
98
|
+
file_body, file_metadata = FileUtil.xlsx_to_data_frames(file_bytes, header_row_index,
|
|
99
|
+
keep_default_na=keep_default_na, **kwargs)
|
|
87
100
|
# Parse the metadata from above the header row index into a list of lists.
|
|
88
101
|
metadata: list[list[str]] = FileUtil.data_frame_to_lists(file_metadata)
|
|
89
102
|
# Parse the data from the file body into a list of dicts.
|
|
@@ -94,7 +107,8 @@ class FileUtil:
|
|
|
94
107
|
|
|
95
108
|
@staticmethod
|
|
96
109
|
def csv_to_data_frames(file_bytes: bytes, header_row_index: int | None = 0, seperator: str = ",",
|
|
97
|
-
*, encoding: str | None = None, encoding_error: str | None = "strict"
|
|
110
|
+
*, encoding: str | None = None, encoding_error: str | None = "strict",
|
|
111
|
+
keep_default_na: bool = False, **kwargs) \
|
|
98
112
|
-> tuple[DataFrame, DataFrame | None]:
|
|
99
113
|
"""
|
|
100
114
|
Parse the file bytes for a CSV into DataFrames. The provided file must be uniform. That is, if row 1 has 10
|
|
@@ -113,6 +127,9 @@ class FileUtil:
|
|
|
113
127
|
is "strict", meaning that encoding errors raise an exception. Change this to "ignore" to skip over invalid
|
|
114
128
|
characters or "replace" to replace invalid characters with a ? character. For a full list of options, see
|
|
115
129
|
https://docs.python.org/3/library/codecs.html#error-handlers
|
|
130
|
+
:param keep_default_na: If False, values that are recognized as NaN (e.g. N/A, NA, NaN) will remain as strings.
|
|
131
|
+
If True, these values will be converted to a NoneType value.
|
|
132
|
+
:param kwargs: Additional arguments to be passed to the pandas read_csv function.
|
|
116
133
|
:return: A tuple of two DataFrames. The first is the frame for the CSV table body, while the second is for the
|
|
117
134
|
metadata from above the header row, or None if there is no metadata.
|
|
118
135
|
"""
|
|
@@ -125,19 +142,21 @@ class FileUtil:
|
|
|
125
142
|
file_metadata = pandas.read_csv(file_io, header=None, dtype=dtype(str),
|
|
126
143
|
skiprows=lambda x: x >= header_row_index,
|
|
127
144
|
skip_blank_lines=False, sep=seperator, encoding=encoding,
|
|
128
|
-
encoding_errors=encoding_error
|
|
145
|
+
encoding_errors=encoding_error, keep_default_na=keep_default_na,
|
|
146
|
+
**kwargs)
|
|
129
147
|
with io.BytesIO(file_bytes) as file_io:
|
|
130
148
|
# The use of the dtype argument is to ensure that everything from the file gets read as a string. Added
|
|
131
149
|
# because some numerical values would get ".0" appended to them, even when casting the DataFrame cell to a
|
|
132
150
|
# string.
|
|
133
151
|
file_body: DataFrame = pandas.read_csv(file_io, header=header_row_index, dtype=dtype(str),
|
|
134
|
-
skip_blank_lines=False, sep=seperator, encoding=encoding
|
|
152
|
+
skip_blank_lines=False, sep=seperator, encoding=encoding,
|
|
153
|
+
keep_default_na=keep_default_na, **kwargs)
|
|
135
154
|
|
|
136
155
|
return file_body, file_metadata
|
|
137
156
|
|
|
138
157
|
@staticmethod
|
|
139
|
-
def xlsx_to_data_frames(file_bytes: bytes, header_row_index: int | None = 0
|
|
140
|
-
|
|
158
|
+
def xlsx_to_data_frames(file_bytes: bytes, header_row_index: int | None = 0, *, keep_default_na: bool = False,
|
|
159
|
+
**kwargs) -> tuple[DataFrame, DataFrame | None]:
|
|
141
160
|
"""
|
|
142
161
|
Parse the file bytes for an XLSX into DataFrames.
|
|
143
162
|
|
|
@@ -146,6 +165,9 @@ class FileUtil:
|
|
|
146
165
|
row is returned in the metadata list. If input is None, then no row is considered to be the header row,
|
|
147
166
|
meaning that required headers are also ignored if any are provided. By default, the first row (0th index)
|
|
148
167
|
is assumed to be the header row.
|
|
168
|
+
:param keep_default_na: If False, values that are recognized as NaN (e.g. N/A, NA, NaN) will remain as strings.
|
|
169
|
+
If True, these values will be converted to a NoneType value.
|
|
170
|
+
:param kwargs: Additional arguments to be passed to the pandas read_excel function.
|
|
149
171
|
:return: A tuple of two DataFrames. The first is the frame for the XLSX table body, while the second is for the
|
|
150
172
|
metadata from above the header row, or None if there is no metadata.
|
|
151
173
|
"""
|
|
@@ -155,12 +177,14 @@ class FileUtil:
|
|
|
155
177
|
# The metadata DataFrame has no headers and only consists of the rows above the header row index.
|
|
156
178
|
# Therefore, we skip every row including and past the header.
|
|
157
179
|
file_metadata = pandas.read_excel(file_io, header=None, dtype=dtype(str),
|
|
158
|
-
skiprows=lambda x: x >= header_row_index
|
|
180
|
+
skiprows=lambda x: x >= header_row_index,
|
|
181
|
+
keep_default_na=keep_default_na, **kwargs)
|
|
159
182
|
with io.BytesIO(file_bytes) as file_io:
|
|
160
183
|
# The use of the dtype argument is to ensure that everything from the file gets read as a string. Added
|
|
161
184
|
# because some numerical values would get ".0" appended to them, even when casting the DataFrame cell to a
|
|
162
185
|
# string.
|
|
163
|
-
file_body: DataFrame = pandas.read_excel(file_io, header=header_row_index, dtype=dtype(str)
|
|
186
|
+
file_body: DataFrame = pandas.read_excel(file_io, header=header_row_index, dtype=dtype(str),
|
|
187
|
+
keep_default_na=keep_default_na, **kwargs)
|
|
164
188
|
|
|
165
189
|
return file_body, file_metadata
|
|
166
190
|
|
|
@@ -255,6 +279,7 @@ class FileUtil:
|
|
|
255
279
|
data_frame = pandas.read_csv(csv, sep=",", header=None)
|
|
256
280
|
|
|
257
281
|
with io.BytesIO() as output:
|
|
282
|
+
# noinspection PyTypeChecker
|
|
258
283
|
with pandas.ExcelWriter(output, engine='xlsxwriter') as writer:
|
|
259
284
|
# Setting header and index to false makes the CSV convert to an XLSX as-is.
|
|
260
285
|
data_frame.to_excel(writer, sheet_name='Sheet1', header=False, index=False)
|
|
@@ -1,5 +1,6 @@
|
|
|
1
1
|
from __future__ import annotations
|
|
2
2
|
|
|
3
|
+
import re
|
|
3
4
|
from abc import abstractmethod
|
|
4
5
|
from typing import Any
|
|
5
6
|
|
|
@@ -9,9 +10,9 @@ from sapiopylib.rest.pojo.datatype.FieldDefinition import VeloxIntegerFieldDefin
|
|
|
9
10
|
AbstractVeloxFieldDefinition
|
|
10
11
|
|
|
11
12
|
from sapiopycommons.callbacks.callback_util import CallbackUtil
|
|
13
|
+
from sapiopycommons.customreport.auto_pagers import QuickReportDictAutoPager
|
|
12
14
|
from sapiopycommons.files.file_data_handler import FileDataHandler, FilterList
|
|
13
15
|
from sapiopycommons.general.aliases import UserIdentifier, AliasUtil
|
|
14
|
-
from sapiopycommons.general.custom_report_util import CustomReportUtil
|
|
15
16
|
from sapiopycommons.general.exceptions import SapioUserCancelledException
|
|
16
17
|
from sapiopycommons.general.time_util import TimeUtil
|
|
17
18
|
|
|
@@ -311,8 +312,8 @@ class MatchesPatternRule(ColumnRule):
|
|
|
311
312
|
"""
|
|
312
313
|
pattern: str
|
|
313
314
|
|
|
314
|
-
def __init__(self, header: str, pattern: str, *, reason: str | None = None,
|
|
315
|
-
blacklist: FilterList = None):
|
|
315
|
+
def __init__(self, header: str, pattern: str | re.Pattern[str], *, reason: str | None = None,
|
|
316
|
+
whitelist: FilterList = None, blacklist: FilterList = None):
|
|
316
317
|
"""
|
|
317
318
|
:param header: The header that this rule acts upon.
|
|
318
319
|
:param pattern: A regex pattern.
|
|
@@ -529,7 +530,7 @@ class UniqueSystemValueRule(ColumnRule):
|
|
|
529
530
|
# Run a quick report for all records of this type that match these field values.
|
|
530
531
|
term = RawReportTerm(self.data_type_name, self.data_field_name, RawTermOperation.EQUAL_TO_OPERATOR,
|
|
531
532
|
"{" + ",".join(values) + "}")
|
|
532
|
-
results: list[dict[str, Any]] =
|
|
533
|
+
results: list[dict[str, Any]] = QuickReportDictAutoPager(self.user, term).get_all_at_once()
|
|
533
534
|
existing_values: list[Any] = [x.get(self.data_field_name) for x in results]
|
|
534
535
|
return file_handler.get_in_list(self.header, existing_values)
|
|
535
536
|
|
|
@@ -563,6 +564,6 @@ class ExistingSystemValueRule(ColumnRule):
|
|
|
563
564
|
# Run a quick report for all records of this type that match these field values.
|
|
564
565
|
term = RawReportTerm(self.data_type_name, self.data_field_name, RawTermOperation.EQUAL_TO_OPERATOR,
|
|
565
566
|
"{" + ",".join(values) + "}")
|
|
566
|
-
results: list[dict[str, Any]] =
|
|
567
|
+
results: list[dict[str, Any]] = QuickReportDictAutoPager(self.user, term).get_all_at_once()
|
|
567
568
|
existing_values: list[Any] = [x.get(self.data_field_name) for x in results]
|
|
568
569
|
return file_handler.get_not_in_list(self.header, existing_values)
|
|
@@ -307,7 +307,7 @@ class FieldColumn(ColumnDef):
|
|
|
307
307
|
elif self.search_order == FieldSearchOrder.BUNDLE_ONLY:
|
|
308
308
|
return row.fields.get(self.field_name)
|
|
309
309
|
elif self.search_order == FieldSearchOrder.RECORD_FIRST:
|
|
310
|
-
fields: dict[str, Any] = AliasUtil.
|
|
310
|
+
fields: dict[str, Any] = AliasUtil.to_field_map(record) if record else {}
|
|
311
311
|
if self.field_name not in fields or (self.skip_none_values and fields.get(self.field_name) is None):
|
|
312
312
|
return row.fields.get(self.field_name)
|
|
313
313
|
return fields.get(self.field_name)
|
|
@@ -2,8 +2,8 @@ from __future__ import annotations
|
|
|
2
2
|
|
|
3
3
|
from weakref import WeakValueDictionary
|
|
4
4
|
|
|
5
|
-
from sapiopylib.rest.User import SapioUser
|
|
6
5
|
from databind.json import dumps
|
|
6
|
+
from sapiopylib.rest.User import SapioUser
|
|
7
7
|
|
|
8
8
|
from sapiopycommons.flowcyto.flowcyto_data import FlowJoWorkspaceInputJson, UploadFCSInputJson, \
|
|
9
9
|
ComputeFlowStatisticsInputJson
|
|
@@ -95,7 +95,7 @@ class AccessionWithPrefixSuffix(AbstractAccessionServiceOperator):
|
|
|
95
95
|
|
|
96
96
|
@property
|
|
97
97
|
def default_accessor_name(self):
|
|
98
|
-
return "PREFIX_AND_SUFFIX" + "(" + self.prefix + "," + self.suffix + ")"
|
|
98
|
+
return "PREFIX_AND_SUFFIX" + "(" + self.prefix + "," + self.suffix + ")"
|
|
99
99
|
|
|
100
100
|
|
|
101
101
|
class AccessionGlobalPrefixSuffix(AbstractAccessionServiceOperator):
|
|
@@ -1,50 +1,50 @@
|
|
|
1
1
|
from collections.abc import Iterable
|
|
2
|
-
from typing import Any
|
|
2
|
+
from typing import Any, TypeAlias
|
|
3
3
|
|
|
4
4
|
from sapiopylib.rest.User import SapioUser
|
|
5
5
|
from sapiopylib.rest.pojo.DataRecord import DataRecord
|
|
6
|
-
from sapiopylib.rest.pojo.datatype.FieldDefinition import FieldType
|
|
6
|
+
from sapiopylib.rest.pojo.datatype.FieldDefinition import FieldType, AbstractVeloxFieldDefinition
|
|
7
7
|
from sapiopylib.rest.pojo.eln.ElnExperiment import ElnExperiment
|
|
8
8
|
from sapiopylib.rest.pojo.eln.ExperimentEntry import ExperimentEntry
|
|
9
9
|
from sapiopylib.rest.pojo.eln.SapioELNEnums import ElnBaseDataType
|
|
10
10
|
from sapiopylib.rest.pojo.webhook.WebhookContext import SapioWebhookContext
|
|
11
11
|
from sapiopylib.rest.utils.Protocols import ElnExperimentProtocol, ElnEntryStep
|
|
12
|
-
from sapiopylib.rest.utils.recordmodel.PyRecordModel import PyRecordModel
|
|
12
|
+
from sapiopylib.rest.utils.recordmodel.PyRecordModel import PyRecordModel, AbstractRecordModel
|
|
13
13
|
from sapiopylib.rest.utils.recordmodel.RecordModelWrapper import WrappedRecordModel, WrappedType, WrapperField
|
|
14
14
|
|
|
15
15
|
from sapiopycommons.general.exceptions import SapioException
|
|
16
16
|
|
|
17
|
-
FieldValue = int | float | str | bool | None
|
|
17
|
+
FieldValue: TypeAlias = int | float | str | bool | None
|
|
18
18
|
"""Allowable values for fields in the system."""
|
|
19
|
-
RecordModel = PyRecordModel | WrappedRecordModel
|
|
19
|
+
RecordModel: TypeAlias = PyRecordModel | AbstractRecordModel | WrappedRecordModel
|
|
20
20
|
"""Different forms that a record model could take."""
|
|
21
|
-
SapioRecord = DataRecord | RecordModel
|
|
21
|
+
SapioRecord: TypeAlias = DataRecord | RecordModel
|
|
22
22
|
"""A record could be provided as either a DataRecord, PyRecordModel, or WrappedRecordModel (WrappedType)."""
|
|
23
|
-
RecordIdentifier = SapioRecord | int
|
|
23
|
+
RecordIdentifier: TypeAlias = SapioRecord | int
|
|
24
24
|
"""A RecordIdentifier is either a record type or an integer for the record's record ID."""
|
|
25
|
-
DataTypeIdentifier = SapioRecord | type[WrappedType] | str
|
|
25
|
+
DataTypeIdentifier: TypeAlias = SapioRecord | type[WrappedType] | str
|
|
26
26
|
"""A DataTypeIdentifier is either a SapioRecord, a record model wrapper type, or a string."""
|
|
27
|
-
FieldIdentifier = WrapperField | str | tuple[str, FieldType]
|
|
27
|
+
FieldIdentifier: TypeAlias = AbstractVeloxFieldDefinition | WrapperField | str | tuple[str, FieldType]
|
|
28
28
|
"""A FieldIdentifier is either wrapper field from a record model wrapper, a string, or a tuple of string
|
|
29
29
|
and field type."""
|
|
30
|
-
FieldIdentifierKey = WrapperField | str
|
|
30
|
+
FieldIdentifierKey: TypeAlias = WrapperField | str
|
|
31
31
|
"""A FieldIdentifierKey is a FieldIdentifier, except it can't be a tuple, s tuples can't be used as keys in
|
|
32
32
|
dictionaries."""
|
|
33
|
-
HasFieldWrappers = type[WrappedType] | WrappedRecordModel
|
|
33
|
+
HasFieldWrappers: TypeAlias = type[WrappedType] | WrappedRecordModel
|
|
34
34
|
"""An identifier for classes that have wrapper fields."""
|
|
35
|
-
ExperimentIdentifier = ElnExperimentProtocol | ElnExperiment | int
|
|
35
|
+
ExperimentIdentifier: TypeAlias = ElnExperimentProtocol | ElnExperiment | int
|
|
36
36
|
"""An ExperimentIdentifier is either an experiment protocol, experiment, or an integer for the experiment's notebook
|
|
37
37
|
ID."""
|
|
38
|
-
ExperimentEntryIdentifier = ElnEntryStep | ExperimentEntry | int
|
|
38
|
+
ExperimentEntryIdentifier: TypeAlias = ElnEntryStep | ExperimentEntry | int
|
|
39
39
|
"""An ExperimentEntryIdentifier is either an ELN entry step, experiment entry, or an integer for the entry's ID."""
|
|
40
|
-
FieldMap = dict[str, FieldValue]
|
|
40
|
+
FieldMap: TypeAlias = dict[str, FieldValue]
|
|
41
41
|
"""A field map is simply a dict of data field names to values. The purpose of aliasing this is to help distinguish
|
|
42
42
|
any random dict in a webhook from one which is explicitly used for record fields."""
|
|
43
|
-
FieldIdentifierMap = dict[FieldIdentifierKey, FieldValue]
|
|
43
|
+
FieldIdentifierMap: TypeAlias = dict[FieldIdentifierKey, FieldValue]
|
|
44
44
|
"""A field identifier map is the same thing as a field map, except the keys can be field identifiers instead
|
|
45
45
|
of just strings. Note that although one of the allowed field identifiers is a tuple, you can't use tuples as
|
|
46
46
|
keys in a dictionary."""
|
|
47
|
-
UserIdentifier = SapioWebhookContext | SapioUser
|
|
47
|
+
UserIdentifier: TypeAlias = SapioWebhookContext | SapioUser
|
|
48
48
|
"""An identifier for classes from which a user object can be used for sending requests."""
|
|
49
49
|
|
|
50
50
|
|
|
@@ -142,23 +142,25 @@ class AliasUtil:
|
|
|
142
142
|
@staticmethod
|
|
143
143
|
def to_data_field_name(value: FieldIdentifier) -> str:
|
|
144
144
|
"""
|
|
145
|
-
Convert
|
|
145
|
+
Convert an object that can be used to identify a data field to a data field name string.
|
|
146
146
|
|
|
147
|
-
:param value:
|
|
147
|
+
:param value: An object that can be used to identify a data field.
|
|
148
148
|
:return: A string of the data field name of the input value.
|
|
149
149
|
"""
|
|
150
150
|
if isinstance(value, tuple):
|
|
151
151
|
return value[0]
|
|
152
152
|
if isinstance(value, WrapperField):
|
|
153
153
|
return value.field_name
|
|
154
|
+
if isinstance(value, AbstractVeloxFieldDefinition):
|
|
155
|
+
return value.data_field_name
|
|
154
156
|
return value
|
|
155
157
|
|
|
156
158
|
@staticmethod
|
|
157
159
|
def to_data_field_names(values: Iterable[FieldIdentifier]) -> list[str]:
|
|
158
160
|
"""
|
|
159
|
-
Convert an iterable of
|
|
161
|
+
Convert an iterable of objects that can be used to identify data fields to a list of data field name strings.
|
|
160
162
|
|
|
161
|
-
:param values: An iterable of
|
|
163
|
+
:param values: An iterable of objects that can be used to identify a data field.
|
|
162
164
|
:return: A list of strings of the data field names of the input values.
|
|
163
165
|
"""
|
|
164
166
|
return [AliasUtil.to_data_field_name(x) for x in values]
|
|
@@ -205,20 +207,38 @@ class AliasUtil:
|
|
|
205
207
|
f"field with the name \"{field}\",")
|
|
206
208
|
|
|
207
209
|
@staticmethod
|
|
208
|
-
def
|
|
210
|
+
def to_field_map(record: SapioRecord, include_record_id: bool = False) -> FieldMap:
|
|
209
211
|
"""
|
|
210
|
-
Convert a
|
|
211
|
-
|
|
212
|
+
Convert a given record value to a field map.
|
|
213
|
+
|
|
214
|
+
:param record: A record which is a DataRecord, PyRecordModel, or WrappedRecordModel.
|
|
215
|
+
:param include_record_id: If true, include the record ID of the record in the field map using the RecordId key.
|
|
216
|
+
:return: The field map for the input record.
|
|
217
|
+
"""
|
|
218
|
+
if isinstance(record, DataRecord):
|
|
219
|
+
# noinspection PyTypeChecker
|
|
220
|
+
fields: FieldMap = record.get_fields()
|
|
221
|
+
else:
|
|
222
|
+
fields: FieldMap = record.fields.copy_to_dict()
|
|
223
|
+
# PR-47457: Only include the record ID if the caller requests it, since including the record ID can break
|
|
224
|
+
# callbacks in certain circumstances if the record ID is negative.
|
|
225
|
+
if include_record_id:
|
|
226
|
+
fields["RecordId"] = AliasUtil.to_record_id(record)
|
|
227
|
+
return fields
|
|
228
|
+
|
|
229
|
+
@staticmethod
|
|
230
|
+
def to_field_map_list(records: Iterable[SapioRecord], include_record_id: bool = False) -> list[FieldMap]:
|
|
231
|
+
"""
|
|
232
|
+
Convert a list of variables that could either be DataRecords, PyRecordModels, or WrappedRecordModels
|
|
233
|
+
to a list of their field maps. This includes the given RecordId of the given records.
|
|
212
234
|
|
|
235
|
+
:param records: An iterable of records which are DataRecords, PyRecordModels, or WrappedRecordModels.
|
|
236
|
+
:param include_record_id: If true, include the record ID of the records in the field map using the RecordId key.
|
|
213
237
|
:return: A list of field maps for the input records.
|
|
214
238
|
"""
|
|
215
239
|
field_map_list: list[FieldMap] = []
|
|
216
240
|
for record in records:
|
|
217
|
-
|
|
218
|
-
# noinspection PyTypeChecker
|
|
219
|
-
field_map_list.append(record.get_fields())
|
|
220
|
-
else:
|
|
221
|
-
field_map_list.append(record.fields.copy_to_dict())
|
|
241
|
+
field_map_list.append(AliasUtil.to_field_map(record, include_record_id))
|
|
222
242
|
return field_map_list
|
|
223
243
|
|
|
224
244
|
@staticmethod
|
|
@@ -3,11 +3,11 @@ from enum import Enum
|
|
|
3
3
|
from sapiopylib.rest.User import SapioUser
|
|
4
4
|
from sapiopylib.rest.pojo.CustomReport import ReportColumn, CustomReportCriteria
|
|
5
5
|
|
|
6
|
+
from sapiopycommons.customreport.auto_pagers import CustomReportDictAutoPager
|
|
6
7
|
from sapiopycommons.customreport.column_builder import ColumnBuilder
|
|
7
8
|
from sapiopycommons.customreport.term_builder import TermBuilder
|
|
8
9
|
from sapiopycommons.datatype.pseudo_data_types import AuditLogPseudoDef
|
|
9
10
|
from sapiopycommons.general.aliases import RecordIdentifier, AliasUtil, UserIdentifier, FieldIdentifier, FieldValue
|
|
10
|
-
from sapiopycommons.general.custom_report_util import CustomReportUtil
|
|
11
11
|
|
|
12
12
|
|
|
13
13
|
class EventType(Enum):
|
|
@@ -164,7 +164,7 @@ class AuditLogUtil:
|
|
|
164
164
|
criteria = AuditLogUtil.create_data_record_audit_log_report(records, fields)
|
|
165
165
|
|
|
166
166
|
# Then we must run the custom report using that criteria.
|
|
167
|
-
raw_report_data: list[dict[str, FieldValue]] =
|
|
167
|
+
raw_report_data: list[dict[str, FieldValue]] = CustomReportDictAutoPager(self.user, criteria).get_all_at_once()
|
|
168
168
|
|
|
169
169
|
# This section will prepare a map matching the original RecordIdentifier by record id.
|
|
170
170
|
# This is because the audit log entries will have record ids, but we want the keys in our result map
|
|
@@ -1,3 +1,4 @@
|
|
|
1
|
+
import warnings
|
|
1
2
|
from collections.abc import Iterable
|
|
2
3
|
|
|
3
4
|
from sapiopylib.rest.DataMgmtService import DataMgmtServer
|
|
@@ -40,6 +41,7 @@ class CustomReportUtil:
|
|
|
40
41
|
had a Sample column with a data field name of Identifier and a Request column with the same data field name,
|
|
41
42
|
then the dictionary keys for these columns would be Sample.Identifier and Request.Identifier respectively.
|
|
42
43
|
"""
|
|
44
|
+
warnings.warn("Deprecated in favor of the SystemReportDictAutoPager class.", DeprecationWarning)
|
|
43
45
|
results: tuple = CustomReportUtil._exhaust_system_report(context, report_name, page_limit,
|
|
44
46
|
page_size, page_number)
|
|
45
47
|
columns: list[ReportColumn] = results[0]
|
|
@@ -82,6 +84,7 @@ class CustomReportUtil:
|
|
|
82
84
|
had a Sample column with a data field name of Identifier and a Request column with the same data field name,
|
|
83
85
|
then the dictionary keys for these columns would be Sample.Identifier and Request.Identifier respectively.
|
|
84
86
|
"""
|
|
87
|
+
warnings.warn("Deprecated in favor of the CustomReportDictAutoPager class.", DeprecationWarning)
|
|
85
88
|
results: tuple = CustomReportUtil._exhaust_custom_report(context, report_criteria, page_limit,
|
|
86
89
|
page_size, page_number)
|
|
87
90
|
columns: list[ReportColumn] = results[0]
|
|
@@ -117,6 +120,7 @@ class CustomReportUtil:
|
|
|
117
120
|
:return: The results of the report listed row by row, mapping each cell to the header it is under. The header
|
|
118
121
|
values in the dicts are the data field names of the columns.
|
|
119
122
|
"""
|
|
123
|
+
warnings.warn("Deprecated in favor of the QuickReportDictAutoPager class.", DeprecationWarning)
|
|
120
124
|
results: tuple = CustomReportUtil._exhaust_quick_report(context, report_term, page_limit,
|
|
121
125
|
page_size, page_number)
|
|
122
126
|
columns: list[ReportColumn] = results[0]
|
|
@@ -127,7 +131,8 @@ class CustomReportUtil:
|
|
|
127
131
|
def get_system_report_criteria(context: UserIdentifier, report_name: str) -> CustomReport:
|
|
128
132
|
"""
|
|
129
133
|
Retrieve a custom report from the system given the name of the report. This works by querying the system report
|
|
130
|
-
with a page number and size of 1 to minimize the amount of data transfer needed to retrieve the
|
|
134
|
+
with a page number of 0 and page size of 1 to minimize the amount of data transfer needed to retrieve the
|
|
135
|
+
report's config.
|
|
131
136
|
|
|
132
137
|
System reports are also known as predefined searches in the system and must be defined in the data designer for
|
|
133
138
|
a specific data type. That is, saved searches created by users cannot be run using this function.
|
|
@@ -143,6 +148,24 @@ class CustomReportUtil:
|
|
|
143
148
|
report_man = DataMgmtServer.get_custom_report_manager(user)
|
|
144
149
|
return report_man.run_system_report_by_name(report_name, 1, 0)
|
|
145
150
|
|
|
151
|
+
@staticmethod
|
|
152
|
+
def get_quick_report_criteria(context: UserIdentifier, report_term: RawReportTerm) -> CustomReport:
|
|
153
|
+
"""
|
|
154
|
+
Retrieve a quick report from the system given a report term. This works by making a quick report query
|
|
155
|
+
with a page number of 0 and page size of 1 to minimize the amount of data transfer needed to retrieve the
|
|
156
|
+
report's config.
|
|
157
|
+
|
|
158
|
+
Using this, you can add to the root term of the search to then run a new search, or provide it to client
|
|
159
|
+
callbacks or directives that take CustomReports.
|
|
160
|
+
|
|
161
|
+
:param context: The current webhook context or a user object to send requests from.
|
|
162
|
+
:param report_term: The raw report term to use for the quick report.
|
|
163
|
+
:return: The CustomReport object for the given report term.
|
|
164
|
+
"""
|
|
165
|
+
user: SapioUser = AliasUtil.to_sapio_user(context)
|
|
166
|
+
report_man = DataMgmtServer.get_custom_report_manager(user)
|
|
167
|
+
return report_man.run_quick_report(report_term, 1, 0)
|
|
168
|
+
|
|
146
169
|
@staticmethod
|
|
147
170
|
def _exhaust_system_report(context: UserIdentifier,
|
|
148
171
|
report_name: str,
|
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
from typing import Iterable, cast
|
|
2
|
+
|
|
3
|
+
from sapiopylib.rest.User import SapioUser
|
|
4
|
+
from sapiopylib.rest.pojo.CustomReport import CustomReportCriteria, CustomReport
|
|
5
|
+
from sapiopylib.rest.pojo.webhook.WebhookDirective import HomePageDirective, FormDirective, TableDirective, \
|
|
6
|
+
CustomReportDirective, ElnExperimentDirective, ExperimentEntryDirective
|
|
7
|
+
|
|
8
|
+
from sapiopycommons.general.aliases import SapioRecord, AliasUtil, ExperimentIdentifier, ExperimentEntryIdentifier, \
|
|
9
|
+
UserIdentifier
|
|
10
|
+
from sapiopycommons.general.custom_report_util import CustomReportUtil
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
# FR-47392: Create a DirectiveUtil class to simplify the creation of directives.
|
|
14
|
+
class DirectiveUtil:
    """
    DirectiveUtil is a class for creating webhook directives. The utility functions reduce the provided variables
    down to the exact type that the directives require, removing the need for the caller to handle the conversion.
    """
    # The user that requests are sent on behalf of (used for report lookups).
    user: SapioUser

    def __init__(self, context: UserIdentifier):
        """
        :param context: The current webhook context or a user object to send requests from.
        """
        self.user = AliasUtil.to_sapio_user(context)

    @staticmethod
    def homepage() -> HomePageDirective:
        """
        :return: A directive that sends the user back to their home page.
        """
        return HomePageDirective()

    @staticmethod
    def record_form(record: SapioRecord) -> FormDirective:
        """
        :param record: A record in the system.
        :return: A directive that sends the user to a specific data record form.
        """
        data_record = AliasUtil.to_data_record(record)
        return FormDirective(data_record)

    @staticmethod
    def record_table(records: Iterable[SapioRecord]) -> TableDirective:
        """
        :param records: A list of records in the system.
        :return: A directive that sends the user to a table of data records.
        """
        data_records = AliasUtil.to_data_records(records)
        return TableDirective(data_records)

    @staticmethod
    def record_adaptive(records: Iterable[SapioRecord]) -> TableDirective | FormDirective:
        """
        :param records: A list of records in the system.
        :return: A directive that sends the user to a table of data records if there are multiple records,
            or a directive that sends the user to a specific data record form if there is only one record.
        """
        # Materialize the iterable so it can be both sized and indexed.
        record_list: list[SapioRecord] = list(records)
        if len(record_list) != 1:
            return DirectiveUtil.record_table(record_list)
        return DirectiveUtil.record_form(record_list[0])

    def custom_report(self, report: CustomReport | CustomReportCriteria | str) -> CustomReportDirective:
        """
        :param report: A custom report, the criteria for a custom report, or the name of a system report.
        :return: A directive that sends the user to the results of the provided custom report.
        """
        # A string is treated as a system report name and resolved to its criteria.
        if isinstance(report, str):
            report = CustomReportUtil.get_system_report_criteria(self.user, report)
        return CustomReportDirective(cast(CustomReport, report))

    @staticmethod
    def eln_experiment(experiment: ExperimentIdentifier) -> ElnExperimentDirective:
        """
        :param experiment: An identifier for an experiment.
        :return: A directive that sends the user to the ELN experiment.
        """
        notebook_id = AliasUtil.to_notebook_id(experiment)
        return ElnExperimentDirective(notebook_id)

    @staticmethod
    def eln_entry(experiment: ExperimentIdentifier, entry: ExperimentEntryIdentifier) -> ExperimentEntryDirective:
        """
        :param experiment: An identifier for an experiment.
        :param entry: An identifier for an entry in the experiment.
        :return: A directive that sends the user to the provided experiment entry within its ELN experiment.
        """
        notebook_id = AliasUtil.to_notebook_id(experiment)
        entry_id = AliasUtil.to_entry_id(entry)
        return ExperimentEntryDirective(notebook_id, entry_id)
|
|
@@ -1,3 +1,20 @@
|
|
|
1
|
+
from enum import Enum
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
class MessageDisplayType(Enum):
    """
    An enum representing the different ways in which a message can be displayed to the user.
    """
    # Toaster-style notifications, presumably differing only in severity styling
    # (success/info/warning/error) — actual rendering is decided by the consumer of this enum.
    TOASTER_SUCCESS = 0
    TOASTER_INFO = 1
    TOASTER_WARNING = 2
    TOASTER_ERROR = 3
    # A dialog the user acknowledges with an OK button (per the member name).
    OK_DIALOG = 4
    # Non-toaster display messages by severity (per the member names).
    DISPLAY_INFO = 5
    DISPLAY_WARNING = 6
    DISPLAY_ERROR = 7
|
|
16
|
+
|
|
17
|
+
|
|
1
18
|
# FR-46064 - Initial port of PyWebhookUtils to sapiopycommons.
|
|
2
19
|
class SapioException(Exception):
|
|
3
20
|
"""
|
|
@@ -29,7 +46,29 @@ class SapioDialogTimeoutException(SapioException):
|
|
|
29
46
|
pass
|
|
30
47
|
|
|
31
48
|
|
|
32
|
-
class
|
|
49
|
+
class DisplayableException(SapioException):
    """
    A generic exception that promises to return a user-friendly message explaining the error that should be displayed to
    the user. Note that it is up to whichever class that catches this exception to actually display the message.
    """
    # The user-facing message describing the error.
    msg: str
    # How the message should be shown, or None to let the catching class decide.
    display_type: MessageDisplayType | None
    # An optional title for display types that support one, or None to let the catching class decide.
    title: str | None

    def __init__(self, msg: str, display_type: MessageDisplayType | None = None, title: str | None = None):
        """
        :param msg: The message that should be displayed to the user.
        :param display_type: The manner in which the message should be displayed. If None, then the display type should
            be controlled by the class that catches this exception.
        :param title: If the display type is able to have a title, this is the title that will be displayed. If None,
            then the title should be controlled by the class that catches this exception.
        """
        # Pass only the message to the Exception base class. Without this call,
        # BaseException.__new__ leaves args as the full constructor tuple, so
        # str(e) for DisplayableException("x", MessageDisplayType.OK_DIALOG) would
        # render the tuple repr instead of just "x" in logs and tracebacks.
        super().__init__(msg)
        self.msg = msg
        self.display_type = display_type
        self.title = title
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
class SapioUserErrorException(DisplayableException):
|
|
33
72
|
"""
|
|
34
73
|
An exception caused by user error (e.g. user provided a CSV when an XLSX was expected), which promises to return a
|
|
35
74
|
user-friendly message explaining the error that should be displayed to the user.
|
|
@@ -39,7 +78,7 @@ class SapioUserErrorException(SapioException):
|
|
|
39
78
|
pass
|
|
40
79
|
|
|
41
80
|
|
|
42
|
-
class SapioCriticalErrorException(
|
|
81
|
+
class SapioCriticalErrorException(DisplayableException):
|
|
43
82
|
"""
|
|
44
83
|
A critical exception caused by user error, which promises to return a user-friendly message explaining the error
|
|
45
84
|
that should be displayed to the user.
|
|
@@ -311,7 +311,7 @@ class PopupUtil:
|
|
|
311
311
|
raise SapioException("Multiple data type names encountered in records list for record table popup.")
|
|
312
312
|
data_type: str = data_types.pop()
|
|
313
313
|
# Get the field maps from the records.
|
|
314
|
-
field_map_list: list[FieldMap] = AliasUtil.
|
|
314
|
+
field_map_list: list[FieldMap] = AliasUtil.to_field_map_list(records)
|
|
315
315
|
# Get the field definitions of the data type.
|
|
316
316
|
type_man = DataMgmtServer.get_data_type_manager(context.user)
|
|
317
317
|
type_def: DataTypeDefinition = type_man.get_data_type_definition(data_type)
|
|
@@ -366,7 +366,7 @@ class PopupUtil:
|
|
|
366
366
|
raise SapioException("Multiple data type names encountered in records list for record table popup.")
|
|
367
367
|
data_type: str = data_types.pop()
|
|
368
368
|
# Get the field maps from the records.
|
|
369
|
-
field_map_list: list[FieldMap] = AliasUtil.
|
|
369
|
+
field_map_list: list[FieldMap] = AliasUtil.to_field_map_list(records)
|
|
370
370
|
# Get the field definitions of the data type.
|
|
371
371
|
type_man = DataMgmtServer.get_data_type_manager(context.user)
|
|
372
372
|
type_def: DataTypeDefinition = type_man.get_data_type_definition(data_type)
|
|
@@ -6,6 +6,7 @@ from weakref import WeakValueDictionary
|
|
|
6
6
|
|
|
7
7
|
from databind.json import dumps, loads
|
|
8
8
|
from sapiopylib.rest.User import SapioUser
|
|
9
|
+
from sapiopylib.rest.pojo.DataRecord import DataRecord
|
|
9
10
|
|
|
10
11
|
from sapiopycommons.general.exceptions import SapioException
|
|
11
12
|
from sapiopycommons.multimodal.multimodal_data import *
|