sapiopycommons 2025.3.6a453__py3-none-any.whl → 2025.3.10a455__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of sapiopycommons might be problematic. Click here for more details.

Files changed (33):
  1. sapiopycommons/callbacks/callback_util.py +366 -1220
  2. sapiopycommons/chem/Molecules.py +2 -0
  3. sapiopycommons/datatype/data_fields.py +1 -1
  4. sapiopycommons/eln/experiment_handler.py +1 -2
  5. sapiopycommons/eln/experiment_report_util.py +7 -7
  6. sapiopycommons/files/file_bridge.py +0 -76
  7. sapiopycommons/files/file_bridge_handler.py +110 -325
  8. sapiopycommons/files/file_data_handler.py +2 -2
  9. sapiopycommons/files/file_util.py +11 -36
  10. sapiopycommons/files/file_validator.py +5 -6
  11. sapiopycommons/files/file_writer.py +1 -1
  12. sapiopycommons/flowcyto/flow_cyto.py +1 -1
  13. sapiopycommons/general/accession_service.py +1 -1
  14. sapiopycommons/general/aliases.py +28 -48
  15. sapiopycommons/general/audit_log.py +2 -2
  16. sapiopycommons/general/custom_report_util.py +1 -24
  17. sapiopycommons/general/exceptions.py +2 -41
  18. sapiopycommons/general/popup_util.py +2 -2
  19. sapiopycommons/general/sapio_links.py +4 -12
  20. sapiopycommons/multimodal/multimodal.py +0 -1
  21. sapiopycommons/processtracking/custom_workflow_handler.py +3 -3
  22. sapiopycommons/recordmodel/record_handler.py +108 -156
  23. sapiopycommons/webhook/webhook_handlers.py +55 -445
  24. {sapiopycommons-2025.3.6a453.dist-info → sapiopycommons-2025.3.10a455.dist-info}/METADATA +1 -1
  25. {sapiopycommons-2025.3.6a453.dist-info → sapiopycommons-2025.3.10a455.dist-info}/RECORD +27 -33
  26. sapiopycommons/ai/__init__.py +0 -0
  27. sapiopycommons/ai/tool_of_tools.py +0 -917
  28. sapiopycommons/customreport/auto_pagers.py +0 -278
  29. sapiopycommons/general/directive_util.py +0 -86
  30. sapiopycommons/general/html_formatter.py +0 -456
  31. sapiopycommons/samples/aliquot.py +0 -48
  32. {sapiopycommons-2025.3.6a453.dist-info → sapiopycommons-2025.3.10a455.dist-info}/WHEEL +0 -0
  33. {sapiopycommons-2025.3.6a453.dist-info → sapiopycommons-2025.3.10a455.dist-info}/licenses/LICENSE +0 -0
@@ -21,14 +21,10 @@ class FileUtil:
21
21
  Utilities for the handling of files, including the requesting of files from the user and the parsing of files into
22
22
  tokenized lists. Makes use of Pandas DataFrames for any file parsing purposes.
23
23
  """
24
- # PR-47433: Add a keep_default_na argument to FileUtil.tokenize_csv and FileUtil.tokenize_xlsx so that N/A values
25
- # don't get returned as NoneType, and add **kwargs in case any other Pandas input parameters need changed by the
26
- # caller.
27
24
  @staticmethod
28
25
  def tokenize_csv(file_bytes: bytes, required_headers: list[str] | None = None, header_row_index: int | None = 0,
29
26
  seperator: str = ",", *, encoding: str | None = None, encoding_error: str | None = "strict",
30
- exception_on_empty: bool = True, keep_default_na: bool = False, **kwargs) \
31
- -> tuple[list[dict[str, str]], list[list[str]]]:
27
+ exception_on_empty: bool = True) -> tuple[list[dict[str, str]], list[list[str]]]:
32
28
  """
33
29
  Tokenize a CSV file. The provided file must be uniform. That is, if row 1 has 10 cells, all the rows in the file
34
30
  must have 10 cells. Otherwise, the Pandas parser throws a tokenizer exception.
@@ -50,9 +46,6 @@ class FileUtil:
50
46
  https://docs.python.org/3/library/codecs.html#error-handlers
51
47
  :param exception_on_empty: Throw a user error exception if the provided file bytes result in an empty list in
52
48
  the first element of the returned tuple.
53
- :param keep_default_na: If False, values that are recognized as NaN (e.g. N/A, NA, NaN) will remain as strings.
54
- If True, these values will be converted to a NoneType value.
55
- :param kwargs: Additional arguments to be passed to the pandas read_csv function.
56
49
  :return: The CSV parsed into a list of dicts where each dict is a row, mapping the headers to the cells for
57
50
  that row. Also returns a list of each row above the headers (the metadata), parsed into a list of each cell.
58
51
  If the header row index is 0 or None, this list will be empty.
@@ -60,8 +53,7 @@ class FileUtil:
60
53
  # Parse the file bytes into two DataFrames. The first is metadata of the file located above the header row,
61
54
  # while the second is the body of the file below the header row.
62
55
  file_body, file_metadata = FileUtil.csv_to_data_frames(file_bytes, header_row_index, seperator,
63
- encoding=encoding, encoding_error=encoding_error,
64
- keep_default_na=keep_default_na, **kwargs)
56
+ encoding=encoding, encoding_error=encoding_error)
65
57
  # Parse the metadata from above the header row index into a list of lists.
66
58
  metadata: list[list[str]] = FileUtil.data_frame_to_lists(file_metadata)
67
59
  # Parse the data from the file body into a list of dicts.
@@ -72,8 +64,7 @@ class FileUtil:
72
64
 
73
65
  @staticmethod
74
66
  def tokenize_xlsx(file_bytes: bytes, required_headers: list[str] | None = None, header_row_index: int | None = 0,
75
- *, exception_on_empty: bool = True, keep_default_na: bool = False, **kwargs) \
76
- -> tuple[list[dict[str, str]], list[list[str]]]:
67
+ *, exception_on_empty: bool = True) -> tuple[list[dict[str, str]], list[list[str]]]:
77
68
  """
78
69
  Tokenize an XLSX file row by row.
79
70
 
@@ -86,17 +77,13 @@ class FileUtil:
86
77
  is assumed to be the header row.
87
78
  :param exception_on_empty: Throw a user error exception if the provided file bytes result in an empty list in
88
79
  the first element of the returned tuple.
89
- :param keep_default_na: If False, values that are recognized as NaN (e.g. N/A, NA, NaN) will remain as strings.
90
- If True, these values will be converted to a NoneType value.
91
- :param kwargs: Additional arguments to be passed to the pandas read_excel function.
92
80
  :return: The XLSX parsed into a list of dicts where each dict is a row, mapping the headers to the cells for
93
81
  that row. Also returns a list of each row above the headers (the metadata), parsed into a list of each cell.
94
82
  If the header row index is 0 or None, this list will be empty.
95
83
  """
96
84
  # Parse the file bytes into two DataFrames. The first is metadata of the file located above the header row,
97
85
  # while the second is the body of the file below the header row.
98
- file_body, file_metadata = FileUtil.xlsx_to_data_frames(file_bytes, header_row_index,
99
- keep_default_na=keep_default_na, **kwargs)
86
+ file_body, file_metadata = FileUtil.xlsx_to_data_frames(file_bytes, header_row_index)
100
87
  # Parse the metadata from above the header row index into a list of lists.
101
88
  metadata: list[list[str]] = FileUtil.data_frame_to_lists(file_metadata)
102
89
  # Parse the data from the file body into a list of dicts.
@@ -107,8 +94,7 @@ class FileUtil:
107
94
 
108
95
  @staticmethod
109
96
  def csv_to_data_frames(file_bytes: bytes, header_row_index: int | None = 0, seperator: str = ",",
110
- *, encoding: str | None = None, encoding_error: str | None = "strict",
111
- keep_default_na: bool = False, **kwargs) \
97
+ *, encoding: str | None = None, encoding_error: str | None = "strict") \
112
98
  -> tuple[DataFrame, DataFrame | None]:
113
99
  """
114
100
  Parse the file bytes for a CSV into DataFrames. The provided file must be uniform. That is, if row 1 has 10
@@ -127,9 +113,6 @@ class FileUtil:
127
113
  is "strict", meaning that encoding errors raise an exception. Change this to "ignore" to skip over invalid
128
114
  characters or "replace" to replace invalid characters with a ? character. For a full list of options, see
129
115
  https://docs.python.org/3/library/codecs.html#error-handlers
130
- :param keep_default_na: If False, values that are recognized as NaN (e.g. N/A, NA, NaN) will remain as strings.
131
- If True, these values will be converted to a NoneType value.
132
- :param kwargs: Additional arguments to be passed to the pandas read_csv function.
133
116
  :return: A tuple of two DataFrames. The first is the frame for the CSV table body, while the second is for the
134
117
  metadata from above the header row, or None if there is no metadata.
135
118
  """
@@ -142,21 +125,19 @@ class FileUtil:
142
125
  file_metadata = pandas.read_csv(file_io, header=None, dtype=dtype(str),
143
126
  skiprows=lambda x: x >= header_row_index,
144
127
  skip_blank_lines=False, sep=seperator, encoding=encoding,
145
- encoding_errors=encoding_error, keep_default_na=keep_default_na,
146
- **kwargs)
128
+ encoding_errors=encoding_error)
147
129
  with io.BytesIO(file_bytes) as file_io:
148
130
  # The use of the dtype argument is to ensure that everything from the file gets read as a string. Added
149
131
  # because some numerical values would get ".0" appended to them, even when casting the DataFrame cell to a
150
132
  # string.
151
133
  file_body: DataFrame = pandas.read_csv(file_io, header=header_row_index, dtype=dtype(str),
152
- skip_blank_lines=False, sep=seperator, encoding=encoding,
153
- keep_default_na=keep_default_na, **kwargs)
134
+ skip_blank_lines=False, sep=seperator, encoding=encoding)
154
135
 
155
136
  return file_body, file_metadata
156
137
 
157
138
  @staticmethod
158
- def xlsx_to_data_frames(file_bytes: bytes, header_row_index: int | None = 0, *, keep_default_na: bool = False,
159
- **kwargs) -> tuple[DataFrame, DataFrame | None]:
139
+ def xlsx_to_data_frames(file_bytes: bytes, header_row_index: int | None = 0) \
140
+ -> tuple[DataFrame, DataFrame | None]:
160
141
  """
161
142
  Parse the file bytes for an XLSX into DataFrames.
162
143
 
@@ -165,9 +146,6 @@ class FileUtil:
165
146
  row is returned in the metadata list. If input is None, then no row is considered to be the header row,
166
147
  meaning that required headers are also ignored if any are provided. By default, the first row (0th index)
167
148
  is assumed to be the header row.
168
- :param keep_default_na: If False, values that are recognized as NaN (e.g. N/A, NA, NaN) will remain as strings.
169
- If True, these values will be converted to a NoneType value.
170
- :param kwargs: Additional arguments to be passed to the pandas read_excel function.
171
149
  :return: A tuple of two DataFrames. The first is the frame for the XLSX table body, while the second is for the
172
150
  metadata from above the header row, or None if there is no metadata.
173
151
  """
@@ -177,14 +155,12 @@ class FileUtil:
177
155
  # The metadata DataFrame has no headers and only consists of the rows above the header row index.
178
156
  # Therefore, we skip every row including and past the header.
179
157
  file_metadata = pandas.read_excel(file_io, header=None, dtype=dtype(str),
180
- skiprows=lambda x: x >= header_row_index,
181
- keep_default_na=keep_default_na, **kwargs)
158
+ skiprows=lambda x: x >= header_row_index)
182
159
  with io.BytesIO(file_bytes) as file_io:
183
160
  # The use of the dtype argument is to ensure that everything from the file gets read as a string. Added
184
161
  # because some numerical values would get ".0" appended to them, even when casting the DataFrame cell to a
185
162
  # string.
186
- file_body: DataFrame = pandas.read_excel(file_io, header=header_row_index, dtype=dtype(str),
187
- keep_default_na=keep_default_na, **kwargs)
163
+ file_body: DataFrame = pandas.read_excel(file_io, header=header_row_index, dtype=dtype(str))
188
164
 
189
165
  return file_body, file_metadata
190
166
 
@@ -279,7 +255,6 @@ class FileUtil:
279
255
  data_frame = pandas.read_csv(csv, sep=",", header=None)
280
256
 
281
257
  with io.BytesIO() as output:
282
- # noinspection PyTypeChecker
283
258
  with pandas.ExcelWriter(output, engine='xlsxwriter') as writer:
284
259
  # Setting header and index to false makes the CSV convert to an XLSX as-is.
285
260
  data_frame.to_excel(writer, sheet_name='Sheet1', header=False, index=False)
@@ -1,6 +1,5 @@
1
1
  from __future__ import annotations
2
2
 
3
- import re
4
3
  from abc import abstractmethod
5
4
  from typing import Any
6
5
 
@@ -10,9 +9,9 @@ from sapiopylib.rest.pojo.datatype.FieldDefinition import VeloxIntegerFieldDefin
10
9
  AbstractVeloxFieldDefinition
11
10
 
12
11
  from sapiopycommons.callbacks.callback_util import CallbackUtil
13
- from sapiopycommons.customreport.auto_pagers import QuickReportDictAutoPager
14
12
  from sapiopycommons.files.file_data_handler import FileDataHandler, FilterList
15
13
  from sapiopycommons.general.aliases import UserIdentifier, AliasUtil
14
+ from sapiopycommons.general.custom_report_util import CustomReportUtil
16
15
  from sapiopycommons.general.exceptions import SapioUserCancelledException
17
16
  from sapiopycommons.general.time_util import TimeUtil
18
17
 
@@ -312,8 +311,8 @@ class MatchesPatternRule(ColumnRule):
312
311
  """
313
312
  pattern: str
314
313
 
315
- def __init__(self, header: str, pattern: str | re.Pattern[str], *, reason: str | None = None,
316
- whitelist: FilterList = None, blacklist: FilterList = None):
314
+ def __init__(self, header: str, pattern: str, *, reason: str | None = None, whitelist: FilterList = None,
315
+ blacklist: FilterList = None):
317
316
  """
318
317
  :param header: The header that this rule acts upon.
319
318
  :param pattern: A regex pattern.
@@ -530,7 +529,7 @@ class UniqueSystemValueRule(ColumnRule):
530
529
  # Run a quick report for all records of this type that match these field values.
531
530
  term = RawReportTerm(self.data_type_name, self.data_field_name, RawTermOperation.EQUAL_TO_OPERATOR,
532
531
  "{" + ",".join(values) + "}")
533
- results: list[dict[str, Any]] = QuickReportDictAutoPager(self.user, term).get_all_at_once()
532
+ results: list[dict[str, Any]] = CustomReportUtil.run_quick_report(self.user, term)
534
533
  existing_values: list[Any] = [x.get(self.data_field_name) for x in results]
535
534
  return file_handler.get_in_list(self.header, existing_values)
536
535
 
@@ -564,6 +563,6 @@ class ExistingSystemValueRule(ColumnRule):
564
563
  # Run a quick report for all records of this type that match these field values.
565
564
  term = RawReportTerm(self.data_type_name, self.data_field_name, RawTermOperation.EQUAL_TO_OPERATOR,
566
565
  "{" + ",".join(values) + "}")
567
- results: list[dict[str, Any]] = QuickReportDictAutoPager(self.user, term).get_all_at_once()
566
+ results: list[dict[str, Any]] = CustomReportUtil.run_quick_report(self.user, term)
568
567
  existing_values: list[Any] = [x.get(self.data_field_name) for x in results]
569
568
  return file_handler.get_not_in_list(self.header, existing_values)
@@ -307,7 +307,7 @@ class FieldColumn(ColumnDef):
307
307
  elif self.search_order == FieldSearchOrder.BUNDLE_ONLY:
308
308
  return row.fields.get(self.field_name)
309
309
  elif self.search_order == FieldSearchOrder.RECORD_FIRST:
310
- fields: dict[str, Any] = AliasUtil.to_field_map(record) if record else {}
310
+ fields: dict[str, Any] = AliasUtil.to_field_map_lists([record])[0] if record else {}
311
311
  if self.field_name not in fields or (self.skip_none_values and fields.get(self.field_name) is None):
312
312
  return row.fields.get(self.field_name)
313
313
  return fields.get(self.field_name)
@@ -2,8 +2,8 @@ from __future__ import annotations
2
2
 
3
3
  from weakref import WeakValueDictionary
4
4
 
5
- from databind.json import dumps
6
5
  from sapiopylib.rest.User import SapioUser
6
+ from databind.json import dumps
7
7
 
8
8
  from sapiopycommons.flowcyto.flowcyto_data import FlowJoWorkspaceInputJson, UploadFCSInputJson, \
9
9
  ComputeFlowStatisticsInputJson
@@ -95,7 +95,7 @@ class AccessionWithPrefixSuffix(AbstractAccessionServiceOperator):
95
95
 
96
96
  @property
97
97
  def default_accessor_name(self):
98
- return "PREFIX_AND_SUFFIX" + "(" + self.prefix + "," + self.suffix + ")"
98
+ return "PREFIX_AND_SUFFIX" + "(" + self.prefix + "," + self.suffix + ")";
99
99
 
100
100
 
101
101
  class AccessionGlobalPrefixSuffix(AbstractAccessionServiceOperator):
@@ -1,50 +1,50 @@
1
1
  from collections.abc import Iterable
2
- from typing import Any, TypeAlias
2
+ from typing import Any
3
3
 
4
4
  from sapiopylib.rest.User import SapioUser
5
5
  from sapiopylib.rest.pojo.DataRecord import DataRecord
6
- from sapiopylib.rest.pojo.datatype.FieldDefinition import FieldType, AbstractVeloxFieldDefinition
6
+ from sapiopylib.rest.pojo.datatype.FieldDefinition import FieldType
7
7
  from sapiopylib.rest.pojo.eln.ElnExperiment import ElnExperiment
8
8
  from sapiopylib.rest.pojo.eln.ExperimentEntry import ExperimentEntry
9
9
  from sapiopylib.rest.pojo.eln.SapioELNEnums import ElnBaseDataType
10
10
  from sapiopylib.rest.pojo.webhook.WebhookContext import SapioWebhookContext
11
11
  from sapiopylib.rest.utils.Protocols import ElnExperimentProtocol, ElnEntryStep
12
- from sapiopylib.rest.utils.recordmodel.PyRecordModel import PyRecordModel, AbstractRecordModel
12
+ from sapiopylib.rest.utils.recordmodel.PyRecordModel import PyRecordModel
13
13
  from sapiopylib.rest.utils.recordmodel.RecordModelWrapper import WrappedRecordModel, WrappedType, WrapperField
14
14
 
15
15
  from sapiopycommons.general.exceptions import SapioException
16
16
 
17
- FieldValue: TypeAlias = int | float | str | bool | None
17
+ FieldValue = int | float | str | bool | None
18
18
  """Allowable values for fields in the system."""
19
- RecordModel: TypeAlias = PyRecordModel | AbstractRecordModel | WrappedRecordModel
19
+ RecordModel = PyRecordModel | WrappedRecordModel
20
20
  """Different forms that a record model could take."""
21
- SapioRecord: TypeAlias = DataRecord | RecordModel
21
+ SapioRecord = DataRecord | RecordModel
22
22
  """A record could be provided as either a DataRecord, PyRecordModel, or WrappedRecordModel (WrappedType)."""
23
- RecordIdentifier: TypeAlias = SapioRecord | int
23
+ RecordIdentifier = SapioRecord | int
24
24
  """A RecordIdentifier is either a record type or an integer for the record's record ID."""
25
- DataTypeIdentifier: TypeAlias = SapioRecord | type[WrappedType] | str
25
+ DataTypeIdentifier = SapioRecord | type[WrappedType] | str
26
26
  """A DataTypeIdentifier is either a SapioRecord, a record model wrapper type, or a string."""
27
- FieldIdentifier: TypeAlias = AbstractVeloxFieldDefinition | WrapperField | str | tuple[str, FieldType]
27
+ FieldIdentifier = WrapperField | str | tuple[str, FieldType]
28
28
  """A FieldIdentifier is either wrapper field from a record model wrapper, a string, or a tuple of string
29
29
  and field type."""
30
- FieldIdentifierKey: TypeAlias = WrapperField | str
30
+ FieldIdentifierKey = WrapperField | str
31
31
  """A FieldIdentifierKey is a FieldIdentifier, except it can't be a tuple, s tuples can't be used as keys in
32
32
  dictionaries.."""
33
- HasFieldWrappers: TypeAlias = type[WrappedType] | WrappedRecordModel
33
+ HasFieldWrappers = type[WrappedType] | WrappedRecordModel
34
34
  """An identifier for classes that have wrapper fields."""
35
- ExperimentIdentifier: TypeAlias = ElnExperimentProtocol | ElnExperiment | int
35
+ ExperimentIdentifier = ElnExperimentProtocol | ElnExperiment | int
36
36
  """An ExperimentIdentifier is either an experiment protocol, experiment, or an integer for the experiment's notebook
37
37
  ID."""
38
- ExperimentEntryIdentifier: TypeAlias = ElnEntryStep | ExperimentEntry | int
38
+ ExperimentEntryIdentifier = ElnEntryStep | ExperimentEntry | int
39
39
  """An ExperimentEntryIdentifier is either an ELN entry step, experiment entry, or an integer for the entry's ID."""
40
- FieldMap: TypeAlias = dict[str, FieldValue]
40
+ FieldMap = dict[str, FieldValue]
41
41
  """A field map is simply a dict of data field names to values. The purpose of aliasing this is to help distinguish
42
42
  any random dict in a webhook from one which is explicitly used for record fields."""
43
- FieldIdentifierMap: TypeAlias = dict[FieldIdentifierKey, FieldValue]
43
+ FieldIdentifierMap = dict[FieldIdentifierKey, FieldValue]
44
44
  """A field identifier map is the same thing as a field map, except the keys can be field identifiers instead
45
45
  of just strings. Note that although one of the allowed field identifiers is a tuple, you can't use tuples as
46
46
  keys in a dictionary."""
47
- UserIdentifier: TypeAlias = SapioWebhookContext | SapioUser
47
+ UserIdentifier = SapioWebhookContext | SapioUser
48
48
  """An identifier for classes from which a user object can be used for sending requests."""
49
49
 
50
50
 
@@ -142,25 +142,23 @@ class AliasUtil:
142
142
  @staticmethod
143
143
  def to_data_field_name(value: FieldIdentifier) -> str:
144
144
  """
145
- Convert an object that can be used to identify a data field to a data field name string.
145
+ Convert a string or WrapperField to a data field name string.
146
146
 
147
- :param value: An object that can be used to identify a data field.
147
+ :param value: A string or WrapperField.
148
148
  :return: A string of the data field name of the input value.
149
149
  """
150
150
  if isinstance(value, tuple):
151
151
  return value[0]
152
152
  if isinstance(value, WrapperField):
153
153
  return value.field_name
154
- if isinstance(value, AbstractVeloxFieldDefinition):
155
- return value.data_field_name
156
154
  return value
157
155
 
158
156
  @staticmethod
159
157
  def to_data_field_names(values: Iterable[FieldIdentifier]) -> list[str]:
160
158
  """
161
- Convert an iterable of objects that can be used to identify data fields to a list of data field name strings.
159
+ Convert an iterable of strings or WrapperFields to a list of data field name strings.
162
160
 
163
- :param values: An iterable of objects that can be used to identify a data field.
161
+ :param values: An iterable of strings or WrapperFields.
164
162
  :return: A list of strings of the data field names of the input values.
165
163
  """
166
164
  return [AliasUtil.to_data_field_name(x) for x in values]
@@ -207,38 +205,20 @@ class AliasUtil:
207
205
  f"field with the name \"{field}\",")
208
206
 
209
207
  @staticmethod
210
- def to_field_map(record: SapioRecord, include_record_id: bool = False) -> FieldMap:
208
+ def to_field_map_lists(records: Iterable[SapioRecord]) -> list[FieldMap]:
211
209
  """
212
- Convert a given record value to a field map.
213
-
214
- :param record: A record which is a DataRecord, PyRecordModel, or WrappedRecordModel.
215
- :param include_record_id: If true, include the record ID of the record in the field map using the RecordId key.
216
- :return: The field map for the input record.
217
- """
218
- if isinstance(record, DataRecord):
219
- # noinspection PyTypeChecker
220
- fields: FieldMap = record.get_fields()
221
- else:
222
- fields: FieldMap = record.fields.copy_to_dict()
223
- # PR-47457: Only include the record ID if the caller requests it, since including the record ID can break
224
- # callbacks in certain circumstances if the record ID is negative.
225
- if include_record_id:
226
- fields["RecordId"] = AliasUtil.to_record_id(record)
227
- return fields
228
-
229
- @staticmethod
230
- def to_field_map_list(records: Iterable[SapioRecord], include_record_id: bool = False) -> list[FieldMap]:
231
- """
232
- Convert a list of variables that could either be DataRecords, PyRecordModels, or WrappedRecordModels
233
- to a list of their field maps. This includes the given RecordId of the given records.
210
+ Convert a list of variables that could either be DataRecords, PyRecordModels,
211
+ or WrappedRecordModels to a list of their field maps.
234
212
 
235
- :param records: An iterable of records which are DataRecords, PyRecordModels, or WrappedRecordModels.
236
- :param include_record_id: If true, include the record ID of the records in the field map using the RecordId key.
237
213
  :return: A list of field maps for the input records.
238
214
  """
239
215
  field_map_list: list[FieldMap] = []
240
216
  for record in records:
241
- field_map_list.append(AliasUtil.to_field_map(record, include_record_id))
217
+ if isinstance(record, DataRecord):
218
+ # noinspection PyTypeChecker
219
+ field_map_list.append(record.get_fields())
220
+ else:
221
+ field_map_list.append(record.fields.copy_to_dict())
242
222
  return field_map_list
243
223
 
244
224
  @staticmethod
@@ -3,11 +3,11 @@ from enum import Enum
3
3
  from sapiopylib.rest.User import SapioUser
4
4
  from sapiopylib.rest.pojo.CustomReport import ReportColumn, CustomReportCriteria
5
5
 
6
- from sapiopycommons.customreport.auto_pagers import CustomReportDictAutoPager
7
6
  from sapiopycommons.customreport.column_builder import ColumnBuilder
8
7
  from sapiopycommons.customreport.term_builder import TermBuilder
9
8
  from sapiopycommons.datatype.pseudo_data_types import AuditLogPseudoDef
10
9
  from sapiopycommons.general.aliases import RecordIdentifier, AliasUtil, UserIdentifier, FieldIdentifier, FieldValue
10
+ from sapiopycommons.general.custom_report_util import CustomReportUtil
11
11
 
12
12
 
13
13
  class EventType(Enum):
@@ -164,7 +164,7 @@ class AuditLogUtil:
164
164
  criteria = AuditLogUtil.create_data_record_audit_log_report(records, fields)
165
165
 
166
166
  # Then we must run the custom report using that criteria.
167
- raw_report_data: list[dict[str, FieldValue]] = CustomReportDictAutoPager(self.user, criteria).get_all_at_once()
167
+ raw_report_data: list[dict[str, FieldValue]] = CustomReportUtil.run_custom_report(self.user, criteria)
168
168
 
169
169
  # This section will prepare a map matching the original RecordIdentifier by record id.
170
170
  # This is because the audit log entries will have record ids, but we want the keys in our result map
@@ -1,4 +1,3 @@
1
- import warnings
2
1
  from collections.abc import Iterable
3
2
 
4
3
  from sapiopylib.rest.DataMgmtService import DataMgmtServer
@@ -41,7 +40,6 @@ class CustomReportUtil:
41
40
  had a Sample column with a data field name of Identifier and a Request column with the same data field name,
42
41
  then the dictionary keys for these columns would be Sample.Identifier and Request.Identifier respectively.
43
42
  """
44
- warnings.warn("Deprecated in favor of the SystemReportDictAutoPager class.", DeprecationWarning)
45
43
  results: tuple = CustomReportUtil._exhaust_system_report(context, report_name, page_limit,
46
44
  page_size, page_number)
47
45
  columns: list[ReportColumn] = results[0]
@@ -84,7 +82,6 @@ class CustomReportUtil:
84
82
  had a Sample column with a data field name of Identifier and a Request column with the same data field name,
85
83
  then the dictionary keys for these columns would be Sample.Identifier and Request.Identifier respectively.
86
84
  """
87
- warnings.warn("Deprecated in favor of the CustomReportDictAutoPager class.", DeprecationWarning)
88
85
  results: tuple = CustomReportUtil._exhaust_custom_report(context, report_criteria, page_limit,
89
86
  page_size, page_number)
90
87
  columns: list[ReportColumn] = results[0]
@@ -120,7 +117,6 @@ class CustomReportUtil:
120
117
  :return: The results of the report listed row by row, mapping each cell to the header it is under. The header
121
118
  values in the dicts are the data field names of the columns.
122
119
  """
123
- warnings.warn("Deprecated in favor of the QuickReportDictAutoPager class.", DeprecationWarning)
124
120
  results: tuple = CustomReportUtil._exhaust_quick_report(context, report_term, page_limit,
125
121
  page_size, page_number)
126
122
  columns: list[ReportColumn] = results[0]
@@ -131,8 +127,7 @@ class CustomReportUtil:
131
127
  def get_system_report_criteria(context: UserIdentifier, report_name: str) -> CustomReport:
132
128
  """
133
129
  Retrieve a custom report from the system given the name of the report. This works by querying the system report
134
- with a page number of 0 and page size of 1 to minimize the amount of data transfer needed to retrieve the
135
- report's config.
130
+ with a page number and size of 1 to minimize the amount of data transfer needed to retrieve the report's config.
136
131
 
137
132
  System reports are also known as predefined searches in the system and must be defined in the data designer for
138
133
  a specific data type. That is, saved searches created by users cannot be run using this function.
@@ -148,24 +143,6 @@ class CustomReportUtil:
148
143
  report_man = DataMgmtServer.get_custom_report_manager(user)
149
144
  return report_man.run_system_report_by_name(report_name, 1, 0)
150
145
 
151
- @staticmethod
152
- def get_quick_report_criteria(context: UserIdentifier, report_term: RawReportTerm) -> CustomReport:
153
- """
154
- Retrieve a quick report from the system given a report term. This works by making a quick report query
155
- with a page number of 0 and page size of 1 to minimize the amount of data transfer needed to retrieve the
156
- report's config.
157
-
158
- Using this, you can add to the root term of the search to then run a new search, or provide it to client
159
- callbacks or directives that take CustomReports.
160
-
161
- :param context: The current webhook context or a user object to send requests from.
162
- :param report_term: The raw report term to use for the quick report.
163
- :return: The CustomReport object for the given report term.
164
- """
165
- user: SapioUser = AliasUtil.to_sapio_user(context)
166
- report_man = DataMgmtServer.get_custom_report_manager(user)
167
- return report_man.run_quick_report(report_term, 1, 0)
168
-
169
146
  @staticmethod
170
147
  def _exhaust_system_report(context: UserIdentifier,
171
148
  report_name: str,
@@ -1,20 +1,3 @@
1
- from enum import Enum
2
-
3
-
4
- class MessageDisplayType(Enum):
5
- """
6
- An enum representing the different ways in which a message can be displayed to the user.
7
- """
8
- TOASTER_SUCCESS = 0
9
- TOASTER_INFO = 1
10
- TOASTER_WARNING = 2
11
- TOASTER_ERROR = 3
12
- OK_DIALOG = 4
13
- DISPLAY_INFO = 5
14
- DISPLAY_WARNING = 6
15
- DISPLAY_ERROR = 7
16
-
17
-
18
1
  # FR-46064 - Initial port of PyWebhookUtils to sapiopycommons.
19
2
  class SapioException(Exception):
20
3
  """
@@ -46,29 +29,7 @@ class SapioDialogTimeoutException(SapioException):
46
29
  pass
47
30
 
48
31
 
49
- class DisplayableException(SapioException):
50
- """
51
- A generic exception that promises to return a user-friendly message explaining the error that should be displayed to
52
- the user. Note that it is up to whichever class that catches this exception to actually display the message.
53
- """
54
- msg: str
55
- display_type: MessageDisplayType | None
56
- title: str | None
57
-
58
- def __init__(self, msg: str, display_type: MessageDisplayType | None = None, title: str | None = None):
59
- """
60
- :param msg: The message that should be displayed to the user.
61
- :param display_type: The manner in which the message should be displayed. If None, then the display type should
62
- be controlled by the class that catches this exception.
63
- :param title: If the display type is able to have a title, this is the title that will be displayed. If None,
64
- then the title should be controlled by the class that catches this exception.
65
- """
66
- self.msg = msg
67
- self.display_type = display_type
68
- self.title = title
69
-
70
-
71
- class SapioUserErrorException(DisplayableException):
32
+ class SapioUserErrorException(SapioException):
72
33
  """
73
34
  An exception caused by user error (e.g. user provided a CSV when an XLSX was expected), which promises to return a
74
35
  user-friendly message explaining the error that should be displayed to the user.
@@ -78,7 +39,7 @@ class SapioUserErrorException(DisplayableException):
78
39
  pass
79
40
 
80
41
 
81
- class SapioCriticalErrorException(DisplayableException):
42
+ class SapioCriticalErrorException(SapioException):
82
43
  """
83
44
  A critical exception caused by user error, which promises to return a user-friendly message explaining the error
84
45
  that should be displayed to the user.
@@ -311,7 +311,7 @@ class PopupUtil:
311
311
  raise SapioException("Multiple data type names encountered in records list for record table popup.")
312
312
  data_type: str = data_types.pop()
313
313
  # Get the field maps from the records.
314
- field_map_list: list[FieldMap] = AliasUtil.to_field_map_list(records)
314
+ field_map_list: list[FieldMap] = AliasUtil.to_field_map_lists(records)
315
315
  # Get the field definitions of the data type.
316
316
  type_man = DataMgmtServer.get_data_type_manager(context.user)
317
317
  type_def: DataTypeDefinition = type_man.get_data_type_definition(data_type)
@@ -366,7 +366,7 @@ class PopupUtil:
366
366
  raise SapioException("Multiple data type names encountered in records list for record table popup.")
367
367
  data_type: str = data_types.pop()
368
368
  # Get the field maps from the records.
369
- field_map_list: list[FieldMap] = AliasUtil.to_field_map_list(records)
369
+ field_map_list: list[FieldMap] = AliasUtil.to_field_map_lists(records)
370
370
  # Get the field definitions of the data type.
371
371
  type_man = DataMgmtServer.get_data_type_manager(context.user)
372
372
  type_def: DataTypeDefinition = type_man.get_data_type_definition(data_type)
@@ -10,8 +10,7 @@ class SapioNavigationLinker:
10
10
  Given a URL to a system's webservice API (example: https://company.exemplareln.com/webservice/api), construct
11
11
  URLs for navigation links to various locations in the system.
12
12
  """
13
- client_url: str
14
- webservice_url: str
13
+ base_url: str
15
14
 
16
15
  def __init__(self, url: str | SapioUser | SapioWebhookContext):
17
16
  """
@@ -22,14 +21,7 @@ class SapioNavigationLinker:
22
21
  url = url.user.url
23
22
  elif isinstance(url, SapioUser):
24
23
  url = url.url
25
- self.webservice_url = url.rstrip("/")
26
- self.client_url = url.rstrip("/").replace('webservice/api', 'veloxClient')
27
-
28
- def homepage(self) -> str:
29
- """
30
- :return: A URL for navigating to the system's homepage.
31
- """
32
- return self.client_url + "/#view=homepage"
24
+ self.base_url = url.rstrip("/").replace('webservice/api', 'veloxClient')
33
25
 
34
26
  def data_record(self, record_identifier: RecordIdentifier, data_type_name: DataTypeIdentifier | None = None) -> str:
35
27
  """
@@ -47,7 +39,7 @@ class SapioNavigationLinker:
47
39
  if not data_type_name:
48
40
  raise SapioException("Unable to create a data record link without a data type name. "
49
41
  "Only a record ID was provided.")
50
- return self.client_url + f"/#dataType={data_type_name};recordId={record_id};view=dataRecord"
42
+ return self.base_url + f"/#dataType={data_type_name};recordId={record_id};view=dataRecord"
51
43
 
52
44
  def experiment(self, experiment: ExperimentIdentifier) -> str:
53
45
  """
@@ -55,4 +47,4 @@ class SapioNavigationLinker:
55
47
  object, experiment protocol, or a notebook ID.
56
48
  :return: A URL for navigating to the input experiment.
57
49
  """
58
- return self.client_url + f"/#notebookExperimentId={AliasUtil.to_notebook_id(experiment)};view=eln"
50
+ return self.base_url + f"/#notebookExperimentId={AliasUtil.to_notebook_id(experiment)};view=eln"
@@ -6,7 +6,6 @@ from weakref import WeakValueDictionary
6
6
 
7
7
  from databind.json import dumps, loads
8
8
  from sapiopylib.rest.User import SapioUser
9
- from sapiopylib.rest.pojo.DataRecord import DataRecord
10
9
 
11
10
  from sapiopycommons.general.exceptions import SapioException
12
11
  from sapiopycommons.multimodal.multimodal_data import *
@@ -5,10 +5,10 @@ from sapiopylib.rest.pojo.CustomReport import CustomReportCriteria
5
5
  from sapiopylib.rest.pojo.webhook.WebhookContext import SapioWebhookContext
6
6
  from sapiopylib.rest.utils.recordmodel.RecordModelWrapper import WrappedType
7
7
 
8
- from sapiopycommons.customreport.auto_pagers import CustomReportDictAutoPager, CustomReportRecordAutoPager
9
8
  from sapiopycommons.customreport.custom_report_builder import CustomReportBuilder
10
9
  from sapiopycommons.datatype.data_fields import ProcessQueueItemFields, SystemFields, ProcessWorkflowTrackingFields
11
10
  from sapiopycommons.general.aliases import UserIdentifier, AliasUtil, SapioRecord
11
+ from sapiopycommons.general.custom_report_util import CustomReportUtil
12
12
  from sapiopycommons.general.exceptions import SapioException
13
13
  from sapiopycommons.general.time_util import TimeUtil
14
14
  from sapiopycommons.recordmodel.record_handler import RecordHandler
@@ -185,7 +185,7 @@ class QueueItemHandler:
185
185
  :return: A list of every queue item in the system that matches the search criteria.
186
186
  """
187
187
  report = self.build_queue_item_report(criteria)
188
- return CustomReportRecordAutoPager(self.user, report, wrapper).get_all_at_once()
188
+ return self.rec_handler.query_models_by_report(wrapper, report)
189
189
 
190
190
  def get_records_from_item_report(self, wrapper: type[WrappedType],
191
191
  criteria: QueueItemReportCriteria = QueueItemReportCriteria()) -> list[WrappedType]:
@@ -203,7 +203,7 @@ class QueueItemHandler:
203
203
  criteria.not_data_type_names = None
204
204
  report = self.build_queue_item_report(criteria)
205
205
  record_ids: list[int] = [x[ProcessQueueItemFields.DATA_RECORD_ID__FIELD.field_name]
206
- for x in CustomReportDictAutoPager(self.user, report)]
206
+ for x in CustomReportUtil.run_custom_report(self.user, report)]
207
207
  return self.rec_handler.query_models_by_id(wrapper, record_ids)
208
208
 
209
209
  def get_queue_items_for_records(self, records: Iterable[SapioRecord], wrapper: type[WrappedType],