sapiopycommons 2024.11.8a355__py3-none-any.whl → 2024.11.8a359__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of sapiopycommons might be problematic; review the changes listed below for details.

Files changed (47)
  1. sapiopycommons/callbacks/callback_util.py +83 -532
  2. sapiopycommons/chem/IndigoMolecules.py +0 -2
  3. sapiopycommons/chem/Molecules.py +18 -77
  4. sapiopycommons/datatype/attachment_util.py +10 -11
  5. sapiopycommons/eln/experiment_handler.py +70 -272
  6. sapiopycommons/files/complex_data_loader.py +4 -5
  7. sapiopycommons/files/file_bridge.py +24 -31
  8. sapiopycommons/files/file_data_handler.py +5 -2
  9. sapiopycommons/files/file_util.py +9 -59
  10. sapiopycommons/files/file_validator.py +6 -92
  11. sapiopycommons/files/file_writer.py +15 -44
  12. sapiopycommons/general/aliases.py +6 -207
  13. sapiopycommons/general/custom_report_util.py +37 -212
  14. sapiopycommons/general/exceptions.py +8 -21
  15. sapiopycommons/general/popup_util.py +0 -21
  16. sapiopycommons/general/time_util.py +2 -8
  17. sapiopycommons/processtracking/endpoints.py +22 -22
  18. sapiopycommons/recordmodel/record_handler.py +97 -481
  19. sapiopycommons/rules/eln_rule_handler.py +25 -34
  20. sapiopycommons/rules/on_save_rule_handler.py +31 -34
  21. sapiopycommons/webhook/webhook_handlers.py +42 -201
  22. {sapiopycommons-2024.11.8a355.dist-info → sapiopycommons-2024.11.8a359.dist-info}/METADATA +2 -4
  23. sapiopycommons-2024.11.8a359.dist-info/RECORD +38 -0
  24. sapiopycommons/callbacks/field_builder.py +0 -537
  25. sapiopycommons/customreport/__init__.py +0 -0
  26. sapiopycommons/customreport/column_builder.py +0 -60
  27. sapiopycommons/customreport/custom_report_builder.py +0 -130
  28. sapiopycommons/customreport/term_builder.py +0 -299
  29. sapiopycommons/datatype/data_fields.py +0 -61
  30. sapiopycommons/datatype/pseudo_data_types.py +0 -440
  31. sapiopycommons/eln/experiment_report_util.py +0 -653
  32. sapiopycommons/files/file_bridge_handler.py +0 -340
  33. sapiopycommons/flowcyto/flow_cyto.py +0 -77
  34. sapiopycommons/flowcyto/flowcyto_data.py +0 -75
  35. sapiopycommons/general/accession_service.py +0 -375
  36. sapiopycommons/general/audit_log.py +0 -189
  37. sapiopycommons/general/sapio_links.py +0 -50
  38. sapiopycommons/multimodal/multimodal.py +0 -146
  39. sapiopycommons/multimodal/multimodal_data.py +0 -489
  40. sapiopycommons/processtracking/custom_workflow_handler.py +0 -406
  41. sapiopycommons/sftpconnect/__init__.py +0 -0
  42. sapiopycommons/sftpconnect/sftp_builder.py +0 -69
  43. sapiopycommons/webhook/webhook_context.py +0 -39
  44. sapiopycommons/webhook/webservice_handlers.py +0 -67
  45. sapiopycommons-2024.11.8a355.dist-info/RECORD +0 -59
  46. {sapiopycommons-2024.11.8a355.dist-info → sapiopycommons-2024.11.8a359.dist-info}/WHEEL +0 -0
  47. {sapiopycommons-2024.11.8a355.dist-info → sapiopycommons-2024.11.8a359.dist-info}/licenses/LICENSE +0 -0
@@ -4,21 +4,19 @@ import urllib.parse
4
4
 
5
5
  from requests import Response
6
6
  from sapiopylib.rest.User import SapioUser
7
-
8
- from sapiopycommons.general.aliases import UserIdentifier, AliasUtil
7
+ from sapiopylib.rest.pojo.webhook.WebhookContext import SapioWebhookContext
9
8
 
10
9
 
11
10
  # FR-46064 - Initial port of PyWebhookUtils to sapiopycommons.
12
11
  class FileBridge:
13
12
  @staticmethod
14
- def read_file(context: UserIdentifier, bridge_name: str, file_path: str,
13
+ def read_file(context: SapioWebhookContext | SapioUser, bridge_name: str, file_path: str,
15
14
  base64_decode: bool = True) -> bytes:
16
15
  """
17
16
  Read a file from FileBridge.
18
17
 
19
18
  :param context: The current webhook context or a user object to send requests from.
20
- :param bridge_name: The name of the bridge to use. This is the "connection name" in the
21
- file bridge configurations.
19
+ :param bridge_name: The name of the bridge to use.
22
20
  :param file_path: The path to read the file from.
23
21
  :param base64_decode: If true, base64 decode the file. Files are by default base64 encoded when retrieved from
24
22
  FileBridge.
@@ -28,7 +26,7 @@ class FileBridge:
28
26
  params = {
29
27
  'Filepath': f"bridge://{bridge_name}/{file_path}"
30
28
  }
31
- user: SapioUser = AliasUtil.to_sapio_user(context)
29
+ user: SapioUser = context if isinstance(context, SapioUser) else context.user
32
30
  response = user.get(sub_path, params)
33
31
  user.raise_for_status(response)
34
32
 
@@ -38,14 +36,13 @@ class FileBridge:
38
36
  return ret_val
39
37
 
40
38
  @staticmethod
41
- def write_file(context: UserIdentifier, bridge_name: str, file_path: str,
39
+ def write_file(context: SapioWebhookContext | SapioUser, bridge_name: str, file_path: str,
42
40
  file_data: bytes | str) -> None:
43
41
  """
44
42
  Write a file to FileBridge.
45
43
 
46
44
  :param context: The current webhook context or a user object to send requests from.
47
- :param bridge_name: The name of the bridge to use. This is the "connection name" in the
48
- file bridge configurations.
45
+ :param bridge_name: The name of the bridge to use.
49
46
  :param file_path: The path to write the file to. If a file already exists at the given path then the file is
50
47
  overwritten.
51
48
  :param file_data: A string or bytes of the file to be written.
@@ -54,43 +51,41 @@ class FileBridge:
54
51
  params = {
55
52
  'Filepath': f"bridge://{bridge_name}/{file_path}"
56
53
  }
57
- user: SapioUser = AliasUtil.to_sapio_user(context)
58
- with io.BytesIO(file_data.encode() if isinstance(file_data, str) else file_data) as data_stream:
54
+ user: SapioUser = context if isinstance(context, SapioUser) else context.user
55
+ with io.StringIO(file_data) if isinstance(file_data, str) else io.BytesIO(file_data) as data_stream:
59
56
  response = user.post_data_stream(sub_path, params=params, data_stream=data_stream)
60
57
  user.raise_for_status(response)
61
58
 
62
59
  @staticmethod
63
- def list_directory(context: UserIdentifier, bridge_name: str,
60
+ def list_directory(context: SapioWebhookContext | SapioUser, bridge_name: str,
64
61
  file_path: str | None = "") -> list[str]:
65
62
  """
66
63
  List the contents of a FileBridge directory.
67
64
 
68
65
  :param context: The current webhook context or a user object to send requests from.
69
- :param bridge_name: The name of the bridge to use. This is the "connection name" in the
70
- file bridge configurations.
66
+ :param bridge_name: The name of the bridge to use.
71
67
  :param file_path: The path to read the directory from.
72
- :return: A list of names of files and folders in the directory.
68
+ :return: A list of name of files and folders in the directory.
73
69
  """
74
70
  sub_path = '/ext/filebridge/listDirectory'
75
71
  params = {
76
72
  'Filepath': f"bridge://{bridge_name}/{file_path}"
77
73
  }
78
- user: SapioUser = AliasUtil.to_sapio_user(context)
74
+ user: SapioUser = context if isinstance(context, SapioUser) else context.user
79
75
  response: Response = user.get(sub_path, params=params)
80
76
  user.raise_for_status(response)
81
77
 
82
78
  response_body: list[str] = response.json()
83
79
  path_length = len(f"bridge://{bridge_name}/")
84
- return [urllib.parse.unquote(value)[path_length:] for value in response_body]
80
+ return [urllib.parse.unquote(value[path_length:]) for value in response_body]
85
81
 
86
82
  @staticmethod
87
- def create_directory(context: UserIdentifier, bridge_name: str, file_path: str) -> None:
83
+ def create_directory(context: SapioWebhookContext | SapioUser, bridge_name: str, file_path: str) -> None:
88
84
  """
89
85
  Create a new directory in FileBridge.
90
86
 
91
87
  :param context: The current webhook context or a user object to send requests from.
92
- :param bridge_name: The name of the bridge to use. This is the "connection name" in the
93
- file bridge configurations.
88
+ :param bridge_name: The name of the bridge to use.
94
89
  :param file_path: The path to create the directory at. If a directory already exists at the given path then an
95
90
  exception is raised.
96
91
  """
@@ -98,42 +93,40 @@ class FileBridge:
98
93
  params = {
99
94
  'Filepath': f"bridge://{bridge_name}/{file_path}"
100
95
  }
101
- user: SapioUser = AliasUtil.to_sapio_user(context)
96
+ user: SapioUser = context if isinstance(context, SapioUser) else context.user
102
97
  response = user.post(sub_path, params=params)
103
98
  user.raise_for_status(response)
104
99
 
105
100
  @staticmethod
106
- def delete_file(context: UserIdentifier, bridge_name: str, file_path: str) -> None:
101
+ def delete_file(context: SapioWebhookContext | SapioUser, bridge_name: str, file_path: str) -> None:
107
102
  """
108
103
  Delete an existing file in FileBridge.
109
104
 
110
105
  :param context: The current webhook context or a user object to send requests from.
111
- :param bridge_name: The name of the bridge to use. This is the "connection name" in the
112
- file bridge configurations.
106
+ :param bridge_name: The name of the bridge to use.
113
107
  :param file_path: The path to the file to delete.
114
108
  """
115
109
  sub_path = '/ext/filebridge/deleteFile'
116
110
  params = {
117
111
  'Filepath': f"bridge://{bridge_name}/{file_path}"
118
112
  }
119
- user: SapioUser = AliasUtil.to_sapio_user(context)
120
- response = user.delete(sub_path, params=params)
113
+ user: SapioUser = context if isinstance(context, SapioUser) else context.user
114
+ response = user.post(sub_path, params=params)
121
115
  user.raise_for_status(response)
122
116
 
123
117
  @staticmethod
124
- def delete_directory(context: UserIdentifier, bridge_name: str, file_path: str) -> None:
118
+ def delete_directory(context: SapioWebhookContext | SapioUser, bridge_name: str, file_path: str) -> None:
125
119
  """
126
120
  Delete an existing directory in FileBridge.
127
121
 
128
122
  :param context: The current webhook context or a user object to send requests from.
129
- :param bridge_name: The name of the bridge to use. This is the "connection name" in the
130
- file bridge configurations.
123
+ :param bridge_name: The name of the bridge to use.
131
124
  :param file_path: The path to the directory to delete.
132
125
  """
133
126
  sub_path = '/ext/filebridge/deleteDirectory'
134
127
  params = {
135
128
  'Filepath': f"bridge://{bridge_name}/{file_path}"
136
129
  }
137
- user: SapioUser = AliasUtil.to_sapio_user(context)
138
- response = user.delete(sub_path, params=params)
130
+ user: SapioUser = context if isinstance(context, SapioUser) else context.user
131
+ response = user.post(sub_path, params=params)
139
132
  user.raise_for_status(response)
@@ -1,11 +1,14 @@
1
1
  import re
2
2
  from typing import Any, Callable, Iterable
3
3
 
4
- from sapiopycommons.general.aliases import SapioRecord
5
4
  from sapiopycommons.general.exceptions import SapioException
6
- from sapiopycommons.general.time_util import TimeUtil
5
+
7
6
  from sapiopycommons.recordmodel.record_handler import RecordHandler
8
7
 
8
+ from sapiopycommons.general.aliases import SapioRecord
9
+
10
+ from sapiopycommons.general.time_util import TimeUtil
11
+
9
12
  FilterList = Iterable[int] | range | Callable[[int, dict[str, Any]], bool] | None
10
13
  """A FilterList is an object used to determine if a row in the file data should be skipped over. This can take the
11
14
  form of an iterable (e.g. list, set) of ints or a range where row indices in the list or range are skipped, or it can be
@@ -1,6 +1,4 @@
1
1
  import io
2
- import warnings
3
- import zipfile
4
2
 
5
3
  import pandas
6
4
  from numpy import dtype
@@ -23,8 +21,7 @@ class FileUtil:
23
21
  """
24
22
  @staticmethod
25
23
  def tokenize_csv(file_bytes: bytes, required_headers: list[str] | None = None, header_row_index: int | None = 0,
26
- seperator: str = ",", *, encoding: str | None = None, encoding_error: str | None = "strict",
27
- exception_on_empty: bool = True) -> tuple[list[dict[str, str]], list[list[str]]]:
24
+ seperator: str = ",") -> tuple[list[dict[str, str]], list[list[str]]]:
28
25
  """
29
26
  Tokenize a CSV file. The provided file must be uniform. That is, if row 1 has 10 cells, all the rows in the file
30
27
  must have 10 cells. Otherwise, the Pandas parser throws a tokenizer exception.
@@ -37,34 +34,22 @@ class FileUtil:
37
34
  meaning that required headers are also ignored if any are provided. By default, the first row (0th index)
38
35
  is assumed to be the header row.
39
36
  :param seperator: The character that separates cells in the table.
40
- :param encoding: The encoding used to read the given file bytes. If not provided, uses utf-8. If your file
41
- contains a non-utf-8 character, then a UnicodeDecodeError will be thrown. If this happens, consider using
42
- ISO-8859-1 as the encoding, or investigate what encoding would handle the characters in your file.
43
- :param encoding_error: The error handling behavior if an encoding error is encountered. By default, the behavior
44
- is "strict", meaning that encoding errors raise an exception. Change this to "ignore" to skip over invalid
45
- characters or "replace" to replace invalid characters with a ? character. For a full list of options, see
46
- https://docs.python.org/3/library/codecs.html#error-handlers
47
- :param exception_on_empty: Throw a user error exception if the provided file bytes result in an empty list in
48
- the first element of the returned tuple.
49
37
  :return: The CSV parsed into a list of dicts where each dict is a row, mapping the headers to the cells for
50
38
  that row. Also returns a list of each row above the headers (the metadata), parsed into a list of each cell.
51
39
  If the header row index is 0 or None, this list will be empty.
52
40
  """
53
41
  # Parse the file bytes into two DataFrames. The first is metadata of the file located above the header row,
54
42
  # while the second is the body of the file below the header row.
55
- file_body, file_metadata = FileUtil.csv_to_data_frames(file_bytes, header_row_index, seperator,
56
- encoding=encoding, encoding_error=encoding_error)
43
+ file_body, file_metadata = FileUtil.csv_to_data_frames(file_bytes, header_row_index, seperator)
57
44
  # Parse the metadata from above the header row index into a list of lists.
58
45
  metadata: list[list[str]] = FileUtil.data_frame_to_lists(file_metadata)
59
46
  # Parse the data from the file body into a list of dicts.
60
47
  rows: list[dict[str, str]] = FileUtil.data_frame_to_dicts(file_body, required_headers, header_row_index)
61
- if exception_on_empty and not rows:
62
- raise SapioUserErrorException("The provided file contains no rows of information below the headers.")
63
48
  return rows, metadata
64
49
 
65
50
  @staticmethod
66
- def tokenize_xlsx(file_bytes: bytes, required_headers: list[str] | None = None, header_row_index: int | None = 0,
67
- *, exception_on_empty: bool = True) -> tuple[list[dict[str, str]], list[list[str]]]:
51
+ def tokenize_xlsx(file_bytes: bytes, required_headers: list[str] | None = None, header_row_index: int | None = 0) \
52
+ -> tuple[list[dict[str, str]], list[list[str]]]:
68
53
  """
69
54
  Tokenize an XLSX file row by row.
70
55
 
@@ -75,8 +60,6 @@ class FileUtil:
75
60
  row is returned in the metadata list. If input is None, then no row is considered to be the header row,
76
61
  meaning that required headers are also ignored if any are provided. By default, the first row (0th index)
77
62
  is assumed to be the header row.
78
- :param exception_on_empty: Throw a user error exception if the provided file bytes result in an empty list in
79
- the first element of the returned tuple.
80
63
  :return: The XLSX parsed into a list of dicts where each dict is a row, mapping the headers to the cells for
81
64
  that row. Also returns a list of each row above the headers (the metadata), parsed into a list of each cell.
82
65
  If the header row index is 0 or None, this list will be empty.
@@ -88,13 +71,10 @@ class FileUtil:
88
71
  metadata: list[list[str]] = FileUtil.data_frame_to_lists(file_metadata)
89
72
  # Parse the data from the file body into a list of dicts.
90
73
  rows: list[dict[str, str]] = FileUtil.data_frame_to_dicts(file_body, required_headers, header_row_index)
91
- if exception_on_empty and not rows:
92
- raise SapioUserErrorException("The provided file contains no rows of information below the headers.")
93
74
  return rows, metadata
94
75
 
95
76
  @staticmethod
96
- def csv_to_data_frames(file_bytes: bytes, header_row_index: int | None = 0, seperator: str = ",",
97
- *, encoding: str | None = None, encoding_error: str | None = "strict") \
77
+ def csv_to_data_frames(file_bytes: bytes, header_row_index: int | None = 0, seperator: str = ",") \
98
78
  -> tuple[DataFrame, DataFrame | None]:
99
79
  """
100
80
  Parse the file bytes for a CSV into DataFrames. The provided file must be uniform. That is, if row 1 has 10
@@ -106,13 +86,6 @@ class FileUtil:
106
86
  meaning that required headers are also ignored if any are provided. By default, the first row (0th index)
107
87
  is assumed to be the header row.
108
88
  :param seperator: The character that separates cells in the table.
109
- :param encoding: The encoding used to read the given file bytes. If not provided, uses utf-8. If your file
110
- contains a non-utf-8 character, then a UnicodeDecodeError will be thrown. If this happens, consider using
111
- ISO-8859-1 as the encoding, or investigate what encoding would handle the characters in your file.
112
- :param encoding_error: The error handling behavior if an encoding error is encountered. By default, the behavior
113
- is "strict", meaning that encoding errors raise an exception. Change this to "ignore" to skip over invalid
114
- characters or "replace" to replace invalid characters with a ? character. For a full list of options, see
115
- https://docs.python.org/3/library/codecs.html#error-handlers
116
89
  :return: A tuple of two DataFrames. The first is the frame for the CSV table body, while the second is for the
117
90
  metadata from above the header row, or None if there is no metadata.
118
91
  """
@@ -124,14 +97,13 @@ class FileUtil:
124
97
  # can throw off the header row index.
125
98
  file_metadata = pandas.read_csv(file_io, header=None, dtype=dtype(str),
126
99
  skiprows=lambda x: x >= header_row_index,
127
- skip_blank_lines=False, sep=seperator, encoding=encoding,
128
- encoding_errors=encoding_error)
100
+ skip_blank_lines=False, sep=seperator)
129
101
  with io.BytesIO(file_bytes) as file_io:
130
102
  # The use of the dtype argument is to ensure that everything from the file gets read as a string. Added
131
103
  # because some numerical values would get ".0" appended to them, even when casting the DataFrame cell to a
132
104
  # string.
133
105
  file_body: DataFrame = pandas.read_csv(file_io, header=header_row_index, dtype=dtype(str),
134
- skip_blank_lines=False, sep=seperator, encoding=encoding)
106
+ skip_blank_lines=False, sep=seperator)
135
107
 
136
108
  return file_body, file_metadata
137
109
 
@@ -250,7 +222,7 @@ class FileUtil:
250
222
  :param file_data: The CSV file to be converted.
251
223
  :return: The bytes of the CSV file converted to an XLSX file.
252
224
  """
253
- with (io.BytesIO(file_data.encode() if isinstance(file_data, str) else file_data)) as csv:
225
+ with (io.BytesIO(file_data) if isinstance(file_data, bytes) else io.StringIO(file_data)) as csv:
254
226
  # Setting header to false makes pandas read the CSV as-is.
255
227
  data_frame = pandas.read_csv(csv, sep=",", header=None)
256
228
 
@@ -294,20 +266,6 @@ class FileUtil:
294
266
  file_bytes: bytes = buffer.getvalue()
295
267
  return file_bytes
296
268
 
297
- @staticmethod
298
- def zip_files(files: dict[str, str | bytes]) -> bytes:
299
- """
300
- Create a zip file for a collection of files.
301
-
302
- :param files: A dictionary of file name to file data as a string or bytes.
303
- :return: The bytes for a zip file containing the input files.
304
- """
305
- zip_buffer: io.BytesIO = io.BytesIO()
306
- with zipfile.ZipFile(zip_buffer, "w", zipfile.ZIP_DEFLATED) as zip_file:
307
- for file_name, file_data in files.items():
308
- zip_file.writestr(file_name, file_data)
309
- return zip_buffer.getvalue()
310
-
311
269
  # Deprecated functions:
312
270
 
313
271
  # FR-46097 - Add write file request shorthand functions to FileUtil.
@@ -325,8 +283,6 @@ class FileUtil:
325
283
  :param request_context: Context that will be returned to the webhook server in the client callback result.
326
284
  :return: A SapioWebhookResult with the write request as its client callback request.
327
285
  """
328
- warnings.warn("FileUtil.write_file is deprecated as of 24.5+. Use CallbackUtil.write_file instead.",
329
- DeprecationWarning)
330
286
  return SapioWebhookResult(True, client_callback_request=WriteFileRequest(file_bytes, file_name,
331
287
  request_context))
332
288
 
@@ -343,8 +299,6 @@ class FileUtil:
343
299
  :param request_context: Context that will be returned to the webhook server in the client callback result.
344
300
  :return: A SapioWebhookResult with the write request as its client callback request.
345
301
  """
346
- warnings.warn("FileUtil.write_files is deprecated as of 24.5+. Use CallbackUtil.write_file instead.",
347
- DeprecationWarning)
348
302
  return SapioWebhookResult(True, client_callback_request=MultiFileRequest(files, request_context))
349
303
 
350
304
  @staticmethod
@@ -372,8 +326,6 @@ class FileUtil:
372
326
  1 - The file name of the requested file if the user provided one.
373
327
  2 - The file bytes of the requested file if the user provided one.
374
328
  """
375
- warnings.warn("FileUtil.request_file is deprecated as of 24.5+. Use CallbackUtil.request_file instead.",
376
- DeprecationWarning)
377
329
  client_callback = context.client_callback_result
378
330
  result_context: str | None = client_callback.callback_context_data if client_callback else None
379
331
  # If the user cancels, terminate the interaction.
@@ -426,8 +378,6 @@ class FileUtil:
426
378
  May also contain a result that will terminate the client interaction if the user canceled the prompt.
427
379
  1 - A dictionary that maps the file names to the file bytes for each provided file.
428
380
  """
429
- warnings.warn("FileUtil.request_files is deprecated as of 24.5+. Use CallbackUtil.request_files instead.",
430
- DeprecationWarning)
431
381
  client_callback = context.client_callback_result
432
382
  result_context: str | None = client_callback.callback_context_data if client_callback else None
433
383
  # If the user cancels, terminate the interaction.
@@ -470,7 +420,7 @@ class FileUtil:
470
420
  if len(allowed_extensions) != 0:
471
421
  matches: bool = False
472
422
  for ext in allowed_extensions:
473
- if file_path.endswith("." + ext.lstrip(".")):
423
+ if file_path.endswith("." + ext):
474
424
  matches = True
475
425
  break
476
426
  if matches is False:
@@ -4,15 +4,12 @@ from abc import abstractmethod
4
4
  from typing import Any
5
5
 
6
6
  from sapiopylib.rest.User import SapioUser
7
- from sapiopylib.rest.pojo.CustomReport import RawReportTerm, RawTermOperation
8
7
  from sapiopylib.rest.pojo.datatype.FieldDefinition import VeloxIntegerFieldDefinition, VeloxStringFieldDefinition, \
9
8
  AbstractVeloxFieldDefinition
9
+ from sapiopylib.rest.pojo.webhook.WebhookResult import SapioWebhookResult
10
10
 
11
11
  from sapiopycommons.callbacks.callback_util import CallbackUtil
12
12
  from sapiopycommons.files.file_data_handler import FileDataHandler, FilterList
13
- from sapiopycommons.general.aliases import UserIdentifier, AliasUtil
14
- from sapiopycommons.general.custom_report_util import CustomReportUtil
15
- from sapiopycommons.general.exceptions import SapioUserCancelledException
16
13
  from sapiopycommons.general.time_util import TimeUtil
17
14
 
18
15
 
@@ -80,10 +77,10 @@ class FileValidator:
80
77
 
81
78
  return failed_rows
82
79
 
83
- def build_violation_report(self, context: UserIdentifier,
80
+ def build_violation_report(self, context: SapioWebhookResult | SapioUser,
84
81
  rule_violations: dict[int, list[ValidationRule]]) -> None:
85
82
  """
86
- Display a simple report of any rule violations in the file to the user as a table dialog.
83
+ Build a simple report of any rule violations in the file to display to the user as a table dialog.
87
84
 
88
85
  :param context: The current webhook context or a user object to send requests from.
89
86
  :param rule_violations: A dict of rule violations generated by a call to validate_file.
@@ -121,24 +118,9 @@ class FileValidator:
121
118
  "Reason": violation.reason[:2000]
122
119
  })
123
120
 
124
- callback = CallbackUtil(context)
125
- callback.table_dialog("Errors", "The following rule violations were encountered in the provided file.",
126
- columns, rows)
127
-
128
- def validate_and_report_errors(self, context: UserIdentifier) -> None:
129
- """
130
- Validate the file. If any rule violations are found, display a simple report of any rule violations in the file
131
- to the user as a table dialog and throw a SapioUserCancelled exception after the user acknowledges the dialog
132
- to end the webhook interaction.
133
-
134
- Shorthand for calling validate_file() and then build_violation_report() if there are any errors.
135
-
136
- :param context: The current webhook context or a user object to send requests from.
137
- """
138
- violations = self.validate_file()
139
- if violations:
140
- self.build_violation_report(context, violations)
141
- raise SapioUserCancelledException()
121
+ callback_util = CallbackUtil(context)
122
+ callback_util.table_dialog("Errors", "The following rule violations were encountered in the provided file.",
123
+ columns, rows)
142
124
 
143
125
 
144
126
  class ValidationRule:
@@ -498,71 +480,3 @@ class ContainsSubstringFromCellRule(RowRule):
498
480
 
499
481
  def validate(self, row: dict[str, Any]) -> bool:
500
482
  return row.get(self.second) in row.get(self.first)
501
-
502
-
503
- class UniqueSystemValueRule(ColumnRule):
504
- """
505
- Requires that every cell in the column has a value that is not already in use in the system for a given data type
506
- and field name.
507
- """
508
- user: SapioUser
509
- data_type_name: str
510
- data_field_name: str
511
-
512
- def __init__(self, context: UserIdentifier, header: str, data_type_name: str,
513
- data_field_name: str):
514
- """
515
- :param context: The current webhook context or a user object to send requests from.
516
- :param header: The header that this rule acts upon.
517
- :param data_type_name: The data type name to search on.
518
- :param data_field_name: The data field name to search on. This is expected to be a string field.
519
- """
520
- self.user = AliasUtil.to_sapio_user(context)
521
- self.data_type_name = data_type_name
522
- self.data_field_name = data_field_name
523
- super().__init__(header, f"This value already exists in the system.")
524
-
525
- def validate(self, rows: list[dict[str, Any]]) -> list[int]:
526
- file_handler = FileDataHandler(rows)
527
- values: list[str] = file_handler.get_values_list(self.header)
528
-
529
- # Run a quick report for all records of this type that match these field values.
530
- term = RawReportTerm(self.data_type_name, self.data_field_name, RawTermOperation.EQUAL_TO_OPERATOR,
531
- "{" + ",".join(values) + "}")
532
- results: list[dict[str, Any]] = CustomReportUtil.run_quick_report(self.user, term)
533
- existing_values: list[Any] = [x.get(self.data_field_name) for x in results]
534
- return file_handler.get_in_list(self.header, existing_values)
535
-
536
-
537
- class ExistingSystemValueRule(ColumnRule):
538
- """
539
- Requires that every cell in the column has a value that is already in use in the system for a given data type
540
- and field name.
541
- """
542
- user: SapioUser
543
- data_type_name: str
544
- data_field_name: str
545
-
546
- def __init__(self, context: UserIdentifier, header: str, data_type_name: str,
547
- data_field_name: str):
548
- """
549
- :param context: The current webhook context or a user object to send requests from.
550
- :param header: The header that this rule acts upon.
551
- :param data_type_name: The data type name to search on.
552
- :param data_field_name: The data field name to search on. This is expected to be a string field.
553
- """
554
- self.user = AliasUtil.to_sapio_user(context)
555
- self.data_type_name = data_type_name
556
- self.data_field_name = data_field_name
557
- super().__init__(header, f"This value doesn't exist in the system.")
558
-
559
- def validate(self, rows: list[dict[str, Any]]) -> list[int]:
560
- file_handler = FileDataHandler(rows)
561
- values: list[str] = file_handler.get_values_list(self.header)
562
-
563
- # Run a quick report for all records of this type that match these field values.
564
- term = RawReportTerm(self.data_type_name, self.data_field_name, RawTermOperation.EQUAL_TO_OPERATOR,
565
- "{" + ",".join(values) + "}")
566
- results: list[dict[str, Any]] = CustomReportUtil.run_quick_report(self.user, term)
567
- existing_values: list[Any] = [x.get(self.data_field_name) for x in results]
568
- return file_handler.get_not_in_list(self.header, existing_values)
@@ -1,6 +1,5 @@
1
1
  from __future__ import annotations
2
2
 
3
- import warnings
4
3
  from abc import abstractmethod
5
4
  from enum import Enum
6
5
  from typing import Any
@@ -19,7 +18,7 @@ class FileWriter:
19
18
  body: list[list[Any]]
20
19
  delimiter: str
21
20
  line_break: str
22
- column_definitions: dict[str, ColumnDef]
21
+ column_definitions: list[ColumnDef]
23
22
 
24
23
  def __init__(self, headers: list[str], delimiter: str = ",", line_break: str = "\r\n"):
25
24
  """
@@ -31,7 +30,7 @@ class FileWriter:
31
30
  self.delimiter = delimiter
32
31
  self.line_break = line_break
33
32
  self.body = []
34
- self.column_definitions = {}
33
+ self.column_definitions = []
35
34
 
36
35
  def add_row_list(self, row: list[Any]) -> None:
37
36
  """
@@ -66,49 +65,21 @@ class FileWriter:
66
65
  new_row.append(row.get(header, ""))
67
66
  self.body.append(new_row)
68
67
 
69
- def add_column_definition(self, header: str, column_def: ColumnDef) -> None:
68
+ def add_column_definitions(self, column_defs: list[ColumnDef]) -> None:
70
69
  """
71
- Add a new column definition to this FileWriter for a specific header.
70
+ Add new column definitions to this FileWriter. Column definitions are evaluated in the order they are added,
71
+ meaning that they map to the header with the equivalent index. Before the file is built, the number of column
72
+ definitions must equal the number of headers if any column definition is provided.
72
73
 
73
- ColumnDefs are only used if the build_file function is provided with a list of RowBundles. Every header must
74
- have a column definition if this is the case.
74
+ ColumnDefs are only used if the build_file function is provided with a list of RowBundles.
75
75
 
76
76
  Custom column definitions can be created by defining a class that extends ColumnDef and implements the print
77
77
  method.
78
78
 
79
- :param column_def: A column definitions to be used to construct the file when build_file is
79
+ :param column_defs: A list of column definitions to be used to construct the file when build_file is
80
80
  called.
81
- :param header: The header that this column definition is for. If a header is provided that isn't in the headers
82
- list, the header is appended to the end of the list.
83
81
  """
84
- if header not in self.headers:
85
- self.headers.append(header)
86
- self.column_definitions[header] = column_def
87
-
88
- def add_column_definitions(self, column_defs: dict[str, ColumnDef]) -> None:
89
- """
90
- Add new column definitions to this FileWriter.
91
-
92
- ColumnDefs are only used if the build_file function is provided with a list of RowBundles. Every header must
93
- have a column definition if this is the case.
94
-
95
- Custom column definitions can be created by defining a class that extends ColumnDef and implements the print
96
- method.
97
-
98
- :param column_defs: A dictionary of header names to column definitions to be used to construct the file when
99
- build_file is called.
100
- """
101
- # For backwards compatibility purposes, if column definitions are provided as a list,
102
- # add them in order of appearance of the headers. This will only work if the headers are defined first, though.
103
- if isinstance(column_defs, list):
104
- warnings.warn("Adding column definitions is no longer expected as a list. Continuing to provide a list to "
105
- "this function may result in undesirable behavior.", UserWarning)
106
- if not self.headers:
107
- raise SapioException("No headers provided to FileWriter before the column definitions were added.")
108
- for header, column_def in zip(self.headers, column_defs):
109
- self.column_definitions[header] = column_def
110
- for header, column_def in column_defs.items():
111
- self.add_column_definition(header, column_def)
82
+ self.column_definitions.extend(column_defs)
112
83
 
113
84
  def build_file(self, rows: list[RowBundle] | None = None, sorter=None, reverse: bool = False) -> str:
114
85
  """
@@ -129,10 +100,11 @@ class FileWriter:
129
100
  """
130
101
  # If any column definitions have been provided, the number of column definitions and headers must be equal.
131
102
  if self.column_definitions:
132
- for header in self.headers:
133
- if header not in self.column_definitions:
134
- raise SapioException(f"FileWriter has no column definition for the header {header}. If any column "
135
- f"definitions are provided, then all headers must have a column definition.")
103
+ def_count: int = len(self.column_definitions)
104
+ header_count: int = len(self.headers)
105
+ if def_count != header_count:
106
+ raise SapioException(f"FileWriter has {def_count} column definitions defined but {header_count} "
107
+ f"headers. The number of column definitions must equal the number of headers.")
136
108
  # If any RowBundles have been provided, there must be column definitions for mapping them to the file.
137
109
  elif rows:
138
110
  raise SapioException(f"FileWriter was given RowBundles but contains no column definitions for mapping "
@@ -158,8 +130,7 @@ class FileWriter:
158
130
  rows.sort(key=lambda x: x.index)
159
131
  for row in rows:
160
132
  new_row: list[Any] = []
161
- for header in self.headers:
162
- column = self.column_definitions[header]
133
+ for column in self.column_definitions:
163
134
  if column.may_skip and row.may_skip:
164
135
  new_row.append("")
165
136
  else: