sapiopycommons 2025.10.17a787__py3-none-any.whl → 2025.10.21a791__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of sapiopycommons might be problematic. Click here for more details.

Files changed (50) hide show
  1. sapiopycommons/ai/agent_service_base.py +1297 -0
  2. sapiopycommons/ai/converter_service_base.py +163 -0
  3. sapiopycommons/ai/external_credentials.py +128 -0
  4. sapiopycommons/ai/protoapi/externalcredentials/external_credentials_pb2.py +41 -0
  5. sapiopycommons/ai/protoapi/externalcredentials/external_credentials_pb2.pyi +35 -0
  6. sapiopycommons/ai/protoapi/externalcredentials/external_credentials_pb2_grpc.py +24 -0
  7. sapiopycommons/ai/protoapi/fielddefinitions/fields_pb2.py +43 -0
  8. sapiopycommons/ai/protoapi/fielddefinitions/fields_pb2.pyi +31 -0
  9. sapiopycommons/ai/protoapi/fielddefinitions/fields_pb2_grpc.py +24 -0
  10. sapiopycommons/ai/protoapi/fielddefinitions/velox_field_def_pb2.py +123 -0
  11. sapiopycommons/ai/protoapi/fielddefinitions/velox_field_def_pb2.pyi +598 -0
  12. sapiopycommons/ai/protoapi/fielddefinitions/velox_field_def_pb2_grpc.py +24 -0
  13. sapiopycommons/ai/protoapi/plan/converter/converter_pb2.py +51 -0
  14. sapiopycommons/ai/protoapi/plan/converter/converter_pb2.pyi +63 -0
  15. sapiopycommons/ai/protoapi/plan/converter/converter_pb2_grpc.py +149 -0
  16. sapiopycommons/ai/protoapi/plan/item/item_container_pb2.py +55 -0
  17. sapiopycommons/ai/protoapi/plan/item/item_container_pb2.pyi +90 -0
  18. sapiopycommons/ai/protoapi/plan/item/item_container_pb2_grpc.py +24 -0
  19. sapiopycommons/ai/protoapi/plan/script/script_pb2.py +61 -0
  20. sapiopycommons/ai/protoapi/plan/script/script_pb2.pyi +108 -0
  21. sapiopycommons/ai/protoapi/plan/script/script_pb2_grpc.py +153 -0
  22. sapiopycommons/ai/protoapi/plan/step_output_pb2.py +45 -0
  23. sapiopycommons/ai/protoapi/plan/step_output_pb2.pyi +42 -0
  24. sapiopycommons/ai/protoapi/plan/step_output_pb2_grpc.py +24 -0
  25. sapiopycommons/ai/protoapi/plan/step_pb2.py +43 -0
  26. sapiopycommons/ai/protoapi/plan/step_pb2.pyi +43 -0
  27. sapiopycommons/ai/protoapi/plan/step_pb2_grpc.py +24 -0
  28. sapiopycommons/ai/protoapi/plan/tool/entry_pb2.py +41 -0
  29. sapiopycommons/ai/protoapi/plan/tool/entry_pb2.pyi +35 -0
  30. sapiopycommons/ai/protoapi/plan/tool/entry_pb2_grpc.py +24 -0
  31. sapiopycommons/ai/protoapi/plan/tool/tool_pb2.py +79 -0
  32. sapiopycommons/ai/protoapi/plan/tool/tool_pb2.pyi +261 -0
  33. sapiopycommons/ai/protoapi/plan/tool/tool_pb2_grpc.py +154 -0
  34. sapiopycommons/ai/protoapi/session/sapio_conn_info_pb2.py +39 -0
  35. sapiopycommons/ai/protoapi/session/sapio_conn_info_pb2.pyi +32 -0
  36. sapiopycommons/ai/protoapi/session/sapio_conn_info_pb2_grpc.py +24 -0
  37. sapiopycommons/ai/protobuf_utils.py +504 -0
  38. sapiopycommons/ai/request_validation.py +478 -0
  39. sapiopycommons/ai/server.py +152 -0
  40. sapiopycommons/ai/test_client.py +446 -0
  41. sapiopycommons/callbacks/callback_util.py +4 -18
  42. sapiopycommons/files/file_util.py +128 -1
  43. sapiopycommons/files/temp_files.py +82 -0
  44. sapiopycommons/general/aliases.py +0 -3
  45. sapiopycommons/webhook/webservice_handlers.py +1 -1
  46. {sapiopycommons-2025.10.17a787.dist-info → sapiopycommons-2025.10.21a791.dist-info}/METADATA +1 -1
  47. {sapiopycommons-2025.10.17a787.dist-info → sapiopycommons-2025.10.21a791.dist-info}/RECORD +49 -9
  48. sapiopycommons/ai/tool_of_tools.py +0 -917
  49. {sapiopycommons-2025.10.17a787.dist-info → sapiopycommons-2025.10.21a791.dist-info}/WHEEL +0 -0
  50. {sapiopycommons-2025.10.17a787.dist-info → sapiopycommons-2025.10.21a791.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,446 @@
1
+ import base64
2
+ import json
3
+ import os
4
+ from enum import Enum
5
+ from typing import Any
6
+
7
+ import grpc
8
+ from sapiopylib.rest.User import SapioUser
9
+
10
+ from sapiopycommons.ai.external_credentials import ExternalCredentials
11
+ from sapiopycommons.ai.protoapi.externalcredentials.external_credentials_pb2 import ExternalCredentialsPbo
12
+ from sapiopycommons.ai.protoapi.fielddefinitions.fields_pb2 import FieldValuePbo
13
+ from sapiopycommons.ai.protoapi.plan.converter.converter_pb2 import ConverterDetailsRequestPbo, \
14
+ ConverterDetailsResponsePbo, ConvertResponsePbo, ConvertRequestPbo
15
+ from sapiopycommons.ai.protoapi.plan.converter.converter_pb2_grpc import ConverterServiceStub
16
+ from sapiopycommons.ai.protoapi.plan.item.item_container_pb2 import ContentTypePbo
17
+ from sapiopycommons.ai.protoapi.plan.tool.entry_pb2 import StepBinaryContainerPbo, StepCsvRowPbo, \
18
+ StepCsvHeaderRowPbo, StepCsvContainerPbo, StepJsonContainerPbo, StepTextContainerPbo, \
19
+ StepItemContainerPbo, StepInputBatchPbo
20
+ from sapiopycommons.ai.protoapi.plan.tool.tool_pb2 import ProcessStepResponsePbo, ProcessStepRequestPbo, \
21
+ ToolDetailsRequestPbo, ToolDetailsResponsePbo, ProcessStepResponseStatusPbo
22
+ from sapiopycommons.ai.protoapi.plan.tool.tool_pb2_grpc import ToolServiceStub
23
+ from sapiopycommons.ai.protoapi.session.sapio_conn_info_pb2 import SapioConnectionInfoPbo, SapioUserSecretTypePbo
24
+ from sapiopycommons.ai.protobuf_utils import ProtobufUtils
25
+ from sapiopycommons.general.aliases import FieldValue
26
+ from sapiopycommons.general.time_util import TimeUtil
27
+
28
+
29
+ class ContainerType(Enum):
30
+ """
31
+ An enum of the different container contents of a StepItemContainerPbo.
32
+ """
33
+ BINARY = "binary"
34
+ CSV = "csv"
35
+ JSON = "json"
36
+ TEXT = "text"
37
+
38
+
39
+ # FR-47422: Created class.
40
+ class AgentOutput:
41
+ """
42
+ A class for holding the output of a TestClient that calls an AgentService. AgentOutput objects can be
43
+ printed to show the output of the agent in a human-readable format.
44
+ """
45
+ agent_name: str
46
+
47
+ status: str
48
+ message: str
49
+
50
+ # Outputs are lists of lists, where the outer lists are the different outputs of the tool, and the inner lists
51
+ # are the entries for that output.
52
+ binary_output: list[list[bytes]]
53
+ csv_output: list[list[dict[str, Any]]]
54
+ json_output: list[list[dict[str, Any]]]
55
+ text_output: list[list[str]]
56
+
57
+ new_records: list[dict[str, FieldValue]]
58
+
59
+ logs: list[str]
60
+
61
+ def __init__(self, agent_name: str):
62
+ self.agent_name = agent_name
63
+ self.binary_output = []
64
+ self.csv_output = []
65
+ self.json_output = []
66
+ self.text_output = []
67
+ self.new_records = []
68
+ self.logs = []
69
+
70
+ def save_outputs(self, path: str = "test_outputs", subfolder: str | None = None,
71
+ file_extensions: list[str] | None = None) -> None:
72
+ """
73
+ Save all outputs to files in the specified output directory.
74
+
75
+ :param path: The directory to save the output files to.
76
+ :param subfolder: An optional subfolder within the path to save the output files to. Useful for when you are
77
+ calling the same agent multiple times for separate test cases.
78
+ :param file_extensions: A list of file extensions to use for binary output files. The length of this list
79
+ should match the number of binary outputs.
80
+ """
81
+ if not self:
82
+ return
83
+ output_path: str = os.path.join(path, self.agent_name)
84
+ if subfolder:
85
+ output_path = os.path.join(output_path, subfolder)
86
+ os.makedirs(output_path, exist_ok=True)
87
+ if self.binary_output and (file_extensions is None or len(file_extensions) != len(self.binary_output)):
88
+ raise ValueError("File extensions must be provided for each binary output.")
89
+ for i, output in enumerate(self.binary_output):
90
+ ext: str = "." + file_extensions[i].lstrip(".")
91
+ for j, entry in enumerate(output):
92
+ with open(os.path.join(output_path, f"binary_output_{i}_{j}{ext}"), "wb") as f:
93
+ f.write(entry)
94
+ for i, output in enumerate(self.csv_output):
95
+ with open(os.path.join(output_path, f"csv_output_{i}.csv"), "w", encoding="utf-8") as f:
96
+ headers = output[0].keys()
97
+ f.write(",".join(headers) + "\n")
98
+ for row in output:
99
+ f.write(",".join(f'"{str(row[h])}"' for h in headers) + "\n")
100
+ for i, output in enumerate(self.json_output):
101
+ for j, entry in enumerate(output):
102
+ with open(os.path.join(output_path, f"json_output_{i}_{j}.json"), "w", encoding="utf-8") as f:
103
+ json.dump(entry, f, indent=2)
104
+ for i, output in enumerate(self.text_output):
105
+ for j, entry in enumerate(output):
106
+ with open(os.path.join(output_path, f"text_output_{i}_{j}.txt"), "w", encoding="utf-8") as f:
107
+ f.write(entry)
108
+
109
+ def __bool__(self):
110
+ """
111
+ Return True if the agent call was successful, False otherwise.
112
+ """
113
+ return self.status == "Success"
114
+
115
+ def __str__(self):
116
+ """
117
+ Return a string representing a summary of the agent output.
118
+ """
119
+ ret_val: str = f"{self.agent_name} Output:\n"
120
+ ret_val += f"\tStatus: {self.status}\n"
121
+ ret_val += f"\tMessage: {self.message}\n"
122
+ ret_val += "-" * 25 + "\n"
123
+
124
+ if self.status == "Success":
125
+ ret_val += f"Binary Output: {sum(len(x) for x in self.binary_output)} item(s) across {len(self.binary_output)} output(s)\n"
126
+ for i, output in enumerate(self.binary_output, start=1):
127
+ output: list[bytes]
128
+ ret_val += f"\tBinary Output {i}:\n"
129
+ for binary in output:
130
+ ret_val += f"\t\t{len(binary)} byte(s): {binary[:50]}...\n"
131
+
132
+ ret_val += f"CSV Output: {sum(len(x) for x in self.csv_output)} item(s) across {len(self.csv_output)} output(s)\n"
133
+ for i, output in enumerate(self.csv_output, start=1):
134
+ output: list[dict[str, Any]]
135
+ ret_val += f"\tCSV Output {i}:\n"
136
+ ret_val += f"\t\tHeaders: {', '.join(output[0].keys())}\n"
137
+ for j, csv_row in enumerate(output):
138
+ ret_val += f"\t\t{j}: {', '.join(f'{v}' for k, v in csv_row.items())}\n"
139
+
140
+ ret_val += f"JSON Output: {sum(len(x) for x in self.json_output)} item(s) across {len(self.json_output)} output(s)\n"
141
+ for i, output in enumerate(self.json_output, start=1):
142
+ output: list[Any]
143
+ ret_val += f"\tJSON Output {i}:\n"
144
+ for json_obj in output:
145
+ ret_val += f"\t\t"
146
+ ret_val += json.dumps(json_obj, indent=2).replace("\n", "\n\t\t") + "\n"
147
+
148
+ ret_val += f"Text Output: {sum(len(x) for x in self.text_output)} item(s) across {len(self.text_output)} output(s)\n"
149
+ for i, output in enumerate(self.text_output, start=1):
150
+ output: list[str]
151
+ ret_val += f"\tText Output {i}:\n"
152
+ for text in output:
153
+ ret_val += f"\t\t{text}\n"
154
+
155
+ ret_val += f"New Records: {len(self.new_records)} item(s)\n"
156
+ for record in self.new_records:
157
+ ret_val += f"{json.dumps(record, indent=2)}\n"
158
+
159
+ ret_val += f"Logs: {len(self.logs)} item(s)\n"
160
+ for log in self.logs:
161
+ ret_val += f"\t{log}\n"
162
+ return ret_val
163
+
164
+
165
+ class TestClient:
166
+ """
167
+ A client for testing an AgentService.
168
+ """
169
+ grpc_server_url: str
170
+ options: list[tuple[str, Any]] | None
171
+ connection: SapioConnectionInfoPbo
172
+ _request_inputs: list[StepItemContainerPbo]
173
+ _config_fields: dict[str, FieldValuePbo]
174
+ _credentials: list[ExternalCredentialsPbo]
175
+
176
+ def __init__(self, grpc_server_url: str, message_mb_size: int = 1024, user: SapioUser | None = None,
177
+ options: list[tuple[str, Any]] | None = None):
178
+ """
179
+ :param grpc_server_url: The URL of the gRPC server to connect to.
180
+ :param message_mb_size: The maximum size of a sent or received message in megabytes.
181
+ :param user: Optional SapioUser object to use for the connection. If not provided, a default connection
182
+ will be created with test credentials.
183
+ :param options: Optional list of gRPC channel options.
184
+ """
185
+ self.grpc_server_url = grpc_server_url
186
+ self.options = [
187
+ ('grpc.max_send_message_length', message_mb_size * 1024 * 1024),
188
+ ('grpc.max_receive_message_length', message_mb_size * 1024 * 1024)
189
+ ]
190
+ if options:
191
+ self.options.extend(options)
192
+ self._create_connection(user)
193
+ self._request_inputs = []
194
+ self._config_fields = {}
195
+ self._credentials = []
196
+
197
+ def _create_connection(self, user: SapioUser | None = None):
198
+ """
199
+ Create a SapioConnectionInfoPbo object with test credentials. This method can be overridden to
200
+ create a user with specific credentials for testing.
201
+ """
202
+ self.connection = SapioConnectionInfoPbo()
203
+ self.connection.username = user.username if user else "Testing"
204
+ self.connection.webservice_url = user.url if user else "https://localhost:8080/webservice/api"
205
+ self.connection.app_guid = user.guid if user else "1234567890"
206
+ self.connection.rmi_host.append("Testing")
207
+ self.connection.rmi_port = 9001
208
+ if user and user.password:
209
+ self.connection.secret_type = SapioUserSecretTypePbo.PASSWORD
210
+ self.connection.secret = "Basic " + base64.b64encode(f'{user.username}:{user.password}'.encode()).decode()
211
+ else:
212
+ self.connection.secret_type = SapioUserSecretTypePbo.SESSION_TOKEN
213
+ self.connection.secret = user.api_token if user and user.api_token else "test_api_token"
214
+
215
+ def add_binary_input(self, input_data: list[bytes]) -> None:
216
+ """
217
+ Add a binary input to the next request.
218
+ """
219
+ self._add_input(ContainerType.BINARY, StepBinaryContainerPbo(items=input_data))
220
+
221
+ def add_csv_input(self, input_data: list[dict[str, Any]]) -> None:
222
+ """
223
+ Add a CSV input to the next request.
224
+ """
225
+ csv_items = []
226
+ for row in input_data:
227
+ csv_items.append(StepCsvRowPbo(cells=[str(value) for value in row.values()]))
228
+ header = StepCsvHeaderRowPbo(cells=list(input_data[0].keys()))
229
+ self._add_input(ContainerType.CSV, StepCsvContainerPbo(header=header, items=csv_items))
230
+
231
+ def add_json_input(self, input_data: list[dict[str, Any]]) -> None:
232
+ """
233
+ Add a JSON input to the next request.
234
+ """
235
+ self._add_input(ContainerType.JSON, StepJsonContainerPbo(items=[json.dumps(x) for x in input_data]))
236
+
237
+ def add_text_input(self, input_data: list[str]) -> None:
238
+ """
239
+ Add a text input to the next request.
240
+ """
241
+ self._add_input(ContainerType.TEXT, StepTextContainerPbo(items=input_data))
242
+
243
+ def clear_inputs(self) -> None:
244
+ """
245
+ Clear all inputs that have been added to the next request.
246
+ This is useful if you want to start a new request without the previous inputs.
247
+ """
248
+ self._request_inputs.clear()
249
+
250
+ def add_config_field(self, field_name: str, value: FieldValue | list[str]) -> None:
251
+ """
252
+ Add a configuration field value to the next request.
253
+
254
+ :param field_name: The name of the configuration field.
255
+ :param value: The value to set for the configuration field. If a list is provided, it will be
256
+ converted to a comma-separated string.
257
+ """
258
+ if isinstance(value, list):
259
+ value = ",".join(str(x) for x in value)
260
+ if not isinstance(value, FieldValuePbo):
261
+ value = ProtobufUtils.value_to_field_pbo(value)
262
+ self._config_fields[field_name] = value
263
+
264
+ def add_config_fields(self, config_fields: dict[str, FieldValue | list[str]]) -> None:
265
+ """
266
+ Add multiple configuration field values to the next request.
267
+
268
+ :param config_fields: A dictionary of configuration field names and their corresponding values.
269
+ """
270
+ for x, y in config_fields.items():
271
+ self.add_config_field(x, y)
272
+
273
+ def clear_configs(self) -> None:
274
+ """
275
+ Clear all configuration field values that have been added to the next request.
276
+ This is useful if you want to start a new request without the previous configurations.
277
+ """
278
+ self._config_fields.clear()
279
+
280
+ def add_credentials(self, credentials: list[ExternalCredentials]) -> None:
281
+ """
282
+ Add external credentials to the connection info for the next request.
283
+
284
+ :param credentials: A list of ExternalCredentialsPbo objects to add to the connection info.
285
+ """
286
+ for cred in credentials:
287
+ self._credentials.append(cred.to_pbo())
288
+
289
+ def clear_credentials(self) -> None:
290
+ """
291
+ Clear all external credentials that have been added to the next request.
292
+ This is useful if you want to start a new request without the previous credentials.
293
+ """
294
+ self._credentials.clear()
295
+
296
+ def clear_request(self) -> None:
297
+ """
298
+ Clear all inputs and configuration fields that have been added to the next request.
299
+ This is useful if you want to start a new request without the previous inputs and configurations.
300
+
301
+ Credentials are not cleared, as they may be reused across multiple requests.
302
+ """
303
+ self.clear_inputs()
304
+ self.clear_configs()
305
+
306
+ def _add_input(self, container_type: ContainerType, items: Any) -> None:
307
+ """
308
+ Helper method for adding inputs to the next request.
309
+ """
310
+ container: StepItemContainerPbo | None = None
311
+ match container_type:
312
+ # The content type doesn't matter when we're just testing.
313
+ case ContainerType.BINARY:
314
+ container = StepItemContainerPbo(content_type=ContentTypePbo(), binary_container=items)
315
+ case ContainerType.CSV:
316
+ container = StepItemContainerPbo(content_type=ContentTypePbo(), csv_container=items)
317
+ case ContainerType.JSON:
318
+ container = StepItemContainerPbo(content_type=ContentTypePbo(), json_container=items)
319
+ case ContainerType.TEXT:
320
+ container = StepItemContainerPbo(content_type=ContentTypePbo(), text_container=items)
321
+ case _:
322
+ raise ValueError(f"Unsupported container type: {container_type}")
323
+ self._request_inputs.append(container)
324
+
325
+ def get_service_details(self) -> ToolDetailsResponsePbo:
326
+ """
327
+ Get the details of the agents from the server.
328
+
329
+ :return: A ToolDetailsResponsePbo object containing the details of the agent service.
330
+ """
331
+ with grpc.insecure_channel(self.grpc_server_url, options=self.options) as channel:
332
+ stub = ToolServiceStub(channel)
333
+ return stub.GetToolDetails(ToolDetailsRequestPbo(sapio_conn_info=self.connection))
334
+
335
+ def call_agent(self, agent_name: str, is_verbose: bool = True, is_dry_run: bool = False) -> AgentOutput:
336
+ """
337
+ Send the request to the agent service for a particular agent name. This will send all the inputs that have been
338
+ added using the add_X_input functions.
339
+
340
+ :param agent_name: The name of the agent to call on the server.
341
+ :param is_verbose: If True, the agent will log verbosely.
342
+ :param is_dry_run: If True, the agent will not be executed, but the request will be validated.
343
+ :return: An AgentOutput object containing the results of the agent service call.
344
+ """
345
+ print(f"Calling agent \"{agent_name}\"...")
346
+ with grpc.insecure_channel(self.grpc_server_url, options=self.options) as channel:
347
+ stub = ToolServiceStub(channel)
348
+
349
+ start = TimeUtil.now_in_millis()
350
+ response: ProcessStepResponsePbo = stub.ProcessData(
351
+ ProcessStepRequestPbo(
352
+ sapio_user=self.connection,
353
+ tool_name=agent_name,
354
+ config_field_values=self._config_fields,
355
+ dry_run=is_dry_run,
356
+ verbose_logging=is_verbose,
357
+ external_credential=self._credentials,
358
+ input=[
359
+ StepInputBatchPbo(is_partial=False, item_container=item)
360
+ for item in self._request_inputs
361
+ ]
362
+ )
363
+ )
364
+ end = TimeUtil.now_in_millis()
365
+ print(f"Agent call completed in {(end - start) / 1000.:.3f} seconds")
366
+
367
+ results = AgentOutput(agent_name)
368
+
369
+ match response.status:
370
+ case ProcessStepResponseStatusPbo.SUCCESS:
371
+ results.status = "Success"
372
+ case ProcessStepResponseStatusPbo.FAILURE:
373
+ results.status = "Failure"
374
+ case _:
375
+ results.status = "Unknown"
376
+ results.message = response.status_message
377
+
378
+ for item in response.output:
379
+ container = item.item_container
380
+
381
+ if container.HasField("binary_container"):
382
+ results.binary_output.append(list(container.binary_container.items))
383
+ elif container.HasField("csv_container"):
384
+ csv_output: list[dict[str, Any]] = []
385
+ for header in container.csv_container.header.cells:
386
+ output_row: dict[str, Any] = {}
387
+ for i, row in enumerate(container.csv_container.items):
388
+ output_row[header] = row.cells[i]
389
+ csv_output.append(output_row)
390
+ results.csv_output.append(csv_output)
391
+ elif container.HasField("json_container"):
392
+ results.json_output.append([json.loads(x) for x in container.json_container.items])
393
+ elif container.HasField("text_container"):
394
+ results.text_output.append(list(container.text_container.items))
395
+
396
+ for record in response.new_records:
397
+ field_map: dict[str, Any] = {x: ProtobufUtils.field_pbo_to_value(y) for x, y in record.fields.items()}
398
+ results.new_records.append(field_map)
399
+
400
+ results.logs.extend(response.log)
401
+
402
+ return results
403
+
404
+
405
+ class TestConverterClient:
406
+ """
407
+ A client for testing a ConverterService.
408
+ """
409
+ grpc_server_url: str
410
+ options: list[tuple[str, Any]] | None
411
+
412
+ def __init__(self, grpc_server_url: str, options: list[tuple[str, Any]] | None = None):
413
+ """
414
+ :param grpc_server_url: The URL of the gRPC server to connect to.
415
+ :param options: Optional list of gRPC channel options.
416
+ """
417
+ self.grpc_server_url = grpc_server_url
418
+ self.options = options
419
+
420
+ def get_converter_details(self) -> ConverterDetailsResponsePbo:
421
+ """
422
+ Get the details of the converters from the server.
423
+
424
+ :return: A ConverterDetailsResponsePbo object containing the details of the converter service.
425
+ """
426
+ with grpc.insecure_channel(self.grpc_server_url, options=self.options) as channel:
427
+ stub = ConverterServiceStub(channel)
428
+ return stub.GetConverterDetails(ConverterDetailsRequestPbo())
429
+
430
+ def convert_content(self, input_container: StepItemContainerPbo, target_type: ContentTypePbo) \
431
+ -> StepItemContainerPbo:
432
+ """
433
+ Convert the content of the input container to the target content type.
434
+
435
+ :param input_container: The input container to convert. This container must have a ContentTypePbo set that
436
+ matches one of the input types that the converter service supports.
437
+ :param target_type: The target content type to convert to. This must match one of the target types that the
438
+ converter service supports.
439
+ :return: A StepItemContainerPbo object containing the converted content.
440
+ """
441
+ with grpc.insecure_channel(self.grpc_server_url, options=self.options) as channel:
442
+ stub = ConverterServiceStub(channel)
443
+ response: ConvertResponsePbo = stub.ConvertContent(
444
+ ConvertRequestPbo(item_container=input_container, target_content_type=target_type)
445
+ )
446
+ return response.item_container
@@ -858,9 +858,9 @@ class CallbackUtil:
858
858
  raise SapioException("No records provided.")
859
859
  data_type: str = AliasUtil.to_singular_data_type_name(records)
860
860
  if index_field is not None:
861
- field_map_list: list[FieldMap] = self.__get_indexed_field_maps(records, index_field, True)
861
+ field_map_list: list[FieldMap] = self.__get_indexed_field_maps(records, index_field)
862
862
  else:
863
- field_map_list: list[FieldMap] = AliasUtil.to_field_map_list(records, True)
863
+ field_map_list: list[FieldMap] = AliasUtil.to_field_map_list(records)
864
864
 
865
865
  # Convert the group_by parameter to a field name.
866
866
  if group_by is not None:
@@ -882,18 +882,6 @@ class CallbackUtil:
882
882
  temp_dt = self.__temp_dt_from_field_names(data_type, fields, None, default_modifier, field_modifiers)
883
883
  temp_dt.record_image_assignable = bool(image_data)
884
884
 
885
- # PR-47894: If the RecordId field is not present in the layout, then it should not be included in the field
886
- # maps, as otherwise selection list fields can break.
887
- remove_record_id: bool = True
888
- for field_def in temp_dt.get_field_def_list():
889
- if field_def.data_field_name == "RecordId":
890
- remove_record_id = False
891
- break
892
- if remove_record_id:
893
- for field_map in field_map_list:
894
- if "RecordId" in field_map:
895
- del field_map["RecordId"]
896
-
897
885
  # Send the request to the user.
898
886
  request = TableEntryDialogRequest(title, msg, temp_dt, field_map_list,
899
887
  record_image_data_list=image_data, group_by_field=group_by,
@@ -1940,8 +1928,7 @@ class CallbackUtil:
1940
1928
  self.write_file(zip_name, FileUtil.zip_files(files))
1941
1929
 
1942
1930
  @staticmethod
1943
- def __get_indexed_field_maps(records: Iterable[SapioRecord], index_field: str, include_record_id: bool = False) \
1944
- -> list[FieldMap]:
1931
+ def __get_indexed_field_maps(records: Iterable[SapioRecord], index_field: str) -> list[FieldMap]:
1945
1932
  """
1946
1933
  For dialogs that accept multiple records, we may want to be able to match the returned results back to the
1947
1934
  records that they're for. In this case, we need to add an index to each record so that we can match them back
@@ -1951,13 +1938,12 @@ class CallbackUtil:
1951
1938
  :param records: The records to return indexed field maps of.
1952
1939
  :param index_field: The name of the field to use as the index. Make sure that this field doesn't exist on the
1953
1940
  records, as then it will overwrite the existing value.
1954
- :param include_record_id: Whether to include the RecordId field in the field maps.
1955
1941
  :return: A list of field maps for the records, with an index field added to each. The value of the index on
1956
1942
  each field map is the record's record ID (even if it's a record model with a negative ID).
1957
1943
  """
1958
1944
  ret_val: list[FieldMap] = []
1959
1945
  for record in records:
1960
- field_map: FieldMap = AliasUtil.to_field_map(record, include_record_id)
1946
+ field_map: FieldMap = AliasUtil.to_field_map(record)
1961
1947
  field_map[index_field] = AliasUtil.to_record_id(record)
1962
1948
  ret_val.append(field_map)
1963
1949
  return ret_val
@@ -1,4 +1,7 @@
1
+ import gzip
1
2
  import io
3
+ import tarfile
4
+ import time
2
5
  import warnings
3
6
  import zipfile
4
7
 
@@ -322,7 +325,7 @@ class FileUtil:
322
325
  @staticmethod
323
326
  def zip_files(files: dict[str, str | bytes]) -> bytes:
324
327
  """
325
- Create a zip file for a collection of files.
328
+ Create a .zip file for a collection of files.
326
329
 
327
330
  :param files: A dictionary of file name to file data as a string or bytes.
328
331
  :return: The bytes for a zip file containing the input files.
@@ -335,6 +338,130 @@ class FileUtil:
335
338
  # throws an I/O exception.
336
339
  return zip_buffer.getvalue()
337
340
 
341
+ # FR-47422: Add a function for unzipping files that may have been zipped by the above function.
342
+ @staticmethod
343
+ def unzip_files(zip_file: bytes) -> dict[str, bytes]:
344
+ """
345
+ Decompress a .zip file from an in-memory bytes object and extract all files into a dictionary.
346
+
347
+ :param zip_file: The bytes of the zip file to be decompressed.
348
+ :return: A dictionary of file name to file bytes for each file in the zip.
349
+ """
350
+ extracted_files: dict[str, bytes] = {}
351
+ with io.BytesIO(zip_file) as zip_buffer:
352
+ with zipfile.ZipFile(zip_buffer, "r") as zip_file:
353
+ for file_name in zip_file.namelist():
354
+ with zip_file.open(file_name) as file:
355
+ extracted_files[file_name] = file.read()
356
+ return extracted_files
357
+
358
+ # FR-47422: Add functions for compressing and decompressing .gz, .tar, and .tar.gz files.
359
+ @staticmethod
360
+ def gzip_file(file_data: bytes | str) -> bytes:
361
+ """
362
+ Create a .gz file for a single file.
363
+
364
+ :param file_data: The file data to be compressed as bytes or a string.
365
+ :return: The bytes of the gzip-compressed file.
366
+ """
367
+ return gzip.compress(file_data.encode() if isinstance(file_data, str) else file_data)
368
+
369
+ @staticmethod
370
+ def ungzip_file(gzip_file: bytes) -> bytes:
371
+ """
372
+ Decompress a .gz file.
373
+
374
+ :param gzip_file: The bytes of the gzip-compressed file.
375
+ :return: The decompressed file data as bytes.
376
+ """
377
+ return gzip.decompress(gzip_file)
378
+
379
+ @staticmethod
380
+ def tar_files(files: dict[str, str | bytes]) -> bytes:
381
+ """
382
+ Create a .tar file for a collection of files.
383
+
384
+ :param files: A dictionary of file name to file data as a string or bytes.
385
+ :return: The bytes for a tar file containing the input files.
386
+ """
387
+ with io.BytesIO() as tar_buffer:
388
+ with tarfile.open(fileobj=tar_buffer, mode="w") as tar:
389
+ for name, data in files.items():
390
+ if isinstance(data, str):
391
+ data: bytes = data.encode('utf-8')
392
+
393
+ tarinfo = tarfile.TarInfo(name=name)
394
+ tarinfo.size = len(data)
395
+ tarinfo.mtime = int(time.time())
396
+
397
+ with io.BytesIO(data) as file:
398
+ tar.addfile(tarinfo=tarinfo, fileobj=file)
399
+
400
+ tar_buffer.seek(0)
401
+ return tar_buffer.getvalue()
402
+
403
+ @staticmethod
404
+ def untar_files(tar_file: bytes) -> dict[str, bytes]:
405
+ """
406
+ Decompress a .tar file from an in-memory bytes object and extract all files into a dictionary.
407
+
408
+ :param tar_file: The bytes of the tar file to be decompressed.
409
+ :return: A dictionary of file name to file bytes for each file in the tar.
410
+ """
411
+ extracted_files: dict[str, bytes] = {}
412
+ with io.BytesIO(tar_file) as tar_buffer:
413
+ with tarfile.open(fileobj=tar_buffer, mode="r") as tar:
414
+ for member in tar.getmembers():
415
+ if member.isfile():
416
+ file_obj = tar.extractfile(member)
417
+ if file_obj:
418
+ with file_obj:
419
+ extracted_files[member.name] = file_obj.read()
420
+ return extracted_files
421
+
422
+ @staticmethod
423
+ def tar_gzip_files(files: dict[str, str | bytes]) -> bytes:
424
+ """
425
+ Create a .tar.gz file for a collection of files.
426
+
427
+ :param files: A dictionary of file name to file data as a string or bytes.
428
+ :return: The bytes for a tar.gz file containing the input files.
429
+ """
430
+ with io.BytesIO() as tar_buffer:
431
+ with tarfile.open(fileobj=tar_buffer, mode="w:gz") as tar:
432
+ for name, data in files.items():
433
+ if isinstance(data, str):
434
+ data: bytes = data.encode('utf-8')
435
+
436
+ tarinfo = tarfile.TarInfo(name=name)
437
+ tarinfo.size = len(data)
438
+ tarinfo.mtime = int(time.time())
439
+
440
+ with io.BytesIO(data) as file:
441
+ tar.addfile(tarinfo=tarinfo, fileobj=file)
442
+
443
+ tar_buffer.seek(0)
444
+ return tar_buffer.getvalue()
445
+
446
+ @staticmethod
447
+ def untar_gzip_files(tar_gzip_file: bytes) -> dict[str, bytes]:
448
+ """
449
+ Decompress a .tar.gz file from an in-memory bytes object and extract all files into a dictionary.
450
+
451
+ :param tar_gzip_file: The bytes of the tar.gz file to be decompressed.
452
+ :return: A dictionary of file name to file bytes for each file in the tar.gz file.
453
+ """
454
+ extracted_files: dict[str, bytes] = {}
455
+ with io.BytesIO(tar_gzip_file) as tar_buffer:
456
+ with tarfile.open(fileobj=tar_buffer, mode="r:gz") as tar:
457
+ for member in tar.getmembers():
458
+ if member.isfile():
459
+ file_obj = tar.extractfile(member)
460
+ if file_obj:
461
+ with file_obj:
462
+ extracted_files[member.name] = file_obj.read()
463
+ return extracted_files
464
+
338
465
  # Deprecated functions:
339
466
 
340
467
  # FR-46097 - Add write file request shorthand functions to FileUtil.