sapiopycommons 2025.6.19a564__py3-none-any.whl → 2026.1.22a847__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sapiopycommons/ai/__init__.py +0 -0
- sapiopycommons/ai/agent_service_base.py +2051 -0
- sapiopycommons/ai/converter_service_base.py +163 -0
- sapiopycommons/ai/external_credentials.py +131 -0
- sapiopycommons/ai/protoapi/agent/agent_pb2.py +87 -0
- sapiopycommons/ai/protoapi/agent/agent_pb2.pyi +282 -0
- sapiopycommons/ai/protoapi/agent/agent_pb2_grpc.py +154 -0
- sapiopycommons/ai/protoapi/agent/entry_pb2.py +49 -0
- sapiopycommons/ai/protoapi/agent/entry_pb2.pyi +40 -0
- sapiopycommons/ai/protoapi/agent/entry_pb2_grpc.py +24 -0
- sapiopycommons/ai/protoapi/agent/item/item_container_pb2.py +61 -0
- sapiopycommons/ai/protoapi/agent/item/item_container_pb2.pyi +181 -0
- sapiopycommons/ai/protoapi/agent/item/item_container_pb2_grpc.py +24 -0
- sapiopycommons/ai/protoapi/externalcredentials/external_credentials_pb2.py +41 -0
- sapiopycommons/ai/protoapi/externalcredentials/external_credentials_pb2.pyi +36 -0
- sapiopycommons/ai/protoapi/externalcredentials/external_credentials_pb2_grpc.py +24 -0
- sapiopycommons/ai/protoapi/fielddefinitions/fields_pb2.py +51 -0
- sapiopycommons/ai/protoapi/fielddefinitions/fields_pb2.pyi +59 -0
- sapiopycommons/ai/protoapi/fielddefinitions/fields_pb2_grpc.py +24 -0
- sapiopycommons/ai/protoapi/fielddefinitions/velox_field_def_pb2.py +123 -0
- sapiopycommons/ai/protoapi/fielddefinitions/velox_field_def_pb2.pyi +599 -0
- sapiopycommons/ai/protoapi/fielddefinitions/velox_field_def_pb2_grpc.py +24 -0
- sapiopycommons/ai/protoapi/pipeline/converter/converter_pb2.py +59 -0
- sapiopycommons/ai/protoapi/pipeline/converter/converter_pb2.pyi +68 -0
- sapiopycommons/ai/protoapi/pipeline/converter/converter_pb2_grpc.py +149 -0
- sapiopycommons/ai/protoapi/pipeline/script/script_pb2.py +69 -0
- sapiopycommons/ai/protoapi/pipeline/script/script_pb2.pyi +109 -0
- sapiopycommons/ai/protoapi/pipeline/script/script_pb2_grpc.py +153 -0
- sapiopycommons/ai/protoapi/pipeline/step_output_pb2.py +49 -0
- sapiopycommons/ai/protoapi/pipeline/step_output_pb2.pyi +56 -0
- sapiopycommons/ai/protoapi/pipeline/step_output_pb2_grpc.py +24 -0
- sapiopycommons/ai/protoapi/pipeline/step_pb2.py +43 -0
- sapiopycommons/ai/protoapi/pipeline/step_pb2.pyi +44 -0
- sapiopycommons/ai/protoapi/pipeline/step_pb2_grpc.py +24 -0
- sapiopycommons/ai/protoapi/session/sapio_conn_info_pb2.py +39 -0
- sapiopycommons/ai/protoapi/session/sapio_conn_info_pb2.pyi +33 -0
- sapiopycommons/ai/protoapi/session/sapio_conn_info_pb2_grpc.py +24 -0
- sapiopycommons/ai/protobuf_utils.py +583 -0
- sapiopycommons/ai/request_validation.py +561 -0
- sapiopycommons/ai/server.py +152 -0
- sapiopycommons/ai/test_client.py +534 -0
- sapiopycommons/callbacks/callback_util.py +53 -24
- sapiopycommons/eln/experiment_handler.py +12 -5
- sapiopycommons/files/assay_plate_reader.py +93 -0
- sapiopycommons/files/file_text_converter.py +207 -0
- sapiopycommons/files/file_util.py +128 -1
- sapiopycommons/files/temp_files.py +82 -0
- sapiopycommons/flowcyto/flow_cyto.py +2 -24
- sapiopycommons/general/accession_service.py +2 -28
- sapiopycommons/general/aliases.py +4 -1
- sapiopycommons/general/macros.py +172 -0
- sapiopycommons/general/time_util.py +199 -4
- sapiopycommons/multimodal/multimodal.py +2 -24
- sapiopycommons/recordmodel/record_handler.py +200 -111
- sapiopycommons/rules/eln_rule_handler.py +3 -0
- sapiopycommons/rules/on_save_rule_handler.py +3 -0
- sapiopycommons/webhook/webhook_handlers.py +6 -4
- sapiopycommons/webhook/webservice_handlers.py +1 -1
- {sapiopycommons-2025.6.19a564.dist-info → sapiopycommons-2026.1.22a847.dist-info}/METADATA +2 -2
- sapiopycommons-2026.1.22a847.dist-info/RECORD +113 -0
- sapiopycommons-2025.6.19a564.dist-info/RECORD +0 -68
- {sapiopycommons-2025.6.19a564.dist-info → sapiopycommons-2026.1.22a847.dist-info}/WHEEL +0 -0
- {sapiopycommons-2025.6.19a564.dist-info → sapiopycommons-2026.1.22a847.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,152 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
from argparse import ArgumentParser
|
|
5
|
+
from typing import Any
|
|
6
|
+
|
|
7
|
+
import grpc
|
|
8
|
+
|
|
9
|
+
from sapiopycommons.ai.converter_service_base import ConverterServiceBase
|
|
10
|
+
from sapiopycommons.ai.protoapi.pipeline.converter.converter_pb2_grpc import add_ConverterServiceServicer_to_server, \
|
|
11
|
+
ConverterServiceServicer
|
|
12
|
+
from sapiopycommons.ai.protoapi.pipeline.script.script_pb2_grpc import add_ScriptServiceServicer_to_server, \
|
|
13
|
+
ScriptServiceServicer
|
|
14
|
+
from sapiopycommons.ai.protoapi.agent.agent_pb2_grpc import add_AgentServiceServicer_to_server, AgentServiceServicer
|
|
15
|
+
from sapiopycommons.ai.agent_service_base import AgentServiceBase
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class AgentGrpcServer:
    """
    A gRPC server for handling the various agent gRPC services.

    Register one or more converter, script, or agent services with the add_*_service
    methods, then call start() to serve them until the process terminates.
    """
    # Port the server listens on for incoming gRPC requests.
    port: int
    # gRPC server options, including the max send/receive message sizes in bytes.
    options: list[tuple[str, Any]]
    # Whether debug mode is propagated to registered services.
    debug_mode: bool
    _converter_services: list[ConverterServiceServicer]
    _script_services: list[ScriptServiceServicer]
    _agent_services: list[AgentServiceServicer]

    @staticmethod
    def args_parser() -> ArgumentParser:
        """
        Create an argument parser for the gRPC server.

        :return: The argument parser, accepting --debug_mode/-d, --port/-p, and --message_mb_size/-s.
        """
        parser = ArgumentParser()
        parser.add_argument("--debug_mode", "-d", action="store_true")
        parser.add_argument("--port", "-p", default=50051, type=int)
        parser.add_argument("--message_mb_size", "-s", default=1024, type=int)
        return parser

    @staticmethod
    def from_args(options: list[tuple[str, Any]] | None = None) -> AgentGrpcServer:
        """
        Construct an AgentGrpcServer from the command line arguments defined by args_parser().

        :param options: Additional gRPC server options forwarded to the constructor.
        :return: The new server instance.
        """
        return AgentGrpcServer(options=options, **vars(AgentGrpcServer.args_parser().parse_args()))

    def __init__(self, port: int = 50051, message_mb_size: int = 1024, debug_mode: bool = False,
                 options: list[tuple[str, Any]] | None = None) -> None:
        """
        Initialize the gRPC server with the specified port and message size.

        :param port: The port to listen on for incoming gRPC requests. A string value is
            tolerated and converted to an int (e.g. when sourced from an environment variable).
        :param message_mb_size: The maximum size of a sent or received message in megabytes.
        :param debug_mode: Sets the debug mode for services.
        :param options: Additional gRPC server options to set. This should be a list of tuples where the first item is
            the option name and the second item is the option value.
        """
        if isinstance(port, str):
            port = int(port)
        self.port = port
        self.options = [
            ('grpc.max_send_message_length', message_mb_size * 1024 * 1024),
            ('grpc.max_receive_message_length', message_mb_size * 1024 * 1024)
        ]
        if options:
            self.options.extend(options)
        self.debug_mode = debug_mode
        if debug_mode:
            print("Debug mode is enabled.")
        self._converter_services = []
        self._script_services = []
        self._agent_services = []

    def update_message_size(self, message_mb_size: int) -> None:
        """
        Update the maximum message size for the gRPC server.

        :param message_mb_size: The new maximum message size in megabytes.
        """
        for i, (option_name, _) in enumerate(self.options):
            if option_name in ('grpc.max_send_message_length', 'grpc.max_receive_message_length'):
                self.options[i] = (option_name, message_mb_size * 1024 * 1024)

    def add_converter_service(self, service: ConverterServiceBase) -> None:
        """
        Add a converter service to the gRPC server.

        :param service: The converter service to register with the server.
        """
        service.debug_mode = self.debug_mode
        self._converter_services.append(service)

    def add_script_service(self, service: ScriptServiceServicer) -> None:
        """
        Add a script service to the gRPC server.

        :param service: The script service to register with the server.
        """
        # NOTE(review): unlike the converter/agent variants, this does not propagate
        # debug_mode to the service — confirm whether script servicers support it.
        self._script_services.append(service)

    def add_agent_service(self, service: AgentServiceBase) -> None:
        """
        Add an agent service to the gRPC server.

        :param service: The agent service to register with the server.
        """
        service.debug_mode = self.debug_mode
        self._agent_services.append(service)

    def start(self) -> None:
        """
        Start the gRPC server for the provided servicers. Blocks until the server
        terminates or the user interrupts the process.

        :raises ValueError: If no services have been registered.
        """
        if not (self._converter_services or self._script_services or self._agent_services):
            # Bug fix: the original concatenated string literals without a separating space,
            # yielding "add_script_service,or add_agent_service" in the message.
            raise ValueError("No services have been added to the server. Use add_converter_service, "
                             "add_script_service, or add_agent_service to register a service before "
                             "starting the server.")

        async def serve():
            server = grpc.aio.server(options=self.options)

            for service in self._converter_services:
                print(f"Registering Converter service: {service.__class__.__name__}")
                add_ConverterServiceServicer_to_server(service, server)
            for service in self._script_services:
                print(f"Registering Script service: {service.__class__.__name__}")
                add_ScriptServiceServicer_to_server(service, server)
            for service in self._agent_services:
                print(f"Registering Agent service: {service.__class__.__name__}")
                add_AgentServiceServicer_to_server(service, server)

            # Standard gRPC health checking so orchestrators can probe readiness.
            from grpc_health.v1 import health_pb2, health_pb2_grpc
            from grpc_health.v1.health import HealthServicer
            health_servicer = HealthServicer()
            health_servicer.set("", health_pb2.HealthCheckResponse.ServingStatus.SERVING)
            # NOTE(review): only "ScriptService" is marked serving by name — confirm whether the
            # converter/agent services should also be registered with the health servicer.
            health_servicer.set("ScriptService", health_pb2.HealthCheckResponse.ServingStatus.SERVING)
            health_pb2_grpc.add_HealthServicer_to_server(health_servicer, server)

            server.add_insecure_port(f"[::]:{self.port}")
            await server.start()
            print(f"Server started, listening on {self.port}")
            try:
                await server.wait_for_termination()
            finally:
                print("Stopping server...")
                await server.stop(0)
                print("Server stopped.")

        try:
            asyncio.run(serve())
        except KeyboardInterrupt:
            print("Server stopped by user.")
        except Exception as e:
            print(f"An error occurred: {e}")
|
|
@@ -0,0 +1,534 @@
|
|
|
1
|
+
import base64
|
|
2
|
+
import json
|
|
3
|
+
import os
|
|
4
|
+
from typing import Any
|
|
5
|
+
|
|
6
|
+
import grpc
|
|
7
|
+
|
|
8
|
+
from sapiopycommons.ai.agent_service_base import ContainerType
|
|
9
|
+
from sapiopycommons.files.file_util import FileUtil
|
|
10
|
+
from sapiopylib.rest.User import SapioUser
|
|
11
|
+
|
|
12
|
+
from sapiopycommons.ai.external_credentials import ExternalCredentials
|
|
13
|
+
from sapiopycommons.ai.protoapi.externalcredentials.external_credentials_pb2 import ExternalCredentialsPbo
|
|
14
|
+
from sapiopycommons.ai.protoapi.fielddefinitions.fields_pb2 import FieldValuePbo
|
|
15
|
+
from sapiopycommons.ai.protoapi.pipeline.converter.converter_pb2 import ConverterDetailsRequestPbo, \
|
|
16
|
+
ConverterDetailsResponsePbo, ConvertResponsePbo, ConvertRequestPbo
|
|
17
|
+
from sapiopycommons.ai.protoapi.pipeline.converter.converter_pb2_grpc import ConverterServiceStub
|
|
18
|
+
from sapiopycommons.ai.protoapi.agent.item.item_container_pb2 import ContentTypePbo
|
|
19
|
+
from sapiopycommons.ai.protoapi.agent.entry_pb2 import StepBinaryContainerPbo, StepCsvRowPbo, \
|
|
20
|
+
StepCsvHeaderRowPbo, StepCsvContainerPbo, StepJsonContainerPbo, StepTextContainerPbo, \
|
|
21
|
+
StepItemContainerPbo, StepInputBatchPbo
|
|
22
|
+
from sapiopycommons.ai.protoapi.agent.agent_pb2 import ProcessStepResponsePbo, ProcessStepRequestPbo, \
|
|
23
|
+
AgentDetailsRequestPbo, AgentDetailsResponsePbo, ProcessStepResponseStatusPbo
|
|
24
|
+
from sapiopycommons.ai.protoapi.agent.agent_pb2_grpc import AgentServiceStub
|
|
25
|
+
from sapiopycommons.ai.protoapi.session.sapio_conn_info_pb2 import SapioConnectionInfoPbo, SapioUserSecretTypePbo
|
|
26
|
+
from sapiopycommons.ai.protobuf_utils import ProtobufUtils
|
|
27
|
+
from sapiopycommons.general.aliases import FieldValue
|
|
28
|
+
from sapiopycommons.general.time_util import TimeUtil
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
# FR-47422: Created class.
class AgentOutput:
    """
    A class for holding the output of a TestClient that calls an AgentService. AgentOutput objects can be
    printed to show the output of the agent in a human-readable format.
    """
    # Name of the agent that produced this output; used for the output folder and summary header.
    agent_name: str

    # "Success", "Failure", or "Unknown", plus the status message from the response.
    status: str
    message: str

    # Outputs are lists of lists, where the outer lists are the different outputs of the tool, and the inner lists
    # are the entries for that output.
    binary_output: list[list[bytes]]
    csv_output: list[list[dict[str, Any]]]
    json_output: list[list[dict[str, Any]]]
    text_output: list[list[str]]
    # A mapping of index from the raw output to the container type and index from the lists above.
    index_map: dict[int, tuple[ContainerType, int]]

    # Field maps for any records the agent requested to be created.
    new_records: list[dict[str, FieldValue]]

    # Log lines returned by the agent.
    logs: list[str]

    def __init__(self, agent_name: str):
        """
        :param agent_name: The name of the agent whose output this object will hold.
        """
        self.agent_name = agent_name
        self.binary_output = []
        self.csv_output = []
        self.json_output = []
        self.text_output = []
        self.index_map = {}
        self.new_records = []
        self.logs = []

    def save_outputs(self, path: str = "test_outputs", subfolder: str | None = None,
                     file_extensions: list[str] | None = None) -> None:
        """
        Save all outputs to files in the specified output directory. Does nothing if the agent
        call was not successful (see __bool__).

        For each output type, at most the first five entries are written as individual files;
        when more than six entries exist, the remainder are bundled into a single tar.gz.

        :param path: The directory to save the output files to.
        :param subfolder: An optional subfolder within the path to save the output files to. Useful for when you are
            calling the same agent multiple times for separate test cases.
        :param file_extensions: A list of file extensions to use for binary output files. The length of this list
            should match the number of binary outputs.
        :raises ValueError: If binary outputs exist but file_extensions is missing or mismatched in length.
        """
        if not self:
            return
        output_path: str = os.path.join(path, self.agent_name)
        if subfolder:
            output_path = os.path.join(output_path, subfolder)
        os.makedirs(output_path, exist_ok=True)
        if self.binary_output and (file_extensions is None or len(file_extensions) != len(self.binary_output)):
            raise ValueError("File extensions must be provided for each binary output.")

        for i, mapping in self.index_map.items():
            output_type: ContainerType = mapping[0]
            output_index: int = mapping[1]

            match output_type:
                case ContainerType.BINARY:
                    output: list[bytes] = self.binary_output[output_index]
                    binary_zip: dict[str, bytes] = {}
                    # Normalize the extension so both ".csv" and "csv" inputs work.
                    ext: str = "." + file_extensions[output_index].lstrip(".")
                    total_output: int = len(output)
                    for j, entry in enumerate(output):
                        file_name: str = f"output_{i}_binary_{j}{ext}"
                        # Entries beyond the fifth are collected for archiving when more than six exist.
                        if j >= 5 and total_output > 6:
                            binary_zip[file_name] = entry
                        else:
                            with open(os.path.join(output_path, file_name), "wb") as f:
                                f.write(entry)
                    if binary_zip:
                        zip_file: bytes = FileUtil.tar_gzip_files(binary_zip)
                        zip_name: str = f"output_{i}_binary - {total_output - 5}_remaining_results.tar.gz"
                        with open(os.path.join(output_path, zip_name), "wb") as f:
                            f.write(zip_file)
                case ContainerType.CSV:
                    output: list[dict[str, Any]] = self.csv_output[output_index]
                    # NOTE(review): naive CSV writing — values are quoted but embedded quotes/newlines
                    # are not escaped, and an empty output would fail on output[0]; confirm acceptable
                    # for test tooling.
                    with open(os.path.join(output_path, f"output_{i}_csv.csv"), "w", encoding="utf-8") as f:
                        headers = output[0].keys()
                        f.write(",".join(headers) + "\n")
                        for row in output:
                            f.write(",".join(f'"{str(row[h])}"' for h in headers) + "\n")
                case ContainerType.JSON:
                    output: list[dict[str, Any]] = self.json_output[output_index]
                    json_zip: dict[str, str] = {}
                    total_output: int = len(output)
                    for j, entry in enumerate(output):
                        file_name: str = f"output_{i}_json_{j}.json"
                        if j >= 5 and total_output > 6:
                            json_zip[file_name] = json.dumps(entry, indent=2)
                        else:
                            with open(os.path.join(output_path, file_name), "w", encoding="utf-8") as f:
                                json.dump(entry, f, indent=2)
                    if json_zip:
                        zip_file: bytes = FileUtil.tar_gzip_files(json_zip)
                        zip_name: str = f"output_{i}_json - {total_output - 5}_remaining_results.tar.gz"
                        with open(os.path.join(output_path, zip_name), "wb") as f:
                            f.write(zip_file)
                case ContainerType.TEXT:
                    output: list[str] = self.text_output[output_index]
                    text_zip: dict[str, str] = {}
                    total_output: int = len(output)
                    for j, entry in enumerate(output):
                        file_name: str = f"output_{i}_text_{j}.txt"
                        if j >= 5 and total_output > 6:
                            text_zip[file_name] = entry
                        else:
                            with open(os.path.join(output_path, file_name), "w", encoding="utf-8") as f:
                                f.write(entry)
                    if text_zip:
                        zip_file: bytes = FileUtil.tar_gzip_files(text_zip)
                        zip_name: str = f"output_{i}_text - {total_output - 5}_remaining_results.tar.gz"
                        with open(os.path.join(output_path, zip_name), "wb") as f:
                            f.write(zip_file)

    def __bool__(self):
        """
        Return True if the agent call was successful, False otherwise.
        """
        return self.status == "Success"

    def __str__(self):
        """
        Return a string representing a summary of the agent output. Per-output previews are
        truncated after roughly five entries or 100 lines, whichever comes first.
        """
        ret_val: str = f"{self.agent_name} Output:\n"
        ret_val += f"\tStatus: {self.status}\n"
        ret_val += f"\tMessage: {self.message}\n"
        ret_val += "-" * 25 + "\n"

        if self and self.index_map:
            ret_val += f"Total Binary Output:\n"
            ret_val += f"\t{len(self.binary_output)} BINARY output(s)\n"
            ret_val += f"\t{sum(len(x) for x in self.binary_output):,} file(s)\n"
            ret_val += f"\t{sum(sum(len(y) for y in x) for x in self.binary_output):,} byte(s)\n"
            ret_val += f"Total CSV Output:\n"
            ret_val += f"\t{len(self.csv_output)} CSV output(s)\n"
            ret_val += f"\t{sum(len(x) for x in self.csv_output):,} rows(s)\n"
            ret_val += f"Total JSON Output:\n"
            ret_val += f"\t{len(self.json_output)} JSON output(s)\n"
            ret_val += f"\t{sum(len(x) for x in self.json_output):,} item(s)\n"
            ret_val += f"Total Text Output:\n"
            ret_val += f"\t{len(self.text_output)} TEXT output(s)\n"
            ret_val += f"\t{sum(len(x) for x in self.text_output):,} item(s)\n"
            ret_val += f"\t{sum(sum(len(y) for y in x) for x in self.text_output):,} characters(s)\n\n"

            for i, mapping in self.index_map.items():
                output_type: ContainerType = mapping[0]
                output_index: int = mapping[1]

                match output_type:
                    case ContainerType.BINARY:
                        output: list[bytes] = self.binary_output[output_index]
                        ret_val += f"Output Index {i}: BINARY with {len(output)} file(s)\n"
                        for j, binary in enumerate(output):
                            # Only preview the first 50 bytes of each binary entry.
                            ret_val += f"\t{len(binary)} byte(s): {binary[:50]}...\n"
                            if j == 5:
                                ret_val += f"\tAnd {len(output) - j} more binary items...\n"
                                break
                    case ContainerType.CSV:
                        output: list[dict[str, Any]] = self.csv_output[output_index]
                        ret_val += f"Output Index {i}: CSV with {len(output)} row(s)\n"
                        ret_val += f"\tHeaders: {', '.join(output[0].keys())}\n"
                        for j, csv_row in enumerate(output, start=1):
                            ret_val += f"\t{j}: {', '.join(f'{v}' for k, v in csv_row.items())}\n"
                            if j == 5:
                                ret_val += f"\tAnd {len(output) - j} more CSV rows...\n"
                                break
                    case ContainerType.JSON:
                        lines: int = 0
                        output: list[dict[str, Any]] = self.json_output[output_index]
                        ret_val += f"Output Index {i}: JSON with {len(output)} item(s)\n"
                        for j, json_obj in enumerate(output, start=1):
                            ret_val += f"\t"
                            # Indent the JSON dump so it nests under the output header.
                            dump = json.dumps(json_obj, indent=2).replace("\n", "\n\t") + "\n"
                            dump_size = dump.count("\n")
                            if dump_size > 200:
                                dump = "\n".join(dump.splitlines()[:200]) + f"\n\t\t... (truncated {dump_size - 200})\n"
                                dump_size = dump.count("\n")
                            lines += dump_size
                            ret_val += dump
                            if j == 5 or lines > 100:
                                ret_val += f"\tAnd {len(output) - j} more JSON items...\n"
                                break
                    case ContainerType.TEXT:
                        lines: int = 0
                        output: list[str] = self.text_output[output_index]
                        ret_val += f"Output Index {i}: TEXT with {len(output)} item(s)\n"
                        for j, text in enumerate(output, start=1):
                            lines += text.count("\n")
                            ret_val += f"\t{text}\n"
                            if j == 5 or lines > 100:
                                ret_val += f"\tAnd {len(output) - j} more text items...\n"
                                break

        # NOTE(review): indentation reconstructed — this places the new-record and log summaries
        # outside the success check so they are shown even on failure; confirm against the
        # original source layout.
        ret_val += f"New Records: {len(self.new_records)} item(s)\n"
        lines: int = 0
        for i, record in enumerate(self.new_records, start=1):
            ret_val += f"\t"
            dump = json.dumps(record, indent=2).replace("\n", "\n\t") + "\n"
            dump_size = dump.count("\n")
            if dump_size > 200:
                dump = "\n".join(dump.splitlines()[:200]) + f"\n\t\t... (truncated {dump_size - 200})\n"
                dump_size = dump.count("\n")
            lines += dump_size
            ret_val += dump
            if i == 5 or lines > 100:
                ret_val += f"\tAnd {len(self.new_records) - i} more new records...\n"
                break

        ret_val += f"Logs: {len(self.logs)} item(s)\n"
        for log in self.logs:
            ret_val += f"\t{log}\n"
        return ret_val
|
|
246
|
+
|
|
247
|
+
|
|
248
|
+
class TestClient:
|
|
249
|
+
"""
|
|
250
|
+
A client for testing an AgentService.
|
|
251
|
+
"""
|
|
252
|
+
grpc_server_url: str
|
|
253
|
+
options: list[tuple[str, Any]] | None
|
|
254
|
+
connection: SapioConnectionInfoPbo
|
|
255
|
+
_request_inputs: list[StepItemContainerPbo]
|
|
256
|
+
_config_fields: dict[str, FieldValuePbo]
|
|
257
|
+
_credentials: list[ExternalCredentialsPbo]
|
|
258
|
+
|
|
259
|
+
def __init__(self, grpc_server_url: str, message_mb_size: int = 1024, user: SapioUser | None = None,
             options: list[tuple[str, Any]] | None = None):
    """
    :param grpc_server_url: The URL of the gRPC server to connect to.
    :param message_mb_size: The maximum size of a sent or received message in megabytes.
    :param user: Optional SapioUser object to use for the connection. If not provided, a default connection
        will be created with test credentials.
    :param options: Optional list of gRPC channel options.
    """
    self.grpc_server_url = grpc_server_url
    # Channel size limits are expressed in bytes.
    max_bytes: int = message_mb_size * 1024 * 1024
    self.options = [
        ('grpc.max_send_message_length', max_bytes),
        ('grpc.max_receive_message_length', max_bytes)
    ]
    if options:
        self.options.extend(options)
    self._create_connection(user)
    self._request_inputs = []
    self._config_fields = {}
    self._credentials = []
|
|
279
|
+
|
|
280
|
+
def _create_connection(self, user: SapioUser | None = None):
    """
    Create a SapioConnectionInfoPbo object with test credentials. This method can be overridden to
    create a user with specific credentials for testing.

    :param user: An optional SapioUser whose username, URL, GUID, and credentials are copied into
        the connection info. Any value the user does not supply falls back to a placeholder
        test value.
    """
    self.connection = SapioConnectionInfoPbo()
    self.connection.username = user.username if user and user.username else "Testing"
    self.connection.webservice_url = user.url if user and user.url else "https://localhost:8080/webservice/api"
    if user and user.guid:
        self.connection.app_guid = user.guid
    # Hard-coded placeholder RMI host/port values.
    self.connection.rmi_host.append("Testing")
    self.connection.rmi_port = 9001
    if user and user.password:
        # Password credentials are encoded as an HTTP basic auth header value.
        self.connection.secret_type = SapioUserSecretTypePbo.PASSWORD
        self.connection.secret = "Basic " + base64.b64encode(f'{user.username}:{user.password}'.encode()).decode()
    else:
        self.connection.secret_type = SapioUserSecretTypePbo.SESSION_TOKEN
        self.connection.secret = user.api_token if user and user.api_token else "test_api_token"
|
|
298
|
+
|
|
299
|
+
def add_binary_input(self, input_data: list[bytes]) -> None:
    """
    Add a binary input to the next request.

    :param input_data: The binary entries to include, one bytes object per item.
    """
    self._add_input(ContainerType.BINARY, StepBinaryContainerPbo(items=input_data))
|
|
304
|
+
|
|
305
|
+
def add_csv_input(self, input_data: list[dict[str, Any]]) -> None:
    """
    Add a CSV input to the next request.

    The header row is taken from the keys of the first row, and every row's values are
    stringified into the CSV cells.

    :param input_data: The CSV rows to include, one dict of column name to value per row.
    :raises ValueError: If input_data is empty, since no header row can be determined.
    """
    # Guard against an empty list, which previously surfaced as an opaque IndexError
    # when reading the header from input_data[0].
    if not input_data:
        raise ValueError("input_data must contain at least one row to build a CSV input.")
    header = StepCsvHeaderRowPbo(cells=list(input_data[0].keys()))
    csv_items = [StepCsvRowPbo(cells=[str(value) for value in row.values()]) for row in input_data]
    self._add_input(ContainerType.CSV, StepCsvContainerPbo(header=header, items=csv_items))
|
|
314
|
+
|
|
315
|
+
def add_json_input(self, input_data: list[dict[str, Any]]) -> None:
    """
    Add a JSON input to the next request. Each dictionary is serialized to a JSON string
    before being placed in the container.
    """
    serialized: list[str] = [json.dumps(entry) for entry in input_data]
    self._add_input(ContainerType.JSON, StepJsonContainerPbo(items=serialized))
|
|
320
|
+
|
|
321
|
+
def add_text_input(self, input_data: list[str]) -> None:
    """
    Add a text input to the next request.

    :param input_data: The text entries to include, one string per item.
    """
    self._add_input(ContainerType.TEXT, StepTextContainerPbo(items=input_data))
|
|
326
|
+
|
|
327
|
+
def clear_inputs(self) -> None:
    """
    Remove every input queued for the next request, emptying the list in place.
    Useful for starting a new request without the previous inputs.
    """
    del self._request_inputs[:]
|
|
333
|
+
|
|
334
|
+
def add_config_field(self, field_name: str, value: FieldValue | list[str]) -> None:
    """
    Add a configuration field value to the next request.

    :param field_name: The name of the configuration field.
    :param value: The value to set for the configuration field. If a list is provided, it will be
        converted to a comma-separated string.
    """
    if isinstance(value, list):
        value = ",".join(str(x) for x in value)
    # Wrap plain values in a protobuf field value; already-wrapped values pass through unchanged.
    if not isinstance(value, FieldValuePbo):
        value = ProtobufUtils.value_to_field_pbo(value)
    self._config_fields[field_name] = value
|
|
347
|
+
|
|
348
|
+
def add_config_fields(self, config_fields: dict[str, FieldValue | list[str]]) -> None:
    """
    Add multiple configuration field values to the next request, delegating each entry
    to add_config_field.

    :param config_fields: A dictionary of configuration field names and their corresponding values.
    """
    for field_name, field_value in config_fields.items():
        self.add_config_field(field_name, field_value)
|
|
356
|
+
|
|
357
|
+
def clear_configs(self) -> None:
    """
    Remove every configuration field value queued for the next request, emptying the
    dictionary in place. Useful for starting a new request without the previous configurations.
    """
    while self._config_fields:
        self._config_fields.popitem()
|
|
363
|
+
|
|
364
|
+
def add_credentials(self, credentials: list[ExternalCredentials]) -> None:
    """
    Add external credentials to the connection info for the next request.

    :param credentials: A list of ExternalCredentials objects to add; each is converted to its
        protobuf form (ExternalCredentialsPbo) before being stored.
    """
    for cred in credentials:
        self._credentials.append(cred.to_pbo())
|
|
372
|
+
|
|
373
|
+
def clear_credentials(self) -> None:
    """
    Discard every external credential queued for future requests, emptying the list in place.
    Useful for starting a new request without the previous credentials.
    """
    del self._credentials[:]
|
|
379
|
+
|
|
380
|
+
def clear_request(self) -> None:
|
|
381
|
+
"""
|
|
382
|
+
Clear all inputs and configuration fields that have been added to the next request.
|
|
383
|
+
This is useful if you want to start a new request without the previous inputs and configurations.
|
|
384
|
+
|
|
385
|
+
Credentials are not cleared, as they may be reused across multiple requests.
|
|
386
|
+
"""
|
|
387
|
+
self.clear_inputs()
|
|
388
|
+
self.clear_configs()
|
|
389
|
+
|
|
390
|
+
def _add_input(self, container_type: ContainerType, items: Any) -> None:
    """
    Helper method for adding inputs to the next request.

    :param container_type: The kind of container being added.
    :param items: The already-built container pbo matching the container type (e.g. a
        StepBinaryContainerPbo for ContainerType.BINARY).
    :raises ValueError: If the container type is not one of BINARY, CSV, JSON, or TEXT.
    """
    container: StepItemContainerPbo | None = None
    match container_type:
        # The content type doesn't matter when we're just testing.
        case ContainerType.BINARY:
            container = StepItemContainerPbo(content_type=ContentTypePbo(), binary_container=items)
        case ContainerType.CSV:
            container = StepItemContainerPbo(content_type=ContentTypePbo(), csv_container=items)
        case ContainerType.JSON:
            container = StepItemContainerPbo(content_type=ContentTypePbo(), json_container=items)
        case ContainerType.TEXT:
            container = StepItemContainerPbo(content_type=ContentTypePbo(), text_container=items)
        case _:
            raise ValueError(f"Unsupported container type: {container_type}")
    self._request_inputs.append(container)
|
|
408
|
+
|
|
409
|
+
def get_service_details(self) -> AgentDetailsResponsePbo:
    """
    Get the details of the agents from the server.

    :return: An AgentDetailsResponsePbo object containing the details of the agent service.
    """
    with grpc.insecure_channel(self.grpc_server_url, options=self.options) as channel:
        stub = AgentServiceStub(channel)
        return stub.GetAgentDetails(AgentDetailsRequestPbo(sapio_conn_info=self.connection))
|
|
418
|
+
|
|
419
|
+
def call_agent(self, agent_name: str, is_verbose: bool = True, is_dry_run: bool = False) -> AgentOutput:
|
|
420
|
+
"""
|
|
421
|
+
Send the request to the agent service for a particular agent name. This will send all the inputs that have been
|
|
422
|
+
added using the add_X_input functions.
|
|
423
|
+
|
|
424
|
+
:param agent_name: The name of the agent to call on the server.
|
|
425
|
+
:param is_verbose: If True, the agent will log verbosely.
|
|
426
|
+
:param is_dry_run: If True, the agent will not be executed, but the request will be validated.
|
|
427
|
+
:return: An AgentOutput object containing the results of the agent service call.
|
|
428
|
+
"""
|
|
429
|
+
print(f"Calling agent \"{agent_name}\"...")
|
|
430
|
+
with grpc.insecure_channel(self.grpc_server_url, options=self.options) as channel:
|
|
431
|
+
stub = AgentServiceStub(channel)
|
|
432
|
+
|
|
433
|
+
start = TimeUtil.now_in_millis()
|
|
434
|
+
response: ProcessStepResponsePbo = stub.ProcessData(
|
|
435
|
+
ProcessStepRequestPbo(
|
|
436
|
+
sapio_user=self.connection,
|
|
437
|
+
agent_name=agent_name,
|
|
438
|
+
config_field_values=self._config_fields,
|
|
439
|
+
dry_run=is_dry_run,
|
|
440
|
+
verbose_logging=is_verbose,
|
|
441
|
+
external_credential=self._credentials,
|
|
442
|
+
input=[
|
|
443
|
+
StepInputBatchPbo(is_partial=False, item_container=item)
|
|
444
|
+
for item in self._request_inputs
|
|
445
|
+
]
|
|
446
|
+
)
|
|
447
|
+
)
|
|
448
|
+
end = TimeUtil.now_in_millis()
|
|
449
|
+
print(f"Agent call completed in {(end - start) / 1000.:.3f} seconds")
|
|
450
|
+
|
|
451
|
+
results = AgentOutput(agent_name)
|
|
452
|
+
|
|
453
|
+
match response.status:
|
|
454
|
+
case ProcessStepResponseStatusPbo.SUCCESS:
|
|
455
|
+
results.status = "Success"
|
|
456
|
+
case ProcessStepResponseStatusPbo.FAILURE:
|
|
457
|
+
results.status = "Failure"
|
|
458
|
+
case _:
|
|
459
|
+
results.status = "Unknown"
|
|
460
|
+
results.message = response.status_message
|
|
461
|
+
|
|
462
|
+
for i, output in enumerate(response.output):
|
|
463
|
+
container = output.item_container
|
|
464
|
+
|
|
465
|
+
if container.HasField("binary_container"):
|
|
466
|
+
results.index_map[i] = (ContainerType.BINARY, len(results.binary_output))
|
|
467
|
+
results.binary_output.append(list(container.binary_container.items))
|
|
468
|
+
elif container.HasField("csv_container"):
|
|
469
|
+
results.index_map[i] = (ContainerType.CSV, len(results.csv_output))
|
|
470
|
+
csv_output: list[dict[str, Any]] = []
|
|
471
|
+
for row in container.csv_container.items:
|
|
472
|
+
output_row: dict[str, Any] = {}
|
|
473
|
+
for j, header in enumerate(container.csv_container.header.cells):
|
|
474
|
+
output_row[header] = row.cells[j]
|
|
475
|
+
csv_output.append(output_row)
|
|
476
|
+
results.csv_output.append(csv_output)
|
|
477
|
+
elif container.HasField("json_container"):
|
|
478
|
+
results.index_map[i] = (ContainerType.JSON, len(results.json_output))
|
|
479
|
+
results.json_output.append([json.loads(x) for x in container.json_container.items])
|
|
480
|
+
elif container.HasField("text_container"):
|
|
481
|
+
results.index_map[i] = (ContainerType.TEXT, len(results.text_output))
|
|
482
|
+
results.text_output.append(list(container.text_container.items))
|
|
483
|
+
|
|
484
|
+
for record in response.new_records:
|
|
485
|
+
field_map: dict[str, Any] = {x: ProtobufUtils.field_pbo_to_value(y) for x, y in record.fields.items()}
|
|
486
|
+
results.new_records.append(field_map)
|
|
487
|
+
|
|
488
|
+
results.logs.extend(response.log)
|
|
489
|
+
|
|
490
|
+
return results
|
|
491
|
+
|
|
492
|
+
|
|
493
|
+
class TestConverterClient:
    """
    A client for testing a ConverterService.
    """
    # URL of the gRPC server hosting the converter service.
    grpc_server_url: str
    # Optional gRPC channel options passed to every channel this client opens.
    options: list[tuple[str, Any]] | None

    def __init__(self, grpc_server_url: str, options: list[tuple[str, Any]] | None = None):
        """
        :param grpc_server_url: The URL of the gRPC server to connect to.
        :param options: Optional list of gRPC channel options.
        """
        self.grpc_server_url = grpc_server_url
        self.options = options

    def get_converter_details(self) -> ConverterDetailsResponsePbo:
        """
        Get the details of the converters from the server.

        :return: A ConverterDetailsResponsePbo object containing the details of the converter service.
        """
        with grpc.insecure_channel(self.grpc_server_url, options=self.options) as channel:
            stub = ConverterServiceStub(channel)
            return stub.GetConverterDetails(ConverterDetailsRequestPbo())

    def convert_content(self, input_container: StepItemContainerPbo, target_type: ContentTypePbo) \
            -> StepItemContainerPbo:
        """
        Convert the content of the input container to the target content type.

        :param input_container: The input container to convert. This container must have a ContentTypePbo set that
            matches one of the input types that the converter service supports.
        :param target_type: The target content type to convert to. This must match one of the target types that the
            converter service supports.
        :return: A StepItemContainerPbo object containing the converted content.
        """
        with grpc.insecure_channel(self.grpc_server_url, options=self.options) as channel:
            stub = ConverterServiceStub(channel)
            response: ConvertResponsePbo = stub.ConvertContent(
                ConvertRequestPbo(item_container=input_container, target_content_type=target_type)
            )
            return response.item_container
|