alita-sdk 0.3.326__py3-none-any.whl → 0.3.328__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of alita-sdk might be problematic.
- alita_sdk/runtime/langchain/langraph_agent.py +48 -3
- alita_sdk/runtime/langchain/utils.py +3 -0
- alita_sdk/tools/figma/api_wrapper.py +84 -50
- alita_sdk/tools/jira/api_wrapper.py +5 -1
- alita_sdk/tools/utils/__init__.py +2 -0
- {alita_sdk-0.3.326.dist-info → alita_sdk-0.3.328.dist-info}/METADATA +2 -3
- {alita_sdk-0.3.326.dist-info → alita_sdk-0.3.328.dist-info}/RECORD +10 -10
- {alita_sdk-0.3.326.dist-info → alita_sdk-0.3.328.dist-info}/WHEEL +0 -0
- {alita_sdk-0.3.326.dist-info → alita_sdk-0.3.328.dist-info}/licenses/LICENSE +0 -0
- {alita_sdk-0.3.326.dist-info → alita_sdk-0.3.328.dist-info}/top_level.txt +0 -0
--- a/alita_sdk/runtime/langchain/langraph_agent.py
+++ b/alita_sdk/runtime/langchain/langraph_agent.py
@@ -248,19 +248,36 @@ class StateModifierNode(Runnable):
 
         # Collect input variables from state
         input_data = {}
+
         for var in self.input_variables:
            if var in state:
                 input_data[var] = state.get(var)
-
+        type_of_output = type(state.get(self.output_variables[0])) if self.output_variables else None
         # Render the template using Jinja
         import json
+        import base64
         from jinja2 import Environment
 
         def from_json(value):
-
+            """Convert JSON string to Python object"""
+            try:
+                return json.loads(value)
+            except (json.JSONDecodeError, TypeError) as e:
+                logger.warning(f"Failed to parse JSON value: {e}")
+                return value
+
+        def base64_to_string(value):
+            """Convert base64 encoded string to regular string"""
+            try:
+                return base64.b64decode(value).decode('utf-8')
+            except Exception as e:
+                logger.warning(f"Failed to decode base64 value: {e}")
+                return value
+
 
         env = Environment()
         env.filters['from_json'] = from_json
+        env.filters['base64ToString'] = base64_to_string
 
         template = env.from_string(self.template)
         rendered_message = template.render(**input_data)
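The two filters registered above can be exercised on their own; a minimal sketch, assuming only jinja2 is installed (the template text and variable names are illustrative, not taken from the SDK):

import base64
import json
from jinja2 import Environment

env = Environment()
env.filters['from_json'] = json.loads  # simplified stand-in for the from_json filter above
env.filters['base64ToString'] = lambda v: base64.b64decode(v).decode('utf-8')

# 'payload' and 'blob' are made-up state variables
template = env.from_string("{{ (payload | from_json)['name'] }} / {{ blob | base64ToString }}")
print(template.render(payload='{"name": "Alita"}',
                      blob=base64.b64encode(b'hello').decode()))  # -> Alita / hello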
@@ -269,7 +286,35 @@ class StateModifierNode(Runnable):
         if len(self.output_variables) > 0:
             # Use the first output variable to store the rendered content
             output_var = self.output_variables[0]
-
+
+            # Convert rendered_message to the appropriate type
+            if type_of_output is not None:
+                try:
+                    if type_of_output == dict:
+                        result[output_var] = json.loads(rendered_message) if isinstance(rendered_message, str) else dict(rendered_message)
+                    elif type_of_output == list:
+                        result[output_var] = json.loads(rendered_message) if isinstance(rendered_message, str) else list(rendered_message)
+                    elif type_of_output == int:
+                        result[output_var] = int(rendered_message)
+                    elif type_of_output == float:
+                        result[output_var] = float(rendered_message)
+                    elif type_of_output == str:
+                        result[output_var] = str(rendered_message)
+                    elif type_of_output == bool:
+                        if isinstance(rendered_message, str):
+                            result[output_var] = rendered_message.lower() in ('true', '1', 'yes', 'on')
+                        else:
+                            result[output_var] = bool(rendered_message)
+                    elif type_of_output == type(None):
+                        result[output_var] = None
+                    else:
+                        # Fallback to string if type is not recognized
+                        result[output_var] = str(rendered_message)
+                except (ValueError, TypeError, json.JSONDecodeError) as e:
+                    logger.warning(f"Failed to convert rendered_message to {type_of_output.__name__}: {e}. Using string fallback.")
+                    result[output_var] = str(rendered_message)
+            else:
+                result[output_var] = rendered_message
 
         # Clean up specified variables (make them empty, not delete)
 
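A rough illustration of the conversion rules introduced above, with made-up rendered strings: the dict and list branches go through json.loads, and bool accepts the usual truthy spellings.

import json

samples = [('{"a": 1}', dict), ('[1, 2, 3]', list), ('42', int), ('yes', bool)]
for rendered, type_of_output in samples:
    if type_of_output in (dict, list):
        converted = json.loads(rendered)
    elif type_of_output is bool:
        converted = rendered.lower() in ('true', '1', 'yes', 'on')
    else:
        converted = type_of_output(rendered)
    print(type_of_output.__name__, repr(converted))
# dict {'a': 1}
# list [1, 2, 3]
# int 42
# bool True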
--- a/alita_sdk/runtime/langchain/utils.py
+++ b/alita_sdk/runtime/langchain/utils.py
@@ -121,6 +121,8 @@ def parse_type(type_str):
     """Parse a type string into an actual Python type."""
     try:
         # Evaluate the type string using builtins and imported modules
+        if type_str == 'number':
+            type_str = 'int'
         return eval(type_str, {**vars(builtins), **globals()})
     except Exception as e:
         print(f"Error parsing type: {e}")
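The practical effect of the new alias, sketched outside the SDK (the helper below mirrors the patched function, minus its error handling): 'number' is not a Python builtin, so it is mapped to int before the string is evaluated.

import builtins

def parse_type(type_str):
    # 'number' is not a builtin type name, so map it to int before evaluating
    if type_str == 'number':
        type_str = 'int'
    return eval(type_str, {**vars(builtins), **globals()})

print(parse_type('number'))  # <class 'int'>
print(parse_type('dict'))    # <class 'dict'>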
@@ -138,6 +140,7 @@ def create_state(data: Optional[dict] = None):
             state_dict[key] = Annotated[list[AnyMessage], add_messages]
         elif value in ['str', 'int', 'float', 'bool', 'list', 'dict', 'number']:
             state_dict[key] = parse_type(value)
+    logger.debug(f"Created state: {state_dict}")
     return TypedDict('State', state_dict)
 
 def create_typed_dict_from_yaml(data):
--- a/alita_sdk/tools/figma/api_wrapper.py
+++ b/alita_sdk/tools/figma/api_wrapper.py
@@ -307,76 +307,110 @@ class FigmaApiWrapper(NonCodeIndexerToolkit):
         else:
             raise ValueError("You must provide at least project_id or file_keys_include.")
 
+    def has_image_representation(self, node):
+        node_type = node.get('type', '').lower()
+        default_images_types = [
+            'image', 'canvas', 'frame', 'vector', 'table', 'slice', 'sticky', 'shape_with_text', 'connector'
+        ]
+        # filter nodes of type which has image representation
+        # or rectangles with image as background
+        if (node_type in default_images_types
+                or (node_type == 'rectangle' and 'fills' in node and any(
+                    fill.get('type') == 'IMAGE' for fill in node['fills'] if isinstance(fill, dict)))):
+            return True
+        return False
+
+    def get_texts_recursive(self, node):
+        texts = []
+        node_type = node.get('type', '').lower()
+        if node_type == 'text':
+            texts.append(node.get('characters', ''))
+        if 'children' in node:
+            for child in node['children']:
+                texts.extend(self.get_texts_recursive(child))
+        return texts
+
     def _process_document(self, document: Document) -> Generator[Document, None, None]:
         file_key = document.metadata.get('id', '')
         self._log_tool_event(f"Loading details (images) for `{file_key}`")
-        #
         figma_pages = self._client.get_file(file_key).document.get('children', [])
         node_ids_include = document.metadata.pop('figma_pages_include', [])
         node_ids_exclude = document.metadata.pop('figma_pages_exclude', [])
-        node_types_include = [t.lower() for t in document.metadata.pop('figma_nodes_include', [])]
-        node_types_exclude = [t.lower() for t in document.metadata.pop('figma_nodes_exclude', [])]
+        node_types_include = [t.strip().lower() for t in document.metadata.pop('figma_nodes_include', [])]
+        node_types_exclude = [t.strip().lower() for t in document.metadata.pop('figma_nodes_exclude', [])]
         self._log_tool_event(f"Included pages: {node_ids_include}. Excluded pages: {node_ids_exclude}.")
         if node_ids_include:
             figma_pages = [node for node in figma_pages if ('id' in node and node['id'].replace(':', '-') in node_ids_include)]
         elif node_ids_exclude:
             figma_pages = [node for node in figma_pages if ('id' in node and node['id'].replace(':', '-') not in node_ids_exclude)]
 
-        (old lines 325-364 removed; their contents are not rendered in this diff view)
+        image_nodes = []
+        text_nodes = {}
+        for page in figma_pages:
+            for node in page.get('children', []):
+                # filter by node_type if specified any include or exclude
+                node_type = node.get('type', '').lower()
+                include = node_types_include and node_type in node_types_include
+                exclude = node_types_exclude and node_type not in node_types_exclude
+                no_filter = not node_types_include and not node_types_exclude
+
+                if include or exclude or no_filter:
+                    node_id = node.get('id')
+                    if node_id:
+                        if self.has_image_representation(node):
+                            image_nodes.append(node['id'])
+                        else:
+                            text_nodes[node['id']] = self.get_texts_recursive(node)
+        # process image nodes
+        if image_nodes:
+            images = self._client.get_file_images(file_key, image_nodes).images or {}
+            total_images = len(images)
+            if total_images == 0:
+                logging.info(f"No images found for file {file_key}.")
+                return
+            progress_step = max(1, total_images // 10)
+            for idx, (node_id, image_url) in enumerate(images.items(), 1):
+                if not image_url:
+                    logging.warning(f"Image URL not found for node_id {node_id} in file {file_key}. Skipping.")
+                    continue
+                response = requests.get(image_url)
+                if response.status_code == 200:
+                    content_type = response.headers.get('Content-Type', '')
+                    if 'text/html' not in content_type.lower():
+                        extension = f".{content_type.split('/')[-1]}" if content_type.startswith('image') else '.txt'
+                        page_content = load_content_from_bytes(
+                            file_content=response.content,
+                            extension=extension, llm=self.llm)
+                        yield Document(
+                            page_content=page_content,
+                            metadata={
+                                'id': node_id,
+                                'updated_on': document.metadata.get('updated_on', ''),
+                                'file_key': file_key,
+                                'node_id': node_id,
+                                'image_url': image_url,
+                                'type': 'image'
+                            }
+                        )
+                if idx % progress_step == 0 or idx == total_images:
+                    percent = int((idx / total_images) * 100)
+                    msg = f"Processed {idx}/{total_images} images ({percent}%) for file {file_key}."
+                    logging.info(msg)
+                    self._log_tool_event(msg)
+        # process text nodes
+        if text_nodes:
+            for node_id, texts in text_nodes.items():
+                if texts:
                     yield Document(
-                        page_content=
+                        page_content="\n".join(texts),
                         metadata={
                             'id': node_id,
                             'updated_on': document.metadata.get('updated_on', ''),
                             'file_key': file_key,
                             'node_id': node_id,
-                            '
+                            'type': 'text'
                         }
                     )
-                    if idx % progress_step == 0 or idx == total_images:
-                        percent = int((idx / total_images) * 100)
-                        msg = f"Processed {idx}/{total_images} images ({percent}%) for file {file_key}."
-                        logging.info(msg)
-                        self._log_tool_event(msg)
 
     def _remove_metadata_keys(self):
         return super()._remove_metadata_keys() + ['figma_pages_include', 'figma_pages_exclude', 'figma_nodes_include', 'figma_nodes_exclude']
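To see how the two new Figma helpers split nodes between image rendering and text extraction, here is a standalone sketch with invented node payloads (the functions below are simplified stand-ins for the methods above; real Figma nodes carry many more fields):

# Minimal stand-ins applied to toy Figma-like nodes (values are invented)
def has_image_representation(node):
    node_type = node.get('type', '').lower()
    image_types = ['image', 'canvas', 'frame', 'vector', 'table', 'slice',
                   'sticky', 'shape_with_text', 'connector']
    return (node_type in image_types
            or (node_type == 'rectangle' and any(
                fill.get('type') == 'IMAGE'
                for fill in node.get('fills', []) if isinstance(fill, dict))))

def get_texts_recursive(node):
    texts = [node.get('characters', '')] if node.get('type', '').lower() == 'text' else []
    for child in node.get('children', []):
        texts.extend(get_texts_recursive(child))
    return texts

image_rect = {'id': '1:3', 'type': 'RECTANGLE', 'fills': [{'type': 'IMAGE'}]}
text_group = {'id': '1:4', 'type': 'GROUP', 'children': [
    {'type': 'TEXT', 'characters': 'Login'},
    {'type': 'TEXT', 'characters': 'Password'},
]}

print(has_image_representation(image_rect))  # True  -> node is fetched as an image
print(get_texts_recursive(text_group))       # ['Login', 'Password'] -> node is indexed as text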
--- a/alita_sdk/tools/jira/api_wrapper.py
+++ b/alita_sdk/tools/jira/api_wrapper.py
@@ -563,12 +563,14 @@ class JiraApiWrapper(NonCodeIndexerToolkit):
         Use the appropriate issue link type (e.g., "Test", "Relates", "Blocks").
         If we use "Test" linktype, the test is inward issue, the story/other issue is outward issue.."""
 
+        comment = "This test is linked to the story."
+        comment_body = {"content": [{"content": [{"text": comment,"type": "text"}],"type": "paragraph"}],"type": "doc","version": 1} if self.api_version == "3" else comment
         link_data = {
             "type": {"name": f"{linktype}"},
             "inwardIssue": {"key": f"{inward_issue_key}"},
             "outwardIssue": {"key": f"{outward_issue_key}"},
             "comment": {
-                "body":
+                "body": comment_body
             }
         }
         self._client.create_issue_link(link_data)
@@ -706,6 +708,8 @@ class JiraApiWrapper(NonCodeIndexerToolkit):
     def add_comments(self, issue_key: str, comment: str):
         """ Add a comment to a Jira issue."""
         try:
+            if self.api_version == '3':
+                comment = {"content": [{"content": [{"text": comment,"type": "text"}],"type": "paragraph"}],"type": "doc","version": 1}
             self._client.issue_add_comment(issue_key, comment)
             issue_url = f"{self._client.url}browse/{issue_key}"
             output = f"Done. Comment is added for issue {issue_key}. You can view it at {issue_url}"
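Both Jira changes hinge on the same point: REST API v2 accepts a plain string as a comment body, while v3 expects an Atlassian Document Format (ADF) object. A small sketch of the body shape built in the hunks above (the helper name is ours, not the SDK's):

def build_comment_body(comment: str, api_version: str):
    # Mirrors the comment_body expression above: ADF document for v3, plain text otherwise
    if api_version == "3":
        return {"content": [{"content": [{"text": comment, "type": "text"}],
                             "type": "paragraph"}],
                "type": "doc", "version": 1}
    return comment

print(build_comment_body("This test is linked to the story.", "2"))  # plain string
print(build_comment_body("This test is linked to the story.", "3"))  # ADF dict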
--- a/alita_sdk/tools/utils/__init__.py
+++ b/alita_sdk/tools/utils/__init__.py
@@ -55,6 +55,8 @@ def parse_type(type_str):
     """Parse a type string into an actual Python type."""
     try:
         # Evaluate the type string using builtins and imported modules
+        if type_str == 'number':
+            type_str = 'int'
         return eval(type_str, {**vars(builtins), **globals()})
     except Exception as e:
         print(f"Error parsing type: {e}")
--- alita_sdk-0.3.326.dist-info/METADATA
+++ alita_sdk-0.3.328.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: alita_sdk
-Version: 0.3.326
+Version: 0.3.328
 Summary: SDK for building langchain agents using resources from Alita
 Author-email: Artem Rozumenko <artyom.rozumenko@gmail.com>, Mikalai Biazruchka <mikalai_biazruchka@epam.com>, Roman Mitusov <roman_mitusov@epam.com>, Ivan Krakhmaliuk <lifedj27@gmail.com>, Artem Dubrovskiy <ad13box@gmail.com>
 License-Expression: Apache-2.0
@@ -74,8 +74,7 @@ Requires-Dist: paramiko==3.3.1; extra == "tools"
 Requires-Dist: pygithub==2.3.0; extra == "tools"
 Requires-Dist: python-gitlab==4.5.0; extra == "tools"
 Requires-Dist: gitpython==3.1.43; extra == "tools"
-Requires-Dist: atlassian-python-api~=
-Requires-Dist: atlassian_python_api==3.41.16; extra == "tools"
+Requires-Dist: atlassian-python-api~=4.0.7; extra == "tools"
 Requires-Dist: jira==3.8.0; extra == "tools"
 Requires-Dist: qtest-swagger-client==0.0.3; extra == "tools"
 Requires-Dist: testrail-api==1.13.2; extra == "tools"
--- alita_sdk-0.3.326.dist-info/RECORD
+++ alita_sdk-0.3.328.dist-info/RECORD
@@ -44,11 +44,11 @@ alita_sdk/runtime/langchain/assistant.py,sha256=1Eq8BIefp8suhbC9CssoOXtC-plkemoU
 alita_sdk/runtime/langchain/chat_message_template.py,sha256=kPz8W2BG6IMyITFDA5oeb5BxVRkHEVZhuiGl4MBZKdc,2176
 alita_sdk/runtime/langchain/constants.py,sha256=eHVJ_beJNTf1WJo4yq7KMK64fxsRvs3lKc34QCXSbpk,3319
 alita_sdk/runtime/langchain/indexer.py,sha256=0ENHy5EOhThnAiYFc7QAsaTNp9rr8hDV_hTK8ahbatk,37592
-alita_sdk/runtime/langchain/langraph_agent.py,sha256=
+alita_sdk/runtime/langchain/langraph_agent.py,sha256=z_Bontl600nV7ombsomKXtRCuwCJc-5b5P91wapHYo4,47523
 alita_sdk/runtime/langchain/mixedAgentParser.py,sha256=M256lvtsL3YtYflBCEp-rWKrKtcY1dJIyRGVv7KW9ME,2611
 alita_sdk/runtime/langchain/mixedAgentRenderes.py,sha256=asBtKqm88QhZRILditjYICwFVKF5KfO38hu2O-WrSWE,5964
 alita_sdk/runtime/langchain/store_manager.py,sha256=i8Fl11IXJhrBXq1F1ukEVln57B1IBe-tqSUvfUmBV4A,2218
-alita_sdk/runtime/langchain/utils.py,sha256=
+alita_sdk/runtime/langchain/utils.py,sha256=r5X00cgUjx4OwjwxHCnSyO-AbAw2gPVvBx2KfQr87lw,6742
 alita_sdk/runtime/langchain/agents/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 alita_sdk/runtime/langchain/agents/xml_chat.py,sha256=Mx7PK5T97_GrFCwHHZ3JZP42S7MwtUzV0W-_8j6Amt8,6212
 alita_sdk/runtime/langchain/document_loaders/AlitaBDDScenariosLoader.py,sha256=4kFU1ijrM1Jw7cywQv8mUiBHlE6w-uqfzSZP4hUV5P4,3771
@@ -235,7 +235,7 @@ alita_sdk/tools/custom_open_api/api_wrapper.py,sha256=sDSFpvEqpSvXHGiBISdQQcUecf
 alita_sdk/tools/elastic/__init__.py,sha256=iwnSRppRpzvJ1da2K3Glu8Uu41MhBDCYbguboLkEbW0,2818
 alita_sdk/tools/elastic/api_wrapper.py,sha256=pl8CqQxteJAGwyOhMcld-ZgtOTFwwbv42OITQVe8rM0,1948
 alita_sdk/tools/figma/__init__.py,sha256=W6vIMMkZI2Lmpg6_CRRV3oadaIbVI-qTLmKUh6enqWs,4509
-alita_sdk/tools/figma/api_wrapper.py,sha256=
+alita_sdk/tools/figma/api_wrapper.py,sha256=-vsIy0Y4UFJBjKumRAOqmgS7pEXVn0UjaNSNN7pFsLs,29351
 alita_sdk/tools/github/__init__.py,sha256=2rHu0zZyZGnLC5CkHgDIhe14N9yCyaEfrrt7ydH8478,5191
 alita_sdk/tools/github/api_wrapper.py,sha256=uDwYckdnpYRJtb0uZnDkaz2udvdDLVxuCh1tSwspsiU,8411
 alita_sdk/tools/github/github_client.py,sha256=nxnSXsDul2PPbWvYZS8TmAFFmR-5ALyakNoV5LN2D4U,86617
@@ -260,7 +260,7 @@ alita_sdk/tools/google/bigquery/tool.py,sha256=Esf9Hsp8I0e7-5EdkFqQ-bid0cfrg-bfS
 alita_sdk/tools/google_places/__init__.py,sha256=QtmBCI0bHDK79u4hsCSWFcUihu-h4EmPSh9Yll7zz3w,3590
 alita_sdk/tools/google_places/api_wrapper.py,sha256=7nZly6nk4f4Tm7s2MVdnnwlb-1_WHRrDhyjDiqoyPjA,4674
 alita_sdk/tools/jira/__init__.py,sha256=G-9qnOYKFWM_adG0QFexh5-2pj_WaxIxxZanB3ARFqI,6339
-alita_sdk/tools/jira/api_wrapper.py,sha256=
+alita_sdk/tools/jira/api_wrapper.py,sha256=-juLuxeOCyDKb_-ZS8eTOeUJWEHKcCiBlXyFY2vbL4Q,81296
 alita_sdk/tools/keycloak/__init__.py,sha256=0WB9yXMUUAHQRni1ghDEmd7GYa7aJPsTVlZgMCM9cQ0,3050
 alita_sdk/tools/keycloak/api_wrapper.py,sha256=cOGr0f3S3-c6tRDBWI8wMnetjoNSxiV5rvC_0VHb8uw,3100
 alita_sdk/tools/llm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -325,7 +325,7 @@ alita_sdk/tools/testio/__init__.py,sha256=NEvQtzsffqAXryaffVk0GpdcxZQ1AMkfeztnxH
 alita_sdk/tools/testio/api_wrapper.py,sha256=BvmL5h634BzG6p7ajnQLmj-uoAw1gjWnd4FHHu1h--Q,21638
 alita_sdk/tools/testrail/__init__.py,sha256=Xg4nVjULL_D8JpIXLYXppnwUfGF4-lguFwKHmP5VwxM,4696
 alita_sdk/tools/testrail/api_wrapper.py,sha256=PKhtf04C6PFDexGCAJm-hjA9Gpu4crx6EXKT5K-b_Pk,32985
-alita_sdk/tools/utils/__init__.py,sha256=
+alita_sdk/tools/utils/__init__.py,sha256=W9rCCUPtHCP5nGAbWp0n5jaNA84572aiRoqKneBnaS4,3330
 alita_sdk/tools/utils/available_tools_decorator.py,sha256=IbrdfeQkswxUFgvvN7-dyLMZMyXLiwvX7kgi3phciCk,273
 alita_sdk/tools/utils/content_parser.py,sha256=7k5Ddv3Nzp3UoocgslwwSXi1G9ZR7sXzj6593IDeOcM,14063
 alita_sdk/tools/vector_adapters/VectorStoreAdapter.py,sha256=ypBEAkFRGHv5edW0N9rdo1yKurNGQ4pRVEWtrN_7SeA,17656
@@ -349,8 +349,8 @@ alita_sdk/tools/zephyr_scale/api_wrapper.py,sha256=kT0TbmMvuKhDUZc0i7KO18O38JM9S
 alita_sdk/tools/zephyr_squad/__init__.py,sha256=0ne8XLJEQSLOWfzd2HdnqOYmQlUliKHbBED5kW_Vias,2895
 alita_sdk/tools/zephyr_squad/api_wrapper.py,sha256=kmw_xol8YIYFplBLWTqP_VKPRhL_1ItDD0_vXTe_UuI,14906
 alita_sdk/tools/zephyr_squad/zephyr_squad_cloud_client.py,sha256=R371waHsms4sllHCbijKYs90C-9Yu0sSR3N4SUfQOgU,5066
-alita_sdk-0.3.326.dist-info/
-alita_sdk-0.3.326.dist-info/
-alita_sdk-0.3.326.dist-info/
-alita_sdk-0.3.326.dist-info/
-alita_sdk-0.3.326.dist-info/
+alita_sdk-0.3.328.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+alita_sdk-0.3.328.dist-info/METADATA,sha256=jRVOPK8heveuCV9P6VcjO4DE5h6kMdu2TUXKU6VOH7Q,18835
+alita_sdk-0.3.328.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+alita_sdk-0.3.328.dist-info/top_level.txt,sha256=0vJYy5p_jK6AwVb1aqXr7Kgqgk3WDtQ6t5C-XI9zkmg,10
+alita_sdk-0.3.328.dist-info/RECORD,,
File without changes: {alita_sdk-0.3.326.dist-info → alita_sdk-0.3.328.dist-info}/WHEEL
File without changes: {alita_sdk-0.3.326.dist-info → alita_sdk-0.3.328.dist-info}/licenses/LICENSE
File without changes: {alita_sdk-0.3.326.dist-info → alita_sdk-0.3.328.dist-info}/top_level.txt