alita-sdk 0.3.602__py3-none-any.whl → 0.3.609__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of alita-sdk might be problematic.
- alita_sdk/cli/agents.py +108 -826
- alita_sdk/cli/testcases/__init__.py +94 -0
- alita_sdk/cli/testcases/data_generation.py +119 -0
- alita_sdk/cli/testcases/discovery.py +96 -0
- alita_sdk/cli/testcases/executor.py +84 -0
- alita_sdk/cli/testcases/logger.py +85 -0
- alita_sdk/cli/testcases/parser.py +172 -0
- alita_sdk/cli/testcases/prompts.py +91 -0
- alita_sdk/cli/testcases/reporting.py +125 -0
- alita_sdk/cli/testcases/setup.py +108 -0
- alita_sdk/cli/testcases/test_runner.py +282 -0
- alita_sdk/cli/testcases/utils.py +39 -0
- alita_sdk/cli/testcases/validation.py +90 -0
- alita_sdk/cli/testcases/workflow.py +196 -0
- alita_sdk/configurations/openapi.py +2 -2
- alita_sdk/runtime/clients/artifact.py +1 -1
- alita_sdk/runtime/tools/artifact.py +253 -8
- alita_sdk/runtime/tools/llm.py +12 -11
- alita_sdk/tools/bitbucket/api_wrapper.py +31 -30
- alita_sdk/tools/bitbucket/cloud_api_wrapper.py +49 -35
- alita_sdk/tools/confluence/api_wrapper.py +8 -1
- alita_sdk/tools/elitea_base.py +40 -36
- alita_sdk/tools/figma/api_wrapper.py +140 -83
- alita_sdk/tools/github/github_client.py +18 -10
- alita_sdk/tools/github/graphql_client_wrapper.py +1 -0
- alita_sdk/tools/utils/text_operations.py +156 -52
- {alita_sdk-0.3.602.dist-info → alita_sdk-0.3.609.dist-info}/METADATA +1 -1
- {alita_sdk-0.3.602.dist-info → alita_sdk-0.3.609.dist-info}/RECORD +32 -19
- {alita_sdk-0.3.602.dist-info → alita_sdk-0.3.609.dist-info}/WHEEL +0 -0
- {alita_sdk-0.3.602.dist-info → alita_sdk-0.3.609.dist-info}/entry_points.txt +0 -0
- {alita_sdk-0.3.602.dist-info → alita_sdk-0.3.609.dist-info}/licenses/LICENSE +0 -0
- {alita_sdk-0.3.602.dist-info → alita_sdk-0.3.609.dist-info}/top_level.txt +0 -0
alita_sdk/tools/bitbucket/api_wrapper.py
CHANGED
@@ -270,11 +270,20 @@ class BitbucketAPIWrapper(CodeIndexerToolkit):
             >>>> NEW
             branch(str): branch name (by default: active_branch)
         Returns:
-            str: A success or failure
+            str | ToolException: A success message or a ToolException on failure.
         """
         try:
-            … (2 lines not shown)
+            # Use the shared edit_file logic from BaseCodeToolApiWrapper, operating on
+            # this wrapper instance, which provides _read_file and _write_file.
+            result = self.edit_file(
+                file_path=file_path,
+                branch=branch,
+                file_query=update_query,
+            )
+            return result
+        except ToolException as e:
+            # Pass through ToolExceptions as-is so callers can handle them uniformly.
+            return e
         except Exception as e:
             return ToolException(f"File was not updated due to error: {str(e)}")

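Note on the hunk above: update_file now simply forwards update_query into the shared edit_file path, which consumes OLD/NEW marker blocks (the `>>>> NEW` fragment in the docstring is part of that format's example). Below is a minimal sketch of how such a query is structured and applied; the delimiter strings are an assumption inferred from the docstring fragment, and the real parser is parse_old_new_markers in alita_sdk/tools/utils/text_operations.py.

import re
from typing import List, Tuple

# Hypothetical update_query; delimiters assumed from the '>>>> NEW' docstring fragment
UPDATE_QUERY = """<<<< OLD
def greet():
    print("hello")
>>>> NEW
def greet():
    print("hello, world")
"""

def parse_old_new_blocks(query: str) -> List[Tuple[str, str]]:
    # Capture each OLD block and its paired NEW block, up to the next OLD or end of input
    pattern = re.compile(r"<<<< OLD\n(.*?)\n>>>> NEW\n(.*?)(?=\n<<<< OLD|\Z)", re.DOTALL)
    return [(m.group(1), m.group(2)) for m in pattern.finditer(query)]

content = 'def greet():\n    print("hello")\n'
for old, new in parse_old_new_blocks(UPDATE_QUERY):
    if old.strip():  # skip empty OLD blocks, as edit_file does
        content = content.replace(old, new)
print(content)

Returning the ToolException (rather than raising it) keeps the failure inside the tool result, so agent frameworks can surface it to the model instead of aborting the run.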
@@ -415,37 +424,29 @@
         file_path: str,
         content: str,
         branch: str = None,
-        commit_message: str = None
+        commit_message: str = None,
     ) -> str:
+        """Write content to a file (create or update) via the underlying Bitbucket client.
+
+        This delegates to the low-level BitbucketServerApi/BitbucketCloudApi `_write_file`
+        implementations, so all backend-specific commit behavior (server vs cloud) is
+        centralized there. Used by BaseCodeToolApiWrapper.edit_file.
         """
-
-
-        Parameters:
-            file_path: Path to the file
-            content: New file content
-            branch: Branch name (uses active branch if None)
-            commit_message: Commit message (not used by Bitbucket API)
-
-        Returns:
-            Success message
-        """
+        branch = branch or self._active_branch
         try:
-            … (11 lines not shown)
-            # File doesn't exist, create it
-            self._bitbucket.create_file(file_path=file_path, file_contents=content, branch=branch)
-            return f"Created file {file_path}"
+            # Delegate actual write/commit to the underlying API wrapper, which
+            # implements _write_file(file_path, content, branch, commit_message).
+            self._bitbucket._write_file(
+                file_path=file_path,
+                content=content,
+                branch=branch,
+                commit_message=commit_message or f"Update {file_path}",
+            )
+            return f"Update {file_path}"
+        except ToolException:
+            raise
         except Exception as e:
-            raise ToolException(f"Unable to write file {file_path}: {str(e)}")
+            raise ToolException(f"Unable to write file {file_path} on branch {branch}: {str(e)}")

 @extend_with_parent_available_tools
 @extend_with_file_operations
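This write path is the other half of a template-method split: BaseCodeToolApiWrapper.edit_file owns the OLD/NEW matching logic and calls back into _read_file/_write_file. A hedged sketch of the minimal contract a toolkit must satisfy; only _write_file's signature is spelled out in the hunk, the _read_file signature here is an assumption.

# Stand-in toolkit showing the hook contract; not an actual SDK class
class InMemoryToolkit:
    def __init__(self) -> None:
        self._files = {"main": {"README.md": "hello\n"}}

    def _read_file(self, file_path: str, branch: str) -> str:
        # edit_file reads the current content through this hook (signature assumed)
        return self._files[branch][file_path]

    def _write_file(self, file_path: str, content: str, branch: str, commit_message: str) -> str:
        # edit_file commits the updated content through this hook
        self._files[branch][file_path] = content
        return f"Update {file_path}"

With both hooks present, the @extend_with_file_operations decorator (see the elitea_base.py hunks below) auto-registers the read_file_chunk, read_multiple_files, search_file, and edit_file tools.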
alita_sdk/tools/bitbucket/cloud_api_wrapper.py
CHANGED
@@ -142,32 +142,28 @@ class BitbucketServerApi(BitbucketApiAbstract):
             filename=file_path
         )

-    def …
-    …
-        updated_file_content = file_content
-        for old, new in parse_old_new_markers(update_query):
-            if not old.strip():
-                continue
-            updated_file_content = updated_file_content.replace(old, new)
-
-        if file_content == updated_file_content:
-            raise ToolException(
-                "File content was not updated because old content was not found or empty. "
-                "It may be helpful to use the read_file action to get "
-                "the current file contents."
-            )
+    def _write_file(self, file_path: str, content: str, branch: str, commit_message: str) -> str:
+        """Write updated file content to Bitbucket Server.

+        it creates a new commit on the given branch that edits the existing file.
+        """
+        # Get the latest commit on the branch (used as source_commit_id)
         source_commit_generator = self.api_client.get_commits(project_key=self.project, repository_slug=self.repository,
                                                               hash_newest=branch, limit=1)
-        source_commit = next(source_commit_generator)
+        source_commit = next(source_commit_generator, None)
+        if not source_commit:
+            raise ToolException(
+                f"Unable to determine latest commit on branch '{branch}' for repository '{self.repository}'."
+            )
+
         return self.api_client.update_file(
             project_key=self.project,
             repository_slug=self.repository,
-            content= …
-            message=f"Update {file_path}",
+            content=content,
+            message=commit_message or f"Update {file_path}",
             branch=branch,
             filename=file_path,
-            source_commit_id=source_commit['id']
+            source_commit_id=source_commit['id'],
         )

     def get_pull_request_commits(self, pr_id: str) -> List[Dict[str, Any]]:
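The switch to next(source_commit_generator, None) guards against a branch with no reachable commits: a bare next() on an exhausted generator raises StopIteration, which previously surfaced as an opaque error instead of the explicit ToolException added here. In miniature:

# Why the default argument to next() matters
empty = iter(())             # e.g. get_commits() yields nothing for the branch
try:
    next(empty)              # raises StopIteration
except StopIteration:
    pass
assert next(iter(()), None) is None  # with a default, we can raise a clear error ourselves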
@@ -294,7 +290,37 @@ class BitbucketCloudApi(BitbucketApiAbstract):
         return None

     def get_file(self, file_path: str, branch: str) -> str:
-        return …
+        """Fetch a file's content from Bitbucket Cloud and return it as text.
+
+        Uses the 'get' endpoint with advanced_mode to get a rich response object.
+        """
+        try:
+            file_response = self.repository.get(
+                path=f"src/{branch}/{file_path}",
+                advanced_mode=True,
+            )
+
+            # Prefer HTTP status when available
+            status = getattr(file_response, "status_code", None)
+            if status is not None and status != 200:
+                raise ToolException(
+                    f"Failed to retrieve text from file '{file_path}' from branch '{branch}': "
+                    f"HTTP {status}"
+                )
+
+            # Safely extract text content
+            file_text = getattr(file_response, "text", None)
+            if not isinstance(file_text, str) or not file_text:
+                raise ToolException(
+                    f"File '{file_path}' from branch '{branch}' is empty or could not be retrieved."
+                )
+
+            return file_text
+        except Exception as e:
+            # Network/transport or client-level failure
+            raise ToolException(
+                f"Failed to retrieve text from file '{file_path}' from branch '{branch}': {e}"
+            )

     def get_files_list(self, file_path: str, branch: str) -> list:
         files_list = []
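Callers of the hardened get_file now see a single failure type: the inner status and empty-body checks raise ToolException, and the outer except Exception re-wraps any other error with the file and branch in the message. A usage sketch; api stands for a configured BitbucketCloudApi instance, and the ToolException import path is an assumption.

from langchain_core.tools import ToolException  # import path assumed

try:
    text = api.get_file("docs/README.md", branch="main")
    print(text[:80])
except ToolException as err:
    # Covers non-200 responses, empty bodies, and transport failures alike
    print(f"read failed: {err}")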
@@ -315,22 +341,10 @@ class BitbucketCloudApi(BitbucketApiAbstract):
         return self.repository.post(path='src', data=form_data, files={},
                                     headers={'Content-Type': 'application/x-www-form-urlencoded'})

-    def …
-    … (3 lines not shown)
-        for old, new in parse_old_new_markers(file_query=update_query):
-            if not old.strip():
-                continue
-            updated_file_content = updated_file_content.replace(old, new)
-
-        if file_content == updated_file_content:
-            return ToolException(
-                "File content was not updated because old content was not found or empty. "
-                "It may be helpful to use the read_file action to get "
-                "the current file contents."
-            )
-        return self.create_file(file_path, updated_file_content, branch)
+    def _write_file(self, file_path: str, content: str, branch: str, commit_message: str) -> str:
+        """Write updated file content to Bitbucket Cloud.
+        """
+        return self.create_file(file_path=file_path, file_contents=content, branch=branch)

     def get_pull_request_commits(self, pr_id: str) -> List[Dict[str, Any]]:
         """
alita_sdk/tools/confluence/api_wrapper.py
CHANGED
@@ -620,11 +620,18 @@ class ConfluenceAPIWrapper(NonCodeIndexerToolkit):
     def _process_search(self, cql, skip_images: bool = False):
         start = 0
         pages_info = []
+        seen_ids: set = set()  # Track seen page IDs to avoid duplicates
         for _ in range((self.max_pages + self.limit - 1) // self.limit):
             pages = self.client.cql(cql, start=start, limit=self.limit).get("results", [])
             if not pages:
                 break
-            …
+            # Deduplicate page IDs before processing
+            page_ids = []
+            for page in pages:
+                page_id = page['content']['id']
+                if page_id not in seen_ids:
+                    seen_ids.add(page_id)
+                    page_ids.append(page_id)
             for page in self.get_pages_by_id(page_ids, skip_images):
                 page_info = {
                     'content': page.page_content,
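The seen_ids guard matters because paging CQL results with start/limit can return the same page on consecutive requests when the result set shifts between calls; without it, duplicate IDs would be fetched and emitted again. The pattern in isolation:

# Generic form of the dedup guard added above
seen_ids: set = set()
batches = [["1", "2"], ["2", "3"]]   # overlapping result pages, as CQL paging can produce
for batch in batches:
    fresh = [pid for pid in batch if pid not in seen_ids]
    seen_ids.update(fresh)
    print(fresh)                     # ['1', '2'] then ['3']; "2" is processed once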
alita_sdk/tools/elitea_base.py
CHANGED
@@ -837,10 +837,7 @@ class BaseCodeToolApiWrapper(BaseVectorStoreToolApiWrapper):
             commit_message: Commit message (VCS toolkits only)

         Returns:
-            Success message or …
-
-        Raises:
-            ToolException: If file is not text-editable or edit fails
+            Success message or raises ToolException on failure.
         """
         from .utils.text_operations import parse_old_new_markers, is_text_editable, try_apply_edit
         from langchain_core.callbacks import dispatch_custom_event
@@ -868,45 +865,35 @@
                 raise current_content if isinstance(current_content, Exception) else ToolException(str(current_content))
         except Exception as e:
             raise ToolException(f"Failed to read file {file_path}: {e}")
-
-        # Apply all edits (…
+
+        # Apply all edits (stop on first warning/error)
         updated_content = current_content
-        fallbacks_used = 0
         edits_applied = 0
         for old_text, new_text in edits:
             if not old_text.strip():
                 continue

-            new_updated, …
+            new_updated, error_message = try_apply_edit(
                 content=updated_content,
                 old_text=old_text,
                 new_text=new_text,
                 file_path=file_path,
             )

-            if …
-                …
-                logger.warning(
-                    "Old content not found or could not be safely matched in %s. Snippet: %s...",
-                    file_path,
-                    old_text[:100].replace("\n", "\\n"),
-                )
-                continue
+            if error_message:
+                return error_message

             # A replacement was applied
             edits_applied += 1
-            if used_fallback:
-                fallbacks_used += 1
-
             updated_content = new_updated

         # Check if any changes were made
-        if current_content == updated_content …
-        … (5 lines not shown)
+        if current_content == updated_content:
+            # At least one edit was applied, but the final content is identical.
+            # This usually means the sequence of OLD/NEW pairs is redundant or cancels out.
+            return (f"Edits for {file_path} were applied but the final content is identical to the original. "
+                    "The sequence of OLD/NEW pairs appears to be redundant or self-cancelling. "
+                    "Please simplify or review the update_query.")

         # Write updated content
         try:
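The loop above implies a new return contract for try_apply_edit: instead of a used_fallback flag plus logged warnings, it returns (updated_content, error_message), and edit_file now stops at the first failing pair. A hedged stand-in for that contract; the real implementation lives in alita_sdk/tools/utils/text_operations.py and may match more leniently.

from typing import Optional, Tuple

def try_apply_edit(content: str, old_text: str, new_text: str,
                   file_path: str) -> Tuple[str, Optional[str]]:
    # Illustrative stand-in: return the updated content, or an error message on failure
    if old_text not in content:
        return content, (f"Old content not found in {file_path}. "
                         "Use read_file to fetch the current contents first.")
    return content.replace(old_text, new_text, 1), None

Note the behavioral change: previously a non-matching OLD block was logged and skipped, so later pairs could still apply; now the first mismatch aborts the whole edit and reports back to the caller.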
@@ -1116,60 +1103,77 @@ def extend_with_file_operations(method):
     """
     Decorator to automatically add file operation tools to toolkits that implement
     _read_file and _write_file methods.
-
+
     Adds:
     - read_file_chunk: Read specific line ranges
     - read_multiple_files: Batch read files
     - search_file: Search for patterns in files
     - edit_file: Edit files using OLD/NEW markers
+
+    Custom Schema Support:
+        Toolkits can provide custom schemas by implementing _get_file_operation_schemas() method
+        that returns a dict mapping tool names to Pydantic models. This allows toolkits like
+        ArtifactWrapper to use bucket_name instead of branch.
+
+    Example:
+        def _get_file_operation_schemas(self):
+            return {
+                "read_file_chunk": MyCustomReadFileChunkInput,
+                "read_multiple_files": MyCustomReadMultipleFilesInput,
+            }
     """
     def wrapper(self, *args, **kwargs):
         tools = method(self, *args, **kwargs)
-
+
         # Only add file operations if toolkit has implemented the required methods
         # Check for both _read_file and _write_file methods
         has_file_ops = (hasattr(self, '_read_file') and callable(getattr(self, '_read_file')) and
                         hasattr(self, '_write_file') and callable(getattr(self, '_write_file')))
-
+
         if has_file_ops:
             # Import schemas from elitea_base
             from . import elitea_base
-
+
+            # Check for toolkit-specific custom schemas
+            custom_schemas = {}
+            if hasattr(self, '_get_file_operation_schemas') and callable(getattr(self, '_get_file_operation_schemas')):
+                custom_schemas = self._get_file_operation_schemas() or {}
+
             file_operation_tools = [
                 {
                     "name": "read_file_chunk",
                     "mode": "read_file_chunk",
                     "ref": self.read_file_chunk,
                     "description": self.read_file_chunk.__doc__,
-                    "args_schema": elitea_base.ReadFileChunkInput
+                    "args_schema": custom_schemas.get("read_file_chunk", elitea_base.ReadFileChunkInput)
                 },
                 {
                     "name": "read_multiple_files",
                     "mode": "read_multiple_files",
                     "ref": self.read_multiple_files,
                     "description": self.read_multiple_files.__doc__,
-                    "args_schema": elitea_base.ReadMultipleFilesInput
+                    "args_schema": custom_schemas.get("read_multiple_files", elitea_base.ReadMultipleFilesInput)
                 },
                 {
                     "name": "search_file",
                     "mode": "search_file",
                     "ref": self.search_file,
                     "description": self.search_file.__doc__,
-                    "args_schema": elitea_base.SearchFileInput
+                    "args_schema": custom_schemas.get("search_file", elitea_base.SearchFileInput)
                 },
                 {
                     "name": "edit_file",
                     "mode": "edit_file",
                     "ref": self.edit_file,
                     "description": self.edit_file.__doc__,
-                    "args_schema": elitea_base.EditFileInput
+                    "args_schema": custom_schemas.get("edit_file", elitea_base.EditFileInput)
                 },
             ]
-
+
             tools.extend(file_operation_tools)
-
+
         return tools
-
+
     return wrapper


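The custom-schema hook exists so non-VCS backends can rename parameters; the docstring names ArtifactWrapper swapping branch for bucket_name. A hedged sketch of such an override; the model name and fields are illustrative, only the hook name and the dict shape are confirmed by the hunk.

from typing import Optional
from pydantic import BaseModel, Field

class BucketReadFileChunkInput(BaseModel):
    # Illustrative: a storage toolkit addresses files by bucket, not branch
    file_path: str = Field(description="Path of the file inside the bucket")
    bucket_name: Optional[str] = Field(default=None, description="Bucket to read from")
    start_line: int = Field(default=1, description="First line to return")
    end_line: Optional[int] = Field(default=None, description="Last line to return")

class MyArtifactWrapper:
    def _get_file_operation_schemas(self):
        # Tools without an entry here fall back to the elitea_base defaults
        return {"read_file_chunk": BucketReadFileChunkInput}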
alita_sdk/tools/figma/api_wrapper.py
CHANGED
@@ -369,50 +369,94 @@ class FigmaApiWrapper(NonCodeIndexerToolkit):
         )

     def _base_loader(
-        … (8 lines not shown)
-        number_of_threads: Optional[int] = None,
-        **kwargs
+        self,
+        urls_or_file_keys: Optional[str] = None,
+        node_ids_include: Optional[List[str]] = None,
+        node_ids_exclude: Optional[List[str]] = None,
+        node_types_include: Optional[List[str]] = None,
+        node_types_exclude: Optional[List[str]] = None,
+        number_of_threads: Optional[int] = None,
+        **kwargs,
     ) -> Generator[Document, None, None]:
-        … (33 lines not shown)
+        """Base loader used by the indexer tool.
+
+        Args:
+            urls_or_file_keys: Comma-separated list of Figma file URLs or raw file keys. Each
+                entry can be:
+                - a full Figma URL (https://www.figma.com/file/... or /design/...) optionally
+                  with a node-id query parameter, or
+                - a bare file key string.
+                URL entries are parsed via _parse_figma_url; raw keys are used as-is.
+            node_ids_include: Optional list of top-level node IDs (pages) to include when an
+                entry does not specify node-id in the URL and is not otherwise constrained.
+            node_ids_exclude: Optional list of top-level node IDs (pages) to exclude when
+                node_ids_include is not provided.
+            node_types_include: Optional list of node types to include within each page.
+            node_types_exclude: Optional list of node types to exclude when node_types_include
+                is not provided.
+            number_of_threads: Optional override for number of worker threads to use when
+                processing images.
+        """
+        if not urls_or_file_keys:
+            raise ValueError("You must provide urls_or_file_keys with at least one URL or file key.")
+
+        # Parse the comma-separated entries into concrete (file_key, per_entry_node_ids_include)
+        entries = [item.strip() for item in urls_or_file_keys.split(',') if item.strip()]
+        if not entries:
+            raise ValueError("You must provide urls_or_file_keys with at least one non-empty value.")
+
+        # Validate number_of_threads override once and pass via metadata
+        metadata_threads_override: Optional[int] = None
+        if isinstance(number_of_threads, int) and 1 <= number_of_threads <= 5:
+            metadata_threads_override = number_of_threads
+
+        for entry in entries:
+            per_file_node_ids_include: Optional[List[str]] = None
+            file_key: Optional[str] = None
+
+            # Heuristic: treat as URL if it has a scheme and figma.com host
+            if entry.startswith("http://") or entry.startswith("https://"):
+                file_key, node_ids_from_url = self._parse_figma_url(entry)
+                per_file_node_ids_include = node_ids_from_url
+            else:
+                # Assume this is a raw file key
+                file_key = entry
+
+            if not file_key:
+                continue
+
+            # If URL-derived node IDs exist, they take precedence over global include list
+            effective_node_ids_include = per_file_node_ids_include or node_ids_include or []
+
+            self._log_tool_event(f"Loading file `{file_key}`")
+            try:
+                file = self._client.get_file(file_key, geometry='depth=1')
+            except ToolException as e:
+                # Enrich the error message with the file_key for easier troubleshooting
+                raise ToolException(
+                    f"Failed to retrieve Figma file '{file_key}'. Original error: {e}"
+                ) from e
+
+            if not file:
+                raise ToolException(
+                    f"Unexpected error while retrieving file {file_key}. Please try specifying the node-id of an inner page."
+                )
+
+            metadata = {
+                'id': file_key,
+                'file_key': file_key,
+                'name': file.name,
+                'updated_on': file.last_modified,
+                'figma_pages_include': effective_node_ids_include,
+                'figma_pages_exclude': node_ids_exclude or [],
+                'figma_nodes_include': node_types_include or [],
+                'figma_nodes_exclude': node_types_exclude or [],
+            }
+
+            if metadata_threads_override is not None:
+                metadata['number_of_threads_override'] = metadata_threads_override
+
+            yield Document(page_content=json.dumps(metadata), metadata=metadata)

     def has_image_representation(self, node):
         node_type = node.get('type', '').lower()
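A usage sketch of the consolidated entry point, mixing a URL (whose node-id takes precedence for that file) with a raw file key; figma_wrapper stands for a configured FigmaApiWrapper, and the keys and node IDs are made up.

docs = figma_wrapper._base_loader(
    urls_or_file_keys=(
        "https://www.figma.com/file/AbC123dEf/My-Design?node-id=12-34,"
        "Fp24FuzPwH0L74ODSrCnQo"
    ),
    node_types_exclude=["VECTOR"],
    number_of_threads=3,
)
for doc in docs:   # one metadata Document per file key
    print(doc.metadata["file_key"], doc.metadata["figma_pages_include"])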
@@ -672,44 +716,58 @@
         )

     def _index_tool_params(self):
-        … (38 lines not shown)
+        """Return the parameters for indexing data."""
+        return {
+            "urls_or_file_keys": (str, Field(
+                description=(
+                    "Comma-separated list of Figma file URLs or raw file keys to index. "
+                    "Each entry may be a full Figma URL (with optional node-id query) or a file key. "
+                    "Example: 'https://www.figma.com/file/<FILE_KEY>/...?node-id=<NODE_ID>,Fp24FuzPwH0L74ODSrCnQo'."
+                ))),
+            'number_of_threads': (Optional[int], Field(
+                description=(
+                    "Optional override for the number of worker threads used when indexing Figma images. "
+                    f"Valid values are from 1 to 5. Default is {DEFAULT_NUMBER_OF_THREADS}."
+                ),
+                default=DEFAULT_NUMBER_OF_THREADS,
+                ge=1,
+                le=5,
+            )),
+            'node_ids_include': (Optional[List[str]], Field(
+                description=(
+                    "List of top-level node IDs (pages) to include in the index. Values should match "
+                    "Figma node-id format like ['123-56', '7651-9230']. These include rules are applied "
+                    "for each entry in urls_or_file_keys when the URL does not specify node-id and for "
+                    "each raw file_key entry."
+                ),
+                default=None,
+            )),
+            'node_ids_exclude': (Optional[List[str]], Field(
+                description=(
+                    "List of top-level node IDs (pages) to exclude from the index when node_ids_include "
+                    "is not provided. Values should match Figma node-id format. These exclude rules are "
+                    "applied for each entry in urls_or_file_keys (URLs without node-id and raw fileKey "
+                    "entries)."
+                ),
+                default=None,
+            )),
+            'node_types_include': (Optional[List[str]], Field(
+                description=(
+                    "List of node types to include in the index, e.g. ['FRAME', 'COMPONENT', 'RECTANGLE', "
+                    "'COMPONENT_SET', 'INSTANCE', 'VECTOR', ...]. If provided, only these types are indexed "
+                    "for each page loaded from each urls_or_file_keys entry."
+                ),
+                default=None,
+            )),
+            'node_types_exclude': (Optional[List[str]], Field(
+                description=(
+                    "List of node types to exclude from the index when node_types_include is not provided. "
+                    "These exclude rules are applied to nodes within each page loaded from each "
+                    "urls_or_file_keys entry."
+                ),
+                default=None,
+            )),
+        }

     def _send_request(
         self,
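These (type, Field(...)) tuples are the field definitions pydantic's create_model accepts, which is presumably how the indexer turns _index_tool_params into an input schema; the model name and the wiring below are assumptions.

from typing import List, Optional
from pydantic import Field, create_model

params = {
    "urls_or_file_keys": (str, Field(description="Comma-separated URLs or file keys")),
    "node_ids_include": (Optional[List[str]], Field(default=None)),
}
IndexToolInput = create_model("IndexToolInput", **params)
print(IndexToolInput(urls_or_file_keys="Fp24FuzPwH0L74ODSrCnQo"))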
@@ -972,8 +1030,7 @@

         # Delegate URL and file_key handling to _base_loader
         base_docs = self._base_loader(
-            …
-            file_keys_include=[file_key] if file_key else None,
+            urls_or_file_keys=url or file_key,
             node_ids_include=node_ids_include_list,
             node_ids_exclude=node_ids_exclude_list,
         )