alita-sdk 0.3.150__py3-none-any.whl → 0.3.152__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,2032 @@
1
+ import json
2
+ import logging
3
+ import re
4
+ from typing import Any, Dict, List, Optional, Union
5
+ from traceback import format_exc
6
+
7
+ import requests
8
+ from langchain_core.tools import ToolException
9
+ from pydantic import Field, PrivateAttr, model_validator, create_model, SecretStr
10
+
11
+ from ..elitea_base import BaseToolApiWrapper
12
+
13
+ logger = logging.getLogger(__name__)
14
+
15
+ # Pydantic models for request schemas
16
+ PostmanGetCollections = create_model(
17
+ "PostmanGetCollections"
18
+ )
19
+
20
+ PostmanGetCollection = create_model(
21
+ "PostmanGetCollection",
22
+ collection_id=(str, Field(
23
+ description="The ID of the collection to retrieve"))
24
+ )
25
+
26
+ PostmanGetFolder = create_model(
27
+ "PostmanGetFolder",
28
+ collection_id=(str, Field(description="The ID of the collection")),
29
+ folder_path=(str, Field(
30
+ description="The path to the folder (e.g., 'API/Users' for nested folders)"))
31
+ )
32
+
33
+ PostmanGetFolderRequests = create_model(
34
+ "PostmanGetFolderRequests",
35
+ collection_id=(str, Field(description="The ID of the collection")),
36
+ folder_path=(str, Field(description="The path to the folder")),
37
+ include_details=(bool, Field(
38
+ description="Include detailed request information", default=False))
39
+ )
40
+
41
+ PostmanSearchRequests = create_model(
42
+ "PostmanSearchRequests",
43
+ collection_id=(str, Field(
44
+ description="The ID of the collection to search in")),
45
+ query=(str, Field(description="The search query")),
46
+ search_in=(str, Field(
47
+ description="Where to search: name, url, description, all", default="all")),
48
+ method=(Optional[str], Field(
49
+ description="Optional HTTP method filter", default=None))
50
+ )
51
+
52
+ PostmanAnalyzeCollection = create_model(
53
+ "PostmanAnalyzeCollection",
54
+ collection_id=(str, Field(
55
+ description="The ID of the collection to analyze"))
56
+ )
57
+
58
+ PostmanAnalyzeFolder = create_model(
59
+ "PostmanAnalyzeFolder",
60
+ collection_id=(str, Field(description="The ID of the collection")),
61
+ folder_path=(str, Field(description="The path to the folder to analyze"))
62
+ )
63
+
64
+ PostmanGetImprovementSuggestions = create_model(
65
+ "PostmanGetImprovementSuggestions",
66
+ collection_id=(str, Field(
67
+ description="The ID of the collection to get improvements for"))
68
+ )
69
+
70
+ PostmanCreateCollection = create_model(
71
+ "PostmanCreateCollection",
72
+ name=(str, Field(description="The name of the new collection")),
73
+ description=(Optional[str], Field(
74
+ description="Optional description for the collection", default=None)),
75
+ variables=(Optional[List[Dict]], Field(
76
+ description="Optional collection variables", default=None)),
77
+ auth=(Optional[Dict], Field(
78
+ description="Optional default authentication", default=None))
79
+ )
80
+
81
+ PostmanUpdateCollection = create_model(
82
+ "PostmanUpdateCollection",
83
+ collection_id=(str, Field(
84
+ description="The ID of the collection to update")),
85
+ name=(Optional[str], Field(
86
+ description="New name for the collection", default=None)),
87
+ description=(Optional[str], Field(
88
+ description="New description for the collection", default=None)),
89
+ variables=(Optional[List[Dict]], Field(
90
+ description="Updated collection variables", default=None)),
91
+ auth=(Optional[Dict], Field(
92
+ description="Updated authentication settings", default=None))
93
+ )
94
+
95
+ PostmanDeleteCollection = create_model(
96
+ "PostmanDeleteCollection",
97
+ collection_id=(str, Field(
98
+ description="The ID of the collection to delete"))
99
+ )
100
+
101
+ PostmanDuplicateCollection = create_model(
102
+ "PostmanDuplicateCollection",
103
+ collection_id=(str, Field(
104
+ description="The ID of the collection to duplicate")),
105
+ new_name=(str, Field(description="Name for the new collection copy"))
106
+ )
107
+
108
+ PostmanCreateFolder = create_model(
109
+ "PostmanCreateFolder",
110
+ collection_id=(str, Field(description="The ID of the collection")),
111
+ name=(str, Field(description="Name of the new folder")),
112
+ description=(Optional[str], Field(
113
+ description="Optional description for the folder", default=None)),
114
+ parent_path=(Optional[str], Field(
115
+ description="Optional parent folder path", default=None)),
116
+ auth=(Optional[Dict], Field(
117
+ description="Optional folder-level authentication", default=None))
118
+ )
119
+
120
+ PostmanUpdateFolder = create_model(
121
+ "PostmanUpdateFolder",
122
+ collection_id=(str, Field(description="The ID of the collection")),
123
+ folder_path=(str, Field(description="Path to the folder to update")),
124
+ name=(Optional[str], Field(
125
+ description="New name for the folder", default=None)),
126
+ description=(Optional[str], Field(
127
+ description="New description for the folder", default=None)),
128
+ auth=(Optional[Dict], Field(
129
+ description="Updated authentication settings", default=None))
130
+ )
131
+
132
+ PostmanDeleteFolder = create_model(
133
+ "PostmanDeleteFolder",
134
+ collection_id=(str, Field(description="The ID of the collection")),
135
+ folder_path=(str, Field(description="Path to the folder to delete"))
136
+ )
137
+
138
+ PostmanMoveFolder = create_model(
139
+ "PostmanMoveFolder",
140
+ collection_id=(str, Field(description="The ID of the collection")),
141
+ source_path=(str, Field(description="Current path of the folder to move")),
142
+ target_path=(Optional[str], Field(
143
+ description="New parent folder path", default=None))
144
+ )
145
+
146
+ PostmanCreateRequest = create_model(
147
+ "PostmanCreateRequest",
148
+ collection_id=(str, Field(description="The ID of the collection")),
149
+ folder_path=(Optional[str], Field(
150
+ description="Path to the folder", default=None)),
151
+ name=(str, Field(description="Name of the new request")),
152
+ method=(str, Field(description="HTTP method for the request")),
153
+ url=(str, Field(description="URL for the request")),
154
+ description=(Optional[str], Field(
155
+ description="Optional description for the request", default=None)),
156
+ headers=(Optional[List[Dict]], Field(
157
+ description="Optional request headers", default=None)),
158
+ body=(Optional[Dict], Field(
159
+ description="Optional request body", default=None)),
160
+ auth=(Optional[Dict], Field(
161
+ description="Optional request authentication", default=None)),
162
+ tests=(Optional[str], Field(
163
+ description="Optional test script code", default=None)),
164
+ pre_request_script=(Optional[str], Field(
165
+ description="Optional pre-request script code", default=None))
166
+ )
167
+
168
+ PostmanUpdateRequest = create_model(
169
+ "PostmanUpdateRequest",
170
+ collection_id=(str, Field(description="The ID of the collection")),
171
+ request_path=(str, Field(
172
+ description="Path to the request (folder/requestName)")),
173
+ name=(Optional[str], Field(
174
+ description="New name for the request", default=None)),
175
+ method=(Optional[str], Field(
176
+ description="HTTP method for the request", default=None)),
177
+ url=(Optional[str], Field(
178
+ description="URL for the request", default=None)),
179
+ description=(Optional[str], Field(
180
+ description="Description for the request", default=None)),
181
+ headers=(Optional[List[Dict]], Field(
182
+ description="Request headers", default=None)),
183
+ body=(Optional[Dict], Field(description="Request body", default=None)),
184
+ auth=(Optional[Dict], Field(
185
+ description="Request authentication", default=None)),
186
+ tests=(Optional[str], Field(description="Test script code", default=None)),
187
+ pre_request_script=(Optional[str], Field(
188
+ description="Pre-request script code", default=None))
189
+ )
190
+
191
+ PostmanDeleteRequest = create_model(
192
+ "PostmanDeleteRequest",
193
+ collection_id=(str, Field(description="The ID of the collection")),
194
+ request_path=(str, Field(description="Path to the request to delete"))
195
+ )
196
+
197
+ PostmanDuplicateRequest = create_model(
198
+ "PostmanDuplicateRequest",
199
+ collection_id=(str, Field(description="The ID of the collection")),
200
+ source_path=(str, Field(description="Path to the request to duplicate")),
201
+ new_name=(str, Field(description="Name for the duplicated request")),
202
+ target_path=(Optional[str], Field(
203
+ description="Target folder path", default=None))
204
+ )
205
+
206
+ PostmanMoveRequest = create_model(
207
+ "PostmanMoveRequest",
208
+ collection_id=(str, Field(description="The ID of the collection")),
209
+ source_path=(str, Field(
210
+ description="Current path of the request to move")),
211
+ target_path=(Optional[str], Field(
212
+ description="New folder path", default=None))
213
+ )
214
+
215
+
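The schemas above are plain pydantic models produced by `create_model`, so they can be exercised directly to validate tool arguments before dispatch. A minimal sketch (illustrative only, not shipped in the package):

# Illustrative sketch; assumes pydantic v2 semantics, as implied by `create_model`/`model_validator` above.
args = PostmanSearchRequests(collection_id="<collection-id>", query="login", search_in="name")
print(args.model_dump())
# -> {'collection_id': '<collection-id>', 'query': 'login', 'search_in': 'name', 'method': None}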
216
+ class PostmanApiWrapper(BaseToolApiWrapper):
217
+ """Wrapper for Postman API."""
218
+
219
+ api_key: SecretStr
220
+ base_url: str = "https://api.getpostman.com"
221
+ collection_id: Optional[str] = None
222
+ workspace_id: Optional[str] = None
223
+ timeout: int = 30
224
+ _session: requests.Session = PrivateAttr()
225
+
226
+ @model_validator(mode='before')
227
+ @classmethod
228
+ def validate_toolkit(cls, values):
229
+ try:
230
+ import requests # noqa: F401
231
+ except ImportError:
232
+ raise ImportError(
233
+ "`requests` package not found, please run "
234
+ "`pip install requests`"
235
+ )
236
+ return values
237
+
238
+ def __init__(self, **data):
239
+ super().__init__(**data)
240
+ self._session = requests.Session()
241
+ self._session.headers.update({
242
+ 'X-API-Key': self.api_key.get_secret_value(),
243
+ 'Content-Type': 'application/json',
244
+ })
245
+ # The per-request timeout is applied in `_make_request`; requests.Session has no session-wide timeout setting.
246
+
247
+ def _make_request(self, method: str, endpoint: str, **kwargs) -> Dict[str, Any]:
248
+ """Make HTTP request to Postman API."""
249
+ url = f"{self.base_url.rstrip('/')}{endpoint}"
250
+
251
+ try:
252
+ logger.info(f"Making {method.upper()} request to {url}")
253
+ response = self._session.request(method, url, timeout=self.timeout, **kwargs)
254
+ response.raise_for_status()
255
+
256
+ if response.content:
257
+ return response.json()
258
+ return {}
259
+
260
+ except json.JSONDecodeError as e:
261
+ logger.error(f"Failed to decode JSON response: {e}")
262
+ raise ToolException(
263
+ f"Invalid JSON response from Postman API: {str(e)}")
264
+ except requests.exceptions.RequestException as e:
265
+ logger.error(f"Request failed: {e}")
266
+ raise ToolException(f"Postman API request failed: {str(e)}")
267
+
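Every public method below funnels through `_make_request`, so transport errors and malformed JSON both surface uniformly as `ToolException`. A hedged usage sketch (the key value is a placeholder):

# Illustrative only, not part of the package.
wrapper = PostmanApiWrapper(api_key="<postman-api-key>")
data = wrapper._make_request('GET', '/collections')   # parsed JSON dict, or ToolException on failure
print(len(data.get('collections', [])))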
268
+ def get_available_tools(self):
269
+ """Return list of available tools with their configurations."""
270
+ return [
271
+ {
272
+ "name": "get_collections",
273
+ "mode": "get_collections",
274
+ "description": "Get all Postman collections accessible to the user",
275
+ "args_schema": PostmanGetCollections,
276
+ "ref": self.get_collections
277
+ },
278
+ {
279
+ "name": "get_collection",
280
+ "mode": "get_collection",
281
+ "description": "Get a specific Postman collection by ID",
282
+ "args_schema": PostmanGetCollection,
283
+ "ref": self.get_collection
284
+ },
285
+ {
286
+ "name": "get_folder",
287
+ "mode": "get_folder",
288
+ "description": "Get folders from a collection by path (supports nested paths like 'API/Users')",
289
+ "args_schema": PostmanGetFolder,
290
+ "ref": self.get_folder
291
+ },
292
+ {
293
+ "name": "get_folder_requests",
294
+ "mode": "get_folder_requests",
295
+ "description": "Get detailed information about all requests in a folder",
296
+ "args_schema": PostmanGetFolderRequests,
297
+ "ref": self.get_folder_requests
298
+ },
299
+ {
300
+ "name": "search_requests",
301
+ "mode": "search_requests",
302
+ "description": "Search for requests across the collection",
303
+ "args_schema": PostmanSearchRequests,
304
+ "ref": self.search_requests
305
+ },
306
+ {
307
+ "name": "analyze_collection",
308
+ "mode": "analyze_collection",
309
+ "description": "Analyze a collection for API quality, best practices, and issues",
310
+ "args_schema": PostmanAnalyzeCollection,
311
+ "ref": self.analyze_collection
312
+ },
313
+ {
314
+ "name": "analyze_folder",
315
+ "mode": "analyze_folder",
316
+ "description": "Analyze a specific folder within a collection",
317
+ "args_schema": PostmanAnalyzeFolder,
318
+ "ref": self.analyze_folder
319
+ },
320
+ {
321
+ "name": "get_improvement_suggestions",
322
+ "mode": "get_improvement_suggestions",
323
+ "description": "Get improvement suggestions for a collection",
324
+ "args_schema": PostmanGetImprovementSuggestions,
325
+ "ref": self.get_improvement_suggestions
326
+ },
327
+ {
328
+ "name": "create_collection",
329
+ "mode": "create_collection",
330
+ "description": "Create a new Postman collection",
331
+ "args_schema": PostmanCreateCollection,
332
+ "ref": self.create_collection
333
+ },
334
+ {
335
+ "name": "update_collection",
336
+ "mode": "update_collection",
337
+ "description": "Update an existing collection (name, description, variables, auth)",
338
+ "args_schema": PostmanUpdateCollection,
339
+ "ref": self.update_collection
340
+ },
341
+ {
342
+ "name": "delete_collection",
343
+ "mode": "delete_collection",
344
+ "description": "Delete a collection permanently",
345
+ "args_schema": PostmanDeleteCollection,
346
+ "ref": self.delete_collection
347
+ },
348
+ {
349
+ "name": "duplicate_collection",
350
+ "mode": "duplicate_collection",
351
+ "description": "Create a copy of an existing collection",
352
+ "args_schema": PostmanDuplicateCollection,
353
+ "ref": self.duplicate_collection
354
+ },
355
+
356
+ {
357
+ "name": "create_folder",
358
+ "mode": "create_folder",
359
+ "description": "Create a new folder in a collection",
360
+ "args_schema": PostmanCreateFolder,
361
+ "ref": self.create_folder
362
+ },
363
+ {
364
+ "name": "update_folder",
365
+ "mode": "update_folder",
366
+ "description": "Update folder properties (name, description, auth)",
367
+ "args_schema": PostmanUpdateFolder,
368
+ "ref": self.update_folder
369
+ },
370
+ {
371
+ "name": "delete_folder",
372
+ "mode": "delete_folder",
373
+ "description": "Delete a folder and all its contents permanently",
374
+ "args_schema": PostmanDeleteFolder,
375
+ "ref": self.delete_folder
376
+ },
377
+ {
378
+ "name": "move_folder",
379
+ "mode": "move_folder",
380
+ "description": "Move a folder to a different location within the collection",
381
+ "args_schema": PostmanMoveFolder,
382
+ "ref": self.move_folder
383
+ },
384
+ {
385
+ "name": "create_request",
386
+ "mode": "create_request",
387
+ "description": "Create a new API request in a folder",
388
+ "args_schema": PostmanCreateRequest,
389
+ "ref": self.create_request
390
+ },
391
+ {
392
+ "name": "update_request",
393
+ "mode": "update_request",
394
+ "description": "Update an existing API request",
395
+ "args_schema": PostmanUpdateRequest,
396
+ "ref": self.update_request
397
+ },
398
+ {
399
+ "name": "delete_request",
400
+ "mode": "delete_request",
401
+ "description": "Delete an API request permanently",
402
+ "args_schema": PostmanDeleteRequest,
403
+ "ref": self.delete_request
404
+ },
405
+ {
406
+ "name": "duplicate_request",
407
+ "mode": "duplicate_request",
408
+ "description": "Create a copy of an existing API request",
409
+ "args_schema": PostmanDuplicateRequest,
410
+ "ref": self.duplicate_request
411
+ },
412
+ {
413
+ "name": "move_request",
414
+ "mode": "move_request",
415
+ "description": "Move an API request to a different folder",
416
+ "args_schema": PostmanMoveRequest,
417
+ "ref": self.move_request
418
+ }
419
+ ]
420
+
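The list returned above acts as a dispatch table: each entry pairs a pydantic `args_schema` with the bound method in `ref`. A rough caller-side sketch under that assumption:

# Illustrative dispatch helper, not part of the package.
def run_tool(wrapper: PostmanApiWrapper, tool_name: str, **kwargs) -> str:
    for tool in wrapper.get_available_tools():
        if tool["name"] == tool_name:
            validated = tool["args_schema"](**kwargs)        # validate arguments first
            return tool["ref"](**validated.model_dump())     # then call the bound method
    raise ValueError(f"Unknown tool: {tool_name}")

# e.g. run_tool(wrapper, "get_collection", collection_id="<collection-id>")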
421
+ # =================================================================
422
+ # ANALYSIS AND READ-ONLY METHODS
423
+ # =================================================================
424
+
425
+ def get_collections(self, **kwargs) -> str:
426
+ """Get all Postman collections accessible to the user."""
427
+ try:
428
+ response = self._make_request('GET', '/collections')
429
+ return json.dumps(response, indent=2)
430
+ except Exception as e:
431
+ stacktrace = format_exc()
432
+ logger.error(f"Exception when getting collections: {stacktrace}")
433
+ raise ToolException(f"Unable to get collections: {str(e)}")
434
+
435
+ def get_collection(self, collection_id: str, **kwargs) -> str:
436
+ """Get a specific collection by ID."""
437
+ try:
438
+ response = self._make_request(
439
+ 'GET', f'/collections/{collection_id}')
440
+ return json.dumps(response, indent=2)
441
+ except Exception as e:
442
+ stacktrace = format_exc()
443
+ logger.error(
444
+ f"Exception when getting collection {collection_id}: {stacktrace}")
445
+ raise ToolException(
446
+ f"Unable to get collection {collection_id}: {str(e)}")
447
+
448
+ def get_folder(self, collection_id: str, folder_path: str, **kwargs) -> str:
449
+ """Get folders from a collection by path."""
450
+ try:
451
+ collection = self._make_request(
452
+ 'GET', f'/collections/{collection_id}')
453
+ folders = self._find_folders_by_path(
454
+ collection['collection']['item'], folder_path)
455
+ return json.dumps(folders, indent=2)
456
+ except Exception as e:
457
+ stacktrace = format_exc()
458
+ logger.error(
459
+ f"Exception when getting folder {folder_path}: {stacktrace}")
460
+ raise ToolException(
461
+ f"Unable to get folder {folder_path} from collection {collection_id}: {str(e)}")
462
+
463
+ def get_folder_requests(self, collection_id: str, folder_path: str, include_details: bool = False, **kwargs) -> str:
464
+ """Get detailed information about all requests in a folder."""
465
+ try:
466
+ collection = self._make_request(
467
+ 'GET', f'/collections/{collection_id}')
468
+ folders = self._find_folders_by_path(
469
+ collection['collection']['item'], folder_path)
470
+
471
+ if not folders:
472
+ raise ToolException(f"Folder '{folder_path}' not found in collection '{collection_id}'.")
473
+
474
+ folder = folders[0]
475
+ requests = self._extract_requests_from_items(
476
+ folder.get('item', []), include_details)
477
+
478
+ result = {
479
+ "folder_name": folder['name'],
480
+ "folder_path": folder_path,
481
+ "request_count": len(requests),
482
+ "requests": requests
483
+ }
484
+
485
+ return json.dumps(result, indent=2)
486
+ except Exception as e:
487
+ stacktrace = format_exc()
488
+ logger.error(
489
+ f"Exception when getting folder requests: {stacktrace}")
490
+ raise ToolException(
491
+ f"Unable to get requests from folder {folder_path}: {str(e)}")
492
+
493
+ def search_requests(self, collection_id: str, query: str, search_in: str = "all", method: str = None, **kwargs) -> str:
494
+ """Search for requests across the collection."""
495
+ try:
496
+ collection = self._make_request(
497
+ 'GET', f'/collections/{collection_id}')
498
+ requests = self._search_requests_in_items(
499
+ collection['collection']['item'], query, search_in, method)
500
+
501
+ result = {
502
+ "collection_id": collection_id,
503
+ "query": query,
504
+ "search_in": search_in,
505
+ "method_filter": method,
506
+ "results_count": len(requests),
507
+ "results": requests
508
+ }
509
+
510
+ return json.dumps(result, indent=2)
511
+ except Exception as e:
512
+ stacktrace = format_exc()
513
+ logger.error(f"Exception when searching requests: {stacktrace}")
514
+ raise ToolException(
515
+ f"Unable to search requests in collection {collection_id}: {str(e)}")
516
+
517
+ def analyze_collection(self, collection_id: str, **kwargs) -> str:
518
+ """Analyze a collection for API quality and best practices."""
519
+ try:
520
+ collection = self._make_request(
521
+ 'GET', f'/collections/{collection_id}')
522
+ analysis = self._perform_collection_analysis(collection)
523
+ return json.dumps(analysis, indent=2)
524
+ except Exception as e:
525
+ stacktrace = format_exc()
526
+ logger.error(f"Exception when analyzing collection: {stacktrace}")
527
+ raise ToolException(
528
+ f"Unable to analyze collection {collection_id}: {str(e)}")
529
+
530
+ def analyze_folder(self, collection_id: str, folder_path: str, **kwargs) -> str:
531
+ """Analyze a specific folder within a collection."""
532
+ try:
533
+ collection = self._make_request(
534
+ 'GET', f'/collections/{collection_id}')
535
+ folders = self._find_folders_by_path(
536
+ collection['collection']['item'], folder_path)
537
+
538
+ if not folders:
539
+ return json.dumps({"error": f"Folder '{folder_path}' not found"}, indent=2)
540
+
541
+ folder_analyses = []
542
+ for folder in folders:
543
+ analysis = self._perform_folder_analysis(folder, folder_path)
544
+ folder_analyses.append(analysis)
545
+
546
+ return json.dumps(folder_analyses, indent=2)
547
+ except Exception as e:
548
+ stacktrace = format_exc()
549
+ logger.error(f"Exception when analyzing folder: {stacktrace}")
550
+ raise ToolException(
551
+ f"Unable to analyze folder {folder_path}: {str(e)}")
552
+
553
+ def get_improvement_suggestions(self, collection_id: str, **kwargs) -> str:
554
+ """Get improvement suggestions for a collection."""
555
+ try:
556
+ collection = self._make_request(
557
+ 'GET', f'/collections/{collection_id}')
558
+ analysis = self._perform_collection_analysis(collection)
559
+ improvements = self._generate_improvements(analysis)
560
+
561
+ result = {
562
+ "collection_id": collection_id,
563
+ "collection_name": analysis["collection_name"],
564
+ "improvement_count": len(improvements),
565
+ "improvements": improvements
566
+ }
567
+
568
+ return json.dumps(result, indent=2)
569
+ except Exception as e:
570
+ stacktrace = format_exc()
571
+ logger.error(
572
+ f"Exception when generating improvements: {stacktrace}")
573
+ raise ToolException(
574
+ f"Unable to generate improvements for collection {collection_id}: {str(e)}")
575
+
576
+ # =================================================================
577
+ # COLLECTION MANAGEMENT METHODS
578
+ # =================================================================
579
+
580
+ def create_collection(self, name: str, description: str = None, variables: List[Dict] = None, auth: Dict = None, **kwargs) -> str:
581
+ """Create a new Postman collection."""
582
+ try:
583
+ collection_data = {
584
+ "collection": {
585
+ "info": {
586
+ "name": name,
587
+ "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json"
588
+ },
589
+ "item": []
590
+ }
591
+ }
592
+
593
+ if description:
594
+ collection_data["collection"]["info"]["description"] = description
595
+
596
+ if variables:
597
+ collection_data["collection"]["variable"] = variables
598
+
599
+ if auth:
600
+ collection_data["collection"]["auth"] = auth
601
+
602
+ response = self._make_request(
603
+ 'POST', '/collections', json=collection_data)
604
+ return json.dumps(response, indent=2)
605
+ except Exception as e:
606
+ stacktrace = format_exc()
607
+ logger.error(f"Exception when creating collection: {stacktrace}")
608
+ raise ToolException(
609
+ f"Unable to create collection '{name}': {str(e)}")
610
+
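`variables` and `auth` are passed through verbatim, so they should already be shaped like Postman collection v2.1 objects. A hedged example of plausible shapes (values are placeholders):

# Illustrative call, not part of the package.
wrapper.create_collection(
    name="User Service API",
    description="CRUD endpoints for users",
    variables=[{"key": "base_url", "value": "https://staging.example.test", "type": "string"}],
    auth={"type": "bearer", "bearer": [{"key": "token", "value": "{{auth_token}}", "type": "string"}]},
)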
611
+ def update_collection(self, collection_id: str, name: str = None, description: str = None,
612
+ variables: List[Dict] = None, auth: Dict = None, **kwargs) -> str:
613
+ """Update an existing collection."""
614
+ try:
615
+ # Get current collection
616
+ current = self._make_request(
617
+ 'GET', f'/collections/{collection_id}')
618
+ collection_data = current["collection"]
619
+
620
+ # Update fields if provided
621
+ if name:
622
+ collection_data["info"]["name"] = name
623
+ if description is not None:
624
+ collection_data["info"]["description"] = description
625
+ if variables is not None:
626
+ collection_data["variable"] = variables
627
+ if auth is not None:
628
+ collection_data["auth"] = auth
629
+
630
+ response = self._make_request('PUT', f'/collections/{collection_id}',
631
+ json={"collection": collection_data})
632
+ return json.dumps(response, indent=2)
633
+ except Exception as e:
634
+ stacktrace = format_exc()
635
+ logger.error(f"Exception when updating collection: {stacktrace}")
636
+ raise ToolException(
637
+ f"Unable to update collection {collection_id}: {str(e)}")
638
+
639
+ def delete_collection(self, collection_id: str, **kwargs) -> str:
640
+ """Delete a collection permanently."""
641
+ try:
642
+ response = self._make_request(
643
+ 'DELETE', f'/collections/{collection_id}')
644
+ return json.dumps({"message": f"Collection {collection_id} deleted successfully"}, indent=2)
645
+ except Exception as e:
646
+ stacktrace = format_exc()
647
+ logger.error(f"Exception when deleting collection: {stacktrace}")
648
+ raise ToolException(
649
+ f"Unable to delete collection {collection_id}: {str(e)}")
650
+
651
+ def duplicate_collection(self, collection_id: str, new_name: str, **kwargs) -> str:
652
+ """Create a copy of an existing collection."""
653
+ try:
654
+ # Get the original collection
655
+ original = self._make_request(
656
+ 'GET', f'/collections/{collection_id}')
657
+ collection_data = original["collection"]
658
+
659
+ # Update the name and remove IDs to create a new collection
660
+ collection_data["info"]["name"] = new_name
661
+ if "_postman_id" in collection_data["info"]:
662
+ del collection_data["info"]["_postman_id"]
663
+
664
+ # Remove item IDs recursively
665
+ self._remove_item_ids(collection_data.get("item", []))
666
+
667
+ response = self._make_request(
668
+ 'POST', '/collections', json={"collection": collection_data})
669
+ return json.dumps(response, indent=2)
670
+ except Exception as e:
671
+ stacktrace = format_exc()
672
+ logger.error(
673
+ f"Exception when duplicating collection: {stacktrace}")
674
+ raise ToolException(
675
+ f"Unable to duplicate collection {collection_id}: {str(e)}")
676
+
677
+ # =================================================================
678
+ # FOLDER MANAGEMENT METHODS
679
+ # =================================================================
680
+
681
+ def create_folder(self, collection_id: str, name: str, description: str = None,
682
+ parent_path: str = None, auth: Dict = None, **kwargs) -> str:
683
+ """Create a new folder in a collection."""
684
+ try:
685
+ # Get current collection
686
+ collection = self._make_request(
687
+ 'GET', f'/collections/{collection_id}')
688
+ collection_data = collection["collection"]
689
+
690
+ # Create folder item
691
+ folder_item = {
692
+ "name": name,
693
+ "item": []
694
+ }
695
+
696
+ if description:
697
+ folder_item["description"] = description
698
+ if auth:
699
+ folder_item["auth"] = auth
700
+
701
+ # Add folder to appropriate location
702
+ if parent_path:
703
+ parent_folders = self._find_folders_by_path(
704
+ collection_data["item"], parent_path)
705
+ if not parent_folders:
706
+ raise ToolException(
707
+ f"Parent folder '{parent_path}' not found")
708
+ parent_folders[0]["item"].append(folder_item)
709
+ else:
710
+ collection_data["item"].append(folder_item)
711
+
712
+ # Update collection
713
+ response = self._make_request('PUT', f'/collections/{collection_id}',
714
+ json={"collection": collection_data})
715
+ return json.dumps({"message": f"Folder '{name}' created successfully"}, indent=2)
716
+ except Exception as e:
717
+ stacktrace = format_exc()
718
+ logger.error(f"Exception when creating folder: {stacktrace}")
719
+ raise ToolException(f"Unable to create folder '{name}': {str(e)}")
720
+
721
+ def update_folder(self, collection_id: str, folder_path: str, name: str = None,
722
+ description: str = None, auth: Dict = None, **kwargs) -> str:
723
+ """Update folder properties."""
724
+ try:
725
+ # Get current collection
726
+ collection = self._make_request(
727
+ 'GET', f'/collections/{collection_id}')
728
+ collection_data = collection["collection"]
729
+
730
+ # Find the folder
731
+ folders = self._find_folders_by_path(
732
+ collection_data["item"], folder_path)
733
+ if not folders:
734
+ raise ToolException(f"Folder '{folder_path}' not found")
735
+
736
+ folder = folders[0]
737
+
738
+ # Update fields if provided
739
+ if name:
740
+ folder["name"] = name
741
+ if description is not None:
742
+ folder["description"] = description
743
+ if auth is not None:
744
+ folder["auth"] = auth
745
+
746
+ # Update collection
747
+ response = self._make_request('PUT', f'/collections/{collection_id}',
748
+ json={"collection": collection_data})
749
+ return json.dumps({"message": f"Folder '{folder_path}' updated successfully"}, indent=2)
750
+ except Exception as e:
751
+ stacktrace = format_exc()
752
+ logger.error(f"Exception when updating folder: {stacktrace}")
753
+ raise ToolException(
754
+ f"Unable to update folder '{folder_path}': {str(e)}")
755
+
756
+ def delete_folder(self, collection_id: str, folder_path: str, **kwargs) -> str:
757
+ """Delete a folder and all its contents permanently."""
758
+ try:
759
+ # Get current collection
760
+ collection = self._make_request(
761
+ 'GET', f'/collections/{collection_id}')
762
+ collection_data = collection["collection"]
763
+
764
+ # Find and remove the folder
765
+ if self._remove_folder_by_path(collection_data["item"], folder_path):
766
+ # Update collection
767
+ response = self._make_request('PUT', f'/collections/{collection_id}',
768
+ json={"collection": collection_data})
769
+ return json.dumps({"message": f"Folder '{folder_path}' deleted successfully"}, indent=2)
770
+ else:
771
+ raise ToolException(f"Folder '{folder_path}' not found")
772
+ except Exception as e:
773
+ stacktrace = format_exc()
774
+ logger.error(f"Exception when deleting folder: {stacktrace}")
775
+ raise ToolException(
776
+ f"Unable to delete folder '{folder_path}': {str(e)}")
777
+
778
+ def move_folder(self, collection_id: str, source_path: str, target_path: str = None, **kwargs) -> str:
779
+ """Move a folder to a different location within the collection."""
780
+ try:
781
+ # Get current collection
782
+ collection = self._make_request(
783
+ 'GET', f'/collections/{collection_id}')
784
+ collection_data = collection["collection"]
785
+
786
+ # Find source folder
787
+ source_folder = self._find_folders_by_path(
788
+ collection_data["item"], source_path)
789
+ if not source_folder:
790
+ raise ToolException(f"Source folder '{source_path}' not found")
791
+
792
+ folder_data = source_folder[0].copy()
793
+
794
+ # Remove from source location
795
+ self._remove_folder_by_path(collection_data["item"], source_path)
796
+
797
+ # Add to target location
798
+ if target_path:
799
+ target_folders = self._find_folders_by_path(
800
+ collection_data["item"], target_path)
801
+ if not target_folders:
802
+ raise ToolException(
803
+ f"Target folder '{target_path}' not found")
804
+ target_folders[0]["item"].append(folder_data)
805
+ else:
806
+ collection_data["item"].append(folder_data)
807
+
808
+ # Update collection
809
+ response = self._make_request('PUT', f'/collections/{collection_id}',
810
+ json={"collection": collection_data})
811
+ return json.dumps({"message": f"Folder moved from '{source_path}' to '{target_path or 'root'}'"}, indent=2)
812
+ except Exception as e:
813
+ stacktrace = format_exc()
814
+ logger.error(f"Exception when moving folder: {stacktrace}")
815
+ raise ToolException(
816
+ f"Unable to move folder from '{source_path}': {str(e)}")
817
+
818
+ # =================================================================
819
+ # REQUEST MANAGEMENT METHODS
820
+ # =================================================================
821
+
822
+ def create_request(self, collection_id: str, name: str, method: str, url: str,
823
+ folder_path: str = None, description: str = None, headers: List[Dict] = None,
824
+ body: Dict = None, auth: Dict = None, tests: str = None,
825
+ pre_request_script: str = None, **kwargs) -> str:
826
+ """Create a new API request in a folder."""
827
+ try:
828
+ # Get current collection
829
+ collection = self._make_request(
830
+ 'GET', f'/collections/{collection_id}')
831
+ collection_data = collection["collection"]
832
+
833
+ # Create request item
834
+ request_item = {
835
+ "name": name,
836
+ "request": {
837
+ "method": method.upper(),
838
+ "header": headers or [],
839
+ "url": url
840
+ }
841
+ }
842
+
843
+ if description:
844
+ request_item["request"]["description"] = description
845
+ if body:
846
+ request_item["request"]["body"] = body
847
+ if auth:
848
+ request_item["request"]["auth"] = auth
849
+
850
+ # Add events if provided
851
+ events = []
852
+ if pre_request_script:
853
+ events.append({
854
+ "listen": "prerequest",
855
+ "script": {
856
+ "exec": pre_request_script.split('\n'),
857
+ "type": "text/javascript"
858
+ }
859
+ })
860
+ if tests:
861
+ events.append({
862
+ "listen": "test",
863
+ "script": {
864
+ "exec": tests.split('\n'),
865
+ "type": "text/javascript"
866
+ }
867
+ })
868
+ if events:
869
+ request_item["event"] = events
870
+
871
+ # Add request to appropriate location
872
+ if folder_path:
873
+ folders = self._find_folders_by_path(
874
+ collection_data["item"], folder_path)
875
+ if not folders:
876
+ raise ToolException(f"Folder '{folder_path}' not found")
877
+ folders[0]["item"].append(request_item)
878
+ else:
879
+ collection_data["item"].append(request_item)
880
+
881
+ # Update collection
882
+ response = self._make_request('PUT', f'/collections/{collection_id}',
883
+ json={"collection": collection_data})
884
+ return json.dumps({"message": f"Request '{name}' created successfully"}, indent=2)
885
+ except Exception as e:
886
+ stacktrace = format_exc()
887
+ logger.error(f"Exception when creating request: {stacktrace}")
888
+ raise ToolException(f"Unable to create request '{name}': {str(e)}")
889
+
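`headers` and `body` are inserted into the request object as-is, so they follow the Postman v2.1 request schema (`header` entries with `key`/`value`, `body` with a `mode`). An illustrative call with placeholder values:

# Illustrative only, not part of the package.
wrapper.create_request(
    collection_id="<collection-id>",
    folder_path="API/Users",
    name="Create user",
    method="POST",
    url="{{base_url}}/users",
    headers=[{"key": "Content-Type", "value": "application/json"}],
    body={"mode": "raw", "raw": '{"name": "{{user_name}}"}'},
    tests='pm.test("created", function () { pm.response.to.have.status(201); });',
)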
890
+ def update_request(self, collection_id: str, request_path: str, name: str = None,
891
+ method: str = None, url: str = None, description: str = None,
892
+ headers: List[Dict] = None, body: Dict = None, auth: Dict = None,
893
+ tests: str = None, pre_request_script: str = None, **kwargs) -> str:
894
+ """Update an existing API request."""
895
+ try:
896
+ # Get current collection
897
+ collection = self._make_request(
898
+ 'GET', f'/collections/{collection_id}')
899
+ collection_data = collection["collection"]
900
+
901
+ # Find the request
902
+ request_item = self._find_request_by_path(
903
+ collection_data["item"], request_path)
904
+ if not request_item:
905
+ raise ToolException(f"Request '{request_path}' not found")
906
+
907
+ # Update fields if provided
908
+ if name:
909
+ request_item["name"] = name
910
+ if method:
911
+ request_item["request"]["method"] = method.upper()
912
+ if url:
913
+ request_item["request"]["url"] = url
914
+ if description is not None:
915
+ request_item["request"]["description"] = description
916
+ if headers is not None:
917
+ request_item["request"]["header"] = headers
918
+ if body is not None:
919
+ request_item["request"]["body"] = body
920
+ if auth is not None:
921
+ request_item["request"]["auth"] = auth
922
+
923
+ # Update events
924
+ if tests is not None or pre_request_script is not None:
925
+ events = request_item.get("event", [])
926
+
927
+ if pre_request_script is not None:
928
+ # Remove existing prerequest events
929
+ events = [e for e in events if e.get(
930
+ "listen") != "prerequest"]
931
+ if pre_request_script:
932
+ events.append({
933
+ "listen": "prerequest",
934
+ "script": {
935
+ "exec": pre_request_script.split('\n'),
936
+ "type": "text/javascript"
937
+ }
938
+ })
939
+
940
+ if tests is not None:
941
+ # Remove existing test events
942
+ events = [e for e in events if e.get("listen") != "test"]
943
+ if tests:
944
+ events.append({
945
+ "listen": "test",
946
+ "script": {
947
+ "exec": tests.split('\n'),
948
+ "type": "text/javascript"
949
+ }
950
+ })
951
+
952
+ request_item["event"] = events
953
+
954
+ # Update collection
955
+ response = self._make_request('PUT', f'/collections/{collection_id}',
956
+ json={"collection": collection_data})
957
+ return json.dumps({"message": f"Request '{request_path}' updated successfully"}, indent=2)
958
+ except Exception as e:
959
+ stacktrace = format_exc()
960
+ logger.error(f"Exception when updating request: {stacktrace}")
961
+ raise ToolException(
962
+ f"Unable to update request '{request_path}': {str(e)}")
963
+
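Note the event semantics above: `None` leaves a script untouched, an empty string removes it, and a non-empty string replaces it (the text is split on newlines into the script's `exec` array). For example:

# Illustrative only, not part of the package.
wrapper.update_request(
    collection_id="<collection-id>",
    request_path="API/Users/Create user",
    tests='pm.test("created", function () { pm.response.to.have.status(201); });',
    pre_request_script="",   # clears any existing pre-request script
)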
964
+ def delete_request(self, collection_id: str, request_path: str, **kwargs) -> str:
965
+ """Delete an API request permanently."""
966
+ try:
967
+ # Get current collection
968
+ collection = self._make_request(
969
+ 'GET', f'/collections/{collection_id}')
970
+ collection_data = collection["collection"]
971
+
972
+ # Find and remove the request
973
+ if self._remove_request_by_path(collection_data["item"], request_path):
974
+ # Update collection
975
+ response = self._make_request('PUT', f'/collections/{collection_id}',
976
+ json={"collection": collection_data})
977
+ return json.dumps({"message": f"Request '{request_path}' deleted successfully"}, indent=2)
978
+ else:
979
+ raise ToolException(f"Request '{request_path}' not found")
980
+ except Exception as e:
981
+ stacktrace = format_exc()
982
+ logger.error(f"Exception when deleting request: {stacktrace}")
983
+ raise ToolException(
984
+ f"Unable to delete request '{request_path}': {str(e)}")
985
+
986
+ def duplicate_request(self, collection_id: str, source_path: str, new_name: str,
987
+ target_path: str = None, **kwargs) -> str:
988
+ """Create a copy of an existing API request."""
989
+ try:
990
+ # Get current collection
991
+ collection = self._make_request(
992
+ 'GET', f'/collections/{collection_id}')
993
+ collection_data = collection["collection"]
994
+
995
+ # Find source request
996
+ source_request = self._find_request_by_path(
997
+ collection_data["item"], source_path)
998
+ if not source_request:
999
+ raise ToolException(
1000
+ f"Source request '{source_path}' not found")
1001
+
1002
+ # Create copy
1003
+ request_copy = json.loads(json.dumps(source_request)) # Deep copy
1004
+ request_copy["name"] = new_name
1005
+
1006
+ # Remove IDs if present
1007
+ if "id" in request_copy:
1008
+ del request_copy["id"]
1009
+
1010
+ # Add to target location
1011
+ if target_path:
1012
+ folders = self._find_folders_by_path(
1013
+ collection_data["item"], target_path)
1014
+ if not folders:
1015
+ raise ToolException(
1016
+ f"Target folder '{target_path}' not found")
1017
+ folders[0]["item"].append(request_copy)
1018
+ else:
1019
+ # Add to same location as source
1020
+ source_folder_path = "/".join(source_path.split("/")[:-1])
1021
+ if source_folder_path:
1022
+ folders = self._find_folders_by_path(
1023
+ collection_data["item"], source_folder_path)
1024
+ folders[0]["item"].append(request_copy)
1025
+ else:
1026
+ collection_data["item"].append(request_copy)
1027
+
1028
+ # Update collection
1029
+ response = self._make_request('PUT', f'/collections/{collection_id}',
1030
+ json={"collection": collection_data})
1031
+ return json.dumps({"message": f"Request duplicated as '{new_name}'"}, indent=2)
1032
+ except Exception as e:
1033
+ stacktrace = format_exc()
1034
+ logger.error(f"Exception when duplicating request: {stacktrace}")
1035
+ raise ToolException(
1036
+ f"Unable to duplicate request '{source_path}': {str(e)}")
1037
+
1038
+ def move_request(self, collection_id: str, source_path: str, target_path: str = None, **kwargs) -> str:
1039
+ """Move an API request to a different folder."""
1040
+ try:
1041
+ # Get current collection
1042
+ collection = self._make_request(
1043
+ 'GET', f'/collections/{collection_id}')
1044
+ collection_data = collection["collection"]
1045
+
1046
+ # Find source request
1047
+ source_request = self._find_request_by_path(
1048
+ collection_data["item"], source_path)
1049
+ if not source_request:
1050
+ raise ToolException(
1051
+ f"Source request '{source_path}' not found")
1052
+
1053
+ request_data = json.loads(json.dumps(source_request)) # Deep copy
1054
+
1055
+ # Remove from source location
1056
+ self._remove_request_by_path(collection_data["item"], source_path)
1057
+
1058
+ # Add to target location
1059
+ if target_path:
1060
+ folders = self._find_folders_by_path(
1061
+ collection_data["item"], target_path)
1062
+ if not folders:
1063
+ raise ToolException(
1064
+ f"Target folder '{target_path}' not found")
1065
+ folders[0]["item"].append(request_data)
1066
+ else:
1067
+ collection_data["item"].append(request_data)
1068
+
1069
+ # Update collection
1070
+ response = self._make_request('PUT', f'/collections/{collection_id}',
1071
+ json={"collection": collection_data})
1072
+ return json.dumps({"message": f"Request moved from '{source_path}' to '{target_path or 'root'}'"}, indent=2)
1073
+ except Exception as e:
1074
+ stacktrace = format_exc()
1075
+ logger.error(f"Exception when moving request: {stacktrace}")
1076
+ raise ToolException(
1077
+ f"Unable to move request '{source_path}': {str(e)}")
1078
+
1079
+ # =================================================================
1080
+ # HELPER METHODS
1081
+ # =================================================================
1082
+
1083
+ def _find_folders_by_path(self, items: List[Dict], path: str) -> List[Dict]:
1084
+ """Find folders by path (supports nested paths like 'API/Users')."""
1085
+ path_parts = [part.strip() for part in path.split('/') if part.strip()]
1086
+ if not path_parts:
1087
+ return items
1088
+
1089
+ results = []
1090
+
1091
+ def find_in_items(current_items: List[Dict], current_path: List[str], depth: int = 0):
1092
+ if depth >= len(current_path):
1093
+ results.extend(current_items)
1094
+ return
1095
+
1096
+ target_name = current_path[depth]
1097
+ for item in current_items:
1098
+ if (item.get('name', '').lower() == target_name.lower() or
1099
+ target_name.lower() in item.get('name', '').lower()) and item.get('item'):
1100
+ if depth == len(current_path) - 1:
1101
+ # This is the target folder
1102
+ results.append(item)
1103
+ else:
1104
+ # Continue searching in subfolders
1105
+ find_in_items(item['item'], current_path, depth + 1)
1106
+
1107
+ find_in_items(items, path_parts)
1108
+ return results
1109
+
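Folder matching is case-insensitive and also accepts substring matches on each path segment, so 'API/Users' resolves a 'Users' folder nested under 'API'. A small behavioural sketch (illustrative data):

# Illustrative only, not part of the package.
items = [
    {"name": "API", "item": [
        {"name": "Users", "item": [
            {"name": "Get user", "request": {"method": "GET", "url": "{{base_url}}/users/1"}},
        ]},
    ]},
]
print([f["name"] for f in wrapper._find_folders_by_path(items, "API/Users")])   # ['Users']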
1110
+ def _extract_requests_from_items(self, items: List[Dict], include_details: bool = False) -> List[Dict]:
1111
+ """Extract requests from items recursively."""
1112
+ requests = []
1113
+
1114
+ for item in items:
1115
+ if item.get('request'):
1116
+ # This is a request
1117
+ request_data = {
1118
+ "name": item.get('name'),
1119
+ "method": item['request'].get('method'),
1120
+ "url": item['request'].get('url')
1121
+ }
1122
+
1123
+ if include_details:
1124
+ request_data.update({
1125
+ "description": item.get('description'),
1126
+ "headers": item['request'].get('header', []),
1127
+ "body": item['request'].get('body'),
1128
+ "auth": item['request'].get('auth'),
1129
+ "tests": [e for e in item.get('event', []) if e.get('listen') == 'test'],
1130
+ "pre_request_scripts": [e for e in item.get('event', []) if e.get('listen') == 'prerequest']
1131
+ })
1132
+
1133
+ requests.append(request_data)
1134
+ elif item.get('item'):
1135
+ # This is a folder, recurse
1136
+ requests.extend(self._extract_requests_from_items(
1137
+ item['item'], include_details))
1138
+
1139
+ return requests
1140
+
1141
+ def _search_requests_in_items(self, items: List[Dict], query: str, search_in: str, method: str = None) -> List[Dict]:
1142
+ """Search for requests in items recursively."""
1143
+ results = []
1144
+ query_lower = query.lower()
1145
+
1146
+ for item in items:
1147
+ if item.get('request'):
1148
+ # This is a request
1149
+ request = item['request']
1150
+ matches = False
1151
+
1152
+ # Check method filter first
1153
+ if method and request.get('method', '').upper() != method.upper():
1154
+ continue
1155
+
1156
+ # Check search criteria
1157
+ if search_in == 'all' or search_in == 'name':
1158
+ if query_lower in item.get('name', '').lower():
1159
+ matches = True
1160
+
1161
+ if search_in == 'all' or search_in == 'url':
1162
+ url = request.get('url', '')
1163
+ if isinstance(url, dict):
1164
+ url = url.get('raw', '')
1165
+ if query_lower in url.lower():
1166
+ matches = True
1167
+
1168
+ if search_in == 'all' or search_in == 'description':
1169
+ description = item.get(
1170
+ 'description', '') or request.get('description', '') or ''
1171
+ if query_lower in str(description).lower():
1172
+ matches = True
1173
+
1174
+ if matches:
1175
+ results.append({
1176
+ "name": item.get('name'),
1177
+ "method": request.get('method'),
1178
+ "url": request.get('url'),
1179
+ "description": item.get('description') or request.get('description'),
1180
+ "path": self._get_item_path(items, item)
1181
+ })
1182
+
1183
+ elif item.get('item'):
1184
+ # This is a folder, recurse
1185
+ results.extend(self._search_requests_in_items(
1186
+ item['item'], query, search_in, method))
1187
+
1188
+ return results
1189
+
1190
+ def _get_item_path(self, root_items: List[Dict], target_item: Dict, current_path: str = "") -> str:
1191
+ """Get the path of an item within the collection structure."""
1192
+ for item in root_items:
1193
+ item_path = f"{current_path}/{item['name']}" if current_path else item['name']
1194
+
1195
+ if item == target_item:
1196
+ return item_path
1197
+
1198
+ if item.get('item'):
1199
+ result = self._get_item_path(
1200
+ item['item'], target_item, item_path)
1201
+ if result:
1202
+ return result
1203
+
1204
+ return ""
1205
+
1206
+ def _find_request_by_path(self, items: List[Dict], request_path: str) -> Optional[Dict]:
1207
+ """Find a request by its path."""
1208
+ path_parts = [part.strip()
1209
+ for part in request_path.split('/') if part.strip()]
1210
+ if not path_parts:
1211
+ return None
1212
+
1213
+ current_items = items
1214
+
1215
+ # Navigate through folders to the request
1216
+ for i, part in enumerate(path_parts):
1217
+ found = False
1218
+ for item in current_items:
1219
+ if item.get('name', '').lower() == part.lower():
1220
+ if i == len(path_parts) - 1:
1221
+ # This should be the request
1222
+ if item.get('request'):
1223
+ return item
1224
+ else:
1225
+ return None
1226
+ else:
1227
+ # This should be a folder
1228
+ if item.get('item'):
1229
+ current_items = item['item']
1230
+ found = True
1231
+ break
1232
+ else:
1233
+ return None
1234
+
1235
+ if not found:
1236
+ return None
1237
+
1238
+ return None
1239
+
1240
+ def _remove_folder_by_path(self, items: List[Dict], folder_path: str) -> bool:
1241
+ """Remove a folder by its path."""
1242
+ path_parts = [part.strip()
1243
+ for part in folder_path.split('/') if part.strip()]
1244
+ if not path_parts:
1245
+ return False
1246
+
1247
+ if len(path_parts) == 1:
1248
+ # Remove from current level
1249
+ for i, item in enumerate(items):
1250
+ if item.get('name', '').lower() == path_parts[0].lower() and item.get('item') is not None:
1251
+ del items[i]
1252
+ return True
1253
+ return False
1254
+ else:
1255
+ # Navigate to parent folder
1256
+ parent_path = '/'.join(path_parts[:-1])
1257
+ parent_folders = self._find_folders_by_path(items, parent_path)
1258
+ if parent_folders:
1259
+ return self._remove_folder_by_path(parent_folders[0]['item'], path_parts[-1])
1260
+ return False
1261
+
1262
+ def _remove_request_by_path(self, items: List[Dict], request_path: str) -> bool:
1263
+ """Remove a request by its path."""
1264
+ path_parts = [part.strip()
1265
+ for part in request_path.split('/') if part.strip()]
1266
+ if not path_parts:
1267
+ return False
1268
+
1269
+ if len(path_parts) == 1:
1270
+ # Remove from current level
1271
+ for i, item in enumerate(items):
1272
+ if item.get('name', '').lower() == path_parts[0].lower() and item.get('request'):
1273
+ del items[i]
1274
+ return True
1275
+ return False
1276
+ else:
1277
+ # Navigate to parent folder
1278
+ parent_path = '/'.join(path_parts[:-1])
1279
+ parent_folders = self._find_folders_by_path(items, parent_path)
1280
+ if parent_folders:
1281
+ return self._remove_request_by_path(parent_folders[0]['item'], path_parts[-1])
1282
+ return False
1283
+
1284
+ def _remove_item_ids(self, items: List[Dict]):
1285
+ """Remove IDs from items recursively for duplication."""
1286
+ for item in items:
1287
+ if 'id' in item:
1288
+ del item['id']
1289
+ if item.get('item'):
1290
+ self._remove_item_ids(item['item'])
1291
+
1292
+ # =================================================================
1293
+ # ANALYSIS HELPER METHODS
1294
+ # =================================================================
1295
+
1296
+ def _perform_collection_analysis(self, collection: Dict) -> Dict:
1297
+ """Perform comprehensive analysis of a collection."""
1298
+ collection_data = collection['collection']
1299
+ folders = self._analyze_folders(collection_data.get('item', []))
1300
+ total_requests = self._count_requests(collection_data.get('item', []))
1301
+ issues = self._identify_collection_issues(collection_data)
1302
+ score = self._calculate_quality_score(collection_data, folders, issues)
1303
+ recommendations = self._generate_recommendations(issues)
1304
+
1305
+ return {
1306
+ "collection_id": collection_data['info'].get('_postman_id', ''),
1307
+ "collection_name": collection_data['info'].get('name', ''),
1308
+ "total_requests": total_requests,
1309
+ "folders": folders,
1310
+ "issues": issues,
1311
+ "recommendations": recommendations,
1312
+ "score": score,
1313
+ "overall_security_score": self._calculate_overall_security_score(folders),
1314
+ "overall_performance_score": self._calculate_overall_performance_score(folders),
1315
+ "overall_documentation_score": self._calculate_overall_documentation_score(folders)
1316
+ }
1317
+
1318
+ def _analyze_folders(self, items: List[Dict], base_path: str = "") -> List[Dict]:
1319
+ """Analyze all folders in a collection."""
1320
+ folders = []
1321
+
1322
+ for item in items:
1323
+ if item.get('item') is not None: # This is a folder
1324
+ folder_path = f"{base_path}/{item['name']}" if base_path else item['name']
1325
+ analysis = self._perform_folder_analysis(item, folder_path)
1326
+ folders.append(analysis)
1327
+
1328
+ # Recursively analyze subfolders
1329
+ subfolders = self._analyze_folders(item['item'], folder_path)
1330
+ folders.extend(subfolders)
1331
+
1332
+ return folders
1333
+
1334
+ def _perform_folder_analysis(self, folder: Dict, path: str) -> Dict:
1335
+ """Perform analysis of a specific folder."""
1336
+ requests = self._analyze_requests(folder.get('item', []))
1337
+ request_count = self._count_requests(folder.get('item', []))
1338
+ issues = self._identify_folder_issues(folder, requests)
1339
+
1340
+ return {
1341
+ "name": folder['name'],
1342
+ "path": path,
1343
+ "request_count": request_count,
1344
+ "requests": requests,
1345
+ "issues": issues,
1346
+ "has_consistent_naming": self._check_consistent_naming(folder.get('item', [])),
1347
+ "has_proper_structure": bool(folder.get('description') and folder.get('item')),
1348
+ "auth_consistency": self._check_auth_consistency(requests),
1349
+ "avg_documentation_quality": self._calculate_avg_documentation_quality(requests),
1350
+ "avg_security_score": self._calculate_avg_security_score(requests),
1351
+ "avg_performance_score": self._calculate_avg_performance_score(requests)
1352
+ }
1353
+
1354
+ def _analyze_requests(self, items: List[Dict]) -> List[Dict]:
1355
+ """Analyze requests within a folder."""
1356
+ requests = []
1357
+
1358
+ for item in items:
1359
+ if item.get('request'): # This is a request
1360
+ analysis = self._perform_request_analysis(item)
1361
+ requests.append(analysis)
1362
+
1363
+ return requests
1364
+
1365
+ def _perform_request_analysis(self, item: Dict) -> Dict:
1366
+ """Perform comprehensive analysis of a specific request."""
1367
+ request = item['request']
1368
+ issues = []
1369
+
1370
+ # Basic checks
1371
+ has_auth = bool(request.get('auth')
1372
+ or self._has_auth_in_headers(request))
1373
+ has_description = bool(item.get('description')
1374
+ or request.get('description'))
1375
+ has_tests = bool([e for e in item.get('event', [])
1376
+ if e.get('listen') == 'test'])
1377
+ has_examples = bool(item.get('response', []))
1378
+
1379
+ # Enhanced analysis
1380
+ url = request.get('url', '')
1381
+ if isinstance(url, dict):
1382
+ url = url.get('raw', '')
1383
+
1384
+ has_hardcoded_url = self._detect_hardcoded_url(url)
1385
+ has_hardcoded_data = self._detect_hardcoded_data(request)
1386
+ has_proper_headers = self._validate_headers(request)
1387
+ has_variables = self._detect_variable_usage(request)
1388
+ has_error_handling = self._detect_error_handling(item)
1389
+ follows_naming_convention = self._validate_naming_convention(
1390
+ item['name'])
1391
+ has_security_issues = self._detect_security_issues(request)
1392
+ has_performance_issues = self._detect_performance_issues(request)
1393
+
1394
+ # Calculate scores
1395
+ security_score = self._calculate_security_score(
1396
+ request, has_auth, has_security_issues)
1397
+ performance_score = self._calculate_performance_score(
1398
+ request, has_performance_issues)
1399
+
1400
+ # Generate issues
1401
+ self._generate_request_issues(issues, item, {
1402
+ 'has_description': has_description,
1403
+ 'has_auth': has_auth,
1404
+ 'has_tests': has_tests,
1405
+ 'has_hardcoded_url': has_hardcoded_url,
1406
+ 'has_hardcoded_data': has_hardcoded_data,
1407
+ 'has_proper_headers': has_proper_headers,
1408
+ 'has_security_issues': has_security_issues,
1409
+ 'follows_naming_convention': follows_naming_convention
1410
+ })
1411
+
1412
+ return {
1413
+ "name": item['name'],
1414
+ "method": request.get('method'),
1415
+ "url": url,
1416
+ "has_auth": has_auth,
1417
+ "has_description": has_description,
1418
+ "has_tests": has_tests,
1419
+ "has_examples": has_examples,
1420
+ "issues": issues,
1421
+ "has_hardcoded_url": has_hardcoded_url,
1422
+ "has_hardcoded_data": has_hardcoded_data,
1423
+ "has_proper_headers": has_proper_headers,
1424
+ "has_variables": has_variables,
1425
+ "has_error_handling": has_error_handling,
1426
+ "follows_naming_convention": follows_naming_convention,
1427
+ "has_security_issues": has_security_issues,
1428
+ "has_performance_issues": has_performance_issues,
1429
+ "auth_type": request.get('auth', {}).get('type'),
1430
+ "response_examples": len(item.get('response', [])),
1431
+ "test_coverage": self._assess_test_coverage(item),
1432
+ "documentation_quality": self._assess_documentation_quality(item),
1433
+ "security_score": security_score,
1434
+ "performance_score": performance_score
1435
+ }
1436
+
1437
+ def _count_requests(self, items: List[Dict]) -> int:
1438
+ """Count total requests in items."""
1439
+ count = 0
1440
+ for item in items:
1441
+ if item.get('request'):
1442
+ count += 1
1443
+ elif item.get('item'):
1444
+ count += self._count_requests(item['item'])
1445
+ return count
1446
+
1447
+ def _has_auth_in_headers(self, request: Dict) -> bool:
1448
+ """Check if request has authentication in headers."""
1449
+ headers = request.get('header', [])
1450
+ auth_headers = ['authorization', 'x-api-key', 'x-auth-token']
1451
+ return any(h.get('key', '').lower() in auth_headers for h in headers)
1452
+
1453
+ def _detect_hardcoded_url(self, url: str) -> bool:
1454
+ """Detect hardcoded URLs that should use variables."""
1455
+ hardcoded_patterns = [
1456
+ r'^https?://\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}', # IP addresses
1457
+ r'^https?://localhost', # localhost
1458
+ # Direct domains
1459
+ r'^https?://[a-zA-Z0-9.-]+\.(com|org|net|io|dev)',
1460
+ r'api\.example\.com', # Example domains
1461
+ r'staging\.|dev\.|test\.' # Environment-specific
1462
+ ]
1463
+ return any(re.search(pattern, url) for pattern in hardcoded_patterns) and '{{' not in url
1464
+
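The heuristic above only flags a literal host when the URL contains no '{{variable}}' reference at all. Expected behaviour, roughly (illustrative, not a shipped test):

# Illustrative expectations for the heuristic above.
wrapper._detect_hardcoded_url("https://api.example.com/v1/users")   # True  (literal host)
wrapper._detect_hardcoded_url("http://127.0.0.1:8080/health")       # True  (IP address)
wrapper._detect_hardcoded_url("{{base_url}}/v1/users")              # False (uses a variable)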
1465
+ def _detect_hardcoded_data(self, request: Dict) -> bool:
1466
+ """Detect hardcoded data in request body and headers."""
1467
+ # Check headers
1468
+ headers = request.get('header', [])
1469
+ has_hardcoded_headers = any(
1470
+ ('token' in h.get('key', '').lower() or
1471
+ 'key' in h.get('key', '').lower() or
1472
+ 'secret' in h.get('key', '').lower()) and
1473
+ '{{' not in h.get('value', '')
1474
+ for h in headers
1475
+ )
1476
+
1477
+ # Check body
1478
+ has_hardcoded_body = False
1479
+ body = request.get('body') or {}
1480
+ if body.get('raw'):
1481
+ try:
1482
+ body_data = json.loads(body['raw'])
1483
+ has_hardcoded_body = self._contains_hardcoded_values(body_data)
1484
+ except json.JSONDecodeError:
1485
+ # If not JSON, check for common patterns
1486
+ has_hardcoded_body = re.search(
1487
+ r'("api_key"|"token"|"password"):\s*"[^{]', body['raw']) is not None
1488
+
1489
+ return has_hardcoded_headers or has_hardcoded_body
1490
+
1491
+ def _contains_hardcoded_values(self, obj: Any) -> bool:
1492
+ """Check if object contains hardcoded values that should be variables."""
1493
+ if not isinstance(obj, dict):
1494
+ return False
1495
+
1496
+ for key, value in obj.items():
1497
+ if isinstance(value, str):
1498
+ # Check for sensitive keys
1499
+ if key.lower() in ['token', 'key', 'secret', 'password', 'api_key', 'client_id', 'client_secret']:
1500
+ if '{{' not in value:
1501
+ return True
1502
+ # Check for email patterns, URLs
1503
+ if re.search(r'@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}', value) or value.startswith('http'):
1504
+ if '{{' not in value:
1505
+ return True
1506
+ elif isinstance(value, dict):
1507
+ if self._contains_hardcoded_values(value):
1508
+ return True
1509
+
1510
+ return False
1511
+
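The recursion above only inspects string and dict values; a condensed standalone restatement with made-up payloads shows which bodies get caught.

import re

SENSITIVE_KEYS = ['token', 'key', 'secret', 'password', 'api_key',
                  'client_id', 'client_secret']


def contains_hardcoded_values(obj) -> bool:
    # Condensed restatement of _contains_hardcoded_values above.
    if not isinstance(obj, dict):
        return False
    for key, value in obj.items():
        if isinstance(value, str) and '{{' not in value:
            if key.lower() in SENSITIVE_KEYS:
                return True
            if re.search(r'@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}', value) or value.startswith('http'):
                return True
        elif isinstance(value, dict) and contains_hardcoded_values(value):
            return True
    return False


print(contains_hardcoded_values({'api_key': 'abc123'}))           # True  (literal secret)
print(contains_hardcoded_values({'api_key': '{{api_key}}'}))      # False (uses a variable)
print(contains_hardcoded_values({'user': {'email': 'a@b.io'}}))   # True  (literal e-mail)

Note that values nested inside lists are not traversed by either version.
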
+     def _validate_headers(self, request: Dict) -> bool:
+         """Validate request headers."""
+         headers = request.get('header', [])
+         header_names = [h.get('key', '').lower() for h in headers]
+         method = request.get('method', '').upper()
+
+         # Check for essential headers
+         if method in ['POST', 'PUT', 'PATCH'] and request.get('body'):
+             if 'content-type' not in header_names:
+                 return False
+
+         if method in ['GET', 'POST', 'PUT', 'PATCH']:
+             if 'accept' not in header_names:
+                 return False
+
+         return True
+
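In practice the rule means a body-carrying POST/PUT/PATCH must declare Content-Type, and GET/POST/PUT/PATCH must declare Accept. A small check with a made-up request:

def has_proper_headers(request: dict) -> bool:
    # Mirror of _validate_headers above.
    header_names = [h.get('key', '').lower() for h in request.get('header', [])]
    method = request.get('method', '').upper()
    if method in ['POST', 'PUT', 'PATCH'] and request.get('body'):
        if 'content-type' not in header_names:
            return False
    if method in ['GET', 'POST', 'PUT', 'PATCH']:
        if 'accept' not in header_names:
            return False
    return True


post = {'method': 'POST', 'body': {'raw': '{}'},
        'header': [{'key': 'Accept', 'value': 'application/json'}]}
print(has_proper_headers(post))   # False: body present but no Content-Type
post['header'].append({'key': 'Content-Type', 'value': 'application/json'})
print(has_proper_headers(post))   # True
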
+     def _detect_variable_usage(self, request: Dict) -> bool:
+         """Detect variable usage in request."""
+         url = request.get('url', '')
+         if isinstance(url, dict):
+             url = url.get('raw', '')
+
+         has_url_variables = '{{' in url
+         has_header_variables = any('{{' in h.get('value', '')
+                                    for h in request.get('header', []))
+
+         has_body_variables = False
+         body = request.get('body', {})
+         if body.get('raw'):
+             has_body_variables = '{{' in body['raw']
+
+         return has_url_variables or has_header_variables or has_body_variables
+
+     def _detect_error_handling(self, item: Dict) -> bool:
+         """Detect error handling in tests."""
+         test_scripts = [e for e in item.get(
+             'event', []) if e.get('listen') == 'test']
+
+         for script in test_scripts:
+             script_code = '\n'.join(script.get('script', {}).get('exec', []))
+             # Coarse heuristic: any literal 4/5 digit (4xx/5xx) or error/fail keyword
+             if ('4' in script_code or '5' in script_code or
+                     'error' in script_code.lower() or 'fail' in script_code.lower()):
+                 return True
+
+         return False
+
+     def _validate_naming_convention(self, name: str) -> bool:
+         """Validate naming convention."""
+         has_consistent_case = re.match(
+             r'^[a-zA-Z][a-zA-Z0-9\s\-_]*$', name) is not None
+         has_descriptive_name = len(
+             name) > 3 and 'test' not in name.lower() and 'temp' not in name.lower()
+         return has_consistent_case and has_descriptive_name
+
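Concretely, a name passes when it starts with a letter, uses only letters, digits, spaces, hyphens or underscores, is longer than three characters, and does not contain "test" or "temp". A few made-up names:

import re


def follows_naming_convention(name: str) -> bool:
    # Mirror of _validate_naming_convention above.
    has_consistent_case = re.match(r'^[a-zA-Z][a-zA-Z0-9\s\-_]*$', name) is not None
    has_descriptive_name = (len(name) > 3
                            and 'test' not in name.lower()
                            and 'temp' not in name.lower())
    return has_consistent_case and has_descriptive_name


print(follows_naming_convention('Get User Profile'))   # True
print(follows_naming_convention('test123'))            # False ('test' placeholder)
print(follows_naming_convention('Get /users (v2)'))    # False (disallowed characters)
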
+     def _detect_security_issues(self, request: Dict) -> bool:
+         """Detect security issues."""
+         url = request.get('url', '')
+         if isinstance(url, dict):
+             url = url.get('raw', '')
+
+         # Check for exposed credentials in URL
+         if re.search(r'[?&](token|key|password|secret)=([^&\s]+)', url):
+             return True
+
+         # Check for weak authentication
+         auth = request.get('auth', {})
+         if auth.get('type') == 'basic' and not url.startswith('https'):
+             return True
+
+         # Check headers for exposed credentials
+         headers = request.get('header', [])
+         return any('secret' in h.get('key', '').lower() or 'password' in h.get('key', '').lower()
+                    for h in headers)
+
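The URL part of the check is the easiest to trip accidentally; a standalone run of the same pattern on made-up URLs shows that it only matches the bare parameter names token, key, password and secret.

import re

CREDS_IN_URL = r'[?&](token|key|password|secret)=([^&\s]+)'

# Mirror of the first check in _detect_security_issues above; URLs are made up.
print(bool(re.search(CREDS_IN_URL, 'https://api.acme.dev/items?page=2')))        # False
print(bool(re.search(CREDS_IN_URL, 'https://api.acme.dev/items?token=abc123')))  # True
print(bool(re.search(CREDS_IN_URL, '{{base_url}}/items?api_key={{api_key}}')))   # False (name is api_key, not key)
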
+     def _detect_performance_issues(self, request: Dict) -> bool:
+         """Detect performance issues."""
+         # Large request body
+         body = request.get('body', {})
+         if body.get('raw') and len(body['raw']) > 10000:
+             return True
+
+         # Too many headers
+         if len(request.get('header', [])) > 20:
+             return True
+
+         # Too many query parameters
+         url = request.get('url', '')
+         if isinstance(url, dict):
+             url = url.get('raw', '')
+
+         query_params = url.split('?')[1] if '?' in url else ''
+         if query_params and len(query_params.split('&')) > 15:
+             return True
+
+         return False
+
+     def _calculate_security_score(self, request: Dict, has_auth: bool, has_security_issues: bool) -> int:
+         """Calculate security score."""
+         score = 100
+         method = request.get('method', '').upper()
+
+         if not has_auth and method in ['POST', 'PUT', 'PATCH', 'DELETE']:
+             score -= 40
+
+         if has_security_issues:
+             score -= 30
+
+         url = request.get('url', '')
+         if isinstance(url, dict):
+             url = url.get('raw', '')
+
+         if url.startswith('http://'):
+             score -= 20
+
+         auth = request.get('auth', {})
+         if auth.get('type') == 'basic':
+             score -= 10
+
+         return max(0, score)
+
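Reading the deductions together: an unauthenticated write over plain HTTP with a flagged issue bottoms out quickly. A worked example with a made-up request:

request = {'method': 'POST', 'url': 'http://api.acme.com/v1/orders', 'auth': {}}
has_auth, has_security_issues = False, True

score = 100
if not has_auth and request['method'] in ['POST', 'PUT', 'PATCH', 'DELETE']:
    score -= 40                                   # unauthenticated write operation
if has_security_issues:
    score -= 30                                   # flagged security issue
if request['url'].startswith('http://'):
    score -= 20                                   # plain HTTP
if request.get('auth', {}).get('type') == 'basic':
    score -= 10                                   # basic auth (not hit here)
print(max(0, score))                              # -> 10
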
+     def _calculate_performance_score(self, request: Dict, has_performance_issues: bool) -> int:
+         """Calculate performance score."""
+         score = 100
+
+         if has_performance_issues:
+             score -= 50
+
+         headers = request.get('header', [])
+         header_names = [h.get('key', '').lower() for h in headers]
+
+         if 'cache-control' not in header_names:
+             score -= 10
+
+         if 'accept-encoding' not in header_names:
+             score -= 10
+
+         return max(0, score)
+
+     def _assess_test_coverage(self, item: Dict) -> str:
+         """Assess test coverage."""
+         test_scripts = [e for e in item.get(
+             'event', []) if e.get('listen') == 'test']
+
+         if not test_scripts:
+             return 'none'
+
+         all_test_code = '\n'.join([
+             '\n'.join(script.get('script', {}).get('exec', []))
+             for script in test_scripts
+         ])
+
+         checks = [
+             'pm.response.code' in all_test_code or 'status' in all_test_code,
+             'responseTime' in all_test_code,
+             'pm.response.json' in all_test_code or 'body' in all_test_code,
+             '4' in all_test_code or '5' in all_test_code  # coarse 4xx/5xx reference check
+         ]
+
+         check_count = sum(checks)
+
+         if check_count >= 3:
+             return 'comprehensive'
+         elif check_count >= 1:
+             return 'basic'
+
+         return 'none'
+
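A made-up Postman test script walked through the same four checks; note the last check is a coarse digit heuristic, so the literal 500 in the latency assertion is what satisfies it here.

exec_lines = [
    "pm.test('status is 200', function () {",
    "    pm.response.to.have.status(200);",
    "});",
    "pm.test('fast enough', function () {",
    "    pm.expect(pm.response.responseTime).to.be.below(500);",
    "});",
    "pm.test('body has id', function () {",
    "    pm.expect(pm.response.json().id).to.exist;",
    "});",
]
code = '\n'.join(exec_lines)

checks = [
    'pm.response.code' in code or 'status' in code,   # status assertion
    'responseTime' in code,                           # latency assertion
    'pm.response.json' in code or 'body' in code,     # body assertion
    '4' in code or '5' in code,                       # 4xx/5xx references (digit heuristic)
]
print(sum(checks))  # -> 4, i.e. 'comprehensive'
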
+     def _assess_documentation_quality(self, item: Dict) -> str:
+         """Assess documentation quality."""
+         description = item.get('description', '') or item.get(
+             'request', {}).get('description', '')
+
+         if not description:
+             return 'none'
+
+         description_lower = description.lower()
+         quality_factors = [
+             'parameter' in description_lower,
+             'response' in description_lower,
+             'example' in description_lower,
+             'auth' in description_lower,
+             'error' in description_lower
+         ]
+
+         factor_count = sum(quality_factors)
+
+         if factor_count >= 4:
+             return 'excellent'
+         elif factor_count >= 2:
+             return 'good'
+         elif factor_count >= 1 or len(description) > 50:
+             return 'minimal'
+
+         return 'none'
+
+     def _check_consistent_naming(self, items: List[Dict]) -> bool:
+         """Check if items have consistent naming."""
+         if len(items) <= 1:
+             return True
+
+         naming_patterns = []
+         for item in items:
+             name = item.get('name', '').lower()
+             if re.match(r'^[a-z][a-z0-9]*(_[a-z0-9]+)*$', name):
+                 naming_patterns.append('snake_case')
+             elif re.match(r'^[a-z][a-zA-Z0-9]*$', name):
+                 naming_patterns.append('camelCase')
+             elif re.match(r'^[a-z][a-z0-9]*(-[a-z0-9]+)*$', name):
+                 naming_patterns.append('kebab-case')
+             else:
+                 naming_patterns.append('mixed')
+
+         unique_patterns = set(naming_patterns)
+         return len(unique_patterns) == 1 and 'mixed' not in unique_patterns
+
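A standalone restatement of the bucketing with made-up item names; a set of names is flagged as inconsistent as soon as two different buckets, or any 'mixed' name, appear.

import re


def naming_pattern(name: str) -> str:
    # Mirror of the per-item classification above (names are lower-cased first).
    name = name.lower()
    if re.match(r'^[a-z][a-z0-9]*(_[a-z0-9]+)*$', name):
        return 'snake_case'
    if re.match(r'^[a-z][a-zA-Z0-9]*$', name):
        return 'camelCase'
    if re.match(r'^[a-z][a-z0-9]*(-[a-z0-9]+)*$', name):
        return 'kebab-case'
    return 'mixed'


names = ['get_user', 'create_order', 'Delete Session']
patterns = {naming_pattern(n) for n in names}
print(patterns)                                         # {'snake_case', 'mixed'} (order may vary)
print(len(patterns) == 1 and 'mixed' not in patterns)   # False -> inconsistent naming

Note that because names are lower-cased before matching, a camelCase name such as getUser becomes 'getuser' and lands in the snake_case bucket (the first pattern allows zero underscores).
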
+     def _check_auth_consistency(self, requests: List[Dict]) -> str:
+         """Check authentication consistency across requests."""
+         if not requests:
+             return 'none'
+
+         auth_types = set(req.get('auth_type') or 'none' for req in requests)
+
+         if len(auth_types) == 1:
+             return 'none' if 'none' in auth_types else 'consistent'
+
+         return 'mixed'
+
+     def _calculate_avg_documentation_quality(self, requests: List[Dict]) -> int:
+         """Calculate average documentation quality score."""
+         if not requests:
+             return 0
+
+         quality_scores = {
+             'excellent': 100,
+             'good': 75,
+             'minimal': 50,
+             'none': 0
+         }
+
+         scores = [quality_scores.get(
+             req.get('documentation_quality', 'none'), 0) for req in requests]
+         return round(sum(scores) / len(scores))
+
+     def _calculate_avg_security_score(self, requests: List[Dict]) -> int:
+         """Calculate average security score."""
+         if not requests:
+             return 0
+
+         scores = [req.get('security_score', 0) for req in requests]
+         return round(sum(scores) / len(scores))
+
+     def _calculate_avg_performance_score(self, requests: List[Dict]) -> int:
+         """Calculate average performance score."""
+         if not requests:
+             return 0
+
+         scores = [req.get('performance_score', 0) for req in requests]
+         return round(sum(scores) / len(scores))
+
+     def _identify_collection_issues(self, collection_data: Dict) -> List[Dict]:
+         """Identify collection-level issues."""
+         issues = []
+
+         if not collection_data.get('info', {}).get('description'):
+             issues.append({
+                 'type': 'warning',
+                 'severity': 'medium',
+                 'message': 'Collection lacks description',
+                 'location': 'Collection root',
+                 'suggestion': 'Add a description explaining the purpose of this collection'
+             })
+
+         if not collection_data.get('auth'):
+             issues.append({
+                 'type': 'info',
+                 'severity': 'low',
+                 'message': 'Collection lacks default authentication',
+                 'location': 'Collection root',
+                 'suggestion': 'Consider setting up collection-level authentication'
+             })
+
+         return issues
+
+     def _identify_folder_issues(self, folder: Dict, requests: List[Dict]) -> List[Dict]:
+         """Identify folder-level issues."""
+         issues = []
+
+         if not folder.get('description'):
+             issues.append({
+                 'type': 'warning',
+                 'severity': 'low',
+                 'message': 'Folder lacks description',
+                 'location': folder['name'],
+                 'suggestion': 'Add a description explaining the purpose of this folder'
+             })
+
+         if not requests and (not folder.get('item') or len(folder['item']) == 0):
+             issues.append({
+                 'type': 'warning',
+                 'severity': 'medium',
+                 'message': 'Empty folder',
+                 'location': folder['name'],
+                 'suggestion': 'Consider removing empty folders or adding requests'
+             })
+
+         return issues
+
+     def _generate_request_issues(self, issues: List[Dict], item: Dict, analysis: Dict):
+         """Generate request-specific issues."""
+         if not analysis['has_description']:
+             issues.append({
+                 'type': 'warning',
+                 'severity': 'medium',
+                 'message': 'Request lacks description',
+                 'location': item['name'],
+                 'suggestion': 'Add a clear description explaining what this request does'
+             })
+
+         if not analysis['has_auth'] and item['request']['method'] in ['POST', 'PUT', 'PATCH', 'DELETE']:
+             issues.append({
+                 'type': 'warning',
+                 'severity': 'high',
+                 'message': 'Sensitive operation without authentication',
+                 'location': item['name'],
+                 'suggestion': 'Add authentication for this request'
+             })
+
+         if not analysis['has_tests']:
+             issues.append({
+                 'type': 'info',
+                 'severity': 'high',
+                 'message': 'Request lacks test scripts',
+                 'location': item['name'],
+                 'suggestion': 'Add test scripts to validate response'
+             })
+
+         if analysis['has_hardcoded_url']:
+             issues.append({
+                 'type': 'warning',
+                 'severity': 'high',
+                 'message': 'Request contains hardcoded URL',
+                 'location': item['name'],
+                 'suggestion': 'Replace hardcoded URLs with environment variables'
+             })
+
+         if analysis['has_security_issues']:
+             issues.append({
+                 'type': 'error',
+                 'severity': 'high',
+                 'message': 'Security vulnerabilities detected',
+                 'location': item['name'],
+                 'suggestion': 'Address security issues such as exposed credentials'
+             })
+
+     def _calculate_quality_score(self, collection_data: Dict, folders: List[Dict], issues: List[Dict]) -> int:
+         """Calculate quality score (0-100)."""
+         score = 100
+
+         # Deduct points for issues
+         for issue in issues:
+             severity = issue.get('severity', 'low')
+             if severity == 'high':
+                 score -= 10
+             elif severity == 'medium':
+                 score -= 5
+             elif severity == 'low':
+                 score -= 2
+
+         # Deduct points for folder and request issues
+         for folder in folders:
+             for issue in folder.get('issues', []):
+                 severity = issue.get('severity', 'low')
+                 if severity == 'high':
+                     score -= 5
+                 elif severity == 'medium':
+                     score -= 3
+                 elif severity == 'low':
+                     score -= 1
+
+             for request in folder.get('requests', []):
+                 for issue in request.get('issues', []):
+                     severity = issue.get('severity', 'low')
+                     if severity == 'high':
+                         score -= 3
+                     elif severity == 'medium':
+                         score -= 2
+                     elif severity == 'low':
+                         score -= 1
+
+         return max(0, min(100, score))
+
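The weights are easier to see with a tiny worked example (the issue lists are made up); collection-level issues cost the most, request-level issues the least.

collection_issues = [{'severity': 'medium'}]                   # -5
folder_issues = [{'severity': 'high'}]                         # -5
request_issues = [{'severity': 'high'}, {'severity': 'low'}]   # -3 and -1

score = 100
score -= sum({'high': 10, 'medium': 5, 'low': 2}[i['severity']] for i in collection_issues)
score -= sum({'high': 5, 'medium': 3, 'low': 1}[i['severity']] for i in folder_issues)
score -= sum({'high': 3, 'medium': 2, 'low': 1}[i['severity']] for i in request_issues)
print(max(0, min(100, score)))  # 100 - 5 - 5 - (3 + 1) = 86
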
+     def _generate_recommendations(self, issues: List[Dict]) -> List[str]:
+         """Generate recommendations based on issues."""
+         recommendations = []
+         suggestion_counts = {}
+
+         # Count similar suggestions
+         for issue in issues:
+             suggestion = issue.get('suggestion', '')
+             if suggestion:
+                 suggestion_counts[suggestion] = suggestion_counts.get(
+                     suggestion, 0) + 1
+
+         # Generate recommendations from most common suggestions
+         sorted_suggestions = sorted(
+             suggestion_counts.items(), key=lambda x: x[1], reverse=True)[:10]
+
+         for suggestion, count in sorted_suggestions:
+             if count > 1:
+                 recommendations.append(f"{suggestion} ({count} instances)")
+             else:
+                 recommendations.append(suggestion)
+
+         return recommendations
+
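A standalone run of the same aggregation on a made-up issue list; repeated suggestions float to the top and pick up an instance count.

issues = [
    {'suggestion': 'Add test scripts to validate response'},
    {'suggestion': 'Add test scripts to validate response'},
    {'suggestion': 'Replace hardcoded URLs with environment variables'},
]

counts = {}
for issue in issues:
    suggestion = issue.get('suggestion', '')
    if suggestion:
        counts[suggestion] = counts.get(suggestion, 0) + 1

ranked = sorted(counts.items(), key=lambda x: x[1], reverse=True)[:10]
print([f"{s} ({n} instances)" if n > 1 else s for s, n in ranked])
# ['Add test scripts to validate response (2 instances)',
#  'Replace hardcoded URLs with environment variables']
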
+     def _calculate_overall_security_score(self, folders: List[Dict]) -> int:
+         """Calculate overall security score."""
+         if not folders:
+             return 0
+
+         scores = []
+         for folder in folders:
+             avg_score = folder.get('avg_security_score', 0)
+             if avg_score > 0:
+                 scores.append(avg_score)
+
+         return round(sum(scores) / len(scores)) if scores else 0
+
+     def _calculate_overall_performance_score(self, folders: List[Dict]) -> int:
+         """Calculate overall performance score."""
+         if not folders:
+             return 0
+
+         scores = []
+         for folder in folders:
+             avg_score = folder.get('avg_performance_score', 0)
+             if avg_score > 0:
+                 scores.append(avg_score)
+
+         return round(sum(scores) / len(scores)) if scores else 0
+
+     def _calculate_overall_documentation_score(self, folders: List[Dict]) -> int:
+         """Calculate overall documentation score."""
+         if not folders:
+             return 0
+
+         scores = []
+         for folder in folders:
+             avg_score = folder.get('avg_documentation_quality', 0)
+             if avg_score > 0:
+                 scores.append(avg_score)
+
+         return round(sum(scores) / len(scores)) if scores else 0
+
+     def _generate_improvements(self, analysis: Dict) -> List[Dict]:
+         """Generate improvement suggestions with enhanced analysis."""
+         improvements = []
+
+         # Collection-level improvements
+         if analysis['score'] < 80:
+             improvements.append({
+                 'id': 'collection-quality',
+                 'title': 'Improve Overall Collection Quality',
+                 'description': f"Collection quality score is {analysis['score']}/100. Focus on addressing high-priority issues.",
+                 'priority': 'high',
+                 'category': 'quality',
+                 'impact': 'high'
+             })
+
+         if analysis['overall_security_score'] < 70:
+             improvements.append({
+                 'id': 'security-enhancement',
+                 'title': 'Enhance Security Practices',
+                 'description': f"Security score is {analysis['overall_security_score']}/100. Review authentication and data handling.",
+                 'priority': 'high',
+                 'category': 'security',
+                 'impact': 'high'
+             })
+
+         if analysis['overall_documentation_score'] < 60:
+             improvements.append({
+                 'id': 'documentation-improvement',
+                 'title': 'Improve Documentation',
+                 'description': f"Documentation score is {analysis['overall_documentation_score']}/100. Add descriptions and examples.",
+                 'priority': 'medium',
+                 'category': 'documentation',
+                 'impact': 'medium'
+             })
+
+         # Add specific improvements based on common issues
+         issue_counts = {}
+         for folder in analysis.get('folders', []):
+             for request in folder.get('requests', []):
+                 for issue in request.get('issues', []):
+                     issue_type = issue.get('message', '')
+                     issue_counts[issue_type] = issue_counts.get(
+                         issue_type, 0) + 1
+
+         # Generate improvements for most common issues
+         if issue_counts.get('Request lacks test scripts', 0) > 3:
+             improvements.append({
+                 'id': 'add-test-scripts',
+                 'title': 'Add Test Scripts to Requests',
+                 'description': f"Found {issue_counts['Request lacks test scripts']} requests without test scripts.",
+                 'priority': 'medium',
+                 'category': 'testing',
+                 'impact': 'medium'
+             })
+
+         if issue_counts.get('Request contains hardcoded URL', 0) > 2:
+             improvements.append({
+                 'id': 'use-environment-variables',
+                 'title': 'Use Environment Variables',
+                 'description': f"Found {issue_counts['Request contains hardcoded URL']} requests with hardcoded URLs.",
+                 'priority': 'high',
+                 'category': 'maintainability',
+                 'impact': 'high'
+             })
+
+         return improvements
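For orientation, the method reads only a handful of keys from the analysis dict assembled earlier; below is a minimal made-up input, annotated with which thresholds it would and would not cross.

analysis = {
    'score': 72,                         # < 80  -> 'collection-quality' improvement fires
    'overall_security_score': 65,        # < 70  -> 'security-enhancement' fires
    'overall_documentation_score': 80,   # >= 60 -> no documentation improvement
    'folders': [{'requests': [{'issues': [
        {'message': 'Request lacks test scripts'},
    ]}]}],                               # a single occurrence stays under the > 3 threshold
}

issue_counts = {}
for folder in analysis.get('folders', []):
    for request in folder.get('requests', []):
        for issue in request.get('issues', []):
            issue_counts[issue['message']] = issue_counts.get(issue['message'], 0) + 1
print(issue_counts)  # {'Request lacks test scripts': 1}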