iflow-mcp_democratize-technology-chronos-mcp 2.0.0__py3-none-any.whl

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects changes between package versions as they appear in their public registries.
Files changed (68)
  1. chronos_mcp/__init__.py +5 -0
  2. chronos_mcp/__main__.py +9 -0
  3. chronos_mcp/accounts.py +410 -0
  4. chronos_mcp/bulk.py +946 -0
  5. chronos_mcp/caldav_utils.py +149 -0
  6. chronos_mcp/calendars.py +204 -0
  7. chronos_mcp/config.py +187 -0
  8. chronos_mcp/credentials.py +190 -0
  9. chronos_mcp/events.py +515 -0
  10. chronos_mcp/exceptions.py +477 -0
  11. chronos_mcp/journals.py +477 -0
  12. chronos_mcp/logging_config.py +23 -0
  13. chronos_mcp/models.py +202 -0
  14. chronos_mcp/py.typed +0 -0
  15. chronos_mcp/rrule.py +259 -0
  16. chronos_mcp/search.py +315 -0
  17. chronos_mcp/server.py +121 -0
  18. chronos_mcp/tasks.py +518 -0
  19. chronos_mcp/tools/__init__.py +29 -0
  20. chronos_mcp/tools/accounts.py +151 -0
  21. chronos_mcp/tools/base.py +59 -0
  22. chronos_mcp/tools/bulk.py +557 -0
  23. chronos_mcp/tools/calendars.py +142 -0
  24. chronos_mcp/tools/events.py +698 -0
  25. chronos_mcp/tools/journals.py +310 -0
  26. chronos_mcp/tools/tasks.py +414 -0
  27. chronos_mcp/utils.py +163 -0
  28. chronos_mcp/validation.py +636 -0
  29. iflow_mcp_democratize_technology_chronos_mcp-2.0.0.dist-info/METADATA +299 -0
  30. iflow_mcp_democratize_technology_chronos_mcp-2.0.0.dist-info/RECORD +68 -0
  31. iflow_mcp_democratize_technology_chronos_mcp-2.0.0.dist-info/WHEEL +5 -0
  32. iflow_mcp_democratize_technology_chronos_mcp-2.0.0.dist-info/entry_points.txt +2 -0
  33. iflow_mcp_democratize_technology_chronos_mcp-2.0.0.dist-info/licenses/LICENSE +21 -0
  34. iflow_mcp_democratize_technology_chronos_mcp-2.0.0.dist-info/top_level.txt +2 -0
  35. tests/__init__.py +0 -0
  36. tests/conftest.py +91 -0
  37. tests/unit/__init__.py +0 -0
  38. tests/unit/test_accounts.py +380 -0
  39. tests/unit/test_accounts_ssrf.py +134 -0
  40. tests/unit/test_base.py +135 -0
  41. tests/unit/test_bulk.py +380 -0
  42. tests/unit/test_bulk_create.py +408 -0
  43. tests/unit/test_bulk_delete.py +341 -0
  44. tests/unit/test_bulk_resource_limits.py +74 -0
  45. tests/unit/test_caldav_utils.py +300 -0
  46. tests/unit/test_calendars.py +286 -0
  47. tests/unit/test_config.py +111 -0
  48. tests/unit/test_config_validation.py +128 -0
  49. tests/unit/test_credentials_security.py +189 -0
  50. tests/unit/test_cryptography_security.py +178 -0
  51. tests/unit/test_events.py +536 -0
  52. tests/unit/test_exceptions.py +58 -0
  53. tests/unit/test_journals.py +1097 -0
  54. tests/unit/test_models.py +95 -0
  55. tests/unit/test_race_conditions.py +202 -0
  56. tests/unit/test_recurring_events.py +156 -0
  57. tests/unit/test_rrule.py +217 -0
  58. tests/unit/test_search.py +372 -0
  59. tests/unit/test_search_advanced.py +333 -0
  60. tests/unit/test_server_input_validation.py +219 -0
  61. tests/unit/test_ssrf_protection.py +505 -0
  62. tests/unit/test_tasks.py +918 -0
  63. tests/unit/test_thread_safety.py +301 -0
  64. tests/unit/test_tools_journals.py +617 -0
  65. tests/unit/test_tools_tasks.py +968 -0
  66. tests/unit/test_url_validation_security.py +234 -0
  67. tests/unit/test_utils.py +180 -0
  68. tests/unit/test_validation.py +983 -0
chronos_mcp/tools/base.py
@@ -0,0 +1,59 @@
+"""
+Base utilities for MCP tools
+"""
+
+import uuid
+from functools import wraps
+from typing import Any, Dict
+
+from ..exceptions import ChronosError, ErrorSanitizer
+from ..logging_config import setup_logging
+
+logger = setup_logging()
+
+
+def handle_tool_errors(func):
+    """Decorator to handle common error patterns in tools"""
+
+    @wraps(func)
+    async def wrapper(*args, **kwargs):
+        request_id = str(uuid.uuid4())
+        kwargs["request_id"] = request_id
+
+        try:
+            return await func(*args, **kwargs)
+        except ChronosError as e:
+            e.request_id = request_id
+            logger.error(
+                f"Request {request_id} failed: {ErrorSanitizer.sanitize_message(str(e))}"
+            )
+            return {
+                "success": False,
+                "error": ErrorSanitizer.sanitize_message(str(e)),
+                "error_code": type(e).__name__,
+                "request_id": request_id,
+            }
+        except Exception as e:
+            sanitized_error = ErrorSanitizer.sanitize_message(str(e))
+            logger.error(
+                f"Unexpected error in request {request_id}: {type(e).__name__}: {sanitized_error}"
+            )
+            return {
+                "success": False,
+                "error": f"Error: {type(e).__name__}: {sanitized_error}",
+                "error_code": type(e).__name__,
+                "request_id": request_id,
+            }
+
+    return wrapper
+
+
+def create_success_response(message: str, request_id: str, **kwargs) -> Dict[str, Any]:
+    """Create a standardized success response"""
+    response = {
+        "success": True,
+        "message": message,
+        "request_id": request_id,
+    }
+    response.update(kwargs)
+    return response
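
For orientation, here is a minimal usage sketch of the two helpers above. The tool name echo_summary and its behaviour are hypothetical, invented only to illustrate how the decorator and response helper compose; the sketch assumes the chronos-mcp package is importable.

# Hypothetical example, not part of the package.
import asyncio

from chronos_mcp.tools.base import create_success_response, handle_tool_errors


@handle_tool_errors
async def echo_summary(summary: str, request_id: str = None) -> dict:
    # handle_tool_errors generates a request_id, injects it via kwargs, and
    # converts ChronosError or unexpected exceptions into sanitized error dicts.
    return create_success_response("Echoed summary", request_id, summary=summary)


print(asyncio.run(echo_summary(summary="Team sync")))
# {'success': True, 'message': 'Echoed summary', 'request_id': '<uuid>', 'summary': 'Team sync'}
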
chronos_mcp/tools/bulk.py
@@ -0,0 +1,557 @@
+"""
+Bulk operation tools for Chronos MCP
+"""
+
+import uuid
+from typing import Any, Dict, List, Optional
+
+from pydantic import Field
+
+from ..bulk import BulkOperationMode, BulkOptions
+from ..exceptions import ChronosError, ErrorSanitizer, ValidationError
+from ..logging_config import setup_logging
+from ..validation import InputValidator
+from .base import create_success_response, handle_tool_errors
+
+logger = setup_logging()
+
+# Module-level managers dictionary for dependency injection
+_managers = {}
+
+
+def _format_bulk_response(result, request_id: str, **extra_fields) -> Dict[str, Any]:
+    """Format bulk operation response with consistent success indicators"""
+    response = {
+        "success": result.failed == 0,  # Only true if ALL succeed
+        "partial_success": 0
+        < result.successful
+        < result.total,  # True for mixed results
+        "total": result.total,
+        "succeeded": result.successful,
+        "failed": result.failed,
+        "request_id": request_id,
+    }
+
+    # Add any extra fields
+    response.update(extra_fields)
+
+    return response
+
+
+def _ensure_managers_initialized():
+    """Ensure managers are initialized, with fallback to server-level managers"""
+    if not _managers:
+        try:
+            # Try to import and use server-level managers for backwards compatibility
+            from .. import server
+
+            # Use the real bulk manager from the server
+            bulk_manager = getattr(server, "bulk_manager", None)
+            event_manager = getattr(server, "event_manager", None)
+            task_manager = getattr(server, "task_manager", None)
+            journal_manager = getattr(server, "journal_manager", None)
+
+            if not bulk_manager:
+                raise AttributeError("bulk_manager not found in server module")
+
+            _managers.update(
+                {
+                    "bulk_manager": bulk_manager,
+                    "event_manager": event_manager,
+                    "task_manager": task_manager,
+                    "journal_manager": journal_manager,
+                }
+            )
+        except (ImportError, AttributeError) as e:
+            logger.error(f"Failed to initialize managers: {str(e)}")
+            raise RuntimeError(f"Manager initialization failed: {str(e)}")
+
+
+# Bulk tool functions - defined as standalone functions for importability
+async def bulk_create_events(
+    calendar_uid: str = Field(..., description="Calendar UID"),
+    events: List[Dict[str, Any]] = Field(
+        ..., description="List of event data dictionaries"
+    ),
+    mode: str = Field("continue", description="Operation mode: continue, fail_fast"),
+    validate_before_execute: bool = Field(
+        True, description="Validate events before creation"
+    ),
+    account: Optional[str] = Field(None, description="Account alias"),
+) -> Dict[str, Any]:
+    """Create multiple events in bulk"""
+    request_id = str(uuid.uuid4())
+
+    # Ensure managers are available for backwards compatibility with tests
+    _ensure_managers_initialized()
+
+    try:
+        # Validate input
+        if not isinstance(events, list):
+            return {
+                "success": False,
+                "error": "Events must be a list",
+                "error_code": "VALIDATION_ERROR",
+                "request_id": request_id,
+            }
+
+        # Validate mode
+        if mode not in ["continue", "fail_fast"]:
+            return {
+                "success": False,
+                "error": f"Invalid mode: {mode}. Must be 'continue' or 'fail_fast'",
+                "error_code": "VALIDATION_ERROR",
+                "request_id": request_id,
+            }
+
+        # Handle empty list
+        if not events:
+            return {
+                "success": True,
+                "total": 0,
+                "succeeded": 0,
+                "failed": 0,
+                "details": [],
+                "request_id": request_id,
+            }
+
+        # Convert mode to BulkOperationMode for compatibility
+        if mode == "continue":
+            bulk_mode = BulkOperationMode.CONTINUE_ON_ERROR
+        else:  # fail_fast
+            bulk_mode = BulkOperationMode.FAIL_FAST
+
+        # Create bulk options
+        options = BulkOptions(mode=bulk_mode)
+
+        # Parse datetime fields and attendees JSON in events
+        import json
+
+        from ..utils import parse_datetime
+
+        parsed_events = []
+        for event in events:
+            parsed_event = event.copy()
+
+            # Normalize field names: convert "start"/"end" to "dtstart"/"dtend"
+            if "start" in parsed_event:
+                parsed_event["dtstart"] = parsed_event.pop("start")
+            if "end" in parsed_event:
+                parsed_event["dtend"] = parsed_event.pop("end")
+
+            # Parse datetime fields
+            if "dtstart" in parsed_event and isinstance(parsed_event["dtstart"], str):
+                parsed_event["dtstart"] = parse_datetime(parsed_event["dtstart"])
+            if "dtend" in parsed_event and isinstance(parsed_event["dtend"], str):
+                parsed_event["dtend"] = parse_datetime(parsed_event["dtend"])
+
+            # Parse alarm_minutes if it's a string
+            if "alarm_minutes" in parsed_event and isinstance(
+                parsed_event["alarm_minutes"], str
+            ):
+                try:
+                    parsed_event["alarm_minutes"] = int(parsed_event["alarm_minutes"])
+                except ValueError:
+                    pass  # Keep as string if not a valid int
+
+            # Parse attendees JSON if provided
+            if "attendees_json" in parsed_event:
+                try:
+                    parsed_event["attendees"] = json.loads(
+                        parsed_event["attendees_json"]
+                    )
+                    del parsed_event["attendees_json"]
+                except json.JSONDecodeError:
+                    pass  # Will be caught by validation
+
+            parsed_events.append(parsed_event)
+
+        # Execute bulk operation
+        result = _managers["bulk_manager"].bulk_create_events(
+            calendar_uid=calendar_uid,
+            events=parsed_events,
+            options=options,
+            account_alias=account,
+        )
+
+        # Format response to match test expectations
+        details = []
+        for res in result.results:
+            detail = {
+                "index": res.index,
+                "success": res.success,
+            }
+            if res.success:
+                detail["uid"] = res.uid
+                # Try to get summary from original event data
+                if res.index < len(events):
+                    detail["summary"] = events[res.index].get("summary")
+            else:
+                detail["error"] = res.error
+            details.append(detail)
+
+        return _format_bulk_response(
+            result,
+            request_id,
+            details=details,
+        )
+
+    except ChronosError as e:
+        e.request_id = request_id
+        logger.error(f"Bulk create events failed: {e}")
+        return {
+            "success": False,
+            "error": ErrorSanitizer.get_user_friendly_message(e),
+            "error_code": e.error_code,
+            "request_id": request_id,
+        }
+
+    except Exception as e:
+        chronos_error = ChronosError(
+            message=f"Failed to bulk create events: {str(e)}",
+            details={
+                "tool": "bulk_create_events",
+                "calendar_uid": calendar_uid,
+                "original_error": str(e),
+                "original_type": type(e).__name__,
+            },
+            request_id=request_id,
+        )
+        logger.error(f"Unexpected error in bulk_create_events: {chronos_error}")
+        return {
+            "success": False,
+            "error": ErrorSanitizer.get_user_friendly_message(chronos_error),
+            "error_code": chronos_error.error_code,
+            "request_id": request_id,
+        }
+
+
+@handle_tool_errors
+async def bulk_delete_events(
+    calendar_uid: str = Field(..., description="Calendar UID"),
+    event_uids: List[str] = Field(..., description="List of event UIDs to delete"),
+    mode: str = Field("continue", description="Operation mode"),
+    parallel: bool = Field(True, description="Execute operations in parallel"),
+    account: Optional[str] = Field(None, description="Account alias"),
+    request_id: str = None,
+) -> Dict[str, Any]:
+    """Delete multiple events in bulk"""
+    # Ensure managers are available for backwards compatibility with tests
+    _ensure_managers_initialized()
+
+    # Convert mode string to BulkOperationMode
+    mode_mapping = {
+        "continue": BulkOperationMode.CONTINUE_ON_ERROR,
+        "fail_fast": BulkOperationMode.FAIL_FAST,
+        "atomic": BulkOperationMode.ATOMIC,
+    }
+
+    if mode not in mode_mapping:
+        raise ValidationError(
+            f"Invalid mode: {mode}. Must be one of: continue, fail_fast, atomic"
+        )
+
+    bulk_mode = mode_mapping[mode]
+
+    # Create bulk options
+    options = BulkOptions(mode=bulk_mode)
+
+    # Execute bulk operation
+    result = _managers["bulk_manager"].bulk_delete_events(
+        calendar_uid=calendar_uid,
+        event_uids=event_uids,
+        options=options,
+        account_alias=account,
+        request_id=request_id,
+    )
+
+    # Format response to match test expectations
+    details = []
+    for res in result.results:
+        detail = {
+            "index": res.index,
+            "success": res.success,
+            "uid": res.uid if res.uid else event_uids[res.index],
+        }
+        if not res.success:
+            detail["error"] = res.error
+        details.append(detail)
+
+    return _format_bulk_response(
+        result,
+        request_id,
+        details=details,
+    )
+
+
+@handle_tool_errors
+async def bulk_create_tasks(
+    calendar_uid: str = Field(..., description="Calendar UID"),
+    tasks_json: str = Field(..., description="JSON array of task data"),
+    mode: str = Field("continue", description="Operation mode"),
+    parallel: bool = Field(True, description="Execute operations in parallel"),
+    account: Optional[str] = Field(None, description="Account alias"),
+    request_id: str = None,
+) -> Dict[str, Any]:
+    """Create multiple tasks in bulk"""
+    import json
+
+    # Ensure managers are available for backwards compatibility with tests
+    _ensure_managers_initialized()
+
+    # Parse tasks JSON
+    try:
+        tasks_data = json.loads(tasks_json)
+        if not isinstance(tasks_data, list):
+            raise ValueError("Tasks data must be a JSON array")
+    except (json.JSONDecodeError, ValueError) as e:
+        raise ValidationError(f"Invalid tasks JSON: {str(e)}")
+
+    # Convert mode string to BulkOperationMode
+    mode_mapping = {
+        "continue": BulkOperationMode.CONTINUE_ON_ERROR,
+        "fail_fast": BulkOperationMode.FAIL_FAST,
+        "atomic": BulkOperationMode.ATOMIC,
+    }
+
+    if mode not in mode_mapping:
+        raise ValidationError(
+            f"Invalid mode: {mode}. Must be one of: continue, fail_fast, atomic"
+        )
+
+    bulk_mode = mode_mapping[mode]
+
+    # Create bulk options
+    options = BulkOptions(mode=bulk_mode)
+
+    # Execute bulk operation
+    if "bulk_manager" not in _managers or _managers["bulk_manager"] is None:
+        raise RuntimeError("BulkOperationManager not available")
+
+    try:
+        result = _managers["bulk_manager"].bulk_create_tasks(
+            calendar_uid=calendar_uid,
+            tasks=tasks_data,
+            options=options,
+            account_alias=account,
+        )
+    except AttributeError as e:
+        raise RuntimeError(f"BulkOperationManager missing method: {str(e)}")
+    except Exception as e:
+        logger.error(f"Bulk task creation failed: {type(e).__name__}: {str(e)}")
+        raise
+
+    return _format_bulk_response(
+        result,
+        request_id,
+        message=f"Bulk task creation completed: {result.successful} created, {result.failed} failed",
+        created_count=result.successful,
+        failed_count=result.failed,
+        results=result.results,
+        errors=[r.error for r in result.results if r.error],
+    )
+
+
+@handle_tool_errors
+async def bulk_delete_tasks(
+    calendar_uid: str = Field(..., description="Calendar UID"),
+    task_uids: List[str] = Field(..., description="List of task UIDs to delete"),
+    mode: str = Field("continue", description="Operation mode"),
+    parallel: bool = Field(True, description="Execute operations in parallel"),
+    account: Optional[str] = Field(None, description="Account alias"),
+    request_id: str = None,
+) -> Dict[str, Any]:
+    """Delete multiple tasks in bulk"""
+    # Ensure managers are available for backwards compatibility with tests
+    _ensure_managers_initialized()
+
+    # Convert mode string to BulkOperationMode
+    mode_mapping = {
+        "continue": BulkOperationMode.CONTINUE_ON_ERROR,
+        "fail_fast": BulkOperationMode.FAIL_FAST,
+        "atomic": BulkOperationMode.ATOMIC,
+    }
+
+    if mode not in mode_mapping:
+        raise ValidationError(
+            f"Invalid mode: {mode}. Must be one of: continue, fail_fast, atomic"
+        )
+
+    bulk_mode = mode_mapping[mode]
+
+    # Create bulk options
+    options = BulkOptions(mode=bulk_mode)
+
+    # Execute bulk operation
+    result = _managers["bulk_manager"].bulk_delete_tasks(
+        calendar_uid=calendar_uid,
+        task_uids=task_uids,
+        options=options,
+        account_alias=account,
+        request_id=request_id,
+    )
+
+    return _format_bulk_response(
+        result,
+        request_id,
+        message=f"Bulk task deletion completed: {result.successful} deleted, {result.failed} failed",
+        deleted_count=result.successful,
+        failed_count=result.failed,
+        results=result.results,
+        errors=[r.error for r in result.results if r.error],
+    )
+
+
+@handle_tool_errors
+async def bulk_create_journals(
+    calendar_uid: str = Field(..., description="Calendar UID"),
+    journals_json: str = Field(..., description="JSON array of journal data"),
+    mode: str = Field("continue", description="Operation mode"),
+    parallel: bool = Field(True, description="Execute operations in parallel"),
+    account: Optional[str] = Field(None, description="Account alias"),
+    request_id: str = None,
+) -> Dict[str, Any]:
+    """Create multiple journal entries in bulk"""
+    import json
+
+    # Ensure managers are available for backwards compatibility with tests
+    _ensure_managers_initialized()
+
+    # Parse journals JSON
+    try:
+        journals_data = json.loads(journals_json)
+        if not isinstance(journals_data, list):
+            raise ValueError("Journals data must be a JSON array")
+    except (json.JSONDecodeError, ValueError) as e:
+        raise ValidationError(f"Invalid journals JSON: {str(e)}")
+
+    # Convert mode string to BulkOperationMode
+    mode_mapping = {
+        "continue": BulkOperationMode.CONTINUE_ON_ERROR,
+        "fail_fast": BulkOperationMode.FAIL_FAST,
+        "atomic": BulkOperationMode.ATOMIC,
+    }
+
+    if mode not in mode_mapping:
+        raise ValidationError(
+            f"Invalid mode: {mode}. Must be one of: continue, fail_fast, atomic"
+        )
+
+    bulk_mode = mode_mapping[mode]
+
+    # Create bulk options
+    options = BulkOptions(mode=bulk_mode)
+
+    # Execute bulk operation
+    if "bulk_manager" not in _managers or _managers["bulk_manager"] is None:
+        raise RuntimeError("BulkOperationManager not available")
+
+    try:
+        result = _managers["bulk_manager"].bulk_create_journals(
+            calendar_uid=calendar_uid,
+            journals=journals_data,
+            options=options,
+            account_alias=account,
+        )
+    except AttributeError as e:
+        raise RuntimeError(f"BulkOperationManager missing method: {str(e)}")
+    except Exception as e:
+        logger.error(f"Bulk journal creation failed: {type(e).__name__}: {str(e)}")
+        raise
+
+    return _format_bulk_response(
+        result,
+        request_id,
+        message=f"Bulk journal creation completed: {result.successful} created, {result.failed} failed",
+        created_count=result.successful,
+        failed_count=result.failed,
+        results=result.results,
+        errors=[r.error for r in result.results if r.error],
+    )
+
+
+@handle_tool_errors
+async def bulk_delete_journals(
+    calendar_uid: str = Field(..., description="Calendar UID"),
+    journal_uids: List[str] = Field(..., description="List of journal UIDs to delete"),
+    mode: str = Field("continue", description="Operation mode"),
+    parallel: bool = Field(True, description="Execute operations in parallel"),
+    account: Optional[str] = Field(None, description="Account alias"),
+    request_id: str = None,
+) -> Dict[str, Any]:
+    """Delete multiple journal entries in bulk"""
+    # Ensure managers are available for backwards compatibility with tests
+    _ensure_managers_initialized()
+
+    # Convert mode string to BulkOperationMode
+    mode_mapping = {
+        "continue": BulkOperationMode.CONTINUE_ON_ERROR,
+        "fail_fast": BulkOperationMode.FAIL_FAST,
+        "atomic": BulkOperationMode.ATOMIC,
+    }
+
+    if mode not in mode_mapping:
+        raise ValidationError(
+            f"Invalid mode: {mode}. Must be one of: continue, fail_fast, atomic"
+        )
+
+    bulk_mode = mode_mapping[mode]
+
+    # Create bulk options
+    options = BulkOptions(mode=bulk_mode)
+
+    # Execute bulk operation
+    result = _managers["bulk_manager"].bulk_delete_journals(
+        calendar_uid=calendar_uid,
+        journal_uids=journal_uids,
+        options=options,
+        account_alias=account,
+        request_id=request_id,
+    )
+
+    return _format_bulk_response(
+        result,
+        request_id,
+        message=f"Bulk journal deletion completed: {result.successful} deleted, {result.failed} failed",
+        deleted_count=result.successful,
+        failed_count=result.failed,
+        results=result.results,
+        errors=[r.error for r in result.results if r.error],
+    )
+
+
+def register_bulk_tools(mcp, managers):
+    """Register bulk operation tools with the MCP server"""
+
+    # Update module-level managers for dependency injection
+    _managers.update(managers)
+
+    # Register all bulk tools with the MCP server
+    mcp.tool(bulk_create_events)
+    mcp.tool(bulk_delete_events)
+    mcp.tool(bulk_create_tasks)
+    mcp.tool(bulk_delete_tasks)
+    mcp.tool(bulk_create_journals)
+    mcp.tool(bulk_delete_journals)
+
+
+# Add .fn attribute to each function for backwards compatibility with tests
+# This mimics the behavior of FastMCP decorated functions
+bulk_create_events.fn = bulk_create_events
+bulk_delete_events.fn = bulk_delete_events
+bulk_create_tasks.fn = bulk_create_tasks
+bulk_delete_tasks.fn = bulk_delete_tasks
+bulk_create_journals.fn = bulk_create_journals
+bulk_delete_journals.fn = bulk_delete_journals
+
+
+# Export all tools for backwards compatibility
+__all__ = [
+    "bulk_create_events",
+    "bulk_delete_events",
+    "bulk_create_tasks",
+    "bulk_delete_tasks",
+    "bulk_create_journals",
+    "bulk_delete_journals",
+    "register_bulk_tools",
+]
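
As a rough end-to-end sketch, the snippet below exercises the dependency-injection seam (_managers, normally populated via register_bulk_tools) with a stub manager and shows the response shape produced by _format_bulk_response. StubBulkManager, the calendar UID, and the event data are invented for illustration; only the imported names come from the package, and the example assumes chronos-mcp is installed.

# Illustration only: StubBulkManager is a made-up stand-in for the package's
# bulk manager, exposing just the attributes the tool functions read.
import asyncio
from types import SimpleNamespace

from chronos_mcp.tools import bulk as bulk_tools


class StubBulkManager:
    def bulk_create_events(self, calendar_uid, events, options, account_alias=None):
        # Pretend every event was created; mirror total/successful/failed/results.
        results = [
            SimpleNamespace(index=i, success=True, uid=f"uid-{i}", error=None)
            for i, _ in enumerate(events)
        ]
        return SimpleNamespace(
            total=len(events), successful=len(events), failed=0, results=results
        )


async def main():
    # Inject the stub the same way register_bulk_tools(mcp, managers) would.
    bulk_tools._managers.update({"bulk_manager": StubBulkManager()})
    response = await bulk_tools.bulk_create_events(
        calendar_uid="demo-calendar",  # hypothetical calendar UID
        events=[
            {
                "summary": "Stand-up",
                "start": "2025-01-06T09:00:00",
                "end": "2025-01-06T09:15:00",
            }
        ],
        mode="continue",
        account=None,
    )
    print(response)
    # Expected shape: {'success': True, 'partial_success': False, 'total': 1,
    #                  'succeeded': 1, 'failed': 0, 'request_id': '...',
    #                  'details': [{'index': 0, 'success': True, 'uid': 'uid-0',
    #                               'summary': 'Stand-up'}]}


asyncio.run(main())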