iflow-mcp_democratize-technology-chronos-mcp 2.0.0__py3-none-any.whl
This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- chronos_mcp/__init__.py +5 -0
- chronos_mcp/__main__.py +9 -0
- chronos_mcp/accounts.py +410 -0
- chronos_mcp/bulk.py +946 -0
- chronos_mcp/caldav_utils.py +149 -0
- chronos_mcp/calendars.py +204 -0
- chronos_mcp/config.py +187 -0
- chronos_mcp/credentials.py +190 -0
- chronos_mcp/events.py +515 -0
- chronos_mcp/exceptions.py +477 -0
- chronos_mcp/journals.py +477 -0
- chronos_mcp/logging_config.py +23 -0
- chronos_mcp/models.py +202 -0
- chronos_mcp/py.typed +0 -0
- chronos_mcp/rrule.py +259 -0
- chronos_mcp/search.py +315 -0
- chronos_mcp/server.py +121 -0
- chronos_mcp/tasks.py +518 -0
- chronos_mcp/tools/__init__.py +29 -0
- chronos_mcp/tools/accounts.py +151 -0
- chronos_mcp/tools/base.py +59 -0
- chronos_mcp/tools/bulk.py +557 -0
- chronos_mcp/tools/calendars.py +142 -0
- chronos_mcp/tools/events.py +698 -0
- chronos_mcp/tools/journals.py +310 -0
- chronos_mcp/tools/tasks.py +414 -0
- chronos_mcp/utils.py +163 -0
- chronos_mcp/validation.py +636 -0
- iflow_mcp_democratize_technology_chronos_mcp-2.0.0.dist-info/METADATA +299 -0
- iflow_mcp_democratize_technology_chronos_mcp-2.0.0.dist-info/RECORD +68 -0
- iflow_mcp_democratize_technology_chronos_mcp-2.0.0.dist-info/WHEEL +5 -0
- iflow_mcp_democratize_technology_chronos_mcp-2.0.0.dist-info/entry_points.txt +2 -0
- iflow_mcp_democratize_technology_chronos_mcp-2.0.0.dist-info/licenses/LICENSE +21 -0
- iflow_mcp_democratize_technology_chronos_mcp-2.0.0.dist-info/top_level.txt +2 -0
- tests/__init__.py +0 -0
- tests/conftest.py +91 -0
- tests/unit/__init__.py +0 -0
- tests/unit/test_accounts.py +380 -0
- tests/unit/test_accounts_ssrf.py +134 -0
- tests/unit/test_base.py +135 -0
- tests/unit/test_bulk.py +380 -0
- tests/unit/test_bulk_create.py +408 -0
- tests/unit/test_bulk_delete.py +341 -0
- tests/unit/test_bulk_resource_limits.py +74 -0
- tests/unit/test_caldav_utils.py +300 -0
- tests/unit/test_calendars.py +286 -0
- tests/unit/test_config.py +111 -0
- tests/unit/test_config_validation.py +128 -0
- tests/unit/test_credentials_security.py +189 -0
- tests/unit/test_cryptography_security.py +178 -0
- tests/unit/test_events.py +536 -0
- tests/unit/test_exceptions.py +58 -0
- tests/unit/test_journals.py +1097 -0
- tests/unit/test_models.py +95 -0
- tests/unit/test_race_conditions.py +202 -0
- tests/unit/test_recurring_events.py +156 -0
- tests/unit/test_rrule.py +217 -0
- tests/unit/test_search.py +372 -0
- tests/unit/test_search_advanced.py +333 -0
- tests/unit/test_server_input_validation.py +219 -0
- tests/unit/test_ssrf_protection.py +505 -0
- tests/unit/test_tasks.py +918 -0
- tests/unit/test_thread_safety.py +301 -0
- tests/unit/test_tools_journals.py +617 -0
- tests/unit/test_tools_tasks.py +968 -0
- tests/unit/test_url_validation_security.py +234 -0
- tests/unit/test_utils.py +180 -0
- tests/unit/test_validation.py +983 -0
tests/unit/test_bulk.py
ADDED
@@ -0,0 +1,380 @@
"""
Unit tests for bulk operations
"""

from unittest.mock import Mock

from chronos_mcp.bulk import (
    BulkOperationManager,
    BulkOperationMode,
    BulkOptions,
    BulkResult,
    OperationResult,
)


class TestBulkOptions:
    def test_bulk_options_defaults(self):
        """Test BulkOptions default values"""
        opts = BulkOptions()

        assert opts.mode == BulkOperationMode.CONTINUE_ON_ERROR
        assert opts.max_parallel == 5
        assert opts.timeout_per_operation == 30
        assert opts.validate_before_execute is True
        assert opts.dry_run is False
        assert opts.adaptive_scaling is True
        assert opts.backpressure_threshold_ms == 1000.0
        assert opts.min_parallel == 1
        assert opts.max_parallel_limit == 20

    def test_bulk_operation_modes(self):
        """Test different bulk operation modes"""
        assert BulkOperationMode.ATOMIC.value == "atomic"
        assert BulkOperationMode.CONTINUE_ON_ERROR.value == "continue"
        assert BulkOperationMode.FAIL_FAST.value == "fail_fast"


class TestBulkResult:
    def test_bulk_result_properties(self):
        """Test BulkResult calculated properties"""
        result = BulkResult(total=10, successful=7, failed=3, duration_ms=1500.5)

        assert result.success_rate == 70.0

        # Add some results
        result.results = [
            OperationResult(index=0, success=True, uid="uid1"),
            OperationResult(index=1, success=False, error="Failed"),
            OperationResult(index=2, success=True, uid="uid2"),
        ]

        failures = result.get_failures()
        assert len(failures) == 1
        assert failures[0].index == 1

        successes = result.get_successes()
        assert len(successes) == 2
        assert successes[0].uid == "uid1"


class TestBulkOperationManager:
    def setup_method(self):
        """Set up test fixtures"""
        self.mock_event_manager = Mock()
        self.bulk_manager = BulkOperationManager(self.mock_event_manager)

        # Sample events for testing
        self.test_events = [
            {
                "summary": "Event 1",
                "dtstart": "2025-07-10T10:00:00",
                "dtend": "2025-07-10T11:00:00",
            },
            {
                "summary": "Event 2",
                "dtstart": "2025-07-10T14:00:00",
                "dtend": "2025-07-10T15:00:00",
            },
        ]

    def test_validate_events_success(self):
        """Test event validation with valid events"""
        errors = self.bulk_manager._validate_events(self.test_events)
        assert len(errors) == 0

    def test_validate_events_missing_fields(self):
        """Test event validation with missing required fields"""
        invalid_events = [
            {"summary": "No dates"},  # Missing dtstart and dtend
            {
                "dtstart": "2025-07-10T10:00:00",
                "dtend": "2025-07-10T11:00:00",
            },  # Missing summary
            {
                "summary": "Invalid dates",
                "dtstart": "2025-07-10T11:00:00",
                "dtend": "2025-07-10T10:00:00",  # End before start
            },
        ]

        errors = self.bulk_manager._validate_events(invalid_events)
        assert (
            len(errors) == 5
        )  # 3 errors for first (missing dtstart, dtend, invalid date), 1 for second, 1 for third

        # Check specific errors
        error_messages = [err[1] for err in errors]
        assert any("Missing required field: dtstart" in msg for msg in error_messages)
        assert any("Missing required field: summary" in msg for msg in error_messages)
        assert any("End time before start time" in msg for msg in error_messages)

    def test_bulk_create_dry_run(self):
        """Test bulk create in dry run mode"""
        options = BulkOptions(dry_run=True)

        result = self.bulk_manager.bulk_create_events(
            calendar_uid="cal123", events=self.test_events, options=options
        )

        assert result.total == 2
        assert result.successful == 2
        assert result.failed == 0
        assert len(result.results) == 2

        # Event manager should not be called in dry run
        self.mock_event_manager.create_event.assert_not_called()

    def test_bulk_create_continue_on_error(self):
        """Test bulk create with continue on error mode"""
        # Mock event manager to fail on second event
        mock_event1 = Mock()
        mock_event1.uid = "created-1"
        self.mock_event_manager.create_event.side_effect = [
            mock_event1,  # Success
            Exception("Network error"),  # Failure
        ]

        options = BulkOptions(mode=BulkOperationMode.CONTINUE_ON_ERROR)

        result = self.bulk_manager.bulk_create_events(
            calendar_uid="cal123", events=self.test_events, options=options
        )

        assert result.total == 2
        assert result.successful == 1
        assert result.failed == 1
        assert self.mock_event_manager.create_event.call_count == 2

    def test_bulk_create_fail_fast(self):
        """Test bulk create with fail fast mode"""
        # Use smaller batch to test fail fast properly
        test_events = [
            {
                "summary": f"Event {i}",
                "dtstart": "2025-07-10T16:00:00",
                "dtend": "2025-07-10T17:00:00",
            }
            for i in range(1, 4)
        ]

        # Mock to fail on second event
        mock_event1 = Mock()
        mock_event1.uid = "created-1"
        mock_event3 = Mock()
        mock_event3.uid = "created-3"
        self.mock_event_manager.create_event.side_effect = [
            mock_event1,
            Exception("API limit reached"),
            mock_event3,
        ]

        options = BulkOptions(mode=BulkOperationMode.FAIL_FAST, max_parallel=2)

        result = self.bulk_manager.bulk_create_events(
            calendar_uid="cal123", events=test_events, options=options
        )

        # In fail_fast mode with batch processing
        assert result.failed >= 1  # At least one failure
        assert result.total == 3
        # Due to parallel batch processing, it may process 1-2 before stopping
        assert result.successful <= 2

    def test_bulk_create_parallel_execution(self):
        """Test that bulk operations execute in batches"""
        # Mock successful event creation
        mock_event1 = Mock()
        mock_event1.uid = "uid1"
        mock_event2 = Mock()
        mock_event2.uid = "uid2"

        self.mock_event_manager.create_event.side_effect = [mock_event1, mock_event2]

        options = BulkOptions(max_parallel=2)

        # Call the batch execution method directly
        results = self.bulk_manager._execute_batch_create(
            calendar_uid="cal123",
            batch=self.test_events,
            start_idx=0,
            options=options,
            account_alias=None,
        )

        # Should have created 2 events
        assert self.mock_event_manager.create_event.call_count == 2
        assert len(results) == 2
        assert all(r.success for r in results)
        assert results[0].uid == "uid1"
        assert results[1].uid == "uid2"


class TestBulkDelete:
    def setup_method(self):
        """Set up test fixtures"""
        self.mock_event_manager = Mock()
        self.bulk_manager = BulkOperationManager(self.mock_event_manager)
        self.test_uids = ["uid1", "uid2", "uid3"]

    def test_bulk_delete_success(self):
        """Test successful bulk delete"""
        # Mock successful deletions
        self.mock_event_manager.delete_event.return_value = True

        options = BulkOptions()
        result = self.bulk_manager.bulk_delete_events(
            calendar_uid="cal123", event_uids=self.test_uids, options=options
        )

        assert result.total == 3
        assert result.successful == 3
        assert result.failed == 0
        assert self.mock_event_manager.delete_event.call_count == 3

    def test_bulk_delete_with_failures(self):
        """Test bulk delete with some failures"""
        # Mock mixed results
        self.mock_event_manager.delete_event.side_effect = [
            True,  # Success
            Exception("Event not found"),  # Failure
            True,  # Success
        ]

        options = BulkOptions(mode=BulkOperationMode.CONTINUE_ON_ERROR)
        result = self.bulk_manager.bulk_delete_events(
            calendar_uid="cal123", event_uids=self.test_uids, options=options
        )

        assert result.total == 3
        assert result.successful == 2
        assert result.failed == 1

        # Check that the failed operation has error info
        failures = result.get_failures()
        assert len(failures) == 1
        assert "Event not found" in failures[0].error

    def test_adaptive_scaling_performance_tracking(self):
        """Test that performance metrics are tracked for adaptive scaling"""
        bulk_manager = BulkOperationManager()

        # Track some performance data
        bulk_manager._track_operation_performance("create_event", 500.0)
        bulk_manager._track_operation_performance("create_event", 1500.0)
        bulk_manager._track_operation_performance("create_event", 750.0)

        recent_perf = bulk_manager._get_recent_performance("create_event")
        assert len(recent_perf) == 3
        assert 500.0 in recent_perf
        assert 1500.0 in recent_perf
        assert 750.0 in recent_perf

    def test_adaptive_scaling_parallelism_calculation(self):
        """Test adaptive parallelism calculation based on performance"""
        bulk_manager = BulkOperationManager()
        options = BulkOptions(max_parallel=10, backpressure_threshold_ms=1000.0)

        # Test fast operations - should increase parallelism
        fast_performance = [200.0, 300.0, 250.0]  # All under threshold/2
        new_parallel = bulk_manager._calculate_adaptive_parallelism(
            options, "create_event", fast_performance
        )
        assert new_parallel > options.max_parallel  # Should increase

        # Test slow operations - should decrease parallelism
        slow_performance = [1500.0, 2000.0, 1800.0]  # All over threshold
        new_parallel = bulk_manager._calculate_adaptive_parallelism(
            options, "create_event", slow_performance
        )
        assert new_parallel == options.max_parallel // 2  # Should decrease

        # Test mixed performance - should stay same
        mixed_performance = [800.0, 900.0, 700.0]  # Within acceptable range
        new_parallel = bulk_manager._calculate_adaptive_parallelism(
            options, "create_event", mixed_performance
        )
        assert new_parallel == options.max_parallel  # Should stay same

    def test_adaptive_scaling_disabled(self):
        """Test that adaptive scaling can be disabled"""
        bulk_manager = BulkOperationManager()
        options = BulkOptions(adaptive_scaling=False, max_parallel=5)

        # Even with slow performance, should return original max_parallel
        slow_performance = [2000.0, 3000.0, 2500.0]
        new_parallel = bulk_manager._calculate_adaptive_parallelism(
            options, "create_event", slow_performance
        )
        assert new_parallel == options.max_parallel

    def test_performance_tracker_sliding_window(self):
        """Test that performance tracker maintains sliding window"""
        bulk_manager = BulkOperationManager()

        # Add more than 50 measurements
        for i in range(60):
            bulk_manager._track_operation_performance("create_event", float(i * 10))

        recent_perf = bulk_manager._get_recent_performance("create_event")
        # Should keep only last 50 measurements
        assert len(recent_perf) == 50
        # Should contain the most recent values (590, 580, ... 100)
        assert 590.0 in recent_perf
        assert 100.0 in recent_perf
        assert 90.0 not in recent_perf  # Should have been removed

    def test_bulk_create_with_adaptive_scaling(self):
        """Test bulk create operations adapt parallelism based on performance"""
        # Create a larger set of events to test adaptive scaling
        test_events = [
            {
                "summary": f"Event {i}",
                "dtstart": "2025-07-10T10:00:00",
                "dtend": "2025-07-10T11:00:00",
            }
            for i in range(15)  # More events to trigger multiple batches
        ]

        mock_event_manager = Mock()
        bulk_manager = BulkOperationManager(mock_event_manager)

        # Mock successful event creation with varying response times
        def create_event_mock(*args, **kwargs):
            # Extract the event number from the summary
            summary = kwargs.get("summary", args[1] if len(args) > 1 else "")
            if summary and "Event " in summary:
                event_num = summary.split("Event ")[1]
                mock_event = Mock()
                mock_event.uid = f"uid{event_num}"
                return mock_event
            else:
                # Fallback for any unexpected calls
                mock_event = Mock()
                mock_event.uid = (
                    f"uid_unknown_{len(mock_event_manager.create_event.call_args_list)}"
                )
                return mock_event

        mock_event_manager.create_event.side_effect = create_event_mock

        options = BulkOptions(
            adaptive_scaling=True,
            max_parallel=5,
            backpressure_threshold_ms=1000.0,
        )

        # Simulate some performance data that would trigger scaling
        for _ in range(10):
            bulk_manager._track_operation_performance("create_event", 1500.0)  # Slow

        result = bulk_manager.bulk_create_events(
            calendar_uid="cal123", events=test_events, options=options
        )

        assert result.total == 15
        assert result.successful == 15
        assert result.failed == 0

        # Verify all events were created
        assert mock_event_manager.create_event.call_count == 15