mcp-proxy-adapter 2.0.1__py3-none-any.whl → 6.9.50__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mcp-proxy-adapter might be problematic. Click here for more details.
- mcp_proxy_adapter/__init__.py +47 -0
- mcp_proxy_adapter/__main__.py +13 -0
- mcp_proxy_adapter/api/__init__.py +0 -0
- mcp_proxy_adapter/api/app.py +66 -0
- mcp_proxy_adapter/api/core/__init__.py +18 -0
- mcp_proxy_adapter/api/core/app_factory.py +400 -0
- mcp_proxy_adapter/api/core/lifespan_manager.py +55 -0
- mcp_proxy_adapter/api/core/registration_context.py +356 -0
- mcp_proxy_adapter/api/core/registration_manager.py +307 -0
- mcp_proxy_adapter/api/core/registration_tasks.py +84 -0
- mcp_proxy_adapter/api/core/ssl_context_factory.py +88 -0
- mcp_proxy_adapter/api/handlers.py +181 -0
- mcp_proxy_adapter/api/middleware/__init__.py +21 -0
- mcp_proxy_adapter/api/middleware/base.py +54 -0
- mcp_proxy_adapter/api/middleware/command_permission_middleware.py +73 -0
- mcp_proxy_adapter/api/middleware/error_handling.py +76 -0
- mcp_proxy_adapter/api/middleware/factory.py +147 -0
- mcp_proxy_adapter/api/middleware/logging.py +31 -0
- mcp_proxy_adapter/api/middleware/performance.py +51 -0
- mcp_proxy_adapter/api/middleware/protocol_middleware.py +140 -0
- mcp_proxy_adapter/api/middleware/transport_middleware.py +87 -0
- mcp_proxy_adapter/api/middleware/unified_security.py +223 -0
- mcp_proxy_adapter/api/middleware/user_info_middleware.py +132 -0
- mcp_proxy_adapter/api/openapi/__init__.py +21 -0
- mcp_proxy_adapter/api/openapi/command_integration.py +105 -0
- mcp_proxy_adapter/api/openapi/openapi_generator.py +40 -0
- mcp_proxy_adapter/api/openapi/openapi_registry.py +62 -0
- mcp_proxy_adapter/api/openapi/schema_loader.py +116 -0
- mcp_proxy_adapter/api/schemas.py +270 -0
- mcp_proxy_adapter/api/tool_integration.py +131 -0
- mcp_proxy_adapter/api/tools.py +163 -0
- mcp_proxy_adapter/cli/__init__.py +12 -0
- mcp_proxy_adapter/cli/commands/__init__.py +15 -0
- mcp_proxy_adapter/cli/commands/client.py +100 -0
- mcp_proxy_adapter/cli/commands/config_generate.py +105 -0
- mcp_proxy_adapter/cli/commands/config_validate.py +94 -0
- mcp_proxy_adapter/cli/commands/generate.py +259 -0
- mcp_proxy_adapter/cli/commands/server.py +174 -0
- mcp_proxy_adapter/cli/commands/sets.py +132 -0
- mcp_proxy_adapter/cli/commands/testconfig.py +177 -0
- mcp_proxy_adapter/cli/examples/__init__.py +8 -0
- mcp_proxy_adapter/cli/examples/http_basic.py +82 -0
- mcp_proxy_adapter/cli/examples/https_token.py +96 -0
- mcp_proxy_adapter/cli/examples/mtls_roles.py +103 -0
- mcp_proxy_adapter/cli/main.py +63 -0
- mcp_proxy_adapter/cli/parser.py +338 -0
- mcp_proxy_adapter/cli/validators.py +231 -0
- mcp_proxy_adapter/client/jsonrpc_client/__init__.py +9 -0
- mcp_proxy_adapter/client/jsonrpc_client/client.py +42 -0
- mcp_proxy_adapter/client/jsonrpc_client/command_api.py +45 -0
- mcp_proxy_adapter/client/jsonrpc_client/proxy_api.py +224 -0
- mcp_proxy_adapter/client/jsonrpc_client/queue_api.py +60 -0
- mcp_proxy_adapter/client/jsonrpc_client/transport.py +108 -0
- mcp_proxy_adapter/client/proxy.py +123 -0
- mcp_proxy_adapter/commands/__init__.py +66 -0
- mcp_proxy_adapter/commands/auth_validation_command.py +69 -0
- mcp_proxy_adapter/commands/base.py +389 -0
- mcp_proxy_adapter/commands/builtin_commands.py +30 -0
- mcp_proxy_adapter/commands/catalog/__init__.py +20 -0
- mcp_proxy_adapter/commands/catalog/catalog_loader.py +34 -0
- mcp_proxy_adapter/commands/catalog/catalog_manager.py +122 -0
- mcp_proxy_adapter/commands/catalog/catalog_syncer.py +149 -0
- mcp_proxy_adapter/commands/catalog/command_catalog.py +43 -0
- mcp_proxy_adapter/commands/catalog/dependency_manager.py +37 -0
- mcp_proxy_adapter/commands/catalog_manager.py +97 -0
- mcp_proxy_adapter/commands/cert_monitor_command.py +552 -0
- mcp_proxy_adapter/commands/certificate_management_command.py +562 -0
- mcp_proxy_adapter/commands/command_registry.py +298 -0
- mcp_proxy_adapter/commands/config_command.py +102 -0
- mcp_proxy_adapter/commands/dependency_container.py +40 -0
- mcp_proxy_adapter/commands/dependency_manager.py +143 -0
- mcp_proxy_adapter/commands/echo_command.py +48 -0
- mcp_proxy_adapter/commands/health_command.py +142 -0
- mcp_proxy_adapter/commands/help_command.py +175 -0
- mcp_proxy_adapter/commands/hooks.py +172 -0
- mcp_proxy_adapter/commands/key_management_command.py +484 -0
- mcp_proxy_adapter/commands/load_command.py +123 -0
- mcp_proxy_adapter/commands/plugins_command.py +246 -0
- mcp_proxy_adapter/commands/protocol_management_command.py +216 -0
- mcp_proxy_adapter/commands/proxy_registration_command.py +319 -0
- mcp_proxy_adapter/commands/queue_commands.py +750 -0
- mcp_proxy_adapter/commands/registration_status_command.py +76 -0
- mcp_proxy_adapter/commands/registry/__init__.py +18 -0
- mcp_proxy_adapter/commands/registry/command_info.py +103 -0
- mcp_proxy_adapter/commands/registry/command_loader.py +207 -0
- mcp_proxy_adapter/commands/registry/command_manager.py +119 -0
- mcp_proxy_adapter/commands/registry/command_registry.py +217 -0
- mcp_proxy_adapter/commands/reload_command.py +136 -0
- mcp_proxy_adapter/commands/result.py +157 -0
- mcp_proxy_adapter/commands/role_test_command.py +99 -0
- mcp_proxy_adapter/commands/roles_management_command.py +502 -0
- mcp_proxy_adapter/commands/security_command.py +472 -0
- mcp_proxy_adapter/commands/settings_command.py +113 -0
- mcp_proxy_adapter/commands/ssl_setup_command.py +306 -0
- mcp_proxy_adapter/commands/token_management_command.py +500 -0
- mcp_proxy_adapter/commands/transport_management_command.py +129 -0
- mcp_proxy_adapter/commands/unload_command.py +92 -0
- mcp_proxy_adapter/config.py +32 -0
- mcp_proxy_adapter/core/__init__.py +8 -0
- mcp_proxy_adapter/core/app_factory.py +560 -0
- mcp_proxy_adapter/core/app_runner.py +318 -0
- mcp_proxy_adapter/core/auth_validator.py +508 -0
- mcp_proxy_adapter/core/certificate/__init__.py +20 -0
- mcp_proxy_adapter/core/certificate/certificate_creator.py +372 -0
- mcp_proxy_adapter/core/certificate/certificate_extractor.py +185 -0
- mcp_proxy_adapter/core/certificate/certificate_utils.py +249 -0
- mcp_proxy_adapter/core/certificate/certificate_validator.py +481 -0
- mcp_proxy_adapter/core/certificate/ssl_context_manager.py +65 -0
- mcp_proxy_adapter/core/certificate_utils.py +249 -0
- mcp_proxy_adapter/core/client.py +608 -0
- mcp_proxy_adapter/core/client_manager.py +271 -0
- mcp_proxy_adapter/core/client_security.py +411 -0
- mcp_proxy_adapter/core/config/__init__.py +18 -0
- mcp_proxy_adapter/core/config/config.py +237 -0
- mcp_proxy_adapter/core/config/config_factory.py +22 -0
- mcp_proxy_adapter/core/config/config_loader.py +66 -0
- mcp_proxy_adapter/core/config/feature_manager.py +31 -0
- mcp_proxy_adapter/core/config/simple_config.py +204 -0
- mcp_proxy_adapter/core/config/simple_config_generator.py +131 -0
- mcp_proxy_adapter/core/config/simple_config_validator.py +476 -0
- mcp_proxy_adapter/core/config_converter.py +252 -0
- mcp_proxy_adapter/core/config_validator.py +211 -0
- mcp_proxy_adapter/core/crl_utils.py +362 -0
- mcp_proxy_adapter/core/errors.py +276 -0
- mcp_proxy_adapter/core/job_manager.py +54 -0
- mcp_proxy_adapter/core/logging.py +250 -0
- mcp_proxy_adapter/core/mtls_asgi.py +140 -0
- mcp_proxy_adapter/core/mtls_asgi_app.py +187 -0
- mcp_proxy_adapter/core/mtls_proxy.py +229 -0
- mcp_proxy_adapter/core/mtls_server.py +154 -0
- mcp_proxy_adapter/core/protocol_manager.py +232 -0
- mcp_proxy_adapter/core/proxy/__init__.py +19 -0
- mcp_proxy_adapter/core/proxy/auth_manager.py +26 -0
- mcp_proxy_adapter/core/proxy/proxy_registration_manager.py +160 -0
- mcp_proxy_adapter/core/proxy/registration_client.py +186 -0
- mcp_proxy_adapter/core/proxy/ssl_manager.py +101 -0
- mcp_proxy_adapter/core/proxy_client.py +184 -0
- mcp_proxy_adapter/core/proxy_registration.py +80 -0
- mcp_proxy_adapter/core/role_utils.py +103 -0
- mcp_proxy_adapter/core/security_adapter.py +343 -0
- mcp_proxy_adapter/core/security_factory.py +96 -0
- mcp_proxy_adapter/core/security_integration.py +342 -0
- mcp_proxy_adapter/core/server_adapter.py +251 -0
- mcp_proxy_adapter/core/server_engine.py +217 -0
- mcp_proxy_adapter/core/settings.py +260 -0
- mcp_proxy_adapter/core/signal_handler.py +107 -0
- mcp_proxy_adapter/core/ssl_utils.py +161 -0
- mcp_proxy_adapter/core/transport_manager.py +153 -0
- mcp_proxy_adapter/core/unified_config_adapter.py +471 -0
- mcp_proxy_adapter/core/utils.py +101 -0
- mcp_proxy_adapter/core/validation/__init__.py +21 -0
- mcp_proxy_adapter/core/validation/config_validator.py +219 -0
- mcp_proxy_adapter/core/validation/file_validator.py +131 -0
- mcp_proxy_adapter/core/validation/protocol_validator.py +205 -0
- mcp_proxy_adapter/core/validation/security_validator.py +140 -0
- mcp_proxy_adapter/core/validation/validation_result.py +27 -0
- mcp_proxy_adapter/custom_openapi.py +58 -0
- mcp_proxy_adapter/examples/__init__.py +16 -0
- mcp_proxy_adapter/examples/basic_framework/__init__.py +9 -0
- mcp_proxy_adapter/examples/basic_framework/commands/__init__.py +4 -0
- mcp_proxy_adapter/examples/basic_framework/hooks/__init__.py +4 -0
- mcp_proxy_adapter/examples/basic_framework/main.py +52 -0
- mcp_proxy_adapter/examples/bugfix_certificate_config.py +261 -0
- mcp_proxy_adapter/examples/cert_manager_bugfix.py +203 -0
- mcp_proxy_adapter/examples/check_config.py +413 -0
- mcp_proxy_adapter/examples/client_usage_example.py +164 -0
- mcp_proxy_adapter/examples/commands/__init__.py +5 -0
- mcp_proxy_adapter/examples/config_builder.py +234 -0
- mcp_proxy_adapter/examples/config_cli.py +282 -0
- mcp_proxy_adapter/examples/create_test_configs.py +174 -0
- mcp_proxy_adapter/examples/debug_request_state.py +130 -0
- mcp_proxy_adapter/examples/debug_role_chain.py +191 -0
- mcp_proxy_adapter/examples/demo_client.py +287 -0
- mcp_proxy_adapter/examples/full_application/__init__.py +12 -0
- mcp_proxy_adapter/examples/full_application/commands/__init__.py +8 -0
- mcp_proxy_adapter/examples/full_application/commands/custom_echo_command.py +45 -0
- mcp_proxy_adapter/examples/full_application/commands/dynamic_calculator_command.py +52 -0
- mcp_proxy_adapter/examples/full_application/commands/echo_command.py +32 -0
- mcp_proxy_adapter/examples/full_application/commands/help_command.py +54 -0
- mcp_proxy_adapter/examples/full_application/commands/list_command.py +57 -0
- mcp_proxy_adapter/examples/full_application/hooks/__init__.py +5 -0
- mcp_proxy_adapter/examples/full_application/hooks/application_hooks.py +29 -0
- mcp_proxy_adapter/examples/full_application/hooks/builtin_command_hooks.py +27 -0
- mcp_proxy_adapter/examples/full_application/main.py +311 -0
- mcp_proxy_adapter/examples/full_application/proxy_endpoints.py +161 -0
- mcp_proxy_adapter/examples/full_application/run_mtls.py +252 -0
- mcp_proxy_adapter/examples/full_application/run_simple.py +152 -0
- mcp_proxy_adapter/examples/full_application/test_minimal_server.py +45 -0
- mcp_proxy_adapter/examples/full_application/test_server.py +163 -0
- mcp_proxy_adapter/examples/full_application/test_simple_server.py +62 -0
- mcp_proxy_adapter/examples/generate_config.py +502 -0
- mcp_proxy_adapter/examples/proxy_registration_example.py +335 -0
- mcp_proxy_adapter/examples/queue_demo_simple.py +632 -0
- mcp_proxy_adapter/examples/queue_integration_example.py +578 -0
- mcp_proxy_adapter/examples/queue_server_demo.py +82 -0
- mcp_proxy_adapter/examples/queue_server_example.py +85 -0
- mcp_proxy_adapter/examples/queue_server_simple.py +173 -0
- mcp_proxy_adapter/examples/required_certificates.py +208 -0
- mcp_proxy_adapter/examples/run_example.py +77 -0
- mcp_proxy_adapter/examples/run_full_test_suite.py +619 -0
- mcp_proxy_adapter/examples/run_proxy_server.py +153 -0
- mcp_proxy_adapter/examples/run_security_tests_fixed.py +435 -0
- mcp_proxy_adapter/examples/security_test/__init__.py +18 -0
- mcp_proxy_adapter/examples/security_test/auth_manager.py +14 -0
- mcp_proxy_adapter/examples/security_test/ssl_context_manager.py +28 -0
- mcp_proxy_adapter/examples/security_test/test_client.py +159 -0
- mcp_proxy_adapter/examples/security_test/test_result.py +22 -0
- mcp_proxy_adapter/examples/security_test_client.py +72 -0
- mcp_proxy_adapter/examples/setup/__init__.py +24 -0
- mcp_proxy_adapter/examples/setup/certificate_manager.py +215 -0
- mcp_proxy_adapter/examples/setup/config_generator.py +12 -0
- mcp_proxy_adapter/examples/setup/config_validator.py +118 -0
- mcp_proxy_adapter/examples/setup/environment_setup.py +62 -0
- mcp_proxy_adapter/examples/setup/test_files_generator.py +10 -0
- mcp_proxy_adapter/examples/setup/test_runner.py +89 -0
- mcp_proxy_adapter/examples/setup_test_environment.py +235 -0
- mcp_proxy_adapter/examples/simple_protocol_test.py +125 -0
- mcp_proxy_adapter/examples/test_chk_hostname_automated.py +211 -0
- mcp_proxy_adapter/examples/test_config.py +205 -0
- mcp_proxy_adapter/examples/test_config_builder.py +110 -0
- mcp_proxy_adapter/examples/test_examples.py +308 -0
- mcp_proxy_adapter/examples/test_framework_complete.py +267 -0
- mcp_proxy_adapter/examples/test_mcp_server.py +187 -0
- mcp_proxy_adapter/examples/test_protocol_examples.py +337 -0
- mcp_proxy_adapter/examples/universal_client.py +674 -0
- mcp_proxy_adapter/examples/update_config_certificates.py +135 -0
- mcp_proxy_adapter/examples/validate_generator_compatibility.py +385 -0
- mcp_proxy_adapter/examples/validate_generator_compatibility_simple.py +61 -0
- mcp_proxy_adapter/integrations/__init__.py +25 -0
- mcp_proxy_adapter/integrations/queuemgr_integration.py +462 -0
- mcp_proxy_adapter/main.py +311 -0
- mcp_proxy_adapter/openapi.py +375 -0
- mcp_proxy_adapter/schemas/base_schema.json +114 -0
- mcp_proxy_adapter/schemas/openapi_schema.json +314 -0
- mcp_proxy_adapter/schemas/roles.json +37 -0
- mcp_proxy_adapter/schemas/roles_schema.json +162 -0
- mcp_proxy_adapter/version.py +5 -0
- mcp_proxy_adapter-6.9.50.dist-info/METADATA +1088 -0
- mcp_proxy_adapter-6.9.50.dist-info/RECORD +242 -0
- {mcp_proxy_adapter-2.0.1.dist-info → mcp_proxy_adapter-6.9.50.dist-info}/WHEEL +1 -1
- mcp_proxy_adapter-6.9.50.dist-info/entry_points.txt +14 -0
- mcp_proxy_adapter-6.9.50.dist-info/top_level.txt +1 -0
- adapters/__init__.py +0 -16
- analyzers/__init__.py +0 -14
- analyzers/docstring_analyzer.py +0 -199
- analyzers/type_analyzer.py +0 -151
- cli/__init__.py +0 -12
- cli/__main__.py +0 -79
- cli/command_runner.py +0 -233
- dispatchers/__init__.py +0 -14
- dispatchers/base_dispatcher.py +0 -85
- dispatchers/json_rpc_dispatcher.py +0 -198
- generators/__init__.py +0 -14
- generators/endpoint_generator.py +0 -172
- generators/openapi_generator.py +0 -254
- generators/rest_api_generator.py +0 -207
- mcp_proxy_adapter-2.0.1.dist-info/METADATA +0 -272
- mcp_proxy_adapter-2.0.1.dist-info/RECORD +0 -28
- mcp_proxy_adapter-2.0.1.dist-info/licenses/LICENSE +0 -21
- mcp_proxy_adapter-2.0.1.dist-info/top_level.txt +0 -7
- openapi_schema/__init__.py +0 -38
- openapi_schema/command_registry.py +0 -312
- openapi_schema/rest_schema.py +0 -510
- openapi_schema/rpc_generator.py +0 -307
- openapi_schema/rpc_schema.py +0 -416
- validators/__init__.py +0 -14
- validators/base_validator.py +0 -23
- validators/docstring_validator.py +0 -75
- validators/metadata_validator.py +0 -76
|
@@ -0,0 +1,750 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Queue management commands for MCP Proxy Adapter.
|
|
3
|
+
|
|
4
|
+
This module provides JSON-RPC commands for managing background jobs
|
|
5
|
+
using the queuemgr integration.
|
|
6
|
+
|
|
7
|
+
Author: Vasiliy Zdanovskiy
|
|
8
|
+
email: vasilyvz@gmail.com
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
from typing import Dict, Any
|
|
13
|
+
|
|
14
|
+
from mcp_proxy_adapter.commands.base import Command
|
|
15
|
+
from mcp_proxy_adapter.commands.result import SuccessResult, ErrorResult
|
|
16
|
+
from mcp_proxy_adapter.integrations.queuemgr_integration import (
|
|
17
|
+
QueueManagerIntegration,
|
|
18
|
+
QueueJobBase,
|
|
19
|
+
QueueJobResult,
|
|
20
|
+
QueueJobStatus,
|
|
21
|
+
QueueJobError,
|
|
22
|
+
get_global_queue_manager,
|
|
23
|
+
)
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class QueueAddJobCommand(Command):
    """Command to add a job to the background queue.

    Validates the requested job type against the known job classes and
    delegates creation to the global queue manager.
    """

    def __init__(self):
        super().__init__()
        self.name = "queue_add_job"
        self.description = "Add a job to the background queue"
        self.version = "1.0.0"

    def get_schema(self) -> Dict[str, Any]:
        """Return the JSON Schema describing the command parameters."""
        return {
            "type": "object",
            "properties": {
                "job_type": {
                    "type": "string",
                    "description": "Type of job to add",
                    "enum": ["data_processing", "file_operation", "api_call", "custom", "long_running", "batch_processing", "file_download"]
                },
                "job_id": {
                    "type": "string",
                    "description": "Unique job identifier",
                    "minLength": 1
                },
                "params": {
                    "type": "object",
                    "description": "Job-specific parameters",
                    "properties": {
                        "data": {"type": "object", "description": "Data to process"},
                        "operation": {"type": "string", "description": "Operation type"},
                        "file_path": {"type": "string", "description": "File path for file operations"},
                        "url": {"type": "string", "description": "URL for API calls"},
                        "method": {"type": "string", "description": "HTTP method for API calls"},
                        "headers": {"type": "object", "description": "HTTP headers"},
                        "timeout": {"type": "number", "description": "Job timeout in seconds"},
                        "priority": {"type": "integer", "description": "Job priority (1-10)"},
                        "duration": {"type": "integer", "description": "Duration for long-running jobs (seconds)"},
                        "task_type": {"type": "string", "description": "Type of task for long-running jobs"},
                        "batch_size": {"type": "integer", "description": "Batch size for batch processing jobs"},
                        "items": {"type": "array", "description": "Items to process in batch jobs"},
                        "file_size": {"type": "integer", "description": "File size for download jobs (bytes)"}
                    }
                }
            },
            "required": ["job_type", "job_id", "params"]
        }

    async def execute(self, params: Dict[str, Any]) -> Dict[str, Any]:
        """Add a job to the queue.

        Args:
            params: Request parameters; must contain ``job_type`` and
                ``job_id``, plus an optional ``params`` dict for the job.

        Returns:
            A serialized ``SuccessResult`` on success, or a serialized
            ``ErrorResult`` on validation or queue failure.
        """
        try:
            job_type = params.get("job_type")
            job_id = params.get("job_id")
            job_params = params.get("params", {})

            # BUG FIX: the original raised ValidationError, which is never
            # imported in this module and therefore produced a NameError
            # that surfaced as a misleading INTERNAL_ERROR. Return an
            # explicit validation error result instead.
            if not job_type or not job_id:
                return ErrorResult(
                    error_code="VALIDATION_ERROR",
                    message="job_type and job_id are required"
                ).to_dict()

            # Get global queue manager
            queue_manager = await get_global_queue_manager()

            # Map job type names to the concrete job classes defined in
            # this module.
            job_classes = {
                "data_processing": DataProcessingJob,
                "file_operation": FileOperationJob,
                "api_call": ApiCallJob,
                "custom": CustomJob,
                "long_running": LongRunningJob,
                "batch_processing": BatchProcessingJob,
                "file_download": FileDownloadJob,
            }

            # Same BUG FIX as above: report unknown job types as a
            # validation error rather than raising an undefined name.
            if job_type not in job_classes:
                return ErrorResult(
                    error_code="VALIDATION_ERROR",
                    message=f"Unknown job type: {job_type}"
                ).to_dict()

            # Add job to queue
            result = await queue_manager.add_job(
                job_classes[job_type],
                job_id,
                job_params
            )

            return SuccessResult(
                data={
                    "message": f"Job {job_id} added successfully",
                    "job_id": job_id,
                    "job_type": job_type,
                    "status": result.status,
                    "description": result.description
                }
            ).to_dict()

        except QueueJobError as e:
            return ErrorResult(
                error_code="QUEUE_JOB_ERROR",
                message=f"Queue job error: {str(e)}",
                details={"job_id": getattr(e, 'job_id', 'unknown')}
            ).to_dict()
        except Exception as e:
            return ErrorResult(
                error_code="INTERNAL_ERROR",
                message=f"Failed to add job: {str(e)}"
            ).to_dict()
|
|
128
|
+
|
|
129
|
+
|
|
130
|
+
class QueueStartJobCommand(Command):
    """Command to start a previously added job in the background queue."""

    def __init__(self):
        super().__init__()
        self.name = "queue_start_job"
        self.description = "Start a job in the background queue"
        self.version = "1.0.0"

    def get_schema(self) -> Dict[str, Any]:
        """Return the JSON Schema describing the command parameters."""
        return {
            "type": "object",
            "properties": {
                "job_id": {
                    "type": "string",
                    "description": "Job identifier to start",
                    "minLength": 1
                }
            },
            "required": ["job_id"]
        }

    async def execute(self, params: Dict[str, Any]) -> Dict[str, Any]:
        """Start the job identified by ``params['job_id']``.

        Returns a serialized ``SuccessResult`` with the new job status, or
        a serialized ``ErrorResult`` on validation or queue failure.
        """
        try:
            job_id = params.get("job_id")

            # BUG FIX: the original raised ValidationError, which is never
            # imported in this module and produced a NameError at runtime.
            if not job_id:
                return ErrorResult(
                    error_code="VALIDATION_ERROR",
                    message="job_id is required"
                ).to_dict()

            # Get global queue manager
            queue_manager = await get_global_queue_manager()

            # Start job
            result = await queue_manager.start_job(job_id)

            return SuccessResult(
                data={
                    "message": f"Job {job_id} started successfully",
                    "job_id": job_id,
                    "status": result.status,
                    "description": result.description
                }
            ).to_dict()

        except QueueJobError as e:
            return ErrorResult(
                error_code="QUEUE_JOB_ERROR",
                message=f"Queue job error: {str(e)}",
                details={"job_id": getattr(e, 'job_id', 'unknown')}
            ).to_dict()
        except Exception as e:
            return ErrorResult(
                error_code="INTERNAL_ERROR",
                message=f"Failed to start job: {str(e)}"
            ).to_dict()
|
|
187
|
+
|
|
188
|
+
|
|
189
|
+
class QueueStopJobCommand(Command):
    """Command to stop a running job in the background queue."""

    def __init__(self):
        super().__init__()
        self.name = "queue_stop_job"
        self.description = "Stop a running job in the background queue"
        self.version = "1.0.0"

    def get_schema(self) -> Dict[str, Any]:
        """Return the JSON Schema describing the command parameters."""
        return {
            "type": "object",
            "properties": {
                "job_id": {
                    "type": "string",
                    "description": "Job identifier to stop",
                    "minLength": 1
                }
            },
            "required": ["job_id"]
        }

    async def execute(self, params: Dict[str, Any]) -> Dict[str, Any]:
        """Stop the job identified by ``params['job_id']``.

        Returns a serialized ``SuccessResult`` with the new job status, or
        a serialized ``ErrorResult`` on validation or queue failure.
        """
        try:
            job_id = params.get("job_id")

            # BUG FIX: the original raised ValidationError, which is never
            # imported in this module and produced a NameError at runtime.
            if not job_id:
                return ErrorResult(
                    error_code="VALIDATION_ERROR",
                    message="job_id is required"
                ).to_dict()

            # Get global queue manager
            queue_manager = await get_global_queue_manager()

            # Stop job
            result = await queue_manager.stop_job(job_id)

            return SuccessResult(
                data={
                    "message": f"Job {job_id} stopped successfully",
                    "job_id": job_id,
                    "status": result.status,
                    "description": result.description
                }
            ).to_dict()

        except QueueJobError as e:
            return ErrorResult(
                error_code="QUEUE_JOB_ERROR",
                message=f"Queue job error: {str(e)}",
                details={"job_id": getattr(e, 'job_id', 'unknown')}
            ).to_dict()
        except Exception as e:
            return ErrorResult(
                error_code="INTERNAL_ERROR",
                message=f"Failed to stop job: {str(e)}"
            ).to_dict()
|
|
246
|
+
|
|
247
|
+
|
|
248
|
+
class QueueDeleteJobCommand(Command):
    """Command to delete a job from the background queue."""

    def __init__(self):
        super().__init__()
        self.name = "queue_delete_job"
        self.description = "Delete a job from the background queue"
        self.version = "1.0.0"

    def get_schema(self) -> Dict[str, Any]:
        """Return the JSON Schema describing the command parameters."""
        return {
            "type": "object",
            "properties": {
                "job_id": {
                    "type": "string",
                    "description": "Job identifier to delete",
                    "minLength": 1
                }
            },
            "required": ["job_id"]
        }

    async def execute(self, params: Dict[str, Any]) -> Dict[str, Any]:
        """Delete the job identified by ``params['job_id']``.

        Returns a serialized ``SuccessResult`` with the final job status,
        or a serialized ``ErrorResult`` on validation or queue failure.
        """
        try:
            job_id = params.get("job_id")

            # BUG FIX: the original raised ValidationError, which is never
            # imported in this module and produced a NameError at runtime.
            if not job_id:
                return ErrorResult(
                    error_code="VALIDATION_ERROR",
                    message="job_id is required"
                ).to_dict()

            # Get global queue manager
            queue_manager = await get_global_queue_manager()

            # Delete job
            result = await queue_manager.delete_job(job_id)

            return SuccessResult(
                data={
                    "message": f"Job {job_id} deleted successfully",
                    "job_id": job_id,
                    "status": result.status,
                    "description": result.description
                }
            ).to_dict()

        except QueueJobError as e:
            return ErrorResult(
                error_code="QUEUE_JOB_ERROR",
                message=f"Queue job error: {str(e)}",
                details={"job_id": getattr(e, 'job_id', 'unknown')}
            ).to_dict()
        except Exception as e:
            return ErrorResult(
                error_code="INTERNAL_ERROR",
                message=f"Failed to delete job: {str(e)}"
            ).to_dict()
|
|
305
|
+
|
|
306
|
+
|
|
307
|
+
class QueueGetJobStatusCommand(Command):
    """Command to get the status and details of a job."""

    def __init__(self):
        super().__init__()
        self.name = "queue_get_job_status"
        self.description = "Get the status and details of a job"
        self.version = "1.0.0"

    def get_schema(self) -> Dict[str, Any]:
        """Return the JSON Schema describing the command parameters."""
        return {
            "type": "object",
            "properties": {
                "job_id": {
                    "type": "string",
                    "description": "Job identifier to get status for",
                    "minLength": 1
                }
            },
            "required": ["job_id"]
        }

    async def execute(self, params: Dict[str, Any]) -> Dict[str, Any]:
        """Report the current status of the job ``params['job_id']``.

        Returns a serialized ``SuccessResult`` carrying status, progress,
        description, result, and error fields, or a serialized
        ``ErrorResult`` on validation or queue failure.
        """
        try:
            job_id = params.get("job_id")

            # BUG FIX: the original raised ValidationError, which is never
            # imported in this module and produced a NameError at runtime.
            if not job_id:
                return ErrorResult(
                    error_code="VALIDATION_ERROR",
                    message="job_id is required"
                ).to_dict()

            # Get global queue manager
            queue_manager = await get_global_queue_manager()

            # Get job status
            result = await queue_manager.get_job_status(job_id)

            return SuccessResult(
                data={
                    "job_id": result.job_id,
                    "status": result.status,
                    "progress": result.progress,
                    "description": result.description,
                    "result": result.result,
                    "error": result.error
                }
            ).to_dict()

        except QueueJobError as e:
            return ErrorResult(
                error_code="QUEUE_JOB_ERROR",
                message=f"Queue job error: {str(e)}",
                details={"job_id": getattr(e, 'job_id', 'unknown')}
            ).to_dict()
        except Exception as e:
            return ErrorResult(
                error_code="INTERNAL_ERROR",
                message=f"Failed to get job status: {str(e)}"
            ).to_dict()
|
|
366
|
+
|
|
367
|
+
|
|
368
|
+
class QueueListJobsCommand(Command):
    """Command to list all jobs in the queue."""

    def __init__(self):
        super().__init__()
        self.name = "queue_list_jobs"
        self.description = "List all jobs in the background queue"
        self.version = "1.0.0"

    def get_schema(self) -> Dict[str, Any]:
        """Return the JSON Schema for the optional filter parameters."""
        return {
            "type": "object",
            "properties": {
                "status_filter": {
                    "type": "string",
                    "description": "Filter jobs by status",
                    "enum": ["pending", "running", "completed", "failed", "stopped", "deleted"]
                },
                "limit": {
                    "type": "integer",
                    "description": "Maximum number of jobs to return",
                    "minimum": 1,
                    "maximum": 1000
                }
            }
        }

    async def execute(self, params: Dict[str, Any]) -> Dict[str, Any]:
        """List queued jobs, optionally filtered by status and capped by limit."""
        try:
            wanted_status = params.get("status_filter")
            max_jobs = params.get("limit", 100)

            # Fetch the complete job list from the global manager.
            manager = await get_global_queue_manager()
            all_jobs = await manager.list_jobs()

            # Narrow by status first, then cap the result size.
            if wanted_status:
                selected = [j for j in all_jobs if j.status == wanted_status]
            else:
                selected = all_jobs
            if max_jobs and len(selected) > max_jobs:
                selected = selected[:max_jobs]

            # Serialize each job into a lightweight summary dict.
            jobs_data = [
                {
                    "job_id": j.job_id,
                    "status": j.status,
                    "progress": j.progress,
                    "description": j.description,
                    "has_result": bool(j.result),
                    "has_error": bool(j.error),
                }
                for j in selected
            ]

            return SuccessResult(
                data={
                    "jobs": jobs_data,
                    "total_count": len(jobs_data),
                    "status_filter": wanted_status,
                    "limit": max_jobs
                }
            ).to_dict()

        except QueueJobError as e:
            return ErrorResult(
                error_code="QUEUE_JOB_ERROR",
                message=f"Queue job error: {str(e)}",
                details={"job_id": getattr(e, 'job_id', 'unknown')}
            ).to_dict()
        except Exception as e:
            return ErrorResult(
                error_code="INTERNAL_ERROR",
                message=f"Failed to list jobs: {str(e)}"
            ).to_dict()
|
|
448
|
+
|
|
449
|
+
|
|
450
|
+
class QueueHealthCommand(Command):
    """Command to check queue system health."""

    def __init__(self):
        super().__init__()
        self.name = "queue_health"
        self.description = "Check the health status of the queue system"
        self.version = "1.0.0"

    def get_schema(self) -> Dict[str, Any]:
        """Return an empty parameter schema; the command takes no input."""
        return {"type": "object", "properties": {}}

    async def execute(self, params: Dict[str, Any]) -> Dict[str, Any]:
        """Return the queue system health report as a serialized result."""
        try:
            # Ask the global manager for its health snapshot and pass it
            # through unchanged as the success payload.
            manager = await get_global_queue_manager()
            health_info = await manager.get_queue_health()
            return SuccessResult(data=health_info).to_dict()
        except Exception as e:
            return ErrorResult(
                error_code="INTERNAL_ERROR",
                message=f"Failed to check queue health: {str(e)}"
            ).to_dict()
|
|
479
|
+
|
|
480
|
+
|
|
481
|
+
# Example job classes for demonstration
|
|
482
|
+
class DataProcessingJob(QueueJobBase):
    """Example job that simulates processing a data payload."""

    def run(self) -> None:
        """Simulate a two-second data-processing step and publish the result."""
        import json
        import time

        self.logger.info(f"DataProcessingJob {self.job_id}: Starting data processing")

        # Pull inputs from the MCP parameters, falling back to safe defaults.
        payload = self.mcp_params.get("data", {})
        op = self.mcp_params.get("operation", "process")

        time.sleep(2)  # simulate the actual work

        self.set_mcp_result({
            "job_id": self.job_id,
            "operation": op,
            "processed_at": time.time(),
            "data_size": len(json.dumps(payload)),
            "status": "completed",
        })
|
|
507
|
+
|
|
508
|
+
|
|
509
|
+
class FileOperationJob(QueueJobBase):
    """Example job that performs a simple file operation (only "read" is supported)."""

    def run(self) -> None:
        """Read the configured file and publish size information.

        Inputs are taken from ``self.mcp_params``:
          - ``file_path``: path of the file to operate on.
          - ``operation``: only ``"read"`` is handled; any other value (or a
            missing file) yields a "failed" result payload instead.

        Unexpected exceptions are reported via ``set_mcp_error``.
        """
        # NOTE: the original code also imported ``time`` here, but it was
        # never used in this job, so the dead import has been removed.
        import os

        self.logger.info(f"FileOperationJob {self.job_id}: Starting file operation")

        file_path = self.mcp_params.get("file_path", "")
        operation = self.mcp_params.get("operation", "read")

        try:
            if operation == "read" and os.path.exists(file_path):
                with open(file_path, "r") as f:
                    content = f.read()

                result = {
                    "job_id": self.job_id,
                    "operation": operation,
                    "file_path": file_path,
                    "file_size": len(content),
                    "status": "completed"
                }
            else:
                # Either an unsupported operation or a nonexistent file.
                result = {
                    "job_id": self.job_id,
                    "operation": operation,
                    "file_path": file_path,
                    "error": f"File not found or invalid operation: {operation}",
                    "status": "failed"
                }

            # The second argument propagates the per-result status upstream.
            self.set_mcp_result(result, result["status"])

        except Exception as e:
            self.set_mcp_error(f"File operation failed: {str(e)}")
|
|
547
|
+
|
|
548
|
+
|
|
549
|
+
class ApiCallJob(QueueJobBase):
    """Example job that performs an outbound HTTP request."""

    def run(self) -> None:
        """Issue the configured HTTP request and publish status/size info.

        Inputs are taken from ``self.mcp_params``:
          - ``url``: target URL (default "").
          - ``method``: HTTP method (default "GET").
          - ``headers``: mapping of request headers (default {}).
          - ``timeout``: request timeout in seconds (default 30).

        Network or request errors are reported via ``set_mcp_error``.
        """
        # NOTE: the original code also imported ``time`` here, but it was
        # never used in this job, so the dead import has been removed.
        import requests

        self.logger.info(f"ApiCallJob {self.job_id}: Starting API call")

        url = self.mcp_params.get("url", "")
        method = self.mcp_params.get("method", "GET")
        headers = self.mcp_params.get("headers", {})
        timeout = self.mcp_params.get("timeout", 30)

        try:
            response = requests.request(
                method=method,
                url=url,
                headers=headers,
                timeout=timeout
            )

            self.set_mcp_result({
                "job_id": self.job_id,
                "url": url,
                "method": method,
                "status_code": response.status_code,
                # Body is not returned — only its size, to keep results small.
                "response_size": len(response.content),
                "status": "completed"
            })

        except Exception as e:
            self.set_mcp_error(f"API call failed: {str(e)}")
|
|
585
|
+
|
|
586
|
+
|
|
587
|
+
class CustomJob(QueueJobBase):
    """Example job with caller-supplied payload data."""

    def run(self) -> None:
        """Sleep briefly to simulate work, then echo back the custom data."""
        import time

        self.logger.info(f"CustomJob {self.job_id}: Starting custom job")

        # Custom job logic would go here; we just simulate one unit of work.
        time.sleep(1)

        outcome = {
            "job_id": self.job_id,
            "custom_data": self.mcp_params.get("custom_data", {}),
            "status": "completed",
        }
        self.set_mcp_result(outcome)
|
|
606
|
+
|
|
607
|
+
|
|
608
|
+
class LongRunningJob(QueueJobBase):
    """Example long-running job that reports incremental progress."""

    def run(self) -> None:
        """Tick once per second for ``duration`` seconds, updating progress.

        Each tick has a 5% chance of simulating a failure, in which case the
        job is marked failed and processing stops early.
        """
        import random
        import time

        self.logger.info(f"LongRunningJob {self.job_id}: Starting long-running job")

        duration = self.mcp_params.get("duration", 10)  # seconds of simulated work
        task_type = self.mcp_params.get("task_type", "data_processing")

        self.set_status("running")
        self.set_description(f"Processing {task_type} task...")

        # One iteration per second of simulated work, with progress updates.
        for tick in range(duration):
            pct = int((tick + 1) / duration * 100)
            self.set_progress(pct)
            self.set_description(f"Processing {task_type} task... {pct}% complete")

            time.sleep(1)

            # Simulated 5%-per-tick failure chance.
            if random.random() < 0.05:
                self.set_mcp_error(f"Simulated error at {pct}%", "failed")
                return

        self.set_mcp_result({
            "job_id": self.job_id,
            "task_type": task_type,
            "duration": duration,
            "completed_at": time.time(),
            "status": "completed",
        })
|
|
649
|
+
|
|
650
|
+
|
|
651
|
+
class BatchProcessingJob(QueueJobBase):
    """Example job that processes a list of items one at a time."""

    def run(self) -> None:
        """Process each item (100 ms apiece), reporting per-item progress.

        Each item carries a 2% chance of a simulated failure that aborts the
        whole batch; otherwise the processed items are published as the
        result. An empty ``items`` list completes immediately with a count
        of zero.
        """
        import random
        import time

        self.logger.info(f"BatchProcessingJob {self.job_id}: Starting batch processing")

        batch_size = self.mcp_params.get("batch_size", 100)
        items = self.mcp_params.get("items", [])

        self.set_status("running")
        self.set_description(f"Processing batch of {len(items)} items...")

        processed_items = []

        for idx, item in enumerate(items):
            pct = int((idx + 1) / len(items) * 100)
            self.set_progress(pct)
            self.set_description(f"Processing item {idx+1}/{len(items)}... {pct}% complete")

            time.sleep(0.1)  # 100 ms of simulated work per item

            processed_items.append({
                "original": item,
                "processed": f"processed_{item}",
                "timestamp": time.time(),
            })

            # Simulated 2%-per-item failure chance.
            if random.random() < 0.02:
                self.set_mcp_error(f"Processing failed at item {idx+1}: {item}", "failed")
                return

        self.set_mcp_result({
            "job_id": self.job_id,
            "batch_size": batch_size,
            "processed_count": len(processed_items),
            "processed_items": processed_items,
            "completed_at": time.time(),
            "status": "completed",
        })
|
|
702
|
+
|
|
703
|
+
|
|
704
|
+
class FileDownloadJob(QueueJobBase):
    """Example job that simulates downloading a file in 64 KB chunks."""

    def run(self) -> None:
        """Simulate a chunked download, updating progress after each chunk.

        Each chunk carries a 3% chance of a simulated network error that
        marks the job failed and stops the download.
        """
        import random
        import time

        self.logger.info(f"FileDownloadJob {self.job_id}: Starting file download")

        url = self.mcp_params.get("url", "https://example.com/file.zip")
        file_size = self.mcp_params.get("file_size", 1024 * 1024)  # default 1 MB

        self.set_status("running")
        self.set_description(f"Downloading {url}...")

        downloaded = 0
        chunk_size = 64 * 1024  # 64 KB per simulated chunk

        while downloaded < file_size:
            # Final chunk may be smaller than chunk_size.
            next_chunk = min(chunk_size, file_size - downloaded)
            time.sleep(0.1)  # simulated network latency

            downloaded += next_chunk
            progress = int(downloaded / file_size * 100)

            self.set_progress(progress)
            self.set_description(f"Downloading {url}... {progress}% complete ({downloaded}/{file_size} bytes)")

            # Simulated 3%-per-chunk network failure chance.
            if random.random() < 0.03:
                self.set_mcp_error(f"Network error during download at {progress}%", "failed")
                return

        self.set_mcp_result({
            "job_id": self.job_id,
            "url": url,
            "file_size": file_size,
            "downloaded_bytes": downloaded,
            "completed_at": time.time(),
            "status": "completed",
        })
|