isa-model 0.3.5-py3-none-any.whl → 0.3.6-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
Files changed (87)
  1. isa_model/__init__.py +30 -1
  2. isa_model/client.py +770 -0
  3. isa_model/core/config/__init__.py +16 -0
  4. isa_model/core/config/config_manager.py +514 -0
  5. isa_model/core/config.py +426 -0
  6. isa_model/core/models/model_billing_tracker.py +476 -0
  7. isa_model/core/models/model_manager.py +399 -0
  8. isa_model/core/{storage/supabase_storage.py → models/model_repo.py} +72 -73
  9. isa_model/core/pricing_manager.py +426 -0
  10. isa_model/core/services/__init__.py +19 -0
  11. isa_model/core/services/intelligent_model_selector.py +547 -0
  12. isa_model/core/types.py +291 -0
  13. isa_model/deployment/__init__.py +2 -0
  14. isa_model/deployment/cloud/modal/isa_vision_doc_service.py +157 -3
  15. isa_model/deployment/cloud/modal/isa_vision_table_service.py +532 -0
  16. isa_model/deployment/cloud/modal/isa_vision_ui_service.py +104 -3
  17. isa_model/deployment/cloud/modal/register_models.py +321 -0
  18. isa_model/deployment/runtime/deployed_service.py +338 -0
  19. isa_model/deployment/services/__init__.py +9 -0
  20. isa_model/deployment/services/auto_deploy_vision_service.py +537 -0
  21. isa_model/deployment/services/model_service.py +332 -0
  22. isa_model/deployment/services/service_monitor.py +356 -0
  23. isa_model/deployment/services/service_registry.py +527 -0
  24. isa_model/eval/__init__.py +80 -44
  25. isa_model/eval/config/__init__.py +10 -0
  26. isa_model/eval/config/evaluation_config.py +108 -0
  27. isa_model/eval/evaluators/__init__.py +18 -0
  28. isa_model/eval/evaluators/base_evaluator.py +503 -0
  29. isa_model/eval/evaluators/llm_evaluator.py +472 -0
  30. isa_model/eval/factory.py +417 -709
  31. isa_model/eval/infrastructure/__init__.py +24 -0
  32. isa_model/eval/infrastructure/experiment_tracker.py +466 -0
  33. isa_model/eval/metrics.py +191 -21
  34. isa_model/inference/ai_factory.py +181 -605
  35. isa_model/inference/services/audio/base_stt_service.py +65 -1
  36. isa_model/inference/services/audio/base_tts_service.py +75 -1
  37. isa_model/inference/services/audio/openai_stt_service.py +189 -151
  38. isa_model/inference/services/audio/openai_tts_service.py +12 -10
  39. isa_model/inference/services/audio/replicate_tts_service.py +61 -56
  40. isa_model/inference/services/base_service.py +55 -17
  41. isa_model/inference/services/embedding/base_embed_service.py +65 -1
  42. isa_model/inference/services/embedding/ollama_embed_service.py +103 -43
  43. isa_model/inference/services/embedding/openai_embed_service.py +8 -10
  44. isa_model/inference/services/helpers/stacked_config.py +148 -0
  45. isa_model/inference/services/img/__init__.py +18 -0
  46. isa_model/inference/services/{vision → img}/base_image_gen_service.py +80 -1
  47. isa_model/inference/services/{stacked → img}/flux_professional_service.py +25 -1
  48. isa_model/inference/services/{stacked → img/helpers}/base_stacked_service.py +40 -35
  49. isa_model/inference/services/{vision → img}/replicate_image_gen_service.py +44 -31
  50. isa_model/inference/services/llm/__init__.py +3 -3
  51. isa_model/inference/services/llm/base_llm_service.py +492 -40
  52. isa_model/inference/services/llm/helpers/llm_prompts.py +258 -0
  53. isa_model/inference/services/llm/helpers/llm_utils.py +280 -0
  54. isa_model/inference/services/llm/ollama_llm_service.py +51 -17
  55. isa_model/inference/services/llm/openai_llm_service.py +70 -19
  56. isa_model/inference/services/llm/yyds_llm_service.py +24 -23
  57. isa_model/inference/services/vision/__init__.py +38 -4
  58. isa_model/inference/services/vision/base_vision_service.py +218 -117
  59. isa_model/inference/services/vision/{isA_vision_service.py → disabled/isA_vision_service.py} +98 -0
  60. isa_model/inference/services/{stacked → vision}/doc_analysis_service.py +1 -1
  61. isa_model/inference/services/vision/helpers/base_stacked_service.py +274 -0
  62. isa_model/inference/services/vision/helpers/image_utils.py +272 -3
  63. isa_model/inference/services/vision/helpers/vision_prompts.py +297 -0
  64. isa_model/inference/services/vision/openai_vision_service.py +104 -307
  65. isa_model/inference/services/vision/replicate_vision_service.py +140 -325
  66. isa_model/inference/services/{stacked → vision}/ui_analysis_service.py +2 -498
  67. isa_model/scripts/register_models.py +370 -0
  68. isa_model/scripts/register_models_with_embeddings.py +510 -0
  69. isa_model/serving/api/fastapi_server.py +6 -1
  70. isa_model/serving/api/routes/unified.py +202 -0
  71. {isa_model-0.3.5.dist-info → isa_model-0.3.6.dist-info}/METADATA +4 -1
  72. {isa_model-0.3.5.dist-info → isa_model-0.3.6.dist-info}/RECORD +77 -53
  73. isa_model/config/__init__.py +0 -9
  74. isa_model/config/config_manager.py +0 -213
  75. isa_model/core/model_manager.py +0 -213
  76. isa_model/core/model_registry.py +0 -375
  77. isa_model/core/vision_models_init.py +0 -116
  78. isa_model/inference/billing_tracker.py +0 -406
  79. isa_model/inference/services/llm/triton_llm_service.py +0 -481
  80. isa_model/inference/services/stacked/__init__.py +0 -26
  81. isa_model/inference/services/stacked/config.py +0 -426
  82. isa_model/inference/services/vision/ollama_vision_service.py +0 -194
  83. /isa_model/core/{model_storage.py → models/model_storage.py} +0 -0
  84. /isa_model/inference/services/{vision → embedding}/helpers/text_splitter.py +0 -0
  85. /isa_model/inference/services/llm/{llm_adapter.py → helpers/llm_adapter.py} +0 -0
  86. {isa_model-0.3.5.dist-info → isa_model-0.3.6.dist-info}/WHEEL +0 -0
  87. {isa_model-0.3.5.dist-info → isa_model-0.3.6.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,537 @@
+ #!/usr/bin/env python3
+ """
+ Auto-Deploy Vision Service Wrapper
+
+ Automatically deploys Modal services when needed and shuts them down after completion.
+ """
+
+ import logging
+ from typing import Dict, Any, Optional, Union, List, BinaryIO
+
+ from isa_model.inference.services.vision.base_vision_service import BaseVisionService
+
+ logger = logging.getLogger(__name__)
+
+
+ class AutoDeployVisionService(BaseVisionService):
+     """
+     Vision service wrapper that handles automatic deployment and shutdown
+     of Modal services for ISA vision tasks.
+     """
+
+     def __init__(self, provider_name: str = "modal", model_name: str = "qwen_table", **kwargs):
+         # Use centralized architecture
+         super().__init__(provider_name, model_name, **kwargs)
+         self.model_name = model_name
+         self.underlying_service = None
+         self._factory = None
+
+     def _get_factory(self):
+         """Get AIFactory instance for service management"""
+         if not self._factory:
+             from isa_model.inference.ai_factory import AIFactory
+             self._factory = AIFactory()
+         return self._factory
+
+     def _ensure_service_deployed(self) -> bool:
+         """Ensure the Modal service is deployed before use"""
+         factory = self._get_factory()
+
+         # Check if service is available
+         app_name = factory._get_modal_app_name(self.model_name)
+         if not factory._check_modal_service_availability(app_name):
+             logger.info(f"Deploying {self.model_name} service...")
+             success = factory._auto_deploy_modal_service(self.model_name)
+             if not success:
+                 logger.error(f"Failed to deploy {self.model_name}")
+                 return False
+
+             # Wait for service to be ready
+             logger.info(f"Waiting for {self.model_name} service to be ready...")
+             self._wait_for_service_ready(app_name)
+
+         # Initialize underlying service using proper factory method
+         if not self.underlying_service:
+             # Use the factory's get_vision method with modal provider
+             self.underlying_service = factory.get_vision(
+                 model_name=self.model_name,
+                 provider_name="modal"
+             )
+
+         return True
+
+     def _wait_for_service_ready(self, app_name: str, max_wait_time: int = 300):
+         """Wait for Modal service to be ready by checking health endpoint"""
+         import time
+
+         logger.info(f"Waiting up to {max_wait_time} seconds for {app_name} to be ready...")
+         start_time = time.time()
+
+         while time.time() - start_time < max_wait_time:
+             try:
+                 import modal
+                 # Try to lookup the app and call health check
+                 app = modal.App.lookup(app_name)
+
+                 # Different health check methods for different services
+                 if "table" in app_name:
+                     service_cls = modal.Cls.from_name(app_name, "QwenTableExtractionService")
+                 elif "ui" in app_name:
+                     service_cls = modal.Cls.from_name(app_name, "UIDetectionService")
+                 elif "doc" in app_name:
+                     service_cls = modal.Cls.from_name(app_name, "DocumentAnalysisService")
+                 else:
+                     # Default wait time for unknown services
+                     time.sleep(60)
+                     return
+
+                 # Try to call health check
+                 health_result = service_cls().health_check.remote()
+                 if health_result and health_result.get("status") == "healthy":
+                     logger.info(f"Service {app_name} is ready!")
+                     return
+
+             except Exception as e:
+                 logger.debug(f"Service not ready yet: {e}")
+
+             # Wait 10 seconds before next check
+             time.sleep(10)
+             logger.info(f"Still waiting for {app_name}... ({int(time.time() - start_time)}s elapsed)")
+
+         logger.warning(f"Service {app_name} may not be fully ready after {max_wait_time}s")
+
+     def _shutdown_service_after_completion(self):
+         """Shutdown Modal service after task completion"""
+         try:
+             factory = self._get_factory()
+             factory._shutdown_modal_service(self.model_name)
+         except Exception as e:
+             logger.warning(f"Failed to shutdown service {self.model_name}: {e}")
+
+     async def extract_table_data(
+         self,
+         image: Union[str, BinaryIO],
+         extraction_format: str = "markdown",
+         custom_prompt: Optional[str] = None
+     ) -> Dict[str, Any]:
+         """Extract table data with auto-deploy and shutdown"""
+
+         # Ensure service is deployed
+         if not self._ensure_service_deployed():
+             return {
+                 'success': False,
+                 'error': f'Failed to deploy {self.model_name} service',
+                 'service': self.model_name
+             }
+
+         try:
+             # Call the underlying service
+             result = await self.underlying_service.extract_table_data(
+                 image=image,
+                 extraction_format=extraction_format,
+                 custom_prompt=custom_prompt
+             )
+
+             # Shutdown service after completion
+             self._shutdown_service_after_completion()
+
+             return result
+
+         except Exception as e:
+             logger.error(f"Table extraction failed: {e}")
+             # Still try to shutdown even if request failed
+             self._shutdown_service_after_completion()
+
+             return {
+                 'success': False,
+                 'error': str(e),
+                 'service': self.model_name
+             }
+
+     async def batch_extract_tables(
+         self,
+         images: List[Union[str, BinaryIO]],
+         extraction_format: str = "markdown"
+     ) -> Dict[str, Any]:
+         """Batch extract tables with auto-deploy and shutdown"""
+
+         # Ensure service is deployed
+         if not self._ensure_service_deployed():
+             return {
+                 'success': False,
+                 'error': f'Failed to deploy {self.model_name} service',
+                 'service': self.model_name
+             }
+
+         try:
+             # Call the underlying service
+             result = await self.underlying_service.batch_extract_tables(
+                 images=images,
+                 extraction_format=extraction_format
+             )
+
+             # Shutdown service after completion
+             self._shutdown_service_after_completion()
+
+             return result
+
+         except Exception as e:
+             logger.error(f"Batch table extraction failed: {e}")
+             # Still try to shutdown even if request failed
+             self._shutdown_service_after_completion()
+
+             return {
+                 'success': False,
+                 'error': str(e),
+                 'service': self.model_name
+             }
+
+     async def detect_ui_elements(self, image: Union[str, BinaryIO]) -> Dict[str, Any]:
+         """Detect UI elements with auto-deploy and shutdown"""
+
+         # Ensure service is deployed
+         if not self._ensure_service_deployed():
+             return {
+                 'success': False,
+                 'error': f'Failed to deploy {self.model_name} service',
+                 'service': self.model_name
+             }
+
+         try:
+             # Call the underlying service
+             result = await self.underlying_service.detect_ui_elements(image=image)
+
+             # Shutdown service after completion
+             self._shutdown_service_after_completion()
+
+             return result
+
+         except Exception as e:
+             logger.error(f"UI detection failed: {e}")
+             # Still try to shutdown even if request failed
+             self._shutdown_service_after_completion()
+
+             return {
+                 'success': False,
+                 'error': str(e),
+                 'service': self.model_name
+             }
+
+     async def analyze_document(self, image: Union[str, BinaryIO]) -> Dict[str, Any]:
+         """Analyze document with auto-deploy and shutdown"""
+
+         # Ensure service is deployed
+         if not self._ensure_service_deployed():
+             return {
+                 'success': False,
+                 'error': f'Failed to deploy {self.model_name} service',
+                 'service': self.model_name
+             }
+
+         try:
+             # Call the underlying service
+             result = await self.underlying_service.analyze_document(image=image)
+
+             # Shutdown service after completion
+             self._shutdown_service_after_completion()
+
+             return result
+
+         except Exception as e:
+             logger.error(f"Document analysis failed: {e}")
+             # Still try to shutdown even if request failed
+             self._shutdown_service_after_completion()
+
+             return {
+                 'success': False,
+                 'error': str(e),
+                 'service': self.model_name
+             }
+
+     # Implement all required abstract methods from BaseVisionService
+
+     async def invoke(
+         self,
+         image: Union[str, BinaryIO],
+         prompt: Optional[str] = None,
+         task: Optional[str] = None,
+         **kwargs
+     ) -> Dict[str, Any]:
+         """Unified invoke method for all vision operations"""
+         if not self._ensure_service_deployed():
+             return {
+                 'success': False,
+                 'error': f'Failed to deploy {self.model_name} service',
+                 'service': self.model_name
+             }
+
+         try:
+             result = await self.underlying_service.invoke(image=image, prompt=prompt, task=task, **kwargs)
+             self._shutdown_service_after_completion()
+             return result
+         except Exception as e:
+             logger.error(f"Vision invoke failed: {e}")
+             self._shutdown_service_after_completion()
+             return {
+                 'success': False,
+                 'error': str(e),
+                 'service': self.model_name
+             }
+
+     async def analyze_image(
+         self,
+         image: Union[str, BinaryIO],
+         prompt: Optional[str] = None,
+         max_tokens: int = 1000
+     ) -> Dict[str, Any]:
+         """Analyze image with auto-deploy and shutdown"""
+         if not self._ensure_service_deployed():
+             return {
+                 'success': False,
+                 'error': f'Failed to deploy {self.model_name} service',
+                 'service': self.model_name
+             }
+
+         try:
+             result = await self.underlying_service.analyze_image(
+                 image=image, prompt=prompt, max_tokens=max_tokens
+             )
+             self._shutdown_service_after_completion()
+             return result
+         except Exception as e:
+             logger.error(f"Image analysis failed: {e}")
+             self._shutdown_service_after_completion()
+             return {
+                 'success': False,
+                 'error': str(e),
+                 'service': self.model_name
+             }
+
+     async def analyze_images(
+         self,
+         images: List[Union[str, BinaryIO]],
+         prompt: Optional[str] = None,
+         max_tokens: int = 1000
+     ) -> List[Dict[str, Any]]:
+         """Analyze multiple images with auto-deploy and shutdown"""
+         if not self._ensure_service_deployed():
+             return [{
+                 'success': False,
+                 'error': f'Failed to deploy {self.model_name} service',
+                 'service': self.model_name
+             }]
+
+         try:
+             result = await self.underlying_service.analyze_images(
+                 images=images, prompt=prompt, max_tokens=max_tokens
+             )
+             self._shutdown_service_after_completion()
+             return result
+         except Exception as e:
+             logger.error(f"Multiple image analysis failed: {e}")
+             self._shutdown_service_after_completion()
+             return [{
+                 'success': False,
+                 'error': str(e),
+                 'service': self.model_name
+             }]
+
+     async def describe_image(
+         self,
+         image: Union[str, BinaryIO],
+         detail_level: str = "medium"
+     ) -> Dict[str, Any]:
+         """Generate detailed description of image"""
+         if not self._ensure_service_deployed():
+             return {
+                 'success': False,
+                 'error': f'Failed to deploy {self.model_name} service',
+                 'service': self.model_name
+             }
+
+         try:
+             result = await self.underlying_service.describe_image(
+                 image=image, detail_level=detail_level
+             )
+             self._shutdown_service_after_completion()
+             return result
+         except Exception as e:
+             logger.error(f"Image description failed: {e}")
+             self._shutdown_service_after_completion()
+             return {
+                 'success': False,
+                 'error': str(e),
+                 'service': self.model_name
+             }
+
+     async def extract_text(self, image: Union[str, BinaryIO]) -> Dict[str, Any]:
+         """Extract text from image (OCR)"""
+         if not self._ensure_service_deployed():
+             return {
+                 'success': False,
+                 'error': f'Failed to deploy {self.model_name} service',
+                 'service': self.model_name
+             }
+
+         try:
+             result = await self.underlying_service.extract_text(image=image)
+             self._shutdown_service_after_completion()
+             return result
+         except Exception as e:
+             logger.error(f"Text extraction failed: {e}")
+             self._shutdown_service_after_completion()
+             return {
+                 'success': False,
+                 'error': str(e),
+                 'service': self.model_name
+             }
+
+     async def detect_objects(
+         self,
+         image: Union[str, BinaryIO],
+         confidence_threshold: float = 0.5
+     ) -> Dict[str, Any]:
+         """Detect objects in image"""
+         if not self._ensure_service_deployed():
+             return {
+                 'success': False,
+                 'error': f'Failed to deploy {self.model_name} service',
+                 'service': self.model_name
+             }
+
+         try:
+             result = await self.underlying_service.detect_objects(
+                 image=image, confidence_threshold=confidence_threshold
+             )
+             self._shutdown_service_after_completion()
+             return result
+         except Exception as e:
+             logger.error(f"Object detection failed: {e}")
+             self._shutdown_service_after_completion()
+             return {
+                 'success': False,
+                 'error': str(e),
+                 'service': self.model_name
+             }
+
+     async def get_object_coordinates(
+         self,
+         image: Union[str, BinaryIO],
+         object_name: str
+     ) -> Dict[str, Any]:
+         """Get coordinates of a specific object in the image"""
+         if not self._ensure_service_deployed():
+             return {
+                 'success': False,
+                 'error': f'Failed to deploy {self.model_name} service',
+                 'service': self.model_name
+             }
+
+         try:
+             result = await self.underlying_service.get_object_coordinates(
+                 image=image, object_name=object_name
+             )
+             self._shutdown_service_after_completion()
+             return result
+         except Exception as e:
+             logger.error(f"Object coordinate detection failed: {e}")
+             self._shutdown_service_after_completion()
+             return {
+                 'success': False,
+                 'error': str(e),
+                 'service': self.model_name
+             }
+
+     async def classify_image(
+         self,
+         image: Union[str, BinaryIO],
+         categories: Optional[List[str]] = None
+     ) -> Dict[str, Any]:
+         """Classify image into categories"""
+         if not self._ensure_service_deployed():
+             return {
+                 'success': False,
+                 'error': f'Failed to deploy {self.model_name} service',
+                 'service': self.model_name
+             }
+
+         try:
+             result = await self.underlying_service.classify_image(
+                 image=image, categories=categories
+             )
+             self._shutdown_service_after_completion()
+             return result
+         except Exception as e:
+             logger.error(f"Image classification failed: {e}")
+             self._shutdown_service_after_completion()
+             return {
+                 'success': False,
+                 'error': str(e),
+                 'service': self.model_name
+             }
+
+     async def compare_images(
+         self,
+         image1: Union[str, BinaryIO],
+         image2: Union[str, BinaryIO]
+     ) -> Dict[str, Any]:
+         """Compare two images for similarity"""
+         if not self._ensure_service_deployed():
+             return {
+                 'success': False,
+                 'error': f'Failed to deploy {self.model_name} service',
+                 'service': self.model_name
+             }
+
+         try:
+             result = await self.underlying_service.compare_images(
+                 image1=image1, image2=image2
+             )
+             self._shutdown_service_after_completion()
+             return result
+         except Exception as e:
+             logger.error(f"Image comparison failed: {e}")
+             self._shutdown_service_after_completion()
+             return {
+                 'success': False,
+                 'error': str(e),
+                 'service': self.model_name
+             }
+
+     def get_supported_formats(self) -> List[str]:
+         """Get list of supported image formats"""
+         # Initialize underlying service if needed (non-async)
+         if not self.underlying_service:
+             factory = self._get_factory()
+             self.underlying_service = factory.get_vision(
+                 model_name=self.model_name,
+                 provider_name="modal"
+             )
+         return self.underlying_service.get_supported_formats()
+
+     def get_max_image_size(self) -> Dict[str, int]:
+         """Get maximum supported image dimensions"""
+         # Initialize underlying service if needed (non-async)
+         if not self.underlying_service:
+             factory = self._get_factory()
+             self.underlying_service = factory.get_vision(
+                 model_name=self.model_name,
+                 provider_name="modal"
+             )
+         return self.underlying_service.get_max_image_size()
+
+     async def close(self):
+         """Cleanup resources"""
+         if self.underlying_service:
+             await self.underlying_service.close()
+         # Ensure service is shut down
+         self._shutdown_service_after_completion()
+
+     # Pass through other methods to underlying service
+     async def generate_image(self, prompt: str, **kwargs) -> Dict[str, Any]:
+         """Generate image (not applicable for ISA vision services)"""
+         return {
+             'success': False,
+             'error': 'Image generation not supported by ISA vision services',
+             'service': self.model_name
+         }
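
A minimal usage sketch for the wrapper added above (not part of the diff). It relies only on the constructor, extract_table_data, and close signatures visible in the new file; the asyncio entry point and the sample image path are illustrative assumptions, not shipped code.

import asyncio

from isa_model.deployment.services.auto_deploy_vision_service import AutoDeployVisionService


async def main():
    # The wrapper deploys the Modal-backed table service on first use and
    # shuts it down once the call completes, per the implementation above.
    service = AutoDeployVisionService(provider_name="modal", model_name="qwen_table")
    try:
        result = await service.extract_table_data(
            image="example_invoice.png",      # illustrative path
            extraction_format="markdown",
        )
        print(result)
    finally:
        await service.close()


if __name__ == "__main__":
    asyncio.run(main())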