kailash 0.2.0__py3-none-any.whl → 0.2.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -48,7 +48,7 @@ Template System:
 
 Example Usage:
     Basic configuration:
-
+
     >>> from kailash.workflow.cycle_config import CycleConfig
     >>> config = CycleConfig(
     ...     max_iterations=100,
@@ -61,9 +61,9 @@ Example Usage:
     ...     "processor", "evaluator",
     ...     cycle=True, cycle_config=config
     ... )
-
+
     Template usage:
-
+
     >>> from kailash.workflow.cycle_config import CycleTemplates
     >>> # Pre-optimized configuration
     >>> config = CycleTemplates.optimization_loop(
@@ -75,9 +75,9 @@ Example Usage:
     ...     timeout=600.0,
     ...     memory_limit=2048
     ... ))
-
+
     Configuration management:
-
+
     >>> # Export for reuse
     >>> template_data = config.create_template("ml_training")
     >>> # Import and modify
@@ -136,7 +136,7 @@ class CycleConfig:
         >>> # Basic configuration
         >>> config = CycleConfig(max_iterations=100, convergence_check="error < 0.01")
         >>> workflow.connect("a", "b", cycle_config=config)
-
+
         >>> # Advanced configuration with all features
         >>> config = CycleConfig(
         ...     max_iterations=50,
@@ -166,7 +166,7 @@ class CycleConfig:
     # Execution control and conditions
     condition: Optional[str] = None  # When to execute the cycle
     priority: int = 0  # Execution priority for multiple cycles
-
+
     # Advanced configuration
     retry_policy: Dict[str, Any] = field(default_factory=dict)
     metadata: Dict[str, Any] = field(default_factory=dict)
@@ -214,9 +214,9 @@ class CycleConfig:
         termination_conditions = [
             self.max_iterations is not None,
             self.convergence_check is not None,
-            self.timeout is not None
+            self.timeout is not None,
         ]
-
+
         if not any(termination_conditions):
             errors.append(
                 "At least one termination condition is required: "
@@ -233,8 +233,8 @@ class CycleConfig:
                     invalid_params={"max_iterations": self.max_iterations},
                     suggestions=[
                         "Use integer values for max_iterations",
-                        "Convert float values to int if needed"
-                    ]
+                        "Convert float values to int if needed",
+                    ],
                 )
             elif self.max_iterations <= 0:
                 raise CycleConfigurationError(
@@ -244,8 +244,8 @@ class CycleConfig:
                     suggestions=[
                         "Use 10-100 iterations for quick convergence",
                         "Use 100-1000 iterations for complex optimization",
-                        "Consider adding convergence_check for early termination"
-                    ]
+                        "Consider adding convergence_check for early termination",
+                    ],
                 )
             elif self.max_iterations > 10000:
                 warnings.append(
@@ -263,7 +263,14 @@ class CycleConfig:
                 )
             else:
                 # Validate expression safety
-                unsafe_patterns = ['import ', 'exec(', 'eval(', '__', 'open(', 'file(']
+                unsafe_patterns = [
+                    "import ",
+                    "exec(",
+                    "eval(",
+                    "__",
+                    "open(",
+                    "file(",
+                ]
                 for pattern in unsafe_patterns:
                     if pattern in self.convergence_check:
                         errors.append(
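
Note: the expanded unsafe_patterns list above feeds a plain substring scan over string convergence expressions. A hedged, self-contained sketch of that scan (the function name here is illustrative, not part of the package):

UNSAFE_PATTERNS = ["import ", "exec(", "eval(", "__", "open(", "file("]

def looks_unsafe(expression: str) -> bool:
    """Return True if a convergence expression contains a blocked substring."""
    return any(pattern in expression for pattern in UNSAFE_PATTERNS)

assert looks_unsafe("__import__('os').system('ls')")
assert not looks_unsafe("error < 0.01")
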
@@ -293,7 +300,9 @@ class CycleConfig:
         # Validate memory_limit
         if self.memory_limit is not None:
             if not isinstance(self.memory_limit, int):
-                errors.append(f"memory_limit must be an integer, got {type(self.memory_limit)}")
+                errors.append(
+                    f"memory_limit must be an integer, got {type(self.memory_limit)}"
+                )
             elif self.memory_limit <= 0:
                 errors.append(
                     f"memory_limit must be positive, got {self.memory_limit}. "
@@ -307,7 +316,9 @@ class CycleConfig:
 
         # Validate iteration_safety_factor
         if not isinstance(self.iteration_safety_factor, (int, float)):
-            errors.append(f"iteration_safety_factor must be numeric, got {type(self.iteration_safety_factor)}")
+            errors.append(
+                f"iteration_safety_factor must be numeric, got {type(self.iteration_safety_factor)}"
+            )
         elif self.iteration_safety_factor < 1.0:
             errors.append(
                 f"iteration_safety_factor must be >= 1.0, got {self.iteration_safety_factor}. "
@@ -353,7 +364,9 @@ class CycleConfig:
 
         # Raise errors if any found
         if errors:
-            error_message = "CycleConfig validation failed:\n" + "\n".join(f"• {error}" for error in errors)
+            error_message = "CycleConfig validation failed:\n" + "\n".join(
+                f"• {error}" for error in errors
+            )
             raise CycleConfigurationError(
                 error_message,
                 error_code="CYCLE_CONFIG_001",
@@ -361,8 +374,8 @@ class CycleConfig:
                     "Ensure at least one termination condition (max_iterations, convergence_check, or timeout)",
                     "Use positive values for numeric parameters",
                     "Avoid unsafe operations in convergence expressions",
-                    "Check the CycleConfig documentation for valid parameter ranges"
-                ]
+                    "Check the CycleConfig documentation for valid parameter ranges",
+                ],
             )
 
     def get_effective_max_iterations(self) -> Optional[int]:
@@ -408,15 +421,15 @@ class CycleConfig:
             {'max_iterations': 100, 'iteration_safety_factor': 1.5, ...}
         """
         result = {}
-
+
         for key, value in self.__dict__.items():
             if value is not None:
                 # Skip callable convergence_check for serialization
-                if key == 'convergence_check' and callable(value):
-                    result[key] = '<callable>'
+                if key == "convergence_check" and callable(value):
+                    result[key] = "<callable>"
                 else:
                     result[key] = value
-
+
         return result
 
     @classmethod
@@ -447,11 +460,13 @@ class CycleConfig:
         # Filter out unknown fields
         known_fields = {f.name for f in cls.__dataclass_fields__.values()}
         filtered_data = {k: v for k, v in data.items() if k in known_fields}
-
+
         try:
             return cls(**filtered_data)
         except Exception as e:
-            raise CycleConfigurationError(f"Failed to create CycleConfig from data: {e}") from e
+            raise CycleConfigurationError(
+                f"Failed to create CycleConfig from data: {e}"
+            ) from e
 
     def merge(self, other: "CycleConfig") -> "CycleConfig":
         """
@@ -480,17 +495,17 @@ class CycleConfig:
             >>> # Result has max_iterations=100, timeout=60.0, cycle_id="custom"
         """
         merged_data = {}
-
+
         # Start with this configuration
         for key, value in self.__dict__.items():
             if value is not None:
                 merged_data[key] = value
-
+
         # Override with other configuration
         for key, value in other.__dict__.items():
             if value is not None:
                 merged_data[key] = value
-
+
         return CycleConfig(**merged_data)
 
     def create_template(self, template_name: str) -> Dict[str, Any]:
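
Note: merge() copies this configuration's non-None fields and then lets the other configuration's non-None fields win. A small usage sketch consistent with the docstring comment in the hunk above:

from kailash.workflow.cycle_config import CycleConfig

base = CycleConfig(max_iterations=100, timeout=30.0)
override = CycleConfig(timeout=60.0, cycle_id="custom")
merged = base.merge(override)
# Non-None values from `override` replace those from `base`.
assert merged.max_iterations == 100
assert merged.timeout == 60.0
assert merged.cycle_id == "custom"
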
@@ -522,10 +537,10 @@ class CycleConfig:
             "usage_notes": {
                 "max_iterations": "Adjust based on expected convergence time",
                 "convergence_check": "Modify condition for your specific metrics",
-                "timeout": "Set based on acceptable execution time"
-            }
+                "timeout": "Set based on acceptable execution time",
+            },
         }
-
+
         return template_data
 
     def __repr__(self) -> str:
@@ -541,17 +556,21 @@ class CycleConfig:
             'CycleConfig(max_iterations=100, timeout=60.0, cycle_id=None)'
         """
         key_params = []
-
+
         if self.max_iterations is not None:
             key_params.append(f"max_iterations={self.max_iterations}")
         if self.convergence_check is not None:
-            conv_str = self.convergence_check if isinstance(self.convergence_check, str) else "<callable>"
+            conv_str = (
+                self.convergence_check
+                if isinstance(self.convergence_check, str)
+                else "<callable>"
+            )
             key_params.append(f"convergence_check='{conv_str}'")
         if self.timeout is not None:
             key_params.append(f"timeout={self.timeout}")
         if self.cycle_id is not None:
             key_params.append(f"cycle_id='{self.cycle_id}'")
-
+
         return f"CycleConfig({', '.join(key_params)})"
 
 
@@ -572,7 +591,7 @@ class CycleTemplates:
     Example:
         >>> # Quick optimization cycle
         >>> config = CycleTemplates.optimization_loop(max_iterations=50)
-
+
         >>> # Retry logic with exponential backoff
         >>> config = CycleTemplates.retry_cycle(max_retries=3)
     """
@@ -581,7 +600,7 @@ class CycleTemplates:
     def optimization_loop(
         max_iterations: int = 100,
         convergence_threshold: float = 0.01,
-        timeout: Optional[float] = None
+        timeout: Optional[float] = None,
     ) -> CycleConfig:
         """
         Create configuration for optimization cycles.
@@ -608,13 +627,12 @@ class CycleTemplates:
             timeout=timeout,
             cycle_id="optimization_loop",
             description="Iterative optimization cycle with convergence detection",
-            iteration_safety_factor=2.0  # Higher safety for optimization
+            iteration_safety_factor=2.0,  # Higher safety for optimization
         )
 
     @staticmethod
     def retry_cycle(
-        max_retries: int = 3,
-        timeout_per_retry: float = 30.0
+        max_retries: int = 3, timeout_per_retry: float = 30.0
     ) -> CycleConfig:
         """
         Create configuration for retry logic patterns.
@@ -640,13 +658,12 @@ class CycleTemplates:
             cycle_id="retry_cycle",
             description="Retry cycle with exponential backoff support",
             condition="error_occurred == True",
-            iteration_safety_factor=1.2  # Conservative safety for retries
+            iteration_safety_factor=1.2,  # Conservative safety for retries
         )
 
     @staticmethod
     def data_quality_cycle(
-        quality_threshold: float = 0.95,
-        max_iterations: int = 10
+        quality_threshold: float = 0.95, max_iterations: int = 10
     ) -> CycleConfig:
         """
         Create configuration for data quality improvement cycles.
@@ -677,8 +694,7 @@ class CycleTemplates:
 
     @staticmethod
     def training_loop(
-        max_epochs: int = 100,
-        early_stopping_patience: int = 10
+        max_epochs: int = 100, early_stopping_patience: int = 10
     ) -> CycleConfig:
         """
         Create configuration for machine learning training cycles.
@@ -705,5 +721,5 @@ class CycleTemplates:
             cycle_id="training_loop",
             description="ML training cycle with early stopping",
             memory_limit=8192,  # 8GB for ML training
-            iteration_safety_factor=1.1  # Conservative for long training
-        )
+            iteration_safety_factor=1.1,  # Conservative for long training
+        )
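
Note: the CycleTemplates factory methods reformatted above keep the same parameters between 0.2.0 and 0.2.2. A brief usage sketch based on the defaults and docstrings visible in this diff:

from kailash.workflow.cycle_config import CycleTemplates

# Defaults taken from the signatures shown above.
opt = CycleTemplates.optimization_loop(max_iterations=50, convergence_threshold=0.01)
retry = CycleTemplates.retry_cycle(max_retries=3, timeout_per_retry=30.0)
quality = CycleTemplates.data_quality_cycle(quality_threshold=0.95, max_iterations=10)
training = CycleTemplates.training_loop(max_epochs=100, early_stopping_patience=10)

print(opt)  # CycleConfig(...) repr including cycle_id='optimization_loop'
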