flock-core 0.3.41__py3-none-any.whl → 0.4.0b2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of flock-core has been flagged as potentially problematic.
- flock/__init__.py +31 -0
- flock/cli/create_flock.py +58 -3
- flock/cli/load_flock.py +135 -1
- flock/cli/registry_management.py +367 -96
- flock/cli/yaml_editor.py +119 -6
- flock/core/__init__.py +13 -1
- flock/core/flock.py +918 -49
- flock/core/flock_agent.py +114 -22
- flock/core/flock_registry.py +37 -5
- flock/core/serialization/serializable.py +35 -8
- flock/core/serialization/serialization_utils.py +96 -1
- flock/core/util/cli_helper.py +2 -2
- flock/core/util/file_path_utils.py +223 -0
- {flock_core-0.3.41.dist-info → flock_core-0.4.0b2.dist-info}/METADATA +1 -1
- {flock_core-0.3.41.dist-info → flock_core-0.4.0b2.dist-info}/RECORD +18 -17
- {flock_core-0.3.41.dist-info → flock_core-0.4.0b2.dist-info}/WHEEL +0 -0
- {flock_core-0.3.41.dist-info → flock_core-0.4.0b2.dist-info}/entry_points.txt +0 -0
- {flock_core-0.3.41.dist-info → flock_core-0.4.0b2.dist-info}/licenses/LICENSE +0 -0
flock/core/flock.py
CHANGED
@@ -7,7 +7,7 @@ import asyncio
 import os
 import uuid
 from pathlib import Path
-from typing import TYPE_CHECKING, Any, TypeVar
+from typing import TYPE_CHECKING, Any, Literal, TypeVar

 from box import Box
 from opentelemetry import trace
@@ -23,6 +23,10 @@ from flock.core.context.context_manager import initialize_context
 from flock.core.execution.local_executor import run_local_workflow
 from flock.core.execution.temporal_executor import run_temporal_workflow
 from flock.core.logging.logging import LOGGERS, get_logger, get_module_loggers
+from flock.core.serialization.serialization_utils import (
+    extract_pydantic_models_from_type_string,
+)
+from flock.core.util.input_resolver import split_top_level

 # Import FlockAgent using TYPE_CHECKING to avoid circular import at runtime
 if TYPE_CHECKING:
@@ -90,6 +94,14 @@ class Flock(BaseModel, Serializable):
         default=False,
         description="If True, execute workflows via Temporal; otherwise, run locally.",
     )
+    enable_logging: bool = Field(
+        default=False,
+        description="If True, enable logging for the Flock instance.",
+    )
+    show_flock_banner: bool = Field(
+        default=True,
+        description="If True, show the Flock banner.",
+    )
     # --- Runtime Attributes (Excluded from Serialization) ---
     # Store agents internally but don't make it part of the Pydantic model definition
     # Use a regular attribute, initialized in __init__
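The two new settings above are plain Pydantic fields, so they can be passed straight to the constructor, which forwards them (see the __init__ change in the next hunk). A minimal usage sketch; the field names come from the hunk above, while the name and model string are placeholders:

from flock.core.flock import Flock

# Sketch only: constructor keywords taken from the diff; name/model values are placeholders.
flock = Flock(
    name="quiet_flock",
    model="openai/gpt-4o",
    enable_temporal=False,     # existing field: run locally
    enable_logging=True,       # new in 0.4.0b2: enable Flock logging
    show_flock_banner=False,   # new in 0.4.0b2: suppress the startup banner
)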
@@ -126,6 +138,8 @@ class Flock(BaseModel, Serializable):
             model=model,
             description=description,
             enable_temporal=enable_temporal,
+            enable_logging=enable_logging,
+            show_flock_banner=show_flock_banner,
             **kwargs,  # Pass extra kwargs to Pydantic BaseModel
         )

@@ -275,30 +289,21 @@ class Flock(BaseModel, Serializable):
         # Check if an event loop is already running
         try:
             loop = asyncio.get_running_loop()
-
-
-
-
-
-
-
-
-
-
-
-
-
-            # No running event loop, create a new one with asyncio.run
-            return asyncio.run(
-                self.run_async(
-                    start_agent=start_agent,
-                    input=input,
-                    context=context,
-                    run_id=run_id,
-                    box_result=box_result,
-                    agents=agents,
-                )
+        except (
+            RuntimeError
+        ):  # 'RuntimeError: There is no current event loop...'
+            loop = asyncio.new_event_loop()
+            asyncio.set_event_loop(loop)
+            return loop.run_until_complete(
+                self.run_async(
+                    start_agent=start_agent,
+                    input=input,
+                    context=context,
+                    run_id=run_id,
+                    box_result=box_result,
+                    agents=agents,
                 )
+            )

     async def run_async(
         self,
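The rewritten branch above no longer falls back to asyncio.run(); when run() is called with no running event loop it creates one, installs it, and drives run_async() to completion on it. The pattern in isolation, as a standalone sketch rather than Flock code:

import asyncio

async def work() -> str:
    await asyncio.sleep(0)
    return "done"

def run_sync() -> str:
    # Mirrors the fallback in the hunk above: only create a loop if none is running.
    try:
        asyncio.get_running_loop()
    except RuntimeError:  # no current event loop
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        return loop.run_until_complete(work())
    raise RuntimeError("call run_sync() from synchronous code only")

print(run_sync())  # -> done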
@@ -446,33 +451,446 @@ class Flock(BaseModel, Serializable):

     # --- ADDED Serialization Methods ---

-    def to_dict(
-        """
+    def to_dict(
+        self, path_type: Literal["absolute", "relative"] = "absolute"
+    ) -> dict[str, Any]:
+        """Convert Flock instance to dictionary representation.
+
+        Args:
+            path_type: How file paths should be formatted ('absolute' or 'relative')
+        """
         logger.debug("Serializing Flock instance to dict.")
         # Use Pydantic's dump for base fields
         data = self.model_dump(mode="json", exclude_none=True)
+        logger.info(
+            f"Serializing Flock '{self.name}' with {len(self._agents)} agents"
+        )

         # Manually add serialized agents
         data["agents"] = {}
+
+        # Track custom types used across all agents
+        custom_types = {}
+        # Track components used across all agents
+        components = {}
+
         for name, agent_instance in self._agents.items():
             try:
+                logger.debug(f"Serializing agent '{name}'")
                 # Agents handle their own serialization via their to_dict
-
+                agent_data = agent_instance.to_dict()
+                data["agents"][name] = agent_data
+
+                if agent_instance.input:
+                    logger.debug(
+                        f"Extracting type information from agent '{name}' input: {agent_instance.input}"
+                    )
+                    input_types = self._extract_types_from_signature(
+                        agent_instance.input
+                    )
+                    if input_types:
+                        logger.debug(
+                            f"Found input types in agent '{name}': {input_types}"
+                        )
+                        custom_types.update(
+                            self._get_type_definitions(input_types)
+                        )
+
+                # Extract type information from agent outputs
+                if agent_instance.output:
+                    logger.debug(
+                        f"Extracting type information from agent '{name}' output: {agent_instance.output}"
+                    )
+                    output_types = self._extract_types_from_signature(
+                        agent_instance.output
+                    )
+                    if output_types:
+                        logger.debug(
+                            f"Found output types in agent '{name}': {output_types}"
+                        )
+                        custom_types.update(
+                            self._get_type_definitions(output_types)
+                        )
+
+                # Extract component information
+                if (
+                    "evaluator" in agent_data
+                    and "type" in agent_data["evaluator"]
+                ):
+                    component_type = agent_data["evaluator"]["type"]
+                    logger.debug(
+                        f"Adding evaluator component '{component_type}' from agent '{name}'"
+                    )
+                    components[component_type] = self._get_component_definition(
+                        component_type, path_type
+                    )
+
+                # Extract module component information
+                if "modules" in agent_data:
+                    for module_name, module_data in agent_data[
+                        "modules"
+                    ].items():
+                        if "type" in module_data:
+                            component_type = module_data["type"]
+                            logger.debug(
+                                f"Adding module component '{component_type}' from module '{module_name}' in agent '{name}'"
+                            )
+                            components[component_type] = (
+                                self._get_component_definition(
+                                    component_type, path_type
+                                )
+                            )
+
+                # Extract tool (callable) information
+                if agent_data.get("tools"):
+                    logger.debug(
+                        f"Extracting tool information from agent '{name}': {agent_data['tools']}"
+                    )
+                    # Get references to the actual tool objects
+                    tool_objs = (
+                        agent_instance.tools if agent_instance.tools else []
+                    )
+                    for i, tool_name in enumerate(agent_data["tools"]):
+                        if i < len(tool_objs):
+                            tool = tool_objs[i]
+                            if callable(tool) and not isinstance(tool, type):
+                                # Get the fully qualified name for registry lookup
+                                path_str = (
+                                    get_registry().get_callable_path_string(
+                                        tool
+                                    )
+                                )
+                                if path_str:
+                                    logger.debug(
+                                        f"Adding tool '{tool_name}' (from path '{path_str}') to components"
+                                    )
+                                    # Add definition using just the function name as the key
+                                    components[tool_name] = (
+                                        self._get_callable_definition(
+                                            path_str, tool_name, path_type
+                                        )
+                                    )
+
             except Exception as e:
                 logger.error(
-                    f"Failed to serialize agent '{name}' within Flock: {e}"
+                    f"Failed to serialize agent '{name}' within Flock: {e}",
+                    exc_info=True,
                 )
                 # Optionally skip problematic agents or raise error
                 # data["agents"][name] = {"error": f"Serialization failed: {e}"}

-        #
-
-
+        # Add type definitions to the serialized output if any were found
+        if custom_types:
+            logger.info(
+                f"Adding {len(custom_types)} custom type definitions to serialized output"
+            )
+            data["types"] = custom_types
+
+        # Add component definitions to the serialized output if any were found
+        if components:
+            logger.info(
+                f"Adding {len(components)} component definitions to serialized output"
+            )
+            data["components"] = components
+
+        # Add dependencies section
+        data["dependencies"] = self._get_dependencies()
+
+        # Add serialization settings
+        data["metadata"] = {"path_type": path_type}
+
+        logger.debug(
+            f"Flock serialization complete with {len(data['agents'])} agents, {len(custom_types)} types, {len(components)} components"
+        )

-        # Filter final dict (optional, Pydantic's exclude_none helps)
-        # return self._filter_none_values(data)
         return data

+    def _extract_types_from_signature(self, signature: str) -> list[str]:
+        """Extract type names from an input/output signature string."""
+        if not signature:
+            return []
+
+        signature_parts = split_top_level(signature)
+
+        # Basic type extraction - handles simple cases like "result: TypeName" or "list[TypeName]"
+        custom_types = []
+
+        # Look for type annotations (everything after ":")
+        for part in signature_parts:
+            parts = part.split(":")
+            if len(parts) > 1:
+                type_part = parts[1].strip()
+
+                pydantic_models = extract_pydantic_models_from_type_string(
+                    type_part
+                )
+                if pydantic_models:
+                    for model in pydantic_models:
+                        custom_types.append(model.__name__)
+
+                # # Extract from list[Type]
+                # if "list[" in type_part:
+                #     inner_type = type_part.split("list[")[1].split("]")[0].strip()
+                #     if inner_type and inner_type.lower() not in [
+                #         "str",
+                #         "int",
+                #         "float",
+                #         "bool",
+                #         "dict",
+                #         "list",
+                #     ]:
+                #         custom_types.append(inner_type)
+
+                # # Extract direct type references
+                # elif type_part and type_part.lower() not in [
+                #     "str",
+                #     "int",
+                #     "float",
+                #     "bool",
+                #     "dict",
+                #     "list",
+                # ]:
+                #     custom_types.append(
+                #         type_part.split()[0]
+                #     )  # Take the first word in case there's a description
+
+        return custom_types
+
+    def _get_type_definitions(self, type_names: list[str]) -> dict[str, Any]:
+        """Get definitions for the specified custom types."""
+        from flock.core.flock_registry import get_registry
+
+        type_definitions = {}
+        registry = get_registry()
+
+        for type_name in type_names:
+            try:
+                # Try to get the type from registry
+                type_obj = registry._types.get(type_name)
+                if type_obj:
+                    type_def = self._extract_type_definition(
+                        type_name, type_obj
+                    )
+                    if type_def:
+                        type_definitions[type_name] = type_def
+            except Exception as e:
+                logger.warning(
+                    f"Could not extract definition for type {type_name}: {e}"
+                )
+
+        return type_definitions
+
+    def _extract_type_definition(
+        self, type_name: str, type_obj: type
+    ) -> dict[str, Any]:
+        """Extract a definition for a custom type."""
+        import inspect
+        from dataclasses import is_dataclass
+
+        type_def = {
+            "module_path": type_obj.__module__,
+        }
+
+        # Handle Pydantic models
+        if hasattr(type_obj, "model_json_schema") and callable(
+            getattr(type_obj, "model_json_schema")
+        ):
+            type_def["type"] = "pydantic.BaseModel"
+            try:
+                schema = type_obj.model_json_schema()
+                # Clean up schema to remove unnecessary fields
+                if "title" in schema and schema["title"] == type_name:
+                    del schema["title"]
+                type_def["schema"] = schema
+            except Exception as e:
+                logger.warning(
+                    f"Could not extract schema for Pydantic model {type_name}: {e}"
+                )
+
+        # Handle dataclasses
+        elif is_dataclass(type_obj):
+            type_def["type"] = "dataclass"
+            fields = {}
+            for field_name, field in type_obj.__dataclass_fields__.items():
+                fields[field_name] = {
+                    "type": str(field.type),
+                    "default": str(field.default)
+                    if field.default is not inspect.Parameter.empty
+                    else None,
+                }
+            type_def["fields"] = fields
+
+        # Handle other types - just store basic information
+        else:
+            type_def["type"] = "custom"
+
+        # Extract import statement (simplified version)
+        type_def["imports"] = [f"from {type_obj.__module__} import {type_name}"]
+
+        return type_def
+
+    def _get_component_definition(
+        self, component_type: str, path_type: Literal["absolute", "relative"]
+    ) -> dict[str, Any]:
+        """Get definition for a component type."""
+        import os
+        import sys
+
+        from flock.core.flock_registry import get_registry
+
+        registry = get_registry()
+        component_def = {}
+
+        try:
+            # Try to get the component class from registry
+            component_class = registry._components.get(component_type)
+            if component_class:
+                # Get the standard module path
+                module_path = component_class.__module__
+
+                # Get the actual file system path if possible
+                file_path = None
+                try:
+                    if (
+                        hasattr(component_class, "__module__")
+                        and component_class.__module__
+                    ):
+                        module = sys.modules.get(component_class.__module__)
+                        if module and hasattr(module, "__file__"):
+                            file_path = os.path.abspath(module.__file__)
+                            # Convert to relative path if needed
+                            if path_type == "relative" and file_path:
+                                try:
+                                    file_path = os.path.relpath(file_path)
+                                except ValueError:
+                                    # Keep as absolute if can't make relative
+                                    logger.warning(
+                                        f"Could not convert path to relative: {file_path}"
+                                    )
+                except Exception as e:
+                    # If we can't get the file path, we'll just use the module path
+                    logger.warning(
+                        f"Error getting file path for component {component_type}: {e}"
+                    )
+                    pass
+
+                component_def = {
+                    "type": "flock_component",
+                    "module_path": module_path,
+                    "file_path": file_path,  # Include actual file system path
+                    "description": getattr(
+                        component_class, "__doc__", ""
+                    ).strip()
+                    or f"{component_type} component",
+                }
+        except Exception as e:
+            logger.warning(
+                f"Could not extract definition for component {component_type}: {e}"
+            )
+            # Provide minimal information if we can't extract details
+            component_def = {
+                "type": "flock_component",
+                "module_path": "unknown",
+                "file_path": None,
+                "description": f"{component_type} component (definition incomplete)",
+            }
+
+        return component_def
+
+    def _get_callable_definition(
+        self,
+        callable_ref: str,
+        func_name: str,
+        path_type: Literal["absolute", "relative"],
+    ) -> dict[str, Any]:
+        """Get definition for a callable reference.
+
+        Args:
+            callable_ref: The fully qualified path to the callable
+            func_name: The simple function name (for display purposes)
+            path_type: How file paths should be formatted ('absolute' or 'relative')
+        """
+        import inspect
+        import os
+        import sys
+
+        from flock.core.flock_registry import get_registry
+
+        registry = get_registry()
+        callable_def = {}
+
+        try:
+            # Try to get the callable from registry
+            logger.debug(
+                f"Getting callable definition for '{callable_ref}' (display name: '{func_name}')"
+            )
+            func = registry.get_callable(callable_ref)
+            if func:
+                # Get the standard module path
+                module_path = func.__module__
+
+                # Get the actual file system path if possible
+                file_path = None
+                try:
+                    if func.__module__ and func.__module__ != "builtins":
+                        module = sys.modules.get(func.__module__)
+                        if module and hasattr(module, "__file__"):
+                            file_path = os.path.abspath(module.__file__)
+                            # Convert to relative path if needed
+                            if path_type == "relative" and file_path:
+                                try:
+                                    file_path = os.path.relpath(file_path)
+                                except ValueError:
+                                    # Keep as absolute if can't make relative
+                                    logger.warning(
+                                        f"Could not convert path to relative: {file_path}"
+                                    )
+                except Exception as e:
+                    # If we can't get the file path, just use the module path
+                    logger.warning(
+                        f"Error getting file path for callable {callable_ref}: {e}"
+                    )
+                    pass
+
+                # Get the docstring for description
+                docstring = (
+                    inspect.getdoc(func) or f"Callable function {func_name}"
+                )
+
+                callable_def = {
+                    "type": "flock_callable",
+                    "module_path": module_path,
+                    "file_path": file_path,
+                    "description": docstring.strip(),
+                }
+                logger.debug(
+                    f"Created callable definition for '{func_name}': module={module_path}, file={file_path}"
+                )
+        except Exception as e:
+            logger.warning(
+                f"Could not extract definition for callable {callable_ref}: {e}"
+            )
+            # Provide minimal information
+            callable_def = {
+                "type": "flock_callable",
+                "module_path": callable_ref.split(".")[0]
+                if "." in callable_ref
+                else "unknown",
+                "file_path": None,
+                "description": f"Callable {func_name} (definition incomplete)",
+            }
+
+        return callable_def
+
+    def _get_dependencies(self) -> list[str]:
+        """Get list of dependencies required by this Flock."""
+        # This is a simplified version - in production, you might want to detect
+        # actual versions of installed packages
+        return [
+            "pydantic>=2.0.0",
+            "flock>=0.3.41",  # Assuming this is the package name
+        ]
+
     @classmethod
     def from_dict(cls: type[T], data: dict[str, Any]) -> T:
         """Create Flock instance from dictionary representation."""
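to_dict() now accepts a path_type switch and, besides the per-agent data, emits top-level types, components, dependencies, and metadata sections. A hedged sketch of the call and of the resulting layout; the keys come from the code above, while the concrete values in the comments are illustrative only:

# Assumes `flock` is a populated Flock instance.
data = flock.to_dict(path_type="relative")

# Expected top-level layout, per the serialization code above (values illustrative):
# data["agents"]       -> {"my_agent": {...}}                      # per-agent to_dict() output
# data["types"]        -> {"MyModel": {"type": "pydantic.BaseModel", "schema": {...}}}
# data["components"]   -> {"SomeEvaluator": {"type": "flock_component", "module_path": "...", "file_path": "..."}}
# data["dependencies"] -> ["pydantic>=2.0.0", "flock>=0.3.41"]
# data["metadata"]     -> {"path_type": "relative"}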
@@ -480,6 +898,30 @@ class Flock(BaseModel, Serializable):
             f"Deserializing Flock from dict. Provided keys: {list(data.keys())}"
         )

+        # Check for serialization settings
+        serialization_settings = data.pop("serialization_settings", {})
+        path_type = serialization_settings.get("path_type", "absolute")
+        logger.debug(
+            f"Using path_type '{path_type}' from serialization settings"
+        )
+
+        # First, handle type definitions if present
+        if "types" in data:
+            logger.info(f"Processing {len(data['types'])} type definitions")
+            cls._register_type_definitions(data["types"])
+
+        # Then, handle component definitions if present
+        if "components" in data:
+            logger.info(
+                f"Processing {len(data['components'])} component definitions"
+            )
+            cls._register_component_definitions(data["components"], path_type)
+
+        # Check dependencies if present
+        if "dependencies" in data:
+            logger.debug(f"Checking {len(data['dependencies'])} dependencies")
+            cls._check_dependencies(data["dependencies"])
+
         # Ensure FlockAgent is importable for type checking later
         try:
             from flock.core.flock_agent import FlockAgent as ConcreteFlockAgent
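With the types, components, and dependencies sections registered up front, from_dict() can rebuild a Flock from the dict produced by to_dict(). A minimal round-trip sketch (assumes a populated `flock` instance; `_agents` is the internal mapping used throughout this file):

data = flock.to_dict(path_type="absolute")   # includes agents/types/components sections
clone = Flock.from_dict(data)                # re-registers types and components, then adds agents
print(sorted(clone._agents))                 # same agent names as the original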
@@ -491,11 +933,22 @@ class Flock(BaseModel, Serializable):

         # Extract agent data before initializing Flock base model
         agents_data = data.pop("agents", {})
+        logger.info(f"Found {len(agents_data)} agents to deserialize")
+
+        # Remove types, components, and dependencies sections as they're not part of Flock fields
+        data.pop("types", None)
+        data.pop("components", None)
+        data.pop("dependencies", None)
+        # Remove metadata if present
+        data.pop("metadata", None)

         # Create Flock instance using Pydantic constructor for basic fields
         try:
             # Pass only fields defined in Flock's Pydantic model
             init_data = {k: v for k, v in data.items() if k in cls.model_fields}
+            logger.debug(
+                f"Creating Flock instance with fields: {list(init_data.keys())}"
+            )
             flock_instance = cls(**init_data)
         except Exception as e:
             logger.error(
@@ -508,6 +961,7 @@ class Flock(BaseModel, Serializable):
         # Deserialize and add agents AFTER Flock instance exists
         for name, agent_data in agents_data.items():
             try:
+                logger.debug(f"Deserializing agent '{name}'")
                 # Ensure agent_data has the name, or add it from the key
                 agent_data.setdefault("name", name)
                 # Use FlockAgent's from_dict method
@@ -515,6 +969,7 @@ class Flock(BaseModel, Serializable):
                 flock_instance.add_agent(
                     agent_instance
                 )  # Adds to _agents and registers
+                logger.debug(f"Successfully added agent '{name}' to Flock")
             except Exception as e:
                 logger.error(
                     f"Failed to deserialize or add agent '{name}' during Flock deserialization: {e}",
@@ -522,9 +977,412 @@ class Flock(BaseModel, Serializable):
                 )
                 # Decide: skip agent or raise error?

-        logger.info(
+        logger.info(
+            f"Successfully deserialized Flock instance '{flock_instance.name}' with {len(flock_instance._agents)} agents"
+        )
         return flock_instance

+    @classmethod
+    def _register_type_definitions(cls, type_defs: dict[str, Any]) -> None:
+        """Register type definitions from serialized data."""
+        import importlib
+
+        from flock.core.flock_registry import get_registry
+
+        registry = get_registry()
+
+        for type_name, type_def in type_defs.items():
+            logger.debug(f"Registering type: {type_name}")
+
+            try:
+                # First try to import the type directly
+                module_path = type_def.get("module_path")
+                if module_path:
+                    try:
+                        module = importlib.import_module(module_path)
+                        if hasattr(module, type_name):
+                            type_obj = getattr(module, type_name)
+                            registry.register_type(type_obj, type_name)
+                            logger.info(
+                                f"Registered type {type_name} from module {module_path}"
+                            )
+                            continue
+                    except ImportError:
+                        logger.debug(
+                            f"Could not import {module_path}, trying dynamic type creation"
+                        )
+
+                # If direct import fails, try to create the type dynamically
+                if (
+                    type_def.get("type") == "pydantic.BaseModel"
+                    and "schema" in type_def
+                ):
+                    cls._create_pydantic_model(type_name, type_def)
+                elif (
+                    type_def.get("type") == "dataclass" and "fields" in type_def
+                ):
+                    cls._create_dataclass(type_name, type_def)
+                else:
+                    logger.warning(
+                        f"Unsupported type definition for {type_name}, type: {type_def.get('type')}"
+                    )
+
+            except Exception as e:
+                logger.error(f"Failed to register type {type_name}: {e}")
+
+    @classmethod
+    def _create_pydantic_model(
+        cls, type_name: str, type_def: dict[str, Any]
+    ) -> None:
+        """Dynamically create a Pydantic model from a schema definition."""
+        from pydantic import create_model
+
+        from flock.core.flock_registry import get_registry
+
+        registry = get_registry()
+        schema = type_def.get("schema", {})
+
+        try:
+            # Extract field definitions from schema
+            fields = {}
+            properties = schema.get("properties", {})
+            required = schema.get("required", [])
+
+            for field_name, field_schema in properties.items():
+                # Determine the field type based on schema
+                field_type = cls._get_type_from_schema(field_schema)
+
+                # Determine if field is required
+                default = ... if field_name in required else None
+
+                # Add to fields dict
+                fields[field_name] = (field_type, default)
+
+            # Create the model
+            DynamicModel = create_model(type_name, **fields)
+
+            # Register it
+            registry.register_type(DynamicModel, type_name)
+            logger.info(f"Created and registered Pydantic model: {type_name}")
+
+        except Exception as e:
+            logger.error(f"Failed to create Pydantic model {type_name}: {e}")
+
+    @classmethod
+    def _get_type_from_schema(cls, field_schema: dict[str, Any]) -> Any:
+        """Convert JSON schema type to Python type."""
+        schema_type = field_schema.get("type")
+
+        # Basic type mapping
+        type_mapping = {
+            "string": str,
+            "integer": int,
+            "number": float,
+            "boolean": bool,
+            "array": list,
+            "object": dict,
+        }
+
+        # Handle basic types
+        if schema_type in type_mapping:
+            return type_mapping[schema_type]
+
+        # Handle enums
+        if "enum" in field_schema:
+            from typing import Literal
+
+            return Literal[tuple(field_schema["enum"])]
+
+        # Default
+        return Any
+
+    @classmethod
+    def _create_dataclass(
+        cls, type_name: str, type_def: dict[str, Any]
+    ) -> None:
+        """Dynamically create a dataclass from a field definition."""
+        from dataclasses import make_dataclass
+
+        from flock.core.flock_registry import get_registry
+
+        registry = get_registry()
+        fields_def = type_def.get("fields", {})
+
+        try:
+            fields = []
+            for field_name, field_props in fields_def.items():
+                field_type = eval(
+                    field_props.get("type", "str")
+                )  # Note: eval is used here for simplicity
+                fields.append((field_name, field_type))
+
+            # Create the dataclass
+            DynamicDataclass = make_dataclass(type_name, fields)
+
+            # Register it
+            registry.register_type(DynamicDataclass, type_name)
+            logger.info(f"Created and registered dataclass: {type_name}")
+
+        except Exception as e:
+            logger.error(f"Failed to create dataclass {type_name}: {e}")
+
+    @classmethod
+    def _register_component_definitions(
+        cls,
+        component_defs: dict[str, Any],
+        path_type: Literal["absolute", "relative"],
+    ) -> None:
+        """Register component definitions from serialized data."""
+        import importlib
+        import importlib.util
+        import os
+        import sys
+
+        from flock.core.flock_registry import get_registry
+
+        registry = get_registry()
+
+        for component_name, component_def in component_defs.items():
+            logger.debug(f"Registering component: {component_name}")
+            component_type = component_def.get("type", "flock_component")
+
+            try:
+                # Handle callables differently than components
+                if component_type == "flock_callable":
+                    # For callables, component_name is just the function name
+                    func_name = component_name
+                    module_path = component_def.get("module_path")
+                    file_path = component_def.get("file_path")
+
+                    # Convert relative path to absolute if needed
+                    if (
+                        path_type == "relative"
+                        and file_path
+                        and not os.path.isabs(file_path)
+                    ):
+                        try:
+                            # Make absolute based on current directory
+                            file_path = os.path.abspath(file_path)
+                            logger.debug(
+                                f"Converted relative path '{component_def.get('file_path')}' to absolute: '{file_path}'"
+                            )
+                        except Exception as e:
+                            logger.warning(
+                                f"Could not convert relative path to absolute: {e}"
+                            )
+
+                    logger.debug(
+                        f"Processing callable '{func_name}' from module '{module_path}', file: {file_path}"
+                    )
+
+                    # Try direct import first
+                    if module_path:
+                        try:
+                            logger.debug(
+                                f"Attempting to import module: {module_path}"
+                            )
+                            module = importlib.import_module(module_path)
+                            if hasattr(module, func_name):
+                                callable_obj = getattr(module, func_name)
+                                # Register with just the name for easier lookup
+                                registry.register_callable(
+                                    callable_obj, func_name
+                                )
+                                logger.info(
+                                    f"Registered callable with name: {func_name}"
+                                )
+                                # Also register with fully qualified path for compatibility
+                                if module_path != "__main__":
+                                    full_path = f"{module_path}.{func_name}"
+                                    registry.register_callable(
+                                        callable_obj, full_path
+                                    )
+                                    logger.info(
+                                        f"Also registered callable with full path: {full_path}"
+                                    )
+                                logger.info(
+                                    f"Successfully registered callable {func_name} from module {module_path}"
+                                )
+                                continue
+                            else:
+                                logger.warning(
+                                    f"Function '{func_name}' not found in module {module_path}"
+                                )
+                        except ImportError:
+                            logger.debug(
+                                f"Could not import module {module_path}, trying file path"
+                            )
+
+                    # Try file path if module import fails
+                    if file_path and os.path.exists(file_path):
+                        try:
+                            logger.debug(
+                                f"Attempting to load file: {file_path}"
+                            )
+                            # Create a module name from file path
+                            mod_name = f"{func_name}_module"
+                            spec = importlib.util.spec_from_file_location(
+                                mod_name, file_path
+                            )
+                            if spec and spec.loader:
+                                module = importlib.util.module_from_spec(spec)
+                                sys.modules[spec.name] = module
+                                spec.loader.exec_module(module)
+                                logger.debug(
+                                    f"Successfully loaded module from file, searching for function '{func_name}'"
+                                )
+
+                                # Look for the function in the loaded module
+                                if hasattr(module, func_name):
+                                    callable_obj = getattr(module, func_name)
+                                    registry.register_callable(
+                                        callable_obj, func_name
+                                    )
+                                    logger.info(
+                                        f"Successfully registered callable {func_name} from file {file_path}"
+                                    )
+                                else:
+                                    logger.warning(
+                                        f"Function {func_name} not found in file {file_path}"
+                                    )
+                            else:
+                                logger.warning(
+                                    f"Could not create import spec for {file_path}"
+                                )
+                        except Exception as e:
+                            logger.error(
+                                f"Error loading callable {func_name} from file {file_path}: {e}",
+                                exc_info=True,
+                            )
+
+                # Handle regular components (existing code)
+                else:
+                    # First try using the module path (Python import)
+                    module_path = component_def.get("module_path")
+                    if module_path and module_path != "unknown":
+                        try:
+                            logger.debug(
+                                f"Attempting to import module '{module_path}' for component '{component_name}'"
+                            )
+                            module = importlib.import_module(module_path)
+                            # Find the component class in the module
+                            for attr_name in dir(module):
+                                if attr_name == component_name:
+                                    component_class = getattr(module, attr_name)
+                                    registry.register_component(
+                                        component_class, component_name
+                                    )
+                                    logger.info(
+                                        f"Registered component {component_name} from {module_path}"
+                                    )
+                                    break
+                            else:
+                                logger.warning(
+                                    f"Component {component_name} not found in module {module_path}"
+                                )
+                                # If we didn't find the component, try using file_path next
+                                raise ImportError(
+                                    f"Component {component_name} not found in module {module_path}"
+                                )
+                        except ImportError:
+                            # If module import fails, try file_path approach
+                            file_path = component_def.get("file_path")
+
+                            # Convert relative path to absolute if needed
+                            if (
+                                path_type == "relative"
+                                and file_path
+                                and not os.path.isabs(file_path)
+                            ):
+                                try:
+                                    # Make absolute based on current directory
+                                    file_path = os.path.abspath(file_path)
+                                    logger.debug(
+                                        f"Converted relative path '{component_def.get('file_path')}' to absolute: '{file_path}'"
+                                    )
+                                except Exception as e:
+                                    logger.warning(
+                                        f"Could not convert relative path to absolute: {e}"
+                                    )
+
+                            if file_path and os.path.exists(file_path):
+                                logger.debug(
+                                    f"Attempting to load {component_name} from file: {file_path}"
+                                )
+                                try:
+                                    # Load the module from file path
+                                    spec = (
+                                        importlib.util.spec_from_file_location(
+                                            f"{component_name}_module",
+                                            file_path,
+                                        )
+                                    )
+                                    if spec and spec.loader:
+                                        module = (
+                                            importlib.util.module_from_spec(
+                                                spec
+                                            )
+                                        )
+                                        sys.modules[spec.name] = module
+                                        spec.loader.exec_module(module)
+                                        logger.debug(
+                                            f"Successfully loaded module from file, searching for component class '{component_name}'"
+                                        )
+
+                                        # Find the component class in the loaded module
+                                        for attr_name in dir(module):
+                                            if attr_name == component_name:
+                                                component_class = getattr(
+                                                    module, attr_name
+                                                )
+                                                registry.register_component(
+                                                    component_class,
+                                                    component_name,
+                                                )
+                                                logger.info(
+                                                    f"Registered component {component_name} from file {file_path}"
+                                                )
+                                                break
+                                        else:
+                                            logger.warning(
+                                                f"Component {component_name} not found in file {file_path}"
+                                            )
+                                except Exception as e:
+                                    logger.error(
+                                        f"Error loading component {component_name} from file {file_path}: {e}",
+                                        exc_info=True,
+                                    )
+                            else:
+                                logger.warning(
+                                    f"No valid file path found for component {component_name}"
+                                )
+                    else:
+                        logger.warning(
+                            f"Missing or unknown module path for component {component_name}"
+                        )
+            except Exception as e:
+                logger.error(
+                    f"Failed to register component {component_name}: {e}",
+                    exc_info=True,
+                )
+
+    @classmethod
+    def _check_dependencies(cls, dependencies: list[str]) -> None:
+        """Check if required dependencies are available."""
+        import importlib
+        import re
+
+        for dependency in dependencies:
+            # Extract package name and version
+            match = re.match(r"([^>=<]+)([>=<].+)?", dependency)
+            if match:
+                package_name = match.group(1)
+                try:
+                    importlib.import_module(package_name.replace("-", "_"))
+                    logger.debug(f"Dependency {package_name} is available")
+                except ImportError:
+                    logger.warning(f"Dependency {dependency} is not installed")
+
     # --- API Start Method ---
     def start_api(
         self,
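_create_pydantic_model() above rebuilds a missing type from its stored JSON schema with pydantic.create_model. The core of that mapping in isolation, as a standalone sketch with an illustrative schema, independent of the Flock registry:

from typing import Any
from pydantic import create_model

# A "types" entry as to_dict() would store it for a Pydantic model (schema is illustrative).
type_def = {
    "type": "pydantic.BaseModel",
    "schema": {
        "properties": {"title": {"type": "string"}, "pages": {"type": "integer"}},
        "required": ["title"],
    },
}

json_to_py = {"string": str, "integer": int, "number": float,
              "boolean": bool, "array": list, "object": dict}

fields: dict[str, Any] = {}
for name, prop in type_def["schema"]["properties"].items():
    py_type = json_to_py.get(prop.get("type"), Any)          # schema type -> Python type
    default = ... if name in type_def["schema"].get("required", []) else None
    fields[name] = (py_type, default)                         # (type, default) per create_model

Book = create_model("Book", **fields)   # dynamically rebuilt model
print(Book(title="Flock 101"))          # pages falls back to None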
@@ -600,20 +1458,31 @@ class Flock(BaseModel, Serializable):
         if not p.exists():
             raise FileNotFoundError(f"Flock file not found: {file_path}")

-
-
-
-
-
-
-
-
-
+        try:
+            if p.suffix in [".yaml", ".yml"]:
+                return Flock.from_yaml_file(p)
+            elif p.suffix == ".json":
+                return Flock.from_json(p.read_text())
+            elif p.suffix == ".msgpack":
+                return Flock.from_msgpack_file(p)
+            elif p.suffix == ".pkl":
+                if PICKLE_AVAILABLE:
+                    return Flock.from_pickle_file(p)
+                else:
+                    raise RuntimeError(
+                        "Cannot load Pickle file: cloudpickle not installed."
+                    )
             else:
-                raise
-                "
+                raise ValueError(
+                    f"Unsupported file extension: {p.suffix}. Use .yaml, .json, .msgpack, or .pkl."
                 )
-
-
-
-
+        except Exception as e:
+            # Check if it's an exception about missing types
+            if "Could not get registered type name" in str(e):
+                logger.error(
+                    f"Failed to load Flock from {file_path}: Missing type definition. "
+                    "This may happen if the YAML was created on a system with different types registered. "
+                    "Check if the file includes 'types' section with necessary type definitions."
+                )
+            logger.error(f"Error loading Flock from {file_path}: {e}")
+            raise
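The loader above dispatches on the file suffix and re-raises with clearer context when a serialized type is missing from the registry. A usage sketch; the public method name Flock.load_from_file, the file name, and the agent/input values are assumptions, and any of .yaml/.yml, .json, .msgpack, or .pkl should work per the branch above:

from flock.core.flock import Flock

# Assumes "my_flock.yaml" was written by the serialization code earlier in this file.
flock = Flock.load_from_file("my_flock.yaml")
result = flock.run(start_agent="my_agent", input={"query": "hello"})  # placeholder agent name and input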