powermem-0.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (123)
  1. powermem/__init__.py +103 -0
  2. powermem/agent/__init__.py +35 -0
  3. powermem/agent/abstract/__init__.py +22 -0
  4. powermem/agent/abstract/collaboration.py +259 -0
  5. powermem/agent/abstract/context.py +187 -0
  6. powermem/agent/abstract/manager.py +232 -0
  7. powermem/agent/abstract/permission.py +217 -0
  8. powermem/agent/abstract/privacy.py +267 -0
  9. powermem/agent/abstract/scope.py +199 -0
  10. powermem/agent/agent.py +791 -0
  11. powermem/agent/components/__init__.py +18 -0
  12. powermem/agent/components/collaboration_coordinator.py +645 -0
  13. powermem/agent/components/permission_controller.py +586 -0
  14. powermem/agent/components/privacy_protector.py +767 -0
  15. powermem/agent/components/scope_controller.py +685 -0
  16. powermem/agent/factories/__init__.py +16 -0
  17. powermem/agent/factories/agent_factory.py +266 -0
  18. powermem/agent/factories/config_factory.py +308 -0
  19. powermem/agent/factories/memory_factory.py +229 -0
  20. powermem/agent/implementations/__init__.py +16 -0
  21. powermem/agent/implementations/hybrid.py +728 -0
  22. powermem/agent/implementations/multi_agent.py +1040 -0
  23. powermem/agent/implementations/multi_user.py +1020 -0
  24. powermem/agent/types.py +53 -0
  25. powermem/agent/wrappers/__init__.py +14 -0
  26. powermem/agent/wrappers/agent_memory_wrapper.py +427 -0
  27. powermem/agent/wrappers/compatibility_wrapper.py +520 -0
  28. powermem/config_loader.py +318 -0
  29. powermem/configs.py +249 -0
  30. powermem/core/__init__.py +19 -0
  31. powermem/core/async_memory.py +1493 -0
  32. powermem/core/audit.py +258 -0
  33. powermem/core/base.py +165 -0
  34. powermem/core/memory.py +1567 -0
  35. powermem/core/setup.py +162 -0
  36. powermem/core/telemetry.py +215 -0
  37. powermem/integrations/__init__.py +17 -0
  38. powermem/integrations/embeddings/__init__.py +13 -0
  39. powermem/integrations/embeddings/aws_bedrock.py +100 -0
  40. powermem/integrations/embeddings/azure_openai.py +55 -0
  41. powermem/integrations/embeddings/base.py +31 -0
  42. powermem/integrations/embeddings/config/base.py +132 -0
  43. powermem/integrations/embeddings/configs.py +31 -0
  44. powermem/integrations/embeddings/factory.py +48 -0
  45. powermem/integrations/embeddings/gemini.py +39 -0
  46. powermem/integrations/embeddings/huggingface.py +41 -0
  47. powermem/integrations/embeddings/langchain.py +35 -0
  48. powermem/integrations/embeddings/lmstudio.py +29 -0
  49. powermem/integrations/embeddings/mock.py +11 -0
  50. powermem/integrations/embeddings/ollama.py +53 -0
  51. powermem/integrations/embeddings/openai.py +49 -0
  52. powermem/integrations/embeddings/qwen.py +102 -0
  53. powermem/integrations/embeddings/together.py +31 -0
  54. powermem/integrations/embeddings/vertexai.py +54 -0
  55. powermem/integrations/llm/__init__.py +18 -0
  56. powermem/integrations/llm/anthropic.py +87 -0
  57. powermem/integrations/llm/base.py +132 -0
  58. powermem/integrations/llm/config/anthropic.py +56 -0
  59. powermem/integrations/llm/config/azure.py +56 -0
  60. powermem/integrations/llm/config/base.py +62 -0
  61. powermem/integrations/llm/config/deepseek.py +56 -0
  62. powermem/integrations/llm/config/ollama.py +56 -0
  63. powermem/integrations/llm/config/openai.py +79 -0
  64. powermem/integrations/llm/config/qwen.py +68 -0
  65. powermem/integrations/llm/config/qwen_asr.py +46 -0
  66. powermem/integrations/llm/config/vllm.py +56 -0
  67. powermem/integrations/llm/configs.py +26 -0
  68. powermem/integrations/llm/deepseek.py +106 -0
  69. powermem/integrations/llm/factory.py +118 -0
  70. powermem/integrations/llm/gemini.py +201 -0
  71. powermem/integrations/llm/langchain.py +65 -0
  72. powermem/integrations/llm/ollama.py +106 -0
  73. powermem/integrations/llm/openai.py +166 -0
  74. powermem/integrations/llm/openai_structured.py +80 -0
  75. powermem/integrations/llm/qwen.py +207 -0
  76. powermem/integrations/llm/qwen_asr.py +171 -0
  77. powermem/integrations/llm/vllm.py +106 -0
  78. powermem/integrations/rerank/__init__.py +20 -0
  79. powermem/integrations/rerank/base.py +43 -0
  80. powermem/integrations/rerank/config/__init__.py +7 -0
  81. powermem/integrations/rerank/config/base.py +27 -0
  82. powermem/integrations/rerank/configs.py +23 -0
  83. powermem/integrations/rerank/factory.py +68 -0
  84. powermem/integrations/rerank/qwen.py +159 -0
  85. powermem/intelligence/__init__.py +17 -0
  86. powermem/intelligence/ebbinghaus_algorithm.py +354 -0
  87. powermem/intelligence/importance_evaluator.py +361 -0
  88. powermem/intelligence/intelligent_memory_manager.py +284 -0
  89. powermem/intelligence/manager.py +148 -0
  90. powermem/intelligence/plugin.py +229 -0
  91. powermem/prompts/__init__.py +29 -0
  92. powermem/prompts/graph/graph_prompts.py +217 -0
  93. powermem/prompts/graph/graph_tools_prompts.py +469 -0
  94. powermem/prompts/importance_evaluation.py +246 -0
  95. powermem/prompts/intelligent_memory_prompts.py +163 -0
  96. powermem/prompts/templates.py +193 -0
  97. powermem/storage/__init__.py +14 -0
  98. powermem/storage/adapter.py +896 -0
  99. powermem/storage/base.py +109 -0
  100. powermem/storage/config/base.py +13 -0
  101. powermem/storage/config/oceanbase.py +58 -0
  102. powermem/storage/config/pgvector.py +52 -0
  103. powermem/storage/config/sqlite.py +27 -0
  104. powermem/storage/configs.py +159 -0
  105. powermem/storage/factory.py +59 -0
  106. powermem/storage/migration_manager.py +438 -0
  107. powermem/storage/oceanbase/__init__.py +8 -0
  108. powermem/storage/oceanbase/constants.py +162 -0
  109. powermem/storage/oceanbase/oceanbase.py +1384 -0
  110. powermem/storage/oceanbase/oceanbase_graph.py +1441 -0
  111. powermem/storage/pgvector/__init__.py +7 -0
  112. powermem/storage/pgvector/pgvector.py +420 -0
  113. powermem/storage/sqlite/__init__.py +0 -0
  114. powermem/storage/sqlite/sqlite.py +218 -0
  115. powermem/storage/sqlite/sqlite_vector_store.py +311 -0
  116. powermem/utils/__init__.py +35 -0
  117. powermem/utils/utils.py +605 -0
  118. powermem/version.py +23 -0
  119. powermem-0.1.0.dist-info/METADATA +187 -0
  120. powermem-0.1.0.dist-info/RECORD +123 -0
  121. powermem-0.1.0.dist-info/WHEEL +5 -0
  122. powermem-0.1.0.dist-info/licenses/LICENSE +206 -0
  123. powermem-0.1.0.dist-info/top_level.txt +1 -0
powermem/storage/base.py
@@ -0,0 +1,109 @@
+"""
+Abstract base class for storage implementations
+
+This module defines the storage interface that all implementations must follow.
+"""
+
+from abc import ABC, abstractmethod
+from typing import Dict, Optional, Any, List
+
+from pydantic import BaseModel
+
+
+class OutputData(BaseModel):
+    id: Optional[int]  # memory id (Snowflake ID - 64-bit integer)
+    score: Optional[float]  # distance
+    payload: Optional[Dict]  # metadata
+
+class VectorStoreBase(ABC):
+    """
+    Abstract base class for storage implementations.
+
+    This class defines the interface that all storage backends must implement.
+    """
+
+    @abstractmethod
+    def create_col(self, name, vector_size, distance):
+        """Create a new collection."""
+        pass
+
+    @abstractmethod
+    def insert(self, vectors, payloads=None, ids=None):
+        """Insert vectors into a collection."""
+        pass
+
+    @abstractmethod
+    def search(self, query, vectors, limit=5, filters=None):
+        """Search for similar vectors."""
+        pass
+
+    @abstractmethod
+    def delete(self, vector_id):
+        """Delete a vector by ID."""
+        pass
+
+    @abstractmethod
+    def update(self, vector_id, vector=None, payload=None):
+        """Update a vector and its payload."""
+        pass
+
+    @abstractmethod
+    def get(self, vector_id):
+        """Retrieve a vector by ID."""
+        pass
+
+    @abstractmethod
+    def list_cols(self):
+        """List all collections."""
+        pass
+
+    @abstractmethod
+    def delete_col(self):
+        """Delete a collection."""
+        pass
+
+    @abstractmethod
+    def col_info(self):
+        """Get information about a collection."""
+        pass
+
+    @abstractmethod
+    def list(self, filters=None, limit=None):
+        """List all memories."""
+        pass
+
+    @abstractmethod
+    def reset(self):
+        """Reset by deleting the collection and recreating it."""
+        pass
+
+class GraphStoreBase(ABC):
+    """
+    Abstract base class for graph storage implementations.
+
+    This class defines the interface that all graph storage backends must implement.
+    """
+    @abstractmethod
+    def add(self, data: str, filters: Dict[str, Any]) -> Dict[str, Any]:
+        """Add data to the graph."""
+        pass
+
+    @abstractmethod
+    def search(self, query: str, filters: Dict[str, Any], limit: int = 10) -> List[Dict[str, Any]]:
+        """Search for memories."""
+        pass
+
+    @abstractmethod
+    def delete_all(self, filters: Dict[str, Any]) -> None:
+        """Delete all graph data for the given filters."""
+        pass
+
+    @abstractmethod
+    def get_all(self, filters: Dict[str, Any], limit: int = 100) -> List[Dict[str, str]]:
+        """Retrieve all nodes and relationships from the graph database."""
+        pass
+
+    @abstractmethod
+    def reset(self) -> None:
+        """Reset the graph by clearing all nodes and relationships."""
+        pass
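The two abstract classes above define the contract every powermem storage backend implements. The sketch below is not part of the wheel; the class name and the L2 scoring are illustrative assumptions, and it exists only to make the expected call and return shapes concrete.

    # Illustrative only -- not shipped in the wheel; assumes the installed powermem package.
    import math
    from powermem.storage.base import OutputData, VectorStoreBase

    class InMemoryVectorStore(VectorStoreBase):  # hypothetical backend name
        """Toy backend that keeps vectors in a dict keyed by id."""

        def __init__(self):
            self._rows = {}  # id -> (vector, payload)

        def create_col(self, name, vector_size, distance):
            self._rows = {}

        def insert(self, vectors, payloads=None, ids=None):
            for i, vec in enumerate(vectors):
                vid = ids[i] if ids else len(self._rows) + 1
                self._rows[vid] = (vec, payloads[i] if payloads else {})

        def search(self, query, vectors, limit=5, filters=None):
            # `vectors` is treated as the query embedding; smaller L2 distance ranks higher
            def dist(v):
                return math.sqrt(sum((a - b) ** 2 for a, b in zip(v, vectors)))
            hits = sorted(self._rows.items(), key=lambda kv: dist(kv[1][0]))[:limit]
            return [OutputData(id=vid, score=dist(vec), payload=pl) for vid, (vec, pl) in hits]

        def delete(self, vector_id):
            self._rows.pop(vector_id, None)

        def update(self, vector_id, vector=None, payload=None):
            old_vec, old_payload = self._rows[vector_id]
            self._rows[vector_id] = (vector or old_vec, payload or old_payload)

        def get(self, vector_id):
            _, payload = self._rows[vector_id]
            return OutputData(id=vector_id, score=None, payload=payload)

        def list_cols(self):
            return ["default"]

        def delete_col(self):
            self._rows = {}

        def col_info(self):
            return {"name": "default", "count": len(self._rows)}

        def list(self, filters=None, limit=None):
            rows = [OutputData(id=vid, score=None, payload=pl) for vid, (_, pl) in self._rows.items()]
            return rows[:limit] if limit else rows

        def reset(self):
            self._rows = {}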

powermem/storage/config/base.py
@@ -0,0 +1,13 @@
+from abc import ABC
+from typing import Any, Dict
+
+from pydantic import BaseModel, model_validator
+
+
+class BaseVectorStoreConfig(BaseModel, ABC):
+    """
+    Base configuration class for all vector store providers.
+
+    This class provides common validation logic that is shared
+    across all vector store implementations.
+    """

powermem/storage/config/oceanbase.py
@@ -0,0 +1,58 @@
+from typing import Any, ClassVar, Dict, Optional
+
+from pydantic import Field, model_validator
+
+from powermem.storage.config.base import BaseVectorStoreConfig
+
+
+class OceanBaseConfig(BaseVectorStoreConfig):
+    try:
+        from pyobvector import ObVecClient
+    except ImportError:
+        raise ImportError("The 'pyobvector' library is required. Please install it using 'pip install pyobvector'.")
+    ObVecClient: ClassVar[type] = ObVecClient
+
+    collection_name: str = Field("power_mem", description="Default name for the collection")
+
+    # Connection parameters
+    host: str = Field("localhost", description="OceanBase server host")
+    port: str = Field("2881", description="OceanBase server port")
+    user: str = Field("root@test", description="OceanBase username")
+    password: str = Field("", description="OceanBase password")
+    db_name: str = Field("test", description="OceanBase database name")
+
+    # Vector index parameters
+    index_type: str = Field("HNSW", description="Type of vector index (HNSW, IVF, FLAT, etc.)")
+    vidx_metric_type: str = Field("l2", description="Distance metric (l2, inner_product, cosine)")
+    embedding_model_dims: Optional[int] = Field(None, description="Dimension of vectors")
+
+    # Advanced parameters
+    vidx_algo_params: Optional[Dict[str, Any]] = Field(None, description="Index algorithm parameters")
+    normalize: bool = Field(False, description="Whether to normalize vectors")
+    include_sparse: bool = Field(False, description="Whether to include sparse vector support")
+    hybrid_search: bool = Field(True, description="Whether to enable hybrid search")
+    auto_configure_vector_index: bool = Field(True,
+                                              description="Whether to automatically configure vector index settings")
+
+    # Fulltext search parameters
+    fulltext_parser: str = Field("ik", description="Fulltext parser type (ik, ngram, ngram2, beng, space)")
+
+    # Field names
+    primary_field: str = Field("id", description="Primary key field name")
+    vector_field: str = Field("embedding", description="Vector field name")
+    text_field: str = Field("document", description="Text field name")
+    metadata_field: str = Field("metadata", description="Metadata field name")
+    vidx_name: str = Field("vidx", description="Vector index name")
+
+    vector_weight: float = Field(0.5, description="Weight for vector search")
+    fts_weight: float = Field(0.5, description="Weight for fulltext search")
+
+    model_config = {
+        "arbitrary_types_allowed": True,
+    }
+
+
+
+class OceanBaseGraphConfig(OceanBaseConfig):
+    # Graph search parameters
+    max_hops: int = Field(3, description="Maximum number of hops for multi-hop graph search")
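For reference, a hedged construction sketch (all values are placeholders, and pyobvector must be installed for the import guard at the top of the class to pass):

    from powermem.storage.config.oceanbase import OceanBaseConfig

    cfg = OceanBaseConfig(
        host="127.0.0.1",
        port="2881",                # note: port is a string here, unlike PGVectorConfig below
        user="root@test",
        password="",
        db_name="test",
        embedding_model_dims=1536,
        vidx_metric_type="cosine",  # one of: l2, inner_product, cosine
    )
    print(cfg.hybrid_search, cfg.vector_weight, cfg.fts_weight)  # True 0.5 0.5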

powermem/storage/config/pgvector.py
@@ -0,0 +1,52 @@
+from typing import Any, Optional
+
+from pydantic import Field, model_validator
+
+from powermem.storage.config.base import BaseVectorStoreConfig
+
+
+class PGVectorConfig(BaseVectorStoreConfig):
+    dbname: str = Field("postgres", description="Default name for the database")
+    collection_name: str = Field("power_mem", description="Default name for the collection")
+    embedding_model_dims: Optional[int] = Field(1536, description="Dimensions of the embedding model")
+    user: Optional[str] = Field(None, description="Database user")
+    password: Optional[str] = Field(None, description="Database password")
+    host: Optional[str] = Field(None, description="Database host. Default is localhost")
+    port: Optional[int] = Field(None, description="Database port. Default is 1536")
+    diskann: Optional[bool] = Field(False, description="Use diskann for approximate nearest neighbors search")
+    hnsw: Optional[bool] = Field(True, description="Use hnsw for faster search")
+    minconn: Optional[int] = Field(1, description="Minimum number of connections in the pool")
+    maxconn: Optional[int] = Field(5, description="Maximum number of connections in the pool")
+    # New SSL and connection options
+    sslmode: Optional[str] = Field(None,
+                                   description="SSL mode for PostgreSQL connection (e.g., 'require', 'prefer', 'disable')")
+    connection_string: Optional[str] = Field(None,
+                                             description="PostgreSQL connection string (overrides individual connection parameters)")
+    connection_pool: Optional[Any] = Field(None,
+                                           description="psycopg connection pool object (overrides connection string and individual parameters)")
+
+    @model_validator(mode="before")
+    @classmethod
+    def check_auth_and_connection(cls, values):
+        # If connection_pool is provided, skip validation of individual connection parameters
+        if values.get("connection_pool") is not None:
+            return values
+
+        # If connection_string is provided, skip validation of individual connection parameters
+        if values.get("connection_string") is not None:
+            return values
+
+        # Otherwise, validate individual connection parameters
+        user, password = values.get("user"), values.get("password")
+        host, port = values.get("host"), values.get("port")
+
+        # Only validate if user explicitly provided values (not using defaults)
+        if user is not None or password is not None:
+            if not user or not password:
+                raise ValueError("Both 'user' and 'password' must be provided when not using connection_string.")
+
+        if host is not None or port is not None:
+            if not host or not port:
+                raise ValueError("Both 'host' and 'port' must be provided when not using connection_string.")
+
+        return values
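check_auth_and_connection only enforces pairwise completeness when credentials or host/port are supplied individually; a connection_pool or connection_string short-circuits it. A small hedged sketch of that behaviour (DSN and credentials are placeholders):

    from powermem.storage.config.pgvector import PGVectorConfig

    # A connection string bypasses the user/password and host/port checks entirely.
    ok = PGVectorConfig(connection_string="postgresql://memuser:secret@localhost:5432/postgres")

    # Supplying a user without a password fails the pairwise check; pydantic wraps the
    # ValueError raised by the validator in a ValidationError.
    try:
        PGVectorConfig(user="memuser", host="localhost", port=5432)
    except ValueError as err:
        print(err)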

powermem/storage/config/sqlite.py
@@ -0,0 +1,27 @@
+from typing import Optional
+
+from pydantic import Field
+
+from powermem.storage.config.base import BaseVectorStoreConfig
+
+
+class SQLiteConfig(BaseVectorStoreConfig):
+    """Configuration for SQLite vector store."""
+
+    database_path: str = Field(
+        default="./data/powermem_dev.db",
+        description="Path to SQLite database file"
+    )
+    collection_name: str = Field(
+        default="memories",
+        description="Name of the collection/table"
+    )
+    enable_wal: bool = Field(
+        default=True,
+        description="Enable Write-Ahead Logging for better concurrency"
+    )
+    timeout: int = Field(
+        default=30,
+        description="Connection timeout in seconds"
+    )
+
powermem/storage/configs.py
@@ -0,0 +1,159 @@
+"""
+Storage configuration management
+
+This module handles storage configuration and validation.
+"""
+
+from typing import Dict, Optional, Union
+
+from pydantic import BaseModel, Field, model_validator
+
+from powermem.integrations.llm.configs import LLMConfig
+from powermem.storage.config.oceanbase import OceanBaseGraphConfig
+
+
+class VectorStoreConfig(BaseModel):
+    provider: str = Field(
+        description="Provider of the vector store (e.g., 'oceanbase', 'pgvector')",
+        default="oceanbase",
+    )
+    config: Optional[Dict] = Field(
+        description="Configuration for the specific vector store",
+        default=None
+    )
+
+    _provider_configs: Dict[str, str] = {
+        "oceanbase": "OceanBaseConfig",
+        "pgvector": "PGVectorConfig",
+        "sqlite": "SQLiteConfig",
+    }
+
+    @model_validator(mode="after")
+    def validate_config(self) -> "VectorStoreConfig":
+        """
+        Validate the configuration without converting to provider-specific config class.
+        The conversion is handled by VectorStoreFactory.create() when needed.
+        """
+        provider = self.provider
+        config = self.config
+
+        if provider is not None and provider == "postgres":
+            provider = "pgvector"
+
+        if provider not in self._provider_configs:
+            raise ValueError(f"Unsupported vector store provider: {provider}")
+
+        if config is None:
+            self.config = {}
+            return self
+
+        if not isinstance(config, dict):
+            raise ValueError(f"Config must be a dictionary, got {type(config)}")
+
+        # Handle connection_args for backward compatibility
+        # If connection_args exists, flatten it into the main config
+        if "connection_args" in config:
+            connection_args = config.pop("connection_args")
+            if isinstance(connection_args, dict):
+                # Merge connection_args into config (connection_args values take precedence)
+                for key, value in connection_args.items():
+                    if key not in config:
+                        # Convert port to string if it's an int (for OceanBase compatibility)
+                        if key == "port" and isinstance(value, int):
+                            config[key] = str(value)
+                        else:
+                            config[key] = value
+            self.config = config
+
+        # Convert port to string if it's an int (for OceanBase compatibility)
+        # This handles both direct port field and port from connection_args
+        if "port" in config and isinstance(config["port"], int):
+            config["port"] = str(config["port"])
+            self.config = config
+
+        # Validate config by attempting to create provider-specific config instance
+        # This ensures the config has valid fields, but we don't store the converted object
+        module = __import__(
+            f"powermem.storage.config.{provider}",
+            fromlist=[self._provider_configs[provider]],
+        )
+        config_class = getattr(module, self._provider_configs[provider])
+
+        # Add default path if needed
+        if "path" not in config and "path" in config_class.__annotations__:
+            config["path"] = f"/tmp/{provider}"
+            self.config = config
+
+        # Validate by creating instance (throws error if invalid)
+        try:
+            config_class(**config)
+        except Exception as e:
+            raise ValueError(f"Invalid configuration for {provider}: {e}")
+
+        # Keep config as dict, don't convert to config_class instance
+        return self
+
+class GraphStoreConfig(BaseModel):
+    enabled: bool = Field(
+        description="Whether to enable graph store",
+        default=False,
+    )
+    provider: str = Field(
+        description="Provider of the data store (e.g., 'oceanbase')",
+        default="oceanbase",
+    )
+    config: Optional[Union[Dict, OceanBaseGraphConfig]] = Field(
+        description="Configuration for the specific data store",
+        default=None
+    )
+    llm: Optional[LLMConfig] = Field(
+        description="LLM configuration for querying the graph store",
+        default=None
+    )
+    custom_prompt: Optional[str] = Field(
+        description="Custom prompt to fetch entities from the given text",
+        default=None
+    )
+    custom_extract_relations_prompt: Optional[str] = Field(
+        description="Custom prompt for extracting relations from text",
+        default=None
+    )
+    custom_update_graph_prompt: Optional[str] = Field(
+        description="Custom prompt for updating graph memories",
+        default=None
+    )
+    custom_delete_relations_prompt: Optional[str] = Field(
+        description="Custom prompt for deleting relations",
+        default=None
+    )
+
+    @model_validator(mode="after")
+    def validate_config(self) -> "GraphStoreConfig":
+        """
+        Validate the configuration without converting to provider-specific config class.
+        Keep config as dict for consistency.
+        """
+        if self.config is None:
+            self.config = {}
+            return self
+
+        # If config is a Pydantic BaseModel instance, convert it to dict
+        if isinstance(self.config, BaseModel):
+            self.config = self.config.model_dump()
+
+        if not isinstance(self.config, dict):
+            raise ValueError(f"Config must be a dictionary or BaseModel instance, got {type(self.config)}")
+
+
+        # Validate config based on provider
+        provider = self.provider
+        if provider == "oceanbase":
+            try:
+                OceanBaseGraphConfig(**self.config)
+            except Exception as e:
+                raise ValueError(f"Invalid configuration for {provider}: {e}")
+        else:
+            raise ValueError(f"Unsupported graph store provider: {provider}")
+
+        # Keep config as dict, don't convert
+        return self
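The VectorStoreConfig validator mainly normalizes legacy input: it flattens a nested connection_args dict, coerces an integer port to a string, and validates against the provider's config class while keeping self.config a plain dict. A hedged sketch of that behaviour (host and credentials are placeholders):

    from powermem.storage.configs import VectorStoreConfig

    vs = VectorStoreConfig(
        provider="pgvector",
        config={
            "connection_args": {"host": "localhost", "port": 5432},  # legacy nested form
            "user": "memuser",
            "password": "secret",
        },
    )
    # connection_args has been flattened and the integer port stringified, but the
    # value is still a dict rather than a PGVectorConfig instance.
    print(vs.config)
    # {'user': 'memuser', 'password': 'secret', 'host': 'localhost', 'port': '5432'}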

powermem/storage/factory.py
@@ -0,0 +1,59 @@
+"""
+Storage factory for creating storage instances
+
+This module provides a factory for creating different storage backends.
+"""
+
+import importlib
+
+
+def load_class(class_type):
+    module_path, class_name = class_type.rsplit(".", 1)
+    module = importlib.import_module(module_path)
+    return getattr(module, class_name)
+
+class VectorStoreFactory:
+    provider_to_class = {
+        "oceanbase": "powermem.storage.oceanbase.oceanbase.OceanBaseVectorStore",
+        "sqlite": "powermem.storage.sqlite.sqlite_vector_store.SQLiteVectorStore",
+        "pgvector": "powermem.storage.pgvector.pgvector.PGVectorStore",
+        "postgres": "powermem.storage.pgvector.pgvector.PGVectorStore",  # Alias for pgvector
+    }
+
+    @classmethod
+    def create(cls, provider_name, config):
+        class_type = cls.provider_to_class.get(provider_name)
+        if class_type:
+            if not isinstance(config, dict):
+                config = config.model_dump()
+            vector_store_instance = load_class(class_type)
+            return vector_store_instance(**config)
+        else:
+            raise ValueError(f"Unsupported VectorStore provider: {provider_name}")
+
+    @classmethod
+    def reset(cls, instance):
+        instance.reset()
+        return instance
+
+
+class GraphStoreFactory:
+    """
+    Factory for creating MemoryGraph instances for different graph store providers.
+    Usage: GraphStoreFactory.create(provider_name, config)
+    """
+
+    provider_to_class = {
+        "oceanbase": "powermem.storage.oceanbase.oceanbase_graph.MemoryGraph",
+        "default": "powermem.storage.oceanbase.oceanbase_graph.MemoryGraph",
+    }
+
+    @classmethod
+    def create(cls, provider_name, config):
+        class_type = cls.provider_to_class.get(provider_name, cls.provider_to_class["default"])
+        try:
+            GraphClass = load_class(class_type)
+        except (ImportError, AttributeError) as e:
+            raise ImportError(f"Could not import MemoryGraph for provider '{provider_name}': {e}")
+        return GraphClass(config)
+
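Putting the pieces together, a hedged end-to-end sketch: the database path is a placeholder, and it assumes the backend class accepts its config fields as keyword arguments, which is what the factory's **config expansion requires.

    from powermem.storage.configs import VectorStoreConfig
    from powermem.storage.factory import VectorStoreFactory

    vs_config = VectorStoreConfig(provider="sqlite", config={"database_path": "./memories.db"})
    store = VectorStoreFactory.create(vs_config.provider, vs_config.config)
    store.reset()  # VectorStoreBase API: drop the collection and recreate it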