naas-abi 1.0.8__tar.gz → 1.0.10__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66)
  1. {naas_abi-1.0.8 → naas_abi-1.0.10}/PKG-INFO +2 -2
  2. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/__init__.py +16 -1
  3. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/agents/AbiAgent.py +27 -18
  4. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/agents/EntitytoSPARQLAgent.py +22 -17
  5. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/agents/KnowledgeGraphBuilderAgent.py +17 -15
  6. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/agents/OntologyEngineerAgent.py +4 -15
  7. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/apps/oxigraph_admin/main.py +7 -4
  8. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/apps/sparql_terminal/main.py +7 -4
  9. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/pipelines/AIAgentOntologyGenerationPipeline_test.py +4 -3
  10. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/pipelines/AddIndividualPipeline_test.py +10 -7
  11. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/pipelines/InsertDataSPARQLPipeline.py +6 -4
  12. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/pipelines/InsertDataSPARQLPipeline_test.py +9 -6
  13. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/pipelines/MergeIndividualsPipeline.py +6 -2
  14. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/pipelines/MergeIndividualsPipeline_test.py +10 -9
  15. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/pipelines/RemoveIndividualPipeline.py +6 -2
  16. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/pipelines/RemoveIndividualPipeline_test.py +8 -8
  17. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/pipelines/UpdateDataPropertyPipeline.py +6 -2
  18. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/workflows/GetObjectPropertiesFromClassWorkflow_test.py +7 -2
  19. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/workflows/GetSubjectGraphWorkflow_test.py +8 -3
  20. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/workflows/SearchIndividualWorkflow_test.py +6 -6
  21. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi.egg-info/PKG-INFO +2 -2
  22. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi.egg-info/SOURCES.txt +0 -12
  23. {naas_abi-1.0.8 → naas_abi-1.0.10}/pyproject.toml +8 -2
  24. naas_abi-1.0.8/naas_abi/apps/terminal_agent/main.py +0 -553
  25. naas_abi-1.0.8/naas_abi/apps/terminal_agent/terminal_style.py +0 -175
  26. naas_abi-1.0.8/naas_abi/mappings.py +0 -83
  27. naas_abi-1.0.8/naas_abi/triggers.py +0 -131
  28. naas_abi-1.0.8/naas_abi/workflows/ConvertOntologyGraphToYamlWorkflow.py +0 -210
  29. naas_abi-1.0.8/naas_abi/workflows/ConvertOntologyGraphToYamlWorkflow_test.py +0 -78
  30. naas_abi-1.0.8/naas_abi/workflows/CreateClassOntologyYamlWorkflow.py +0 -214
  31. naas_abi-1.0.8/naas_abi/workflows/CreateClassOntologyYamlWorkflow_test.py +0 -65
  32. naas_abi-1.0.8/naas_abi/workflows/CreateIndividualOntologyYamlWorkflow.py +0 -183
  33. naas_abi-1.0.8/naas_abi/workflows/CreateIndividualOntologyYamlWorkflow_test.py +0 -86
  34. naas_abi-1.0.8/naas_abi/workflows/ExportGraphInstancesToExcelWorkflow.py +0 -450
  35. naas_abi-1.0.8/naas_abi/workflows/ExportGraphInstancesToExcelWorkflow_test.py +0 -33
  36. {naas_abi-1.0.8 → naas_abi-1.0.10}/README.md +0 -0
  37. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/agents/AbiAgent_test.py +0 -0
  38. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/agents/EntitytoSPARQLAgent_test.py +0 -0
  39. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/agents/KnowledgeGraphBuilderAgent_test.py +0 -0
  40. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/agents/OntologyEngineerAgent_test.py +0 -0
  41. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/apps/oxigraph_admin/terminal_style.py +0 -0
  42. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/apps/sparql_terminal/terminal_style.py +0 -0
  43. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/cli.py +0 -0
  44. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/models/airgap_gemma.py +0 -0
  45. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/models/airgap_qwen.py +0 -0
  46. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/models/default.py +0 -0
  47. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/pipelines/AIAgentOntologyGenerationPipeline.py +0 -0
  48. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/pipelines/AddIndividualPipeline.py +0 -0
  49. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/pipelines/UpdateCommercialOrganizationPipeline.py +0 -0
  50. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/pipelines/UpdateLegalNamePipeline.py +0 -0
  51. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/pipelines/UpdateLinkedInPagePipeline.py +0 -0
  52. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/pipelines/UpdatePersonPipeline.py +0 -0
  53. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/pipelines/UpdateSkillPipeline.py +0 -0
  54. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/pipelines/UpdateTickerPipeline.py +0 -0
  55. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/pipelines/UpdateWebsitePipeline.py +0 -0
  56. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/workflows/AgentRecommendationWorkflow.py +0 -0
  57. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/workflows/AgentRecommendationWorkflow_test.py +0 -0
  58. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/workflows/ArtificialAnalysisWorkflow.py +0 -0
  59. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/workflows/ArtificialAnalysisWorkflow_test.py +0 -0
  60. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/workflows/GetObjectPropertiesFromClassWorkflow.py +0 -0
  61. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/workflows/GetSubjectGraphWorkflow.py +0 -0
  62. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi/workflows/SearchIndividualWorkflow.py +0 -0
  63. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi.egg-info/dependency_links.txt +0 -0
  64. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi.egg-info/requires.txt +0 -0
  65. {naas_abi-1.0.8 → naas_abi-1.0.10}/naas_abi.egg-info/top_level.txt +0 -0
  66. {naas_abi-1.0.8 → naas_abi-1.0.10}/setup.cfg +0 -0
@@ -1,7 +1,7 @@
 Metadata-Version: 2.4
 Name: naas-abi
-Version: 1.0.8
-Summary: Add your description here
+Version: 1.0.10
+Summary: Multi-agent orchestrator and knowledge graph management system for AI orchestration, providing comprehensive coordination of specialized AI agents and semantic data management capabilities
 Author-email: Maxime Jublou <maxime@naas.ai>, Florent Ravenel <florent@naas.ai>, Jeremy Ravenel <jeremy@naas.ai>
 Requires-Python: <4,>=3.10
 Description-Content-Type: text/markdown
@@ -14,13 +14,28 @@ class ABIModule(BaseModule):
     dependencies: ModuleDependencies = ModuleDependencies(
         modules=[
             "naas_abi_marketplace.ai.chatgpt",
-            # "naas_abi_core.modules.templatablesparqlquery",
+            "naas_abi_core.modules.templatablesparqlquery",
+            "naas_abi_marketplace.ai.qwen#soft",
+            "naas_abi_marketplace.applications.naas#soft",
         ],
         services=[Secret, TripleStoreService, ObjectStorageService],
     )

     class Configuration(ModuleConfiguration):
+        """
+        Configuration example:
+
+        module: naas_abi
+        enabled: true
+        config:
+          datastore_path: "abi"
+          workspace_id: "{{ secret.WORKSPACE_ID }}"
+          storage_name: "{{ secret.STORAGE_NAME }}"
+        """
+
         datastore_path: str = "abi"
+        workspace_id: str | None = None
+        storage_name: str | None = None

     # def on_initialized(self):
     #     if (
@@ -1,8 +1,6 @@
 from typing import Optional

 from langchain_core.tools import tool
-from naas_abi import ABIModule
-from naas_abi.models.default import get_model
 from naas_abi_core.services.agent.IntentAgent import (
     AgentConfiguration,
     AgentSharedState,
@@ -132,6 +130,8 @@ def create_agent(
     agent_configuration: Optional[AgentConfiguration] = None,
 ) -> IntentAgent:
     # Define model based on AI_MODE
+    from naas_abi.models.default import get_model
+
     model = get_model()

     # Define tools
@@ -149,19 +149,22 @@ You can browse the data and run queries there."""

     tools.append(open_knowledge_graph_explorer)

-    # templatable_sparql_query_module = ABIModule.get_instance().engine.modules[
-    #     "naas_abi_core.modules.templatablesparqlquery"
-    # ]
+    from naas_abi_core.modules.templatablesparqlquery import (
+        ABIModule as TemplatableSparqlQueryABIModule,
+    )

-    # agent_recommendation_tools = [
-    #     "find_business_proposal_agents",
-    #     "find_coding_agents",
-    #     "find_math_agents",
-    #     "find_best_value_agents",
-    #     "find_fastest_agents",
-    #     "find_cheapest_agents",
-    # ]
-    # tools.extend(templatable_sparql_query_module.get_tools(agent_recommendation_tools))
+    agent_recommendation_tools = [
+        "find_business_proposal_agents",
+        "find_coding_agents",
+        "find_math_agents",
+        "find_best_value_agents",
+        "find_fastest_agents",
+        "find_cheapest_agents",
+    ]
+    sparql_query_tools_list = TemplatableSparqlQueryABIModule.get_instance().get_tools(
+        agent_recommendation_tools
+    )
+    tools += sparql_query_tools_list

     shared_state = agent_shared_state or AgentSharedState(
         thread_id="0", supervisor_agent=NAME
@@ -173,16 +176,22 @@ You can browse the data and run queries there."""

     # Define agents - all agents are now loaded automatically during module loading
     agents: list = []
+    from naas_abi import ABIModule
+    from naas_abi_core import logger

     modules = ABIModule.get_instance().engine.modules.values()
-    for module in modules:
+    for module in sorted(modules, key=lambda x: x.__class__.__module__):
+        logger.info(f"🔍 Checking module: {module.__class__.__module__}")
         if hasattr(module, "agents"):
             for agent in module.agents:
                 if (
                     agent is not None
-                    and agent != AbiAgent
-                    and (hasattr(agent, "NAME") and not agent.NAME.endswith("Research"))
-                ):  # exclude ChatGPT and Perplexity Research Agents NOT working properly with supervisor
+                    and hasattr(agent.New(), "name")
+                    and not agent.New().name.endswith("Research")
+                ):
+                    logger.info(
+                        f"🤖 Adding agent: {agent.New().name} as sub-agent of {NAME}"
+                    )
                     new_agent = agent.New().duplicate(
                         agent_queue, agent_shared_state=shared_state
                     )
@@ -5,15 +5,12 @@ from typing import Any, Callable, Optional, Union
 from langchain_core.language_models import BaseChatModel
 from langchain_core.messages import AIMessage, BaseMessage, SystemMessage
 from langchain_core.tools import BaseTool, Tool
-from langchain_openai import ChatOpenAI  # noqa: F401
 from langgraph.checkpoint.base import BaseCheckpointSaver
 from langgraph.checkpoint.memory import MemorySaver
 from langgraph.graph import START, StateGraph
 from langgraph.graph.message import MessagesState
 from langgraph.types import Command
 from naas_abi import ABIModule
-
-# from naas_abi import secret
 from naas_abi_core.engine.EngineProxy import ServicesProxy
 from naas_abi_core.models.Model import ChatModel
 from naas_abi_core.services.agent.Agent import (
@@ -21,6 +18,7 @@ from naas_abi_core.services.agent.Agent import (
     AgentConfiguration,
     AgentSharedState,
 )
+from naas_abi_core.utils.StorageUtils import StorageUtils

 MODULE: ABIModule = ABIModule.get_instance()
 SERVICES: ServicesProxy = MODULE.engine.services
@@ -146,15 +144,10 @@ def create_agent(
     agent_shared_state: Optional[AgentSharedState] = None,
     agent_configuration: Optional[AgentConfiguration] = None,
 ) -> Optional[Agent]:
-    # Set model based on AI_MODE
-    ai_mode = MODULE.configuration.global_config.ai_mode
-
-    if ai_mode == "airgap":
-        from naas_abi.models.default import get_model
+    # Set model
+    from naas_abi.models.default import get_model

-        model = get_model()
-    else:
-        from naas_abi_marketplace.ai.chatgpt.models.o3_mini import model
+    model = get_model()

     if agent_configuration is None:
         agent_configuration = AgentConfiguration(system_prompt=SYSTEM_PROMPT)
@@ -191,6 +184,15 @@ class EntityExtractionState(MessagesState):


 class EntitytoSPARQLAgent(Agent):
+    """
+    Agent for entity to SPARQL conversion.
+
+    Attributes:
+        __storage_utils (StorageUtils): Storage utilities for saving data.
+    """
+
+    __storage_utils: StorageUtils
+
     def __init__(
         self,
         name: str,
@@ -216,6 +218,9 @@ class EntitytoSPARQLAgent(Agent):
         )

         self.datastore_path = f"datastore/ontology/entities_to_sparql/{datetime.now().strftime('%Y%m%d%H%M%S')}"
+        self.__storage_utils = StorageUtils(
+            ABIModule.get_instance().engine.services.object_storage
+        )

     def entity_extract(self, state: EntityExtractionState) -> Command:
         """
@@ -756,7 +761,6 @@ If you find you missed entities, you can add it again in the message.
         """
         import uuid

-        from naas_abi_core.utils.Storage import save_json, save_text
         from naas_abi.workflows.GetObjectPropertiesFromClassWorkflow import (
             GetObjectPropertiesFromClassWorkflow,
             GetObjectPropertiesFromClassWorkflowConfiguration,
@@ -796,14 +800,16 @@ If you find you missed entities, you can add it again in the message.

         # Save data to storage
         last_last_message_content_str = str(state["messages"][-2].content)
-        save_text(
+        self.__storage_utils.save_text(
             last_last_message_content_str,
             self.datastore_path,
             "init_text.txt",
             copy=False,
         )
-        save_json(entities, self.datastore_path, "entities.json", copy=False)
-        save_json(
+        self.__storage_utils.save_json(
+            entities, self.datastore_path, "entities.json", copy=False
+        )
+        self.__storage_utils.save_json(
             object_properties, self.datastore_path, "object_properties.json", copy=False
         )

@@ -828,7 +834,6 @@ If you find you missed entities, you can add it again in the message.
         Returns:
             Command: Command to end the workflow with the generated SPARQL statement
         """
-        from naas_abi_core.utils.Storage import save_text

         # Create system message for SPARQL generation
         system_prompt = """# ROLE:
@@ -910,7 +915,7 @@ INSERT DATA {

         # Save SPARQL statement to storage
         response_content_str = str(response.content) if response.content else ""
-        save_text(
+        self.__storage_utils.save_text(
             response_content_str, self.datastore_path, "insert_data.sparql", copy=False
         )

@@ -1,16 +1,11 @@
-# from naas_abi import secret
 from typing import Optional

-from langchain_openai import ChatOpenAI  # noqa: F401
-from naas_abi import ABIModule
 from naas_abi_core.services.agent.Agent import (
     Agent,
     AgentConfiguration,
     AgentSharedState,
 )

-MODULE: ABIModule = ABIModule.get_instance()
-
 NAME: str = "Knowledge_Graph_Builder"
 AVATAR_URL: str = "https://upload.wikimedia.org/wikipedia/commons/thumb/f/f3/Rdf_logo.svg/1200px-Rdf_logo.svg.png"
 DESCRIPTION: str = (
@@ -148,13 +143,9 @@ def create_agent(

     model = get_model()

-    # Use provided configuration or create default one
-    if agent_configuration is None:
-        agent_configuration = AgentConfiguration(system_prompt=SYSTEM_PROMPT)
+    from naas_abi import ABIModule

-    # Use provided shared state or create new one
-    if agent_shared_state is None:
-        agent_shared_state = AgentSharedState()
+    MODULE: ABIModule = ABIModule.get_instance()

     # Init tools
     tools: list = []
@@ -290,9 +281,9 @@ def create_agent(
     tools += Pipeline(Configuration(MODULE.engine.services.triple_store)).as_tools()

     # Add search organizations tools
-    templatable_sparql_query_module = ABIModule.get_instance().engine.modules[
-        "naas_abi_core.modules.templatablesparqlquery"
-    ]
+    from naas_abi_core.modules.templatablesparqlquery import (
+        ABIModule as TemplatableSparqlQueryABIModule,
+    )

     ontology_tools: list = [
         "search_class",
@@ -304,7 +295,18 @@ def create_agent(
         "merge_individuals",
         "remove_individuals",
     ]
-    tools.extend(templatable_sparql_query_module.get_tools(ontology_tools))
+    sparql_query_tools_list = TemplatableSparqlQueryABIModule.get_instance().get_tools(
+        ontology_tools
+    )
+    tools += sparql_query_tools_list
+
+    # Use provided configuration or create default one
+    if agent_configuration is None:
+        agent_configuration = AgentConfiguration(system_prompt=SYSTEM_PROMPT)
+
+    # Use provided shared state or create new one
+    if agent_shared_state is None:
+        agent_shared_state = AgentSharedState()

     return KnowledgeGraphBuilderAgent(
         name=NAME,
@@ -1,18 +1,12 @@
 from typing import Optional

-from langchain_openai import ChatOpenAI  # noqa: F401
-from naas_abi import ABIModule
-
-# from naas_abi import secret
 from naas_abi_core.services.agent.Agent import (
     Agent,
     AgentConfiguration,
     AgentSharedState,
 )

-MODULE: ABIModule = ABIModule.get_instance()
-
-NAME = "Ontology_Engineer_Agent"
+NAME = "Ontology_Engineer"
 DESCRIPTION = "A agent that helps users understand BFO Ontology and transform text into ontologies."
 SYSTEM_PROMPT = """
 # ROLE:
@@ -69,15 +63,10 @@ def create_agent(
     agent_shared_state: Optional[AgentSharedState] = None,
     agent_configuration: Optional[AgentConfiguration] = None,
 ) -> Optional[Agent]:
-    # Set model based on AI_MODE
-    ai_mode = MODULE.configuration.global_config.ai_mode
-
-    if ai_mode == "airgap":
-        from naas_abi.models.default import get_model
+    # Set model
+    from naas_abi.models.default import get_model

-        model = get_model()
-    else:
-        from naas_abi_marketplace.ai.chatgpt.models.o3_mini import model
+    model = get_model()

     # Use provided configuration or create default one
     if agent_configuration is None:
@@ -6,7 +6,6 @@ Provides terminal-based management and monitoring for Oxigraph triple store
 import subprocess

 import requests
-from naas_abi import services
 from naas_abi.apps.oxigraph_admin.terminal_style import (
     clear_screen,
     get_user_input,
@@ -17,6 +16,7 @@ from naas_abi.apps.oxigraph_admin.terminal_style import (
     print_success_message,
     print_welcome_message,
 )
+from naas_abi_core.services.triple_store.TripleStorePorts import ITripleStoreService
 from rich.console import Console
 from rich.table import Table

@@ -24,9 +24,9 @@ console = Console()


 class OxigraphAdmin:
-    def __init__(self):
+    def __init__(self, triple_store_service: ITripleStoreService):
         self.oxigraph_url = "http://localhost:7878"
-        self.triple_store_service = services.triple_store_service
+        self.triple_store_service = triple_store_service
         self.query_templates = self._init_query_templates()

     def _init_query_templates(self):
@@ -384,7 +384,10 @@ SELECT ?entity ?label ?type WHERE {


 def main():
-    admin = OxigraphAdmin()
+    from naas_abi import ABIModule
+
+    triple_store_service = ABIModule.get_instance().engine.services.triple_store
+    admin = OxigraphAdmin(triple_store_service)
     admin.run()


@@ -1,4 +1,3 @@
-from naas_abi import services
 from naas_abi.apps.sparql_terminal.terminal_style import (
     clear_screen,
     get_user_input,
@@ -9,11 +8,12 @@ from naas_abi.apps.sparql_terminal.terminal_style import (
     print_system_message,
     print_welcome_message,
 )
+from naas_abi_core.services.triple_store.TripleStorePorts import ITripleStoreService


 class SPARQLTerminal:
-    def __init__(self):
-        self.triple_store_service = services.triple_store_service
+    def __init__(self, triple_store_service: ITripleStoreService):
+        self.triple_store_service = triple_store_service

     def execute_query(self, query):
         """Execute a SPARQL query and return the results"""
@@ -60,7 +60,10 @@ class SPARQLTerminal:


 def main():
-    terminal = SPARQLTerminal()
+    from naas_abi import ABIModule
+
+    triple_store_service = ABIModule.get_instance().engine.services.triple_store
+    terminal = SPARQLTerminal(triple_store_service)
     terminal.run()


@@ -1,19 +1,20 @@
 import json

 import pytest
+from naas_abi import ABIModule
 from naas_abi.pipelines.AIAgentOntologyGenerationPipeline import (
     AIAgentOntologyGenerationConfiguration,
     AIAgentOntologyGenerationParameters,
     AIAgentOntologyGenerationPipeline,
 )

+triple_store_service = ABIModule.get_instance().engine.services.triple_store
+

 @pytest.fixture
 def pipeline() -> AIAgentOntologyGenerationPipeline:
-    from naas_abi import services
-
     pipeline_configuration = AIAgentOntologyGenerationConfiguration(
-        triple_store=services.triple_store,
+        triple_store=triple_store_service,
         datastore_path="storage/datastore/test/ai_agent_ontology_generation",
         source_datastore_path="storage/datastore/test/artificial_analysis_workflow",
     )
@@ -1,5 +1,5 @@
 import pytest
-from naas_abi import services
+from naas_abi import ABIModule
 from naas_abi.pipelines.AddIndividualPipeline import (
     AddIndividualPipeline,
     AddIndividualPipelineConfiguration,
@@ -8,16 +8,20 @@ from naas_abi.pipelines.AddIndividualPipeline import (
 from naas_abi.workflows.SearchIndividualWorkflow import (
     SearchIndividualWorkflowConfiguration,
 )
+from naas_abi_core.utils.SPARQL import SPARQLUtils
+
+triple_store_service = ABIModule.get_instance().engine.services.triple_store
+sparql_utils = SPARQLUtils(triple_store_service)


 @pytest.fixture
 def pipeline() -> AddIndividualPipeline:
     search_individual_workflow_configuration = SearchIndividualWorkflowConfiguration(
-        triple_store=services.triple_store_service
+        triple_store=triple_store_service
     )
     pipeline = AddIndividualPipeline(
         configuration=AddIndividualPipelineConfiguration(
-            triple_store=services.triple_store_service,
+            triple_store=triple_store_service,
             search_individual_configuration=search_individual_workflow_configuration,
         )
     )
@@ -25,7 +29,6 @@ def pipeline() -> AddIndividualPipeline:


 def test_add_individual_pipeline(pipeline: AddIndividualPipeline):
-    from naas_abi_core.utils.SPARQL import results_to_list
     from rdflib import OWL, RDF, RDFS, Literal, URIRef

     label = "Naas.ai"
@@ -45,7 +48,7 @@ def test_add_individual_pipeline(pipeline: AddIndividualPipeline):
     )

     # Remove graph
-    services.triple_store_service.remove(graph)
+    triple_store_service.remove(graph)

     # Check triples are removed from the triple store
     sparql_query = """
@@ -61,6 +64,6 @@ def test_add_individual_pipeline(pipeline: AddIndividualPipeline):
     }
     """
     sparql_query = sparql_query.replace("{{individual_uri}}", str(individual_uri))
-    results = services.triple_store_service.query(sparql_query)
-    results_list = results_to_list(results)
+    results = triple_store_service.query(sparql_query)
+    results_list = sparql_utils.results_to_list(results)
     assert results_list is None, results_list
@@ -163,7 +163,11 @@ class InsertDataSPARQLPipeline(Pipeline):


 if __name__ == "__main__":
-    from naas_abi import services
+    from naas_abi_core.engine.Engine import Engine
+
+    engine = Engine()
+    engine.load(module_names=["naas_abi"])
+    triple_store_service = engine.services.triple_store

     sparql_statement = """
     PREFIX abi: <http://ontology.naas.ai/abi/>
@@ -184,9 +188,7 @@ if __name__ == "__main__":
     }
     """
     pipeline = InsertDataSPARQLPipeline(
-        InsertDataSPARQLPipelineConfiguration(
-            triple_store=services.triple_store_service
-        )
+        InsertDataSPARQLPipelineConfiguration(triple_store=triple_store_service)
     )
     # result = pipeline.get_sparql_from_text(InsertDataSPARQLPipelineParameters(sparql_statement=sparql_statement))
     # logger.info(result)
@@ -1,24 +1,27 @@
 import pytest
-from naas_abi import services
+from naas_abi import ABIModule
 from naas_abi.pipelines.InsertDataSPARQLPipeline import (
     InsertDataSPARQLPipeline,
     InsertDataSPARQLPipelineConfiguration,
     InsertDataSPARQLPipelineParameters,
 )
+from naas_abi_core.utils.SPARQL import SPARQLUtils
+
+triple_store_service = ABIModule.get_instance().engine.services.triple_store
+sparql_utils = SPARQLUtils(triple_store_service)


 @pytest.fixture
 def pipeline() -> InsertDataSPARQLPipeline:
     pipeline = InsertDataSPARQLPipeline(
         configuration=InsertDataSPARQLPipelineConfiguration(
-            triple_store=services.triple_store_service
+            triple_store=triple_store_service
         )
     )
     return pipeline


 def test_insert_data_sparql_pipeline(pipeline: InsertDataSPARQLPipeline):
-    from naas_abi_core.utils.SPARQL import results_to_list
     from rdflib import Literal, Namespace, URIRef

     sparql_statement = """
@@ -54,7 +57,7 @@ def test_insert_data_sparql_pipeline(pipeline: InsertDataSPARQLPipeline):
     ), graph.serialize(format="turtle")

     # Remove graph
-    services.triple_store_service.remove(graph)
+    triple_store_service.remove(graph)

     # Check triples are removed from the triple store
     sparql_query = """
@@ -69,8 +72,8 @@ def test_insert_data_sparql_pipeline(pipeline: InsertDataSPARQLPipeline):
         FILTER(?s = abi:john || ?s = abi:jane)
     }
     """
-    results = services.triple_store_service.query(sparql_query)
-    results_list = results_to_list(results)
+    results = triple_store_service.query(sparql_query)
+    results_list = sparql_utils.results_to_list(results)
     assert results_list is None, results_list


@@ -226,13 +226,17 @@ class MergeIndividualsPipeline(Pipeline):


 if __name__ == "__main__":
-    from naas_abi import services
+    from naas_abi_core.engine.Engine import Engine
+
+    engine = Engine()
+    engine.load(module_names=["naas_abi"])
+    triple_store_service = ABIModule.get_instance().engine.services.triple_store

     uri_to_keep = "http://ontology.naas.ai/abi/69a231b9-e87a-4503-8f80-a530ed8eaa4b"  # URI that will remain
     uri_to_merge = "http://ontology.naas.ai/abi/4f92bbdd-e710-4e43-9480-9b6cd6d9af80"  # URI that will be merged and removed

     configuration = MergeIndividualsPipelineConfiguration(
-        triple_store=services.triple_store_service
+        triple_store=triple_store_service
     )

     pipeline = MergeIndividualsPipeline(configuration)
@@ -1,18 +1,20 @@
 import pytest
-from naas_abi import services
+from naas_abi import ABIModule
 from naas_abi.pipelines.MergeIndividualsPipeline import (
     MergeIndividualsPipeline,
     MergeIndividualsPipelineConfiguration,
     MergeIndividualsPipelineParameters,
 )
+from naas_abi_core.utils.SPARQL import SPARQLUtils
+
+triple_store_service = ABIModule.get_instance().engine.services.triple_store
+sparql_utils = SPARQLUtils(triple_store_service)


 @pytest.fixture
 def pipeline() -> MergeIndividualsPipeline:
     return MergeIndividualsPipeline(
-        MergeIndividualsPipelineConfiguration(
-            triple_store=services.triple_store_service
-        )
+        MergeIndividualsPipelineConfiguration(triple_store=triple_store_service)
     )


@@ -20,8 +22,7 @@ def test_merge_individuals_pipeline(pipeline: MergeIndividualsPipeline):
     import time
     from uuid import uuid4

-    from naas_abi_core.utils.SPARQL import get_subject_graph
-    from naas_abi_core import logger, services
+    from naas_abi_core import logger
     from rdflib import OWL, RDF, RDFS, SKOS, Graph, Literal, Namespace, URIRef

     ABI = Namespace("http://ontology.naas.ai/abi/")
@@ -68,7 +69,7 @@ def test_merge_individuals_pipeline(pipeline: MergeIndividualsPipeline):
     )

     logger.info("Inserting triples to triplestore")
-    services.triple_store_service.insert(graph)
+    triple_store_service.insert(graph)
     time.sleep(3)

     # Run pipeline to merge individuals
@@ -90,9 +91,9 @@ def test_merge_individuals_pipeline(pipeline: MergeIndividualsPipeline):
     )

     # Check if uri_to_merge is removed in triplestore
-    graph = get_subject_graph(str(uri_to_merge), 1)
+    graph = sparql_utils.get_subject_graph(str(uri_to_merge), 1)
     assert len(graph) == 0, graph.serialize(format="turtle")

     # Remove graphs
-    services.triple_store_service.remove(graph_merged)
+    triple_store_service.remove(graph_merged)
     time.sleep(3)
@@ -147,7 +147,11 @@ class RemoveIndividualPipeline(Pipeline):


 if __name__ == "__main__":
-    from naas_abi import services
+    from naas_abi_core.engine.Engine import Engine
+
+    engine = Engine()
+    engine.load(module_names=["naas_abi"])
+    triple_store_service = ABIModule.get_instance().engine.services.triple_store

     uris_to_remove = [
         "http://ontology.naas.ai/abi/example-uri-1",
@@ -155,7 +159,7 @@ if __name__ == "__main__":
     ]

     configuration = RemoveIndividualPipelineConfiguration(
-        triple_store=services.triple_store_service
+        triple_store=triple_store_service
     )

     pipeline = RemoveIndividualPipeline(configuration)