naas-abi 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62)
  1. naas_abi/__init__.py +35 -0
  2. naas_abi/agents/AbiAgent.py +442 -0
  3. naas_abi/agents/AbiAgent_test.py +157 -0
  4. naas_abi/agents/EntitytoSPARQLAgent.py +952 -0
  5. naas_abi/agents/EntitytoSPARQLAgent_test.py +66 -0
  6. naas_abi/agents/KnowledgeGraphBuilderAgent.py +321 -0
  7. naas_abi/agents/KnowledgeGraphBuilderAgent_test.py +86 -0
  8. naas_abi/agents/OntologyEngineerAgent.py +115 -0
  9. naas_abi/agents/OntologyEngineerAgent_test.py +42 -0
  10. naas_abi/apps/oxigraph_admin/main.py +392 -0
  11. naas_abi/apps/oxigraph_admin/terminal_style.py +151 -0
  12. naas_abi/apps/sparql_terminal/main.py +68 -0
  13. naas_abi/apps/sparql_terminal/terminal_style.py +236 -0
  14. naas_abi/apps/terminal_agent/main.py +553 -0
  15. naas_abi/apps/terminal_agent/terminal_style.py +175 -0
  16. naas_abi/cli.py +714 -0
  17. naas_abi/mappings.py +83 -0
  18. naas_abi/models/airgap_gemma.py +220 -0
  19. naas_abi/models/airgap_qwen.py +24 -0
  20. naas_abi/models/default.py +23 -0
  21. naas_abi/models/gpt_4_1.py +25 -0
  22. naas_abi/pipelines/AIAgentOntologyGenerationPipeline.py +635 -0
  23. naas_abi/pipelines/AIAgentOntologyGenerationPipeline_test.py +133 -0
  24. naas_abi/pipelines/AddIndividualPipeline.py +215 -0
  25. naas_abi/pipelines/AddIndividualPipeline_test.py +66 -0
  26. naas_abi/pipelines/InsertDataSPARQLPipeline.py +197 -0
  27. naas_abi/pipelines/InsertDataSPARQLPipeline_test.py +96 -0
  28. naas_abi/pipelines/MergeIndividualsPipeline.py +245 -0
  29. naas_abi/pipelines/MergeIndividualsPipeline_test.py +98 -0
  30. naas_abi/pipelines/RemoveIndividualPipeline.py +166 -0
  31. naas_abi/pipelines/RemoveIndividualPipeline_test.py +58 -0
  32. naas_abi/pipelines/UpdateCommercialOrganizationPipeline.py +198 -0
  33. naas_abi/pipelines/UpdateDataPropertyPipeline.py +175 -0
  34. naas_abi/pipelines/UpdateLegalNamePipeline.py +107 -0
  35. naas_abi/pipelines/UpdateLinkedInPagePipeline.py +179 -0
  36. naas_abi/pipelines/UpdatePersonPipeline.py +184 -0
  37. naas_abi/pipelines/UpdateSkillPipeline.py +118 -0
  38. naas_abi/pipelines/UpdateTickerPipeline.py +104 -0
  39. naas_abi/pipelines/UpdateWebsitePipeline.py +106 -0
  40. naas_abi/triggers.py +131 -0
  41. naas_abi/workflows/AgentRecommendationWorkflow.py +321 -0
  42. naas_abi/workflows/AgentRecommendationWorkflow_test.py +160 -0
  43. naas_abi/workflows/ArtificialAnalysisWorkflow.py +337 -0
  44. naas_abi/workflows/ArtificialAnalysisWorkflow_test.py +57 -0
  45. naas_abi/workflows/ConvertOntologyGraphToYamlWorkflow.py +210 -0
  46. naas_abi/workflows/ConvertOntologyGraphToYamlWorkflow_test.py +78 -0
  47. naas_abi/workflows/CreateClassOntologyYamlWorkflow.py +208 -0
  48. naas_abi/workflows/CreateClassOntologyYamlWorkflow_test.py +65 -0
  49. naas_abi/workflows/CreateIndividualOntologyYamlWorkflow.py +183 -0
  50. naas_abi/workflows/CreateIndividualOntologyYamlWorkflow_test.py +86 -0
  51. naas_abi/workflows/ExportGraphInstancesToExcelWorkflow.py +450 -0
  52. naas_abi/workflows/ExportGraphInstancesToExcelWorkflow_test.py +33 -0
  53. naas_abi/workflows/GetObjectPropertiesFromClassWorkflow.py +385 -0
  54. naas_abi/workflows/GetObjectPropertiesFromClassWorkflow_test.py +57 -0
  55. naas_abi/workflows/GetSubjectGraphWorkflow.py +84 -0
  56. naas_abi/workflows/GetSubjectGraphWorkflow_test.py +71 -0
  57. naas_abi/workflows/SearchIndividualWorkflow.py +190 -0
  58. naas_abi/workflows/SearchIndividualWorkflow_test.py +98 -0
  59. naas_abi-1.0.0.dist-info/METADATA +9 -0
  60. naas_abi-1.0.0.dist-info/RECORD +62 -0
  61. naas_abi-1.0.0.dist-info/WHEEL +5 -0
  62. naas_abi-1.0.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,337 @@
+ import json
+ from dataclasses import dataclass
+ from datetime import datetime, timezone
+ from enum import Enum
+ from pathlib import Path
+ from typing import Annotated, Any, Dict, Set
+
+ import requests
+ from fastapi import APIRouter
+ from langchain_core.tools import BaseTool, StructuredTool
+ from naas_abi_core.workflow import Workflow, WorkflowConfiguration
+ from naas_abi_core.workflow.workflow import WorkflowParameters
+ from pydantic import Field
+
+
+ @dataclass
+ class ArtificialAnalysisWorkflowConfiguration(WorkflowConfiguration):
+     """Configuration for ArtificialAnalysisWorkflow.
+
+     Attributes:
+         api_key (str): Artificial Analysis API key
+         base_url (str): Base URL for Artificial Analysis API
+     """
+
+     api_key: str
+     base_url: str = "https://artificialanalysis.ai/api/v2"
+
+
+ class ArtificialAnalysisWorkflowParameters(WorkflowParameters):
+     """Parameters for ArtificialAnalysisWorkflow execution.
+
+     Attributes:
+         endpoint (str): API endpoint to fetch (llms, text-to-image, etc.)
+         include_categories (bool): Include category breakdowns for media endpoints
+         validate_agents_only (bool): Only keep models whose provider maps to a module with active agents
+     """
+
+     endpoint: Annotated[
+         str, Field(description="API endpoint to fetch (llms, text-to-image, etc.)")
+     ] = "llms"
+     include_categories: Annotated[
+         bool, Field(description="Include category breakdowns for media endpoints")
+     ] = False
+     validate_agents_only: Annotated[
+         bool, Field(description="Only keep models whose provider maps to a module with active agents")
+     ] = True
+
+
+ class ArtificialAnalysisWorkflow(Workflow):
+     """Workflow for fetching and storing Artificial Analysis API data."""
+
+     __configuration: ArtificialAnalysisWorkflowConfiguration
+
+     def __init__(self, configuration: ArtificialAnalysisWorkflowConfiguration):
+         self.__configuration = configuration
+
+     def run_workflow(
+         self, parameters: ArtificialAnalysisWorkflowParameters
+     ) -> Dict[str, Any]:
+         """Execute the workflow to fetch and save Artificial Analysis data.
+
+         Args:
+             parameters: Workflow parameters containing endpoint and options
+
+         Returns:
+             Dict containing operation results and file information
+         """
+         print(f"🚀 Starting Artificial Analysis data fetch for {parameters.endpoint}")
+
+         # Step 1: Get valid modules with agents
+         valid_modules = (
+             self._get_modules_with_agents()
+             if parameters.validate_agents_only
+             else set()
+         )
+
+         if parameters.validate_agents_only:
+             print(
+                 f"🤖 Found {len(valid_modules)} modules with active agents: {', '.join(sorted(valid_modules))}"
+             )
+
+         # Step 2: Fetch data from API
+         api_data = self._fetch_models_data(
+             parameters.endpoint, parameters.include_categories
+         )
+
+         if api_data.get("status") == "error":
+             return {"status": "error", "message": "Failed to fetch data from API"}
+
+         # Step 3: Filter data for valid modules only
+         filtered_data = api_data
+         if parameters.validate_agents_only and valid_modules:
+             filtered_data = self._filter_data_for_valid_modules(api_data, valid_modules)
+
+         # Step 4: Save filtered data
+         timestamp = datetime.now(timezone.utc).strftime("%Y%m%dT%H%M%S")
+         filename = f"{timestamp}_{parameters.endpoint}_data.json"
+
+         # Create storage directory
+         storage_dir = Path(
+             "storage/datastore/core/modules/abi/ArtificialAnalysisWorkflow"
+         )
+         storage_dir.mkdir(parents=True, exist_ok=True)
+
+         # Save filtered API response
+         output_file = storage_dir / filename
+         with open(output_file, "w", encoding="utf-8") as f:
+             json.dump(filtered_data, f, indent=2, ensure_ascii=False)
+
+         models_count = len(filtered_data.get("data", []))
+         original_count = len(api_data.get("data", []))
+
+         print(f"💾 Saved {models_count}/{original_count} models to: {output_file}")
+         print(f"📊 Endpoint: {parameters.endpoint}")
+         print(f"🕐 Timestamp: {timestamp}")
+
+         if parameters.validate_agents_only:
+             print("✅ Filtered to models with corresponding agents only")
+
+         return {
+             "status": "success",
+             "endpoint": parameters.endpoint,
+             "models_count": models_count,
+             "original_count": original_count,
+             "valid_modules": list(valid_modules) if valid_modules else [],
+             "output_file": str(output_file),
+             "timestamp": timestamp,
+         }
+
+     def _fetch_models_data(
+         self, endpoint: str, include_categories: bool = False
+     ) -> Dict[str, Any]:
+         """Fetch model data from Artificial Analysis API."""
+         # Different URL structure for LLMs vs media endpoints
+         if endpoint == "llms":
+             url = f"{self.__configuration.base_url}/data/llms/models"
+         else:
+             # Media endpoints like text-to-image, text-to-speech, etc.
+             url = f"{self.__configuration.base_url}/data/media/{endpoint}"
+
+         headers = {
+             "x-api-key": self.__configuration.api_key,
+             "Content-Type": "application/json",
+         }
+
+         params = {}
+         if include_categories:
+             params["include_categories"] = "true"
+
+         try:
+             print(f"🔍 Fetching data from: {url}")
+             response = requests.get(url, headers=headers, params=params)
+             response.raise_for_status()
+
+             data = response.json()
+             models_count = len(data.get("data", []))
+             print(f"✅ Successfully fetched {models_count} models from {endpoint}")
+
+             return {
+                 "status": "success",
+                 "data": data.get("data", []),
+                 "metadata": {
+                     "endpoint": endpoint,
+                     "total_models": models_count,
+                     "fetched_at": datetime.now(timezone.utc).isoformat(),
+                     "api_url": url,
+                 },
+             }
+
+         except requests.exceptions.RequestException as e:
+             print(f"❌ Error fetching data: {e}")
+             return {"status": "error", "error": str(e), "endpoint": endpoint}
+
+     def _get_modules_with_agents(self) -> Set[str]:
+         """Get set of module names that have agents folders with *Agent.py files.
+
+         Returns:
+             Set[str]: Module names that have active agents
+         """
+         valid_modules: Set[str] = set()
+         core_modules_path = Path("src/core/modules")
+
+         if not core_modules_path.exists():
+             print(f"⚠️ Core modules path not found: {core_modules_path}")
+             return valid_modules
+
+         print(f"🔍 Scanning for modules with agents in: {core_modules_path}")
+
+         # Scan each module directory
+         for module_dir in core_modules_path.iterdir():
+             if not module_dir.is_dir() or module_dir.name.startswith("."):
+                 continue
+
+             agents_dir = module_dir / "agents"
+             if not agents_dir.exists():
+                 continue
+
+             # Look for *Agent.py files
+             agent_files = list(agents_dir.glob("*Agent.py"))
+             if agent_files:
+                 valid_modules.add(module_dir.name)
+                 print(f"  ✅ {module_dir.name}: Found {len(agent_files)} agent files")
+             else:
+                 print(
+                     f"  ❌ {module_dir.name}: Has agents/ folder but no *Agent.py files"
+                 )
+
+         return valid_modules
+
+     def _filter_data_for_valid_modules(
+         self, api_data: Dict[str, Any], valid_modules: Set[str]
+     ) -> Dict[str, Any]:
+         """Filter API data to only include models that have corresponding agent modules.
+
+         Args:
+             api_data: Raw API data from Artificial Analysis
+             valid_modules: Set of module names with agents
+
+         Returns:
+             Dict[str, Any]: Filtered API data
+         """
+         if not api_data.get("data"):
+             return api_data
+
+         filtered_models = []
+         skipped_models = []
+
+         for model in api_data["data"]:
+             # Extract provider/module name from model data
+             module_name = self._extract_module_name_from_model(model)
+
+             if module_name in valid_modules:
+                 filtered_models.append(model)
+             else:
+                 skipped_models.append(
+                     {
+                         "name": model.get("name", "Unknown"),
+                         "provider": model.get("provider", "Unknown"),
+                         "reason": f"No agent found for module: {module_name}",
+                     }
+                 )
+
+         print("📋 Filtered results:")
+         print(f"  ✅ Keeping {len(filtered_models)} models with agents")
+         print(f"  ❌ Skipping {len(skipped_models)} models without agents")
+
+         if skipped_models:
+             print("  📝 Skipped models:")
+             for skipped in skipped_models[:5]:  # Show first 5
+                 print(
+                     f"    • {skipped['name']} ({skipped['provider']}) - {skipped['reason']}"
+                 )
+             if len(skipped_models) > 5:
+                 print(f"    • ... and {len(skipped_models) - 5} more")
+
+         # Return filtered data with same structure
+         filtered_data = api_data.copy()
+         filtered_data["data"] = filtered_models
+
+         return filtered_data
+
+     def _extract_module_name_from_model(self, model: Dict[str, Any]) -> str:
+         """Extract the module name from a model's provider information.
+
+         Args:
+             model: Model data from API
+
+         Returns:
+             str: Module name (e.g., 'chatgpt', 'claude', 'mistral')
+         """
+         provider = model.get("provider", "").lower()
+         model_name = model.get("name", "").lower()
+
+         # Map providers to module names
+         provider_mapping = {
+             "openai": "chatgpt",
+             "anthropic": "claude",
+             "mistral": "mistral",
+             "google": "gemini",
+             "meta": "llama",
+             "perplexity": "perplexity",
+             "xai": "grok",
+             "together": "together",
+             "groq": "groq",
+         }
+
+         # Try to match by provider first
+         for provider_key, module_name in provider_mapping.items():
+             if provider_key in provider:
+                 return module_name
+
+         # Try to match by model name patterns
+         if "gpt" in model_name or "openai" in model_name:
+             return "chatgpt"
+         elif "claude" in model_name or "anthropic" in model_name:
+             return "claude"
+         elif "mistral" in model_name:
+             return "mistral"
+         elif "gemini" in model_name or "palm" in model_name:
+             return "gemini"
+         elif "llama" in model_name or "meta" in model_name:
+             return "llama"
+         elif "grok" in model_name or "xai" in model_name:
+             return "grok"
+
+         # Default to provider name if no mapping found
+         return provider.replace(" ", "").replace("-", "").lower()
+
+     def as_tools(self) -> list[BaseTool]:
+         """Returns a list of LangChain tools for this workflow.
+
+         Returns:
+             list[BaseTool]: List containing the workflow tool
+         """
+         return [
+             StructuredTool(
+                 name="artificial_analysis_data_fetch",
+                 description="Fetch AI model data from Artificial Analysis API, filter for modules with active agents, and save as timestamped JSON files",
+                 func=lambda **kwargs: self.run_workflow(
+                     ArtificialAnalysisWorkflowParameters(**kwargs)
+                 ),
+                 args_schema=ArtificialAnalysisWorkflowParameters,
+             )
+         ]
+
+     def as_api(
+         self,
+         router: APIRouter,
+         route_name: str = "",
+         name: str = "",
+         description: str = "",
+         description_stream: str = "",
+         tags: list[str | Enum] | None = None,
+     ) -> None:
+         if tags is None:
+             tags = []
+         return None
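
For orientation, here is a minimal usage sketch for the workflow above. It is not part of the package; the `AA_AI_API_KEY` variable name is borrowed from the test file that follows, and everything else mirrors the diff:

    import os

    from naas_abi.workflows.ArtificialAnalysisWorkflow import (
        ArtificialAnalysisWorkflow,
        ArtificialAnalysisWorkflowConfiguration,
        ArtificialAnalysisWorkflowParameters,
    )

    # Configure with an Artificial Analysis API key; base_url defaults to the v2 API.
    workflow = ArtificialAnalysisWorkflow(
        ArtificialAnalysisWorkflowConfiguration(api_key=os.environ["AA_AI_API_KEY"])
    )

    # Fetch LLM data, keeping only models whose provider maps to a module with agents.
    result = workflow.run_workflow(
        ArtificialAnalysisWorkflowParameters(endpoint="llms", validate_agents_only=True)
    )
    print(result["status"], result["models_count"], result["output_file"])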
@@ -0,0 +1,57 @@
+ import os
+ from pathlib import Path
+
+ import pytest
+ from naas_abi.workflows.ArtificialAnalysisWorkflow import (
+     ArtificialAnalysisWorkflow,
+     ArtificialAnalysisWorkflowConfiguration,
+     ArtificialAnalysisWorkflowParameters,
+ )
+
+
+ @pytest.fixture
+ def workflow() -> ArtificialAnalysisWorkflow:
+     api_key = os.getenv("AA_AI_API_KEY", "test_key")
+
+     workflow_configuration = ArtificialAnalysisWorkflowConfiguration(
+         api_key=api_key, base_url="https://artificialanalysis.ai/api/v2"
+     )
+
+     return ArtificialAnalysisWorkflow(workflow_configuration)
+
+
+ @pytest.mark.skipif(
+     not os.getenv("AA_AI_API_KEY"), reason="AA_AI_API_KEY environment variable not set"
+ )
+ def test_workflow_fetch_llms_data(workflow: ArtificialAnalysisWorkflow):
+     """Test fetching LLMs data from Artificial Analysis API."""
+     result = workflow.run_workflow(
+         ArtificialAnalysisWorkflowParameters(endpoint="llms", include_categories=False)
+     )
+
+     assert result is not None, result
+     assert result["status"] == "success", result
+     assert result["models_count"] > 0, result
+     assert "output_file" in result, result
+     assert "timestamp" in result, result
+
+     # Verify file was created
+     output_file = Path(result["output_file"])
+     assert output_file.exists(), f"Output file {output_file} was not created"
+
+     # Verify file contains valid JSON
+     import json
+
+     with open(output_file, "r") as f:
+         data = json.load(f)
+         assert "data" in data, "JSON file should contain 'data' field"
+         assert "metadata" in data, "JSON file should contain 'metadata' field"
+
+
+ def test_workflow_as_tools(workflow: ArtificialAnalysisWorkflow):
+     """Test that workflow can be converted to LangChain tools."""
+     tools = workflow.as_tools()
+
+     assert len(tools) == 1, f"Expected 1 tool, got {len(tools)}"
+     assert tools[0].name == "artificial_analysis_data_fetch"
+     assert "Artificial Analysis" in tools[0].description
@@ -0,0 +1,210 @@
+ from dataclasses import dataclass
+ from enum import Enum
+ from typing import Annotated, Dict, Optional
+
+ import pydash as _
+ import yaml
+ from fastapi import APIRouter
+ from langchain_core.tools import BaseTool, StructuredTool
+ from naas_abi import config, logger
+ from naas_abi.mappings import COLORS_NODES
+ from naas_abi_core.utils.OntologyYaml import OntologyYaml
+ from naas_abi_core.workflow import Workflow, WorkflowConfiguration
+ from naas_abi_core.workflow.workflow import WorkflowParameters
+ from naas_abi_marketplace.applications.naas.integrations.NaasIntegration import (
+     NaasIntegration,
+     NaasIntegrationConfiguration,
+ )
+ from pydantic import Field
+ from rdflib import Graph
+ from yaml import Dumper
+
+
+ @dataclass
+ class ConvertOntologyGraphToYamlWorkflowConfiguration(WorkflowConfiguration):
+     """Configuration for ConvertOntologyGraphToYaml workflow.
+
+     Attributes:
+         naas_integration_config (NaasIntegrationConfiguration): Configuration for the Naas integration
+     """
+
+     naas_integration_config: NaasIntegrationConfiguration
+
+
+ class ConvertOntologyGraphToYamlWorkflowParameters(WorkflowParameters):
+     """Parameters for ConvertOntologyGraphToYaml workflow execution.
+
+     Attributes:
+         graph (str): The ontology graph serialized in Turtle format
+         label (str): The label of the ontology
+         description (str): The description of the ontology
+         logo_url (str): The URL of the ontology logo
+         level (str): The level of the ontology (e.g., 'TOP_LEVEL', 'MID_LEVEL', 'DOMAIN', 'USE_CASE')
+         display_relations_names (bool): Whether to display relation names in the visualization
+     """
+
+     graph: Annotated[
+         str, Field(..., description="The graph serialized as turtle format")
+     ]
+     ontology_id: Annotated[
+         Optional[str], Field(..., description="The ID of the ontology")
+     ] = None
+     label: Annotated[str, Field(..., description="The label of the ontology")] = (
+         "New Ontology"
+     )
+     description: Annotated[
+         str,
+         Field(
+             ...,
+             description="The description of the ontology. Example: 'Represents ABI Ontology with agents, workflows, ontologies, pipelines and integrations.'",
+         ),
+     ] = "New Ontology Description"
+     logo_url: Annotated[
+         Optional[str], Field(..., description="The logo URL of the ontology")
+     ] = "https://naasai-public.s3.eu-west-3.amazonaws.com/abi-demo/ontology_ULO.png"
+     level: Annotated[str, Field(..., description="The level of the ontology")] = (
+         "USE_CASE"
+     )
+     display_relations_names: Annotated[
+         bool,
+         Field(
+             ..., description="Whether to display relation names in the visualization"
+         ),
+     ] = True
+     class_colors_mapping: Annotated[
+         Dict, Field(..., description="The mapping of class colors")
+     ] = COLORS_NODES
+
+
+ class ConvertOntologyGraphToYamlWorkflow(Workflow):
+     """Workflow for converting ontology graph to YAML."""
+
+     __configuration: ConvertOntologyGraphToYamlWorkflowConfiguration
+
+     def __init__(self, configuration: ConvertOntologyGraphToYamlWorkflowConfiguration):
+         self.__configuration = configuration
+         self.__naas_integration = NaasIntegration(
+             self.__configuration.naas_integration_config
+         )
+
+     def graph_to_yaml(
+         self, parameters: ConvertOntologyGraphToYamlWorkflowParameters
+     ) -> str:
+         # Initialize parameters
+         logger.debug(f"==> Converting ontology graph to YAML: {parameters.label}")
+         yaml_data = None
+         ontology_id = parameters.ontology_id
+
+         # Create Graph from turtle string
+         g = Graph()
+         g.parse(data=parameters.graph, format="turtle")
+
+         # Upload asset to Naas
+         asset = self.__naas_integration.upload_asset(
+             data=parameters.graph.encode("utf-8"),  # Use the original turtle string
+             workspace_id=config.workspace_id,
+             storage_name=config.storage_name,
+             prefix="assets",
+             object_name=str(parameters.label + ".ttl"),
+             visibility="public",
+         )
+         # Save asset URL to JSON
+         if asset is None:
+             raise ValueError("Failed to upload asset to Naas")
+
+         asset_url = asset.get("asset", {}).get("url")
+         if not asset_url:
+             raise ValueError("Asset URL not found in response")
+
+         if asset_url.endswith("/"):
+             asset_url = asset_url[:-1]
+
+         # Convert to YAML
+         try:
+             yaml_data = OntologyYaml.rdf_to_yaml(
+                 g,
+                 display_relations_names=parameters.display_relations_names,
+                 class_colors_mapping=parameters.class_colors_mapping,
+             )
+         except Exception as e:
+             logger.error(f"Error converting ontology to YAML: {e}")
+             raise
+
+         # Initialize parameters
+         if yaml_data is not None:
+             workspace_id = config.workspace_id
+             onto_label = parameters.label
+             onto_description = parameters.description
+             onto_logo_url = parameters.logo_url
+             onto_level = parameters.level
+
+             # Get ontology ID if it exists
+             ontologies = self.__naas_integration.get_ontologies(workspace_id).get(
+                 "ontologies", []
+             )
+             for ontology in ontologies:
+                 if (
+                     ontology
+                     and isinstance(ontology, dict)
+                     and ontology.get("label") == onto_label
+                 ):
+                     ontology_id = ontology.get("id")
+                     break
+
+             if ontology_id is None:
+                 # Create new ontology
+                 res = self.__naas_integration.create_ontology(
+                     workspace_id=workspace_id,
+                     label=onto_label,
+                     source=yaml.dump(yaml_data, Dumper=Dumper),
+                     level=onto_level,
+                     description=onto_description,
+                     download_url=asset_url,
+                     logo_url=onto_logo_url,
+                 )
+                 ontology_id = str(_.get(res, "ontology.id", ""))
+                 message = (
+                     f"✅ Ontology '{onto_label}' ({ontology_id}) successfully created."
+                 )
+             else:
+                 # Update existing ontology
+                 res = self.__naas_integration.update_ontology(
+                     workspace_id=workspace_id,
+                     ontology_id=ontology_id,
+                     source=yaml.dump(yaml_data, Dumper=Dumper),
+                     level=onto_level,
+                     description=onto_description,
+                     download_url=asset_url,
+                     logo_url=onto_logo_url,
+                 )
+                 message = (
+                     f"✅ Ontology '{onto_label}' ({ontology_id}) successfully updated."
+                 )
+             logger.info(message)
+         if ontology_id is None:
+             raise ValueError("Failed to create or update ontology")
+         return ontology_id
+
+     def as_tools(self) -> list[BaseTool]:
+         """Returns a list of LangChain tools for this workflow."""
+         return [
+             StructuredTool(
+                 name="convert_graph_to_yaml",
+                 description="Convert an ontology graph to YAML.",
+                 func=lambda **kwargs: self.graph_to_yaml(
+                     ConvertOntologyGraphToYamlWorkflowParameters(**kwargs)
+                 ),
+                 args_schema=ConvertOntologyGraphToYamlWorkflowParameters,
+             )
+         ]
+
+     def as_api(
+         self,
+         router: APIRouter,
+         route_name: str = "",
+         name: str = "",
+         description: str = "",
+         description_stream: str = "",
+         tags: list[str | Enum] | None = None,
+     ) -> None:
+         pass
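
Like the Artificial Analysis workflow, the converter is exposed as a LangChain StructuredTool. A hedged sketch of calling it through that interface, assuming `workflow` is a configured ConvertOntologyGraphToYamlWorkflow and `graph` is an rdflib Graph (all argument values are illustrative):

    tool = workflow.as_tools()[0]  # the "convert_graph_to_yaml" tool
    ontology_id = tool.invoke(
        {
            "graph": graph.serialize(format="turtle"),  # required: Turtle string
            "label": "Naas.ai",
            "description": "Naas.ai Ontology",
        }
    )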
@@ -0,0 +1,78 @@
+ import pytest
+ from naas_abi import secret
+ from naas_abi.workflows.ConvertOntologyGraphToYamlWorkflow import (
+     ConvertOntologyGraphToYamlWorkflow,
+     ConvertOntologyGraphToYamlWorkflowConfiguration,
+     ConvertOntologyGraphToYamlWorkflowParameters,
+ )
+ from naas_abi_marketplace.applications.naas.integrations.NaasIntegration import (
+     NaasIntegration,
+     NaasIntegrationConfiguration,
+ )
+
+
+ @pytest.fixture
+ def workflow() -> ConvertOntologyGraphToYamlWorkflow:
+     return ConvertOntologyGraphToYamlWorkflow(
+         ConvertOntologyGraphToYamlWorkflowConfiguration(
+             NaasIntegrationConfiguration(api_key=secret.get("NAAS_API_KEY"))
+         )
+     )
+
+
+ def test_convert_ontology_graph_to_yaml_workflow(
+     workflow: ConvertOntologyGraphToYamlWorkflow,
+ ):
+     import time
+     from uuid import uuid4
+
+     from naas_abi import config, logger, services
+     from rdflib import OWL, RDF, RDFS, Graph, Literal, Namespace, URIRef
+
+     ABI = Namespace("http://ontology.naas.ai/abi/")
+
+     graph = Graph()
+     uri = ABI[str(uuid4())]
+     graph.add(
+         (uri, RDF.type, URIRef("https://www.commoncoreontologies.org/ont00000443"))
+     )
+     graph.add((uri, RDF.type, OWL.NamedIndividual))
+     graph.add((uri, RDFS.label, Literal("Naas.ai")))
+     graph.add(
+         (
+             uri,
+             ABI.logo,
+             Literal(
+                 "https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcQ9gXMaBLQZ39W6Pk53PRuzFjUvv_6lLRWPoQ&s"
+             ),
+         )
+     )
+     services.triple_store_service.insert(graph)
+     time.sleep(3)
+
+     # Run workflow
+     ontology_id = workflow.graph_to_yaml(
+         ConvertOntologyGraphToYamlWorkflowParameters(
+             graph=graph.serialize(format="turtle"),
+             ontology_id=None,
+             label="Naas.ai",
+             description="Naas.ai Ontology",
+             logo_url="https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcQ9gXMaBLQZ39W6Pk53PRuzFjUvv_6lLRWPoQ&s",
+             level="USE_CASE",
+             display_relations_names=True,
+             class_colors_mapping={},
+         )
+     )
+
+     # Remove graph
+     services.triple_store_service.remove(graph)
+
+     # Remove ontology
+     naas_integration = NaasIntegration(
+         NaasIntegrationConfiguration(api_key=secret.get("NAAS_API_KEY"))
+     )
+     result = naas_integration.delete_ontology(
+         workspace_id=config.workspace_id, ontology_id=ontology_id
+     )
+     logger.info(f"Removed ontology: {result}")
+     assert result is not None, result
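
Note that both test modules talk to live services (the Artificial Analysis API and the Naas platform), so they are usually run selectively. A sketch of invoking the converter tests programmatically, assuming NAAS_API_KEY is resolvable by the `secret` provider:

    import pytest

    # Equivalent to `pytest -v <path>`; the Artificial Analysis tests instead
    # skip themselves when AA_AI_API_KEY is not set.
    pytest.main(["-v", "naas_abi/workflows/ConvertOntologyGraphToYamlWorkflow_test.py"])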