kailash 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (69)
  1. kailash/__init__.py +31 -0
  2. kailash/__main__.py +11 -0
  3. kailash/cli/__init__.py +5 -0
  4. kailash/cli/commands.py +563 -0
  5. kailash/manifest.py +778 -0
  6. kailash/nodes/__init__.py +23 -0
  7. kailash/nodes/ai/__init__.py +26 -0
  8. kailash/nodes/ai/agents.py +417 -0
  9. kailash/nodes/ai/models.py +488 -0
  10. kailash/nodes/api/__init__.py +52 -0
  11. kailash/nodes/api/auth.py +567 -0
  12. kailash/nodes/api/graphql.py +480 -0
  13. kailash/nodes/api/http.py +598 -0
  14. kailash/nodes/api/rate_limiting.py +572 -0
  15. kailash/nodes/api/rest.py +665 -0
  16. kailash/nodes/base.py +1032 -0
  17. kailash/nodes/base_async.py +128 -0
  18. kailash/nodes/code/__init__.py +32 -0
  19. kailash/nodes/code/python.py +1021 -0
  20. kailash/nodes/data/__init__.py +125 -0
  21. kailash/nodes/data/readers.py +496 -0
  22. kailash/nodes/data/sharepoint_graph.py +623 -0
  23. kailash/nodes/data/sql.py +380 -0
  24. kailash/nodes/data/streaming.py +1168 -0
  25. kailash/nodes/data/vector_db.py +964 -0
  26. kailash/nodes/data/writers.py +529 -0
  27. kailash/nodes/logic/__init__.py +6 -0
  28. kailash/nodes/logic/async_operations.py +702 -0
  29. kailash/nodes/logic/operations.py +551 -0
  30. kailash/nodes/transform/__init__.py +5 -0
  31. kailash/nodes/transform/processors.py +379 -0
  32. kailash/runtime/__init__.py +6 -0
  33. kailash/runtime/async_local.py +356 -0
  34. kailash/runtime/docker.py +697 -0
  35. kailash/runtime/local.py +434 -0
  36. kailash/runtime/parallel.py +557 -0
  37. kailash/runtime/runner.py +110 -0
  38. kailash/runtime/testing.py +347 -0
  39. kailash/sdk_exceptions.py +307 -0
  40. kailash/tracking/__init__.py +7 -0
  41. kailash/tracking/manager.py +885 -0
  42. kailash/tracking/metrics_collector.py +342 -0
  43. kailash/tracking/models.py +535 -0
  44. kailash/tracking/storage/__init__.py +0 -0
  45. kailash/tracking/storage/base.py +113 -0
  46. kailash/tracking/storage/database.py +619 -0
  47. kailash/tracking/storage/filesystem.py +543 -0
  48. kailash/utils/__init__.py +0 -0
  49. kailash/utils/export.py +924 -0
  50. kailash/utils/templates.py +680 -0
  51. kailash/visualization/__init__.py +62 -0
  52. kailash/visualization/api.py +732 -0
  53. kailash/visualization/dashboard.py +951 -0
  54. kailash/visualization/performance.py +808 -0
  55. kailash/visualization/reports.py +1471 -0
  56. kailash/workflow/__init__.py +15 -0
  57. kailash/workflow/builder.py +245 -0
  58. kailash/workflow/graph.py +827 -0
  59. kailash/workflow/mermaid_visualizer.py +628 -0
  60. kailash/workflow/mock_registry.py +63 -0
  61. kailash/workflow/runner.py +302 -0
  62. kailash/workflow/state.py +238 -0
  63. kailash/workflow/visualization.py +588 -0
  64. kailash-0.1.0.dist-info/METADATA +710 -0
  65. kailash-0.1.0.dist-info/RECORD +69 -0
  66. kailash-0.1.0.dist-info/WHEEL +5 -0
  67. kailash-0.1.0.dist-info/entry_points.txt +2 -0
  68. kailash-0.1.0.dist-info/licenses/LICENSE +21 -0
  69. kailash-0.1.0.dist-info/top_level.txt +1 -0
--- /dev/null
+++ kailash/utils/templates.py
@@ -0,0 +1,680 @@
+ """Project template system for Kailash SDK."""
+
+ from pathlib import Path
+ from typing import Dict, Optional
+
+ from kailash.sdk_exceptions import TemplateError
+
+
+ class NodeTemplate:
+     """Template for creating node implementations."""
+
+     def __init__(self, name: str, description: str, base_class: str = "Node"):
+         """Initialize node template.
+
+         Args:
+             name: Node class name
+             description: Node description
+             base_class: Base class to inherit from
+         """
+         self.name = name
+         self.description = description
+         self.base_class = base_class
+         self.input_params = []
+         self.output_params = []
+         self.code_template = ""
+
+     def add_input_parameter(
+         self,
+         name: str,
+         param_type: str,
+         required: bool = True,
+         description: str = "",
+         default=None,
+     ) -> "NodeTemplate":
+         """Add input parameter to template.
+
+         Args:
+             name: Parameter name
+             param_type: Parameter type (str, int, dict, etc.)
+             required: Whether parameter is required
+             description: Parameter description
+             default: Default value
+
+         Returns:
+             Self for chaining
+         """
+         self.input_params.append(
+             {
+                 "name": name,
+                 "type": param_type,
+                 "required": required,
+                 "description": description,
+                 "default": default,
+             }
+         )
+         return self
+
+     def add_output_parameter(
+         self, name: str, param_type: str, description: str = ""
+     ) -> "NodeTemplate":
+         """Add output parameter to template.
+
+         Args:
+             name: Parameter name
+             param_type: Parameter type (str, int, dict, etc.)
+             description: Parameter description
+
+         Returns:
+             Self for chaining
+         """
+         self.output_params.append(
+             {"name": name, "type": param_type, "description": description}
+         )
+         return self
+
+     def set_code_template(self, code: str) -> "NodeTemplate":
+         """Set code template.
+
+         Args:
+             code: Python code template
+
+         Returns:
+             Self for chaining
+         """
+         self.code_template = code
+         return self
+
+     def generate_code(self) -> str:
+         """Generate Python code for the node.
+
+         Returns:
+             Generated code
+
+         Raises:
+             TemplateError: If generation fails
+         """
+         try:
+             # Start with imports
+             code = f"""from typing import Dict, Any, Optional
+ from kailash.nodes.base import Node, NodeParameter
+
+ class {self.name}({self.base_class}):
+     \"""
+     {self.description}
+     \"""
+
+     def get_parameters(self) -> Dict[str, NodeParameter]:
+         \"""Define node parameters.\"""
+         return {{
+ """
+
+             # Add input parameters
+             for param in self.input_params:
+                 default_str = ""
+                 if param["default"] is not None:
+                     if isinstance(param["default"], str):
+                         default_str = f'default="{param["default"]}"'
+                     else:
+                         default_str = f"default={param['default']}"
+
+                 code += f"""            "{param["name"]}": NodeParameter(
+                 name="{param["name"]}",
+                 type={param["type"]},
+                 required={param["required"]},
+                 description="{param["description"]}"{', ' + default_str if default_str else ''}
+             ),
+ """
+
+             code += """        }
+
+     def run(self, **kwargs) -> Dict[str, Any]:
+         \"""Process node logic.
+
+         Args:
+             **kwargs: Input parameters
+
+         Returns:
+             Output parameters
+         \"""
+ """
+
+             # Add custom code if provided, otherwise use default implementation
+             if self.code_template:
+                 code += f"\n{self.code_template}\n"
+             else:
+                 code += """        # TODO: Implement node logic
+         # Access input parameters via kwargs
+
+         # Return results as a dictionary
+         return {
+ """
+                 # Add output parameters
+                 for param in self.output_params:
+                     code += f'            "{param["name"]}": None,  # TODO: Set {param["name"]}\n'
+
+                 code += "        }\n"
+
+             return code
+
+         except Exception as e:
+             raise TemplateError(f"Failed to generate node code: {e}") from e
+
+     def save(self, output_path: str) -> None:
+         """Save generated code to file.
+
+         Args:
+             output_path: Path to save file
+
+         Raises:
+             TemplateError: If save fails
+         """
+         try:
+             code = self.generate_code()
+
+             # Create parent directories if needed
+             path = Path(output_path)
+             path.parent.mkdir(parents=True, exist_ok=True)
+
+             # Write to file
+             with open(path, "w") as f:
+                 f.write(code)
+
+         except Exception as e:
+             raise TemplateError(f"Failed to save node code: {e}") from e
+
+
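As a usage sketch of the fluent builder above (the node name, parameters, and output path are illustrative, not part of the package):

```python
from kailash.utils.templates import NodeTemplate

# Chain parameter declarations, then generate and write the node class.
template = (
    NodeTemplate("WordCounter", "Counts words in a text field")
    .add_input_parameter("text", "str", required=True, description="Text to count")
    .add_output_parameter("count", "int", description="Word count")
)
template.save("nodes/word_counter.py")  # calls generate_code() and writes the file
```

Note that `param_type` is a string such as `"str"` and is interpolated unquoted into the generated source (`type=str`), so it must name a type that is visible there, e.g. a builtin.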
+ class TemplateManager:
+     """Manage project templates for scaffolding."""
+
+     def __init__(self):
+         """Initialize template manager."""
+         self.templates = {
+             "basic": self._basic_template,
+             "data_processing": self._data_processing_template,
+             "ml_pipeline": self._ml_pipeline_template,
+             "api_workflow": self._api_workflow_template,
+         }
+
+         # Export templates for workflow export
+         self.export_templates = {
+             "minimal": {"yaml": True, "json": False, "manifest": False},
+             "standard": {"yaml": True, "json": True, "manifest": True},
+             "kubernetes": {
+                 "yaml": True,
+                 "json": False,
+                 "manifest": True,
+                 "files": {
+                     "deploy.sh": """#!/bin/bash
+ # Deploy workflow to Kubernetes
+ kubectl apply -f {workflow_name}-manifest.yaml
+
+ # Check deployment status
+ kubectl get pods -n {namespace} -l workflow={workflow_name}
+ """,
+                     "README.md": """# {workflow_name} Deployment
+
+ This directory contains the Kubernetes deployment files for {workflow_name}.
+
+ ## Files
+ - `{workflow_name}.yaml`: Workflow definition
+ - `{workflow_name}-manifest.yaml`: Kubernetes manifest
+ - `deploy.sh`: Deployment script
+
+ ## Deployment
+ ```bash
+ ./deploy.sh
+ ```
+
+ ## Namespace
+ Deployed to: {namespace}
+ """,
+                 },
+             },
+             "docker": {
+                 "yaml": True,
+                 "json": True,
+                 "manifest": False,
+                 "files": {
+                     "Dockerfile": """FROM kailash/base:latest
+
+ WORKDIR /app
+
+ COPY {workflow_name}.yaml /app/
+ COPY {workflow_name}.json /app/
+
+ CMD ["kailash", "run", "/app/{workflow_name}.yaml"]
+ """,
+                     "docker-compose.yml": """version: '3.8'
+
+ services:
+   {workflow_name}:
+     build: .
+     environment:
+       - WORKFLOW_NAME={workflow_name}
+       - WORKFLOW_VERSION={workflow_version}
+     volumes:
+       - ./data:/data
+       - ./output:/output
+ """,
+                     ".dockerignore": """*.log
+ __pycache__/
+ .git/
+ .gitignore
+ """,
+                 },
+             },
+         }
+
+     def get_template(self, template_name: str) -> Dict:
+         """Get an export template by name.
+
+         Args:
+             template_name: Name of the template
+
+         Returns:
+             Template dictionary
+
+         Raises:
+             ValueError: If template not found
+         """
+         if template_name not in self.export_templates:
+             raise ValueError(f"Unknown export template: {template_name}")
+         return self.export_templates[template_name]
+
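A sketch of how the export templates appear intended to be consumed; the `{workflow_name}` and `{namespace}` placeholders in the bundled files suggest `str.format` substitution, but the exporter that actually fills them lives elsewhere (kailash/utils/export.py), so treat this as an assumption:

```python
manager = TemplateManager()

# Unknown names raise ValueError("Unknown export template: ...").
k8s = manager.get_template("kubernetes")

# Presumed placeholder substitution for the bundled files.
deploy_script = k8s["files"]["deploy.sh"].format(
    workflow_name="example_workflow", namespace="default"
)
```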
+     def create_project(
+         self,
+         project_name: str,
+         template: str = "basic",
+         target_dir: Optional[str] = None,
+     ) -> None:
+         """Create a new project from a template.
+
+         Args:
+             project_name: Name of the project
+             template: Template to use
+             target_dir: Directory to create project in (defaults to current)
+         """
+         if template not in self.templates:
+             raise ValueError(f"Unknown template: {template}")
+
+         # Determine target directory
+         if target_dir:
+             project_root = Path(target_dir) / project_name
+         else:
+             project_root = Path.cwd() / project_name
+
+         # Create project structure
+         project_root.mkdir(parents=True, exist_ok=True)
+
+         # Apply template
+         self.templates[template](project_root, project_name)
+
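Calling the scaffolder directly looks like this (project name and target directory are illustrative):

```python
from kailash.utils.templates import TemplateManager

manager = TemplateManager()

# Creates /tmp/demo/api_demo and applies the api_workflow template;
# an unknown template name raises ValueError.
manager.create_project("api_demo", template="api_workflow", target_dir="/tmp/demo")
```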
+     def _basic_template(self, project_root: Path, project_name: str) -> None:
+         """Create a basic project template."""
+         # Create directory structure
+         (project_root / "workflows").mkdir(exist_ok=True)
+         (project_root / "nodes").mkdir(exist_ok=True)
+         (project_root / "data").mkdir(exist_ok=True)
+         (project_root / "outputs").mkdir(exist_ok=True)
+
+         # Create README
+         readme_content = f"""# {project_name}
+
+ A Kailash workflow project.
+
+ ## Structure
+
+ - `workflows/`: Workflow definitions
+ - `nodes/`: Custom node implementations
+ - `data/`: Input data files
+ - `outputs/`: Output files
+
+ ## Usage
+
+ ```bash
+ # Run a workflow
+ kailash run workflows/example_workflow.py
+
+ # Validate a workflow
+ kailash validate workflows/example_workflow.py
+
+ # Export to Kailash format
+ kailash export workflows/example_workflow.py outputs/workflow.yaml
+ ```
+
+ ## Examples
+
+ See `workflows/example_workflow.py` for a basic workflow example.
+ """
+         (project_root / "README.md").write_text(readme_content)
+
+         # Create example workflow
+         workflow_content = '''"""Example workflow for data processing."""
+ from kailash.workflow import Workflow
+ from kailash.nodes.data import CSVReader, CSVWriter
+ from kailash.nodes.transform import Filter, Sort
+ from kailash.nodes.logic import Aggregator
+
+ # Create workflow
+ workflow = Workflow(
+     name="example_workflow",
+     description="Process CSV data with filtering and aggregation"
+ )
+
+ # Add nodes
+ workflow.add_node("reader", CSVReader(), file_path="data/input.csv")
+ workflow.add_node("filter", Filter(), field="value", operator=">", value=100)
+ workflow.add_node("sort", Sort(), field="value", reverse=True)
+ workflow.add_node("aggregate", Aggregator(), group_by="category", operation="sum")
+ workflow.add_node("writer", CSVWriter(), file_path="outputs/results.csv")
+
+ # Connect nodes
+ workflow.connect("reader", "filter", {"data": "data"})
+ workflow.connect("filter", "sort", {"filtered_data": "data"})
+ workflow.connect("sort", "aggregate", {"sorted_data": "data"})
+ workflow.connect("aggregate", "writer", {"aggregated_data": "data"})
+
+ # Workflow is ready to run!
+ '''
+         (project_root / "workflows" / "example_workflow.py").write_text(
+             workflow_content
+         )
+
+         # Create example custom node
+         node_content = '''"""Custom node example."""
+ from typing import Any, Dict
+ from kailash.nodes.base import Node, NodeParameter, register_node
+
+
+ @register_node()
+ class CustomProcessor(Node):
+     """A custom data processing node."""
+
+     def get_parameters(self) -> Dict[str, NodeParameter]:
+         return {
+             "data": NodeParameter(
+                 name="data",
+                 type=list,
+                 required=True,
+                 description="Input data to process"
+             ),
+             "multiplier": NodeParameter(
+                 name="multiplier",
+                 type=float,
+                 required=False,
+                 default=1.0,
+                 description="Value multiplier"
+             )
+         }
+
+     def run(self, **kwargs) -> Dict[str, Any]:
+         data = kwargs["data"]
+         multiplier = kwargs.get("multiplier", 1.0)
+
+         # Process data
+         processed = []
+         for item in data:
+             if isinstance(item, dict) and "value" in item:
+                 new_item = item.copy()
+                 new_item["value"] = item["value"] * multiplier
+                 processed.append(new_item)
+             else:
+                 processed.append(item)
+
+         return {"processed_data": processed}
+ '''
+         (project_root / "nodes" / "custom_nodes.py").write_text(node_content)
+
+         # Create sample data
+         csv_content = """id,name,value,category
+ 1,Item A,150,Category 1
+ 2,Item B,95,Category 2
+ 3,Item C,200,Category 1
+ 4,Item D,75,Category 2
+ 5,Item E,180,Category 1
+ """
+         (project_root / "data" / "input.csv").write_text(csv_content)
+
+         # Create .gitignore
+         gitignore_content = """# Python
+ __pycache__/
+ *.py[cod]
+ *$py.class
+ *.so
+ .Python
+ env/
+ venv/
+ *.egg-info/
+
+ # Output files
+ outputs/
+ *.log
+
+ # IDE
+ .vscode/
+ .idea/
+ *.swp
+ *.swo
+
+ # OS
+ .DS_Store
+ Thumbs.db
+ """
+         (project_root / ".gitignore").write_text(gitignore_content)
+
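For orientation, the basic template yields this layout (project name illustrative):

```
my_project/
├── .gitignore
├── README.md
├── data/
│   └── input.csv
├── nodes/
│   └── custom_nodes.py
├── outputs/
└── workflows/
    └── example_workflow.py
```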
+     def _data_processing_template(self, project_root: Path, project_name: str) -> None:
+         """Create a data processing focused template."""
+         # Start with basic template
+         self._basic_template(project_root, project_name)
+
+         # Add data processing workflow
+         workflow_content = '''"""Data processing pipeline workflow."""
+ from kailash.workflow import Workflow
+ from kailash.nodes.data import CSVReader, JSONReader, JSONWriter
+ from kailash.nodes.transform import Filter, Map, Sort
+ from kailash.nodes.logic import Aggregator, Merge
+
+ # Create workflow
+ workflow = Workflow(
+     name="data_processing_pipeline",
+     description="Complex data processing with multiple transformations"
+ )
+
+ # Data ingestion
+ workflow.add_node("csv_reader", CSVReader(), file_path="data/sales_data.csv")
+ workflow.add_node("json_reader", JSONReader(), file_path="data/product_data.json")
+
+ # Transform data
+ workflow.add_node("filter_sales", Filter(), field="amount", operator=">", value=1000)
+ workflow.add_node("calculate_profit", Map(), field="amount", operation="multiply", value=0.2)
+ workflow.add_node("merge_data", Merge(), merge_type="merge_dict", key="product_id")
+
+ # Aggregate results
+ workflow.add_node("group_by_category", Aggregator(), group_by="category", operation="sum")
+ workflow.add_node("sort_results", Sort(), field="value", reverse=True)
+
+ # Export results
+ workflow.add_node("write_json", JSONWriter(), file_path="outputs/analysis_results.json")
+
+ # Connect pipeline
+ workflow.connect("csv_reader", "filter_sales", {"data": "data"})
+ workflow.connect("filter_sales", "calculate_profit", {"filtered_data": "data"})
+ workflow.connect("json_reader", "merge_data", {"data": "data2"})
+ workflow.connect("calculate_profit", "merge_data", {"mapped_data": "data1"})
+ workflow.connect("merge_data", "group_by_category", {"merged_data": "data"})
+ workflow.connect("group_by_category", "sort_results", {"aggregated_data": "data"})
+ workflow.connect("sort_results", "write_json", {"sorted_data": "data"})
+ '''
+         (project_root / "workflows" / "data_processing_pipeline.py").write_text(
+             workflow_content
+         )
+
+         # Add sample data files
+         sales_data = """product_id,date,amount,customer_id,category
+ 101,2024-01-01,1500,C001,Electronics
+ 102,2024-01-02,800,C002,Home
+ 101,2024-01-03,2200,C003,Electronics
+ 103,2024-01-04,1800,C004,Electronics
+ 102,2024-01-05,950,C005,Home
+ """
+         (project_root / "data" / "sales_data.csv").write_text(sales_data)
+
+         product_data = """{
+     "products": [
+         {"product_id": "101", "name": "Laptop", "category": "Electronics", "cost": 800},
+         {"product_id": "102", "name": "Chair", "category": "Home", "cost": 200},
+         {"product_id": "103", "name": "Monitor", "category": "Electronics", "cost": 400}
+     ]
+ }"""
+         (project_root / "data" / "product_data.json").write_text(product_data)
+
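The specialized templates layer files on top of the basic scaffold; for example (project name illustrative):

```python
manager = TemplateManager()
manager.create_project("sales_analysis", template="data_processing")

# Adds on top of the basic scaffold:
#   workflows/data_processing_pipeline.py
#   data/sales_data.csv
#   data/product_data.json
```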
+     def _ml_pipeline_template(self, project_root: Path, project_name: str) -> None:
+         """Create an ML pipeline focused template."""
+         # Start with basic template
+         self._basic_template(project_root, project_name)
+
+         # Add ML workflow
+         workflow_content = '''"""Machine learning pipeline workflow."""
+ from kailash.workflow import Workflow
+ from kailash.nodes.data import CSVReader, JSONWriter
+ from kailash.nodes.transform import Filter, Map
+ from kailash.nodes.logic import Merge
+ from kailash.nodes.ai import (
+     TextClassifier,
+     SentimentAnalyzer,
+     NamedEntityRecognizer,
+     TextSummarizer
+ )
+
+ # Create workflow
+ workflow = Workflow(
+     name="ml_pipeline",
+     description="Text analysis ML pipeline"
+ )
+
+ # Data ingestion
+ workflow.add_node("read_data", CSVReader(), file_path="data/text_data.csv")
+
+ # Preprocessing
+ workflow.add_node("extract_text", Map(), field="content")
+
+ # ML processing
+ workflow.add_node("sentiment", SentimentAnalyzer(), language="en")
+ workflow.add_node("classify", TextClassifier(),
+                   categories=["tech", "business", "health", "other"])
+ workflow.add_node("extract_entities", NamedEntityRecognizer(),
+                   entity_types=["PERSON", "ORGANIZATION", "LOCATION"])
+ workflow.add_node("summarize", TextSummarizer(), max_length=100)
+
+ # Combine results
+ workflow.add_node("merge_results", Merge(), merge_type="merge_dict")
+
+ # Export results
+ workflow.add_node("save_results", JSONWriter(), file_path="outputs/ml_results.json")
+
+ # Connect pipeline
+ workflow.connect("read_data", "extract_text", {"data": "data"})
+ workflow.connect("extract_text", "sentiment", {"mapped_data": "texts"})
+ workflow.connect("extract_text", "classify", {"mapped_data": "texts"})
+ workflow.connect("extract_text", "extract_entities", {"mapped_data": "texts"})
+ workflow.connect("extract_text", "summarize", {"mapped_data": "texts"})
+
+ # Merge all ML results
+ workflow.connect("sentiment", "merge_results", {"sentiments": "data1"})
+ workflow.connect("classify", "merge_results", {"classifications": "data2"})
+
+ workflow.connect("merge_results", "save_results", {"merged_data": "data"})
+ '''
+         (project_root / "workflows" / "ml_pipeline.py").write_text(workflow_content)
+
+         # Add sample text data
+         text_data = """id,title,content
+ 1,Tech Innovation,"The latest developments in artificial intelligence are transforming how businesses operate. Companies like Google and Microsoft are leading the charge with new AI models."
+ 2,Health Update,"Recent studies show that regular exercise and a balanced diet can significantly improve mental health. Researchers at Harvard University published these findings."
+ 3,Business News,"Apple announced record profits this quarter, driven by strong iPhone sales in Asian markets. CEO Tim Cook expressed optimism about future growth."
+ 4,Local News,"The mayor of New York announced new infrastructure plans for the city. The project will create thousands of jobs over the next five years."
+ """
+         (project_root / "data" / "text_data.csv").write_text(text_data)
+
+     def _api_workflow_template(self, project_root: Path, project_name: str) -> None:
+         """Create an API integration focused template."""
+         # Start with basic template
+         self._basic_template(project_root, project_name)
+
+         # Add API workflow
+         workflow_content = '''"""API integration workflow."""
+ from kailash.workflow import Workflow
+ from kailash.nodes.data import JSONReader, JSONWriter
+ from kailash.nodes.transform import Map, Filter
+ from kailash.nodes.logic import Conditional
+ from kailash.nodes.ai import ChatAgent, FunctionCallingAgent
+
+ # Create workflow
+ workflow = Workflow(
+     name="api_workflow",
+     description="Workflow with API integrations and AI agents"
+ )
+
+ # Read configuration
+ workflow.add_node("read_config", JSONReader(), file_path="data/api_config.json")
+
+ # Process with AI agent
+ workflow.add_node("chat_agent", ChatAgent(),
+                   model="default",
+                   system_prompt="You are a helpful API integration assistant.")
+
+ # Function calling for API operations
+ workflow.add_node("function_agent", FunctionCallingAgent(),
+                   available_functions=[
+                       {"name": "fetch_data", "description": "Fetch data from API"},
+                       {"name": "transform_data", "description": "Transform data format"},
+                       {"name": "validate_data", "description": "Validate API response"}
+                   ])
+
+ # Conditional routing based on response
+ workflow.add_node("check_status", Conditional(),
+                   condition_field="status",
+                   operator="==",
+                   value="success")
+
+ # Process successful responses
+ workflow.add_node("process_success", Map(), operation="identity")
+
+ # Handle errors
+ workflow.add_node("handle_error", Map(), operation="identity")
+
+ # Save results
+ workflow.add_node("save_results", JSONWriter(), file_path="outputs/api_results.json")
+
+ # Connect workflow
+ workflow.connect("read_config", "chat_agent", {"data": "messages"})
+ workflow.connect("chat_agent", "function_agent", {"responses": "query"})
+ workflow.connect("function_agent", "check_status", {"response": "data"})
+ workflow.connect("check_status", "process_success", {"result": "data"})
+ workflow.connect("check_status", "handle_error", {"result": "data"})
+ workflow.connect("process_success", "save_results", {"processed_data": "data"})
+ workflow.connect("handle_error", "save_results", {"error_data": "data"})
+ '''
+         (project_root / "workflows" / "api_workflow.py").write_text(workflow_content)
+
+         # Add API configuration
+         api_config = """{
+     "api_endpoints": {
+         "data_api": "https://api.example.com/data",
+         "auth_api": "https://api.example.com/auth"
+     },
+     "credentials": {
+         "api_key": "YOUR_API_KEY_HERE",
+         "secret": "YOUR_SECRET_HERE"
+     },
+     "messages": [
+         {"role": "user", "content": "Fetch the latest data from the API and process it"}
+     ]
+ }"""
+         (project_root / "data" / "api_config.json").write_text(api_config)
+
+
+ def create_project(name: str, template: str = "basic") -> None:
+     """Convenience function to create a project."""
+     manager = TemplateManager()
+     manager.create_project(name, template)
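The module-level helper wraps the same call for one-liners (project name illustrative):

```python
from kailash.utils.templates import create_project

# Equivalent to TemplateManager().create_project("text_insights", "ml_pipeline")
create_project("text_insights", template="ml_pipeline")
```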