agentinit-cli 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34) hide show
  1. agentinit_cli-0.1.0/PKG-INFO +133 -0
  2. agentinit_cli-0.1.0/README.md +108 -0
  3. agentinit_cli-0.1.0/agentinit/__init__.py +1 -0
  4. agentinit_cli-0.1.0/agentinit/cli.py +83 -0
  5. agentinit_cli-0.1.0/agentinit/generator.py +103 -0
  6. agentinit_cli-0.1.0/agentinit/templates/autogen/agents/base_agent.py.j2 +28 -0
  7. agentinit_cli-0.1.0/agentinit/templates/autogen/main.py.j2 +7 -0
  8. agentinit_cli-0.1.0/agentinit/templates/autogen/requirements.txt.j2 +5 -0
  9. agentinit_cli-0.1.0/agentinit/templates/autogen/tools/sample_tool.py.j2 +3 -0
  10. agentinit_cli-0.1.0/agentinit/templates/common/.env.example.j2 +2 -0
  11. agentinit_cli-0.1.0/agentinit/templates/common/Dockerfile.j2 +21 -0
  12. agentinit_cli-0.1.0/agentinit/templates/common/README.md.j2 +21 -0
  13. agentinit_cli-0.1.0/agentinit/templates/common/config.yaml.j2 +6 -0
  14. agentinit_cli-0.1.0/agentinit/templates/crewai/agents/base_agent.py.j2 +27 -0
  15. agentinit_cli-0.1.0/agentinit/templates/crewai/main.py.j2 +9 -0
  16. agentinit_cli-0.1.0/agentinit/templates/crewai/requirements.txt.j2 +5 -0
  17. agentinit_cli-0.1.0/agentinit/templates/crewai/tools/sample_tool.py.j2 +12 -0
  18. agentinit_cli-0.1.0/agentinit/templates/google_adk/agents/base_agent.py.j2 +11 -0
  19. agentinit_cli-0.1.0/agentinit/templates/google_adk/main.py.j2 +28 -0
  20. agentinit_cli-0.1.0/agentinit/templates/google_adk/requirements.txt.j2 +5 -0
  21. agentinit_cli-0.1.0/agentinit/templates/google_adk/tools/sample_tool.py.j2 +3 -0
  22. agentinit_cli-0.1.0/agentinit/templates/langgraph/agents/base_agent.py.j2 +32 -0
  23. agentinit_cli-0.1.0/agentinit/templates/langgraph/main.py.j2 +9 -0
  24. agentinit_cli-0.1.0/agentinit/templates/langgraph/requirements.txt.j2 +8 -0
  25. agentinit_cli-0.1.0/agentinit/templates/langgraph/tools/sample_tool.py.j2 +7 -0
  26. agentinit_cli-0.1.0/agentinit/templates/openai_agents/agents/base_agent.py.j2 +36 -0
  27. agentinit_cli-0.1.0/agentinit/templates/openai_agents/main.py.j2 +10 -0
  28. agentinit_cli-0.1.0/agentinit/templates/openai_agents/requirements.txt.j2 +8 -0
  29. agentinit_cli-0.1.0/agentinit/templates/openai_agents/tools/sample_tool.py.j2 +7 -0
  30. agentinit_cli-0.1.0/agentinit/templates/smolagents/agents/base_agent.py.j2 +52 -0
  31. agentinit_cli-0.1.0/agentinit/templates/smolagents/main.py.j2 +9 -0
  32. agentinit_cli-0.1.0/agentinit/templates/smolagents/requirements.txt.j2 +10 -0
  33. agentinit_cli-0.1.0/agentinit/templates/smolagents/tools/sample_tool.py.j2 +7 -0
  34. agentinit_cli-0.1.0/pyproject.toml +33 -0
@@ -0,0 +1,133 @@
1
+ Metadata-Version: 2.4
2
+ Name: agentinit-cli
3
+ Version: 0.1.0
4
+ Summary: A CLI tool to scaffold production-ready LLM agent projects
5
+ License: MIT
6
+ Keywords: llm,agents,scaffolding,langgraph,crewai,autogen,google-adk,cli
7
+ Author: Swapnil Bhattacharya
8
+ Author-email: your@email.com
9
+ Requires-Python: >=3.10,<4.0
10
+ Classifier: License :: OSI Approved :: MIT License
11
+ Classifier: Operating System :: OS Independent
12
+ Classifier: Programming Language :: Python :: 3
13
+ Classifier: Programming Language :: Python :: 3.10
14
+ Classifier: Programming Language :: Python :: 3.11
15
+ Classifier: Programming Language :: Python :: 3.12
16
+ Classifier: Programming Language :: Python :: 3.13
17
+ Classifier: Programming Language :: Python :: 3.14
18
+ Classifier: Topic :: Software Development :: Code Generators
19
+ Requires-Dist: click (>=8.1.0,<9.0.0)
20
+ Requires-Dist: jinja2 (>=3.1.0,<4.0.0)
21
+ Requires-Dist: rich (>=13.0.0,<14.0.0)
22
+ Project-URL: Homepage, https://github.com/NorthCommits/agentinit
23
+ Project-URL: Repository, https://github.com/NorthCommits/agentinit
24
+ Description-Content-Type: text/markdown
25
+
26
+ # agentinit
27
+
28
+ A CLI tool to scaffold production-ready LLM agent projects in seconds.
29
+
30
+ Stop copy-pasting boilerplate. Run one command and get a fully structured, framework-specific agent project ready to run.
31
+
32
+ ## Install
33
+
34
+ ```bash
35
+ pip install agentinit
36
+ ```
37
+
38
+ ## Usage
39
+
40
+ ```bash
41
+ agentinit init my-project --framework langgraph --llm openai
42
+ ```
43
+
44
+ ## Supported Frameworks
45
+
46
+ | Framework | Description |
47
+ |---|---|
48
+ | `langgraph` | LangChain's graph-based agent framework |
49
+ | `crewai` | Multi-agent role-based framework |
50
+ | `autogen` | Microsoft's conversational agent framework |
51
+ | `google_adk` | Google's Agent Development Kit |
52
+ | `openai_agents` | OpenAI's official agents SDK |
53
+ | `smolagents` | HuggingFace's lightweight agent framework |
54
+
55
+ ## Supported LLM Providers
56
+
57
+ | Provider | Env Variable |
58
+ |---|---|
59
+ | `openai` | `OPENAI_API_KEY` |
60
+ | `anthropic` | `ANTHROPIC_API_KEY` |
61
+ | `groq` | `GROQ_API_KEY` |
62
+ | `azure` | `AZURE_OPENAI_API_KEY` + `AZURE_OPENAI_ENDPOINT` |
63
+ | `bedrock` | `AWS_ACCESS_KEY_ID` + `AWS_SECRET_ACCESS_KEY` |
64
+ | `gemini` | `GOOGLE_API_KEY` |
65
+
66
+ ## Generated Project Structure
67
+
68
+ ```
69
+ my-project/
70
+ ├── agents/
71
+ │ └── base_agent.py # framework-specific agent logic
72
+ ├── tools/
73
+ │ └── sample_tool.py # sample tool stub
74
+ ├── config/
75
+ │ └── config.yaml # llm and project config
76
+ ├── Dockerfile # ready to containerize
77
+ ├── main.py # entry point
78
+ ├── .env.example # environment variable template
79
+ └── requirements.txt # dependencies for chosen framework
80
+ ```
81
+
82
+ ## Commands
83
+
84
+ ### Scaffold a new project
85
+ ```bash
86
+ agentinit init <project-name> --framework <framework> --llm <provider>
87
+ ```
88
+
89
+ ### Add a new agent to an existing project
90
+ ```bash
91
+ cd my-project
92
+ agentinit add-agent researcher --framework langgraph
93
+ ```
94
+
95
+ ### List all supported frameworks and providers
96
+ ```bash
97
+ agentinit list-frameworks
98
+ ```
99
+
100
+ ## Examples
101
+
102
+ ```bash
103
+ # LangGraph with OpenAI
104
+ agentinit init my-agent --framework langgraph --llm openai
105
+
106
+ # CrewAI with Anthropic
107
+ agentinit init my-crew --framework crewai --llm anthropic
108
+
109
+ # AutoGen with Groq
110
+ agentinit init my-autogen --framework autogen --llm groq
111
+
112
+ # Google ADK with Gemini
113
+ agentinit init my-adk --framework google_adk --llm gemini
114
+
115
+ # OpenAI Agents SDK with Azure
116
+ agentinit init my-openai-agent --framework openai_agents --llm azure
117
+
118
+ # Smolagents with Bedrock
119
+ agentinit init my-smol --framework smolagents --llm bedrock
120
+ ```
121
+
122
+ ## Getting Started After Scaffolding
123
+
124
+ ```bash
125
+ cd my-project
126
+ cp .env.example .env # add your API keys
127
+ pip install -r requirements.txt
128
+ python main.py
129
+ ```
130
+
131
+ ## License
132
+
133
+ MIT
@@ -0,0 +1,108 @@
1
+ # agentinit
2
+
3
+ A CLI tool to scaffold production-ready LLM agent projects in seconds.
4
+
5
+ Stop copy-pasting boilerplate. Run one command and get a fully structured, framework-specific agent project ready to run.
6
+
7
+ ## Install
8
+
9
+ ```bash
10
+ pip install agentinit
11
+ ```
12
+
13
+ ## Usage
14
+
15
+ ```bash
16
+ agentinit init my-project --framework langgraph --llm openai
17
+ ```
18
+
19
+ ## Supported Frameworks
20
+
21
+ | Framework | Description |
22
+ |---|---|
23
+ | `langgraph` | LangChain's graph-based agent framework |
24
+ | `crewai` | Multi-agent role-based framework |
25
+ | `autogen` | Microsoft's conversational agent framework |
26
+ | `google_adk` | Google's Agent Development Kit |
27
+ | `openai_agents` | OpenAI's official agents SDK |
28
+ | `smolagents` | HuggingFace's lightweight agent framework |
29
+
30
+ ## Supported LLM Providers
31
+
32
+ | Provider | Env Variable |
33
+ |---|---|
34
+ | `openai` | `OPENAI_API_KEY` |
35
+ | `anthropic` | `ANTHROPIC_API_KEY` |
36
+ | `groq` | `GROQ_API_KEY` |
37
+ | `azure` | `AZURE_OPENAI_API_KEY` + `AZURE_OPENAI_ENDPOINT` |
38
+ | `bedrock` | `AWS_ACCESS_KEY_ID` + `AWS_SECRET_ACCESS_KEY` |
39
+ | `gemini` | `GOOGLE_API_KEY` |
40
+
41
+ ## Generated Project Structure
42
+
43
+ ```
44
+ my-project/
45
+ ├── agents/
46
+ │ └── base_agent.py # framework-specific agent logic
47
+ ├── tools/
48
+ │ └── sample_tool.py # sample tool stub
49
+ ├── config/
50
+ │ └── config.yaml # llm and project config
51
+ ├── Dockerfile # ready to containerize
52
+ ├── main.py # entry point
53
+ ├── .env.example # environment variable template
54
+ └── requirements.txt # dependencies for chosen framework
55
+ ```
56
+
57
+ ## Commands
58
+
59
+ ### Scaffold a new project
60
+ ```bash
61
+ agentinit init <project-name> --framework <framework> --llm <provider>
62
+ ```
63
+
64
+ ### Add a new agent to an existing project
65
+ ```bash
66
+ cd my-project
67
+ agentinit add-agent researcher --framework langgraph
68
+ ```
69
+
70
+ ### List all supported frameworks and providers
71
+ ```bash
72
+ agentinit list-frameworks
73
+ ```
74
+
75
+ ## Examples
76
+
77
+ ```bash
78
+ # LangGraph with OpenAI
79
+ agentinit init my-agent --framework langgraph --llm openai
80
+
81
+ # CrewAI with Anthropic
82
+ agentinit init my-crew --framework crewai --llm anthropic
83
+
84
+ # AutoGen with Groq
85
+ agentinit init my-autogen --framework autogen --llm groq
86
+
87
+ # Google ADK with Gemini
88
+ agentinit init my-adk --framework google_adk --llm gemini
89
+
90
+ # OpenAI Agents SDK with Azure
91
+ agentinit init my-openai-agent --framework openai_agents --llm azure
92
+
93
+ # Smolagents with Bedrock
94
+ agentinit init my-smol --framework smolagents --llm bedrock
95
+ ```
96
+
97
+ ## Getting Started After Scaffolding
98
+
99
+ ```bash
100
+ cd my-project
101
+ cp .env.example .env # add your API keys
102
+ pip install -r requirements.txt
103
+ python main.py
104
+ ```
105
+
106
+ ## License
107
+
108
+ MIT
@@ -0,0 +1 @@
1
+ __version__ = "0.1.0"
@@ -0,0 +1,83 @@
1
+ import click
2
+ from rich.console import Console
3
+ from rich.table import Table
4
+ from agentinit.generator import generate_project, add_agent
5
+
6
+ console = Console()
7
+
8
+ SUPPORTED_FRAMEWORKS = ["langgraph", "crewai", "autogen", "google_adk", "openai_agents", "smolagents"]
9
+ SUPPORTED_LLMS = ["openai", "anthropic", "groq", "azure", "bedrock", "gemini"]
10
+
11
+
12
@click.group()
def app():
    """agentinit - scaffold LLM agent projects in seconds"""
16
+
17
+
18
@app.command("init")
@click.argument("project_name")
@click.option("--framework", "-f", required=True, help="Agent framework to use")
@click.option("--llm", "-l", required=True, help="LLM provider to use")
def init(project_name, framework, llm):
    """Scaffold a new LLM agent project."""
    # Validate both user choices up front so a typo produces a short,
    # readable message rather than a generator stack trace.
    checks = (
        ("Framework", framework, SUPPORTED_FRAMEWORKS),
        ("LLM provider", llm, SUPPORTED_LLMS),
    )
    for label, value, supported in checks:
        if value not in supported:
            console.print(f"[red]{label} '{value}' is not supported.[/red]")
            console.print(f"Supported: {', '.join(supported)}")
            raise click.Abort()

    console.print(f"\n[bold green]Scaffolding project:[/bold green] {project_name}")
    console.print(f" Framework : [cyan]{framework}[/cyan]")
    console.print(f" LLM : [cyan]{llm}[/cyan]\n")

    # generate_project prints its own error details; we only abort here.
    if not generate_project(project_name, framework, llm):
        raise click.Abort()

    console.print(f"\n[bold green]Done![/bold green] Your project is ready at [cyan]./{project_name}[/cyan]")
    console.print("\nNext steps:")
    console.print(f" [yellow]cd {project_name}[/yellow]")
    console.print(" [yellow]cp .env.example .env[/yellow]")
    console.print(" [yellow]pip install -r requirements.txt[/yellow]")
    console.print(" [yellow]python main.py[/yellow]\n")
49
+
50
+
51
@app.command("add-agent")
@click.argument("agent_name")
@click.option("--framework", "-f", required=True, help="Framework of the existing project")
def add_agent_cmd(agent_name, framework):
    """Add a new agent to an existing scaffolded project."""
    # Reject unknown frameworks before touching the filesystem.
    if framework not in SUPPORTED_FRAMEWORKS:
        console.print(f"[red]Framework '{framework}' is not supported.[/red]")
        console.print(f"Supported: {', '.join(SUPPORTED_FRAMEWORKS)}")
        raise click.Abort()

    console.print(f"\n[bold green]Adding agent:[/bold green] {agent_name}")
    console.print(f" Framework : [cyan]{framework}[/cyan]\n")

    # add_agent reports its own failure reason; abort without extra noise.
    if not add_agent(agent_name, framework):
        raise click.Abort()

    console.print(f"\n[bold green]Done![/bold green] Agent [cyan]agents/{agent_name}.py[/cyan] created.\n")
70
+
71
+
72
@app.command("list-frameworks")
def list_frameworks():
    """List all supported frameworks and LLM providers."""
    from itertools import zip_longest  # stdlib; local to avoid touching module imports

    table = Table(title="Supported Frameworks & LLM Providers")
    table.add_column("Frameworks", style="cyan")
    table.add_column("LLM Providers", style="green")

    # zip() silently drops trailing rows when the two lists differ in
    # length (e.g. after a new framework is added without a new provider);
    # zip_longest pads the shorter column with blanks instead.
    for fw, llm in zip_longest(SUPPORTED_FRAMEWORKS, SUPPORTED_LLMS, fillvalue=""):
        table.add_row(fw, llm)

    console.print(table)
@@ -0,0 +1,103 @@
1
+ from pathlib import Path
2
+ from jinja2 import Environment, FileSystemLoader
3
+ from rich.console import Console
4
+
5
+ console = Console()
6
+
7
+ TEMPLATES_DIR = Path(__file__).parent / "templates"
8
+
9
+ LLM_CONFIG = {
10
+ "openai": {"api_key_env": "OPENAI_API_KEY", "model": "gpt-4o"},
11
+ "anthropic": {"api_key_env": "ANTHROPIC_API_KEY", "model": "claude-sonnet-4-20250514"},
12
+ "groq": {"api_key_env": "GROQ_API_KEY", "model": "llama3-70b-8192"},
13
+ "azure": {"api_key_env": "AZURE_OPENAI_API_KEY", "model": "gpt-4o"},
14
+ "bedrock": {"api_key_env": "AWS_ACCESS_KEY_ID", "model": "anthropic.claude-3-5-sonnet-20241022-v2:0"},
15
+ "gemini": {"api_key_env": "GOOGLE_API_KEY", "model": "gemini-2.0-flash"},
16
+ }
17
+
18
+ FRAMEWORK_FILES = [
19
+ ("main.py.j2", "main.py"),
20
+ ("requirements.txt.j2", "requirements.txt"),
21
+ ("agents/base_agent.py.j2", "agents/base_agent.py"),
22
+ ("tools/sample_tool.py.j2", "tools/sample_tool.py"),
23
+ ]
24
+
25
+ COMMON_FILES = [
26
+ ("config.yaml.j2", "config/config.yaml"),
27
+ ("README.md.j2", "README.md"),
28
+ ("Dockerfile.j2", "Dockerfile"),
29
+ ]
30
+
31
+
32
def write_file(output_path: Path, content: str) -> None:
    """Write *content* to *output_path*, creating parent directories as needed.

    Encodes as UTF-8 explicitly: ``Path.write_text`` defaults to the
    locale's preferred encoding, which fails on e.g. Windows cp1252 for
    the box-drawing characters the README templates contain.
    """
    output_path.parent.mkdir(parents=True, exist_ok=True)
    output_path.write_text(content, encoding="utf-8")
    console.print(f" [green]created[/green] {output_path}")
36
+
37
+
38
def generate_project(project_name: str, framework: str, llm: str) -> bool:
    """Scaffold a new agent project directory at ``./<project_name>``.

    Renders the framework-specific templates, the shared "common"
    templates, and a provider-specific ``.env.example``.

    Returns True on success; on any precondition failure (directory
    exists, unknown framework/provider) prints an error and returns False.
    """
    project_path = Path(project_name)

    if project_path.exists():
        console.print(f"[red]Directory '{project_name}' already exists. Aborting.[/red]")
        return False

    # Fail fast with a readable message instead of a Jinja TemplateNotFound
    # traceback when the framework has no template directory.
    framework_dir = TEMPLATES_DIR / framework
    if not framework_dir.is_dir():
        console.print(f"[red]No templates found for framework '{framework}'.[/red]")
        return False

    # Guard the LLM_CONFIG lookup so a bad provider is a clean error,
    # not a KeyError (the CLI validates, but this is a public function).
    if llm not in LLM_CONFIG:
        console.print(f"[red]Unknown LLM provider '{llm}'.[/red]")
        return False

    context = {
        "project_name": project_name,
        "framework": framework,
        "llm_provider": llm,
        **LLM_CONFIG[llm],  # adds api_key_env and model
    }

    fw_env = Environment(loader=FileSystemLoader(str(framework_dir)))
    for template_file, output_file in FRAMEWORK_FILES:
        template = fw_env.get_template(template_file)
        write_file(project_path / output_file, template.render(**context))

    common_env = Environment(loader=FileSystemLoader(str(TEMPLATES_DIR / "common")))
    for template_file, output_file in COMMON_FILES:
        template = common_env.get_template(template_file)
        write_file(project_path / output_file, template.render(**context))

    write_file(project_path / ".env.example", build_env_file(llm))
    return True
65
+
66
+
67
def build_env_file(llm: str) -> str:
    """Return the ``.env.example`` contents for the given LLM provider.

    Azure and Bedrock need multiple variables; every other provider gets
    a single placeholder line for its API key env var from LLM_CONFIG.
    """
    if llm == "azure":
        entries = [
            "AZURE_OPENAI_API_KEY=your_api_key_here",
            "AZURE_OPENAI_ENDPOINT=https://your-resource.openai.azure.com/",
        ]
    elif llm == "bedrock":
        entries = [
            "AWS_ACCESS_KEY_ID=your_access_key_here",
            "AWS_SECRET_ACCESS_KEY=your_secret_key_here",
            "AWS_REGION=us-east-1",
        ]
    else:
        entries = [f"{LLM_CONFIG[llm]['api_key_env']}=your_api_key_here"]
    return "\n".join(entries) + "\n"
79
+
80
+
81
def add_agent(agent_name: str, framework: str, llm: str = "openai") -> bool:
    """Render a new agent file at ``agents/<agent_name>.py``.

    Must be run from inside a scaffolded project (an ``agents/`` folder
    must already exist). *llm* selects which provider config is passed to
    the template; it defaults to "openai", preserving the previous
    hard-coded behavior for existing callers.

    Returns True on success, False (after printing an error) otherwise.
    """
    agents_path = Path("agents")

    if not agents_path.exists():
        console.print("[red]No 'agents/' folder found. Are you inside a scaffolded project?[/red]")
        return False

    output_path = agents_path / f"{agent_name}.py"
    if output_path.exists():
        console.print(f"[red]Agent '{agent_name}.py' already exists. Aborting.[/red]")
        return False

    # Guard the LLM_CONFIG lookup so a bad provider is a clean error.
    if llm not in LLM_CONFIG:
        console.print(f"[red]Unknown LLM provider '{llm}'.[/red]")
        return False

    template_dir = TEMPLATES_DIR / framework / "agents"
    if not (template_dir / "base_agent.py.j2").exists():
        console.print(f"[red]No template found for framework '{framework}'.[/red]")
        return False

    env = Environment(loader=FileSystemLoader(str(template_dir)))
    template = env.get_template("base_agent.py.j2")
    content = template.render(
        project_name=agent_name,
        framework=framework,
        llm_provider=llm,
        **LLM_CONFIG[llm],
    )
    write_file(output_path, content)
    return True
@@ -0,0 +1,28 @@
1
+ import os
2
+ import autogen
3
+
4
+ config_list = [
5
+ {
6
+ "model": "{{ model }}",
7
+ "api_key": os.getenv("{{ api_key_env }}"),
8
+ {% if llm_provider == "anthropic" %}"api_type": "anthropic",{% endif %}
9
+ {% if llm_provider == "groq" %}"base_url": "https://api.groq.com/openai/v1",{% endif %}
10
+ }
11
+ ]
12
+
13
+ llm_config = {"config_list": config_list}
14
+
15
+ assistant = autogen.AssistantAgent(
16
+ name="assistant",
17
+ llm_config=llm_config,
18
+ )
19
+
20
+ user_proxy = autogen.UserProxyAgent(
21
+ name="user_proxy",
22
+ human_input_mode="NEVER",
23
+ max_consecutive_auto_reply=1,
24
+ )
25
+
26
+
27
+ def run_agent(message: str):
28
+ user_proxy.initiate_chat(assistant, message=message)
@@ -0,0 +1,7 @@
1
+ from dotenv import load_dotenv
2
+ from agents.base_agent import run_agent
3
+
4
+ load_dotenv()
5
+
6
+ if __name__ == "__main__":
7
+ run_agent("Hello, agent!")
@@ -0,0 +1,5 @@
1
+ pyautogen
2
+ {% if llm_provider == "openai" %}openai{% endif %}
3
+ {% if llm_provider == "anthropic" %}anthropic{% endif %}
4
+ {% if llm_provider == "groq" %}groq{% endif %}
5
+ python-dotenv
@@ -0,0 +1,3 @@
1
+ def sample_tool(query: str) -> str:
2
+ """A sample tool stub. Replace with your real logic."""
3
+ return f"Tool received: {query}"
@@ -0,0 +1,2 @@
1
+ # Copy to .env and fill in values for {{ project_name }}
2
+ # API_KEYS_AND_SECRETS=
@@ -0,0 +1,21 @@
1
+ FROM python:3.11-slim
2
+
3
+ WORKDIR /app
4
+
5
+ COPY requirements.txt .
6
+ RUN pip install --no-cache-dir -r requirements.txt
7
+
8
+ COPY . .
9
+
10
+ ENV {{ api_key_env }}=""
11
+ {% if llm_provider == "azure" %}
12
+ ENV AZURE_OPENAI_ENDPOINT=""
13
+ ENV AZURE_OPENAI_API_KEY=""
14
+ {% endif %}
15
+ {% if llm_provider == "bedrock" %}
16
+ ENV AWS_ACCESS_KEY_ID=""
17
+ ENV AWS_SECRET_ACCESS_KEY=""
18
+ ENV AWS_REGION="us-east-1"
19
+ {% endif %}
20
+
21
+ CMD ["python", "main.py"]
@@ -0,0 +1,21 @@
1
+ # {{ project_name }}
2
+
3
+ Scaffolded with agentinit.
4
+
5
+ ## Stack
6
+ - Framework: {{ framework }}
7
+ - LLM Provider: {{ llm_provider }}
8
+ - Model: {{ model }}
9
+
10
+ ## Getting started
11
+ cp .env.example .env
12
+ pip install -r requirements.txt
13
+ python main.py
14
+
15
+ ## Structure
16
+ ├── agents/base_agent.py
17
+ ├── tools/sample_tool.py
18
+ ├── config/config.yaml
19
+ ├── main.py
20
+ ├── .env.example
21
+ └── requirements.txt
@@ -0,0 +1,6 @@
1
+ project_name: {{ project_name }}
2
+ framework: {{ framework }}
3
+ llm:
4
+ provider: {{ llm_provider }}
5
+ model: {{ model }}
6
+ api_key_env: {{ api_key_env }}
@@ -0,0 +1,27 @@
1
+ import os
2
+ from crewai import Agent, Task, Crew, Process
3
+ from tools.sample_tool import sample_tool
4
+
5
+ base_agent = Agent(
6
+ role="Assistant",
7
+ goal="Help the user accomplish their task",
8
+ backstory="You are a helpful AI assistant.",
9
+ tools=[sample_tool],
10
+ verbose=True,
11
+ llm="{{ llm_provider }}/{{ model }}",
12
+ )
13
+
14
+ task = Task(
15
+ description="Greet the user and introduce yourself.",
16
+ expected_output="A friendly greeting message.",
17
+ agent=base_agent,
18
+ )
19
+
20
+
21
+ def build_crew() -> Crew:
22
+ return Crew(
23
+ agents=[base_agent],
24
+ tasks=[task],
25
+ process=Process.sequential,
26
+ verbose=True,
27
+ )
@@ -0,0 +1,9 @@
1
+ from dotenv import load_dotenv
2
+ from agents.base_agent import build_crew
3
+
4
+ load_dotenv()
5
+
6
+ if __name__ == "__main__":
7
+ crew = build_crew()
8
+ result = crew.kickoff()
9
+ print(result)
@@ -0,0 +1,5 @@
1
+ crewai
2
+ {% if llm_provider == "openai" %}openai{% endif %}
3
+ {% if llm_provider == "anthropic" %}anthropic{% endif %}
4
+ {% if llm_provider == "groq" %}groq{% endif %}
5
+ python-dotenv
@@ -0,0 +1,12 @@
1
+ from crewai.tools import BaseTool
2
+
3
+
4
+ class SampleTool(BaseTool):
5
+ name: str = "Sample Tool"
6
+ description: str = "A sample tool stub. Replace with your real logic."
7
+
8
+ def _run(self, query: str) -> str:
9
+ return f"Tool received: {query}"
10
+
11
+
12
+ sample_tool = SampleTool()
@@ -0,0 +1,11 @@
1
+ import os
2
+ from google.adk.agents import Agent
3
+ from tools.sample_tool import sample_tool
4
+
5
+ root_agent = Agent(
6
+ name="{{ project_name }}_agent",
7
+ model="{{ model }}",
8
+ description="A helpful AI assistant.",
9
+ instruction="You are a helpful assistant. Answer the user's questions clearly.",
10
+ tools=[sample_tool],
11
+ )
@@ -0,0 +1,28 @@
1
+ import asyncio
2
+ from dotenv import load_dotenv
3
+ from agents.base_agent import root_agent
4
+ from google.adk.runners import Runner
5
+ from google.adk.sessions import InMemorySessionService
6
+ from google.genai import types
7
+
8
+ load_dotenv()
9
+
10
+ APP_NAME = "{{ project_name }}"
11
+ USER_ID = "user_01"
12
+ SESSION_ID = "session_01"
13
+
14
+
15
+ async def main():
16
+ session_service = InMemorySessionService()
17
+ await session_service.create_session(
18
+ app_name=APP_NAME, user_id=USER_ID, session_id=SESSION_ID
19
+ )
20
+ runner = Runner(agent=root_agent, app_name=APP_NAME, session_service=session_service)
21
+ content = types.Content(role="user", parts=[types.Part(text="Hello, agent!")])
22
+ async for event in runner.run_async(user_id=USER_ID, session_id=SESSION_ID, new_message=content):
23
+ if event.is_final_response():
24
+ print(event.response.text)
25
+
26
+
27
+ if __name__ == "__main__":
28
+ asyncio.run(main())
@@ -0,0 +1,5 @@
1
+ google-adk
2
+ {% if llm_provider == "openai" %}openai{% endif %}
3
+ {% if llm_provider == "anthropic" %}anthropic{% endif %}
4
+ {% if llm_provider == "groq" %}groq{% endif %}
5
+ python-dotenv
@@ -0,0 +1,3 @@
1
+ def sample_tool(query: str) -> str:
2
+ """A sample tool stub. Replace with your real logic."""
3
+ return f"Tool received: {query}"
@@ -0,0 +1,32 @@
1
+ import os
2
+ from langgraph.graph import StateGraph, END
3
+ from typing import TypedDict, List
4
+
5
+ {% if llm_provider == "openai" %}
6
+ from langchain_openai import ChatOpenAI
7
+ llm = ChatOpenAI(model="{{ model }}", api_key=os.getenv("{{ api_key_env }}"))
8
+ {% elif llm_provider == "anthropic" %}
9
+ from langchain_anthropic import ChatAnthropic
10
+ llm = ChatAnthropic(model="{{ model }}", api_key=os.getenv("{{ api_key_env }}"))
11
+ {% elif llm_provider == "groq" %}
12
+ from langchain_groq import ChatGroq
13
+ llm = ChatGroq(model="{{ model }}", api_key=os.getenv("{{ api_key_env }}"))
14
+ {% endif %}
15
+
16
+
17
+ class AgentState(TypedDict):
18
+ messages: List[dict]
19
+
20
+
21
+ def agent_node(state: AgentState) -> AgentState:
22
+ response = llm.invoke(state["messages"])
23
+ state["messages"].append({"role": "assistant", "content": response.content})
24
+ return state
25
+
26
+
27
+ def build_graph() -> StateGraph:
28
+ graph = StateGraph(AgentState)
29
+ graph.add_node("agent", agent_node)
30
+ graph.set_entry_point("agent")
31
+ graph.add_edge("agent", END)
32
+ return graph.compile()
@@ -0,0 +1,9 @@
1
+ from dotenv import load_dotenv
2
+ from agents.base_agent import build_graph
3
+
4
+ load_dotenv()
5
+
6
+ if __name__ == "__main__":
7
+ graph = build_graph()
8
+ result = graph.invoke({"messages": [{"role": "user", "content": "Hello, agent!"}]})
9
+ print(result)
@@ -0,0 +1,8 @@
1
+ langgraph
2
+ {% if llm_provider == "openai" %}openai
3
+ langchain-openai{% endif %}
4
+ {% if llm_provider == "anthropic" %}anthropic
5
+ langchain-anthropic{% endif %}
6
+ {% if llm_provider == "groq" %}groq
7
+ langchain-groq{% endif %}
8
+ python-dotenv
@@ -0,0 +1,7 @@
1
+ from langchain_core.tools import tool
2
+
3
+
4
+ @tool
5
+ def sample_tool(query: str) -> str:
6
+ """A sample tool stub. Replace with your real logic."""
7
+ return f"Tool received: {query}"
@@ -0,0 +1,36 @@
1
+ import os
2
+ from agents import Agent
3
+ from tools.sample_tool import sample_tool
4
+
5
+ {% if llm_provider == "openai" %}
6
+ from openai import AsyncOpenAI
7
+ client = AsyncOpenAI(api_key=os.getenv("{{ api_key_env }}"))
8
+ {% elif llm_provider == "azure" %}
9
+ from openai import AsyncAzureOpenAI
10
+ client = AsyncAzureOpenAI(
11
+ api_key=os.getenv("AZURE_OPENAI_API_KEY"),
12
+ azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"),
13
+ api_version="2024-02-01",
14
+ )
15
+ {% elif llm_provider == "gemini" %}
16
+ from openai import AsyncOpenAI
17
+ client = AsyncOpenAI(
18
+ api_key=os.getenv("{{ api_key_env }}"),
19
+ base_url="https://generativelanguage.googleapis.com/v1beta/openai/",
20
+ )
21
+ {% elif llm_provider == "bedrock" %}
22
+ from openai import AsyncOpenAI
23
+ client = AsyncOpenAI(
24
+ api_key=os.getenv("{{ api_key_env }}"),
25
+ base_url="https://bedrock-runtime.{{ region }}.amazonaws.com",
26
+ )
27
+ {% endif %}
28
+
29
+
30
+ def create_agent() -> Agent:
31
+ return Agent(
32
+ name="{{ project_name }}_agent",
33
+ instructions="You are a helpful assistant. Answer the user's questions clearly.",
34
+ model="{{ model }}",
35
+ tools=[sample_tool],
36
+ )
@@ -0,0 +1,10 @@
1
+ from dotenv import load_dotenv
2
+ from agents.base_agent import create_agent
3
+ from agents import Runner
4
+
5
+ load_dotenv()
6
+
7
+ if __name__ == "__main__":
8
+ agent = create_agent()
9
+ result = Runner.run_sync(agent, "Hello, agent!")
10
+ print(result.final_output)
@@ -0,0 +1,8 @@
1
+ openai-agents
2
+ {% if llm_provider == "openai" %}openai{% endif %}
3
+ {% if llm_provider == "azure" %}openai
4
+ azure-identity{% endif %}
5
+ {% if llm_provider == "bedrock" %}boto3
6
+ anthropic[bedrock]{% endif %}
7
+ {% if llm_provider == "gemini" %}google-generativeai{% endif %}
8
+ python-dotenv
@@ -0,0 +1,7 @@
1
+ from agents import function_tool
2
+
3
+
4
+ @function_tool
5
+ def sample_tool(query: str) -> str:
6
+ """A sample tool stub. Replace with your real logic."""
7
+ return f"Tool received: {query}"
@@ -0,0 +1,52 @@
1
+ import os
2
+ from smolagents import CodeAgent
3
+ from tools.sample_tool import sample_tool
4
+
5
+ {% if llm_provider == "openai" %}
6
+ from smolagents import OpenAIServerModel
7
+ model = OpenAIServerModel(
8
+ model_id="{{ model }}",
9
+ api_key=os.getenv("{{ api_key_env }}"),
10
+ )
11
+ {% elif llm_provider == "anthropic" %}
12
+ from smolagents import LiteLLMModel
13
+ model = LiteLLMModel(
14
+ model_id="anthropic/{{ model }}",
15
+ api_key=os.getenv("{{ api_key_env }}"),
16
+ )
17
+ {% elif llm_provider == "groq" %}
18
+ from smolagents import LiteLLMModel
19
+ model = LiteLLMModel(
20
+ model_id="groq/{{ model }}",
21
+ api_key=os.getenv("{{ api_key_env }}"),
22
+ )
23
+ {% elif llm_provider == "azure" %}
24
+ from smolagents import OpenAIServerModel
25
+ model = OpenAIServerModel(
26
+ model_id="{{ model }}",
27
+ api_base=os.getenv("AZURE_OPENAI_ENDPOINT"),
28
+ api_key=os.getenv("AZURE_OPENAI_API_KEY"),
29
+ )
30
+ {% elif llm_provider == "bedrock" %}
31
+ from smolagents import LiteLLMModel
32
+ model = LiteLLMModel(
33
+ model_id="bedrock/{{ model }}",
34
+ aws_access_key_id=os.getenv("AWS_ACCESS_KEY_ID"),
35
+ aws_secret_access_key=os.getenv("AWS_SECRET_ACCESS_KEY"),
36
+ aws_region_name=os.getenv("AWS_REGION", "us-east-1"),
37
+ )
38
+ {% elif llm_provider == "gemini" %}
39
+ from smolagents import LiteLLMModel
40
+ model = LiteLLMModel(
41
+ model_id="gemini/{{ model }}",
42
+ api_key=os.getenv("{{ api_key_env }}"),
43
+ )
44
+ {% endif %}
45
+
46
+
47
+ def create_agent() -> CodeAgent:
48
+ return CodeAgent(
49
+ tools=[sample_tool],
50
+ model=model,
51
+ max_steps=5,
52
+ )
@@ -0,0 +1,9 @@
1
+ from dotenv import load_dotenv
2
+ from agents.base_agent import create_agent
3
+
4
+ load_dotenv()
5
+
6
+ if __name__ == "__main__":
7
+ agent = create_agent()
8
+ result = agent.run("Hello, agent!")
9
+ print(result)
@@ -0,0 +1,10 @@
1
+ smolagents
2
+ {% if llm_provider == "openai" %}openai{% endif %}
3
+ {% if llm_provider == "anthropic" %}anthropic{% endif %}
4
+ {% if llm_provider == "groq" %}groq{% endif %}
5
+ {% if llm_provider == "azure" %}openai
6
+ azure-identity{% endif %}
7
+ {% if llm_provider == "bedrock" %}boto3
8
+ anthropic[bedrock]{% endif %}
9
+ {% if llm_provider == "gemini" %}google-generativeai{% endif %}
10
+ python-dotenv
@@ -0,0 +1,7 @@
1
+ from smolagents import tool
2
+
3
+
4
+ @tool
5
+ def sample_tool(query: str) -> str:
6
+ """A sample tool stub. Replace with your real logic."""
7
+ return f"Tool received: {query}"
@@ -0,0 +1,33 @@
1
+ [tool.poetry]
2
+ name = "agentinit-cli"
3
+ version = "0.1.0"
4
+ description = "A CLI tool to scaffold production-ready LLM agent projects"
5
+ authors = ["Swapnil Bhattacharya <your@email.com>"]
6
+ readme = "README.md"
7
+ license = "MIT"
8
+ homepage = "https://github.com/NorthCommits/agentinit"
9
+ repository = "https://github.com/NorthCommits/agentinit"
10
+ keywords = ["llm", "agents", "scaffolding", "langgraph", "crewai", "autogen", "google-adk", "cli"]
11
+ classifiers = [
12
+ "Programming Language :: Python :: 3",
13
+ "License :: OSI Approved :: MIT License",
14
+ "Operating System :: OS Independent",
15
+ "Topic :: Software Development :: Code Generators",
16
+ ]
17
+ packages = [{include = "agentinit"}]
18
+
19
+ [tool.poetry.dependencies]
20
+ python = "^3.10"
21
+ click = "^8.1.0"
22
+ jinja2 = "^3.1.0"
23
+ rich = "^13.0.0"
24
+
25
+ [tool.poetry.group.dev.dependencies]
26
+ pytest = "^8.0.0"
27
+
28
+ [tool.poetry.scripts]
29
+ agentinit = "agentinit.cli:app"
30
+
31
+ [build-system]
32
+ requires = ["poetry-core"]
33
+ build-backend = "poetry.core.masonry.api"