graphsmith 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41) hide show
  1. graphsmith/__init__.py +5 -0
  2. graphsmith/cli/__init__.py +1 -0
  3. graphsmith/cli/main.py +89 -0
  4. graphsmith/core/__init__.py +1 -0
  5. graphsmith/core/generator.py +141 -0
  6. graphsmith/core/prompts.py +34 -0
  7. graphsmith/core/template_engine.py +28 -0
  8. graphsmith/templates/basic/.env +1 -0
  9. graphsmith/templates/basic/README.md +16 -0
  10. graphsmith/templates/basic/__pycache__/config.cpython-312.pyc +0 -0
  11. graphsmith/templates/basic/__pycache__/graph.cpython-312.pyc +0 -0
  12. graphsmith/templates/basic/__pycache__/main.cpython-312.pyc +0 -0
  13. graphsmith/templates/basic/__pycache__/nodes.cpython-312.pyc +0 -0
  14. graphsmith/templates/basic/__pycache__/state.cpython-312.pyc +0 -0
  15. graphsmith/templates/basic/config.py +1 -0
  16. graphsmith/templates/basic/graph.py +19 -0
  17. graphsmith/templates/basic/main.py +17 -0
  18. graphsmith/templates/basic/nodes.py +13 -0
  19. graphsmith/templates/basic/requirements.txt +5 -0
  20. graphsmith/templates/basic/state.py +9 -0
  21. graphsmith/templates/multi_agent/.env +1 -0
  22. graphsmith/templates/multi_agent/README.md +16 -0
  23. graphsmith/templates/multi_agent/__pycache__/config.cpython-312.pyc +0 -0
  24. graphsmith/templates/multi_agent/__pycache__/graph.cpython-312.pyc +0 -0
  25. graphsmith/templates/multi_agent/__pycache__/main.cpython-312.pyc +0 -0
  26. graphsmith/templates/multi_agent/__pycache__/state.cpython-312.pyc +0 -0
  27. graphsmith/templates/multi_agent/agents/__pycache__/agent_placeholder.cpython-312.pyc +0 -0
  28. graphsmith/templates/multi_agent/agents/agent_placeholder.py +1 -0
  29. graphsmith/templates/multi_agent/config.py +1 -0
  30. graphsmith/templates/multi_agent/graph.py +1 -0
  31. graphsmith/templates/multi_agent/main.py +17 -0
  32. graphsmith/templates/multi_agent/requirements.txt +5 -0
  33. graphsmith/templates/multi_agent/state.py +9 -0
  34. graphsmith/utils/__init__.py +1 -0
  35. graphsmith/utils/file_utils.py +21 -0
  36. graphsmith-0.1.0.dist-info/METADATA +122 -0
  37. graphsmith-0.1.0.dist-info/RECORD +41 -0
  38. graphsmith-0.1.0.dist-info/WHEEL +5 -0
  39. graphsmith-0.1.0.dist-info/entry_points.txt +2 -0
  40. graphsmith-0.1.0.dist-info/licenses/LICENSE +21 -0
  41. graphsmith-0.1.0.dist-info/top_level.txt +1 -0
graphsmith/__init__.py ADDED
@@ -0,0 +1,5 @@
1
+ """graphsmith package."""
2
+
3
+ __all__ = ["__version__"]
4
+
5
+ __version__ = "0.1.0"
@@ -0,0 +1 @@
1
+ """CLI package for graphsmith."""
graphsmith/cli/main.py ADDED
@@ -0,0 +1,89 @@
1
+ """Command-line entry point for graphsmith."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import argparse
6
+
7
+ from graphsmith.core.generator import create_project
8
+
9
+
10
def build_parser() -> argparse.ArgumentParser:
    """Construct the top-level argument parser for the graphsmith CLI."""
    parser = argparse.ArgumentParser(
        prog="graphsmith",
        description="Scaffold LangGraph applications.",
    )
    subparsers = parser.add_subparsers(dest="command", required=True)

    # `create` is currently the only subcommand.
    create = subparsers.add_parser(
        "create",
        help="Create a new LangGraph project.",
    )
    create.add_argument("project_name", help="Name of the project directory.")
    # Optional positional shorthands; reconciled with the flags later by
    # _normalize_create_args.
    create.add_argument(
        "template_alias",
        nargs="?",
        choices=("basic", "agent", "multi_agent"),
        help="Optional shorthand template selector. Use 'agent' as an alias for 'multi_agent'.",
    )
    create.add_argument(
        "agent_count_alias",
        nargs="?",
        type=int,
        help="Optional shorthand agent count when using the positional 'agent' template alias.",
    )
    create.add_argument(
        "--template",
        choices=("basic", "multi_agent"),
        default="basic",
        help="Template to use for scaffolding.",
    )
    create.add_argument(
        "--agents",
        type=int,
        default=2,
        help="Number of agents for the multi_agent template.",
    )
    create.add_argument(
        "--llm",
        choices=("openai", "ollama"),
        default="openai",
        help="LLM provider for the generated project.",
    )
    return parser
53
+
54
+
55
def _normalize_create_args(args: argparse.Namespace, parser: argparse.ArgumentParser) -> None:
    """Fold the positional template/agent-count shorthands into the flag values.

    Mutates *args* in place. Any conflict between a positional shorthand and
    an explicitly supplied flag is reported via ``parser.error`` (which exits).
    """
    alias = args.template_alias
    if not alias:
        # No positional shorthand given; the flag values stand as-is.
        return

    # 'agent' is accepted as a friendlier spelling of 'multi_agent'.
    resolved = "multi_agent" if alias == "agent" else alias

    # A --template value other than the default must agree with the shorthand.
    if args.template not in ("basic", resolved):
        parser.error("Do not mix positional template shorthand with a different --template value.")
    args.template = resolved

    count = args.agent_count_alias
    if count is None:
        return
    if resolved != "multi_agent":
        parser.error("A positional agent count is only valid with the 'agent' or 'multi_agent' template.")
    # An --agents value other than the default must agree with the shorthand.
    if args.agents not in (2, count):
        parser.error("Do not mix positional agent count with a different --agents value.")
    args.agents = count
72
+
73
+
74
def main() -> None:
    """CLI entry point: parse arguments and dispatch to the project generator."""
    parser = build_parser()
    namespace = parser.parse_args()

    if namespace.command == "create":
        # Reconcile positional shorthands with the explicit flags first.
        _normalize_create_args(namespace, parser)
        create_project(
            project_name=namespace.project_name,
            template_name=namespace.template,
            llm_provider=namespace.llm,
            agent_count=namespace.agents,
        )


if __name__ == "__main__":
    main()
@@ -0,0 +1 @@
1
+ """Core functionality for graphsmith."""
@@ -0,0 +1,141 @@
1
+ """Project generation logic for graphsmith."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from importlib import resources
6
+ from pathlib import Path
7
+
8
+ from graphsmith.core.prompts import LLM_CONFIG
9
+ from graphsmith.core.template_engine import render_text, should_render
10
+ from graphsmith.utils.file_utils import copy_directory, remove_path
11
+
12
+
13
def create_project(
    project_name: str,
    template_name: str = "basic",
    llm_provider: str = "openai",
    agent_count: int = 2,
) -> None:
    """Scaffold a new LangGraph project directory from a bundled template.

    Args:
        project_name: Name of the directory to create (resolved against CWD).
        template_name: Bundled template to copy ("basic" or "multi_agent").
        llm_provider: Key into LLM_CONFIG ("openai" or "ollama").
        agent_count: Number of sequential agents (multi_agent template only).

    Raises:
        ValueError: For an unknown provider or a non-positive agent count.
        FileExistsError: If the destination directory already exists.
    """
    if template_name == "multi_agent" and agent_count < 1:
        raise ValueError("--agents must be at least 1 for the multi_agent template.")

    if llm_provider not in LLM_CONFIG:
        raise ValueError(f"Unsupported llm provider: {llm_provider}")

    destination = Path(project_name).resolve()
    if destination.exists():
        raise FileExistsError(f"Destination already exists: {destination}")

    # as_file() guarantees a real filesystem path even if the package is zipped.
    template_ref = resources.files("graphsmith").joinpath("templates", template_name)
    with resources.as_file(template_ref) as template_root:
        copy_directory(Path(template_root), destination)

    try:
        replacements = _build_replacements(
            project_name=project_name,
            template_name=template_name,
            llm_provider=llm_provider,
            agent_count=agent_count,
        )
        _render_project_files(destination, replacements)
        _write_provider_files(destination, llm_provider)

        if template_name == "multi_agent":
            _generate_multi_agent_files(destination, llm_provider, agent_count)
    except BaseException:
        # Bug fix: don't leave a half-rendered project directory behind when
        # any post-copy step fails; remove what was copied, then re-raise.
        remove_path(destination)
        raise

    print(
        f"Project '{project_name}' created successfully using the "
        f"'{template_name}' template with '{llm_provider}'."
    )
49
def _build_replacements(
    project_name: str,
    template_name: str,
    llm_provider: str,
    agent_count: int,
) -> dict[str, str]:
    """Map template placeholder names to their concrete substitution values."""
    settings = LLM_CONFIG[llm_provider]
    replacements = {
        "PROJECT_NAME": project_name,
        "TEMPLATE_NAME": template_name,
        "LLM_PROVIDER": llm_provider,
        # Provider-specific fragments spliced into generated config files.
        "LLM_IMPORT": settings["import_line"],
        "MODEL_CLASS": settings["model_class"],
        "DEFAULT_MODEL": settings["default_model"],
        "ENV_CONTENT": "\n".join(settings["env_lines"]),
        "AGENT_COUNT": str(agent_count),
    }
    return replacements
66
+
67
+
68
def _render_project_files(project_dir: Path, replacements: dict[str, str]) -> None:
    """Substitute ``{{KEY}}`` placeholders in every renderable file under *project_dir*."""
    files = (p for p in project_dir.rglob("*") if p.is_file())
    for file_path in files:
        if not should_render(file_path):
            continue
        original = file_path.read_text(encoding="utf-8")
        file_path.write_text(render_text(original, replacements), encoding="utf-8")
73
+
74
+
75
def _generate_multi_agent_files(
    project_dir: Path,
    llm_provider: str,  # NOTE(review): currently unused here; kept for interface stability
    agent_count: int,
) -> None:
    """Replace the placeholder agent module with *agent_count* generated agents.

    Also rewrites ``graph.py`` and ``README.md`` to match the generated chain.
    """
    agents_dir = project_dir / "agents"
    remove_path(agents_dir / "agent_placeholder.py")

    (agents_dir / "__init__.py").write_text('"""Generated agent modules."""\n', encoding="utf-8")

    # One module per agent: agents/agent_1.py ... agents/agent_N.py.
    for agent_number in range(1, agent_count + 1):
        module_path = agents_dir / f"agent_{agent_number}.py"
        module_path.write_text(_agent_module_content(agent_number), encoding="utf-8")

    (project_dir / "graph.py").write_text(_multi_agent_graph_content(agent_count), encoding="utf-8")
    (project_dir / "README.md").write_text(_multi_agent_readme_content(agent_count), encoding="utf-8")
96
+
97
+
98
def _write_provider_files(project_dir: Path, llm_provider: str) -> None:
    """Overwrite ``config.py`` and ``.env`` with provider-specific content."""
    targets = {
        "config.py": _config_content(llm_provider),
        ".env": _env_content(llm_provider),
    }
    for file_name, content in targets.items():
        (project_dir / file_name).write_text(content, encoding="utf-8")
104
+
105
+
106
def _agent_module_content(index: int) -> str:
    """Return the source for the generated ``agents/agent_<index>.py`` module.

    Built line by line so the emitted code carries standard 4-space
    indentation (the previous inline template had collapsed indents).
    """
    return (
        f'"""Agent {index} implementation."""\n'
        "\n"
        "from __future__ import annotations\n"
        "\n"
        "from config import get_model\n"
        "from state import GraphState\n"
        "\n"
        "\n"
        f"def agent_{index}(state: GraphState) -> GraphState:\n"
        "    model = get_model()\n"
        '    message = state["message"]\n'
        "    response = model.invoke(\n"
        # {{message}} emits a literal {message} so the GENERATED f-string
        # interpolates it at the generated app's runtime, not here.
        f'        f"Agent {index} received this message: {{message}}\\nImprove it and return the updated text."\n'
        "    )\n"
        '    content = getattr(response, "content", response)\n'
        '    return {"message": str(content)}\n'
    )
108
+
109
+
110
def _multi_agent_graph_content(agent_count: int) -> str:
    """Return ``graph.py`` source wiring *agent_count* agents in a linear chain.

    Emitted statements use standard 4-space indentation (the previous inline
    template had collapsed indents).
    """
    import_lines = "\n".join(
        f"from agents.agent_{index} import agent_{index}" for index in range(1, agent_count + 1)
    )
    node_lines = "\n".join(
        f'    workflow.add_node("agent_{index}", agent_{index})' for index in range(1, agent_count + 1)
    )
    # Chain the agents sequentially: START -> agent_1 -> ... -> agent_N -> END.
    edge_lines = ['    workflow.add_edge(START, "agent_1")']
    for index in range(1, agent_count):
        edge_lines.append(f'    workflow.add_edge("agent_{index}", "agent_{index + 1}")')
    edge_lines.append(f'    workflow.add_edge("agent_{agent_count}", END)')
    edge_block = "\n".join(edge_lines)

    return (
        '"""Sequential multi-agent graph definition."""\n'
        "\n"
        "from __future__ import annotations\n"
        "\n"
        "from langgraph.graph import END, START, StateGraph\n"
        "\n"
        "from state import GraphState\n"
        f"{import_lines}\n"
        "\n"
        "\n"
        "def build_graph():\n"
        "    workflow = StateGraph(GraphState)\n"
        f"{node_lines}\n"
        f"{edge_block}\n"
        "    return workflow.compile()\n"
        "\n"
        "\n"
        "graph = build_graph()\n"
    )
124
+
125
+
126
def _config_content(llm_provider: str) -> str:
    """Return ``config.py`` source exposing ``get_model()`` for *llm_provider*.

    Emitted code uses standard 4-space indentation (the previous inline
    template had collapsed indents).
    """
    llm_settings = LLM_CONFIG[llm_provider]
    # Constructor arguments differ per provider; both read overrides from .env.
    if llm_provider == "openai":
        model_args = 'api_key=os.getenv("OPENAI_API_KEY"), model=os.getenv("OPENAI_MODEL", "gpt-4o-mini")'
    else:
        model_args = 'base_url=os.getenv("OLLAMA_BASE_URL", "http://localhost:11434"), model=os.getenv("OLLAMA_MODEL", "llama3.1")'

    return (
        '"""LLM configuration for the generated project."""\n'
        "\n"
        "from __future__ import annotations\n"
        "\n"
        "import os\n"
        "\n"
        "from dotenv import load_dotenv\n"
        "\n"
        f"{llm_settings['import_line']}\n"
        "\n"
        "load_dotenv()\n"
        "\n"
        "\n"
        "def get_model():\n"
        f"    return {llm_settings['model_class']}({model_args})\n"
    )
134
+
135
+
136
def _env_content(llm_provider: str) -> str:
    """Return ``.env`` contents for *llm_provider*, ending with a newline."""
    lines = list(LLM_CONFIG[llm_provider]["env_lines"])
    lines.append("")  # yields a trailing newline once joined
    return "\n".join(lines)
138
+
139
+
140
def _multi_agent_readme_content(agent_count: int) -> str:
    """Return the README.md body for a generated multi-agent project."""
    return f"""# Multi-Agent LangGraph App

This project was generated with `graphsmith`.

## Run

```bash
pip install -r requirements.txt
python main.py
```

## Customize

- Edit `agents/agent_*.py` to change each agent's behavior.
- Update `config.py` and `.env` to change the model configuration.
- Extend `graph.py` if you want to add routing or branching.

This scaffold currently includes {agent_count} sequential agents.
"""
@@ -0,0 +1,34 @@
1
+ """Prompt and provider configuration data for generated projects."""
2
+
3
+ from __future__ import annotations
4
+
5
# Per-provider settings consumed when rendering generated config.py / .env
# files. Keys per provider:
#   import_line   - the import statement written into generated config.py
#   model_class   - the chat-model constructor name used by get_model()
#   env_lines     - the lines written into the generated .env file
#   default_model - the default model identifier
LLM_CONFIG = {
    "openai": {
        "import_line": "from langchain_openai import ChatOpenAI",
        "model_class": "ChatOpenAI",
        "env_lines": [
            "OPENAI_API_KEY=your_openai_api_key_here",
            "OPENAI_MODEL=gpt-4o-mini",
        ],
        "default_model": "gpt-4o-mini",
    },
    "ollama": {
        "import_line": "from langchain_ollama import OllamaLLM",
        "model_class": "OllamaLLM",
        "env_lines": [
            "OLLAMA_BASE_URL=http://localhost:11434",
            "OLLAMA_MODEL=llama3.1",
        ],
        "default_model": "llama3.1",
    },
}

# System prompt for the single-node "basic" template.
BASIC_SYSTEM_PROMPT = (
    "You are a helpful AI assistant built with LangGraph. "
    "Answer clearly and concisely."
)

# System prompt for each step of the sequential multi-agent template.
MULTI_AGENT_SYSTEM_PROMPT = (
    "You are one step in a sequential multi-agent LangGraph workflow. "
    "Improve the shared message before passing it along."
)
@@ -0,0 +1,28 @@
1
+ """Small templating helper for bundled project files."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from pathlib import Path
6
+
7
+
8
# File names always treated as renderable text, even when the suffix alone
# would not qualify (".env" has no renderable suffix, for example).
TEXT_FILE_NAMES = {
    ".env",
    "README.md",
    "requirements.txt",
    "config.py",
    "graph.py",
    "main.py",
    "nodes.py",
    "state.py",
}

# Suffixes treated as renderable text regardless of the file name.
_RENDERABLE_SUFFIXES = (".py", ".md", ".txt")


def should_render(path: Path) -> bool:
    """Return True if *path* is a text file whose placeholders should be rendered."""
    if path.name in TEXT_FILE_NAMES:
        return True
    return path.suffix in _RENDERABLE_SUFFIXES
22
+
23
+
24
def render_text(content: str, replacements: dict[str, str]) -> str:
    """Expand every ``{{KEY}}`` placeholder in *content* using *replacements*.

    Keys absent from *replacements* are left untouched in the output.
    """
    for placeholder, substitution in replacements.items():
        content = content.replace("{{" + placeholder + "}}", substitution)
    return content
@@ -0,0 +1 @@
1
+ {{ENV_CONTENT}}
@@ -0,0 +1,16 @@
1
+ # {{PROJECT_NAME}}
2
+
3
+ This project was generated with the `{{TEMPLATE_NAME}}` template.
4
+
5
+ ## Run
6
+
7
+ ```bash
8
+ pip install -r requirements.txt
9
+ python main.py
10
+ ```
11
+
12
+ ## Customize
13
+
14
+ - Update `nodes.py` to change the graph behavior.
15
+ - Edit `config.py` and `.env` to switch models or providers.
16
+ - Expand `graph.py` with more nodes and edges as needed.
@@ -0,0 +1 @@
1
+ """This file will be replaced with the chosen LLM configuration."""
@@ -0,0 +1,19 @@
1
+ """Graph definition for the generated basic template."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from langgraph.graph import END, START, StateGraph
6
+
7
+ from nodes import call_model
8
+ from state import GraphState
9
+
10
+
11
def build_graph():
    """Compile the single-node workflow: START -> call_model -> END."""
    workflow = StateGraph(GraphState)
    workflow.add_node("call_model", call_model)
    workflow.add_edge(START, "call_model")
    workflow.add_edge("call_model", END)
    return workflow.compile()


# Compiled graph instance imported by main.py.
graph = build_graph()
@@ -0,0 +1,17 @@
1
+ """Entry point for the generated basic LangGraph app."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from dotenv import load_dotenv
6
+
7
+ from graph import graph
8
+
9
+
10
def main() -> None:
    """Load environment configuration, run the graph once, and print the reply."""
    load_dotenv()
    # {{PROJECT_NAME}} is replaced with the real project name at generation time.
    initial_state = {"message": "Write a short hello from {{PROJECT_NAME}}."}
    result = graph.invoke(initial_state)
    print(result["message"])


if __name__ == "__main__":
    main()
@@ -0,0 +1,13 @@
1
+ """Node implementations for the generated basic template."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from config import get_model
6
+ from state import GraphState
7
+
8
+
9
def call_model(state: GraphState) -> GraphState:
    """Single graph node: send the current message to the model, store the reply."""
    response = get_model().invoke(state["message"])
    # Chat models return a message object with .content; other models return
    # the text directly, so fall back to the response itself.
    content = getattr(response, "content", response)
    return {"message": str(content)}
@@ -0,0 +1,5 @@
1
+ langgraph
2
+ langchain
3
+ langchain-openai
4
+ langchain-ollama
5
+ python-dotenv
@@ -0,0 +1,9 @@
1
+ """State definitions for the generated basic template."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from typing import TypedDict
6
+
7
+
8
class GraphState(TypedDict):
    """Shared graph state: a single message passed between nodes."""

    # The text each node reads and rewrites.
    message: str
@@ -0,0 +1 @@
1
+ {{ENV_CONTENT}}
@@ -0,0 +1,16 @@
1
+ # {{PROJECT_NAME}}
2
+
3
+ This project was generated with the `{{TEMPLATE_NAME}}` template.
4
+
5
+ ## Run
6
+
7
+ ```bash
8
+ pip install -r requirements.txt
9
+ python main.py
10
+ ```
11
+
12
+ ## Customize
13
+
14
+ - Edit files in `agents/` to define each agent's behavior.
15
+ - Update `config.py` and `.env` to change the model configuration.
16
+ - Adjust `graph.py` if you want a different agent flow.
@@ -0,0 +1 @@
1
+ """Placeholder file removed during project generation."""
@@ -0,0 +1 @@
1
+ """This file will be replaced with the chosen LLM configuration."""
@@ -0,0 +1 @@
1
+ """This file will be replaced with generated agent wiring."""
@@ -0,0 +1,17 @@
1
+ """Entry point for the generated multi-agent LangGraph app."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from dotenv import load_dotenv
6
+
7
+ from graph import graph
8
+
9
+
10
def main() -> None:
    """Load environment configuration, run the agent chain once, and print the result."""
    load_dotenv()
    # {{PROJECT_NAME}} is replaced with the real project name at generation time.
    initial_state = {"message": "Start a multi-agent workflow for {{PROJECT_NAME}}."}
    result = graph.invoke(initial_state)
    print(result["message"])


if __name__ == "__main__":
    main()
@@ -0,0 +1,5 @@
1
+ langgraph
2
+ langchain
3
+ langchain-openai
4
+ langchain-ollama
5
+ python-dotenv
@@ -0,0 +1,9 @@
1
+ """State definitions for the generated multi-agent template."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from typing import TypedDict
6
+
7
+
8
class GraphState(TypedDict):
    """Shared state flowing through the sequential agent chain."""

    # The text each agent reads and rewrites in turn.
    message: str
@@ -0,0 +1 @@
1
+ """Utility helpers for graphsmith."""
@@ -0,0 +1,21 @@
1
+ """Filesystem helpers for project generation."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import shutil
6
+ from pathlib import Path
7
+
8
+
9
def ensure_directory(path: Path) -> None:
    """Create *path* and any missing parents, failing if it already exists.

    NOTE(review): despite the name, this uses exist_ok=False and therefore
    raises FileExistsError for an existing directory — presumably to guard
    against overwriting a project; confirm before relaxing.
    """
    path.mkdir(parents=True, exist_ok=False)
11
+
12
+
13
def copy_directory(source: Path, destination: Path) -> None:
    """Recursively copy *source* to *destination* (which must not exist yet).

    Bug fix: skip bytecode caches during the copy — the installed package
    ships ``__pycache__`` directories inside its templates, and a plain
    ``copytree`` would reproduce that junk in every generated project.
    """
    shutil.copytree(
        source,
        destination,
        ignore=shutil.ignore_patterns("__pycache__", "*.pyc"),
    )
15
+
16
+
17
def remove_path(path: Path) -> None:
    """Delete *path*, whether it is a directory tree, a file, or absent (no-op)."""
    if path.is_dir():
        shutil.rmtree(path)
        return
    if path.exists():
        path.unlink()
@@ -0,0 +1,122 @@
1
+ Metadata-Version: 2.4
2
+ Name: graphsmith
3
+ Version: 0.1.0
4
+ Summary: CLI tool for scaffolding LangGraph applications.
5
+ Author: OpenAI
6
+ License: MIT License
7
+
8
+ Copyright (c) 2026
9
+
10
+ Permission is hereby granted, free of charge, to any person obtaining a copy
11
+ of this software and associated documentation files (the "Software"), to deal
12
+ in the Software without restriction, including without limitation the rights
13
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
14
+ copies of the Software, and to permit persons to whom the Software is
15
+ furnished to do so, subject to the following conditions:
16
+
17
+ The above copyright notice and this permission notice shall be included in all
18
+ copies or substantial portions of the Software.
19
+
20
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
21
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
22
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
23
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
24
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
25
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
26
+ SOFTWARE.
27
+
28
+ Project-URL: Homepage, https://pypi.org/project/graphsmith/
29
+ Project-URL: Repository, https://github.com/your-username/graphsmith
30
+ Project-URL: Issues, https://github.com/your-username/graphsmith/issues
31
+ Keywords: langgraph,langchain,cli,scaffold,templates
32
+ Classifier: Development Status :: 4 - Beta
33
+ Classifier: Intended Audience :: Developers
34
+ Classifier: License :: OSI Approved :: MIT License
35
+ Classifier: Programming Language :: Python :: 3
36
+ Classifier: Programming Language :: Python :: 3 :: Only
37
+ Classifier: Programming Language :: Python :: 3.9
38
+ Classifier: Programming Language :: Python :: 3.10
39
+ Classifier: Programming Language :: Python :: 3.11
40
+ Classifier: Programming Language :: Python :: 3.12
41
+ Classifier: Topic :: Software Development :: Code Generators
42
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
43
+ Requires-Python: >=3.9
44
+ Description-Content-Type: text/markdown
45
+ License-File: LICENSE
46
+ Requires-Dist: langgraph
47
+ Requires-Dist: langchain
48
+ Requires-Dist: python-dotenv
49
+ Provides-Extra: dev
50
+ Requires-Dist: build>=1.2.1; extra == "dev"
51
+ Requires-Dist: twine>=5.1.1; extra == "dev"
52
+ Dynamic: license-file
53
+
54
+ # graphsmith
55
+
56
+ `graphsmith` is a Python command-line tool that scaffolds LangGraph applications.
57
+
58
+ ## Installation
59
+
60
+ ```bash
61
+ pip install .
62
+ ```
63
+
64
+ Install from PyPI:
65
+
66
+ ```bash
67
+ pip install graphsmith
68
+ ```
69
+
70
+ ## Usage
71
+
72
+ Create a basic project:
73
+
74
+ ```bash
75
+ graphsmith create my_app --template basic --llm openai
76
+ ```
77
+
78
+ Create a multi-agent project with three sequential agents:
79
+
80
+ ```bash
81
+ graphsmith create my_app --template multi_agent --agents 3 --llm openai
82
+ ```
83
+
84
+ Use Ollama instead of OpenAI:
85
+
86
+ ```bash
87
+ graphsmith create my_app --template basic --llm ollama
88
+ ```
89
+
90
+ ## Generated project customization
91
+
92
+ - Edit `config.py` to change the model name or provider configuration.
93
+ - Update `.env` with your API key or Ollama model settings.
94
+ - Modify `nodes.py` or files in `agents/` to add your application logic.
95
+ - Extend `graph.py` to add routing, tools, or more complex workflows.
96
+
97
+ ## Build and upload to PyPI
98
+
99
+ Build the distribution files:
100
+
101
+ ```bash
102
+ python -m pip install --upgrade build twine
103
+ python -m build
104
+ ```
105
+
106
+ Validate the package metadata:
107
+
108
+ ```bash
109
+ python -m twine check dist/*
110
+ ```
111
+
112
+ Upload to TestPyPI:
113
+
114
+ ```bash
115
+ python -m twine upload --repository testpypi dist/*
116
+ ```
117
+
118
+ Upload to PyPI:
119
+
120
+ ```bash
121
+ python -m twine upload dist/*
122
+ ```
@@ -0,0 +1,41 @@
1
+ graphsmith/__init__.py,sha256=_nKzSJrRLYYX7FHOd9khLiGSjhlaWEHgrn_DMFvLshY,76
2
+ graphsmith/cli/__init__.py,sha256=njVNE1SOegr7YrzO8Ri2XYNEs9uvp8vKKz3fcmwI9H8,34
3
+ graphsmith/cli/main.py,sha256=tDny1ziH08fajR61LdvlWQhWOJsygrcc-5k0ORDDDCI,2807
4
+ graphsmith/core/__init__.py,sha256=cQtefne7riOghUxMgezONkdAV9uaIqdtru52vFDs7GY,41
5
+ graphsmith/core/generator.py,sha256=FaHy0mG1UHkHClW_IR5mmhdkrgYLkYQcymY3IN9mBNg,6418
6
+ graphsmith/core/prompts.py,sha256=iTx4HlGCHBMe-oo41RsjApPBbVr1OJtScGHAiuruO-g,990
7
+ graphsmith/core/template_engine.py,sha256=ycQODNi6qGzPJ_4WePquL3vhTp4FkSm5P-SKtXTKa08,616
8
+ graphsmith/templates/basic/.env,sha256=gDpfBntf2yszKK1BjvHm--wbB0Y3CSUexmFlFHe_hB8,16
9
+ graphsmith/templates/basic/README.md,sha256=Nd4r6TTDAMnvbWGycNSHYgL4OKDlBJ65RmeL76G9alI,337
10
+ graphsmith/templates/basic/config.py,sha256=xmtudoehS-k9gq1NNmPc_6O7D9F-NPyuFdp9o3ZOewY,68
11
+ graphsmith/templates/basic/graph.py,sha256=yvrLcYgPlUJc-t9kBf9v7PV1khjIa_uJHyw9neaOnNo,449
12
+ graphsmith/templates/basic/main.py,sha256=P_1BAM-QZdlxzG7p7oDJ9HzMp_JM-9bWRh52xZ0QUD8,344
13
+ graphsmith/templates/basic/nodes.py,sha256=YC0xzoUeiUjvllGRfYmN7imIGBUOKoCX5M6kdIl0BVY,367
14
+ graphsmith/templates/basic/requirements.txt,sha256=hEFRNuiRLer8T_6Erg3xLv1b6xz1ybqWZ5NqVKnnTkw,68
15
+ graphsmith/templates/basic/state.py,sha256=qu8KVJZpc3rNEXfX2kcqMdNG_kbGkN9PtnHZtemLPHk,172
16
+ graphsmith/templates/basic/__pycache__/config.cpython-312.pyc,sha256=UZ76PQ44jUkw-N8l64BJuNS1CxMIIXayWROQgqzwEdE,209
17
+ graphsmith/templates/basic/__pycache__/graph.cpython-312.pyc,sha256=LUdqQCIyKopvIlVMT-NalLzDF5-e1uz15B9vnL71srY,877
18
+ graphsmith/templates/basic/__pycache__/main.cpython-312.pyc,sha256=0tkpJ37KfPWrqcVV_yD4T3p0HHEM28j0tAWtfjYMkks,713
19
+ graphsmith/templates/basic/__pycache__/nodes.cpython-312.pyc,sha256=ig4VNFRTtrQjOrS6u5DcyU_gytKl0Z-dyRKFfOClCgE,699
20
+ graphsmith/templates/basic/__pycache__/state.cpython-312.pyc,sha256=7fCipKbHrVxM6eBlMU1iShIlt0DI1VrAnouq4p7UTWU,515
21
+ graphsmith/templates/multi_agent/.env,sha256=gDpfBntf2yszKK1BjvHm--wbB0Y3CSUexmFlFHe_hB8,16
22
+ graphsmith/templates/multi_agent/README.md,sha256=V6taujiC4u8Xb2bZ_uBndKe-gT9Nhx9azSr0Y1GqLBM,351
23
+ graphsmith/templates/multi_agent/config.py,sha256=xmtudoehS-k9gq1NNmPc_6O7D9F-NPyuFdp9o3ZOewY,68
24
+ graphsmith/templates/multi_agent/graph.py,sha256=bbQsE_hbPW7hhbVKLKqlR-hAjP0F6PRCDPsgezcHHzE,62
25
+ graphsmith/templates/multi_agent/main.py,sha256=nJNAFvhgyJ0yXFOzIqgTrK9bSzZ2oKmZrJYXsedjDMA,358
26
+ graphsmith/templates/multi_agent/requirements.txt,sha256=hEFRNuiRLer8T_6Erg3xLv1b6xz1ybqWZ5NqVKnnTkw,68
27
+ graphsmith/templates/multi_agent/state.py,sha256=uAajjBj9PeBEJCMKEZoGdxhgmUWe41af7Odz3wjuVXY,178
28
+ graphsmith/templates/multi_agent/__pycache__/config.cpython-312.pyc,sha256=k1mc2EMvs1pGgCSK22bRyqu8jDxSD18t5M4MADHCB5w,215
29
+ graphsmith/templates/multi_agent/__pycache__/graph.cpython-312.pyc,sha256=0k62PI3oSlz_drogQKb4MlqAk2sc11Lf_go7O6kfUdA,208
30
+ graphsmith/templates/multi_agent/__pycache__/main.cpython-312.pyc,sha256=qmYaVVWpCkLP4ZkViVy6cn2xxxgH5022nQFcxRQt-xs,733
31
+ graphsmith/templates/multi_agent/__pycache__/state.cpython-312.pyc,sha256=l680cSYGMO76AmC4D0RDmfFGTrWvyikPEkeW8DUsLdg,527
32
+ graphsmith/templates/multi_agent/agents/agent_placeholder.py,sha256=szb4JlCLQiF_Gl0swsn6wMzbiAlrbnuTxqJoh9OIA74,58
33
+ graphsmith/templates/multi_agent/agents/__pycache__/agent_placeholder.cpython-312.pyc,sha256=KeFJst-Egot-JT9yw4tWHjPy1xR1K6MvmCMSRicbhG0,223
34
+ graphsmith/utils/__init__.py,sha256=J_60xpQBHo8XNC1dEDitsE034JK_Dxe8HVq-oH_q5ZQ,38
35
+ graphsmith/utils/file_utils.py,sha256=ff8yoTa71x0DNHWODZ2oVaL1lh98OtCxW3P7HQIKDkU,453
36
+ graphsmith-0.1.0.dist-info/licenses/LICENSE,sha256=ESYyLizI0WWtxMeS7rGVcX3ivMezm-HOd5WdeOh-9oU,1056
37
+ graphsmith-0.1.0.dist-info/METADATA,sha256=_9eBfQ4jItrxtxML-biohM6teZ-LcbnicpxfnTk1FD4,3696
38
+ graphsmith-0.1.0.dist-info/WHEEL,sha256=aeYiig01lYGDzBgS8HxWXOg3uV61G9ijOsup-k9o1sk,91
39
+ graphsmith-0.1.0.dist-info/entry_points.txt,sha256=HzEry90G5vk0FGwWDZN5LlbtzUvYYKzdKYv6C4bLHeg,56
40
+ graphsmith-0.1.0.dist-info/top_level.txt,sha256=n-Y2-68_jMl4dCC9z7XemKrXde-kWmVciqMSdx3X9JQ,11
41
+ graphsmith-0.1.0.dist-info/RECORD,,
@@ -0,0 +1,5 @@
1
+ Wheel-Version: 1.0
2
+ Generator: setuptools (82.0.1)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
5
+
@@ -0,0 +1,2 @@
1
+ [console_scripts]
2
+ graphsmith = graphsmith.cli.main:main
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
@@ -0,0 +1 @@
1
+ graphsmith