firm-cli 1.0.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- firm_cli-1.0.0/PKG-INFO +98 -0
- firm_cli-1.0.0/README.md +66 -0
- firm_cli-1.0.0/firm_cli/__init__.py +3 -0
- firm_cli-1.0.0/firm_cli/config.py +171 -0
- firm_cli-1.0.0/firm_cli/factory.py +605 -0
- firm_cli-1.0.0/firm_cli/main.py +143 -0
- firm_cli-1.0.0/firm_cli/memory.py +331 -0
- firm_cli-1.0.0/firm_cli/server.py +378 -0
- firm_cli-1.0.0/firm_cli.egg-info/PKG-INFO +98 -0
- firm_cli-1.0.0/firm_cli.egg-info/SOURCES.txt +18 -0
- firm_cli-1.0.0/firm_cli.egg-info/dependency_links.txt +1 -0
- firm_cli-1.0.0/firm_cli.egg-info/entry_points.txt +2 -0
- firm_cli-1.0.0/firm_cli.egg-info/requires.txt +10 -0
- firm_cli-1.0.0/firm_cli.egg-info/top_level.txt +1 -0
- firm_cli-1.0.0/pyproject.toml +63 -0
- firm_cli-1.0.0/setup.cfg +4 -0
- firm_cli-1.0.0/tests/test_cli.py +140 -0
- firm_cli-1.0.0/tests/test_config.py +132 -0
- firm_cli-1.0.0/tests/test_memory.py +281 -0
- firm_cli-1.0.0/tests/test_server.py +219 -0
firm_cli-1.0.0/PKG-INFO
ADDED
|
@@ -0,0 +1,98 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: firm-cli
|
|
3
|
+
Version: 1.0.0
|
|
4
|
+
Summary: One command to create, start, and manage AI agent firms — inter-session Hebbian memory included
|
|
5
|
+
Author-email: Romain Santoli <romainsantoli@gmail.com>
|
|
6
|
+
License: MIT
|
|
7
|
+
Project-URL: Homepage, https://github.com/romainsantoli-web/firm-ecosystem
|
|
8
|
+
Project-URL: Documentation, https://github.com/romainsantoli-web/firm-ecosystem
|
|
9
|
+
Project-URL: Repository, https://github.com/romainsantoli-web/setup-vs-agent-firm
|
|
10
|
+
Project-URL: Issues, https://github.com/romainsantoli-web/firm-ecosystem/issues
|
|
11
|
+
Keywords: mcp,ai-agents,firm,openclaw,hebbian-memory,inter-session-memory,copilot,claude-code,a2a-protocol
|
|
12
|
+
Classifier: Development Status :: 4 - Beta
|
|
13
|
+
Classifier: Environment :: Console
|
|
14
|
+
Classifier: Programming Language :: Python :: 3
|
|
15
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
16
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
17
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
18
|
+
Classifier: Topic :: Software Development :: Libraries
|
|
19
|
+
Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
|
|
20
|
+
Classifier: Intended Audience :: Developers
|
|
21
|
+
Classifier: Typing :: Typed
|
|
22
|
+
Requires-Python: >=3.11
|
|
23
|
+
Description-Content-Type: text/markdown
|
|
24
|
+
Requires-Dist: rich>=13.7.0
|
|
25
|
+
Provides-Extra: full
|
|
26
|
+
Requires-Dist: mcp-openclaw-extensions>=3.3.0; extra == "full"
|
|
27
|
+
Requires-Dist: memory-os-ai>=3.0.0; extra == "full"
|
|
28
|
+
Provides-Extra: dev
|
|
29
|
+
Requires-Dist: pytest>=8.1.0; extra == "dev"
|
|
30
|
+
Requires-Dist: pytest-cov>=5.0.0; extra == "dev"
|
|
31
|
+
Requires-Dist: ruff>=0.3.0; extra == "dev"
|
|
32
|
+
|
|
33
|
+
# firm-cli
|
|
34
|
+
|
|
35
|
+
> One command to create, start, and manage AI agent firms with inter-session Hebbian memory.
|
|
36
|
+
|
|
37
|
+
[](https://pypi.org/project/firm-cli/)
|
|
38
|
+
[](LICENSE)
|
|
39
|
+
[](https://python.org)
|
|
40
|
+
|
|
41
|
+
## Install
|
|
42
|
+
|
|
43
|
+
```bash
|
|
44
|
+
pip install firm-cli
|
|
45
|
+
```
|
|
46
|
+
|
|
47
|
+
## Quick start
|
|
48
|
+
|
|
49
|
+
```bash
|
|
50
|
+
# Generate a fintech startup firm
|
|
51
|
+
firm init --sector fintech --size startup --output ./my-firm
|
|
52
|
+
|
|
53
|
+
# Start both MCP servers (138 + 18 tools)
|
|
54
|
+
cd my-firm && firm start
|
|
55
|
+
|
|
56
|
+
# Check status
|
|
57
|
+
firm status
|
|
58
|
+
|
|
59
|
+
# View Hebbian memory dashboard
|
|
60
|
+
firm memory status
|
|
61
|
+
```
|
|
62
|
+
|
|
63
|
+
## What it does
|
|
64
|
+
|
|
65
|
+
`firm init` generates a complete **VS Code Copilot agent firm** — a pyramidal organization of AI agents (CEO → Departments → Specialists) pre-configured for your industry sector.
|
|
66
|
+
|
|
67
|
+
| Size | Departments | Agents |
|
|
68
|
+
|------|------------|--------|
|
|
69
|
+
| startup | 4 | ~20 |
|
|
70
|
+
| scaleup | 12 | ~60 |
|
|
71
|
+
| enterprise | 18 | ~100+ |
|
|
72
|
+
|
|
73
|
+
**15 sectors**: generic, legal, medtech, ecommerce, fintech, saas, manufacturing, education, realestate, logistics, media, automotive, energy, hr, consulting
|
|
74
|
+
|
|
75
|
+
**7 stacks**: typescript, python, java, dotnet, go, rust, mixed
|
|
76
|
+
|
|
77
|
+
## Commands
|
|
78
|
+
|
|
79
|
+
| Command | Description |
|
|
80
|
+
|---------|-------------|
|
|
81
|
+
| `firm init` | Generate a new AI agent firm |
|
|
82
|
+
| `firm start` | Start MCP servers (openclaw-extensions + memory) |
|
|
83
|
+
| `firm stop` | Stop MCP servers |
|
|
84
|
+
| `firm status` | Show ecosystem component status |
|
|
85
|
+
| `firm memory status` | Hebbian memory dashboard (weights, drift) |
|
|
86
|
+
| `firm memory analyze` | Run Hebbian clustering analysis |
|
|
87
|
+
| `firm config set <k> <v>` | Set configuration |
|
|
88
|
+
| `firm config show` | Show current configuration |
|
|
89
|
+
|
|
90
|
+
## Part of the Firm Ecosystem
|
|
91
|
+
|
|
92
|
+
- **[firm-ecosystem](https://github.com/romainsantoli-web/firm-ecosystem)** — Architecture docs
|
|
93
|
+
- **[mcp-openclaw-extensions](https://github.com/romainsantoli-web/mcp-openclaw)** — 138 MCP tools
|
|
94
|
+
- **[Memory-os-ai](https://github.com/romainsantoli-web/Memory-os-ai)** — Semantic memory engine
|
|
95
|
+
|
|
96
|
+
## License
|
|
97
|
+
|
|
98
|
+
MIT
|
firm_cli-1.0.0/README.md
ADDED
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
# firm-cli
|
|
2
|
+
|
|
3
|
+
> One command to create, start, and manage AI agent firms with inter-session Hebbian memory.
|
|
4
|
+
|
|
5
|
+
[](https://pypi.org/project/firm-cli/)
|
|
6
|
+
[](LICENSE)
|
|
7
|
+
[](https://python.org)
|
|
8
|
+
|
|
9
|
+
## Install
|
|
10
|
+
|
|
11
|
+
```bash
|
|
12
|
+
pip install firm-cli
|
|
13
|
+
```
|
|
14
|
+
|
|
15
|
+
## Quick start
|
|
16
|
+
|
|
17
|
+
```bash
|
|
18
|
+
# Generate a fintech startup firm
|
|
19
|
+
firm init --sector fintech --size startup --output ./my-firm
|
|
20
|
+
|
|
21
|
+
# Start both MCP servers (138 + 18 tools)
|
|
22
|
+
cd my-firm && firm start
|
|
23
|
+
|
|
24
|
+
# Check status
|
|
25
|
+
firm status
|
|
26
|
+
|
|
27
|
+
# View Hebbian memory dashboard
|
|
28
|
+
firm memory status
|
|
29
|
+
```
|
|
30
|
+
|
|
31
|
+
## What it does
|
|
32
|
+
|
|
33
|
+
`firm init` generates a complete **VS Code Copilot agent firm** — a pyramidal organization of AI agents (CEO → Departments → Specialists) pre-configured for your industry sector.
|
|
34
|
+
|
|
35
|
+
| Size | Departments | Agents |
|
|
36
|
+
|------|------------|--------|
|
|
37
|
+
| startup | 4 | ~20 |
|
|
38
|
+
| scaleup | 12 | ~60 |
|
|
39
|
+
| enterprise | 18 | ~100+ |
|
|
40
|
+
|
|
41
|
+
**15 sectors**: generic, legal, medtech, ecommerce, fintech, saas, manufacturing, education, realestate, logistics, media, automotive, energy, hr, consulting
|
|
42
|
+
|
|
43
|
+
**7 stacks**: typescript, python, java, dotnet, go, rust, mixed
|
|
44
|
+
|
|
45
|
+
## Commands
|
|
46
|
+
|
|
47
|
+
| Command | Description |
|
|
48
|
+
|---------|-------------|
|
|
49
|
+
| `firm init` | Generate a new AI agent firm |
|
|
50
|
+
| `firm start` | Start MCP servers (openclaw-extensions + memory) |
|
|
51
|
+
| `firm stop` | Stop MCP servers |
|
|
52
|
+
| `firm status` | Show ecosystem component status |
|
|
53
|
+
| `firm memory status` | Hebbian memory dashboard (weights, drift) |
|
|
54
|
+
| `firm memory analyze` | Run Hebbian clustering analysis |
|
|
55
|
+
| `firm config set <k> <v>` | Set configuration |
|
|
56
|
+
| `firm config show` | Show current configuration |
|
|
57
|
+
|
|
58
|
+
## Part of the Firm Ecosystem
|
|
59
|
+
|
|
60
|
+
- **[firm-ecosystem](https://github.com/romainsantoli-web/firm-ecosystem)** — Architecture docs
|
|
61
|
+
- **[mcp-openclaw-extensions](https://github.com/romainsantoli-web/mcp-openclaw)** — 138 MCP tools
|
|
62
|
+
- **[Memory-os-ai](https://github.com/romainsantoli-web/Memory-os-ai)** — Semantic memory engine
|
|
63
|
+
|
|
64
|
+
## License
|
|
65
|
+
|
|
66
|
+
MIT
|
|
@@ -0,0 +1,171 @@
|
|
|
1
|
+
"""firm config — manage Firm ecosystem configuration.
|
|
2
|
+
|
|
3
|
+
Includes embedding model management for multilingual/French support:
|
|
4
|
+
firm config set memory.model paraphrase-multilingual-MiniLM-L12-v2
|
|
5
|
+
firm config models # list recommended embedding models
|
|
6
|
+
|
|
7
|
+
⚠️ Contenu généré par IA — validation humaine requise avant utilisation.
|
|
8
|
+
"""
|
|
9
|
+
|
|
10
|
+
from __future__ import annotations
|
|
11
|
+
|
|
12
|
+
import argparse
|
|
13
|
+
import json
|
|
14
|
+
from pathlib import Path
|
|
15
|
+
|
|
16
|
+
from rich.console import Console
|
|
17
|
+
from rich.panel import Panel
|
|
18
|
+
from rich.table import Table
|
|
19
|
+
|
|
20
|
+
console = Console()
|
|
21
|
+
|
|
22
|
+
CONFIG_FILE = Path.home() / ".firm" / "config.json"
|
|
23
|
+
|
|
24
|
+
DEFAULTS = {
|
|
25
|
+
"memory.backend": "sqlite",
|
|
26
|
+
"memory.model": "all-MiniLM-L6-v2",
|
|
27
|
+
"server.host": "127.0.0.1",
|
|
28
|
+
"server.openclaw_port": "8012",
|
|
29
|
+
"server.memory_port": "8765",
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
# Recommended embedding models for various languages
|
|
33
|
+
RECOMMENDED_MODELS = {
|
|
34
|
+
"all-MiniLM-L6-v2": {
|
|
35
|
+
"lang": "English",
|
|
36
|
+
"dim": 384,
|
|
37
|
+
"speed": "fast",
|
|
38
|
+
"note": "Default. Best speed/quality trade-off for English.",
|
|
39
|
+
},
|
|
40
|
+
"paraphrase-multilingual-MiniLM-L12-v2": {
|
|
41
|
+
"lang": "50+ languages (incl. French)",
|
|
42
|
+
"dim": 384,
|
|
43
|
+
"speed": "fast",
|
|
44
|
+
"note": "Recommended for French. Similar speed to default, multilingual.",
|
|
45
|
+
},
|
|
46
|
+
"BAAI/bge-m3": {
|
|
47
|
+
"lang": "100+ languages",
|
|
48
|
+
"dim": 1024,
|
|
49
|
+
"speed": "medium",
|
|
50
|
+
"note": "State-of-the-art multilingual. Higher quality, slower.",
|
|
51
|
+
},
|
|
52
|
+
"dangvantuan/sentence-camembert-large": {
|
|
53
|
+
"lang": "French",
|
|
54
|
+
"dim": 1024,
|
|
55
|
+
"speed": "slow",
|
|
56
|
+
"note": "French-specific. Best quality for French-only deployments.",
|
|
57
|
+
},
|
|
58
|
+
"OrdalieTech/Solon-embeddings-large-0.1": {
|
|
59
|
+
"lang": "French",
|
|
60
|
+
"dim": 1024,
|
|
61
|
+
"speed": "slow",
|
|
62
|
+
"note": "French-specific. Optimized for French legal/business texts.",
|
|
63
|
+
},
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
def _load_config() -> dict:
    """Return the effective configuration.

    Starts from DEFAULTS and overlays any values saved in
    ~/.firm/config.json. An unreadable or corrupt file is ignored,
    yielding the defaults alone.
    """
    merged = dict(DEFAULTS)
    if not CONFIG_FILE.exists():
        return merged
    try:
        stored = json.loads(CONFIG_FILE.read_text(encoding="utf-8"))
    except (json.JSONDecodeError, OSError):
        # Best-effort: a broken config file must not crash the CLI.
        return merged
    merged.update(stored)
    return merged
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
def _save_config(config: dict) -> None:
    """Persist *config* to ~/.firm/config.json as pretty-printed JSON.

    Creates the ~/.firm directory on first use; always writes a
    trailing newline.
    """
    CONFIG_FILE.parent.mkdir(parents=True, exist_ok=True)
    serialized = json.dumps(config, indent=2)
    CONFIG_FILE.write_text(serialized + "\n", encoding="utf-8")
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
def run_config(args: argparse.Namespace) -> int:
    """Entry point for `firm config` commands.

    Dispatches on ``args.config_command`` (set / show / models);
    prints usage and returns 0 when the subcommand is missing or
    unrecognized.
    """
    cmd = getattr(args, "config_command", None)

    if cmd == "set":
        return _config_set(args.key, args.value)
    if cmd == "show":
        return _config_show()
    if cmd == "models":
        return _config_models()

    console.print("[yellow]Usage: firm config set <key> <value> | firm config show | firm config models[/yellow]")
    return 0
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
def _config_set(key: str, value: str) -> int:
    """Set configuration *key* to *value* and persist the change.

    Prints the old → new transition, plus extra guidance when the
    embedding model (`memory.model`) changes. Returns 0 always.
    """
    config = _load_config()
    old = config.get(key, "(unset)")
    config[key] = value
    _save_config(config)
    console.print(f" [green]✓[/green] {key}: {old} → [bold]{value}[/bold]")

    # Hint for model changes
    if key == "memory.model":
        if value in RECOMMENDED_MODELS:
            info = RECOMMENDED_MODELS[value]
            console.print(f" [dim]{info['note']}[/dim]")
        else:
            # Fix: was an f-string with no placeholders (ruff F541).
            console.print(" [yellow]Custom model. Run `firm config models` for recommended options.[/yellow]")
        console.print(" [dim]Restart servers for changes to take effect: firm stop && firm start[/dim]")
    return 0
|
|
116
|
+
|
|
117
|
+
|
|
118
|
+
def _config_show() -> int:
    """Render the current configuration as a Rich table.

    Each row shows the key, its effective value, and whether it comes
    from the saved config file or from DEFAULTS. Returns 0 always.
    """
    config = _load_config()
    table = Table(title="Firm Configuration", border_style="cyan")
    table.add_column("Key", style="bold")
    table.add_column("Value")
    table.add_column("Source")

    # Re-read the raw file (not the merged view) so we can tell
    # explicitly-saved keys apart from defaults.
    saved = {}
    if CONFIG_FILE.exists():
        try:
            saved = json.loads(CONFIG_FILE.read_text(encoding="utf-8"))
        except (json.JSONDecodeError, OSError):
            # Consistent with _load_config: a broken file means "no
            # saved overrides", not a crash.
            saved = {}

    for key, value in sorted(config.items()):
        source = "[green]config[/green]" if key in saved else "[dim]default[/dim]"
        table.add_row(key, str(value), source)

    console.print(table)
    console.print(f"\n [dim]Config file: {CONFIG_FILE}[/dim]")
    return 0
|
|
140
|
+
|
|
141
|
+
|
|
142
|
+
def _config_models() -> int:
    """List recommended embedding models for `memory.model`.

    Renders a Rich table of RECOMMENDED_MODELS, marks the currently
    active model, and prints how to switch. Returns 0 always.
    """
    config = _load_config()
    current = config.get("memory.model", DEFAULTS["memory.model"])

    table = Table(title="Recommended Embedding Models", border_style="cyan")
    # (header, style) pairs; None means the default column style.
    columns = (
        ("Model", "bold"),
        ("Languages", None),
        ("Dim", None),
        ("Speed", None),
        ("Note", None),
        ("", "green"),
    )
    for header, style in columns:
        if style is None:
            table.add_column(header)
        else:
            table.add_column(header, style=style)

    for name, meta in RECOMMENDED_MODELS.items():
        marker = "← active" if name == current else ""
        table.add_row(name, meta["lang"], str(meta["dim"]), meta["speed"], meta["note"], marker)

    console.print(table)
    console.print(f"\n Current model: [bold]{current}[/bold]")
    console.print(" Change: [bold]firm config set memory.model <model-name>[/bold]")
    console.print("\n [dim]For French deployments, we recommend:[/dim]")
    console.print(" [bold]firm config set memory.model paraphrase-multilingual-MiniLM-L12-v2[/bold]")
    return 0
|