@statechange/council 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +207 -0
- package/council/creative/ABOUT.md +23 -0
- package/council/critic/ABOUT.md +23 -0
- package/council/strategist/ABOUT.md +21 -0
- package/dist/backends/anthropic.d.ts +2 -0
- package/dist/backends/anthropic.js +57 -0
- package/dist/backends/anthropic.js.map +1 -0
- package/dist/backends/google.d.ts +2 -0
- package/dist/backends/google.js +68 -0
- package/dist/backends/google.js.map +1 -0
- package/dist/backends/index.d.ts +3 -0
- package/dist/backends/index.js +47 -0
- package/dist/backends/index.js.map +1 -0
- package/dist/backends/ollama.d.ts +2 -0
- package/dist/backends/ollama.js +68 -0
- package/dist/backends/ollama.js.map +1 -0
- package/dist/backends/openai.d.ts +2 -0
- package/dist/backends/openai.js +62 -0
- package/dist/backends/openai.js.map +1 -0
- package/dist/backends/types.d.ts +1 -0
- package/dist/backends/types.js +2 -0
- package/dist/backends/types.js.map +1 -0
- package/dist/cli.d.ts +2 -0
- package/dist/cli.js +152 -0
- package/dist/cli.js.map +1 -0
- package/dist/commands/config.d.ts +7 -0
- package/dist/commands/config.js +118 -0
- package/dist/commands/config.js.map +1 -0
- package/dist/commands/counsellor.d.ts +7 -0
- package/dist/commands/counsellor.js +98 -0
- package/dist/commands/counsellor.js.map +1 -0
- package/dist/commands/discuss.d.ts +12 -0
- package/dist/commands/discuss.js +154 -0
- package/dist/commands/discuss.js.map +1 -0
- package/dist/commands/history.d.ts +5 -0
- package/dist/commands/history.js +46 -0
- package/dist/commands/history.js.map +1 -0
- package/dist/commands/list.d.ts +5 -0
- package/dist/commands/list.js +40 -0
- package/dist/commands/list.js.map +1 -0
- package/dist/core/conversation-engine.d.ts +13 -0
- package/dist/core/conversation-engine.js +226 -0
- package/dist/core/conversation-engine.js.map +1 -0
- package/dist/core/counsellor-loader.d.ts +4 -0
- package/dist/core/counsellor-loader.js +97 -0
- package/dist/core/counsellor-loader.js.map +1 -0
- package/dist/core/counsellor-registry.d.ts +12 -0
- package/dist/core/counsellor-registry.js +131 -0
- package/dist/core/counsellor-registry.js.map +1 -0
- package/dist/core/excalidraw-cheatsheet.d.ts +5 -0
- package/dist/core/excalidraw-cheatsheet.js +65 -0
- package/dist/core/excalidraw-cheatsheet.js.map +1 -0
- package/dist/core/history.d.ts +16 -0
- package/dist/core/history.js +74 -0
- package/dist/core/history.js.map +1 -0
- package/dist/core/infographic.d.ts +4 -0
- package/dist/core/infographic.js +81 -0
- package/dist/core/infographic.js.map +1 -0
- package/dist/core/key-scanner.d.ts +8 -0
- package/dist/core/key-scanner.js +79 -0
- package/dist/core/key-scanner.js.map +1 -0
- package/dist/core/logger.d.ts +5 -0
- package/dist/core/logger.js +38 -0
- package/dist/core/logger.js.map +1 -0
- package/dist/core/output-formatter.d.ts +2 -0
- package/dist/core/output-formatter.js +47 -0
- package/dist/core/output-formatter.js.map +1 -0
- package/dist/core/secretary.d.ts +23 -0
- package/dist/core/secretary.js +171 -0
- package/dist/core/secretary.js.map +1 -0
- package/dist/core/skill-loader.d.ts +2 -0
- package/dist/core/skill-loader.js +32 -0
- package/dist/core/skill-loader.js.map +1 -0
- package/dist/electron/ipc-handlers.d.ts +3 -0
- package/dist/electron/ipc-handlers.js +477 -0
- package/dist/electron/ipc-handlers.js.map +1 -0
- package/dist/electron/main.d.ts +1 -0
- package/dist/electron/main.js +85 -0
- package/dist/electron/main.js.map +1 -0
- package/dist/electron/preload.d.ts +1 -0
- package/dist/electron/preload.js +38 -0
- package/dist/electron/preload.js.map +1 -0
- package/dist/types.d.ts +184 -0
- package/dist/types.js +12 -0
- package/dist/types.js.map +1 -0
- package/dist-electron/main.js +1635 -0
- package/package.json +87 -0
- package/skills/council-manage/SKILL.md +214 -0
- package/skills/council-setup-keys/SKILL.md +127 -0
package/README.md
ADDED
|
@@ -0,0 +1,207 @@
|
|
|
1
|
+
# @statechange/council
|
|
2
|
+
|
|
3
|
+
A CLI + Electron GUI for orchestrating round-robin AI counsellor discussions. Build a council of AI personas — each with their own backend, model, personality, and source material — and have them debate topics in structured or freeform formats.
|
|
4
|
+
|
|
5
|
+
## Quick Start with Claude Code Skills
|
|
6
|
+
|
|
7
|
+
The fastest way to get started is through [Claude Code](https://claude.com/claude-code) skills. Install the package, and the skills handle the rest:
|
|
8
|
+
|
|
9
|
+
```bash
|
|
10
|
+
npm install -g @statechange/council
|
|
11
|
+
```
|
|
12
|
+
|
|
13
|
+
Then in Claude Code:
|
|
14
|
+
|
|
15
|
+
- **`/council-manage`** — Create counsellors, run discussions, manage your council. Ask it to "create a counsellor based on [person]" or "run a debate about [topic]".
|
|
16
|
+
- **`/council-setup-keys`** — Find API keys scattered across your env files and shell profiles, then consolidate them into `~/.ai-council/config.json`.
|
|
17
|
+
|
|
18
|
+
The skills know how to use the CLI, create counsellors from source material, and troubleshoot configuration issues.
|
|
19
|
+
|
|
20
|
+
## Install
|
|
21
|
+
|
|
22
|
+
```bash
|
|
23
|
+
# Global install (recommended for CLI use)
|
|
24
|
+
npm install -g @statechange/council
|
|
25
|
+
|
|
26
|
+
# Or run directly
|
|
27
|
+
npx @statechange/council discuss "Should we adopt microservices?"
|
|
28
|
+
```
|
|
29
|
+
|
|
30
|
+
## CLI Usage
|
|
31
|
+
|
|
32
|
+
```bash
|
|
33
|
+
# Run a freeform discussion (default)
|
|
34
|
+
council discuss "Should we pivot to enterprise?" --rounds 3
|
|
35
|
+
|
|
36
|
+
# Run a structured debate
|
|
37
|
+
council discuss "Should AI be open source?" --mode debate --rounds 3
|
|
38
|
+
|
|
39
|
+
# Use specific counsellors
|
|
40
|
+
council discuss "Topic" --counsellors ./council/strategist ./council/critic
|
|
41
|
+
|
|
42
|
+
# Topic from a file
|
|
43
|
+
council discuss ./topics/architecture.md
|
|
44
|
+
|
|
45
|
+
# List available counsellors
|
|
46
|
+
council list
|
|
47
|
+
|
|
48
|
+
# View past discussions
|
|
49
|
+
council history
|
|
50
|
+
council history <id>
|
|
51
|
+
```
|
|
52
|
+
|
|
53
|
+
### Discussion Modes
|
|
54
|
+
|
|
55
|
+
**Freeform** (default) — Open group chat. Every counsellor sees the full conversation history on every turn. The first speaker sets the tone and later speakers react.
|
|
56
|
+
|
|
57
|
+
**Debate** — Structured argument with three key differences:
|
|
58
|
+
1. **Round 1 (Constructive)**: Each counsellor argues their position based only on the question — no visibility into what others said.
|
|
59
|
+
2. **Rebuttal rounds**: Counsellors see the constructives plus only the previous round. Speaker order is shuffled each round.
|
|
60
|
+
3. **Interim summaries**: A brief secretary summary after each round, creating a running debate narrative.
|
|
61
|
+
|
|
62
|
+
### Options
|
|
63
|
+
|
|
64
|
+
```
|
|
65
|
+
--council, -c Path to council directory (default: ./council/)
|
|
66
|
+
--counsellors Specific counsellor directory paths (space-separated)
|
|
67
|
+
--rounds, -r Number of discussion rounds (default: 2)
|
|
68
|
+
--mode, -m freeform or debate (default: freeform)
|
|
69
|
+
--output, -o Output directory (default: ./output)
|
|
70
|
+
--format, -f md, json, or both (default: both)
|
|
71
|
+
--infographic, -i Generate an infographic after discussion
|
|
72
|
+
```
|
|
73
|
+
|
|
74
|
+
## GUI
|
|
75
|
+
|
|
76
|
+
Launch the Electron app for a visual interface with real-time streaming, counsellor management, and discussion history:
|
|
77
|
+
|
|
78
|
+
```bash
|
|
79
|
+
# Development
|
|
80
|
+
council-gui # if installed globally
|
|
81
|
+
# or from source:
|
|
82
|
+
bun run dev:gui
|
|
83
|
+
```
|
|
84
|
+
|
|
85
|
+
The GUI includes:
|
|
86
|
+
- Counsellor selection chips with health indicators
|
|
87
|
+
- Freeform/Debate mode toggle with explanatory tooltips
|
|
88
|
+
- Real-time streaming of counsellor responses
|
|
89
|
+
- Round dividers and interim summaries in debate mode
|
|
90
|
+
- Secretary summary with Excalidraw position diagrams
|
|
91
|
+
- Infographic generation (OpenAI / Google)
|
|
92
|
+
- Full discussion history with search
|
|
93
|
+
|
|
94
|
+
## Configuration
|
|
95
|
+
|
|
96
|
+
### API Keys
|
|
97
|
+
|
|
98
|
+
Backends need API keys (except Ollama which runs locally). Keys can come from:
|
|
99
|
+
|
|
100
|
+
1. **Environment variables**: `ANTHROPIC_API_KEY`, `OPENAI_API_KEY`, `GOOGLE_API_KEY`
|
|
101
|
+
2. **Config file**: `~/.ai-council/config.json`
|
|
102
|
+
3. **`.env` file** in the project root
|
|
103
|
+
|
|
104
|
+
Use the CLI to find and import keys:
|
|
105
|
+
|
|
106
|
+
```bash
|
|
107
|
+
council config show # See what's configured
|
|
108
|
+
council config scan # Find keys in env files and shell profiles
|
|
109
|
+
council config scan ~/project/.env # Scan additional paths
|
|
110
|
+
council config import # Import discovered keys
|
|
111
|
+
```
|
|
112
|
+
|
|
113
|
+
Or configure in the GUI under Settings.
|
|
114
|
+
|
|
115
|
+
### Secretary
|
|
116
|
+
|
|
117
|
+
Add a `secretary` block to `~/.ai-council/config.json` to enable post-discussion summaries and debate interim summaries:
|
|
118
|
+
|
|
119
|
+
```json
|
|
120
|
+
{
|
|
121
|
+
"backends": { ... },
|
|
122
|
+
"secretary": {
|
|
123
|
+
"backend": "anthropic",
|
|
124
|
+
"model": "claude-sonnet-4-5-20250929"
|
|
125
|
+
}
|
|
126
|
+
}
|
|
127
|
+
```
|
|
128
|
+
|
|
129
|
+
## Creating Counsellors
|
|
130
|
+
|
|
131
|
+
Each counsellor is a directory with an `ABOUT.md` file:
|
|
132
|
+
|
|
133
|
+
```
|
|
134
|
+
council/
|
|
135
|
+
my-counsellor/
|
|
136
|
+
ABOUT.md
|
|
137
|
+
avatar.jpg # optional
|
|
138
|
+
```
|
|
139
|
+
|
|
140
|
+
### ABOUT.md Format
|
|
141
|
+
|
|
142
|
+
```markdown
|
|
143
|
+
---
|
|
144
|
+
name: "The Strategist"
|
|
145
|
+
description: "Thinks in systems, moats, and long-term positioning"
|
|
146
|
+
interests: ["strategy", "markets", "competition"]
|
|
147
|
+
backend: "anthropic"
|
|
148
|
+
model: "claude-sonnet-4-5-20250929"
|
|
149
|
+
temperature: 0.7
|
|
150
|
+
avatar: "avatar.jpg"
|
|
151
|
+
---
|
|
152
|
+
|
|
153
|
+
You are The Strategist. You sit on a council of experts and bring a
|
|
154
|
+
strategic lens to every discussion...
|
|
155
|
+
```
|
|
156
|
+
|
|
157
|
+
### Supported Backends
|
|
158
|
+
|
|
159
|
+
| Backend | Default Model | API Key |
|
|
160
|
+
|---------|--------------|---------|
|
|
161
|
+
| anthropic | claude-sonnet-4-5-20250929 | `ANTHROPIC_API_KEY` |
|
|
162
|
+
| openai | gpt-4o | `OPENAI_API_KEY` |
|
|
163
|
+
| google | gemini-2.0-flash | `GOOGLE_API_KEY` |
|
|
164
|
+
| ollama | llama3.2 | None (local) |
|
|
165
|
+
|
|
166
|
+
### Registering External Counsellors
|
|
167
|
+
|
|
168
|
+
```bash
|
|
169
|
+
# From a local directory
|
|
170
|
+
council counsellor add ./path/to/counsellor
|
|
171
|
+
|
|
172
|
+
# From a git repository
|
|
173
|
+
council counsellor add https://github.com/user/my-counsellors.git
|
|
174
|
+
|
|
175
|
+
# List registered counsellors
|
|
176
|
+
council counsellor list
|
|
177
|
+
|
|
178
|
+
# Remove
|
|
179
|
+
council counsellor remove my-counsellor
|
|
180
|
+
```
|
|
181
|
+
|
|
182
|
+
## Logging
|
|
183
|
+
|
|
184
|
+
Errors are logged to `~/.ai-council/council.log` with timestamps, context, and full stack traces. Check this file when a counsellor fails to respond or a summary doesn't generate.
|
|
185
|
+
|
|
186
|
+
## Development
|
|
187
|
+
|
|
188
|
+
```bash
|
|
189
|
+
# Install dependencies
|
|
190
|
+
bun install
|
|
191
|
+
|
|
192
|
+
# CLI development
|
|
193
|
+
bun run dev -- discuss "topic"
|
|
194
|
+
|
|
195
|
+
# GUI development
|
|
196
|
+
bun run dev:gui
|
|
197
|
+
|
|
198
|
+
# Build CLI
|
|
199
|
+
bun run build
|
|
200
|
+
|
|
201
|
+
# Build GUI
|
|
202
|
+
bun run build:gui
|
|
203
|
+
```
|
|
204
|
+
|
|
205
|
+
## License
|
|
206
|
+
|
|
207
|
+
MIT
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
---
|
|
2
|
+
name: "The Creative"
|
|
3
|
+
description: "Lateral thinker who finds unconventional angles and unexpected connections"
|
|
4
|
+
interests: ["innovation", "creativity", "lateral thinking", "analogies", "reframing"]
|
|
5
|
+
backend: "anthropic"
|
|
6
|
+
model: "claude-opus-4-6"
|
|
7
|
+
temperature: 0.9
|
|
8
|
+
avatar: "https://api.dicebear.com/9.x/personas/svg?seed=Creative&backgroundColor=ffd5dc"
|
|
9
|
+
---
|
|
10
|
+
You are The Creative, an unconventional lateral thinker on a council of experts.
|
|
11
|
+
|
|
12
|
+
Your role is to bring fresh perspectives, unexpected analogies, and reframed questions to the discussion. You see connections others miss and challenge the group to think beyond obvious solutions.
|
|
13
|
+
|
|
14
|
+
When contributing to a discussion:
|
|
15
|
+
- Reframe the problem in surprising ways
|
|
16
|
+
- Draw analogies from completely different domains
|
|
17
|
+
- Propose ideas that might seem wild but have a kernel of insight
|
|
18
|
+
- Build unexpected bridges between what other council members have said
|
|
19
|
+
- Question whether the group is even asking the right question
|
|
20
|
+
|
|
21
|
+
You balance creative provocation with practical grounding. Your best ideas are ones that make people say "I never thought of it that way" — not just "that's weird."
|
|
22
|
+
|
|
23
|
+
Keep your responses focused and substantive. Aim for 2-4 paragraphs per turn.
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
---
|
|
2
|
+
name: "The Critic"
|
|
3
|
+
description: "Devil's advocate who stress-tests ideas and finds weaknesses"
|
|
4
|
+
interests: ["risk analysis", "critical thinking", "failure modes", "assumptions"]
|
|
5
|
+
backend: "google"
|
|
6
|
+
model: "gemini-flash-latest"
|
|
7
|
+
temperature: 0.6
|
|
8
|
+
avatar: "https://api.dicebear.com/9.x/personas/svg?seed=Critic&backgroundColor=d1d4f9"
|
|
9
|
+
---
|
|
10
|
+
You are The Critic, a sharp analytical thinker on a council of experts.
|
|
11
|
+
|
|
12
|
+
Your role is to be the constructive devil's advocate. You stress-test ideas, surface hidden assumptions, identify failure modes, and push the council toward more rigorous thinking.
|
|
13
|
+
|
|
14
|
+
When contributing to a discussion:
|
|
15
|
+
- Challenge assumptions other council members are making
|
|
16
|
+
- Identify what could go wrong and why
|
|
17
|
+
- Ask the uncomfortable questions others might avoid
|
|
18
|
+
- Point out blind spots and biases in the reasoning so far
|
|
19
|
+
- Be respectful but unflinching — soft criticism helps no one
|
|
20
|
+
|
|
21
|
+
You are not a pessimist. You genuinely want the best outcome, which means being honest about the risks. When an idea is strong, acknowledge it — then probe for weaknesses anyway.
|
|
22
|
+
|
|
23
|
+
Keep your responses focused and substantive. Aim for 2-4 paragraphs per turn.
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
---
|
|
2
|
+
name: "The Strategist"
|
|
3
|
+
description: "Strategic business advisor focused on positioning, growth, and competitive advantage"
|
|
4
|
+
interests: ["strategy", "business", "growth", "positioning", "market analysis"]
|
|
5
|
+
backend: "anthropic"
|
|
6
|
+
model: "claude-haiku-4-5-20251001"
|
|
7
|
+
temperature: 0.7
|
|
8
|
+
avatar: "https://api.dicebear.com/9.x/personas/svg?seed=Strategist&backgroundColor=b6e3f4"
|
|
9
|
+
---
|
|
10
|
+
You are The Strategist, a senior business strategy advisor on a council of experts.
|
|
11
|
+
|
|
12
|
+
Your role is to analyze topics through the lens of strategic thinking: competitive advantage, market positioning, resource allocation, and long-term growth trajectories.
|
|
13
|
+
|
|
14
|
+
When contributing to a discussion:
|
|
15
|
+
- Build on what other council members have said, but bring your strategic perspective
|
|
16
|
+
- Think about second and third-order effects
|
|
17
|
+
- Consider both offensive and defensive strategic positions
|
|
18
|
+
- Ground your thinking in frameworks like Porter's Five Forces, Blue Ocean Strategy, or Jobs-to-be-Done when relevant
|
|
19
|
+
- Be direct and opinionated — the council values strong perspectives that can be debated
|
|
20
|
+
|
|
21
|
+
Keep your responses focused and substantive. Aim for 2-4 paragraphs per turn.
|
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
import Anthropic from "@anthropic-ai/sdk";

/**
 * Create a chat backend backed by the Anthropic Messages API.
 *
 * @param {{ apiKey?: string, baseUrl?: string }} config - Backend config;
 *   apiKey falls back to the ANTHROPIC_API_KEY env var, baseUrl is optional.
 * @returns {{ name: string, defaultModel: string, chat: Function, chatStream: Function }}
 *   Backend object exposing a one-shot `chat(request)` and a streaming
 *   `chatStream(request)` async generator.
 */
export function createAnthropicBackend(config) {
  const client = new Anthropic({
    apiKey: config.apiKey ?? process.env.ANTHROPIC_API_KEY,
    ...(config.baseUrl ? { baseURL: config.baseUrl } : {}),
  });

  // Build the Messages API payload shared by chat() and chatStream().
  // temperature is only included when explicitly set so the API default applies.
  const buildParams = (request) => ({
    model: request.model,
    max_tokens: 4096,
    system: request.systemPrompt,
    messages: request.messages.map((m) => ({
      role: m.role,
      content: m.content,
    })),
    ...(request.temperature !== undefined ? { temperature: request.temperature } : {}),
  });

  return {
    name: "anthropic",
    defaultModel: "claude-sonnet-4-5-20250929",

    // One-shot completion: returns the first text block plus token usage.
    async chat(request) {
      const response = await client.messages.create(buildParams(request));
      const textBlock = response.content.find((b) => b.type === "text");
      return {
        content: textBlock?.text ?? "",
        tokenUsage: {
          input: response.usage.input_tokens,
          output: response.usage.output_tokens,
        },
      };
    },

    // Streaming completion: yields { delta } chunks as text arrives, then a
    // final empty-delta chunk carrying the token usage totals.
    async *chatStream(request) {
      const stream = client.messages.stream(buildParams(request));
      for await (const event of stream) {
        if (event.type === "content_block_delta" && event.delta.type === "text_delta") {
          yield { delta: event.delta.text };
        }
      }
      const finalMessage = await stream.finalMessage();
      yield {
        delta: "",
        tokenUsage: {
          input: finalMessage.usage.input_tokens,
          output: finalMessage.usage.output_tokens,
        },
      };
    },
  };
}
//# sourceMappingURL=anthropic.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"anthropic.js","sourceRoot":"","sources":["../../src/backends/anthropic.ts"],"names":[],"mappings":"AAAA,OAAO,SAAS,MAAM,mBAAmB,CAAC;AAG1C,MAAM,UAAU,sBAAsB,CAAC,MAAqB;IAC1D,MAAM,MAAM,GAAG,IAAI,SAAS,CAAC;QAC3B,MAAM,EAAE,MAAM,CAAC,MAAM,IAAI,OAAO,CAAC,GAAG,CAAC,iBAAiB;QACtD,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;KACvD,CAAC,CAAC;IAEH,OAAO;QACL,IAAI,EAAE,WAAW;QACjB,YAAY,EAAE,4BAA4B;QAE1C,KAAK,CAAC,IAAI,CAAC,OAAoB;YAC7B,MAAM,QAAQ,GAAG,MAAM,MAAM,CAAC,QAAQ,CAAC,MAAM,CAAC;gBAC5C,KAAK,EAAE,OAAO,CAAC,KAAK;gBACpB,UAAU,EAAE,IAAI;gBAChB,MAAM,EAAE,OAAO,CAAC,YAAY;gBAC5B,QAAQ,EAAE,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;oBACrC,IAAI,EAAE,CAAC,CAAC,IAAI;oBACZ,OAAO,EAAE,CAAC,CAAC,OAAO;iBACnB,CAAC,CAAC;gBACH,GAAG,CAAC,OAAO,CAAC,WAAW,KAAK,SAAS,CAAC,CAAC,CAAC,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;aACnF,CAAC,CAAC;YAEH,MAAM,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,MAAM,CAAC,CAAC;YAClE,OAAO;gBACL,OAAO,EAAE,SAAS,EAAE,IAAI,IAAI,EAAE;gBAC9B,UAAU,EAAE;oBACV,KAAK,EAAE,QAAQ,CAAC,KAAK,CAAC,YAAY;oBAClC,MAAM,EAAE,QAAQ,CAAC,KAAK,CAAC,aAAa;iBACrC;aACF,CAAC;QACJ,CAAC;QAED,KAAK,CAAC,CAAC,UAAU,CAAC,OAAoB;YACpC,MAAM,MAAM,GAAG,MAAM,CAAC,QAAQ,CAAC,MAAM,CAAC;gBACpC,KAAK,EAAE,OAAO,CAAC,KAAK;gBACpB,UAAU,EAAE,IAAI;gBAChB,MAAM,EAAE,OAAO,CAAC,YAAY;gBAC5B,QAAQ,EAAE,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;oBACrC,IAAI,EAAE,CAAC,CAAC,IAAI;oBACZ,OAAO,EAAE,CAAC,CAAC,OAAO;iBACnB,CAAC,CAAC;gBACH,GAAG,CAAC,OAAO,CAAC,WAAW,KAAK,SAAS,CAAC,CAAC,CAAC,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;aACnF,CAAC,CAAC;YAEH,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,MAAM,EAAE,CAAC;gBACjC,IAAI,KAAK,CAAC,IAAI,KAAK,qBAAqB,IAAI,KAAK,CAAC,KAAK,CAAC,IAAI,KAAK,YAAY,EAAE,CAAC;oBAC9E,MAAM,EAAE,KAAK,EAAE,KAAK,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC;gBACpC,CAAC;YACH,CAAC;YAED,MAAM,YAAY,GAAG,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;YACjD,MAAM;gBACJ,KAAK,EAAE,EAAE;gBACT,UAAU,EAAE;oBACV
,KAAK,EAAE,YAAY,CAAC,KAAK,CAAC,YAAY;oBACtC,MAAM,EAAE,YAAY,CAAC,KAAK,CAAC,aAAa;iBACzC;aACF,CAAC;QACJ,CAAC;KACF,CAAC;AACJ,CAAC"}
|
|
@@ -0,0 +1,68 @@
|
|
|
1
|
+
import { GoogleGenerativeAI } from "@google/generative-ai";

/**
 * Create a chat backend backed by the Google Generative AI (Gemini) SDK.
 *
 * @param {{ apiKey?: string }} config - Backend config; apiKey falls back to
 *   the GOOGLE_API_KEY env var (empty string if neither is set).
 * @returns {{ name: string, defaultModel: string, chat: Function, chatStream: Function }}
 *   Backend object exposing a one-shot `chat(request)` and a streaming
 *   `chatStream(request)` async generator.
 */
export function createGoogleBackend(config) {
  const apiKey = config.apiKey ?? process.env.GOOGLE_API_KEY ?? "";
  const genAI = new GoogleGenerativeAI(apiKey);

  // Shared setup for chat() and chatStream(): configure the model, convert
  // all but the last message into Gemini chat history (Gemini uses
  // "model"/"user" roles), and return the session plus the final prompt.
  const startSession = (request) => {
    const model = genAI.getGenerativeModel({
      model: request.model,
      systemInstruction: request.systemPrompt,
      generationConfig: {
        ...(request.temperature !== undefined ? { temperature: request.temperature } : {}),
      },
    });
    const history = request.messages.slice(0, -1).map((m) => ({
      role: m.role === "assistant" ? "model" : "user",
      parts: [{ text: m.content }],
    }));
    const chat = model.startChat({ history });
    const lastMessage = request.messages[request.messages.length - 1];
    return { chat, prompt: lastMessage?.content ?? "" };
  };

  // Map the SDK's usage metadata (when present) onto { input, output }.
  const toTokenUsage = (response) =>
    response.usageMetadata
      ? {
          input: response.usageMetadata.promptTokenCount ?? 0,
          output: response.usageMetadata.candidatesTokenCount ?? 0,
        }
      : undefined;

  return {
    name: "google",
    defaultModel: "gemini-2.0-flash",

    // One-shot completion.
    async chat(request) {
      const { chat, prompt } = startSession(request);
      const result = await chat.sendMessage(prompt);
      const response = result.response;
      return {
        content: response.text(),
        tokenUsage: toTokenUsage(response),
      };
    },

    // Streaming completion: yields { delta } chunks, then a final empty-delta
    // chunk carrying token usage (when the SDK reports it).
    async *chatStream(request) {
      const { chat, prompt } = startSession(request);
      const result = await chat.sendMessageStream(prompt);
      for await (const chunk of result.stream) {
        const text = chunk.text();
        if (text) {
          yield { delta: text };
        }
      }
      const response = await result.response;
      yield {
        delta: "",
        tokenUsage: toTokenUsage(response),
      };
    },
  };
}
//# sourceMappingURL=google.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"google.js","sourceRoot":"","sources":["../../src/backends/google.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,kBAAkB,EAAE,MAAM,uBAAuB,CAAC;AAG3D,MAAM,UAAU,mBAAmB,CAAC,MAAqB;IACvD,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,IAAI,OAAO,CAAC,GAAG,CAAC,cAAc,IAAI,EAAE,CAAC;IACjE,MAAM,KAAK,GAAG,IAAI,kBAAkB,CAAC,MAAM,CAAC,CAAC;IAE7C,OAAO;QACL,IAAI,EAAE,QAAQ;QACd,YAAY,EAAE,kBAAkB;QAEhC,KAAK,CAAC,IAAI,CAAC,OAAoB;YAC7B,MAAM,KAAK,GAAG,KAAK,CAAC,kBAAkB,CAAC;gBACrC,KAAK,EAAE,OAAO,CAAC,KAAK;gBACpB,iBAAiB,EAAE,OAAO,CAAC,YAAY;gBACvC,gBAAgB,EAAE;oBAChB,GAAG,CAAC,OAAO,CAAC,WAAW,KAAK,SAAS,CAAC,CAAC,CAAC,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;iBACnF;aACF,CAAC,CAAC;YAEH,MAAM,OAAO,GAAG,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;gBACxD,IAAI,EAAE,CAAC,CAAC,IAAI,KAAK,WAAW,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM;gBAC/C,KAAK,EAAE,CAAC,EAAE,IAAI,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC;aAC7B,CAAC,CAAC,CAAC;YAEJ,MAAM,IAAI,GAAG,KAAK,CAAC,SAAS,CAAC,EAAE,OAAO,EAAE,CAAC,CAAC;YAC1C,MAAM,WAAW,GAAG,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;YAClE,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,EAAE,OAAO,IAAI,EAAE,CAAC,CAAC;YAClE,MAAM,QAAQ,GAAG,MAAM,CAAC,QAAQ,CAAC;YAEjC,OAAO;gBACL,OAAO,EAAE,QAAQ,CAAC,IAAI,EAAE;gBACxB,UAAU,EAAE,QAAQ,CAAC,aAAa;oBAChC,CAAC,CAAC;wBACE,KAAK,EAAE,QAAQ,CAAC,aAAa,CAAC,gBAAgB,IAAI,CAAC;wBACnD,MAAM,EAAE,QAAQ,CAAC,aAAa,CAAC,oBAAoB,IAAI,CAAC;qBACzD;oBACH,CAAC,CAAC,SAAS;aACd,CAAC;QACJ,CAAC;QAED,KAAK,CAAC,CAAC,UAAU,CAAC,OAAoB;YACpC,MAAM,QAAQ,GAAG,KAAK,CAAC,kBAAkB,CAAC;gBACxC,KAAK,EAAE,OAAO,CAAC,KAAK;gBACpB,iBAAiB,EAAE,OAAO,CAAC,YAAY;gBACvC,gBAAgB,EAAE;oBAChB,GAAG,CAAC,OAAO,CAAC,WAAW,KAAK,SAAS,CAAC,CAAC,CAAC,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;iBACnF;aACF,CAAC,CAAC;YAEH,MAAM,OAAO,GAAG,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;gBACxD,IAAI,EAAE,CAAC,CAAC,IAAI,KAAK,WAAW,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM;gBAC/C,K
AAK,EAAE,CAAC,EAAE,IAAI,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC;aAC7B,CAAC,CAAC,CAAC;YAEJ,MAAM,IAAI,GAAG,QAAQ,CAAC,SAAS,CAAC,EAAE,OAAO,EAAE,CAAC,CAAC;YAC7C,MAAM,WAAW,GAAG,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;YAClE,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,iBAAiB,CAAC,WAAW,EAAE,OAAO,IAAI,EAAE,CAAC,CAAC;YAExE,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,MAAM,CAAC,MAAM,EAAE,CAAC;gBACxC,MAAM,IAAI,GAAG,KAAK,CAAC,IAAI,EAAE,CAAC;gBAC1B,IAAI,IAAI,EAAE,CAAC;oBACT,MAAM,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC;gBACxB,CAAC;YACH,CAAC;YAED,MAAM,QAAQ,GAAG,MAAM,MAAM,CAAC,QAAQ,CAAC;YACvC,MAAM;gBACJ,KAAK,EAAE,EAAE;gBACT,UAAU,EAAE,QAAQ,CAAC,aAAa;oBAChC,CAAC,CAAC;wBACE,KAAK,EAAE,QAAQ,CAAC,aAAa,CAAC,gBAAgB,IAAI,CAAC;wBACnD,MAAM,EAAE,QAAQ,CAAC,aAAa,CAAC,oBAAoB,IAAI,CAAC;qBACzD;oBACH,CAAC,CAAC,SAAS;aACd,CAAC;QACJ,CAAC;KACF,CAAC;AACJ,CAAC"}
|
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
import { readFile } from "node:fs/promises";
|
|
2
|
+
import { join } from "node:path";
|
|
3
|
+
import { homedir } from "node:os";
|
|
4
|
+
import { createAnthropicBackend } from "./anthropic.js";
|
|
5
|
+
import { createOpenAIBackend } from "./openai.js";
|
|
6
|
+
import { createGoogleBackend } from "./google.js";
|
|
7
|
+
import { createOllamaBackend } from "./ollama.js";
|
|
8
|
+
const factories = {
|
|
9
|
+
anthropic: createAnthropicBackend,
|
|
10
|
+
openai: createOpenAIBackend,
|
|
11
|
+
google: createGoogleBackend,
|
|
12
|
+
ollama: createOllamaBackend,
|
|
13
|
+
};
|
|
14
|
+
let configCache = null;
|
|
15
|
+
async function loadConfig() {
|
|
16
|
+
if (configCache)
|
|
17
|
+
return configCache;
|
|
18
|
+
const configPath = join(homedir(), ".ai-council", "config.json");
|
|
19
|
+
try {
|
|
20
|
+
const raw = await readFile(configPath, "utf-8");
|
|
21
|
+
configCache = JSON.parse(raw);
|
|
22
|
+
}
|
|
23
|
+
catch {
|
|
24
|
+
configCache = { backends: {} };
|
|
25
|
+
}
|
|
26
|
+
return configCache;
|
|
27
|
+
}
|
|
28
|
+
const backendCache = new Map();
|
|
29
|
+
export function clearCaches() {
|
|
30
|
+
configCache = null;
|
|
31
|
+
backendCache.clear();
|
|
32
|
+
}
|
|
33
|
+
export async function getBackend(name) {
|
|
34
|
+
const cached = backendCache.get(name);
|
|
35
|
+
if (cached)
|
|
36
|
+
return cached;
|
|
37
|
+
const factory = factories[name];
|
|
38
|
+
if (!factory) {
|
|
39
|
+
throw new Error(`Unknown backend: "${name}". Available: ${Object.keys(factories).join(", ")}`);
|
|
40
|
+
}
|
|
41
|
+
const config = await loadConfig();
|
|
42
|
+
const backendConfig = config.backends[name] ?? {};
|
|
43
|
+
const backend = factory(backendConfig);
|
|
44
|
+
backendCache.set(name, backend);
|
|
45
|
+
return backend;
|
|
46
|
+
}
|
|
47
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/backends/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,QAAQ,EAAE,MAAM,kBAAkB,CAAC;AAC5C,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAC;AACjC,OAAO,EAAE,OAAO,EAAE,MAAM,SAAS,CAAC;AAElC,OAAO,EAAE,sBAAsB,EAAE,MAAM,gBAAgB,CAAC;AACxD,OAAO,EAAE,mBAAmB,EAAE,MAAM,aAAa,CAAC;AAClD,OAAO,EAAE,mBAAmB,EAAE,MAAM,aAAa,CAAC;AAClD,OAAO,EAAE,mBAAmB,EAAE,MAAM,aAAa,CAAC;AAIlD,MAAM,SAAS,GAAmC;IAChD,SAAS,EAAE,sBAAsB;IACjC,MAAM,EAAE,mBAAmB;IAC3B,MAAM,EAAE,mBAAmB;IAC3B,MAAM,EAAE,mBAAmB;CAC5B,CAAC;AAEF,IAAI,WAAW,GAAyB,IAAI,CAAC;AAE7C,KAAK,UAAU,UAAU;IACvB,IAAI,WAAW;QAAE,OAAO,WAAW,CAAC;IAEpC,MAAM,UAAU,GAAG,IAAI,CAAC,OAAO,EAAE,EAAE,aAAa,EAAE,aAAa,CAAC,CAAC;IACjE,IAAI,CAAC;QACH,MAAM,GAAG,GAAG,MAAM,QAAQ,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAChD,WAAW,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAkB,CAAC;IACjD,CAAC;IAAC,MAAM,CAAC;QACP,WAAW,GAAG,EAAE,QAAQ,EAAE,EAAE,EAAE,CAAC;IACjC,CAAC;IACD,OAAO,WAAW,CAAC;AACrB,CAAC;AAED,MAAM,YAAY,GAAG,IAAI,GAAG,EAA2B,CAAC;AAExD,MAAM,UAAU,WAAW;IACzB,WAAW,GAAG,IAAI,CAAC;IACnB,YAAY,CAAC,KAAK,EAAE,CAAC;AACvB,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,UAAU,CAAC,IAAY;IAC3C,MAAM,MAAM,GAAG,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;IACtC,IAAI,MAAM;QAAE,OAAO,MAAM,CAAC;IAE1B,MAAM,OAAO,GAAG,SAAS,CAAC,IAAI,CAAC,CAAC;IAChC,IAAI,CAAC,OAAO,EAAE,CAAC;QACb,MAAM,IAAI,KAAK,CAAC,qBAAqB,IAAI,iBAAiB,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;IACjG,CAAC;IAED,MAAM,MAAM,GAAG,MAAM,UAAU,EAAE,CAAC;IAClC,MAAM,aAAa,GAAG,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC;IAClD,MAAM,OAAO,GAAG,OAAO,CAAC,aAAa,CAAC,CAAC;IACvC,YAAY,CAAC,GAAG,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;IAChC,OAAO,OAAO,CAAC;AACjB,CAAC"}
|
|
@@ -0,0 +1,68 @@
|
|
|
1
|
+
import { Ollama } from "ollama";

/**
 * Create a chat backend backed by a local (or remote) Ollama server.
 *
 * @param {{ baseUrl?: string }} config - Backend config; baseUrl defaults to
 *   the standard local Ollama endpoint. No API key is required.
 * @returns {{ name: string, defaultModel: string, chat: Function, chatStream: Function }}
 *   Backend object exposing a one-shot `chat(request)` and a streaming
 *   `chatStream(request)` async generator.
 */
export function createOllamaBackend(config) {
  const client = new Ollama({
    host: config.baseUrl ?? "http://localhost:11434",
  });

  // Ollama takes the system prompt as a leading "system" message rather than
  // a separate field; build that combined message list once for both paths.
  const buildMessages = (request) => [
    { role: "system", content: request.systemPrompt },
    ...request.messages.map((m) => ({
      role: m.role,
      content: m.content,
    })),
  ];

  // Per-request sampling options; temperature only when explicitly set.
  const buildOptions = (request) => ({
    ...(request.temperature !== undefined ? { temperature: request.temperature } : {}),
  });

  return {
    name: "ollama",
    defaultModel: "llama3.2",

    // One-shot completion. Token counts come from prompt_eval_count /
    // eval_count when the server reports them.
    async chat(request) {
      const response = await client.chat({
        model: request.model,
        messages: buildMessages(request),
        options: buildOptions(request),
      });
      return {
        content: response.message.content,
        tokenUsage: response.prompt_eval_count !== undefined
          ? {
              input: response.prompt_eval_count ?? 0,
              output: response.eval_count ?? 0,
            }
          : undefined,
      };
    },

    // Streaming completion: yields { delta } chunks; the terminal chunk
    // (done === true) carries the eval counts, re-emitted as a final
    // empty-delta chunk with tokenUsage.
    async *chatStream(request) {
      const response = await client.chat({
        model: request.model,
        messages: buildMessages(request),
        options: buildOptions(request),
        stream: true,
      });
      let promptEvalCount;
      let evalCount;
      for await (const chunk of response) {
        if (chunk.message.content) {
          yield { delta: chunk.message.content };
        }
        if (chunk.done) {
          promptEvalCount = chunk.prompt_eval_count;
          evalCount = chunk.eval_count;
        }
      }
      yield {
        delta: "",
        tokenUsage: promptEvalCount !== undefined
          ? { input: promptEvalCount ?? 0, output: evalCount ?? 0 }
          : undefined,
      };
    },
  };
}
//# sourceMappingURL=ollama.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"ollama.js","sourceRoot":"","sources":["../../src/backends/ollama.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,MAAM,EAAE,MAAM,QAAQ,CAAC;AAGhC,MAAM,UAAU,mBAAmB,CAAC,MAAqB;IACvD,MAAM,MAAM,GAAG,IAAI,MAAM,CAAC;QACxB,IAAI,EAAE,MAAM,CAAC,OAAO,IAAI,wBAAwB;KACjD,CAAC,CAAC;IAEH,OAAO;QACL,IAAI,EAAE,QAAQ;QACd,YAAY,EAAE,UAAU;QAExB,KAAK,CAAC,IAAI,CAAC,OAAoB;YAC7B,MAAM,QAAQ,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC;gBACjC,KAAK,EAAE,OAAO,CAAC,KAAK;gBACpB,QAAQ,EAAE;oBACR,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,OAAO,CAAC,YAAY,EAAE;oBACjD,GAAG,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;wBAC9B,IAAI,EAAE,CAAC,CAAC,IAA4B;wBACpC,OAAO,EAAE,CAAC,CAAC,OAAO;qBACnB,CAAC,CAAC;iBACJ;gBACD,OAAO,EAAE;oBACP,GAAG,CAAC,OAAO,CAAC,WAAW,KAAK,SAAS,CAAC,CAAC,CAAC,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;iBACnF;aACF,CAAC,CAAC;YAEH,OAAO;gBACL,OAAO,EAAE,QAAQ,CAAC,OAAO,CAAC,OAAO;gBACjC,UAAU,EACR,QAAQ,CAAC,iBAAiB,KAAK,SAAS;oBACtC,CAAC,CAAC;wBACE,KAAK,EAAE,QAAQ,CAAC,iBAAiB,IAAI,CAAC;wBACtC,MAAM,EAAE,QAAQ,CAAC,UAAU,IAAI,CAAC;qBACjC;oBACH,CAAC,CAAC,SAAS;aAChB,CAAC;QACJ,CAAC;QAED,KAAK,CAAC,CAAC,UAAU,CAAC,OAAoB;YACpC,MAAM,QAAQ,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC;gBACjC,KAAK,EAAE,OAAO,CAAC,KAAK;gBACpB,QAAQ,EAAE;oBACR,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,OAAO,CAAC,YAAY,EAAE;oBACjD,GAAG,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;wBAC9B,IAAI,EAAE,CAAC,CAAC,IAA4B;wBACpC,OAAO,EAAE,CAAC,CAAC,OAAO;qBACnB,CAAC,CAAC;iBACJ;gBACD,OAAO,EAAE;oBACP,GAAG,CAAC,OAAO,CAAC,WAAW,KAAK,SAAS,CAAC,CAAC,CAAC,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;iBACnF;gBACD,MAAM,EAAE,IAAI;aACb,CAAC,CAAC;YAEH,IAAI,eAAmC,CAAC;YACxC,IAAI,SAA6B,CAAC;YAElC,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,QAAQ,EAAE,CAAC;gBACnC,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,EAAE,CAAC;oBAC1B,MAAM,EAAE,KAAK,EAAE,KAAK,CAAC,OAAO,CAAC,OAAO,EAAE,CAAC;gBACzC,CAAC;gBACD,IAAI,KAAK,CAAC,IAAI,EAAE,CAAC;oBACf,eAAe,GAAG,KAAK,CAAC,iBAAiB,CAAC;oBAC1C,SAAS,GAAG,KAAK,CAAC,UAAU,CAAC;gBAC/B,CAAC;YACH,CAAC;YAED,MAAM;gBACJ,KAAK,EAAE,EAAE;gBACT,UAAU,EA
CR,eAAe,KAAK,SAAS;oBAC3B,CAAC,CAAC,EAAE,KAAK,EAAE,eAAe,IAAI,CAAC,EAAE,MAAM,EAAE,SAAS,IAAI,CAAC,EAAE;oBACzD,CAAC,CAAC,SAAS;aAChB,CAAC;QACJ,CAAC;KACF,CAAC;AACJ,CAAC"}
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
import OpenAI from "openai";
|
|
2
|
+
/**
 * Build a backend provider that talks to the OpenAI Chat Completions API.
 *
 * The returned object exposes:
 *   - name:         "openai"
 *   - defaultModel: "gpt-4o"
 *   - chat(request):        one-shot completion; resolves { content, tokenUsage? }
 *   - chatStream(request):  async generator yielding { delta } chunks, plus a
 *                           final { delta: "", tokenUsage } chunk when usage arrives
 *
 * `config.apiKey` falls back to the OPENAI_API_KEY environment variable, and
 * `config.baseUrl` (when set) overrides the SDK's default endpoint.
 */
export function createOpenAIBackend(config) {
  const client = new OpenAI({
    apiKey: config.apiKey ?? process.env.OPENAI_API_KEY,
    ...(config.baseUrl ? { baseURL: config.baseUrl } : {}),
  });

  // Prepend the system prompt, then normalize each conversation turn to
  // the { role, content } shape the SDK expects.
  const buildMessages = (request) => [
    { role: "system", content: request.systemPrompt },
    ...request.messages.map(({ role, content }) => ({ role, content })),
  ];

  // Forward temperature only when the caller explicitly set one, so the
  // API's own default applies otherwise.
  const temperatureOf = (request) =>
    request.temperature !== undefined ? { temperature: request.temperature } : {};

  return {
    name: "openai",
    defaultModel: "gpt-4o",

    async chat(request) {
      const completion = await client.chat.completions.create({
        model: request.model,
        messages: buildMessages(request),
        ...temperatureOf(request),
      });

      const first = completion.choices[0];
      const usage = completion.usage;
      return {
        content: first?.message?.content ?? "",
        tokenUsage: usage
          ? { input: usage.prompt_tokens, output: usage.completion_tokens }
          : undefined,
      };
    },

    async *chatStream(request) {
      // include_usage makes the API emit a final chunk carrying token counts.
      const stream = await client.chat.completions.create({
        model: request.model,
        messages: buildMessages(request),
        ...temperatureOf(request),
        stream: true,
        stream_options: { include_usage: true },
      });

      for await (const part of stream) {
        const text = part.choices[0]?.delta?.content;
        if (text) {
          yield { delta: text };
        }
        if (part.usage) {
          // Usage arrives on its own trailing chunk with no content delta.
          yield {
            delta: "",
            tokenUsage: {
              input: part.usage.prompt_tokens,
              output: part.usage.completion_tokens,
            },
          };
        }
      }
    },
  };
}
|
|
62
|
+
//# sourceMappingURL=openai.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"openai.js","sourceRoot":"","sources":["../../src/backends/openai.ts"],"names":[],"mappings":"AAAA,OAAO,MAAM,MAAM,QAAQ,CAAC;AAG5B,MAAM,UAAU,mBAAmB,CAAC,MAAqB;IACvD,MAAM,MAAM,GAAG,IAAI,MAAM,CAAC;QACxB,MAAM,EAAE,MAAM,CAAC,MAAM,IAAI,OAAO,CAAC,GAAG,CAAC,cAAc;QACnD,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;KACvD,CAAC,CAAC;IAEH,OAAO;QACL,IAAI,EAAE,QAAQ;QACd,YAAY,EAAE,QAAQ;QAEtB,KAAK,CAAC,IAAI,CAAC,OAAoB;YAC7B,MAAM,QAAQ,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC;gBACpD,KAAK,EAAE,OAAO,CAAC,KAAK;gBACpB,QAAQ,EAAE;oBACR,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,OAAO,CAAC,YAAY,EAAE;oBACjD,GAAG,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;wBAC9B,IAAI,EAAE,CAAC,CAAC,IAA4B;wBACpC,OAAO,EAAE,CAAC,CAAC,OAAO;qBACnB,CAAC,CAAC;iBACJ;gBACD,GAAG,CAAC,OAAO,CAAC,WAAW,KAAK,SAAS,CAAC,CAAC,CAAC,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;aACnF,CAAC,CAAC;YAEH,MAAM,MAAM,GAAG,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;YACnC,OAAO;gBACL,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,IAAI,EAAE;gBACvC,UAAU,EAAE,QAAQ,CAAC,KAAK;oBACxB,CAAC,CAAC,EAAE,KAAK,EAAE,QAAQ,CAAC,KAAK,CAAC,aAAa,EAAE,MAAM,EAAE,QAAQ,CAAC,KAAK,CAAC,iBAAiB,EAAE;oBACnF,CAAC,CAAC,SAAS;aACd,CAAC;QACJ,CAAC;QAED,KAAK,CAAC,CAAC,UAAU,CAAC,OAAoB;YACpC,MAAM,MAAM,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC;gBAClD,KAAK,EAAE,OAAO,CAAC,KAAK;gBACpB,QAAQ,EAAE;oBACR,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,OAAO,CAAC,YAAY,EAAE;oBACjD,GAAG,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;wBAC9B,IAAI,EAAE,CAAC,CAAC,IAA4B;wBACpC,OAAO,EAAE,CAAC,CAAC,OAAO;qBACnB,CAAC,CAAC;iBACJ;gBACD,GAAG,CAAC,OAAO,CAAC,WAAW,KAAK,SAAS,CAAC,CAAC,CAAC,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;gBAClF,MAAM,EAAE,IAAI;gBACZ,cAAc,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE;aACxC,CAAC,CAAC;YAEH,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,MAAM,EAAE,CAAC;gBACjC,MAAM,KAAK,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,KAAK,EAAE,OAAO,CAAC;gBAC/C,IAAI,KAAK,EAAE,CAAC;oBACV,MAAM,EAAE,KAAK,EAAE,CAAC;gBAClB
,CAAC;gBACD,IAAI,KAAK,CAAC,KAAK,EAAE,CAAC;oBAChB,MAAM;wBACJ,KAAK,EAAE,EAAE;wBACT,UAAU,EAAE;4BACV,KAAK,EAAE,KAAK,CAAC,KAAK,CAAC,aAAa;4BAChC,MAAM,EAAE,KAAK,CAAC,KAAK,CAAC,iBAAiB;yBACtC;qBACF,CAAC;gBACJ,CAAC;YACH,CAAC;QACH,CAAC;KACF,CAAC;AACJ,CAAC"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// Re-export the backend contract types so consumers can import them from the
// backends entry point instead of reaching into ../types directly.
export type { BackendProvider, BackendConfig, ChatRequest, ChatResponse, ChatStreamChunk } from "../types.js";
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"types.js","sourceRoot":"","sources":["../../src/backends/types.ts"],"names":[],"mappings":""}
|
package/dist/cli.d.ts
ADDED