@masyv/relay 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +169 -0
- package/core/Cargo.toml +51 -0
- package/core/src/agents/codex.rs +91 -0
- package/core/src/agents/gemini.rs +114 -0
- package/core/src/agents/mod.rs +86 -0
- package/core/src/agents/ollama.rs +98 -0
- package/core/src/agents/openai.rs +85 -0
- package/core/src/capture/git.rs +66 -0
- package/core/src/capture/mod.rs +41 -0
- package/core/src/capture/session.rs +217 -0
- package/core/src/capture/todos.rs +104 -0
- package/core/src/detect/mod.rs +80 -0
- package/core/src/handoff/mod.rs +156 -0
- package/core/src/lib.rs +198 -0
- package/core/src/main.rs +331 -0
- package/hooks/rate-limit.sh +19 -0
- package/package.json +38 -0
- package/scripts/build.sh +5 -0
- package/scripts/install.sh +12 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 MASYV
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1,169 @@
|
|
|
1
|
+
# Relay
|
|
2
|
+
|
|
3
|
+
**When Claude's rate limit hits, another agent picks up exactly where you left off.**
|
|
4
|
+
|
|
5
|
+
[](https://www.rust-lang.org/)
|
|
6
|
+
[](https://www.npmjs.com/package/@masyv/relay)
|
|
7
|
+
[](LICENSE)
|
|
8
|
+
|
|
9
|
+
## The Problem
|
|
10
|
+
|
|
11
|
+
You're building a feature. It's 6:20 PM. You need to submit by 7 PM. Claude hits its rate limit.
|
|
12
|
+
|
|
13
|
+
Your entire session context — what you were building, your todos, the last error you were debugging, the architectural decisions you made — all gone. You have to re-explain everything to a new tool. By the time you're set up, it's 6:45 PM.
|
|
14
|
+
|
|
15
|
+
**Relay fixes this.** It captures your full session state and hands it to Codex, Gemini, Ollama, or GPT-4 — automatically, with complete context — so work never stops.
|
|
16
|
+
|
|
17
|
+
## How It Works
|
|
18
|
+
|
|
19
|
+
```
|
|
20
|
+
Claude Code session running...
|
|
21
|
+
| (rate limit hit)
|
|
22
|
+
v
|
|
23
|
+
Relay captures session state:
|
|
24
|
+
- Current task (from conversation)
|
|
25
|
+
- Todo list + status (from TodoWrite)
|
|
26
|
+
- Git branch, diff, recent commits
|
|
27
|
+
- Last error / last tool output
|
|
28
|
+
- Key decisions made
|
|
29
|
+
- Deadline (if set)
|
|
30
|
+
|
|
|
31
|
+
v
|
|
32
|
+
Relay dispatches to fallback agent:
|
|
33
|
+
-> Codex CLI (if installed)
|
|
34
|
+
-> Gemini (if API key set)
|
|
35
|
+
-> Ollama (if running locally)
|
|
36
|
+
-> GPT-4 (if API key set)
|
|
37
|
+
|
|
|
38
|
+
v
|
|
39
|
+
Agent picks up EXACTLY where you left off.
|
|
40
|
+
```
|
|
41
|
+
|
|
42
|
+
## Quick Start
|
|
43
|
+
|
|
44
|
+
```bash
|
|
45
|
+
# Install
|
|
46
|
+
git clone https://github.com/Manavarya09/relay
|
|
47
|
+
cd relay && ./scripts/build.sh && ./scripts/install.sh
|
|
48
|
+
|
|
49
|
+
# Generate config
|
|
50
|
+
relay init
|
|
51
|
+
|
|
52
|
+
# Check available agents
|
|
53
|
+
relay agents
|
|
54
|
+
|
|
55
|
+
# See what would be handed off
|
|
56
|
+
relay status
|
|
57
|
+
|
|
58
|
+
# Manual handoff (now)
|
|
59
|
+
relay handoff
|
|
60
|
+
|
|
61
|
+
# Handoff to specific agent with deadline
|
|
62
|
+
relay handoff --to codex --deadline "7:00 PM"
|
|
63
|
+
|
|
64
|
+
# Dry run — just print the handoff package
|
|
65
|
+
relay handoff --dry-run
|
|
66
|
+
```
|
|
67
|
+
|
|
68
|
+
## What Relay Captures
|
|
69
|
+
|
|
70
|
+
```
|
|
71
|
+
═══ Relay Session Snapshot ═══
|
|
72
|
+
|
|
73
|
+
Project: /Users/dev/myproject
|
|
74
|
+
Captured: 2026-04-05 13:32:02
|
|
75
|
+
|
|
76
|
+
── Current Task ──
|
|
77
|
+
Building WebSocket handler in src/server/ws.rs
|
|
78
|
+
|
|
79
|
+
── Todos ──
|
|
80
|
+
✅ [completed] Database schema + REST API
|
|
81
|
+
🔄 [in_progress] WebSocket handler (60% done)
|
|
82
|
+
⏳ [pending] Frontend charts
|
|
83
|
+
⏳ [pending] Auth
|
|
84
|
+
|
|
85
|
+
── Last Error ──
|
|
86
|
+
error[E0499]: cannot borrow `state` as mutable...
|
|
87
|
+
|
|
88
|
+
── Decisions ──
|
|
89
|
+
• Using Socket.io instead of raw WebSockets
|
|
90
|
+
• Redis pub/sub for cross-server events
|
|
91
|
+
|
|
92
|
+
── Git ──
|
|
93
|
+
Branch: feature/websocket
|
|
94
|
+
3 uncommitted changes
|
|
95
|
+
Recent: abc1234 Add WebSocket route skeleton
|
|
96
|
+
```
|
|
97
|
+
|
|
98
|
+
## Agent Priority
|
|
99
|
+
|
|
100
|
+
Configure in `~/.relay/config.toml`:
|
|
101
|
+
|
|
102
|
+
```toml
|
|
103
|
+
[general]
|
|
104
|
+
priority = ["codex", "gemini", "ollama", "openai"]
|
|
105
|
+
auto_handoff = true
|
|
106
|
+
max_context_tokens = 8000
|
|
107
|
+
|
|
108
|
+
[agents.codex]
|
|
109
|
+
model = "o4-mini"
|
|
110
|
+
|
|
111
|
+
[agents.gemini]
|
|
112
|
+
api_key = "your-key"
|
|
113
|
+
model = "gemini-2.5-pro"
|
|
114
|
+
|
|
115
|
+
[agents.ollama]
|
|
116
|
+
url = "http://localhost:11434"
|
|
117
|
+
model = "llama3"
|
|
118
|
+
|
|
119
|
+
[agents.openai]
|
|
120
|
+
api_key = "your-key"
|
|
121
|
+
model = "gpt-4o"
|
|
122
|
+
```
|
|
123
|
+
|
|
124
|
+
Relay tries agents in priority order and uses the first available one.
|
|
125
|
+
|
|
126
|
+
## CLI
|
|
127
|
+
|
|
128
|
+
```
|
|
129
|
+
COMMANDS:
|
|
130
|
+
handoff Hand off to fallback agent (--to, --deadline, --dry-run)
|
|
131
|
+
status Show current session snapshot
|
|
132
|
+
agents List agents and availability
|
|
133
|
+
init Generate default config
|
|
134
|
+
hook PostToolUse hook (auto-detect rate limits)
|
|
135
|
+
|
|
136
|
+
OPTIONS:
|
|
137
|
+
--json Output as JSON
|
|
138
|
+
--project Project directory (default: cwd)
|
|
139
|
+
-v Verbose logging
|
|
140
|
+
```
|
|
141
|
+
|
|
142
|
+
## Auto-Handoff via Hook
|
|
143
|
+
|
|
144
|
+
Add to `~/.claude/settings.json`:
|
|
145
|
+
|
|
146
|
+
```json
|
|
147
|
+
{
|
|
148
|
+
"hooks": {
|
|
149
|
+
"PostToolUse": [
|
|
150
|
+
{
|
|
151
|
+
"matcher": "*",
|
|
152
|
+
"hooks": [{ "type": "command", "command": "relay hook" }]
|
|
153
|
+
}
|
|
154
|
+
]
|
|
155
|
+
}
|
|
156
|
+
}
|
|
157
|
+
```
|
|
158
|
+
|
|
159
|
+
Relay will detect rate limit signals in tool output and automatically hand off.
|
|
160
|
+
|
|
161
|
+
## Performance
|
|
162
|
+
|
|
163
|
+
- **4.6 MB** binary (release, stripped)
|
|
164
|
+
- **< 100ms** to capture full session snapshot
|
|
165
|
+
- **Zero network calls** for capture (git + file reads only)
|
|
166
|
+
|
|
167
|
+
## License
|
|
168
|
+
|
|
169
|
+
MIT
|
package/core/Cargo.toml
ADDED
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
[package]
|
|
2
|
+
name = "relay"
|
|
3
|
+
version = "0.1.0"
|
|
4
|
+
edition = "2021"
|
|
5
|
+
description = "Relay — When Claude's rate limit hits, another agent picks up exactly where you left off."
|
|
6
|
+
license = "MIT"
|
|
7
|
+
|
|
8
|
+
[[bin]]
|
|
9
|
+
name = "relay"
|
|
10
|
+
path = "src/main.rs"
|
|
11
|
+
|
|
12
|
+
[lib]
|
|
13
|
+
name = "relay"
|
|
14
|
+
path = "src/lib.rs"
|
|
15
|
+
|
|
16
|
+
[dependencies]
|
|
17
|
+
# CLI
|
|
18
|
+
clap = { version = "4", features = ["derive"] }
|
|
19
|
+
|
|
20
|
+
# Serialization
|
|
21
|
+
serde = { version = "1", features = ["derive"] }
|
|
22
|
+
serde_json = "1"
|
|
23
|
+
toml = "0.8"
|
|
24
|
+
|
|
25
|
+
# Error handling
|
|
26
|
+
anyhow = "1"
|
|
27
|
+
thiserror = "2"
|
|
28
|
+
|
|
29
|
+
# Text processing
|
|
30
|
+
regex = "1"
|
|
31
|
+
|
|
32
|
+
# Logging
|
|
33
|
+
tracing = "0.1"
|
|
34
|
+
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
|
|
35
|
+
|
|
36
|
+
# Time
|
|
37
|
+
chrono = { version = "0.4", features = ["serde"] }
|
|
38
|
+
|
|
39
|
+
# Hashing
|
|
40
|
+
blake3 = "1"
|
|
41
|
+
|
|
42
|
+
# HTTP client (for Ollama, OpenAI, Gemini APIs)
|
|
43
|
+
ureq = { version = "2", features = ["json"] }
|
|
44
|
+
|
|
45
|
+
# Terminal
|
|
46
|
+
colored = "2"
|
|
47
|
+
|
|
48
|
+
[profile.release]
|
|
49
|
+
opt-level = 3
|
|
50
|
+
lto = "thin"
|
|
51
|
+
strip = true
|
|
@@ -0,0 +1,91 @@
|
|
|
1
|
+
//! Codex CLI agent adapter.
|
|
2
|
+
//! Launches `codex` (OpenAI Codex CLI) as a subprocess with the handoff prompt.
|
|
3
|
+
|
|
4
|
+
use super::Agent;
|
|
5
|
+
use crate::{AgentStatus, CodexConfig, HandoffResult};
|
|
6
|
+
use anyhow::Result;
|
|
7
|
+
use std::process::Command;
|
|
8
|
+
|
|
9
|
+
/// Adapter that launches the OpenAI Codex CLI as a subprocess with the
/// handoff prompt.
pub struct CodexAgent {
    // Executable name or path; defaults to "codex" when not configured.
    binary: String,
    // Model identifier forwarded to the CLI via `--model`.
    model: String,
}
|
|
13
|
+
|
|
14
|
+
impl CodexAgent {
|
|
15
|
+
pub fn new(config: &CodexConfig) -> Self {
|
|
16
|
+
Self {
|
|
17
|
+
binary: config.binary.clone().unwrap_or_else(|| "codex".into()),
|
|
18
|
+
model: config.model.clone(),
|
|
19
|
+
}
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
fn find_binary(&self) -> Option<String> {
|
|
23
|
+
// Check if binary exists in PATH
|
|
24
|
+
let output = Command::new("which")
|
|
25
|
+
.arg(&self.binary)
|
|
26
|
+
.output()
|
|
27
|
+
.ok()?;
|
|
28
|
+
if output.status.success() {
|
|
29
|
+
return Some(String::from_utf8_lossy(&output.stdout).trim().to_string());
|
|
30
|
+
}
|
|
31
|
+
None
|
|
32
|
+
}
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
impl Agent for CodexAgent {
|
|
36
|
+
fn name(&self) -> &str { "codex" }
|
|
37
|
+
|
|
38
|
+
fn check_available(&self) -> AgentStatus {
|
|
39
|
+
match self.find_binary() {
|
|
40
|
+
Some(path) => {
|
|
41
|
+
// Try to get version
|
|
42
|
+
let version = Command::new(&path)
|
|
43
|
+
.arg("--version")
|
|
44
|
+
.output()
|
|
45
|
+
.ok()
|
|
46
|
+
.filter(|o| o.status.success())
|
|
47
|
+
.map(|o| String::from_utf8_lossy(&o.stdout).trim().to_string());
|
|
48
|
+
|
|
49
|
+
AgentStatus {
|
|
50
|
+
name: "codex".into(),
|
|
51
|
+
available: true,
|
|
52
|
+
reason: format!("Found at {path}"),
|
|
53
|
+
version,
|
|
54
|
+
}
|
|
55
|
+
}
|
|
56
|
+
None => AgentStatus {
|
|
57
|
+
name: "codex".into(),
|
|
58
|
+
available: false,
|
|
59
|
+
reason: format!("'{}' not found in PATH", self.binary),
|
|
60
|
+
version: None,
|
|
61
|
+
},
|
|
62
|
+
}
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
fn execute(&self, handoff_prompt: &str, project_dir: &str) -> Result<HandoffResult> {
|
|
66
|
+
let binary = self.find_binary().unwrap_or(self.binary.clone());
|
|
67
|
+
|
|
68
|
+
// Write handoff to a temp file
|
|
69
|
+
let tmp = std::env::temp_dir().join("relay_handoff.md");
|
|
70
|
+
std::fs::write(&tmp, handoff_prompt)?;
|
|
71
|
+
|
|
72
|
+
// Launch codex with the prompt
|
|
73
|
+
let mut child = Command::new(&binary)
|
|
74
|
+
.current_dir(project_dir)
|
|
75
|
+
.arg("--model")
|
|
76
|
+
.arg(&self.model)
|
|
77
|
+
.arg("--quiet")
|
|
78
|
+
.arg(handoff_prompt)
|
|
79
|
+
.spawn()?;
|
|
80
|
+
|
|
81
|
+
// Don't wait — let it run in the foreground
|
|
82
|
+
let _ = child.wait();
|
|
83
|
+
|
|
84
|
+
Ok(HandoffResult {
|
|
85
|
+
agent: "codex".into(),
|
|
86
|
+
success: true,
|
|
87
|
+
message: format!("Codex ({}) launched with handoff context", self.model),
|
|
88
|
+
handoff_file: Some(tmp.to_string_lossy().to_string()),
|
|
89
|
+
})
|
|
90
|
+
}
|
|
91
|
+
}
|
|
@@ -0,0 +1,114 @@
|
|
|
1
|
+
//! Gemini agent adapter — uses the Gemini API.
|
|
2
|
+
|
|
3
|
+
use super::Agent;
|
|
4
|
+
use crate::{AgentStatus, GeminiConfig, HandoffResult};
|
|
5
|
+
use anyhow::Result;
|
|
6
|
+
|
|
7
|
+
/// Adapter for Gemini: prefers the `gemini` CLI when installed, otherwise
/// calls the Gemini REST API directly.
pub struct GeminiAgent {
    // API key from config, or from the GEMINI_API_KEY / GOOGLE_API_KEY
    // environment variables; None means only the CLI path can work.
    api_key: Option<String>,
    // Model identifier, e.g. "gemini-2.5-pro".
    model: String,
}
|
|
11
|
+
|
|
12
|
+
impl GeminiAgent {
|
|
13
|
+
pub fn new(config: &GeminiConfig) -> Self {
|
|
14
|
+
let api_key = config.api_key.clone()
|
|
15
|
+
.or_else(|| std::env::var("GEMINI_API_KEY").ok())
|
|
16
|
+
.or_else(|| std::env::var("GOOGLE_API_KEY").ok());
|
|
17
|
+
Self {
|
|
18
|
+
api_key,
|
|
19
|
+
model: config.model.clone(),
|
|
20
|
+
}
|
|
21
|
+
}
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
impl Agent for GeminiAgent {
|
|
25
|
+
fn name(&self) -> &str { "gemini" }
|
|
26
|
+
|
|
27
|
+
fn check_available(&self) -> AgentStatus {
|
|
28
|
+
// First check if gemini CLI is available
|
|
29
|
+
if let Ok(output) = std::process::Command::new("which").arg("gemini").output() {
|
|
30
|
+
if output.status.success() {
|
|
31
|
+
return AgentStatus {
|
|
32
|
+
name: "gemini".into(),
|
|
33
|
+
available: true,
|
|
34
|
+
reason: "Gemini CLI found in PATH".into(),
|
|
35
|
+
version: None,
|
|
36
|
+
};
|
|
37
|
+
}
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
match &self.api_key {
|
|
41
|
+
Some(_) => AgentStatus {
|
|
42
|
+
name: "gemini".into(),
|
|
43
|
+
available: true,
|
|
44
|
+
reason: format!("API key configured, model: {}", self.model),
|
|
45
|
+
version: Some(self.model.clone()),
|
|
46
|
+
},
|
|
47
|
+
None => AgentStatus {
|
|
48
|
+
name: "gemini".into(),
|
|
49
|
+
available: false,
|
|
50
|
+
reason: "No API key. Set GEMINI_API_KEY env var or add to config.toml".into(),
|
|
51
|
+
version: None,
|
|
52
|
+
},
|
|
53
|
+
}
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
fn execute(&self, handoff_prompt: &str, project_dir: &str) -> Result<HandoffResult> {
|
|
57
|
+
// Try Gemini CLI first
|
|
58
|
+
if let Ok(output) = std::process::Command::new("which").arg("gemini").output() {
|
|
59
|
+
if output.status.success() {
|
|
60
|
+
let mut child = std::process::Command::new("gemini")
|
|
61
|
+
.current_dir(project_dir)
|
|
62
|
+
.arg(handoff_prompt)
|
|
63
|
+
.spawn()?;
|
|
64
|
+
let _ = child.wait();
|
|
65
|
+
return Ok(HandoffResult {
|
|
66
|
+
agent: "gemini".into(),
|
|
67
|
+
success: true,
|
|
68
|
+
message: "Gemini CLI launched with handoff context".into(),
|
|
69
|
+
handoff_file: None,
|
|
70
|
+
});
|
|
71
|
+
}
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
// Fall back to API
|
|
75
|
+
let api_key = self.api_key.as_ref()
|
|
76
|
+
.ok_or_else(|| anyhow::anyhow!("No Gemini API key"))?;
|
|
77
|
+
|
|
78
|
+
let url = format!(
|
|
79
|
+
"https://generativelanguage.googleapis.com/v1beta/models/{}:generateContent?key={}",
|
|
80
|
+
self.model, api_key
|
|
81
|
+
);
|
|
82
|
+
|
|
83
|
+
let body = serde_json::json!({
|
|
84
|
+
"contents": [{
|
|
85
|
+
"parts": [{ "text": handoff_prompt }]
|
|
86
|
+
}]
|
|
87
|
+
});
|
|
88
|
+
|
|
89
|
+
let resp = ureq::post(&url)
|
|
90
|
+
.set("Content-Type", "application/json")
|
|
91
|
+
.send_json(&body)?;
|
|
92
|
+
|
|
93
|
+
let resp_json: serde_json::Value = resp.into_json()?;
|
|
94
|
+
|
|
95
|
+
let text = resp_json
|
|
96
|
+
.get("candidates")
|
|
97
|
+
.and_then(|c| c.get(0))
|
|
98
|
+
.and_then(|c| c.get("content"))
|
|
99
|
+
.and_then(|c| c.get("parts"))
|
|
100
|
+
.and_then(|p| p.get(0))
|
|
101
|
+
.and_then(|p| p.get("text"))
|
|
102
|
+
.and_then(|t| t.as_str())
|
|
103
|
+
.unwrap_or("(no response)");
|
|
104
|
+
|
|
105
|
+
println!("{text}");
|
|
106
|
+
|
|
107
|
+
Ok(HandoffResult {
|
|
108
|
+
agent: "gemini".into(),
|
|
109
|
+
success: true,
|
|
110
|
+
message: format!("Gemini ({}) responded to handoff", self.model),
|
|
111
|
+
handoff_file: None,
|
|
112
|
+
})
|
|
113
|
+
}
|
|
114
|
+
}
|
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
pub mod codex;
|
|
2
|
+
pub mod gemini;
|
|
3
|
+
pub mod ollama;
|
|
4
|
+
pub mod openai;
|
|
5
|
+
|
|
6
|
+
use crate::{AgentStatus, Config, HandoffResult};
|
|
7
|
+
use anyhow::Result;
|
|
8
|
+
|
|
9
|
+
/// Trait for all fallback agents.
///
/// An agent is anything that can take over a handoff prompt: a local CLI
/// (codex, gemini), a local server (ollama), or a hosted API (openai).
pub trait Agent {
    /// Stable lowercase identifier (e.g. "codex") used in config priority
    /// lists and CLI `--to` arguments.
    fn name(&self) -> &str;
    /// Probe whether this agent can run right now (binary on PATH, API key
    /// set, server reachable, ...) and report why / why not.
    fn check_available(&self) -> AgentStatus;
    /// Run the handoff prompt against this agent from inside `project_dir`.
    fn execute(&self, handoff_prompt: &str, project_dir: &str) -> Result<HandoffResult>;
}
|
|
15
|
+
|
|
16
|
+
/// Get all configured agents in priority order.
|
|
17
|
+
pub fn get_agents(config: &Config) -> Vec<Box<dyn Agent>> {
|
|
18
|
+
let mut agents: Vec<Box<dyn Agent>> = Vec::new();
|
|
19
|
+
for name in &config.general.priority {
|
|
20
|
+
match name.as_str() {
|
|
21
|
+
"codex" => agents.push(Box::new(codex::CodexAgent::new(&config.agents.codex))),
|
|
22
|
+
"gemini" => agents.push(Box::new(gemini::GeminiAgent::new(&config.agents.gemini))),
|
|
23
|
+
"ollama" => agents.push(Box::new(ollama::OllamaAgent::new(&config.agents.ollama))),
|
|
24
|
+
"openai" => agents.push(Box::new(openai::OpenAIAgent::new(&config.agents.openai))),
|
|
25
|
+
_ => {} // unknown agent, skip
|
|
26
|
+
}
|
|
27
|
+
}
|
|
28
|
+
agents
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
/// Check availability of all agents and return statuses.
|
|
32
|
+
pub fn check_all_agents(config: &Config) -> Vec<AgentStatus> {
|
|
33
|
+
get_agents(config).iter().map(|a| a.check_available()).collect()
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
/// Execute handoff on the first available agent.
|
|
37
|
+
pub fn handoff_to_first_available(
|
|
38
|
+
config: &Config,
|
|
39
|
+
handoff_prompt: &str,
|
|
40
|
+
project_dir: &str,
|
|
41
|
+
) -> Result<HandoffResult> {
|
|
42
|
+
let agents = get_agents(config);
|
|
43
|
+
for agent in &agents {
|
|
44
|
+
let status = agent.check_available();
|
|
45
|
+
if status.available {
|
|
46
|
+
tracing::info!("Handing off to {}", agent.name());
|
|
47
|
+
return agent.execute(handoff_prompt, project_dir);
|
|
48
|
+
}
|
|
49
|
+
}
|
|
50
|
+
Ok(HandoffResult {
|
|
51
|
+
agent: "none".into(),
|
|
52
|
+
success: false,
|
|
53
|
+
message: "No agents available. Configure at least one in ~/.relay/config.toml".into(),
|
|
54
|
+
handoff_file: None,
|
|
55
|
+
})
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
/// Execute handoff on a specific named agent.
|
|
59
|
+
pub fn handoff_to_named(
|
|
60
|
+
config: &Config,
|
|
61
|
+
agent_name: &str,
|
|
62
|
+
handoff_prompt: &str,
|
|
63
|
+
project_dir: &str,
|
|
64
|
+
) -> Result<HandoffResult> {
|
|
65
|
+
let agents = get_agents(config);
|
|
66
|
+
for agent in &agents {
|
|
67
|
+
if agent.name() == agent_name {
|
|
68
|
+
let status = agent.check_available();
|
|
69
|
+
if !status.available {
|
|
70
|
+
return Ok(HandoffResult {
|
|
71
|
+
agent: agent_name.into(),
|
|
72
|
+
success: false,
|
|
73
|
+
message: format!("{} is not available: {}", agent_name, status.reason),
|
|
74
|
+
handoff_file: None,
|
|
75
|
+
});
|
|
76
|
+
}
|
|
77
|
+
return agent.execute(handoff_prompt, project_dir);
|
|
78
|
+
}
|
|
79
|
+
}
|
|
80
|
+
Ok(HandoffResult {
|
|
81
|
+
agent: agent_name.into(),
|
|
82
|
+
success: false,
|
|
83
|
+
message: format!("Unknown agent: {agent_name}. Available: codex, gemini, ollama, openai"),
|
|
84
|
+
handoff_file: None,
|
|
85
|
+
})
|
|
86
|
+
}
|
|
@@ -0,0 +1,98 @@
|
|
|
1
|
+
//! Ollama local agent adapter — uses the Ollama REST API.
|
|
2
|
+
|
|
3
|
+
use super::Agent;
|
|
4
|
+
use crate::{AgentStatus, HandoffResult, OllamaConfig};
|
|
5
|
+
use anyhow::Result;
|
|
6
|
+
|
|
7
|
+
/// Adapter for a local Ollama server, driven over its REST API.
pub struct OllamaAgent {
    // Base URL of the Ollama server, e.g. "http://localhost:11434".
    url: String,
    // Model name requested for generation, e.g. "llama3".
    model: String,
}
|
|
11
|
+
|
|
12
|
+
impl OllamaAgent {
|
|
13
|
+
pub fn new(config: &OllamaConfig) -> Self {
|
|
14
|
+
Self {
|
|
15
|
+
url: config.url.clone(),
|
|
16
|
+
model: config.model.clone(),
|
|
17
|
+
}
|
|
18
|
+
}
|
|
19
|
+
}
|
|
20
|
+
|
|
21
|
+
impl Agent for OllamaAgent {
|
|
22
|
+
fn name(&self) -> &str { "ollama" }
|
|
23
|
+
|
|
24
|
+
fn check_available(&self) -> AgentStatus {
|
|
25
|
+
// Ping Ollama's API
|
|
26
|
+
let tag_url = format!("{}/api/tags", self.url);
|
|
27
|
+
match ureq::get(&tag_url).call() {
|
|
28
|
+
Ok(resp) => {
|
|
29
|
+
let body: serde_json::Value = resp.into_json().unwrap_or_default();
|
|
30
|
+
let models = body.get("models")
|
|
31
|
+
.and_then(|m| m.as_array())
|
|
32
|
+
.map(|a| a.len())
|
|
33
|
+
.unwrap_or(0);
|
|
34
|
+
|
|
35
|
+
// Check if our target model is available
|
|
36
|
+
let has_model = body.get("models")
|
|
37
|
+
.and_then(|m| m.as_array())
|
|
38
|
+
.map(|arr| arr.iter().any(|m| {
|
|
39
|
+
m.get("name").and_then(|n| n.as_str())
|
|
40
|
+
.map(|n| n.starts_with(&self.model))
|
|
41
|
+
.unwrap_or(false)
|
|
42
|
+
}))
|
|
43
|
+
.unwrap_or(false);
|
|
44
|
+
|
|
45
|
+
if has_model {
|
|
46
|
+
AgentStatus {
|
|
47
|
+
name: "ollama".into(),
|
|
48
|
+
available: true,
|
|
49
|
+
reason: format!("Running at {}, {} models, '{}' available", self.url, models, self.model),
|
|
50
|
+
version: Some(self.model.clone()),
|
|
51
|
+
}
|
|
52
|
+
} else {
|
|
53
|
+
AgentStatus {
|
|
54
|
+
name: "ollama".into(),
|
|
55
|
+
available: true,
|
|
56
|
+
reason: format!("Running but model '{}' not pulled. {} models available", self.model, models),
|
|
57
|
+
version: None,
|
|
58
|
+
}
|
|
59
|
+
}
|
|
60
|
+
}
|
|
61
|
+
Err(_) => AgentStatus {
|
|
62
|
+
name: "ollama".into(),
|
|
63
|
+
available: false,
|
|
64
|
+
reason: format!("Not reachable at {}", self.url),
|
|
65
|
+
version: None,
|
|
66
|
+
},
|
|
67
|
+
}
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
fn execute(&self, handoff_prompt: &str, _project_dir: &str) -> Result<HandoffResult> {
|
|
71
|
+
let url = format!("{}/api/generate", self.url);
|
|
72
|
+
|
|
73
|
+
let body = serde_json::json!({
|
|
74
|
+
"model": self.model,
|
|
75
|
+
"prompt": handoff_prompt,
|
|
76
|
+
"stream": false
|
|
77
|
+
});
|
|
78
|
+
|
|
79
|
+
let resp = ureq::post(&url)
|
|
80
|
+
.set("Content-Type", "application/json")
|
|
81
|
+
.send_json(&body)?;
|
|
82
|
+
|
|
83
|
+
let resp_json: serde_json::Value = resp.into_json()?;
|
|
84
|
+
let text = resp_json
|
|
85
|
+
.get("response")
|
|
86
|
+
.and_then(|r| r.as_str())
|
|
87
|
+
.unwrap_or("(no response)");
|
|
88
|
+
|
|
89
|
+
println!("{text}");
|
|
90
|
+
|
|
91
|
+
Ok(HandoffResult {
|
|
92
|
+
agent: "ollama".into(),
|
|
93
|
+
success: true,
|
|
94
|
+
message: format!("Ollama ({}) responded", self.model),
|
|
95
|
+
handoff_file: None,
|
|
96
|
+
})
|
|
97
|
+
}
|
|
98
|
+
}
|