@nordbyte/nordrelay 0.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.example +88 -0
- package/Dockerfile +19 -0
- package/LICENSE +21 -0
- package/README.md +749 -0
- package/dist/access-control.js +146 -0
- package/dist/agent-factory.js +22 -0
- package/dist/agent.js +57 -0
- package/dist/artifacts.js +515 -0
- package/dist/attachments.js +69 -0
- package/dist/bot-preferences.js +146 -0
- package/dist/bot-ui.js +161 -0
- package/dist/bot.js +4520 -0
- package/dist/codex-auth.js +150 -0
- package/dist/codex-cli.js +79 -0
- package/dist/codex-config.js +50 -0
- package/dist/codex-launch.js +109 -0
- package/dist/codex-session.js +591 -0
- package/dist/codex-state.js +573 -0
- package/dist/config.js +385 -0
- package/dist/context-key.js +23 -0
- package/dist/error-messages.js +73 -0
- package/dist/format.js +121 -0
- package/dist/index.js +140 -0
- package/dist/logger.js +27 -0
- package/dist/operations.js +133 -0
- package/dist/persistence.js +65 -0
- package/dist/pi-cli.js +19 -0
- package/dist/pi-rpc.js +158 -0
- package/dist/pi-session.js +573 -0
- package/dist/pi-state.js +226 -0
- package/dist/prompt-store.js +241 -0
- package/dist/redaction.js +47 -0
- package/dist/session-format.js +191 -0
- package/dist/session-registry.js +195 -0
- package/dist/telegram-rate-limit.js +136 -0
- package/dist/voice.js +373 -0
- package/dist/workspace-policy.js +41 -0
- package/docker-compose.yml +17 -0
- package/launchd/start.sh +8 -0
- package/package.json +69 -0
- package/plugins/nordrelay/.codex-plugin/plugin.json +48 -0
- package/plugins/nordrelay/assets/nordrelay.svg +5 -0
- package/plugins/nordrelay/commands/remote.md +33 -0
- package/plugins/nordrelay/scripts/nordrelay.mjs +396 -0
- package/plugins/nordrelay/skills/telegram-remote/SKILL.md +26 -0
package/dist/voice.js
ADDED
|
@@ -0,0 +1,373 @@
|
|
|
1
|
+
import { spawn } from "node:child_process";
import { createRequire } from "node:module";
import path from "node:path";
import { readFile } from "node:fs/promises";
// Bare specifier for the optional macOS/Apple Silicon transcription backend.
const PARAKEET_SPECIFIER = "parakeet-coreml";
const FFMPEG_INSTALL_MESSAGE = "ffmpeg not found. Install it with: sudo apt-get install ffmpeg or brew install ffmpeg";
// User-facing error listing the three ways to enable voice transcription.
const NO_BACKEND_ERROR = `Voice messages require a transcription backend.

Option 1: Install faster-whisper for local Linux transcription:
python3 -m venv .venv
.venv/bin/python -m pip install faster-whisper
Add FASTER_WHISPER_PYTHON=.venv/bin/python to your .env file

Option 2: Install Parakeet for local macOS Apple Silicon transcription (free, private, ~1.5GB download):
npm install parakeet-coreml
Also requires ffmpeg: sudo apt-get install ffmpeg or brew install ffmpeg

Option 3: Set OPENAI_API_KEY for cloud transcription (~$0.006/min):
Add OPENAI_API_KEY=sk-... to your .env file`;
// Python one-liner used only to probe whether faster_whisper is importable.
const FASTER_WHISPER_CHECK_SCRIPT = "import faster_whisper";
// Python program run via `python -c <script> <audio_path>`; reads tuning knobs
// from env vars and prints a single JSON line: { text, language, duration }.
const FASTER_WHISPER_TRANSCRIBE_SCRIPT = `
import json
import os
import sys
from faster_whisper import WhisperModel

audio_path = sys.argv[1]
model_name = os.environ.get("FASTER_WHISPER_MODEL", "base")
device = os.environ.get("FASTER_WHISPER_DEVICE", "cpu")
compute_type = os.environ.get("FASTER_WHISPER_COMPUTE_TYPE", "int8")
language = os.environ.get("FASTER_WHISPER_LANGUAGE") or None

model = WhisperModel(model_name, device=device, compute_type=compute_type)
segments, info = model.transcribe(audio_path, language=language, vad_filter=True)
text = " ".join(segment.text.strip() for segment in segments).strip()
print(json.dumps({
"text": text,
"language": getattr(info, "language", None),
"duration": getattr(info, "duration", None),
}))
`;
// createRequire lets this ES module synchronously load the optional
// CommonJS parakeet-coreml package.
const _require = createRequire(import.meta.url);
// Test seams: each of these can be swapped out via the _set*Hook helpers below.
let _importModule = async (specifier) => _require(specifier);
let _decodeAudio = decodeAudioToSamples;
let _runCommand = runCommand;
// Lazily-initialized Parakeet engine, reused across transcription calls.
let _engine = null;
// Memoized result of the faster-whisper availability probe (undefined = not yet probed).
let _fasterWhisperAvailable;
|
|
48
|
+
// Test seam: override the loader used for optional modules (e.g. stub parakeet-coreml).
export function _setImportHook(hook) {
    _importModule = hook;
}
|
|
51
|
+
// Test seam: override the ffmpeg-based audio decoder.
export function _setDecodeHook(hook) {
    _decodeAudio = hook;
}
|
|
54
|
+
// Test seam: override subprocess execution. Also clears the memoized
// faster-whisper availability probe so the new hook is re-consulted.
export function _setCommandHook(hook) {
    _runCommand = hook;
    _fasterWhisperAvailable = undefined;
}
|
|
58
|
+
// Restore all test seams and cached state to their production defaults.
export function _resetImportHook() {
    _importModule = async (specifier) => _require(specifier);
    _decodeAudio = decodeAudioToSamples;
    _runCommand = runCommand;
    _engine = null;
    _fasterWhisperAvailable = undefined;
}
|
|
65
|
+
/**
 * Transcribe an audio file using the first usable backend.
 * Backends are tried in the order produced by backendOrder(); a missing
 * parakeet-coreml install is skipped silently, any other backend error
 * propagates. Throws NO_BACKEND_ERROR when nothing is available.
 *
 * @param {string} filePath - Path to the audio file on disk.
 * @param {{ preferredBackend?: string, language?: string, fasterWhisperModel?: string }} [options]
 * @returns {Promise<{ text: string, backend: string, durationMs: number }>}
 */
export async function transcribeAudio(filePath, options = {}) {
    const candidates = backendOrder(options.preferredBackend);
    for (const candidate of candidates) {
        try {
            switch (candidate) {
                case "parakeet": {
                    const parakeetMod = await _importModule(PARAKEET_SPECIFIER);
                    return await transcribeWithParakeet(filePath, parakeetMod);
                }
                case "faster-whisper":
                    if (await hasFasterWhisper()) {
                        return await transcribeWithFasterWhisper(filePath, options);
                    }
                    break;
                case "openai":
                    if (hasOpenAIApiKey()) {
                        return await transcribeWithOpenAI(filePath, options);
                    }
                    break;
                default:
                    // Unknown backend name: fall through to the next candidate.
                    break;
            }
        }
        catch (error) {
            // "Package not installed" just means this backend is absent.
            if (candidate === "parakeet" && isModuleNotFoundError(error, PARAKEET_SPECIFIER)) {
                continue;
            }
            throw error;
        }
    }
    throw new Error(NO_BACKEND_ERROR);
}
|
|
88
|
+
/**
 * Probe which transcription backends are currently usable.
 * @returns {Promise<string[]>} subset of ["parakeet", "faster-whisper", "openai"], in priority order.
 */
export async function getAvailableBackends() {
    let parakeetReady = false;
    try {
        await _importModule(PARAKEET_SPECIFIER);
        parakeetReady = true;
    }
    catch {
        // A failed import just means the backend is absent; /start must keep working.
    }
    const fasterWhisperReady = await hasFasterWhisper();
    const openaiReady = hasOpenAIApiKey();
    const available = [];
    if (parakeetReady) {
        available.push("parakeet");
    }
    if (fasterWhisperReady) {
        available.push("faster-whisper");
    }
    if (openaiReady) {
        available.push("openai");
    }
    return available;
}
|
|
105
|
+
/**
 * Transcribe via the parakeet-coreml engine (macOS/Apple Silicon).
 * The engine is constructed and initialized once, then cached in _engine.
 * Validates the module shape defensively since the package is optional.
 *
 * @param {string} filePath - Audio file decoded to PCM via _decodeAudio.
 * @param {object} parakeetMod - The loaded parakeet-coreml module.
 * @returns {Promise<{ text: string, backend: "parakeet", durationMs: number }>}
 */
async function transcribeWithParakeet(filePath, parakeetMod) {
    const begin = Date.now();
    const samples = await _decodeAudio(filePath);
    if (_engine === null) {
        // Support both named and default-namespace exports.
        const EngineCtor = parakeetMod?.ParakeetAsrEngine ?? parakeetMod?.default?.ParakeetAsrEngine;
        if (typeof EngineCtor !== "function") {
            throw new Error("parakeet-coreml was loaded but does not expose a ParakeetAsrEngine class");
        }
        const candidate = new EngineCtor();
        if (typeof candidate.initialize !== "function") {
            throw new Error("parakeet-coreml was loaded but the engine does not expose initialize()");
        }
        if (typeof candidate.transcribe !== "function") {
            throw new Error("parakeet-coreml was loaded but the engine does not expose transcribe(samples)");
        }
        await candidate.initialize();
        _engine = candidate;
    }
    const raw = await _engine.transcribe(samples);
    const text = extractTranscribedText(raw);
    if (text === undefined) {
        throw new Error("parakeet-coreml returned an unsupported transcription result");
    }
    // Prefer the engine-reported duration; fall back to wall-clock time.
    const reportedMs = typeof raw === "object" && raw !== null && typeof raw.durationMs === "number"
        ? raw.durationMs
        : Date.now() - begin;
    return {
        text,
        backend: "parakeet",
        durationMs: reportedMs,
    };
}
|
|
139
|
+
/**
 * Transcribe by shelling out to Python's faster-whisper.
 * Runs FASTER_WHISPER_TRANSCRIBE_SCRIPT via `python -c` and parses the
 * JSON line it prints. Options override the corresponding env vars.
 *
 * @param {string} filePath
 * @param {{ language?: string, fasterWhisperModel?: string }} [options]
 * @returns {Promise<{ text: string, backend: "faster-whisper", durationMs: number }>}
 */
async function transcribeWithFasterWhisper(filePath, options = {}) {
    const begin = Date.now();
    const childEnv = { ...process.env };
    if (options.language) {
        childEnv.FASTER_WHISPER_LANGUAGE = options.language;
    }
    if (options.fasterWhisperModel) {
        childEnv.FASTER_WHISPER_MODEL = options.fasterWhisperModel;
    }
    const outcome = await _runCommand(resolveFasterWhisperPython(), ["-c", FASTER_WHISPER_TRANSCRIBE_SCRIPT, filePath], {
        env: childEnv,
        // Default 10 minutes; overridable via FASTER_WHISPER_TIMEOUT_MS.
        timeoutMs: parsePositiveInteger(process.env.FASTER_WHISPER_TIMEOUT_MS, 10 * 60 * 1000),
    });
    if (outcome.code !== 0) {
        const detail = (outcome.stderr || outcome.stdout || "unknown error").trim();
        throw new Error(`faster-whisper transcription failed (${outcome.code ?? outcome.signal ?? "unknown"}): ${detail}`);
    }
    const payload = parseJsonLine(outcome.stdout);
    if (!payload || typeof payload.text !== "string") {
        throw new Error("faster-whisper transcription response did not include a text field");
    }
    // Prefer the audio duration reported by the model (seconds → ms).
    const durationMs = typeof payload.duration === "number"
        ? Math.round(payload.duration * 1000)
        : Date.now() - begin;
    return {
        text: payload.text,
        backend: "faster-whisper",
        durationMs,
    };
}
|
|
164
|
+
/**
 * Transcribe via OpenAI's hosted whisper-1 model.
 * Uploads the file as multipart form data; the MIME type is guessed from
 * the file extension, defaulting to audio/ogg (Telegram voice notes).
 *
 * @param {string} filePath
 * @param {{ language?: string }} [options]
 * @returns {Promise<{ text: string, backend: "openai", durationMs: number }>}
 * @throws when OPENAI_API_KEY is unset, the HTTP call fails, or the response has no text.
 */
async function transcribeWithOpenAI(filePath, options = {}) {
    const apiKey = process.env.OPENAI_API_KEY?.trim();
    if (!apiKey) {
        throw new Error(NO_BACKEND_ERROR);
    }
    const begin = Date.now();
    const audio = await readFile(filePath);
    const extension = (path.extname(filePath) || ".ogg").slice(1).toLowerCase();
    const MIME_BY_EXTENSION = {
        ogg: "audio/ogg",
        oga: "audio/ogg",
        mp3: "audio/mpeg",
        m4a: "audio/mp4",
        aac: "audio/aac",
        wav: "audio/wav",
        webm: "audio/webm",
        flac: "audio/flac",
    };
    const contentType = MIME_BY_EXTENSION[extension] ?? "audio/ogg";
    const body = new FormData();
    body.append("file", new Blob([audio], { type: contentType }), path.basename(filePath) || "audio.ogg");
    body.append("model", "whisper-1");
    if (options.language) {
        body.append("language", options.language);
    }
    const response = await fetch("https://api.openai.com/v1/audio/transcriptions", {
        method: "POST",
        headers: {
            Authorization: `Bearer ${apiKey}`,
        },
        body,
    });
    if (!response.ok) {
        // Body read is best-effort; fall back to the HTTP status text.
        const detail = (await response.text().catch(() => "")).trim();
        throw new Error(`OpenAI transcription failed (${response.status}): ${detail || response.statusText || "Unknown error"}`);
    }
    const payload = (await response.json());
    if (typeof payload.text !== "string") {
        throw new Error("OpenAI transcription response did not include a text field");
    }
    return {
        text: payload.text,
        backend: "openai",
        durationMs: Date.now() - begin,
    };
}
|
|
205
|
+
/**
 * Decode an audio file to 16 kHz mono float32 PCM via ffmpeg.
 * Resolves with a Float32Array of samples; rejects with an install hint
 * when the ffmpeg binary is missing (ENOENT), or with ffmpeg's stderr
 * when decoding fails.
 *
 * @param {string} filePath
 * @returns {Promise<Float32Array>}
 */
function decodeAudioToSamples(filePath) {
    return new Promise((resolve, reject) => {
        const outChunks = [];
        const errChunks = [];
        let done = false;
        // Guard so only the first of "error" / "close" settles the promise.
        const settleOnce = (action) => {
            if (done) {
                return;
            }
            done = true;
            action();
        };
        const proc = spawn("ffmpeg", ["-i", filePath, "-ar", "16000", "-ac", "1", "-f", "f32le", "pipe:1"], {
            stdio: ["ignore", "pipe", "pipe"],
        });
        proc.stdout.on("data", (chunk) => {
            outChunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
        });
        proc.stderr.on("data", (chunk) => {
            errChunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
        });
        proc.once("error", (error) => {
            settleOnce(() => {
                // ENOENT means the ffmpeg binary itself is not installed.
                if (error.code === "ENOENT") {
                    reject(new Error(FFMPEG_INSTALL_MESSAGE));
                    return;
                }
                reject(error);
            });
        });
        proc.once("close", (code, signal) => {
            settleOnce(() => {
                if (code !== 0) {
                    const stderrText = Buffer.concat(errChunks).toString("utf8").trim();
                    const reason = stderrText || (signal ? `signal ${signal}` : `exit code ${code ?? "unknown"}`);
                    reject(new Error(`ffmpeg failed to decode audio: ${reason}`));
                    return;
                }
                const pcm = Buffer.concat(outChunks);
                if (pcm.byteLength % Float32Array.BYTES_PER_ELEMENT !== 0) {
                    reject(new Error("ffmpeg returned invalid float32 PCM output"));
                    return;
                }
                // .slice() copies the view out of Node's pooled Buffer allocation.
                const view = new Float32Array(pcm.buffer, pcm.byteOffset, pcm.byteLength / Float32Array.BYTES_PER_ELEMENT);
                resolve(view.slice());
            });
        });
    });
}
|
|
253
|
+
/**
 * Check (once) whether the configured Python can import faster_whisper.
 * The result is memoized in _fasterWhisperAvailable; _setCommandHook clears it.
 * @returns {Promise<boolean>}
 */
async function hasFasterWhisper() {
    if (_fasterWhisperAvailable === undefined) {
        let probe = null;
        try {
            probe = await _runCommand(resolveFasterWhisperPython(), ["-c", FASTER_WHISPER_CHECK_SCRIPT], {
                env: process.env,
                timeoutMs: 10_000,
            });
        }
        catch {
            // A missing/broken interpreter counts as "faster-whisper unavailable".
        }
        _fasterWhisperAvailable = probe?.code === 0;
    }
    return _fasterWhisperAvailable;
}
|
|
264
|
+
/**
 * Pick the Python interpreter for faster-whisper.
 * Precedence: FASTER_WHISPER_PYTHON, then WHISPER_PYTHON, then "python3".
 * Blank/whitespace-only values are treated as unset.
 * @returns {string}
 */
function resolveFasterWhisperPython() {
    const configured = process.env.FASTER_WHISPER_PYTHON?.trim() || process.env.WHISPER_PYTHON?.trim();
    return configured || "python3";
}
|
|
267
|
+
/**
 * Whether a non-blank OPENAI_API_KEY is present in the environment.
 * @returns {boolean}
 */
function hasOpenAIApiKey() {
    const key = process.env.OPENAI_API_KEY;
    return key !== undefined && key.trim() !== "";
}
|
|
270
|
+
/**
 * Normalize an engine's transcription result to plain text.
 * Accepts a bare string or an object with a string `text` property;
 * anything else yields undefined so the caller can raise a clear error.
 *
 * @param {unknown} result
 * @returns {string | undefined}
 */
function extractTranscribedText(result) {
    if (typeof result === "string") {
        return result;
    }
    const candidate = result !== null && typeof result === "object" ? result.text : undefined;
    return typeof candidate === "string" ? candidate : undefined;
}
|
|
279
|
+
/**
 * Parse the last non-empty line of a process's stdout as JSON.
 * Lets the transcription script print warnings before its JSON payload.
 * Returns null when there is no such line or it is not valid JSON.
 *
 * @param {string} stdout
 * @returns {object | null}
 */
function parseJsonLine(stdout) {
    let candidate;
    for (const rawLine of stdout.split(/\r?\n/)) {
        const trimmed = rawLine.trim();
        if (trimmed) {
            candidate = trimmed;
        }
    }
    if (candidate === undefined) {
        return null;
    }
    try {
        return JSON.parse(candidate);
    }
    catch {
        return null;
    }
}
|
|
295
|
+
/**
 * Run a child process and capture its output.
 * Resolves with { code, signal, stdout, stderr }. When options.timeoutMs
 * elapses first, the child is sent SIGTERM and the result has code null,
 * signal "SIGTERM", and a timeout message as stderr. Rejects only when
 * the process cannot be spawned at all.
 *
 * @param {string} command
 * @param {string[]} args
 * @param {{ env?: object, timeoutMs?: number }} [options]
 * @returns {Promise<{ code: number | null, signal: string | null, stdout: string, stderr: string }>}
 */
function runCommand(command, args, options = {}) {
    return new Promise((resolve, reject) => {
        const outChunks = [];
        const errChunks = [];
        let done = false;
        const child = spawn(command, args, {
            env: options.env,
            stdio: ["ignore", "pipe", "pipe"],
        });
        let timer;
        if (options.timeoutMs) {
            timer = setTimeout(() => {
                if (done) {
                    return;
                }
                child.kill("SIGTERM");
                done = true;
                resolve({
                    code: null,
                    signal: "SIGTERM",
                    stdout: Buffer.concat(outChunks).toString("utf8"),
                    stderr: `Command timed out after ${options.timeoutMs}ms`,
                });
            }, options.timeoutMs);
        }
        // Don't let the watchdog timer keep the event loop alive on its own.
        timer?.unref?.();
        // Guard so only the first of timeout / "error" / "close" settles.
        const settleOnce = (action) => {
            if (done) {
                return;
            }
            done = true;
            if (timer) {
                clearTimeout(timer);
            }
            action();
        };
        child.stdout.on("data", (chunk) => {
            outChunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
        });
        child.stderr.on("data", (chunk) => {
            errChunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
        });
        child.once("error", (error) => {
            settleOnce(() => reject(error));
        });
        child.once("close", (code, signal) => {
            settleOnce(() => {
                resolve({
                    code,
                    signal,
                    stdout: Buffer.concat(outChunks).toString("utf8"),
                    stderr: Buffer.concat(errChunks).toString("utf8"),
                });
            });
        });
    });
}
|
|
350
|
+
/**
 * Parse an env-var style string as a strictly positive integer.
 * @param {string | undefined} raw
 * @param {number} fallback - Returned when raw is missing, non-numeric, zero, or negative.
 * @returns {number}
 */
function parsePositiveInteger(raw, fallback) {
    const value = Number.parseInt(raw ?? "", 10);
    if (Number.isInteger(value) && value > 0) {
        return value;
    }
    return fallback;
}
|
|
354
|
+
/**
 * Build the backend trial order.
 * With no preference (or "auto"), use the default priority; otherwise put
 * the preferred backend first, followed by the rest in default order.
 *
 * @param {string | undefined} preferred
 * @returns {string[]}
 */
function backendOrder(preferred) {
    const defaults = ["parakeet", "faster-whisper", "openai"];
    if (!preferred || preferred === "auto") {
        return defaults;
    }
    const remaining = defaults.filter((name) => name !== preferred);
    return [preferred, ...remaining];
}
|
|
361
|
+
/**
 * Detect "this optional package is not installed" errors for a specifier.
 * Matches Node's module-not-found error codes when the message names the
 * package, and falls back to message-pattern matching for loaders that
 * don't set a code.
 *
 * @param {unknown} error
 * @param {string} specifier - Package name the error must reference.
 * @returns {boolean}
 */
function isModuleNotFoundError(error, specifier) {
    const code = typeof error === "object" && error !== null ? error.code : undefined;
    const message = error instanceof Error ? error.message : String(error);
    if (code === "ERR_MODULE_NOT_FOUND" || code === "MODULE_NOT_FOUND") {
        // Only treat as "not installed" if the message references this package.
        // A broken transitive dependency (e.g. missing native addon) should
        // surface as a real error.
        return !message || message.includes(specifier);
    }
    const notFoundPhrases = [
        `Cannot find package '${specifier}'`,
        `Cannot find module '${specifier}'`,
        `Cannot resolve module '${specifier}'`,
    ];
    return notFoundPhrases.some((phrase) => message.includes(phrase));
}
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
import path from "node:path";
|
|
2
|
+
// Directory names that usually indicate an overly broad workspace root.
const BROAD_WORKSPACE_NAMES = new Set(["/", "home", "Users", "projects", "src", "code", "workspace"]);
/**
 * Decide whether a workspace path may be used, and whether to warn about it.
 * Rules, in order:
 *  1. Outside every configured allowed root (when any are set) → blocked.
 *  2. Exactly equal to a configured warn root → allowed with warning.
 *  3. Shallow path (≤ 3 components) whose basename looks broad → allowed with warning.
 *  4. Otherwise allowed with no warning.
 *
 * @param {string} workspace
 * @param {{ workspaceAllowedRoots: string[], workspaceWarnRoots: string[] }} config
 * @returns {{ allowed: boolean, warning?: string }}
 */
export function evaluateWorkspacePolicy(workspace, config) {
    const resolved = path.resolve(workspace);
    const allowedRoots = config.workspaceAllowedRoots.map((root) => path.resolve(root));
    if (allowedRoots.length > 0) {
        const insideAllowed = allowedRoots.some((root) => isPathInside(resolved, root));
        if (!insideAllowed) {
            return {
                allowed: false,
                warning: `Workspace is outside allowed roots: ${resolved}`,
            };
        }
    }
    const warnRoots = config.workspaceWarnRoots.map((root) => path.resolve(root));
    const matchedWarnRoot = warnRoots.find((root) => root === resolved);
    if (matchedWarnRoot) {
        return {
            allowed: true,
            warning: `Workspace is a broad configured root: ${matchedWarnRoot}`,
        };
    }
    const leafName = path.basename(resolved) || resolved;
    const depth = resolved.split(path.sep).filter(Boolean).length;
    if (BROAD_WORKSPACE_NAMES.has(leafName) && depth <= 3) {
        return {
            allowed: true,
            warning: `Workspace looks broad; prefer a project-specific directory: ${resolved}`,
        };
    }
    return { allowed: true };
}
|
|
29
|
+
/**
 * Keep only the workspaces the policy allows (warnings are not filtered out).
 * @param {string[]} workspaces
 * @param {{ workspaceAllowedRoots: string[], workspaceWarnRoots: string[] }} config
 * @returns {string[]}
 */
export function filterAllowedWorkspaces(workspaces, config) {
    const isAllowed = (workspace) => evaluateWorkspacePolicy(workspace, config).allowed;
    return workspaces.filter(isAllowed);
}
|
|
32
|
+
/**
 * Format the policy verdict for a workspace as a one-line message.
 * @param {string} workspace
 * @param {{ workspaceAllowedRoots: string[], workspaceWarnRoots: string[] }} config
 * @returns {string | undefined} "Blocked: …" / "Warning: …", or undefined when clean.
 */
export function renderWorkspacePolicyLine(workspace, config) {
    const verdict = evaluateWorkspacePolicy(workspace, config);
    if (!verdict.allowed) {
        return `Blocked: ${verdict.warning}`;
    }
    if (verdict.warning) {
        return `Warning: ${verdict.warning}`;
    }
    return undefined;
}
|
|
39
|
+
/**
 * Whether candidate equals root or lies beneath it.
 * Both arguments must already be resolved absolute paths; comparison is a
 * plain prefix check with a path-separator boundary.
 *
 * @param {string} candidate
 * @param {string} root
 * @returns {boolean}
 */
function isPathInside(candidate, root) {
    if (candidate === root) {
        return true;
    }
    return candidate.startsWith(root + path.sep);
}
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
services:
|
|
2
|
+
nordrelay:
|
|
3
|
+
build: .
|
|
4
|
+
env_file: .env
|
|
5
|
+
volumes:
|
|
6
|
+
- ${HOME}/.codex:/home/nordrelay/.codex:rw
|
|
7
|
+
- ./workspace:/workspace:rw
|
|
8
|
+
cap_drop:
|
|
9
|
+
- ALL
|
|
10
|
+
security_opt:
|
|
11
|
+
- no-new-privileges:true
|
|
12
|
+
deploy:
|
|
13
|
+
resources:
|
|
14
|
+
limits:
|
|
15
|
+
memory: 2G
|
|
16
|
+
cpus: "2.0"
|
|
17
|
+
restart: unless-stopped
|
package/launchd/start.sh
ADDED
package/package.json
ADDED
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@nordbyte/nordrelay",
|
|
3
|
+
"version": "0.2.1",
|
|
4
|
+
"description": "Remote control plane for coding agents across messaging channels.",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"author": "Ricardo",
|
|
7
|
+
"homepage": "https://github.com/nordbyte",
|
|
8
|
+
"repository": {
|
|
9
|
+
"type": "git",
|
|
10
|
+
"url": "git+https://github.com/nordbyte/nordrelay.git"
|
|
11
|
+
},
|
|
12
|
+
"bugs": {
|
|
13
|
+
"url": "https://github.com/nordbyte/nordrelay/issues"
|
|
14
|
+
},
|
|
15
|
+
"keywords": [
|
|
16
|
+
"coding-agents",
|
|
17
|
+
"messaging",
|
|
18
|
+
"remote-control",
|
|
19
|
+
"codex",
|
|
20
|
+
"pi",
|
|
21
|
+
"telegram",
|
|
22
|
+
"bot",
|
|
23
|
+
"automation"
|
|
24
|
+
],
|
|
25
|
+
"main": "./dist/index.js",
|
|
26
|
+
"bin": {
|
|
27
|
+
"nordrelay": "plugins/nordrelay/scripts/nordrelay.mjs"
|
|
28
|
+
},
|
|
29
|
+
"files": [
|
|
30
|
+
"dist/",
|
|
31
|
+
"plugins/",
|
|
32
|
+
"launchd/",
|
|
33
|
+
".env.example",
|
|
34
|
+
"Dockerfile",
|
|
35
|
+
"docker-compose.yml"
|
|
36
|
+
],
|
|
37
|
+
"scripts": {
|
|
38
|
+
"build": "tsc",
|
|
39
|
+
"check": "node --check plugins/nordrelay/scripts/nordrelay.mjs && tsc --noEmit",
|
|
40
|
+
"dev": "tsx src/index.ts",
|
|
41
|
+
"foreground": "node plugins/nordrelay/scripts/nordrelay.mjs foreground",
|
|
42
|
+
"prepack": "npm run build",
|
|
43
|
+
"prepublishOnly": "npm run check && npm test && npm run build",
|
|
44
|
+
"status": "node plugins/nordrelay/scripts/nordrelay.mjs status",
|
|
45
|
+
"start": "node plugins/nordrelay/scripts/nordrelay.mjs start",
|
|
46
|
+
"stop": "node plugins/nordrelay/scripts/nordrelay.mjs stop",
|
|
47
|
+
"test": "vitest run"
|
|
48
|
+
},
|
|
49
|
+
"dependencies": {
|
|
50
|
+
"@grammyjs/auto-retry": "^2.0.2",
|
|
51
|
+
"@openai/codex-sdk": "^0.130.0",
|
|
52
|
+
"grammy": "^1.41.1"
|
|
53
|
+
},
|
|
54
|
+
"devDependencies": {
|
|
55
|
+
"@types/better-sqlite3": "^7.6.0",
|
|
56
|
+
"@types/node": "^25.5.0",
|
|
57
|
+
"tsx": "^4.21.0",
|
|
58
|
+
"typescript": "^5.9.3",
|
|
59
|
+
"vitest": "^3.2.4"
|
|
60
|
+
},
|
|
61
|
+
"optionalDependencies": {
|
|
62
|
+
"better-sqlite3": "^11.0.0",
|
|
63
|
+
"parakeet-coreml": "^2.2.0"
|
|
64
|
+
},
|
|
65
|
+
"engines": {
|
|
66
|
+
"node": ">=22"
|
|
67
|
+
},
|
|
68
|
+
"license": "MIT"
|
|
69
|
+
}
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "nordrelay",
|
|
3
|
+
"version": "0.2.1",
|
|
4
|
+
"description": "Run a remote-control bridge for coding agents. The current adapter connects Codex sessions to Telegram with streaming replies, multi-session controls, attachments, voice input, model selection, thread browsing, and handback.",
|
|
5
|
+
"author": {
|
|
6
|
+
"name": "Ricardo",
|
|
7
|
+
"url": "https://github.com/nordbyte"
|
|
8
|
+
},
|
|
9
|
+
"homepage": "https://github.com/nordbyte",
|
|
10
|
+
"repository": "https://github.com/nordbyte/nordrelay",
|
|
11
|
+
"license": "MIT",
|
|
12
|
+
"keywords": [
|
|
13
|
+
"coding-agents",
|
|
14
|
+
"messaging",
|
|
15
|
+
"remote-control",
|
|
16
|
+
"codex",
|
|
17
|
+
"telegram",
|
|
18
|
+
"bot",
|
|
19
|
+
"app-server"
|
|
20
|
+
],
|
|
21
|
+
"skills": "./skills/",
|
|
22
|
+
"interface": {
|
|
23
|
+
"displayName": "NordRelay",
|
|
24
|
+
"shortDescription": "Remote-control bridge for coding agents",
|
|
25
|
+
"longDescription": "Starts NordRelay, a messaging bridge for coding agents. The current runtime connects Codex and Pi sessions to Telegram: messages become agent turns, replies and tool activity stream back to Telegram, each chat or forum topic has its own session, and commands provide thread browsing, model and reasoning controls, launch profiles, attachments, voice transcription, artifacts, login, abort, retry, and CLI handback.",
|
|
26
|
+
"developerName": "Ricardo",
|
|
27
|
+
"category": "Productivity",
|
|
28
|
+
"capabilities": [
|
|
29
|
+
"Interactive",
|
|
30
|
+
"Read",
|
|
31
|
+
"Write"
|
|
32
|
+
],
|
|
33
|
+
"websiteURL": "https://github.com/nordbyte",
|
|
34
|
+
"privacyPolicyURL": "https://github.com/nordbyte/nordrelay",
|
|
35
|
+
"termsOfServiceURL": "https://github.com/nordbyte/nordrelay",
|
|
36
|
+
"defaultPrompt": [
|
|
37
|
+
"Start NordRelay remote control",
|
|
38
|
+
"Show NordRelay status",
|
|
39
|
+
"Stop NordRelay",
|
|
40
|
+
"Show Telegram session browser",
|
|
41
|
+
"Select Codex model"
|
|
42
|
+
],
|
|
43
|
+
"brandColor": "#229ED9",
|
|
44
|
+
"composerIcon": "./assets/nordrelay.svg",
|
|
45
|
+
"logo": "./assets/nordrelay.svg",
|
|
46
|
+
"screenshots": []
|
|
47
|
+
}
|
|
48
|
+
}
|
|
@@ -0,0 +1,5 @@
|
|
|
1
|
+
<svg xmlns="http://www.w3.org/2000/svg" width="256" height="256" viewBox="0 0 256 256" role="img" aria-label="NordRelay">
|
|
2
|
+
<rect width="256" height="256" rx="48" fill="#229ED9"/>
|
|
3
|
+
<path d="M202.4 59.9 38.9 123.1c-11.2 4.5-11.1 10.8-2 13.6l41.9 13.1 16 49.1c2 5.6 1 7.8 6.9 7.8 4.5 0 6.5-2.1 9-4.5l21.7-21.1 45.2 33.4c8.3 4.6 14.3 2.2 16.4-7.7l29.7-140.1c3-12.1-4.6-17.6-21.3-6.8Z" fill="#fff"/>
|
|
4
|
+
<path d="m86.3 145.9 96.7-61c4.6-2.8 8.8-1.3 5.3 1.8l-82.8 74.8-3.2 34.1-16-49.7Z" fill="#D2F0FF"/>
|
|
5
|
+
</svg>
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
---
|
|
2
|
+
description: Start, stop, or inspect the NordRelay bot process.
|
|
3
|
+
---
|
|
4
|
+
|
|
5
|
+
# /nordrelay:remote
|
|
6
|
+
|
|
7
|
+
Start the NordRelay bot process.
|
|
8
|
+
|
|
9
|
+
This command is now only a process-manager shortcut. The Telegram bot itself provides the full remote controls: `/start`, `/new`, `/session`, `/sessions`, `/sync`, `/pinned`, `/pin`, `/unpin`, `/attach`, `/handback`, `/model`, `/reasoning`, `/fast`, `/launch_profiles`, `/retry`, `/queue`, `/cancel`, `/clearqueue`, `/artifacts`, `/abort`, `/stop`, `/tasks`, `/progress`, `/status`, `/health`, `/version`, `/logs`, `/diagnostics`, `/restart`, `/update`, voice messages, photos, documents, media groups, artifacts, and login.
|
|
10
|
+
|
|
11
|
+
Codex plugin commands are namespaced by the plugin id in current plugin-aware command surfaces. The unnamespaced `/remote` command is not available in current Codex TUI builds.
|
|
12
|
+
|
|
13
|
+
## Arguments
|
|
14
|
+
|
|
15
|
+
- empty: start the connector in the background
|
|
16
|
+
- `status`: show connector status
|
|
17
|
+
- `stop`: stop the connector
|
|
18
|
+
- `restart`: restart the connector
|
|
19
|
+
- `foreground`: run the connector in the foreground for debugging
|
|
20
|
+
|
|
21
|
+
## Workflow
|
|
22
|
+
|
|
23
|
+
1. Locate the plugin root containing `.codex-plugin/plugin.json` with `"name": "nordrelay"`. In a source checkout this is usually `<repo>/plugins/nordrelay`.
|
|
24
|
+
2. Check whether `TELEGRAM_BOT_TOKEN` and either `TELEGRAM_ALLOWED_USER_IDS`, `TELEGRAM_ALLOWED_CHAT_IDS`, or `TELEGRAM_ALLOW_ANY_CHAT=1` are available from the environment or from `.env`.
|
|
25
|
+
3. Run the connector command from the plugin root:
|
|
26
|
+
|
|
27
|
+
```bash
|
|
28
|
+
node scripts/nordrelay.mjs ${ARGUMENTS:-start}
|
|
29
|
+
```
|
|
30
|
+
|
|
31
|
+
4. If `${ARGUMENTS}` is empty, use `start`.
|
|
32
|
+
5. After `start` or `restart`, run `node scripts/nordrelay.mjs status` and report the PID, selected Codex thread id, and log file.
|
|
33
|
+
6. If startup fails because dependencies are missing, run `npm install` and `npm run build` in the repository root.
|