@tiens.nguyen/gonext-local-worker 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,79 @@
1
+ # gonext-local-worker
2
+
3
+ Runs on **your Mac** next to **Ollama** or any **OpenAI-compatible** local server. It:
4
+
5
+ 1. Polls **`POST /api/worker/jobs/next`** on your GoNext cloud API (Lambda).
6
+ 2. Runs the job against **`payload.baseURL`** (your LAN `http://127.0.0.1:11434/v1` etc.).
7
+ 3. **`PATCH`**es **`running`** → **`completed`** / **`failed`** so DynamoDB and the web app update.
8
+
9
+ You must create a **Worker API key** in the web app **Settings** (stored as a hash in DynamoDB).
10
+
11
+ ## Install
12
+
13
+ ```bash
14
+ npm install -g @tiens.nguyen/gonext-local-worker
15
+ ```
16
+
17
+ Or from source:
18
+
19
+ ```bash
20
+ cd tools/gonext-local-worker
21
+ npm install
22
+ npm link
23
+ ```
24
+
25
+ Requires **Node.js 18+**.
26
+
27
+ ## Configure
28
+
29
+ Create `~/.gonext/worker.env` (optional):
30
+
31
+ ```bash
32
+ mkdir -p ~/.gonext
33
+ cat > ~/.gonext/worker.env << 'EOF'
34
+ GONEXT_API_BASE=https://YOUR_API.execute-api.ap-southeast-1.amazonaws.com
35
+ GONEXT_WORKER_KEY=paste-your-worker-secret-here
36
+ GONEXT_POLL_MS=1500
37
+ EOF
38
+ chmod 600 ~/.gonext/worker.env
39
+ ```
40
+
41
+ Or export in the shell:
42
+
43
+ ```bash
44
+ export GONEXT_API_BASE=https://....execute-api....amazonaws.com
45
+ export GONEXT_WORKER_KEY=...
46
+ ```
47
+
48
+ ## Run
49
+
50
+ ```bash
51
+ gonext-local-worker
52
+ ```
53
+
54
+ Options:
55
+
56
+ ```bash
57
+ gonext-local-worker --poll-ms 2000
58
+ gonext-local-worker --api-base https://other-host.example
59
+ gonext-local-worker --help
60
+ ```
61
+
62
+ Leave this process **running** while you use async local models from the web app.
63
+
64
+ ## Run at login (macOS LaunchAgent)
65
+
66
+ 1. Copy `launchd/com.gonext.worker.plist.example` to `~/Library/LaunchAgents/com.gonext.worker.plist`.
67
+ 2. Edit paths: set **full path** to `gonext-local-worker` (`which gonext-local-worker` after `npm link`).
68
+ 3. `launchctl load ~/Library/LaunchAgents/com.gonext.worker.plist`
69
+
70
+ Unload:
71
+
72
+ ```bash
73
+ launchctl unload ~/Library/LaunchAgents/com.gonext.worker.plist
74
+ ```
75
+
76
+ ## Troubleshooting
77
+
78
+ - **`403` / invalid key** — Regenerate worker key in Settings and update `GONEXT_WORKER_KEY`.
79
+ - **Job never claimed** — Ensure DynamoDB + worker routes work; Settings must include **Ollama base URL** so the queued payload points at a reachable host from this Mac (`127.0.0.1` is fine).
@@ -0,0 +1,219 @@
1
+ #!/usr/bin/env node
2
+ /**
3
+ * GoNext local LLM worker — runs on the Mac where Ollama / MLX HTTP server lives.
4
+ * Polls your cloud API for jobs, calls the model locally, PATCHes status back.
5
+ *
6
+ * Env:
7
+ * GONEXT_API_BASE HTTPS origin only (no trailing slash), e.g. https://xxx.execute-api....amazonaws.com
8
+ * GONEXT_WORKER_KEY Plaintext worker secret from Web → Settings → Worker API key
9
+ * GONEXT_POLL_MS Poll interval when idle (default 1500)
10
+ *
11
+ * Optional env files (loaded in order; shell exports win if set before launch):
12
+ * ~/.gonext/worker.env
13
+ * ./.env (cwd)
14
+ */
15
+ import { homedir } from "node:os";
16
+ import { join } from "node:path";
17
+ import dotenv from "dotenv";
18
+ import OpenAI from "openai";
19
+
20
+ dotenv.config({ path: join(homedir(), ".gonext", "worker.env") });
21
+ dotenv.config();
22
+
23
+ function parseArgs(argv) {
24
+ const out = { help: false, pollMs: undefined, apiBase: undefined };
25
+ for (let i = 2; i < argv.length; i++) {
26
+ const a = argv[i];
27
+ if (a === "--help" || a === "-h") out.help = true;
28
+ else if (a === "--poll-ms" && argv[i + 1])
29
+ out.pollMs = Number(argv[++i]);
30
+ else if (a === "--api-base" && argv[i + 1]) out.apiBase = argv[++i];
31
+ }
32
+ return out;
33
+ }
34
+
35
+ function printHelp() {
36
+ console.log(`
37
+ gonext-local-worker — bridge cloud GoNext jobs ↔ local LLM (Ollama / OpenAI-compatible)
38
+
39
+ Usage:
40
+ export GONEXT_API_BASE=https://....amazonaws.com
41
+ export GONEXT_WORKER_KEY=your-secret-from-settings
42
+ gonext-local-worker
43
+
44
+ Options:
45
+ --poll-ms <ms> Idle poll interval (default 1500 or GONEXT_POLL_MS)
46
+ --api-base <url> Override GONEXT_API_BASE
47
+
48
+ Config files (optional):
49
+ ~/.gonext/worker.env
50
+ .env in current directory
51
+
52
+ Install (from repo):
53
+ cd tools/gonext-local-worker && npm install && npm link
54
+
55
+ Then keep this running while you use the web app with local models.
56
+ `);
57
+ }
58
+
59
+ const args = parseArgs(process.argv);
60
+ if (args.help) {
61
+ printHelp();
62
+ process.exit(0);
63
+ }
64
+
65
+ const apiBase = (
66
+ args.apiBase ??
67
+ process.env.GONEXT_API_BASE ??
68
+ ""
69
+ ).replace(/\/+$/, "");
70
+ const workerKey = process.env.GONEXT_WORKER_KEY ?? "";
71
+ const pollMs =
72
+ (Number.isFinite(args.pollMs) && args.pollMs > 0
73
+ ? args.pollMs
74
+ : Number(process.env.GONEXT_POLL_MS ?? "1500")) || 1500;
75
+
76
+ if (!apiBase || !workerKey) {
77
+ console.error(
78
+ "Missing GONEXT_API_BASE or GONEXT_WORKER_KEY.\nSet them or put them in ~/.gonext/worker.env — run with --help."
79
+ );
80
+ process.exit(1);
81
+ }
82
+
83
+ function ts() {
84
+ return new Date().toISOString();
85
+ }
86
+
87
+ function toOpenAIMessages(messages) {
88
+ return messages.map((m) => {
89
+ if (m.role === "user" && m.attachments?.length) {
90
+ return {
91
+ role: m.role,
92
+ content: [
93
+ { type: "text", text: m.content },
94
+ ...m.attachments.map((a) => ({
95
+ type: "image_url",
96
+ image_url: { url: `data:${a.mimeType};base64,${a.data}` },
97
+ })),
98
+ ],
99
+ };
100
+ }
101
+ return { role: m.role, content: m.content };
102
+ });
103
+ }
104
+
105
+ async function workerFetch(path, init = {}) {
106
+ const url = `${apiBase}${path.startsWith("/") ? path : `/${path}`}`;
107
+ const headers = {
108
+ "Content-Type": "application/json",
109
+ "X-Worker-Key": workerKey,
110
+ ...(init.headers ?? {}),
111
+ };
112
+ return fetch(url, { ...init, headers });
113
+ }
114
+
115
+ let shuttingDown = false;
116
+
117
+ async function runChatJob(job) {
118
+ const { jobId, payload } = job;
119
+ const start = Date.now();
120
+ const patchRunning = await workerFetch(`/api/worker/jobs/${jobId}`, {
121
+ method: "PATCH",
122
+ body: JSON.stringify({ jobStatus: "running" }),
123
+ });
124
+ if (!patchRunning.ok) {
125
+ const t = await patchRunning.text().catch(() => "");
126
+ console.error(`[${ts()}] PATCH running failed ${patchRunning.status}`, t);
127
+ return;
128
+ }
129
+
130
+ const client = new OpenAI({
131
+ baseURL: payload.baseURL,
132
+ apiKey: payload.apiKey || "ollama",
133
+ });
134
+
135
+ try {
136
+ const completion = await client.chat.completions.create({
137
+ model: payload.modelId,
138
+ messages: toOpenAIMessages(payload.messages),
139
+ temperature: 0,
140
+ });
141
+ const text = completion.choices[0]?.message?.content ?? "";
142
+ const totalTimeSeconds = (Date.now() - start) / 1000;
143
+ await workerFetch(`/api/worker/jobs/${jobId}`, {
144
+ method: "PATCH",
145
+ body: JSON.stringify({
146
+ jobStatus: "completed",
147
+ resultText: text,
148
+ tokenCount: Math.max(1, completion.usage?.total_tokens ?? 1),
149
+ totalTimeSeconds,
150
+ }),
151
+ });
152
+ console.log(
153
+ `[${ts()}] completed job ${jobId} (${totalTimeSeconds.toFixed(1)}s)`
154
+ );
155
+ } catch (e) {
156
+ const message = e instanceof Error ? e.message : String(e);
157
+ await workerFetch(`/api/worker/jobs/${jobId}`, {
158
+ method: "PATCH",
159
+ body: JSON.stringify({
160
+ jobStatus: "failed",
161
+ errorMessage: message,
162
+ totalTimeSeconds: (Date.now() - start) / 1000,
163
+ }),
164
+ });
165
+ console.error(`[${ts()}] failed job ${jobId}:`, message);
166
+ }
167
+ }
168
+
169
+ async function pollOnce() {
170
+ const res = await workerFetch("/api/worker/jobs/next", { method: "POST" });
171
+ if (res.status === 204) return;
172
+ if (!res.ok) {
173
+ const t = await res.text().catch(() => "");
174
+ throw new Error(`POST /api/worker/jobs/next → ${res.status}: ${t}`);
175
+ }
176
+ const job = await res.json();
177
+ if (job?.jobId && job.payload) {
178
+ await runChatJob(job);
179
+ }
180
+ }
181
+
182
+ function sleep(ms) {
183
+ return new Promise((r) => setTimeout(r, ms));
184
+ }
185
+
186
+ async function main() {
187
+ console.log(`[${ts()}] gonext-local-worker`);
188
+ console.log(` API ${apiBase}`);
189
+ console.log(` poll every ${pollMs}ms (idle)`);
190
+ console.log(` stop Ctrl+C`);
191
+
192
+ const loop = async () => {
193
+ while (!shuttingDown) {
194
+ try {
195
+ await pollOnce();
196
+ } catch (e) {
197
+ console.error(`[${ts()}] poll error:`, e instanceof Error ? e.message : e);
198
+ }
199
+ if (shuttingDown) break;
200
+ await sleep(pollMs);
201
+ }
202
+ };
203
+
204
+ const stop = () => {
205
+ if (shuttingDown) return;
206
+ shuttingDown = true;
207
+ console.log(`\n[${ts()}] shutting down…`);
208
+ process.exit(0);
209
+ };
210
+ process.on("SIGINT", stop);
211
+ process.on("SIGTERM", stop);
212
+
213
+ await loop();
214
+ }
215
+
216
+ main().catch((e) => {
217
+ console.error(e);
218
+ process.exit(1);
219
+ });
@@ -0,0 +1,34 @@
1
+ <?xml version="1.0" encoding="UTF-8"?>
2
+ <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3
+ <plist version="1.0">
4
+ <dict>
5
+ <key>Label</key>
6
+ <string>com.gonext.worker</string>
7
+ <key>ProgramArguments</key>
8
+ <array>
9
+ <!-- Replace with output of: which gonext-local-worker -->
10
+ <string>/usr/local/bin/gonext-local-worker</string>
11
+ </array>
12
+ <!-- Optional: to force an explicit Node path, replace the ProgramArguments array above with this block (uncommenting it in place would leave two arrays and an invalid plist) -->
13
+ <!--
14
+ <array>
15
+ <string>/usr/local/bin/node</string>
16
+ <string>/ABSOLUTE/PATH/TO/gonext/tools/gonext-local-worker/gonext-local-worker.mjs</string>
17
+ </array>
18
+ -->
19
+ <key>RunAtLoad</key>
20
+ <true/>
21
+ <key>KeepAlive</key>
22
+ <true/>
23
+ <key>StandardOutPath</key>
24
+ <string>/tmp/gonext-worker.log</string>
25
+ <key>StandardErrorPath</key>
26
+ <string>/tmp/gonext-worker.err</string>
27
+ <key>EnvironmentVariables</key>
28
+ <dict>
29
+ <!-- Or rely on ~/.gonext/worker.env (loaded by the worker) -->
30
+ <key>PATH</key>
31
+ <string>/usr/local/bin:/usr/bin:/bin:/opt/homebrew/bin</string>
32
+ </dict>
33
+ </dict>
34
+ </plist>
package/package.json ADDED
@@ -0,0 +1,33 @@
1
+ {
2
+ "name": "@tiens.nguyen/gonext-local-worker",
3
+ "version": "1.0.0",
4
+ "description": "Polls GoNext cloud API for async local LLM jobs and runs them against Ollama/OpenAI-compatible servers on this Mac",
5
+ "type": "module",
6
+ "license": "MIT",
7
+ "repository": {
8
+ "type": "git",
9
+ "url": "git+https://github.com/tiennsloit/gonext.git"
10
+ },
11
+ "homepage": "https://github.com/tiennsloit/gonext#readme",
12
+ "bugs": {
13
+ "url": "https://github.com/tiennsloit/gonext/issues"
14
+ },
15
+ "bin": {
16
+ "gonext-local-worker": "./gonext-local-worker.mjs"
17
+ },
18
+ "files": [
19
+ "gonext-local-worker.mjs",
20
+ "README.md",
21
+ "launchd/"
22
+ ],
23
+ "publishConfig": {
24
+ "access": "public"
25
+ },
26
+ "dependencies": {
27
+ "dotenv": "^16.4.5",
28
+ "openai": "^4.77.0"
29
+ },
30
+ "engines": {
31
+ "node": ">=18"
32
+ }
33
+ }