openhorizon-cli 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,90 @@
1
+ # OpenHorizon CLI
2
+
3
+ Chat with AI models directly from your terminal.
4
+
5
+ **GitHub:** [SkidGod4444/openhorizon · packages/cli](https://github.com/SkidGod4444/openhorizon/tree/main/packages/cli)
6
+
7
+ ## Installation
8
+
9
+ ### macOS / Linux
10
+
11
+ ```sh
12
+ curl -fsSL https://raw.githubusercontent.com/SkidGod4444/openhorizon/main/packages/cli/install.sh | sh
13
+ ```
14
+
15
+ ### Windows (PowerShell)
16
+
17
+ ```powershell
18
+ irm https://raw.githubusercontent.com/SkidGod4444/openhorizon/main/packages/cli/install.ps1 | iex
19
+ ```
20
+
21
+ ### via npm / bun / pnpm (all platforms)
22
+
23
+ ```sh
24
+ npm install -g openhorizon-cli
25
+ # or
26
+ bun install -g openhorizon-cli
27
+ # or
28
+ pnpm install -g openhorizon-cli
29
+ ```
30
+
31
+ ## Quick Start
32
+
33
+ ```sh
34
+ # Save your API key
35
+ openhorizon login <your-api-key>
36
+
37
+ # Start chatting
38
+ openhorizon chat
39
+ ```
40
+
41
+ Get your API key at **https://openhorizon.devwtf.in**
42
+
43
+ ## Commands
44
+
45
+ | Command | Description |
46
+ |---|---|
47
+ | `openhorizon chat` | Start an interactive AI chat session |
48
+ | `openhorizon login <key>` | Save your API key to `~/.openhorizon/config.json` |
49
+ | `openhorizon model [name]` | Get or set the default model |
50
+ | `openhorizon --help` | Show help |
51
+
52
+ ### In-chat commands
53
+
54
+ | Command | Description |
55
+ |---|---|
56
+ | `/model` | Show active model |
57
+ | `/clear` | Clear conversation history |
58
+ | `/help` | Show available commands |
59
+ | `exit` / `quit` / `q` | Quit |
60
+
61
+ ## Options
62
+
63
+ ```sh
64
+ openhorizon chat --model openhorizon/llama3 --base-url https://your-server.com
65
+ ```
66
+
67
+ | Flag | Env variable | Default |
68
+ |---|---|---|
69
+ | `--model` | `OPENHORIZON_MODEL` | `openhorizon/devstral-small-2:24b` |
70
+ | `--base-url` | `OPENHORIZON_API_URL` | `https://api.openhorizon.devwtf.in/v1` |
71
+
72
+ ## History
73
+
74
+ Every conversation is automatically logged to `~/.openhorizon/history.jsonl` as newline-delimited JSON.
75
+
76
+ ```sh
77
+ # View today's conversations
78
+ grep "$(date +%Y-%m-%d)" ~/.openhorizon/history.jsonl | jq .
79
+
80
+ # View all assistant responses
81
+ jq 'select(.role == "assistant")' ~/.openhorizon/history.jsonl
82
+ ```
83
+
84
+ ## Source
85
+
86
+ <https://github.com/SkidGod4444/openhorizon/tree/main/packages/cli>
87
+
88
+ ## License
89
+
90
+ MIT
@@ -0,0 +1 @@
1
+ #!/usr/bin/env node
package/dist/index.js ADDED
@@ -0,0 +1,295 @@
1
+ #!/usr/bin/env node
2
+
3
+ // src/index.ts
4
+ import { Command } from "commander";
5
+ import chalk2 from "chalk";
6
+ import dotenv from "dotenv";
7
+
8
// src/config.ts
import fs from "fs";
import path from "path";
import os from "os";

const CONFIG_DIR = path.join(os.homedir(), ".openhorizon");
const CONFIG_FILE = path.join(CONFIG_DIR, "config.json");
// Single source of truth for the fallback model (was repeated three times).
const DEFAULT_MODEL = "openhorizon/devstral-small-2:24b";

/**
 * Read the persisted CLI config from ~/.openhorizon/config.json.
 * Always returns an object whose `defaultModel` is populated: a missing
 * or unparseable file yields the built-in default instead of throwing.
 */
function getConfig() {
  if (!fs.existsSync(CONFIG_FILE)) {
    return { defaultModel: DEFAULT_MODEL };
  }
  try {
    const raw = fs.readFileSync(CONFIG_FILE, "utf-8");
    const parsed = JSON.parse(raw);
    // Fallback first, then spread: persisted values win when present.
    return { defaultModel: DEFAULT_MODEL, ...parsed };
  } catch {
    // Corrupt/unreadable config — best-effort read, fall back to defaults.
    return { defaultModel: DEFAULT_MODEL };
  }
}

/**
 * Merge `updates` into the current config and write it back as pretty
 * JSON, creating ~/.openhorizon on first use.
 */
function saveConfig(updates) {
  const next = { ...getConfig(), ...updates };
  if (!fs.existsSync(CONFIG_DIR)) {
    fs.mkdirSync(CONFIG_DIR, { recursive: true });
  }
  fs.writeFileSync(CONFIG_FILE, JSON.stringify(next, null, 2), "utf-8");
}
38
+
39
+ // src/chat.ts
40
+ import chalk from "chalk";
41
+ import { input } from "@inquirer/prompts";
42
+ import { createHash } from "crypto";
43
+ import { marked } from "marked";
44
+ import TerminalRenderer from "marked-terminal";
45
+
46
// src/history.ts
import fs2 from "fs";
import path2 from "path";
import os2 from "os";

const HISTORY_DIR = path2.join(os2.homedir(), ".openhorizon");
const HISTORY_FILE = path2.join(HISTORY_DIR, "history.jsonl");

/** Create ~/.openhorizon if it does not exist yet. */
function ensureDir() {
  if (!fs2.existsSync(HISTORY_DIR)) {
    fs2.mkdirSync(HISTORY_DIR, { recursive: true });
  }
}

/**
 * Append one chat event to the newline-delimited JSON history log.
 * Logging is best-effort by design: any filesystem failure is swallowed
 * so a bad disk never interrupts an active chat session.
 */
function appendHistory(entry) {
  try {
    ensureDir();
    fs2.appendFileSync(HISTORY_FILE, `${JSON.stringify(entry)}\n`, "utf-8");
  } catch {
    // Intentionally ignored — see note above.
  }
}

/** Absolute path of the history log, shown to the user at startup. */
function getHistoryPath() {
  return HISTORY_FILE;
}
67
+
68
// src/chat.ts
marked.setOptions({
  renderer: new TerminalRenderer()
});

// Braille-dot frames for the in-terminal progress spinner.
const SPINNER_FRAMES = ["\u280B", "\u2819", "\u2839", "\u2838", "\u283C", "\u2834", "\u2826", "\u2827", "\u2807", "\u280F"];

/**
 * Start an animated spinner on stdout (one frame every 80 ms).
 * Returns handles to relabel the spinner mid-run and to stop it;
 * stopping also erases the spinner's line.
 */
function createSpinner(label) {
  let frame = 0;
  let stopped = false;
  const timer = setInterval(() => {
    if (stopped) return;
    const glyph = SPINNER_FRAMES[frame % SPINNER_FRAMES.length];
    process.stdout.write(`\r${chalk.cyan(glyph)} ${chalk.dim(label)} `);
    frame += 1;
  }, 80);
  return {
    /** Swap the text shown next to the spinner. */
    update(newLabel) {
      label = newLabel;
    },
    /** Stop and erase the spinner line (safe to call twice). */
    stop() {
      if (stopped) return;
      stopped = true;
      clearInterval(timer);
      process.stdout.write("\r\x1B[2K");
    }
  };
}
96
/**
 * POST to an OpenAI-compatible `/v1/chat/completions` endpoint with
 * `stream: true` and yield events parsed from the SSE response.
 *
 * Yields `{ type: "text", delta }` for each content chunk, then a final
 * `{ type: "done", usage: { promptTokens, completionTokens } }`.
 * Throws on a non-2xx status or a missing response body.
 *
 * Fix: the original dropped any final SSE event that arrived without a
 * trailing newline (it stayed in `buf` forever), losing the last delta
 * and the usage totals. The tail is now flushed and parsed after EOF.
 */
async function* streamCompletion(baseUrl, apiKey, model, messages) {
  // Normalize the base URL so both ".../v1" and bare hosts work.
  const base = baseUrl.replace(/\/$/, "");
  const v1base = base.endsWith("/v1") ? base : `${base}/v1`;
  const url = `${v1base}/chat/completions`;
  const res = await fetch(url, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      // Some gateways expect Bearer auth, others x-api-key; send both.
      Authorization: `Bearer ${apiKey}`,
      "x-api-key": apiKey
    },
    body: JSON.stringify({ model, messages, stream: true })
  });
  if (!res.ok) {
    const body = await res.text().catch(() => "");
    throw new Error(`API error ${res.status} at ${url}: ${body || res.statusText}`);
  }
  if (!res.body) throw new Error("No response body received from server.");
  const reader = res.body.getReader();
  const decoder = new TextDecoder();
  let buf = "";
  let promptTokens = 0;
  let completionTokens = 0;
  // Parse one SSE line into its JSON payload, or null for blanks,
  // the [DONE] sentinel, non-data lines, and malformed fragments.
  const parsePayload = (raw) => {
    const line = raw.trim();
    if (!line || line === "data: [DONE]" || !line.startsWith("data: ")) return null;
    try {
      return JSON.parse(line.slice(6));
    } catch {
      return null;
    }
  };
  // Record usage (if present) and return the text delta of a payload.
  const applyUsage = (json) => {
    if (json.usage) {
      promptTokens = json.usage.prompt_tokens ?? 0;
      completionTokens = json.usage.completion_tokens ?? 0;
    }
    return json.choices?.[0]?.delta?.content ?? "";
  };
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    buf += decoder.decode(value, { stream: true });
    const lines = buf.split("\n");
    // Keep the (possibly partial) last fragment for the next chunk.
    buf = lines.pop() ?? "";
    for (const raw of lines) {
      const json = parsePayload(raw);
      if (!json) continue;
      const delta = applyUsage(json);
      if (delta) yield { type: "text", delta };
    }
  }
  // Flush the decoder and parse any final event that arrived without a
  // trailing newline — otherwise the last delta/usage would be lost.
  buf += decoder.decode();
  const tail = parsePayload(buf);
  if (tail) {
    const delta = applyUsage(tail);
    if (delta) yield { type: "text", delta };
  }
  yield { type: "done", usage: { promptTokens, completionTokens } };
}
143
/**
 * Run the interactive REPL chat session.
 *
 * Prints a connection banner, then loops: read a prompt line, handle the
 * in-chat commands (/help, /model, /clear, exit/quit/q), otherwise send
 * the conversation to the API via streamCompletion, render the streamed
 * reply as terminal markdown, and log both turns to the JSONL history.
 *
 * @param options { apiKey, model, baseUrl } — resolved by the CLI entry.
 * Never returns normally; exits the process on "exit" or Ctrl-C.
 */
async function runChatLoop(options) {
  const { apiKey, model, baseUrl } = options;
  // Session id groups this run's entries in history.jsonl:
  // date + short sha256 fingerprint of the key + random suffix.
  const date = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
  const keyFingerprint = createHash("sha256").update(apiKey).digest("hex").slice(0, 8);
  const session = `${date}_${keyFingerprint}_${Math.random().toString(36).slice(2, 7)}`;
  console.log(
    "\n" + chalk.green("\u2713 Connected") + chalk.gray(" \xB7 model ") + chalk.bold.white(model)
  );
  console.log(chalk.dim(` History \u2192 ${getHistoryPath()}`));
  console.log(
    chalk.gray(
      `
Commands: ${chalk.white("/model")} ${chalk.white("/clear")} ${chalk.white("/help")} ${chalk.white("exit")}
`
    )
  );
  // Full conversation so far; sent in its entirety on every request.
  const messages = [];
  while (true) {
    let userInput;
    try {
      userInput = await input({ message: chalk.cyan("\u276F") });
    } catch (err) {
      // Ctrl-C inside the inquirer prompt surfaces as ExitPromptError.
      if (err?.name === "ExitPromptError") {
        console.log(chalk.gray("\nGoodbye!\n"));
        process.exit(0);
      }
      throw err;
    }
    const trimmed = userInput.trim();
    if (!trimmed) continue;
    if (["exit", "quit", "q"].includes(trimmed.toLowerCase())) {
      console.log(chalk.gray("\nGoodbye!\n"));
      process.exit(0);
    }
    if (trimmed === "/help") {
      console.log(chalk.gray("\n /model \u2013 show current model"));
      console.log(chalk.gray(" /clear \u2013 clear conversation history"));
      console.log(chalk.gray(" exit \u2013 quit\n"));
      continue;
    }
    if (trimmed === "/model") {
      console.log(chalk.gray(`
Model: ${chalk.bold.white(model)}
`));
      continue;
    }
    if (trimmed === "/clear") {
      // Truncate in place so the same array keeps being reused below.
      messages.length = 0;
      console.log(chalk.gray("\n \u2713 Conversation cleared.\n"));
      continue;
    }
    messages.push({ role: "user", content: trimmed });
    appendHistory({ ts: (/* @__PURE__ */ new Date()).toISOString(), session, model, role: "user", content: trimmed });
    process.stdout.write("\n");
    const spinner = createSpinner("Loading\u2026");
    let fullResponse = "";
    let tokenCount = 0;
    const start = Date.now();
    try {
      for await (const event of streamCompletion(baseUrl, apiKey, model, messages)) {
        if (event.type === "done") {
          const elapsed = ((Date.now() - start) / 1e3).toFixed(1);
          const { promptTokens, completionTokens } = event.usage;
          spinner.stop();
          if (!fullResponse) {
            console.log(chalk.red(" \u276F No response received.\n"));
            break;
          }
          // Render the complete reply once at the end (no partial renders).
          const rendered = String(await marked(fullResponse)).trimEnd();
          process.stdout.write(chalk.green("\u276F ") + "\n");
          process.stdout.write(rendered + "\n");
          // Show token counts only when the server reported usage.
          const usageStr = promptTokens || completionTokens ? chalk.dim(` \u2191${promptTokens} \u2193${completionTokens} tok \xB7 ${elapsed}s`) : chalk.dim(` ${elapsed}s`);
          process.stdout.write(usageStr + "\n\n");
          appendHistory({
            ts: (/* @__PURE__ */ new Date()).toISOString(),
            session,
            model,
            role: "assistant",
            content: fullResponse,
            usage: { promptTokens, completionTokens },
            latencyMs: Date.now() - start
          });
          break;
        }
        fullResponse += event.delta;
        tokenCount++;
        if (tokenCount === 1) spinner.update("Thinking\u2026");
        if (tokenCount > 5) spinner.update(`Streaming\u2026 ${fullResponse.length} chars`);
      }
      // NOTE(review): when the stream produced no text (the break above),
      // this still appends an empty assistant turn — confirm intended.
      messages.push({ role: "assistant", content: fullResponse });
    } catch (err) {
      spinner.stop();
      // Errors carry the HTTP status inside the message string
      // (see streamCompletion), hence the substring checks below.
      const msg = err?.message ?? String(err);
      if (msg.includes("404")) {
        console.log(chalk.red(`
\u276F 404 \u2014 check your API URL (${baseUrl})
`));
      } else if (msg.includes("401") || msg.includes("403")) {
        console.log(chalk.red("\n \u276F Authentication failed \u2014 check your API key.\n"));
      } else if (msg.includes("429")) {
        console.log(chalk.red("\n \u276F Rate limit reached \u2014 try again shortly.\n"));
      } else {
        console.log(chalk.red(`
\u276F ${msg}
`));
      }
      // Drop the failed user turn so a retry doesn't send it twice.
      if (messages.at(-1)?.role === "user") messages.pop();
    }
  }
}
253
+
254
// src/index.ts
dotenv.config();

const program = new Command();

program
  .name("openhorizon")
  .description("CLI to interact with OpenHorizon AI Models")
  .version("1.0.0");

// `chat` is the default command, so a bare `openhorizon` starts a session.
program
  .command("chat", { isDefault: true })
  .description("Start an interactive chat session with the AI")
  .option("-m, --model <model>", "Specify a model to use for this session")
  .option("-b, --base-url <url>", "Base API URL (e.g. https://api.openhorizon.devwtf.in/v1)")
  .action(async (options) => {
    const config = getConfig();
    // Environment variable wins over the saved config.
    const apiKey = process.env.OPENHORIZON_API_KEY || config.apiKey;
    if (!apiKey) {
      console.error(chalk2.red("\n\u2717 Missing API key."));
      console.log(
        chalk2.gray(
          " Run '" + chalk2.white("openhorizon login <api_key>") + chalk2.gray("' or set ") + chalk2.white("OPENHORIZON_API_KEY") + chalk2.gray(".\n")
        )
      );
      process.exit(1);
    }
    // Resolution order: CLI flag > env var > saved config > built-in default.
    const model = options.model || process.env.OPENHORIZON_MODEL || config.defaultModel || "openhorizon/devstral-small-2:24b";
    const baseUrl = options.baseUrl || process.env.OPENHORIZON_API_URL || "https://api.openhorizon.devwtf.in/v1";
    await runChatLoop({ apiKey, model, baseUrl });
  });

program
  .command("model")
  .description("Get or set the default AI model")
  .argument("[modelName]", "The name of the model to set as default")
  .action((modelName) => {
    const config = getConfig();
    if (!modelName) {
      // No argument: report the current default.
      console.log(chalk2.blue(`Current default model: ${chalk2.bold(config.defaultModel ?? "(not set)")}`));
      return;
    }
    saveConfig({ defaultModel: modelName });
    console.log(chalk2.green(`\u2713 Default model set to: ${chalk2.bold(modelName)}`));
  });

program
  .command("login")
  .description("Save your OpenHorizon API Key securely to local config")
  .argument("[apiKey]", "Your OpenHorizon API Key")
  .action(async (apiKeyInput) => {
    let key = apiKeyInput;
    if (!key) {
      // Prompt interactively when no key was passed on the command line.
      const { input: promptForKey } = await import("@inquirer/prompts");
      key = await promptForKey({
        message: "Enter your OpenHorizon API Key:",
        validate: (val) => val.length > 0 ? true : "API Key is required"
      });
    }
    saveConfig({ apiKey: key });
    console.log(chalk2.green("\u2713 API Key saved successfully."));
  });

program.parse(process.argv);
package/package.json ADDED
@@ -0,0 +1,56 @@
1
+ {
2
+ "name": "openhorizon-cli",
3
+ "version": "1.0.0",
4
+ "description": "Official CLI for OpenHorizon — chat with AI models directly from your terminal",
5
+ "type": "module",
6
+ "bin": {
7
+ "openhorizon": "./dist/index.js"
8
+ },
9
+ "files": [
10
+ "dist",
11
+ "README.md"
12
+ ],
13
+ "engines": {
14
+ "node": ">=18"
15
+ },
16
+ "scripts": {
17
+ "build": "tsup",
18
+ "dev": "tsup --watch",
19
+ "prepublishOnly": "npm run build",
20
+ "release": "npm publish --access public"
21
+ },
22
+ "keywords": [
23
+ "openhorizon",
24
+ "ai",
25
+ "cli",
26
+ "llm",
27
+ "chat",
28
+ "terminal"
29
+ ],
30
+ "homepage": "https://github.com/SkidGod4444/openhorizon",
31
+ "repository": {
32
+ "type": "git",
33
+ "url": "https://github.com/SkidGod4444/openhorizon.git",
34
+ "directory": "packages/cli"
35
+ },
36
+ "license": "MIT",
37
+ "publishConfig": {
38
+ "access": "public"
39
+ },
40
+ "devDependencies": {
41
+ "@types/node": "^25.5.2",
42
+ "@types/marked-terminal": "^6.1.1",
43
+ "tsup": "^8.5.1"
44
+ },
45
+ "peerDependencies": {
46
+ "typescript": "^5"
47
+ },
48
+ "dependencies": {
49
+ "@inquirer/prompts": "^8.3.2",
50
+ "chalk": "^5.6.2",
51
+ "commander": "^14.0.3",
52
+ "dotenv": "^17.4.0",
53
+ "marked": "^17.0.5",
54
+ "marked-terminal": "^7.3.0"
55
+ }
56
+ }