notoken-core 1.1.0 → 1.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,47 @@
1
+ # notoken-core
2
+
3
+ Shared engine for [notoken](https://notoken.sh) — NLP parsing, execution, detection, analysis.
4
+
5
+ Used by the CLI (`notoken`) and the desktop app (`notoken-installer`).
6
+
7
+ ## Install
8
+
9
+ ```bash
10
+ npm install notoken-core
11
+ ```
12
+
13
+ ## Usage
14
+
15
+ ```typescript
16
+ import {
17
+ parseIntent,
18
+ executeIntent,
19
+ detectLocalPlatform,
20
+ checkForUpdate,
21
+ } from "notoken-core";
22
+
23
+ // Parse natural language into a structured intent
24
+ const parsed = await parseIntent("restart nginx on prod");
25
+ console.log(parsed.intent.intent); // "service.restart"
26
+ console.log(parsed.intent.fields); // { service: "nginx", environment: "prod" }
27
+
28
+ // Detect platform
29
+ const platform = detectLocalPlatform();
30
+ console.log(platform.distro); // "Ubuntu 24.04.2 LTS"
31
+ console.log(platform.packageManager); // "apt"
32
+ ```
33
+
34
+ ## What's Inside
35
+
36
+ - **119 config-driven intents** — services, docker, git, files, network, security, databases, and more
37
+ - **NLP pipeline** — rule parser + compromise POS tagging + multi-classifier + keyboard typo correction
38
+ - **LLM fallback** — Claude CLI, OpenAI API, or Ollama (auto-detected)
39
+ - **File parsers** — passwd, shadow, .env, yaml, json, nginx, apache, BIND zone files
40
+ - **Intelligent analysis** — load/disk/memory assessment, project type detection
41
+ - **Conversation persistence** — knowledge tree, coreference resolution
42
+ - **Platform detection** — Linux/macOS/Windows/WSL, adapts commands per OS
43
+ - **Adaptive rules** — learns from failures via LLM
44
+
45
+ ## License
46
+
47
+ MIT — [Dino Bartolome](https://notoken.sh)
@@ -12,6 +12,6 @@
12
12
  *
13
13
  * Usage:
14
14
  * npx tsx src/healing/claudeHealer.ts [--promote] [--dry-run]
15
- * MYCLI_LLM_CLI=claude npm run heal:claude
15
+ * NOTOKEN_LLM_CLI=claude npm run heal:claude
16
16
  */
17
17
  export {};
@@ -12,7 +12,7 @@
12
12
  *
13
13
  * Usage:
14
14
  * npx tsx src/healing/claudeHealer.ts [--promote] [--dry-run]
15
- * MYCLI_LLM_CLI=claude npm run heal:claude
15
+ * NOTOKEN_LLM_CLI=claude npm run heal:claude
16
16
  */
17
17
  import { execSync, execFileSync } from "node:child_process";
18
18
  import { readFileSync, existsSync } from "node:fs";
@@ -34,7 +34,7 @@ async function main() {
34
34
  execSync("command -v claude", { stdio: "pipe" });
35
35
  }
36
36
  catch {
37
- console.error(`${c.red}Claude CLI not found. Install it or set MYCLI_LLM_CLI=claude.${c.reset}`);
37
+ console.error(`${c.red}Claude CLI not found. Install it or set NOTOKEN_LLM_CLI=claude.${c.reset}`);
38
38
  process.exit(1);
39
39
  }
40
40
  // Gather context
@@ -4,14 +4,14 @@ import { RulePatch as RulePatchSchema } from "../types/rules.js";
4
4
  * RuleBuilder: asks an LLM to propose new rules from a set of example phrases.
5
5
  */
6
6
  export async function buildRulesFromExamples(examples) {
7
- const endpoint = process.env.MYCLI_LLM_ENDPOINT;
7
+ const endpoint = process.env.NOTOKEN_LLM_ENDPOINT;
8
8
  if (!endpoint) {
9
- console.error("Set MYCLI_LLM_ENDPOINT to use the RuleBuilder.");
9
+ console.error("Set NOTOKEN_LLM_ENDPOINT to use the RuleBuilder.");
10
10
  return null;
11
11
  }
12
12
  const rules = loadRules();
13
13
  const intents = loadIntents();
14
- const apiKey = process.env.MYCLI_LLM_API_KEY ?? "";
14
+ const apiKey = process.env.NOTOKEN_LLM_API_KEY ?? "";
15
15
  const intentList = intents.map((i) => `- ${i.name}: ${i.description}`).join("\n");
16
16
  const prompt = `You are a rule builder for a CLI command parser.
17
17
 
@@ -56,7 +56,7 @@ Rules:
56
56
  ...(apiKey ? { Authorization: `Bearer ${apiKey}`, "x-api-key": apiKey } : {}),
57
57
  },
58
58
  body: JSON.stringify({
59
- model: process.env.MYCLI_LLM_MODEL ?? "claude-sonnet-4-20250514",
59
+ model: process.env.NOTOKEN_LLM_MODEL ?? "claude-sonnet-4-20250514",
60
60
  max_tokens: 1024,
61
61
  messages: [{ role: "user", content: prompt }],
62
62
  }),
@@ -2,8 +2,8 @@
2
2
  * LLM fallback for unrecognized prompts.
3
3
  *
4
4
  * ONLY fires when an LLM is actually configured:
5
- * - MYCLI_LLM_ENDPOINT env var is set (for API), OR
6
- * - MYCLI_LLM_CLI=claude|chatgpt is set (for CLI tools)
5
+ * - NOTOKEN_LLM_ENDPOINT env var is set (for API), OR
6
+ * - NOTOKEN_LLM_CLI=claude|chatgpt is set (for CLI tools)
7
7
  *
8
8
  * Otherwise returns null immediately — no noise, no "trying fallback" messages.
9
9
  */
@@ -2,8 +2,8 @@
2
2
  * LLM fallback for unrecognized prompts.
3
3
  *
4
4
  * ONLY fires when an LLM is actually configured:
5
- * - MYCLI_LLM_ENDPOINT env var is set (for API), OR
6
- * - MYCLI_LLM_CLI=claude|chatgpt is set (for CLI tools)
5
+ * - NOTOKEN_LLM_ENDPOINT env var is set (for API), OR
6
+ * - NOTOKEN_LLM_CLI=claude|chatgpt is set (for CLI tools)
7
7
  *
8
8
  * Otherwise returns null immediately — no noise, no "trying fallback" messages.
9
9
  */
@@ -18,13 +18,13 @@ import { detectLocalPlatform } from "../utils/platform.js";
18
18
  * Order: explicit config → auto-detect Ollama → nothing.
19
19
  */
20
20
  export function isLLMConfigured() {
21
- return !!(process.env.MYCLI_LLM_ENDPOINT || process.env.MYCLI_LLM_CLI || detectOllama());
21
+ return !!(process.env.NOTOKEN_LLM_ENDPOINT || process.env.NOTOKEN_LLM_CLI || detectOllama());
22
22
  }
23
23
  /** Which LLM backend is active? */
24
24
  export function getLLMBackend() {
25
- if (process.env.MYCLI_LLM_CLI)
26
- return process.env.MYCLI_LLM_CLI;
27
- if (process.env.MYCLI_LLM_ENDPOINT)
25
+ if (process.env.NOTOKEN_LLM_CLI)
26
+ return process.env.NOTOKEN_LLM_CLI;
27
+ if (process.env.NOTOKEN_LLM_ENDPOINT)
28
28
  return "api";
29
29
  if (detectOllama())
30
30
  return "ollama";
@@ -56,13 +56,13 @@ export async function llmFallback(rawText, context) {
56
56
  if (!isLLMConfigured())
57
57
  return null;
58
58
  // Try CLI tool if configured
59
- if (process.env.MYCLI_LLM_CLI) {
59
+ if (process.env.NOTOKEN_LLM_CLI) {
60
60
  const cliResult = await tryLLMCli(rawText, context);
61
61
  if (cliResult)
62
62
  return cliResult;
63
63
  }
64
64
  // Try API endpoint if configured
65
- if (process.env.MYCLI_LLM_ENDPOINT) {
65
+ if (process.env.NOTOKEN_LLM_ENDPOINT) {
66
66
  const apiResult = await tryApiEndpoint(rawText, context);
67
67
  if (apiResult)
68
68
  return apiResult;
@@ -76,7 +76,7 @@ export async function llmFallback(rawText, context) {
76
76
  return null;
77
77
  }
78
78
  async function tryLLMCli(rawText, context) {
79
- const cli = process.env.MYCLI_LLM_CLI;
79
+ const cli = process.env.NOTOKEN_LLM_CLI;
80
80
  if (!cli)
81
81
  return null;
82
82
  try {
@@ -131,11 +131,11 @@ async function tryLLMCli(rawText, context) {
131
131
  }
132
132
  }
133
133
  async function tryApiEndpoint(rawText, context) {
134
- const endpoint = process.env.MYCLI_LLM_ENDPOINT;
134
+ const endpoint = process.env.NOTOKEN_LLM_ENDPOINT;
135
135
  if (!endpoint)
136
136
  return null;
137
- const apiKey = process.env.MYCLI_LLM_API_KEY ?? "";
138
- const model = process.env.MYCLI_LLM_MODEL ?? "claude-sonnet-4-20250514";
137
+ const apiKey = process.env.NOTOKEN_LLM_API_KEY ?? "";
138
+ const model = process.env.NOTOKEN_LLM_MODEL ?? "claude-sonnet-4-20250514";
139
139
  const prompt = buildPrompt(rawText, context);
140
140
  try {
141
141
  const response = await fetch(endpoint, {
@@ -164,7 +164,7 @@ async function tryApiEndpoint(rawText, context) {
164
164
  }
165
165
  async function tryOllama(rawText, context) {
166
166
  const prompt = buildPrompt(rawText, context);
167
- const model = process.env.MYCLI_OLLAMA_MODEL ?? "llama3.2";
167
+ const model = process.env.NOTOKEN_OLLAMA_MODEL ?? "llama3.2";
168
168
  try {
169
169
  const response = await fetch("http://localhost:11434/api/generate", {
170
170
  method: "POST",
@@ -3,6 +3,6 @@ import type { DynamicIntent } from "../types/intent.js";
3
3
  * LLM-based fallback parser.
4
4
  *
5
5
  * Sends the raw text + context to an LLM and asks for structured JSON.
6
- * Set MYCLI_LLM_ENDPOINT and optionally MYCLI_LLM_API_KEY in env.
6
+ * Set NOTOKEN_LLM_ENDPOINT and optionally NOTOKEN_LLM_API_KEY in env.
7
7
  */
8
8
  export declare function parseByLLM(rawText: string): Promise<DynamicIntent | null>;
@@ -5,13 +5,13 @@ import { loadRules } from "../utils/config.js";
5
5
  * LLM-based fallback parser.
6
6
  *
7
7
  * Sends the raw text + context to an LLM and asks for structured JSON.
8
- * Set MYCLI_LLM_ENDPOINT and optionally MYCLI_LLM_API_KEY in env.
8
+ * Set NOTOKEN_LLM_ENDPOINT and optionally NOTOKEN_LLM_API_KEY in env.
9
9
  */
10
10
  export async function parseByLLM(rawText) {
11
- const endpoint = process.env.MYCLI_LLM_ENDPOINT;
11
+ const endpoint = process.env.NOTOKEN_LLM_ENDPOINT;
12
12
  if (!endpoint)
13
13
  return null;
14
- const apiKey = process.env.MYCLI_LLM_API_KEY ?? "";
14
+ const apiKey = process.env.NOTOKEN_LLM_API_KEY ?? "";
15
15
  const rules = loadRules();
16
16
  const intents = loadIntents();
17
17
  const systemPrompt = buildSystemPrompt(intents, rules);
@@ -24,7 +24,7 @@ export async function parseByLLM(rawText) {
24
24
  ...(apiKey ? { Authorization: `Bearer ${apiKey}`, "x-api-key": apiKey } : {}),
25
25
  },
26
26
  body: JSON.stringify({
27
- model: process.env.MYCLI_LLM_MODEL ?? "claude-sonnet-4-20250514",
27
+ model: process.env.NOTOKEN_LLM_MODEL ?? "claude-sonnet-4-20250514",
28
28
  max_tokens: 512,
29
29
  messages: [
30
30
  { role: "system", content: systemPrompt },
@@ -1,14 +1,15 @@
1
1
  /**
2
2
  * Centralized path resolution.
3
3
  *
4
- * Single source of truth for all directory paths used by mycli.
5
- * Supports three modes:
6
- * 1. Development (tsx): src/utils/paths.ts → resolve("../..")
7
- * 2. npm package: dist/utils/paths.js → resolve("../..")
8
- * 3. SEA binary: embedded assets, writable dirs in ~/.mycli/
4
+ * Single source of truth for all directory paths used by notoken.
5
+ * Everything lives under ~/.notoken/
9
6
  *
10
- * Writable directories (data, logs) always go to ~/.mycli/ so they
11
- * work in all modes. Config is read-only and ships with the package.
7
+ * ~/.notoken/
8
+ * data/ — history, sessions
9
+ * logs/ — failures, uncertainty
10
+ * backups/ — auto-backups before file modifications
11
+ * conversations/ — conversation persistence
12
+ * .update-check.json — update cache
12
13
  */
13
14
  /** Whether running as a Node.js Single Executable Application */
14
15
  export declare function isSEA(): boolean;
@@ -16,7 +17,7 @@ export declare function isSEA(): boolean;
16
17
  export declare const PACKAGE_ROOT: string;
17
18
  /** Read-only config directory (ships with the package) */
18
19
  export declare const CONFIG_DIR: string;
19
- /** User data root — writable, lives in home directory */
20
+ /** User home — everything writable lives here: ~/.notoken/ */
20
21
  export declare const USER_HOME: string;
21
22
  /** Writable data directory (history, sessions) */
22
23
  export declare const DATA_DIR: string;
@@ -1,14 +1,15 @@
1
1
  /**
2
2
  * Centralized path resolution.
3
3
  *
4
- * Single source of truth for all directory paths used by mycli.
5
- * Supports three modes:
6
- * 1. Development (tsx): src/utils/paths.ts → resolve("../..")
7
- * 2. npm package: dist/utils/paths.js → resolve("../..")
8
- * 3. SEA binary: embedded assets, writable dirs in ~/.mycli/
4
+ * Single source of truth for all directory paths used by notoken.
5
+ * Everything lives under ~/.notoken/
9
6
  *
10
- * Writable directories (data, logs) always go to ~/.mycli/ so they
11
- * work in all modes. Config is read-only and ships with the package.
7
+ * ~/.notoken/
8
+ * data/ — history, sessions
9
+ * logs/ — failures, uncertainty
10
+ * backups/ — auto-backups before file modifications
11
+ * conversations/ — conversation persistence
12
+ * .update-check.json — update cache
12
13
  */
13
14
  import { resolve, dirname } from "node:path";
14
15
  import { fileURLToPath } from "node:url";
@@ -19,8 +20,6 @@ const __dirname = dirname(__filename);
19
20
  /** Whether running as a Node.js Single Executable Application */
20
21
  export function isSEA() {
21
22
  try {
22
- // node:sea module only exists when running as a SEA binary
23
- // Use globalThis to check for the injected fuse
24
23
  return !!globalThis.__sea_resources__;
25
24
  }
26
25
  catch {
@@ -31,8 +30,8 @@ export function isSEA() {
31
30
  export const PACKAGE_ROOT = resolve(__dirname, "../..");
32
31
  /** Read-only config directory (ships with the package) */
33
32
  export const CONFIG_DIR = resolve(PACKAGE_ROOT, "config");
34
- /** User data root — writable, lives in home directory */
35
- export const USER_HOME = resolve(process.env.MYCLI_DATA_DIR ?? resolve(homedir(), ".mycli"));
33
+ /** User home — everything writable lives here: ~/.notoken/ */
34
+ export const USER_HOME = resolve(process.env.NOTOKEN_HOME ?? resolve(homedir(), ".notoken"));
36
35
  /** Writable data directory (history, sessions) */
37
36
  export const DATA_DIR = resolve(USER_HOME, "data");
38
37
  /** Writable logs directory (failures, uncertainty) */
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "notoken-core",
3
- "version": "1.1.0",
3
+ "version": "1.2.1",
4
4
  "description": "Shared engine for notoken — NLP parsing, execution, detection, analysis",
5
5
  "type": "module",
6
6
  "license": "MIT",