neuronix-node 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +60 -0
- package/dist/api.d.ts +31 -0
- package/dist/api.js +68 -0
- package/dist/config.d.ts +15 -0
- package/dist/config.js +48 -0
- package/dist/handlers/chart.d.ts +2 -0
- package/dist/handlers/chart.js +121 -0
- package/dist/handlers/expense.d.ts +2 -0
- package/dist/handlers/expense.js +102 -0
- package/dist/handlers/file-processor.d.ts +7 -0
- package/dist/handlers/file-processor.js +168 -0
- package/dist/handlers/index.d.ts +20 -0
- package/dist/handlers/index.js +36 -0
- package/dist/handlers/invoice.d.ts +2 -0
- package/dist/handlers/invoice.js +113 -0
- package/dist/handlers/pnl.d.ts +2 -0
- package/dist/handlers/pnl.js +116 -0
- package/dist/handlers/smart-route.d.ts +2 -0
- package/dist/handlers/smart-route.js +116 -0
- package/dist/hardware.d.ts +10 -0
- package/dist/hardware.js +27 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +279 -0
- package/dist/inference.d.ts +25 -0
- package/dist/inference.js +73 -0
- package/dist/models.d.ts +29 -0
- package/dist/models.js +141 -0
- package/dist/parsers/csv.d.ts +24 -0
- package/dist/parsers/csv.js +94 -0
- package/dist/parsers/index.d.ts +17 -0
- package/dist/parsers/index.js +101 -0
- package/dist/updater.d.ts +8 -0
- package/dist/updater.js +48 -0
- package/package.json +51 -0
package/README.md
ADDED
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
# Neuronix Node
|
|
2
|
+
|
|
3
|
+
Earn money by contributing your GPU to the Neuronix decentralized compute network.
|
|
4
|
+
|
|
5
|
+
## Quick Start
|
|
6
|
+
|
|
7
|
+
```bash
|
|
8
|
+
npx neuronix-node
|
|
9
|
+
```
|
|
10
|
+
|
|
11
|
+
That's it. The node will:
|
|
12
|
+
1. Detect your hardware (GPU, CPU, RAM)
|
|
13
|
+
2. Prompt you to log in with your Neuronix account
|
|
14
|
+
3. Download the best AI model for your hardware
|
|
15
|
+
4. Connect to the network and start earning
|
|
16
|
+
|
|
17
|
+
## Requirements
|
|
18
|
+
|
|
19
|
+
- **Node.js 18+** — [Download here](https://nodejs.org)
|
|
20
|
+
- **8 GB RAM** minimum (16 GB recommended)
|
|
21
|
+
- **5 GB free disk space** for AI models
|
|
22
|
+
- **GPU optional** — runs in CPU mode without one, but earns more with a GPU
|
|
23
|
+
|
|
24
|
+
## Supported GPUs
|
|
25
|
+
|
|
26
|
+
| GPU | VRAM | Model Used | Earnings Tier |
|
|
27
|
+
|-----|------|-----------|---------------|
|
|
28
|
+
| No GPU (CPU) | — | TinyLlama 1.1B | Basic |
|
|
29
|
+
| GTX 1060 / RX 580 | 2-4 GB | Phi-2 2.7B | Mid |
|
|
30
|
+
| RTX 3060 / RX 6700 | 6 GB | Mistral 7B | High |
|
|
31
|
+
| RTX 4070+ / RX 7900 | 8+ GB | Llama 3 8B | Premium |
|
|
32
|
+
|
|
33
|
+
## Environment Variables
|
|
34
|
+
|
|
35
|
+
Skip the interactive login by setting:
|
|
36
|
+
|
|
37
|
+
```bash
|
|
38
|
+
export NEURONIX_EMAIL="you@company.com"
|
|
39
|
+
export NEURONIX_PASSWORD="your-password"
|
|
40
|
+
npx neuronix-node
|
|
41
|
+
```
|
|
42
|
+
|
|
43
|
+
## Global Install
|
|
44
|
+
|
|
45
|
+
```bash
|
|
46
|
+
npm install -g neuronix-node
|
|
47
|
+
neuronix-node
|
|
48
|
+
```
|
|
49
|
+
|
|
50
|
+
## Configuration
|
|
51
|
+
|
|
52
|
+
Config is stored at `~/.neuronix/config.json`. Models are cached at `~/.neuronix/models/`.
|
|
53
|
+
|
|
54
|
+
## Sign Up
|
|
55
|
+
|
|
56
|
+
Create a free account at [neuronix-nu.vercel.app/signup](https://neuronix-nu.vercel.app/signup)
|
|
57
|
+
|
|
58
|
+
## License
|
|
59
|
+
|
|
60
|
+
MIT
|
package/dist/api.d.ts
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
import type { NodeConfig } from "./config.js";
import type { HardwareInfo } from "./hardware.js";
/** Server response to a node registration call. */
interface RegisterResponse {
    success: boolean;
    node: {
        id: string;
        /** Server-issued key identifying this node on subsequent calls. */
        node_key: string;
        status: string;
    };
}
/** Server response to a task poll; `task` is null when no work is queued. */
interface PollResponse {
    task: {
        id: string;
        /** Task type tag, e.g. "chart" / "expense_report" — routed by the handler registry. */
        type: string;
        model: string;
        input_payload: Record<string, unknown>;
        cost_usd: number;
        timeout_seconds: number;
    } | null;
    message?: string;
}
/** Server response to a provider login. */
interface LoginResponse {
    access_token: string;
    user_id: string;
}
/** Authenticate a provider account; throws on a non-2xx response. */
export declare function loginProvider(config: NodeConfig, email: string, password: string): Promise<LoginResponse>;
/** Register this node and its detected hardware with the network. */
export declare function registerNode(config: NodeConfig, hw: HardwareInfo): Promise<RegisterResponse>;
/** Report liveness; `status` defaults to "online". */
export declare function sendHeartbeat(config: NodeConfig, status?: string): Promise<void>;
/** Ask the server for the next queued task, if any. */
export declare function pollTask(config: NodeConfig): Promise<PollResponse>;
/** Report a finished (or failed) task together with its output and elapsed time. */
export declare function completeTask(config: NodeConfig, taskId: string, status: "completed" | "failed", outputPayload: Record<string, unknown>, durationMs: number): Promise<void>;
export {};
|
package/dist/api.js
ADDED
|
@@ -0,0 +1,68 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.loginProvider = loginProvider;
|
|
4
|
+
exports.registerNode = registerNode;
|
|
5
|
+
exports.sendHeartbeat = sendHeartbeat;
|
|
6
|
+
exports.pollTask = pollTask;
|
|
7
|
+
exports.completeTask = completeTask;
|
|
8
|
+
/**
 * POST a JSON body to `${config.apiUrl}/api/node${path}`, attaching a Bearer
 * token when the config has one. Resolves with the parsed JSON response, or
 * throws an Error carrying the HTTP status and response text on non-2xx.
 */
async function apiCall(config, path, body) {
    const requestHeaders = config.authToken
        ? { "Content-Type": "application/json", "Authorization": `Bearer ${config.authToken}` }
        : { "Content-Type": "application/json" };
    const response = await fetch(`${config.apiUrl}/api/node${path}`, {
        method: "POST",
        headers: requestHeaders,
        body: JSON.stringify(body),
    });
    if (response.ok) {
        return response.json();
    }
    const detail = await response.text();
    throw new Error(`API error ${response.status}: ${detail}`);
}
|
|
24
|
+
/**
 * Authenticate a provider account against the /api/node/auth endpoint.
 * Unlike apiCall(), this never sends an Authorization header — it is the
 * call that obtains the token in the first place.
 * Throws `Login failed: <body>` on a non-2xx response.
 */
async function loginProvider(config, email, password) {
    const response = await fetch(`${config.apiUrl}/api/node/auth`, {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({ email, password }),
    });
    if (!response.ok) {
        throw new Error(`Login failed: ${await response.text()}`);
    }
    return response.json();
}
|
|
36
|
+
/**
 * Register this node and its detected hardware with the network.
 * `user_id` is omitted (undefined is dropped by JSON.stringify) when the
 * config has no logged-in user yet.
 * NOTE(review): the client version is hard-coded here — keep in sync with
 * package.json.
 */
async function registerNode(config, hw) {
    const registration = {
        node_key: config.nodeKey,
        user_id: config.userId || undefined,
        hostname: hw.hostname,
        os: hw.os,
        gpu_model: hw.gpuModel,
        gpu_vram_mb: hw.gpuVramMb,
        cpu_model: hw.cpuModel,
        ram_mb: hw.ramMb,
        version: "0.1.0",
    };
    return apiCall(config, "/register", registration);
}
|
|
49
|
+
/**
 * Report node liveness to the server; `status` defaults to "online".
 * Resolves with no value — the response body is ignored.
 */
async function sendHeartbeat(config, status = "online") {
    await apiCall(config, "/heartbeat", { node_key: config.nodeKey, status });
}
|
|
55
|
+
/**
 * Ask the server for the next queued task for this node.
 * Resolves with the PollResponse (task may be null when the queue is empty).
 */
async function pollTask(config) {
    return apiCall(config, "/poll", { node_id: config.nodeId });
}
|
|
60
|
+
/**
 * Report a finished (or failed) task back to the server together with its
 * output payload and wall-clock duration in milliseconds.
 */
async function completeTask(config, taskId, status, outputPayload, durationMs) {
    const report = {
        task_id: taskId,
        node_id: config.nodeId,
        status,
        output_payload: outputPayload,
        duration_ms: durationMs,
    };
    await apiCall(config, "/complete", report);
}
|
package/dist/config.d.ts
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
/** Root config directory: `~/.neuronix`. */
declare const CONFIG_DIR: string;
/** Model cache directory: `~/.neuronix/models`. */
declare const MODELS_DIR: string;
/** Persistent node configuration, stored as `~/.neuronix/config.json`. */
export interface NodeConfig {
    /** Locally generated node identity key (`node_<16 hex chars>`). */
    nodeKey: string;
    /** Server-assigned node id; null until the node has registered. */
    nodeId: string | null;
    /** Logged-in provider's user id; null before login. */
    userId: string | null;
    /** Bearer token for API calls; null before login. */
    authToken: string | null;
    apiUrl: string;
    modelsDir: string;
    pollIntervalMs: number;
    heartbeatIntervalMs: number;
}
/** Load (or create on first run) the node configuration. */
export declare function loadConfig(): NodeConfig;
/** Persist the configuration to disk. */
export declare function saveConfig(config: NodeConfig): void;
export { CONFIG_DIR, MODELS_DIR };
|
package/dist/config.js
ADDED
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.MODELS_DIR = exports.CONFIG_DIR = void 0;
|
|
4
|
+
exports.loadConfig = loadConfig;
|
|
5
|
+
exports.saveConfig = saveConfig;
|
|
6
|
+
const crypto_1 = require("crypto");
|
|
7
|
+
const fs_1 = require("fs");
|
|
8
|
+
const path_1 = require("path");
|
|
9
|
+
const os_1 = require("os");
|
|
10
|
+
const CONFIG_DIR = (0, path_1.join)((0, os_1.homedir)(), ".neuronix");
|
|
11
|
+
exports.CONFIG_DIR = CONFIG_DIR;
|
|
12
|
+
const CONFIG_FILE = (0, path_1.join)(CONFIG_DIR, "config.json");
|
|
13
|
+
const MODELS_DIR = (0, path_1.join)(CONFIG_DIR, "models");
|
|
14
|
+
exports.MODELS_DIR = MODELS_DIR;
|
|
15
|
+
const DEFAULT_CONFIG = {
|
|
16
|
+
nodeKey: "",
|
|
17
|
+
nodeId: null,
|
|
18
|
+
userId: null,
|
|
19
|
+
authToken: null,
|
|
20
|
+
apiUrl: "https://neuronix-nu.vercel.app",
|
|
21
|
+
modelsDir: MODELS_DIR,
|
|
22
|
+
pollIntervalMs: 5000,
|
|
23
|
+
heartbeatIntervalMs: 30000,
|
|
24
|
+
};
|
|
25
|
+
/**
 * Load the node configuration from ~/.neuronix/config.json, creating the
 * config and model directories on first use.
 *
 * A fresh config — with a newly generated node key — is written when no
 * config file exists yet, or when the existing file cannot be parsed
 * (corrupt/truncated JSON). Previously a corrupt file made JSON.parse throw
 * and prevented the node from starting at all; now it is logged and the
 * config is regenerated.
 */
function loadConfig() {
    if (!(0, fs_1.existsSync)(CONFIG_DIR)) {
        (0, fs_1.mkdirSync)(CONFIG_DIR, { recursive: true });
    }
    if (!(0, fs_1.existsSync)(MODELS_DIR)) {
        (0, fs_1.mkdirSync)(MODELS_DIR, { recursive: true });
    }
    if ((0, fs_1.existsSync)(CONFIG_FILE)) {
        try {
            const raw = (0, fs_1.readFileSync)(CONFIG_FILE, "utf-8");
            // Merge over defaults so config files written by older versions
            // (missing newer fields) still load with sane values.
            return { ...DEFAULT_CONFIG, ...JSON.parse(raw) };
        }
        catch (err) {
            // Corrupt config: fall through and regenerate instead of crashing.
            console.warn(`Ignoring unreadable config at ${CONFIG_FILE}: ${err instanceof Error ? err.message : err}`);
        }
    }
    const config = {
        ...DEFAULT_CONFIG,
        // 16-hex-char node identity derived from a random UUID.
        nodeKey: `node_${(0, crypto_1.randomUUID)().replace(/-/g, "").slice(0, 16)}`,
    };
    saveConfig(config);
    return config;
}
|
|
43
|
+
/**
 * Persist the config to ~/.neuronix/config.json, creating the directory if
 * needed. The config can contain the provider auth token, so the file is
 * written with owner-only permissions (0600). The mode option is a no-op on
 * Windows, where POSIX permission bits do not apply.
 */
function saveConfig(config) {
    if (!(0, fs_1.existsSync)(CONFIG_DIR)) {
        (0, fs_1.mkdirSync)(CONFIG_DIR, { recursive: true });
    }
    (0, fs_1.writeFileSync)(CONFIG_FILE, JSON.stringify(config, null, 2), { mode: 0o600 });
}
|
|
@@ -0,0 +1,121 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.handleChart = handleChart;
|
|
4
|
+
const DEFAULT_COLORS = [
|
|
5
|
+
"#0066FF", "#10b981", "#f59e0b", "#8b5cf6", "#ec4899",
|
|
6
|
+
"#06b6d4", "#84cc16", "#f97316", "#6366f1", "#14b8a6",
|
|
7
|
+
];
|
|
8
|
+
/**
|
|
9
|
+
* Parse a natural language prompt into chart parameters.
|
|
10
|
+
* Extracts numbers, chart type hints, and uses the prompt as the title.
|
|
11
|
+
*/
|
|
12
|
+
/**
 * Heuristically convert a natural-language prompt into chart parameters.
 *
 * Keyword sniffing picks the chart type (pie/doughnut/line, defaulting to
 * bar), every number found in the prompt becomes a data point in a single
 * "Values" dataset with generated labels (Day/Month/Week/Q/Item N), and the
 * prompt itself — minus a leading "make/create/generate/show/draw" verb —
 * becomes the title, capped at 80 characters.
 */
function parsePrompt(prompt) {
    const lower = prompt.toLowerCase();
    // First matching keyword wins; bar is the fallback.
    let chartType = "bar";
    if (/pie/.test(lower))
        chartType = "pie";
    else if (/doughnut|donut/.test(lower))
        chartType = "doughnut";
    else if (/line/.test(lower))
        chartType = "line";
    const title = prompt
        .replace(/^(make|create|generate|show|draw)\s+(me\s+)?(a\s+)?/i, "")
        .slice(0, 80);
    const numberTokens = prompt.match(/\d+(?:\.\d+)?/g);
    if (!numberTokens || numberTokens.length === 0) {
        // No data in the prompt — only the type and title can be inferred.
        return { chart_type: chartType, title };
    }
    const data = numberTokens.map(Number);
    // Pick a label prefix from time-period words in the prompt.
    let labelPrefix = "Item";
    if (/daily|day/i.test(lower))
        labelPrefix = "Day";
    else if (/month/i.test(lower))
        labelPrefix = "Month";
    else if (/week/i.test(lower))
        labelPrefix = "Week";
    else if (/quarter|q[1-4]/i.test(lower))
        labelPrefix = "Q";
    return {
        chart_type: chartType,
        datasets: [{ label: "Values", data }],
        labels: data.map((_, idx) => `${labelPrefix} ${idx + 1}`),
        title,
    };
}
|
|
40
|
+
/**
 * Render a chart image for a task.
 *
 * The task's input_payload may carry explicit chart parameters (chart_type,
 * labels, datasets, title, width, height) and/or a free-text `prompt` that
 * parsePrompt() turns into the same parameters. Explicit values always win
 * over prompt-derived ones; hard-coded demo labels/data fill any remaining
 * gaps so a render is always attempted.
 *
 * Returns the rendered PNG as base64 plus the effective chart settings and
 * the elapsed time in duration_ms.
 *
 * NOTE(review): requires the "chartjs-node-canvas" package at runtime — the
 * dynamic import below throws if it is not installed; confirm it is declared
 * as a dependency.
 */
async function handleChart(task) {
    const start = Date.now();
    const raw = task.input_payload;
    // If there's a prompt, parse it for chart parameters
    const parsed = raw.prompt ? parsePrompt(raw.prompt) : {};
    const input = { ...parsed, ...raw }; // explicit params override parsed ones
    // But keep parsed values if raw doesn't have them
    // (the spread above lets a falsy raw value shadow a parsed one, so each
    // field is re-checked explicitly here)
    if (!raw.chart_type && parsed.chart_type)
        input.chart_type = parsed.chart_type;
    if (!raw.labels && parsed.labels)
        input.labels = parsed.labels;
    if (!raw.datasets && parsed.datasets)
        input.datasets = parsed.datasets;
    if (!raw.title && parsed.title)
        input.title = parsed.title;
    // Effective settings with demo fallbacks.
    const chartType = input.chart_type || "bar";
    const title = input.title || "Chart";
    const labels = input.labels || ["Jan", "Feb", "Mar", "Apr", "May", "Jun"];
    const width = input.width || 800;
    const height = input.height || 400;
    // Default dataset if none provided
    const datasets = input.datasets || [{
            label: "Data",
            data: [12, 19, 3, 5, 2, 3],
        }];
    // Assign colors if not provided: pie/doughnut charts get one color per
    // slice (per label); other chart types get one color per dataset.
    const coloredDatasets = datasets.map((ds, i) => ({
        ...ds,
        backgroundColor: ds.backgroundColor || (chartType === "pie" || chartType === "doughnut"
            ? DEFAULT_COLORS.slice(0, labels.length)
            : DEFAULT_COLORS[i % DEFAULT_COLORS.length]),
        borderColor: ds.borderColor || (chartType === "line" ? DEFAULT_COLORS[i % DEFAULT_COLORS.length] : undefined),
        borderWidth: chartType === "line" ? 2 : 1,
        fill: chartType === "line" ? false : undefined,
    }));
    // Dynamic import for chartjs-node-canvas (ESM)
    const { ChartJSNodeCanvas } = await import("chartjs-node-canvas");
    // Dark background matching the frontend theme.
    const chartCanvas = new ChartJSNodeCanvas({ width, height, backgroundColour: "#0a0a0f" });
    const config = {
        type: chartType,
        data: {
            labels,
            datasets: coloredDatasets,
        },
        options: {
            responsive: false,
            plugins: {
                title: {
                    display: true,
                    text: title,
                    color: "#ffffff",
                    font: { size: 16, weight: "bold" },
                },
                legend: {
                    labels: { color: "#9ca3af" },
                },
            },
            // Pie/doughnut charts have no cartesian axes.
            scales: chartType === "pie" || chartType === "doughnut" ? {} : {
                x: {
                    ticks: { color: "#6b7280" },
                    grid: { color: "rgba(255,255,255,0.06)" },
                },
                y: {
                    ticks: { color: "#6b7280" },
                    grid: { color: "rgba(255,255,255,0.06)" },
                },
            },
        },
    };
    const imageBuffer = await chartCanvas.renderToBuffer(config);
    const base64 = imageBuffer.toString("base64");
    const durationMs = Date.now() - start;
    return {
        image_base64: base64,
        image_format: "png",
        chart_type: chartType,
        title,
        width,
        height,
        duration_ms: durationMs,
    };
}
|
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.handleExpenseReport = handleExpenseReport;
|
|
4
|
+
/**
 * Build an expense report from `task.input_payload`.
 *
 * Input: `period` (display label) and `expenses` — an array of
 * { description, amount, category, date? } — with demo data substituted when
 * no expenses are supplied. Output: a plain-text summary, a CSV export,
 * per-category totals sorted by spend, doughnut-chart data for the frontend,
 * and duration_ms.
 *
 * Fixes over the previous version: the largest expense is found in a single
 * pass instead of two `expenses.sort()` calls that mutated the caller's
 * array (and returned `expenses` reordered as a side effect); an empty
 * expense list no longer throws or produces NaN percentages. `expenses` is
 * now returned in the order it was received.
 */
async function handleExpenseReport(task) {
    const start = Date.now();
    const input = task.input_payload;
    const period = input.period || "Current Period";
    const expenses = input.expenses || [
        { description: "Office Supplies", amount: 245.50, category: "Operations", date: "2026-03-01" },
        { description: "Software Subscriptions", amount: 890.00, category: "Technology", date: "2026-03-05" },
        { description: "Team Lunch", amount: 156.75, category: "Meals", date: "2026-03-08" },
        { description: "Cloud Hosting", amount: 432.00, category: "Technology", date: "2026-03-10" },
        { description: "Marketing Ads", amount: 1200.00, category: "Marketing", date: "2026-03-12" },
        { description: "Travel - Client Meeting", amount: 387.25, category: "Travel", date: "2026-03-15" },
        { description: "Printer Ink", amount: 89.99, category: "Operations", date: "2026-03-18" },
        { description: "Conference Tickets", amount: 599.00, category: "Education", date: "2026-03-20" },
    ];
    // Single pass: grand total, per-category buckets, and the largest
    // expense (first occurrence wins on ties, matching a stable desc sort).
    const byCategory = new Map();
    let total = 0;
    let largestExpense = null;
    for (const exp of expenses) {
        total += exp.amount;
        if (largestExpense === null || exp.amount > largestExpense.amount) {
            largestExpense = exp;
        }
        let bucket = byCategory.get(exp.category);
        if (!bucket) {
            bucket = { total: 0, count: 0, items: [] };
            byCategory.set(exp.category, bucket);
        }
        bucket.total += exp.amount;
        bucket.count += 1;
        bucket.items.push(exp);
    }
    // Round to cents to avoid float noise in the output.
    total = Math.round(total * 100) / 100;
    const sortedCategories = [...byCategory.entries()]
        .map(([name, data]) => {
            const catTotal = Math.round(data.total * 100) / 100;
            return {
                category: name,
                total: catTotal,
                count: data.count,
                // Share of the grand total, one decimal place; 0 when there
                // are no expenses (avoids NaN from dividing by zero).
                percentage: total > 0 ? Math.round((catTotal / total) * 1000) / 10 : 0,
            };
        })
        .sort((a, b) => b.total - a.total);
    // Chart payload consumed by the frontend renderer.
    const chartData = {
        chart_type: "doughnut",
        title: `Expenses by Category — ${period}`,
        labels: sortedCategories.map((c) => c.category),
        datasets: [{
                label: "Expenses",
                data: sortedCategories.map((c) => c.total),
            }],
    };
    // Human-readable summary.
    const summaryLines = [
        `Expense Report: ${period}`,
        `Total: $${total.toFixed(2)} across ${expenses.length} expenses`,
        ``,
        `By Category:`,
        ...sortedCategories.map((c) => `  ${c.category}: $${c.total.toFixed(2)} (${c.percentage}%) — ${c.count} item${c.count > 1 ? "s" : ""}`),
    ];
    if (largestExpense) {
        summaryLines.push(``, `Largest expense: ${largestExpense.description} ($${largestExpense.amount.toFixed(2)})`);
    }
    // CSV export: itemized section followed by the category summary.
    const csvLines = [
        "EXPENSE REPORT",
        `Period,${period}`,
        "",
        "EXPENSES BY ITEM",
        "Date,Description,Amount,Category",
        ...expenses.map((e) => `${e.date || ""},${csvEscape(e.description)},$${e.amount.toFixed(2)},${csvEscape(e.category)}`),
        "",
        "SUMMARY BY CATEGORY",
        "Category,Total,% of Total,Item Count",
        ...sortedCategories.map((c) => `${csvEscape(c.category)},$${c.total.toFixed(2)},${c.percentage}%,${c.count}`),
        "",
        `TOTAL,,$${total.toFixed(2)},${expenses.length} items`,
    ];
    const durationMs = Date.now() - start;
    return {
        text: summaryLines.join("\n"),
        output_csv: csvLines.join("\n"),
        summary: {
            period,
            total,
            expense_count: expenses.length,
            category_count: sortedCategories.length,
            largest_category: sortedCategories[0]?.category,
            largest_expense: largestExpense,
        },
        by_category: sortedCategories,
        expenses, // input order preserved (no longer sorted in place)
        chart_data: chartData,
        duration_ms: durationMs,
    };
}
|
|
97
|
+
/**
 * Quote a CSV field when it contains a comma, double quote, or newline,
 * doubling any embedded quotes per RFC 4180. Fields needing no quoting are
 * returned unchanged.
 */
function csvEscape(value) {
    const needsQuoting = /[",\n]/.test(value);
    if (!needsQuoting) {
        return value;
    }
    const doubled = value.split('"').join('""');
    return `"${doubled}"`;
}
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
import type { TaskInput, TaskOutput } from "./index.js";
/**
 * File processor handler.
 * Receives a file's content, parses it, determines what to do,
 * then routes to the appropriate specialized handler.
 *
 * @param task Task whose input_payload carries file_name, file_type and
 *             file_content.
 * @returns The routed handler's output (with parsed-file metadata attached),
 *          or a parsed summary when no action applies.
 */
export declare function handleFileProcess(task: TaskInput): Promise<TaskOutput>;
|
|
@@ -0,0 +1,168 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.handleFileProcess = handleFileProcess;
|
|
4
|
+
const index_js_1 = require("../parsers/index.js");
|
|
5
|
+
const chart_js_1 = require("./chart.js");
|
|
6
|
+
const expense_js_1 = require("./expense.js");
|
|
7
|
+
const pnl_js_1 = require("./pnl.js");
|
|
8
|
+
const invoice_js_1 = require("./invoice.js");
|
|
9
|
+
/**
|
|
10
|
+
* File processor handler.
|
|
11
|
+
* Receives a file's content, parses it, determines what to do,
|
|
12
|
+
* then routes to the appropriate specialized handler.
|
|
13
|
+
*/
|
|
14
|
+
/**
 * File processor handler.
 *
 * Parses the uploaded file (input_payload.file_name / file_type /
 * file_content) via parseFile(), then auto-executes the FIRST suggested
 * action when one maps onto a specialized handler:
 *   - "expense_report" (CSV): rows -> expenses -> handleExpenseReport
 *   - "chart"          (CSV): rows -> chart params -> handleChart
 *   - "pnl"            (CSV): rows -> revenue/expenses -> handlePnl
 *   - "invoice"             : handleInvoice with an empty payload
 * When no action fires, returns the parsed data as a JSON summary together
 * with the untried suggested actions.
 *
 * NOTE(review): assumes file_content is a string — confirm with the caller;
 * parseFile's contract (suggestedActions ordering, csv data shape) is
 * defined in ../parsers/index.js.
 */
async function handleFileProcess(task) {
    const start = Date.now();
    const payload = task.input_payload;
    // Fallbacks keep parseFile callable even on an underspecified payload.
    const fileName = payload.file_name || "unknown";
    const fileType = payload.file_type || "unknown";
    const content = payload.file_content || "";
    // Parse the file
    const parsed = (0, index_js_1.parseFile)(fileName, fileType, content);
    // If there are suggested actions, auto-execute the best one
    // (only the first suggestion is attempted; later ones are reported back
    // in the default branch instead)
    if (parsed.suggestedActions.length > 0) {
        const bestAction = parsed.suggestedActions[0];
        // Build input from parsed data
        if (bestAction === "expense_report" && parsed.type === "csv") {
            const csvData = parsed.data;
            const expenses = csvToExpenses(csvData);
            // Falls through to the summary branch when no usable rows exist.
            if (expenses.length > 0) {
                const result = await (0, expense_js_1.handleExpenseReport)({
                    type: "expense_report",
                    input_payload: { expenses },
                });
                return {
                    ...result,
                    parsed_file: { type: parsed.type, rows: csvData.rowCount, columns: csvData.headers.length },
                    auto_action: bestAction,
                    // Overrides the inner handler's duration with the full
                    // parse+route+handle time.
                    duration_ms: Date.now() - start,
                };
            }
        }
        if (bestAction === "chart" && parsed.type === "csv") {
            const csvData = parsed.data;
            const chartInput = csvToChart(csvData, fileName);
            if (chartInput) {
                const result = await (0, chart_js_1.handleChart)({
                    type: "chart",
                    input_payload: chartInput,
                });
                return {
                    ...result,
                    parsed_file: { type: parsed.type, rows: csvData.rowCount, columns: csvData.headers.length },
                    auto_action: bestAction,
                    duration_ms: Date.now() - start,
                };
            }
        }
        if (bestAction === "pnl" && parsed.type === "csv") {
            const csvData = parsed.data;
            const pnlInput = csvToPnl(csvData);
            if (pnlInput) {
                const result = await (0, pnl_js_1.handlePnl)({
                    type: "pnl",
                    input_payload: pnlInput,
                });
                return {
                    ...result,
                    parsed_file: { type: parsed.type, rows: csvData.rowCount, columns: csvData.headers.length },
                    auto_action: bestAction,
                    duration_ms: Date.now() - start,
                };
            }
        }
        if (bestAction === "invoice") {
            // Invoice generation runs on defaults only — the parsed file data
            // is not forwarded to the invoice handler.
            const result = await (0, invoice_js_1.handleInvoice)({
                type: "invoice",
                input_payload: {},
            });
            return {
                ...result,
                parsed_file: { type: parsed.type },
                auto_action: bestAction,
                duration_ms: Date.now() - start,
            };
        }
    }
    // Default: return parsed summary
    const durationMs = Date.now() - start;
    return {
        text: `Parsed "${fileName}" (${fileType}).\n\n${JSON.stringify(parsed.data, null, 2)}`,
        parsed_file: parsed,
        suggested_actions: parsed.suggestedActions,
        duration_ms: durationMs,
    };
}
|
|
96
|
+
/**
|
|
97
|
+
* Convert CSV data into expense objects for the expense handler.
|
|
98
|
+
*/
|
|
99
|
+
/**
 * Map parsed CSV rows onto expense objects by sniffing the headers for
 * description/amount/category/date-like columns. Returns [] when no
 * amount-like column exists; rows whose amount does not parse to a positive
 * number are dropped. "$" and "," are stripped before parsing amounts.
 */
function csvToExpenses(csv) {
    const lowered = csv.headers.map((h) => h.toLowerCase());
    const findColumn = (pattern) => lowered.findIndex((h) => pattern.test(h));
    const descCol = findColumn(/description|name|item|memo/);
    const amountCol = findColumn(/amount|cost|price|total|debit/);
    const categoryCol = findColumn(/category|type|class|department/);
    const dateCol = findColumn(/date/);
    if (amountCol === -1) {
        return [];
    }
    const result = [];
    for (const row of csv.rows) {
        const amount = parseFloat((row[amountCol] || "0").replace(/[$,]/g, "")) || 0;
        if (amount <= 0) {
            continue; // unparseable, zero, or negative amounts are skipped
        }
        result.push({
            description: descCol >= 0 ? row[descCol] : "Item",
            amount,
            category: categoryCol >= 0 ? row[categoryCol] : "Uncategorized",
            date: dateCol >= 0 ? row[dateCol] : undefined,
        });
    }
    return result;
}
|
|
115
|
+
/**
|
|
116
|
+
* Convert CSV data into chart parameters.
|
|
117
|
+
*/
|
|
118
|
+
/**
 * Convert parsed CSV data into chart parameters, or null when the CSV has no
 * numeric column to plot. The first label-like header becomes the x-axis
 * (falling back to "Row N") and the first number/currency-typed column
 * supplies the values; the filename (extension stripped, _/- spaced) is the
 * title. Output is capped at 20 points; >8 points renders as a line chart.
 *
 * Fix over the previous version: rows whose numeric cell does not parse are
 * now skipped entirely, so labels and data stay aligned (labels and
 * NaN-filtered data used to be sliced independently, pairing labels with the
 * wrong values).
 */
function csvToChart(csv, fileName) {
    if (csv.summary.numericColumns.length === 0)
        return null;
    const headersLower = csv.headers.map((h) => h.toLowerCase());
    const labelCol = headersLower.findIndex((h) => /name|description|category|date|month|item|label/.test(h));
    const numCol = csv.headers.findIndex((h) => csv.columnTypes[h] === "number" || csv.columnTypes[h] === "currency");
    if (numCol === -1)
        return null;
    const labels = [];
    const data = [];
    for (let i = 0; i < csv.rows.length && data.length < 20; i++) {
        const row = csv.rows[i];
        const value = parseFloat((row[numCol] || "0").replace(/[$,]/g, ""));
        if (isNaN(value))
            continue; // skip the label too, keeping labels[j] aligned with data[j]
        labels.push(labelCol >= 0 ? row[labelCol] : `Row ${i + 1}`);
        data.push(value);
    }
    return {
        chart_type: data.length > 8 ? "line" : "bar",
        title: fileName.replace(/\.[^.]+$/, "").replace(/[_-]/g, " "),
        labels,
        datasets: [{ label: csv.headers[numCol], data }],
    };
}
|
|
140
|
+
/**
|
|
141
|
+
* Convert CSV data into P&L parameters.
|
|
142
|
+
*/
|
|
143
|
+
/**
 * Split parsed CSV rows into revenue and expense line items for the P&L
 * handler. A row counts as revenue when its type/category column or its
 * description mentions revenue/income/sale; everything else is an expense,
 * categorized by the (lowercased) type cell or "General".
 *
 * Returns null when no amount-like column exists or no usable rows were
 * found. Fixes over the previous version: a missing type cell in a short row
 * no longer throws (undefined.toLowerCase), and rows whose amount fails to
 * parse (NaN) are now skipped instead of being pushed with a NaN amount.
 */
function csvToPnl(csv) {
    const headersLower = csv.headers.map((h) => h.toLowerCase());
    const descCol = headersLower.findIndex((h) => /description|name|item/.test(h));
    const amountCol = headersLower.findIndex((h) => /amount|total/.test(h));
    const typeCol = headersLower.findIndex((h) => /type|category/.test(h));
    if (amountCol === -1)
        return null;
    const revenue = [];
    const expenses = [];
    // Hoisted: same pattern is applied to both the type and description cells.
    const revenuePattern = /revenue|income|sale/i;
    for (const row of csv.rows) {
        const desc = (descCol >= 0 ? row[descCol] : undefined) || "Item";
        // Guard against short rows: the type cell may be undefined.
        const type = (typeCol >= 0 ? row[typeCol] || "" : "").toLowerCase();
        const amount = Math.abs(parseFloat((row[amountCol] || "0").replace(/[$,]/g, "")));
        if (!(amount > 0))
            continue; // drops zero AND NaN amounts
        if (revenuePattern.test(type) || revenuePattern.test(desc)) {
            revenue.push({ description: desc, amount });
        }
        else {
            expenses.push({ description: desc, amount, category: type || "General" });
        }
    }
    if (revenue.length === 0 && expenses.length === 0)
        return null;
    return { revenue, expenses };
}
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
/** A unit of work pulled from the network queue. */
export interface TaskInput {
    /** Task type tag used to look up a handler (e.g. "chart", "pnl"). */
    type: string;
    /** Handler-specific parameters; each handler documents its own keys. */
    input_payload: Record<string, unknown>;
    model?: string;
}
/** Handler result: arbitrary output keys plus the elapsed time. */
export interface TaskOutput {
    [key: string]: unknown;
    /** Wall-clock handler time in milliseconds. */
    duration_ms: number;
}
export type TaskHandler = (input: TaskInput) => Promise<TaskOutput>;
/** Registry mapping task-type tags to their handlers. */
declare const handlers: Record<string, TaskHandler>;
/**
 * Check if we have a specialized handler for this task type.
 */
export declare function hasHandler(type: string): boolean;
/**
 * Run the specialized handler for a task type.
 */
export declare function runHandler(task: TaskInput): Promise<TaskOutput>;
export { handlers };
|