@buley/neural 4.1.0 → 4.2.0

package/dist/bench/benchmark.d.ts ADDED
@@ -0,0 +1,2 @@
+ export {};
+ //# sourceMappingURL=benchmark.d.ts.map
package/dist/bench/benchmark.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"benchmark.d.ts","sourceRoot":"","sources":["../../src/bench/benchmark.ts"],"names":[],"mappings":""}
package/dist/db/repository.d.ts ADDED
@@ -0,0 +1,13 @@
+ import { Neuron, Synapse } from "../types";
+ export declare class NeuronRepository {
+     create(neuron: Neuron): Promise<void>;
+     createWithSemantics(neuron: Neuron, description: string): Promise<void>;
+     getAll(): Promise<Neuron[]>;
+     delete(id: string): Promise<void>;
+ }
+ export declare class SynapseRepository {
+     create(synapse: Synapse): Promise<void>;
+     getAll(): Promise<Synapse[]>;
+     delete(id: string): Promise<void>;
+ }
+ //# sourceMappingURL=repository.d.ts.map
package/dist/db/repository.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"repository.d.ts","sourceRoot":"","sources":["../../src/db/repository.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,UAAU,CAAC;AAE3C,qBAAa,gBAAgB;IACnB,MAAM,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAQrC,mBAAmB,CAAC,MAAM,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAOvE,MAAM,IAAI,OAAO,CAAC,MAAM,EAAE,CAAC;IAI3B,MAAM,CAAC,EAAE,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;CAG1C;AAED,qBAAa,iBAAiB;IACpB,MAAM,CAAC,OAAO,EAAE,OAAO,GAAG,OAAO,CAAC,IAAI,CAAC;IAOvC,MAAM,IAAI,OAAO,CAAC,OAAO,EAAE,CAAC;IAI5B,MAAM,CAAC,EAAE,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;CAG1C"}
package/dist/db/schema.d.ts ADDED
@@ -0,0 +1,2 @@
+ export declare function initializeSchema(): Promise<void>;
+ //# sourceMappingURL=schema.d.ts.map
package/dist/db/schema.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"schema.d.ts","sourceRoot":"","sources":["../../src/db/schema.ts"],"names":[],"mappings":"AAGA,wBAAsB,gBAAgB,kBAoCrC"}
package/dist/engine/gpu.d.ts ADDED
@@ -0,0 +1,33 @@
+ export declare class GPUEngine {
+     device: GPUDevice | null;
+     pipeline: GPUComputePipeline | null;
+     bindGroup: GPUBindGroup | null;
+     deltaBuffer: GPUBuffer | null;
+     targetBuffer: GPUBuffer | null;
+     paramBuffer: GPUBuffer | null;
+     trainingPipeline: GPUComputePipeline | null;
+     deltaPipeline: GPUComputePipeline | null;
+     trainingBindGroup: GPUBindGroup | null;
+     weightBuffer: GPUBuffer | null;
+     inputBuffer: GPUBuffer | null;
+     biasBuffer: GPUBuffer | null;
+     outputBuffer: GPUBuffer | null;
+     uniformBuffer: GPUBuffer | null;
+     networkSize: number;
+     batchSize: number;
+     init(): Promise<void>;
+     prepareBuffers(size: number, weights: Float32Array, biases: Float32Array, batchSize?: number): void;
+     private createBuffer;
+     runTick(inputs: Float32Array): Promise<Float32Array>;
+     prepareTrainingBuffers(targets: Float32Array, learningRate: number): void;
+     private subscribers;
+     subscribe(callback: (event: {
+         type: 'loss' | 'epoch';
+         value: number;
+     }) => void): () => void;
+     private emit;
+     train(inputs: Float32Array, targets: Float32Array): Promise<Float32Array>;
+     trainTick(deltas?: Float32Array): Promise<void>;
+     injectInput(data: Float32Array): Promise<void>;
+ }
+ //# sourceMappingURL=gpu.d.ts.map
package/dist/engine/gpu.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"gpu.d.ts","sourceRoot":"","sources":["../../src/engine/gpu.ts"],"names":[],"mappings":"AAIA,qBAAa,SAAS;IAClB,MAAM,EAAE,SAAS,GAAG,IAAI,CAAQ;IAChC,QAAQ,EAAE,kBAAkB,GAAG,IAAI,CAAQ;IAC3C,SAAS,EAAE,YAAY,GAAG,IAAI,CAAQ;IAGtC,WAAW,EAAE,SAAS,GAAG,IAAI,CAAQ;IACrC,YAAY,EAAE,SAAS,GAAG,IAAI,CAAQ;IACtC,WAAW,EAAE,SAAS,GAAG,IAAI,CAAQ;IAErC,gBAAgB,EAAE,kBAAkB,GAAG,IAAI,CAAQ;IACnD,aAAa,EAAE,kBAAkB,GAAG,IAAI,CAAQ;IAChD,iBAAiB,EAAE,YAAY,GAAG,IAAI,CAAQ;IAG9C,YAAY,EAAE,SAAS,GAAG,IAAI,CAAQ;IACtC,WAAW,EAAE,SAAS,GAAG,IAAI,CAAQ;IACrC,UAAU,EAAE,SAAS,GAAG,IAAI,CAAQ;IACpC,YAAY,EAAE,SAAS,GAAG,IAAI,CAAQ;IACtC,aAAa,EAAE,SAAS,GAAG,IAAI,CAAQ;IAEvC,WAAW,EAAE,MAAM,CAAK;IACxB,SAAS,EAAE,MAAM,CAAK;IAEhB,IAAI;IA4BV,cAAc,CAAC,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,YAAY,EAAE,MAAM,EAAE,YAAY,EAAE,SAAS,GAAE,MAAU;IAgC/F,OAAO,CAAC,YAAY;IAgBd,OAAO,CAAC,MAAM,EAAE,YAAY,GAAG,OAAO,CAAC,YAAY,CAAC;IA4C1D,sBAAsB,CAAC,OAAO,EAAE,YAAY,EAAE,YAAY,EAAE,MAAM;IA6BlE,OAAO,CAAC,WAAW,CAAsE;IAEzF,SAAS,CAAC,QAAQ,EAAE,CAAC,KAAK,EAAE;QAAE,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC;QAAC,KAAK,EAAE,MAAM,CAAA;KAAE,KAAK,IAAI;IAO9E,OAAO,CAAC,IAAI;IAIN,KAAK,CAAC,MAAM,EAAE,YAAY,EAAE,OAAO,EAAE,YAAY,GAAG,OAAO,CAAC,YAAY,CAAC;IAmCzE,SAAS,CAAC,MAAM,CAAC,EAAE,YAAY,GAAG,OAAO,CAAC,IAAI,CAAC;IA8B/C,WAAW,CAAC,IAAI,EAAE,YAAY,GAAG,OAAO,CAAC,IAAI,CAAC;CAUvD"}
package/dist/engine/translator.d.ts ADDED
@@ -0,0 +1,12 @@
+ import { Neuron, Synapse } from "../types";
+ export declare class Translator {
+     private idToIndex;
+     private indexToId;
+     flatten(neurons: Neuron[], synapses: Synapse[]): {
+         size: number;
+         weights: Float32Array;
+         biases: Float32Array;
+         initialValues: Float32Array;
+     };
+ }
+ //# sourceMappingURL=translator.d.ts.map
package/dist/engine/translator.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"translator.d.ts","sourceRoot":"","sources":["../../src/engine/translator.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,UAAU,CAAC;AAE3C,qBAAa,UAAU;IAEnB,OAAO,CAAC,SAAS,CAAkC;IACnD,OAAO,CAAC,SAAS,CAAgB;IAGjC,OAAO,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG;QAC7C,IAAI,EAAE,MAAM,CAAC;QACb,OAAO,EAAE,YAAY,CAAC;QACtB,MAAM,EAAE,YAAY,CAAC;QACrB,aAAa,EAAE,YAAY,CAAA;KAC9B;CAwCJ"}
package/dist/engine/webnn.d.ts ADDED
@@ -0,0 +1,14 @@
+ export declare class WebNNEngine {
+     context: MLContext | null;
+     builder: MLGraphBuilder | null;
+     graph: MLGraph | null;
+     networkSize: number;
+     batchSize: number;
+     weights: Float32Array | null;
+     biases: Float32Array | null;
+     isReady: boolean;
+     init(): Promise<void>;
+     prepareModel(size: number, weights: Float32Array, biases: Float32Array, batchSize?: number): Promise<void>;
+     runTick(inputs: Float32Array): Promise<Float32Array>;
+ }
+ //# sourceMappingURL=webnn.d.ts.map
package/dist/engine/webnn.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"webnn.d.ts","sourceRoot":"","sources":["../../src/engine/webnn.ts"],"names":[],"mappings":"AAGA,qBAAa,WAAW;IACpB,OAAO,EAAE,SAAS,GAAG,IAAI,CAAQ;IACjC,OAAO,EAAE,cAAc,GAAG,IAAI,CAAQ;IACtC,KAAK,EAAE,OAAO,GAAG,IAAI,CAAQ;IAG7B,WAAW,EAAE,MAAM,CAAK;IACxB,SAAS,EAAE,MAAM,CAAK;IAItB,OAAO,EAAE,YAAY,GAAG,IAAI,CAAQ;IACpC,MAAM,EAAE,YAAY,GAAG,IAAI,CAAQ;IAEnC,OAAO,UAAS;IAEV,IAAI;IAqBJ,YAAY,CAAC,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,YAAY,EAAE,MAAM,EAAE,YAAY,EAAE,SAAS,GAAE,MAAU;IA4D7F,OAAO,CAAC,MAAM,EAAE,YAAY,GAAG,OAAO,CAAC,YAAY,CAAC;CAiB7D"}
package/dist/index.d.ts ADDED
@@ -0,0 +1,117 @@
+ import { NeuronRepository, SynapseRepository } from "./db/repository";
+ import { GPUEngine } from "./engine/gpu";
+ import { WebNNEngine } from "./engine/webnn";
+ import { Neuron, Synapse } from "./types";
+ export type { Neuron, Synapse } from "./types";
+ export declare class NeuralEngine {
+     gpu: GPUEngine;
+     npu: WebNNEngine;
+     neuronRepo: NeuronRepository;
+     synapseRepo: SynapseRepository;
+     private translator;
+     activeBackend: 'gpu' | 'npu';
+     constructor();
+     private neurons;
+     private synapses;
+     init(): Promise<{
+         nodeCount: number;
+         nodes: {
+             id: string;
+             index: number;
+             type: "input" | "hidden" | "output" | "cloud";
+         }[];
+         edges: {
+             id: string;
+             source: number;
+             target: number;
+             weight: number;
+         }[];
+     }>;
+     compile(): Promise<{
+         size: number;
+         weights: Float32Array;
+         biases: Float32Array;
+         initialValues: Float32Array;
+     }>;
+     deployToCloud(): Promise<{
+         nodeCount: number;
+         nodes: {
+             id: string;
+             index: number;
+             type: "input" | "hidden" | "output" | "cloud";
+         }[];
+         edges: {
+             id: string;
+             source: number;
+             target: number;
+             weight: number;
+         }[];
+     }>;
+     getGraphData(): {
+         nodeCount: number;
+         nodes: {
+             id: string;
+             index: number;
+             type: "input" | "hidden" | "output" | "cloud";
+         }[];
+         edges: {
+             id: string;
+             source: number;
+             target: number;
+             weight: number;
+         }[];
+     };
+     deleteSynapse(id: string): Promise<{
+         nodeCount: number;
+         nodes: {
+             id: string;
+             index: number;
+             type: "input" | "hidden" | "output" | "cloud";
+         }[];
+         edges: {
+             id: string;
+             source: number;
+             target: number;
+             weight: number;
+         }[];
+     }>;
+     exportGraph(): {
+         version: string;
+         neurons: Neuron[];
+         synapses: Synapse[];
+     };
+     importGraph(data: {
+         neurons: Neuron[];
+         synapses: Synapse[];
+     }): Promise<{
+         nodeCount: number;
+         nodes: {
+             id: string;
+             index: number;
+             type: "input" | "hidden" | "output" | "cloud";
+         }[];
+         edges: {
+             id: string;
+             source: number;
+             target: number;
+             weight: number;
+         }[];
+     }>;
+     injectInput(data: Float32Array): Promise<void>;
+     runTick(inputs: Float32Array): Promise<Float32Array>;
+ }
+ export declare function init(): Promise<{
+     nodeCount: number;
+     nodes: {
+         id: string;
+         index: number;
+         type: "input" | "hidden" | "output" | "cloud";
+     }[];
+     edges: {
+         id: string;
+         source: number;
+         target: number;
+         weight: number;
+     }[];
+ }>;
+ //# sourceMappingURL=index.d.ts.map
package/dist/index.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,gBAAgB,EAAE,iBAAiB,EAAE,MAAM,iBAAiB,CAAC;AACtE,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AACzC,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAE7C,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,SAAS,CAAC;AAE1C,YAAY,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,SAAS,CAAC;AAE/C,qBAAa,YAAY;IACrB,GAAG,EAAE,SAAS,CAAC;IACf,GAAG,EAAE,WAAW,CAAC;IACjB,UAAU,EAAE,gBAAgB,CAAC;IAC7B,WAAW,EAAE,iBAAiB,CAAC;IAC/B,OAAO,CAAC,UAAU,CAAa;IAE/B,aAAa,EAAE,KAAK,GAAG,KAAK,CAAS;;IAWrC,OAAO,CAAC,OAAO,CAAgB;IAC/B,OAAO,CAAC,QAAQ,CAAiB;IAE3B,IAAI;;;;;;;;;;;;;;IAsDJ,OAAO;;;;;;IAgBP,aAAa;;;;;;;;;;;;;;IAoBnB,YAAY;;;;;;;;;;;;;;IA0BN,aAAa,CAAC,EAAE,EAAE,MAAM;;;;;;;;;;;;;;IAa9B,WAAW;;;;;IAQL,WAAW,CAAC,IAAI,EAAE;QAAE,OAAO,EAAE,MAAM,EAAE,CAAC;QAAC,QAAQ,EAAE,OAAO,EAAE,CAAA;KAAE;;;;;;;;;;;;;;IA0B5D,WAAW,CAAC,IAAI,EAAE,YAAY;IAQ9B,OAAO,CAAC,MAAM,EAAE,YAAY,GAAG,OAAO,CAAC,YAAY,CAAC;CAO7D;AAGD,wBAAsB,IAAI;;;;;;;;;;;;;GAGzB"}
@@ -4,10 +4,10 @@ export interface Neuron {
      bias: number;
      activation: string;
  }
-
  export interface Synapse {
      id: string;
      from_id: string;
      to_id: string;
      weight: number;
  }
+ //# sourceMappingURL=types.d.ts.map
package/dist/types.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA,MAAM,WAAW,MAAM;IACnB,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,EAAE,OAAO,GAAG,QAAQ,GAAG,QAAQ,GAAG,OAAO,CAAC;IAC9C,IAAI,EAAE,MAAM,CAAC;IACb,UAAU,EAAE,MAAM,CAAC;CACtB;AAED,MAAM,WAAW,OAAO;IACpB,EAAE,EAAE,MAAM,CAAC;IACX,OAAO,EAAE,MAAM,CAAC;IAChB,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,EAAE,MAAM,CAAC;CAClB"}
package/package.json CHANGED
@@ -1,16 +1,29 @@
  {
    "name": "@buley/neural",
-   "version": "4.1.0",
+   "version": "4.2.0",
    "description": "A Transparent, Local-First, WebGPU-Accelerated Neural Graph Database.",
    "type": "module",
-   "main": "./src/index.ts",
+   "main": "./dist/index.js",
+   "types": "./dist/index.d.ts",
+   "files": [
+     "dist"
+   ],
+   "exports": {
+     ".": {
+       "types": "./dist/index.d.ts",
+       "import": "./dist/index.js"
+     }
+   },
+   "publishConfig": {
+     "access": "public"
+   },
    "scripts": {
      "dev": "vite",
-     "build": "tsc && vite build",
+     "build": "vite build && tsc -p tsconfig.build.json --emitDeclarationOnly",
      "bench": "bun src/bench/benchmark.ts"
    },
    "dependencies": {
-     "@buley/dash": "4.1.0"
+     "@buley/dash": "^4.3.0"
    },
    "devDependencies": {
      "@webgpu/types": "^0.1.69",
package/index.html DELETED
@@ -1,12 +0,0 @@
- <!doctype html>
- <html lang="en">
-   <head>
-     <meta charset="UTF-8" />
-     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-     <title>Neural 2.0: The Transparent Brain</title>
-   </head>
-   <body>
-     <div id="app"></div>
-     <script type="module" src="/src/index.ts"></script>
-   </body>
- </html>
package/src/bench/benchmark.ts DELETED
@@ -1,107 +0,0 @@
-
- import { GPUEngine } from "../engine/gpu";
- import { performance } from "perf_hooks";
-
- // Mock WebGPU types for Node environment if needed, likely handled by bun's specialized runtime or mocks if strictly node.
- // However, since we are using Bun, we might need a headless WebGPU implementation or we run this in a real browser.
- // REALITY CHECK: running WebGPU in a headless CI/Node environment usually requires 'headless-gl' or similar, but WebGPU is newer.
- // Bun does not support WebGPU native out of the box yet.
- // For this task, since the user is on Mac, we will assume they might run this via a browser test runner OR we simulate/mock for the "structure" of the benchmark
- // if actual GPU access isn't available in the terminal.
- // BUT: The roadmap implies real GPU benchmarks.
- // Strategy: We will write the benchmark to be runnable. If it fails due to missing GPU in terminal,
- // we'll note that it needs to be run in a browser context (e.g. via the web app or a test runner that supports it).
- // actually, for the purpose of this agent, I'll implement it assuming the environment *might* support it or I'll add a check.
-
- async function runBenchmark(label: string, networkSize: number, batchSize: number, iterations: number) {
-     console.log(`\n--- Benchmark: ${label} ---`);
-     console.log(`Network: ${networkSize} Neurons, Batch: ${batchSize}`);
-
-     const gpu = new GPUEngine();
-
-     try {
-         await gpu.init();
-     } catch (e) {
-         console.error("WebGPU Initialize Failed (Expected in non-browser env):", e);
-         return;
-     }
-
-     // Prepare Data
-     const weights = new Float32Array(networkSize * networkSize); // Full connectivity
-     const biases = new Float32Array(networkSize);
-     const inputs = new Float32Array(networkSize * batchSize);
-     const targets = new Float32Array(networkSize * batchSize);
-
-     // Init Buffers
-     const startObj = performance.now();
-     gpu.prepareBuffers(networkSize, weights, biases, batchSize);
-     gpu.prepareTrainingBuffers(targets, 0.01);
-     const initTime = performance.now() - startObj;
-     console.log(`Initialization/Upload: ${initTime.toFixed(2)}ms`);
-
-     // Warmup
-     await gpu.runTick(inputs);
-
-     // Measure Inference
-     const startInf = performance.now();
-     for (let i = 0; i < iterations; i++) {
-         await gpu.runTick(inputs);
-     }
-     const endInf = performance.now();
-     const infTime = endInf - startInf;
-     const infOPS = (iterations * batchSize) / (infTime / 1000);
-     console.log(`Inference: ${infTime.toFixed(2)}ms for ${iterations} ticks`);
-     console.log(`Throughput: ${infOPS.toFixed(0)} samples/sec`);
-
-     // Measure Training
-     const startTrain = performance.now();
-     for (let i = 0; i < iterations; i++) {
-         await gpu.trainTick();
-     }
-     const endTrain = performance.now();
-     const trainTime = endTrain - startTrain;
-     const trainOPS = (iterations * batchSize) / (trainTime / 1000);
-     console.log(`Training: ${trainTime.toFixed(2)}ms for ${iterations} ticks`);
-     console.log(`Throughput: ${trainOPS.toFixed(0)} samples/sec`);
- }
-
- async function main() {
-     // Small
-     await runBenchmark("Small", 100, 1, 100);
-
-     // Medium
-     await runBenchmark("Medium (Batched)", 1000, 32, 50);
-
-     // Large
-     await runBenchmark("Large (Batched)", 5000, 64, 20);
- }
-
- // Check for WebGPU polyfill or mock if running in Node without headers
- if (!global.navigator?.gpu) {
-     console.log("No WebGPU detected in global scope. Mocking for CLI structure verification...");
-     // @ts-ignore
-     global.navigator = {
-         gpu: { ...({} as any) as GPU,
-             requestAdapter: async () => ({ ...({} as any) as GPUAdapter, // Force cast for mock
-                 requestDevice: async () => ({ ...({} as any) as GPUDevice,
-                     createShaderModule: () => ({} as unknown as GPUShaderModule),
-                     createComputePipeline: () => ({ getBindGroupLayout: () => ({} as unknown as GPUBindGroupLayout) } as unknown as GPUComputePipeline),
-                     createBuffer: (d: any) => ({ getMappedRange: () => new ArrayBuffer(d.size), unmap: () => {}, mapAsync: async () => {} } as unknown as GPUBuffer),
-                     createBindGroup: () => ({} as unknown as GPUBindGroup),
-                     createCommandEncoder: () => ({
-                         beginComputePass: () => ({ setPipeline:()=>{}, setBindGroup:()=>{}, dispatchWorkgroups:()=>{}, end:()=>{} } as unknown as GPUComputePassEncoder),
-                         copyBufferToBuffer: ()=>{},
-                         finish: ()=>(({} as any) as GPUCommandBuffer)
-                     } as unknown as GPUCommandEncoder),
-                     queue: { writeBuffer: ()=>{}, submit: ()=>{} } as unknown as GPUQueue
-                 })
-             })
-         }
-     };
-     // @ts-ignore
-     global.GPUBufferUsage = { STORAGE: 1, COPY_DST: 2, COPY_SRC: 4, UNIFORM: 8, MAP_READ: 16 };
-     // @ts-ignore
-     global.GPUMapMode = { READ: 1 };
- }
-
- main();
@@ -1,67 +0,0 @@
- import { expect, test, describe, mock } from "bun:test";
- import { NeuronRepository, SynapseRepository } from "./repository";
-
- // Mock @buley/dash
- const mockDash = {
-     execute: mock((query, _params) => {
-         // Simple mock implementation
-         if (query.includes("INSERT")) return Promise.resolve();
-         if (query.includes("SELECT * FROM neurons")) return Promise.resolve([
-             { id: "n1", type: "input", bias: 0.1, activation: "tanh" }
-         ]);
-         if (query.includes("SELECT * FROM synapses")) return Promise.resolve([
-             { id: "s1", from_id: "n1", to_id: "n2", weight: 0.5 }
-         ]);
-         return Promise.resolve([]);
-     }),
-     addWithEmbedding: mock(() => Promise.resolve())
- };
-
- // Mock module
- mock.module("@buley/dash", () => ({
-     dash: mockDash
- }));
-
- describe("NeuronRepository", () => {
-     test("create() executes INSERT query", async () => {
-         const repo = new NeuronRepository();
-         await repo.create({ id: "n1", type: "input", bias: 0.1, activation: "tanh" });
-         expect(mockDash.execute).toHaveBeenCalled();
-         const call = mockDash.execute.mock.calls[0];
-         expect(call[0]).toContain("INSERT INTO neurons");
-         expect(call[1]).toEqual(["n1", "input", 0.1, "tanh"]);
-     });
-
-     test("createWithSemantics() calls addWithEmbedding", async () => {
-         const repo = new NeuronRepository();
-         await repo.createWithSemantics(
-             { id: "n2", type: "hidden", bias: 0, activation: "relu" },
-             "detects curves"
-         );
-         expect(mockDash.addWithEmbedding).toHaveBeenCalledWith("n2", "detects curves");
-     });
-
-     test("getAll() returns neurons", async () => {
-         mockDash.execute.mockClear();
-         const repo = new NeuronRepository();
-         const results = await repo.getAll();
-         expect(results.length).toBe(1);
-         expect(results[0].id).toBe("n1");
-     });
- });
-
- describe("SynapseRepository", () => {
-     test("create() executes INSERT query", async () => {
-         const repo = new SynapseRepository();
-         await repo.create({ id: "s1", from_id: "n1", to_id: "n2", weight: 0.5 });
-         expect(mockDash.execute).toHaveBeenCalled();
-         // Check latest call
-     });
-
-     test("getAll() returns synapses", async () => {
-         const repo = new SynapseRepository();
-         const results = await repo.getAll();
-         expect(results.length).toBe(1);
-         expect(results[0].weight).toBe(0.5);
-     });
- });
package/src/db/repository.ts DELETED
@@ -1,44 +0,0 @@
- import { dash } from "@buley/dash";
- import { Neuron, Synapse } from "../types";
-
- export class NeuronRepository {
-     async create(neuron: Neuron): Promise<void> {
-         await dash.execute(
-             "INSERT INTO neurons (id, type, bias, activation) VALUES (?, ?, ?, ?)",
-             [neuron.id, neuron.type, neuron.bias, neuron.activation]
-         );
-     }
-
-     // Feature: Add with semantic embedding
-     async createWithSemantics(neuron: Neuron, description: string): Promise<void> {
-         // We store the structured data normally
-         await this.create(neuron);
-         // And we map the ID to a semantic embedding in dash's hidden semantic store
-         await dash.addWithEmbedding(neuron.id, description);
-     }
-
-     async getAll(): Promise<Neuron[]> {
-         return await dash.execute("SELECT * FROM neurons") as Neuron[];
-     }
-
-     async delete(id: string): Promise<void> {
-         await dash.execute("DELETE FROM neurons WHERE id = ?", [id]);
-     }
- }
-
- export class SynapseRepository {
-     async create(synapse: Synapse): Promise<void> {
-         await dash.execute(
-             "INSERT INTO synapses (id, from_id, to_id, weight) VALUES (?, ?, ?, ?)",
-             [synapse.id, synapse.from_id, synapse.to_id, synapse.weight]
-         );
-     }
-
-     async getAll(): Promise<Synapse[]> {
-         return await dash.execute("SELECT * FROM synapses") as Synapse[];
-     }
-
-     async delete(id: string): Promise<void> {
-         await dash.execute("DELETE FROM synapses WHERE id = ?", [id]);
-     }
- }
package/src/db/schema.ts DELETED
@@ -1,40 +0,0 @@
-
- import { dash } from "@buley/dash";
-
- export async function initializeSchema() {
-     console.log("Initializing Neural Schema...");
-
-     // Neurons Table
-     // id: UUID
-     // type: input, hidden, output
-     // bias: float
-     // activation: string (tanh, relu, sigmoid)
-     await dash.execute(`
-         CREATE TABLE IF NOT EXISTS neurons (
-             id TEXT PRIMARY KEY,
-             type TEXT NOT NULL,
-             bias REAL DEFAULT 0.0,
-             activation TEXT DEFAULT 'tanh',
-             created_at INTEGER DEFAULT (unixepoch())
-         )
-     `);
-
-     // Synapses Table
-     // id: UUID
-     // from_id: neuron UUID
-     // to_id: neuron UUID
-     // weight: float
-     await dash.execute(`
-         CREATE TABLE IF NOT EXISTS synapses (
-             id TEXT PRIMARY KEY,
-             from_id TEXT NOT NULL,
-             to_id TEXT NOT NULL,
-             weight REAL DEFAULT 0.0,
-             created_at INTEGER DEFAULT (unixepoch()),
-             FOREIGN KEY(from_id) REFERENCES neurons(id),
-             FOREIGN KEY(to_id) REFERENCES neurons(id)
-         )
-     `);
-
-     console.log("Schema initialized.");
- }