@openfluke/welvet 0.1.0

package/README.md ADDED
@@ -0,0 +1,656 @@
1
+ # @openfluke/welvet
2
+
3
+ > TypeScript/JavaScript bindings for the LOOM neural network framework via WebAssembly
4
+
5
+ **Welvet** (Wrapper for Embedding Loom Via External Toolchain) provides a complete neural network API in the browser and Node.js/Bun environments. Built on WebAssembly, it delivers high-performance deep learning with zero dependencies.
6
+
7
+ [![npm version](https://img.shields.io/npm/v/@openfluke/welvet.svg)](https://www.npmjs.com/package/@openfluke/welvet)
8
+ [![License](https://img.shields.io/badge/license-Apache%202.0-blue.svg)](LICENSE)
9
+
10
+ ## ✨ Features
11
+
12
+ - 🚀 **5.4MB WASM Binary** - Complete neural network framework compiled to WebAssembly
13
+ - 🧠 **All 5 Layer Types** - Dense, Conv2D, Multi-Head Attention, RNN, LSTM fully supported
14
+ - 🎯 **Registry-based Initialization** - Dynamic layer creation via `CallLayerInit()` with zero manual exports
15
+ - 🔍 **Runtime Introspection** - Discover methods, signatures, and parameters dynamically
16
+ - 💾 **Model Serialization** - Save/load models as JSON (no filesystem required)
17
+ - ⚡ **Full Training Support** - Train networks with `network.Train()` API and automatic gradients
18
+ - 📘 **Full TypeScript Support** - Complete type definitions for IntelliSense
19
+ - 🎯 **Zero Dependencies** - Pure WASM + Go runtime, no external libs
20
+ - 🌐 **Isomorphic** - Works in browsers, Node.js, Bun, and Deno
21
+ - 🎨 **Multiple Activation Functions** - ReLU, Sigmoid, Tanh, Softplus, LeakyReLU, Linear
22
+ - ⚠️ **CPU-Only** (GPU support via WebGPU coming soon)
23
+
24
+ ## 📦 Installation
25
+
26
+ ```bash
27
+ npm install @openfluke/welvet
28
+ ```
29
+
30
+ Or with your preferred package manager:
31
+
32
+ ```bash
33
+ # Yarn
34
+ yarn add @openfluke/welvet
35
+
36
+ # pnpm
37
+ pnpm add @openfluke/welvet
38
+
39
+ # Bun
40
+ bun add @openfluke/welvet
41
+ ```
42
+
43
+ ## 🚀 Quick Start
44
+
45
+ ```typescript
46
+ import { initLoom, ActivationType } from "@openfluke/welvet";
47
+
48
+ // Initialize the WASM module
49
+ const loom = await initLoom();
50
+
51
+ // Create a neural network: 784 inputs → 392 hidden → 10 outputs
52
+ const network = loom.NewNetwork(784, 1, 1, 2);
53
+
54
+ // Configure layers using registry-based initialization
55
+ const layer0 = loom.CallLayerInit(
56
+ "InitDenseLayer",
57
+ JSON.stringify([784, 392, ActivationType.ReLU])
58
+ );
59
+ const layer1 = loom.CallLayerInit(
60
+ "InitDenseLayer",
61
+ JSON.stringify([392, 10, ActivationType.Sigmoid])
62
+ );
63
+
64
+ network.SetLayer(JSON.stringify([0, 0, 0, JSON.parse(layer0)]));
65
+ network.SetLayer(JSON.stringify([0, 0, 1, JSON.parse(layer1)]));
66
+
67
+ // Forward pass
68
+ const input = new Array(784).fill(0).map(() => Math.random());
69
+ const resultJSON = network.ForwardCPU(JSON.stringify([input]));
70
+ const [output, duration] = JSON.parse(resultJSON);
71
+
72
+ console.log("Output:", output);
73
+ console.log("Inference time:", duration / 1e6, "ms");
74
+
75
+ // Training with the high-level API
76
+ const batches = [
77
+ { Input: input, Target: [0, 0, 0, 0, 0, 0, 0, 0, 0, 1] }, // Example: classify as "9"
78
+ ];
79
+ const config = {
80
+ Epochs: 10,
81
+ LearningRate: 0.01,
82
+ GradientClip: 1.0,
83
+ LossType: "mse",
84
+ };
85
+
86
+ const trainingResult = network.Train(JSON.stringify([batches, config]));
87
+ const result = JSON.parse(trainingResult);
88
+ console.log("Final loss:", result.FinalLoss);
89
+ ```
90
+
91
+ ## 📚 API Reference
92
+
93
+ ### Initialization
94
+
95
+ ```typescript
96
+ interface InitOptions {
97
+ wasmUrl?: string | URL; // Custom WASM file location
98
+ injectGoRuntime?: boolean; // Include Go runtime (default: true)
99
+ }
100
+
101
+ const loom = await initLoom(); // optionally pass an InitOptions object
102
+ ```
103
+
104
+ ### Creating Networks
105
+
106
+ ```typescript
107
+ const network = loom.NewNetwork(
108
+ inputSize: number, // Input layer size
109
+ gridRows: number, // Grid rows (use 1 for simple networks)
110
+ gridCols: number, // Grid columns (use 1 for simple networks)
111
+ layersPerCell: number // Number of layers
112
+ );
113
+ ```
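+
+ As a concrete illustration (the sizes are arbitrary, not taken from the package docs), a plain feed-forward model usually needs only a single grid cell:
+
+ ```typescript
+ import { initLoom } from "@openfluke/welvet";
+
+ const loom = await initLoom();
+
+ // 64 inputs, a single 1x1 grid cell, 3 layers stacked inside that cell.
+ // The layers themselves are attached afterwards with SetLayer (see below).
+ const network = loom.NewNetwork(64, 1, 1, 3);
+ ```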
114
+
115
+ ### Layer Types
116
+
117
+ All layer types are created via the registry system using `CallLayerInit()`:
118
+
119
+ #### Dense (Fully-Connected) Layer
120
+
121
+ ```typescript
122
+ const config = loom.CallLayerInit(
123
+ "InitDenseLayer",
124
+ JSON.stringify([
125
+ inputSize: number,
126
+ outputSize: number,
127
+ activation: ActivationType,
128
+ ])
129
+ );
130
+ ```
131
+
132
+ #### Conv2D Layer
133
+
134
+ ```typescript
135
+ const config = loom.CallLayerInit(
136
+ "InitConv2DLayer",
137
+ JSON.stringify([
138
+ height: number, // Input height
139
+ width: number, // Input width
140
+ channels: number, // Input channels
141
+ filters: number, // Number of output filters
142
+ kernelSize: number, // Kernel size (e.g., 3 for 3x3)
143
+ stride: number, // Stride (typically 1 or 2)
144
+ padding: number, // Padding (typically 0 or 1)
145
+ activation: ActivationType,
146
+ ])
147
+ );
148
+ ```
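+
+ For example, a 28×28 single-channel input with eight 3×3 filters could be configured as below; the dimensions are illustrative placeholders, not values prescribed by the package.
+
+ ```typescript
+ import { initLoom, ActivationType } from "@openfluke/welvet";
+
+ const loom = await initLoom();
+
+ // height=28, width=28, channels=1, filters=8, kernel=3, stride=1, padding=1
+ const convConfig = loom.CallLayerInit(
+   "InitConv2DLayer",
+   JSON.stringify([28, 28, 1, 8, 3, 1, 1, ActivationType.ReLU])
+ );
+ ```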
149
+
150
+ #### Multi-Head Attention Layer
151
+
152
+ ```typescript
153
+ const config = loom.CallLayerInit(
154
+ "InitMultiHeadAttentionLayer",
155
+ JSON.stringify([
156
+ seqLength: number, // Sequence length
157
+ dModel: number, // Model dimension
158
+ numHeads: number, // Number of attention heads
159
+ activation: ActivationType,
160
+ ])
161
+ );
162
+ ```
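+
+ A quick sketch with placeholder sizes (a 16-token sequence, 64-dimensional model, 8 heads); `loom` and `ActivationType` are assumed to come from the imports shown in the Quick Start.
+
+ ```typescript
+ // 64-dim model split across 8 heads (8 dims per head)
+ const attnConfig = loom.CallLayerInit(
+   "InitMultiHeadAttentionLayer",
+   JSON.stringify([16, 64, 8, ActivationType.Linear])
+ );
+ ```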
163
+
164
+ #### RNN Layer
165
+
166
+ ```typescript
167
+ const config = loom.CallLayerInit(
168
+ "InitRNNLayer",
169
+ JSON.stringify([
170
+ inputSize: number, // Input feature size
171
+ hiddenSize: number, // Hidden state size
172
+ seqLength: number, // Sequence length
173
+ outputSize: number, // Output size (hiddenSize * seqLength)
174
+ ])
175
+ );
176
+ ```
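+
+ For instance, 10 timesteps of 8 features with a 32-unit hidden state (placeholder numbers; `loom` comes from `await initLoom()` as before):
+
+ ```typescript
+ // outputSize follows the hiddenSize * seqLength convention noted above.
+ const rnnConfig = loom.CallLayerInit(
+   "InitRNNLayer",
+   JSON.stringify([8, 32, 10, 32 * 10])
+ );
+ ```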
177
+
178
+ #### LSTM Layer
179
+
180
+ ```typescript
181
+ const config = loom.CallLayerInit(
182
+ "InitLSTMLayer",
183
+ JSON.stringify([
184
+ inputSize: number, // Input feature size
185
+ hiddenSize: number, // Hidden/cell state size
186
+ seqLength: number, // Sequence length
187
+ outputSize: number, // Output size (hiddenSize * seqLength)
188
+ ])
189
+ );
190
+ ```
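+
+ The LSTM config follows the same shape convention; again the numbers below are only illustrative.
+
+ ```typescript
+ const lstmConfig = loom.CallLayerInit(
+   "InitLSTMLayer",
+   JSON.stringify([8, 32, 10, 32 * 10])
+ );
+ ```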
191
+
192
+ **Activation Types:**
193
+
194
+ ```typescript
195
+ enum ActivationType {
196
+ ReLU = 0, // Scaled ReLU (1.1x)
197
+ Sigmoid = 1, // Logistic sigmoid
198
+ Tanh = 2, // Hyperbolic tangent
199
+ Softplus = 3, // Smooth ReLU
200
+ LeakyReLU = 4, // ReLU with 0.1x negative slope
201
+ Linear = 5, // Identity (no activation)
202
+ }
203
+ ```
204
+
+ **Setting Layers:**
+
+ Once a layer config has been created, attach it to the network with `SetLayer`:
+
+ ```typescript
+ network.SetLayer(JSON.stringify([
+   gridRow,    // Grid row index
+   gridCol,    // Grid column index
+   layerIndex, // Layer index within the cell
+   JSON.parse(config)
+ ]));
+ ```
229
+
230
+ ### Training Operations
231
+
232
+ #### Forward Pass
233
+
234
+ ```typescript
235
+ const input = [0.1, 0.2, 0.3, 0.4];
236
+ const resultJSON = network.ForwardCPU(JSON.stringify([input]));
237
+ const [output, duration] = JSON.parse(resultJSON);
238
+ ```
239
+
240
+ #### Backward Pass
241
+
242
+ ```typescript
243
+ const gradOutput = new Array(outputSize).fill(0.01);
244
+ const backwardJSON = network.BackwardCPU(JSON.stringify([gradOutput]));
245
+ const [gradInput, duration] = JSON.parse(backwardJSON);
246
+ ```
247
+
248
+ #### Update Weights
249
+
250
+ ```typescript
251
+ const learningRate = 0.01;
252
+ network.UpdateWeights(JSON.stringify([learningRate]));
253
+ ```
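+
+ The same loop can be driven by the higher-level `network.Train()` API shown in the Quick Start. A minimal sketch, assuming a network with four inputs and two outputs and reusing the config fields from the Quick Start (the data here is dummy):
+
+ ```typescript
+ const batches = [{ Input: [0.1, 0.2, 0.3, 0.4], Target: [0, 1] }];
+
+ const config = {
+   Epochs: 20,
+   LearningRate: 0.01,
+   GradientClip: 1.0,
+   LossType: "mse",
+ };
+
+ const result = JSON.parse(network.Train(JSON.stringify([batches, config])));
+ console.log("Final loss:", result.FinalLoss);
+ ```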
254
+
255
+ ### Model Persistence
256
+
257
+ #### Save Model
258
+
259
+ ```typescript
260
+ const modelJSON = network.SaveModelToString(JSON.stringify(["model_name"]));
261
+ const model = JSON.parse(JSON.parse(modelJSON)[0]);
262
+
263
+ // Store anywhere (localStorage, IndexedDB, etc.)
264
+ localStorage.setItem("my_model", JSON.stringify(model));
265
+ ```
266
+
267
+ #### Load Model
268
+
269
+ ```typescript
270
+ const savedModel = JSON.parse(localStorage.getItem("my_model")!);
271
+ const network = loom.LoadModelFromString(
272
+ JSON.stringify(savedModel),
273
+ "model_name"
274
+ );
275
+ ```
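+
+ Outside the browser, the same strings can be written to disk instead of `localStorage`. A minimal Node/Bun sketch using `node:fs` (not part of this package); the file name is arbitrary:
+
+ ```typescript
+ import { readFileSync, writeFileSync } from "node:fs";
+
+ // SaveModelToString returns a JSON array whose first element is the model JSON string.
+ const modelJSON = network.SaveModelToString(JSON.stringify(["model_name"]));
+ writeFileSync("model.json", JSON.parse(modelJSON)[0]);
+
+ // Later (or in another process): feed the stored JSON string back in.
+ const restored = loom.LoadModelFromString(
+   readFileSync("model.json", "utf8"),
+   "model_name"
+ );
+ ```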
276
+
277
+ ### Runtime Introspection
278
+
279
+ #### Get All Methods
280
+
281
+ ```typescript
282
+ const methodsJSON = network.GetMethods();
283
+ const methods = JSON.parse(methodsJSON);
284
+
285
+ methods.forEach((method) => {
286
+ console.log(
287
+ `${method.method_name}(${method.parameters.map((p) => p.type).join(", ")})`
288
+ );
289
+ });
290
+ ```
291
+
292
+ #### Check Method Availability
293
+
294
+ ```typescript
295
+ if (network.HasMethod("ForwardGPU")) {
296
+ const signature = network.GetMethodSignature(JSON.stringify(["ForwardGPU"]));
297
+ console.log(signature);
298
+ }
299
+ ```
300
+
301
+ #### List Method Names
302
+
303
+ ```typescript
304
+ const names = JSON.parse(network.ListMethods());
305
+ console.log("Available methods:", names);
306
+ ```
307
+
308
+ ## 🎨 Activation Functions
309
+
310
+ ```typescript
311
+ enum ActivationType {
312
+ ReLU = 0, // Rectified Linear Unit (scaled 1.1x)
+ Sigmoid = 1, // Sigmoid (logistic)
+ Tanh = 2, // Hyperbolic tangent
+ Softplus = 3, // Smooth ReLU
+ LeakyReLU = 4, // ReLU with 0.1x negative slope
+ Linear = 5, // No activation (identity)
316
+ }
317
+ ```
318
+
319
+ ## 💡 Complete Examples
320
+
321
+ ### MNIST-Style Classifier
322
+
323
+ ```typescript
324
+ import { initLoom, ActivationType } from "@openfluke/welvet";
325
+
326
+ async function trainMNIST() {
327
+ const loom = await initLoom();
328
+
329
+ // Network: 784 → 128 → 64 → 10
330
+ const network = loom.NewNetwork(784, 1, 1, 3);
331
+
332
+ const layer0 = loom.CallLayerInit("InitDenseLayer", JSON.stringify([784, 128, ActivationType.ReLU]));
+ const layer1 = loom.CallLayerInit("InitDenseLayer", JSON.stringify([128, 64, ActivationType.ReLU]));
+ const layer2 = loom.CallLayerInit("InitDenseLayer", JSON.stringify([64, 10, ActivationType.Sigmoid]));
335
+
336
+ network.SetLayer(JSON.stringify([0, 0, 0, JSON.parse(layer0)]));
337
+ network.SetLayer(JSON.stringify([0, 0, 1, JSON.parse(layer1)]));
338
+ network.SetLayer(JSON.stringify([0, 0, 2, JSON.parse(layer2)]));
339
+
340
+ // Training loop
341
+ const epochs = 50;
342
+ const learningRate = 0.01;
343
+
344
+ for (let epoch = 0; epoch < epochs; epoch++) {
345
+ // Your training data here
346
+ const input = new Array(784).fill(0).map(() => Math.random());
347
+ const target = new Array(10).fill(0);
348
+ target[Math.floor(Math.random() * 10)] = 1;
349
+
350
+ // Forward
351
+ const [output] = JSON.parse(network.ForwardCPU(JSON.stringify([input])));
352
+
353
+ // Compute loss (MSE)
354
+ const loss =
355
+ output.reduce((sum, val, i) => sum + Math.pow(val - target[i], 2), 0) /
356
+ output.length;
357
+
358
+ // Backward
359
+ const gradOutput = output.map(
360
+ (val, i) => (2 * (val - target[i])) / output.length
361
+ );
362
+ network.BackwardCPU(JSON.stringify([gradOutput]));
363
+
364
+ // Update
365
+ network.UpdateWeights(JSON.stringify([learningRate]));
366
+
367
+ if (epoch % 10 === 0) {
368
+ console.log(`Epoch ${epoch}: Loss = ${loss.toFixed(6)}`);
369
+ }
370
+ }
371
+
372
+ // Save model
373
+ const modelJSON = network.SaveModelToString(JSON.stringify(["mnist"]));
374
+ localStorage.setItem("mnist_model", JSON.parse(modelJSON)[0]);
375
+ }
376
+ ```
377
+
378
+ ### XOR Problem
379
+
380
+ ```typescript
381
+ import { initLoom, ActivationType } from "@openfluke/welvet";
382
+
383
+ const loom = await initLoom();
384
+ const network = loom.NewNetwork(2, 1, 1, 2);
385
+
386
+ // 2 → 4 → 1 (XOR needs hidden layer)
387
+ const layer0 = loom.CallLayerInit("InitDenseLayer", JSON.stringify([2, 4, ActivationType.ReLU]));
+ const layer1 = loom.CallLayerInit("InitDenseLayer", JSON.stringify([4, 1, ActivationType.Sigmoid]));
389
+
390
+ network.SetLayer(JSON.stringify([0, 0, 0, JSON.parse(layer0)]));
391
+ network.SetLayer(JSON.stringify([0, 0, 1, JSON.parse(layer1)]));
392
+
393
+ const trainingData = [
394
+ { input: [0, 0], target: [0] },
395
+ { input: [0, 1], target: [1] },
396
+ { input: [1, 0], target: [1] },
397
+ { input: [1, 1], target: [0] },
398
+ ];
399
+
400
+ for (let epoch = 0; epoch < 1000; epoch++) {
401
+ let totalLoss = 0;
402
+
403
+ for (const sample of trainingData) {
404
+ const [output] = JSON.parse(
405
+ network.ForwardCPU(JSON.stringify([sample.input]))
406
+ );
407
+ const loss = Math.pow(output[0] - sample.target[0], 2);
408
+ totalLoss += loss;
409
+
410
+ const gradOutput = [2 * (output[0] - sample.target[0])];
411
+ network.BackwardCPU(JSON.stringify([gradOutput]));
412
+ network.UpdateWeights(JSON.stringify([0.1]));
413
+ }
414
+
415
+ if (epoch % 100 === 0) {
416
+ console.log(`Epoch ${epoch}: Loss = ${(totalLoss / 4).toFixed(6)}`);
417
+ }
418
+ }
419
+
420
+ // Test
421
+ trainingData.forEach((sample) => {
422
+ const [output] = JSON.parse(
423
+ network.ForwardCPU(JSON.stringify([sample.input]))
424
+ );
425
+ console.log(
426
+ `${sample.input} → ${output[0].toFixed(4)} (expected ${sample.target[0]})`
427
+ );
428
+ });
429
+ ```
430
+
431
+ ## 🌐 Browser Usage
432
+
433
+ ### Via CDN (UMD)
434
+
435
+ ```html
436
+ <!DOCTYPE html>
437
+ <html>
438
+ <head>
439
+ <script src="https://unpkg.com/@openfluke/welvet"></script>
440
+ </head>
441
+ <body>
442
+ <script>
443
+ (async () => {
444
+ const { initLoom, ActivationType } = window.Welvet;
445
+ const loom = await initLoom();
446
+
447
+ const network = loom.NewNetwork(4, 1, 1, 1);
448
+ console.log("LOOM ready!");
449
+ })();
450
+ </script>
451
+ </body>
452
+ </html>
453
+ ```
454
+
455
+ ### Via ES Modules
456
+
457
+ ```html
458
+ <!DOCTYPE html>
459
+ <html>
460
+ <head>
461
+ <script type="module">
462
+ import {
463
+ initLoom,
464
+ ActivationType,
465
+ } from "https://unpkg.com/@openfluke/welvet/dist/esm/index.js";
466
+
467
+ const loom = await initLoom();
468
+ const network = loom.NewNetwork(4, 1, 1, 1);
469
+ console.log("LOOM ready!");
470
+ </script>
471
+ </head>
472
+ </html>
473
+ ```
474
+
475
+ ## ⚛️ Framework Integration
476
+
477
+ ### React
478
+
479
+ ```tsx
480
+ import { useEffect, useState } from "react";
481
+ import { initLoom, type LoomAPI } from "@openfluke/welvet";
482
+
483
+ function NeuralNetworkComponent() {
484
+ const [loom, setLoom] = useState<LoomAPI | null>(null);
485
+ const [prediction, setPrediction] = useState<number[] | null>(null);
486
+
487
+ useEffect(() => {
488
+ initLoom().then((api) => {
489
+ setLoom(api);
490
+
491
+ // Initialize network
492
+ const network = api.NewNetwork(4, 1, 1, 2);
493
+ const layer0 = api.CallLayerInit("InitDenseLayer", JSON.stringify([4, 8, 0])); // ReLU
+ const layer1 = api.CallLayerInit("InitDenseLayer", JSON.stringify([8, 2, 1])); // Sigmoid
495
+
496
+ network.SetLayer(JSON.stringify([0, 0, 0, JSON.parse(layer0)]));
497
+ network.SetLayer(JSON.stringify([0, 0, 1, JSON.parse(layer1)]));
498
+
499
+ // Make prediction
500
+ const input = [0.5, 0.3, 0.2, 0.1];
501
+ const [output] = JSON.parse(network.ForwardCPU(JSON.stringify([input])));
502
+ setPrediction(output);
503
+ });
504
+ }, []);
505
+
506
+ if (!loom) return <div>Loading neural network...</div>;
507
+
508
+ return (
509
+ <div>
510
+ <h2>Prediction: {prediction?.map((v) => v.toFixed(4)).join(", ")}</h2>
511
+ </div>
512
+ );
513
+ }
514
+ ```
515
+
516
+ ### Vue 3
517
+
518
+ ```vue
519
+ <script setup lang="ts">
520
+ import { ref, onMounted } from "vue";
521
+ import { initLoom, type LoomAPI } from "@openfluke/welvet";
522
+
523
+ const loom = ref<LoomAPI | null>(null);
524
+ const output = ref<number[] | null>(null);
525
+
526
+ onMounted(async () => {
527
+ const api = await initLoom();
528
+ loom.value = api;
529
+
530
+ const network = api.NewNetwork(2, 1, 1, 1);
531
+ const layer = api.CallLayerInit("InitDenseLayer", JSON.stringify([2, 1, 1])); // Sigmoid
532
+ network.SetLayer(JSON.stringify([0, 0, 0, JSON.parse(layer)]));
533
+
534
+ const [result] = JSON.parse(network.ForwardCPU(JSON.stringify([[0.5, 0.5]])));
535
+ output.value = result;
536
+ });
537
+ </script>
538
+
539
+ <template>
540
+ <div v-if="!loom">Loading...</div>
541
+ <div v-else>
542
+ <h2>Neural Network Output</h2>
543
+ <pre>{{ output }}</pre>
544
+ </div>
545
+ </template>
546
+ ```
547
+
548
+ ### Svelte
549
+
550
+ ```svelte
551
+ <script lang="ts">
552
+ import { onMount } from 'svelte';
553
+ import { initLoom, type LoomAPI } from '@openfluke/welvet';
554
+
555
+ let loom: LoomAPI | null = null;
556
+ let result: number[] = [];
557
+
558
+ onMount(async () => {
559
+ loom = await initLoom();
560
+
561
+ const network = loom.NewNetwork(3, 1, 1, 1);
562
+ const layer = loom.CallLayerInit("InitDenseLayer", JSON.stringify([3, 2, 0])); // ReLU
563
+ network.SetLayer(JSON.stringify([0, 0, 0, JSON.parse(layer)]));
564
+
565
+ const [output] = JSON.parse(network.ForwardCPU(JSON.stringify([[1, 2, 3]])));
566
+ result = output;
567
+ });
568
+ </script>
569
+
570
+ {#if !loom}
571
+ <p>Loading neural network...</p>
572
+ {:else}
573
+ <h2>Result: {result.join(', ')}</h2>
574
+ {/if}
575
+ ```
576
+
577
+ ## 🔧 Advanced Configuration
578
+
579
+ ### Custom WASM Location
580
+
581
+ ```typescript
582
+ const loom = await initLoom({
583
+ wasmUrl: "/custom/path/loom.wasm",
584
+ });
585
+ ```
586
+
587
+ ### Skip Go Runtime Injection
588
+
589
+ ```typescript
590
+ // Useful if you're loading the Go runtime separately
591
+ const loom = await initLoom({
592
+ injectGoRuntime: false,
593
+ });
594
+ ```
595
+
596
+ ## 📊 Performance Tips
597
+
598
+ 1. **Batch Processing** - Process multiple inputs together when possible
599
+ 2. **Model Caching** - Save trained models to avoid retraining
600
+ 3. **Layer Sizing** - Start with smaller layers and scale up as needed
601
+ 4. **Learning Rate** - Tune learning rate for faster convergence (typically 0.001 - 0.1)
602
+ 5. **Activation Functions** - ReLU often trains faster than Sigmoid/Tanh
603
+
604
+ ## 🐛 Troubleshooting
605
+
606
+ ### WASM fails to load
607
+
608
+ Ensure your server serves `.wasm` files with the correct MIME type:
609
+
610
+ ```
611
+ Content-Type: application/wasm
612
+ ```
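+
+ How you set this depends on your server. As one hedged example, with an Express static server (Express is not a dependency of this package) the header can be forced for `.wasm` files:
+
+ ```typescript
+ import express from "express";
+
+ const app = express();
+
+ app.use(
+   express.static("public", {
+     setHeaders: (res, filePath) => {
+       if (filePath.endsWith(".wasm")) {
+         res.setHeader("Content-Type", "application/wasm");
+       }
+     },
+   })
+ );
+
+ app.listen(3000);
+ ```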
613
+
614
+ ### Module not found errors
615
+
616
+ Make sure to await the initialization:
617
+
618
+ ```typescript
619
+ const loom = await initLoom(); // Don't forget await!
620
+ ```
621
+
622
+ ### JSON parsing errors
623
+
624
+ All network methods use JSON string parameters:
625
+
626
+ ```typescript
627
+ // ✅ Correct
628
+ network.ForwardCPU(JSON.stringify([input]));
629
+
630
+ // ❌ Wrong
631
+ network.ForwardCPU(input);
632
+ ```
633
+
634
+ ## 🔗 Related Projects
635
+
636
+ - **Python Package**: [`welvet`](https://pypi.org/project/welvet/) - Python bindings for LOOM
637
+ - **Go Framework**: [LOOM](https://github.com/openfluke/loom) - Original Go implementation
638
+ - **Legacy Package**: [`@openfluke/portal`](https://github.com/openfluke/portal) - Previous generation framework
639
+
640
+ ## 📄 License
641
+
642
+ Apache-2.0 © 2025 OpenFluke
643
+
644
+ ## 🤝 Contributing
645
+
646
+ Contributions are welcome! Please see the [main repository](https://github.com/openfluke/loom) for guidelines.
647
+
648
+ ## 📞 Support
649
+
650
+ - 🐛 [Report Issues](https://github.com/openfluke/loom/issues)
651
+ - 💬 [Discussions](https://github.com/openfluke/loom/discussions)
652
+ - 📖 [Documentation](https://github.com/openfluke/loom/tree/main/typescript)
653
+
654
+ ---
655
+
656
+ **Built with ❤️ by the OpenFluke team**
package/dist/env.d.ts ADDED
@@ -0,0 +1,3 @@
1
+ export declare const isBrowser: boolean;
2
+ export declare const isNode: boolean;
3
+ export declare const isBun: boolean;
package/dist/env.js ADDED
@@ -0,0 +1,3 @@
1
+ export const isBrowser = typeof window !== "undefined" && typeof document !== "undefined";
2
+ export const isNode = typeof process !== "undefined" && !!process.versions?.node;
3
+ export const isBun = typeof globalThis.Bun !== "undefined";
@@ -0,0 +1,31 @@
1
+ import type { InitOptions, LoomAPI } from "./types";
2
+ /**
3
+ * Initialize the LOOM WASM module and return the API
4
+ *
5
+ * @example
6
+ * ```typescript
7
+ * import { initLoom, ActivationType } from '@openfluke/welvet';
8
+ *
9
+ * const loom = await initLoom();
10
+ *
11
+ * // Create a network: 784 → 392 → 10
12
+ * const network = loom.NewNetwork(784, 1, 1, 2);
13
+ *
14
+ * // Configure layers
15
+ * const layer0 = loom.InitDenseLayer(784, 392, ActivationType.ReLU);
16
+ * const layer1 = loom.InitDenseLayer(392, 10, ActivationType.Sigmoid);
17
+ *
18
+ * network.SetLayer(JSON.stringify([0, 0, 0, JSON.parse(layer0)]));
19
+ * network.SetLayer(JSON.stringify([0, 0, 1, JSON.parse(layer1)]));
20
+ *
21
+ * // Forward pass
22
+ * const input = new Array(784).fill(0).map(() => Math.random());
23
+ * const resultJSON = network.ForwardCPU(JSON.stringify([input]));
24
+ * const [output, duration] = JSON.parse(resultJSON);
25
+ *
26
+ * console.log('Output:', output);
27
+ * ```
28
+ */
29
+ export declare function initLoom(opts?: InitOptions): Promise<LoomAPI>;
30
+ export type { LoomAPI, LoomNetwork, InitOptions } from "./types";
31
+ export { ActivationType } from "./types";