@openfluke/welvet 0.1.3 → 0.1.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +286 -800
- package/dist/index.browser.d.ts +32 -0
- package/dist/index.browser.js +37 -0
- package/dist/index.d.ts +30 -30
- package/dist/index.js +34 -83
- package/dist/loader.browser.d.ts +5 -0
- package/dist/loader.browser.js +25 -0
- package/dist/loader.d.ts +5 -3
- package/dist/loader.js +25 -89
- package/dist/{loom.wasm → main.wasm} +0 -0
- package/dist/types.d.ts +91 -197
- package/dist/types.js +2 -10
- package/dist/wasm_exec.js +568 -658
- package/package.json +4 -2
- package/dist/env.d.ts +0 -3
- package/dist/env.js +0 -3
- package/dist/transformer.d.ts +0 -5
- package/dist/transformer.js +0 -127
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@openfluke/welvet",
-  "version": "0.1.3",
+  "version": "0.1.4",
   "description": "TypeScript/JavaScript bindings for LOOM neural network framework with WebAssembly support - GPU-accelerated machine learning in the browser",
   "type": "module",
   "main": "./dist/index.js",
@@ -50,7 +50,9 @@
     "clean": "rm -rf dist",
     "build": "tsc -p tsconfig.json",
     "postbuild": "cp -r assets/* dist/",
-    "prepare": "bun run build"
+    "prepare": "bun run build",
+    "serve": "python3 serve.py",
+    "dev": "bun run build && bun run serve"
   },
   "devDependencies": {
     "@types/node": "^22.7.5",
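The new `serve` and `dev` scripts reference a `serve.py` that is not included in the published files, so its exact behavior is not visible in this diff; presumably it is a small static file server for the built `dist/` output, since browser WASM demos must be loaded over HTTP with the correct MIME type. The following TypeScript (Bun) sketch shows the kind of server such a script would provide; the port, the `index.html` fallback, and the directory layout are assumptions, not part of the package.

```ts
// Illustrative only: a static server over dist/, the role serve.py presumably plays.
// Port, fallback page, and paths are assumptions.
const server = Bun.serve({
  port: 8080, // assumed port
  async fetch(req) {
    const path = new URL(req.url).pathname;
    const target = "dist" + (path === "/" ? "/index.html" : path);
    const asset = Bun.file(target);
    if (!(await asset.exists())) {
      return new Response("Not found", { status: 404 });
    }
    // Browsers expect application/wasm for WebAssembly.instantiateStreaming.
    const headers = target.endsWith(".wasm")
      ? { "Content-Type": "application/wasm" }
      : undefined;
    return new Response(asset, { headers });
  },
});

console.log(`Serving dist/ on http://localhost:${server.port}`);
```

With a server like this in place, `bun run dev` simply rebuilds and then serves the bundle for in-browser testing.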
package/dist/env.d.ts
DELETED
package/dist/env.js
DELETED
package/dist/transformer.d.ts
DELETED
package/dist/transformer.js
DELETED
@@ -1,127 +0,0 @@
-/**
- * Helper to wait for a global function to be available
- */
-async function waitForGlobal(name, timeoutMs = 5000) {
-    const t0 = performance.now();
-    for (;;) {
-        if (globalThis[name])
-            return;
-        if (performance.now() - t0 > timeoutMs) {
-            throw new Error(`Timeout waiting for ${name}`);
-        }
-        await new Promise((r) => setTimeout(r, 10));
-    }
-}
-/**
- * Create transformer API wrapper around WASM functions
- */
-export async function createTransformerAPI() {
-    // Wait for transformer functions to be available
-    await Promise.all([
-        waitForGlobal("LoadTokenizerFromBytes"),
-        waitForGlobal("LoadTransformerFromBytes"),
-        waitForGlobal("EncodeText"),
-        waitForGlobal("DecodeTokens"),
-        waitForGlobal("GenerateNextToken"),
-        waitForGlobal("GenerateText"),
-    ]);
-    const g = globalThis;
-    return {
-        async loadTokenizer(tokenizerData) {
-            return new Promise((resolve, reject) => {
-                try {
-                    const resultStr = g.LoadTokenizerFromBytes(tokenizerData);
-                    // If it's already an object, return it directly
-                    if (typeof resultStr === "object") {
-                        resolve(resultStr);
-                        return;
-                    }
-                    const result = JSON.parse(resultStr);
-                    resolve(result);
-                }
-                catch (error) {
-                    reject(error);
-                }
-            });
-        },
-        async loadModel(configData, weightsData) {
-            return new Promise((resolve, reject) => {
-                try {
-                    const resultStr = g.LoadTransformerFromBytes(configData, weightsData);
-                    const result = JSON.parse(resultStr);
-                    resolve(result);
-                }
-                catch (error) {
-                    reject(error);
-                }
-            });
-        },
-        async encode(text, addSpecialTokens = true) {
-            return new Promise((resolve, reject) => {
-                try {
-                    const resultStr = g.EncodeText(text, addSpecialTokens);
-                    const result = JSON.parse(resultStr);
-                    resolve(result);
-                }
-                catch (error) {
-                    reject(error);
-                }
-            });
-        },
-        async decode(tokenIds, skipSpecialTokens = true) {
-            return new Promise((resolve, reject) => {
-                try {
-                    const resultStr = g.DecodeTokens(tokenIds, skipSpecialTokens);
-                    const result = JSON.parse(resultStr);
-                    resolve(result);
-                }
-                catch (error) {
-                    reject(error);
-                }
-            });
-        },
-        async generate(prompt, maxTokens = 50, temperature = 0.7) {
-            return new Promise((resolve, reject) => {
-                try {
-                    const resultStr = g.GenerateText(prompt, maxTokens, temperature);
-                    const result = JSON.parse(resultStr);
-                    resolve(result);
-                }
-                catch (error) {
-                    reject(error);
-                }
-            });
-        },
-        async *generateStream(prompt, maxTokens = 50, temperature = 0.7) {
-            // Encode the prompt
-            const encodeResultStr = g.EncodeText(prompt, true);
-            const encodeResult = JSON.parse(encodeResultStr);
-            if (!encodeResult.success || !encodeResult.ids) {
-                throw new Error(encodeResult.error || "Failed to encode prompt");
-            }
-            const tokens = [...encodeResult.ids];
-            // Generate tokens one at a time
-            for (let i = 0; i < maxTokens; i++) {
-                const resultStr = g.GenerateNextToken(tokens, temperature);
-                const result = JSON.parse(resultStr);
-                if (!result.success) {
-                    throw new Error(result.error || "Failed to generate token");
-                }
-                if (result.token === undefined) {
-                    break;
-                }
-                tokens.push(result.token);
-                // Decode just this token
-                const decodeResultStr = g.DecodeTokens([result.token], true);
-                const decodeResult = JSON.parse(decodeResultStr);
-                if (decodeResult.success && decodeResult.text) {
-                    yield decodeResult.text;
-                }
-                // Check for end of sequence
-                if (result.is_eos) {
-                    break;
-                }
-            }
-        },
-    };
-}
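For context on what 0.1.4 drops, the removed `createTransformerAPI` wrapped the WASM globals (`LoadTokenizerFromBytes`, `LoadTransformerFromBytes`, `EncodeText`, `DecodeTokens`, `GenerateText`, `GenerateNextToken`) behind promise-based methods plus an async-generator streaming path. A minimal usage sketch of that 0.1.3-era module follows; the import path, the argument types, and how the model bytes are obtained are assumptions, and the LOOM WASM module must already be instantiated so the globals exist.

```ts
// Sketch of how the now-removed wrapper was driven. Assumes the LOOM WASM
// runtime is already running (createTransformerAPI only polls for its globals)
// and that the caller has fetched the tokenizer/config/weights bytes itself.
import { createTransformerAPI } from "./transformer.js"; // path as shipped in 0.1.3's dist/

async function run(
  tokenizerBytes: Uint8Array,
  configBytes: Uint8Array,
  weightsBytes: Uint8Array,
) {
  const api = await createTransformerAPI();

  await api.loadTokenizer(tokenizerBytes);        // wraps LoadTokenizerFromBytes
  await api.loadModel(configBytes, weightsBytes); // wraps LoadTransformerFromBytes

  // One-shot generation: prompt, max tokens, temperature.
  const full = await api.generate("Hello, LOOM", 50, 0.7);
  console.log(full);

  // Token-by-token streaming via the async generator.
  for await (const piece of api.generateStream("Hello, LOOM", 50, 0.7)) {
    console.log(piece);
  }
}
```

Since this module (along with env.js) is gone in 0.1.4, code that imported it would need to either call the WASM globals directly or stay on 0.1.3.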