@forbocai/browser 0.4.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +25 -0
- package/dist/index.d.ts +25 -0
- package/dist/index.js +173 -0
- package/dist/index.mjs +140 -0
- package/package.json +29 -0
package/dist/index.d.mts
ADDED
@@ -0,0 +1,25 @@
+import { CortexConfig, ICortex, MemoryType, MemoryItem, MemoryConfig } from '@forbocai/core';
+
+declare const createBrowserCortex: (config: CortexConfig) => ICortex;
+declare const createCortex: (config: CortexConfig) => ICortex;
+
+interface IBrowserMemory {
+  store(text: string, type?: MemoryType, importance?: number): Promise<MemoryItem>;
+  recall(query: string, limit?: number): Promise<MemoryItem[]>;
+  clear(): Promise<void>;
+}
+declare const createBrowserMemory: (config?: MemoryConfig) => {
+  store: (text: string, type?: MemoryType, importance?: number) => Promise<MemoryItem>;
+  recall: (query: string, limit?: number) => Promise<MemoryItem[]>;
+  clear: () => Promise<void>;
+};
+declare const createMemory: (config?: MemoryConfig) => {
+  store: (text: string, type?: MemoryType, importance?: number) => Promise<MemoryItem>;
+  recall: (query: string, limit?: number) => Promise<MemoryItem[]>;
+  clear: () => Promise<void>;
+};
+
+declare const initVectorEngine: () => Promise<void>;
+declare const generateEmbedding: (text: string) => Promise<number[]>;
+
+export { type IBrowserMemory, createBrowserCortex, createBrowserMemory, createCortex, createMemory, generateEmbedding, initVectorEngine };
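
For orientation, a minimal usage sketch of the API these declarations describe. It assumes an ESM context with top-level await; the method names mirror the objects returned in dist/index.js, the "observation" memory type and the model id are the defaults from that build, and any further CortexConfig / MemoryConfig fields required by @forbocai/core are not shown.

  import { createBrowserCortex, createBrowserMemory, initVectorEngine } from "@forbocai/browser";

  // Optionally warm up the MiniLM embedding pipeline; store/recall also initialize it lazily.
  await initVectorEngine();

  // Vector-backed memory (in-browser Orama index over 384-dim embeddings).
  const memory = createBrowserMemory();
  await memory.store("User prefers dark mode", "observation", 0.8);
  const hits = await memory.recall("what theme does the user like?", 3);

  // In-browser LLM via web-llm; the model id matches the default in dist/index.js.
  const cortex = createBrowserCortex({ model: "SmolLM2-135M-Instruct-q4f16_1-MLC" });
  await cortex.init();
  const answer = await cortex.complete("Summarize what you know about the user.");
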
package/dist/index.d.ts
ADDED
@@ -0,0 +1,25 @@
+import { CortexConfig, ICortex, MemoryType, MemoryItem, MemoryConfig } from '@forbocai/core';
+
+declare const createBrowserCortex: (config: CortexConfig) => ICortex;
+declare const createCortex: (config: CortexConfig) => ICortex;
+
+interface IBrowserMemory {
+  store(text: string, type?: MemoryType, importance?: number): Promise<MemoryItem>;
+  recall(query: string, limit?: number): Promise<MemoryItem[]>;
+  clear(): Promise<void>;
+}
+declare const createBrowserMemory: (config?: MemoryConfig) => {
+  store: (text: string, type?: MemoryType, importance?: number) => Promise<MemoryItem>;
+  recall: (query: string, limit?: number) => Promise<MemoryItem[]>;
+  clear: () => Promise<void>;
+};
+declare const createMemory: (config?: MemoryConfig) => {
+  store: (text: string, type?: MemoryType, importance?: number) => Promise<MemoryItem>;
+  recall: (query: string, limit?: number) => Promise<MemoryItem[]>;
+  clear: () => Promise<void>;
+};
+
+declare const initVectorEngine: () => Promise<void>;
+declare const generateEmbedding: (text: string) => Promise<number[]>;
+
+export { type IBrowserMemory, createBrowserCortex, createBrowserMemory, createCortex, createMemory, generateEmbedding, initVectorEngine };
package/dist/index.js
ADDED
@@ -0,0 +1,173 @@
+"use strict";
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+// src/index.ts
+var index_exports = {};
+__export(index_exports, {
+  createBrowserCortex: () => createBrowserCortex,
+  createBrowserMemory: () => createBrowserMemory,
+  createCortex: () => createCortex,
+  createMemory: () => createMemory,
+  generateEmbedding: () => generateEmbedding,
+  initVectorEngine: () => initVectorEngine
+});
+module.exports = __toCommonJS(index_exports);
+
+// src/cortex.ts
+var import_web_llm = require("@mlc-ai/web-llm");
+var import_meta = {};
+var createBrowserCortex = (config) => {
+  let engine = null;
+  let status = {
+    id: "browser-init",
+    model: config.model || "smollm2-135m",
+    ready: false,
+    engine: "web-llm"
+  };
+  const init = async (onProgress) => {
+    if (status.ready) return status;
+    const initProgressCallback = (report) => {
+      console.log(report.text);
+      if (onProgress) {
+      }
+    };
+    const modelId = config.model || "SmolLM2-135M-Instruct-q4f16_1-MLC";
+    engine = await (0, import_web_llm.CreateWebWorkerMLCEngine)(
+      new Worker(new URL("./worker.ts", import_meta.url), { type: "module" }),
+      modelId,
+      { initProgressCallback }
+    );
+    status = {
+      id: `ctx_web_${Date.now()}`,
+      model: modelId,
+      ready: true,
+      engine: "web-llm"
+    };
+    return status;
+  };
+  const complete = async (prompt, options = {}) => {
+    if (!status.ready) await init();
+    const reply = await engine.chat.completions.create({
+      messages: [{ role: "user", content: prompt }],
+      max_gen_len: options.maxTokens,
+      temperature: options.temperature
+    });
+    return reply.choices[0].message.content || "";
+  };
+  const completeStream = async function* (prompt, options = {}) {
+    if (!status.ready) await init();
+    const chunks = await engine.chat.completions.create({
+      messages: [{ role: "user", content: prompt }],
+      max_gen_len: options.maxTokens,
+      temperature: options.temperature,
+      stream: true
+    });
+    for await (const chunk of chunks) {
+      const content = chunk.choices[0]?.delta?.content || "";
+      if (content) yield content;
+    }
+  };
+  return {
+    init,
+    complete,
+    completeStream
+  };
+};
+var createCortex = (config) => createBrowserCortex(config);
+
+// src/memory.ts
+var import_orama = require("@orama/orama");
+
+// src/vector.ts
+var import_transformers = require("@xenova/transformers");
+var embedder = null;
+var initVectorEngine = async () => {
+  if (embedder) return;
+  console.log("> Initializing Browser Vector Engine...");
+  embedder = await (0, import_transformers.pipeline)("feature-extraction", "Xenova/all-MiniLM-L6-v2");
+};
+var generateEmbedding = async (text) => {
+  if (!embedder) await initVectorEngine();
+  const result = await embedder(text, { pooling: "mean", normalize: true });
+  return Array.from(result.data);
+};
+
+// src/memory.ts
+var createBrowserMemory = (config = {}) => {
+  let db = null;
+  const init = async () => {
+    if (db) return db;
+    db = await (0, import_orama.create)({
+      schema: {
+        id: "string",
+        text: "string",
+        timestamp: "number",
+        type: "string",
+        importance: "number",
+        embedding: "vector[384]"
+        // all-MiniLM-L6-v2
+      }
+    });
+    return db;
+  };
+  const store = async (text, type = "observation", importance = 0.5) => {
+    const instance = await init();
+    const item = {
+      id: `mem_br_${Date.now()}_${Math.random().toString(36).substring(7)}`,
+      text,
+      timestamp: Date.now(),
+      type,
+      importance
+    };
+    const embedding = await generateEmbedding(text);
+    await (0, import_orama.insert)(instance, {
+      ...item,
+      embedding
+    });
+    return item;
+  };
+  const recall = async (query, limit = 5) => {
+    const instance = await init();
+    const embedding = await generateEmbedding(query);
+    const results = await (0, import_orama.search)(instance, {
+      mode: "vector",
+      vector: {
+        value: embedding,
+        property: "embedding"
+      },
+      similarity: 0.5,
+      limit
+    });
+    return results.hits.map((hit) => hit.document);
+  };
+  const clear = async () => {
+    db = null;
+  };
+  return { store, recall, clear };
+};
+var createMemory = (config = {}) => createBrowserMemory(config);
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+  createBrowserCortex,
+  createBrowserMemory,
+  createCortex,
+  createMemory,
+  generateEmbedding,
+  initVectorEngine
+});
package/dist/index.mjs
ADDED
@@ -0,0 +1,140 @@
+// src/cortex.ts
+import { CreateWebWorkerMLCEngine } from "@mlc-ai/web-llm";
+var createBrowserCortex = (config) => {
+  let engine = null;
+  let status = {
+    id: "browser-init",
+    model: config.model || "smollm2-135m",
+    ready: false,
+    engine: "web-llm"
+  };
+  const init = async (onProgress) => {
+    if (status.ready) return status;
+    const initProgressCallback = (report) => {
+      console.log(report.text);
+      if (onProgress) {
+      }
+    };
+    const modelId = config.model || "SmolLM2-135M-Instruct-q4f16_1-MLC";
+    engine = await CreateWebWorkerMLCEngine(
+      new Worker(new URL("./worker.ts", import.meta.url), { type: "module" }),
+      modelId,
+      { initProgressCallback }
+    );
+    status = {
+      id: `ctx_web_${Date.now()}`,
+      model: modelId,
+      ready: true,
+      engine: "web-llm"
+    };
+    return status;
+  };
+  const complete = async (prompt, options = {}) => {
+    if (!status.ready) await init();
+    const reply = await engine.chat.completions.create({
+      messages: [{ role: "user", content: prompt }],
+      max_gen_len: options.maxTokens,
+      temperature: options.temperature
+    });
+    return reply.choices[0].message.content || "";
+  };
+  const completeStream = async function* (prompt, options = {}) {
+    if (!status.ready) await init();
+    const chunks = await engine.chat.completions.create({
+      messages: [{ role: "user", content: prompt }],
+      max_gen_len: options.maxTokens,
+      temperature: options.temperature,
+      stream: true
+    });
+    for await (const chunk of chunks) {
+      const content = chunk.choices[0]?.delta?.content || "";
+      if (content) yield content;
+    }
+  };
+  return {
+    init,
+    complete,
+    completeStream
+  };
+};
+var createCortex = (config) => createBrowserCortex(config);
+
+// src/memory.ts
+import { create, insert, search } from "@orama/orama";
+
+// src/vector.ts
+import { pipeline } from "@xenova/transformers";
+var embedder = null;
+var initVectorEngine = async () => {
+  if (embedder) return;
+  console.log("> Initializing Browser Vector Engine...");
+  embedder = await pipeline("feature-extraction", "Xenova/all-MiniLM-L6-v2");
+};
+var generateEmbedding = async (text) => {
+  if (!embedder) await initVectorEngine();
+  const result = await embedder(text, { pooling: "mean", normalize: true });
+  return Array.from(result.data);
+};
+
+// src/memory.ts
+var createBrowserMemory = (config = {}) => {
+  let db = null;
+  const init = async () => {
+    if (db) return db;
+    db = await create({
+      schema: {
+        id: "string",
+        text: "string",
+        timestamp: "number",
+        type: "string",
+        importance: "number",
+        embedding: "vector[384]"
+        // all-MiniLM-L6-v2
+      }
+    });
+    return db;
+  };
+  const store = async (text, type = "observation", importance = 0.5) => {
+    const instance = await init();
+    const item = {
+      id: `mem_br_${Date.now()}_${Math.random().toString(36).substring(7)}`,
+      text,
+      timestamp: Date.now(),
+      type,
+      importance
+    };
+    const embedding = await generateEmbedding(text);
+    await insert(instance, {
+      ...item,
+      embedding
+    });
+    return item;
+  };
+  const recall = async (query, limit = 5) => {
+    const instance = await init();
+    const embedding = await generateEmbedding(query);
+    const results = await search(instance, {
+      mode: "vector",
+      vector: {
+        value: embedding,
+        property: "embedding"
+      },
+      similarity: 0.5,
+      limit
+    });
+    return results.hits.map((hit) => hit.document);
+  };
+  const clear = async () => {
+    db = null;
+  };
+  return { store, recall, clear };
+};
+var createMemory = (config = {}) => createBrowserMemory(config);
+export {
+  createBrowserCortex,
+  createBrowserMemory,
+  createCortex,
+  createMemory,
+  generateEmbedding,
+  initVectorEngine
+};
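
Both builds start the engine with new Worker(new URL("./worker.ts", import.meta.url), { type: "module" }), yet no worker file appears in the published dist, so that URL presumably only resolves when a consumer supplies and bundles a worker script of their own. A minimal sketch of what such a worker module typically looks like for this web-llm pattern (the WebWorkerMLCEngineHandler export name is an assumption for @mlc-ai/web-llm ^0.2.46):

  // worker.ts (hypothetical; not part of the published package)
  import { WebWorkerMLCEngineHandler } from "@mlc-ai/web-llm";

  // Hosts the MLC engine inside the worker and relays messages
  // from CreateWebWorkerMLCEngine running on the main thread.
  const handler = new WebWorkerMLCEngineHandler();

  self.onmessage = (msg: MessageEvent) => {
    handler.onmessage(msg);
  };
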
package/package.json
ADDED
@@ -0,0 +1,29 @@
+{
+  "name": "@forbocai/browser",
+  "version": "0.4.4",
+  "license": "UNLICENSED",
+  "description": "Web Browser implementation for ForbocAI SDK",
+  "main": "dist/index.js",
+  "module": "dist/index.mjs",
+  "types": "dist/index.d.ts",
+  "scripts": {
+    "build": "tsup src/index.ts --format cjs,esm --dts",
+    "dev": "tsup src/index.ts --watch",
+    "test": "vitest"
+  },
+  "dependencies": {
+    "@forbocai/core": "*",
+    "@mlc-ai/web-llm": "^0.2.46",
+    "@orama/orama": "^2.0.26",
+    "@xenova/transformers": "^2.17.2"
+  },
+  "devDependencies": {
+    "tsup": "^8.5.1",
+    "typescript": "^5.9.3",
+    "vitest": "^1.0.0"
+  },
+  "files": [
+    "dist",
+    "package.json"
+  ]
+}