@browser-ai/web-llm 1.0.0 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +34 -271
- package/dist/index.d.mts +14 -29
- package/dist/index.d.ts +14 -29
- package/dist/index.js +295 -49
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +295 -49
- package/dist/index.mjs.map +1 -1
- package/package.json +71 -70
package/package.json CHANGED

@@ -1,71 +1,72 @@
-{
-  "name": "@browser-ai/web-llm",
-  "version": "1.0.0",
-  "description": "WebLLM provider for Vercel AI SDK (High-performance
-  "author": {
-    "name": "Jakob Hoeg Mørk",
-    "url": "https://jakobhoeg.dev"
-  },
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/jakobhoeg/browser-ai.git",
-    "directory": "packages/vercel/web-llm"
-  },
-  "sideEffects": false,
-  "main": "dist/index.js",
-  "types": "dist/index.d.ts",
-  "exports": {
-    ".": {
-      "types": "./dist/index.d.ts",
-      "import": "./dist/index.js",
-      "require": "./dist/index.js"
-    }
-  },
-  "files": [
-    "dist/**/*"
-  ],
-  "scripts": {
-    "build": "npm run clean && tsup",
-    "build:prod": "tsup",
-    "build:test": "npm run test:run && tsup",
-    "dev": "tsup --watch",
-    "clean": "rimraf dist",
-    "test": "vitest",
-    "test:watch": "vitest --watch",
-    "test:coverage": "vitest --coverage",
-    "test:run": "vitest run",
-    "dev:example": "npm run build && npm run -w examples/next-hybrid dev"
-  },
-  "keywords": [
-    "ai",
-    "ai-sdk",
-    "vercel",
-    "webllm",
-    "web-llm",
-    "browser-ai",
-    "mlc-ai",
-    "webgpu",
-    "language-model",
-    "llm",
-    "inference",
-    "
-  ],
-  "license": "Apache License",
-  "peerDependencies": {
-    "@mlc-ai/web-llm": "^0.2.79",
-    "ai": "^
-  },
-  "devDependencies": {
-    "@types/node": "^20.0.0",
-    "@vitest/coverage-v8": "^1.0.0",
-    "
-    "
-    "
-    "
-    "
-    "
-
-
-
-
+{
+  "name": "@browser-ai/web-llm",
+  "version": "2.0.0",
+  "description": "WebLLM provider for Vercel AI SDK v5+ (High-performance in-browser LLM inference)",
+  "author": {
+    "name": "Jakob Hoeg Mørk",
+    "url": "https://jakobhoeg.dev"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/jakobhoeg/browser-ai.git",
+    "directory": "packages/vercel/web-llm"
+  },
+  "sideEffects": false,
+  "main": "dist/index.js",
+  "types": "dist/index.d.ts",
+  "exports": {
+    ".": {
+      "types": "./dist/index.d.ts",
+      "import": "./dist/index.js",
+      "require": "./dist/index.js"
+    }
+  },
+  "files": [
+    "dist/**/*"
+  ],
+  "scripts": {
+    "build": "npm run clean && tsup",
+    "build:prod": "tsup",
+    "build:test": "npm run test:run && tsup",
+    "dev": "tsup --watch",
+    "clean": "rimraf dist",
+    "test": "vitest",
+    "test:watch": "vitest --watch",
+    "test:coverage": "vitest --coverage",
+    "test:run": "vitest run",
+    "dev:example": "npm run build && npm run -w examples/next-hybrid dev"
+  },
+  "keywords": [
+    "ai",
+    "ai-sdk",
+    "vercel",
+    "webllm",
+    "web-llm",
+    "browser-ai",
+    "mlc-ai",
+    "webgpu",
+    "language-model",
+    "llm",
+    "inference",
+    "built-in-ai"
+  ],
+  "license": "Apache License",
+  "peerDependencies": {
+    "@mlc-ai/web-llm": "^0.2.79",
+    "ai": "^6.0.0"
+  },
+  "devDependencies": {
+    "@types/node": "^20.0.0",
+    "@vitest/coverage-v8": "^1.0.0",
+    "@webgpu/types": "^0.1.68",
+    "jsdom": "^26.1.0",
+    "rimraf": "^5.0.0",
+    "tsup": "^8.0.0",
+    "typescript": "^5.0.0",
+    "vitest": "^1.0.0",
+    "zod": "^3.25.76"
+  },
+  "publishConfig": {
+    "access": "public"
+  }
 }
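With the peer dependency on "ai" moving to "^6.0.0" in 2.0.0, a consumer installs the provider alongside @mlc-ai/web-llm and the AI SDK and passes it to the SDK's model interface. A minimal sketch follows; the webLLM factory export and the model id are assumptions for illustration, since this diff only covers package metadata, so consult the package README for the actual API.

// Minimal usage sketch. "generateText" comes from the "ai" peer dependency ("^6.0.0" in 2.0.0).
// The "webLLM" export name and the model id below are assumed, not confirmed by this diff.
import { generateText } from "ai";
import { webLLM } from "@browser-ai/web-llm"; // assumed factory export

async function main() {
  const { text } = await generateText({
    // Example MLC/WebLLM model id; any model supported by @mlc-ai/web-llm would go here.
    model: webLLM("Llama-3.2-1B-Instruct-q4f16_1-MLC"),
    prompt: "Summarize what WebGPU provides in one sentence.",
  });
  console.log(text);
}

main();

Since the provider targets in-browser WebGPU inference (per the description and the @webgpu/types devDependency), the sketch is meant to run in a WebGPU-capable browser context rather than on the server.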