@dev4s/opencode-dcp 0.1.0 → 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +0 -3
- package/dist/index.js.map +1 -1
- package/dist/lib/tokenizer.d.ts +12 -17
- package/dist/lib/tokenizer.d.ts.map +1 -1
- package/dist/lib/tokenizer.js +13 -36
- package/dist/lib/tokenizer.js.map +1 -1
- package/package.json +9 -10
package/dist/index.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,qBAAqB,CAAA;
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,qBAAqB,CAAA;AAQjD,QAAA,MAAM,MAAM,EAAE,MAuFK,CAAA;AAEnB,eAAe,MAAM,CAAA"}
package/dist/index.js
CHANGED
@@ -4,14 +4,11 @@ import { loadPrompt } from "./lib/prompt";
 import { createSessionState } from "./lib/state";
 import { createDiscardTool, createExtractTool } from "./lib/strategies";
 import { createChatMessageTransformHandler, createEventHandler } from "./lib/hooks";
-import { preloadTokenizer } from "./lib/tokenizer";
 const plugin = (async (ctx) => {
     const config = getConfig(ctx);
     if (!config.enabled) {
         return {};
     }
-    // Start loading tokenizer in background (non-blocking)
-    preloadTokenizer();
     // Suppress AI SDK warnings
     if (typeof globalThis !== "undefined") {
         ;
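Note: the net effect of this hunk is that 0.1.1 no longer warms up a tokenizer during plugin initialization. A minimal sketch of the resulting startup path, using only the identifiers visible in the hunk above (the DcpConfig type and the getConfig stub are assumptions added for illustration, not the package's actual source):

// Sketch: 0.1.1 plugin init with the tokenizer preload removed.
type DcpConfig = { enabled: boolean };                // assumed shape
declare function getConfig(ctx: unknown): DcpConfig;  // assumed signature

const plugin = async (ctx: unknown) => {
    const config = getConfig(ctx);
    if (!config.enabled) {
        return {};  // plugin disabled: register no hooks
    }
    // 0.1.0 started a background gpt-tokenizer import here via preloadTokenizer();
    // 0.1.1 drops both the import and the call, so init has no tokenizer warm-up step.
    return { /* hooks wired up as in the surrounding context lines */ };
};

export default plugin;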
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.js","sourceRoot":"","sources":["../index.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAA;AACxC,OAAO,EAAE,MAAM,EAAE,MAAM,cAAc,CAAA;AACrC,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAA;AACzC,OAAO,EAAE,kBAAkB,EAAE,MAAM,aAAa,CAAA;AAChD,OAAO,EAAE,iBAAiB,EAAE,iBAAiB,EAAE,MAAM,kBAAkB,CAAA;AACvE,OAAO,EAAE,iCAAiC,EAAE,kBAAkB,EAAE,MAAM,aAAa,CAAA;
+{"version":3,"file":"index.js","sourceRoot":"","sources":["../index.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAA;AACxC,OAAO,EAAE,MAAM,EAAE,MAAM,cAAc,CAAA;AACrC,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAA;AACzC,OAAO,EAAE,kBAAkB,EAAE,MAAM,aAAa,CAAA;AAChD,OAAO,EAAE,iBAAiB,EAAE,iBAAiB,EAAE,MAAM,kBAAkB,CAAA;AACvE,OAAO,EAAE,iCAAiC,EAAE,kBAAkB,EAAE,MAAM,aAAa,CAAA;AAEnF,MAAM,MAAM,GAAW,CAAC,KAAK,EAAE,GAAG,EAAE,EAAE;IAClC,MAAM,MAAM,GAAG,SAAS,CAAC,GAAG,CAAC,CAAA;IAE7B,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;QAClB,OAAO,EAAE,CAAA;IACb,CAAC;IAED,2BAA2B;IAC3B,IAAI,OAAO,UAAU,KAAK,WAAW,EAAE,CAAC;QACpC,CAAC;QAAC,UAAkB,CAAC,mBAAmB,GAAG,KAAK,CAAA;IACpD,CAAC;IAED,MAAM,MAAM,GAAG,IAAI,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,CAAA;IACvC,MAAM,KAAK,GAAG,kBAAkB,EAAE,CAAA;IAElC,MAAM,CAAC,IAAI,CAAC,iBAAiB,EAAE;QAC3B,UAAU,EAAE,MAAM,CAAC,UAAU;KAChC,CAAC,CAAA;IAEF,OAAO;QACH,oCAAoC,EAAE,KAAK,EACvC,MAAe,EACf,MAA4B,EAC9B,EAAE;YACA,MAAM,cAAc,GAAG,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAA;YACnD,MAAM,cAAc,GAAG,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAA;YAEnD,IAAI,UAAkB,CAAA;YACtB,IAAI,cAAc,IAAI,cAAc,EAAE,CAAC;gBACnC,UAAU,GAAG,gCAAgC,CAAA;YACjD,CAAC;iBAAM,IAAI,cAAc,EAAE,CAAC;gBACxB,UAAU,GAAG,mCAAmC,CAAA;YACpD,CAAC;iBAAM,IAAI,cAAc,EAAE,CAAC;gBACxB,UAAU,GAAG,mCAAmC,CAAA;YACpD,CAAC;iBAAM,CAAC;gBACJ,OAAM;YACV,CAAC;YAED,MAAM,eAAe,GAAG,UAAU,CAAC,UAAU,CAAC,CAAA;YAC9C,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC,CAAA;QACvC,CAAC;QACD,sCAAsC,EAAE,iCAAiC,CACrE,GAAG,CAAC,MAAM,EACV,KAAK,EACL,MAAM,EACN,MAAM,CACT;QACD,IAAI,EAAE;YACF,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,IAAI;gBAChC,OAAO,EAAE,iBAAiB,CAAC;oBACvB,MAAM,EAAE,GAAG,CAAC,MAAM;oBAClB,KAAK;oBACL,MAAM;oBACN,MAAM;oBACN,gBAAgB,EAAE,GAAG,CAAC,SAAS;iBAClC,CAAC;aACL,CAAC;YACF,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,IAAI;gBAChC,OAAO,EAAE,iBAAiB,CAAC;oBACvB,MAAM,EAAE,GAAG,CAAC,MAAM;oBAClB,KAAK;oBACL,MAAM;oBACN,MAAM;oBACN,gBAAgB,EAAE,GAAG,CAAC,SAAS;iBAClC,CAAC;aACL,CAAC;SACL;QACD,MAAM,EAAE,KAAK,EAAE,cAAc,EAAE,EAAE;YAC7B,qEAAqE;YACrE,8DAA8D;YAC9D,MAAM,UAAU,GAAa,EAAE,CAAA;YAC/B,IAAI,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO;gBAAE,UAAU,CAAC,IAAI,CAAC,SAAS,CAAC,CAAA;YAC5D,IAAI,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO;gBAAE,UAAU,CAAC,IAAI,CAAC,SAAS,CAAC,CAAA;YAE5D,IAAI,UAAU,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gBACxB,MAAM,oBAAoB,GAAG,cAAc,CAAC,YAAY,EAAE,aAAa,IAAI,EAAE,CAAA;gBAC7E,cAAc,CAAC,YAAY,GAAG;oBAC1B,GAAG,cAAc,CAAC,YAAY;oBAC9B,aAAa,EAAE,CAAC,GAAG,oBAAoB,EAAE,GAAG,UAAU,CAAC;iBAC1D,CAAA;gBACD,MAAM,CAAC,IAAI,CACP,SAAS,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,oDAAoD,CAC7G,CAAA;YACL,CAAC;QACL,CAAC;QACD,KAAK,EAAE,kBAAkB,CAAC,GAAG,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,GAAG,CAAC,SAAS,CAAC;KAC9E,CAAA;AACL,CAAC,CAAkB,CAAA;AAEnB,eAAe,MAAM,CAAA"}
package/dist/lib/tokenizer.d.ts
CHANGED
@@ -1,24 +1,19 @@
 /**
- *
+ * Token estimation module - uses character-based heuristics for fast token counting.
  *
- *
- *
- *
-
-
- *
- * Call this at plugin startup - it returns immediately and loads async.
+ * This module provides approximate token counts (~85-95% accuracy) using a simple
+ * character-to-token ratio. This is sufficient for statistics display where counts
+ * are shown with a "~" prefix indicating approximation.
+ *
+ * Using estimation instead of actual tokenization eliminates the 5-15 second
+ * startup delay that tokenizer libraries like gpt-tokenizer require.
  */
-export declare function preloadTokenizer(): void;
 /**
- *
- *
- *
+ * Estimates token count for text using character-based heuristic.
+ * Returns an array of the estimated length (for API compatibility).
+ *
+ * Uses ~3.5 chars per token which is more conservative than the common /4 estimate,
+ * accounting for code, mixed content, and non-English text better.
  */
 export declare function encodeText(text: string): number[];
-/**
- * Returns whether the tokenizer has finished loading.
- * Useful for logging/debugging.
- */
-export declare function isTokenizerReady(): boolean;
 //# sourceMappingURL=tokenizer.d.ts.map
package/dist/lib/tokenizer.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"tokenizer.d.ts","sourceRoot":"","sources":["../../lib/tokenizer.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;
+{"version":3,"file":"tokenizer.d.ts","sourceRoot":"","sources":["../../lib/tokenizer.ts"],"names":[],"mappings":"AAAA;;;;;;;;;GASG;AAEH;;;;;;GAMG;AACH,wBAAgB,UAAU,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,EAAE,CAGjD"}
package/dist/lib/tokenizer.js
CHANGED
@@ -1,45 +1,22 @@
 /**
- *
+ * Token estimation module - uses character-based heuristics for fast token counting.
  *
- *
- *
- *
-
-
-
-/**
- * Starts loading the tokenizer in the background.
- * Call this at plugin startup - it returns immediately and loads async.
+ * This module provides approximate token counts (~85-95% accuracy) using a simple
+ * character-to-token ratio. This is sufficient for statistics display where counts
+ * are shown with a "~" prefix indicating approximation.
+ *
+ * Using estimation instead of actual tokenization eliminates the 5-15 second
+ * startup delay that tokenizer libraries like gpt-tokenizer require.
  */
-export function preloadTokenizer() {
-    if (loadPromise)
-        return;
-    loadPromise = import("gpt-tokenizer")
-        .then(({ encode }) => {
-            encodeFn = encode;
-        })
-        .catch(() => {
-            // Silently fail - fallback estimation will be used
-        });
-}
 /**
- *
- *
- *
+ * Estimates token count for text using character-based heuristic.
+ * Returns an array of the estimated length (for API compatibility).
+ *
+ * Uses ~3.5 chars per token which is more conservative than the common /4 estimate,
+ * accounting for code, mixed content, and non-English text better.
  */
 export function encodeText(text) {
-
-        return encodeFn(text);
-    }
-    // Fallback: ~4 chars per token (reasonable estimate for English text)
-    const estimatedLength = Math.ceil(text.length / 4);
+    const estimatedLength = Math.ceil(text.length / 3.5);
     return new Array(estimatedLength);
 }
-/**
- * Returns whether the tokenizer has finished loading.
- * Useful for logging/debugging.
- */
-export function isTokenizerReady() {
-    return encodeFn !== null;
-}
 //# sourceMappingURL=tokenizer.js.map
package/dist/lib/tokenizer.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"tokenizer.js","sourceRoot":"","sources":["../../lib/tokenizer.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;
+{"version":3,"file":"tokenizer.js","sourceRoot":"","sources":["../../lib/tokenizer.ts"],"names":[],"mappings":"AAAA;;;;;;;;;GASG;AAEH;;;;;;GAMG;AACH,MAAM,UAAU,UAAU,CAAC,IAAY;IACnC,MAAM,eAAe,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,GAAG,GAAG,CAAC,CAAA;IACpD,OAAO,IAAI,KAAK,CAAC,eAAe,CAAC,CAAA;AACrC,CAAC"}
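Note: taken together, the tokenizer changes swap real tokenization (gpt-tokenizer, loaded in the background) for a fixed characters-per-token ratio. A minimal sketch of the heuristic described in the new JSDoc, plus a hypothetical usage example (the encodeText signature comes from the diff; the sample text and output format are illustrative assumptions):

// 0.1.1 heuristic: ~3.5 characters per token, returned as an array so
// callers that only read .length keep working unchanged.
export function encodeText(text: string): number[] {
    const estimatedLength = Math.ceil(text.length / 3.5);
    return new Array(estimatedLength);
}

// Hypothetical usage: a 7,000-character tool output estimates to
// ceil(7000 / 3.5) = 2,000 tokens, shown with a "~" prefix in statistics.
const sample = "x".repeat(7000);
console.log(`~${encodeText(sample).length} tokens`);  // "~2000 tokens"

Dividing by 3.5 instead of the common 4 yields slightly higher counts, which the new comments describe as a more conservative estimate for code, mixed content, and non-English text.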
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "$schema": "https://json.schemastore.org/package.json",
   "name": "@dev4s/opencode-dcp",
-  "version": "0.1.0",
+  "version": "0.1.1",
   "type": "module",
   "description": "OpenCode plugin that optimizes token usage by pruning obsolete tool outputs from conversation context",
   "main": "./dist/index.js",
@@ -32,20 +32,19 @@
     "@opencode-ai/plugin": ">=0.13.7"
   },
   "dependencies": {
-    "@ai-sdk/openai-compatible": "^
+    "@ai-sdk/openai-compatible": "^2.0.2",
     "@opencode-ai/sdk": "latest",
     "@tarquinen/opencode-auth-provider": "^0.1.7",
-    "ai": "^
-    "gpt-tokenizer": "^3.4.0",
+    "ai": "^6.0.6",
     "jsonc-parser": "^3.3.1",
-    "zod": "^4.
+    "zod": "^4.3.4"
   },
   "devDependencies": {
-    "@opencode-ai/plugin": "^1.0.
-    "@types/node": "^
-    "cpy-cli": "^
-    "prettier": "^3.4
-    "rimraf": "^6.
+    "@opencode-ai/plugin": "^1.0.224",
+    "@types/node": "^25.0.3",
+    "cpy-cli": "^6.0.0",
+    "prettier": "^3.7.4",
+    "rimraf": "^6.1.2",
     "tsx": "^4.21.0",
     "typescript": "^5.9.3"
   },