@sliday/tamp 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/bin/tamp.js ADDED
@@ -0,0 +1,20 @@
1
#!/usr/bin/env node
// CLI entry point: start the Tamp compression proxy and print a startup
// banner telling the user how to point Claude Code at it.
import { createProxy } from '../index.js'

const { config, server } = createProxy()

server.listen(config.port, () => {
  // Everything is written to stderr so stdout stays clean for piping.
  // NOTE(review): the box borders are fixed-width and assume the default
  // 4-digit port; longer values misalign the right-hand edge, and the
  // `export ANTHROPIC_BASE_URL` line deliberately has no closing border
  // because the URL length varies.
  console.error('')
  console.error(' ┌─ Tamp ─────────────────────────────┐')
  console.error(` │ Proxy: http://localhost:${config.port} │`)
  console.error(' │ Status: ● Ready │')
  console.error(' │ │')
  console.error(' │ In another terminal: │')
  console.error(` │ export ANTHROPIC_BASE_URL=http://localhost:${config.port}`)
  console.error(' │ claude │')
  console.error(' └────────────────────────────────────┘')
  console.error('')
  // Summary of the effective configuration (env-var driven; see config.js).
  console.error(` Upstream: ${config.upstream}`)
  console.error(` Stages: ${config.stages.join(', ')}`)
  console.error('')
})
package/compress.js ADDED
@@ -0,0 +1,114 @@
1
+ import { encode } from '@toon-format/toon'
2
+ import { tryParseJSON, classifyContent, stripLineNumbers } from './detect.js'
3
+
4
/**
 * Synchronously compress a text block according to the configured stages.
 *
 * @param {string} text - candidate block content.
 * @param {object} config - see loadConfig(); uses `minSize`, `stages`,
 *   `llmLinguaUrl`.
 * @returns {object|null} null when the block should be left untouched
 *   (too small, already TOON, not compressible, or no size win);
 *   `{ async: true, text, cls }` when an LLMLingua round trip is needed;
 *   otherwise `{ text, method, originalLen, compressedLen }`.
 */
export function compressText(text, config) {
  if (text.length < config.minSize) return null
  const cls = classifyContent(text)
  // Already TOON-encoded: never re-compress.
  if (cls === 'toon') return null
  if (cls === 'text') {
    // Free-form text can only be compressed by the optional LLMLingua
    // service; signal the caller to take the async path.
    if (config.stages.includes('llmlingua') && config.llmLinguaUrl) {
      return { async: true, text, cls }
    }
    return null
  }
  if (cls !== 'json' && cls !== 'json-lined') return null

  const raw = cls === 'json-lined' ? stripLineNumbers(text) : text
  const { ok, value } = tryParseJSON(raw)
  if (!ok) return null

  const minified = JSON.stringify(value)
  let best = { text: minified, method: 'minify' }

  if (config.stages.includes('toon')) {
    try {
      const tooned = encode(value)
      if (tooned.length < best.text.length) {
        best = { text: tooned, method: 'toon' }
      }
    } catch { /* TOON can't represent every value; fall back to minified */ }
  }

  // Only claim a win if the best candidate actually beats the original.
  // (Bailing out on `minified.length >= text.length` BEFORE trying TOON —
  // as the previous version did — meant input that was already minified
  // JSON could never be TOON-compressed at all.)
  if (best.text.length >= text.length) return null

  return { text: best.text, method: best.method, originalLen: text.length, compressedLen: best.text.length }
}
36
+
37
/**
 * Compress free-form text via an external LLMLingua HTTP service.
 *
 * Best-effort: any failure (timeout, network error, non-2xx, malformed
 * response) returns null so the caller forwards the original text.
 *
 * @param {string} text - content to compress.
 * @param {object} config - uses `llmLinguaUrl`.
 * @returns {Promise<object|null>} compression stats record or null.
 */
async function compressWithLLMLingua(text, config) {
  const controller = new AbortController()
  // Hard 5s cap so a slow compressor can't stall the proxied request.
  const timeout = setTimeout(() => controller.abort(), 5000)
  try {
    const res = await fetch(config.llmLinguaUrl + '/compress', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ text, rate: 0.5 }),
      signal: controller.signal,
    })
    if (!res.ok) return null
    const data = await res.json()
    // Defend against a malformed service response.
    if (typeof data.text !== 'string') return null
    return { text: data.text, method: 'llmlingua', originalLen: text.length, compressedLen: data.text.length }
  } catch {
    return null
  } finally {
    // Previously clearTimeout was only reached on success, leaking the
    // timer (and firing a stray abort) whenever fetch threw.
    clearTimeout(timeout)
  }
}
55
+
56
// Route a block through the right compression path: synchronous results
// come straight from compressText; an `async` marker means the block needs
// the LLMLingua round trip instead.
async function compressBlock(text, config) {
  const sync = compressText(text, config)
  return sync?.async ? compressWithLLMLingua(text, config) : sync
}
63
+
64
/**
 * Compress oversized content in the most recent user message of an
 * Anthropic /v1/messages request body.
 *
 * Only the last user message is touched; earlier messages are left intact
 * (presumably to stay prompt-cache-safe — see `cacheSafe` in config.js).
 * The body is mutated in place.
 *
 * @param {object} body - parsed /v1/messages request body.
 * @param {object} config - see loadConfig(); uses `log` plus whatever
 *   compressText/compressWithLLMLingua read.
 * @returns {Promise<{body: object, stats: Array<object>}>} the (mutated)
 *   body plus per-block stats for formatRequestLog().
 */
export async function compressMessages(body, config) {
  const stats = []
  if (!body?.messages?.length) return { body, stats }

  // Locate the most recent user message.
  let lastUserIdx = -1
  for (let i = body.messages.length - 1; i >= 0; i--) {
    if (body.messages[i].role === 'user') { lastUserIdx = i; break }
  }
  if (lastUserIdx === -1) return { body, stats }

  const msg = body.messages[lastUserIdx]
  const debug = config.log

  if (typeof msg.content === 'string') {
    // Plain string content: compress the whole message.
    const result = await compressBlock(msg.content, config)
    if (result) {
      msg.content = result.text
      stats.push({ index: lastUserIdx, ...result })
    }
  } else if (Array.isArray(msg.content)) {
    // Structured content: only tool_result blocks are candidates.
    for (let i = 0; i < msg.content.length; i++) {
      const block = msg.content[i]
      if (block.type !== 'tool_result') continue
      // Error outputs stay verbatim so the model sees the exact failure.
      if (block.is_error) { stats.push({ index: i, skipped: 'error' }); continue }

      if (typeof block.content === 'string') {
        if (debug) {
          // Log prefix unified to [tamp]; the old [toona] prefix was stale
          // branding inconsistent with the rest of the package.
          console.error(`[tamp] debug block[${i}]: type=${classifyContent(block.content)} len=${block.content.length} tool_use_id=${block.tool_use_id || '?'}`)
        }
        const result = await compressBlock(block.content, config)
        if (result) { block.content = result.text; stats.push({ index: i, ...result }) }
      } else if (Array.isArray(block.content)) {
        // tool_result content may itself be a list of typed sub-blocks.
        for (const sub of block.content) {
          if (sub.type !== 'text') continue
          if (debug) {
            console.error(`[tamp] debug sub-block: type=${classifyContent(sub.text)} len=${sub.text.length}`)
          }
          const result = await compressBlock(sub.text, config)
          if (result) { sub.text = result.text; stats.push({ index: i, ...result }) }
        }
      }
    }
  }

  return { body, stats }
}
package/config.js ADDED
@@ -0,0 +1,14 @@
1
/**
 * Build the immutable proxy configuration from environment variables.
 *
 * @param {object} [env=process.env] - environment map (injectable for tests).
 * @returns {Readonly<object>} frozen config object.
 */
export function loadConfig(env = process.env) {
  // Parse an integer env var. Unlike the `parseInt(x, 10) || fallback`
  // idiom, an explicit "0" is respected instead of being clobbered by `||`
  // (e.g. TOONA_MIN_SIZE=0 meaning "compress everything").
  const intFrom = (value, fallback) => {
    const n = Number.parseInt(value ?? '', 10)
    return Number.isNaN(n) ? fallback : n
  }

  const stages = (env.TOONA_STAGES || 'minify').split(',').map(s => s.trim()).filter(Boolean)
  return Object.freeze({
    port: intFrom(env.TOONA_PORT, 7778),
    upstream: env.TOONA_UPSTREAM || 'https://api.anthropic.com',
    minSize: intFrom(env.TOONA_MIN_SIZE, 200),
    // Object.freeze is shallow, so freeze the array too.
    stages: Object.freeze(stages),
    log: env.TOONA_LOG !== 'false',
    logFile: env.TOONA_LOG_FILE || null,
    maxBody: intFrom(env.TOONA_MAX_BODY, 10_485_760),   // 10 MiB
    cacheSafe: true,
    llmLinguaUrl: env.TOONA_LLMLINGUA_URL || null,
  })
}
package/detect.js ADDED
@@ -0,0 +1,43 @@
1
// A leading line number followed by a tab or "→" separator, as produced by
// line-numbered tool output (e.g. the Read tool).
const LINE_NUM_RE = /^ *\d+[\t→]/

/**
 * Remove leading line-number gutters from multi-line text.
 *
 * Requires at least two numbered lines among the first five before treating
 * the input as line-numbered; otherwise the string is returned unchanged.
 *
 * @param {string} str - candidate text.
 * @returns {string} text with gutters stripped, or the input untouched.
 */
export function stripLineNumbers(str) {
  if (typeof str !== 'string') return str
  const lines = str.split('\n')
  if (lines.length < 2) return str
  const numbered = lines
    .slice(0, 5)
    .filter((line) => line.length > 0 && LINE_NUM_RE.test(line))
  if (numbered.length < 2) return str
  return lines.map((line) => line.replace(LINE_NUM_RE, '')).join('\n')
}
16
+
17
/**
 * Attempt to parse a string as JSON without throwing.
 *
 * @param {string} str - candidate JSON text.
 * @returns {{ok: boolean, value?: any}} `{ ok: true, value }` on success,
 *   `{ ok: false }` for non-strings, empty strings, or invalid JSON.
 */
export function tryParseJSON(str) {
  if (typeof str !== 'string' || str.length === 0) return { ok: false }
  try {
    return { ok: true, value: JSON.parse(str) }
  } catch {
    return { ok: false }
  }
}
26
+
27
/**
 * Heuristically detect TOON-encoded text by inspecting its first line for a
 * `[TOON]` marker or a TOON array header like `name[3]{` / `name[3]:`.
 *
 * @param {string} str - candidate text.
 * @returns {boolean} true when the first line looks like TOON output.
 */
export function isTOON(str) {
  if (typeof str !== 'string') return false
  const [firstLine] = str.trimStart().split('\n')
  const markers = [/^\[TOON\]/, /\w+\[\d+\]\{/, /\w+\[\d+\]:/]
  return markers.some((re) => re.test(firstLine))
}
32
+
33
/**
 * Classify a content block for the compression pipeline.
 *
 * @param {string} str - candidate text.
 * @returns {'toon'|'json'|'json-lined'|'text'|'unknown'} classification;
 *   'json-lined' means JSON hiding behind a line-number gutter.
 */
export function classifyContent(str) {
  if (typeof str !== 'string') return 'unknown'
  if (isTOON(str)) return 'toon'
  if (tryParseJSON(str).ok) return 'json'
  // Tool output often carries a line-number gutter; check whether the
  // payload underneath is JSON.
  const stripped = stripLineNumbers(str)
  if (stripped !== str && tryParseJSON(stripped).ok) return 'json-lined'
  return str.length > 0 ? 'text' : 'unknown'
}
package/index.js ADDED
@@ -0,0 +1,144 @@
1
import http from 'node:http'
import https from 'node:https'
import { pathToFileURL } from 'node:url'
import { loadConfig } from './config.js'
import { compressMessages } from './compress.js'
import { createSession, formatRequestLog } from './stats.js'
6
+
7
/**
 * Assemble a proxy instance: effective config, a stats session, and the
 * (not-yet-listening) HTTP server.
 *
 * @param {object} [overrides={}] - config keys that override loadConfig().
 * @returns {{config: object, session: object, server: import('node:http').Server}}
 */
export function createProxy(overrides = {}) {
  const config = { ...loadConfig(), ...overrides }
  const session = createSession()
  const server = _createServer(config, session)
  return { config, session, server }
}
12
+
13
/**
 * Build the HTTP proxy server.
 *
 * POST /v1/messages bodies are buffered (up to config.maxBody), run through
 * compressMessages(), and forwarded with corrected framing; all other
 * requests — and oversized bodies — are streamed through untouched.
 */
function _createServer(config, session) {

  // Open a request to the upstream API and stream its response back to the
  // client. Shared by both forwarding paths (previously duplicated).
  function openUpstream(method, upstreamUrl, headers, res) {
    const mod = upstreamUrl.protocol === 'https:' ? https : http
    const opts = {
      hostname: upstreamUrl.hostname,
      port: upstreamUrl.port,
      path: upstreamUrl.pathname + upstreamUrl.search,
      method,
      headers,
    }

    const upstream = mod.request(opts, (upstreamRes) => {
      res.writeHead(upstreamRes.statusCode, upstreamRes.headers)
      upstreamRes.pipe(res)
    })

    upstream.on('error', (err) => {
      console.error(`[tamp] upstream error: ${err.message}`)
      // If the response is already streaming we can only truncate it;
      // otherwise report a proper 502.
      if (!res.headersSent) {
        res.writeHead(502, { 'Content-Type': 'application/json' })
      }
      res.end(JSON.stringify({ error: 'upstream_error', message: err.message }))
    })

    return upstream
  }

  // Forward a fully-buffered body.
  function forwardRequest(method, upstreamUrl, headers, body, res) {
    const upstream = openUpstream(method, upstreamUrl, headers, res)
    if (body) {
      upstream.end(body)
    } else {
      upstream.end()
    }
    return upstream
  }

  // Stream the client request through unchanged; `prefixChunks` replays any
  // data already read from `req` before the passthrough decision was made.
  function pipeRequest(req, res, upstreamUrl, prefixChunks) {
    const headers = { ...req.headers }
    delete headers.host // the upstream must see its own Host, not the proxy's

    const upstream = openUpstream(req.method, upstreamUrl, headers, res)

    if (prefixChunks) {
      for (const chunk of prefixChunks) {
        upstream.write(chunk)
      }
    }

    req.pipe(upstream)
  }

  return http.createServer(async (req, res) => {
    if (config.log) console.error(`[tamp] ${req.method} ${req.url}`)
    const upstreamUrl = new URL(req.url, config.upstream)
    const isMessages = req.method === 'POST' && req.url.startsWith('/v1/messages')

    if (!isMessages) {
      return pipeRequest(req, res, upstreamUrl)
    }

    const chunks = []
    let size = 0
    let overflow = false

    // destroyOnReturn: false keeps `req` readable after an early `break`, so
    // the overflow path below can still pipe the remainder upstream. A bare
    // `for await (... of req)` destroys the stream when the loop breaks,
    // which made the old passthrough forward a dead stream.
    for await (const chunk of req.iterator({ destroyOnReturn: false })) {
      size += chunk.length
      chunks.push(chunk)
      if (size > config.maxBody) {
        overflow = true
        break
      }
    }

    if (overflow) {
      if (config.log) console.error('[tamp] passthrough (body too large)')
      return pipeRequest(req, res, upstreamUrl, chunks)
    }

    const rawBody = Buffer.concat(chunks)
    let finalBody = rawBody
    const headers = { ...req.headers }
    delete headers.host

    try {
      const parsed = JSON.parse(rawBody.toString('utf-8'))
      const { body, stats } = await compressMessages(parsed, config)
      finalBody = Buffer.from(JSON.stringify(body), 'utf-8')

      if (config.log && stats.length) {
        session.record(stats)
        console.error(formatRequestLog(stats, session))
      }
    } catch (err) {
      // Unparseable bodies are forwarded untouched — never block traffic.
      if (config.log) console.error(`[tamp] passthrough (parse error): ${err.message}`)
      finalBody = rawBody
    }

    // The body may have changed size; fix framing before forwarding.
    headers['content-length'] = Buffer.byteLength(finalBody)
    delete headers['transfer-encoding']

    forwardRequest(req.method, upstreamUrl, headers, finalBody, res)
  })
}
134
+
135
// Run a standalone proxy when this module is executed directly
// (`node index.js`), but not when imported (e.g. by bin/tamp.js).
// Comparing file URLs via pathToFileURL is Windows-safe; the old raw-path
// comparison against `new URL(import.meta.url).pathname` never matched on
// Windows (leading slash + drive-letter casing).
const isMain = process.argv[1] != null && pathToFileURL(process.argv[1]).href === import.meta.url

if (isMain) {
  const { config, server } = createProxy()
  server.listen(config.port, () => {
    console.error(`[tamp] proxy listening on http://localhost:${config.port}`)
    console.error(`[tamp] upstream: ${config.upstream}`)
    console.error(`[tamp] stages: ${config.stages.join(', ')}`)
  })
}
package/package.json ADDED
@@ -0,0 +1,33 @@
1
+ {
2
+ "name": "@sliday/tamp",
3
+ "files": [
4
+ "index.js",
5
+ "bin/",
6
+ "compress.js",
7
+ "config.js",
8
+ "detect.js",
9
+ "stats.js"
10
+ ],
11
+ "version": "0.1.0",
12
+ "description": "Token compression proxy for Claude Code. 50% fewer tokens, zero behavior change.",
13
+ "type": "module",
14
+ "main": "index.js",
15
+ "bin": {
16
+ "tamp": "./bin/tamp.js"
17
+ },
18
+ "scripts": {
19
+ "start": "node bin/tamp.js",
20
+ "test": "node --test test/*.test.js"
21
+ },
22
+ "keywords": ["claude", "anthropic", "proxy", "compression", "tokens", "llm"],
23
+ "author": "Stas Kulesh <stas@sliday.com>",
24
+ "license": "MIT",
25
+ "repository": {
26
+ "type": "git",
27
+ "url": "https://github.com/sliday/tamp"
28
+ },
29
+ "homepage": "https://github.com/sliday/tamp",
30
+ "dependencies": {
31
+ "@toon-format/toon": "^2.1.0"
32
+ }
33
+ }
package/stats.js ADDED
@@ -0,0 +1,47 @@
1
/**
 * Build a multi-line log message describing the compression results of one
 * /v1/messages request.
 *
 * @param {Array<object>} stats - per-block entries: either
 *   { index, method, originalLen, compressedLen } for compressed blocks or
 *   { index, skipped } for skipped ones.
 * @param {object|null} session - optional session from createSession();
 *   when present, running totals are appended.
 * @returns {string} newline-joined log lines.
 */
export function formatRequestLog(stats, session) {
  // Log prefix unified to [tamp]; the old [toona] prefix was stale branding.
  // (The previous version also declared an unused `skipped` array.)
  const compressed = stats.filter((s) => s.method)
  const lines = [`[tamp] POST /v1/messages — ${stats.length} blocks, ${compressed.length} compressed`]

  for (const s of stats) {
    if (s.skipped) {
      lines.push(`[tamp] block[${s.index}]: skipped (${s.skipped})`)
    } else if (s.method) {
      const pct = (((s.originalLen - s.compressedLen) / s.originalLen) * 100).toFixed(1)
      lines.push(`[tamp] block[${s.index}]: ${s.originalLen}->${s.compressedLen} chars (-${pct}%) [${s.method}]`)
    }
  }

  if (compressed.length > 0) {
    const totalOrig = compressed.reduce((a, s) => a + s.originalLen, 0)
    const totalComp = compressed.reduce((a, s) => a + s.compressedLen, 0)
    const pct = (((totalOrig - totalComp) / totalOrig) * 100).toFixed(1)
    lines.push(`[tamp] total: ${totalOrig}->${totalComp} chars (-${pct}%)`)
  }

  if (session) {
    const { totalSaved, compressionCount } = session.getTotals()
    lines.push(`[tamp] session: ${totalSaved} chars saved across ${compressionCount} compressions`)
  }

  return lines.join('\n')
}
29
+
30
/**
 * Create a per-process stats accumulator for compression savings.
 *
 * @returns {{record: Function, getTotals: Function}} `record(stats)` folds
 *   a request's stats array into the running totals; `getTotals()` returns
 *   `{ totalSaved, compressionCount }`.
 */
export function createSession() {
  // Running totals, private to this closure.
  let totalSaved = 0
  let compressionCount = 0

  const record = (stats) => {
    for (const s of stats) {
      // Only fully-populated compression entries count; skipped blocks (and
      // zero-valued fields, via the original truthiness check) do not.
      if (!s.method || !s.originalLen || !s.compressedLen) continue
      totalSaved += s.originalLen - s.compressedLen
      compressionCount += 1
    }
  }

  const getTotals = () => ({ totalSaved, compressionCount })

  return { record, getTotals }
}