codeep 1.2.4 → 1.2.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/api/index.js +23 -0
- package/dist/config/providers.js +19 -0
- package/dist/renderer/App.js +5 -1
- package/dist/renderer/components/Help.js +3 -0
- package/dist/renderer/main.js +11 -3
- package/package.json +1 -1
package/dist/api/index.js
CHANGED
|
@@ -272,6 +272,7 @@ async function chatOpenAI(message, history, model, apiKey, onChunk, abortSignal)
|
|
|
272
272
|
model,
|
|
273
273
|
messages,
|
|
274
274
|
stream,
|
|
275
|
+
...(stream ? { stream_options: { include_usage: true } } : {}),
|
|
275
276
|
temperature,
|
|
276
277
|
max_tokens: maxTokens,
|
|
277
278
|
}),
|
|
@@ -323,6 +324,12 @@ async function handleOpenAIStream(body, onChunk) {
|
|
|
323
324
|
chunks.push(content);
|
|
324
325
|
onChunk(content);
|
|
325
326
|
}
|
|
327
|
+
// Capture usage from final chunk (stream_options: include_usage)
|
|
328
|
+
if (parsed.usage) {
|
|
329
|
+
const usage = extractOpenAIUsage(parsed);
|
|
330
|
+
if (usage)
|
|
331
|
+
recordTokenUsage(usage, parsed.model || 'unknown', config.get('provider'));
|
|
332
|
+
}
|
|
326
333
|
}
|
|
327
334
|
catch {
|
|
328
335
|
// Ignore parse errors
|
|
@@ -418,6 +425,9 @@ async function handleAnthropicStream(body, onChunk) {
|
|
|
418
425
|
const decoder = new TextDecoder();
|
|
419
426
|
const chunks = [];
|
|
420
427
|
let buffer = '';
|
|
428
|
+
let inputTokens = 0;
|
|
429
|
+
let outputTokens = 0;
|
|
430
|
+
let streamModel = '';
|
|
421
431
|
while (true) {
|
|
422
432
|
const { done, value } = await reader.read();
|
|
423
433
|
if (done)
|
|
@@ -437,6 +447,15 @@ async function handleAnthropicStream(body, onChunk) {
|
|
|
437
447
|
onChunk(text);
|
|
438
448
|
}
|
|
439
449
|
}
|
|
450
|
+
// message_start contains input_tokens
|
|
451
|
+
if (parsed.type === 'message_start' && parsed.message?.usage) {
|
|
452
|
+
inputTokens = parsed.message.usage.input_tokens || 0;
|
|
453
|
+
streamModel = parsed.message.model || '';
|
|
454
|
+
}
|
|
455
|
+
// message_delta contains output_tokens
|
|
456
|
+
if (parsed.type === 'message_delta' && parsed.usage) {
|
|
457
|
+
outputTokens = parsed.usage.output_tokens || 0;
|
|
458
|
+
}
|
|
440
459
|
}
|
|
441
460
|
catch {
|
|
442
461
|
// Ignore parse errors
|
|
@@ -444,6 +463,10 @@ async function handleAnthropicStream(body, onChunk) {
|
|
|
444
463
|
}
|
|
445
464
|
}
|
|
446
465
|
}
|
|
466
|
+
// Record token usage
|
|
467
|
+
if (inputTokens > 0 || outputTokens > 0) {
|
|
468
|
+
recordTokenUsage({ promptTokens: inputTokens, completionTokens: outputTokens, totalTokens: inputTokens + outputTokens }, streamModel || 'unknown', config.get('provider'));
|
|
469
|
+
}
|
|
447
470
|
// Strip <think> tags from MiniMax responses
|
|
448
471
|
return stripThinkTags(chunks.join(''));
|
|
449
472
|
}
|
package/dist/config/providers.js
CHANGED
|
@@ -49,6 +49,25 @@ export const PROVIDERS = {
|
|
|
49
49
|
envKey: 'MINIMAX_API_KEY',
|
|
50
50
|
subscribeUrl: 'https://platform.minimax.io/subscribe/coding-plan?code=2lWvoWUhrp&source=link',
|
|
51
51
|
},
|
|
52
|
+
'anthropic': {
|
|
53
|
+
name: 'Anthropic',
|
|
54
|
+
description: 'Claude AI models',
|
|
55
|
+
protocols: {
|
|
56
|
+
anthropic: {
|
|
57
|
+
baseUrl: 'https://api.anthropic.com',
|
|
58
|
+
authHeader: 'x-api-key',
|
|
59
|
+
supportsNativeTools: true,
|
|
60
|
+
},
|
|
61
|
+
},
|
|
62
|
+
models: [
|
|
63
|
+
{ id: 'claude-sonnet-4-5-20250929', name: 'Claude Sonnet 4.5', description: 'Best balance of speed and intelligence' },
|
|
64
|
+
{ id: 'claude-haiku-4-5-20251001', name: 'Claude Haiku 4.5', description: 'Fastest and most affordable' },
|
|
65
|
+
{ id: 'claude-opus-4-6', name: 'Claude Opus 4.6', description: 'Most capable model' },
|
|
66
|
+
],
|
|
67
|
+
defaultModel: 'claude-sonnet-4-5-20250929',
|
|
68
|
+
defaultProtocol: 'anthropic',
|
|
69
|
+
envKey: 'ANTHROPIC_API_KEY',
|
|
70
|
+
},
|
|
52
71
|
};
|
|
53
72
|
export function getProvider(id) {
|
|
54
73
|
return PROVIDERS[id] || null;
|
package/dist/renderer/App.js
CHANGED
|
@@ -2376,7 +2376,11 @@ export class App {
|
|
|
2376
2376
|
leftText = ` ${this.notification}`;
|
|
2377
2377
|
}
|
|
2378
2378
|
else {
|
|
2379
|
-
|
|
2379
|
+
const stats = this.options.getStatus().tokenStats;
|
|
2380
|
+
const tokenInfo = stats && stats.totalTokens > 0
|
|
2381
|
+
? ` | ${stats.totalTokens < 1000 ? stats.totalTokens : (stats.totalTokens / 1000).toFixed(1) + 'K'} tokens`
|
|
2382
|
+
: '';
|
|
2383
|
+
leftText = ` ${this.messages.length} messages${tokenInfo}`;
|
|
2380
2384
|
}
|
|
2381
2385
|
if (this.isStreaming) {
|
|
2382
2386
|
rightText = 'Streaming... (Esc to cancel)';
|
|
package/dist/renderer/components/Help.js
CHANGED
|
@@ -51,6 +51,9 @@ export const helpCategories = [
|
|
|
51
51
|
{ key: '/git-commit <msg>', description: 'Commit with message' },
|
|
52
52
|
{ key: '/push (/p)', description: 'Git push' },
|
|
53
53
|
{ key: '/pull', description: 'Git pull' },
|
|
54
|
+
{ key: '/amend', description: 'Amend last commit' },
|
|
55
|
+
{ key: '/branch', description: 'Create/manage branches' },
|
|
56
|
+
{ key: '/stash', description: 'Stash changes' },
|
|
54
57
|
{ key: '/scan', description: 'Scan project structure' },
|
|
55
58
|
{ key: '/review', description: 'Code review' },
|
|
56
59
|
],
|
package/dist/renderer/main.js
CHANGED
|
@@ -14,7 +14,7 @@ import { runAgent } from '../utils/agent.js';
|
|
|
14
14
|
import { config, loadApiKey, loadAllApiKeys, getCurrentProvider, getModelsForCurrentProvider, PROTOCOLS, LANGUAGES, setProvider, setApiKey, clearApiKey, getApiKey, autoSaveSession, saveSession, startNewSession, getCurrentSessionId, loadSession, listSessionsWithInfo, deleteSession, renameSession, hasReadPermission, hasWritePermission, setProjectPermission, initializeAsProject, isManuallyInitializedProject, } from '../config/index.js';
|
|
15
15
|
import { isProjectDirectory, getProjectContext } from '../utils/project.js';
|
|
16
16
|
import { getCurrentVersion } from '../utils/update.js';
|
|
17
|
-
import { getProviderList } from '../config/providers.js';
|
|
17
|
+
import { getProviderList, getProvider } from '../config/providers.js';
|
|
18
18
|
import { getSessionStats } from '../utils/tokenTracker.js';
|
|
19
19
|
// State
|
|
20
20
|
let projectPath = process.cwd();
|
|
@@ -40,7 +40,7 @@ function getStatus() {
|
|
|
40
40
|
projectPath,
|
|
41
41
|
hasWriteAccess,
|
|
42
42
|
sessionId,
|
|
43
|
-
messageCount:
|
|
43
|
+
messageCount: app ? app.getMessages().length : 0,
|
|
44
44
|
tokenStats: {
|
|
45
45
|
totalTokens: stats.totalTokens,
|
|
46
46
|
promptTokens: stats.totalPromptTokens,
|
|
@@ -899,10 +899,18 @@ function handleCommand(command, args) {
|
|
|
899
899
|
}
|
|
900
900
|
// Protocol and language
|
|
901
901
|
case 'protocol': {
|
|
902
|
-
const protocols = Object.entries(PROTOCOLS).map(([key, name]) => ({
|
|
902
|
+
const currentProvider = getCurrentProvider();
|
|
903
|
+
const providerConfig = getProvider(currentProvider.id);
|
|
904
|
+
const protocols = Object.entries(PROTOCOLS)
|
|
905
|
+
.filter(([key]) => providerConfig?.protocols[key])
|
|
906
|
+
.map(([key, name]) => ({
|
|
903
907
|
key,
|
|
904
908
|
label: name,
|
|
905
909
|
}));
|
|
910
|
+
if (protocols.length <= 1) {
|
|
911
|
+
app.notify(`${currentProvider.name} only supports ${protocols[0]?.label || 'one'} protocol`);
|
|
912
|
+
break;
|
|
913
|
+
}
|
|
906
914
|
const currentProtocol = config.get('protocol') || 'openai';
|
|
907
915
|
app.showSelect('Select Protocol', protocols, currentProtocol, (item) => {
|
|
908
916
|
config.set('protocol', item.key);
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "codeep",
|
|
3
|
-
"version": "1.2.4",
|
|
3
|
+
"version": "1.2.6",
|
|
4
4
|
"description": "AI-powered coding assistant built for the terminal. Multiple LLM providers, project-aware context, and a seamless development workflow.",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"main": "dist/index.js",
|