codebot-ai 1.0.0 → 1.0.1

This diff shows the changes between two publicly released versions of this package, as published to a supported registry. The information is provided for informational purposes only and reflects the package contents as they appear in the public registry.
@@ -52,6 +52,9 @@ class OpenAIProvider {
52
52
  const decoder = new TextDecoder();
53
53
  let buffer = '';
54
54
  const toolCalls = new Map();
55
+ // Track <think>...</think> blocks (used by qwen3, deepseek, etc.)
56
+ let insideThink = false;
57
+ let contentBuffer = '';
55
58
  try {
56
59
  while (true) {
57
60
  const { done, value } = await reader.read();
@@ -65,6 +68,11 @@ class OpenAIProvider {
65
68
  if (!trimmed || !trimmed.startsWith('data: '))
66
69
  continue;
67
70
  if (trimmed === 'data: [DONE]') {
71
+ // Flush any remaining content buffer
72
+ if (contentBuffer && !insideThink) {
73
+ yield { type: 'text', text: contentBuffer };
74
+ contentBuffer = '';
75
+ }
68
76
  for (const [, tc] of toolCalls) {
69
77
  yield {
70
78
  type: 'tool_call_end',
@@ -84,7 +92,48 @@ class OpenAIProvider {
84
92
  if (!delta)
85
93
  continue;
86
94
  if (delta.content) {
87
- yield { type: 'text', text: delta.content };
95
+ contentBuffer += delta.content;
96
+ // Process buffer for <think>...</think> tags
97
+ let changed = true;
98
+ while (changed) {
99
+ changed = false;
100
+ if (insideThink) {
101
+ const end = contentBuffer.indexOf('</think>');
102
+ if (end !== -1) {
103
+ // End of think block — discard thinking content, continue
104
+ contentBuffer = contentBuffer.slice(end + 8);
105
+ insideThink = false;
106
+ changed = true;
107
+ }
108
+ // else: still inside think, wait for more data
109
+ }
110
+ else {
111
+ const start = contentBuffer.indexOf('<think>');
112
+ if (start !== -1) {
113
+ // Found <think> — output everything before it, enter think mode
114
+ const before = contentBuffer.slice(0, start);
115
+ if (before)
116
+ yield { type: 'text', text: before };
117
+ contentBuffer = contentBuffer.slice(start + 7);
118
+ insideThink = true;
119
+ changed = true;
120
+ }
121
+ else {
122
+ // No think tag — check if buffer ends with a partial "<think" prefix
123
+ let holdBack = 0;
124
+ for (let len = Math.min(6, contentBuffer.length); len >= 1; len--) {
125
+ if ('<think>'.startsWith(contentBuffer.slice(-len))) {
126
+ holdBack = len;
127
+ break;
128
+ }
129
+ }
130
+ const safe = contentBuffer.slice(0, contentBuffer.length - holdBack);
131
+ contentBuffer = contentBuffer.slice(contentBuffer.length - holdBack);
132
+ if (safe)
133
+ yield { type: 'text', text: safe };
134
+ }
135
+ }
136
+ }
88
137
  }
89
138
  if (delta.tool_calls) {
90
139
  for (const tc of delta.tool_calls) {
package/dist/setup.js CHANGED
@@ -194,16 +194,16 @@ async function runSetup() {
194
194
  const choice = await ask(rl, fmt(`\nSelect [1-${options.length}]: `, 'cyan'));
195
195
  const selected = options[parseInt(choice, 10) - 1] || options[0];
196
196
  // Step 4: Show available models for chosen provider
197
- const providerModels = Object.entries(registry_1.MODEL_REGISTRY)
198
- .filter(([, info]) => {
199
- if (selected.provider === 'openai' && !info.provider)
200
- return true; // local models
201
- return info.provider === selected.provider;
202
- })
203
- .map(([name]) => name);
197
+ // For local servers, use the actual installed models instead of the hardcoded registry
198
+ const matchedServer = localServers.find(s => s.url === selected.baseUrl);
199
+ const providerModels = matchedServer && matchedServer.models.length > 0
200
+ ? matchedServer.models
201
+ : Object.entries(registry_1.MODEL_REGISTRY)
202
+ .filter(([, info]) => info.provider === selected.provider)
203
+ .map(([name]) => name);
204
204
  if (providerModels.length > 1) {
205
- console.log(fmt(`\nAvailable models for ${selected.label}:`, 'bold'));
206
- providerModels.slice(0, 10).forEach((m, i) => {
205
+ console.log(fmt(`\nAvailable models${matchedServer ? ` on ${matchedServer.name}` : ''}:`, 'bold'));
206
+ providerModels.slice(0, 15).forEach((m, i) => {
207
207
  const marker = m === selected.model ? fmt(' (default)', 'green') : '';
208
208
  console.log(` ${fmt(`${i + 1}`, 'cyan')} ${m}${marker}`);
209
209
  });
@@ -214,7 +214,7 @@ async function runSetup() {
214
214
  selected.model = providerModels[modelIdx];
215
215
  }
216
216
  else if (modelChoice.length > 2) {
217
- // Treat as model name
217
+ // Treat as model name typed directly
218
218
  selected.model = modelChoice;
219
219
  }
220
220
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "codebot-ai",
3
- "version": "1.0.0",
3
+ "version": "1.0.1",
4
4
  "description": "Local-first AI coding assistant. Zero dependencies. Works with Ollama, LM Studio, vLLM, Claude, GPT, Gemini, and more.",
5
5
  "main": "dist/index.js",
6
6
  "types": "dist/index.d.ts",