@serii84/vertex-partner-provider 1.0.22 → 1.0.23

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/index.js +43 -36
  2. package/package.json +1 -1
package/index.js CHANGED
@@ -1,6 +1,6 @@
 /**
  * Vertex Partner Provider for OpenCode
- * v1.0.22 - Fix: remove null values from delta, remove empty usage
+ * v1.0.23 - Fix: whitelist-based cleaning, pass through usage chunks
  */
 
 const DEBUG = process.env.VERTEX_DEBUG === 'true';
@@ -23,56 +23,63 @@ async function getAuthToken(googleAuthOptions) {
 }
 
 function cleanResponse(parsed) {
+  // Build a clean response with only standard OpenAI fields
+  const cleaned = {
+    id: parsed.id,
+    object: parsed.object,
+    created: parsed.created,
+    model: parsed.model,
+  };
+
   if (parsed.choices) {
-    for (const choice of parsed.choices) {
-      delete choice.matched_stop;
-      delete choice.logprobs;
+    cleaned.choices = parsed.choices.map(choice => {
+      const cleanChoice = {
+        index: choice.index,
+      };
 
       // Normalize finish_reason to a string (some models return an object)
-      if (choice.finish_reason && typeof choice.finish_reason === 'object') {
-        choice.finish_reason = choice.finish_reason.type
-          || choice.finish_reason.reason
-          || choice.finish_reason.stop_reason
-          || 'stop';
+      if (choice.finish_reason != null) {
+        if (typeof choice.finish_reason === 'object') {
+          cleanChoice.finish_reason = choice.finish_reason.type
+            || choice.finish_reason.reason
+            || choice.finish_reason.stop_reason
+            || 'stop';
+        } else {
+          cleanChoice.finish_reason = choice.finish_reason;
+        }
       }
 
       // Clean up delta - remove null values and non-standard fields
      if (choice.delta) {
-        if (!choice.delta.content && choice.delta.reasoning_content) {
-          choice.delta.content = choice.delta.reasoning_content;
-        }
-        delete choice.delta.reasoning_content;
-
-        // Remove null values from delta - OpenAI format uses empty object, not nulls
-        for (const key of Object.keys(choice.delta)) {
-          if (choice.delta[key] === null) {
-            delete choice.delta[key];
-          }
-        }
+        const cleanDelta = {};
+        if (choice.delta.role) cleanDelta.role = choice.delta.role;
+        if (choice.delta.content) cleanDelta.content = choice.delta.content;
+        else if (choice.delta.reasoning_content) cleanDelta.content = choice.delta.reasoning_content;
+        if (choice.delta.tool_calls) cleanDelta.tool_calls = choice.delta.tool_calls;
+        cleanChoice.delta = cleanDelta;
       }
 
       if (choice.message) {
-        if (!choice.message.content && choice.message.reasoning_content) {
-          choice.message.content = choice.message.reasoning_content;
-        }
-        delete choice.message.reasoning_content;
+        const cleanMessage = { role: choice.message.role };
+        if (choice.message.content) cleanMessage.content = choice.message.content;
+        else if (choice.message.reasoning_content) cleanMessage.content = choice.message.reasoning_content;
+        if (choice.message.tool_calls) cleanMessage.tool_calls = choice.message.tool_calls;
+        cleanChoice.message = cleanMessage;
      }
-    }
+
+      return cleanChoice;
+    });
   }
 
-  // Clean usage - only keep standard fields, remove if empty/invalid
+  // Clean usage - only keep standard fields
   if (parsed.usage) {
     const { prompt_tokens, completion_tokens, total_tokens } = parsed.usage;
     if (prompt_tokens != null || completion_tokens != null || total_tokens != null) {
-      parsed.usage = { prompt_tokens, completion_tokens, total_tokens };
-    } else {
-      delete parsed.usage;
+      cleaned.usage = { prompt_tokens, completion_tokens, total_tokens };
     }
   }
 
-  delete parsed.metadata;
-
-  return parsed;
+  return cleaned;
 }
 
 function transformStream(response) {
@@ -116,13 +123,13 @@ function transformStream(response) {
       console.log('[VERTEX DEBUG] Raw chunk:', JSON.stringify(parsed, null, 2));
     }
 
-    // Skip empty choices (usage-only chunk)
-    if (parsed.choices && parsed.choices.length === 0) {
+    const cleaned = cleanResponse(parsed);
+
+    // Skip chunks that have no useful data after cleaning
+    if ((!cleaned.choices || cleaned.choices.length === 0) && !cleaned.usage) {
       continue;
     }
 
-    const cleaned = cleanResponse(parsed);
-
     if (DEBUG) {
      console.log('[VERTEX DEBUG] Cleaned chunk:', JSON.stringify(cleaned, null, 2));
     }
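For context, a minimal sketch (not part of the published package) of what the new whitelist-based cleanResponse would produce for a hypothetical raw streaming chunk; the field values below are illustrative, not taken from a real partner-model response. Because the cleaned object keeps its usage field, a usage-only chunk now survives the skip check in transformStream instead of being dropped.

// Hypothetical raw chunk with non-standard fields and null padding (illustrative only)
const rawChunk = {
  id: 'chatcmpl-123',
  object: 'chat.completion.chunk',
  created: 1730000000,
  model: 'glm-4',
  metadata: { request_id: 'abc' },   // not whitelisted, dropped
  choices: [{
    index: 0,
    matched_stop: 0,                 // not whitelisted, dropped
    logprobs: null,                  // not whitelisted, dropped
    finish_reason: null,             // null, so omitted entirely
    delta: { content: null, reasoning_content: 'thinking...' },
  }],
  usage: { prompt_tokens: 12, completion_tokens: 3, total_tokens: 15 },
};

// cleanResponse(rawChunk) in 1.0.23 would yield:
// {
//   id: 'chatcmpl-123',
//   object: 'chat.completion.chunk',
//   created: 1730000000,
//   model: 'glm-4',
//   choices: [{ index: 0, delta: { content: 'thinking...' } }],
//   usage: { prompt_tokens: 12, completion_tokens: 3, total_tokens: 15 }
// }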
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@serii84/vertex-partner-provider",
-  "version": "1.0.22",
+  "version": "1.0.23",
   "description": "Vertex AI partner models (GLM, Kimi, DeepSeek) for OpenCode",
   "main": "index.js",
   "scripts": {