@serii84/vertex-partner-provider 1.0.21 → 1.0.23

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/index.js +48 -29
  2. package/package.json +1 -1
package/index.js CHANGED
@@ -1,6 +1,6 @@
 /**
  * Vertex Partner Provider for OpenCode
- * v1.0.21 - Debug: log raw responses to find issue
+ * v1.0.23 - Fix: whitelist-based cleaning, pass through usage chunks
  */
 
 const DEBUG = process.env.VERTEX_DEBUG === 'true';
@@ -23,44 +23,63 @@ async function getAuthToken(googleAuthOptions) {
 }
 
 function cleanResponse(parsed) {
+  // Build a clean response with only standard OpenAI fields
+  const cleaned = {
+    id: parsed.id,
+    object: parsed.object,
+    created: parsed.created,
+    model: parsed.model,
+  };
+
   if (parsed.choices) {
-    for (const choice of parsed.choices) {
-      delete choice.matched_stop;
-      delete choice.logprobs;
+    cleaned.choices = parsed.choices.map(choice => {
+      const cleanChoice = {
+        index: choice.index,
+      };
 
       // Normalize finish_reason to a string (some models return an object)
-      if (choice.finish_reason && typeof choice.finish_reason === 'object') {
-        // Extract string value from object, common patterns: {type: "stop"}, {reason: "stop"}
-        choice.finish_reason = choice.finish_reason.type
-          || choice.finish_reason.reason
-          || choice.finish_reason.stop_reason
-          || 'stop';
+      if (choice.finish_reason != null) {
+        if (typeof choice.finish_reason === 'object') {
+          cleanChoice.finish_reason = choice.finish_reason.type
+            || choice.finish_reason.reason
+            || choice.finish_reason.stop_reason
+            || 'stop';
+        } else {
+          cleanChoice.finish_reason = choice.finish_reason;
+        }
       }
 
+      // Clean up delta - remove null values and non-standard fields
       if (choice.delta) {
-        if (!choice.delta.content && choice.delta.reasoning_content) {
-          choice.delta.content = choice.delta.reasoning_content;
-        }
-        delete choice.delta.reasoning_content;
+        const cleanDelta = {};
+        if (choice.delta.role) cleanDelta.role = choice.delta.role;
+        if (choice.delta.content) cleanDelta.content = choice.delta.content;
+        else if (choice.delta.reasoning_content) cleanDelta.content = choice.delta.reasoning_content;
+        if (choice.delta.tool_calls) cleanDelta.tool_calls = choice.delta.tool_calls;
+        cleanChoice.delta = cleanDelta;
       }
 
       if (choice.message) {
-        if (!choice.message.content && choice.message.reasoning_content) {
-          choice.message.content = choice.message.reasoning_content;
-        }
-        delete choice.message.reasoning_content;
+        const cleanMessage = { role: choice.message.role };
+        if (choice.message.content) cleanMessage.content = choice.message.content;
+        else if (choice.message.reasoning_content) cleanMessage.content = choice.message.reasoning_content;
+        if (choice.message.tool_calls) cleanMessage.tool_calls = choice.message.tool_calls;
+        cleanChoice.message = cleanMessage;
      }
-    }
+
+      return cleanChoice;
+    });
   }
-
+
+  // Clean usage - only keep standard fields
   if (parsed.usage) {
     const { prompt_tokens, completion_tokens, total_tokens } = parsed.usage;
-    parsed.usage = { prompt_tokens, completion_tokens, total_tokens };
+    if (prompt_tokens != null || completion_tokens != null || total_tokens != null) {
+      cleaned.usage = { prompt_tokens, completion_tokens, total_tokens };
+    }
   }
-
-  delete parsed.metadata;
-
-  return parsed;
+
+  return cleaned;
 }
 
 function transformStream(response) {
@@ -104,13 +123,13 @@ function transformStream(response) {
         console.log('[VERTEX DEBUG] Raw chunk:', JSON.stringify(parsed, null, 2));
       }
 
-      // Skip empty choices (usage-only chunk)
-      if (parsed.choices && parsed.choices.length === 0) {
+      const cleaned = cleanResponse(parsed);
+
+      // Skip chunks that have no useful data after cleaning
+      if ((!cleaned.choices || cleaned.choices.length === 0) && !cleaned.usage) {
        continue;
      }
 
-      const cleaned = cleanResponse(parsed);
-
       if (DEBUG) {
         console.log('[VERTEX DEBUG] Cleaned chunk:', JSON.stringify(cleaned, null, 2));
       }
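
In 1.0.23, cleanResponse builds a fresh object from a whitelist of standard OpenAI fields instead of deleting non-standard ones in place, and transformStream runs the cleaner before its skip check so usage-only chunks pass through instead of being dropped. A minimal sketch of the expected behaviour, assuming the new cleanResponse above; the payload, field values, and model name are illustrative, not taken from the package:

// Illustrative chunk carrying the non-standard fields the whitelist drops
// (matched_stop, logprobs, metadata) and the reasoning_content fallback.
const rawChunk = {
  id: 'chatcmpl-123',
  object: 'chat.completion.chunk',
  created: 1700000000,
  model: 'deepseek-v3',               // hypothetical model id
  choices: [{
    index: 0,
    delta: { reasoning_content: 'thinking...' },
    finish_reason: { type: 'stop' },  // object form, normalized to a string
    matched_stop: 151645,
    logprobs: null,
  }],
  metadata: { request_id: 'abc' },    // not whitelisted, so dropped
};

const cleaned = cleanResponse(rawChunk);
// cleaned.choices[0].delta.content === 'thinking...' (promoted from reasoning_content)
// cleaned.choices[0].finish_reason === 'stop'
// matched_stop, logprobs and metadata do not appear anywhere in `cleaned`

// A usage-only chunk keeps its usage block, so the new transformStream check
// ((!cleaned.choices || cleaned.choices.length === 0) && !cleaned.usage) no longer skips it.
const usageChunk = cleanResponse({
  id: 'chatcmpl-123',
  object: 'chat.completion.chunk',
  created: 1700000000,
  model: 'deepseek-v3',
  choices: [],
  usage: { prompt_tokens: 12, completion_tokens: 34, total_tokens: 46 },
});
// usageChunk.usage => { prompt_tokens: 12, completion_tokens: 34, total_tokens: 46 }

Whitelisting also means any new vendor-specific field the upstream model starts emitting is dropped automatically, without adding another delete call.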
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@serii84/vertex-partner-provider",
-  "version": "1.0.21",
+  "version": "1.0.23",
   "description": "Vertex AI partner models (GLM, Kimi, DeepSeek) for OpenCode",
   "main": "index.js",
   "scripts": {