@memberjunction/ai-gemini 2.39.0 → 2.41.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +3 -10
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +64 -25
- package/dist/index.js.map +1 -1
- package/package.json +3 -3
package/dist/index.d.ts
CHANGED
@@ -1,12 +1,5 @@
-import { GoogleGenAI } from "@google/genai";
-interface TextPart {
-    text: string;
-}
-interface Content {
-    role: 'user' | 'model';
-    parts: TextPart[];
-}
-import { BaseLLM, ChatMessage, ChatParams, ChatResult, SummarizeParams, SummarizeResult } from "@memberjunction/ai";
+import { GoogleGenAI, Content, Part } from "@google/genai";
+import { BaseLLM, ChatMessage, ChatParams, ChatResult, SummarizeParams, SummarizeResult, ChatMessageContent } from "@memberjunction/ai";
 export declare class GeminiLLM extends BaseLLM {
     private _gemini;
     constructor(apiKey: string);
@@ -41,8 +34,8 @@ export declare class GeminiLLM extends BaseLLM {
     protected finalizeStreamingResponse(accumulatedContent: string | null | undefined, lastChunk: any | null | undefined, usage: any | null | undefined): ChatResult;
     SummarizeText(params: SummarizeParams): Promise<SummarizeResult>;
     ClassifyText(params: any): Promise<any>;
+    static MapMJContentToGeminiParts(content: ChatMessageContent): Array<Part>;
     static MapMJMessageToGeminiHistoryEntry(message: ChatMessage): Content;
 }
 export declare function LoadGeminiLLM(): void;
-export {};
 //# sourceMappingURL=index.d.ts.map
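The declaration changes above replace the package-local TextPart/Content interfaces with the Content and Part types from @google/genai and expose a second public static helper, MapMJContentToGeminiParts, next to the existing MapMJMessageToGeminiHistoryEntry. A minimal sketch of how the two statics could fit together when building a Gemini chat history from MemberJunction messages; the message literals and the slice-off-the-last-message pattern are illustrative assumptions, not code from the package:

    // Sketch only: convert all but the last ChatMessage into Gemini history entries,
    // then turn the final message's content into Part[] for sending.
    import { Content, Part } from "@google/genai";
    import { ChatMessage } from "@memberjunction/ai";
    import { GeminiLLM } from "@memberjunction/ai-gemini";

    const messages: ChatMessage[] = [
        { role: 'user', content: 'Here is a document to summarize...' },
        { role: 'assistant', content: 'Understood. Please paste the document.' },
        { role: 'user', content: 'See the attached text.' }
    ];

    // Every prior message becomes a Content entry; non-assistant roles map to 'user'.
    const history: Content[] = messages
        .slice(0, messages.length - 1)
        .map(m => GeminiLLM.MapMJMessageToGeminiHistoryEntry(m));

    // The latest message is converted to Part[] before being sent to the chat session.
    const latestParts: Part[] = GeminiLLM.MapMJContentToGeminiParts(
        messages[messages.length - 1].content
    );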
package/dist/index.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,WAAW,EAAE,
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,WAAW,EAAE,OAAO,EAAE,IAAI,EAAO,MAAM,eAAe,CAAC;AAGhE,OAAO,EAAE,OAAO,EAAE,WAAW,EAAE,UAAU,EAAE,UAAU,EAAE,eAAe,EAAE,eAAe,EAA0B,kBAAkB,EAAc,MAAM,oBAAoB,CAAC;AAG5K,qBACa,SAAU,SAAQ,OAAO;IAClC,OAAO,CAAC,OAAO,CAAc;gBAEjB,MAAM,EAAE,MAAM;IAK1B;;OAEG;IACH,IAAW,YAAY,IAAI,WAAW,CAErC;IAED;;OAEG;IACH,IAAoB,iBAAiB,IAAI,OAAO,CAE/C;IAED,SAAS,CAAC,oBAAoB,CAAC,QAAQ,EAAE,OAAO,EAAE,GAAG,OAAO,EAAE;IAmB9D;;OAEG;cACa,0BAA0B,CAAC,MAAM,EAAE,UAAU,GAAG,OAAO,CAAC,UAAU,CAAC;IA6EnF;;OAEG;cACa,sBAAsB,CAAC,MAAM,EAAE,UAAU,GAAG,OAAO,CAAC,GAAG,CAAC;IAyCxE;;OAEG;IACH,SAAS,CAAC,qBAAqB,CAAC,KAAK,EAAE,GAAG,GAAG;QACzC,OAAO,EAAE,MAAM,CAAC;QAChB,YAAY,CAAC,EAAE,MAAM,CAAC;QACtB,KAAK,CAAC,EAAE,GAAG,CAAC;KACf;IAsBD;;OAEG;IACH,SAAS,CAAC,yBAAyB,CAC/B,kBAAkB,EAAE,MAAM,GAAG,IAAI,GAAG,SAAS,EAC7C,SAAS,EAAE,GAAG,GAAG,IAAI,GAAG,SAAS,EACjC,KAAK,EAAE,GAAG,GAAG,IAAI,GAAG,SAAS,GAC9B,UAAU;IA4Bb,aAAa,CAAC,MAAM,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;IAGhE,YAAY,CAAC,MAAM,EAAE,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC;WAIzB,yBAAyB,CAAC,OAAO,EAAE,kBAAkB,GAAG,KAAK,CAAC,IAAI,CAAC;WAqCnE,gCAAgC,CAAC,OAAO,EAAE,WAAW,GAAG,OAAO;CAMhF;AAGD,wBAAgB,aAAa,SAE5B"}
package/dist/index.js
CHANGED
@@ -31,15 +31,15 @@ let GeminiLLM = GeminiLLM_1 = class GeminiLLM extends ai_1.BaseLLM {
         return true;
     }
     geminiMessageSpacing(messages) {
-        //
-        //
+        // This method makes sure that we alternate messages between user and model
+        // If we find two messages in a row with the same role, we insert a message
+        // with the opposite role between them with just "OK"
         const result = [];
         let lastRole = "model";
         for (let i = 0; i < messages.length; i++) {
             if (messages[i].role === lastRole) {
                 result.push({
-                    role: "model", // we are using the ChatMessage type from the MJ package
-                    // later on the role will be converted to "model" in the MapMJMessageToGeminiHistoryEntry method
+                    role: "model", // we are using the ChatMessage type from the MJ package
                     parts: [{ text: "OK" }]
                 });
             }
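The new comments describe the spacing rule: Gemini expects the chat history to alternate strictly between user and model turns, so when two consecutive entries share a role a short "OK" filler is wedged between them. A condensed, standalone restatement of the method for readability; the loop tail is reconstructed from the surrounding context, so treat it as a sketch of the behavior rather than the exact dist output:

    // Walk the history; whenever two consecutive entries share a role, insert a
    // filler entry (role "model", text "OK") before appending the current one.
    type HistoryEntry = { role: string; parts: { text: string }[] };

    function spaceGeminiMessages(messages: HistoryEntry[]): HistoryEntry[] {
        const result: HistoryEntry[] = [];
        let lastRole = "model";
        for (let i = 0; i < messages.length; i++) {
            if (messages[i].role === lastRole) {
                result.push({ role: "model", parts: [{ text: "OK" }] });
            }
            result.push(messages[i]);
            lastRole = messages[i].role;
        }
        return result;
    }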
@@ -64,6 +64,15 @@ let GeminiLLM = GeminiLLM_1 = class GeminiLLM extends ai_1.BaseLLM {
                 temperature: params.temperature || 0.5,
                 responseType: params.responseFormat,
             };
+            // Add generationConfig with reasoningMode if effortLevel is provided
+            if (params.effortLevel) {
+                // Gemini has generationConfig.reasoningMode which can be set to 'full' for higher quality
+                // reasoning but at increased cost and latency
+                modelOptions.generationConfig = {
+                    ...(modelOptions.generationConfig || {}),
+                    reasoningMode: 'full'
+                };
+            }
             // Use the new API structure
             const chat = this.GeminiClient.chats.create({
                 model: modelName,
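Both the non-streaming and streaming paths now check ChatParams.effortLevel: when any value is set, the provider merges reasoningMode: 'full' into generationConfig before creating the chat session, trading latency and cost for higher-quality reasoning per the comment. A hedged usage sketch; the ChatCompletion entry point inherited from BaseLLM and the model name are assumptions for illustration, and only the presence of effortLevel matters to this check:

    import { GeminiLLM } from "@memberjunction/ai-gemini";

    async function askWithHighEffort() {
        const llm = new GeminiLLM(process.env.GEMINI_API_KEY ?? "");
        // Assumed public chat entry point; any truthy effortLevel value triggers
        // the generationConfig.reasoningMode = 'full' merge shown in the diff.
        return llm.ChatCompletion({
            model: "gemini-1.5-pro", // hypothetical model name
            messages: [{ role: "user", content: "Explain the change in one paragraph." }],
            temperature: 0.5,
            effortLevel: "high"
        });
    }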
@@ -72,7 +81,7 @@ let GeminiLLM = GeminiLLM_1 = class GeminiLLM extends ai_1.BaseLLM {
             // Send the latest message
             const latestMessage = params.messages[params.messages.length - 1].content;
             const result = await chat.sendMessage({
-                message: latestMessage,
+                message: GeminiLLM_1.MapMJContentToGeminiParts(latestMessage),
                 ...modelOptions
             });
             const responseContent = result.candidates?.[0]?.content?.parts?.find(part => part.text)?.text || '';
@@ -89,11 +98,7 @@ let GeminiLLM = GeminiLLM_1 = class GeminiLLM extends ai_1.BaseLLM {
                         finish_reason: "completed",
                         index: 0
                     }],
-                    usage: {
-                        totalTokens: 0,
-                        promptTokens: 0,
-                        completionTokens: 0 // to do map this from google
-                    }
+                    usage: new ai_1.ModelUsage(0, 0) // Gemini doesn't provide detailed token usage
                 },
                 errorMessage: "",
                 exception: null,
@@ -108,11 +113,7 @@ let GeminiLLM = GeminiLLM_1 = class GeminiLLM extends ai_1.BaseLLM {
                 timeElapsed: 0,
                 data: {
                     choices: [],
-                    usage: {
-                        totalTokens: 0,
-                        promptTokens: 0,
-                        completionTokens: 0
-                    }
+                    usage: new ai_1.ModelUsage(0, 0) // Gemini doesn't provide detailed token usage
                 },
                 errorMessage: e.message,
                 exception: e
@@ -132,6 +133,15 @@ let GeminiLLM = GeminiLLM_1 = class GeminiLLM extends ai_1.BaseLLM {
             temperature: params.temperature || 0.5,
             responseType: params.responseFormat,
         };
+        // Add generationConfig with reasoningMode if effortLevel is provided
+        if (params.effortLevel) {
+            // Gemini has generationConfig.reasoningMode which can be set to 'full' for higher quality
+            // reasoning but at increased cost and latency
+            modelOptions.generationConfig = {
+                ...(modelOptions.generationConfig || {}),
+                reasoningMode: 'full'
+            };
+        }
         // Use the new API structure
         const chat = this.GeminiClient.chats.create({
             model: modelName,
@@ -140,7 +150,7 @@ let GeminiLLM = GeminiLLM_1 = class GeminiLLM extends ai_1.BaseLLM {
         const latestMessage = params.messages[params.messages.length - 1].content;
         // Send message with streaming
         const streamResult = await chat.sendMessageStream({
-            message: latestMessage,
+            message: GeminiLLM_1.MapMJContentToGeminiParts(latestMessage),
             ...modelOptions
         });
         // Return the stream for the for-await loop to work
@@ -187,11 +197,7 @@ let GeminiLLM = GeminiLLM_1 = class GeminiLLM extends ai_1.BaseLLM {
                 finish_reason: 'stop',
                 index: 0
             }],
-            usage: {
-                promptTokens: 0,
-                completionTokens: 0,
-                totalTokens: 0
-            }
+            usage: new ai_1.ModelUsage(0, 0) // Gemini doesn't provide detailed token usage
         };
         result.statusText = 'success';
         result.errorMessage = null;
@@ -204,13 +210,46 @@ let GeminiLLM = GeminiLLM_1 = class GeminiLLM extends ai_1.BaseLLM {
     ClassifyText(params) {
         throw new Error("Method not implemented.");
     }
+    static MapMJContentToGeminiParts(content) {
+        const parts = [];
+        if (Array.isArray(content)) {
+            for (const part of content) {
+                if (part.type === 'text') {
+                    parts.push({ text: part.content });
+                }
+                else {
+                    // use the inlineData property which expects a Blob property which consists of data and mimeType
+                    const blob = {
+                        data: part.content
+                    };
+                    switch (part.type) {
+                        case 'image_url':
+                            blob.mimeType = 'image/jpeg';
+                            break;
+                        case 'audio_url':
+                            blob.mimeType = 'audio/mpeg';
+                            break;
+                        case 'video_url':
+                            blob.mimeType = 'video/mp4';
+                            break;
+                        case 'file_url':
+                            blob.mimeType = 'application/octet-stream';
+                            break;
+                    }
+                    parts.push({ inlineData: blob });
+                }
+            }
+        }
+        else {
+            // we know that message.content is a string
+            parts.push({ text: content });
+        }
+        return parts;
+    }
     static MapMJMessageToGeminiHistoryEntry(message) {
-        const textPart = {
-            text: message.content
-        };
         return {
             role: message.role === 'assistant' ? 'model' : 'user', // google calls all messages other than the replies from the model 'user' which would include the system prompt
-            parts: [textPart]
+            parts: GeminiLLM_1.MapMJContentToGeminiParts(message.content)
         };
     }
 };
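The new MapMJContentToGeminiParts helper accepts either a plain string or an array of typed content blocks: text blocks become { text } parts, while image_url, audio_url, video_url and file_url blocks are wrapped as inlineData blobs with a default mimeType (image/jpeg, audio/mpeg, video/mp4, application/octet-stream). A small illustration of the mapping; the block shapes follow the fields visible in the diff, and the base64 payload is made-up sample data:

    import { Part } from "@google/genai";
    import { GeminiLLM } from "@memberjunction/ai-gemini";

    // A plain string maps to a single text part.
    const simple: Part[] = GeminiLLM.MapMJContentToGeminiParts("Hello Gemini");
    // -> [{ text: "Hello Gemini" }]

    // An array of typed blocks maps text to { text } and binary blocks to inlineData.
    const mixed: Part[] = GeminiLLM.MapMJContentToGeminiParts([
        { type: 'text', content: 'Describe this picture.' },
        { type: 'image_url', content: '...base64 image bytes...' } // hypothetical payload
    ]);
    // -> [{ text: 'Describe this picture.' },
    //     { inlineData: { data: '...base64 image bytes...', mimeType: 'image/jpeg' } }]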
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;;;;;;;;AAEA,uBAAuB;AACvB,
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;;;;;;;;AAEA,uBAAuB;AACvB,yCAAgE;AAEhE,WAAW;AACX,2CAA4K;AAC5K,mDAAuD;AAGhD,IAAM,SAAS,iBAAf,MAAM,SAAU,SAAQ,YAAO;IAGlC,YAAY,MAAc;QACtB,KAAK,CAAC,MAAM,CAAC,CAAC;QACd,IAAI,CAAC,OAAO,GAAG,IAAI,mBAAW,CAAC,EAAE,MAAM,EAAE,CAAC,CAAC;IAC/C,CAAC;IAED;;OAEG;IACH,IAAW,YAAY;QACnB,OAAO,IAAI,CAAC,OAAO,CAAC;IACxB,CAAC;IAED;;OAEG;IACH,IAAoB,iBAAiB;QACjC,OAAO,IAAI,CAAC;IAChB,CAAC;IAES,oBAAoB,CAAC,QAAmB;QAC9C,2EAA2E;QAC3E,4EAA4E;QAC5E,qDAAqD;QACrD,MAAM,MAAM,GAAc,EAAE,CAAC;QAC7B,IAAI,QAAQ,GAAG,OAAO,CAAC;QACvB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;YACvC,IAAI,QAAQ,CAAC,CAAC,CAAC,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAChC,MAAM,CAAC,IAAI,CAAC;oBACR,IAAI,EAAE,OAAO,EAAE,wDAAwD;oBACvE,KAAK,EAAE,CAAC,EAAC,IAAI,EAAE,IAAI,EAAC,CAAC;iBACxB,CAAC,CAAC;YACP,CAAC;YACD,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;YACzB,QAAQ,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;QAChC,CAAC;QACD,OAAO,MAAM,CAAC;IAClB,CAAC;IAED;;OAEG;IACO,KAAK,CAAC,0BAA0B,CAAC,MAAkB;QACzD,IAAI,CAAC;YACD,gDAAgD;YAChD,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;YAC7B,MAAM,SAAS,GAAG,MAAM,CAAC,KAAK,IAAI,YAAY,CAAC;YAE/C,MAAM,kBAAkB,GAAG,MAAM,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,EAAE,MAAM,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;YAChF,MAAM,iBAAiB,GAAG,kBAAkB,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,WAAS,CAAC,gCAAgC,CAAC,CAAC,CAAC,CAAC,CAAC;YACrG,MAAM,YAAY,GAAG,IAAI,CAAC,oBAAoB,CAAC,iBAAiB,CAAC,CAAC;YAElE,iCAAiC;YACjC,MAAM,YAAY,GAAwB;gBACtC,WAAW,EAAE,MAAM,CAAC,WAAW,IAAI,GAAG;gBACtC,YAAY,EAAE,MAAM,CAAC,cAAc;aACtC,CAAC;YAEF,qEAAqE;YACrE,IAAI,MAAM,CAAC,WAAW,EAAE,CAAC;gBACrB,0FAA0F;gBAC1F,8CAA8C;gBAC9C,YAAY,CAAC,gBAAgB,GAAG;oBAC5B,GAAG,CAAC,YAAY,CAAC,gBAAgB,IAAI,EAAE,CAAC;oBACxC,aAAa,EAAE,MAAM;iBACxB,CAAC;YACN,CAAC;YAED,4BAA4B;YAC5B,MAAM,IAAI,GAAG,IAAI,CAAC,YAAY,CAAC,KAAK,CAAC,MAAM,CAAC;gBACxC,KAAK,EAAE,SAAS;gBAChB,OAAO,EAAE,YAAY;aACxB,CAAC,CAAC;YAEH,0BAA0B;YAC1B,MAAM,aAAa,GAAG,MAAM,CAAC,QAAQ,CAAC,MAAM,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,OAAO,CAAC;YAC1E,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC;gBAClC,OAAO,EAAE,WAAS,CAAC,yBAAyB,CAAC,aAAa,CAAC;gBAC3D,GAAG,YAAY;aAClB,CAAC,CAAC;YAEH,MAAM,eAAe,GAAG,MAAM,CAAC,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,IAAI,IAAI,EAAE,CAAC;YAEpG,MAAM,OAAO,GAAG,IAAI,IAAI,EAAE,CAAC;YAC3B,OAAO;gBACH,OAAO,EAAE,IAAI;gBACb,UAAU,EAAE,IAAI;gBAChB,SAAS,EAAE,SAAS;gBACpB,OAAO,EAAE,OAAO;gBAChB,WAAW,EAAE,OAAO,CAAC,OAAO,EAAE,GAAG,SAAS,CAAC,OAAO,EAAE;gBACpD,IAAI,EAAE;oBACF,OAAO,EAAE,CAAC;4BACN,OAAO,EAAE,EAAE,IAAI,EAAE,WAAW,EAAE,OAAO,EAAE,eAAe,EAAE;4BACxD,aAAa,EAAE,WAAW;4BAC1B,KAAK,EAAE,CAAC;yBACX,CAAC;oBACF,KAAK,EAAE,IAAI,eAAU,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,8CAA8C;iBAC7E;gBACD,YAAY,EAAE,EAAE;gBAChB,SAAS,EAAE,IAAI;aAClB,CAAA;QACL,CAAC;QACD,OAAO,CAAC,EAAE,CAAC;YACP,OAAO;gBACH,OAAO,EAAE,KAAK;gBACd,UAAU,EAAE,CAAC,IAAI,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,OAAO;gBAChD,SAAS,EAAE,IAAI,IAAI,EAAE;gBACrB,OAAO,EAAE,IAAI,IAAI,EAAE;gBACnB,WAAW,EAAE,CAAC;gBACd,IAAI,EAAE;oBACF,OAAO,EAAE,EAAE;oBACX,KAAK,EAAE,IAAI,eAAU,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,8CAA8C;iBAC7E;gBACD,YAAY,EAAE,CAAC,CAAC,OAAO;gBACvB,SAAS,EAAE,CAAC;aACf,CAAA;QACL,CAAC;IACL,CAAC;IAED;;OAEG;IACO,KAAK,CAAC,sBAAsB,CAAC,MAAkB;QACrD,MAAM,SAAS,GAAG,MAAM,CAAC,KAAK,IAAI,YAAY,CAAC;QAE/C,MAAM,kBAAkB,GAAG,MAAM,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,EAAE,MAAM,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;QAChF,MAAM,iBAAiB,GAAG,kBAAkB,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,WAAS,CAAC,gCAAgC,CAAC,CAAC,CAAC,CAAC,CAAC;QACrG,MAAM,YAAY,GAAG,IAAI,CAA
C,oBAAoB,CAAC,iBAAiB,CAAC,CAAC;QAElE,iCAAiC;QACjC,MAAM,YAAY,GAAwB;YACtC,WAAW,EAAE,MAAM,CAAC,WAAW,IAAI,GAAG;YACtC,YAAY,EAAE,MAAM,CAAC,cAAc;SACtC,CAAC;QAEF,qEAAqE;QACrE,IAAI,MAAM,CAAC,WAAW,EAAE,CAAC;YACrB,0FAA0F;YAC1F,8CAA8C;YAC9C,YAAY,CAAC,gBAAgB,GAAG;gBAC5B,GAAG,CAAC,YAAY,CAAC,gBAAgB,IAAI,EAAE,CAAC;gBACxC,aAAa,EAAE,MAAM;aACxB,CAAC;QACN,CAAC;QAED,4BAA4B;QAC5B,MAAM,IAAI,GAAG,IAAI,CAAC,YAAY,CAAC,KAAK,CAAC,MAAM,CAAC;YACxC,KAAK,EAAE,SAAS;YAChB,OAAO,EAAE,YAAY;SACxB,CAAC,CAAC;QAEH,MAAM,aAAa,GAAG,MAAM,CAAC,QAAQ,CAAC,MAAM,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,OAAO,CAAC;QAE1E,8BAA8B;QAC9B,MAAM,YAAY,GAAG,MAAM,IAAI,CAAC,iBAAiB,CAAC;YAC9C,OAAO,EAAE,WAAS,CAAC,yBAAyB,CAAC,aAAa,CAAC;YAC3D,GAAG,YAAY;SAClB,CAAC,CAAC;QAEH,mDAAmD;QACnD,OAAO,YAAY,CAAC;IACxB,CAAC;IAED;;OAEG;IACO,qBAAqB,CAAC,KAAU;QAKtC,+CAA+C;QAC/C,IAAI,OAAO,GAAG,EAAE,CAAC;QACjB,IAAI,KAAK,CAAC,UAAU;YAChB,KAAK,CAAC,UAAU,CAAC,CAAC,CAAC;YACnB,KAAK,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,OAAO;YAC3B,KAAK,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC;YAC9B,KAAK,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC;YAEvC,qBAAqB;YACrB,MAAM,QAAQ,GAAG,KAAK,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,IAAS,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;YACrF,OAAO,GAAG,QAAQ,EAAE,IAAI,IAAI,EAAE,CAAC;QACnC,CAAC;QAED,0DAA0D;QAC1D,OAAO;YACH,OAAO;YACP,YAAY,EAAE,SAAS;YACvB,KAAK,EAAE,IAAI;SACd,CAAC;IACN,CAAC;IAED;;OAEG;IACO,yBAAyB,CAC/B,kBAA6C,EAC7C,SAAiC,EACjC,KAA6B;QAE7B,wDAAwD;QAExD,kDAAkD;QAClD,MAAM,GAAG,GAAG,IAAI,IAAI,EAAE,CAAC;QAEvB,8DAA8D;QAC9D,MAAM,MAAM,GAAG,IAAI,eAAU,CAAC,IAAI,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;QAE9C,qBAAqB;QACrB,MAAM,CAAC,IAAI,GAAG;YACV,OAAO,EAAE,CAAC;oBACN,OAAO,EAAE;wBACL,IAAI,EAAE,WAAW;wBACjB,OAAO,EAAE,kBAAkB,CAAC,CAAC,CAAC,kBAAkB,CAAC,CAAC,CAAC,EAAE;qBACxD;oBACD,aAAa,EAAE,MAAM;oBACrB,KAAK,EAAE,CAAC;iBACX,CAAC;YACF,KAAK,EAAE,IAAI,eAAU,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,8CAA8C;SAC7E,CAAC;QAEF,MAAM,CAAC,UAAU,GAAG,SAAS,CAAC;QAC9B,MAAM,CAAC,YAAY,GAAG,IAAI,CAAC;QAC3B,MAAM,CAAC,SAAS,GAAG,IAAI,CAAC;QAExB,OAAO,MAAM,CAAC;IAClB,CAAC;IACD,aAAa,CAAC,MAAuB;QACjC,MAAM,IAAI,KAAK,CAAC,yBAAyB,CAAC,CAAC;IAC/C,CAAC;IACD,YAAY,CAAC,MAAW;QACpB,MAAM,IAAI,KAAK,CAAC,yBAAyB,CAAC,CAAC;IAC/C,CAAC;IAEM,MAAM,CAAC,yBAAyB,CAAC,OAA2B;QAC/D,MAAM,KAAK,GAAgB,EAAE,CAAC;QAC9B,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;YACzB,KAAK,MAAM,IAAI,IAAI,OAAO,EAAE,CAAC;gBACzB,IAAI,IAAI,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;oBACvB,KAAK,CAAC,IAAI,CAAC,EAAC,IAAI,EAAE,IAAI,CAAC,OAAO,EAAC,CAAC,CAAC;gBACrC,CAAC;qBACI,CAAC;oBACF,gGAAgG;oBAChG,MAAM,IAAI,GAAS;wBACf,IAAI,EAAE,IAAI,CAAC,OAAO;qBACrB,CAAA;oBACD,QAAQ,IAAI,CAAC,IAAI,EAAE,CAAC;wBAChB,KAAK,WAAW;4BACZ,IAAI,CAAC,QAAQ,GAAG,YAAY,CAAC;4BAC7B,MAAM;wBACV,KAAK,WAAW;4BACZ,IAAI,CAAC,QAAQ,GAAG,YAAY,CAAC;4BAC7B,MAAM;wBACV,KAAK,WAAW;4BACZ,IAAI,CAAC,QAAQ,GAAG,WAAW,CAAC;4BAC5B,MAAM;wBACV,KAAK,UAAU;4BACX,IAAI,CAAC,QAAQ,GAAG,0BAA0B,CAAC;4BAC3C,MAAM;oBACd,CAAC;oBACD,KAAK,CAAC,IAAI,CAAC,EAAC,UAAU,EAAE,IAAI,EAAC,CAAC,CAAC;gBACnC,CAAC;YACL,CAAC;QACL,CAAC;aACI,CAAC;YACF,2CAA2C;YAC3C,KAAK,CAAC,IAAI,CAAC,EAAC,IAAI,EAAE,OAAO,EAAC,CAAC,CAAC;QAChC,CAAC;QACD,OAAO,KAAK,CAAC;IACjB,CAAC;IAEM,MAAM,CAAC,gCAAgC,CAAC,OAAoB;QAC/D,OAAO;YACH,IAAI,EAAE,OAAO,CAAC,IAAI,KAAK,WAAW,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,EAAE,+GAA+G;YACtK,KAAK,EAAE,WAAS,CAAC,yBAAyB,CAAC,OAAO,CAAC,OAAO,CAAC;SAC9D,CAAA;IACL,CAAC;CACJ,CAAA;AAvRY,8BAAS;oBAAT,SAAS;IADrB,IAAA,sBAAa,EAAC,YAAO,EAAE,WAAW,CAAC;GACvB,SAAS,CAuRrB;AAGD,SAAgB,aAAa;IACzB,4JAA4J;AAChK,CAAC;AAFD,sCAEC"}
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@memberjunction/ai-gemini",
-  "version": "2.39.0",
+  "version": "2.41.0",
   "description": "MemberJunction Wrapper for Google Gemini AI Models",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -19,8 +19,8 @@
     "typescript": "^5.4.5"
   },
   "dependencies": {
-    "@memberjunction/ai": "2.39.0",
-    "@memberjunction/global": "2.39.0",
+    "@memberjunction/ai": "2.41.0",
+    "@memberjunction/global": "2.41.0",
     "@google/genai": "0.14.0"
   }
 }