llm-gemini 0.13__py3-none-any.whl → 0.13.1__py3-none-any.whl

This diff compares the contents of two publicly released versions of this package as they appear in their public registry. It is provided for informational purposes only.
llm_gemini-0.13.dist-info/METADATA → llm_gemini-0.13.1.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: llm-gemini
-Version: 0.13
+Version: 0.13.1
 Summary: LLM plugin to access Google's Gemini family of models
 Author: Simon Willison
 License: Apache-2.0

llm_gemini-0.13.1.dist-info/RECORD ADDED
@@ -0,0 +1,7 @@
+llm_gemini.py,sha256=JWaRtT8vJzdxrRbCAgoNrKqes2df_T4gqqeDmiw0oRI,14370
+llm_gemini-0.13.1.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+llm_gemini-0.13.1.dist-info/METADATA,sha256=S87OFYnm9K0EVvJIRrJoZT-J0gxhPA4BrnIGkVD8q8c,7016
+llm_gemini-0.13.1.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
+llm_gemini-0.13.1.dist-info/entry_points.txt,sha256=n544bpgUPIBc5l_cnwsTxPc3gMGJHPtAyqBNp-CkMWk,26
+llm_gemini-0.13.1.dist-info/top_level.txt,sha256=WUQmG6_2QKbT_8W4HH93qyKl_0SUteL4Ra6_PhyNGKU,11
+llm_gemini-0.13.1.dist-info/RECORD,,

llm_gemini.py CHANGED
@@ -90,7 +90,7 @@ def resolve_type(attachment):
 
 def cleanup_schema(schema):
     "Gemini supports only a subset of JSON schema"
-    keys_to_remove = ("$schema", "additionalProperties")
+    keys_to_remove = ("$schema", "additionalProperties", "title")
     # Recursively remove them
     if isinstance(schema, dict):
         for key in keys_to_remove:
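
Note on the cleanup_schema() change: the plugin strips keys Gemini does not accept before sending a schema, and this release adds "title" to that list (Pydantic-generated schemas, for example, attach a "title" to the model and to every property). A minimal sketch of the intended effect, assuming cleanup_schema() returns the cleaned dict; the example schema and expected output are illustrative, not taken from the package:

    from llm_gemini import cleanup_schema

    # A Pydantic-style schema: "title" appears at the top level and per property.
    schema = {
        "$schema": "http://json-schema.org/draft-07/schema#",
        "title": "Dog",
        "type": "object",
        "additionalProperties": False,
        "properties": {
            "name": {"title": "Name", "type": "string"},
            "age": {"title": "Age", "type": "integer"},
        },
    }

    cleaned = cleanup_schema(schema)
    # Expected: only keys Gemini accepts remain, at every nesting level, e.g.
    # {"type": "object", "properties": {"name": {"type": "string"},
    #                                   "age": {"type": "integer"}}}
    print(cleaned)
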
@@ -288,9 +288,17 @@ class _SharedGemini:
             return f'```\n{part["codeExecutionResult"]["output"].strip()}\n```\n'
         return ""
 
+    def process_candidates(self, candidates):
+        # We only use the first candidate
+        for part in candidates[0]["content"]["parts"]:
+            yield self.process_part(part)
+
     def set_usage(self, response):
         try:
-            usage = response.response_json[-1].pop("usageMetadata")
+            # Don't record the "content" key from that last candidate
+            for candidate in response.response_json["candidates"]:
+                candidate.pop("content", None)
+            usage = response.response_json.pop("usageMetadata")
             input_tokens = usage.pop("promptTokenCount", None)
             output_tokens = usage.pop("candidatesTokenCount", None)
             usage.pop("totalTokenCount", None)
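
Note on this hunk: the new process_candidates() helper yields processed text for every part of the first candidate, replacing the parts[0]-only handling in the streaming loops below, and set_usage() now expects response.response_json to be a single event dict (see the gathered[-1] change) with each candidate's bulky "content" dropped before logging. A standalone sketch of that logic against an illustrative event; the key names match those used in the diff, the values are made up:

    # Reduced shape of one streamed Gemini event:
    event = {
        "candidates": [
            {"content": {"parts": [{"text": "Hello "}, {"text": "world"}]}},
        ],
        "usageMetadata": {
            "promptTokenCount": 5,
            "candidatesTokenCount": 2,
            "totalTokenCount": 7,
        },
    }

    # What the new set_usage() does with it: drop candidate "content",
    # then pop "usageMetadata" and report the token counts.
    for candidate in event["candidates"]:
        candidate.pop("content", None)
    usage = event.pop("usageMetadata")
    print(usage["promptTokenCount"], usage["candidatesTokenCount"])  # 5 2
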
@@ -320,17 +328,16 @@ class GeminiPro(_SharedGemini, llm.KeyModel):
                 for chunk in http_response.iter_bytes():
                     coro.send(chunk)
                     if events:
-                        event = events[0]
-                        if isinstance(event, dict) and "error" in event:
-                            raise llm.ModelError(event["error"]["message"])
-                        try:
-                            part = event["candidates"][0]["content"]["parts"][0]
-                            yield self.process_part(part)
-                        except KeyError:
-                            yield ""
-                        gathered.append(event)
+                        for event in events:
+                            if isinstance(event, dict) and "error" in event:
+                                raise llm.ModelError(event["error"]["message"])
+                            try:
+                                yield from self.process_candidates(event["candidates"])
+                            except KeyError:
+                                yield ""
+                            gathered.append(event)
                         events.clear()
-            response.response_json = gathered
+            response.response_json = gathered[-1]
             self.set_usage(response)
 
 
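
Note on the streaming loop: the old code inspected only events[0], so when a single network chunk completed more than one JSON event the extra events were neither yielded nor appended to gathered; the new loop processes every decoded event, and process_candidates() replaces the parts[0]-only extraction. The plugin appears to drive this with ijson's push API; a standalone sketch (not the plugin's code) of why one chunk can complete several events:

    import ijson

    # The streaming endpoint returns a JSON array of events. ijson's push API
    # appends an item to `events` as soon as that item is complete, so one
    # network chunk can complete more than one event.
    events = ijson.sendable_list()
    coro = ijson.items_coro(events, "item")
    coro.send(b'[{"candidates": []}, {"candidates": []}')  # two complete items
    print(len(events))  # 2 -- the pre-0.13.1 loop would have handled only the first
    events.clear()
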
@@ -353,17 +360,19 @@ class AsyncGeminiPro(_SharedGemini, llm.AsyncKeyModel):
                 async for chunk in http_response.aiter_bytes():
                     coro.send(chunk)
                     if events:
-                        event = events[0]
-                        if isinstance(event, dict) and "error" in event:
-                            raise llm.ModelError(event["error"]["message"])
-                        try:
-                            part = event["candidates"][0]["content"]["parts"][0]
-                            yield self.process_part(part)
-                        except KeyError:
-                            yield ""
-                        gathered.append(event)
+                        for event in events:
+                            if isinstance(event, dict) and "error" in event:
+                                raise llm.ModelError(event["error"]["message"])
+                            try:
+                                for chunk in self.process_candidates(
+                                    event["candidates"]
+                                ):
+                                    yield chunk
+                            except KeyError:
+                                yield ""
+                            gathered.append(event)
                         events.clear()
-            response.response_json = gathered
+            response.response_json = gathered[-1]
             self.set_usage(response)
 
 
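
Note on the async variant: it spells the delegation out as an explicit loop (for chunk in self.process_candidates(...): yield chunk) rather than using yield from, presumably because yield from is not allowed inside an async generator; process_candidates() is a plain synchronous generator, so a regular for loop works in both code paths. A minimal illustration of that Python restriction:

    # `yield from` only works in regular generators; inside an `async def`
    # generator it is a SyntaxError, hence the explicit loop in AsyncGeminiPro.
    def sync_gen(parts):
        yield from parts            # fine in the synchronous path

    async def async_gen(parts):
        # yield from parts          # SyntaxError inside an async generator
        for part in parts:          # the equivalent explicit loop
            yield part
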
@@ -1,7 +0,0 @@
1
- llm_gemini.py,sha256=lpsKlCAiJtEkwE5Va1zpbX0oSUAWQ0dTfbmH-Xo4hPo,13910
2
- llm_gemini-0.13.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
3
- llm_gemini-0.13.dist-info/METADATA,sha256=WhmPBMoyvyyH0ErFWaR4_0uDKN9ixogNkdshmED32FQ,7014
4
- llm_gemini-0.13.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
5
- llm_gemini-0.13.dist-info/entry_points.txt,sha256=n544bpgUPIBc5l_cnwsTxPc3gMGJHPtAyqBNp-CkMWk,26
6
- llm_gemini-0.13.dist-info/top_level.txt,sha256=WUQmG6_2QKbT_8W4HH93qyKl_0SUteL4Ra6_PhyNGKU,11
7
- llm_gemini-0.13.dist-info/RECORD,,