lionagi 0.5.0__py3-none-any.whl → 0.5.1__py3-none-any.whl

@@ -288,11 +288,39 @@ class BranchOperationMixin(ABC):
  image_detail=image_detail,
  tool_schemas=tool_schemas,
  )
- kwargs["messages"] = self.msgs.to_chat_msgs(progress)
- kwargs["messages"].append(ins.chat_msg)

+ progress = progress or self.msgs.progress
+ messages = [self.msgs.messages[i] for i in progress]
+
+ if self.msgs.system and "system" not in imodel.allowed_roles:
+ messages = [msg for msg in messages if msg.role != "system"]
+ first_instruction = None
+
+ if len(messages) == 0:
+ first_instruction = ins.model_copy()
+ first_instruction.guidance = self.msgs.system.system_info + (
+ first_instruction.guidance or ""
+ )
+ messages.append(first_instruction)
+ elif len(messages) >= 1:
+ first_instruction = messages[0]
+ if not isinstance(first_instruction, Instruction):
+ raise ValueError(
+ "First message in progress must be an Instruction or System"
+ )
+ first_instruction = first_instruction.model_copy()
+ first_instruction.guidance = self.msgs.system.system_info + (
+ first_instruction.guidance or ""
+ )
+ messages[0] = first_instruction
+
+ else:
+ messages.append(ins)
+
+ kwargs["messages"] = [i.chat_msg for i in messages]
  imodel = imodel or self.imodel
  api_response = None
+
  if isinstance(imodel, LiteiModel):
  api_response = await imodel.invoke(**kwargs)
  elif isinstance(imodel, iModel):
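The block added above replaces `to_chat_msgs` with hand-built message assembly: system messages are stripped for providers whose `allowed_roles` excludes `"system"`, and the system prompt is folded into the guidance of the first instruction instead. A minimal standalone sketch of that filtering idea, using a stand-in `Msg` dataclass rather than lionagi's actual message types:

```python
# Illustrative sketch only; Msg is a stand-in, not lionagi's message class.
from dataclasses import dataclass


@dataclass
class Msg:
    role: str
    content: str


def prepare_messages(messages: list[Msg], system_info: str, allowed_roles: list[str]) -> list[Msg]:
    if "system" not in allowed_roles:
        # Drop system messages and fold their content into the first remaining turn.
        filtered = [m for m in messages if m.role != "system"]
        if filtered:
            first = filtered[0]
            filtered[0] = Msg(first.role, system_info + "\n" + first.content)
        return filtered
    return messages


# Example: an Anthropic-style provider that only accepts "user"/"assistant" roles.
msgs = [Msg("system", "be terse"), Msg("user", "hi")]
print(prepare_messages(msgs, "be terse", ["user", "assistant"]))
```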
@@ -329,12 +357,22 @@ class BranchOperationMixin(ABC):
  skip_validation: bool = False,
  clear_messages: bool = False,
  invoke_action: bool = True,
+ response_format: (
+ type[BaseModel] | BaseModel
+ ) = None, # alias of request_model
  **kwargs,
  ):
+ if response_format and request_model:
+ raise ValueError(
+ "Cannot specify both response_format and request_model "
+ "as they are aliases for the same parameter."
+ )
+ request_model = request_model or response_format
+
  imodel = imodel or self.imodel
  retry_imodel = retry_imodel or imodel
  if clear_messages:
- self.clear_messages()
+ self.msgs.clear_messages()

  if num_parse_retries > 5:
  logging.warning(
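The hunk above introduces `response_format` as an alias for `request_model` and rejects calls that supply both. A hedged sketch of the resulting call-site behavior (the `resolve` helper below is illustrative, not part of lionagi):

```python
from pydantic import BaseModel


class Joke(BaseModel):
    joke: str


def resolve(request_model=None, response_format=None):
    # Mirrors the new check: the two keyword arguments are aliases, so at most one may be given.
    if response_format and request_model:
        raise ValueError(
            "Cannot specify both response_format and request_model "
            "as they are aliases for the same parameter."
        )
    return request_model or response_format


assert resolve(response_format=Joke) is Joke  # either spelling selects the same model
assert resolve(request_model=Joke) is Joke
# resolve(request_model=Joke, response_format=Joke)  # would raise ValueError
```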
@@ -111,3 +111,7 @@ class AnthropicService(Service):
  return self.check_rate_limiter(
  model_obj, limit_requests=limit_requests, limit_tokens=limit_tokens
  )
+
+ @property
+ def allowed_roles(self):
+ return ["user", "assistant"]
@@ -145,3 +145,7 @@ class GroqService(Service):
  model_obj,
  limit_requests=limit_requests,
  )
+
+ @property
+ def allowed_roles(self):
+ return ["user", "assistant", "system"]
@@ -67,3 +67,7 @@ class LiteiModel:
  v = str(v)
  hashable_items.append((k, v))
  return hash(frozenset(hashable_items))
+
+ @property
+ def allowed_roles(self):
+ return ["user", "assistant", "system"]
@@ -136,3 +136,7 @@ class OllamaService(Service):
  # List Running Models
  def list_running_models(self):
  return OllamaRequest(endpoint="ps", method="GET")
+
+ @property
+ def allowed_roles(self):
+ return ["user", "assistant", "system"]
@@ -412,3 +412,7 @@ class OpenAIModel(BaseModel):
  "Estimating price currently only supports chat/completions endpoint"
  )
  return estimated_price
+
+ @property
+ def allowed_roles(self):
+ return ["user", "assistant", "system"]
@@ -107,3 +107,7 @@ class PerplexityService(Service):
  return self.check_rate_limiter(
  model_obj, limit_requests=limit_requests, limit_tokens=limit_tokens
  )
+
+ @property
+ def allowed_roles(self):
+ return ["user", "assistant", "system"]
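Each service layer now advertises an `allowed_roles` property: Anthropic reports only `user` and `assistant`, while Groq, LiteLLM, Ollama, OpenAI, and Perplexity also allow `system`. A small illustrative sketch of how a caller might branch on that property (the `Fake*` classes below are placeholders, not real service instances):

```python
def supports_system_role(service) -> bool:
    # Fall back to the permissive default if a service predates this property.
    return "system" in getattr(service, "allowed_roles", ["user", "assistant", "system"])


class FakeAnthropicService:
    allowed_roles = ["user", "assistant"]


class FakeOpenAIService:
    allowed_roles = ["user", "assistant", "system"]


print(supports_system_role(FakeAnthropicService()))  # False: fold the system prompt into the first instruction
print(supports_system_role(FakeOpenAIService()))     # True: send it as a normal system message
```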
lionagi/service/imodel.py CHANGED
@@ -9,6 +9,7 @@ from lionagi.service.service_match_util import (


  class iModel:
+
  def __init__(
  self,
  provider: str | Service,
@@ -20,25 +21,34 @@ class iModel:
  interval_requests: int = None,
  **kwargs,
  ):
-
  if api_key is not None:
  api_key = api_key
  elif api_key_schema is not None:
  api_key = api_key_schema

- if api_key is None:
- if isinstance(provider, str):
+ if task == "chat":
+ match provider:
+ case "openai":
+ task = "create_chat_completion"
+ case "anthropic":
+ task = "create_message"
+ case "groq":
+ task = "create_chat_completion"
+ case "perplexity":
+ task = "create_chat_completion"
+
+ if isinstance(provider, str):
+ if api_key is None:
  match provider:
  case "openai":
  api_key = "OPENAI_API_KEY"
  case "anthropic":
  api_key = "ANTHROPIC_API_KEY"
- case "grok":
- api_key = "GROK_API_KEY"
+ case "groq":
+ api_key = "GROQ_API_KEY"
  case "perplexity":
- api_key = "PERPLEXITY_API_KEY"
+ api_key = "PERPLEXIY_API_KEY"

- if isinstance(provider, str):
  self.service = match_service(provider, api_key=api_key, **kwargs)
  elif isinstance(provider, Service):
  self.service = provider
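With this change, a generic `task="chat"` is mapped to each provider's concrete task name (`create_chat_completion` for OpenAI, Groq, and Perplexity; `create_message` for Anthropic), and environment-variable key names are only resolved when the provider is given as a string. A hedged construction example, assuming the corresponding API keys are set in the environment:

```python
from lionagi import iModel

# "chat" resolves to "create_chat_completion" for OpenAI (assumes OPENAI_API_KEY is set).
gpt4o = iModel(provider="openai", task="chat", model="gpt-4o")

# "chat" resolves to "create_message" for Anthropic (assumes ANTHROPIC_API_KEY is set).
sonnet = iModel(
    provider="anthropic",
    task="chat",
    model="claude-3-5-sonnet-20241022",
    max_tokens=100,  # required for Anthropic models, per the README below
)
```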
@@ -80,6 +90,7 @@ class iModel:
  self.data_model = self.service.match_data_model(self.task)

  def parse_to_data_model(self, **kwargs):
+
  if kwargs.get("model") and self.model:
  if kwargs.get("model") != self.model:
  raise ValueError(
@@ -106,5 +117,9 @@ class iModel:
  def list_tasks(self):
  return self.service.list_tasks()

+ @property
+ def allowed_roles(self):
+ return self.service.allowed_roles
+

  __all__ = ["iModel"]
@@ -3,6 +3,10 @@ class Service:
  def list_tasks(self):
  pass

+ @property
+ def allowed_roles(self):
+ return ["user", "assistant", "system"]
+

  def register_service(cls):
  original_init = cls.__init__
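Because the `Service` base class now defaults `allowed_roles` to all three roles, a provider only overrides the property when it is more restrictive, as `AnthropicService` does above. A simplified, hypothetical sketch of that pattern (the base class here is a stand-in, not an import of lionagi's):

```python
class Service:
    # Stand-in for the base-class default shown in the hunk above.
    @property
    def allowed_roles(self):
        return ["user", "assistant", "system"]


class PermissiveService(Service):
    pass  # inherits the default: standalone system messages are fine


class RestrictedService(Service):
    @property
    def allowed_roles(self):
        return ["user", "assistant"]  # no standalone system messages


print(PermissiveService().allowed_roles)  # ['user', 'assistant', 'system']
print(RestrictedService().allowed_roles)  # ['user', 'assistant']
```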
lionagi/version.py CHANGED
@@ -1 +1 @@
- __version__ = "0.5.0"
+ __version__ = "0.5.1"
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: lionagi
- Version: 0.5.0
+ Version: 0.5.1
  Summary: An AGentic Intelligence Operating System.
  Author-email: HaiyangLi <quantocean.li@gmail.com>
  License: Apache License
@@ -246,17 +246,35 @@ LION is designed to be:
  - 🔧 **Flexible**: Build any workflow you need
  - 🚀 **Efficient**: Minimal dependencies, maximum performance

+
+
+ ## Installation
+
+ LION maintains minimal dependencies for maximum reliability:
+
+ ```bash
+ uv pip install lionagi
+ ```
+
+ Dependencies:
+ - litellm
+ - jinja2
+ - pandas
+ - pillow
+ - python-dotenv
+
+
  ## Quick Start

  ```python
  from lionagi import iModel, Branch

  # Initialize model
- gpt4o = iModel(provider="openai", model="gpt-4o")
+ gpt4o = iModel(provider="openai", task="chat", model="gpt-4o")

  hunter = Branch(
- system="you are a hilarious dragon hunter who responds in 10 words rhymes",
- imodel=gpt4o,
+ system="you are a hilarious dragon hunter who responds in 10 words rhymes",
+ imodel=gpt4o,
  )

  # Chat asynchronously
@@ -267,20 +285,199 @@ print(await hunter.communicate("I am a dragon"))
  You claim to be a dragon, oh what a braggin'!
  ```

- ## Installation
+ ## 📦 Features

- LION maintains minimal dependencies for maximum reliability:
+ ### 1. Model Agnostic Structured Output

- ```bash
- uv pip install lionagi
+ LION provides a unified interface for interacting with any AI model, regardless of the underlying architecture. This allows you to easily switch between models without changing your code.
+
+ ```python
+ from pydantic import BaseModel
+
+ class Joke(BaseModel):
+ joke: str
+
+ sonnet = iModel(
+ provider="anthropic",
+ model="claude-3-5-sonnet-20241022",
+ max_tokens=100, # max_tokens is required for anthropic models
+ )
+
+ response = await hunter.communicate(
+ instruction="I am a dragon",
+ response_format=Joke, # structured output in given pydantic model
+ clear_messages=True, # refresh the conversation
+ imodel=sonnet, # use sonnet model, which doesn't support structured output
+ )
+
+ print(type(response))
+ print(response.joke)
  ```

- Dependencies:
- - litellm
- - jinja2
- - pandas
- - pillow
- - python-dotenv
+ ```
+ <class '__main__.Joke'>
+ Joke(joke='With fiery claws, dragons hide their laughter flaws!')
+ ```
+
+
+ ### 2. Complete Observability
+
+ ```python
+ # using perplexity model
+ pplx_small = iModel(
+ provider="perplexity",
+ task="chat/completions",
+ model="llama-3.1-sonar-small-128k-online",
+ max_tokens=1000,
+ )
+
+ b = await hunter.communicate(
+ instruction="What makes a well-behaved dragon?",
+ clear_messages=True, # refresh the conversation
+ imodel=pplx_small, # use sonnet model
+ )
+
+ print(b)
+ ```
+
+ ```
+ A well-behaved dragon is one that's calm and bright,
+ No stress or fear, just a peaceful night.
+ It's active, not lethargic, with a happy face,
+ And behaviors like digging, not a frantic pace.
+ It's social, friendly, and never a fright,
+ Just a gentle soul, shining with delight
+ ```
+
+ ```python
+ hunter.msgs.last_response.model_response
+ ```
+
+ ```
+ {'id': '1be10f4c-0936-4050-ab48-91bd86ab11a5',
+ 'model': 'llama-3.1-sonar-small-128k-online',
+ 'object': 'chat.completion',
+ 'created': 1734369700,
+ 'choices': [{'index': 0,
+ 'message': {'role': 'assistant',
+ 'content': "A well-behaved dragon is one that's calm and bright,\nNo stress or fear, just a peaceful night.\nIt's active, not lethargic, with a happy face,\nAnd behaviors like digging, not a frantic pace.\nIt's social, friendly, and never a fright,\nJust a gentle soul, shining with delight"},
+ 'finish_reason': 'stop',
+ 'delta': {'role': 'assistant', 'content': ''}}],
+ 'usage': {'prompt_tokens': 40, 'completion_tokens': 69, 'total_tokens': 109},
+ 'citations': [{'url': 'https://dragonsdiet.com/blogs/dragon-care/15-bearded-dragon-behaviors-and-what-they-could-mean'},
+ {'url': 'https://masterbraeokk.tripod.com/dragons/behavior.html'},
+ {'url': 'https://files.eric.ed.gov/fulltext/ED247607.pdf'},
+ {'url': 'https://www.travelchinaguide.com/intro/social_customs/zodiac/dragon/five-elements.htm'},
+ {'url': 'https://www.travelchinaguide.com/intro/social_customs/zodiac/dragon/'}]}
+ ```
+
+
+ ### 3. Easy composition of complex workflows
+
+ ```python
+ # chain of thoughts
+ from pydantic import Field
+
+ class Reason(BaseModel):
+ reason: str
+ confidence_score: float
+
+ class Thought(BaseModel):
+ thought: str
+
+ class Analysis(BaseModel):
+ thought: list[Thought] = Field(
+ default_factory=list,
+ description="concise Chain of thoughts from you, 3 step, each in 8 words"
+ )
+ analysis: str = Field(
+ ...,
+ description="Final analysis of the dragon's psyche in 20 words",
+ )
+ reason: list[Reason] = Field(
+ default_factory=list,
+ description="Concise Reasoning behind the analysis, 3 support, each in 8 words"
+ )
+
+ context1 = "I am a dragon, I think therefore I am, I suffer from shiny objects syndrome"
+ context2 = "I like food and poetry, I use uv sometimes, it's cool but I am not familiar with pip"
+
+ async def analyze(context) -> Analysis:
+ psychologist = Branch(
+ system="you are a renowned dragon psychologist",
+ imodel=gpt4o,
+ )
+ return await psychologist.communicate(
+ instruction="analyze the dragon's psyche using chain of thoughts",
+ guidance="think step by step, reason with logic",
+ context=context,
+ response_format=Analysis,
+ )
+
+ ```
+
+ ```python
+ result1 = await analyze(context1)
+
+ print("\nThoughts:")
+ for i in result1.thought:
+ print(i.thought)
+
+ print("\nAnalysis:")
+ print(result1.analysis)
+
+ print("\nReasoning:")
+ for i in result1.reason:
+ print(i.reason)
+ ```
+
+ ```
+
+ Thoughts:
+ Dragons are attracted to shiny objects naturally.
+ This suggests a strong affinity for hoarding.
+ Reflects the dragon's inherent desire for possession.
+
+ Analysis:
+ The dragon demonstrates a compulsive hoarding behavior linked to attraction for shiny objects.
+
+ Reasoning:
+ Shiny objects trigger instinctual hoarding behavior.
+ Possession indicates a symbol of power and security.
+ Hoarding is reinforced by evolutionary survival mechanisms.
+ ```
+
+ ```python
+ result2 = await analyze(context2)
+
+ print("\nThoughts:")
+ for i in result2.thought:
+ print(i.thought)
+
+ print("\nAnalysis:")
+ print(result2.analysis)
+
+ print("\nReasoning:")
+ for i in result2.reason:
+ print(i.reason)
+ ```
+
+ ```
+ Thoughts:
+ Dragon enjoys both food and poetry regularly.
+ Dragon uses uv light with frequent interest.
+ Dragon is unfamiliar and not comfortable with pip.
+
+ Analysis:
+ The dragon is curious and exploratory, yet selectively cautious about unfamiliar methodologies.
+
+ Reasoning:
+ Preference for food and poetry suggests curiosity.
+ Frequent uv light use indicates exploratory nature.
+ Discomfort with pip usage shows selective caution.
+ ```
+
+

  ## 🌟 Example Workflow

@@ -1,6 +1,6 @@
  lionagi/__init__.py,sha256=kIVZqqp61CFddL5BbYynIPwRlg3oUbSVTnWh6QOyhaM,495
  lionagi/settings.py,sha256=r3G-ihH5ehIKwsUdqSGKRwxKpXFj9v6kIaElZKQd7R4,2953
- lionagi/version.py,sha256=LBK46heutvn3KmsCrKIYu8RQikbfnjZaj2xFrXaeCzQ,22
+ lionagi/version.py,sha256=eZ1bOun1DDVV0YLOBW4wj2FP1ajReLjbIrGmzN7ASBw,22
  lionagi/core/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKUQMYTgDh1VQpnr1KdLaw,112
  lionagi/core/_class_registry.py,sha256=srSWefqCS9EZrMvyA8zCrZ9KFvzAhTIj8g6mJG5KlIc,1982
  lionagi/core/action/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKUQMYTgDh1VQpnr1KdLaw,112
@@ -55,7 +55,7 @@ lionagi/core/models/schema_model.py,sha256=1GNCStg10VWyhqjN4ojxs9mt60T5o8I8_ZX7D
  lionagi/core/models/types.py,sha256=I8khSl0c-O2UgYLMss5akvIFscogjfgKTE2sT6Tg50I,270
  lionagi/core/session/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKUQMYTgDh1VQpnr1KdLaw,112
  lionagi/core/session/branch.py,sha256=MZXmE7Ne8_CGxgS_Jo5Qak3wJdvut4ZXgF2j8zPMstc,4241
- lionagi/core/session/branch_mixins.py,sha256=Li3Y4_EsqsT0c4OwzM1eh1loalAGELUiByPcKWmISAs,18226
+ lionagi/core/session/branch_mixins.py,sha256=OvN29nHieren4YQQ47VQ3ZfxKPBwIR7_lrxcefSwJLc,19766
  lionagi/core/session/session.py,sha256=0DW8HoU_Eqki4UeKIaEuXWBb08N_OkxoDWQyH211M9g,5195
  lionagi/core/session/types.py,sha256=MUGTSa2HWK79p7z-CG22RFP07N5AKnPVNXZwZt_wIvU,202
  lionagi/core/typing/__init__.py,sha256=THyLLjX2WgKkycQPNxXWWl1BVh7XCrQMuAh8WE4UhP8,226
@@ -67,7 +67,7 @@ lionagi/core/typing/typing_.py,sha256=VJj5W6y-JGK1ZzSbyDs4qAuq0cA5wp5HtRgZUsZ50E
  lionagi/integrations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  lionagi/integrations/services.py,sha256=qxffUaPKvp2Bb_LI3Uiyokm7l6ZAbRi0xKxZXCYs67c,498
  lionagi/integrations/anthropic_/AnthropicModel.py,sha256=SOqO0xlmOZ9CMl7aEmQylXjDnHJ3c5oyR7POQLrWWGc,9064
- lionagi/integrations/anthropic_/AnthropicService.py,sha256=AHwsGYkblAMqev-COgqZthAVgFrWBWiaqMmFUcxqm6M,3575
+ lionagi/integrations/anthropic_/AnthropicService.py,sha256=Z6fGD2nLEKOm3cDK4Gy0_Vl289KAxX1wswCWsEDpp4E,3656
  lionagi/integrations/anthropic_/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKUQMYTgDh1VQpnr1KdLaw,112
  lionagi/integrations/anthropic_/anthropic_max_output_token_data.yaml,sha256=u_CT9UAXoNpbAWuPHMxH9g4erZW7XUwY4vdSXyE86t4,203
  lionagi/integrations/anthropic_/anthropic_price_data.yaml,sha256=NCoBo5QFiBvrtztZf2FEyYvtKjIOYDuVMAVUINtp3OY,368
@@ -85,7 +85,7 @@ lionagi/integrations/anthropic_/api_endpoints/messages/response/content_models.p
  lionagi/integrations/anthropic_/api_endpoints/messages/response/response_body.py,sha256=96e2GAAiTGkExRullvAsAOGhPPHNByhnv6DK_wz1j40,3213
  lionagi/integrations/anthropic_/api_endpoints/messages/response/usage_models.py,sha256=s6oe5iOU027M7YPS10upnvcPsuFbTV1ZM00lInHPKvA,695
  lionagi/integrations/groq_/GroqModel.py,sha256=8vIHjzcYoIeRvzXQpBDd-ASx6gXckr5qhmUqJJ9UL1g,11721
- lionagi/integrations/groq_/GroqService.py,sha256=i3Vru3w8ktve9ISacMSWpjrnpJW_swPMzIZgbvHmxhc,4553
+ lionagi/integrations/groq_/GroqService.py,sha256=v3bn04njIwV7ysOlfLCmapOhHZJy3bNUTr0i12tFqME,4644
  lionagi/integrations/groq_/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKUQMYTgDh1VQpnr1KdLaw,112
  lionagi/integrations/groq_/groq_max_output_token_data.yaml,sha256=Y0PbyZ7pyyV1zi7ZJSJGVPsZDhSeN2JMOyghzkqqoKc,559
  lionagi/integrations/groq_/groq_price_data.yaml,sha256=HmN_2-QliKvaC8Ghq7QJAD97ubMYzPSy7EGgqzgCz48,1234
@@ -97,9 +97,9 @@ lionagi/integrations/groq_/api_endpoints/groq_request.py,sha256=u-GJuu0ZsY7jMWaX
  lionagi/integrations/groq_/api_endpoints/match_response.py,sha256=95vRKsR1QYPPmBY36dC5azdKn5UlXNRrTQqTUZro_YM,3756
  lionagi/integrations/groq_/api_endpoints/response_utils.py,sha256=P5kRsGHe-Rx9xejfRcU8q680yotcWLTSaSUuqXATcho,3710
  lionagi/integrations/litellm_/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKUQMYTgDh1VQpnr1KdLaw,112
- lionagi/integrations/litellm_/imodel.py,sha256=Do8fPqdhZi6ImgfATS68imvAMyVqeouCOH5fTPCJ4Z0,2092
+ lionagi/integrations/litellm_/imodel.py,sha256=8TalouW9bb1TBlbPeRJjySHrR6u9Chgru43qmYTIYBE,2183
  lionagi/integrations/ollama_/OllamaModel.py,sha256=5kBYIWShsSpQpSgOxdbRk2_4jmss6Y8iISjUcS3KoWw,8341
- lionagi/integrations/ollama_/OllamaService.py,sha256=hUVnmHEFvQlwaozt6A4xQvBhOkia8AC9cEj9LmSB1i0,3989
+ lionagi/integrations/ollama_/OllamaService.py,sha256=bJ4kk1FPjn_kecLzxTJgVj05KZPzF5FclHoDA3jdAlg,4080
  lionagi/integrations/ollama_/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKUQMYTgDh1VQpnr1KdLaw,112
  lionagi/integrations/ollama_/api_endpoints/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKUQMYTgDh1VQpnr1KdLaw,112
  lionagi/integrations/ollama_/api_endpoints/api_request.py,sha256=N3frStr_ZJmqu5WMmJXioxnXy5m10bZNwO9vjifcGVw,6843
@@ -126,7 +126,7 @@ lionagi/integrations/ollama_/api_endpoints/model/list_model.py,sha256=OrGdJa0evj
  lionagi/integrations/ollama_/api_endpoints/model/pull_model.py,sha256=fiZJcQSRn73SJA9GdlfPBG5RiMISQwBc0y7S_zAlOGA,923
  lionagi/integrations/ollama_/api_endpoints/model/push_model.py,sha256=yDOMVu3ajdNFT1cuzb4R_3qDxlk0qT4aM2oget3aHZ4,961
  lionagi/integrations/ollama_/api_endpoints/model/show_model.py,sha256=CclV6pEmm5iYM25ePnMAiicVJmZzolDim7BsQoEJAw0,864
- lionagi/integrations/openai_/OpenAIModel.py,sha256=J1X2oYdkXUJRTviAeZrsFe6p8Rqcj3Md8wPaLI8z_Ts,15725
+ lionagi/integrations/openai_/OpenAIModel.py,sha256=TOQtliWIV_LsfkfmVK8BHVNMnni-jh60EBN1uj73lCI,15816
  lionagi/integrations/openai_/OpenAIService.py,sha256=P0sGnxeWPKe15_KndqCgEdM74AKVJ6NJsgFGORqqlrk,13507
  lionagi/integrations/openai_/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  lionagi/integrations/openai_/openai_max_output_token_data.yaml,sha256=3gd8TGnFjy4AdHzvsV9wZjK_r_o26pe3Th75n5eN2zs,263
@@ -219,7 +219,7 @@ lionagi/integrations/pandas_/search_keywords.py,sha256=AJfN8QVu6rUni6As8AOTnzne_
  lionagi/integrations/pandas_/to_df.py,sha256=3vAOCj0Ib2PZNCblg1oA20PjRIrUXR86FHICQLNhLu0,5757
  lionagi/integrations/pandas_/update_cells.py,sha256=7X1bGcPvnEINrLM_zFCWUXIkrFdMGV3TjoEYBq_SThs,1658
  lionagi/integrations/perplexity_/PerplexityModel.py,sha256=FfOR3WwOgZZEw97mai3D9PJS5C1jsZii2tAX5n0IR8s,9171
- lionagi/integrations/perplexity_/PerplexityService.py,sha256=9Hl3Qcy0r8AqNa94dkWgY5CUSXhSFn4PBVsA6CcEOFY,3550
+ lionagi/integrations/perplexity_/PerplexityService.py,sha256=q5Ruy60H3cL2a4LOUjB2JI6bkMwKiWVSlrjpATREwqM,3641
  lionagi/integrations/perplexity_/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKUQMYTgDh1VQpnr1KdLaw,112
  lionagi/integrations/perplexity_/perplexity_max_output_token_data.yaml,sha256=SY6nDrDRhI_HzEBYHaANR_Mc5GRa0SY9Pw_wRuVBlV4,121
  lionagi/integrations/perplexity_/perplexity_price_data.yaml,sha256=eyp_jZktnEbsEv9VJ0TLNzjp99VdOEA0E-el_dAzCTc,284
@@ -350,9 +350,9 @@ lionagi/protocols/registries/_component_registry.py,sha256=MRA8bAgmjqNsFUnzq1I7J
  lionagi/protocols/registries/_pile_registry.py,sha256=iHiQsd0dapzu7L2YA-VPo7qAw6gjWdDETKe_zh1zhHk,543
  lionagi/service/__init__.py,sha256=xuMab03l9QSeBmq0GXjkTE-Oh2R10l0aful5swKrjas,296
  lionagi/service/complete_request_info.py,sha256=V9cWqmQ-fo952EWCooAvezaM_xuxYU4Xy0Jo_0XJnug,301
- lionagi/service/imodel.py,sha256=gObNmi7tTOZRS639YuAtB1oy8XcXIZUusZJ815LAGm0,3758
+ lionagi/service/imodel.py,sha256=laiwc4FvO0jHpf7Q9GrL0VIABCIInkn0ld8XUcbabKo,4191
  lionagi/service/rate_limiter.py,sha256=1cCtKfpOo_8h4ZN-oX5HXtnq0iGjF1rxi8htyu65kMU,3746
- lionagi/service/service.py,sha256=uN9mBrZeV-MDOovqNU4kjRjOhGVtxJOJKTgYNO0QDuc,986
+ lionagi/service/service.py,sha256=58FPZzLM85fNm4WgSzmZRwbHHuLUW1K0VagKZ7A2ZAs,1077
  lionagi/service/service_match_util.py,sha256=Re2zJgqsBV9LnDcn5N9pUGMQB_O8-jCTR3ZH6-nxFds,4301
  lionagi/service/service_util.py,sha256=z0tU_wAgWq_gDfm_QeNfDbrsmseumoNxVQ1bEpOCBzg,2985
  lionagi/service/token_calculator.py,sha256=6FQ7GvCq4nBo_7QbNyekYJmQrVIzL0CNwiztK3QSR3M,1486
@@ -367,7 +367,7 @@ lionagi/strategies/sequential_chunk.py,sha256=jG_WZXG-Ra3yd30CmX4b3XeCNAUrZGA2-i
  lionagi/strategies/sequential_concurrent_chunk.py,sha256=H7GShaqYlD5XxNJMG2GdOR4Vl8JHDhZb5jxNq8zY0hI,3365
  lionagi/strategies/types.py,sha256=fEvE4d1H4SeCcXcd2dz3q4k8jFIBtxYzjxDN7eJRLtI,769
  lionagi/strategies/utils.py,sha256=DX1dvxia8cNRqEJJbssJ3mgRzo7kgWCTA4y5DYLCCZE,1281
- lionagi-0.5.0.dist-info/METADATA,sha256=OuNuC0O5DostwQRd5b2H11jASoX_tKE7Q-fuzXRGC-U,17275
- lionagi-0.5.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- lionagi-0.5.0.dist-info/licenses/LICENSE,sha256=VXFWsdoN5AAknBCgFqQNgPWYx7OPp-PFEP961zGdOjc,11288
- lionagi-0.5.0.dist-info/RECORD,,
+ lionagi-0.5.1.dist-info/METADATA,sha256=FkuZDqqzBAOOQ-y96QXAK_E__O5tbVZAhxIsQggkL7U,22672
+ lionagi-0.5.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ lionagi-0.5.1.dist-info/licenses/LICENSE,sha256=VXFWsdoN5AAknBCgFqQNgPWYx7OPp-PFEP961zGdOjc,11288
+ lionagi-0.5.1.dist-info/RECORD,,