lionagi 0.9.14__py3-none-any.whl → 0.9.16__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions exactly as they appear in their public registry.
lionagi/libs/file/concat_files.py ADDED
@@ -0,0 +1,83 @@
+ from pathlib import Path
+
+ from lionagi.utils import create_path
+
+ from .process import dir_to_files
+
+
+ def concat_files(
+     data_path: str | Path | list,
+     file_types: list[str],
+     output_dir: str | Path = None,
+     output_filename: str = None,
+     file_exist_ok: bool = True,
+     recursive: bool = True,
+     verbose: bool = True,
+     threshold: int = 0,
+     return_fps: bool = False,
+     return_files: bool = False,
+     **kwargs,
+ ) -> list[str] | str | tuple[list[str], list[Path]] | tuple[str, list[Path]]:
+     """
+     data_path: str or Path or list of str or Path, the directory or file paths to concatenate.
+     file_types: list of str, the file types to concatenate, e.g. ['.txt', '.md'].
+     output_dir: str or Path, the directory to save the concatenated file. If provided, will save the file.
+     output_filename: str, the filename to save the concatenated file.
+     file_exist_ok: bool, if True, overwrite the existing file. Default is True.
+     recursive: bool, if True, search files recursively. Default is True.
+     verbose: bool, if True, print the output path. Default is True.
+     threshold: int, the minimum number of characters a file must contain to be included.
+     kwargs: additional keyword arguments to pass to create_path.
+     """
+     persist_path = None
+     if output_dir:
+         if not output_filename:
+             output_filename = "concatenated_text.txt"
+             kwargs["timestamp"] = kwargs.get("timestamp", True)
+             kwargs["random_hash_digits"] = kwargs.get("random_hash_digits", 6)
+         output_filename = output_filename or "concatenated_text.txt"
+         persist_path = create_path(
+             output_dir, output_filename, file_exist_ok=file_exist_ok, **kwargs
+         )
+
+     texts = []
+     data_path = (
+         [str(data_path)] if not isinstance(data_path, list) else data_path
+     )
+     data_path = sorted(data_path)
+     data_path = [Path(dp) for dp in data_path if Path(dp).exists()]
+
+     for dp in data_path:
+         fps = dir_to_files(dp, recursive=recursive, file_types=file_types)
+
+         data_path = sorted([str(i) for i in fps])
+         data_path: list[Path] = [
+             Path(dp) for dp in data_path if Path(dp).exists()
+         ]
+
+         for fp in data_path:
+             text = fp.read_text(encoding="utf-8")
+             if len(text) >= threshold:
+                 fp_text = (
+                     "\n----------------------------------------------------\n"
+                     f"{str(fp)}"
+                     "\n----------------------------------------------------\n"
+                 )
+                 text = fp_text + text
+                 texts.append(text)
+
+     text = "\n".join(texts)
+     if persist_path:
+         persist_path.write_text(text, encoding="utf-8")
+     if verbose:
+         print(f"Concatenated {len(fps)} files to {persist_path}")
+         print(f"The file contains {len(text)} characters.")
+
+     if return_files:
+         if return_fps:
+             return texts, fps
+         return texts
+
+     if return_fps:
+         return text, fps
+     return text
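A minimal usage sketch of the new helper (import path per the RECORD entry for `lionagi/libs/file/concat_files.py`; the keyword flags are the ones defined in the signature above):

```python
from lionagi.libs.file.concat_files import concat_files

# Concatenate all .py files under ./src into a single string, banner-separated
# by source path, persist it under ./build, and also get the file paths back.
text, fps = concat_files(
    "src",
    file_types=[".py"],
    output_dir="build",  # triggers create_path(...) and the file write
    threshold=10,        # skip files shorter than 10 characters
    return_fps=True,     # return (text, fps) instead of just text
)
print(f"{len(fps)} files -> {len(text)} characters")
```

Note that `fps` holds whatever `dir_to_files` returned for the most recently scanned entry of `data_path`, so the tuple returns are most predictable when a single directory is passed.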
lionagi/libs/file/process.py CHANGED
@@ -164,10 +164,12 @@ def file_to_chunks(
 
 
  def chunk(
-     url_or_path: str | Path,
      *,
+     text: str | None = None,
+     url_or_path: str | Path = None,
      file_types: list[str] | None = None,  # only local files
      recursive: bool = False,  # only local files
+     tokenizer: Callable[[str], list[str]] = None,
      chunk_by: Literal["chars", "tokens"] = "chars",
      chunk_size: int = 1500,
      overlap: float = 0.1,
@@ -175,45 +177,52 @@ def chunk(
      output_file: str | Path | None = None,
      metadata: dict[str, Any] | None = None,
      reader_tool: Callable = None,
- ):
-     if isinstance(url_or_path, str):
-         url_or_path = Path(url_or_path)
-
-     chunks = None
-     files = None
-     if url_or_path.exists():
-         if url_or_path.is_dir():
-             files = dir_to_files(
-                 directory=url_or_path,
-                 file_types=file_types,
-                 recursive=recursive,
+     as_node: bool = False,
+ ) -> list:
+     texts = []
+     if not text:
+         if isinstance(url_or_path, str):
+             url_or_path = Path(url_or_path)
+
+         chunks = None
+         files = None
+         if url_or_path.exists():
+             if url_or_path.is_dir():
+                 files = dir_to_files(
+                     directory=url_or_path,
+                     file_types=file_types,
+                     recursive=recursive,
+                 )
+             elif url_or_path.is_file():
+                 files = [url_or_path]
+         else:
+             files = (
+                 [str(url_or_path)]
+                 if not isinstance(url_or_path, list)
+                 else url_or_path
              )
-     elif url_or_path.is_file():
-         files = [url_or_path]
-     else:
-         files = (
-             [str(url_or_path)]
-             if not isinstance(url_or_path, list)
-             else url_or_path
-         )
 
-     if reader_tool is None:
-         reader_tool = lambda x: x.read_text(encoding="utf-8")
+         if reader_tool is None:
+             reader_tool = lambda x: x.read_text(encoding="utf-8")
 
-     if reader_tool == "docling":
-         from lionagi.libs.package.imports import check_import
+         if reader_tool == "docling":
+             from lionagi.libs.package.imports import check_import
 
-         DocumentConverter = check_import(
-             "docling",
-             module_name="document_converter",
-             import_name="DocumentConverter",
-         )
-         converter = DocumentConverter()
-         reader_tool = lambda x: converter.convert(
-             x
-         ).document.export_to_markdown()
+             DocumentConverter = check_import(
+                 "docling",
+                 module_name="document_converter",
+                 import_name="DocumentConverter",
+             )
+             converter = DocumentConverter()
+             reader_tool = lambda x: converter.convert(
+                 x
+             ).document.export_to_markdown()
+
+         texts = lcall(files, reader_tool)
+
+     else:
+         texts = [text]
 
-     texts = lcall(files, reader_tool)
      chunks = lcall(
          texts,
          chunk_content,
@@ -224,6 +233,7 @@ def chunk(
          metadata=metadata,
          as_node=True,
          flatten=True,
+         tokenizer=tokenizer or str.split,
      )
      if threshold:
          chunks = [c for c in chunks if len(c.content) > threshold]
@@ -247,4 +257,7 @@ def chunk(
      else:
          raise ValueError(f"Unsupported output file format: {output_file}")
 
-     return chunks
+     if as_node:
+         return chunks
+
+     return [c.content for c in chunks]
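A sketch of the reworked API: every argument is now keyword-only, raw text can be chunked without touching the filesystem, and the function returns plain strings unless `as_node=True` restores the previous node-object behavior:

```python
from lionagi.libs.file.process import chunk

pieces = chunk(
    text="some long document " * 500,  # direct text input, no file needed
    chunk_by="tokens",
    chunk_size=256,
    overlap=0.1,
    tokenizer=str.split,  # also the fallback when tokenizer is None
)
print(len(pieces), type(pieces[0]))  # list of str; pass as_node=True for nodes
```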
lionagi/libs/token_transform/base.py CHANGED
@@ -13,6 +13,7 @@ MAPPING_PATH = "synthlang_/resources/mapping"
 
  class TokenMappingTemplate(str, Enum):
      RUST_CHINESE = "rust_chinese"
+     LION_EMOJI = "lion_emoji"
 
      @property
      def fp(self) -> Path:
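The new member is loaded the same way as `RUST_CHINESE`; the calls below are the same ones `translate_to_synthlang` uses:

```python
from lionagi.libs.token_transform.base import TokenMapping, TokenMappingTemplate

mapping = TokenMapping.load_from_template(TokenMappingTemplate.LION_EMOJI)
print(mapping.metadata["title"])  # "LionAGI-Emoji Symbolic Encoding Mapping"
print(mapping.content["return"])  # "⟲"
```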
lionagi/libs/token_transform/symbolic_compress_context.py CHANGED
@@ -4,13 +4,16 @@ from typing import Literal
 
  from lionagi.service.imodel import iModel
  from lionagi.session.branch import Branch
- from lionagi.utils import alcall
+ from lionagi.utils import alcall, get_bins
 
  from .base import TokenMapping, TokenMappingTemplate
  from .synthlang_.base import SynthlangFramework, SynthlangTemplate
 
  FRAMEWORK_OPTIONS = SynthlangFramework.load_framework_options()
  FRAMEWORK_CHOICES = Literal["math", "optim", "custom_algebra"]
+ DEFAULT_INVOKATION_PROMPT = (
+     "The light-speed brown fox jumps over the lazy dog with great agility."
+ )
 
 
  async def symbolic_compress_context(
@@ -19,10 +22,10 @@ async def symbolic_compress_context(
      url_or_path: str | Path = None,
      chunk_by="tokens",
      chunk_size: int = 1000,
-     chunk_tokenizer: Callable = str.split,
+     chunk_tokenizer: Callable = None,
      threshold=50,
      output_path: Path | str = None,
-     overlap=0.05,
+     overlap=0.025,
      system: str = None,
      chat_model: iModel = None,
      use_lion_system_message: bool = True,
@@ -42,8 +45,8 @@ async def symbolic_compress_context(
      compress_min_pplx=None,
      encode_token_map: TokenMappingTemplate | dict | TokenMapping = None,
      num_encodings: int = 3,
-     encode_output: bool = False,
-     num_output_encodings: int = None,
+     encode_output: bool = True,
+     num_output_encodings: int = 1,
      verbose: bool = True,
      branch: Branch = None,
      additional_text: str = "",
@@ -96,7 +99,7 @@ async def symbolic_compress_context(
 
      from lionagi.libs.file.process import chunk, chunk_content
 
-     texts = []
+     chunks = []
      if url_or_path:
          chunks = chunk(
              url_or_path=url_or_path,
@@ -105,28 +108,38 @@ async def symbolic_compress_context(
              overlap=overlap,
              threshold=threshold,
          )
-         texts = [i.content for i in chunks if i.content]
 
      elif text:
-         texts = chunk_content(
+         chunks = chunk_content(
              text=text,
              chunk_by=chunk_by,
              chunk_size=chunk_size,
              overlap=overlap,
              threshold=threshold,
-             tokenizer=chunk_tokenizer,
+             tokenizer=chunk_tokenizer or str.split,
          )
 
+     texts = [str(i).strip() for i in chunks if str(i).strip()]
+     bins = get_bins(texts, upper=chunk_size)
+     textss = []
+     for i in bins:
+         textss.append("\n".join([texts[j] for j in i]))
+
      results = await alcall(
-         texts,
+         textss,
          _inner,
          max_concurrent=max_concurrent,
          retry_default=None,
+         num_retries=3,
          throttle_period=throttle_period,
+         retry_delay=1,
+         backoff_factor=2,
          flatten=True,
          dropna=True,
+         unique_output=True,
      )
      text = "\n".join(results)
+     text = DEFAULT_INVOKATION_PROMPT + text
 
      if output_path:
          fp = Path(output_path)
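The new re-binning step merges small chunks before dispatch, so each `_inner` call receives roughly `chunk_size` worth of text instead of one tiny chunk. A hedged sketch of the mechanics, assuming `get_bins` packs consecutive strings into index bins whose combined length stays under `upper` (the semantics its usage here implies):

```python
from lionagi.utils import get_bins

texts = ["alpha " * 50, "beta " * 50, "gamma " * 200]
bins = get_bins(texts, upper=1000)  # e.g. [[0, 1], [2]]: lists of indices
merged = ["\n".join(texts[j] for j in i) for i in bins]
print(f"{len(texts)} chunks packed into {len(merged)} bin(s)")
```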
lionagi/libs/token_transform/synthlang_/resources/mapping/lion_emoji_mapping.toml ADDED
@@ -0,0 +1,55 @@
+ id = "8d3c0f4a-5bbb-4426-99fb-1a37c6a72b38"
+ created_at = 1740946382.550635
+ category = "utility"
+
+ [metadata]
+ title = "LionAGI-Emoji Symbolic Encoding Mapping"
+ domain = "Symbolic Compression"
+ version = "1.0"
+ overview = "This resource provides a mapping of lionagi emoji symbolic encoding for compressing lionagi python codebase. When using, should keep the semantic structure of original python codes, do not translate into emoji nor any natural language."
+ lion_class = "lionagi.libs.token_transform.base.TokenMapping"
+
+ [content]
+ "..." = "⋯"
+ --- = "⧦"
+ "⧦⧦⧦" = "⧦"
+ " " = "空"
+ "###" = "井"
+ lionagi = "🦁"
+ LionAGI = "🦁"
+ Branch = "β"
+ branch = "β"
+ iModel = "🧠"
+ imodel = "🧠"
+ operatives = "Ṓ"
+ operative = "Ṓ"
+ operations = "⨔"
+ operation = "⨔"
+ protocols = "🔏"
+ manager = "👑"
+ Manager = "👑"
+ Element = "🧱"
+ element = "🧱"
+ Pile = "📚"
+ pile = "📚"
+ progression = "䷢"
+ Progression = "䷢"
+ IDType = "🆔"
+ "await " = "🕰️"
+ "async " = "⋕"
+ True = "✅"
+ False = "❌"
+ None = "🅾️"
+ "->" = "→"
+ "<=" = "≤"
+ ">=" = "≥"
+ "!=" = "≠"
+ "=>" = "⇒"
+ "def " = "∂"
+ "==" = "⩵"
+ from = "从"
+ if = "¿"
+ return = "⟲"
+ function = "ƒ"
+ "```synthlang\n```synthlang\n```synthlang" = "```synthlang"
+ "```synthlang\n```synthlang" = "```synthlang"
lionagi/libs/token_transform/synthlang_/resources/mapping/rust_chinese_mapping.toml CHANGED
@@ -4,18 +4,18 @@ created_at = 1740603518.077121
  [metadata]
  title = "Rust-Chinese Symbolic Encoding Mapping"
  domain = "Symbolic Compression"
- version = "1.0"
- overview = "This resource provides a mapping of Rust-Chinese symbolic encoding for compressing rust codebase."
+ version = "1.1"
+ overview = "This resource provides a mapping of Rust-Chinese symbolic encoding for compressing rust codebase. When using, should keep the semantic structure of original rust codes, do not translate into chinese."
  category = "Token Transform"
  lion_class = "lionagi.libs.token_transform.base.TokenMapping"
 
  [content]
- --- = ""
- "```" = ""
+ --- = ""
+ "std::" = ""
  "###" = "井"
  "..." = "点"
  "~~~" = "波"
- "|||" = ""
+ "|||" = ""
  " " = "空"
  "===" = "等"
  "{{{" = "開"
@@ -34,4 +34,27 @@ lion_class = "lionagi.libs.token_transform.base.TokenMapping"
  "=>" = "⇒"
  "<=>" = "⇔"
  "::" = "的"
- "synthlang" = ""
+ "assert_eq!" = ""
+ "assert_eq" = "是"
+ "assert!" = "否"
+ assert = "是"
+ ";//" = ";"
+ "横横" = "横"
+ "斜斜" = "斜"
+ "//!" = " "
+ "##// " = " "
+ "#//" = " "
+ "//点" = " "
+ "空空" = " "
+ Self = "自"
+ self = "自"
+ "enum " = "列"
+ "00000000" = "亿"
+ "0000" = "万"
+ "万万" = "亿"
+ ";\n// " = "; "
+ "pub fn " = "公"
+ Clone = "隆"
+ clone = "隆"
+ derive = "衍"
+ "println!" = "印"
lionagi/libs/token_transform/synthlang_/translate_to_synthlang.py CHANGED
@@ -45,14 +45,18 @@ async def translate_to_synthlang(
              encode_token_map = TokenMapping.load_from_template(
                  encode_token_map
              )
+             additional_text += (
+                 f"\nTransforming text with {encode_token_map.metadata['title']}"
+                 f"\nOverview: {encode_token_map.metadata['overview']}"
+             )
              encode_token_map = encode_token_map.content
          if not isinstance(encode_token_map, dict):
              raise ValueError(
                  "encode_token_map must be a dict or TokenMappingTemplate"
              )
          for _ in range(num_encodings):
-             text = "\n".join(
-                 [str(i).strip() for i in text.split("\n") if i.strip()]
+             text = " ".join(
+                 [str(i).strip() for i in text.split() if i.strip()]
              )
              for k, v in encode_token_map.items():
                  text = text.replace(k, v)
@@ -60,7 +64,6 @@ async def translate_to_synthlang(
          additional_text += (
              f"\nthesaurus, lexicon, glossary:\n{encode_token_map}"
          )
-
      if not isinstance(framework_template, SynthlangFramework):
          framework_template = SynthlangFramework.load_from_template(
              framework_template
@@ -98,9 +101,10 @@ async def translate_to_synthlang(
 
      kwargs["guidance"] = (
          "Following SynthLang, translate the provided text into SynthLang syntax. "
-         "Reasonably shrink the token size by 50-80%. Return only the translated text"
-         "enclosed by ```synthlang...```. "
-         "\n\n" + kwargs.get("guidance", "")
+         "Reasonably reduce the token count by up to 80%. Return only the transformed"
+         " string enclosed by ```synthlang...```. \n\n"
+         "DO NOT include anything else, no comments, no explanations, no additional "
+         "text, just the transformed string." + kwargs.get("guidance", "")
      )
 
      out = await branch.chat(
@@ -114,8 +118,8 @@ async def translate_to_synthlang(
      if encode_output:
          if isinstance(num_output_encodings, int) and num_output_encodings > 0:
              for _ in range(num_output_encodings):
-                 out = "\n".join(
-                     [str(i).strip() for i in out.split("\n") if i.strip()]
+                 out = " ".join(
+                     [str(i).strip() for i in out.split(" ") if i.strip()]
                  )
                  for k, v in encode_token_map.items():
                      out = out.replace(k, v).strip()
@@ -123,7 +127,7 @@ async def translate_to_synthlang(
      if sys1:
          branch.msgs.add_message(system=sys1)
 
-     len_ = calculator.tokenize(out, return_tokens=False)
+     len_ = calculator.tokenize(out)
      if verbose:
          msg = "------------------------------------------\n"
          msg += f"Compression Method: SynthLang\n"
lionagi/version.py CHANGED
@@ -1 +1 @@
- __version__ = "0.9.14"
+ __version__ = "0.9.16"
lionagi-0.9.16.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: lionagi
- Version: 0.9.14
+ Version: 0.9.16
  Summary: An Intelligence Operating System.
  Author-email: HaiyangLi <quantocean.li@gmail.com>
  License: Apache License
lionagi-0.9.16.dist-info/RECORD CHANGED
@@ -4,14 +4,15 @@ lionagi/_errors.py,sha256=JlBTFJnRWtVYcRxKb7fWFiJHLbykl1E19mSJ8sXYVxg,455
  lionagi/_types.py,sha256=9g7iytvSj3UjZxD-jL06_fxuNfgZyWT3Qnp0XYp1wQU,63
  lionagi/settings.py,sha256=W52mM34E6jXF3GyqCFzVREKZrmnUqtZm_BVDsUiDI_s,1627
  lionagi/utils.py,sha256=K36D9AAGiMPR4eM9tYoiVgvH-NdPPSeMQPls09s7keQ,73223
- lionagi/version.py,sha256=XGLQjyj9cqDYEcMT9GOWB4Gw5INLWURmRG2KlvV7wDk,23
+ lionagi/version.py,sha256=acB0gualD_f0pAsWdbLFyA6FBMb_Gmat0K0kEn9R9AA,23
  lionagi/libs/__init__.py,sha256=5y5joOZzfFWERl75auAcNcKC3lImVJ5ZZGvvHZUFCJM,112
  lionagi/libs/parse.py,sha256=JRS3bql0InHJqATnAatl-hQv4N--XXw4P77JHhTFnrc,1011
  lionagi/libs/file/__init__.py,sha256=5y5joOZzfFWERl75auAcNcKC3lImVJ5ZZGvvHZUFCJM,112
  lionagi/libs/file/chunk.py,sha256=XeVMwM33JF0X1W6udz_nhlb3DCevA_EK6A50Hn_e5SY,9300
+ lionagi/libs/file/concat_files.py,sha256=Kb4YhG-W9IpCYDSJUmvlZgpXXAvG71ocKL_ztHjNtUk,3044
  lionagi/libs/file/file_ops.py,sha256=HBiIh1EljIJ5VTIXuyvJM0ppSs0YYOPUWmgDMJT634U,3430
  lionagi/libs/file/params.py,sha256=SZ5DkoffWfxWudOAYCfCxpL8UIm-1UjeyTtploo-Lqs,5824
- lionagi/libs/file/process.py,sha256=bIe4AdQ7eT0NHSMn0_Ail_-ltlM21YWqUWiPGXEPuHU,8264
+ lionagi/libs/file/process.py,sha256=NKC-rIkm83qv4rgz-otd89aziop0G_LPz7_kmBAowsQ,8686
  lionagi/libs/file/save.py,sha256=TCxVlKxFFnr3xZ-HAXPpTomQoyiVrp6nKRoj-bcQt4k,2863
  lionagi/libs/nested/__init__.py,sha256=5y5joOZzfFWERl75auAcNcKC3lImVJ5ZZGvvHZUFCJM,112
  lionagi/libs/nested/flatten.py,sha256=sB4jxZRoaUbjak9RbIWVWNKz2hzkhQJPFffV_Ws1GA0,5479
@@ -35,14 +36,14 @@ lionagi/libs/schema/extract_docstring.py,sha256=aYyLSRlB8lTH9QF9-6a56uph3AAkNuTy
  lionagi/libs/schema/function_to_schema.py,sha256=Ak21_0xCFP71qgb6_wNzaRSVsdkf1ieRjJ92hXo7qPE,5628
  lionagi/libs/schema/json_schema.py,sha256=cuHcaMr748O9g6suNGmRx4tRXcidd5-c7AMGjTIZyHM,7670
  lionagi/libs/token_transform/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- lionagi/libs/token_transform/base.py,sha256=1rLtfKK9pG8jEykcPG7a3o58lMTXOazgWn9Ff6BX3Wo,1483
+ lionagi/libs/token_transform/base.py,sha256=OiF2peDm39BNLJLq9uNREcM0-YfnP21B9y7BO9tKMFs,1513
  lionagi/libs/token_transform/llmlingua.py,sha256=DkeLUlrb7rGx3nZ04aADU9HXXu5mZTf_DBwT0xhzIv4,7
  lionagi/libs/token_transform/perplexity.py,sha256=tcVRjPBX3nuVqsoTkowCf6RBXuybO--owH1lf2Ywj1s,14470
- lionagi/libs/token_transform/symbolic_compress_context.py,sha256=joWy9QDtYIl8TuvZu00s90FH6EYtLtmXi9wnTfCnB58,4190
+ lionagi/libs/token_transform/symbolic_compress_context.py,sha256=Nr4vSJSN6sUQgaA1QxHhidWH3pUG_5RnoYeLHjMsoLA,4603
  lionagi/libs/token_transform/synthlang.py,sha256=W6e-_265UXqVosM9X0TLKW53rNHvWCKhsWbVAop49Ac,259
  lionagi/libs/token_transform/types.py,sha256=4HgAfNDlJ_Hu18kLt59GHr_76eF3xLpNCTbOXlAYVlA,491
  lionagi/libs/token_transform/synthlang_/base.py,sha256=diDFrm-1Zf3PerKjODo-uFQMEjxPeaGtbZ5N1lK59Kg,4114
- lionagi/libs/token_transform/synthlang_/translate_to_synthlang.py,sha256=cfhTSYDryN27Wu2iWDziToI1qidJ6sT08DKOZ51OqPQ,4659
+ lionagi/libs/token_transform/synthlang_/translate_to_synthlang.py,sha256=lRBpeKGhyNlf8ngigjYAw56QbIAVZWLQUzRg2wpwMfQ,4956
  lionagi/libs/token_transform/synthlang_/resources/frameworks/abstract_algebra.toml,sha256=2TuOAo97g8mNhdPH96HP8vYZpnC8neiP-KlhVqbp1Us,970
  lionagi/libs/token_transform/synthlang_/resources/frameworks/category_theory.toml,sha256=Stg9W3h8o7VkQ9tdAfSZmR3LctFqcH6OhOPdaw9BlIg,1064
  lionagi/libs/token_transform/synthlang_/resources/frameworks/complex_analysis.toml,sha256=iE6FS7Cn5_uJRG5-StLuMM4XVAk95bxhbYWwlstw_tA,1044
@@ -52,7 +53,8 @@ lionagi/libs/token_transform/synthlang_/resources/frameworks/math_logic.toml,sha
  lionagi/libs/token_transform/synthlang_/resources/frameworks/reflective_patterns.toml,sha256=LxBIVLHNLfvVdXjLAzqivrYaHNix514DLNYsbA-VSQ4,5730
  lionagi/libs/token_transform/synthlang_/resources/frameworks/set_theory.toml,sha256=SZpBvUySZ3_0pIrRko24a3KfbPHd55LyNwzFHyznjs4,1457
  lionagi/libs/token_transform/synthlang_/resources/frameworks/topology_fundamentals.toml,sha256=nnhfbIJQ5pTGlX7lo1XzjyOevaZOHuusvBuhwWHzbLk,1008
- lionagi/libs/token_transform/synthlang_/resources/mapping/rust_chinese_mapping.toml,sha256=KTyNI9_xr_bK3ipMZmGzRf6LGbmhOJ6S2NAbyMcQ9Kc,753
+ lionagi/libs/token_transform/synthlang_/resources/mapping/lion_emoji_mapping.toml,sha256=HQFdI7WJ6Jwl9Y4O6Bz862UtzKIzDuYkhydC7Vt_2fw,1276
+ lionagi/libs/token_transform/synthlang_/resources/mapping/rust_chinese_mapping.toml,sha256=IU5qOB-a_ZJI_wMOa0OJu3foAp6tIdvegnVt-yw5URM,1216
  lionagi/libs/token_transform/synthlang_/resources/utility/base_synthlang_system_prompt.toml,sha256=8xhY14WdDRF6GIToqzRPM7EjM6-uO6-hQ9Muei1A2Iw,3458
  lionagi/libs/validate/__init__.py,sha256=5y5joOZzfFWERl75auAcNcKC3lImVJ5ZZGvvHZUFCJM,112
  lionagi/libs/validate/common_field_validators.py,sha256=1BHznXnJYcLQrHqvHKUnP6aqCptuQ0qN7KJRCExcJBU,4778
@@ -231,7 +233,7 @@ lionagi/tools/file/writer.py,sha256=5y5joOZzfFWERl75auAcNcKC3lImVJ5ZZGvvHZUFCJM,
  lionagi/tools/file/providers/__init__.py,sha256=5y5joOZzfFWERl75auAcNcKC3lImVJ5ZZGvvHZUFCJM,112
  lionagi/tools/file/providers/docling_.py,sha256=5y5joOZzfFWERl75auAcNcKC3lImVJ5ZZGvvHZUFCJM,112
  lionagi/tools/query/__init__.py,sha256=5y5joOZzfFWERl75auAcNcKC3lImVJ5ZZGvvHZUFCJM,112
- lionagi-0.9.14.dist-info/METADATA,sha256=5W9E7xdnhy-LI4ReBsPHdczTCaEMUHFuhCJ9IoRImOE,18464
- lionagi-0.9.14.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- lionagi-0.9.14.dist-info/licenses/LICENSE,sha256=VXFWsdoN5AAknBCgFqQNgPWYx7OPp-PFEP961zGdOjc,11288
- lionagi-0.9.14.dist-info/RECORD,,
+ lionagi-0.9.16.dist-info/METADATA,sha256=p6bDdHJ5ayzHFHGd3X2Sr2Em_nI4eE6TLNg1WBwqI_M,18464
+ lionagi-0.9.16.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ lionagi-0.9.16.dist-info/licenses/LICENSE,sha256=VXFWsdoN5AAknBCgFqQNgPWYx7OPp-PFEP961zGdOjc,11288
+ lionagi-0.9.16.dist-info/RECORD,,