fabricatio 0.3.13__cp312-cp312-win_amd64.whl → 0.3.14__cp312-cp312-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. fabricatio/__init__.py +6 -13
  2. fabricatio/actions/article.py +87 -50
  3. fabricatio/actions/article_rag.py +59 -68
  4. fabricatio/actions/output.py +58 -24
  5. fabricatio/actions/rag.py +2 -3
  6. fabricatio/capabilities/advanced_judge.py +4 -7
  7. fabricatio/capabilities/advanced_rag.py +2 -1
  8. fabricatio/capabilities/censor.py +5 -4
  9. fabricatio/capabilities/check.py +27 -27
  10. fabricatio/capabilities/correct.py +22 -22
  11. fabricatio/capabilities/extract.py +33 -33
  12. fabricatio/capabilities/persist.py +103 -0
  13. fabricatio/capabilities/propose.py +2 -2
  14. fabricatio/capabilities/rag.py +11 -10
  15. fabricatio/capabilities/rating.py +66 -70
  16. fabricatio/capabilities/review.py +12 -11
  17. fabricatio/capabilities/task.py +19 -18
  18. fabricatio/decorators.py +11 -9
  19. fabricatio/{core.py → emitter.py} +17 -19
  20. fabricatio/journal.py +2 -4
  21. fabricatio/models/action.py +15 -32
  22. fabricatio/models/extra/aricle_rag.py +13 -8
  23. fabricatio/models/extra/article_base.py +57 -25
  24. fabricatio/models/extra/article_essence.py +2 -1
  25. fabricatio/models/extra/article_main.py +24 -22
  26. fabricatio/models/extra/article_outline.py +2 -1
  27. fabricatio/models/extra/article_proposal.py +1 -1
  28. fabricatio/models/extra/rag.py +2 -2
  29. fabricatio/models/extra/rule.py +2 -1
  30. fabricatio/models/generic.py +55 -137
  31. fabricatio/models/kwargs_types.py +1 -54
  32. fabricatio/models/role.py +49 -28
  33. fabricatio/models/task.py +3 -4
  34. fabricatio/models/tool.py +6 -7
  35. fabricatio/models/usages.py +146 -149
  36. fabricatio/parser.py +59 -99
  37. fabricatio/rust.cp312-win_amd64.pyd +0 -0
  38. fabricatio/rust.pyi +58 -81
  39. fabricatio/utils.py +63 -162
  40. fabricatio-0.3.14.data/scripts/tdown.exe +0 -0
  41. fabricatio-0.3.14.data/scripts/ttm.exe +0 -0
  42. {fabricatio-0.3.13.dist-info → fabricatio-0.3.14.dist-info}/METADATA +10 -13
  43. fabricatio-0.3.14.dist-info/RECORD +64 -0
  44. {fabricatio-0.3.13.dist-info → fabricatio-0.3.14.dist-info}/WHEEL +1 -1
  45. fabricatio-0.3.13.data/scripts/tdown.exe +0 -0
  46. fabricatio-0.3.13.data/scripts/ttm.exe +0 -0
  47. fabricatio-0.3.13.dist-info/RECORD +0 -63
  48. {fabricatio-0.3.13.dist-info → fabricatio-0.3.14.dist-info}/licenses/LICENSE +0 -0
fabricatio/parser.py CHANGED
@@ -1,152 +1,112 @@
1
- """A module to parse text using regular expressions."""
1
+ """A module for capturing patterns in text using regular expressions."""
2
2
 
3
3
  import re
4
+ from dataclasses import dataclass, field
4
5
  from functools import lru_cache
5
- from re import Pattern, compile
6
- from typing import Any, Callable, Iterable, List, Optional, Self, Tuple, Type
6
+ from typing import Any, Callable, Iterable, List, Optional, Self, Tuple, Type, Union
7
7
 
8
8
  import ujson
9
- from fabricatio.rust import CONFIG
10
9
  from json_repair import repair_json
11
- from pydantic import BaseModel, ConfigDict, Field, PositiveInt, PrivateAttr, ValidationError
12
10
 
13
11
  from fabricatio.journal import logger
12
+ from fabricatio.rust import CONFIG
14
13
 
15
14
 
16
- class Capture(BaseModel):
15
+ @dataclass(frozen=True)
16
+ class Capture:
17
17
  """A class to capture patterns in text using regular expressions.
18
18
 
19
19
  Attributes:
20
- pattern (str): The regular expression pattern to search for.
21
- _compiled (Pattern): The compiled regular expression pattern.
20
+ target_groups (Tuple[int, ...]): The target groups to extract from the match.
21
+ pattern (str): The regex pattern to search for.
22
+ flags (int): Flags to apply when compiling the regex.
23
+ capture_type (Optional[str]): Optional hint for post-processing (e.g., 'json').
22
24
  """
23
25
 
24
- model_config = ConfigDict(use_attribute_docstrings=True)
25
- target_groups: Tuple[int, ...] = Field(default_factory=tuple)
26
- """The target groups to capture from the pattern."""
27
- pattern: str = Field(frozen=True)
26
+ pattern: str = field()
28
27
  """The regular expression pattern to search for."""
29
- flags: PositiveInt = Field(default=re.DOTALL | re.MULTILINE | re.IGNORECASE, frozen=True)
30
- """The flags to use when compiling the regular expression pattern."""
28
+ flags: int = re.DOTALL | re.MULTILINE | re.IGNORECASE
29
+ """Flags to control regex behavior (DOTALL, MULTILINE, IGNORECASE by default)."""
31
30
  capture_type: Optional[str] = None
32
- """The type of capture to perform, e.g., 'json', which is used to dispatch the fixer accordingly."""
33
- _compiled: Pattern = PrivateAttr()
34
-
35
- def model_post_init(self, __context: Any) -> None:
36
- """Initialize the compiled pattern."""
37
- self._compiled = compile(self.pattern, self.flags)
38
-
39
- def fix[T](self, text: str | Iterable[str] | T) -> str | List[str] | T:
40
- """Fix the text using the pattern.
31
+ """Optional type identifier for post-processing (e.g., 'json' for JSON repair)."""
32
+ target_groups: Tuple[int, ...] = field(default_factory=tuple)
33
+ """Tuple of group indices to extract from the match (1-based indexing)."""
41
34
 
42
- Args:
43
- text (str | List[str]): The text to fix.
44
-
45
- Returns:
46
- str | List[str]: The fixed text with the same type as input.
47
- """
35
+ def fix(self, text: Union[str, Iterable[str], Any]) -> Union[str, List[str], Any]:
36
+ """Fix the text based on capture_type (e.g., JSON repair)."""
48
37
  match self.capture_type:
49
38
  case "json" if CONFIG.general.use_json_repair:
50
- logger.debug("Applying json repair to text.")
39
+ logger.debug("Applying JSON repair to text.")
51
40
  if isinstance(text, str):
52
- return repair_json(text, ensure_ascii=False) # pyright: ignore [reportReturnType]
53
- return [repair_json(item, ensure_ascii=False) for item in
54
- text] # pyright: ignore [reportReturnType, reportGeneralTypeIssues]
41
+ return repair_json(text, ensure_ascii=False)
42
+ return [repair_json(item, ensure_ascii=False) for item in text]
55
43
  case _:
56
- return text # pyright: ignore [reportReturnType]
57
-
58
- def capture(self, text: str) -> Tuple[str, ...] | str | None:
59
- """Capture the first occurrence of the pattern in the given text.
60
-
61
- Args:
62
- text (str): The text to search the pattern in.
63
-
64
- Returns:
65
- str | None: The captured text if the pattern is found, otherwise None.
66
-
67
- """
68
- if (match := self._compiled.match(text) or self._compiled.search(text)) is None:
69
- logger.debug(f"Capture Failed {type(text)}: \n{text}")
44
+ return text
45
+
46
+ def capture(self, text: str) -> Optional[Union[str, Tuple[str, ...]]]:
47
+ """Capture the first match of the pattern in the text."""
48
+ compiled = re.compile(self.pattern, self.flags)
49
+ match = compiled.match(text) or compiled.search(text)
50
+ if match is None:
51
+ logger.debug(f"Capture Failed: {text}")
70
52
  return None
53
+
71
54
  groups = self.fix(match.groups())
72
55
  if self.target_groups:
73
56
  cap = tuple(groups[g - 1] for g in self.target_groups)
74
- logger.debug(f"Captured text: {'\n\n'.join(cap)}")
57
+ logger.debug(f"Captured texts: {'\n==\n'.join(cap)}")
75
58
  return cap
76
59
  cap = groups[0]
77
60
  logger.debug(f"Captured text: \n{cap}")
78
61
  return cap
79
62
 
80
- def convert_with[T](self, text: str, convertor: Callable[[Tuple[str, ...]], T] | Callable[[str], T]) -> T | None:
81
- """Convert the given text using the pattern.
82
-
83
- Args:
84
- text (str): The text to search the pattern in.
85
- convertor (Callable[[Tuple[str, ...]], T] | Callable[[str], T]): The function to convert the captured text.
86
-
87
- Returns:
88
- str | None: The converted text if the pattern is found, otherwise None.
89
- """
63
+ def convert_with(
64
+ self,
65
+ text: str,
66
+ convertor: Callable[[Union[str, Tuple[str, ...]]], Any],
67
+ ) -> Optional[Any]:
68
+ """Convert captured text using a provided function."""
90
69
  if (cap := self.capture(text)) is None:
91
70
  return None
92
71
  try:
93
- return convertor(cap) # pyright: ignore [reportArgumentType]
94
- except (ValueError, SyntaxError, ValidationError) as e:
95
- logger.error(f"Failed to convert text using {convertor.__name__} to convert.\nerror: {e}\n {cap}")
72
+ return convertor(cap)
73
+ except Exception as e: # noqa: BLE001
74
+ logger.error(f"Failed to convert text using {convertor.__name__}: {e}\n{cap}")
96
75
  return None
97
76
 
98
- def validate_with[K, T, E](
99
- self,
100
- text: str,
101
- target_type: Type[T],
102
- elements_type: Optional[Type[E]] = None,
103
- length: Optional[int] = None,
104
- deserializer: Callable[[Tuple[str, ...]], K] | Callable[[str], K] = ujson.loads,
105
- ) -> T | None:
106
- """Validate the given text using the pattern.
107
-
108
- Args:
109
- text (str): The text to search the pattern in.
110
- target_type (Type[T]): The expected type of the output, dict or list.
111
- elements_type (Optional[Type[E]]): The expected type of the elements in the output dict keys or list elements.
112
- length (Optional[int]): The expected length of the output, bool(length)==False means no length validation.
113
- deserializer (Callable[[Tuple[str, ...]], K] | Callable[[str], K]): The function to deserialize the captured text.
114
-
115
- Returns:
116
- T | None: The validated text if the pattern is found and the output is of the expected type, otherwise None.
117
- """
118
- judges = [lambda output_obj: isinstance(output_obj, target_type)]
77
+ def validate_with[T, K, E](
78
+ self,
79
+ text: str,
80
+ target_type: Type[T],
81
+ elements_type: Optional[Type[E]] = None,
82
+ length: Optional[int] = None,
83
+ deserializer: Callable[[Union[str, Tuple[str, ...]]], K] = lambda x: ujson.loads(x) if isinstance(x, str) else ujson.loads(x[0]),
84
+ ) -> Optional[T]:
85
+ """Deserialize and validate the captured text against expected types."""
86
+ judges = [lambda obj: isinstance(obj, target_type)]
119
87
  if elements_type:
120
- judges.append(lambda output_obj: all(isinstance(e, elements_type) for e in output_obj))
88
+ judges.append(lambda obj: all(isinstance(e, elements_type) for e in obj))
121
89
  if length:
122
- judges.append(lambda output_obj: len(output_obj) == length)
90
+ judges.append(lambda obj: len(obj) == length)
123
91
 
124
92
  if (out := self.convert_with(text, deserializer)) and all(j(out) for j in judges):
125
- return out # pyright: ignore [reportReturnType]
93
+ return out # type: ignore
126
94
  return None
127
95
 
128
96
  @classmethod
129
97
  @lru_cache(32)
130
98
  def capture_code_block(cls, language: str) -> Self:
131
- """Capture the first occurrence of a code block in the given text.
132
-
133
- Args:
134
- language (str): The text containing the code block.
135
-
136
- Returns:
137
- Self: The instance of the class with the captured code block.
138
- """
99
+ """Capture a code block of the given language."""
139
100
  return cls(pattern=f"```{language}(.*?)```", capture_type=language)
140
101
 
141
102
  @classmethod
142
103
  @lru_cache(32)
143
104
  def capture_generic_block(cls, language: str) -> Self:
144
- """Capture the first occurrence of a generic code block in the given text.
145
-
146
- Returns:
147
- Self: The instance of the class with the captured code block.
148
- """
149
- return cls(pattern=f"--- Start of {language} ---(.*?)--- end of {language} ---", capture_type=language)
105
+ """Capture a generic block of the given language."""
106
+ return cls(
107
+ pattern=f"--- Start of {language} ---(.*?)--- End of {language} ---",
108
+ capture_type=language,
109
+ )
150
110
 
151
111
 
152
112
  JsonCapture = Capture.capture_code_block("json")
Binary file
fabricatio/rust.pyi CHANGED
@@ -10,12 +10,13 @@ Key Features:
10
10
  - Cryptographic utilities: BLAKE3 hashing.
11
11
  - Text utilities: Word boundary splitting and word counting.
12
12
  """
13
+
13
14
  from enum import StrEnum
14
- from typing import Any, Dict, List, Optional, Self, Tuple, overload, Union
15
+ from pathlib import Path
16
+ from typing import Any, Dict, List, Literal, Optional, Self, Tuple, Union, overload
15
17
 
16
18
  from pydantic import JsonValue
17
19
 
18
-
19
20
  class TemplateManager:
20
21
  """Template rendering engine using Handlebars templates.
21
22
 
@@ -47,10 +48,8 @@ class TemplateManager:
47
48
 
48
49
  @overload
49
50
  def render_template(self, name: str, data: Dict[str, Any]) -> str: ...
50
-
51
51
  @overload
52
52
  def render_template(self, name: str, data: List[Dict[str, Any]]) -> List[str]: ...
53
-
54
53
  def render_template(self, name: str, data: Dict[str, Any] | List[Dict[str, Any]]) -> str | List[str]:
55
54
  """Render a template with context data.
56
55
 
@@ -67,10 +66,8 @@ class TemplateManager:
67
66
 
68
67
  @overload
69
68
  def render_template_raw(self, template: str, data: Dict[str, Any]) -> str: ...
70
-
71
69
  @overload
72
70
  def render_template_raw(self, template: str, data: List[Dict[str, Any]]) -> List[str]: ...
73
-
74
71
  def render_template_raw(self, template: str, data: Dict[str, Any] | List[Dict[str, Any]]) -> str | List[str]:
75
72
  """Render a template with context data.
76
73
 
@@ -82,7 +79,6 @@ class TemplateManager:
82
79
  Rendered template content as string or list of strings
83
80
  """
84
81
 
85
-
86
82
  class BibManager:
87
83
  """BibTeX bibliography manager for parsing and querying citation data."""
88
84
 
@@ -191,7 +187,6 @@ class BibManager:
191
187
  Field value if found, None otherwise
192
188
  """
193
189
 
194
-
195
190
  def blake3_hash(content: bytes) -> str:
196
191
  """Calculate the BLAKE3 cryptographic hash of data.
197
192
 
@@ -202,11 +197,9 @@ def blake3_hash(content: bytes) -> str:
202
197
  Hex-encoded BLAKE3 hash string
203
198
  """
204
199
 
205
-
206
200
  def detect_language(string: str) -> str:
207
201
  """Detect the language of a given string."""
208
202
 
209
-
210
203
  def split_word_bounds(string: str) -> List[str]:
211
204
  """Split the string into words based on word boundaries.
212
205
 
@@ -217,7 +210,6 @@ def split_word_bounds(string: str) -> List[str]:
217
210
  A list of words extracted from the string.
218
211
  """
219
212
 
220
-
221
213
  def split_sentence_bounds(string: str) -> List[str]:
222
214
  """Split the string into sentences based on sentence boundaries.
223
215
 
@@ -228,7 +220,6 @@ def split_sentence_bounds(string: str) -> List[str]:
228
220
  A list of sentences extracted from the string.
229
221
  """
230
222
 
231
-
232
223
  def split_into_chunks(string: str, max_chunk_size: int, max_overlapping_rate: float = 0.3) -> List[str]:
233
224
  """Split the string into chunks of a specified size.
234
225
 
@@ -241,7 +232,6 @@ def split_into_chunks(string: str, max_chunk_size: int, max_overlapping_rate: fl
241
232
  A list of chunks extracted from the string.
242
233
  """
243
234
 
244
-
245
235
  def word_count(string: str) -> int:
246
236
  """Count the number of words in the string.
247
237
 
@@ -252,67 +242,51 @@ def word_count(string: str) -> int:
252
242
  The number of words in the string.
253
243
  """
254
244
 
255
-
256
245
  def is_chinese(string: str) -> bool:
257
246
  """Check if the given string is in Chinese."""
258
247
 
259
-
260
248
  def is_english(string: str) -> bool:
261
249
  """Check if the given string is in English."""
262
250
 
263
-
264
251
  def is_japanese(string: str) -> bool:
265
252
  """Check if the given string is in Japanese."""
266
253
 
267
-
268
254
  def is_korean(string: str) -> bool:
269
255
  """Check if the given string is in Korean."""
270
256
 
271
-
272
257
  def is_arabic(string: str) -> bool:
273
258
  """Check if the given string is in Arabic."""
274
259
 
275
-
276
260
  def is_russian(string: str) -> bool:
277
261
  """Check if the given string is in Russian."""
278
262
 
279
-
280
263
  def is_german(string: str) -> bool:
281
264
  """Check if the given string is in German."""
282
265
 
283
-
284
266
  def is_french(string: str) -> bool:
285
267
  """Check if the given string is in French."""
286
268
 
287
-
288
269
  def is_hindi(string: str) -> bool:
289
270
  """Check if the given string is in Hindi."""
290
271
 
291
-
292
272
  def is_italian(string: str) -> bool:
293
273
  """Check if the given string is in Italian."""
294
274
 
295
-
296
275
  def is_dutch(string: str) -> bool:
297
276
  """Check if the given string is in Dutch."""
298
277
 
299
-
300
278
  def is_portuguese(string: str) -> bool:
301
279
  """Check if the given string is in Portuguese."""
302
280
 
303
-
304
281
  def is_swedish(string: str) -> bool:
305
282
  """Check if the given string is in Swedish."""
306
283
 
307
-
308
284
  def is_turkish(string: str) -> bool:
309
285
  """Check if the given string is in Turkish."""
310
286
 
311
-
312
287
  def is_vietnamese(string: str) -> bool:
313
288
  """Check if the given string is in Vietnamese."""
314
289
 
315
-
316
290
  def tex_to_typst(string: str) -> str:
317
291
  """Convert TeX to Typst.
318
292
 
@@ -323,29 +297,18 @@ def tex_to_typst(string: str) -> str:
323
297
  The converted Typst string.
324
298
  """
325
299
 
300
+ def convert_all_tex_math(string: str) -> str:
301
+ r"""Unified function to convert all supported TeX math expressions in a string to Typst format.
326
302
 
327
- def convert_all_inline_tex(string: str) -> str:
328
- """Convert all inline TeX code in the string.
303
+ Handles $...$, $$...$$, \\(...\\), and \\[...\\]
329
304
 
330
305
  Args:
331
- string: The input string containing inline TeX code wrapped in $code$.
306
+ string: The input string containing TeX math expressions.
332
307
 
333
308
  Returns:
334
- The converted string with inline TeX code replaced.
309
+ The string with TeX math expressions converted to Typst format.
335
310
  """
336
311
 
337
-
338
- def convert_all_block_tex(string: str) -> str:
339
- """Convert all block TeX code in the string.
340
-
341
- Args:
342
- string: The input string containing block TeX code wrapped in $$code$$.
343
-
344
- Returns:
345
- The converted string with block TeX code replaced.
346
- """
347
-
348
-
349
312
  def fix_misplaced_labels(string: str) -> str:
350
313
  """A func to fix labels in a string.
351
314
 
@@ -356,9 +319,8 @@ def fix_misplaced_labels(string: str) -> str:
356
319
  The fixed string with labels properly placed.
357
320
  """
358
321
 
359
-
360
322
  def comment(string: str) -> str:
361
- """Add comment to the string.
323
+ r"""Add comment to the string.
362
324
 
363
325
  Args:
364
326
  string: The input string to which comments will be added.
@@ -367,7 +329,6 @@ def comment(string: str) -> str:
367
329
  The string with each line prefixed by '// '.
368
330
  """
369
331
 
370
-
371
332
  def uncomment(string: str) -> str:
372
333
  """Remove comment from the string.
373
334
 
@@ -378,6 +339,15 @@ def uncomment(string: str) -> str:
378
339
  The string with comments (lines starting with '// ' or '//') removed.
379
340
  """
380
341
 
342
+ def strip_comment(string: str) -> str:
343
+ """Remove leading and trailing comment lines from a multi-line string.
344
+
345
+ Args:
346
+ string: Input string that may have comment lines at start and/or end
347
+
348
+ Returns:
349
+ str: A new string with leading and trailing comment lines removed
350
+ """
381
351
 
382
352
  def split_out_metadata(string: str) -> Tuple[Optional[JsonValue], str]:
383
353
  """Split out metadata from a string.
@@ -389,7 +359,6 @@ def split_out_metadata(string: str) -> Tuple[Optional[JsonValue], str]:
389
359
  A tuple containing the metadata as a Python object (if parseable) and the remaining string.
390
360
  """
391
361
 
392
-
393
362
  def to_metadata(data: JsonValue) -> str:
394
363
  """Convert a Python object to a YAML string.
395
364
 
@@ -400,16 +369,7 @@ def to_metadata(data: JsonValue) -> str:
400
369
  The YAML string representation of the input data.
401
370
  """
402
371
 
403
-
404
- def convert_to_inline_formula(string: str) -> str:
405
- r"""Convert `$...$` to inline formula `\(...\)` and trim spaces."""
406
-
407
-
408
- def convert_to_block_formula(string: str) -> str:
409
- r"""Convert `$$...$$` to block formula `\[...\]` and trim spaces."""
410
-
411
-
412
- def inplace_update(string: str, wrapper: str, new_body: str) -> Optional[str]:
372
+ def replace_thesis_body(string: str, wrapper: str, new_body: str) -> Optional[str]:
413
373
  """Replace content between wrapper strings.
414
374
 
415
375
  Args:
@@ -422,7 +382,6 @@ def inplace_update(string: str, wrapper: str, new_body: str) -> Optional[str]:
422
382
 
423
383
  """
424
384
 
425
-
426
385
  def extract_body(string: str, wrapper: str) -> Optional[str]:
427
386
  """Extract the content between two occurrences of a wrapper string.
428
387
 
@@ -434,7 +393,6 @@ def extract_body(string: str, wrapper: str) -> Optional[str]:
434
393
  The content between the first two occurrences of the wrapper string if found, otherwise None.
435
394
  """
436
395
 
437
-
438
396
  class LLMConfig:
439
397
  """LLM configuration structure.
440
398
 
@@ -486,7 +444,6 @@ class LLMConfig:
486
444
  frequency_penalty: Optional[float]
487
445
  """Penalizes new tokens based on their frequency in text so far (-2.0-2.0)."""
488
446
 
489
-
490
447
  class EmbeddingConfig:
491
448
  """Embedding configuration structure."""
492
449
 
@@ -511,7 +468,6 @@ class EmbeddingConfig:
511
468
  api_key: Optional[SecretStr]
512
469
  """The API key."""
513
470
 
514
-
515
471
  class RagConfig:
516
472
  """RAG (Retrieval Augmented Generation) configuration structure."""
517
473
 
@@ -527,18 +483,16 @@ class RagConfig:
527
483
  milvus_dimensions: Optional[int]
528
484
  """The dimensions for Milvus vectors."""
529
485
 
530
-
531
486
  class DebugConfig:
532
487
  """Debug configuration structure."""
533
488
 
534
489
  log_level: Optional[str]
535
490
  """The logging level to use."""
536
491
 
537
-
538
492
  class TemplateManagerConfig:
539
493
  """Template manager configuration structure."""
540
494
 
541
- template_dir: List[str]
495
+ template_dir: List[Path]
542
496
  """The directories containing the templates."""
543
497
 
544
498
  active_loading: Optional[bool]
@@ -547,7 +501,6 @@ class TemplateManagerConfig:
547
501
  template_suffix: Optional[str]
548
502
  """The suffix of the templates."""
549
503
 
550
-
551
504
  class TemplateConfig:
552
505
  """Template configuration structure."""
553
506
 
@@ -632,7 +585,6 @@ class TemplateConfig:
632
585
  chap_summary_template: str
633
586
  """The name of the chap summary template which will be used to generate a chapter summary."""
634
587
 
635
-
636
588
  class RoutingConfig:
637
589
  """Routing configuration structure for controlling request dispatching behavior."""
638
590
 
@@ -648,7 +600,6 @@ class RoutingConfig:
648
600
  cooldown_time: Optional[int]
649
601
  """Time to cooldown a deployment after failure in seconds."""
650
602
 
651
-
652
603
  class GeneralConfig:
653
604
  """General configuration structure for application-wide settings."""
654
605
 
@@ -658,7 +609,6 @@ class GeneralConfig:
658
609
  use_json_repair: bool
659
610
  """Whether to automatically repair malformed JSON."""
660
611
 
661
-
662
612
  class ToolBoxConfig:
663
613
  """Configuration for toolbox functionality."""
664
614
 
@@ -668,7 +618,6 @@ class ToolBoxConfig:
668
618
  data_module_name: str
669
619
  """The name of the module containing the data."""
670
620
 
671
-
672
621
  class PymitterConfig:
673
622
  """Pymitter configuration structure for controlling event emission and listener behavior."""
674
623
 
@@ -681,7 +630,6 @@ class PymitterConfig:
681
630
  max_listeners: int
682
631
  """The maximum number of listeners per event. -1 means unlimited."""
683
632
 
684
-
685
633
  class Config:
686
634
  """Configuration structure containing all system components."""
687
635
 
@@ -715,27 +663,23 @@ class Config:
715
663
  pymitter: PymitterConfig
716
664
  """Pymitter configuration."""
717
665
 
718
-
719
666
  CONFIG: Config
720
667
 
721
-
722
668
  class SecretStr:
723
669
  """A string that should not be exposed."""
724
670
 
725
671
  def __init__(self, source: str) -> None: ...
726
-
727
- def expose(self) -> str:
672
+ def get_secret_value(self) -> str:
728
673
  """Expose the secret string."""
729
674
 
730
-
731
675
  TEMPLATE_MANAGER: TemplateManager
732
676
 
733
-
734
677
  class Event:
735
678
  """Event class that represents a hierarchical event with segments.
736
679
 
737
680
  Events can be constructed from strings, lists of strings, or other Events.
738
681
  """
682
+
739
683
  segments: List[str]
740
684
 
741
685
  def __init__(self, segments: Optional[List[str]] = None) -> None:
@@ -841,12 +785,9 @@ class Event:
841
785
  """
842
786
 
843
787
  def __hash__(self) -> int: ...
844
-
845
788
  def __eq__(self, other: object) -> bool: ...
846
-
847
789
  def __ne__(self, other: object) -> bool: ...
848
790
 
849
-
850
791
  class TaskStatus(StrEnum, str):
851
792
  """Enumeration of possible task statuses."""
852
793
 
@@ -864,3 +805,39 @@ class TaskStatus(StrEnum, str):
864
805
 
865
806
  Cancelled: TaskStatus
866
807
  """Task has been cancelled."""
808
+
809
+ class TEIClient:
810
+ """Client for TEI reranking service.
811
+
812
+ Handles communication with a TEI reranking service to reorder text snippets
813
+ based on their relevance to a query.
814
+ """
815
+
816
+ def __init__(self, base_url: str) -> None:
817
+ """Initialize the TEI client.
818
+
819
+ Args:
820
+ base_url: URL to the TEI reranking service
821
+ """
822
+
823
+ async def arerank(
824
+ self,
825
+ query: str,
826
+ texts: List[str],
827
+ truncate: bool = False,
828
+ truncation_direction: Literal["Left", "Right"] = "Left",
829
+ ) -> List[Tuple[int, float]]:
830
+ """Rerank texts based on relevance to query.
831
+
832
+ Args:
833
+ query: The query to match texts against
834
+ texts: List of text snippets to rerank
835
+ truncate: Whether to truncate texts to fit model context
836
+ truncation_direction: Direction to truncate from ("Left" or "Right")
837
+
838
+ Returns:
839
+ List of tuples containing (original_index, relevance_score)
840
+
841
+ Raises:
842
+ RuntimeError: If reranking fails or truncation_direction is invalid
843
+ """