fabricatio-0.3.14.dev5-cp312-cp312-win_amd64.whl → fabricatio-0.3.15.dev4-cp312-cp312-win_amd64.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only and reflects the changes between those package versions as published.
@@ -2,7 +2,7 @@
 
 from asyncio import gather
 from pathlib import Path
-from typing import Callable, List, Optional
+from typing import Callable, ClassVar, List, Optional
 
 from more_itertools import filter_map
 from pydantic import Field
@@ -15,14 +15,14 @@ from fabricatio.fs import dump_text, safe_text_read
 from fabricatio.journal import logger
 from fabricatio.models.action import Action
 from fabricatio.models.extra.article_essence import ArticleEssence
-from fabricatio.models.extra.article_main import Article
+from fabricatio.models.extra.article_main import Article, ArticleChapter, ArticleSubsection
 from fabricatio.models.extra.article_outline import ArticleOutline
 from fabricatio.models.extra.article_proposal import ArticleProposal
 from fabricatio.models.extra.rule import RuleSet
 from fabricatio.models.kwargs_types import ValidateKwargs
 from fabricatio.models.task import Task
 from fabricatio.models.usages import LLMUsage
-from fabricatio.rust import CONFIG, TEMPLATE_MANAGER, BibManager, detect_language
+from fabricatio.rust import CONFIG, TEMPLATE_MANAGER, BibManager, detect_language, word_count
 from fabricatio.utils import ok, wrapp_in_block
 
 
@@ -277,43 +277,139 @@ class LoadArticle(Action):
 class WriteChapterSummary(Action, LLMUsage):
     """Write the chapter summary."""
 
-    output_key: str = "chapter_summaries"
+    ctx_override: ClassVar[bool] = True
 
     paragraph_count: int = 1
+    """The number of paragraphs to generate in the chapter summary."""
 
-    summary_word_count: int = 200
-
+    summary_word_count: int = 120
+    """The number of words to use in each chapter summary."""
+    output_key: str = "summarized_article"
+    """The key under which the summarized article will be stored in the output."""
     summary_title: str = "Chapter Summary"
-    write_to: Optional[Path] = None
+    """The title to be used for the generated chapter summary section."""
+
+    skip_chapters: List[str] = Field(default_factory=list)
+    """A list of chapter titles to skip during summary generation."""
+
+    async def _execute(self, article_path: Path, **cxt) -> Article:
+        article = Article.from_article_file(article_path, article_path.stem)
+
+        chaps = [c for c in article.chapters if c.title not in self.skip_chapters]
+
+        retained_chapters = []
+        # Count chapters before filtering based on section presence,
+        # chaps at this point has already been filtered by self.skip_chapters
+        initial_chaps_for_summary_step_count = len(chaps)
+
+        for chapter_candidate in chaps:
+            if chapter_candidate.sections:  # Check if the sections list is non-empty
+                retained_chapters.append(chapter_candidate)
+            else:
+                # Log c warning for each chapter skipped due to lack of sections
+                logger.warning(
+                    f"Chapter '{chapter_candidate.title}' has no sections and will be skipped for summary generation."
+                )
+
+        chaps = retained_chapters  # Update chaps to only include chapters with sections
 
-    async def _execute(self, article: Article, write_to: Optional[Path] = None, **cxt) -> List[str]:
-        logger.info(";".join(a.title for a in article.chapters))
+        # If chaps is now empty, but there were chapters to consider at the start of this step,
+        # log c specific warning.
+        if not chaps and initial_chaps_for_summary_step_count > 0:
+            raise ValueError("No chapters with sections were found. Please check your input data.")
 
+        # This line was part of the original selection.
+        # It will now log the titles of the chapters that are actually being processed (those with sections).
+        # If 'chaps' is empty, this will result in logger.info(""), which is acceptable.
+        logger.info(";".join(a.title for a in chaps))
         ret = [
-            f"== {self.summary_title}\n{raw}"
+            ArticleSubsection.from_typst_code(self.summary_title, raw)
             for raw in (
                 await self.aask(
                     TEMPLATE_MANAGER.render_template(
                         CONFIG.templates.chap_summary_template,
                         [
                             {
-                                "chapter": a.to_typst_code(),
-                                "title": a.title,
-                                "language": a.language,
+                                "chapter": c.to_typst_code(),
+                                "title": c.title,
+                                "language": c.language,
                                 "summary_word_count": self.summary_word_count,
                                 "paragraph_count": self.paragraph_count,
                             }
-                            for a in article.chapters
+                            for c in chaps
                         ],
                     )
                 )
             )
         ]
 
-        if (to := (self.write_to or write_to)) is not None:
-            dump_text(
-                to,
-                "\n\n\n".join(f"//{a.title}\n\n{s}" for a, s in zip(article.chapters, ret, strict=True)),
+        for c, n in zip(chaps, ret, strict=True):
+            c: ArticleChapter
+            n: ArticleSubsection
+            if c.sections[-1].title == self.summary_title:
+                logger.debug(f"Removing old summary `{self.summary_title}` at {c.title}")
+                c.sections.pop()
+
+            c.sections[-1].subsections.append(n)
+
+        article.update_article_file(article_path)
+
+        dump_text(
+            article_path, safe_text_read(article_path).replace(f"=== {self.summary_title}", f"== {self.summary_title}")
+        )
+        return article
+
+
+class WriteResearchContentSummary(Action, LLMUsage):
+    """Write the research content summary."""
+
+    ctx_override: ClassVar[bool] = True
+    summary_word_count: int = 160
+    """The number of words to use in the research content summary."""
+
+    output_key: str = "summarized_article"
+    """The key under which the summarized article will be stored in the output."""
+
+    summary_title: str = "Research Content"
+    """The title to be used for the generated research content summary section."""
+
+    paragraph_count: int = 1
+    """The number of paragraphs to generate in the research content summary."""
+
+    async def _execute(self, article_path: Path, **cxt) -> Article:
+        article = Article.from_article_file(article_path, article_path.stem)
+        if not article.chapters:
+            raise ValueError("No chapters found in the article.")
+        chap_1 = article.chapters[0]
+        if not chap_1.sections:
+            raise ValueError("No sections found in the first chapter of the article.")
+
+        outline = article.extrac_outline()
+        suma: str = await self.aask(
+            TEMPLATE_MANAGER.render_template(
+                CONFIG.templates.research_content_summary_template,
+                {
+                    "title": outline.title,
+                    "outline": outline.to_typst_code(),
+                    "language": detect_language(self.summary_title),
+                    "summary_word_count": self.summary_word_count,
+                    "paragraph_count": self.paragraph_count,
+                },
             )
+        )
+        logger.success(
+            f"{self.summary_title}|Wordcount: {word_count(suma)}|Expected: {self.summary_word_count}\n{suma}"
+        )
+
+        if chap_1.sections[-1].title == self.summary_title:
+            # remove old
+            logger.debug(f"Removing old summary `{self.summary_title}`")
+            chap_1.sections.pop()
 
-        return ret
+        chap_1.sections[-1].subsections.append(ArticleSubsection.from_typst_code(self.summary_title, suma))
+
+        article.update_article_file(article_path)
+        dump_text(
+            article_path, safe_text_read(article_path).replace(f"=== {self.summary_title}", f"== {self.summary_title}")
+        )
+        return article
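The two summary actions above now load the article from an `article_path` and write the generated summary back into the Typst file, instead of returning raw strings. A minimal usage sketch, assuming these classes live in `fabricatio.actions.article` and noting that `_execute` is normally driven by a workflow rather than called directly:

    # Hedged sketch: the module path, the direct _execute call, and the file
    # path / chapter title below are assumptions, not taken from this diff.
    import asyncio
    from pathlib import Path

    from fabricatio.actions.article import WriteChapterSummary  # assumed module path

    async def main() -> None:
        action = WriteChapterSummary(summary_word_count=120, skip_chapters=["Acknowledgements"])
        article = await action._execute(article_path=Path("thesis/main.typ"))
        print(article.exact_word_count)  # aggregate property added elsewhere in this diff

    asyncio.run(main())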
@@ -21,10 +21,7 @@ from fabricatio.models.extra.rule import RuleSet
 from fabricatio.models.kwargs_types import ChooseKwargs, LLMKwargs
 from fabricatio.rust import (
     BibManager,
-    convert_all_block_tex,
-    convert_all_inline_tex,
-    convert_to_block_formula,
-    convert_to_inline_formula,
+    convert_all_tex_math,
     fix_misplaced_labels,
 )
 from fabricatio.utils import ok
@@ -129,8 +126,7 @@ class WriteArticleContentRAG(Action, Extract, AdvancedRAG):
             raw_paras = edt
 
         raw_paras = fix_misplaced_labels(raw_paras)
-        raw_paras = convert_all_inline_tex(raw_paras)
-        raw_paras = convert_all_block_tex(raw_paras)
+        raw_paras = convert_all_tex_math(raw_paras)
 
         r_print(raw_paras)
 
@@ -153,8 +149,7 @@ class WriteArticleContentRAG(Action, Extract, AdvancedRAG):
         raw_paras = "\n".join(p for p in raw_paras.splitlines() if p and not p.endswith("**") and not p.startswith("#"))
 
         raw_paras = fix_misplaced_labels(raw_paras)
-        raw_paras = convert_all_inline_tex(raw_paras)
-        raw_paras = convert_all_block_tex(raw_paras)
+        raw_paras = convert_all_tex_math(raw_paras)
 
         return await self.extract_new_subsec(subsec, raw_paras, cm)
 
@@ -261,8 +256,6 @@ class ArticleConsultRAG(Action, AdvancedRAG):
         from questionary import confirm, text
         from rich import print as r_print
 
-        from fabricatio.rust import convert_all_block_tex, convert_all_inline_tex, fix_misplaced_labels
-
         self.target_collection = collection_name or self.safe_target_collection
 
         cm = CitationManager()
@@ -272,8 +265,7 @@
             if await confirm("Empty the cm?").ask_async():
                 cm.empty()
 
-            req = convert_to_block_formula(req)
-            req = convert_to_inline_formula(req)
+            req = convert_all_tex_math(req)
 
             await self.clued_search(
                 req,
@@ -289,8 +281,7 @@
             ret = await self.aask(f"{cm.as_prompt()}\n{self.req}\n{req}")
 
             ret = fix_misplaced_labels(ret)
-            ret = convert_all_inline_tex(ret)
-            ret = convert_all_block_tex(ret)
+            ret = convert_all_tex_math(ret)
             ret = cm.apply(ret)
 
             r_print(ret)
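Throughout this file the separate inline/block TeX conversion passes collapse into a single `convert_all_tex_math` call. A small sketch of the consolidated post-processing step (the sample text is made up):

    # Both helpers are imported from fabricatio.rust in the hunks above.
    from fabricatio.rust import convert_all_tex_math, fix_misplaced_labels

    raw = r"The loss is $L = \frac{1}{2}\|y - \hat{y}\|^2$, also written as \[L = 0\]."
    cleaned = convert_all_tex_math(fix_misplaced_labels(raw))
    print(cleaned)  # TeX math delimiters rewritten to their Typst equivalents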
@@ -51,16 +51,16 @@ class RAG(EmbeddingUsage, ABC):
         return self._client
 
     def init_client(
-        self,
-        milvus_uri: Optional[str] = None,
-        milvus_token: Optional[str] = None,
-        milvus_timeout: Optional[float] = None,
+        self,
+        milvus_uri: Optional[str] = None,
+        milvus_token: Optional[str] = None,
+        milvus_timeout: Optional[float] = None,
     ) -> Self:
         """Initialize the Milvus client."""
         self._client = create_client(
             uri=milvus_uri or ok(self.milvus_uri or CONFIG.rag.milvus_uri),
             token=milvus_token
-            or (token.get_secret_value() if (token := (self.milvus_token or CONFIG.rag.milvus_token)) else ""),
+            or (token.get_secret_value() if (token := (self.milvus_token or CONFIG.rag.milvus_token)) else ""),
             timeout=milvus_timeout or self.milvus_timeout or CONFIG.rag.milvus_timeout,
         )
         return self
@@ -74,7 +74,7 @@ class RAG(EmbeddingUsage, ABC):
         return self
 
     def view(
-        self, collection_name: Optional[str], create: bool = False, **kwargs: Unpack[CollectionConfigKwargs]
+        self, collection_name: Optional[str], create: bool = False, **kwargs: Unpack[CollectionConfigKwargs]
     ) -> Self:
         """View the specified collection.
 
@@ -116,7 +116,7 @@ class RAG(EmbeddingUsage, ABC):
         return ok(self.target_collection, "No collection is being viewed. Have you called `self.view()`?")
 
     async def add_document[D: MilvusDataBase](
-        self, data: List[D] | D, collection_name: Optional[str] = None, flush: bool = False
+        self, data: List[D] | D, collection_name: Optional[str] = None, flush: bool = False
     ) -> Self:
         """Adds a document to the specified collection.
 
@@ -143,15 +143,15 @@ class RAG(EmbeddingUsage, ABC):
         return self
 
     async def afetch_document[D: MilvusDataBase](
-        self,
-        query: List[str],
-        document_model: Type[D],
-        collection_name: Optional[str] = None,
-        similarity_threshold: float = 0.37,
-        result_per_query: int = 10,
-        tei_endpoint: Optional[str] = None,
-        reranker_threshold: float = 0.7,
-        filter_expr: str = "",
+        self,
+        query: List[str],
+        document_model: Type[D],
+        collection_name: Optional[str] = None,
+        similarity_threshold: float = 0.37,
+        result_per_query: int = 10,
+        tei_endpoint: Optional[str] = None,
+        reranker_threshold: float = 0.7,
+        filter_expr: str = "",
     ) -> List[D]:
         """Asynchronously fetches documents from a Milvus database based on input vectors.
 
@@ -192,7 +192,8 @@ class RAG(EmbeddingUsage, ABC):
             retrieved_id.update(res["id"] for res in g)
             if not models:
                 continue
-            rank_scores = await reranker.arerank(q, [m.prepare_vectorization() for m in models], truncate=True)
+            rank_scores = await reranker.arerank(q, [m.prepare_vectorization() for m in models], truncate=True,
+                                                 truncation_direction="Left")
             raw_result.extend((models[idx], scr) for (idx, scr) in rank_scores if scr > reranker_threshold)
 
         raw_result_sorted = sorted(raw_result, key=lambda x: x[1], reverse=True)
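Two things change in this retrieval path: `afetch_document` now pins `truncation_direction="Left"` on the TEI rerank call (matching the `TEIClient.arerank` stub shown later in this diff, in fabricatio/rust.pyi), and the surrounding signatures are reflowed. A hedged sketch of a caller, assuming a RAG-capable object that has already initialised its Milvus client and viewed a collection:

    # Sketch only: `rag` is assumed to be an instance of a class mixing in RAG;
    # ArticleChunk (a MilvusDataBase subclass) appears later in this diff, and
    # the endpoint URL is hypothetical.
    from fabricatio.models.extra.aricle_rag import ArticleChunk

    async def search(rag, question: str) -> list[ArticleChunk]:
        # aretrieve -> afetch_document -> optional TEI rerank (over-long texts truncated from the left)
        return await rag.aretrieve(
            question,
            document_model=ArticleChunk,
            max_accepted=10,
            tei_endpoint="http://localhost:8080",  # assumed FetchKwargs entry enabling the reranker
        )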
@@ -214,11 +215,11 @@ class RAG(EmbeddingUsage, ABC):
         return document_model.from_sequence(resp)
 
     async def aretrieve[D: MilvusDataBase](
-        self,
-        query: List[str] | str,
-        document_model: Type[D],
-        max_accepted: int = 20,
-        **kwargs: Unpack[FetchKwargs],
+        self,
+        query: List[str] | str,
+        document_model: Type[D],
+        max_accepted: int = 20,
+        **kwargs: Unpack[FetchKwargs],
     ) -> List[D]:
         """Retrieve data from the collection.
 
@@ -235,15 +236,15 @@ class RAG(EmbeddingUsage, ABC):
             query = [query]
 
         return (
-            await self.afetch_document(
-                query=query,
-                document_model=document_model,
-                **kwargs,
-            )
-        )[:max_accepted]
+            await self.afetch_document(
+                query=query,
+                document_model=document_model,
+                **kwargs,
+            )
+        )[:max_accepted]
 
     async def arefined_query(
-        self, question: List[str] | str, **kwargs: Unpack[ChooseKwargs[Optional[List[str]]]]
+        self, question: List[str] | str, **kwargs: Unpack[ChooseKwargs[Optional[List[str]]]]
    ) -> Optional[List[str]]:
         """Refines the given question using a template.
 
fabricatio/decorators.py CHANGED
@@ -235,6 +235,7 @@ def logging_exec_time[**P, R](
     @wraps(func)
     async def _async_wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
         start_time = time()
+        logger.debug(f"Starting execution of {func.__name__}")
         result = await func(*args, **kwargs)
         logger.debug(f"Execution time of `{func.__name__}`: {time() - start_time:.2f} s")
         return result
@@ -244,6 +245,7 @@ def logging_exec_time[**P, R](
     @wraps(func)
     def _wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
         start_time = time()
+        logger.debug(f"Starting execution of {func.__name__}")
         result = func(*args, **kwargs)
         logger.debug(f"Execution time of {func.__name__}: {(time() - start_time) * 1000:.2f} ms")
         return result
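The decorator now emits a debug line when the wrapped callable starts as well as when it finishes. A tiny sketch of applying it (the decorated function is made up):

    # Sketch: logging_exec_time is the decorator patched above.
    from fabricatio.decorators import logging_exec_time

    @logging_exec_time
    def crunch(n: int) -> int:
        return sum(range(n))

    crunch(1_000_000)  # logs "Starting execution of crunch", then the elapsed time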
@@ -1,6 +1,7 @@
 """A Module containing the article rag models."""
 
 import re
+from dataclasses import dataclass, field
 from itertools import groupby
 from pathlib import Path
 from typing import ClassVar, Dict, List, Optional, Self, Unpack
@@ -68,7 +69,7 @@ class ArticleChunk(MilvusDataBase):
 
     @classmethod
     def from_file[P: str | Path](
-        cls, path: P | List[P], bib_mgr: BibManager, **kwargs: Unpack[ChunkKwargs]
+        cls, path: P | List[P], bib_mgr: BibManager, **kwargs: Unpack[ChunkKwargs]
     ) -> List[Self]:
         """Load the article chunks from the file."""
         if isinstance(path, list):
@@ -85,9 +86,9 @@ class ArticleChunk(MilvusDataBase):
         title_seg = path.stem.split(" - ").pop()
 
         key = (
-            bib_mgr.get_cite_key_by_title(title_seg)
-            or bib_mgr.get_cite_key_by_title_fuzzy(title_seg)
-            or bib_mgr.get_cite_key_fuzzy(path.stem)
+            bib_mgr.get_cite_key_by_title(title_seg)
+            or bib_mgr.get_cite_key_by_title_fuzzy(title_seg)
+            or bib_mgr.get_cite_key_fuzzy(path.stem)
         )
         if key is None:
             logger.warning(f"no cite key found for {path.as_posix()}, skip.")
@@ -165,10 +166,11 @@ class ArticleChunk(MilvusDataBase):
         return self
 
 
+@dataclass
 class CitationManager(AsPrompt):
     """Citation manager."""
 
-    article_chunks: List[ArticleChunk] = Field(default_factory=list)
+    article_chunks: List[ArticleChunk] = field(default_factory=list)
     """Article chunks."""
 
     pat: str = r"(\[\[([\d\s,-]*)]])"
@@ -179,7 +181,7 @@ class CitationManager(AsPrompt):
     """Separator for abbreviated citation numbers."""
 
     def update_chunks(
-        self, article_chunks: List[ArticleChunk], set_cite_number: bool = True, dedup: bool = True
+        self, article_chunks: List[ArticleChunk], set_cite_number: bool = True, dedup: bool = True
     ) -> Self:
         """Update article chunks."""
         self.article_chunks.clear()
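`CitationManager` switches from a pydantic-style `Field(default_factory=list)` to a `@dataclass` using `dataclasses.field`. The pattern, reduced to a standalone sketch (the class name here is a hypothetical stand-in, not the real model):

    from dataclasses import dataclass, field
    from typing import List

    @dataclass
    class CitationManagerSketch:  # stand-in for illustration only
        article_chunks: List[str] = field(default_factory=list)  # safe mutable default
        pat: str = r"(\[\[([\d\s,-]*)]])"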
@@ -2,9 +2,7 @@
 
 from abc import ABC
 from enum import StrEnum
-from fabricatio.rust import extract_body, replace_thesis_body, split_out_metadata, to_metadata, word_count
 from pathlib import Path
-from pydantic import Field
 from typing import ClassVar, Generator, List, Optional, Self, Tuple, Type
 
 from fabricatio.capabilities.persist import PersistentAble
@@ -23,7 +21,17 @@ from fabricatio.models.generic import (
     Titled,
     WordCount,
 )
+from fabricatio.rust import (
+    detect_language,
+    extract_body,
+    replace_thesis_body,
+    split_out_metadata,
+    strip_comment,
+    to_metadata,
+    word_count,
+)
 from fabricatio.utils import fallback_kwargs, ok
+from pydantic import Field
 
 ARTICLE_WRAPPER = "// =-=-=-=-=-=-=-=-=-="
 
@@ -52,10 +60,31 @@ class ArticleMetaData(SketchedAble, Described, WordCount, Titled, Language):
     aims: List[str]
     """List of writing aims of the research component in academic style."""
 
+    _unstructured_body: str = ""
+    """Store the source of the unknown information."""
+
     @property
     def typst_metadata_comment(self) -> str:
         """Generates a comment for the metadata of the article component."""
-        return to_metadata(self.model_dump(include={"description", "aims", "expected_word_count"}, by_alias=True))
+        data = self.model_dump(
+            include={"description", "aims", "expected_word_count"},
+            by_alias=True,
+        )
+        return to_metadata({k: v for k, v in data.items() if v})
+
+    @property
+    def unstructured_body(self) -> str:
+        """Returns the unstructured body of the article component."""
+        return self._unstructured_body
+
+    def update_unstructured_body[S: "ArticleMetaData"](self: S, body: str) -> S:
+        """Update the unstructured body of the article component."""
+        self._unstructured_body = body
+        return self
+
+    @property
+    def language(self) -> str:
+        return detect_language(self.title)
 
 
 class FromTypstCode(ArticleMetaData):
@@ -67,13 +96,8 @@ class FromTypstCode(ArticleMetaData):
         data, body = split_out_metadata(body)
 
         return cls(
-            heading=title,
-            **fallback_kwargs(
-                data or {},
-                elaboration="",
-                expected_word_count=word_count(body),
-                aims=[],
-            ),
+            heading=title.strip(),
+            **fallback_kwargs(data or {}, elaboration="", expected_word_count=word_count(body), aims=[]),
             **kwargs,
         )
 
@@ -83,7 +107,7 @@ class ToTypstCode(ArticleMetaData):
 
     def to_typst_code(self) -> str:
         """Converts the component into a Typst code snippet for rendering."""
-        return f"{self.title}\n{self.typst_metadata_comment}\n"
+        return f"{self.title}\n{self.typst_metadata_comment}\n\n{self._unstructured_body}"
 
 
 class ArticleOutlineBase(
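The new `_unstructured_body` field preserves free-form text that the section parser does not capture and re-emits it from `to_typst_code`; `strip_comment` (declared in fabricatio/rust.pyi further down this diff) trims comment lines at the edges of that text. A hedged round-trip sketch, assuming `ArticleOutline` is one of the concrete `ArticleBase` subclasses:

    # Sketch: the Typst snippet and the chosen subclass are illustrative only.
    from fabricatio.models.extra.article_outline import ArticleOutline

    code = "= Background\nProse that the outline parser keeps as unstructured body.\n"
    outline = ArticleOutline.from_typst_code("Survey", code)
    print(outline.to_typst_code())  # titles, metadata comments, then the preserved body text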
@@ -151,12 +175,16 @@ class SectionBase[T: SubSectionBase](ArticleOutlineBase):
     @classmethod
     def from_typst_code(cls, title: str, body: str, **kwargs) -> Self:
         """Creates an Article object from the given Typst code."""
-        return super().from_typst_code(
-            title,
-            body,
-            subsections=[
-                cls.child_type.from_typst_code(*pack) for pack in extract_sections(body, level=3, section_char="=")
-            ],
+        raw = extract_sections(body, level=3, section_char="=")
+
+        return (
+            super()
+            .from_typst_code(
+                title,
+                body,
+                subsections=[cls.child_type.from_typst_code(*pack) for pack in raw],
+            )
+            .update_unstructured_body("" if raw else strip_comment(body))
         )
 
     def resolve_update_conflict(self, other: Self) -> str:
@@ -191,6 +219,11 @@ class SectionBase[T: SubSectionBase](ArticleOutlineBase):
             return f"Section `{self.title}` contains no subsections, expected at least one, but got 0, you can add one or more as needed."
         return ""
 
+    @property
+    def exact_word_count(self) -> int:
+        """Returns the exact word count of the article section outline."""
+        return sum(a.exact_word_count for a in self.subsections)
+
 
 class ChapterBase[T: SectionBase](ArticleOutlineBase):
     """Base class for article chapters."""
@@ -206,12 +239,16 @@ class ChapterBase[T: SectionBase](ArticleOutlineBase):
     @classmethod
     def from_typst_code(cls, title: str, body: str, **kwargs) -> Self:
         """Creates an Article object from the given Typst code."""
-        return super().from_typst_code(
-            title,
-            body,
-            sections=[
-                cls.child_type.from_typst_code(*pack) for pack in extract_sections(body, level=2, section_char="=")
-            ],
+        raw_sec = extract_sections(body, level=2, section_char="=")
+
+        return (
+            super()
+            .from_typst_code(
+                title,
+                body,
+                sections=[cls.child_type.from_typst_code(*pack) for pack in raw_sec],
+            )
+            .update_unstructured_body("" if raw_sec else strip_comment(body))
         )
 
     def resolve_update_conflict(self, other: Self) -> str:
@@ -243,6 +280,15 @@ class ChapterBase[T: SectionBase](ArticleOutlineBase):
             return f"Chapter `{self.title}` contains no sections, expected at least one, but got 0, you can add one or more as needed."
         return ""
 
+    @property
+    def exact_word_count(self) -> int:
+        """Calculates the total word count across all sections in the chapter.
+
+        Returns:
+            int: The cumulative word count of all sections.
+        """
+        return sum(a.exact_word_count for a in self.sections)
+
 
 class ArticleBase[T: ChapterBase](FinalizedDumpAble, AsPrompt, FromTypstCode, ToTypstCode, ABC):
     """Base class for article outlines."""
@@ -263,19 +309,37 @@ class ArticleBase[T: ChapterBase](FinalizedDumpAble, AsPrompt, FromTypstCode, To
 
     child_type: ClassVar[Type[ChapterBase]]
 
+    @property
+    def language(self) -> str:
+        if self.title:
+            return super().language
+        return self.chapters[0].language
+
+    @property
+    def exact_word_count(self) -> int:
+        """Calculates the total word count across all chapters in the article.
+
+        Returns:
+            int: The cumulative word count of all chapters.
+        """
+        return sum(ch.exact_word_count for ch in self.chapters)
+
     @classmethod
     def from_typst_code(cls, title: str, body: str, **kwargs) -> Self:
         """Generates an article from the given Typst code."""
-        return super().from_typst_code(
-            title,
-            body,
-            chapters=[
-                cls.child_type.from_typst_code(*pack) for pack in extract_sections(body, level=1, section_char="=")
-            ],
+        raw = extract_sections(body, level=1, section_char="=")
+        return (
+            super()
+            .from_typst_code(
+                title,
+                body,
+                chapters=[cls.child_type.from_typst_code(*pack) for pack in raw],
+            )
+            .update_unstructured_body("" if raw else strip_comment(body))
         )
 
     def iter_dfs_rev(
-        self,
+        self,
     ) -> Generator[ArticleOutlineBase, None, None]:
         """Performs a depth-first search (DFS) through the article structure in reverse order.
 
@@ -350,7 +414,7 @@ class ArticleBase[T: ChapterBase](FinalizedDumpAble, AsPrompt, FromTypstCode, To
 
     def to_typst_code(self) -> str:
         """Generates the Typst code representation of the article."""
-        return f"// #{super().to_typst_code()}\n\n" + "\n\n".join(a.to_typst_code() for a in self.chapters)
+        return f"// #Title: {super().to_typst_code()}\n" + "\n\n".join(a.to_typst_code() for a in self.chapters)
 
     def finalized_dump(self) -> str:
         """Generates standardized hierarchical markup for academic publishing systems.
@@ -401,11 +465,11 @@ class ArticleBase[T: ChapterBase](FinalizedDumpAble, AsPrompt, FromTypstCode, To
         """Set all chap, sec, subsec have same word count sum up to be `self.expected_word_count`."""
         return self.avg_chap_wordcount().avg_sec_wordcount().avg_subsec_wordcount()
 
-    def update_article_file(self, file: str | Path) -> Self:
+    def update_article_file[S: "ArticleBase"](self: S, file: str | Path) -> S:
         """Update the article file."""
         file = Path(file)
         string = safe_text_read(file)
-        if updated := replace_thesis_body(string, ARTICLE_WRAPPER, self.to_typst_code()):
+        if updated := replace_thesis_body(string, ARTICLE_WRAPPER, f"\n\n{self.to_typst_code()}\n\n"):
             dump_text(file, updated)
             logger.success(f"Successfully updated {file.as_posix()}.")
         else:
@@ -413,7 +477,7 @@ class ArticleBase[T: ChapterBase](FinalizedDumpAble, AsPrompt, FromTypstCode, To
         return self
 
     @classmethod
-    def from_article_file[S: "ArticleBase"](cls: Type[S], file: str | Path, title: str) -> S:
+    def from_article_file[S: "ArticleBase"](cls: Type[S], file: str | Path, title: str = "") -> S:
         """Load article from file."""
         file = Path(file)
         string = safe_text_read(file)
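With `title` now defaulting to an empty string, an article can be round-tripped through a Typst file without naming it explicitly; `update_article_file` rewrites only the text between the `ARTICLE_WRAPPER` markers. A hedged sketch, assuming the concrete `Article` class from `fabricatio.models.extra.article_main` and an illustrative file path:

    # Sketch: methods are the ones changed above; the path is made up.
    from fabricatio.models.extra.article_main import Article

    art = Article.from_article_file("thesis/main.typ")   # title now defaults to ""
    print(art.exact_word_count)                          # new aggregate property
    art.update_article_file("thesis/main.typ")           # replaces the body between ARTICLE_WRAPPER markers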
@@ -19,8 +19,7 @@ from fabricatio.models.extra.article_outline import (
 )
 from fabricatio.models.generic import Described, SequencePatch, SketchedAble, WithRef, WordCount
 from fabricatio.rust import (
-    convert_all_block_tex,
-    convert_all_inline_tex,
+    convert_all_tex_math,
     fix_misplaced_labels,
     split_out_metadata,
     word_count,
@@ -53,7 +52,7 @@ class Paragraph(SketchedAble, WordCount, Described):
         return cls(elaboration="", aims=[], expected_word_count=word_count(content), content=content.strip())
 
     @property
-    def exact_wordcount(self) -> int:
+    def exact_word_count(self) -> int:
         """Calculates the exact word count of the content."""
         return word_count(self.content)
 
@@ -71,6 +70,11 @@ class ArticleSubsection(SubSectionBase):
     _max_word_count_deviation: float = 0.3
     """Maximum allowed deviation from the expected word count, as a percentage."""
 
+    @property
+    def exact_word_count(self) -> int:
+        """Calculates the exact word count of all paragraphs in the subsection."""
+        return sum(a.exact_word_count for a in self.paragraphs)
+
     @property
     def word_count(self) -> int:
         """Calculates the total word count of all paragraphs in the subsection."""
@@ -155,15 +159,13 @@ class Article(
         if descriptions:
             for a in self.iter_dfs():
                 a.description = fix_misplaced_labels(a.description)
-                a.description = convert_all_inline_tex(a.description)
-                a.description = convert_all_block_tex(a.description)
+                a.description = convert_all_tex_math(a.description)
 
         if paragraphs:
             for _, _, subsec in self.iter_subsections():
                 for p in subsec.paragraphs:
                     p.content = fix_misplaced_labels(p.content)
-                    p.content = convert_all_inline_tex(p.content)
-                    p.content = convert_all_block_tex(p.content)
+                    p.content = convert_all_tex_math(p.content)
         return self
 
     @override
@@ -276,9 +278,9 @@ class Article(
         err = []
         for chap, sec, subsec in self.iter_subsections():
             for i, p in enumerate(subsec.paragraphs):
-                if p.exact_wordcount <= threshold:
+                if p.exact_word_count <= threshold:
                     err.append(
-                        f"{chap.title}->{sec.title}->{subsec.title}-> Paragraph [{i}] is too short, {p.exact_wordcount} words."
+                        f"{chap.title}->{sec.title}->{subsec.title}-> Paragraph [{i}] is too short, {p.exact_word_count} words."
                     )
 
         return "\n".join(err)
@@ -114,7 +114,7 @@ class WordCount(Base, ABC):
     @property
     def exact_word_count(self) -> int:
         """Get the exact word count of this research component."""
-        raise NotImplementedError(f"`expected_word_count` is not implemented for {self.__class__.__name__}")
+        raise NotImplementedError(f"`exact_word_count` is not implemented for {self.__class__.__name__}")
 
 
 class FromMapping:
fabricatio/models/role.py CHANGED
@@ -1,7 +1,7 @@
 """Module that contains the Role class for managing workflows and their event registrations."""
 
 from functools import partial
-from typing import Any, Dict, Self
+from typing import Any, Callable, Dict, Self, Type
 
 from fabricatio.emitter import env
 from fabricatio.journal import logger
@@ -68,28 +68,32 @@ class Role(WithBriefing):
         workflow.inject_personality(self.briefing)
         return self
 
-    def _configure_scoped_config(self, workflow: WorkFlow) -> None:
-        """Configure scoped configuration for workflow and its actions."""
-        if not is_scoped_config(self.__class__):
+    def _propagate_config(
+        self,
+        workflow: WorkFlow,
+        has_capability: Callable[[Type], bool],
+        config_method_name: str,
+        capability_description: str,
+    ) -> None:
+        """Propagates configuration to workflow and its actions if they have a given capability."""
+        if not has_capability(self.__class__):
             return
 
-        fallback_target = self
-        if is_scoped_config(workflow):
-            workflow.fallback_to(self)
-            fallback_target = workflow
+        config_source_for_actions = self
+        if has_capability(workflow.__class__):
+            logger.debug(
+                f"Configuring {capability_description} inherited from `{self.name}` for workflow: `{workflow.name}`"
+            )
+            getattr(workflow, config_method_name)(self)
+            config_source_for_actions = workflow
 
-        for action in (a for a in workflow.iter_actions() if is_scoped_config(a)):
-            action.fallback_to(fallback_target)
+        for action in (act for act in workflow.iter_actions() if has_capability(act.__class__)):
+            getattr(action, config_method_name)(config_source_for_actions)
+
+    def _configure_scoped_config(self, workflow: WorkFlow) -> None:
+        """Configure scoped configuration for workflow and its actions."""
+        self._propagate_config(workflow, is_scoped_config, "fallback_to", "scoped config")
 
     def _configure_toolbox_usage(self, workflow: WorkFlow) -> None:
         """Configure toolbox usage for workflow and its actions."""
-        if not is_toolbox_usage(self.__class__):
-            return
-
-        supply_target = self
-        if is_toolbox_usage(workflow):
-            workflow.supply_tools_from(self)
-            supply_target = workflow
-
-        for action in (a for a in workflow.iter_actions() if is_toolbox_usage(a)):
-            action.supply_tools_from(supply_target)
+        self._propagate_config(workflow, is_toolbox_usage, "supply_tools_from", "toolbox usage")
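The two `_configure_*` helpers now delegate to one generic propagation routine. Reduced to its core, the pattern is as follows (names copied from the diff; the free-function form is just for illustration):

    # Illustration of the propagation pattern used by Role._propagate_config above.
    def propagate(role, workflow, has_capability, config_method_name):
        if not has_capability(role.__class__):
            return
        source = role
        if has_capability(workflow.__class__):
            getattr(workflow, config_method_name)(role)   # workflow inherits config from the role
            source = workflow
        for action in workflow.iter_actions():
            if has_capability(action.__class__):
                getattr(action, config_method_name)(source)  # actions inherit from workflow, else from the role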
Binary file
fabricatio/rust.pyi CHANGED
@@ -12,9 +12,10 @@
 """
 
 from enum import StrEnum
-from pydantic import JsonValue
+from pathlib import Path
 from typing import Any, Dict, List, Literal, Optional, Self, Tuple, Union, overload
 
+from pydantic import JsonValue
 
 class TemplateManager:
     """Template rendering engine using Handlebars templates.
@@ -47,10 +48,8 @@
 
     @overload
     def render_template(self, name: str, data: Dict[str, Any]) -> str: ...
-
     @overload
    def render_template(self, name: str, data: List[Dict[str, Any]]) -> List[str]: ...
-
     def render_template(self, name: str, data: Dict[str, Any] | List[Dict[str, Any]]) -> str | List[str]:
         """Render a template with context data.
 
@@ -67,10 +66,8 @@
 
     @overload
     def render_template_raw(self, template: str, data: Dict[str, Any]) -> str: ...
-
     @overload
     def render_template_raw(self, template: str, data: List[Dict[str, Any]]) -> List[str]: ...
-
     def render_template_raw(self, template: str, data: Dict[str, Any] | List[Dict[str, Any]]) -> str | List[str]:
         """Render a template with context data.
 
@@ -82,7 +79,6 @@
             Rendered template content as string or list of strings
         """
 
-
 class BibManager:
     """BibTeX bibliography manager for parsing and querying citation data."""
 
@@ -191,7 +187,6 @@
             Field value if found, None otherwise
         """
 
-
 def blake3_hash(content: bytes) -> str:
     """Calculate the BLAKE3 cryptographic hash of data.
 
@@ -202,11 +197,9 @@ def blake3_hash(content: bytes) -> str:
        Hex-encoded BLAKE3 hash string
    """
 
-
 def detect_language(string: str) -> str:
    """Detect the language of a given string."""
 
-
 def split_word_bounds(string: str) -> List[str]:
    """Split the string into words based on word boundaries.
 
@@ -217,7 +210,6 @@ def split_word_bounds(string: str) -> List[str]:
        A list of words extracted from the string.
    """
 
-
 def split_sentence_bounds(string: str) -> List[str]:
    """Split the string into sentences based on sentence boundaries.
 
@@ -228,7 +220,6 @@ def split_sentence_bounds(string: str) -> List[str]:
        A list of sentences extracted from the string.
    """
 
-
 def split_into_chunks(string: str, max_chunk_size: int, max_overlapping_rate: float = 0.3) -> List[str]:
    """Split the string into chunks of a specified size.
 
@@ -241,7 +232,6 @@ def split_into_chunks(string: str, max_chunk_size: int, max_overlapping_rate: fl
        A list of chunks extracted from the string.
    """
 
-
 def word_count(string: str) -> int:
    """Count the number of words in the string.
 
@@ -252,67 +242,51 @@ def word_count(string: str) -> int:
        The number of words in the string.
    """
 
-
 def is_chinese(string: str) -> bool:
    """Check if the given string is in Chinese."""
 
-
 def is_english(string: str) -> bool:
    """Check if the given string is in English."""
 
-
 def is_japanese(string: str) -> bool:
    """Check if the given string is in Japanese."""
 
-
 def is_korean(string: str) -> bool:
    """Check if the given string is in Korean."""
 
-
 def is_arabic(string: str) -> bool:
    """Check if the given string is in Arabic."""
 
-
 def is_russian(string: str) -> bool:
    """Check if the given string is in Russian."""
 
-
 def is_german(string: str) -> bool:
    """Check if the given string is in German."""
 
-
 def is_french(string: str) -> bool:
    """Check if the given string is in French."""
 
-
 def is_hindi(string: str) -> bool:
    """Check if the given string is in Hindi."""
 
-
 def is_italian(string: str) -> bool:
    """Check if the given string is in Italian."""
 
-
 def is_dutch(string: str) -> bool:
    """Check if the given string is in Dutch."""
 
-
 def is_portuguese(string: str) -> bool:
    """Check if the given string is in Portuguese."""
 
-
 def is_swedish(string: str) -> bool:
    """Check if the given string is in Swedish."""
 
-
 def is_turkish(string: str) -> bool:
    """Check if the given string is in Turkish."""
 
-
 def is_vietnamese(string: str) -> bool:
    """Check if the given string is in Vietnamese."""
 
-
 def tex_to_typst(string: str) -> str:
    """Convert TeX to Typst.
 
@@ -323,11 +297,11 @@ def tex_to_typst(string: str) -> str:
        The converted Typst string.
    """
 
-
 def convert_all_tex_math(string: str) -> str:
-    """Unified function to convert all supported TeX math expressions in a string to Typst format.
+    r"""Unified function to convert all supported TeX math expressions in a string to Typst format.
 
    Handles $...$, $$...$$, \\(...\\), and \\[...\\]
+
    Args:
        string: The input string containing TeX math expressions.
 
@@ -335,7 +309,6 @@ def convert_all_tex_math(string: str) -> str:
        The string with TeX math expressions converted to Typst format.
    """
 
-
 def fix_misplaced_labels(string: str) -> str:
    """A func to fix labels in a string.
 
@@ -346,9 +319,8 @@ def fix_misplaced_labels(string: str) -> str:
        The fixed string with labels properly placed.
    """
 
-
 def comment(string: str) -> str:
-    """Add comment to the string.
+    r"""Add comment to the string.
 
    Args:
        string: The input string to which comments will be added.
@@ -357,7 +329,6 @@ def comment(string: str) -> str:
        The string with each line prefixed by '// '.
    """
 
-
 def uncomment(string: str) -> str:
    """Remove comment from the string.
 
@@ -368,6 +339,15 @@ def uncomment(string: str) -> str:
        The string with comments (lines starting with '// ' or '//') removed.
    """
 
+def strip_comment(string: str) -> str:
+    """Remove leading and trailing comment lines from a multi-line string.
+
+    Args:
+        string: Input string that may have comment lines at start and/or end
+
+    Returns:
+        str: A new string with leading and trailing comment lines removed
+    """
 
 def split_out_metadata(string: str) -> Tuple[Optional[JsonValue], str]:
    """Split out metadata from a string.
@@ -379,7 +359,6 @@ def split_out_metadata(string: str) -> Tuple[Optional[JsonValue], str]:
        A tuple containing the metadata as a Python object (if parseable) and the remaining string.
    """
 
-
 def to_metadata(data: JsonValue) -> str:
    """Convert a Python object to a YAML string.
 
@@ -390,7 +369,6 @@ def to_metadata(data: JsonValue) -> str:
        The YAML string representation of the input data.
    """
 
-
 def replace_thesis_body(string: str, wrapper: str, new_body: str) -> Optional[str]:
    """Replace content between wrapper strings.
 
@@ -404,7 +382,6 @@ def replace_thesis_body(string: str, wrapper: str, new_body: str) -> Optional[st
 
    """
 
-
 def extract_body(string: str, wrapper: str) -> Optional[str]:
    """Extract the content between two occurrences of a wrapper string.
 
@@ -416,7 +393,6 @@ def extract_body(string: str, wrapper: str) -> Optional[str]:
        The content between the first two occurrences of the wrapper string if found, otherwise None.
    """
 
-
 class LLMConfig:
    """LLM configuration structure.
 
@@ -468,7 +444,6 @@ class LLMConfig:
    frequency_penalty: Optional[float]
    """Penalizes new tokens based on their frequency in text so far (-2.0-2.0)."""
 
-
 class EmbeddingConfig:
    """Embedding configuration structure."""
 
@@ -493,7 +468,6 @@ class EmbeddingConfig:
    api_key: Optional[SecretStr]
    """The API key."""
 
-
 class RagConfig:
    """RAG (Retrieval Augmented Generation) configuration structure."""
 
@@ -509,18 +483,16 @@ class RagConfig:
    milvus_dimensions: Optional[int]
    """The dimensions for Milvus vectors."""
 
-
 class DebugConfig:
    """Debug configuration structure."""
 
    log_level: Optional[str]
    """The logging level to use."""
 
-
 class TemplateManagerConfig:
    """Template manager configuration structure."""
 
-    template_dir: List[str]
+    template_dir: List[Path]
    """The directories containing the templates."""
 
    active_loading: Optional[bool]
@@ -529,10 +501,12 @@ class TemplateManagerConfig:
    template_suffix: Optional[str]
    """The suffix of the templates."""
 
-
 class TemplateConfig:
    """Template configuration structure."""
 
+    research_content_summary_template: str
+    """The name of the research content summary template which will be used to generate a summary of research content."""
+
    create_json_obj_template: str
    """The name of the create json object template which will be used to create a json object."""
 
@@ -614,7 +588,6 @@ class TemplateConfig:
    chap_summary_template: str
    """The name of the chap summary template which will be used to generate a chapter summary."""
 
-
 class RoutingConfig:
    """Routing configuration structure for controlling request dispatching behavior."""
 
@@ -630,7 +603,6 @@ class RoutingConfig:
    cooldown_time: Optional[int]
    """Time to cooldown a deployment after failure in seconds."""
 
-
 class GeneralConfig:
    """General configuration structure for application-wide settings."""
 
@@ -640,7 +612,6 @@ class GeneralConfig:
    use_json_repair: bool
    """Whether to automatically repair malformed JSON."""
 
-
 class ToolBoxConfig:
    """Configuration for toolbox functionality."""
 
@@ -650,7 +621,6 @@ class ToolBoxConfig:
    data_module_name: str
    """The name of the module containing the data."""
 
-
 class PymitterConfig:
    """Pymitter configuration structure for controlling event emission and listener behavior."""
 
@@ -663,7 +633,6 @@ class PymitterConfig:
    max_listeners: int
    """The maximum number of listeners per event. -1 means unlimited."""
 
-
 class Config:
    """Configuration structure containing all system components."""
 
@@ -697,22 +666,17 @@ class Config:
    pymitter: PymitterConfig
    """Pymitter configuration."""
 
-
 CONFIG: Config
 
-
 class SecretStr:
    """A string that should not be exposed."""
 
    def __init__(self, source: str) -> None: ...
-
    def get_secret_value(self) -> str:
        """Expose the secret string."""
 
-
 TEMPLATE_MANAGER: TemplateManager
 
-
 class Event:
    """Event class that represents a hierarchical event with segments.
 
@@ -824,12 +788,9 @@ class Event:
        """
 
    def __hash__(self) -> int: ...
-
    def __eq__(self, other: object) -> bool: ...
-
    def __ne__(self, other: object) -> bool: ...
 
-
 class TaskStatus(StrEnum, str):
    """Enumeration of possible task statuses."""
 
@@ -848,7 +809,6 @@ class TaskStatus(StrEnum, str):
    Cancelled: TaskStatus
    """Task has been cancelled."""
 
-
 class TEIClient:
    """Client for TEI reranking service.
 
@@ -864,11 +824,11 @@ class TEIClient:
    """
 
    async def arerank(
-        self,
-        query: str,
-        texts: List[str],
-        truncate: bool = False,
-        truncation_direction: Literal["Left", "Right"] = "Left",
+        self,
+        query: str,
+        texts: List[str],
+        truncate: bool = False,
+        truncation_direction: Literal["Left", "Right"] = "Left",
    ) -> List[Tuple[int, float]]:
        """Rerank texts based on relevance to query.
 
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: fabricatio
-Version: 0.3.14.dev5
+Version: 0.3.15.dev4
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Rust
 Classifier: Programming Language :: Python :: 3.12
@@ -185,4 +185,5 @@ Special thanks to the contributors and maintainers of:
 - [PyO3](https://github.com/PyO3/pyo3)
 - [Maturin](https://github.com/PyO3/maturin)
 - [Handlebars.rs](https://github.com/sunng87/handlebars-rust)
+- [LiteLLM](https://github.com/BerriAI/litellm)
 
@@ -1,13 +1,17 @@
-fabricatio-0.3.14.dev5.dist-info/METADATA,sha256=0MBWyxdGQ7HQZrtsF7zXfaTPqwCd3ci7F2_701zlCfs,5116
-fabricatio-0.3.14.dev5.dist-info/WHEEL,sha256=jABKVkLC9kJr8mi_er5jOqpiQUjARSLXDUIIxDqsS50,96
-fabricatio-0.3.14.dev5.dist-info/licenses/LICENSE,sha256=do7J7EiCGbq0QPbMAL_FqLYufXpHnCnXBOuqVPwSV8Y,1088
-fabricatio/actions/article.py,sha256=TPS2fOqCymKv2hK2c_WmMRMKNBkvN8M91QkB9ar8-bg,12507
-fabricatio/actions/article_rag.py,sha256=e1fVh7Jph2zVD0bRAmK2dJ0BVkSEvF-FPfxUKujkn6s,18407
+fabricatio-0.3.15.dev4.data/scripts/tdown.exe,sha256=xo5gt8ZZhdo04pXNXNcSr8eW__L-GrUYRu09mADIOVI,3448320
+fabricatio-0.3.15.dev4.data/scripts/ttm.exe,sha256=fvovyVc2sYfjO9ovvyVyOrg_15xXUyKxTRQEM2w1PCY,2560512
+fabricatio-0.3.15.dev4.dist-info/METADATA,sha256=WtY8oxwIpleThYREaAQDeDvcscg8NWOjnAk3-iz-ZH8,5165
+fabricatio-0.3.15.dev4.dist-info/WHEEL,sha256=YpU2aDuTyBIvwRZn8idqScP-vkQ8DUGkCILtYmMfnFY,96
+fabricatio-0.3.15.dev4.dist-info/licenses/LICENSE,sha256=do7J7EiCGbq0QPbMAL_FqLYufXpHnCnXBOuqVPwSV8Y,1088
+fabricatio/__init__.py,sha256=w7ObFg6ud4pQuC1DhVyQI9x9dtp05QrcJAEk643iJmc,761
+fabricatio/actions/__init__.py,sha256=wVENCFtpVb1rLFxoOFJt9-8smLWXuJV7IwA8P3EfFz4,48
+fabricatio/actions/article.py,sha256=pKJ8DBHeb3MIUdz-y-Xtk-7XAIyDAGQf3b135w1Moxo,17110
+fabricatio/actions/article_rag.py,sha256=ohS1CRtYuv2rJNgoIsBl2yv-PuuoypA3y223rUUyDBg,17989
 fabricatio/actions/fs.py,sha256=gJR14U4ln35nt8Z7OWLVAZpqGaLnED-r1Yi-lX22tkI,959
 fabricatio/actions/output.py,sha256=jZL72D5uFobKNiVFapnVxBcjSNqGEThYNlCUKQvZwz8,9935
 fabricatio/actions/rag.py,sha256=vgCzIfbSd3_vL3QxB12PY4h12V9Pe3sZRsWme0KC6X8,3583
 fabricatio/actions/rules.py,sha256=dkvCgNDjt2KSO1VgPRsxT4YBmIIMeetZb5tiz-slYkU,3640
-fabricatio/actions/__init__.py,sha256=wVENCFtpVb1rLFxoOFJt9-8smLWXuJV7IwA8P3EfFz4,48
+fabricatio/capabilities/__init__.py,sha256=v1cHRHIJ2gxyqMLNCs6ERVcCakSasZNYzmMI4lqAcls,57
 fabricatio/capabilities/advanced_judge.py,sha256=jQ_Gsn6L8EKb6KQi3j0G0GSUz2j8D2220C1hIhrAeU8,682
 fabricatio/capabilities/advanced_rag.py,sha256=DYh-imLQkjVOgKd__OEbwGzqwNeTtX_6NBGx_bCiFs8,2539
 fabricatio/capabilities/censor.py,sha256=e0tHll4J_-TT8-Vn1OZ1innVZbJfx55oyGtDoEI99r8,4745
@@ -16,49 +20,45 @@ fabricatio/capabilities/correct.py,sha256=-JR8ZUAtagmNXepVyY679MBUyFCZwtKPjv8dAN
 fabricatio/capabilities/extract.py,sha256=E7CLZflWzJ6C6DVLEWysYZ_48g_-F93gZJVU56k2-XA,2523
 fabricatio/capabilities/persist.py,sha256=9XnKoeZ62YjXVDpYnkbDFf62B_Mz46WVsq1dTr2Wvvc,3421
 fabricatio/capabilities/propose.py,sha256=v8OiUHc8GU7Jg1zAUghYhrI-AKgmBeUvQMo22ZAOddw,2030
-fabricatio/capabilities/rag.py,sha256=D5rULrQxPmp4kVLP_jBE4yal1v9N68XOgBdJqGvVHpU,10979
+fabricatio/capabilities/rag.py,sha256=lwFrC96tL3uJ4keJ6n46vrvrdF6bARg1Yn6y6pQn7VQ,11194
 fabricatio/capabilities/rating.py,sha256=cm-s2YJMYcS36mR9b7XNwRQ1x0h0uWxLHCapoAORv8I,17815
 fabricatio/capabilities/review.py,sha256=l06BYcQzPi7VKmWdplj9L6WvZEscZqW1Wx9OhR-UnNw,5061
 fabricatio/capabilities/task.py,sha256=-b92cGi7b3B30kOSS-90_H6BjA0VF_cjc1BzPbO5MkI,4401
-fabricatio/capabilities/__init__.py,sha256=v1cHRHIJ2gxyqMLNCs6ERVcCakSasZNYzmMI4lqAcls,57
-fabricatio/decorators.py,sha256=OohwKgc5dUjybv70D-J2lA0C9zjUuq8-gzU5O8JPl8w,8962
+fabricatio/decorators.py,sha256=FmUDSr6RFtRnIXZ2OWYmbxCgTM1lsHKuyw4jTFgJbDo,9094
 fabricatio/emitter.py,sha256=n4vH6E7lcT57qVve_3hUAdfvj0mQUDkWu6iU5aNztB8,6341
+fabricatio/fs/__init__.py,sha256=USoMI_HcIr3Yc77_JQYYsXrsplYPXtFTaNB9YgFfC4s,713
 fabricatio/fs/curd.py,sha256=652nHulbJ3gwt0Z3nywtPMmjhEyglDvEfc3p7ieJNNA,4777
 fabricatio/fs/readers.py,sha256=UXvcJO3UCsxHu9PPkg34Yh55Zi-miv61jD_wZQJgKRs,1751
-fabricatio/fs/__init__.py,sha256=USoMI_HcIr3Yc77_JQYYsXrsplYPXtFTaNB9YgFfC4s,713
 fabricatio/journal.py,sha256=mnbdB1Dw-mhEKIgWlPKn7W07ssg-6dmxMXilIGQMFV8,216
 fabricatio/models/action.py,sha256=RhjHaEJILiCZux5hzxSZVt_7Evcu3TnFHNuJN8rzgq8,10052
 fabricatio/models/adv_kwargs_types.py,sha256=IBV3ZcsNLvvEjO_2hBpYg_wLSpNKaMx6Ndam3qXJCw8,2097
+fabricatio/models/extra/__init__.py,sha256=XlYnS_2B9nhLhtQkjE7rvvfPmAAtXVdNi9bSDAR-Ge8,54
 fabricatio/models/extra/advanced_judge.py,sha256=INUl_41C8jkausDekkjnEmTwNfLCJ23TwFjq2cM23Cw,1092
-fabricatio/models/extra/aricle_rag.py,sha256=fTxlQyrzyl9bLCC5Zreb71TKaJ7xiHqqyR62HXr2unQ,11935
-fabricatio/models/extra/article_base.py,sha256=UBNZaauEm3X85Cw-k7pIos129lkI0ocw7bAmRDpiG1k,16783
+fabricatio/models/extra/aricle_rag.py,sha256=wg7EaxDW3ScOoYHPc-e9HXzllNgaJemNFmrAuF_mgzI,12009
+fabricatio/models/extra/article_base.py,sha256=HkRsQ1WwwfSLvDbOUQqduEITcXr4alA58hMbzmlSuDo,18815
 fabricatio/models/extra/article_essence.py,sha256=z3Qz6xVsB9k-K-c4Y2CoKzxZrXaUd4oyt2Mb6hGDYdg,2793
-fabricatio/models/extra/article_main.py,sha256=nwwcTn-TgCeFNT4vVfE4OCMonwSdvyx1TR-lXPE6Cp4,11268
+fabricatio/models/extra/article_main.py,sha256=kBLqKMeBPy2fxioOZa9oqFjGPPjHvXHWDF_Fpx16aLU,11307
 fabricatio/models/extra/article_outline.py,sha256=P0T-1DGCzoNmQ3iQVwSmOul0nwS6qLgr0FF8jDdD7F0,1673
 fabricatio/models/extra/article_proposal.py,sha256=OQIKoJkmJv0ogYVk7eGK_TOtANVYcBPA_HeV1nuG0Vo,1909
 fabricatio/models/extra/patches.py,sha256=_WNCxtYzzsVfUxI16vu4IqsLahLYRHdbQN9er9tqhC0,997
 fabricatio/models/extra/problem.py,sha256=8tTU-3giFHOi5j7NJsvH__JJyYcaGrcfsRnkzQNm0Ew,7216
 fabricatio/models/extra/rag.py,sha256=C7ptZCuGJmT8WikjpF9KhZ0Bw-VicdB-s8EqEAgMLKE,3967
 fabricatio/models/extra/rule.py,sha256=WKahNiaIp8s_l2r_FG21F_PP3_hgNm4hfSVCSFyfoBE,2669
-fabricatio/models/extra/__init__.py,sha256=XlYnS_2B9nhLhtQkjE7rvvfPmAAtXVdNi9bSDAR-Ge8,54
-fabricatio/models/generic.py,sha256=OJrYClooL2XnyalWTyyLgorycA1d_JNW8VqOYNDJdXc,27873
+fabricatio/models/generic.py,sha256=NQW2hfZ-_OTuok_gvIYepq49VThrl9guFPfN-68LbPw,27870
 fabricatio/models/kwargs_types.py,sha256=Ik8-Oi_NmwfkvC9B8K4NsoZc_vSWV85xKCSthA1Xv_k,3403
-fabricatio/models/role.py,sha256=b3zg96YKDsMBqa7SIe9LQHc-IVs2fGWqoQeRQYQIl4o,3856
+fabricatio/models/role.py,sha256=PrVwmGUi3xh2uyxEDw0fygXDhllFOB65dKOuQnepoUc,4253
 fabricatio/models/task.py,sha256=XZ1l1P-iS02ZF9P8cXv8gEfJKBa17PFPNJ1SbhyhT4Q,11033
 fabricatio/models/tool.py,sha256=q2wDtZAebWMZlsFifgmJq8N3XvAhVNMb0aUIKkdruGc,12419
 fabricatio/models/usages.py,sha256=q2jLqa0vJ7ho9ZUkC-2uPuFpK8uClBLIS6TEEYHUotY,33041
 fabricatio/parser.py,sha256=dYFri9pDlsiwVpEJ-a5jmVU2nFuKN3uBHC8VsVpdEm8,4642
 fabricatio/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-fabricatio/rust.pyi,sha256=zgA4po7lznRxicXPxhRP3mmBY_xyGTnzpkW5D47tS5U,25958
+fabricatio/rust.cp312-win_amd64.pyd,sha256=PPNj55qdP98py7T6flmmi2nEnUuTkSo3G9YVnwB3G2g,7824896
+fabricatio/rust.pyi,sha256=gACnTKrPEU15AC3c_HfcNNzLcMa6VTdC8SoGRgm0Qdw,26337
+fabricatio/toolboxes/__init__.py,sha256=KBJi5OG_pExscdlM7Bnt_UF43j4I3Lv6G71kPVu4KQU,395
 fabricatio/toolboxes/arithmetic.py,sha256=WLqhY-Pikv11Y_0SGajwZx3WhsLNpHKf9drzAqOf_nY,1369
 fabricatio/toolboxes/fs.py,sha256=l4L1CVxJmjw9Ld2XUpIlWfV0_Fu_2Og6d3E13I-S4aE,736
-fabricatio/toolboxes/__init__.py,sha256=KBJi5OG_pExscdlM7Bnt_UF43j4I3Lv6G71kPVu4KQU,395
 fabricatio/utils.py,sha256=WYhFB4tHk6jKmjZgAsYhRmg1ZvBjn4X2y4n7yz25HjE,5454
+fabricatio/workflows/__init__.py,sha256=5ScFSTA-bvhCesj3U9Mnmi6Law6N1fmh5UKyh58L3u8,51
 fabricatio/workflows/articles.py,sha256=ObYTFUqLUk_CzdmmnX6S7APfxcGmPFqnFr9pdjU7Z4Y,969
 fabricatio/workflows/rag.py,sha256=-YYp2tlE9Vtfgpg6ROpu6QVO8j8yVSPa6yDzlN3qVxs,520
-fabricatio/workflows/__init__.py,sha256=5ScFSTA-bvhCesj3U9Mnmi6Law6N1fmh5UKyh58L3u8,51
-fabricatio/__init__.py,sha256=w7ObFg6ud4pQuC1DhVyQI9x9dtp05QrcJAEk643iJmc,761
-fabricatio/rust.cp312-win_amd64.pyd,sha256=fpmxmtf2fPo_tceNOh7JIKjIV1R-s5Pif2wD8Fj7msY,7818752
-fabricatio-0.3.14.dev5.data/scripts/tdown.exe,sha256=YePgy8GewcQSj78G8EcePrHdBzbm3iJOR74wLQSg5Zs,3449344
-fabricatio-0.3.14.dev5.data/scripts/ttm.exe,sha256=1YTY-py7QIYYEcsJ6rIz5j3MLbmdHZ3XkZnBiFZ0YnI,2555904
-fabricatio-0.3.14.dev5.dist-info/RECORD,,
+fabricatio-0.3.15.dev4.dist-info/RECORD,,
@@ -1,4 +1,4 @@
 Wheel-Version: 1.0
-Generator: maturin (1.8.3)
+Generator: maturin (1.8.6)
 Root-Is-Purelib: false
 Tag: cp312-cp312-win_amd64
Binary file