langchain-ocr-lib 0.3.0__py3-none-any.whl → 0.3.1__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
@@ -14,6 +14,7 @@ from langchain_ocr_lib.di_binding_keys.binding_keys import (
 from langchain_ollama import ChatOllama
 from langchain_openai import ChatOpenAI
 from langfuse import Langfuse
+from functools import partial
 
 from langchain_ocr_lib.impl.chains.ocr_chain import OcrChain
 from langchain_ocr_lib.impl.settings.ollama_chat_settings import OllamaSettings
@@ -50,16 +51,17 @@ def lib_di_config(binder: Binder):
 
     if llm_class_type_settings.llm_type == "ollama":
         settings = OllamaSettings()
-        llm_instance = llm_provider(settings, ChatOllama)
+        partial_llm_provider = partial(llm_provider,settings, ChatOllama)
     elif llm_class_type_settings.llm_type == "openai":
         settings = OpenAISettings()
-        llm_instance = llm_provider(settings, ChatOpenAI)
+        partial_llm_provider = partial(llm_provider,settings, ChatOpenAI)
     elif llm_class_type_settings.llm_type == "vllm":
         settings = VllmSettings()
-        llm_instance = llm_provider(settings, ChatOpenAI)
+        partial_llm_provider = partial(llm_provider,settings, ChatOpenAI)
     else:
         raise NotImplementedError("Configured LLM is not implemented")
-    binder.bind(LargeLanguageModelKey, llm_instance)
+
+    binder.bind_to_provider(LargeLanguageModelKey, partial_llm_provider)
 
     prompt = ocr_prompt_template_builder(language=language_settings.language, model_name=settings.model)
 
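The net effect of this hunk: instead of constructing the chat model eagerly inside lib_di_config and binding the instance, the factory call is wrapped in functools.partial and registered with binder.bind_to_provider, so the model is only built when the dependency is injected. Below is a minimal sketch of the pattern, assuming the python-inject API that the Binder/bind_to_provider calls suggest; all LLMKey/Fake* names are hypothetical stand-ins, not the library's real identifiers.

```python
# Minimal sketch of the binding change, assuming the python-inject library.
from functools import partial

import inject


class LLMKey:
    """Hypothetical stand-in for LargeLanguageModelKey."""


class FakeSettings:
    """Hypothetical stand-in for OllamaSettings / OpenAISettings / VllmSettings."""
    model = "example-model"


class FakeChatModel:
    """Hypothetical stand-in for ChatOllama / ChatOpenAI."""
    def __init__(self, model: str):
        self.model = model


def llm_provider(settings, llm_cls):
    # Stand-in factory: the real llm_provider builds a chat model from settings.
    return llm_cls(model=settings.model)


def config(binder: inject.Binder):
    # 0.3.0: the model was built once, right here, and bound as an instance:
    #   binder.bind(LLMKey, llm_provider(FakeSettings(), FakeChatModel))
    # 0.3.1: a zero-argument provider is bound instead, so construction is
    # deferred until the dependency is actually injected.
    binder.bind_to_provider(LLMKey, partial(llm_provider, FakeSettings(), FakeChatModel))


inject.configure(config)
print(inject.instance(LLMKey).model)  # the provider runs here, not at configure() time
```

If this is python-inject, bind_to_provider calls the provider on every injection, whereas bind_to_constructor would cache a singleton; either way, building the LLM moves out of the wiring phase, so settings problems surface at first use rather than at configuration time.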
@@ -35,20 +35,6 @@ class LangfuseManager:
     ):
         self._managed_prompts = managed_prompts
 
-    def init_prompts(self) -> None:
-        """
-        Initialize the prompts managed by the LangfuseManager.
-
-        This method iterates over the keys of the managed prompts and retrieves
-        each prompt using the `get_langfuse_prompt` method.
-
-        Returns
-        -------
-        None
-        """
-        for key in list(self._managed_prompts.keys()):
-            self.get_langfuse_prompt(key)
-
     def get_langfuse_prompt(self, base_prompt_name: str) -> Optional[ChatPromptClient]:
         """
         Retrieve the prompt from Langfuse Prompt Management.
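The only change here is the removal of init_prompts, the eager pass that pre-fetched every managed prompt when the manager was set up; prompts are now retrieved only when get_langfuse_prompt is called for them. An illustrative sketch of the before/after behaviour, with hypothetical names rather than the class's real API:

```python
# Illustrative sketch only (hypothetical names): nothing is pre-fetched at
# construction time any more; a prompt is retrieved when first requested.
from typing import Dict, Optional


class PromptManagerSketch:
    def __init__(self, managed_prompts: Dict[str, str]):
        self._managed_prompts = managed_prompts
        # 0.3.0 ran an eager init_prompts() loop over all keys here; 0.3.1 does not.

    def get_prompt(self, name: str) -> Optional[str]:
        # Lazy lookup; the real class resolves the prompt via Langfuse instead.
        return self._managed_prompts.get(name)
```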
@@ -35,7 +35,7 @@ class OllamaSettings(BaseSettings):
         env_prefix = "OLLAMA_"
         case_sensitive = False
 
-    model: str = Field(default="gemma3:4b-it-q4_K_M")
+    model: str = Field(default="gemma3:4b-it-q4_K_M", title="LLM Model")
     base_url: str = Field(default="http://localhost:11434")
     top_k: int = Field(default=0, title="LLM Top K")
     top_p: float = Field(default=0, title="LLM Top P")
@@ -28,10 +28,10 @@ class OpenAISettings(BaseSettings):
         env_prefix = "OPENAI_"
         case_sensitive = False
 
-    model: str = Field(default="gpt-4o-mini-search-preview-2025-03-11", description="The model identifier")
+    model: str = Field(default="gpt-4o-mini-search-preview-2025-03-11", description="The model identifier", title="LLM Model")
     api_key: str = Field(default="", description="The API key for authentication")
-    top_p: float = Field(default=1.0, description="Total probability mass of tokens to consider at each step")
-    temperature: float = Field(default=0, description="What sampling temperature to use")
+    top_p: float = Field(default=1.0, description="Total probability mass of tokens to consider at each step", title="Top P")
+    temperature: float = Field(default=0, description="What sampling temperature to use", title="Temperature")
     base_url: str = Field(
         default="https://api.openai.com/v1",
         description="The base URL for the OpenAI API endpoint",
@@ -28,10 +28,10 @@ class VllmSettings(BaseSettings):
         env_prefix = "VLLM_"
         case_sensitive = False
 
-    model: str = Field(default="", description="The model identifier")
+    model: str = Field(default="", description="The model identifier", title="LLM Model")
     api_key: str = Field(default="", description="The API key for authentication")
-    top_p: float = Field(default=1.0, description="Total probability mass of tokens to consider at each step")
-    temperature: float = Field(default=0, description="What sampling temperature to use")
+    top_p: float = Field(default=1.0, description="Total probability mass of tokens to consider at each step", title="Top P")
+    temperature: float = Field(default=0, description="What sampling temperature to use", title="Temperature")
     base_url: str = Field(
         default="http://localhost:8000/v1",
         description="The base URL for the Vllm API endpoint",
@@ -1,7 +1,7 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.3
 Name: langchain-ocr-lib
-Version: 0.3.0
-Summary:
+Version: 0.3.1
+Summary: Modular, vision-LLM-powered chain to convert image and PDF documents into clean Markdown.
 License: MIT
 Author: Andreas Klos
 Author-email: aklos@outlook.de
@@ -5,7 +5,7 @@ langchain_ocr_lib/converter/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMp
 langchain_ocr_lib/converter/converter.py,sha256=oDUNzVWD743RgqIal7T4OVv-Z1RKE9uQYzAIPpgY3o8,1280
 langchain_ocr_lib/di_binding_keys/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 langchain_ocr_lib/di_binding_keys/binding_keys.py,sha256=jE8rwNcLaI0NflIMkK0vu0LVy5o4y0pYgdjbpDNTGyk,338
-langchain_ocr_lib/di_config.py,sha256=eYzDi_LJaYY_JhRnNqW3VYGd3N1QblaGFjWlL_6Vx9c,3537
+langchain_ocr_lib/di_config.py,sha256=K11ZHkUDP1TsYzZSRMnrbnroovw-_CCbyxHNo9kjRCw,3640
 langchain_ocr_lib/impl/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 langchain_ocr_lib/impl/chains/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 langchain_ocr_lib/impl/chains/ocr_chain.py,sha256=stE8RLE1ieRHf6XHreKCRfhNfXzw9fNLTake7xQBGL8,2673
@@ -13,7 +13,7 @@ langchain_ocr_lib/impl/converter/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRk
 langchain_ocr_lib/impl/converter/image_converter.py,sha256=G1rDOCbudWNL4sDvSGJ7CeeFrWUblfWPGaZf5JsnpiM,2871
 langchain_ocr_lib/impl/converter/pdf_converter.py,sha256=pTHPojuNLCSWJp4FzXBHshXva2sBGyOs6Y7jnKJrnNo,3760
 langchain_ocr_lib/impl/langfuse_manager/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-langchain_ocr_lib/impl/langfuse_manager/langfuse_manager.py,sha256=AEF1iFYghr-62gcPcUb4Yi3DNRYfe-JsIWd3ymsIU8I,5403
+langchain_ocr_lib/impl/langfuse_manager/langfuse_manager.py,sha256=nfIuOSOsewH6azNNtrsGxSrGI2Blt2fhbp-PBbgXJ2I,4995
 langchain_ocr_lib/impl/llms/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 langchain_ocr_lib/impl/llms/llm_factory.py,sha256=9DsUdoYNrjeWLGA9ISDdHN2cxcQ7DquNQ5it6zSxHlg,2199
 langchain_ocr_lib/impl/llms/llm_type.py,sha256=_LKtdVuTRYX6gupkxJtEtIwrbtiMvZmG8WOxfzlm42M,286
@@ -21,9 +21,9 @@ langchain_ocr_lib/impl/settings/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm
 langchain_ocr_lib/impl/settings/langfuse_settings.py,sha256=5lr3tVeiHXDUaYtWAnZPXrKxBJgM2wgaz7yyZThhCsE,812
 langchain_ocr_lib/impl/settings/language_settings.py,sha256=tdAC1t5wGu1MoH1jhjkDnxnX4Ui7giwxt7Qm8_LPkP8,627
 langchain_ocr_lib/impl/settings/llm_class_type_settings.py,sha256=4KC6zxby13wn38rB8055J8LNVTsmUfrOiyLtLuToHaM,598
-langchain_ocr_lib/impl/settings/ollama_chat_settings.py,sha256=8RWMsaK4qDrqC6Mrxekr8IEDYwcvjYwhw9xDwZemxI4,1506
-langchain_ocr_lib/impl/settings/openai_chat_settings.py,sha256=gZqmFYDtF0l5lEAnuT2VzdqLWKnTPSK_lTeg7ERmJas,1276
-langchain_ocr_lib/impl/settings/vllm_chat_settings.py,sha256=y8PPNUcce1uA4kEu6p0p5vCwCOGp9uEEvHbCoS1Ohh8,1226
+langchain_ocr_lib/impl/settings/ollama_chat_settings.py,sha256=YQkgD7CfOjHN5wkpJakO0GfM7-D2GqoJLP1gB2932ms,1525
+langchain_ocr_lib/impl/settings/openai_chat_settings.py,sha256=NqVfkcI8OoD8TVxyv4l0G9ycUC6LIs6Qs4kQRL24doA,1331
+langchain_ocr_lib/impl/settings/vllm_chat_settings.py,sha256=Zr4L6Urp-f1JZu7Q1dwL6671EQbrIIYL0ubJSQlod3c,1281
 langchain_ocr_lib/impl/tracers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 langchain_ocr_lib/impl/tracers/langfuse_traced_chain.py,sha256=syjwNt8HfVmaWXZ-ElFYsc-KwpnKQz2LE3K5jV7c3GE,1599
 langchain_ocr_lib/language_mapping/language_mapping.py,sha256=VY7WkkZauoHNxkvgUYbig0rDmlKqDkz24cXMd6A7txM,700
@@ -32,7 +32,7 @@ langchain_ocr_lib/prompt_templates/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQe
 langchain_ocr_lib/prompt_templates/ocr_prompt.py,sha256=3Be1AL-HJkxPnAP0DNH1MqvAxFWTCeM5UOKP63xkHsY,3543
 langchain_ocr_lib/tracers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 langchain_ocr_lib/tracers/traced_chain.py,sha256=uxRkdLNn_G6dAsti_gUuF7muhIj10xrOUL7HUga40oc,3056
-langchain_ocr_lib-0.3.0.dist-info/METADATA,sha256=IaqIz9OXgu5WQXwEVpLmMNLmz2w3IowWmdZ7kt5O6VM,6240
-langchain_ocr_lib-0.3.0.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
-langchain_ocr_lib-0.3.0.dist-info/entry_points.txt,sha256=l4mIs0tnIgbJYuVveZySQKVBnqNMHS-8ZZtLwz8ag5k,61
-langchain_ocr_lib-0.3.0.dist-info/RECORD,,
+langchain_ocr_lib-0.3.1.dist-info/METADATA,sha256=3FLR8CPBSpusTzAslaBIC5_4sz27ofyvkDQ3oGl4Nwo,6329
+langchain_ocr_lib-0.3.1.dist-info/WHEEL,sha256=fGIA9gx4Qxk2KDKeNJCbOEwSrmLtjWCwzBz351GyrPQ,88
+langchain_ocr_lib-0.3.1.dist-info/entry_points.txt,sha256=l4mIs0tnIgbJYuVveZySQKVBnqNMHS-8ZZtLwz8ag5k,61
+langchain_ocr_lib-0.3.1.dist-info/RECORD,,
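For context on the RECORD hunks above: each line has the form path,sha256=<digest>,<size>, where the digest is the URL-safe base64 encoding of the file's SHA-256 hash with trailing '=' padding stripped (per the wheel spec) and the last field is the file size in bytes. That is why every file touched in this release shows both a new hash and a new size. A small sketch of how such an entry is derived:

```python
# Sketch of how a wheel RECORD line is formed: path,sha256=<digest>,<size>,
# using the wheel spec's unpadded urlsafe-base64 SHA-256 encoding.
import base64
import hashlib
from pathlib import Path


def record_entry(path: str) -> str:
    data = Path(path).read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=").decode()
    return f"{path},sha256={digest},{len(data)}"


# e.g. record_entry("langchain_ocr_lib/di_config.py") run against the 0.3.1 file
# would reproduce the ",sha256=...,3640" entry shown above.
```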
@@ -1,4 +1,4 @@
 Wheel-Version: 1.0
-Generator: poetry-core 1.9.1
+Generator: poetry-core 2.1.2
 Root-Is-Purelib: true
 Tag: py3-none-any