langchain-ocr-lib 0.3.2__tar.gz → 0.3.3__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. {langchain_ocr_lib-0.3.2 → langchain_ocr_lib-0.3.3}/PKG-INFO +1 -1
  2. {langchain_ocr_lib-0.3.2 → langchain_ocr_lib-0.3.3}/pyproject.toml +1 -1
  3. {langchain_ocr_lib-0.3.2 → langchain_ocr_lib-0.3.3}/src/langchain_ocr_lib/di_config.py +5 -2
  4. {langchain_ocr_lib-0.3.2 → langchain_ocr_lib-0.3.3}/src/langchain_ocr_lib/impl/langfuse_manager/langfuse_manager.py +2 -1
  5. {langchain_ocr_lib-0.3.2 → langchain_ocr_lib-0.3.3}/src/langchain_ocr_lib/impl/llms/llm_factory.py +1 -0
  6. {langchain_ocr_lib-0.3.2 → langchain_ocr_lib-0.3.3}/src/langchain_ocr_lib/impl/settings/openai_chat_settings.py +7 -2
  7. {langchain_ocr_lib-0.3.2 → langchain_ocr_lib-0.3.3}/src/langchain_ocr_lib/impl/settings/vllm_chat_settings.py +7 -2
  8. {langchain_ocr_lib-0.3.2 → langchain_ocr_lib-0.3.3}/README.md +0 -0
  9. {langchain_ocr_lib-0.3.2 → langchain_ocr_lib-0.3.3}/src/langchain_ocr_lib/__init__.py +0 -0
  10. {langchain_ocr_lib-0.3.2 → langchain_ocr_lib-0.3.3}/src/langchain_ocr_lib/chains/__init__.py +0 -0
  11. {langchain_ocr_lib-0.3.2 → langchain_ocr_lib-0.3.3}/src/langchain_ocr_lib/chains/chain.py +0 -0
  12. {langchain_ocr_lib-0.3.2 → langchain_ocr_lib-0.3.3}/src/langchain_ocr_lib/converter/__init__.py +0 -0
  13. {langchain_ocr_lib-0.3.2 → langchain_ocr_lib-0.3.3}/src/langchain_ocr_lib/converter/converter.py +0 -0
  14. {langchain_ocr_lib-0.3.2 → langchain_ocr_lib-0.3.3}/src/langchain_ocr_lib/di_binding_keys/__init__.py +0 -0
  15. {langchain_ocr_lib-0.3.2 → langchain_ocr_lib-0.3.3}/src/langchain_ocr_lib/di_binding_keys/binding_keys.py +0 -0
  16. {langchain_ocr_lib-0.3.2 → langchain_ocr_lib-0.3.3}/src/langchain_ocr_lib/impl/__init__.py +0 -0
  17. {langchain_ocr_lib-0.3.2 → langchain_ocr_lib-0.3.3}/src/langchain_ocr_lib/impl/chains/__init__.py +0 -0
  18. {langchain_ocr_lib-0.3.2 → langchain_ocr_lib-0.3.3}/src/langchain_ocr_lib/impl/chains/ocr_chain.py +0 -0
  19. {langchain_ocr_lib-0.3.2 → langchain_ocr_lib-0.3.3}/src/langchain_ocr_lib/impl/converter/__init__.py +0 -0
  20. {langchain_ocr_lib-0.3.2 → langchain_ocr_lib-0.3.3}/src/langchain_ocr_lib/impl/converter/image_converter.py +0 -0
  21. {langchain_ocr_lib-0.3.2 → langchain_ocr_lib-0.3.3}/src/langchain_ocr_lib/impl/converter/pdf_converter.py +0 -0
  22. {langchain_ocr_lib-0.3.2 → langchain_ocr_lib-0.3.3}/src/langchain_ocr_lib/impl/langfuse_manager/__init__.py +0 -0
  23. {langchain_ocr_lib-0.3.2 → langchain_ocr_lib-0.3.3}/src/langchain_ocr_lib/impl/llms/__init__.py +0 -0
  24. {langchain_ocr_lib-0.3.2 → langchain_ocr_lib-0.3.3}/src/langchain_ocr_lib/impl/llms/llm_type.py +0 -0
  25. {langchain_ocr_lib-0.3.2 → langchain_ocr_lib-0.3.3}/src/langchain_ocr_lib/impl/settings/__init__.py +0 -0
  26. {langchain_ocr_lib-0.3.2 → langchain_ocr_lib-0.3.3}/src/langchain_ocr_lib/impl/settings/langfuse_settings.py +0 -0
  27. {langchain_ocr_lib-0.3.2 → langchain_ocr_lib-0.3.3}/src/langchain_ocr_lib/impl/settings/language_settings.py +0 -0
  28. {langchain_ocr_lib-0.3.2 → langchain_ocr_lib-0.3.3}/src/langchain_ocr_lib/impl/settings/llm_class_type_settings.py +0 -0
  29. {langchain_ocr_lib-0.3.2 → langchain_ocr_lib-0.3.3}/src/langchain_ocr_lib/impl/settings/ollama_chat_settings.py +0 -0
  30. {langchain_ocr_lib-0.3.2 → langchain_ocr_lib-0.3.3}/src/langchain_ocr_lib/impl/tracers/__init__.py +0 -0
  31. {langchain_ocr_lib-0.3.2 → langchain_ocr_lib-0.3.3}/src/langchain_ocr_lib/impl/tracers/langfuse_traced_chain.py +0 -0
  32. {langchain_ocr_lib-0.3.2 → langchain_ocr_lib-0.3.3}/src/langchain_ocr_lib/language_mapping/language_mapping.py +0 -0
  33. {langchain_ocr_lib-0.3.2 → langchain_ocr_lib-0.3.3}/src/langchain_ocr_lib/main.py +0 -0
  34. {langchain_ocr_lib-0.3.2 → langchain_ocr_lib-0.3.3}/src/langchain_ocr_lib/prompt_templates/__init__.py +0 -0
  35. {langchain_ocr_lib-0.3.2 → langchain_ocr_lib-0.3.3}/src/langchain_ocr_lib/prompt_templates/ocr_prompt.py +0 -0
  36. {langchain_ocr_lib-0.3.2 → langchain_ocr_lib-0.3.3}/src/langchain_ocr_lib/tracers/__init__.py +0 -0
  37. {langchain_ocr_lib-0.3.2 → langchain_ocr_lib-0.3.3}/src/langchain_ocr_lib/tracers/traced_chain.py +0 -0
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: langchain-ocr-lib
-Version: 0.3.2
+Version: 0.3.3
 Summary: Modular, vision-LLM-powered chain to convert image and PDF documents into clean Markdown.
 License: MIT
 Author: Andreas Klos
@@ -7,7 +7,7 @@ langchain-ocr = "langchain_ocr_lib.main:main"
 
 [tool.poetry]
 name = "langchain-ocr-lib"
-version = "0.3.2"
+version = "0.3.3"
 description = "Modular, vision-LLM-powered chain to convert image and PDF documents into clean Markdown."
 authors = ["Andreas Klos <aklos@outlook.de>"]
 readme = "README.md"
@@ -48,22 +48,25 @@ def lib_di_config(binder: Binder):
     langfuse_settings = LangfuseSettings()
     llm_class_type_settings = LlmClassTypeSettings()
     language_settings = LanguageSettings()
-
+    model_name = ""
     if llm_class_type_settings.llm_type == "ollama":
         settings = OllamaSettings()
+        model_name = settings.model
         partial_llm_provider = partial(llm_provider,settings, ChatOllama)
     elif llm_class_type_settings.llm_type == "openai":
         settings = OpenAISettings()
+        model_name = settings.model_name
         partial_llm_provider = partial(llm_provider,settings, ChatOpenAI)
     elif llm_class_type_settings.llm_type == "vllm":
         settings = VllmSettings()
+        model_name = settings.model_name
         partial_llm_provider = partial(llm_provider,settings, ChatOpenAI)
     else:
         raise NotImplementedError("Configured LLM is not implemented")
 
     binder.bind_to_provider(LargeLanguageModelKey, partial_llm_provider)
 
-    prompt = ocr_prompt_template_builder(language=language_settings.language, model_name=settings.model)
+    prompt = ocr_prompt_template_builder(language=language_settings.language, model_name=model_name)
 
     binder.bind(
         LangfuseClientKey,
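
The net effect of this hunk: the prompt template now receives the model identifier from whichever settings class was selected, instead of always reading settings.model, which only the Ollama settings still expose after the rename further down in this release. A minimal, self-contained sketch of the selection pattern, using stand-in dataclasses rather than the library's real settings classes:

# Sketch of the provider-specific model_name selection. The classes below are
# hypothetical stand-ins; only the field names mirror the library
# (Ollama exposes `model`, OpenAI/vLLM expose `model_name` as of 0.3.3).
from dataclasses import dataclass

@dataclass
class OllamaSettings:
    model: str = "llama3.2-vision"  # hypothetical default

@dataclass
class OpenAISettings:
    model_name: str = "gpt-4o-mini-search-preview-2025-03-11"

def resolve_model_name(llm_type: str) -> str:
    model_name = ""
    if llm_type == "ollama":
        model_name = OllamaSettings().model
    elif llm_type == "openai":
        model_name = OpenAISettings().model_name
    else:
        raise NotImplementedError("Configured LLM is not implemented")
    return model_name

print(resolve_model_name("openai"))  # gpt-4o-mini-search-preview-2025-03-11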
@@ -58,9 +58,10 @@ class LangfuseManager:
         Exception
             If an error occurs while retrieving the prompt template from Langfuse.
         """
+        langfuse_prompt = None
         if not self._enabled:
             logger.info("Langfuse is not enabled. Using fallback prompt.")
-            return None
+            return langfuse_prompt
         try:
             langfuse_prompt = self._langfuse.get_prompt(base_prompt_name)
         except NotFoundError:
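
Pre-initializing langfuse_prompt binds the name on every code path, presumably so that branches after the try/except (not visible in this hunk) cannot hit an UnboundLocalError when the fetch fails. A small illustrative sketch of the pattern, not the library's actual method:

# Illustrative only: shows why binding the name up front matters when later
# code may read it after the fetch failed. `fetch` is a stand-in for
# Langfuse's get_prompt call.
def get_base_prompt(enabled: bool, fetch):
    langfuse_prompt = None  # bound on every path
    if not enabled:
        return langfuse_prompt
    try:
        langfuse_prompt = fetch()
    except KeyError:  # stand-in for langfuse's NotFoundError
        pass  # fall through with the None fallback
    return langfuse_prompt

def failing_fetch():
    raise KeyError("prompt not found")

print(get_base_prompt(True, failing_fetch))  # None, not UnboundLocalError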
@@ -43,6 +43,7 @@ def get_configurable_fields_from(settings: BaseSettings) -> dict[str, Configurab
         settings_of_interest = settings.model_fields[field_name]
         if settings_of_interest.title is not None:
             _fields[field_name] = ConfigurableField(id=field_name, name=settings_of_interest.title)
+
     return _fields
 
 
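For context, get_configurable_fields_from turns every settings field that declares a title into a LangChain ConfigurableField; the renamed model_name fields below keep title="LLM Model", so the model stays runtime-configurable. A self-contained sketch of that pattern (DemoSettings and the loop header are assumptions; only the loop body is taken from the hunk above):

# Standalone sketch: only fields that declare a title become ConfigurableFields.
# DemoSettings is hypothetical; the loop body mirrors the hunk above.
from langchain_core.runnables import ConfigurableField
from pydantic import Field
from pydantic_settings import BaseSettings

class DemoSettings(BaseSettings):
    model_name: str = Field(default="demo-model", title="LLM Model")
    api_key: str = Field(default="")  # no title -> not exposed as configurable

def get_configurable_fields_from(settings: BaseSettings) -> dict[str, ConfigurableField]:
    _fields = {}
    for field_name in settings.model_fields:
        settings_of_interest = settings.model_fields[field_name]
        if settings_of_interest.title is not None:
            _fields[field_name] = ConfigurableField(id=field_name, name=settings_of_interest.title)
    return _fields

print(get_configurable_fields_from(DemoSettings()))
# {'model_name': ConfigurableField(id='model_name', name='LLM Model', ...)}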
@@ -10,7 +10,7 @@ class OpenAISettings(BaseSettings):
 
     Attributes
     ----------
-    model : str
+    model_name : str
         The model identifier.
     api_key : str
         The API key for authentication.
@@ -28,7 +28,12 @@ class OpenAISettings(BaseSettings):
         env_prefix = "OPENAI_"
         case_sensitive = False
 
-    model: str = Field(default="gpt-4o-mini-search-preview-2025-03-11", description="The model identifier", title="LLM Model")
+    model_name: str = Field(
+        default="gpt-4o-mini-search-preview-2025-03-11",
+        env="MODEL",
+        description="The model identifier",
+        title="LLM Model",
+    )
     api_key: str = Field(default="", description="The API key for authentication")
     top_p: float = Field(default=1.0, description="Total probability mass of tokens to consider at each step", title="Top P")
     temperature: float = Field(default=0, description="What sampling temperature to use", title="Temperature")
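
The rename from model to model_name is an API change for code that constructs OpenAISettings directly. A minimal sketch, assuming langchain-ocr-lib 0.3.3 is installed and that the fields not shown in this hunk also have defaults; which environment variable actually feeds the field (MODEL vs. the OPENAI_-prefixed form) depends on how the installed pydantic/pydantic-settings version combines Field(env=...) with env_prefix, so it is not asserted here:

# Sketch: construct the settings explicitly with the renamed field.
# "not-a-real-key" is a placeholder; code that passed `model=` against 0.3.2
# now has to pass `model_name=` instead.
from langchain_ocr_lib.impl.settings.openai_chat_settings import OpenAISettings

settings = OpenAISettings(model_name="gpt-4o-mini", api_key="not-a-real-key")
print(settings.model_name)   # gpt-4o-mini
print(settings.temperature)  # 0.0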
@@ -10,7 +10,7 @@ class VllmSettings(BaseSettings):
 
     Attributes
     ----------
-    model : str
+    model_name : str
         The model identifier.
     api_key : str
         The API key for authentication.
@@ -28,7 +28,12 @@ class VllmSettings(BaseSettings):
         env_prefix = "VLLM_"
         case_sensitive = False
 
-    model: str = Field(default="", description="The model identifier", title="LLM Model")
+    model_name: str = Field(
+        default="",
+        env="MODEL",
+        description="The model identifier",
+        title="LLM Model",
+    )
     api_key: str = Field(default="", description="The API key for authentication")
     top_p: float = Field(default=1.0, description="Total probability mass of tokens to consider at each step", title="Top P")
     temperature: float = Field(default=0, description="What sampling temperature to use", title="Temperature")
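
Same rename for the vLLM settings, with one practical difference: the default model_name is empty, so a concrete model identifier has to be supplied via configuration or the constructor. A short sketch, again assuming the package is installed and the remaining fields have defaults; the model id is a hypothetical example:

# Sketch: an empty default means the served model must be named explicitly.
from langchain_ocr_lib.impl.settings.vllm_chat_settings import VllmSettings

settings = VllmSettings(model_name="Qwen/Qwen2.5-VL-7B-Instruct")  # hypothetical model id
print(settings.model_name)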