langchain-ocr-lib 0.3.3__tar.gz → 0.4.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. {langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/PKG-INFO +2 -2
  2. {langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/README.md +0 -1
  3. {langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/pyproject.toml +2 -1
  4. {langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/di_config.py +10 -4
  5. {langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/impl/llms/llm_factory.py +1 -1
  6. {langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/impl/llms/llm_type.py +1 -0
  7. {langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/impl/settings/llm_class_type_settings.py +1 -1
  8. {langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/impl/settings/openai_chat_settings.py +3 -1
  9. langchain_ocr_lib-0.4.1/src/langchain_ocr_lib/impl/settings/together_ai_chat_settings.py +45 -0
  10. {langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/impl/settings/vllm_chat_settings.py +3 -1
  11. {langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/__init__.py +0 -0
  12. {langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/chains/__init__.py +0 -0
  13. {langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/chains/chain.py +0 -0
  14. {langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/converter/__init__.py +0 -0
  15. {langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/converter/converter.py +0 -0
  16. {langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/di_binding_keys/__init__.py +0 -0
  17. {langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/di_binding_keys/binding_keys.py +0 -0
  18. {langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/impl/__init__.py +0 -0
  19. {langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/impl/chains/__init__.py +0 -0
  20. {langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/impl/chains/ocr_chain.py +0 -0
  21. {langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/impl/converter/__init__.py +0 -0
  22. {langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/impl/converter/image_converter.py +0 -0
  23. {langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/impl/converter/pdf_converter.py +0 -0
  24. {langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/impl/langfuse_manager/__init__.py +0 -0
  25. {langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/impl/langfuse_manager/langfuse_manager.py +0 -0
  26. {langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/impl/llms/__init__.py +0 -0
  27. {langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/impl/settings/__init__.py +0 -0
  28. {langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/impl/settings/langfuse_settings.py +0 -0
  29. {langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/impl/settings/language_settings.py +0 -0
  30. {langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/impl/settings/ollama_chat_settings.py +0 -0
  31. {langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/impl/tracers/__init__.py +0 -0
  32. {langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/impl/tracers/langfuse_traced_chain.py +0 -0
  33. {langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/language_mapping/language_mapping.py +0 -0
  34. {langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/main.py +0 -0
  35. {langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/prompt_templates/__init__.py +0 -0
  36. {langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/prompt_templates/ocr_prompt.py +0 -0
  37. {langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/tracers/__init__.py +0 -0
  38. {langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/tracers/traced_chain.py +0 -0
{langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: langchain-ocr-lib
- Version: 0.3.3
+ Version: 0.4.1
  Summary: Modular, vision-LLM-powered chain to convert image and PDF documents into clean Markdown.
  License: MIT
  Author: Andreas Klos
@@ -16,6 +16,7 @@ Requires-Dist: inject (>=5.2.1,<6.0.0)
  Requires-Dist: langchain-community (>=0.3.19,<0.4.0)
  Requires-Dist: langchain-ollama (>=0.2.0,<0.3.0)
  Requires-Dist: langchain-openai (>=0.3.8,<0.4.0)
+ Requires-Dist: langchain-together (>=0.3.0,<0.4.0)
  Requires-Dist: langfuse (>=2.59.7,<3.0.0)
  Requires-Dist: openai (>=1.42.0,<2.0.0)
  Requires-Dist: pdf2image (>=1.17.0,<2.0.0)
@@ -127,7 +128,6 @@ Use the the library programmatically:
  ```python
  import inject

- import configure_di
  from langchain_ocr_lib.di_config import configure_di
  from langchain_ocr_lib.di_binding_keys.binding_keys import PdfConverterKey
  from langchain_ocr_lib.impl.converter.pdf_converter import Pdf2MarkdownConverter
{langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/README.md
@@ -100,7 +100,6 @@ Use the the library programmatically:
  ```python
  import inject

- import configure_di
  from langchain_ocr_lib.di_config import configure_di
  from langchain_ocr_lib.di_binding_keys.binding_keys import PdfConverterKey
  from langchain_ocr_lib.impl.converter.pdf_converter import Pdf2MarkdownConverter
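With the stray `import configure_di` removed, the README snippet imports only package modules. A minimal sketch of how such a programmatic call might continue, assuming `configure_di()` needs no arguments; the conversion method name below is illustrative, not taken from this diff:

```python
import inject

from langchain_ocr_lib.di_config import configure_di
from langchain_ocr_lib.di_binding_keys.binding_keys import PdfConverterKey
from langchain_ocr_lib.impl.converter.pdf_converter import Pdf2MarkdownConverter

configure_di()  # register the bindings defined in di_config (assumed to take no arguments)

# Resolve the PDF converter bound under PdfConverterKey.
converter: Pdf2MarkdownConverter = inject.instance(PdfConverterKey)

# Hypothetical call; consult the package README for the actual conversion API.
markdown = converter.convert2markdown(filename="document.pdf")
```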
{langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/pyproject.toml
@@ -7,7 +7,7 @@ langchain-ocr = "langchain_ocr_lib.main:main"

  [tool.poetry]
  name = "langchain-ocr-lib"
- version = "0.3.3"
+ version = "0.4.1"
  description = "Modular, vision-LLM-powered chain to convert image and PDF documents into clean Markdown."
  authors = ["Andreas Klos <aklos@outlook.de>"]
  readme = "README.md"
@@ -28,6 +28,7 @@ langfuse = "^2.59.7"
  pycountry = "^24.6.1"
  pdf2image = "^1.17.0"
  inject = "^5.2.1"
+ langchain-together = "^0.3.0"

  [tool.poetry.group.dev.dependencies]
  debugpy = "^1.8.1"
{langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/di_config.py
@@ -13,11 +13,13 @@ from langchain_ocr_lib.di_binding_keys.binding_keys import (
  )
  from langchain_ollama import ChatOllama
  from langchain_openai import ChatOpenAI
+ from langchain_together import ChatTogether
  from langfuse import Langfuse
  from functools import partial

  from langchain_ocr_lib.impl.chains.ocr_chain import OcrChain
  from langchain_ocr_lib.impl.settings.ollama_chat_settings import OllamaSettings
+ from langchain_ocr_lib.impl.settings.together_ai_chat_settings import TogetherAISettings
  from langchain_ocr_lib.impl.settings.vllm_chat_settings import VllmSettings
  from langchain_ocr_lib.impl.settings.openai_chat_settings import OpenAISettings
  from langchain_ocr_lib.impl.settings.llm_class_type_settings import LlmClassTypeSettings
@@ -52,18 +54,22 @@ def lib_di_config(binder: Binder):
      if llm_class_type_settings.llm_type == "ollama":
          settings = OllamaSettings()
          model_name = settings.model
-         partial_llm_provider = partial(llm_provider,settings, ChatOllama)
+         partial_llm_provider = partial(llm_provider, settings, ChatOllama)
      elif llm_class_type_settings.llm_type == "openai":
          settings = OpenAISettings()
          model_name = settings.model_name
-         partial_llm_provider = partial(llm_provider,settings, ChatOpenAI)
+         partial_llm_provider = partial(llm_provider, settings, ChatOpenAI)
      elif llm_class_type_settings.llm_type == "vllm":
          settings = VllmSettings()
          model_name = settings.model_name
-         partial_llm_provider = partial(llm_provider,settings, ChatOpenAI)
+         partial_llm_provider = partial(llm_provider, settings, ChatOpenAI)
+     elif llm_class_type_settings.llm_type == "togetherai":
+         settings = TogetherAISettings()
+         model_name = settings.model_name
+         partial_llm_provider = partial(llm_provider, settings, ChatTogether)
      else:
          raise NotImplementedError("Configured LLM is not implemented")
-
+
      binder.bind_to_provider(LargeLanguageModelKey, partial_llm_provider)

      prompt = ocr_prompt_template_builder(language=language_settings.language, model_name=model_name)
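The new branch binds `LargeLanguageModelKey` to a provider that builds a `ChatTogether` instance from `TogetherAISettings`. A minimal sketch of roughly what that resolves to, assuming `ChatTogether` accepts the model name and API key as keyword arguments; the real mapping happens inside `llm_provider` in `llm_factory.py`, which is not part of this hunk:

```python
from langchain_together import ChatTogether

from langchain_ocr_lib.impl.settings.together_ai_chat_settings import TogetherAISettings

# Sketch of the object the "togetherai" branch ultimately provides; the keyword
# names below are assumptions, not the exact llm_provider() wiring.
settings = TogetherAISettings()
llm = ChatTogether(
    model=settings.model_name,
    api_key=settings.together_api_key,
    temperature=settings.temperature,
)
```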
{langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/impl/llms/llm_factory.py
@@ -43,7 +43,7 @@ def get_configurable_fields_from(settings: BaseSettings) -> dict[str, Configurab
          settings_of_interest = settings.model_fields[field_name]
          if settings_of_interest.title is not None:
              _fields[field_name] = ConfigurableField(id=field_name, name=settings_of_interest.title)
-
+
      return _fields


{langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/impl/llms/llm_type.py
@@ -10,3 +10,4 @@ class LLMType(StrEnum):
      OLLAMA = "ollama"
      OPENAI = "openai"
      VLLM = "vllm"
+     TOGETHERAI = "togetherai"
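Since `LLMType` is a `StrEnum` (Python 3.11+), each member compares equal to its string value, which is why `di_config.py` can branch on plain strings such as `"togetherai"`. A minimal illustration:

```python
from enum import StrEnum


class LLMType(StrEnum):
    TOGETHERAI = "togetherai"


# StrEnum members are str instances, so string comparison works directly.
assert LLMType.TOGETHERAI == "togetherai"
```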
{langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/impl/settings/llm_class_type_settings.py
@@ -19,7 +19,7 @@ class LlmClassTypeSettings(BaseSettings):
      class Config:
          """Config class for reading Fields from env."""

-         env_prefix = "RAG_CLASS_TYPE_"
+         env_prefix = "LLM_CLASS_TYPE_"
          case_sensitive = False

      llm_type: LLMType = Field(
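Because the prefix changed, the environment variable that selects the backend changes too. A minimal sketch, assuming pydantic-settings' default "prefix + field name" resolution; the old `RAG_CLASS_TYPE_LLM_TYPE` variable is no longer read after this change:

```python
import os

# 0.3.3 read RAG_CLASS_TYPE_LLM_TYPE; 0.4.1 reads LLM_CLASS_TYPE_LLM_TYPE instead.
os.environ["LLM_CLASS_TYPE_LLM_TYPE"] = "togetherai"

from langchain_ocr_lib.impl.settings.llm_class_type_settings import LlmClassTypeSettings

assert LlmClassTypeSettings().llm_type == "togetherai"
```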
{langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/impl/settings/openai_chat_settings.py
@@ -35,7 +35,9 @@ class OpenAISettings(BaseSettings):
          title="LLM Model",
      )
      api_key: str = Field(default="", description="The API key for authentication")
-     top_p: float = Field(default=1.0, description="Total probability mass of tokens to consider at each step", title="Top P")
+     top_p: float = Field(
+         default=1.0, description="Total probability mass of tokens to consider at each step", title="Top P"
+     )
      temperature: float = Field(default=0, description="What sampling temperature to use", title="Temperature")
      base_url: str = Field(
          default="https://api.openai.com/v1",
langchain_ocr_lib-0.4.1/src/langchain_ocr_lib/impl/settings/together_ai_chat_settings.py
@@ -0,0 +1,45 @@
+ """Module contains settings regarding the Together AI API."""
+
+ from pydantic import Field
+ from pydantic_settings import BaseSettings
+
+
+ class TogetherAISettings(BaseSettings):
+     """
+     Contains settings regarding the Together AI API.
+
+     Attributes
+     ----------
+     model_name : str
+         The Together AI model identifier.
+     together_api_key : str
+         The API key for authentication.
+     top_p : float
+         Total probability mass of tokens to consider at each step.
+     temperature : float
+         What sampling temperature to use.
+     together_api_base : str
+         The base URL for the Together AI API endpoint.
+     """
+
+     class Config:
+         """Config class for reading fields from environment variables."""
+
+         env_prefix = "TOGETHER_"
+         case_sensitive = False
+
+     model_name: str = Field(
+         default="",
+         description="The Together AI model identifier",
+         title="Together AI Model",
+     )
+     together_api_key: str = Field(default="", description="The API key for authentication")
+     top_p: float = Field(
+         default=1.0, description="Total probability mass of tokens to consider at each step", title="Top P"
+     )
+     temperature: float = Field(default=0, description="What sampling temperature to use", title="Temperature")
+     together_api_base: str = Field(
+         default="https://api.together.xyz/v1/",
+         env="API_BASE",
+         description="The base URL for the Together AI API endpoint",
+     )
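With `env_prefix = "TOGETHER_"` and pydantic-settings' default naming, each field is read from an environment variable composed of the prefix plus the field name. A minimal sketch of loading these settings; the model identifier and key below are placeholders, and the variable names assume that default prefix+field resolution:

```python
import os

# Placeholder values for illustration only.
os.environ["TOGETHER_MODEL_NAME"] = "meta-llama/Llama-Vision-Free"
os.environ["TOGETHER_TOGETHER_API_KEY"] = "sk-..."  # prefix + field name "together_api_key"

from langchain_ocr_lib.impl.settings.together_ai_chat_settings import TogetherAISettings

settings = TogetherAISettings()
print(settings.model_name, settings.together_api_base)
```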
{langchain_ocr_lib-0.3.3 → langchain_ocr_lib-0.4.1}/src/langchain_ocr_lib/impl/settings/vllm_chat_settings.py
@@ -35,7 +35,9 @@ class VllmSettings(BaseSettings):
          title="LLM Model",
      )
      api_key: str = Field(default="", description="The API key for authentication")
-     top_p: float = Field(default=1.0, description="Total probability mass of tokens to consider at each step", title="Top P")
+     top_p: float = Field(
+         default=1.0, description="Total probability mass of tokens to consider at each step", title="Top P"
+     )
      temperature: float = Field(default=0, description="What sampling temperature to use", title="Temperature")
      base_url: str = Field(
          default="http://localhost:8000/v1",