kiln-ai 0.20.1__py3-none-any.whl → 0.21.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of kiln-ai might be problematic. Click here for more details.

Files changed (117) hide show
  1. kiln_ai/adapters/__init__.py +6 -0
  2. kiln_ai/adapters/adapter_registry.py +43 -226
  3. kiln_ai/adapters/chunkers/__init__.py +13 -0
  4. kiln_ai/adapters/chunkers/base_chunker.py +42 -0
  5. kiln_ai/adapters/chunkers/chunker_registry.py +16 -0
  6. kiln_ai/adapters/chunkers/fixed_window_chunker.py +39 -0
  7. kiln_ai/adapters/chunkers/helpers.py +23 -0
  8. kiln_ai/adapters/chunkers/test_base_chunker.py +63 -0
  9. kiln_ai/adapters/chunkers/test_chunker_registry.py +28 -0
  10. kiln_ai/adapters/chunkers/test_fixed_window_chunker.py +346 -0
  11. kiln_ai/adapters/chunkers/test_helpers.py +75 -0
  12. kiln_ai/adapters/data_gen/test_data_gen_task.py +9 -3
  13. kiln_ai/adapters/embedding/__init__.py +0 -0
  14. kiln_ai/adapters/embedding/base_embedding_adapter.py +44 -0
  15. kiln_ai/adapters/embedding/embedding_registry.py +32 -0
  16. kiln_ai/adapters/embedding/litellm_embedding_adapter.py +199 -0
  17. kiln_ai/adapters/embedding/test_base_embedding_adapter.py +283 -0
  18. kiln_ai/adapters/embedding/test_embedding_registry.py +166 -0
  19. kiln_ai/adapters/embedding/test_litellm_embedding_adapter.py +1149 -0
  20. kiln_ai/adapters/eval/eval_runner.py +6 -2
  21. kiln_ai/adapters/eval/test_base_eval.py +1 -3
  22. kiln_ai/adapters/eval/test_g_eval.py +1 -1
  23. kiln_ai/adapters/extractors/__init__.py +18 -0
  24. kiln_ai/adapters/extractors/base_extractor.py +72 -0
  25. kiln_ai/adapters/extractors/encoding.py +20 -0
  26. kiln_ai/adapters/extractors/extractor_registry.py +44 -0
  27. kiln_ai/adapters/extractors/extractor_runner.py +112 -0
  28. kiln_ai/adapters/extractors/litellm_extractor.py +386 -0
  29. kiln_ai/adapters/extractors/test_base_extractor.py +244 -0
  30. kiln_ai/adapters/extractors/test_encoding.py +54 -0
  31. kiln_ai/adapters/extractors/test_extractor_registry.py +181 -0
  32. kiln_ai/adapters/extractors/test_extractor_runner.py +181 -0
  33. kiln_ai/adapters/extractors/test_litellm_extractor.py +1192 -0
  34. kiln_ai/adapters/fine_tune/test_dataset_formatter.py +2 -2
  35. kiln_ai/adapters/fine_tune/test_fireworks_finetune.py +2 -6
  36. kiln_ai/adapters/fine_tune/test_together_finetune.py +2 -6
  37. kiln_ai/adapters/ml_embedding_model_list.py +192 -0
  38. kiln_ai/adapters/ml_model_list.py +382 -4
  39. kiln_ai/adapters/model_adapters/litellm_adapter.py +7 -69
  40. kiln_ai/adapters/model_adapters/test_litellm_adapter.py +1 -1
  41. kiln_ai/adapters/model_adapters/test_structured_output.py +3 -1
  42. kiln_ai/adapters/ollama_tools.py +69 -12
  43. kiln_ai/adapters/provider_tools.py +190 -46
  44. kiln_ai/adapters/rag/deduplication.py +49 -0
  45. kiln_ai/adapters/rag/progress.py +252 -0
  46. kiln_ai/adapters/rag/rag_runners.py +844 -0
  47. kiln_ai/adapters/rag/test_deduplication.py +195 -0
  48. kiln_ai/adapters/rag/test_progress.py +785 -0
  49. kiln_ai/adapters/rag/test_rag_runners.py +2376 -0
  50. kiln_ai/adapters/remote_config.py +80 -8
  51. kiln_ai/adapters/test_adapter_registry.py +579 -86
  52. kiln_ai/adapters/test_ml_embedding_model_list.py +429 -0
  53. kiln_ai/adapters/test_ml_model_list.py +212 -0
  54. kiln_ai/adapters/test_ollama_tools.py +340 -1
  55. kiln_ai/adapters/test_prompt_builders.py +1 -1
  56. kiln_ai/adapters/test_provider_tools.py +199 -8
  57. kiln_ai/adapters/test_remote_config.py +551 -56
  58. kiln_ai/adapters/vector_store/__init__.py +1 -0
  59. kiln_ai/adapters/vector_store/base_vector_store_adapter.py +83 -0
  60. kiln_ai/adapters/vector_store/lancedb_adapter.py +389 -0
  61. kiln_ai/adapters/vector_store/test_base_vector_store.py +160 -0
  62. kiln_ai/adapters/vector_store/test_lancedb_adapter.py +1841 -0
  63. kiln_ai/adapters/vector_store/test_vector_store_registry.py +199 -0
  64. kiln_ai/adapters/vector_store/vector_store_registry.py +33 -0
  65. kiln_ai/datamodel/__init__.py +16 -13
  66. kiln_ai/datamodel/basemodel.py +170 -1
  67. kiln_ai/datamodel/chunk.py +158 -0
  68. kiln_ai/datamodel/datamodel_enums.py +27 -0
  69. kiln_ai/datamodel/embedding.py +64 -0
  70. kiln_ai/datamodel/extraction.py +303 -0
  71. kiln_ai/datamodel/project.py +33 -1
  72. kiln_ai/datamodel/rag.py +79 -0
  73. kiln_ai/datamodel/test_attachment.py +649 -0
  74. kiln_ai/datamodel/test_basemodel.py +1 -1
  75. kiln_ai/datamodel/test_chunk_models.py +317 -0
  76. kiln_ai/datamodel/test_dataset_split.py +1 -1
  77. kiln_ai/datamodel/test_embedding_models.py +448 -0
  78. kiln_ai/datamodel/test_eval_model.py +6 -6
  79. kiln_ai/datamodel/test_extraction_chunk.py +206 -0
  80. kiln_ai/datamodel/test_extraction_model.py +470 -0
  81. kiln_ai/datamodel/test_rag.py +641 -0
  82. kiln_ai/datamodel/test_tool_id.py +81 -0
  83. kiln_ai/datamodel/test_vector_store.py +320 -0
  84. kiln_ai/datamodel/tool_id.py +22 -0
  85. kiln_ai/datamodel/vector_store.py +141 -0
  86. kiln_ai/tools/mcp_session_manager.py +4 -1
  87. kiln_ai/tools/rag_tools.py +157 -0
  88. kiln_ai/tools/test_mcp_session_manager.py +1 -1
  89. kiln_ai/tools/test_rag_tools.py +848 -0
  90. kiln_ai/tools/test_tool_registry.py +91 -2
  91. kiln_ai/tools/tool_registry.py +21 -0
  92. kiln_ai/utils/__init__.py +3 -0
  93. kiln_ai/utils/async_job_runner.py +62 -17
  94. kiln_ai/utils/config.py +2 -2
  95. kiln_ai/utils/env.py +15 -0
  96. kiln_ai/utils/filesystem.py +14 -0
  97. kiln_ai/utils/filesystem_cache.py +60 -0
  98. kiln_ai/utils/litellm.py +94 -0
  99. kiln_ai/utils/lock.py +100 -0
  100. kiln_ai/utils/mime_type.py +38 -0
  101. kiln_ai/utils/pdf_utils.py +38 -0
  102. kiln_ai/utils/test_async_job_runner.py +151 -35
  103. kiln_ai/utils/test_env.py +142 -0
  104. kiln_ai/utils/test_filesystem_cache.py +316 -0
  105. kiln_ai/utils/test_litellm.py +206 -0
  106. kiln_ai/utils/test_lock.py +185 -0
  107. kiln_ai/utils/test_mime_type.py +66 -0
  108. kiln_ai/utils/test_pdf_utils.py +73 -0
  109. kiln_ai/utils/test_uuid.py +111 -0
  110. kiln_ai/utils/test_validation.py +524 -0
  111. kiln_ai/utils/uuid.py +9 -0
  112. kiln_ai/utils/validation.py +90 -0
  113. {kiln_ai-0.20.1.dist-info → kiln_ai-0.21.0.dist-info}/METADATA +7 -1
  114. kiln_ai-0.21.0.dist-info/RECORD +211 -0
  115. kiln_ai-0.20.1.dist-info/RECORD +0 -138
  116. {kiln_ai-0.20.1.dist-info → kiln_ai-0.21.0.dist-info}/WHEEL +0 -0
  117. {kiln_ai-0.20.1.dist-info → kiln_ai-0.21.0.dist-info}/licenses/LICENSE.txt +0 -0
@@ -206,7 +206,7 @@ def test_generate_chat_message_toolcall(mock_training_chat_two_step_json):
206
206
 
207
207
  def test_generate_chat_message_toolcall_invalid_json(mock_training_chat_two_step_json):
208
208
  mock_training_chat_two_step_json[-1].content = "invalid json"
209
- with pytest.raises(ValueError, match="^Last message is not JSON"):
209
+ with pytest.raises(ValueError, match=r"^Last message is not JSON"):
210
210
  generate_chat_message_toolcall(mock_training_chat_two_step_json)
211
211
 
212
212
 
@@ -536,7 +536,7 @@ def test_generate_huggingface_chat_template_toolcall_invalid_json(
536
536
  ):
537
537
  mock_training_chat_two_step_json[-1].content = "invalid json"
538
538
 
539
- with pytest.raises(ValueError, match="^Last message is not JSON"):
539
+ with pytest.raises(ValueError, match=r"^Last message is not JSON"):
540
540
  generate_huggingface_chat_template_toolcall(mock_training_chat_two_step_json)
541
541
 
542
542
 
@@ -14,11 +14,7 @@ from kiln_ai.adapters.fine_tune.fireworks_finetune import (
14
14
  DeployStatus,
15
15
  FireworksFinetune,
16
16
  )
17
- from kiln_ai.datamodel import (
18
- DatasetSplit,
19
- StructuredOutputMode,
20
- Task,
21
- )
17
+ from kiln_ai.datamodel import DatasetSplit, StructuredOutputMode, Task
22
18
  from kiln_ai.datamodel import Finetune as FinetuneModel
23
19
  from kiln_ai.datamodel.datamodel_enums import ChatStrategy
24
20
  from kiln_ai.datamodel.dataset_split import Train80Test20SplitDefinition
@@ -1053,7 +1049,7 @@ async def test_fetch_all_deployments_invalid_json(fireworks_finetune, mock_api_k
1053
1049
 
1054
1050
  with pytest.raises(
1055
1051
  ValueError,
1056
- match="Invalid response from Fireworks. Expected list of deployments in 'deployments' key",
1052
+ match=r"Invalid response from Fireworks. Expected list of deployments in 'deployments' key",
1057
1053
  ):
1058
1054
  await fireworks_finetune._fetch_all_deployments()
1059
1055
 
@@ -17,11 +17,7 @@ from kiln_ai.adapters.fine_tune.together_finetune import (
17
17
  _pending_statuses,
18
18
  _running_statuses,
19
19
  )
20
- from kiln_ai.datamodel import (
21
- DatasetSplit,
22
- StructuredOutputMode,
23
- Task,
24
- )
20
+ from kiln_ai.datamodel import DatasetSplit, StructuredOutputMode, Task
25
21
  from kiln_ai.datamodel import Finetune as FinetuneModel
26
22
  from kiln_ai.datamodel.dataset_split import Train80Test20SplitDefinition
27
23
  from kiln_ai.utils.config import Config
@@ -105,7 +101,7 @@ def mock_api_key():
105
101
  def test_init_missing_api_key(finetune):
106
102
  with patch.object(Config, "shared") as mock_config:
107
103
  mock_config.return_value.together_api_key = None
108
- with pytest.raises(ValueError, match="Together.ai API key not set"):
104
+ with pytest.raises(ValueError, match=r"Together.ai API key not set"):
109
105
  TogetherFinetune(datamodel=finetune)
110
106
 
111
107
 
@@ -0,0 +1,192 @@
1
+ from enum import Enum
2
+ from typing import List
3
+
4
+ from pydantic import BaseModel, Field
5
+
6
+ from kiln_ai.datamodel.datamodel_enums import ModelProviderName
7
+
8
+
9
class KilnEmbeddingModelFamily(str, Enum):
    """
    Enumeration of supported embedding model families.

    A family groups related models (e.g. both OpenAI text-embedding sizes)
    and is stored on each KilnEmbeddingModel entry below.
    """

    # for bespoke proprietary models, the family tends to be the same
    # as provider name, but it does not have to be
    openai = "openai"
    gemini = "gemini"
    gemma = "gemma"
    nomic = "nomic"
20
+
21
+
22
class EmbeddingModelName(str, Enum):
    """
    Enumeration of specific model versions supported by the system.

    These values are Kiln's canonical identifiers; the provider-specific
    API identifier lives in KilnEmbeddingModelProvider.model_id.
    """

    # Embedding model names are often generic (e.g., "text-embedding"),
    # so we prefix them with the provider name (e.g., "openai_") to ensure
    # uniqueness across providers now and in the future
    openai_text_embedding_3_small = "openai_text_embedding_3_small"
    openai_text_embedding_3_large = "openai_text_embedding_3_large"
    gemini_text_embedding_004 = "gemini_text_embedding_004"
    gemini_embedding_001 = "gemini_embedding_001"
    embedding_gemma_300m = "embedding_gemma_300m"
    nomic_text_embedding_v1_5 = "nomic_text_embedding_v1_5"
36
+
37
+
38
class KilnEmbeddingModelProvider(BaseModel):
    """
    A single provider's offering of an embedding model: the provider-side
    model ID plus capability metadata (dimensions, token limits, etc.).
    """

    # Which provider serves this model (e.g. openai, gemini_api, ollama).
    name: ModelProviderName

    model_id: str = Field(
        description="The model ID for the embedding model. This is the ID used to identify the model in the provider's API.",
    )

    max_input_tokens: int | None = Field(
        default=None,
        description="The maximum number of tokens that can be input to the model.",
    )

    n_dimensions: int = Field(
        description="The number of dimensions in the output embedding.",
    )

    supports_custom_dimensions: bool = Field(
        default=False,
        description="Whether the model supports setting a custom output dimension. If true, the user can set the output dimension in the UI.",
    )

    suggested_for_chunk_embedding: bool = Field(
        default=False,
        description="Whether the model is particularly good for chunk embedding.",
    )

    # Alternate tags the same model may be pulled under in Ollama
    # (e.g. "embeddinggemma" for "embeddinggemma:300m"). None if no aliases.
    ollama_model_aliases: List[str] | None = None
65
+
66
+
67
class KilnEmbeddingModel(BaseModel):
    """
    Configuration for a specific embedding model.

    One entry per model; a model may be available from several providers.
    """

    # Typed as str, but populated from KilnEmbeddingModelFamily below.
    family: str
    # Typed as str, but populated from EmbeddingModelName below; used as the
    # lookup key by get_model_by_name / built_in_embedding_models_from_provider.
    name: str
    # Human-readable display name for the UI.
    friendly_name: str
    # All providers that can serve this model, with per-provider metadata.
    providers: List[KilnEmbeddingModelProvider]
76
+
77
+
78
# Registry of all embedding models Kiln ships support for, grouped by family.
# Looked up by name via get_model_by_name and by (provider, name) via
# built_in_embedding_models_from_provider.
built_in_embedding_models: List[KilnEmbeddingModel] = [
    # openai
    KilnEmbeddingModel(
        family=KilnEmbeddingModelFamily.openai,
        name=EmbeddingModelName.openai_text_embedding_3_small,
        friendly_name="Text Embedding 3 Small",
        providers=[
            KilnEmbeddingModelProvider(
                name=ModelProviderName.openai,
                model_id="text-embedding-3-small",
                n_dimensions=1536,
                max_input_tokens=8192,
                supports_custom_dimensions=True,
            ),
        ],
    ),
    KilnEmbeddingModel(
        family=KilnEmbeddingModelFamily.openai,
        name=EmbeddingModelName.openai_text_embedding_3_large,
        friendly_name="Text Embedding 3 Large",
        providers=[
            KilnEmbeddingModelProvider(
                name=ModelProviderName.openai,
                model_id="text-embedding-3-large",
                n_dimensions=3072,
                max_input_tokens=8192,
                supports_custom_dimensions=True,
                suggested_for_chunk_embedding=True,
            ),
        ],
    ),
    # gemini
    KilnEmbeddingModel(
        family=KilnEmbeddingModelFamily.gemini,
        name=EmbeddingModelName.gemini_text_embedding_004,
        friendly_name="Text Embedding 004",
        providers=[
            KilnEmbeddingModelProvider(
                name=ModelProviderName.gemini_api,
                model_id="text-embedding-004",
                n_dimensions=768,
                max_input_tokens=2048,
            ),
        ],
    ),
    KilnEmbeddingModel(
        family=KilnEmbeddingModelFamily.gemini,
        name=EmbeddingModelName.gemini_embedding_001,
        friendly_name="Gemini Embedding 001",
        providers=[
            KilnEmbeddingModelProvider(
                name=ModelProviderName.gemini_api,
                model_id="gemini-embedding-001",
                n_dimensions=3072,
                max_input_tokens=2048,
                supports_custom_dimensions=True,
                suggested_for_chunk_embedding=True,
            ),
        ],
    ),
    # gemma
    KilnEmbeddingModel(
        family=KilnEmbeddingModelFamily.gemma,
        name=EmbeddingModelName.embedding_gemma_300m,
        friendly_name="Embedding Gemma 300m",
        providers=[
            KilnEmbeddingModelProvider(
                name=ModelProviderName.ollama,
                model_id="embeddinggemma:300m",
                n_dimensions=768,
                max_input_tokens=2048,
                # the model itself does support custom dimensions, but
                # not sure if ollama supports it
                supports_custom_dimensions=False,
                ollama_model_aliases=["embeddinggemma"],
            ),
        ],
    ),
    # nomic
    KilnEmbeddingModel(
        family=KilnEmbeddingModelFamily.nomic,
        name=EmbeddingModelName.nomic_text_embedding_v1_5,
        friendly_name="Nomic Embed Text v1.5",
        providers=[
            KilnEmbeddingModelProvider(
                name=ModelProviderName.ollama,
                model_id="nomic-embed-text:v1.5",
                n_dimensions=768,
                max_input_tokens=2048,
                # the model itself does support custom dimensions, but
                # not sure if ollama supports it
                supports_custom_dimensions=False,
                ollama_model_aliases=["nomic-embed-text"],
            ),
        ],
    ),
]
175
+
176
+
177
def get_model_by_name(name: EmbeddingModelName) -> KilnEmbeddingModel:
    """Return the built-in embedding model with the given canonical name.

    Args:
        name: Canonical Kiln model name to look up.

    Returns:
        The matching KilnEmbeddingModel entry from the registry.

    Raises:
        ValueError: If no built-in model has that name.
    """
    found = next(
        (candidate for candidate in built_in_embedding_models if candidate.name == name),
        None,
    )
    if found is None:
        raise ValueError(f"Embedding model {name} not found in the list of built-in models")
    return found
182
+
183
+
184
def built_in_embedding_models_from_provider(
    provider_name: ModelProviderName, model_name: str
) -> KilnEmbeddingModelProvider | None:
    """Find the provider entry for a (provider, model) pair.

    Args:
        provider_name: The provider to look for (e.g. openai, ollama).
        model_name: Canonical Kiln model name to match against.

    Returns:
        The matching KilnEmbeddingModelProvider, or None if no built-in
        model/provider combination matches.
    """
    for candidate in built_in_embedding_models:
        if candidate.name != model_name:
            continue
        for provider in candidate.providers:
            if provider.name == provider_name:
                return provider
    return None