kodit 0.3.15__py3-none-any.whl → 0.3.16__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of kodit might be problematic.

kodit/_version.py CHANGED
@@ -1,7 +1,14 @@
  # file generated by setuptools-scm
  # don't change, don't track in version control

- __all__ = ["__version__", "__version_tuple__", "version", "version_tuple"]
+ __all__ = [
+     "__version__",
+     "__version_tuple__",
+     "version",
+     "version_tuple",
+     "__commit_id__",
+     "commit_id",
+ ]

  TYPE_CHECKING = False
  if TYPE_CHECKING:
@@ -9,13 +16,19 @@ if TYPE_CHECKING:
      from typing import Union

      VERSION_TUPLE = Tuple[Union[int, str], ...]
+     COMMIT_ID = Union[str, None]
  else:
      VERSION_TUPLE = object
+     COMMIT_ID = object

  version: str
  __version__: str
  __version_tuple__: VERSION_TUPLE
  version_tuple: VERSION_TUPLE
+ commit_id: COMMIT_ID
+ __commit_id__: COMMIT_ID

- __version__ = version = '0.3.15'
- __version_tuple__ = version_tuple = (0, 3, 15)
+ __version__ = version = '0.3.16'
+ __version_tuple__ = version_tuple = (0, 3, 16)
+
+ __commit_id__ = commit_id = None
kodit/application/services/code_indexing_application_service.py CHANGED
@@ -241,27 +241,38 @@ class CodeIndexingApplicationService:
              [x.id for x in final_results]
          )

+         # Create a mapping from snippet ID to search result to handle cases where
+         # some snippet IDs don't exist (e.g., with vectorchord inconsistencies)
+         snippet_map = {
+             result.snippet.id: result
+             for result in search_results
+             if result.snippet.id is not None
+         }
+
+         # Filter final_results to only include IDs that we actually found snippets for
+         valid_final_results = [fr for fr in final_results if fr.id in snippet_map]
+
          return [
              MultiSearchResult(
-                 id=result.snippet.id or 0,
-                 content=result.snippet.original_text(),
+                 id=snippet_map[fr.id].snippet.id or 0,
+                 content=snippet_map[fr.id].snippet.original_text(),
                  original_scores=fr.original_scores,
                  # Enhanced fields
-                 source_uri=str(result.source.working_copy.remote_uri),
+                 source_uri=str(snippet_map[fr.id].source.working_copy.remote_uri),
                  relative_path=str(
-                     result.file.as_path().relative_to(
-                         result.source.working_copy.cloned_path
-                     )
+                     snippet_map[fr.id]
+                     .file.as_path()
+                     .relative_to(snippet_map[fr.id].source.working_copy.cloned_path)
                  ),
                  language=MultiSearchResult.detect_language_from_extension(
-                     result.file.extension()
+                     snippet_map[fr.id].file.extension()
                  ),
-                 authors=[author.name for author in result.authors],
-                 created_at=result.snippet.created_at or datetime.now(UTC),
+                 authors=[author.name for author in snippet_map[fr.id].authors],
+                 created_at=snippet_map[fr.id].snippet.created_at or datetime.now(UTC),
                  # Summary from snippet entity
-                 summary=result.snippet.summary_text(),
+                 summary=snippet_map[fr.id].snippet.summary_text(),
              )
-             for result, fr in zip(search_results, final_results, strict=True)
+             for fr in valid_final_results
          ]

      async def list_snippets(
kodit/config.py CHANGED
@@ -49,6 +49,14 @@ class Endpoint(BaseModel):
      model: str | None = None
      api_key: str | None = None
      num_parallel_tasks: int | None = None
+     socket_path: str | None = Field(
+         default=None,
+         description="Unix socket path for local communication (e.g., /tmp/openai.sock)",
+     )
+     timeout: float | None = Field(
+         default=None,
+         description="Request timeout in seconds (default: 30.0)",
+     )


  class Search(BaseModel):
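
For illustration only (not part of the diff), a minimal sketch of how the two new `Endpoint` fields might be set; the values are placeholders and it assumes the remaining `Endpoint` fields keep their defaults:

    from kodit.config import Endpoint

    # Hypothetical values: route OpenAI-compatible requests over a local Unix socket.
    endpoint = Endpoint(
        model="text-embedding-3-small",
        socket_path="/tmp/openai.sock",  # new in 0.3.16
        timeout=30.0,                    # new in 0.3.16, per-request timeout in seconds
    )
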
kodit/infrastructure/embedding/embedding_factory.py CHANGED
@@ -1,6 +1,5 @@
  """Factory for creating embedding services with DDD architecture."""

- from openai import AsyncOpenAI
  from sqlalchemy.ext.asyncio import AsyncSession

  from kodit.config import AppContext, Endpoint
@@ -48,15 +47,14 @@
      endpoint = _get_endpoint_configuration(app_context)
      if endpoint and endpoint.type == "openai":
          log_event("kodit.embedding", {"provider": "openai"})
+         # Use new httpx-based provider with socket support
          embedding_provider = OpenAIEmbeddingProvider(
-             openai_client=AsyncOpenAI(
-                 api_key=endpoint.api_key or "default",
-                 base_url=endpoint.base_url or "https://api.openai.com/v1",
-                 timeout=10,
-                 max_retries=2,
-             ),
+             api_key=endpoint.api_key,
+             base_url=endpoint.base_url or "https://api.openai.com/v1",
              model_name=endpoint.model or "text-embedding-3-small",
              num_parallel_tasks=endpoint.num_parallel_tasks or OPENAI_NUM_PARALLEL_TASKS,
+             socket_path=endpoint.socket_path,
+             timeout=endpoint.timeout or 30.0,
          )
      else:
          log_event("kodit.embedding", {"provider": "local"})
kodit/infrastructure/embedding/embedding_providers/openai_embedding_provider.py CHANGED
@@ -1,11 +1,12 @@
- """OpenAI embedding provider implementation."""
+ """OpenAI embedding provider implementation using httpx."""

  import asyncio
  from collections.abc import AsyncGenerator
+ from typing import Any

+ import httpx
  import structlog
  import tiktoken
- from openai import AsyncOpenAI
  from tiktoken import Encoding

  from kodit.domain.services.embedding_service import EmbeddingProvider
@@ -22,29 +23,53 @@ OPENAI_NUM_PARALLEL_TASKS = 10 # Semaphore limit for concurrent OpenAI requests


  class OpenAIEmbeddingProvider(EmbeddingProvider):
-     """OpenAI embedding provider that uses OpenAI's embedding API."""
+     """OpenAI embedding provider that uses OpenAI's embedding API via httpx."""

-     def __init__(
+     def __init__(  # noqa: PLR0913
          self,
-         openai_client: AsyncOpenAI,
+         api_key: str | None = None,
+         base_url: str = "https://api.openai.com",
          model_name: str = "text-embedding-3-small",
          num_parallel_tasks: int = OPENAI_NUM_PARALLEL_TASKS,
+         socket_path: str | None = None,
+         timeout: float = 30.0,
      ) -> None:
          """Initialize the OpenAI embedding provider.

          Args:
-             openai_client: The OpenAI client instance
-             model_name: The model name to use for embeddings
+             api_key: The OpenAI API key.
+             base_url: The base URL for the OpenAI API.
+             model_name: The model name to use for embeddings.
+             num_parallel_tasks: Maximum number of concurrent requests.
+             socket_path: Optional Unix socket path for local communication.
+             timeout: Request timeout in seconds.

          """
-         self.openai_client = openai_client
          self.model_name = model_name
          self.num_parallel_tasks = num_parallel_tasks
          self.log = structlog.get_logger(__name__)
+         self.api_key = api_key
+         self.base_url = base_url
+         self.socket_path = socket_path
+         self.timeout = timeout

          # Lazily initialised token encoding
          self._encoding: Encoding | None = None

+         # Create httpx client with optional Unix socket support
+         if socket_path:
+             transport = httpx.AsyncHTTPTransport(uds=socket_path)
+             self.http_client = httpx.AsyncClient(
+                 transport=transport,
+                 base_url="http://localhost",  # Base URL for Unix socket
+                 timeout=timeout,
+             )
+         else:
+             self.http_client = httpx.AsyncClient(
+                 base_url=base_url,
+                 timeout=timeout,
+             )
+
      # ---------------------------------------------------------------------
      # Helper utilities
      # ---------------------------------------------------------------------
@@ -76,6 +101,37 @@ class OpenAIEmbeddingProvider(EmbeddingProvider):
              batch_size=BATCH_SIZE,
          )

+     async def _call_embeddings_api(
+         self, texts: list[str]
+     ) -> dict[str, Any]:
+         """Call the embeddings API using httpx.
+
+         Args:
+             texts: The texts to embed.
+
+         Returns:
+             The API response as a dictionary.
+
+         """
+         headers = {
+             "Content-Type": "application/json",
+         }
+         if self.api_key:
+             headers["Authorization"] = f"Bearer {self.api_key}"
+
+         data = {
+             "model": self.model_name,
+             "input": texts,
+         }
+
+         response = await self.http_client.post(
+             "/v1/embeddings",
+             json=data,
+             headers=headers,
+         )
+         response.raise_for_status()
+         return response.json()
+
      async def embed(
          self, data: list[EmbeddingRequest]
      ) -> AsyncGenerator[list[EmbeddingResponse], None]:
@@ -99,17 +155,17 @@ class OpenAIEmbeddingProvider(EmbeddingProvider):
          ) -> list[EmbeddingResponse]:
              async with sem:
                  try:
-                     response = await self.openai_client.embeddings.create(
-                         model=self.model_name,
-                         input=[item.text for item in batch],
+                     response = await self._call_embeddings_api(
+                         [item.text for item in batch]
                      )
+                     embeddings_data = response.get("data", [])

                      return [
                          EmbeddingResponse(
                              snippet_id=item.snippet_id,
-                             embedding=embedding.embedding,
+                             embedding=emb_data.get("embedding", []),
                          )
-                         for item, embedding in zip(batch, response.data, strict=True)
+                         for item, emb_data in zip(batch, embeddings_data, strict=True)
                      ]
                  except Exception as e:
                      self.log.exception("Error embedding batch", error=str(e))
@@ -119,3 +175,9 @@ class OpenAIEmbeddingProvider(EmbeddingProvider):
          tasks = [_process_batch(batch) for batch in batched_data]
          for task in asyncio.as_completed(tasks):
              yield await task
+
+     async def close(self) -> None:
+         """Close the HTTP client."""
+         if hasattr(self, "http_client"):
+             await self.http_client.aclose()
+
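
For illustration only (not part of the release), a minimal usage sketch of the reworked provider; the socket path and input text are placeholders, and the private `_call_embeddings_api` helper is called directly just to show where requests go:

    import asyncio

    from kodit.infrastructure.embedding.embedding_providers.openai_embedding_provider import (
        OpenAIEmbeddingProvider,
    )

    async def main() -> None:
        # With socket_path set, requests travel over httpx.AsyncHTTPTransport(uds=...)
        # to POST /v1/embeddings; without it, they go to base_url over TCP.
        provider = OpenAIEmbeddingProvider(
            socket_path="/tmp/openai.sock",
            model_name="text-embedding-3-small",
            timeout=30.0,
        )
        try:
            response = await provider._call_embeddings_api(["def hello(): ..."])
            print(len(response.get("data", [])))
        finally:
            await provider.close()

    asyncio.run(main())
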
kodit/infrastructure/enrichment/enrichment_factory.py CHANGED
@@ -45,17 +45,14 @@
      enrichment_provider: EnrichmentProvider | None = None
      if endpoint and endpoint.type == "openai":
          log_event("kodit.enrichment", {"provider": "openai"})
-         from openai import AsyncOpenAI
-
+         # Use new httpx-based provider with socket support
          enrichment_provider = OpenAIEnrichmentProvider(
-             openai_client=AsyncOpenAI(
-                 api_key=endpoint.api_key or "default",
-                 base_url=endpoint.base_url or "https://api.openai.com/v1",
-                 timeout=60,
-                 max_retries=2,
-             ),
+             api_key=endpoint.api_key,
+             base_url=endpoint.base_url or "https://api.openai.com/v1",
              model_name=endpoint.model or "gpt-4o-mini",
              num_parallel_tasks=endpoint.num_parallel_tasks or OPENAI_NUM_PARALLEL_TASKS,
+             socket_path=endpoint.socket_path,
+             timeout=endpoint.timeout or 30.0,
          )
      else:
          log_event("kodit.enrichment", {"provider": "local"})
kodit/infrastructure/enrichment/openai_enrichment_provider.py CHANGED
@@ -1,9 +1,10 @@
- """OpenAI enrichment provider implementation."""
+ """OpenAI enrichment provider implementation using httpx."""

  import asyncio
  from collections.abc import AsyncGenerator
  from typing import Any

+ import httpx
  import structlog

  from kodit.domain.services.enrichment_service import EnrichmentProvider
@@ -19,25 +20,80 @@ OPENAI_NUM_PARALLEL_TASKS = 40


  class OpenAIEnrichmentProvider(EnrichmentProvider):
-     """OpenAI enrichment provider implementation."""
+     """OpenAI enrichment provider implementation using httpx."""

-     def __init__(
+     def __init__(  # noqa: PLR0913
          self,
-         openai_client: Any,
+         api_key: str | None = None,
+         base_url: str = "https://api.openai.com",
          model_name: str = "gpt-4o-mini",
          num_parallel_tasks: int = OPENAI_NUM_PARALLEL_TASKS,
+         socket_path: str | None = None,
+         timeout: float = 30.0,
      ) -> None:
          """Initialize the OpenAI enrichment provider.

          Args:
-             openai_client: The OpenAI client instance.
+             api_key: The OpenAI API key.
+             base_url: The base URL for the OpenAI API.
              model_name: The model name to use for enrichment.
+             num_parallel_tasks: Maximum number of concurrent requests.
+             socket_path: Optional Unix socket path for local communication.
+             timeout: Request timeout in seconds.

          """
          self.log = structlog.get_logger(__name__)
-         self.openai_client = openai_client
          self.model_name = model_name
          self.num_parallel_tasks = num_parallel_tasks
+         self.api_key = api_key
+         self.base_url = base_url
+         self.socket_path = socket_path
+         self.timeout = timeout
+
+         # Create httpx client with optional Unix socket support
+         if socket_path:
+             transport = httpx.AsyncHTTPTransport(uds=socket_path)
+             self.http_client = httpx.AsyncClient(
+                 transport=transport,
+                 base_url="http://localhost",  # Base URL for Unix socket
+                 timeout=timeout,
+             )
+         else:
+             self.http_client = httpx.AsyncClient(
+                 base_url=base_url,
+                 timeout=timeout,
+             )
+
+     async def _call_chat_completion(
+         self, messages: list[dict[str, str]]
+     ) -> dict[str, Any]:
+         """Call the chat completion API using httpx.
+
+         Args:
+             messages: The messages to send to the API.
+
+         Returns:
+             The API response as a dictionary.
+
+         """
+         headers = {
+             "Content-Type": "application/json",
+         }
+         if self.api_key:
+             headers["Authorization"] = f"Bearer {self.api_key}"
+
+         data = {
+             "model": self.model_name,
+             "messages": messages,
+         }
+
+         response = await self.http_client.post(
+             "/v1/chat/completions",
+             json=data,
+             headers=headers,
+         )
+         response.raise_for_status()
+         return response.json()

      async def enrich(
          self, requests: list[EnrichmentRequest]
@@ -66,19 +122,22 @@ class OpenAIEnrichmentProvider(EnrichmentProvider):
                      text="",
                  )
              try:
-                 response = await self.openai_client.chat.completions.create(
-                     model=self.model_name,
-                     messages=[
-                         {
-                             "role": "system",
-                             "content": ENRICHMENT_SYSTEM_PROMPT,
-                         },
-                         {"role": "user", "content": request.text},
-                     ],
+                 messages = [
+                     {
+                         "role": "system",
+                         "content": ENRICHMENT_SYSTEM_PROMPT,
+                     },
+                     {"role": "user", "content": request.text},
+                 ]
+                 response = await self._call_chat_completion(messages)
+                 content = (
+                     response.get("choices", [{}])[0]
+                     .get("message", {})
+                     .get("content", "")
                  )
                  return EnrichmentResponse(
                      snippet_id=request.snippet_id,
-                     text=response.choices[0].message.content or "",
+                     text=content or "",
                  )
              except Exception as e:
                  self.log.exception("Error enriching request", error=str(e))
@@ -93,3 +152,8 @@ class OpenAIEnrichmentProvider(EnrichmentProvider):
          # Process all requests and yield results as they complete
          for task in asyncio.as_completed(tasks):
              yield await task
+
+     async def close(self) -> None:
+         """Close the HTTP client."""
+         if hasattr(self, "http_client"):
+             await self.http_client.aclose()
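
Again for illustration only (not from the package), the enrichment provider follows the same pattern against `/v1/chat/completions`; the socket path and prompt are placeholders:

    import asyncio

    from kodit.infrastructure.enrichment.openai_enrichment_provider import (
        OpenAIEnrichmentProvider,
    )

    async def main() -> None:
        # Same construction pattern as the embedding provider: a Unix socket when
        # socket_path is set, plain base_url over TCP otherwise.
        provider = OpenAIEnrichmentProvider(
            socket_path="/tmp/openai.sock",
            model_name="gpt-4o-mini",
        )
        try:
            response = await provider._call_chat_completion(
                [{"role": "user", "content": "Summarise this snippet."}]
            )
            print(response.get("choices", [{}])[0].get("message", {}).get("content", ""))
        finally:
            await provider.close()

    asyncio.run(main())
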
kodit/infrastructure/sqlalchemy/index_repository.py CHANGED
@@ -597,12 +597,12 @@ class SqlAlchemyIndexRepository(IndexRepository):
          )
          await self._session.execute(stmt)

+         # Delete the index
+         stmt = delete(db_entities.Index).where(db_entities.Index.id == index.id)
+         await self._session.execute(stmt)
+
          # Delete the source
          stmt = delete(db_entities.Source).where(
              db_entities.Source.id == index.source.id
          )
          await self._session.execute(stmt)
-
-         # Delete the index
-         stmt = delete(db_entities.Index).where(db_entities.Index.id == index.id)
-         await self._session.execute(stmt)
kodit/middleware.py CHANGED
@@ -53,6 +53,7 @@ async def logging_middleware(request: Request, call_next: Callable) -> Response:
              "client_host": client_host,
              "client_port": client_port,
          },
+         headers=dict(request.headers),
          network={"client": {"ip": client_host, "port": client_port}},
          duration=process_time,
      )
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: kodit
- Version: 0.3.15
+ Version: 0.3.16
  Summary: Code indexing for better AI code generation
  Project-URL: Homepage, https://docs.helixml.tech/kodit/
  Project-URL: Documentation, https://docs.helixml.tech/kodit/
@@ -1,20 +1,20 @@
  kodit/.gitignore,sha256=ztkjgRwL9Uud1OEi36hGQeDGk3OLK1NfDEO8YqGYy8o,11
  kodit/__init__.py,sha256=aEKHYninUq1yh6jaNfvJBYg-6fenpN132nJt1UU6Jxs,59
- kodit/_version.py,sha256=HSn5cGZLA_vnXREa9sRtSYCA5Ii9CJlQbi1YMzsfUGM,513
+ kodit/_version.py,sha256=xgBNL2JDMxFk3qbERXuWrl5gA3YUpNxRvPxBXL1PUBY,706
  kodit/app.py,sha256=aK6TP-7L3aHCiatNm_SYSw0zU4G3EuOcvdBZ3xPHO_U,3917
  kodit/cli.py,sha256=ZOS_VzCHGjJRZzZpaVR00QXSPIwRXPYu-pTrbEtlyR0,19328
- kodit/config.py,sha256=kuGdl-q5r9b7spt89-1mfVNuoQLqkXQ9-nuJFfRflUU,8940
+ kodit/config.py,sha256=TBmh-Z_aZ8dfyhWDLzufhT30yiUYv82KWWK9kWxlgOw,9223
  kodit/database.py,sha256=kI9yBm4uunsgV4-QeVoCBL0wLzU4kYmYv5qZilGnbPE,1740
  kodit/log.py,sha256=XyuseZk90gUBj1B7np2UO2EW9eE_ApayIpPRvI19KCE,8651
  kodit/mcp.py,sha256=aEcPc8dQiZaR0AswCZZNxcm_rhhUZNsEBimYti0ibSI,7221
- kodit/middleware.py,sha256=xBmC6keFeNsS0y8XUcIKzJzuefkE9bq2UhW1fR5cqxg,2770
+ kodit/middleware.py,sha256=TiwebNpaEmiP7QRuZrfZcCL51IUefQyNLSPuzVyk8UM,2813
  kodit/reporting.py,sha256=icce1ZyiADsA_Qz-mSjgn2H4SSqKuGfLKnw-yrl9nsg,2722
  kodit/application/__init__.py,sha256=mH50wTpgP9dhbKztFsL8Dda9Hi18TSnMVxXtpp4aGOA,35
  kodit/application/factories/__init__.py,sha256=bU5CvEnaBePZ7JbkCOp1MGTNP752bnU2uEqmfy5FdRk,37
  kodit/application/factories/code_indexing_factory.py,sha256=R9f0wsj4-3NJFS5SEt_-OIGR_s_01gJXaL3PkZd8MlU,5911
  kodit/application/services/__init__.py,sha256=p5UQNw-H5sxQvs5Etfte93B3cJ1kKW6DNxK34uFvU1E,38
  kodit/application/services/auto_indexing_service.py,sha256=9eVYSHj0UHO7B5PlQtcOpnLqiNLi-f5u5d8BPb4HM5A,3154
- kodit/application/services/code_indexing_application_service.py,sha256=4jlndefMecf0U235gj2PtzZmWKULK1XOd8SxE4zhZEI,15317
+ kodit/application/services/code_indexing_application_service.py,sha256=nrnd_Md-D0AfNKku7Aqt3YHDbXsBV9f44Z6XsjhiF3E,15877
  kodit/application/services/sync_scheduler.py,sha256=cSjlV46Xl_coq_4EYAvYesHaoOl4jvQXcQQfrZNv8mA,4767
  kodit/domain/__init__.py,sha256=TCpg4Xx-oF4mKV91lo4iXqMEfBT1OoRSYnbG-zVWolA,66
  kodit/domain/entities.py,sha256=EY43R0LOTmsaVsZGS3TWz0Bx5kF3Gm-Knqe6kLZaf9Y,8822
@@ -50,19 +50,19 @@ kodit/infrastructure/cloning/metadata.py,sha256=GD2UnCC1oR82RD0SVUqk9CJOqzXPxhOA
  kodit/infrastructure/cloning/git/__init__.py,sha256=20ePcp0qE6BuLsjsv4KYB1DzKhMIMsPXwEqIEZtjTJs,34
  kodit/infrastructure/cloning/git/working_copy.py,sha256=qYcrR5qP1rhWZiYGMT1p-1Alavi_YvQLXx4MgIV7eXs,2611
  kodit/infrastructure/embedding/__init__.py,sha256=F-8nLlWAerYJ0MOIA4tbXHLan8bW5rRR84vzxx6tRKI,39
- kodit/infrastructure/embedding/embedding_factory.py,sha256=BsePuOAU4UmFP1hbn3WwZkY6dTG-oxlte0l7P_LsuuA,3745
+ kodit/infrastructure/embedding/embedding_factory.py,sha256=8LC2jKf2vx-P-TCh8ZanxwF3hT5PSjWA3vuSR6ggcXk,3731
  kodit/infrastructure/embedding/local_vector_search_repository.py,sha256=ExweyNEL5cP-g3eDhGqZSih7zhdOrop2WdFPPJL-tB4,3505
  kodit/infrastructure/embedding/vectorchord_vector_search_repository.py,sha256=PIoU0HsDlaoXDXnGjOR0LAkAcW4JiE3ymJy_SBhEopc,8030
  kodit/infrastructure/embedding/embedding_providers/__init__.py,sha256=qeZ-oAIAxMl5QqebGtO1lq-tHjl_ucAwOXePklcwwGk,34
  kodit/infrastructure/embedding/embedding_providers/batching.py,sha256=a8CL9PX2VLmbeg616fc_lQzfC4BWTVn32m4SEhXpHxc,3279
  kodit/infrastructure/embedding/embedding_providers/hash_embedding_provider.py,sha256=V6OdCuWyQQOvo3OJGRi-gBKDApIcrELydFg7T696P5s,2257
  kodit/infrastructure/embedding/embedding_providers/local_embedding_provider.py,sha256=9aLV1Zg4KMhYWlGRwgAUtswW4aIabNqbsipWhAn64RI,4133
- kodit/infrastructure/embedding/embedding_providers/openai_embedding_provider.py,sha256=EBNBwme-n9WROSmjgjLYWwGbFc7AvTg6-IZ4fci2XVQ,4404
+ kodit/infrastructure/embedding/embedding_providers/openai_embedding_provider.py,sha256=CE86s8IicieUjIDWn2xzswteHXCzmw1Qz6Kp4GBIcus,6316
  kodit/infrastructure/enrichment/__init__.py,sha256=8acZKNzql8Fs0lceFu9U3KoUrOptRBtVIxr_Iw6lz3Y,40
- kodit/infrastructure/enrichment/enrichment_factory.py,sha256=_JWkna3g8q8hzaPq1NRfZU9Y_sv99xcloM4BCJAWmHw,2039
+ kodit/infrastructure/enrichment/enrichment_factory.py,sha256=jZWGgAvFjEuRUc1oW3iGhgipvX-EnVJZpw6ybzp9NGM,2016
  kodit/infrastructure/enrichment/local_enrichment_provider.py,sha256=7Vlwu1jPJ5KNUn1a51M1P-laUd5YVFJA8EeH6KO-95k,3960
  kodit/infrastructure/enrichment/null_enrichment_provider.py,sha256=DhZkJBnkvXg_XSAs-oKiFnKqYFPnmTl3ikdxrqeEfbc,713
- kodit/infrastructure/enrichment/openai_enrichment_provider.py,sha256=quu9GLGhxGk3ilrg5kPZdGiCTfuQ20JuwM3Grv6qaQE,3332
+ kodit/infrastructure/enrichment/openai_enrichment_provider.py,sha256=IVoP1CqM-_iR164FCfQO9TvqMiIXjaRWpfF0Jp5qXLI,5366
  kodit/infrastructure/git/__init__.py,sha256=0iMosFzudj4_xNIMe2SRbV6l5bWqkjnUsZoFsoZFuM8,33
  kodit/infrastructure/git/git_utils.py,sha256=KERwmhWDR4ooMQKS-nSPxjvdCzoWF9NS6nhdeXyzdtY,571
  kodit/infrastructure/ignore/__init__.py,sha256=VzFv8XOzHmsu0MEGnWVSF6KsgqLBmvHlRqAkT1Xb1MY,36
@@ -77,7 +77,7 @@ kodit/infrastructure/slicing/slicer.py,sha256=GOqJykd00waOTO1WJHyE5KUgJ2RLx2rOQ7
  kodit/infrastructure/sqlalchemy/__init__.py,sha256=UXPMSF_hgWaqr86cawRVqM8XdVNumQyyK5B8B97GnlA,33
  kodit/infrastructure/sqlalchemy/embedding_repository.py,sha256=dC2Wzj_zQiWExwfScE1LAGiiyxPyg0YepwyLOgDwcs4,7905
  kodit/infrastructure/sqlalchemy/entities.py,sha256=Dmh0z-dMI0wfMAPpf62kxU4md6NUH9P5Nx1QSTITOfg,5961
- kodit/infrastructure/sqlalchemy/index_repository.py,sha256=UlDH6Qluuat1T0GaATko29fwQPAaUh2WLWiGurBW42w,23598
+ kodit/infrastructure/sqlalchemy/index_repository.py,sha256=QQNsyLBI09YLUPLguB9qvqPZMxtg1p2twpm7sO_gNlo,23598
  kodit/infrastructure/ui/__init__.py,sha256=CzbLOBwIZ6B6iAHEd1L8cIBydCj-n_kobxJAhz2I9_Y,32
  kodit/infrastructure/ui/progress.py,sha256=SHEUoQA_x36z4nqHrQduVrrWIvFfX6QxAawC7zQ50pw,6433
  kodit/infrastructure/ui/spinner.py,sha256=GcP115qtR0VEnGfMEtsGoAUpRzVGUSfiUXfoJJERngA,2357
@@ -95,8 +95,8 @@ kodit/migrations/versions/c3f5137d30f5_index_all_the_things.py,sha256=r7ukmJ_axX
  kodit/utils/__init__.py,sha256=DPEB1i8evnLF4Ns3huuAYg-0pKBFKUFuiDzOKG9r-sw,33
  kodit/utils/dump_openapi.py,sha256=29VdjHpNSaGAg7RjQw0meq1OLhljCx1ElgBlTC8xoF4,1247
  kodit/utils/path_utils.py,sha256=thK6YGGNvQThdBaCYCCeCvS1L8x-lwl3AoGht2jnjGw,1645
- kodit-0.3.15.dist-info/METADATA,sha256=Ao9egwccbR6lQS0KqkSeS8gym88pE6SzTFVEOhYQnRE,7672
- kodit-0.3.15.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- kodit-0.3.15.dist-info/entry_points.txt,sha256=hoTn-1aKyTItjnY91fnO-rV5uaWQLQ-Vi7V5et2IbHY,40
- kodit-0.3.15.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
- kodit-0.3.15.dist-info/RECORD,,
+ kodit-0.3.16.dist-info/METADATA,sha256=BNHPAIg4yQtpiM0K07pAiVATTVqGzqxCy_yqmu8hszs,7672
+ kodit-0.3.16.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ kodit-0.3.16.dist-info/entry_points.txt,sha256=hoTn-1aKyTItjnY91fnO-rV5uaWQLQ-Vi7V5et2IbHY,40
+ kodit-0.3.16.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ kodit-0.3.16.dist-info/RECORD,,