kodit 0.1.14__tar.gz → 0.1.15__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of kodit might be problematic. See the registry listing for details.

Files changed (111)
  1. kodit-0.1.15/.github/dependabot.yml +10 -0
  2. {kodit-0.1.14 → kodit-0.1.15}/.github/workflows/pypi.yaml +2 -2
  3. {kodit-0.1.14 → kodit-0.1.15}/.github/workflows/test.yaml +11 -9
  4. {kodit-0.1.14 → kodit-0.1.15}/PKG-INFO +2 -1
  5. {kodit-0.1.14 → kodit-0.1.15}/docs/_index.md +59 -0
  6. {kodit-0.1.14 → kodit-0.1.15}/pyproject.toml +1 -0
  7. {kodit-0.1.14 → kodit-0.1.15}/src/kodit/_version.py +2 -2
  8. kodit-0.1.15/src/kodit/bm25/keyword_search_factory.py +17 -0
  9. kodit-0.1.15/src/kodit/bm25/keyword_search_service.py +34 -0
  10. kodit-0.1.14/src/kodit/bm25/bm25.py → kodit-0.1.15/src/kodit/bm25/local_bm25.py +40 -14
  11. kodit-0.1.15/src/kodit/bm25/vectorchord_bm25.py +193 -0
  12. {kodit-0.1.14 → kodit-0.1.15}/src/kodit/cli.py +14 -11
  13. {kodit-0.1.14 → kodit-0.1.15}/src/kodit/config.py +9 -2
  14. {kodit-0.1.14 → kodit-0.1.15}/src/kodit/database.py +4 -2
  15. kodit-0.1.15/src/kodit/embedding/embedding_factory.py +44 -0
  16. kodit-0.1.15/src/kodit/embedding/embedding_provider/__init__.py +1 -0
  17. kodit-0.1.15/src/kodit/embedding/embedding_provider/embedding_provider.py +53 -0
  18. kodit-0.1.15/src/kodit/embedding/embedding_provider/hash_embedding_provider.py +77 -0
  19. kodit-0.1.15/src/kodit/embedding/embedding_provider/local_embedding_provider.py +58 -0
  20. kodit-0.1.15/src/kodit/embedding/embedding_provider/openai_embedding_provider.py +63 -0
  21. kodit-0.1.14/src/kodit/search/search_repository.py → kodit-0.1.15/src/kodit/embedding/embedding_repository.py +61 -33
  22. kodit-0.1.15/src/kodit/embedding/local_vector_search_service.py +50 -0
  23. kodit-0.1.15/src/kodit/embedding/vector_search_service.py +38 -0
  24. kodit-0.1.15/src/kodit/embedding/vectorchord_vector_search_service.py +145 -0
  25. {kodit-0.1.14 → kodit-0.1.15}/src/kodit/indexing/indexing_repository.py +24 -4
  26. {kodit-0.1.14 → kodit-0.1.15}/src/kodit/indexing/indexing_service.py +25 -30
  27. {kodit-0.1.14 → kodit-0.1.15}/src/kodit/mcp.py +7 -3
  28. kodit-0.1.15/src/kodit/search/search_repository.py +57 -0
  29. {kodit-0.1.14 → kodit-0.1.15}/src/kodit/search/search_service.py +12 -24
  30. {kodit-0.1.14 → kodit-0.1.15}/src/kodit/source/source_service.py +9 -3
  31. kodit-0.1.15/src/kodit/util/__init__.py +1 -0
  32. kodit-0.1.15/src/kodit/util/spinner.py +59 -0
  33. {kodit-0.1.14 → kodit-0.1.15}/tests/experiments/embedding.py +3 -3
  34. kodit-0.1.15/tests/kodit/bm25/local_bm25_test.py +155 -0
  35. kodit-0.1.15/tests/kodit/bm25/vectorchord_repository_test.py +182 -0
  36. {kodit-0.1.14 → kodit-0.1.15}/tests/kodit/cli_test.py +16 -4
  37. kodit-0.1.15/tests/kodit/embedding/embedding_provider/local_embedding_provider_test.py +93 -0
  38. kodit-0.1.15/tests/kodit/embedding/embedding_provider/openai_embedding_provider_test.py +138 -0
  39. kodit-0.1.15/tests/kodit/embedding/local_vector_search_service_test.py +143 -0
  40. kodit-0.1.15/tests/kodit/embedding/vectorchord_vector_search_service_test.py +230 -0
  41. {kodit-0.1.14 → kodit-0.1.15}/tests/kodit/indexing/indexing_service_test.py +38 -22
  42. kodit-0.1.15/tests/kodit/search/search_repository_test.py +57 -0
  43. {kodit-0.1.14 → kodit-0.1.15}/tests/kodit/search/search_service_test.py +41 -110
  44. {kodit-0.1.14 → kodit-0.1.15}/uv.lock +26 -0
  45. kodit-0.1.14/src/kodit/embedding/embedding.py +0 -203
  46. kodit-0.1.14/tests/kodit/embedding/embedding_test.py +0 -13
  47. kodit-0.1.14/tests/kodit/search/search_repository_test.py +0 -124
  48. {kodit-0.1.14 → kodit-0.1.15}/.cursor/rules/kodit.mdc +0 -0
  49. {kodit-0.1.14 → kodit-0.1.15}/.github/CODE_OF_CONDUCT.md +0 -0
  50. {kodit-0.1.14 → kodit-0.1.15}/.github/CONTRIBUTING.md +0 -0
  51. {kodit-0.1.14 → kodit-0.1.15}/.github/ISSUE_TEMPLATE/bug_report.md +0 -0
  52. {kodit-0.1.14 → kodit-0.1.15}/.github/ISSUE_TEMPLATE/feature_request.md +0 -0
  53. {kodit-0.1.14 → kodit-0.1.15}/.github/PULL_REQUEST_TEMPLATE.md +0 -0
  54. {kodit-0.1.14 → kodit-0.1.15}/.github/workflows/docker.yaml +0 -0
  55. {kodit-0.1.14 → kodit-0.1.15}/.github/workflows/docs.yaml +0 -0
  56. {kodit-0.1.14 → kodit-0.1.15}/.github/workflows/pypi-test.yaml +0 -0
  57. {kodit-0.1.14 → kodit-0.1.15}/.gitignore +0 -0
  58. {kodit-0.1.14 → kodit-0.1.15}/.python-version +0 -0
  59. {kodit-0.1.14 → kodit-0.1.15}/.vscode/launch.json +0 -0
  60. {kodit-0.1.14 → kodit-0.1.15}/.vscode/settings.json +0 -0
  61. {kodit-0.1.14 → kodit-0.1.15}/Dockerfile +0 -0
  62. {kodit-0.1.14 → kodit-0.1.15}/LICENSE +0 -0
  63. {kodit-0.1.14 → kodit-0.1.15}/README.md +0 -0
  64. {kodit-0.1.14 → kodit-0.1.15}/alembic.ini +0 -0
  65. {kodit-0.1.14 → kodit-0.1.15}/docs/developer/index.md +0 -0
  66. {kodit-0.1.14 → kodit-0.1.15}/src/kodit/.gitignore +0 -0
  67. {kodit-0.1.14 → kodit-0.1.15}/src/kodit/__init__.py +0 -0
  68. {kodit-0.1.14 → kodit-0.1.15}/src/kodit/app.py +0 -0
  69. {kodit-0.1.14 → kodit-0.1.15}/src/kodit/bm25/__init__.py +0 -0
  70. {kodit-0.1.14 → kodit-0.1.15}/src/kodit/embedding/__init__.py +0 -0
  71. {kodit-0.1.14 → kodit-0.1.15}/src/kodit/embedding/embedding_models.py +0 -0
  72. {kodit-0.1.14 → kodit-0.1.15}/src/kodit/indexing/__init__.py +0 -0
  73. {kodit-0.1.14 → kodit-0.1.15}/src/kodit/indexing/indexing_models.py +0 -0
  74. {kodit-0.1.14 → kodit-0.1.15}/src/kodit/log.py +0 -0
  75. {kodit-0.1.14 → kodit-0.1.15}/src/kodit/middleware.py +0 -0
  76. {kodit-0.1.14 → kodit-0.1.15}/src/kodit/migrations/README +0 -0
  77. {kodit-0.1.14 → kodit-0.1.15}/src/kodit/migrations/__init__.py +0 -0
  78. {kodit-0.1.14 → kodit-0.1.15}/src/kodit/migrations/env.py +0 -0
  79. {kodit-0.1.14 → kodit-0.1.15}/src/kodit/migrations/script.py.mako +0 -0
  80. {kodit-0.1.14 → kodit-0.1.15}/src/kodit/migrations/versions/7c3bbc2ab32b_add_embeddings_table.py +0 -0
  81. {kodit-0.1.14 → kodit-0.1.15}/src/kodit/migrations/versions/85155663351e_initial.py +0 -0
  82. {kodit-0.1.14 → kodit-0.1.15}/src/kodit/migrations/versions/__init__.py +0 -0
  83. {kodit-0.1.14 → kodit-0.1.15}/src/kodit/search/__init__.py +0 -0
  84. {kodit-0.1.14 → kodit-0.1.15}/src/kodit/snippets/__init__.py +0 -0
  85. {kodit-0.1.14 → kodit-0.1.15}/src/kodit/snippets/languages/__init__.py +0 -0
  86. {kodit-0.1.14 → kodit-0.1.15}/src/kodit/snippets/languages/csharp.scm +0 -0
  87. {kodit-0.1.14 → kodit-0.1.15}/src/kodit/snippets/languages/python.scm +0 -0
  88. {kodit-0.1.14 → kodit-0.1.15}/src/kodit/snippets/method_snippets.py +0 -0
  89. {kodit-0.1.14 → kodit-0.1.15}/src/kodit/snippets/snippets.py +0 -0
  90. {kodit-0.1.14 → kodit-0.1.15}/src/kodit/source/__init__.py +0 -0
  91. {kodit-0.1.14 → kodit-0.1.15}/src/kodit/source/source_models.py +0 -0
  92. {kodit-0.1.14 → kodit-0.1.15}/src/kodit/source/source_repository.py +0 -0
  93. {kodit-0.1.14 → kodit-0.1.15}/tests/__init__.py +0 -0
  94. {kodit-0.1.14 → kodit-0.1.15}/tests/conftest.py +0 -0
  95. {kodit-0.1.14 → kodit-0.1.15}/tests/experiments/cline-prompt-regression-tests/cline_prompt.txt +0 -0
  96. {kodit-0.1.14 → kodit-0.1.15}/tests/experiments/cline-prompt-regression-tests/cline_prompt_test.py +0 -0
  97. {kodit-0.1.14 → kodit-0.1.15}/tests/kodit/__init__.py +0 -0
  98. {kodit-0.1.14 → kodit-0.1.15}/tests/kodit/e2e.py +0 -0
  99. {kodit-0.1.14 → kodit-0.1.15}/tests/kodit/embedding/__init__.py +0 -0
  100. {kodit-0.1.14 → kodit-0.1.15}/tests/kodit/indexing/__init__.py +0 -0
  101. {kodit-0.1.14 → kodit-0.1.15}/tests/kodit/mcp_test.py +0 -0
  102. {kodit-0.1.14 → kodit-0.1.15}/tests/kodit/search/__init__.py +0 -0
  103. {kodit-0.1.14 → kodit-0.1.15}/tests/kodit/snippets/__init__.py +0 -0
  104. {kodit-0.1.14 → kodit-0.1.15}/tests/kodit/snippets/csharp.cs +0 -0
  105. {kodit-0.1.14 → kodit-0.1.15}/tests/kodit/snippets/detect_language_test.py +0 -0
  106. {kodit-0.1.14 → kodit-0.1.15}/tests/kodit/snippets/method_extraction_test.py +0 -0
  107. {kodit-0.1.14 → kodit-0.1.15}/tests/kodit/snippets/python.py +0 -0
  108. {kodit-0.1.14 → kodit-0.1.15}/tests/kodit/source/__init__.py +0 -0
  109. {kodit-0.1.14 → kodit-0.1.15}/tests/kodit/source/source_service_test.py +0 -0
  110. {kodit-0.1.14 → kodit-0.1.15}/tests/performance/similarity.py +0 -0
  111. {kodit-0.1.14 → kodit-0.1.15}/tests/smoke.sh +0 -0
@@ -0,0 +1,10 @@ (.github/dependabot.yml, new file)
+version: 2
+updates:
+  - package-ecosystem: "uv"
+    directory: "/"
+    schedule:
+      interval: "weekly"
+  - package-ecosystem: "docker"
+    directory: "/"
+    schedule:
+      interval: "weekly"
@@ -42,10 +42,10 @@ jobs:
           if curl -sfL https://pypi.org/packages/source/${REPO_NAME_FIRST_LETTER}/${REPO_NAME}/${REPO_NAME}-${REPO_TAG}.tar.gz > /dev/null; then
             break
           fi
-          sleep 1
+          sleep 5
           count=$((count+1))
           if [ $count -ge 60 ]; then
-            echo "Timeout reached after 60 seconds"
+            echo "Timeout reached after 300 seconds"
            exit 1
          fi
        done
@@ -58,13 +58,13 @@ jobs:
       - name: Install uv
         uses: astral-sh/setup-uv@v5
 
-      - run: uv build --sdist --out-dir test-build
+      - run: uv build --wheel --out-dir test-build
 
       - name: Upload built package
         uses: actions/upload-artifact@v4
         with:
           name: built-package
-          path: test-build/*.tar.gz
+          path: test-build/*.whl
 
   test-package:
     needs: build-package
@@ -88,14 +88,16 @@ jobs:
         with:
           python-version: 3.12
 
-      - name: Extract path to sdist
-        id: sdist_path
-        run: echo "sdist_path=$(ls test-build/*.tar.gz)" >> $GITHUB_OUTPUT
+      - name: Install uv
+        uses: astral-sh/setup-uv@v5
 
-      - name: Install sdist
-        uses: threeal/pipx-install-action@v1.0.0
-        with:
-          packages: "${{ steps.sdist_path.outputs.sdist_path }}"
+      - name: Extract path to wheel
+        id: wheel_path
+        run: echo "wheel_path=$(ls test-build/*.whl)" >> $GITHUB_OUTPUT
+
+      # This is equivalent to `pipx install --include-deps, but faster
+      - name: Install wheel
+        run: uv tool install "${{ steps.wheel_path.outputs.wheel_path }}"
 
       - name: Run simple version command test
         run: kodit version
@@ -1,6 +1,6 @@ (PKG-INFO)
 Metadata-Version: 2.4
 Name: kodit
-Version: 0.1.14
+Version: 0.1.15
 Summary: Code indexing for better AI code generation
 Project-URL: Homepage, https://docs.helixml.tech/kodit/
 Project-URL: Documentation, https://docs.helixml.tech/kodit/
@@ -21,6 +21,7 @@ Requires-Dist: aiofiles>=24.1.0
 Requires-Dist: aiosqlite>=0.20.0
 Requires-Dist: alembic>=1.15.2
 Requires-Dist: asgi-correlation-id>=4.3.4
+Requires-Dist: asyncpg>=0.30.0
 Requires-Dist: better-exceptions>=0.3.3
 Requires-Dist: bm25s[core]>=0.2.12
 Requires-Dist: click>=8.1.8
@@ -188,6 +188,65 @@ DEFAULT_ENDPOINT_BASE_URL=https://api.openai.com/v1 (docs/_index.md)
 DEFAULT_ENDPOINT_API_KEY=sk-xxxxxx
 ```
 
+### Database
+
+Out of the box Kodit uses a local sqlite file to make it easier for users to get
+started. But for production use, it's likely you will want to use a database that has
+dedicated semantic and keyword search capabilities for reduced latency.
+
+#### VectorChord Database
+
+[VectorChord](https://github.com/tensorchord/VectorChord) is an optimized PostgreSQL
+extension that provides both vector and BM25 search. (See [Search](#search))
+
+Start a container with:
+
+```sh
+docker run \
+  --name kodit-vectorchord \
+  -e POSTGRES_DB=kodit \
+  -e POSTGRES_PASSWORD=mysecretpassword \
+  -p 5432:5432 \
+  -d tensorchord/vchord-suite:pg17-20250601
+```
+
+{{< warn >}}
+Kodit assumes the database exists. In the above example I'm abusing the POSTGRES_DB
+environmental variable from the [Postgres Docker
+container](https://hub.docker.com/_/postgres/) to create the database for me. In
+production setups, please create a database yourself.
+{{< /warn >}}
+
+Then update your `.env` file to include:
+
+```env
+DB_URL=postgresql+asyncpg://postgres:mysecretpassword@localhost:5432/kodit
+```
+
+### Search
+
+#### Default Search Provider
+
+By default, Kodit will use built-in implementations of BM25 and similarity search to
+improve the out of the box experience. If you are using Kodit in a professional
+capacity, it is likely that the search latency is too high to provide a good developer
+experience.
+
+Instead, you should use the features included in your database. The settings provided
+here will cause all search functionality to use this database by default. You can
+override the database used for each search type if you wish. (Coming soon!)
+
+##### VectorChord Search
+
+Configure Kodit to use a [VectorChord database](#vectorchord-database).
+
+Then update your `.env` file to include:
+
+```env
+DB_URL=postgresql+asyncpg://postgres:mysecretpassword@localhost:5432/kodit
+DEFAULT_SEARCH_PROVIDER=vectorchord
+```
+
 ## Managing Kodit
 
 There is limited management functionality at this time. To delete indexes you must
@@ -48,6 +48,7 @@ dependencies = [ (pyproject.toml)
     "hf-xet>=1.1.2",
     "openai>=1.82.0",
     "tiktoken>=0.9.0",
+    "asyncpg>=0.30.0",
 ]
 
 [dependency-groups]
@@ -17,5 +17,5 @@ __version__: str (src/kodit/_version.py)
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE
 
-__version__ = version = '0.1.14'
-__version_tuple__ = version_tuple = (0, 1, 14)
+__version__ = version = '0.1.15'
+__version_tuple__ = version_tuple = (0, 1, 15)
@@ -0,0 +1,17 @@ (src/kodit/bm25/keyword_search_factory.py, new file)
+"""Factory for creating keyword search providers."""
+
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from kodit.bm25.keyword_search_service import KeywordSearchProvider
+from kodit.bm25.local_bm25 import BM25Service
+from kodit.bm25.vectorchord_bm25 import VectorChordBM25
+from kodit.config import AppContext
+
+
+def keyword_search_factory(
+    app_context: AppContext, session: AsyncSession
+) -> KeywordSearchProvider:
+    """Create a keyword search provider."""
+    if app_context.default_search.provider == "vectorchord":
+        return VectorChordBM25(session=session)
+    return BM25Service(data_dir=app_context.get_data_dir())
@@ -0,0 +1,34 @@ (src/kodit/bm25/keyword_search_service.py, new file)
+"""Keyword search service."""
+
+from abc import ABC, abstractmethod
+from typing import NamedTuple
+
+
+class BM25Document(NamedTuple):
+    """BM25 document."""
+
+    snippet_id: int
+    text: str
+
+
+class BM25Result(NamedTuple):
+    """BM25 result."""
+
+    snippet_id: int
+    score: float
+
+
+class KeywordSearchProvider(ABC):
+    """Interface for keyword search providers."""
+
+    @abstractmethod
+    async def index(self, corpus: list[BM25Document]) -> None:
+        """Index a new corpus."""
+
+    @abstractmethod
+    async def retrieve(self, query: str, top_k: int = 2) -> list[BM25Result]:
+        """Retrieve from the index."""
+
+    @abstractmethod
+    async def delete(self, snippet_ids: list[int]) -> None:
+        """Delete documents from the index."""
@@ -1,23 +1,36 @@ (src/kodit/bm25/bm25.py → src/kodit/bm25/local_bm25.py)
-"""BM25 service."""
+"""Locally hosted BM25 service primarily for use with SQLite."""
 
+import json
 from pathlib import Path
 
+import aiofiles
 import bm25s
 import Stemmer
 import structlog
 from bm25s.tokenization import Tokenized
 
+from kodit.bm25.keyword_search_service import (
+    BM25Document,
+    BM25Result,
+    KeywordSearchProvider,
+)
 
-class BM25Service:
-    """Service for BM25."""
+SNIPPET_IDS_FILE = "snippet_ids.jsonl"
+
+
+class BM25Service(KeywordSearchProvider):
+    """LocalBM25 service."""
 
     def __init__(self, data_dir: Path) -> None:
         """Initialize the BM25 service."""
         self.log = structlog.get_logger(__name__)
         self.index_path = data_dir / "bm25s_index"
+        self.snippet_ids: list[int] = []
        try:
            self.log.debug("Loading BM25 index")
            self.retriever = bm25s.BM25.load(self.index_path, mmap=True)
+            with Path(self.index_path / SNIPPET_IDS_FILE).open() as f:
+                self.snippet_ids = json.load(f)
        except FileNotFoundError:
            self.log.debug("BM25 index not found, creating new index")
            self.retriever = bm25s.BM25()
@@ -33,28 +46,34 @@ class BM25Service:
             show_progress=True,
         )
 
-    def index(self, corpus: list[str]) -> None:
+    async def index(self, corpus: list[BM25Document]) -> None:
         """Index a new corpus."""
         self.log.debug("Indexing corpus")
-        vocab = self._tokenize(corpus)
+        vocab = self._tokenize([doc.text for doc in corpus])
         self.retriever = bm25s.BM25()
         self.retriever.index(vocab, show_progress=False)
         self.retriever.save(self.index_path)
+        self.snippet_ids = self.snippet_ids + [doc.snippet_id for doc in corpus]
+        async with aiofiles.open(self.index_path / SNIPPET_IDS_FILE, "w") as f:
+            await f.write(json.dumps(self.snippet_ids))
 
-    def retrieve(
-        self, doc_ids: list[int], query: str, top_k: int = 2
-    ) -> list[tuple[int, float]]:
+    async def retrieve(self, query: str, top_k: int = 2) -> list[BM25Result]:
         """Retrieve from the index."""
         if top_k == 0:
             self.log.warning("Top k is 0, returning empty list")
             return []
-        if len(doc_ids) == 0:
-            self.log.warning("No documents to retrieve from, returning empty list")
+
+        # Get the number of documents in the index
+        num_docs = self.retriever.scores["num_docs"]
+        if num_docs == 0:
             return []
 
-        top_k = min(top_k, len(self.retriever.scores))
+        # Adjust top_k to not exceed corpus size
+        top_k = min(top_k, num_docs)
         self.log.debug(
-            "Retrieving from index", query=query, top_k=top_k, num_docs=len(doc_ids)
+            "Retrieving from index",
+            query=query,
+            top_k=top_k,
         )
 
         query_tokens = self._tokenize([query])
@@ -62,10 +81,17 @@ class BM25Service:
         self.log.debug("Query tokens", query_tokens=query_tokens)
 
         results, scores = self.retriever.retrieve(
-            query_tokens=query_tokens, corpus=doc_ids, k=top_k
+            query_tokens=query_tokens,
+            corpus=self.snippet_ids,
+            k=top_k,
         )
         self.log.debug("Raw results", results=results, scores=scores)
         return [
-            (int(result), float(score))
+            BM25Result(snippet_id=int(result), score=float(score))
             for result, score in zip(results[0], scores[0], strict=False)
+            if score > 0.0
         ]
+
+    async def delete(self, snippet_ids: list[int]) -> None:  # noqa: ARG002
+        """Delete documents from the index."""
+        self.log.warning("Deletion not supported for local BM25 index")
@@ -0,0 +1,193 @@ (src/kodit/bm25/vectorchord_bm25.py, new file)
+"""VectorChord repository for document operations."""
+
+from typing import Any
+
+from sqlalchemy import Result, TextClause, bindparam, text
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from kodit.bm25.keyword_search_service import (
+    BM25Document,
+    BM25Result,
+    KeywordSearchProvider,
+)
+
+TABLE_NAME = "vectorchord_bm25_documents"
+INDEX_NAME = f"{TABLE_NAME}_idx"
+TOKENIZER_NAME = "bert"
+
+# SQL statements
+CREATE_VCHORD_EXTENSION = "CREATE EXTENSION IF NOT EXISTS vchord CASCADE;"
+CREATE_PG_TOKENIZER = "CREATE EXTENSION IF NOT EXISTS pg_tokenizer CASCADE;"
+CREATE_VCHORD_BM25 = "CREATE EXTENSION IF NOT EXISTS vchord_bm25 CASCADE;"
+SET_SEARCH_PATH = """
+SET search_path TO
+"$user", public, bm25_catalog, pg_catalog, information_schema, tokenizer_catalog;
+"""
+CREATE_BM25_TABLE = f"""
+CREATE TABLE IF NOT EXISTS {TABLE_NAME} (
+    id SERIAL PRIMARY KEY,
+    snippet_id BIGINT NOT NULL,
+    passage TEXT NOT NULL,
+    embedding bm25vector,
+    UNIQUE(snippet_id)
+)
+"""
+
+CREATE_BM25_INDEX = f"""
+CREATE INDEX IF NOT EXISTS {INDEX_NAME}
+ON {TABLE_NAME}
+USING bm25 (embedding bm25_ops)
+"""
+TOKENIZER_NAME_CHECK_QUERY = (
+    f"SELECT 1 FROM tokenizer_catalog.tokenizer WHERE name = '{TOKENIZER_NAME}'"  # noqa: S608
+)
+LOAD_TOKENIZER = """
+SELECT create_tokenizer('bert', $$
+model = "llmlingua2"
+pre_tokenizer = "unicode_segmentation"  # Unicode Standard Annex #29
+[[character_filters]]
+to_lowercase = {}  # convert all characters to lowercase
+[[character_filters]]
+unicode_normalization = "nfkd"  # Unicode Normalization Form KD
+[[token_filters]]
+skip_non_alphanumeric = {}  # remove non-alphanumeric tokens
+[[token_filters]]
+stopwords = "nltk_english"  # remove stopwords using the nltk dictionary
+[[token_filters]]
+stemmer = "english_porter2"  # stem tokens using the English Porter2 stemmer
+$$)
+"""
+INSERT_QUERY = f"""
+INSERT INTO {TABLE_NAME} (snippet_id, passage)
+VALUES (:snippet_id, :passage)
+ON CONFLICT (snippet_id) DO UPDATE
+SET passage = EXCLUDED.passage
+"""  # noqa: S608
+UPDATE_QUERY = f"""
+UPDATE {TABLE_NAME}
+SET embedding = tokenize(passage, '{TOKENIZER_NAME}')
+"""  # noqa: S608
+SEARCH_QUERY = f"""
+SELECT
+    snippet_id,
+    embedding <&>
+        to_bm25query('{INDEX_NAME}', tokenize(:query_text, '{TOKENIZER_NAME}'))
+        AS bm25_score
+FROM {TABLE_NAME}
+ORDER BY bm25_score
+LIMIT :limit
+"""  # noqa: S608
+DELETE_QUERY = f"""
+DELETE FROM {TABLE_NAME}
+WHERE snippet_id IN :snippet_ids
+"""  # noqa: S608
+
+
+class VectorChordBM25(KeywordSearchProvider):
+    """BM25 using VectorChord."""
+
+    def __init__(
+        self,
+        session: AsyncSession,
+    ) -> None:
+        """Initialize the VectorChord BM25."""
+        self.__session = session
+        self._initialized = False
+
+    async def _initialize(self) -> None:
+        """Initialize the VectorChord environment."""
+        try:
+            await self._create_extensions()
+            await self._create_tokenizer_if_not_exists()
+            await self._create_tables()
+            self._initialized = True
+        except Exception as e:
+            msg = f"Failed to initialize VectorChord repository: {e}"
+            raise RuntimeError(msg) from e
+
+    async def _create_extensions(self) -> None:
+        """Create the necessary extensions."""
+        await self.__session.execute(text(CREATE_VCHORD_EXTENSION))
+        await self.__session.execute(text(CREATE_PG_TOKENIZER))
+        await self.__session.execute(text(CREATE_VCHORD_BM25))
+        await self.__session.execute(text(SET_SEARCH_PATH))
+        await self._commit()
+
+    async def _create_tokenizer_if_not_exists(self) -> None:
+        """Create the tokenizer if it doesn't exist."""
+        # Check if tokenizer exists in the catalog
+        result = await self.__session.execute(text(TOKENIZER_NAME_CHECK_QUERY))
+        if result.scalar_one_or_none() is None:
+            # Tokenizer doesn't exist, create it
+            await self.__session.execute(text(LOAD_TOKENIZER))
+        await self._commit()
+
+    async def _create_tables(self) -> None:
+        """Create the necessary tables in the correct order."""
+        await self.__session.execute(text(CREATE_BM25_TABLE))
+        await self.__session.execute(text(CREATE_BM25_INDEX))
+        await self._commit()
+
+    async def _execute(
+        self, query: TextClause, param_list: list[Any] | dict[str, Any] | None = None
+    ) -> Result:
+        """Execute a query."""
+        if not self._initialized:
+            await self._initialize()
+        return await self.__session.execute(query, param_list)
+
+    async def _commit(self) -> None:
+        """Commit the session."""
+        await self.__session.commit()
+
+    async def index(self, corpus: list[BM25Document]) -> None:
+        """Index a new corpus."""
+        # Filter out any documents that don't have a snippet_id or text
+        corpus = [
+            doc
+            for doc in corpus
+            if doc.snippet_id is not None and doc.text is not None and doc.text != ""
+        ]
+
+        if not corpus:
+            return
+
+        # Execute inserts
+        await self._execute(
+            text(INSERT_QUERY),
+            [{"snippet_id": doc.snippet_id, "passage": doc.text} for doc in corpus],
+        )
+
+        # Tokenize the new documents with schema qualification
+        await self._execute(text(UPDATE_QUERY))
+        await self._commit()
+
+    async def delete(self, snippet_ids: list[int]) -> None:
+        """Delete documents from the index."""
+        await self._execute(
+            text(DELETE_QUERY).bindparams(bindparam("snippet_ids", expanding=True)),
+            {"snippet_ids": snippet_ids},
+        )
+        await self._commit()
+
+    async def retrieve(
+        self,
+        query: str,
+        top_k: int = 10,
+    ) -> list[BM25Result]:
+        """Search documents using BM25 similarity."""
+        if not query or query == "":
+            return []
+
+        sql = text(SEARCH_QUERY).bindparams(query_text=query, limit=top_k)
+        try:
+            result = await self._execute(sql)
+            rows = result.mappings().all()
+
+            return [
+                BM25Result(snippet_id=row["snippet_id"], score=row["bm25_score"])
+                for row in rows
+            ]
+        except Exception as e:
+            msg = f"Error during BM25 search: {e}"
+            raise RuntimeError(msg) from e
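
Taken together with the new asyncpg dependency and the `DB_URL` documented earlier, this backend can be driven directly against a VectorChord container. The following is a hedged sketch rather than packaged code: the engine/session wiring and the sample document are assumptions, while the class names and connection string come from this release.

```python
# Hedged sketch: drive VectorChordBM25 against the VectorChord container from the docs.
import asyncio

from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine

from kodit.bm25.keyword_search_service import BM25Document
from kodit.bm25.vectorchord_bm25 import VectorChordBM25


async def main() -> None:
    # Connection string mirrors the DB_URL example from docs/_index.md.
    engine = create_async_engine(
        "postgresql+asyncpg://postgres:mysecretpassword@localhost:5432/kodit"
    )
    async with async_sessionmaker(engine)() as session:
        bm25 = VectorChordBM25(session=session)  # extensions and tables are created lazily
        await bm25.index([BM25Document(snippet_id=1, text="binary search in python")])
        print(await bm25.retrieve("binary search", top_k=5))


asyncio.run(main())
```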
@@ -10,12 +10,13 @@ import uvicorn (src/kodit/cli.py)
 from pytable_formatter import Cell, Table
 from sqlalchemy.ext.asyncio import AsyncSession
 
+from kodit.bm25.keyword_search_factory import keyword_search_factory
 from kodit.config import (
     AppContext,
     with_app_context,
     with_session,
 )
-from kodit.embedding.embedding import embedding_factory
+from kodit.embedding.embedding_factory import embedding_factory
 from kodit.indexing.indexing_repository import IndexRepository
 from kodit.indexing.indexing_service import IndexService
 from kodit.log import configure_logging, configure_telemetry, log_event
@@ -68,10 +69,12 @@ async def index(
     source_service = SourceService(app_context.get_clone_dir(), source_repository)
     repository = IndexRepository(session)
     service = IndexService(
-        repository,
-        source_service,
-        app_context.get_data_dir(),
-        embedding_service=embedding_factory(app_context.get_default_openai_client()),
+        repository=repository,
+        source_service=source_service,
+        keyword_search_provider=keyword_search_factory(app_context, session),
+        vector_search_service=embedding_factory(
+            app_context=app_context, session=session
+        ),
     )
 
     if not sources:
@@ -131,8 +134,8 @@ async def code(
     repository = SearchRepository(session)
     service = SearchService(
         repository,
-        app_context.get_data_dir(),
-        embedding_service=embedding_factory(app_context.get_default_openai_client()),
+        keyword_search_provider=keyword_search_factory(app_context, session),
+        embedding_service=embedding_factory(app_context=app_context, session=session),
     )
 
     snippets = await service.search(SearchRequest(code_query=query, top_k=top_k))
@@ -164,8 +167,8 @@ async def keyword(
     repository = SearchRepository(session)
     service = SearchService(
         repository,
-        app_context.get_data_dir(),
-        embedding_service=embedding_factory(app_context.get_default_openai_client()),
+        keyword_search_provider=keyword_search_factory(app_context, session),
+        embedding_service=embedding_factory(app_context=app_context, session=session),
    )
 
     snippets = await service.search(SearchRequest(keywords=keywords, top_k=top_k))
@@ -199,8 +202,8 @@ async def hybrid(
     repository = SearchRepository(session)
     service = SearchService(
         repository,
-        app_context.get_data_dir(),
-        embedding_service=embedding_factory(app_context.get_default_openai_client()),
+        keyword_search_provider=keyword_search_factory(app_context, session),
+        embedding_service=embedding_factory(app_context=app_context, session=session),
     )
 
     # Parse keywords into a list of strings
@@ -12,14 +12,12 @@ from pydantic import BaseModel, Field (src/kodit/config.py)
 from pydantic_settings import BaseSettings, SettingsConfigDict
 
 from kodit.database import Database
-from kodit.embedding.embedding import TINY
 
 DEFAULT_BASE_DIR = Path.home() / ".kodit"
 DEFAULT_DB_URL = f"sqlite+aiosqlite:///{DEFAULT_BASE_DIR}/kodit.db"
 DEFAULT_LOG_LEVEL = "INFO"
 DEFAULT_LOG_FORMAT = "pretty"
 DEFAULT_DISABLE_TELEMETRY = False
-DEFAULT_EMBEDDING_MODEL_NAME = TINY
 T = TypeVar("T")
 
 
@@ -31,6 +29,12 @@ class Endpoint(BaseModel):
     base_url: str | None = None
 
 
+class Search(BaseModel):
+    """Search provides configuration for a search engine."""
+
+    provider: Literal["sqlite", "vectorchord"] = Field(default="sqlite")
+
+
 class AppContext(BaseSettings):
     """Global context for the kodit project. Provides a shared state for the app."""
 
@@ -57,6 +61,9 @@ class AppContext(BaseSettings):
             "(can be overridden by task-specific configuration)."
         ),
     )
+    default_search: Search = Field(
+        default=Search(),
+    )
     _db: Database | None = None
 
     def model_post_init(self, _: Any) -> None:
@@ -27,10 +27,12 @@ class CommonMixin: (src/kodit/database.py)
 
     id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)
     created_at: Mapped[datetime] = mapped_column(
-        DateTime, default=lambda: datetime.now(UTC)
+        DateTime(timezone=True), default=lambda: datetime.now(UTC)
     )
     updated_at: Mapped[datetime] = mapped_column(
-        DateTime, default=lambda: datetime.now(UTC), onupdate=lambda: datetime.now(UTC)
+        DateTime(timezone=True),
+        default=lambda: datetime.now(UTC),
+        onupdate=lambda: datetime.now(UTC),
     )
 
 
@@ -0,0 +1,44 @@ (src/kodit/embedding/embedding_factory.py, new file)
+"""Embedding service."""
+
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from kodit.config import AppContext
+from kodit.embedding.embedding_provider.local_embedding_provider import (
+    CODE,
+    LocalEmbeddingProvider,
+)
+from kodit.embedding.embedding_provider.openai_embedding_provider import (
+    OpenAIEmbeddingProvider,
+)
+from kodit.embedding.embedding_repository import EmbeddingRepository
+from kodit.embedding.local_vector_search_service import LocalVectorSearchService
+from kodit.embedding.vector_search_service import (
+    VectorSearchService,
+)
+from kodit.embedding.vectorchord_vector_search_service import (
+    VectorChordVectorSearchService,
+)
+
+
+def embedding_factory(
+    app_context: AppContext, session: AsyncSession
+) -> VectorSearchService:
+    """Create an embedding service."""
+    embedding_repository = EmbeddingRepository(session=session)
+    embedding_provider = None
+    openai_client = app_context.get_default_openai_client()
+    if openai_client is not None:
+        embedding_provider = OpenAIEmbeddingProvider(openai_client=openai_client)
+    else:
+        embedding_provider = LocalEmbeddingProvider(CODE)
+
+    if app_context.default_search.provider == "vectorchord":
+        return VectorChordVectorSearchService(session, embedding_provider)
+    if app_context.default_search.provider == "sqlite":
+        return LocalVectorSearchService(
+            embedding_repository=embedding_repository,
+            embedding_provider=embedding_provider,
+        )
+
+    msg = f"Invalid semantic search provider: {app_context.default_search.provider}"
+    raise ValueError(msg)
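
Both factories key off the new `default_search.provider` setting, so the same wiring code can switch between the SQLite-backed defaults and VectorChord. A minimal sketch under those assumptions (the `AsyncSession` is supplied by the caller, and constructing `AppContext` directly is illustrative only):

```python
# Hedged sketch: how provider selection flows through the two factories in 0.1.15.
from sqlalchemy.ext.asyncio import AsyncSession

from kodit.bm25.keyword_search_factory import keyword_search_factory
from kodit.config import AppContext, Search
from kodit.embedding.embedding_factory import embedding_factory


def build_search_backends(session: AsyncSession):
    # Default configuration: local BM25 index plus SQLite-backed vector search.
    sqlite_ctx = AppContext()
    sqlite_backends = (
        keyword_search_factory(sqlite_ctx, session),
        embedding_factory(app_context=sqlite_ctx, session=session),
    )

    # VectorChord configuration: keyword and vector search both run in PostgreSQL.
    vchord_ctx = AppContext(default_search=Search(provider="vectorchord"))
    vchord_backends = (
        keyword_search_factory(vchord_ctx, session),
        embedding_factory(app_context=vchord_ctx, session=session),
    )

    return sqlite_backends, vchord_backends
```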