codeembed 0.1.0__tar.gz → 0.1.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {codeembed-0.1.0 → codeembed-0.1.1}/PKG-INFO +1 -1
- {codeembed-0.1.0 → codeembed-0.1.1}/pyproject.toml +1 -1
- {codeembed-0.1.0 → codeembed-0.1.1}/src/codeembed/llm/openai_adapter.py +10 -4
- {codeembed-0.1.0 → codeembed-0.1.1}/.claude/settings.local.json +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/.github/workflows/ci.yml +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/.github/workflows/release.yml +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/.gitignore +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/.mcp.json +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/.pre-commit-config.yaml +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/.python-version +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/.vscode/mcp.json +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/AGENTS.md +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/CHANGELOG.md +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/LICENSE +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/README.md +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/codeembed.toml +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/scripts/generate_init_files.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/src/codeembed/__init__.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/src/codeembed/bootstrap/__init__.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/src/codeembed/bootstrap/services.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/src/codeembed/cli.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/src/codeembed/config/__init__.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/src/codeembed/config/models.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/src/codeembed/cost_tracking/__init__.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/src/codeembed/cost_tracking/llm_wrapper.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/src/codeembed/cost_tracking/models.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/src/codeembed/delta_computer/__init__.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/src/codeembed/delta_computer/delta_computer.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/src/codeembed/doc_embedder/__init__.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/src/codeembed/doc_embedder/doc_embedder.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/src/codeembed/doc_provider/__init__.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/src/codeembed/doc_provider/base.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/src/codeembed/doc_provider/local_doc_provider.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/src/codeembed/doc_provider/models.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/src/codeembed/doc_search_service/__init__.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/src/codeembed/doc_search_service/doc_search_service.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/src/codeembed/doc_splitters/__init__.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/src/codeembed/doc_splitters/generic_splitter.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/src/codeembed/doc_splitters/models.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/src/codeembed/llm/__init__.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/src/codeembed/llm/base.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/src/codeembed/llm/models.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/src/codeembed/llm/ollama_adapter.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/src/codeembed/mcp_server.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/src/codeembed/setup_logger.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/src/codeembed/utils/__init__.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/src/codeembed/utils/checksum_utils.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/src/codeembed/utils/string_utils.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/src/codeembed/utils/time_utils.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/src/codeembed/vector_db/__init__.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/src/codeembed/vector_db/base.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/src/codeembed/vector_db/chromadb_adapter.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/src/codeembed/vector_db/models.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/tests/test_cost_tracking.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/tests/test_delta_computer.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/tests/test_openai_adapter.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/tests/test_splitter.py +0 -0
- {codeembed-0.1.0 → codeembed-0.1.1}/uv.lock +0 -0
|
@@ -1,6 +1,6 @@ PKG-INFO
 Metadata-Version: 2.4
 Name: codeembed
-Version: 0.1.0
+Version: 0.1.1
 Summary: Embeds your codebase and makes it available for quick LLM lookups via MCP.
 Project-URL: Homepage, https://github.com/robino16/codeembed
 Project-URL: Repository, https://github.com/robino16/codeembed
@@ -1,13 +1,15 @@ src/codeembed/llm/openai_adapter.py
-from typing import List, Optional, Type, TypeVar, cast
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, List, Optional, Type, TypeVar, cast

-from openai import OpenAI
-from openai._types import omit
-from openai.types.chat import ChatCompletionMessageParam
 from pydantic import BaseModel

 from codeembed.llm.base import LLMServiceBase
 from codeembed.llm.models import ChatMessage, LLMResponse, StructuredLLMResponse

+if TYPE_CHECKING:
+    from openai import OpenAI
+
 T = TypeVar("T", bound=BaseModel)

(Note: the first removed line was truncated to "from" in the extracted diff; it is reconstructed here from the corresponding added line — verify against the published sdist.)
@@ -23,6 +25,8 @@ class OpenAILLMService(LLMServiceBase):
         max_tokens: Optional[int] = None,
         temperature: Optional[float] = None,
     ) -> StructuredLLMResponse[T]:
+        from openai._types import omit
+        from openai.types.chat import ChatCompletionMessageParam

         openai_messages = cast(List[ChatCompletionMessageParam], messages)
|
@@ -61,6 +65,8 @@ class OpenAILLMService(LLMServiceBase):
         max_tokens: Optional[int] = None,
         temperature: Optional[float] = None,
     ) -> LLMResponse:
+        from openai._types import omit
+        from openai.types.chat import ChatCompletionMessageParam

         openai_messages = cast(List[ChatCompletionMessageParam], messages)
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|