karaoke-gen 0.76.20__py3-none-any.whl → 0.82.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. karaoke_gen/instrumental_review/static/index.html +179 -16
  2. karaoke_gen/karaoke_gen.py +5 -4
  3. karaoke_gen/lyrics_processor.py +25 -6
  4. {karaoke_gen-0.76.20.dist-info → karaoke_gen-0.82.0.dist-info}/METADATA +79 -3
  5. {karaoke_gen-0.76.20.dist-info → karaoke_gen-0.82.0.dist-info}/RECORD +33 -31
  6. lyrics_transcriber/core/config.py +8 -0
  7. lyrics_transcriber/core/controller.py +43 -1
  8. lyrics_transcriber/correction/agentic/observability/langfuse_integration.py +178 -5
  9. lyrics_transcriber/correction/agentic/prompts/__init__.py +23 -0
  10. lyrics_transcriber/correction/agentic/prompts/classifier.py +66 -6
  11. lyrics_transcriber/correction/agentic/prompts/langfuse_prompts.py +298 -0
  12. lyrics_transcriber/correction/agentic/providers/config.py +7 -0
  13. lyrics_transcriber/correction/agentic/providers/constants.py +1 -1
  14. lyrics_transcriber/correction/agentic/providers/langchain_bridge.py +22 -7
  15. lyrics_transcriber/correction/agentic/providers/model_factory.py +28 -13
  16. lyrics_transcriber/correction/agentic/router.py +18 -13
  17. lyrics_transcriber/correction/corrector.py +1 -45
  18. lyrics_transcriber/frontend/.gitignore +1 -0
  19. lyrics_transcriber/frontend/e2e/agentic-corrections.spec.ts +207 -0
  20. lyrics_transcriber/frontend/e2e/fixtures/agentic-correction-data.json +226 -0
  21. lyrics_transcriber/frontend/package.json +4 -1
  22. lyrics_transcriber/frontend/playwright.config.ts +1 -1
  23. lyrics_transcriber/frontend/src/components/CorrectedWordWithActions.tsx +34 -30
  24. lyrics_transcriber/frontend/src/components/Header.tsx +141 -34
  25. lyrics_transcriber/frontend/src/components/LyricsAnalyzer.tsx +120 -3
  26. lyrics_transcriber/frontend/src/components/TranscriptionView.tsx +11 -1
  27. lyrics_transcriber/frontend/src/components/shared/components/HighlightedText.tsx +122 -35
  28. lyrics_transcriber/frontend/src/components/shared/types.ts +6 -0
  29. lyrics_transcriber/output/generator.py +50 -3
  30. lyrics_transcriber/transcribers/local_whisper.py +260 -0
  31. lyrics_transcriber/correction/handlers/llm.py +0 -293
  32. lyrics_transcriber/correction/handlers/llm_providers.py +0 -60
  33. {karaoke_gen-0.76.20.dist-info → karaoke_gen-0.82.0.dist-info}/WHEEL +0 -0
  34. {karaoke_gen-0.76.20.dist-info → karaoke_gen-0.82.0.dist-info}/entry_points.txt +0 -0
  35. {karaoke_gen-0.76.20.dist-info → karaoke_gen-0.82.0.dist-info}/licenses/LICENSE +0 -0
@@ -1,60 +0,0 @@
1
- from abc import ABC, abstractmethod
2
- from typing import Optional
3
- import logging
4
- from ollama import chat as ollama_chat
5
- import openai
6
-
7
-
8
class LLMProvider(ABC):
    """Abstract base class for LLM providers.

    Concrete providers implement :meth:`generate_response`; the base class
    only supplies a shared logger.
    """

    def __init__(self, logger: Optional[logging.Logger] = None):
        # Fall back to a module-level logger when the caller supplies none.
        if logger is None:
            logger = logging.getLogger(__name__)
        self.logger = logger

    @abstractmethod
    def generate_response(self, prompt: str, **kwargs) -> str:
        """Produce the model's reply for *prompt*.

        Args:
            prompt: The text sent to the underlying model.
            **kwargs: Provider-specific options.

        Returns:
            str: The model's response text.
        """
        ...
26
-
27
-
28
class OllamaProvider(LLMProvider):
    """Provider backed by a locally running Ollama model."""

    def __init__(self, model: str, logger: Optional[logging.Logger] = None):
        super().__init__(logger)
        # Name of the local Ollama model to query.
        self.model = model

    def generate_response(self, prompt: str, **kwargs) -> str:
        """Send *prompt* to the local model and return its JSON-formatted reply.

        NOTE(review): extra **kwargs are accepted for interface parity with the
        base class but are not forwarded to ollama_chat — confirm intentional.
        """
        messages = [{"role": "user", "content": prompt}]
        try:
            # format="json" asks Ollama to constrain output to valid JSON.
            reply = ollama_chat(model=self.model, messages=messages, format="json")
            return reply.message.content
        except Exception as exc:
            self.logger.error(f"Error generating Ollama response: {exc}")
            raise
42
-
43
-
44
class OpenAIProvider(LLMProvider):
    """Provider for OpenAI-compatible APIs (including OpenRouter)."""

    def __init__(self, model: str, api_key: str, base_url: Optional[str] = None, logger: Optional[logging.Logger] = None):
        super().__init__(logger)
        self.model = model
        # base_url=None targets the default OpenAI endpoint; set it to point
        # at an OpenAI-compatible gateway such as OpenRouter.
        self.client = openai.OpenAI(api_key=api_key, base_url=base_url)

    def generate_response(self, prompt: str, **kwargs) -> str:
        """Send *prompt* as a single user message and return the reply text.

        Extra **kwargs are forwarded verbatim to chat.completions.create.
        """
        messages = [{"role": "user", "content": prompt}]
        try:
            # response_format pins the completion to a JSON object.
            completion = self.client.chat.completions.create(
                model=self.model,
                messages=messages,
                response_format={"type": "json_object"},
                **kwargs,
            )
            return completion.choices[0].message.content
        except Exception as exc:
            self.logger.error(f"Error generating OpenAI response: {exc}")
            raise