bear-utils 0.7.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (83)
  1. bear_utils/__init__.py +13 -0
  2. bear_utils/ai/__init__.py +30 -0
  3. bear_utils/ai/ai_helpers/__init__.py +130 -0
  4. bear_utils/ai/ai_helpers/_common.py +19 -0
  5. bear_utils/ai/ai_helpers/_config.py +24 -0
  6. bear_utils/ai/ai_helpers/_parsers.py +188 -0
  7. bear_utils/ai/ai_helpers/_types.py +20 -0
  8. bear_utils/cache/__init__.py +119 -0
  9. bear_utils/cli/__init__.py +4 -0
  10. bear_utils/cli/commands.py +59 -0
  11. bear_utils/cli/prompt_helpers.py +166 -0
  12. bear_utils/cli/shell/__init__.py +0 -0
  13. bear_utils/cli/shell/_base_command.py +74 -0
  14. bear_utils/cli/shell/_base_shell.py +390 -0
  15. bear_utils/cli/shell/_common.py +19 -0
  16. bear_utils/config/__init__.py +11 -0
  17. bear_utils/config/config_manager.py +92 -0
  18. bear_utils/config/dir_manager.py +64 -0
  19. bear_utils/config/settings_manager.py +232 -0
  20. bear_utils/constants/__init__.py +16 -0
  21. bear_utils/constants/_exceptions.py +3 -0
  22. bear_utils/constants/_lazy_typing.py +15 -0
  23. bear_utils/constants/date_related.py +36 -0
  24. bear_utils/constants/time_related.py +22 -0
  25. bear_utils/database/__init__.py +6 -0
  26. bear_utils/database/_db_manager.py +104 -0
  27. bear_utils/events/__init__.py +16 -0
  28. bear_utils/events/events_class.py +52 -0
  29. bear_utils/events/events_module.py +65 -0
  30. bear_utils/extras/__init__.py +17 -0
  31. bear_utils/extras/_async_helpers.py +15 -0
  32. bear_utils/extras/_tools.py +178 -0
  33. bear_utils/extras/platform_utils.py +53 -0
  34. bear_utils/extras/wrappers/__init__.py +0 -0
  35. bear_utils/extras/wrappers/add_methods.py +98 -0
  36. bear_utils/files/__init__.py +4 -0
  37. bear_utils/files/file_handlers/__init__.py +3 -0
  38. bear_utils/files/file_handlers/_base_file_handler.py +93 -0
  39. bear_utils/files/file_handlers/file_handler_factory.py +278 -0
  40. bear_utils/files/file_handlers/json_file_handler.py +44 -0
  41. bear_utils/files/file_handlers/log_file_handler.py +33 -0
  42. bear_utils/files/file_handlers/txt_file_handler.py +34 -0
  43. bear_utils/files/file_handlers/yaml_file_handler.py +57 -0
  44. bear_utils/files/ignore_parser.py +298 -0
  45. bear_utils/graphics/__init__.py +4 -0
  46. bear_utils/graphics/bear_gradient.py +140 -0
  47. bear_utils/graphics/image_helpers.py +39 -0
  48. bear_utils/gui/__init__.py +3 -0
  49. bear_utils/gui/gui_tools/__init__.py +5 -0
  50. bear_utils/gui/gui_tools/_settings.py +37 -0
  51. bear_utils/gui/gui_tools/_types.py +12 -0
  52. bear_utils/gui/gui_tools/qt_app.py +145 -0
  53. bear_utils/gui/gui_tools/qt_color_picker.py +119 -0
  54. bear_utils/gui/gui_tools/qt_file_handler.py +138 -0
  55. bear_utils/gui/gui_tools/qt_input_dialog.py +306 -0
  56. bear_utils/logging/__init__.py +25 -0
  57. bear_utils/logging/logger_manager/__init__.py +0 -0
  58. bear_utils/logging/logger_manager/_common.py +47 -0
  59. bear_utils/logging/logger_manager/_console_junk.py +131 -0
  60. bear_utils/logging/logger_manager/_styles.py +91 -0
  61. bear_utils/logging/logger_manager/loggers/__init__.py +0 -0
  62. bear_utils/logging/logger_manager/loggers/_base_logger.py +238 -0
  63. bear_utils/logging/logger_manager/loggers/_base_logger.pyi +50 -0
  64. bear_utils/logging/logger_manager/loggers/_buffer_logger.py +55 -0
  65. bear_utils/logging/logger_manager/loggers/_console_logger.py +249 -0
  66. bear_utils/logging/logger_manager/loggers/_console_logger.pyi +64 -0
  67. bear_utils/logging/logger_manager/loggers/_file_logger.py +141 -0
  68. bear_utils/logging/logger_manager/loggers/_level_sin.py +58 -0
  69. bear_utils/logging/logger_manager/loggers/_logger.py +18 -0
  70. bear_utils/logging/logger_manager/loggers/_sub_logger.py +110 -0
  71. bear_utils/logging/logger_manager/loggers/_sub_logger.pyi +38 -0
  72. bear_utils/logging/loggers.py +76 -0
  73. bear_utils/monitoring/__init__.py +10 -0
  74. bear_utils/monitoring/host_monitor.py +350 -0
  75. bear_utils/time/__init__.py +16 -0
  76. bear_utils/time/_helpers.py +91 -0
  77. bear_utils/time/_time_class.py +316 -0
  78. bear_utils/time/_timer.py +80 -0
  79. bear_utils/time/_tools.py +17 -0
  80. bear_utils/time/time_manager.py +218 -0
  81. bear_utils-0.7.11.dist-info/METADATA +260 -0
  82. bear_utils-0.7.11.dist-info/RECORD +83 -0
  83. bear_utils-0.7.11.dist-info/WHEEL +4 -0
bear_utils/__init__.py ADDED
@@ -0,0 +1,13 @@
1
+ from .cache import CacheWrapper, cache, cache_factory
2
+ from .config.settings_manager import SettingsManager, get_settings_manager
3
+ from .constants.date_related import DATE_FORMAT, DATE_TIME_FORMAT
4
+ from .database import DatabaseManager
5
+ from .events import Events
6
+ from .files.file_handlers.file_handler_factory import FileHandlerFactory
7
+ from .logging.logger_manager._common import VERBOSE_CONSOLE_FORMAT
8
+ from .logging.logger_manager._styles import VERBOSE
9
+ from .logging.loggers import BaseLogger, BufferLogger, ConsoleLogger, FileLogger
10
+ from .time._time_class import EpochTimestamp
11
+ from .time.time_manager import TimeTools
12
+
13
# Package version; must match the distribution metadata (bear_utils 0.7.11).
# The original line was a corrupted nested assignment string.
__version__ = "0.7.11"
@@ -0,0 +1,30 @@
1
+ from .ai_helpers._common import (
2
+ GPT_4_1,
3
+ GPT_4_1_MINI,
4
+ GPT_4_1_NANO,
5
+ PRODUCTION_MODE,
6
+ TESTING_MODE,
7
+ AIModel,
8
+ EnvironmentMode,
9
+ )
10
+ from .ai_helpers._config import AIEndpointConfig
11
+ from .ai_helpers._parsers import CommandResponseParser, PassthroughResponseParser, ResponseParser, TypedResponseParser
12
+ from .ai_helpers._types import ResponseParser as BaseResponseParser
13
+ from .ai_helpers._types import T_Response
14
+
15
# Public API re-exported from the ai_helpers subpackage, sorted alphabetically.
__all__ = [
    "AIEndpointConfig",
    "AIModel",
    "BaseResponseParser",
    "CommandResponseParser",
    "EnvironmentMode",
    "GPT_4_1",
    "GPT_4_1_MINI",
    "GPT_4_1_NANO",
    "PRODUCTION_MODE",
    "PassthroughResponseParser",
    "ResponseParser",
    "TESTING_MODE",
    "T_Response",
    "TypedResponseParser",
]
@@ -0,0 +1,130 @@
1
+ from collections.abc import Callable
2
+ from typing import Any
3
+
4
+ from ...logging import BaseLogger
5
+ from ._common import GPT_4_1, GPT_4_1_MINI, GPT_4_1_NANO, PRODUCTION_MODE, TESTING_MODE, AIModel, EnvironmentMode
6
+ from ._config import AIEndpointConfig
7
+ from ._parsers import JSONResponseParser, ModularAIEndpoint, PassthroughResponseParser, TypedResponseParser
8
+
9
+
10
+ def create_typed_endpoint[T_Response](
11
+ response_type: type[T_Response],
12
+ project_name: str,
13
+ prompt: str,
14
+ testing_url: str,
15
+ production_url: str,
16
+ logger: BaseLogger,
17
+ transformers: dict[str, Callable] | None = None,
18
+ environment: EnvironmentMode = PRODUCTION_MODE,
19
+ chat_model: AIModel = GPT_4_1_NANO,
20
+ **kwargs,
21
+ ) -> ModularAIEndpoint[T_Response]:
22
+ """Create an endpoint with strict TypedDict response typing."""
23
+ config = AIEndpointConfig(
24
+ project_name=project_name,
25
+ prompt=prompt,
26
+ testing_url=testing_url,
27
+ production_url=production_url,
28
+ environment=environment,
29
+ chat_model=chat_model,
30
+ **kwargs,
31
+ )
32
+ parser = TypedResponseParser(default_response=response_type, response_transformers=transformers)
33
+ return ModularAIEndpoint(config, logger, parser) # type: ignore[return-value, arg-type]
34
+
35
+
36
+ def create_command_endpoint[T_Response](
37
+ default_response: T_Response,
38
+ project_name: str,
39
+ prompt: str,
40
+ testing_url: str,
41
+ production_url: str,
42
+ logger: BaseLogger,
43
+ environment: EnvironmentMode = EnvironmentMode.PRODUCTION,
44
+ chat_model: AIModel = GPT_4_1_NANO,
45
+ **kwargs,
46
+ ) -> ModularAIEndpoint[T_Response]:
47
+ from rich.markdown import Markdown
48
+
49
+ config = AIEndpointConfig(
50
+ project_name=project_name,
51
+ prompt=prompt,
52
+ testing_url=testing_url,
53
+ production_url=production_url,
54
+ environment=environment,
55
+ chat_model=chat_model,
56
+ **kwargs,
57
+ )
58
+
59
+ parser = TypedResponseParser(
60
+ default_response=default_response,
61
+ response_transformers={"response": lambda x: Markdown(str(x))},
62
+ )
63
+
64
+ return ModularAIEndpoint(config, logger, parser)
65
+
66
+
67
def create_flexible_endpoint(
    project_name: str,
    prompt: str,
    testing_url: str,
    production_url: str,
    logger: BaseLogger,
    required_fields: list | None = None,
    transformers: dict[str, Callable] | None = None,
    append_json: bool = True,
    environment: EnvironmentMode = PRODUCTION_MODE,
    chat_model: AIModel = GPT_4_1_NANO,
    **kwargs,
) -> ModularAIEndpoint[dict[str, Any]]:
    """Create an endpoint with flexible JSON parsing.

    Responses are decoded into plain dicts, optionally validated against
    *required_fields* and post-processed by per-field *transformers*.
    """
    flexible_parser = JSONResponseParser(required_fields, transformers)
    endpoint_config = AIEndpointConfig(
        project_name=project_name,
        prompt=prompt,
        testing_url=testing_url,
        production_url=production_url,
        append_json_suffix=append_json,
        environment=environment,
        chat_model=chat_model,
        **kwargs,
    )
    return ModularAIEndpoint(endpoint_config, logger, flexible_parser)
93
+
94
+
95
def create_simple_endpoint(
    project_name: str,
    prompt: str,
    testing_url: str,
    production_url: str,
    logger: BaseLogger,
    environment: EnvironmentMode = PRODUCTION_MODE,
    chat_model: AIModel = GPT_4_1_NANO,
    **kwargs,
) -> ModularAIEndpoint[dict[str, Any]]:
    """Create an endpoint that returns raw output without JSON parsing.

    The JSON-instruction suffix is disabled and responses come back as
    ``{"output": <raw text>}``.
    """
    passthrough_parser = PassthroughResponseParser()
    endpoint_config = AIEndpointConfig(
        project_name=project_name,
        prompt=prompt,
        testing_url=testing_url,
        production_url=production_url,
        append_json_suffix=False,
        environment=environment,
        chat_model=chat_model,
        **kwargs,
    )
    return ModularAIEndpoint(endpoint_config, logger, passthrough_parser)
118
+
119
+
120
# Public factory helpers plus re-exported configuration/endpoint types, sorted.
__all__: list[str] = [
    "AIEndpointConfig",
    "EnvironmentMode",
    "ModularAIEndpoint",
    "PRODUCTION_MODE",
    "TESTING_MODE",
    "create_command_endpoint",
    "create_flexible_endpoint",
    "create_simple_endpoint",
    "create_typed_endpoint",
]
@@ -0,0 +1,19 @@
1
+ from enum import Enum, StrEnum
2
+
3
+
4
+ class EnvironmentMode(Enum):
5
+ TESTING = "testing"
6
+ PRODUCTION = "production"
7
+
8
+
9
+ class AIModel(StrEnum):
10
+ GPT_4_1_NANO = "gpt-4.1-nano"
11
+ GPT_4_1_MINI = "gpt-4.1-mini"
12
+ GPT_4_1 = "gpt-4.1"
13
+
14
+
15
+ GPT_4_1_NANO = AIModel.GPT_4_1_NANO
16
+ GPT_4_1_MINI = AIModel.GPT_4_1_MINI
17
+ GPT_4_1 = AIModel.GPT_4_1
18
+ TESTING_MODE = EnvironmentMode.TESTING
19
+ PRODUCTION_MODE = EnvironmentMode.PRODUCTION
@@ -0,0 +1,24 @@
1
+ from dataclasses import dataclass
2
+
3
+ from ._common import GPT_4_1_NANO, PRODUCTION_MODE, AIModel, EnvironmentMode
4
+
5
+
6
@dataclass
class AIEndpointConfig:
    """Configuration for AI endpoint communication.

    Fields mirror the request payload and transport settings used by
    ModularAIEndpoint; ``environment`` selects which URL ``url`` returns.
    """

    project_name: str  # keys the persisted per-project settings (session id)
    bearer_token: str  # sent as the HTTP "Authorization: Bearer ..." token
    prompt: str  # system prompt forwarded with every message
    testing_url: str
    production_url: str
    connection_timeout: int = 20  # seconds; passed to the HTTP client
    append_json_suffix: bool = True  # when True, json_suffix is appended to each message
    json_suffix: str = " \nEnsure to output your response as json as specified in the prompt"
    chat_model: AIModel | str = GPT_4_1_NANO
    environment: EnvironmentMode = PRODUCTION_MODE

    @property
    def url(self) -> str:
        """Get the appropriate URL based on environment."""
        return self.testing_url if self.environment == EnvironmentMode.TESTING else self.production_url
@@ -0,0 +1,188 @@
1
+ import json
2
+ from collections.abc import Callable
3
+ from typing import Any, Generic, cast
4
+
5
+ from httpx import AsyncClient, Headers, Response
6
+ from rich.markdown import Markdown
7
+ from singleton_base.singleton_base_new import SingletonBase
8
+
9
+ from ... import BaseLogger, EpochTimestamp, SettingsManager, get_settings_manager
10
+ from . import AIEndpointConfig
11
+ from ._types import ResponseParser, T_Response
12
+
13
+
14
class JSONResponseParser(ResponseParser[dict[str, Any]]):
    """Parser for JSON responses with flexible output structure."""

    def __init__(self, required_fields: list | None = None, response_transformers: dict[str, Callable] | None = None):
        # Fields that must be present in the decoded JSON; empty list disables the check.
        self.required_fields = required_fields or []
        # Optional per-field post-processing callables, applied after validation.
        self.response_transformers = response_transformers or {}

    async def parse(self, raw_response: dict, logger: BaseLogger) -> dict[str, Any]:
        """Parse JSON response with configurable validation and transformation.

        Args:
            raw_response: Envelope dict from the endpoint; the JSON payload is
                expected as a string under its "output" key.
            logger: Logger used for error/verbose reporting.

        Returns:
            The decoded (and transformed) dict, or the default error response
            when the payload is missing, malformed, or incomplete.
        """
        default = self.get_default_response()

        output = raw_response.get("output", "")
        if not output:
            logger.error("No output received from AI.")
            return default

        try:
            response_dict = json.loads(output)
            if not isinstance(response_dict, dict):
                logger.error("Response is not a valid JSON object.")
                return default

            logger.verbose(json.dumps(response_dict, indent=4))

            if self.required_fields:
                missing_fields = [field for field in self.required_fields if field not in response_dict]
                if missing_fields:
                    logger.error(f"Response JSON missing required fields: {missing_fields}")
                    return default

            # Apply transformers in place; fields absent from the response are skipped.
            for field_name, transformer in self.response_transformers.items():
                if field_name in response_dict:
                    response_dict[field_name] = transformer(response_dict[field_name])

            return response_dict

        except json.JSONDecodeError as e:
            logger.error(f"Failed to parse response JSON: {e}")
            return default

    def get_default_response(self) -> dict[str, Any]:
        """Return a basic default response."""
        return {"error": "Failed to parse response"}
57
+
58
+
59
class TypedResponseParser(ResponseParser[T_Response], Generic[T_Response]):
    """Parser that validates decoded JSON against a typed default response.

    The keys of ``default_response`` act as the required-field schema; any
    missing key causes the default to be returned instead.
    """

    def __init__(self, default_response: T_Response, response_transformers: dict[str, Callable] | None = None):
        self.default_response: T_Response = default_response
        self.response_transformers = response_transformers or {}
        # assumes default_response is dict-like (e.g. a TypedDict instance) — TODO confirm
        self.required_fields = list(cast(dict, self.default_response).keys())

    def get_default_response(self) -> T_Response:
        # NOTE(review): returns the shared default instance, not a copy — callers
        # mutating the result would affect subsequent defaults; confirm intended.
        return cast(T_Response, self.default_response)

    async def parse(self, raw_response: dict, logger: BaseLogger) -> T_Response:
        """Parse JSON response with strict typing.

        Args:
            raw_response: Envelope dict; the JSON payload string is read from
                its "output" key.
            logger: Logger used for verbose/error reporting.

        Returns:
            The decoded dict cast to T_Response, or the default response on
            missing output, malformed JSON, or missing required fields.
        """
        default = self.get_default_response()

        output = raw_response.get("output", "")
        if not output:
            logger.error("No output received from AI.")
            return default

        try:
            response_dict = json.loads(output)
            if not isinstance(response_dict, dict):
                logger.error("Response is not a valid JSON object.")
                return default

            logger.verbose(json.dumps(response_dict, indent=4))

            missing_fields = [field for field in self.required_fields if field not in response_dict]
            if missing_fields:
                logger.error(f"Response JSON missing required fields: {missing_fields}")
                return default

            # Apply per-field transformers in place; absent fields are skipped.
            for field_name, transformer in self.response_transformers.items():
                if field_name in response_dict:
                    response_dict[field_name] = transformer(response_dict[field_name])
            return cast(T_Response, response_dict)

        except json.JSONDecodeError as e:
            logger.error(f"Failed to parse response JSON: {e}")
            return default
98
+
99
+
100
class CommandResponseParser(TypedResponseParser):
    """Specialized parser for command-based responses.

    Uses a fresh ``response_type`` instance as the default/schema and renders
    the ``output`` field as rich Markdown when it is a string.
    """

    def __init__(self, response_type: type[T_Response]):
        def render_output(value):
            # Non-string values pass through untouched.
            return Markdown(value) if isinstance(value, str) else value

        super().__init__(
            default_response=response_type(),
            response_transformers={"output": render_output},
        )
111
+
112
+
113
class PassthroughResponseParser(ResponseParser[dict[str, Any]]):
    """Parser that returns the raw output without JSON parsing."""

    async def parse(self, raw_response: dict, logger: BaseLogger) -> dict[str, Any]:
        """Wrap the raw ``output`` field in a dict, defaulting to an empty string."""
        return {"output": raw_response.get("output", "")}

    def get_default_response(self) -> dict[str, Any]:
        """Default payload used when no response is available."""
        return {"output": ""}
122
+
123
+
124
class ModularAIEndpoint(SingletonBase, Generic[T_Response]):
    """Modular AI endpoint for flexible communication patterns."""

    def __init__(self, config: AIEndpointConfig, logger: BaseLogger, response_parser: ResponseParser[T_Response]):
        """Store configuration, logger, and parser, then start a fresh session.

        Args:
            config: Endpoint configuration (URLs, model, prompt, bearer token).
            logger: Logger used for verbose/debug/error reporting.
            response_parser: Strategy that turns the raw HTTP JSON into T_Response.
        """
        self.config: AIEndpointConfig = config
        self.logger: BaseLogger = logger
        self.response_parser: ResponseParser[T_Response] = response_parser
        self.session_id: str | None = None

        self.logger.verbose(f"Using URL: {self.config.url}")
        self.logger.verbose(f"Using prompt: {self.config.prompt[:50]}...")

        # Session id is persisted per project via the settings manager.
        self.settings_manager: SettingsManager = get_settings_manager(self.config.project_name)
        self.set_session_id(new=True)

    def set_session_id(self, new: bool = False):
        """
        Set the session ID for the current interaction.

        Args:
            new (bool): If True, start a new session; otherwise, continue the existing session.
        """
        if new or not self.settings_manager.has("session_id"):
            self.logger.verbose("Starting a new session.")
            # Epoch timestamp doubles as a unique-enough session identifier.
            self.session_id = str(EpochTimestamp.now())
            self.settings_manager.set("session_id", self.session_id)
        else:
            self.logger.verbose("Continuing existing session with AI.")
            self.session_id = self.settings_manager.get("session_id")
        self.logger.debug(f"Using session ID: {self.session_id}")

    def _prepare_message(self, message: str) -> str:
        """Prepare the message with optional JSON suffix."""
        if self.config.append_json_suffix:
            return f"{message}{self.config.json_suffix}"
        return message

    async def send_message(self, message: str, override_parser: ResponseParser[T_Response] | None = None) -> T_Response:
        """Send a message to the AI endpoint with flexible response parsing.

        Args:
            message: User message; the JSON-instruction suffix may be appended.
            override_parser: Optional parser used instead of the instance default.

        Returns:
            The parsed response, or the parser's default response on any
            non-200 status or transport exception (best-effort degradation).
        """
        parser: ResponseParser[T_Response] = override_parser or self.response_parser
        async with AsyncClient(timeout=self.config.connection_timeout) as client:
            try:
                response: Response = await client.post(
                    url=self.config.url,
                    json={
                        "chatModel": self.config.chat_model,
                        "chatInput": self._prepare_message(message),
                        "sessionId": self.session_id,
                        "systemPrompt": self.config.prompt,
                    },
                    headers=Headers(
                        {
                            "Content-Type": "application/json",
                            "Authorization": f"Bearer {self.config.bearer_token}",
                        }
                    ),
                )
                if response.status_code == 200:
                    return await parser.parse(response.json(), self.logger)
                else:
                    self.logger.error(f"Failed to send message to AI: {response.status_code} - {response.text}")
                    return parser.get_default_response()
            except Exception as e:
                # Broad catch is deliberate: network failures degrade to the default response.
                self.logger.error(f"Exception during AI communication: {e}")
                return parser.get_default_response()
@@ -0,0 +1,20 @@
1
+ from abc import ABC, abstractmethod
2
+ from typing import Generic, TypeVar
3
+
4
+ from ... import BaseLogger
5
+
6
# Generic payload type produced by a parser implementation.
T_Response = TypeVar("T_Response")


class ResponseParser(ABC, Generic[T_Response]):
    """Abstract base class for response parsers.

    Concrete parsers turn the raw endpoint envelope dict into a T_Response
    and must supply a safe default used when parsing fails.
    """

    @abstractmethod
    async def parse(self, raw_response: dict, logger: BaseLogger) -> T_Response:
        """Parse the raw response into the desired format."""
        pass

    @abstractmethod
    def get_default_response(self) -> T_Response:
        """Return a default response structure."""
        pass
@@ -0,0 +1,119 @@
1
+ import functools
2
+ from pathlib import Path
3
+
4
+ from diskcache import Cache
5
+
6
+ DEFAULT_CACHE_DIR = Path("~/.cache/app_cache").expanduser()
7
+
8
+
9
class CacheWrapper:
    """
    A simple wrapper around diskcache.Cache to provide a consistent interface.
    This class allows for easy caching of function results with a specified directory,
    size limit, and default timeout.
    """

    def __init__(self, directory=None, size_limit=None, default_timeout=None, **kwargs):
        """
        Initialize the CacheWrapper with a specified directory, size limit, and default timeout.

        Args:
            directory (str, optional): Directory path for the cache. Defaults to ~/.cache/app_cache.
            size_limit (int, optional): Maximum size of the cache in bytes. Defaults to 1_000_000_000.
            default_timeout (int, optional): Default timeout for cache entries in seconds. Defaults to None.
        """
        self.cache = Cache(directory or DEFAULT_CACHE_DIR, size_limit=size_limit or 1_000_000_000, **kwargs)
        # Fallback expiry (seconds) used by set() when no explicit expire is given.
        self.default_timeout = default_timeout

    def get(self, key, default=None):
        """Get a value from the cache, returning *default* on a miss."""
        return self.cache.get(key, default=default)

    def set(self, key, value, expire=None) -> None:
        """Set a value in the cache.

        Args:
            key: Cache key.
            value: Value to store.
            expire (int, optional): Expiry in seconds; None falls back to default_timeout.
        """
        if expire is None:
            expire = self.default_timeout
        self.cache.set(key, value, expire=expire)
37
+
38
+
39
def cache_factory(directory=None, size_limit=None, default_timeout=None, **kwargs):
    """
    Creates and configures a cache decorator factory.

    Args:
        directory (str, optional): Cache directory path. Defaults to ~/.cache/app_cache.
        size_limit (int, optional): Maximum size in bytes. Defaults to None.
        default_timeout (int, optional): Default timeout in seconds. Defaults to None.
        **kwargs: Additional arguments to pass to the Cache constructor.

    Returns:
        function: A decorator function that can be used to cache function results.

    Examples:
        # Create a custom cache
        my_cache = cache_factory(directory='/tmp/mycache', default_timeout=3600)

        # Use as a simple decorator
        @my_cache
        def expensive_function(x, y):
            return x + y

        # Use with custom parameters
        @my_cache(expire=60)
        def another_function(x, y):
            return x * y
    """
    if directory is None:
        directory = DEFAULT_CACHE_DIR  # already a Path; mkdir is a no-op if it exists
        directory.mkdir(parents=True, exist_ok=True)

    if size_limit is None:
        size_limit = 1_000_000_000

    cache_instance = Cache(directory, size_limit=size_limit, **kwargs)

    # Unique miss sentinel: the original used ``None`` as the miss marker, so a
    # function that legitimately returned (and cached) None was recomputed on
    # every call. The sentinel is never stored, only compared by identity.
    _MISS = object()

    def decorator(func=None, *, expire=default_timeout, key=None):
        """
        Decorator that caches function results.

        Args:
            func: The function to cache (when used as @cache)
            expire (int, optional): Expiration time in seconds.
            key (callable, optional): Custom key function.

        Returns:
            callable: Decorated function or decorator
        """

        def actual_decorator(fn):
            @functools.wraps(fn)
            def wrapper(*args, **kwargs):
                if key is not None:
                    cache_key = key(fn, *args, **kwargs)
                else:
                    # Module + qualname disambiguates same-named functions.
                    cache_key = (fn.__module__, fn.__qualname__, args, frozenset(kwargs.items()))

                # Try to get from cache; identity check distinguishes a cached None from a miss.
                result = cache_instance.get(cache_key, default=_MISS)
                if result is not _MISS:
                    return result

                # If not in cache, compute and store
                result = fn(*args, **kwargs)
                cache_instance.set(cache_key, result, expire=expire)
                return result

            return wrapper

        # Handle both @cache and @cache(expire=300) styles
        if func is None:
            return actual_decorator
        return actual_decorator(func)

    return decorator
115
+
116
+
117
# Module-level default cache decorator, built with the default directory/size.
cache = cache_factory()

__all__ = ["CacheWrapper", "cache_factory", "cache"]
@@ -0,0 +1,4 @@
1
+ from .commands import MaskShellCommand, OPShellCommand, UVShellCommand
2
+ from .shell._base_command import BaseShellCommand
3
+ from .shell._base_shell import SimpleShellSession, shell_session
4
+ from .shell._common import DEFAULT_SHELL
@@ -0,0 +1,59 @@
1
+ from typing import Self
2
+
3
+ from .shell._base_command import BaseShellCommand
4
+
5
+
6
class OPShellCommand(BaseShellCommand):
    """OP command for running 1Password CLI commands"""

    command_name = "op"

    def __init__(self, *args, **kwargs):
        # No extra state; defers entirely to BaseShellCommand.
        super().__init__(*args, **kwargs)

    @classmethod
    def read(cls, *args, **kwargs) -> Self:
        """Create a read command for 1Password"""
        return cls.sub("read", *args, **kwargs)
18
+
19
+
20
class UVShellCommand(BaseShellCommand):
    """UV command for running Python scripts with uv"""

    command_name = "uv"

    def __init__(self, *args, **kwargs):
        # No extra state; defers entirely to BaseShellCommand.
        super().__init__(*args, **kwargs)

    @classmethod
    def pip(cls, s="", *args, **kwargs) -> Self:
        """Create a piped command for uv"""
        subcommand = f"pip {s}" if s else "pip"
        return cls.sub(subcommand, *args, **kwargs)
34
+
35
+
36
class MaskShellCommand(BaseShellCommand):
    """Mask command for running masked commands"""

    command_name = "mask"

    def __init__(self, *args, **kwargs):
        # No extra state; defers entirely to BaseShellCommand.
        super().__init__(*args, **kwargs)

    @classmethod
    def maskfile(cls, maskfile, *args, **kwargs) -> Self:
        """Create a maskfile command with the specified maskfile"""
        # Builds `mask --maskfile <maskfile>`; the path is attached as the flag's value.
        return cls.sub("--maskfile", *args, **kwargs).value(maskfile)

    @classmethod
    def init(cls, *args, **kwargs) -> Self:
        """Create an init command for mask"""
        return cls.sub("init", *args, **kwargs)
53
+
54
+
55
# Public command classes exported by this module.
__all__ = [
    "MaskShellCommand",
    "OPShellCommand",
    "UVShellCommand",
]