jl-ecms-client 0.2.8__py3-none-any.whl → 0.2.23__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of jl-ecms-client might be problematic.

Files changed (40)
  1. {jl_ecms_client-0.2.8.dist-info → jl_ecms_client-0.2.23.dist-info}/METADATA +6 -1
  2. jl_ecms_client-0.2.23.dist-info/RECORD +67 -0
  3. mirix/__init__.py +41 -0
  4. mirix/client/client.py +1 -1
  5. mirix/constants.py +251 -0
  6. mirix/errors.py +238 -0
  7. mirix/functions/__init__.py +0 -0
  8. mirix/functions/ast_parsers.py +113 -0
  9. mirix/functions/function_sets/__init__.py +1 -0
  10. mirix/functions/function_sets/base.py +330 -0
  11. mirix/functions/function_sets/extras.py +271 -0
  12. mirix/functions/function_sets/memory_tools.py +933 -0
  13. mirix/functions/functions.py +199 -0
  14. mirix/functions/helpers.py +311 -0
  15. mirix/functions/schema_generator.py +511 -0
  16. mirix/helpers/json_helpers.py +3 -3
  17. mirix/log.py +163 -0
  18. mirix/schemas/agent.py +1 -1
  19. mirix/schemas/block.py +1 -1
  20. mirix/schemas/embedding_config.py +0 -3
  21. mirix/schemas/enums.py +12 -0
  22. mirix/schemas/episodic_memory.py +1 -1
  23. mirix/schemas/knowledge_vault.py +1 -1
  24. mirix/schemas/memory.py +1 -1
  25. mirix/schemas/message.py +1 -1
  26. mirix/schemas/mirix_request.py +1 -1
  27. mirix/schemas/procedural_memory.py +1 -1
  28. mirix/schemas/providers.py +1 -1
  29. mirix/schemas/resource_memory.py +1 -1
  30. mirix/schemas/sandbox_config.py +1 -3
  31. mirix/schemas/semantic_memory.py +1 -1
  32. mirix/schemas/tool.py +241 -241
  33. mirix/schemas/user.py +3 -3
  34. mirix/settings.py +280 -0
  35. mirix/system.py +261 -0
  36. jl_ecms_client-0.2.8.dist-info/RECORD +0 -53
  37. mirix/client/constants.py +0 -60
  38. {jl_ecms_client-0.2.8.dist-info → jl_ecms_client-0.2.23.dist-info}/WHEEL +0 -0
  39. {jl_ecms_client-0.2.8.dist-info → jl_ecms_client-0.2.23.dist-info}/licenses/LICENSE +0 -0
  40. {jl_ecms_client-0.2.8.dist-info → jl_ecms_client-0.2.23.dist-info}/top_level.txt +0 -0
mirix/functions/schema_generator.py ADDED
@@ -0,0 +1,511 @@
+ import inspect
+ import warnings
+ from typing import Any, Dict, List, Optional, Type, Union, get_args, get_origin
+
+ from docstring_parser import parse
+ from pydantic import BaseModel
+
+
+ def is_optional(annotation):
+     # Check if the annotation is a Union
+     if getattr(annotation, "__origin__", None) is Union:
+         # Check if None is one of the options in the Union
+         return type(None) in annotation.__args__
+     return False
+
+
+ def optional_length(annotation):
+     if is_optional(annotation):
+         # Subtract 1 to account for NoneType
+         return len(annotation.__args__) - 1
+     else:
+         raise ValueError("The annotation is not an Optional type")
+
+
+ def type_to_json_schema_type(py_type) -> dict:
+     """
+     Maps a Python type to a JSON schema type.
+     Specifically handles typing.Optional and common Python types.
+     """
+     # if get_origin(py_type) is typing.Optional:
+     if is_optional(py_type):
+         # Assert that Optional has only one type argument
+         type_args = get_args(py_type)
+         assert optional_length(py_type) == 1, (
+             f"Optional type must have exactly one type argument, but got {py_type}"
+         )
+
+         # Extract and map the inner type
+         return type_to_json_schema_type(type_args[0])
+
+     # Handle Union types (except Optional which is handled above)
+     if get_origin(py_type) is Union:
+         # TODO support mapping Unions to anyOf
+         raise NotImplementedError("General Union types are not yet supported")
+
+     # Handle array types
+     origin = get_origin(py_type)
+     if py_type is list or origin in (list, List):
+         args = get_args(py_type)
+         if len(args) == 0:
+             # is this correct
+             warnings.warn("Defaulting to string type for untyped List")
+             return {
+                 "type": "array",
+                 "items": {"type": "string"},
+             }
+
+         if args and inspect.isclass(args[0]) and issubclass(args[0], BaseModel):
+             # If it's a list of Pydantic models, return an array with the model schema as items
+             return {
+                 "type": "array",
+                 "items": pydantic_model_to_json_schema(args[0]),
+             }
+
+         # Otherwise, recursively call the basic type checker
+         return {
+             "type": "array",
+             # get the type of the items in the list
+             "items": type_to_json_schema_type(args[0]),
+         }
+
+     # Handle object types
+     if py_type is dict or origin in (dict, Dict):
+         args = get_args(py_type)
+         if not args:
+             # Generic dict without type arguments
+             return {
+                 "type": "object",
+                 # "properties": {}
+             }
+         else:
+             raise ValueError(
+                 f"Dictionary types {py_type} with nested type arguments are not supported (consider using a Pydantic model instead)"
+             )
+
+         # NOTE: the below code works for generic JSON schema parsing, but there's a problem with the key inference
+         # when it comes to OpenAI function schema generation so it doesn't make sense to allow for dict[str, Any] type hints
+         # key_type, value_type = args
+
+         # # Ensure dict keys are strings
+         # # Otherwise there's no JSON schema equivalent
+         # if key_type != str:
+         #     raise ValueError("Dictionary keys must be strings for OpenAI function schema compatibility")
+
+         # # Handle value type to determine property schema
+         # value_schema = {}
+         # if inspect.isclass(value_type) and issubclass(value_type, BaseModel):
+         #     value_schema = pydantic_model_to_json_schema(value_type)
+         # else:
+         #     value_schema = type_to_json_schema_type(value_type)
+
+         # # NOTE: the problem lies here - the key is always "key_placeholder"
+         # return {"type": "object", "properties": {"key_placeholder": value_schema}}
+
+     # Handle direct Pydantic models
+     if inspect.isclass(py_type) and issubclass(py_type, BaseModel):
+         return pydantic_model_to_json_schema(py_type)
+
+     # Mapping of Python types to JSON schema types
+     type_map = {
+         # Basic types
+         # Optional, Union, and collections are handled above ^
+         int: "integer",
+         str: "string",
+         bool: "boolean",
+         float: "number",
+         None: "null",
+     }
+     if py_type not in type_map:
+         raise ValueError(
+             f"Python type {py_type} has no corresponding JSON schema type - full map: {type_map}"
+         )
+     else:
+         return {"type": type_map[py_type]}
+
+
+ def pydantic_model_to_open_ai(model: Type[BaseModel]) -> dict:
+     """
+     Converts a Pydantic model as a singular arg to a JSON schema object for use in OpenAI function calling.
+     """
+     schema = model.model_json_schema()
+     docstring = parse(model.__doc__ or "")
+     parameters = {k: v for k, v in schema.items() if k not in ("title", "description")}
+     for param in docstring.params:
+         if (name := param.arg_name) in parameters["properties"] and (
+             description := param.description
+         ):
+             if "description" not in parameters["properties"][name]:
+                 parameters["properties"][name]["description"] = description
+
+     parameters["required"] = sorted(
+         k for k, v in parameters["properties"].items() if "default" not in v
+     )
+
+     if "description" not in schema:
+         if docstring.short_description:
+             schema["description"] = docstring.short_description
+         else:
+             raise ValueError(
+                 f"No description found in docstring or description field (model: {model}, docstring: {docstring})"
+             )
+
+     return {
+         "name": schema["title"],
+         "description": schema["description"],
+         "parameters": parameters,
+     }
+
+
+ def pydantic_model_to_json_schema(model: Type[BaseModel]) -> dict:
+     """
+     Converts a Pydantic model (as an arg that already is annotated) to a JSON schema object for use in OpenAI function calling.
+
+     An example of a Pydantic model as an arg:
+
+     class Step(BaseModel):
+         name: str = Field(
+             ...,
+             description="Name of the step.",
+         )
+         key: str = Field(
+             ...,
+             description="Unique identifier for the step.",
+         )
+         description: str = Field(
+             ...,
+             description="An exhaustic description of what this step is trying to achieve and accomplish.",
+         )
+
+     def create_task_plan(steps: list[Step]):
+         '''
+         Creates a task plan for the current task.
+
+         Args:
+             steps: List of steps to add to the task plan.
+         ...
+
+     Should result in:
+     {
+         "name": "create_task_plan",
+         "description": "Creates a task plan for the current task.",
+         "parameters": {
+             "type": "object",
+             "properties": {
+                 "steps": { # <= this is the name of the arg
+                     "type": "object",
+                     "description": "List of steps to add to the task plan.",
+                     "properties": {
+                         "name": {
+                             "type": "str",
+                             "description": "Name of the step.",
+                         },
+                         "key": {
+                             "type": "str",
+                             "description": "Unique identifier for the step.",
+                         },
+                         "description": {
+                             "type": "str",
+                             "description": "An exhaustic description of what this step is trying to achieve and accomplish.",
+                         },
+                     },
+                     "required": ["name", "key", "description"],
+                 }
+             },
+             "required": ["steps"],
+         }
+     }
+
+     Specifically, the result of pydantic_model_to_json_schema(steps) (where `steps` is an instance of BaseModel) is:
+     {
+         "type": "object",
+         "properties": {
+             "name": {
+                 "type": "str",
+                 "description": "Name of the step."
+             },
+             "key": {
+                 "type": "str",
+                 "description": "Unique identifier for the step."
+             },
+             "description": {
+                 "type": "str",
+                 "description": "An exhaustic description of what this step is trying to achieve and accomplish."
+             },
+         },
+         "required": ["name", "key", "description"],
+     }
+     """
+     schema = model.model_json_schema()
+
+     def clean_property(prop: dict) -> dict:
+         """Clean up a property schema to match desired format"""
+
+         if "description" not in prop:
+             raise ValueError(f"Property {prop} lacks a 'description' key")
+
+         # Handle anyOf structures (e.g., for Optional[List[str]])
+         if "anyOf" in prop:
+             # For anyOf, we'll choose the first non-null type
+             for option in prop["anyOf"]:
+                 if option.get("type") != "null":
+                     result = {
+                         "type": option["type"],
+                         "description": prop["description"],
+                     }
+                     # Only include items for array types
+                     if option["type"] == "array" and "items" in option:
+                         result["items"] = option["items"]
+                     return result
+             # If all options are null, default to string
+             return {
+                 "type": "string",
+                 "description": prop["description"],
+             }
+
+         if "type" not in prop:
+             raise ValueError(f"Property {prop} lacks a 'type' key")
+
+         return {
+             "type": "string" if prop["type"] == "string" else prop["type"],
+             "description": prop["description"],
+         }
+
+     def resolve_ref(ref: str, schema: dict) -> dict:
+         """Resolve a $ref reference in the schema"""
+         if not ref.startswith("#/$defs/"):
+             raise ValueError(f"Unexpected reference format: {ref}")
+
+         model_name = ref.split("/")[-1]
+         if model_name not in schema.get("$defs", {}):
+             raise ValueError(f"Reference {model_name} not found in schema definitions")
+
+         return schema["$defs"][model_name]
+
+     def clean_schema(schema_part: dict, full_schema: dict) -> dict:
+         """Clean up a schema part, handling references and nested structures"""
+         # Handle $ref
+         if "$ref" in schema_part:
+             schema_part = resolve_ref(schema_part["$ref"], full_schema)
+
+         if "type" not in schema_part:
+             raise ValueError(f"Schema part lacks a 'type' key: {schema_part}")
+
+         # Handle array type
+         if schema_part["type"] == "array":
+             items_schema = schema_part["items"]
+             if "$ref" in items_schema:
+                 items_schema = resolve_ref(items_schema["$ref"], full_schema)
+             return {
+                 "type": "array",
+                 "items": clean_schema(items_schema, full_schema),
+                 "description": schema_part.get("description", ""),
+             }
+
+         # Handle object type
+         if schema_part["type"] == "object":
+             if "properties" not in schema_part:
+                 raise ValueError(f"Object schema lacks 'properties' key: {schema_part}")
+
+             properties = {}
+             for name, prop in schema_part["properties"].items():
+                 try:
+                     if "items" in prop: # Handle arrays
+                         if "description" not in prop:
+                             raise ValueError(
+                                 f"Property {prop} lacks a 'description' key"
+                             )
+                         properties[name] = {
+                             "type": "array",
+                             "items": clean_schema(prop["items"], full_schema),
+                             "description": prop["description"],
+                         }
+                     else:
+                         properties[name] = clean_property(prop)
+                 except Exception as e:
+                     raise ValueError(
+                         f"Error processing property '{name}': {prop}. Error: {e}"
+                     )
+
+             pydantic_model_schema_dict = {
+                 "type": "object",
+                 "properties": properties,
+                 "required": schema_part.get("required", []),
+             }
+             if "description" in schema_part:
+                 pydantic_model_schema_dict["description"] = schema_part["description"]
+
+             return pydantic_model_schema_dict
+
+         # Handle primitive types
+         # If it's a simple type definition without description (like {'type': 'string'}),
+         # return it as-is since it doesn't need cleaning
+         if (
+             "description" not in schema_part
+             and len(schema_part) == 1
+             and "type" in schema_part
+         ):
+             return schema_part
+         return clean_property(schema_part)
+
+     response = clean_schema(schema_part=schema, full_schema=schema)
+     return response
+
+
+ def generate_schema(
+     function, name: Optional[str] = None, description: Optional[str] = None
+ ) -> dict:
+     # Get the signature of the function
+     sig = inspect.signature(function)
+
+     # Parse the docstring
+     docstring = parse(function.__doc__)
+
+     # Prepare the schema dictionary
+     schema = {
+         "name": function.__name__ if name is None else name,
+         "description": docstring.short_description
+         if description is None
+         else description,
+         "parameters": {"type": "object", "properties": {}, "required": []},
+     }
+
+     # TODO: ensure that 'agent' keyword is reserved for `Agent` class
+
+     for param in sig.parameters.values():
+         # Exclude 'self' parameter
+         # TODO: eventually remove this (only applies to BASE_TOOLS)
+         if param.name in [
+             "self",
+             "agent_state",
+             "user_message",
+             "timezone_str",
+         ]: # Add agent_manager to excluded
+             continue
+
+         # Assert that the parameter has a type annotation
+         if param.annotation == inspect.Parameter.empty:
+             raise TypeError(
+                 f"Parameter '{param.name}' in function '{function.__name__}' lacks a type annotation"
+             )
+
+         # Find the parameter's description in the docstring
+         param_doc = next(
+             (d for d in docstring.params if d.arg_name == param.name), None
+         )
+
+         # Assert that the parameter has a description
+         if not param_doc or not param_doc.description:
+             raise ValueError(
+                 f"Parameter '{param.name}' in function '{function.__name__}' lacks a description in the docstring"
+             )
+
+         # If the parameter is a pydantic model, we need to unpack the Pydantic model type into a JSON schema object
+         # if inspect.isclass(param.annotation) and issubclass(param.annotation, BaseModel):
+         if (
+             (
+                 inspect.isclass(param.annotation)
+                 or inspect.isclass(get_origin(param.annotation) or param.annotation)
+             )
+             and not get_origin(param.annotation)
+             and issubclass(param.annotation, BaseModel)
+         ):
+             # print("Generating schema for pydantic model:", param.annotation)
+             # Extract the properties from the pydantic model
+             schema["parameters"]["properties"][param.name] = (
+                 pydantic_model_to_json_schema(param.annotation)
+             )
+             schema["parameters"]["properties"][param.name]["description"] = (
+                 param_doc.description
+             )
+
+         # Otherwise, we convert the Python typing to JSON schema types
+         # NOTE: important - if a dict or list, the internal type can be a Pydantic model itself
+         # however in that
+         else:
+             # print("Generating schema for non-pydantic model:", param.annotation)
+             # Grab the description for the parameter from the extended docstring
+             # If it doesn't exist, we should raise an error
+             param_doc = next(
+                 (d for d in docstring.params if d.arg_name == param.name), None
+             )
+
+             if not param_doc:
+                 raise ValueError(
+                     f"Parameter '{param.name}' in function '{function.__name__}' lacks a description in the docstring"
+                 )
+             elif not isinstance(param_doc.description, str):
+                 raise ValueError(
+                     f"Parameter '{param.name}' in function '{function.__name__}' has a description in the docstring that is not a string (type: {type(param_doc.description)})"
+                 )
+             else:
+                 # If it's a string or a basic type, then all you need is: (1) type, (2) description
+                 # If it's a more complex type, then you also need either:
+                 # - for array, you need "items", each of which has "type"
+                 # - for a dict, you need "properties", which has keys which each have "type"
+                 if param.annotation != inspect.Parameter.empty:
+                     param_generated_schema = type_to_json_schema_type(param.annotation)
+                 else:
+                     # TODO why are we inferring here?
+                     param_generated_schema = {"type": "string"}
+
+                 # Add in the description
+                 param_generated_schema["description"] = param_doc.description
+
+                 # Add the schema to the function arg key
+                 schema["parameters"]["properties"][param.name] = param_generated_schema
+
+         # If the parameter doesn't have a default value, it is required (so we need to add it to the required list)
+         if param.default == inspect.Parameter.empty and not is_optional(
+             param.annotation
+         ):
+             schema["parameters"]["required"].append(param.name)
+
+         # TODO what's going on here?
+         # If the parameter is a list of strings we need to hard cast to "string" instead of `str`
+         if get_origin(param.annotation) is list:
+             if get_args(param.annotation)[0] is str:
+                 schema["parameters"]["properties"][param.name]["items"] = {
+                     "type": "string"
+                 }
+
+         # TODO is this not duplicating the other append directly above?
+         if param.annotation == inspect.Parameter.empty:
+             schema["parameters"]["required"].append(param.name)
+
+     return schema
+
+
+ def generate_schema_from_args_schema_v2(
+     args_schema: Type[BaseModel],
+     name: Optional[str] = None,
+     description: Optional[str] = None,
+     append_contine_chaining: bool = True,
+ ) -> Dict[str, Any]:
+     properties = {}
+     required = []
+     for field_name, field in args_schema.model_fields.items():
+         field_type_annotation = field.annotation
+         properties[field_name] = type_to_json_schema_type(field_type_annotation)
+         properties[field_name]["description"] = field.description
+         if field.is_required():
+             required.append(field_name)
+
+     function_call_json = {
+         "name": name,
+         "description": description,
+         "parameters": {
+             "type": "object",
+             "properties": properties,
+             "required": required,
+         },
+     }
+
+     if append_contine_chaining:
+         function_call_json["parameters"]["properties"]["continue_chaining"] = {
+             "type": "boolean",
+             "description": "Request an immediate contine_chaining after function execution. Set to `True` if you want to send a follow-up message or run a follow-up function.",
+         }
+         function_call_json["parameters"]["required"].append("continue_chaining")
+
+     return function_call_json
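For orientation (not part of the published diff): a minimal sketch of how generate_schema in the new module is used, based only on the code above. The example function archive_note, its docstring, and the expected output in the comments are illustrative assumptions, not package code.

    from mirix.functions.schema_generator import generate_schema

    def archive_note(text: str, tags: list[str]) -> str:
        """
        Archive a note for later retrieval.

        Args:
            text: Body of the note to archive.
            tags: Tags used to index the note.
        """
        return "ok"

    schema = generate_schema(archive_note)
    # Both parameters land in "required": neither has a default nor is Optional.
    # schema == {
    #     "name": "archive_note",
    #     "description": "Archive a note for later retrieval.",
    #     "parameters": {
    #         "type": "object",
    #         "properties": {
    #             "text": {"type": "string", "description": "Body of the note to archive."},
    #             "tags": {"type": "array", "items": {"type": "string"},
    #                      "description": "Tags used to index the note."},
    #         },
    #         "required": ["text", "tags"],
    #     },
    # }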
mirix/helpers/json_helpers.py CHANGED
@@ -28,13 +28,13 @@ def parse_json(string) -> dict:
      result = json_loads(string)
      return result
  except Exception as e:
-     logger.debug("Error parsing json with json package: %s", e)
+     logger.debug(f"Error parsing json with json package: {e}")

  try:
      result = demjson.decode(string)
      return result
  except demjson.JSONDecodeError as e:
-     logger.debug("Error parsing json with demjson package: %s", e)
+     logger.debug(f"Error parsing json with demjson package: {e}")

  try:
      from json_repair import repair_json
@@ -43,5 +43,5 @@ def parse_json(string) -> dict:
      return result

  except Exception as e:
-     logger.debug("Error repairing json with json_repair package: %s", e)
+     logger.debug(f"Error repairing json with json_repair package: {e}")
      raise e
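A side note on the json_helpers change above (a general Python logging fact, not a claim made by the diff): swapping lazy %-style arguments for f-strings means the message is formatted eagerly, even when the record is filtered out by the log level, whereas %-style formatting is deferred until a handler actually emits the record. A minimal illustration:

    import logging

    logger = logging.getLogger("demo")
    logger.setLevel(logging.INFO)  # DEBUG records are filtered out

    class Expensive:
        def __str__(self) -> str:
            print("formatting happened")  # visible side effect
            return "expensive"

    e = Expensive()
    logger.debug("lazy: %s", e)    # filtered; __str__ never runs
    logger.debug(f"eager: {e}")    # filtered, but the f-string already called __str__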
mirix/log.py ADDED
@@ -0,0 +1,163 @@
+ import logging
+ import os
+ import sys
+ from logging.handlers import RotatingFileHandler
+ from pathlib import Path
+ from typing import Optional
+
+ from mirix.settings import settings
+
+
+ def get_log_level() -> int:
+     """Get the configured log level."""
+     if settings.debug:
+         return logging.DEBUG
+
+     # Map string level to logging constant
+     level_map = {
+         "DEBUG": logging.DEBUG,
+         "INFO": logging.INFO,
+         "WARNING": logging.WARNING,
+         "ERROR": logging.ERROR,
+         "CRITICAL": logging.CRITICAL,
+     }
+
+     return level_map.get(settings.log_level.upper(), logging.INFO)
+
+
+ selected_log_level = get_log_level()
+
+
+ def validate_log_file_path(log_file_path: Path) -> Path:
+     """
+     Validate that the log file path is writable.
+
+     Checks:
+     - Path is not a directory
+     - Parent directory exists or can be created
+     - We have write permissions to the directory
+
+     Args:
+         log_file_path: Path to the log file
+
+     Returns:
+         Path: Validated absolute path
+
+     Raises:
+         ValueError: If the path is invalid or not writable
+     """
+     # Convert to absolute path
+     log_file_path = log_file_path.expanduser().resolve()
+
+     # Check if path exists and is a directory (not allowed)
+     if log_file_path.exists() and log_file_path.is_dir():
+         raise ValueError(
+             f"Invalid log file path: '{log_file_path}' is a directory. "
+             f"MIRIX_LOG_FILE must be a file path, not a directory."
+         )
+
+     # Get parent directory
+     parent_dir = log_file_path.parent
+
+     # Try to create parent directory if it doesn't exist
+     try:
+         parent_dir.mkdir(parents=True, exist_ok=True)
+     except (OSError, PermissionError) as e:
+         raise ValueError(
+             f"Invalid log file path: Cannot create directory '{parent_dir}'. "
+             f"Error: {e}"
+         ) from e
+
+     # Check if parent directory is writable
+     if not os.access(parent_dir, os.W_OK):
+         raise ValueError(
+             f"Invalid log file path: Directory '{parent_dir}' is not writable. "
+             f"Check permissions for MIRIX_LOG_FILE."
+         )
+
+     # If file exists, check if it's writable
+     if log_file_path.exists() and not os.access(log_file_path, os.W_OK):
+         raise ValueError(
+             f"Invalid log file path: File '{log_file_path}' exists but is not writable. "
+             f"Check file permissions for MIRIX_LOG_FILE."
+         )
+
+     return log_file_path
+
+
+ def get_logger(name: Optional[str] = None) -> "logging.Logger":
+     """
+     Get the Mirix logger with configured handlers.
+
+     Log Level Configuration:
+     - Single log level (MIRIX_LOG_LEVEL) applies to ALL handlers
+     - Controlled by: MIRIX_LOG_LEVEL or MIRIX_DEBUG environment variables
+     - Same level used for both console and file output
+
+     Handler Configuration (Default Behavior):
+     - Console: ALWAYS enabled UNLESS explicitly disabled (MIRIX_LOG_TO_CONSOLE=false)
+     - File: Automatically enabled if MIRIX_LOG_FILE is set with a valid path
+     - Handlers determine WHERE logs go, NOT what level they use
+
+     Returns:
+         logging.Logger: Configured logger instance
+
+     Raises:
+         ValueError: If MIRIX_LOG_FILE is set but the path is invalid or not writable
+     """
+     logger = logging.getLogger("Mirix")
+
+     # Set the log level ONCE for the entire logger
+     # This single level applies to all handlers (console and file)
+     logger.setLevel(selected_log_level)
+
+     # Add handlers if not already configured
+     # Handlers control WHERE logs go (console/file), not WHAT level they use
+     if not logger.handlers:
+         # Create a single formatter for consistency across all handlers
+         formatter = logging.Formatter(
+             '%(asctime)s - %(name)s - %(levelname)s - %(message)s',
+             datefmt='%Y-%m-%d %H:%M:%S'
+         )
+
+         handlers_added = []
+
+         # Console handler - ALWAYS enabled unless explicitly disabled
+         # Console logging is the default behavior
+         if settings.log_to_console:
+             console_handler = logging.StreamHandler(sys.stdout)
+             console_handler.setFormatter(formatter)
+             logger.addHandler(console_handler)
+             handlers_added.append("console")
+
+         # File handler - ONLY enabled if MIRIX_LOG_FILE is configured
+         # Automatically enabled when MIRIX_LOG_FILE is set
+         if settings.log_file is not None:
+             # Validate and get absolute path
+             # This will raise ValueError if path is invalid
+             log_file = validate_log_file_path(Path(settings.log_file))
+
+             # Create rotating file handler
+             file_handler = RotatingFileHandler(
+                 log_file,
+                 maxBytes=settings.log_max_bytes,
+                 backupCount=settings.log_backup_count,
+             )
+             file_handler.setFormatter(formatter)
+             logger.addHandler(file_handler)
+             handlers_added.append(f"file ({log_file})")
+
+         # Log where logs are being written (if any handlers were added)
+         if handlers_added:
+             destinations = " and ".join(handlers_added)
+             log_level_name = logging.getLevelName(selected_log_level)
+             logger.info("Logging to: %s (level: %s)", destinations, log_level_name)
+         else:
+             # No handlers configured - add NullHandler to prevent warnings
+             # This only happens if console is explicitly disabled AND file is not configured
+             logger.addHandler(logging.NullHandler())
+
+     # Prevent propagation to root logger to avoid duplicate messages
+     logger.propagate = False
+
+     return logger
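A usage sketch for the new mirix/log.py (illustrative, not part of the diff): per the docstrings above, handler configuration is driven by MIRIX_* environment variables through mirix.settings, whose fields this diff lists (mirix/settings.py) but does not show. Assuming that wiring:

    import os

    # Assumed env-var wiring, per the docstrings above.
    os.environ["MIRIX_LOG_LEVEL"] = "DEBUG"
    os.environ["MIRIX_LOG_FILE"] = "/tmp/mirix.log"  # enables the rotating file handler

    # Import after setting the env vars so settings can pick them up;
    # selected_log_level is computed at module import time.
    from mirix.log import get_logger

    logger = get_logger()
    logger.debug("goes to stdout and /tmp/mirix.log")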