ms-enclave 0.0.0-py3-none-any.whl → 0.0.2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of ms-enclave might be problematic.
- ms_enclave/__init__.py +2 -2
- ms_enclave/cli/__init__.py +1 -0
- ms_enclave/cli/base.py +20 -0
- ms_enclave/cli/cli.py +27 -0
- ms_enclave/cli/start_server.py +84 -0
- ms_enclave/sandbox/__init__.py +27 -0
- ms_enclave/sandbox/boxes/__init__.py +16 -0
- ms_enclave/sandbox/boxes/base.py +270 -0
- ms_enclave/sandbox/boxes/docker_notebook.py +214 -0
- ms_enclave/sandbox/boxes/docker_sandbox.py +317 -0
- ms_enclave/sandbox/manager/__init__.py +11 -0
- ms_enclave/sandbox/manager/base.py +155 -0
- ms_enclave/sandbox/manager/http_manager.py +405 -0
- ms_enclave/sandbox/manager/local_manager.py +295 -0
- ms_enclave/sandbox/model/__init__.py +21 -0
- ms_enclave/sandbox/model/base.py +36 -0
- ms_enclave/sandbox/model/config.py +97 -0
- ms_enclave/sandbox/model/requests.py +57 -0
- ms_enclave/sandbox/model/responses.py +57 -0
- ms_enclave/sandbox/server/__init__.py +0 -0
- ms_enclave/sandbox/server/server.py +195 -0
- ms_enclave/sandbox/tools/__init__.py +4 -0
- ms_enclave/sandbox/tools/base.py +95 -0
- ms_enclave/sandbox/tools/sandbox_tool.py +46 -0
- ms_enclave/sandbox/tools/sandbox_tools/__init__.py +4 -0
- ms_enclave/sandbox/tools/sandbox_tools/file_operation.py +331 -0
- ms_enclave/sandbox/tools/sandbox_tools/notebook_executor.py +167 -0
- ms_enclave/sandbox/tools/sandbox_tools/python_executor.py +87 -0
- ms_enclave/sandbox/tools/sandbox_tools/shell_executor.py +63 -0
- ms_enclave/sandbox/tools/tool_info.py +141 -0
- ms_enclave/utils/__init__.py +1 -0
- ms_enclave/utils/json_schema.py +208 -0
- ms_enclave/utils/logger.py +170 -0
- ms_enclave/version.py +2 -2
- ms_enclave-0.0.2.dist-info/METADATA +366 -0
- ms_enclave-0.0.2.dist-info/RECORD +40 -0
- {ms_enclave-0.0.0.dist-info → ms_enclave-0.0.2.dist-info}/WHEEL +1 -1
- ms_enclave-0.0.2.dist-info/entry_points.txt +2 -0
- ms_enclave/run_server.py +0 -21
- ms_enclave-0.0.0.dist-info/METADATA +0 -329
- ms_enclave-0.0.0.dist-info/RECORD +0 -8
- {ms_enclave-0.0.0.dist-info → ms_enclave-0.0.2.dist-info}/licenses/LICENSE +0 -0
- {ms_enclave-0.0.0.dist-info → ms_enclave-0.0.2.dist-info}/top_level.txt +0 -0
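The larger added files are reproduced below as unified diffs. To check this page against the published wheels themselves, a minimal Python sketch follows; it assumes both wheels sit in the current directory under standard wheel names (e.g. fetched with `pip download ms-enclave==0.0.0 --no-deps` and `pip download ms-enclave==0.0.2 --no-deps`) and leniently decodes all members as text:

```python
# Minimal sketch: diff the member files of two wheels (wheels are zip archives).
import difflib
import zipfile

def wheel_texts(path: str) -> dict:
    """Map each member name in a wheel to its (leniently) decoded text."""
    with zipfile.ZipFile(path) as zf:
        return {name: zf.read(name).decode('utf-8', errors='replace') for name in zf.namelist()}

old = wheel_texts('ms_enclave-0.0.0-py3-none-any.whl')  # assumed filename
new = wheel_texts('ms_enclave-0.0.2-py3-none-any.whl')  # assumed filename

for name in sorted(set(old) | set(new)):
    a = old.get(name, '').splitlines(keepends=True)
    b = new.get(name, '').splitlines(keepends=True)
    for line in difflib.unified_diff(a, b, fromfile=f'0.0.0/{name}', tofile=f'0.0.2/{name}'):
        print(line, end='')
```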
ms_enclave/sandbox/tools/tool_info.py
ADDED
@@ -0,0 +1,141 @@
+import inspect
+from dataclasses import dataclass
+from typing import Any, Callable, Dict, List, Literal, Optional, TypeAlias, Union, get_args, get_type_hints
+
+from docstring_parser import Docstring, parse
+from pydantic import BaseModel, Field
+
+from ms_enclave.utils.json_schema import JSONSchema, JSONType, json_schema, python_type_to_json_type
+
+ToolParam: TypeAlias = JSONSchema
+"""Description of tool parameter in JSON Schema format."""
+
+
+class ToolParams(BaseModel):
+    """Description of tool parameters object in JSON Schema format."""
+
+    type: Literal['object'] = Field(default='object', description="Params type (always 'object')")
+    properties: Dict[str, ToolParam] = Field(default_factory=dict, description='Tool function parameters.')
+    required: List[str] = Field(default_factory=list, description='List of required fields.')
+    additionalProperties: bool = Field(
+        default=False, description='Are additional object properties allowed? (always `False`)'
+    )
+
+
+class ToolInfo(BaseModel):
+    """Specification of a tool (JSON Schema compatible).
+
+    If you are implementing a ModelAPI, most LLM libraries can
+    be passed this object (dumped to a dict) directly as a function
+    specification. For example, in the OpenAI provider:
+
+    ```python
+    ChatCompletionToolParam(
+        type="function",
+        function=tool.model_dump(exclude_none=True),
+    )
+    ```
+
+    In some cases the field names don't match up exactly. In that case
+    call `model_dump()` on the `parameters` field. For example, in the
+    Anthropic provider:
+
+    ```python
+    ToolParam(
+        name=tool.name,
+        description=tool.description,
+        input_schema=tool.parameters.model_dump(exclude_none=True),
+    )
+    ```
+    """
+
+    name: str = Field(description='Name of tool.')
+    description: str = Field(description='Short description of tool.')
+    parameters: ToolParams = Field(default_factory=ToolParams, description='JSON Schema of tool parameters object.')
+    options: Optional[Dict[str, object]] = Field(
+        default=None,
+        description=
+        'Optional property bag that can be used by the model provider to customize the implementation of the tool'
+    )
+
+
+def parse_tool_info(func: Callable[..., Any]) -> ToolInfo:
+    # tool may already have registry attributes w/ tool info
+
+    if (getattr(func, 'name', None) and getattr(func, 'description', None) and getattr(func, 'parameters', None)):
+        return ToolInfo(
+            name=func.name,
+            description=func.description,
+            parameters=func.parameters,
+        )
+
+    signature = inspect.signature(func)
+    type_hints = get_type_hints(func)
+    docstring = inspect.getdoc(func)
+    parsed_docstring: Optional[Docstring] = parse(docstring) if docstring else None
+
+    info = ToolInfo(name=func.__name__, description='')
+
+    for param_name, param in signature.parameters.items():
+        tool_param = ToolParam()
+
+        # Parse docstring
+        docstring_info = parse_docstring(docstring, param_name)
+
+        # Get type information from type annotations
+        if param_name in type_hints:
+            tool_param = json_schema(type_hints[param_name])
+        # as a fallback try to parse it from the docstring
+        # (this is minimally necessary for backwards compatibility
+        # with tools gen1 type parsing, which only used docstrings)
+        elif 'docstring_type' in docstring_info:
+            json_type = python_type_to_json_type(docstring_info['docstring_type'])
+            if json_type and (json_type in get_args(JSONType)):
+                tool_param = ToolParam(type=json_type)
+
+        # Get default value
+        if param.default is param.empty:
+            info.parameters.required.append(param_name)
+        else:
+            tool_param.default = param.default
+
+        # Add description from docstring
+        if 'description' in docstring_info:
+            tool_param.description = docstring_info['description']
+
+        # append the tool param
+        info.parameters.properties[param_name] = tool_param
+
+    # Add function description if available
+    if parsed_docstring:
+        if parsed_docstring.description:
+            info.description = parsed_docstring.description.strip()
+        elif parsed_docstring.long_description:
+            info.description = parsed_docstring.long_description.strip()
+        elif parsed_docstring.short_description:
+            info.description = parsed_docstring.short_description.strip()
+
+        # Add examples if available
+        if parsed_docstring.examples:
+            examples = '\n\n'.join([(example.description or '') for example in parsed_docstring.examples])
+            info.description = f'{info.description}\n\nExamples\n\n{examples}'
+
+    return info
+
+
+def parse_docstring(docstring: Optional[str], param_name: str) -> Dict[str, str]:
+    if not docstring:
+        return {}
+
+    parsed_docstring: Docstring = parse(docstring)
+
+    for param in parsed_docstring.params:
+        if param.arg_name == param_name:
+            schema: Dict[str, str] = {'description': param.description or ''}
+
+            if param.type_name:
+                schema['docstring_type'] = param.type_name
+
+            return schema
+
+    return {}
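To make the new API concrete, here is a hedged usage sketch for `parse_tool_info` (it assumes ms-enclave 0.0.2 is installed along with its `docstring_parser` and `pydantic` dependencies; the `add` function is purely illustrative):

```python
from ms_enclave.sandbox.tools.tool_info import parse_tool_info

def add(x: int, y: int = 0) -> int:
    """Add two integers.

    Args:
        x: First addend.
        y: Second addend.
    """
    return x + y

info = parse_tool_info(add)
print(info.name)                             # add
print(info.parameters.required)              # ['x'] (y has a default, so it is optional)
print(info.parameters.properties['x'].type)  # integer
print(info.model_dump(exclude_none=True))    # dict usable directly as an LLM function spec
```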
ms_enclave/utils/__init__.py
ADDED
@@ -0,0 +1 @@
+from .logger import get_logger
ms_enclave/utils/json_schema.py
ADDED
@@ -0,0 +1,208 @@
+import types
+import typing
+from copy import deepcopy
+from dataclasses import is_dataclass
+from datetime import date, datetime, time
+from enum import EnumMeta
+from typing import (
+    Any,
+    Dict,
+    List,
+    Literal,
+    Optional,
+    Set,
+    Tuple,
+    Type,
+    Union,
+    cast,
+    get_args,
+    get_origin,
+    get_type_hints,
+    is_typeddict,
+)
+
+from pydantic import BaseModel, Field
+
+JSONType = Literal['string', 'integer', 'number', 'boolean', 'array', 'object', 'null']
+"""Valid types within JSON schema."""
+
+
+class JSONSchema(BaseModel):
+    """JSON Schema for type."""
+
+    type: Optional[JSONType] = Field(default=None)
+    """JSON type of tool parameter."""
+
+    format: Optional[str] = Field(default=None)
+    """Format of the parameter (e.g. date-time)."""
+
+    description: Optional[str] = Field(default=None)
+    """Parameter description."""
+
+    default: Any = Field(default=None)
+    """Default value for parameter."""
+
+    enum: Optional[List[Any]] = Field(default=None)
+    """Valid values for enum parameters."""
+
+    items: Optional['JSONSchema'] = Field(default=None)
+    """Valid type for array parameters."""
+
+    properties: Optional[Dict[str, 'JSONSchema']] = Field(default=None)
+    """Valid fields for object parameters."""
+
+    additionalProperties: Optional[Union['JSONSchema', bool]] = Field(default=None)
+    """Are additional properties allowed?"""
+
+    anyOf: Optional[List['JSONSchema']] = Field(default=None)
+    """Valid types for union parameters."""
+
+    required: Optional[List[str]] = Field(default=None)
+    """Required fields for object parameters."""
+
+
+def json_schema(t: Type[Any]) -> JSONSchema:
+    """Provide a JSON Schema for the specified type.
+
+    Schemas can be automatically inferred for a wide variety of
+    Python class types including Pydantic BaseModel, dataclasses,
+    and typed dicts.
+
+    Args:
+        t: Python type
+
+    Returns:
+        JSON Schema for type.
+    """
+    origin = get_origin(t)
+    args = get_args(t)
+
+    if origin is None:
+        if t is int:
+            return JSONSchema(type='integer')
+        elif t is float:
+            return JSONSchema(type='number')
+        elif t is str:
+            return JSONSchema(type='string')
+        elif t is bool:
+            return JSONSchema(type='boolean')
+        elif t is datetime:
+            return JSONSchema(type='string', format='date-time')
+        elif t is date:
+            return JSONSchema(type='string', format='date')
+        elif t is time:
+            return JSONSchema(type='string', format='time')
+        elif t is list or t is set:
+            return JSONSchema(type='array', items=JSONSchema())
+        elif t is dict:
+            return JSONSchema(type='object', additionalProperties=JSONSchema())
+        elif (is_dataclass(t) or is_typeddict(t) or (isinstance(t, type) and issubclass(t, BaseModel))):
+            return cls_json_schema(t)
+        elif isinstance(t, EnumMeta):
+            return JSONSchema(enum=[item.value for item in t])
+        elif t is type(None):
+            return JSONSchema(type='null')
+        else:
+            return JSONSchema()
+    elif (origin is list or origin is List or origin is tuple or origin is Tuple or origin is set or origin is Set):
+        return JSONSchema(type='array', items=json_schema(args[0]) if args else JSONSchema())
+    elif origin is dict or origin is Dict:
+        return JSONSchema(
+            type='object',
+            additionalProperties=json_schema(args[1]) if len(args) > 1 else JSONSchema(),
+        )
+    elif origin is Union or origin is types.UnionType:
+        return JSONSchema(anyOf=[json_schema(arg) for arg in args])
+    elif origin is Optional:
+        return JSONSchema(anyOf=[json_schema(arg) for arg in args] + [JSONSchema(type='null')])
+    elif origin is typing.Literal:
+        return JSONSchema(enum=list(args))
+
+    return JSONSchema()  # Default case if we can't determine the type
+
+
+def cls_json_schema(cls: Type[Any]) -> JSONSchema:
+    properties: Dict[str, JSONSchema] = {}
+    required: List[str] = []
+
+    if is_dataclass(cls):
+        fields = cls.__dataclass_fields__  # type: ignore
+        for name, field in fields.items():
+            properties[name] = json_schema(field.type)  # type: ignore
+            if field.default == field.default_factory:
+                required.append(name)
+    elif isinstance(cls, type) and issubclass(cls, BaseModel):
+        schema = cls.model_json_schema()
+        schema = resolve_schema_references(schema)
+        for name, prop in schema.get('properties', {}).items():
+            properties[name] = JSONSchema(**prop)
+        required = schema.get('required', [])
+    elif is_typeddict(cls):
+        annotations = get_type_hints(cls)
+        for name, type_hint in annotations.items():
+            properties[name] = json_schema(type_hint)
+            if name in cls.__required_keys__:
+                required.append(name)
+
+    return JSONSchema(
+        type='object',
+        properties=properties,
+        required=required if required else None,
+        additionalProperties=False,
+    )
+
+
+def python_type_to_json_type(python_type: Optional[str]) -> JSONType:
+    if python_type == 'str':
+        return 'string'
+    elif python_type == 'int':
+        return 'integer'
+    elif python_type == 'float':
+        return 'number'
+    elif python_type == 'bool':
+        return 'boolean'
+    elif python_type == 'list':
+        return 'array'
+    elif python_type == 'dict':
+        return 'object'
+    elif python_type == 'None':
+        return 'null'
+    elif python_type is None:
+        # treat 'unknown' as string as anything can be converted to string
+        return 'string'
+    else:
+        raise ValueError(f'Unsupported type: {python_type} for Python to JSON conversion.')
+
+
+def resolve_schema_references(schema: Dict[str, Any]) -> Dict[str, Any]:
+    """Resolves all $ref references in a JSON schema by inlining the definitions."""
+    schema = deepcopy(schema)
+    definitions = schema.pop('$defs', {})
+
+    def _resolve_refs(obj: Any) -> Any:
+        if isinstance(obj, dict):
+            if '$ref' in obj and obj['$ref'].startswith('#/$defs/'):
+                ref_key = obj['$ref'].split('/')[-1]
+                if ref_key in definitions:
+                    # Replace with a deep copy of the definition
+                    resolved = deepcopy(definitions[ref_key])
+                    # Process any nested references in the definition
+                    resolved = _resolve_refs(resolved)
+
+                    # Merge in the current object fields, which should take priority
+                    # This means that if you have e.g.
+                    # {"$ref": "#/$defs/SubType", "description": "subtype of type SubType"},
+                    # and SubType resolves to
+                    # {"description": "The SubType Class", "parameters": {"param1": {"type": "string"}}},
+                    # the final result will be:
+                    # {"description": "subtype of type SubType", "parameters": {"param1": {"type": "string"}}}
+                    return resolved | {k: o for k, o in obj.items() if k != '$ref'}
+
+            # Process all entries in the dictionary
+            return {k: _resolve_refs(v) for k, v in obj.items()}
+        elif isinstance(obj, list):
+            return [_resolve_refs(item) for item in obj]
+        else:
+            return obj
+
+    return cast(Dict[str, Any], _resolve_refs(schema))
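For orientation, a hedged sketch of `json_schema` inference on a container of dataclasses follows (the `Point` dataclass is illustrative; the expected values in comments trace the branches above):

```python
from dataclasses import dataclass
from typing import List, Optional

from ms_enclave.utils.json_schema import json_schema

@dataclass
class Point:
    x: float
    y: float
    label: Optional[str] = None  # has a default, so not required

schema = json_schema(List[Point])
print(schema.type)                        # array
print(schema.items.type)                  # object (Point handled by cls_json_schema)
print(schema.items.properties['x'].type)  # number
print(schema.items.required)              # ['x', 'y'] (label is optional)
```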
ms_enclave/utils/logger.py
ADDED
@@ -0,0 +1,170 @@
+# Copyright (c) Alibaba, Inc. and its affiliates.
+import importlib.util
+import logging
+import os
+import sys
+import threading
+from types import MethodType
+from typing import Optional
+
+init_loggers = {}
+
+# ANSI color helpers for levelname coloring in TTY streams
+RESET = '\033[0m'
+LEVEL_COLORS = {
+    'DEBUG': '\033[34m',  # Blue
+    'INFO': '\033[32m',  # Green
+    'WARNING': '\033[33m',  # Yellow
+    'ERROR': '\033[31m',  # Red
+    'CRITICAL': '\033[35m',  # Magenta
+}
+
+logger_format = logging.Formatter('[%(levelname)s:%(name)s] %(message)s')
+
+info_set = set()
+warning_set = set()
+_once_lock = threading.Lock()
+
+
+class ColorFormatter(logging.Formatter):
+    """Formatter that colors only the levelname for TTY streams."""
+
+    def __init__(self, fmt: str, datefmt: Optional[str] = None, style: str = '%', use_color: bool = True) -> None:
+        super().__init__(fmt=fmt, datefmt=datefmt, style=style)
+        self.use_color = use_color
+
+    def format(self, record: logging.LogRecord) -> str:
+        original_levelname = record.levelname
+        try:
+            if self.use_color:
+                color = LEVEL_COLORS.get(record.levelname, '')
+                if color:
+                    record.levelname = f'{color}{record.levelname}{RESET}'
+            return super().format(record)
+        finally:
+            record.levelname = original_levelname
+
+
+def _should_use_color(stream) -> bool:
+    """Decide if we should use colors for a given stream based on TTY and env."""
+    # Respect NO_COLOR to disable, FORCE_COLOR or LOG_COLOR=1 to force enable
+    if os.getenv('NO_COLOR'):
+        return False
+    if os.getenv('FORCE_COLOR') or os.getenv('LOG_COLOR') == '1':
+        return True
+    try:
+        return hasattr(stream, 'isatty') and stream.isatty()
+    except Exception:
+        return False
+
+
+def info_once(self: logging.Logger, msg: str, *args, **kwargs) -> None:
+    hash_id = kwargs.pop('hash_id', msg)
+    with _once_lock:
+        if hash_id in info_set:
+            return
+        info_set.add(hash_id)
+    self.info(msg, *args, **kwargs)
+
+
+def warning_once(self: logging.Logger, msg: str, *args, **kwargs) -> None:
+    hash_id = kwargs.pop('hash_id', msg)
+    with _once_lock:
+        if hash_id in warning_set:
+            return
+        warning_set.add(hash_id)
+    self.warning(msg, *args, **kwargs)
+
+
+def _update_handler_levels(logger: logging.Logger, log_level: int) -> None:
+    """Set all handler levels to the given log level."""
+    for handler in logger.handlers:
+        handler.setLevel(log_level)
+
+
+def get_logger(log_file: Optional[str] = None, log_level: Optional[int] = None, file_mode: str = 'w'):
+    """Get project logger configured with colored console output and optional file output.
+
+    Args:
+        log_file: Log filename. If specified, a FileHandler will be added to the logger.
+        log_level: Logging level. If None, resolve from env LOG_LEVEL (default INFO).
+        file_mode: Mode to open the log file if log_file is provided (default 'w').
+    """
+    if log_level is None:
+        env_level = os.getenv('LOG_LEVEL', 'INFO').upper()
+        log_level = getattr(logging, env_level, logging.INFO)
+
+    logger_name = __name__.split('.')[0]
+    logger = logging.getLogger(logger_name)
+    logger.propagate = False
+
+    # If logger is already initialized, just ensure file handler and update handler levels.
+    if logger_name in init_loggers:
+        add_file_handler_if_needed(logger, log_file, file_mode, log_level)
+        _update_handler_levels(logger, log_level)
+        return logger
+
+    # Handle duplicate logs to the console (PyTorch DDP root StreamHandler quirk)
+    for handler in logger.root.handlers:
+        if isinstance(handler, logging.StreamHandler):
+            handler.setLevel(logging.ERROR)
+
+    # Console handler with colorized levelname when appropriate
+    stream_handler = logging.StreamHandler(stream=sys.stderr)
+    use_color = _should_use_color(getattr(stream_handler, 'stream', sys.stderr))
+    color_fmt = ColorFormatter('[%(levelname)s:%(name)s] %(message)s', use_color=use_color)
+    stream_handler.setFormatter(color_fmt)
+    stream_handler.setLevel(log_level)
+    logger.addHandler(stream_handler)
+
+    # Optional file handler (no color)
+    if log_file is not None:
+        file_handler = logging.FileHandler(log_file, file_mode)
+        file_handler.setFormatter(logger_format)
+        file_handler.setLevel(log_level)
+        logger.addHandler(file_handler)
+
+    logger.setLevel(log_level)
+    init_loggers[logger_name] = True
+    logger.info_once = MethodType(info_once, logger)
+    logger.warning_once = MethodType(warning_once, logger)
+    return logger
+
+
+logger = get_logger()
+
+
+def add_file_handler_if_needed(logger: logging.Logger, log_file: Optional[str], file_mode: str, log_level: int) -> None:
+    """Attach a FileHandler for the given log_file if not already present.
+
+    Ensures:
+    - Only one FileHandler per log file path.
+    - FileHandler uses the standard, uncolored formatter.
+    - FileHandler level matches the requested log_level.
+    """
+    if log_file is None:
+        return
+
+    # Only worker 0 writes logs when torch DDP is present
+    if importlib.util.find_spec('torch') is not None:
+        is_worker0 = int(os.getenv('LOCAL_RANK', -1)) in {-1, 0}
+    else:
+        is_worker0 = True
+
+    if not is_worker0:
+        return
+
+    abs_path = os.path.abspath(log_file)
+    for handler in logger.handlers:
+        if isinstance(handler, logging.FileHandler):
+            # If a handler is already logging to the same file, just update it
+            if getattr(handler, 'baseFilename', None) == abs_path:
+                handler.setFormatter(logger_format)
+                handler.setLevel(log_level)
+                return
+
+    # Add a new file handler for this log file
+    file_handler = logging.FileHandler(abs_path, file_mode)
+    file_handler.setFormatter(logger_format)
+    file_handler.setLevel(log_level)
+    logger.addHandler(file_handler)
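A hedged usage sketch for the logging helpers (the log file name is illustrative; `info_once`/`warning_once` deduplicate by message, or by an explicit `hash_id`):

```python
from ms_enclave.utils import get_logger

# Reconfigures the already-initialized package logger and attaches a file handler.
logger = get_logger(log_file='enclave.log')
logger.info('sandbox started')

logger.info_once('config loaded')   # emitted
logger.info_once('config loaded')   # suppressed: same message already logged once
logger.warning_once('legacy option in use', hash_id='legacy-opt')  # dedup key can differ from the message
```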
ms_enclave/version.py
CHANGED
@@ -1,2 +1,2 @@
-__version__ = '0.0.0'
-__release_date__ = '2025-
+__version__ = '0.0.2'
+__release_date__ = '2025-10-30 12:00:00'