ic-python-logging 0.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ic_python_logging/__init__.py +38 -0
- ic_python_logging/_cdk.py +36 -0
- ic_python_logging/_handler.py +407 -0
- ic_python_logging/cli.py +230 -0
- ic_python_logging-0.3.0.dist-info/METADATA +193 -0
- ic_python_logging-0.3.0.dist-info/RECORD +10 -0
- ic_python_logging-0.3.0.dist-info/WHEEL +5 -0
- ic_python_logging-0.3.0.dist-info/entry_points.txt +2 -0
- ic_python_logging-0.3.0.dist-info/licenses/LICENSE +21 -0
- ic_python_logging-0.3.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
# Debug variable storage functions
|
|
2
|
+
# New in-memory logging functions
|
|
3
|
+
from ._handler import Level # Enum for log levels
|
|
4
|
+
from ._handler import LogEntry # Log entry data class
|
|
5
|
+
from ._handler import SimpleLogger # The logger class itself
|
|
6
|
+
from ._handler import clear_logs # Function to clear all logs from memory
|
|
7
|
+
from ._handler import disable_logging # Function to disable all logging
|
|
8
|
+
from ._handler import disable_memory_logging # Function to disable in-memory logging
|
|
9
|
+
from ._handler import enable_logging # Function to re-enable logging
|
|
10
|
+
from ._handler import enable_memory_logging # Function to enable in-memory logging
|
|
11
|
+
from ._handler import get_logger # Function to get a named logger
|
|
12
|
+
from ._handler import get_logs # Function to retrieve logs from memory
|
|
13
|
+
from ._handler import list_vars # Function to list all saved variables
|
|
14
|
+
from ._handler import load_var # Function to load a saved variable
|
|
15
|
+
from ._handler import logger # Default logger for backwards compatibility
|
|
16
|
+
from ._handler import save_var # Function to save a variable for debugging
|
|
17
|
+
from ._handler import set_log_level # Function to set log level for one or all loggers
|
|
18
|
+
from ._handler import set_max_log_entries # Function to set maximum log storage size
|
|
19
|
+
from ._handler import ( # Function to check memory logging status
|
|
20
|
+
is_memory_logging_enabled,
|
|
21
|
+
)
|
|
22
|
+
|
|
23
|
+
# New canister query function for exposing logs
|
|
24
|
+
try:
|
|
25
|
+
from ._handler import PublicLogEntry # Public log entry type for canister queries
|
|
26
|
+
from ._handler import ( # Query function to expose logs via canister query
|
|
27
|
+
get_canister_logs,
|
|
28
|
+
)
|
|
29
|
+
except ImportError:
|
|
30
|
+
# If CDK isn't available, these imports will fail
|
|
31
|
+
# This allows the library to be used in non-IC environments
|
|
32
|
+
pass
|
|
33
|
+
|
|
34
|
+
# This allows imports like:
|
|
35
|
+
# from ic_python_logging import logger, get_logger, set_log_level
|
|
36
|
+
# from ic_python_logging import save_var, load_var, list_vars
|
|
37
|
+
# from ic_python_logging import get_logs, clear_logs, set_max_log_entries, enable_memory_logging, disable_memory_logging
|
|
38
|
+
# from ic_python_logging import PublicLogEntry, get_canister_logs
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
"""CDK compatibility layer.
|
|
2
|
+
|
|
3
|
+
This module centralizes all imports from the Internet Computer CDK (currently Basilisk).
|
|
4
|
+
To switch CDKs, only this file needs to be modified.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
HAS_CDK = False
|
|
8
|
+
|
|
9
|
+
try:
|
|
10
|
+
from basilisk import ic # noqa: F401
|
|
11
|
+
|
|
12
|
+
# Verify ic.print actually works (we might be imported but not in IC env)
|
|
13
|
+
try:
|
|
14
|
+
ic.print("")
|
|
15
|
+
IN_IC_ENVIRONMENT = True
|
|
16
|
+
except Exception:
|
|
17
|
+
IN_IC_ENVIRONMENT = False
|
|
18
|
+
|
|
19
|
+
HAS_CDK = True
|
|
20
|
+
|
|
21
|
+
except ImportError:
|
|
22
|
+
ic = None # type: ignore
|
|
23
|
+
IN_IC_ENVIRONMENT = False
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def _import_types():
|
|
27
|
+
"""Import CDK types for query function definitions.
|
|
28
|
+
|
|
29
|
+
Returns a tuple of (Opt, Record, Vec, nat, query) or None if CDK unavailable.
|
|
30
|
+
"""
|
|
31
|
+
try:
|
|
32
|
+
from basilisk import Opt, Record, Vec, nat, query
|
|
33
|
+
|
|
34
|
+
return Opt, Record, Vec, nat, query
|
|
35
|
+
except ImportError:
|
|
36
|
+
return None
|
|
@@ -0,0 +1,407 @@
|
|
|
1
|
+
# Simple custom logger that doesn't use Python's logging module
|
|
2
|
+
# to avoid process ID access which is unsupported in IC environment
|
|
3
|
+
|
|
4
|
+
import json
|
|
5
|
+
import pickle
|
|
6
|
+
import sys
|
|
7
|
+
import time
|
|
8
|
+
from collections import deque
|
|
9
|
+
from dataclasses import dataclass
|
|
10
|
+
from enum import IntEnum
|
|
11
|
+
from typing import Any, Callable, Deque, Dict, List, Optional, Union
|
|
12
|
+
|
|
13
|
+
# Global settings
_LOGGING_ENABLED = True  # Master switch: when False, printing AND memory storage are skipped
_MEMORY_LOGGING_ENABLED = True  # Controls whether logs are stored in memory
_LOGGERS: Dict[str, "SimpleLogger"] = {}  # Registry of named loggers (see get_logger)

# Debug variable storage (see save_var / load_var / list_vars)
_DEBUG_VARS: Dict[str, Any] = {}

# In-memory log storage
_MAX_LOG_ENTRIES = 1000  # Maximum number of log entries to keep in memory
# Ring buffer: deque's maxlen discards the oldest entry when full.
_LOG_STORAGE: Deque["LogEntry"] = deque(maxlen=_MAX_LOG_ENTRIES)
_LOG_SEQUENCE_COUNTER = 0  # Global counter for generating unique log entry IDs
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
# Define Level enum
|
|
28
|
+
class Level(IntEnum):
    """Log severity levels, ordered so that DEBUG < INFO < ... < CRITICAL."""

    DEBUG = 10
    INFO = 20
    WARNING = 30
    ERROR = 40
    CRITICAL = 50

    def __str__(self) -> str:
        """Render as the bare level name, e.g. ``"INFO"``."""
        return self.name
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
@dataclass
class LogEntry:
    """A single log record held in the in-memory ring buffer."""

    # NOTE(review): seconds (time.time) in plain environments, but the IC
    # path stores ic.time() — presumably nanoseconds; confirm before
    # comparing timestamps across environments.
    timestamp: float
    level: Level  # severity (see Level enum)
    logger_name: str  # name of the logger that emitted the entry
    message: str  # already-formatted message text
    id: int  # monotonically increasing unique identifier

    def to_dict(self) -> Dict[str, Any]:
        """Serialize this entry to a plain dict (level rendered as its name)."""
        return dict(
            timestamp=self.timestamp,
            level=str(self.level),
            logger_name=self.logger_name,
            message=self.message,
            id=self.id,
        )
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
# Safe fallback log sink; may be rebound to an IC-aware version further below.
def _print_log(level: Level, message: str, logger_name: str) -> None:
    # NOTE(review): when logging is globally disabled this early return also
    # skips the in-memory store below — "regardless of print settings" only
    # means storage does not depend on WHERE the text is printed.
    if not _LOGGING_ENABLED:
        return
    print(f"[{level}] [{logger_name}] {message}")
    # Store in memory regardless of print settings
    _store_log_entry(level, message, logger_name)
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
def _store_log_entry(level: Level, message: str, logger_name: str) -> None:
    """Append one entry to the ring buffer; no-op when memory logging is off."""
    global _LOG_SEQUENCE_COUNTER

    if not _MEMORY_LOGGING_ENABLED:
        return

    # Each stored entry gets the next id from the global sequence.
    _LOG_SEQUENCE_COUNTER += 1
    _LOG_STORAGE.append(
        LogEntry(
            timestamp=time.time(),
            level=level,
            logger_name=logger_name,
            message=message,
            id=_LOG_SEQUENCE_COUNTER,
        )
    )
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
# Now try to use the IC-specific functionality if available and working.
_in_ic_environment = False
try:
    # Import IC functionality via the CDK compatibility layer.
    from ._cdk import HAS_CDK, IN_IC_ENVIRONMENT, ic

    if not HAS_CDK:
        raise ImportError("CDK not available")

    # Now safely test if ic.print actually works.
    try:
        ic.print("Logger initializing")

        # If we get here, ic.print works!
        _in_ic_environment = True

        # Override the print sink with an IC-specific version.
        def _ic_print_log(level: Level, message: str, logger_name: str) -> None:
            """IC replacement for _print_log: routes output through ic.print."""
            if not _LOGGING_ENABLED:
                return
            ic.print(f"[{level}] [{logger_name}] {message}")
            # Store in memory regardless of print settings
            _store_log_entry(level, message, logger_name)

        # IC-specific version of store_log_entry using ic.time().
        def _ic_store_log_entry(level: Level, message: str, logger_name: str) -> None:
            """Store a log entry in the memory buffer if memory logging is enabled.

            NOTE(review): ic.time() presumably returns nanoseconds (the CLI
            divides timestamps by 1e9) while time.time() returns seconds —
            confirm before mixing entries from both environments.
            """
            if not _MEMORY_LOGGING_ENABLED:
                return

            global _LOG_SEQUENCE_COUNTER
            _LOG_SEQUENCE_COUNTER += 1

            entry = LogEntry(
                timestamp=ic.time(),
                level=level,
                logger_name=logger_name,
                message=message,
                id=_LOG_SEQUENCE_COUNTER,
            )
            _LOG_STORAGE.append(entry)

        # Replace the regular functions with the IC versions.
        _print_log = _ic_print_log
        _store_log_entry = _ic_store_log_entry

    except Exception:
        # BUGFIX: was a bare `except:`, which would also swallow
        # KeyboardInterrupt/SystemExit. If ic.print raises, keep the
        # plain-print fallbacks defined above.
        pass

except ImportError:
    # If CDK isn't available, we're definitely not in an IC environment.
    print("Note: IC CDK not available, using regular print for logging")
|
|
142
|
+
|
|
143
|
+
|
|
144
|
+
class SimpleLogger:
    """Minimal named logger that avoids Python's logging module.

    The stdlib logging module accesses the process ID, which is unsupported
    in the IC environment; this class only formats and forwards messages.
    """

    def __init__(self, name: str = "ic_python_logger", level: Level = Level.INFO):
        self.name = name
        self.level = level

    def set_level(self, level: Level) -> None:
        """Change the minimum level this logger will emit."""
        self.level = level

    def is_enabled_for(self, level: Level) -> bool:
        """Return True when *level* is at or above this logger's threshold."""
        return int(level) >= int(self.level)

    def log(self, level: Level, message: str) -> None:
        """Emit *message* at *level* if it passes the threshold."""
        if self.is_enabled_for(level):
            _print_log(level, message, self.name)

    def debug(self, message: str) -> None:
        self.log(Level.DEBUG, message)

    def info(self, message: str) -> None:
        self.log(Level.INFO, message)

    def warning(self, message: str) -> None:
        self.log(Level.WARNING, message)

    def warn(self, message: str) -> None:
        """Alias for warning()."""
        self.warning(message)

    def error(self, message: str) -> None:
        self.log(Level.ERROR, message)

    def critical(self, message: str) -> None:
        self.log(Level.CRITICAL, message)
|
|
179
|
+
|
|
180
|
+
|
|
181
|
+
# Public API functions
|
|
182
|
+
def get_logger(name: str = "ic_python_logging") -> SimpleLogger:
    """Return the logger registered under *name*, creating it on first use."""
    try:
        return _LOGGERS[name]
    except KeyError:
        instance = SimpleLogger(name)
        _LOGGERS[name] = instance
        return instance
|
|
187
|
+
|
|
188
|
+
|
|
189
|
+
def set_log_level(level: Level, logger_name: Optional[str] = None) -> None:
    """Apply *level* to one named logger, or to every registered logger.

    Args:
        level: The log level to apply (e.g. Level.DEBUG, Level.INFO).
        logger_name: Name of a specific logger, or None for all loggers.

    Note:
        A name that was never passed to get_logger() is silently ignored;
        the level is not remembered for loggers created later.
    """
    if logger_name is None:
        # Apply to every registered logger.
        for registered in _LOGGERS.values():
            registered.set_level(level)
    elif logger_name in _LOGGERS:
        _LOGGERS[logger_name].set_level(level)
|
|
203
|
+
|
|
204
|
+
|
|
205
|
+
def disable_logging() -> None:
    """Globally silence all logging (output and in-memory capture alike)."""
    global _LOGGING_ENABLED
    _LOGGING_ENABLED = False
|
|
209
|
+
|
|
210
|
+
|
|
211
|
+
def enable_logging() -> None:
    """Undo disable_logging(): logging resumes globally."""
    global _LOGGING_ENABLED
    _LOGGING_ENABLED = True
|
|
215
|
+
|
|
216
|
+
|
|
217
|
+
# Debug variable storage functions
|
|
218
|
+
def save_var(tag: str, obj: Any) -> None:
    """Stash *obj* under *tag* so it can be inspected later via load_var().

    Args:
        tag: Identifier used to retrieve the object later.
        obj: Any Python object to keep around for debugging.
    """
    _DEBUG_VARS[tag] = obj
|
|
226
|
+
|
|
227
|
+
|
|
228
|
+
def load_var(tag: str) -> Any:
    """Retrieve a previously stored debug variable by its tag.

    Args:
        tag: The identifier used when saving the variable.

    Returns:
        The stored object, or None if no variable was saved under *tag*.
    """
    # dict.get does the membership test and lookup in one step (the
    # original did two separate lookups).
    return _DEBUG_VARS.get(tag)
|
|
240
|
+
|
|
241
|
+
|
|
242
|
+
def list_vars() -> Dict[str, str]:
    """List all stored debug variables with their type names.

    Returns:
        A dictionary mapping variable tags to their value's type name.
    """
    # type(obj).__name__ is already a str; the original wrapped it in a
    # redundant str() call.
    return {tag: type(obj).__name__ for tag, obj in _DEBUG_VARS.items()}
|
|
249
|
+
|
|
250
|
+
|
|
251
|
+
# Default module-level logger for backwards compatibility
# (importable as `from ic_python_logging import logger`).
logger = get_logger()
|
|
253
|
+
|
|
254
|
+
|
|
255
|
+
# In-memory log retrieval functions
|
|
256
|
+
def get_logs(
    from_entry: Optional[int] = None,
    max_entries: Optional[int] = None,
    min_level: Optional[Level] = None,
    logger_name: Optional[str] = None,
) -> List[Dict[str, Any]]:
    """Retrieve logs from memory with optional filtering.

    Args:
        from_entry: Only include entries whose id is >= this value.
        max_entries: Cap on the number of entries returned; when set, the
            MOST RECENT matching entries are kept.
        min_level: Minimum log level to include.
        logger_name: Only include entries from this logger.

    Returns:
        Matching log entries as dictionaries, oldest first.
    """

    def _matches(entry: "LogEntry") -> bool:
        # One predicate per filter; any failed filter rejects the entry.
        if from_entry is not None and entry.id < from_entry:
            return False
        if min_level is not None and entry.level < min_level:
            return False
        if logger_name is not None and entry.logger_name != logger_name:
            return False
        return True

    # Snapshot the deque so filtering never races with new appends.
    selected = [entry for entry in list(_LOG_STORAGE) if _matches(entry)]

    # Keep only the newest entries when a cap was requested.
    if max_entries is not None:
        selected = selected[-max_entries:]

    return [entry.to_dict() for entry in selected]
|
|
289
|
+
|
|
290
|
+
|
|
291
|
+
def clear_logs() -> None:
    """Drop every entry currently held in the in-memory log buffer."""
    _LOG_STORAGE.clear()
|
|
294
|
+
|
|
295
|
+
|
|
296
|
+
def disable_memory_logging() -> None:
    """Stop recording log entries into the in-memory buffer."""
    global _MEMORY_LOGGING_ENABLED
    _MEMORY_LOGGING_ENABLED = False
|
|
300
|
+
|
|
301
|
+
|
|
302
|
+
def enable_memory_logging() -> None:
    """Resume recording log entries into the in-memory buffer."""
    global _MEMORY_LOGGING_ENABLED
    _MEMORY_LOGGING_ENABLED = True
|
|
306
|
+
|
|
307
|
+
|
|
308
|
+
def is_memory_logging_enabled() -> bool:
    """Report whether entries are currently being stored in memory."""
    return _MEMORY_LOGGING_ENABLED
|
|
311
|
+
|
|
312
|
+
|
|
313
|
+
def set_max_log_entries(max_entries: int) -> None:
    """Resize the in-memory log ring buffer.

    Args:
        max_entries: New capacity; values below 1 are clamped to 1.
    """
    global _LOG_STORAGE, _MAX_LOG_ENTRIES

    _MAX_LOG_ENTRIES = max(1, max_entries)  # Ensure at least 1 entry

    # Re-home existing entries, oldest first by timestamp. Initializing a
    # deque from an iterable with maxlen keeps only the RIGHTMOST items,
    # i.e. the newest logs survive when capacity shrinks.
    surviving = sorted(_LOG_STORAGE, key=lambda entry: entry.timestamp)
    _LOG_STORAGE = deque(surviving, maxlen=_MAX_LOG_ENTRIES)
|
|
338
|
+
|
|
339
|
+
|
|
340
|
+
try:
    # Add CDK imports for the query function via compatibility layer.
    _types = None
    try:
        from ._cdk import _import_types

        _types = _import_types()
    except ImportError:
        pass

    if _types is None:
        raise ImportError("CDK types not available")

    Opt, Record, Vec, nat, query = _types

    # Define a public-facing LogEntry type for queries
    class PublicLogEntry(Record):
        """Public-facing log entry type for canister queries"""

        timestamp: nat
        level: str
        logger_name: str
        message: str
        id: nat

    @query
    def get_canister_logs(
        from_entry: Opt[int] = None,
        max_entries: Opt[int] = None,
        min_level: Opt[str] = None,
        logger_name: Opt[str] = None,
    ) -> Vec[PublicLogEntry]:
        """Query function to retrieve logs from the canister

        This function can be called externally via a canister query call.

        Args:
            from_entry: Only return entries with id >= this value
            max_entries: Maximum number of entries to return
            min_level: Minimum log level name to include (e.g. "ERROR")
            logger_name: Filter logs to a specific logger

        Returns:
            List of log entries
        """
        # Use the existing get_logs function.
        # NOTE(review): Level[min_level] raises KeyError for an unknown
        # level name instead of rejecting it gracefully — confirm intended.
        logs = get_logs(
            from_entry=from_entry,
            max_entries=max_entries,
            min_level=None if min_level is None else Level[min_level],
            logger_name=logger_name,
        )

        # Convert the plain dicts into the Candid-facing record type.
        return [
            PublicLogEntry(
                timestamp=log["timestamp"],
                level=log["level"],
                logger_name=log["logger_name"],
                message=log["message"],
                id=log["id"],
            )
            for log in logs
        ]

except ImportError:
    # If CDK isn't available, we don't expose the query function.
    # This allows the library to be used in non-IC environments.
    pass
|
ic_python_logging/cli.py
ADDED
|
@@ -0,0 +1,230 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
|
|
3
|
+
import argparse
|
|
4
|
+
import json
|
|
5
|
+
import os
|
|
6
|
+
import select
|
|
7
|
+
import subprocess
|
|
8
|
+
import sys
|
|
9
|
+
import time
|
|
10
|
+
from datetime import datetime
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def parse_args():
    """Build and evaluate the command-line interface for the log viewer."""
    parser = argparse.ArgumentParser(description="Query and display canister logs")
    parser.add_argument("canister_id", help="Canister ID to query logs from")

    # Filtering options
    parser.add_argument("--tail", type=int, help="Show only the last N logs")
    parser.add_argument(
        "--level",
        choices=["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"],
        help="Minimum log level to display",
    )
    parser.add_argument("--name", help="Filter logs by logger name")

    # Follow-mode options
    parser.add_argument(
        "--follow", action="store_true", help="Follow logs (poll every 5 seconds)"
    )
    parser.add_argument(
        "--interval",
        type=int,
        default=5,
        help="Polling interval in seconds for follow mode",
    )

    # Network selection: --network and --ic are mutually exclusive.
    net = parser.add_mutually_exclusive_group()
    net.add_argument("--network", help="Network URL (e.g., http://localhost:4943)")
    net.add_argument("--ic", action="store_true", help="Use the IC mainnet")

    return parser.parse_args()
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
def _candid_opt(value, quote=False):
    """Render an optional argument as a Candid literal ('null' when absent)."""
    if value is None:
        return "null"
    return f'(opt "{value}")' if quote else f"(opt {value})"


def get_logs(
    canister_id, tail=None, level=None, network=None, from_entry=None, name=None
):
    """Query log entries from a canister via `dfx canister call`.

    Args:
        canister_id: ID of the canister to query
        tail: Maximum number of entries to return (optional)
        level: Minimum log level to include (optional)
        network: Network to query (optional)
        from_entry: Start retrieving logs from this ID (optional)
        name: Filter logs by logger name (optional)

    Returns:
        List of log entries as dictionaries (parsed from dfx's JSON output).

    Exits:
        With status 1 if dfx fails or returns unparseable JSON.
    """
    # Positional order must match the canister's get_canister_logs signature:
    # from_entry, max_entries, min_level, logger_name.
    # NOTE(review): `name` is interpolated into a quoted Candid string; a name
    # containing a double quote would break the argument encoding.
    query_args = ", ".join(
        [
            _candid_opt(from_entry),
            _candid_opt(tail),
            _candid_opt(level, quote=True),
            _candid_opt(name, quote=True),
        ]
    )

    # Call dfx to query the logs with JSON output.
    cmd = ["dfx", "canister", "call", "--output", "json"]

    # Add network option if specified.
    if network is not None:
        cmd.extend(["--network", network])

    # Add canister ID and method.
    cmd.extend([canister_id, "get_canister_logs", f"({query_args})"])

    try:
        result = subprocess.run(cmd, capture_output=True, text=True, check=True)
        return json.loads(result.stdout)
    except subprocess.CalledProcessError as e:
        print(f"Error querying logs: {e.stderr}", file=sys.stderr)
        sys.exit(1)
    except json.JSONDecodeError as e:
        print(f"Error parsing JSON response: {e}", file=sys.stderr)
        sys.exit(1)
|
|
121
|
+
|
|
122
|
+
|
|
123
|
+
def format_log(log_entry):
    """Render one log dict as a colorized single line for the terminal."""
    # Timestamps arrive in nanoseconds; show them as local wall-clock time.
    try:
        ts_ns = int(log_entry["timestamp"])
        timestamp = datetime.fromtimestamp(ts_ns / 1e9).strftime("%Y-%m-%d %H:%M:%S")
    except (ValueError, KeyError):
        timestamp = "Unknown time"

    level = log_entry.get("level", "UNKNOWN")
    name = log_entry.get("logger_name", "unknown")
    message = log_entry.get("message", "")
    entry_id = log_entry.get("id", "unknown")  # renamed: don't shadow builtin `id`

    # ANSI color per level; unknown levels render uncolored.
    colors = {
        "DEBUG": "\033[94m",  # Blue
        "INFO": "\033[92m",  # Green
        "WARNING": "\033[93m",  # Yellow
        "ERROR": "\033[91m",  # Red
        "CRITICAL": "\033[91m\033[1m",  # Bold Red
    }
    reset = "\033[0m"
    color = colors.get(level, "")

    return f"{timestamp} [{entry_id}] {color}[{level}]{reset} [{name}] {message}"
|
|
149
|
+
|
|
150
|
+
|
|
151
|
+
def main():
    """CLI entry point: one-shot log query, or follow (poll) mode."""
    # Set stdout to line buffering mode to ensure timely output when piped
    import io

    sys.stdout = io.TextIOWrapper(sys.stdout.buffer, line_buffering=True)

    args = parse_args()

    # Determine network option (--ic and --network are mutually exclusive)
    network = None
    if args.ic:
        network = "ic"
    elif args.network:
        network = args.network

    if not args.follow:
        # One-time query
        logs = get_logs(
            args.canister_id,
            tail=args.tail,
            level=args.level,
            network=network,
            name=args.name,
        )

        for log in logs:
            print(format_log(log), flush=True)
        return

    # Follow mode: poll every --interval seconds; pressing Enter forces an
    # immediate poll. NOTE(review): select() on sys.stdin is POSIX-only —
    # this loop presumably fails on Windows; confirm supported platforms.
    try:
        last_poll_time = 0
        last_log_id = 0
        first_poll = True

        while True:
            current_time = time.time()
            if (
                first_poll
                or current_time - last_poll_time >= args.interval
                or select.select([sys.stdin], [], [], 0)[0]
            ):
                if select.select([sys.stdin], [], [], 0)[0]:
                    # Consume the line the user typed to trigger this poll.
                    input()

                if first_poll:
                    # Initial poll honors --tail to bound the backlog.
                    logs = get_logs(
                        args.canister_id,
                        tail=args.tail,
                        level=args.level,
                        network=network,
                        name=args.name,
                    )
                    first_poll = False
                else:
                    # Later polls fetch only entries newer than the highest
                    # id seen so far (incremental tail).
                    logs = get_logs(
                        args.canister_id,
                        tail=None,
                        level=args.level,
                        network=network,
                        from_entry=last_log_id + 1,
                        name=args.name,
                    )

                last_poll_time = current_time

                for log in logs:
                    print(format_log(log), flush=True)

                # Update the last log ID if we have logs
                if logs:
                    last_log_id = max(int(log.get("id", 0)) for log in logs)

            time.sleep(0.1)
    except KeyboardInterrupt:
        print("\nExiting log follower")
|
|
227
|
+
|
|
228
|
+
|
|
229
|
+
if __name__ == "__main__":
|
|
230
|
+
main()
|
|
@@ -0,0 +1,193 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: ic_python_logging
|
|
3
|
+
Version: 0.3.0
|
|
4
|
+
Summary: A lightweight logging library for the Internet Computer (IC)
|
|
5
|
+
Home-page: https://github.com/smart-social-contracts/ic-python-logging
|
|
6
|
+
Author: Smart Social Contracts
|
|
7
|
+
Author-email: Smart Social Contracts <smartsocialcontracts@gmail.com>
|
|
8
|
+
License: MIT License
|
|
9
|
+
|
|
10
|
+
Copyright (c) 2025 Smart Social Contracts
|
|
11
|
+
|
|
12
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
13
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
14
|
+
in the Software without restriction, including without limitation the rights
|
|
15
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
16
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
17
|
+
furnished to do so, subject to the following conditions:
|
|
18
|
+
|
|
19
|
+
The above copyright notice and this permission notice shall be included in all
|
|
20
|
+
copies or substantial portions of the Software.
|
|
21
|
+
|
|
22
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
23
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
24
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
25
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
26
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
27
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
28
|
+
SOFTWARE.
|
|
29
|
+
|
|
30
|
+
Project-URL: Homepage, https://github.com/smart-social-contracts/ic-python-logging
|
|
31
|
+
Project-URL: Repository, https://github.com/smart-social-contracts/ic-python-logging.git
|
|
32
|
+
Project-URL: Issues, https://github.com/smart-social-contracts/ic-python-logging/issues
|
|
33
|
+
Keywords: logging,debugging,ic,internet-computer
|
|
34
|
+
Classifier: Development Status :: 4 - Beta
|
|
35
|
+
Classifier: Intended Audience :: Developers
|
|
36
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
37
|
+
Classifier: Operating System :: OS Independent
|
|
38
|
+
Classifier: Programming Language :: Python :: 3
|
|
39
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
40
|
+
Classifier: Topic :: Database
|
|
41
|
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
|
42
|
+
Requires-Python: >=3.7
|
|
43
|
+
Description-Content-Type: text/markdown
|
|
44
|
+
License-File: LICENSE
|
|
45
|
+
Dynamic: author
|
|
46
|
+
Dynamic: home-page
|
|
47
|
+
Dynamic: license-file
|
|
48
|
+
Dynamic: requires-python
|
|
49
|
+
|
|
50
|
+
# IC Python Logging
|
|
51
|
+
|
|
52
|
+
[](https://github.com/smart-social-contracts/ic-python-logging/actions)
|
|
53
|
+
[](https://github.com/smart-social-contracts/ic-python-logging/actions)
|
|
54
|
+
[](https://badge.fury.io/py/ic-python-logging)
|
|
55
|
+
[](https://www.python.org/downloads/release/python-3107/)
|
|
56
|
+
[](https://github.com/smart-social-contracts/ic-python-logging/blob/main/LICENSE)
|
|
57
|
+
|
|
58
|
+
A simple logging system for the [Internet Computer](https://internetcomputer.org). Forked from [kybra-simple-logging](https://github.com/smart-social-contracts/kybra-simple-logging). The library includes in-memory log storage capabilities, providing robust logging for all canister functions including asynchronous operations.
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
## Features
|
|
62
|
+
|
|
63
|
+
- CLI tool for querying logs directly from canisters to your local machine (including in semi-real time with `--follow`)
|
|
64
|
+
- Works seamlessly in both Internet Computer and non-IC environments
|
|
65
|
+
- Avoids using Python's standard logging module (which has compatibility issues in the IC environment)
|
|
66
|
+
- Named loggers with `get_logger()` function similar to Python's standard library
|
|
67
|
+
- Support for level-based filtering (DEBUG, INFO, WARNING, ERROR, CRITICAL)
|
|
68
|
+
- Global and per-logger log level configuration
|
|
69
|
+
- Ability to enable/disable logging completely
|
|
70
|
+
- Circular buffer to store logs in memory without exhausting memory
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
## Installation
|
|
74
|
+
|
|
75
|
+
```bash
|
|
76
|
+
pip install ic-python-logging
|
|
77
|
+
```
|
|
78
|
+
|
|
79
|
+
## Quick Start
|
|
80
|
+
|
|
81
|
+
```python
|
|
82
|
+
from ic_python_logging import get_logger
|
|
83
|
+
|
|
84
|
+
# Create a logger
|
|
85
|
+
logger = get_logger("my_canister")
|
|
86
|
+
|
|
87
|
+
# Log messages at a specific level
|
|
88
|
+
logger.info("This is an info message")
|
|
89
|
+
|
|
90
|
+
# Set log level for a specific logger
|
|
91
|
+
logger.set_level(Level.DEBUG)  # Level is importable from ic_python_logging
|
|
92
|
+
|
|
93
|
+
# Use in-memory logging to retrieve logs
|
|
94
|
+
from ic_python_logging import get_logs, clear_logs, enable_memory_logging, disable_memory_logging
|
|
95
|
+
|
|
96
|
+
# Retrieve only ERROR logs
|
|
97
|
+
error_logs = get_logs(min_level="ERROR")
|
|
98
|
+
|
|
99
|
+
# Filter logs by logger name
|
|
100
|
+
component_logs = get_logs(logger_name="my_component")
|
|
101
|
+
```
|
|
102
|
+
|
|
103
|
+
## CLI Tool
|
|
104
|
+
|
|
105
|
+
The package includes a command-line tool for querying logs from canisters.
|
|
106
|
+
Example:
|
|
107
|
+
|
|
108
|
+
```bash
|
|
109
|
+
# View the last 10 ERROR log entries of the logger named MY_LOGGER_NAME, then follow and poll every 5 seconds, from the canister with ID <CANISTER_ID> on the IC network
|
|
110
|
+
kslog <CANISTER_ID> --tail 10 --level ERROR --name MY_LOGGER_NAME --follow --ic --interval 5
|
|
111
|
+
```
|
|
112
|
+
|
|
113
|
+
To use the `kslog` tool with your canister, expose the query function:
|
|
114
|
+
|
|
115
|
+
```python
|
|
116
|
+
# ##### Import Basilisk and the internal function #####
|
|
117
|
+
|
|
118
|
+
from basilisk import Opt, Record, Vec, nat, query
|
|
119
|
+
from ic_python_logging import get_canister_logs as _get_canister_logs
|
|
120
|
+
|
|
121
|
+
|
|
122
|
+
# Define the PublicLogEntry class directly in the test canister
|
|
123
|
+
class PublicLogEntry(Record):
|
|
124
|
+
timestamp: nat
|
|
125
|
+
level: str
|
|
126
|
+
logger_name: str
|
|
127
|
+
message: str
|
|
128
|
+
id: nat
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
@query
|
|
132
|
+
def get_canister_logs(
|
|
133
|
+
from_entry: Opt[nat] = None,
|
|
134
|
+
max_entries: Opt[nat] = None,
|
|
135
|
+
min_level: Opt[str] = None,
|
|
136
|
+
logger_name: Opt[str] = None,
|
|
137
|
+
) -> Vec[PublicLogEntry]:
|
|
138
|
+
"""
|
|
139
|
+
Re-export the get_canister_logs query function from the library
|
|
140
|
+
This makes it accessible as a query method on the test canister
|
|
141
|
+
"""
|
|
142
|
+
logs = _get_canister_logs(
|
|
143
|
+
from_entry=from_entry,
|
|
144
|
+
max_entries=max_entries,
|
|
145
|
+
min_level=min_level,
|
|
146
|
+
logger_name=logger_name
|
|
147
|
+
)
|
|
148
|
+
|
|
149
|
+
# Convert the logs to our local PublicLogEntry type
|
|
150
|
+
return [
|
|
151
|
+
PublicLogEntry(
|
|
152
|
+
timestamp=log["timestamp"],
|
|
153
|
+
level=log["level"],
|
|
154
|
+
logger_name=log["logger_name"],
|
|
155
|
+
message=log["message"],
|
|
156
|
+
id=log["id"],
|
|
157
|
+
)
|
|
158
|
+
for log in logs
|
|
159
|
+
]
|
|
160
|
+
```
|
|
161
|
+
|
|
162
|
+
## Development
|
|
163
|
+
|
|
164
|
+
```bash
|
|
165
|
+
# Clone the repository
|
|
166
|
+
git clone https://github.com/smart-social-contracts/ic-python-logging.git
|
|
167
|
+
cd ic-python-logging
|
|
168
|
+
|
|
169
|
+
# Recommended setup
|
|
170
|
+
pyenv install 3.10.7
|
|
171
|
+
pyenv local 3.10.7
|
|
172
|
+
python -m venv venv
|
|
173
|
+
source venv/bin/activate
|
|
174
|
+
pip install ic-basilisk
|
|
175
|
+
python -m basilisk install-dfx-extension
|
|
176
|
+
|
|
177
|
+
# Install development dependencies
|
|
178
|
+
pip install -r requirements-dev.txt
|
|
179
|
+
|
|
180
|
+
# Run linters
|
|
181
|
+
./run_linters.sh
|
|
182
|
+
|
|
183
|
+
# Run tests
|
|
184
|
+
cd tests && ./run_test.sh
|
|
185
|
+
```
|
|
186
|
+
|
|
187
|
+
## Contributing
|
|
188
|
+
|
|
189
|
+
Contributions are welcome! Please feel free to submit a Pull Request.
|
|
190
|
+
|
|
191
|
+
## License
|
|
192
|
+
|
|
193
|
+
MIT
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
ic_python_logging/__init__.py,sha256=NzzwXsYJl5O7pZGk-OQ6jqdF4UxDxBXzwrRf5Qfizrw,2074
|
|
2
|
+
ic_python_logging/_cdk.py,sha256=CYAnqJ77hsjoSDJNe0yHUOpw1AJAYbIRy-qcsI5ticU,865
|
|
3
|
+
ic_python_logging/_handler.py,sha256=rGR-tcbtkbxsv6VPOYx3jQx586oodOLF0MU5D5lD_tI,11892
|
|
4
|
+
ic_python_logging/cli.py,sha256=F39B13SnhhlxxbWH99WfZeJNG7_TQXDsN5TKghE7Zgc,6539
|
|
5
|
+
ic_python_logging-0.3.0.dist-info/licenses/LICENSE,sha256=6q6XYNOGnJcVSus2bAezFn7bU_2Y5T6W4aGQHBb8X-c,1079
|
|
6
|
+
ic_python_logging-0.3.0.dist-info/METADATA,sha256=bip1zm-zacqDiQCBULOswx0aVQFRvUTaQDKnm2b3mTc,7015
|
|
7
|
+
ic_python_logging-0.3.0.dist-info/WHEEL,sha256=YCfwYGOYMi5Jhw2fU4yNgwErybb2IX5PEwBKV4ZbdBo,91
|
|
8
|
+
ic_python_logging-0.3.0.dist-info/entry_points.txt,sha256=UKmZ8D0AC-HHn4HDypuhpT_2C9rKtY5ETkcnUFRhkW8,53
|
|
9
|
+
ic_python_logging-0.3.0.dist-info/top_level.txt,sha256=LMsY26rDPsH7Vu-7chKcbK2aiKEEgcyW_gEs75NzbTw,18
|
|
10
|
+
ic_python_logging-0.3.0.dist-info/RECORD,,
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2025 Smart Social Contracts
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
ic_python_logging
|