roblox-studio-physical-operation-mcp 0.1.0__py3-none-any.whl → 0.1.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- roblox_studio_physical_operation_mcp/log_utils.py +467 -467
- roblox_studio_physical_operation_mcp/server.py +1 -1
- roblox_studio_physical_operation_mcp/templates/pause.png +0 -0
- roblox_studio_physical_operation_mcp/templates/play.png +0 -0
- roblox_studio_physical_operation_mcp/templates/stop.png +0 -0
- roblox_studio_physical_operation_mcp-0.1.1.data/data/roblox_studio_physical_operation_mcp/templates/pause.png +0 -0
- roblox_studio_physical_operation_mcp-0.1.1.data/data/roblox_studio_physical_operation_mcp/templates/play.png +0 -0
- roblox_studio_physical_operation_mcp-0.1.1.data/data/roblox_studio_physical_operation_mcp/templates/stop.png +0 -0
- {roblox_studio_physical_operation_mcp-0.1.0.dist-info → roblox_studio_physical_operation_mcp-0.1.1.dist-info}/METADATA +1 -1
- roblox_studio_physical_operation_mcp-0.1.1.dist-info/RECORD +19 -0
- roblox_studio_physical_operation_mcp-0.1.0.dist-info/RECORD +0 -13
- {roblox_studio_physical_operation_mcp-0.1.0.dist-info → roblox_studio_physical_operation_mcp-0.1.1.dist-info}/WHEEL +0 -0
- {roblox_studio_physical_operation_mcp-0.1.0.dist-info → roblox_studio_physical_operation_mcp-0.1.1.dist-info}/entry_points.txt +0 -0
- {roblox_studio_physical_operation_mcp-0.1.0.dist-info → roblox_studio_physical_operation_mcp-0.1.1.dist-info}/licenses/LICENSE +0 -0
|
@@ -1,467 +1,467 @@
|
|
|
1
|
-
"""
|
|
2
|
-
日志工具模块: 日志读取、搜索等
|
|
3
|
-
|
|
4
|
-
优化:
|
|
5
|
-
- 从文件末尾倒序读取,适合大文件
|
|
6
|
-
- 支持过滤特定类别 (如 FLog::Output)
|
|
7
|
-
"""
|
|
8
|
-
|
|
9
|
-
import os
|
|
10
|
-
import re
|
|
11
|
-
from typing import Optional, Generator
|
|
12
|
-
from dataclasses import dataclass
|
|
13
|
-
|
|
14
|
-
LOG_DIR = os.path.expandvars(r"%LOCALAPPDATA%\Roblox\logs")
|
|
15
|
-
|
|
16
|
-
#
|
|
17
|
-
DEFAULT_CATEGORIES = ["FLog::Output"]
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
@dataclass
|
|
21
|
-
class LogEntry:
|
|
22
|
-
timestamp: str
|
|
23
|
-
level: str
|
|
24
|
-
category: str
|
|
25
|
-
message: str
|
|
26
|
-
raw: str
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
def parse_log_line(line: str) -> Optional[LogEntry]:
|
|
30
|
-
"""解析单行日志"""
|
|
31
|
-
# 格式: 2026-02-03T08:52:02.095Z,128.095795,1996c,12 [DFLog::HttpTraceError] message
|
|
32
|
-
# 或: 2026-02-03T08:52:04.244Z,130.244095,12f4,6,Info [FLog::...] message
|
|
33
|
-
match = re.match(
|
|
34
|
-
r'^(\d{4}-\d{2}-\d{2}T[\d:.]+Z),[\d.]+,[a-f0-9]+,\d+(?:,(\w+))?\s*\[([^\]]+)\]\s*(.*)$',
|
|
35
|
-
line
|
|
36
|
-
)
|
|
37
|
-
if match:
|
|
38
|
-
return LogEntry(
|
|
39
|
-
timestamp=match.group(1),
|
|
40
|
-
level=match.group(2) or "Info",
|
|
41
|
-
category=match.group(3),
|
|
42
|
-
message=match.group(4),
|
|
43
|
-
raw=line
|
|
44
|
-
)
|
|
45
|
-
return None
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
def read_file_reverse(file_path: str, chunk_size: int = 8192) -> Generator[str, None, None]:
|
|
49
|
-
"""
|
|
50
|
-
从文件末尾倒序读取行
|
|
51
|
-
|
|
52
|
-
对于大文件,这比读取整个文件再 reverse 更高效
|
|
53
|
-
"""
|
|
54
|
-
with open(file_path, 'rb') as f:
|
|
55
|
-
# 移动到文件末尾
|
|
56
|
-
f.seek(0, 2)
|
|
57
|
-
file_size = f.tell()
|
|
58
|
-
|
|
59
|
-
buffer = b''
|
|
60
|
-
position = file_size
|
|
61
|
-
|
|
62
|
-
while position > 0:
|
|
63
|
-
# 计算读取位置
|
|
64
|
-
read_size = min(chunk_size, position)
|
|
65
|
-
position -= read_size
|
|
66
|
-
f.seek(position)
|
|
67
|
-
|
|
68
|
-
# 读取并拼接
|
|
69
|
-
chunk = f.read(read_size)
|
|
70
|
-
buffer = chunk + buffer
|
|
71
|
-
|
|
72
|
-
# 按行分割
|
|
73
|
-
lines = buffer.split(b'\n')
|
|
74
|
-
|
|
75
|
-
# 最后一个可能不完整,保留到下次
|
|
76
|
-
buffer = lines[0]
|
|
77
|
-
|
|
78
|
-
# 倒序返回完整的行
|
|
79
|
-
for line in reversed(lines[1:]):
|
|
80
|
-
line_str = line.decode('utf-8', errors='ignore').strip()
|
|
81
|
-
if line_str:
|
|
82
|
-
yield line_str
|
|
83
|
-
|
|
84
|
-
# 处理剩余的 buffer
|
|
85
|
-
if buffer:
|
|
86
|
-
line_str = buffer.decode('utf-8', errors='ignore').strip()
|
|
87
|
-
if line_str:
|
|
88
|
-
yield line_str
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
from .log_filter import should_exclude
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
def get_logs_from_line(
|
|
95
|
-
log_path: str,
|
|
96
|
-
after_line: int = None,
|
|
97
|
-
before_line: int = None,
|
|
98
|
-
timestamps: bool = False,
|
|
99
|
-
categories: list[str] = None,
|
|
100
|
-
apply_filter: bool = True
|
|
101
|
-
) -> dict:
|
|
102
|
-
"""
|
|
103
|
-
从指定行范围读取日志
|
|
104
|
-
|
|
105
|
-
Args:
|
|
106
|
-
log_path: 日志文件路径
|
|
107
|
-
after_line: 从哪一行之后开始读取,None 表示从头开始
|
|
108
|
-
before_line: 到哪一行之前结束,None 表示到末尾
|
|
109
|
-
timestamps: 是否附加时间戳
|
|
110
|
-
categories: 只返回这些类别的日志,默认 ["FLog::Output"]
|
|
111
|
-
apply_filter: 是否应用过滤规则排除 Studio 内部日志
|
|
112
|
-
|
|
113
|
-
Returns:
|
|
114
|
-
{
|
|
115
|
-
"logs": "日志文本",
|
|
116
|
-
"start_line": 起始行号,
|
|
117
|
-
"last_line": 最后行号,
|
|
118
|
-
"remaining": 剩余有效日志行数,
|
|
119
|
-
"has_more": 是否还有更多
|
|
120
|
-
}
|
|
121
|
-
"""
|
|
122
|
-
MAX_BYTES = 32000
|
|
123
|
-
|
|
124
|
-
if not os.path.exists(log_path):
|
|
125
|
-
return {"logs": "", "start_line": 0, "last_line": 0, "remaining": 0, "has_more": False}
|
|
126
|
-
|
|
127
|
-
if categories is None:
|
|
128
|
-
categories = DEFAULT_CATEGORIES
|
|
129
|
-
|
|
130
|
-
start_line = None
|
|
131
|
-
last_line = 0
|
|
132
|
-
current_bytes = 0
|
|
133
|
-
log_lines = []
|
|
134
|
-
remaining = 0
|
|
135
|
-
bytes_exceeded = False
|
|
136
|
-
|
|
137
|
-
try:
|
|
138
|
-
with open(log_path, 'r', encoding='utf-8', errors='ignore') as f:
|
|
139
|
-
for line_num, line in enumerate(f, 1):
|
|
140
|
-
# 跳过 after_line 之前的行
|
|
141
|
-
if after_line is not None and line_num <= after_line:
|
|
142
|
-
continue
|
|
143
|
-
|
|
144
|
-
# 停止在 before_line
|
|
145
|
-
if before_line is not None and line_num >= before_line:
|
|
146
|
-
break
|
|
147
|
-
|
|
148
|
-
line = line.strip()
|
|
149
|
-
if not line:
|
|
150
|
-
continue
|
|
151
|
-
|
|
152
|
-
entry = parse_log_line(line)
|
|
153
|
-
if not entry:
|
|
154
|
-
continue
|
|
155
|
-
|
|
156
|
-
# 类别过滤
|
|
157
|
-
if categories and entry.category not in categories:
|
|
158
|
-
continue
|
|
159
|
-
|
|
160
|
-
# 应用排除规则
|
|
161
|
-
if apply_filter and should_exclude(entry.message):
|
|
162
|
-
continue
|
|
163
|
-
|
|
164
|
-
# 这是一条有效日志
|
|
165
|
-
remaining += 1
|
|
166
|
-
|
|
167
|
-
# 如果已超过字节限制,只统计不添加
|
|
168
|
-
if bytes_exceeded:
|
|
169
|
-
continue
|
|
170
|
-
|
|
171
|
-
# 格式化输出
|
|
172
|
-
if timestamps:
|
|
173
|
-
time_part = entry.timestamp[11:19]
|
|
174
|
-
output_line = f"[{time_part}] {entry.message}"
|
|
175
|
-
else:
|
|
176
|
-
output_line = entry.message
|
|
177
|
-
|
|
178
|
-
line_bytes = len(output_line.encode('utf-8')) + 1
|
|
179
|
-
|
|
180
|
-
# 检查是否超过字节限制
|
|
181
|
-
if current_bytes + line_bytes > MAX_BYTES and log_lines:
|
|
182
|
-
bytes_exceeded = True
|
|
183
|
-
continue
|
|
184
|
-
|
|
185
|
-
if start_line is None:
|
|
186
|
-
start_line = line_num
|
|
187
|
-
|
|
188
|
-
log_lines.append(output_line)
|
|
189
|
-
last_line = line_num
|
|
190
|
-
current_bytes += line_bytes
|
|
191
|
-
|
|
192
|
-
except Exception:
|
|
193
|
-
pass
|
|
194
|
-
|
|
195
|
-
returned_count = len(log_lines)
|
|
196
|
-
return {
|
|
197
|
-
"logs": "\n".join(log_lines),
|
|
198
|
-
"start_line": start_line or 0,
|
|
199
|
-
"last_line": last_line,
|
|
200
|
-
"remaining": remaining - returned_count,
|
|
201
|
-
"has_more": remaining > returned_count
|
|
202
|
-
}
|
|
203
|
-
|
|
204
|
-
|
|
205
|
-
def get_recent_logs(
|
|
206
|
-
log_path: str,
|
|
207
|
-
limit: int = 100,
|
|
208
|
-
categories: list[str] = None,
|
|
209
|
-
min_level: Optional[str] = None,
|
|
210
|
-
apply_filter: bool = True
|
|
211
|
-
) -> list[LogEntry]:
|
|
212
|
-
"""
|
|
213
|
-
从日志文件末尾读取最近的日志
|
|
214
|
-
|
|
215
|
-
Args:
|
|
216
|
-
log_path: 日志文件路径
|
|
217
|
-
limit: 返回的最大条数
|
|
218
|
-
categories: 只返回这些类别的日志,默认 ["FLog::Output"]
|
|
219
|
-
min_level: 最低日志级别过滤
|
|
220
|
-
apply_filter: 是否应用过滤规则排除 Studio 内部日志,默认 True
|
|
221
|
-
|
|
222
|
-
Returns:
|
|
223
|
-
日志条目列表 (按时间正序)
|
|
224
|
-
"""
|
|
225
|
-
if not os.path.exists(log_path):
|
|
226
|
-
return []
|
|
227
|
-
|
|
228
|
-
if categories is None:
|
|
229
|
-
categories = DEFAULT_CATEGORIES
|
|
230
|
-
|
|
231
|
-
entries = []
|
|
232
|
-
try:
|
|
233
|
-
for line in read_file_reverse(log_path):
|
|
234
|
-
entry = parse_log_line(line)
|
|
235
|
-
if not entry:
|
|
236
|
-
continue
|
|
237
|
-
|
|
238
|
-
# 类别过滤
|
|
239
|
-
if categories and entry.category not in categories:
|
|
240
|
-
continue
|
|
241
|
-
|
|
242
|
-
# 级别过滤
|
|
243
|
-
if min_level and entry.level.lower() != min_level.lower():
|
|
244
|
-
continue
|
|
245
|
-
|
|
246
|
-
# 应用排除规则
|
|
247
|
-
if apply_filter and should_exclude(entry.message):
|
|
248
|
-
continue
|
|
249
|
-
|
|
250
|
-
entries.append(entry)
|
|
251
|
-
if len(entries) >= limit:
|
|
252
|
-
break
|
|
253
|
-
except Exception:
|
|
254
|
-
pass
|
|
255
|
-
|
|
256
|
-
# 返回正序 (最旧的在前)
|
|
257
|
-
return list(reversed(entries))
|
|
258
|
-
|
|
259
|
-
|
|
260
|
-
def get_all_logs(
|
|
261
|
-
log_path: str,
|
|
262
|
-
limit: int = 100,
|
|
263
|
-
min_level: Optional[str] = None
|
|
264
|
-
) -> list[LogEntry]:
|
|
265
|
-
"""
|
|
266
|
-
获取所有类别的日志 (不过滤类别)
|
|
267
|
-
"""
|
|
268
|
-
return get_recent_logs(log_path, limit, categories=[], min_level=min_level)
|
|
269
|
-
|
|
270
|
-
|
|
271
|
-
def search_logs(
|
|
272
|
-
log_path: str,
|
|
273
|
-
pattern: str,
|
|
274
|
-
limit: int = 50,
|
|
275
|
-
categories: list[str] = None
|
|
276
|
-
) -> list[LogEntry]:
|
|
277
|
-
"""
|
|
278
|
-
在日志中搜索匹配的条目
|
|
279
|
-
|
|
280
|
-
Args:
|
|
281
|
-
log_path: 日志文件路径
|
|
282
|
-
pattern: 正则表达式模式
|
|
283
|
-
limit: 返回的最大条数
|
|
284
|
-
categories: 只搜索这些类别,None 表示搜索所有
|
|
285
|
-
"""
|
|
286
|
-
if not os.path.exists(log_path):
|
|
287
|
-
return []
|
|
288
|
-
|
|
289
|
-
entries = []
|
|
290
|
-
regex = re.compile(pattern, re.IGNORECASE)
|
|
291
|
-
|
|
292
|
-
try:
|
|
293
|
-
for line in read_file_reverse(log_path):
|
|
294
|
-
if not regex.search(line):
|
|
295
|
-
continue
|
|
296
|
-
|
|
297
|
-
entry = parse_log_line(line)
|
|
298
|
-
if not entry:
|
|
299
|
-
continue
|
|
300
|
-
|
|
301
|
-
if categories and entry.category not in categories:
|
|
302
|
-
continue
|
|
303
|
-
|
|
304
|
-
entries.append(entry)
|
|
305
|
-
if len(entries) >= limit:
|
|
306
|
-
break
|
|
307
|
-
except Exception:
|
|
308
|
-
pass
|
|
309
|
-
|
|
310
|
-
return list(reversed(entries))
|
|
311
|
-
|
|
312
|
-
|
|
313
|
-
def search_logs_from_line(
|
|
314
|
-
log_path: str,
|
|
315
|
-
pattern: str,
|
|
316
|
-
after_line: int = None,
|
|
317
|
-
before_line: int = None,
|
|
318
|
-
timestamps: bool = False,
|
|
319
|
-
categories: list[str] = None,
|
|
320
|
-
apply_filter: bool = True
|
|
321
|
-
) -> dict:
|
|
322
|
-
"""
|
|
323
|
-
在指定行范围内搜索日志
|
|
324
|
-
|
|
325
|
-
Args:
|
|
326
|
-
log_path: 日志文件路径
|
|
327
|
-
pattern: 正则表达式模式
|
|
328
|
-
after_line: 从哪一行之后开始搜索
|
|
329
|
-
before_line: 到哪一行之前结束
|
|
330
|
-
timestamps: 是否附加时间戳
|
|
331
|
-
categories: 只搜索这些类别,默认 ["FLog::Output"]
|
|
332
|
-
apply_filter: 是否应用过滤规则
|
|
333
|
-
|
|
334
|
-
Returns:
|
|
335
|
-
{
|
|
336
|
-
"logs": "匹配的日志文本",
|
|
337
|
-
"start_line": 起始行号,
|
|
338
|
-
"last_line": 最后行号,
|
|
339
|
-
"match_count": 匹配条数,
|
|
340
|
-
"remaining": 剩余匹配数,
|
|
341
|
-
"has_more": 是否还有更多
|
|
342
|
-
}
|
|
343
|
-
"""
|
|
344
|
-
MAX_BYTES = 32000
|
|
345
|
-
|
|
346
|
-
if not os.path.exists(log_path):
|
|
347
|
-
return {"logs": "", "start_line": 0, "last_line": 0, "match_count": 0, "remaining": 0, "has_more": False}
|
|
348
|
-
|
|
349
|
-
if categories is None:
|
|
350
|
-
categories = DEFAULT_CATEGORIES
|
|
351
|
-
|
|
352
|
-
try:
|
|
353
|
-
regex = re.compile(pattern, re.IGNORECASE)
|
|
354
|
-
except re.error:
|
|
355
|
-
return {"error": f"Invalid regex pattern: {pattern}"}
|
|
356
|
-
|
|
357
|
-
start_line = None
|
|
358
|
-
last_line = 0
|
|
359
|
-
current_bytes = 0
|
|
360
|
-
log_lines = []
|
|
361
|
-
match_count = 0
|
|
362
|
-
bytes_exceeded = False
|
|
363
|
-
|
|
364
|
-
try:
|
|
365
|
-
with open(log_path, 'r', encoding='utf-8', errors='ignore') as f:
|
|
366
|
-
for line_num, line in enumerate(f, 1):
|
|
367
|
-
if after_line is not None and line_num <= after_line:
|
|
368
|
-
continue
|
|
369
|
-
|
|
370
|
-
if before_line is not None and line_num >= before_line:
|
|
371
|
-
break
|
|
372
|
-
|
|
373
|
-
line = line.strip()
|
|
374
|
-
if not line:
|
|
375
|
-
continue
|
|
376
|
-
|
|
377
|
-
entry = parse_log_line(line)
|
|
378
|
-
if not entry:
|
|
379
|
-
continue
|
|
380
|
-
|
|
381
|
-
if categories and entry.category not in categories:
|
|
382
|
-
continue
|
|
383
|
-
|
|
384
|
-
if apply_filter and should_exclude(entry.message):
|
|
385
|
-
continue
|
|
386
|
-
|
|
387
|
-
# 正则匹配
|
|
388
|
-
if not regex.search(entry.message):
|
|
389
|
-
continue
|
|
390
|
-
|
|
391
|
-
match_count += 1
|
|
392
|
-
|
|
393
|
-
if bytes_exceeded:
|
|
394
|
-
continue
|
|
395
|
-
|
|
396
|
-
if timestamps:
|
|
397
|
-
time_part = entry.timestamp[11:19]
|
|
398
|
-
output_line = f"{line_num}|[{time_part}] {entry.message}"
|
|
399
|
-
else:
|
|
400
|
-
output_line = f"{line_num}|{entry.message}"
|
|
401
|
-
|
|
402
|
-
line_bytes = len(output_line.encode('utf-8')) + 1
|
|
403
|
-
|
|
404
|
-
if current_bytes + line_bytes > MAX_BYTES and log_lines:
|
|
405
|
-
bytes_exceeded = True
|
|
406
|
-
continue
|
|
407
|
-
|
|
408
|
-
if start_line is None:
|
|
409
|
-
start_line = line_num
|
|
410
|
-
|
|
411
|
-
log_lines.append(output_line)
|
|
412
|
-
last_line = line_num
|
|
413
|
-
current_bytes += line_bytes
|
|
414
|
-
|
|
415
|
-
except Exception:
|
|
416
|
-
pass
|
|
417
|
-
|
|
418
|
-
returned_count = len(log_lines)
|
|
419
|
-
return {
|
|
420
|
-
"logs": "\n".join(log_lines),
|
|
421
|
-
"start_line": start_line or 0,
|
|
422
|
-
"last_line": last_line,
|
|
423
|
-
"match_count": returned_count,
|
|
424
|
-
"remaining": match_count - returned_count,
|
|
425
|
-
"has_more": match_count > returned_count
|
|
426
|
-
}
|
|
427
|
-
|
|
428
|
-
|
|
429
|
-
def find_latest_studio_log() -> Optional[str]:
|
|
430
|
-
"""查找最新的 Studio 日志文件"""
|
|
431
|
-
if not os.path.exists(LOG_DIR):
|
|
432
|
-
return None
|
|
433
|
-
|
|
434
|
-
studio_logs = []
|
|
435
|
-
for f in os.listdir(LOG_DIR):
|
|
436
|
-
if "Studio" in f and f.endswith(".log"):
|
|
437
|
-
path = os.path.join(LOG_DIR, f)
|
|
438
|
-
studio_logs.append((path, os.path.getmtime(path)))
|
|
439
|
-
|
|
440
|
-
if not studio_logs:
|
|
441
|
-
return None
|
|
442
|
-
|
|
443
|
-
studio_logs.sort(key=lambda x: x[1], reverse=True)
|
|
444
|
-
return studio_logs[0][0]
|
|
445
|
-
|
|
446
|
-
|
|
447
|
-
def clean_old_logs(days: int = 7) -> int:
|
|
448
|
-
"""清理超过指定天数的旧日志"""
|
|
449
|
-
if not os.path.exists(LOG_DIR):
|
|
450
|
-
return 0
|
|
451
|
-
|
|
452
|
-
from datetime import datetime
|
|
453
|
-
count = 0
|
|
454
|
-
now = datetime.now().timestamp()
|
|
455
|
-
threshold = days * 24 * 60 * 60
|
|
456
|
-
|
|
457
|
-
for f in os.listdir(LOG_DIR):
|
|
458
|
-
if f.endswith(".log"):
|
|
459
|
-
path = os.path.join(LOG_DIR, f)
|
|
460
|
-
try:
|
|
461
|
-
if now - os.path.getmtime(path) > threshold:
|
|
462
|
-
os.remove(path)
|
|
463
|
-
count += 1
|
|
464
|
-
except Exception:
|
|
465
|
-
pass
|
|
466
|
-
|
|
467
|
-
return count
|
|
1
|
+
"""
|
|
2
|
+
日志工具模块: 日志读取、搜索等
|
|
3
|
+
|
|
4
|
+
优化:
|
|
5
|
+
- 从文件末尾倒序读取,适合大文件
|
|
6
|
+
- 支持过滤特定类别 (如 FLog::Output)
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
import os
|
|
10
|
+
import re
|
|
11
|
+
from typing import Optional, Generator
|
|
12
|
+
from dataclasses import dataclass
|
|
13
|
+
|
|
14
|
+
# Windows-only location where Roblox writes its Studio log files.
LOG_DIR = os.path.expandvars(r"%LOCALAPPDATA%\Roblox\logs")

# Categories read by default (covers print and warn output from user scripts).
DEFAULT_CATEGORIES = ["FLog::Output", "FLog::Warning"]
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
@dataclass
class LogEntry:
    """One parsed Roblox Studio log record."""
    timestamp: str  # ISO-8601 stamp, e.g. "2026-02-03T08:52:02.095Z"
    level: str      # log level; "Info" when the line carries none
    category: str   # bracketed category, e.g. "FLog::Output"
    message: str    # text following the category
    raw: str        # the original, unmodified line


def parse_log_line(line: str) -> Optional[LogEntry]:
    """Parse a single log line; return None when it does not match.

    Recognized formats:
      2026-02-03T08:52:02.095Z,128.095795,1996c,12 [DFLog::HttpTraceError] message
      2026-02-03T08:52:04.244Z,130.244095,12f4,6,Info [FLog::...] message
    """
    m = re.match(
        r'^(\d{4}-\d{2}-\d{2}T[\d:.]+Z),[\d.]+,[a-f0-9]+,\d+(?:,(\w+))?\s*\[([^\]]+)\]\s*(.*)$',
        line
    )
    if m is None:
        return None

    ts, level, category, message = m.groups()
    return LogEntry(
        timestamp=ts,
        level=level or "Info",  # the level field is optional in the raw format
        category=category,
        message=message,
        raw=line,
    )
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
def read_file_reverse(file_path: str, chunk_size: int = 8192) -> Generator[str, None, None]:
    """Yield the non-empty lines of *file_path* from last to first.

    Reads the file backwards in fixed-size binary chunks, which is far
    cheaper for large log files than loading everything and reversing.
    Lines are decoded as UTF-8 (undecodable bytes dropped) and stripped;
    blank lines are skipped.
    """
    with open(file_path, 'rb') as fh:
        fh.seek(0, 2)  # jump to EOF to learn the size
        remaining = fh.tell()
        carry = b''

        while remaining > 0:
            step = chunk_size if remaining >= chunk_size else remaining
            remaining -= step
            fh.seek(remaining)

            # Prepend this chunk to whatever partial data we already hold.
            carry = fh.read(step) + carry

            pieces = carry.split(b'\n')
            # pieces[0] may be a partial line whose start lives in a chunk
            # we have not read yet -- keep it for the next iteration.
            carry = pieces[0]

            for raw in reversed(pieces[1:]):
                text = raw.decode('utf-8', errors='ignore').strip()
                if text:
                    yield text

        # Whatever remains is the very first line of the file.
        if carry:
            text = carry.decode('utf-8', errors='ignore').strip()
            if text:
                yield text
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
from .log_filter import should_exclude
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
def get_logs_from_line(
    log_path: str,
    after_line: int = None,
    before_line: int = None,
    timestamps: bool = False,
    categories: list[str] = None,
    apply_filter: bool = True
) -> dict:
    """Read filtered log entries from a line-number range of *log_path*.

    Args:
        log_path: path to the log file.
        after_line: only lines strictly after this number are read (None = start).
        before_line: reading stops at this line number (None = end of file).
        timestamps: prefix each message with its [HH:MM:SS] time.
        categories: categories to keep; defaults to DEFAULT_CATEGORIES.
        apply_filter: drop Studio-internal messages via should_exclude().

    Returns:
        dict with "logs" (joined text), "start_line", "last_line",
        "remaining" (matching lines withheld by the size cap) and "has_more".
    """
    MAX_BYTES = 32000  # cap on the total size of the returned text

    if not os.path.exists(log_path):
        return {"logs": "", "start_line": 0, "last_line": 0, "remaining": 0, "has_more": False}

    if categories is None:
        categories = DEFAULT_CATEGORIES

    start_line = None
    last_line = 0
    used_bytes = 0
    collected = []
    total_matches = 0
    over_budget = False

    try:
        with open(log_path, 'r', encoding='utf-8', errors='ignore') as fh:
            for num, raw in enumerate(fh, 1):
                # Honor the requested line window.
                if after_line is not None and num <= after_line:
                    continue
                if before_line is not None and num >= before_line:
                    break

                text = raw.strip()
                if not text:
                    continue

                entry = parse_log_line(text)
                if entry is None:
                    continue
                if categories and entry.category not in categories:
                    continue
                if apply_filter and should_exclude(entry.message):
                    continue

                # This line survives every filter.
                total_matches += 1

                # Past the byte budget we only count, never collect.
                if over_budget:
                    continue

                if timestamps:
                    formatted = f"[{entry.timestamp[11:19]}] {entry.message}"
                else:
                    formatted = entry.message

                cost = len(formatted.encode('utf-8')) + 1  # +1 for the joining newline
                if used_bytes + cost > MAX_BYTES and collected:
                    over_budget = True
                    continue

                if start_line is None:
                    start_line = num
                collected.append(formatted)
                last_line = num
                used_bytes += cost
    except Exception:
        # Best effort: return whatever was gathered before the failure.
        pass

    returned = len(collected)
    return {
        "logs": "\n".join(collected),
        "start_line": start_line or 0,
        "last_line": last_line,
        "remaining": total_matches - returned,
        "has_more": total_matches > returned,
    }
|
|
203
|
+
|
|
204
|
+
|
|
205
|
+
def get_recent_logs(
    log_path: str,
    limit: int = 100,
    categories: list[str] = None,
    min_level: Optional[str] = None,
    apply_filter: bool = True
) -> list[LogEntry]:
    """Collect up to *limit* of the newest entries from the end of *log_path*.

    Args:
        log_path: path to the log file.
        limit: maximum number of entries to return.
        categories: categories to keep; defaults to DEFAULT_CATEGORIES.
        min_level: level filter. NOTE(review): despite the name this is a
            case-insensitive *equality* match, not a minimum threshold --
            confirm intent before relying on ordering semantics.
        apply_filter: drop Studio-internal messages via should_exclude().

    Returns:
        Matching entries in chronological order (oldest first).
    """
    if not os.path.exists(log_path):
        return []

    if categories is None:
        categories = DEFAULT_CATEGORIES

    picked = []
    try:
        # Walk backwards from the end so we can stop as soon as we have enough.
        for text in read_file_reverse(log_path):
            entry = parse_log_line(text)
            if entry is None:
                continue
            if categories and entry.category not in categories:
                continue
            if min_level and entry.level.lower() != min_level.lower():
                continue
            if apply_filter and should_exclude(entry.message):
                continue

            picked.append(entry)
            if len(picked) >= limit:
                break
    except Exception:
        pass  # best effort: return what we collected

    # We gathered newest-first; flip to chronological order.
    picked.reverse()
    return picked
|
|
258
|
+
|
|
259
|
+
|
|
260
|
+
def get_all_logs(
    log_path: str,
    limit: int = 100,
    min_level: Optional[str] = None
) -> list[LogEntry]:
    """Fetch recent entries of every category (no category filtering)."""
    # An empty category list disables the category filter in get_recent_logs.
    return get_recent_logs(log_path, limit=limit, categories=[], min_level=min_level)
|
|
269
|
+
|
|
270
|
+
|
|
271
|
+
def search_logs(
    log_path: str,
    pattern: str,
    limit: int = 50,
    categories: list[str] = None
) -> list[LogEntry]:
    """Search the log file (scanning newest-first) for lines matching *pattern*.

    Args:
        log_path: path to the log file.
        pattern: regular expression, matched case-insensitively against the raw line.
        limit: maximum number of entries to return.
        categories: restrict results to these categories; None searches all.

    Returns:
        Matching entries in chronological order (oldest first). An invalid
        pattern returns an empty list instead of raising.
    """
    if not os.path.exists(log_path):
        return []

    # Fix: an invalid pattern previously raised re.error out of this function,
    # while the sibling search_logs_from_line handled it gracefully. Be
    # consistent and degrade to "no matches".
    try:
        regex = re.compile(pattern, re.IGNORECASE)
    except re.error:
        return []

    entries = []
    try:
        for line in read_file_reverse(log_path):
            # Cheap raw-line prefilter before paying for a full parse.
            if not regex.search(line):
                continue

            entry = parse_log_line(line)
            if not entry:
                continue

            if categories and entry.category not in categories:
                continue

            entries.append(entry)
            if len(entries) >= limit:
                break
    except Exception:
        pass  # best effort: return what we collected

    # Gathered newest-first; return chronological order.
    return list(reversed(entries))
|
|
311
|
+
|
|
312
|
+
|
|
313
|
+
def search_logs_from_line(
    log_path: str,
    pattern: str,
    after_line: int = None,
    before_line: int = None,
    timestamps: bool = False,
    categories: list[str] = None,
    apply_filter: bool = True
) -> dict:
    """Regex-search log messages within a line-number range of *log_path*.

    Args:
        log_path: path to the log file.
        pattern: regular expression, matched case-insensitively against the message.
        after_line: only lines strictly after this number are searched.
        before_line: searching stops at this line number.
        timestamps: prefix each hit with its [HH:MM:SS] time.
        categories: categories to search; defaults to DEFAULT_CATEGORIES.
        apply_filter: drop Studio-internal messages via should_exclude().

    Returns:
        dict with "logs" (each hit prefixed "lineno|"), "start_line",
        "last_line", "match_count", "remaining" and "has_more"; or
        {"error": ...} when the pattern does not compile.
    """
    MAX_BYTES = 32000  # cap on the total size of the returned text

    if not os.path.exists(log_path):
        return {"logs": "", "start_line": 0, "last_line": 0, "match_count": 0, "remaining": 0, "has_more": False}

    if categories is None:
        categories = DEFAULT_CATEGORIES

    try:
        regex = re.compile(pattern, re.IGNORECASE)
    except re.error:
        return {"error": f"Invalid regex pattern: {pattern}"}

    start_line = None
    last_line = 0
    used_bytes = 0
    collected = []
    total_matches = 0
    over_budget = False

    try:
        with open(log_path, 'r', encoding='utf-8', errors='ignore') as fh:
            for num, raw in enumerate(fh, 1):
                # Honor the requested line window.
                if after_line is not None and num <= after_line:
                    continue
                if before_line is not None and num >= before_line:
                    break

                text = raw.strip()
                if not text:
                    continue

                entry = parse_log_line(text)
                if entry is None:
                    continue
                if categories and entry.category not in categories:
                    continue
                if apply_filter and should_exclude(entry.message):
                    continue
                if not regex.search(entry.message):
                    continue

                # This message matched the pattern.
                total_matches += 1

                # Past the byte budget we only count, never collect.
                if over_budget:
                    continue

                if timestamps:
                    formatted = f"{num}|[{entry.timestamp[11:19]}] {entry.message}"
                else:
                    formatted = f"{num}|{entry.message}"

                cost = len(formatted.encode('utf-8')) + 1  # +1 for the joining newline
                if used_bytes + cost > MAX_BYTES and collected:
                    over_budget = True
                    continue

                if start_line is None:
                    start_line = num
                collected.append(formatted)
                last_line = num
                used_bytes += cost
    except Exception:
        pass  # best effort: return what we collected

    returned = len(collected)
    return {
        "logs": "\n".join(collected),
        "start_line": start_line or 0,
        "last_line": last_line,
        "match_count": returned,
        "remaining": total_matches - returned,
        "has_more": total_matches > returned,
    }
|
|
427
|
+
|
|
428
|
+
|
|
429
|
+
def find_latest_studio_log() -> Optional[str]:
    """Return the path of the most recently modified Studio log, or None."""
    if not os.path.exists(LOG_DIR):
        return None

    candidates = [
        os.path.join(LOG_DIR, name)
        for name in os.listdir(LOG_DIR)
        if "Studio" in name and name.endswith(".log")
    ]
    if not candidates:
        return None

    # Newest by modification time wins.
    return max(candidates, key=os.path.getmtime)
|
|
445
|
+
|
|
446
|
+
|
|
447
|
+
def clean_old_logs(days: int = 7) -> int:
    """Delete .log files in LOG_DIR older than *days* days; return how many were removed."""
    if not os.path.exists(LOG_DIR):
        return 0

    from datetime import datetime
    now = datetime.now().timestamp()
    max_age = days * 86400  # age threshold in seconds

    removed = 0
    for name in os.listdir(LOG_DIR):
        if not name.endswith(".log"):
            continue
        full = os.path.join(LOG_DIR, name)
        try:
            if now - os.path.getmtime(full) > max_age:
                os.remove(full)
                removed += 1
        except Exception:
            # Skip files we cannot stat or delete (e.g. still open by Studio).
            pass

    return removed
|
|
@@ -18,7 +18,7 @@ from .windows_utils import (
|
|
|
18
18
|
from .log_utils import get_recent_logs, search_logs, clean_old_logs
|
|
19
19
|
from .toolbar_detector import detect_toolbar_state, detect_toolbar_state_with_debug
|
|
20
20
|
|
|
21
|
-
mcp = FastMCP("roblox-studio-mcp")
|
|
21
|
+
mcp = FastMCP("roblox-studio-physical-operation-mcp")
|
|
22
22
|
|
|
23
23
|
# 截图输出目录 (系统临时文件夹)
|
|
24
24
|
import tempfile
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: roblox-studio-physical-operation-mcp
|
|
3
|
-
Version: 0.1.0
|
|
3
|
+
Version: 0.1.1
|
|
4
4
|
Summary: MCP server for controlling Roblox Studio - toolbar detection, game control, log analysis
|
|
5
5
|
Project-URL: Homepage, https://github.com/white-dragon-tools/roblox-studio-physical-operation-mcp
|
|
6
6
|
Project-URL: Repository, https://github.com/white-dragon-tools/roblox-studio-physical-operation-mcp
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
roblox_studio_physical_operation_mcp/__init__.py,sha256=aBu34q8vbpcQdnO9ebtAXxJayGqlbPMAzovy3PsJZEw,364
|
|
2
|
+
roblox_studio_physical_operation_mcp/__main__.py,sha256=-H8yyFArBqNGyMxDs4bwQYiaNpN98Cm_ccZTy7dk09Y,127
|
|
3
|
+
roblox_studio_physical_operation_mcp/log_filter.py,sha256=sJ98G5wQQtOYVwNBY_6bH39qkgMKyMsERYbztOjegxU,2022
|
|
4
|
+
roblox_studio_physical_operation_mcp/log_utils.py,sha256=Kcnnz13P_QEZBKsnp6-4I0AMg_cQP1pXXRL_35V0QuQ,13935
|
|
5
|
+
roblox_studio_physical_operation_mcp/server.py,sha256=VaH1h-jMAlPNZy2IZNDtaDWyIR-TqaXw-Hr-QEvYKyA,17358
|
|
6
|
+
roblox_studio_physical_operation_mcp/studio_manager.py,sha256=xwxi3LWtGJSOWJeYKpsKvlwEUUmyyO2CmTsiTdjhI9M,13364
|
|
7
|
+
roblox_studio_physical_operation_mcp/toolbar_detector.py,sha256=vC_GiinNadpCqDgsHZAC6dvK0WrjWoZoc-4SEMFOfbU,15996
|
|
8
|
+
roblox_studio_physical_operation_mcp/windows_utils.py,sha256=efYiuDnJJzec44dars8f-7I80y68PT8GOb6TGP6WpIs,16708
|
|
9
|
+
roblox_studio_physical_operation_mcp/templates/pause.png,sha256=zwG1qPOd12fUPIrr7t_yktwVq2TmdpdphirQZR8DGk4,258
|
|
10
|
+
roblox_studio_physical_operation_mcp/templates/play.png,sha256=v7mQPlydJs7Kce4ybjLrO3iioQXIAAzUPQV_2x_qVz8,421
|
|
11
|
+
roblox_studio_physical_operation_mcp/templates/stop.png,sha256=eFliYfZEt9ALm-agbbGoQvALmEBe8vg9kvaiaAgjAOc,201
|
|
12
|
+
roblox_studio_physical_operation_mcp-0.1.1.data/data/roblox_studio_physical_operation_mcp/templates/pause.png,sha256=zwG1qPOd12fUPIrr7t_yktwVq2TmdpdphirQZR8DGk4,258
|
|
13
|
+
roblox_studio_physical_operation_mcp-0.1.1.data/data/roblox_studio_physical_operation_mcp/templates/play.png,sha256=v7mQPlydJs7Kce4ybjLrO3iioQXIAAzUPQV_2x_qVz8,421
|
|
14
|
+
roblox_studio_physical_operation_mcp-0.1.1.data/data/roblox_studio_physical_operation_mcp/templates/stop.png,sha256=eFliYfZEt9ALm-agbbGoQvALmEBe8vg9kvaiaAgjAOc,201
|
|
15
|
+
roblox_studio_physical_operation_mcp-0.1.1.dist-info/METADATA,sha256=ADzmkD3vyKsNuHqaaOXla5rskPbeH2I9I6smnM9jP1w,7844
|
|
16
|
+
roblox_studio_physical_operation_mcp-0.1.1.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
|
|
17
|
+
roblox_studio_physical_operation_mcp-0.1.1.dist-info/entry_points.txt,sha256=26pkCdFNNNWJyE4veDR1M1b8qJqMMxQ6NHn5BqH5IUk,99
|
|
18
|
+
roblox_studio_physical_operation_mcp-0.1.1.dist-info/licenses/LICENSE,sha256=LjARDhdgOGl9MR23LXmC5mwnYYJ0zww90CGg--aADAI,1075
|
|
19
|
+
roblox_studio_physical_operation_mcp-0.1.1.dist-info/RECORD,,
|
|
@@ -1,13 +0,0 @@
|
|
|
1
|
-
roblox_studio_physical_operation_mcp/__init__.py,sha256=aBu34q8vbpcQdnO9ebtAXxJayGqlbPMAzovy3PsJZEw,364
|
|
2
|
-
roblox_studio_physical_operation_mcp/__main__.py,sha256=-H8yyFArBqNGyMxDs4bwQYiaNpN98Cm_ccZTy7dk09Y,127
|
|
3
|
-
roblox_studio_physical_operation_mcp/log_filter.py,sha256=sJ98G5wQQtOYVwNBY_6bH39qkgMKyMsERYbztOjegxU,2022
|
|
4
|
-
roblox_studio_physical_operation_mcp/log_utils.py,sha256=1EV6c2qemKuxHASQH_o3IDa4S_hjSYiodlnReXxv-b0,13405
|
|
5
|
-
roblox_studio_physical_operation_mcp/server.py,sha256=XPnRG7hW_klhvVsEFrbiqR7oSm_AOmnbGzdMnv9avVw,17339
|
|
6
|
-
roblox_studio_physical_operation_mcp/studio_manager.py,sha256=xwxi3LWtGJSOWJeYKpsKvlwEUUmyyO2CmTsiTdjhI9M,13364
|
|
7
|
-
roblox_studio_physical_operation_mcp/toolbar_detector.py,sha256=vC_GiinNadpCqDgsHZAC6dvK0WrjWoZoc-4SEMFOfbU,15996
|
|
8
|
-
roblox_studio_physical_operation_mcp/windows_utils.py,sha256=efYiuDnJJzec44dars8f-7I80y68PT8GOb6TGP6WpIs,16708
|
|
9
|
-
roblox_studio_physical_operation_mcp-0.1.0.dist-info/METADATA,sha256=y4Tr5pg6pmBy5fw5ycamrCFwoUTAB0pJHbxTwqlTsxk,7844
|
|
10
|
-
roblox_studio_physical_operation_mcp-0.1.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
|
|
11
|
-
roblox_studio_physical_operation_mcp-0.1.0.dist-info/entry_points.txt,sha256=26pkCdFNNNWJyE4veDR1M1b8qJqMMxQ6NHn5BqH5IUk,99
|
|
12
|
-
roblox_studio_physical_operation_mcp-0.1.0.dist-info/licenses/LICENSE,sha256=LjARDhdgOGl9MR23LXmC5mwnYYJ0zww90CGg--aADAI,1075
|
|
13
|
-
roblox_studio_physical_operation_mcp-0.1.0.dist-info/RECORD,,
|
|
File without changes
|
|
File without changes
|