@misterhuydo/sentinel 1.5.39 → 1.5.40
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json
CHANGED

@@ -1 +1 @@
-__version__ = "1.5.39"
+__version__ = "1.5.40"
@@ -152,15 +152,20 @@ COMPLETE TOOL REFERENCE
 ── Log Management ─────────────────────────────────────────────────────────────
 
 6. fetch_logs    Run fetch_log.sh on demand — pull fresh logs from servers now.
-
-
+                 When grep_filter is set, results go to TEMP files (workspace/fetched_temp/)
+                 and do NOT affect the main rolling logs. Temp files are cleared on every
+                 custom fetch, and filter_logs searches them automatically.
+                 Use grep_filter for INFO-level or feature-specific patterns that the default
+                 WARN/ERROR filter would miss.
+                 "fetch logs", "fetch SSOLWA with filter provision/phone", "fetch without filter"
 
 7. search_logs   Live SSH grep on production servers using GREP_FILTER.
                  Falls back to cached files if SSH unavailable.
                  "search logs for illegal PIN in 1881", "find NullPointerException in STS"
 
-8. filter_logs   Instant keyword/regex search on locally-synced logs
-
+8. filter_logs   Instant keyword/regex search on locally-synced logs + any temp fetch results.
+                 No SSH, sub-second. Also searches workspace/fetched_temp/ if a custom
+                 fetch was recently run.
                  "filter logs for TryDig", "errors last 6h", "find appid=X in STS logs"
 
 9. tail_log      Last N lines of a log source live, no filter.
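
For illustration, the fetch-then-filter flow described above might look like the following tool-call payloads. Only grep_filter is confirmed by the input_schema fragment later in this diff; the other key names and the payload shape are assumptions.

    # Hypothetical tool-call payloads for the fetch-then-filter flow above.
    fetch_call = {
        "name": "fetch_logs",
        "input": {
            "source": "SSOLWA",                # assumed source-selector key
            "grep_filter": "provision/phone",  # routes results to workspace/fetched_temp/
        },
    }
    filter_call = {
        "name": "filter_logs",
        "input": {
            "query": "provision/phone",  # assumed key; filter_logs also searches temp results
        },
    }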
@@ -1156,7 +1161,14 @@ _TOOLS = [
             "Run fetch_log.sh for one or all configured log sources to pull the latest logs "
             "from remote servers right now. Use for: 'fetch logs', 'run fetch_log.sh', "
             "'grab latest logs from SSOLWA', 'try fetch_log.sh for STS', "
-            "'pull logs from server', 'get fresh logs'."
+            "'pull logs from server', 'get fresh logs'.\n\n"
+            "IMPORTANT: When a grep_filter is provided, results go to a TEMPORARY location "
+            "(workspace/fetched_temp/) and do NOT overwrite the main rolling logs. "
+            "The temp files are cleared on every custom fetch. "
+            "After a custom fetch, filter_logs will automatically search the temp results too. "
+            "Use grep_filter whenever the user wants to find INFO-level or feature-specific log lines "
+            "(e.g. 'provision/phone', 'appId', startup messages) — the default filter only captures "
+            "WARN|ERROR|FATAL|Exception|Error lines."
         ),
         "input_schema": {
             "type": "object",
@@ -1176,7 +1188,11 @@ _TOOLS = [
                 },
                 "grep_filter": {
                     "type": "string",
-                    "description":
+                    "description": (
+                        "Custom grep filter (regex). Results saved to temp files, main logs untouched. "
+                        "Pass 'none' to fetch all lines unfiltered. "
+                        "Use when searching for INFO-level or feature-specific patterns."
+                    ),
                 },
             },
         },
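
As a hedged sketch of the pre-flight check this schema implies ('none' passes through as the fetch-everything sentinel; anything else must compile as a regex), with a hypothetical helper name:

    import re

    def validate_grep_filter(grep_filter: str) -> str:
        # Hypothetical helper mirroring the schema above: "none" is a sentinel
        # meaning "fetch all lines unfiltered"; anything else must be a valid regex.
        if grep_filter.lower() != "none":
            re.compile(grep_filter)  # raises re.error on a bad pattern
        return grep_filter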
@@ -2814,104 +2830,44 @@ async def _run_tool(name: str, inputs: dict, cfg_loader, store, slack_client=Non
             return json.dumps({"error": f"Invalid regex: {e}"})
 
         synced_base = Path("workspace/synced")
-        if not synced_base.exists():
-            return json.dumps({
-                "error": "No synced logs found.",
-                "hint": "Log sync runs every SYNC_INTERVAL_SECONDS (default 300s). "
-                        "If just started, wait a minute then try again.",
-            })
+        temp_base = Path(cfg_loader.sentinel.workspace_dir) / "fetched_temp"
 
         # Build cutoff timestamp for since_hours filter
         cutoff = None
         if since_hours:
             cutoff = _datetime.now(_tz.utc) - timedelta(hours=int(since_hours))
 
-        #
-
-
-
-
-
-
-        if not src_dirs:
-            available = [d.name for d in synced_base.iterdir() if d.is_dir()]
-            return json.dumps({
-                "error": f"No synced source matching '{source_f}'",
-                "available_sources": available,
-            })
-
-        results = []
-        total_matches = 0
-        for src_dir in src_dirs:
-            for log_file in sorted(src_dir.glob("*")):
-                try:
-                    lines = log_file.read_text(encoding="utf-8", errors="replace").splitlines()
-                    matches = []
-                    for line in lines:
-                        if not pat.search(line):
-                            continue
-                        if cutoff:
-                            # Try to parse timestamp from line
-                            from .log_fetcher import _parse_line_ts
-                            ts = _parse_line_ts(line)
-                            if ts and ts < cutoff:
-                                continue
-                        matches.append(line[:300])
-                        if len(matches) >= max_matches:
-                            break
-                    if matches:
-                        results.append({
-                            "source": src_dir.name,
-                            "file": log_file.name,
-                            "matches": matches,
-                        })
-                        total_matches += len(matches)
-                except Exception:
-                    pass
-
-        if not results:
-            return json.dumps({
-                "query": query_f,
-                "total_matches": 0,
-                "sources_searched": [d.name for d in src_dirs],
-                "note": "No matches found in synced logs.",
-            })
+        # Collect candidate directories from both synced/ and fetched_temp/
+        def _collect_dirs(base):
+            if not base.exists():
+                return []
+            if source_f:
+                return [d for d in sorted(base.iterdir()) if d.is_dir() and source_f in d.name.lower()]
+            return [d for d in sorted(base.iterdir()) if d.is_dir()]
 
+        src_dirs = _collect_dirs(synced_base)
+        temp_dirs = _collect_dirs(temp_base)
+        all_search_dirs = src_dirs + [(d, True) for d in temp_dirs]  # True = is_temp
 
-
-
-        except _re.error as e:
-            return json.dumps({"error": f"Invalid regex: {e}"})
+        # Flatten to (dir, is_temp) pairs
+        search_pairs = [(d, False) for d in src_dirs] + [(d, True) for d in temp_dirs]
 
-
-
+        if not search_pairs:
+            available = ([d.name for d in synced_base.iterdir() if d.is_dir()] if synced_base.exists() else [])
             return json.dumps({
-                "error": "No synced logs found.",
+                "error": f"No synced or temp source matching '{source_f}'" if source_f else "No logs found.",
+                "available_sources": available,
                 "hint": "Log sync runs every SYNC_INTERVAL_SECONDS (default 300s). "
                         "If just started, wait a minute then try again.",
             })
 
-
-        if since_hours:
-            cutoff = _datetime.now(_tz.utc) - timedelta(hours=int(since_hours))
-
-        if source_f:
-            src_dirs = [d for d in sorted(synced_base.iterdir())
-                        if d.is_dir() and source_f in d.name.lower()]
-        else:
-            src_dirs = [d for d in sorted(synced_base.iterdir()) if d.is_dir()]
-
-        if not src_dirs:
-            available = [d.name for d in synced_base.iterdir() if d.is_dir()]
-            return json.dumps({
-                "error": f"No synced source matching '{source_f}'",
-                "available_sources": available,
-            })
-
-        all_matches = []  # list of (source_name, line)
+        all_matches = []  # list of (source_label, line)
         sources_hit = set()
-        for src_dir in src_dirs:
-            for log_file in sorted(src_dir.glob("*")):
+        for src_dir, is_temp in search_pairs:
+            label = src_dir.name + (" [temp]" if is_temp else "")
+            for log_file in sorted(src_dir.glob("**/*")):
+                if not log_file.is_file():
+                    continue
                 try:
                     lines = log_file.read_text(encoding="utf-8", errors="replace").splitlines()
                     for line in lines:
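
Read in isolation, the new directory-collection logic amounts to the self-contained sketch below (names follow the hunk; it is an illustration, not the shipped function). Note the hunk builds both all_search_dirs and search_pairs with the same contents; only search_pairs is consumed downstream.

    from pathlib import Path

    def collect_search_pairs(synced_base: Path, temp_base: Path, source_f: str = ""):
        # Gather matching subdirectories from the rolling synced/ logs and the
        # fetched_temp/ results, tagging each with an is_temp flag so matches
        # can later be labelled "[temp]".
        def _collect(base: Path) -> list:
            if not base.exists():
                return []
            dirs = [d for d in sorted(base.iterdir()) if d.is_dir()]
            if source_f:
                dirs = [d for d in dirs if source_f in d.name.lower()]
            return dirs

        return [(d, False) for d in _collect(synced_base)] + \
               [(d, True) for d in _collect(temp_base)]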
@@ -2922,8 +2878,8 @@ async def _run_tool(name: str, inputs: dict, cfg_loader, store, slack_client=Non
                            ts = _parse_line_ts(line)
                            if ts and ts < cutoff:
                                continue
-                        all_matches.append((src_dir.name, line[:300]))
-                        sources_hit.add(src_dir.name)
+                        all_matches.append((label, line[:300]))
+                        sources_hit.add(label)
                        if len(all_matches) >= max_matches:
                            break
                except Exception:
@@ -2932,12 +2888,19 @@ async def _run_tool(name: str, inputs: dict, cfg_loader, store, slack_client=Non
                break
 
        total = len(all_matches)
+        sources_searched = [d.name + (" [temp]" if is_temp else "") for d, is_temp in search_pairs]
        if total == 0:
+            has_temp = bool(temp_dirs)
            return json.dumps({
                "query": query_f,
                "total_matches": 0,
-                "sources_searched": [d.name for d in src_dirs],
-                "note": "No matches found in synced logs.",
+                "sources_searched": sources_searched,
+                "note": (
+                    "No matches found. "
+                    + ("Temp fetch results were also checked. " if has_temp else "")
+                    + "If searching for a specific log line from a new feature, use fetch_logs "
+                      "with a matching grep_filter first — the default filter only captures WARN/ERROR."
+                ),
            })
 
        # Pattern grouping: count occurrences of each error signature
@@ -2983,7 +2946,7 @@ async def _run_tool(name: str, inputs: dict, cfg_loader, store, slack_client=Non
            "query": query_f,
            "total_matches": total,
            "sources_hit": sorted(sources_hit),
-            "sources_searched": [d.name for d in src_dirs],
+            "sources_searched": sources_searched,
            "top_patterns": top_patterns,
            "sample_lines": sample_lines,
            "time_span": time_span,
@@ -3081,6 +3044,20 @@ async def _run_tool(name: str, inputs: dict, cfg_loader, store, slack_client=Non
        if not props_files:
            return json.dumps({"error": f"No log-config found matching '{source_filter}'"})
 
+        # When a custom grep_filter is set, route output to a temp directory
+        # so the main rolling logs are never polluted by user-requested searches.
+        # The temp dir is cleared before each custom fetch.
+        workspace_dir = Path(cfg_loader.sentinel.workspace_dir)
+        temp_base = workspace_dir / "fetched_temp"
+        use_temp = bool(grep_override)
+
+        if use_temp:
+            # Clear old temp results
+            import shutil
+            if temp_base.exists():
+                shutil.rmtree(temp_base)
+            temp_base.mkdir(parents=True, exist_ok=True)
+
        results = []
        for props in props_files:
            env = os.environ.copy()
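
The clear-before-write step is the standard shutil rmtree-then-mkdir idiom; as a standalone sketch (function name hypothetical):

    import shutil
    from pathlib import Path

    def reset_temp_dir(temp_base: Path) -> Path:
        # Drop any results left by a previous custom fetch, then recreate the
        # directory so fetch_log.sh writes into a clean workspace/fetched_temp/.
        if temp_base.exists():
            shutil.rmtree(temp_base)
        temp_base.mkdir(parents=True, exist_ok=True)
        return temp_base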
@@ -3088,6 +3065,9 @@ async def _run_tool(name: str, inputs: dict, cfg_loader, store, slack_client=Non
                env["TAIL"] = str(tail_override)
            if grep_override:
                env["SENTINEL_GREP_FILTER_OVERRIDE"] = grep_override
+            if use_temp:
+                # Tell fetch_log.sh where to write output files
+                env["OUTPUT_DIR"] = str(temp_base)
 
            cmd = ["bash", str(script)]
            if debug:
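
The environment plumbing is plain subprocess usage; a minimal sketch, assuming fetch_log.sh honours OUTPUT_DIR and SENTINEL_GREP_FILTER_OVERRIDE as the hunk indicates:

    import os
    import subprocess
    from pathlib import Path

    def run_fetch(script: Path, grep_override: str, temp_base: Path) -> subprocess.CompletedProcess:
        # Pass the override filter and the temp output directory to the fetch
        # script via environment variables, matching the hunk above.
        env = os.environ.copy()
        env["SENTINEL_GREP_FILTER_OVERRIDE"] = grep_override
        env["OUTPUT_DIR"] = str(temp_base)  # keeps the main rolling logs untouched
        return subprocess.run(
            ["bash", str(script)],
            env=env, capture_output=True, text=True, timeout=120,
        )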
@@ -3100,13 +3080,24 @@ async def _run_tool(name: str, inputs: dict, cfg_loader, store, slack_client=Non
                )
                output = (r.stdout or "").strip()
                stderr = (r.stderr or "").strip()
+
+                # Collect lines from temp files for this source
+                temp_lines = []
+                if use_temp:
+                    for f in sorted((temp_base / props.stem).glob("*.log")) if (temp_base / props.stem).exists() else []:
+                        try:
+                            temp_lines.extend(f.read_text(encoding="utf-8", errors="replace").splitlines())
+                        except Exception:
+                            pass
+
                results.append({
                    "source": props.stem,
                    "returncode": r.returncode,
                    "output": output[-2000:] if output else "",
                    "stderr": stderr[-1000:] if stderr else "",
+                    **({"lines": temp_lines, "temp_file": str(temp_base / props.stem)} if use_temp else {}),
                })
-                logger.info("Boss fetch_logs %s rc=%d", props.stem, r.returncode)
+                logger.info("Boss fetch_logs %s rc=%d (temp=%s)", props.stem, r.returncode, use_temp)
            except subprocess.TimeoutExpired:
                results.append({"source": props.stem, "error": "timed out after 120s"})
            except Exception as e: