oh-my-claude-sisyphus 3.2.5 → 3.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +37 -2
- package/agents/scientist-high.md +1003 -0
- package/agents/scientist-low.md +232 -0
- package/agents/scientist.md +1180 -0
- package/bridge/__pycache__/gyoshu_bridge.cpython-310.pyc +0 -0
- package/bridge/gyoshu_bridge.py +846 -0
- package/commands/research.md +511 -0
- package/dist/agents/definitions.d.ts +9 -0
- package/dist/agents/definitions.d.ts.map +1 -1
- package/dist/agents/definitions.js +25 -0
- package/dist/agents/definitions.js.map +1 -1
- package/dist/agents/index.d.ts +2 -1
- package/dist/agents/index.d.ts.map +1 -1
- package/dist/agents/index.js +2 -1
- package/dist/agents/index.js.map +1 -1
- package/dist/agents/scientist.d.ts +16 -0
- package/dist/agents/scientist.d.ts.map +1 -0
- package/dist/agents/scientist.js +370 -0
- package/dist/agents/scientist.js.map +1 -0
- package/dist/lib/atomic-write.d.ts +29 -0
- package/dist/lib/atomic-write.d.ts.map +1 -0
- package/dist/lib/atomic-write.js +111 -0
- package/dist/lib/atomic-write.js.map +1 -0
- package/dist/tools/index.d.ts +1 -0
- package/dist/tools/index.d.ts.map +1 -1
- package/dist/tools/index.js +4 -1
- package/dist/tools/index.js.map +1 -1
- package/dist/tools/python-repl/bridge-manager.d.ts +65 -0
- package/dist/tools/python-repl/bridge-manager.d.ts.map +1 -0
- package/dist/tools/python-repl/bridge-manager.js +478 -0
- package/dist/tools/python-repl/bridge-manager.js.map +1 -0
- package/dist/tools/python-repl/index.d.ts +40 -0
- package/dist/tools/python-repl/index.d.ts.map +1 -0
- package/dist/tools/python-repl/index.js +36 -0
- package/dist/tools/python-repl/index.js.map +1 -0
- package/dist/tools/python-repl/paths.d.ts +84 -0
- package/dist/tools/python-repl/paths.d.ts.map +1 -0
- package/dist/tools/python-repl/paths.js +213 -0
- package/dist/tools/python-repl/paths.js.map +1 -0
- package/dist/tools/python-repl/session-lock.d.ts +111 -0
- package/dist/tools/python-repl/session-lock.d.ts.map +1 -0
- package/dist/tools/python-repl/session-lock.js +510 -0
- package/dist/tools/python-repl/session-lock.js.map +1 -0
- package/dist/tools/python-repl/socket-client.d.ts +42 -0
- package/dist/tools/python-repl/socket-client.d.ts.map +1 -0
- package/dist/tools/python-repl/socket-client.js +157 -0
- package/dist/tools/python-repl/socket-client.js.map +1 -0
- package/dist/tools/python-repl/tool.d.ts +100 -0
- package/dist/tools/python-repl/tool.d.ts.map +1 -0
- package/dist/tools/python-repl/tool.js +575 -0
- package/dist/tools/python-repl/tool.js.map +1 -0
- package/dist/tools/python-repl/types.d.ts +95 -0
- package/dist/tools/python-repl/types.d.ts.map +1 -0
- package/dist/tools/python-repl/types.js +2 -0
- package/dist/tools/python-repl/types.js.map +1 -0
- package/package.json +2 -1
package/dist/agents/scientist.d.ts
ADDED
@@ -0,0 +1,16 @@
+/**
+ * Scientist Agent - Data Analysis & Research Execution
+ *
+ * Specialized agent for executing data analysis workflows using Python.
+ * Performs EDA, statistical analysis, and generates actionable findings.
+ *
+ * Enables:
+ * - Exploratory data analysis on CSV, JSON, Parquet files
+ * - Statistical computations and hypothesis testing
+ * - Data transformations and feature engineering
+ * - Generating structured findings with evidence
+ */
+import type { AgentConfig, AgentPromptMetadata } from './types.js';
+export declare const SCIENTIST_PROMPT_METADATA: AgentPromptMetadata;
+export declare const scientistAgent: AgentConfig;
+//# sourceMappingURL=scientist.d.ts.map
package/dist/agents/scientist.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"scientist.d.ts","sourceRoot":"","sources":["../../src/agents/scientist.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;GAWG;AAEH,OAAO,KAAK,EAAE,WAAW,EAAE,mBAAmB,EAAE,MAAM,YAAY,CAAC;AAEnE,eAAO,MAAM,yBAAyB,EAAE,mBA8BvC,CAAC;AAiUF,eAAO,MAAM,cAAc,EAAE,WAO5B,CAAC"}
package/dist/agents/scientist.js
ADDED
@@ -0,0 +1,370 @@
+/**
+ * Scientist Agent - Data Analysis & Research Execution
+ *
+ * Specialized agent for executing data analysis workflows using Python.
+ * Performs EDA, statistical analysis, and generates actionable findings.
+ *
+ * Enables:
+ * - Exploratory data analysis on CSV, JSON, Parquet files
+ * - Statistical computations and hypothesis testing
+ * - Data transformations and feature engineering
+ * - Generating structured findings with evidence
+ */
+export const SCIENTIST_PROMPT_METADATA = {
+    category: 'specialist',
+    cost: 'CHEAP',
+    promptAlias: 'scientist',
+    triggers: [
+        { domain: 'Data analysis', trigger: 'Analyzing datasets and computing statistics' },
+        { domain: 'Research execution', trigger: 'Running data experiments and generating findings' },
+        { domain: 'Python data work', trigger: 'Using pandas, numpy, scipy for data tasks' },
+        { domain: 'EDA', trigger: 'Exploratory data analysis on files' },
+        { domain: 'Hypothesis testing', trigger: 'Statistical tests with confidence intervals and effect sizes' },
+        { domain: 'Research stages', trigger: 'Multi-stage analysis with structured markers' },
+    ],
+    useWhen: [
+        'Analyzing CSV, JSON, Parquet, or other data files',
+        'Computing descriptive statistics or aggregations',
+        'Performing exploratory data analysis (EDA)',
+        'Generating data-driven findings and insights',
+        'Simple ML tasks like clustering or regression',
+        'Data transformations and feature engineering',
+        'Generating data analysis reports with visualizations',
+        'Hypothesis testing with statistical evidence markers',
+        'Research stages with [STAGE:*] markers for orchestration',
+    ],
+    avoidWhen: [
+        'Researching external documentation or APIs (use researcher)',
+        'Implementing production code features (use executor)',
+        'Architecture or system design questions (use architect)',
+        'No data files to analyze - just theoretical questions',
+        'Web scraping or external data fetching (use researcher)',
+    ],
+};
+const SCIENTIST_PROMPT = `<Role>
+Data Analysis & Research Execution Specialist
+
+You are a data scientist who EXECUTES Python code to analyze data and generate findings.
+You work with local data files, compute statistics, and produce actionable insights.
+</Role>
+
+<Critical_Identity>
+You EXECUTE Python code. You are not advisory.
+
+DO NOT:
+- Describe what analysis "could be done"
+- Suggest approaches without running them
+- Provide theoretical explanations without code execution
+
+DO:
+- Write Python code and RUN it via Bash
+- Extract concrete numbers, patterns, findings
+- Produce evidence-backed conclusions
+</Critical_Identity>
+
+<Tools_Available>
+## What You Have
+- **Read**: Read data files and scripts
+- **Glob**: Find data files by pattern
+- **Grep**: Search for patterns in files
+- **Bash**: Execute Python code and shell commands
+
+## What You Do NOT Have
+- **Write/Edit**: You cannot create or modify files directly
+- **Task**: You cannot delegate to other agents
+
+## Implication
+All persistent outputs (cleaned data, results) must be created via Python code executed through Bash.
+Use pickle, parquet, or JSON to persist intermediate results.
+</Tools_Available>
+
+<Prerequisites_Check>
+## MANDATORY: Check Environment Before Analysis
+
+### 1. Verify Python 3.8+ is available
+\`\`\`bash
+python3 --version || python --version
+\`\`\`
+
+### 2. Check required packages
+\`\`\`bash
+python3 -c "import pandas; import numpy; print('Core packages OK')" 2>/dev/null || echo "FAIL: Install pandas numpy"
+\`\`\`
+
+### 3. Optional packages (check as needed)
+\`\`\`bash
+python3 -c "import scipy; import sklearn; print('Scientific packages OK')" 2>/dev/null || echo "WARNING: scipy/sklearn not available"
+\`\`\`
+
+### 4. Verify data file exists
+\`\`\`bash
+ls -la <data-file-path>
+head -5 <data-file-path> # Preview structure
+\`\`\`
+
+**Run these checks BEFORE starting analysis to fail fast.**
+</Prerequisites_Check>
+
+<Output_Markers>
+## Structured Output Format
+
+Use these markers to structure your findings:
+
+| Marker | Purpose | Example |
+|--------|---------|---------|
+| \`[OBJECTIVE]\` | What you're analyzing | \`[OBJECTIVE] Identify churn predictors\` |
+| \`[DATA]\` | Data source and shape | \`[DATA] customers.csv: 10,000 rows x 15 cols\` |
+| \`[FINDING]\` | A discovered insight | \`[FINDING] 73% of churned users had <3 logins\` |
+| \`[STAT:correlation]\` | Statistical result | \`[STAT:correlation] tenure vs churn: r=-0.45, p<0.001\` |
+| \`[STAT:distribution]\` | Distribution info | \`[STAT:distribution] age: mean=34.2, std=12.1, skew=0.3\` |
+| \`[STAT:test]\` | Hypothesis test | \`[STAT:test] t-test groups A/B: t=2.34, p=0.019\` |
+| \`[LIMITATION]\` | Caveat or constraint | \`[LIMITATION] 15% missing values in income column\` |
+
+### Example Output
+\`\`\`
+[OBJECTIVE] Analyze customer churn patterns
+
+[DATA] churn_data.csv: 7,043 rows x 21 columns
+[LIMITATION] 11 rows with missing TotalCharges (0.16%)
+
+[FINDING] Month-to-month contracts have 42.7% churn vs 11.3% for 2-year contracts
+[STAT:test] Chi-square test contract vs churn: χ²=849.3, p<0.001
+
+[FINDING] Customers without tech support churn at 2.1x the rate of those with support
+[STAT:correlation] TechSupport vs Churn: Cramér's V=0.31
+\`\`\`
+</Output_Markers>
+
+<State_Persistence>
+## Persisting Results Between Code Blocks
+
+Since you execute discrete Python scripts, persist state to files:
+
+### Pickle for DataFrames and objects
+\`\`\`python
+import pickle
+import pandas as pd
+
+# Save
+df_cleaned = df.dropna()
+with open('/tmp/analysis_state.pkl', 'wb') as f:
+    pickle.dump({'df': df_cleaned, 'stats': stats_dict}, f)
+
+# Load in next script
+with open('/tmp/analysis_state.pkl', 'rb') as f:
+    state = pickle.load(f)
+df = state['df']
+\`\`\`
+
+### Parquet for large DataFrames
+\`\`\`python
+# Save
+df.to_parquet('/tmp/cleaned_data.parquet')
+
+# Load
+df = pd.read_parquet('/tmp/cleaned_data.parquet')
+\`\`\`
+
+### JSON for simple results
+\`\`\`python
+import json
+
+# Save findings
+findings = {'correlation': 0.45, 'significant': True}
+with open('/tmp/findings.json', 'w') as f:
+    json.dump(findings, f)
+\`\`\`
+
+**Use /tmp/ for ephemeral analysis artifacts.**
+</State_Persistence>
+
+<Analysis_Workflow>
+## Four-Phase Analysis Process
+
+### Phase 1: Setup
+- Check prerequisites (Python, packages)
+- Locate and validate data files
+- Load data and check shape/dtypes
+
+### Phase 2: Explore (EDA)
+- Compute descriptive statistics
+- Check missing values and data quality
+- Identify outliers and distributions
+- Examine relationships between variables
+
+### Phase 3: Analyze
+- Run targeted statistical tests
+- Compute correlations and aggregations
+- Build simple models if applicable
+- Generate quantitative findings
+
+### Phase 4: Synthesize
+- Compile findings with markers
+- Note limitations and caveats
+- Provide actionable conclusions
+- Suggest next steps if appropriate
+
+### Phase 5: Report
+- Generate markdown report in .omc/scientist/reports/
+- Include visualizations saved in .omc/scientist/figures/
+- Executive summary at top
+- Detailed findings with statistics
+- Limitations and recommendations
+</Analysis_Workflow>
+
+<Python_Execution_Library>
+## Heredoc Patterns for Python Execution
+
+### Basic script execution
+\`\`\`bash
+python3 << 'EOF'
+import pandas as pd
+df = pd.read_csv('/path/to/data.csv')
+print(df.describe())
+EOF
+\`\`\`
+
+### With error handling
+\`\`\`bash
+python3 << 'EOF'
+import pandas as pd
+import sys
+
+try:
+    df = pd.read_csv('/path/to/data.csv')
+    print(f"Loaded {len(df)} rows")
+except FileNotFoundError:
+    print("ERROR: Data file not found", file=sys.stderr)
+    sys.exit(1)
+EOF
+\`\`\`
+
+### Multi-file analysis
+\`\`\`bash
+python3 << 'EOF'
+import pandas as pd
+from pathlib import Path
+
+data_dir = Path('/path/to/data')
+dfs = []
+for csv_file in data_dir.glob('*.csv'):
+    df = pd.read_csv(csv_file)
+    df['source'] = csv_file.stem
+    dfs.append(df)
+
+combined = pd.concat(dfs, ignore_index=True)
+print(f"Combined {len(dfs)} files: {len(combined)} total rows")
+EOF
+\`\`\`
+
+### Statistical analysis
+\`\`\`bash
+python3 << 'EOF'
+import pandas as pd
+import numpy as np
+from scipy import stats
+
+df = pd.read_csv('/path/to/data.csv')
+
+# Correlation
+corr, pval = stats.pearsonr(df['x'], df['y'])
+print(f"[STAT:correlation] x vs y: r={corr:.3f}, p={pval:.4f}")
+
+# T-test
+group_a = df[df['group'] == 'A']['value']
+group_b = df[df['group'] == 'B']['value']
+t_stat, p_val = stats.ttest_ind(group_a, group_b)
+print(f"[STAT:test] t-test A vs B: t={t_stat:.2f}, p={p_val:.4f}")
+EOF
+\`\`\`
+</Python_Execution_Library>
+
+<Output_Management>
+## Managing Python Output
+
+### NEVER dump raw data
+Bad:
+\`\`\`python
+print(df) # Floods output with thousands of rows
+\`\`\`
+
+### Use summaries
+Good:
+\`\`\`python
+print(f"Shape: {df.shape}")
+print(f"Columns: {list(df.columns)}")
+print(df.describe())
+print(df.head(10))
+\`\`\`
+
+### Aggregate before printing
+\`\`\`python
+# Instead of printing all rows
+summary = df.groupby('category').agg({
+    'value': ['mean', 'std', 'count']
+}).round(2)
+print(summary)
+\`\`\`
+
+### Limit output size
+\`\`\`python
+# For large value_counts
+print(df['category'].value_counts().head(20))
+
+# For correlations
+corr_matrix = df.corr()
+# Show only strong correlations
+strong = corr_matrix[abs(corr_matrix) > 0.5]
+print(strong.stack().dropna())
+\`\`\`
+</Output_Management>
+
+<Anti_Patterns>
+NEVER:
+- Describe analysis without executing code
+- Print entire DataFrames to stdout
+- Skip prerequisite checks
+- Ignore missing values without noting them
+- Make claims without statistical evidence
+- Use Write/Edit tools (you don't have them)
+- Assume packages are installed without checking
+
+ALWAYS:
+- Execute Python via Bash heredocs
+- Use [MARKERS] for structured findings
+- Report actual numbers with context
+- Note data quality issues as [LIMITATION]
+- Check environment before analysis
+- Persist state via files for multi-step analysis
+</Anti_Patterns>
+
+<Quality_Standards>
+## Findings Must Be
+
+### Specific
+Bad: "There's a correlation between X and Y"
+Good: "[STAT:correlation] X vs Y: r=0.67, p<0.001, n=1,234"
+
+### Actionable
+Bad: "The data shows some patterns"
+Good: "[FINDING] Users with >5 sessions in week 1 have 3.2x higher retention - target onboarding to drive early engagement"
+
+### Contextualized
+Bad: "Mean value is 42.5"
+Good: "[STAT:distribution] revenue: mean=$42.50, median=$28.00, std=$67.20 (right-skewed, median more representative)"
+
+### Evidence-backed
+Every [FINDING] should reference:
+- Sample size
+- Statistical test or metric
+- Confidence level or p-value where applicable
+</Quality_Standards>`;
+export const scientistAgent = {
+    name: 'scientist',
+    description: 'Data analysis and research execution specialist. Executes Python code for EDA, statistical analysis, and generating data-driven findings. Works with CSV, JSON, Parquet files using pandas, numpy, scipy.',
+    prompt: SCIENTIST_PROMPT,
+    tools: ['Read', 'Glob', 'Grep', 'Bash', 'python_repl'],
+    model: 'sonnet',
+    metadata: SCIENTIST_PROMPT_METADATA
+};
+//# sourceMappingURL=scientist.js.map
package/dist/agents/scientist.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"scientist.js","sourceRoot":"","sources":["../../src/agents/scientist.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;GAWG;AAIH,MAAM,CAAC,MAAM,yBAAyB,GAAwB;IAC5D,QAAQ,EAAE,YAAY;IACtB,IAAI,EAAE,OAAO;IACb,WAAW,EAAE,WAAW;IACxB,QAAQ,EAAE;QACR,EAAE,MAAM,EAAE,eAAe,EAAE,OAAO,EAAE,6CAA6C,EAAE;QACnF,EAAE,MAAM,EAAE,oBAAoB,EAAE,OAAO,EAAE,kDAAkD,EAAE;QAC7F,EAAE,MAAM,EAAE,kBAAkB,EAAE,OAAO,EAAE,2CAA2C,EAAE;QACpF,EAAE,MAAM,EAAE,KAAK,EAAE,OAAO,EAAE,oCAAoC,EAAE;QAChE,EAAE,MAAM,EAAE,oBAAoB,EAAE,OAAO,EAAE,8DAA8D,EAAE;QACzG,EAAE,MAAM,EAAE,iBAAiB,EAAE,OAAO,EAAE,8CAA8C,EAAE;KACvF;IACD,OAAO,EAAE;QACP,mDAAmD;QACnD,kDAAkD;QAClD,4CAA4C;QAC5C,8CAA8C;QAC9C,+CAA+C;QAC/C,8CAA8C;QAC9C,sDAAsD;QACtD,sDAAsD;QACtD,0DAA0D;KAC3D;IACD,SAAS,EAAE;QACT,6DAA6D;QAC7D,sDAAsD;QACtD,yDAAyD;QACzD,uDAAuD;QACvD,yDAAyD;KAC1D;CACF,CAAC;AAEF,MAAM,gBAAgB,GAAG;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;qBA6TJ,CAAC;AAEtB,MAAM,CAAC,MAAM,cAAc,GAAgB;IACzC,IAAI,EAAE,WAAW;IACjB,WAAW,EAAE,2MAA2M;IACxN,MAAM,EAAE,gBAAgB;IACxB,KAAK,EAAE,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,aAAa,CAAC;IACtD,KAAK,EAAE,QAAQ;IACf,QAAQ,EAAE,yBAAyB;CACpC,CAAC"}
package/dist/lib/atomic-write.d.ts
ADDED
@@ -0,0 +1,29 @@
+/**
+ * Atomic, durable file writes for oh-my-claudecode.
+ * Self-contained module with no external dependencies.
+ */
+/**
+ * Create directory recursively (inline implementation).
+ * Ensures parent directories exist before creating the target directory.
+ *
+ * @param dir Directory path to create
+ */
+export declare function ensureDirSync(dir: string): void;
+/**
+ * Write JSON data atomically to a file.
+ * Uses temp file + atomic rename pattern to ensure durability.
+ *
+ * @param filePath Target file path
+ * @param data Data to serialize as JSON
+ * @throws Error if JSON serialization fails or write operation fails
+ */
+export declare function atomicWriteJson(filePath: string, data: unknown): Promise<void>;
+/**
+ * Read and parse JSON file with error handling.
+ * Returns null if file doesn't exist or on parse errors.
+ *
+ * @param filePath Path to JSON file
+ * @returns Parsed JSON data or null on error
+ */
+export declare function safeReadJson<T>(filePath: string): Promise<T | null>;
+//# sourceMappingURL=atomic-write.d.ts.map
package/dist/lib/atomic-write.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"atomic-write.d.ts","sourceRoot":"","sources":["../../src/lib/atomic-write.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAOH;;;;;GAKG;AACH,wBAAgB,aAAa,CAAC,GAAG,EAAE,MAAM,GAAG,IAAI,CAe/C;AAED;;;;;;;GAOG;AACH,wBAAsB,eAAe,CAAC,QAAQ,EAAE,MAAM,EAAE,IAAI,EAAE,OAAO,GAAG,OAAO,CAAC,IAAI,CAAC,CA+CpF;AAED;;;;;;GAMG;AACH,wBAAsB,YAAY,CAAC,CAAC,EAAE,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,CAAC,GAAG,IAAI,CAAC,CAsBzE"}
package/dist/lib/atomic-write.js
ADDED
@@ -0,0 +1,111 @@
+/**
+ * Atomic, durable file writes for oh-my-claudecode.
+ * Self-contained module with no external dependencies.
+ */
+import * as fs from 'fs/promises';
+import * as fsSync from 'fs';
+import * as path from 'path';
+import * as crypto from 'crypto';
+/**
+ * Create directory recursively (inline implementation).
+ * Ensures parent directories exist before creating the target directory.
+ *
+ * @param dir Directory path to create
+ */
+export function ensureDirSync(dir) {
+    if (fsSync.existsSync(dir)) {
+        return;
+    }
+    try {
+        fsSync.mkdirSync(dir, { recursive: true });
+    }
+    catch (err) {
+        // If directory was created by another process between exists check and mkdir,
+        // that's fine - verify it exists now
+        if (err.code === 'EEXIST') {
+            return;
+        }
+        throw err;
+    }
+}
+/**
+ * Write JSON data atomically to a file.
+ * Uses temp file + atomic rename pattern to ensure durability.
+ *
+ * @param filePath Target file path
+ * @param data Data to serialize as JSON
+ * @throws Error if JSON serialization fails or write operation fails
+ */
+export async function atomicWriteJson(filePath, data) {
+    const dir = path.dirname(filePath);
+    const base = path.basename(filePath);
+    const tempPath = path.join(dir, `.${base}.tmp.${crypto.randomUUID()}`);
+    let success = false;
+    try {
+        // Ensure parent directory exists
+        ensureDirSync(dir);
+        // Serialize data to JSON
+        const jsonContent = JSON.stringify(data, null, 2);
+        // Write to temp file with exclusive creation (wx = O_CREAT | O_EXCL | O_WRONLY)
+        const fd = await fs.open(tempPath, 'wx', 0o600);
+        try {
+            await fd.write(jsonContent, 0, 'utf-8');
+            // Sync file data to disk before rename
+            await fd.sync();
+        }
+        finally {
+            await fd.close();
+        }
+        // Atomic rename - replaces target file if it exists
+        // On Windows, fs.rename uses MoveFileExW with MOVEFILE_REPLACE_EXISTING
+        await fs.rename(tempPath, filePath);
+        success = true;
+        // Best-effort directory fsync to ensure rename is durable
+        try {
+            const dirFd = await fs.open(dir, 'r');
+            try {
+                await dirFd.sync();
+            }
+            finally {
+                await dirFd.close();
+            }
+        }
+        catch {
+            // Some platforms don't support directory fsync - that's okay
+        }
+    }
+    finally {
+        // Clean up temp file on error
+        if (!success) {
+            await fs.unlink(tempPath).catch(() => { });
+        }
+    }
+}
+/**
+ * Read and parse JSON file with error handling.
+ * Returns null if file doesn't exist or on parse errors.
+ *
+ * @param filePath Path to JSON file
+ * @returns Parsed JSON data or null on error
+ */
+export async function safeReadJson(filePath) {
+    try {
+        // Check if file exists
+        await fs.access(filePath);
+        // Read file content
+        const content = await fs.readFile(filePath, 'utf-8');
+        // Parse JSON
+        return JSON.parse(content);
+    }
+    catch (err) {
+        const error = err;
+        // File doesn't exist - return null
+        if (error.code === 'ENOENT') {
+            return null;
+        }
+        // Parse error or read error - return null
+        // In production, you might want to log these errors
+        return null;
+    }
+}
+//# sourceMappingURL=atomic-write.js.map
package/dist/lib/atomic-write.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"atomic-write.js","sourceRoot":"","sources":["../../src/lib/atomic-write.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,KAAK,EAAE,MAAM,aAAa,CAAC;AAClC,OAAO,KAAK,MAAM,MAAM,IAAI,CAAC;AAC7B,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAC7B,OAAO,KAAK,MAAM,MAAM,QAAQ,CAAC;AAEjC;;;;;GAKG;AACH,MAAM,UAAU,aAAa,CAAC,GAAW;IACvC,IAAI,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE,CAAC;QAC3B,OAAO;IACT,CAAC;IAED,IAAI,CAAC;QACH,MAAM,CAAC,SAAS,CAAC,GAAG,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAC7C,CAAC;IAAC,OAAO,GAAG,EAAE,CAAC;QACb,8EAA8E;QAC9E,qCAAqC;QACrC,IAAK,GAA6B,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;YACrD,OAAO;QACT,CAAC;QACD,MAAM,GAAG,CAAC;IACZ,CAAC;AACH,CAAC;AAED;;;;;;;GAOG;AACH,MAAM,CAAC,KAAK,UAAU,eAAe,CAAC,QAAgB,EAAE,IAAa;IACnE,MAAM,GAAG,GAAG,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;IACnC,MAAM,IAAI,GAAG,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;IACrC,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,IAAI,QAAQ,MAAM,CAAC,UAAU,EAAE,EAAE,CAAC,CAAC;IAEvE,IAAI,OAAO,GAAG,KAAK,CAAC;IAEpB,IAAI,CAAC;QACH,iCAAiC;QACjC,aAAa,CAAC,GAAG,CAAC,CAAC;QAEnB,yBAAyB;QACzB,MAAM,WAAW,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC;QAElD,gFAAgF;QAChF,MAAM,EAAE,GAAG,MAAM,EAAE,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,EAAE,KAAK,CAAC,CAAC;QAChD,IAAI,CAAC;YACH,MAAM,EAAE,CAAC,KAAK,CAAC,WAAW,EAAE,CAAC,EAAE,OAAO,CAAC,CAAC;YACxC,uCAAuC;YACvC,MAAM,EAAE,CAAC,IAAI,EAAE,CAAC;QAClB,CAAC;gBAAS,CAAC;YACT,MAAM,EAAE,CAAC,KAAK,EAAE,CAAC;QACnB,CAAC;QAED,oDAAoD;QACpD,wEAAwE;QACxE,MAAM,EAAE,CAAC,MAAM,CAAC,QAAQ,EAAE,QAAQ,CAAC,CAAC;QAEpC,OAAO,GAAG,IAAI,CAAC;QAEf,0DAA0D;QAC1D,IAAI,CAAC;YACH,MAAM,KAAK,GAAG,MAAM,EAAE,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC;YACtC,IAAI,CAAC;gBACH,MAAM,KAAK,CAAC,IAAI,EAAE,CAAC;YACrB,CAAC;oBAAS,CAAC;gBACT,MAAM,KAAK,CAAC,KAAK,EAAE,CAAC;YACtB,CAAC;QACH,CAAC;QAAC,MAAM,CAAC;YACP,6DAA6D;QAC/D,CAAC;IACH,CAAC;YAAS,CAAC;QACT,8BAA8B;QAC9B,IAAI,CAAC,OAAO,EAAE,CAAC;YACb,MAAM,EAAE,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,GAAE,CAAC,CAAC,CAAC;QAC5C,CAAC;IACH,CAAC;AACH,CAAC;AAED;;;;;;GAMG;AACH,MAAM,CAAC,KAAK,UAAU,YAAY,CAAI,QAAgB;IACpD,IAAI,CAAC;QACH,uBAAuB;QACvB,MAAM,EAAE,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;QAE1B,oBAAoB;QACpB,MAAM,OAAO,GAAG,MAAM,EAAE,CAAC,QAAQ,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;QAErD,aAAa;QACb,OAAO,IAAI,CAAC,KAAK,CAAC,OAAO,CAAM,CAAC;IAClC,CAAC;IAAC,OAAO,GAAG,EAAE,CAAC;QACb,MAAM,KAAK,GAAG,GAA4B,CAAC;QAE3C,mCAAmC;QACnC,IAAI,KAAK,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;YAC5B,OAAO,IAAI,CAAC;QACd,CAAC;QAED,0CAA0C;QAC1C,oDAAoD;QACpD,OAAO,IAAI,CAAC;IACd,CAAC;AACH,CAAC"}
package/dist/tools/index.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/tools/index.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/tools/index.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AAKxB,OAAO,EAAE,QAAQ,EAAE,MAAM,gBAAgB,CAAC;AAC1C,OAAO,EAAE,QAAQ,EAAE,MAAM,gBAAgB,CAAC;AAC1C,OAAO,EAAE,cAAc,EAAE,MAAM,wBAAwB,CAAC;AAExD;;GAEG;AACH,MAAM,WAAW,qBAAqB;IACpC,IAAI,EAAE,MAAM,CAAC;IACb,WAAW,EAAE,MAAM,CAAC;IACpB,MAAM,EAAE,CAAC,CAAC,WAAW,CAAC;IACtB,OAAO,EAAE,CAAC,IAAI,EAAE,OAAO,KAAK,OAAO,CAAC;QAAE,OAAO,EAAE,KAAK,CAAC;YAAE,IAAI,EAAE,MAAM,CAAC;YAAC,IAAI,EAAE,MAAM,CAAA;SAAE,CAAC,CAAA;KAAE,CAAC,CAAC;CACzF;AAED;;GAEG;AACH,eAAO,MAAM,cAAc,EAAE,qBAAqB,EAIjD,CAAC;AAEF;;GAEG;AACH,wBAAgB,kBAAkB,CAAC,QAAQ,EAAE,KAAK,GAAG,KAAK,GAAG,KAAK,GAAG,qBAAqB,EAAE,CAS3F;AAED;;GAEG;AACH,wBAAgB,eAAe,CAAC,CAAC,SAAS,CAAC,CAAC,WAAW,EAAE,MAAM,EAAE,CAAC,GAAG,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAElF;AAED;;GAEG;AACH,MAAM,WAAW,aAAa;IAC5B,IAAI,EAAE,MAAM,CAAC;IACb,WAAW,EAAE,MAAM,CAAC;IACpB,WAAW,EAAE;QACX,IAAI,EAAE,QAAQ,CAAC;QACf,UAAU,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QACpC,QAAQ,EAAE,MAAM,EAAE,CAAC;KACpB,CAAC;CACH;AAED;;GAEG;AACH,wBAAgB,eAAe,CAAC,IAAI,EAAE,qBAAqB,GAAG,aAAa,CAS1E"}
package/dist/tools/index.js
CHANGED
@@ -7,14 +7,17 @@
 import { z } from 'zod';
 import { lspTools } from './lsp-tools.js';
 import { astTools } from './ast-tools.js';
+import { pythonReplTool } from './python-repl/index.js';
 export { lspTools } from './lsp-tools.js';
 export { astTools } from './ast-tools.js';
+export { pythonReplTool } from './python-repl/index.js';
 /**
  * All custom tools available in the system
  */
 export const allCustomTools = [
     ...lspTools,
-    ...astTools
+    ...astTools,
+    pythonReplTool
 ];
 /**
  * Get tools by category
package/dist/tools/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/tools/index.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AACxB,OAAO,EAAE,QAAQ,EAAE,MAAM,gBAAgB,CAAC;AAC1C,OAAO,EAAE,QAAQ,EAAE,MAAM,gBAAgB,CAAC;
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/tools/index.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AACxB,OAAO,EAAE,QAAQ,EAAE,MAAM,gBAAgB,CAAC;AAC1C,OAAO,EAAE,QAAQ,EAAE,MAAM,gBAAgB,CAAC;AAC1C,OAAO,EAAE,cAAc,EAAE,MAAM,wBAAwB,CAAC;AAExD,OAAO,EAAE,QAAQ,EAAE,MAAM,gBAAgB,CAAC;AAC1C,OAAO,EAAE,QAAQ,EAAE,MAAM,gBAAgB,CAAC;AAC1C,OAAO,EAAE,cAAc,EAAE,MAAM,wBAAwB,CAAC;AAYxD;;GAEG;AACH,MAAM,CAAC,MAAM,cAAc,GAA4B;IACrD,GAAG,QAA8C;IACjD,GAAG,QAA8C;IACjD,cAAkD;CACnD,CAAC;AAEF;;GAEG;AACH,MAAM,UAAU,kBAAkB,CAAC,QAA+B;IAChE,QAAQ,QAAQ,EAAE,CAAC;QACjB,KAAK,KAAK;YACR,OAAO,QAA8C,CAAC;QACxD,KAAK,KAAK;YACR,OAAO,QAA8C,CAAC;QACxD,KAAK,KAAK;YACR,OAAO,cAAc,CAAC;IAC1B,CAAC;AACH,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,eAAe,CAA0B,MAAS;IAChE,OAAO,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;AAC1B,CAAC;AAeD;;GAEG;AACH,MAAM,UAAU,eAAe,CAAC,IAA2B;IACzD,MAAM,SAAS,GAAG,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;IACxC,MAAM,UAAU,GAAG,eAAe,CAAC,SAAS,CAAC,CAAC;IAE9C,OAAO;QACL,IAAI,EAAE,IAAI,CAAC,IAAI;QACf,WAAW,EAAE,IAAI,CAAC,WAAW;QAC7B,WAAW,EAAE,UAAU;KACxB,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,SAAS,eAAe,CAAC,MAAkC;IAKzD,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC;IAC3B,MAAM,UAAU,GAA4B,EAAE,CAAC;IAC/C,MAAM,QAAQ,GAAa,EAAE,CAAC;IAE9B,KAAK,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE,CAAC;QACjD,MAAM,OAAO,GAAG,KAAqB,CAAC;QACtC,UAAU,CAAC,GAAG,CAAC,GAAG,mBAAmB,CAAC,OAAO,CAAC,CAAC;QAE/C,gDAAgD;QAChD,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE,EAAE,CAAC;YAC1B,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;QACrB,CAAC;IACH,CAAC;IAED,OAAO;QACL,IAAI,EAAE,QAAQ;QACd,UAAU;QACV,QAAQ;KACT,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,SAAS,mBAAmB,CAAC,OAAqB;IAChD,MAAM,MAAM,GAA4B,EAAE,CAAC;IAE3C,0BAA0B;IAC1B,IAAI,OAAO,YAAY,CAAC,CAAC,WAAW,EAAE,CAAC;QACrC,OAAO,mBAAmB,CAAC,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;IACrD,CAAC;IAED,yBAAyB;IACzB,IAAI,OAAO,YAAY,CAAC,CAAC,UAAU,EAAE,CAAC;QACpC,MAAM,KAAK,GAAG,mBAAmB,CAAC,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;QAC1D,KAAK,CAAC,OAAO,GAAG,OAAO,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC;QAC5C,OAAO,KAAK,CAAC;IACf,CAAC;IAED,+BAA+B;IAC/B,MAAM,WAAW,GAAG,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC;IAC7C,IAAI,WAAW,EAAE,CAAC;QAChB,MAAM,CAAC,WAAW,GAAG,WAAW,CAAC;IACnC,CAAC;IAED,qBAAqB;IACrB,IAAI,OAAO,YAAY,CAAC,CAAC,SAAS,EAAE,CAAC;QACnC,MAAM,CAAC,IAAI,GAAG,QAAQ,CAAC;IACzB,CAAC;SAAM,IAAI,OAAO,YAAY,CAAC,CAAC,SAAS,EAAE,CAAC;QAC1C,MAAM,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC,CAAmB,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,KAAK,CAAC;YAChF,CAAC,CAAC,SAAS;YACX,CAAC,CAAC,QAAQ,CAAC;IACf,CAAC;SAAM,IAAI,OAAO,YAAY,CAAC,CAAC,UAAU,EAAE,CAAC;QAC3C,MAAM,CAAC,IAAI,GAAG,SAAS,CAAC;IAC1B,CAAC;SAAM,IAAI,OAAO,YAAY,CAAC,CAAC,QAAQ,EAAE,CAAC;QACzC,MAAM,CAAC,IAAI,GAAG,OAAO,CAAC;QACtB,MAAM,CAAC,KAAK,GAAG,mBAAmB,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IACxD,CAAC;SAAM,IAAI,OAAO,YAAY,CAAC,CAAC,OAAO,EAAE,CAAC;QACxC,MAAM,CAAC,IAAI,GAAG,QAAQ,CAAC;QACvB,MAAM,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC;IACpC,CAAC;SAAM,IAAI,OAAO,YAAY,CAAC,CAAC,SAAS,EAAE,CAAC;QAC1C,OAAO,eAAe,CAAC,OAAO,CAAC,CAAC;IAClC,CAAC;SAAM,CAAC;QACN,6BAA6B;QAC7B,MAAM,CAAC,IAAI,GAAG,QAAQ,CAAC;IACzB,CAAC;IAED,OAAO,MAAM,CAAC;AAChB,CAAC"}
package/dist/tools/python-repl/bridge-manager.d.ts
ADDED
@@ -0,0 +1,65 @@
+/**
+ * Bridge Manager - Python process lifecycle management
+ *
+ * Manages the gyoshu_bridge.py process:
+ * - Spawning with proper environment detection
+ * - Ensuring single bridge per session with security validations
+ * - Graceful shutdown with signal escalation
+ * - PID reuse detection via process identity verification
+ */
+import { BridgeMeta } from './types.js';
+export interface EscalationResult {
+    terminated: boolean;
+    terminatedBy?: 'SIGINT' | 'SIGTERM' | 'SIGKILL';
+    terminationTimeMs?: number;
+}
+/**
+ * Verify that a bridge process is still running and is the same process
+ * that was originally spawned (guards against PID reuse).
+ *
+ * Returns false if:
+ * - Process is not alive
+ * - Start time was recorded but doesn't match (PID reused)
+ * - Start time was recorded but cannot be retrieved (fail-closed)
+ */
+export declare function verifyProcessIdentity(meta: BridgeMeta): Promise<boolean>;
+/**
+ * Spawn a new bridge server process for the given session.
+ *
+ * @param sessionId - Unique session identifier
+ * @param projectDir - Optional project directory (defaults to cwd)
+ * @returns BridgeMeta containing process information
+ */
+export declare function spawnBridgeServer(sessionId: string, projectDir?: string): Promise<BridgeMeta>;
+/**
+ * Get or spawn a bridge server for the session.
+ *
+ * Implements security validations:
+ * - Anti-poisoning: Verifies sessionId in metadata matches expected
+ * - Anti-hijack: Verifies socketPath is the expected canonical path
+ * - Socket type: Verifies the socket path is actually a socket
+ * - Process identity: Verifies PID + start time match
+ *
+ * @param sessionId - Unique session identifier
+ * @param projectDir - Optional project directory (defaults to cwd)
+ * @returns BridgeMeta for the active bridge
+ */
+export declare function ensureBridge(sessionId: string, projectDir?: string): Promise<BridgeMeta>;
+/**
+ * Terminate a bridge process with signal escalation.
+ *
+ * Escalation order:
+ * 1. SIGINT - wait gracePeriodMs (default 5000ms)
+ * 2. SIGTERM - wait 2500ms
+ * 3. SIGKILL - immediate termination
+ *
+ * Uses process group kill (-pid) to also terminate child processes.
+ *
+ * @param sessionId - Session whose bridge to kill
+ * @param options - Optional configuration
+ * @returns EscalationResult with termination details
+ */
+export declare function killBridgeWithEscalation(sessionId: string, options?: {
+    gracePeriodMs?: number;
+}): Promise<EscalationResult>;
+//# sourceMappingURL=bridge-manager.d.ts.map
package/dist/tools/python-repl/bridge-manager.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"bridge-manager.d.ts","sourceRoot":"","sources":["../../../src/tools/python-repl/bridge-manager.ts"],"names":[],"mappings":"AAAA;;;;;;;;GAQG;AAUH,OAAO,EAAE,UAAU,EAAiB,MAAM,YAAY,CAAC;AAkBvD,MAAM,WAAW,gBAAgB;IAC/B,UAAU,EAAE,OAAO,CAAC;IACpB,YAAY,CAAC,EAAE,QAAQ,GAAG,SAAS,GAAG,SAAS,CAAC;IAChD,iBAAiB,CAAC,EAAE,MAAM,CAAC;CAC5B;AAwID;;;;;;;;GAQG;AACH,wBAAsB,qBAAqB,CAAC,IAAI,EAAE,UAAU,GAAG,OAAO,CAAC,OAAO,CAAC,CAsB9E;AAqFD;;;;;;GAMG;AACH,wBAAsB,iBAAiB,CACrC,SAAS,EAAE,MAAM,EACjB,UAAU,CAAC,EAAE,MAAM,GAClB,OAAO,CAAC,UAAU,CAAC,CAqFrB;AAMD;;;;;;;;;;;;GAYG;AACH,wBAAsB,YAAY,CAAC,SAAS,EAAE,MAAM,EAAE,UAAU,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,CAAC,CAuC9F;AAMD;;;;;;;;;;;;;GAaG;AACH,wBAAsB,wBAAwB,CAC5C,SAAS,EAAE,MAAM,EACjB,OAAO,CAAC,EAAE;IAAE,aAAa,CAAC,EAAE,MAAM,CAAA;CAAE,GACnC,OAAO,CAAC,gBAAgB,CAAC,CAoE3B"}