docdex 0.2.24 → 0.2.25
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/assets/agents.md +109 -6
- package/lib/postinstall_setup.js +84 -52
- package/package.json +1 -1
package/assets/agents.md
CHANGED
@@ -1,3 +1,7 @@
+---- START OF DOCDEX INFO V0.2.25 ----
+Docdex URL: http://127.0.0.1:28491
+Use this base URL for Docdex HTTP endpoints.
+
 ## Priority Directives (Mandatory)

 - Always use both memory lobes early: docdex_get_profile + docdex_memory_recall; save new facts via docdex_memory_save and new preferences via docdex_save_preference.
@@ -66,6 +70,7 @@ Precision tools for structural analysis. Do not rely on text search for definiti
 | docdex_ast | Specific AST nodes (e.g., "Find all class definitions"). |
 | docdex_impact_diagnostics | Check for broken/dynamic imports. |
 | HTTP /v1/graph/impact | Impact Analysis: "What breaks if I change this?" Returns inbound/outbound dependencies. |
+| HTTP /v1/dag/export | Export the dependency DAG for change ordering and scope. |

 ### C. Memory Operations

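A minimal sketch of how an agent could consume the new /v1/dag/export row above, assuming the daemon returns a JSON graph; the `nodes`/`edges` response shape and the ranking heuristic are illustrative assumptions, not taken from this diff.

```js
// Sketch only: fetch the dependency DAG and rank files by inbound-edge count
// to suggest a change order. The response shape ({ nodes, edges }) is assumed.
const DOCDEX_BASE = "http://127.0.0.1:28491";

async function planChangeOrder() {
  const res = await fetch(`${DOCDEX_BASE}/v1/dag/export`);
  if (!res.ok) throw new Error(`dag/export failed: ${res.status}`);
  const dag = await res.json(); // assumed: { nodes: [...], edges: [{ from, to }] }

  const inbound = new Map();
  for (const node of dag.nodes ?? []) inbound.set(node.id ?? node, 0);
  for (const edge of dag.edges ?? []) inbound.set(edge.to, (inbound.get(edge.to) ?? 0) + 1);

  // Fewer inbound dependencies first: leaf-ish files are usually safer to touch early.
  return [...inbound.entries()].sort((a, b) => a[1] - b[1]);
}
```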
@@ -76,6 +81,30 @@ Precision tools for structural analysis. Do not rely on text search for definiti
 | docdex_save_preference | Store a global user preference (Style, Tooling, Constraint). |
 | docdex_get_profile | Retrieve global preferences. |

+### D. Local Delegation (Cheap Models)
+
+Use local delegation for low-complexity, code-generation-oriented tasks to reduce paid-model usage.
+
+| MCP Tool / HTTP | Purpose |
+| --- | --- |
+| docdex_local_completion | Delegate small tasks to a local model with strict output formats. |
+| HTTP /v1/delegate | HTTP endpoint for delegated completions with structured responses. |
+
+Required fields: `task_type`, `instruction`, `context`. Optional: `max_tokens`, `timeout_ms`, `mode` (`draft_only` or `draft_then_refine`), `agent` (local agent id/slug).
+Expensive model library: `docs/expensive_models.json` (match by `agent_id`, `agent_slug`, `model`, or adapter type; case-insensitive).
+
+### E. Index Health + File Access
+
+Use these to verify index coverage, repo binding, and to read precise file slices.
+
+| MCP Tool | Purpose |
+| --- | --- |
+| docdex_repo_inspect | Confirm normalized repo root/identity (resolve missing_repo). |
+| docdex_stats | Index size/last update; detect stale indexes. |
+| docdex_files | Indexed file coverage; confirm a file is in the index. |
+| docdex_index | Reindex full repo or ingest specific files when stale/missing. |
+| docdex_open | Read exact file slices after you identify targets. |
+
 ## Quick Tool Map (Often Missed)

 - docdex_files: List indexed docs with rel_path/doc_id/token_estimate; use to verify indexing coverage.
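To make the /v1/delegate contract above concrete, here is a hedged sketch of one request; the field names come from the hunk, while the response property (`output`) is an assumption.

```js
// Sketch only: delegate a docstring task to a local model via POST /v1/delegate.
// task_type/instruction/context/mode/max_tokens/timeout_ms are the fields listed
// above; the shape of the JSON response is an assumption.
const DOCDEX_BASE = "http://127.0.0.1:28491";

async function delegateDocstring(snippet) {
  const res = await fetch(`${DOCDEX_BASE}/v1/delegate`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      task_type: "WRITE_DOCSTRING",
      instruction: "Write a concise docstring for this function.",
      context: snippet,          // smallest necessary snippet
      mode: "draft_only",        // or "draft_then_refine"
      max_tokens: 256,
      timeout_ms: 30000,
    }),
  });
  if (!res.ok) throw new Error(`delegate failed: ${res.status}`);
  return res.json();             // assumed to carry the structured draft, e.g. { output: "..." }
}
```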
@@ -84,8 +113,14 @@ Precision tools for structural analysis. Do not rely on text search for definiti
 - docdex_index: Reindex the full repo or ingest specific files when stale.
 - docdex_search diff: Limit search to working tree, staged, or ref ranges; filter by paths.
 - docdex_web_research knobs: force_web, skip_local_search, repo_only, no_cache, web_limit, llm_filter_local_results, llm_model.
+- docdex_open: Read narrow file slices after targets are identified.
+- docdex_impact_diagnostics: Scan dynamic imports when imports are unclear or failing.
+- docdex_local_completion: Delegate low-complexity codegen tasks (tests, docstrings, boilerplate, simple refactors).
+- docdex_ast: Use AST queries for precise structure (class/function definitions, call sites, imports).
+- docdex_symbols: Use symbols to confirm exact signatures/locations before edits.
+- HTTP /v1/graph/impact: Mandatory before code changes to review inbound/outbound deps.
+- HTTP /v1/dag/export: Export dependency graph to plan change order.
 - HTTP /v1/initialize: Bind a default repo root for MCP when clients omit project_root.
-- HTTP /v1/dag/export: Export the dependency graph for external analysis.

 ## Interaction Patterns

@@ -96,9 +131,76 @@ When answering a complex coding query, follow this "Reasoning Trace":
 1. Retrieve Profile: Call docdex_get_profile to load user style/constraints (e.g., "Use functional components").
 2. Search Code: Call docdex_search or docdex_symbols to find the relevant code.
 3. Check Memory: Call docdex_memory_recall for project-specific caveats (e.g., "Auth logic was refactored last week").
-4.
+4. Validate structure: Use docdex_ast/docdex_symbols to confirm targets before editing.
+5. Read context: Use docdex_open to fetch minimal file slices after locating targets.
+6. Plan with DAG: Use /v1/dag/export or /v1/graph/impact to order changes by dependencies.
+7. Synthesize: Generate code that matches the Repo Truth while adhering to the Profile Style.
+
+### 2. Memory Capture (Mandatory)
+
+Save more memories for both lobes during the task, not just at the end.
+
+1. Repo memory: After each meaningful discovery or code change, save at least one durable fact (file location, behavior, config, gotcha) via `docdex_memory_save`.
+2. Profile memory: When the user expresses a preference, constraint, or workflow correction, call `docdex_save_preference` immediately with the right category.
+3. Keep it crisp: 1-3 short sentences, include file paths when relevant, avoid raw code blobs.
+4. Safety: Never store secrets, tokens, or sensitive user data. Skip transient or speculative info.
+
+### 3. Index Health + Diff-Aware Search (Mandatory)
+
+Use these when results look incomplete or when the task is about recent changes.
+
+1. Confirm repo binding: Use docdex_repo_inspect or /v1/initialize when repo_id is missing/ambiguous.
+2. Check index coverage: Use docdex_stats + docdex_files before assuming code is missing.
+3. Reindex if needed: Run docdex_index (or advise it) when stale_index/missing files appear.
+4. Use diff search: For change-specific tasks, use docdex_search with diff mode (working tree/staged/range).
+
+### 4. Local Delegation Workflow (Cheap Models)
+
+When you are an expensive/paid model, delegate eligible low-complexity tasks first unless the user opts out.
+Delegation is mandatory for eligible tasks. If you are expensive and delegation is enabled (explicitly or auto-enabled with local candidates), you MUST attempt local delegation before doing the work yourself. Only skip when delegation is disabled, no local candidate exists, or local output fails validation after a retry/fallback.
+
+#### Mandatory Delegation Triggers
+
+- The work fits a delegation task type or can be split into sub-tasks that do.
+- The change is small, localized, or mechanical (boilerplate, formatting, docstrings, small refactors).
+- The task can be fully expressed in a capsule with explicit context and output rules.
+
+#### Reasons to Delegate (Non-Optional)
+
+- Cuts paid-token usage and cost.
+- Lowers latency and avoids rate-limit pressure on premium models.
+- Keeps expensive models focused on complex reasoning and multi-step planning.
+- Uses tuned prompts that reduce drift for repetitive edits.
+- Enforces the product's cost-control policy; skipping delegation is a regression.
+
+1. Determine if you are expensive: load `docs/expensive_models.json` and match your `agent_id`, `agent_slug`, `model`, or adapter type. If you match, continue; if not and you are clearly local/cheap, skip delegation. If unsure, default to expensive.
+2. Check config: Only delegate when `[llm.delegation].enabled` is true or `auto_enable` is true with an eligible local model/agent (and `task_type` is allowed). If uncertain, attempt delegation and handle the error.
+3. Choose task type: Use one of `GENERATE_TESTS`, `WRITE_DOCSTRING`, `SCAFFOLD_BOILERPLATE`, `REFACTOR_SIMPLE`, `FORMAT_CODE`.
+4. Call the tool: `docdex_local_completion` with `task_type`, `instruction`, and minimal `context` (smallest necessary snippet).
+5. Validate output: If the local output is invalid or empty, fall back to the primary agent or handle with the paid model.
+6. Optional refine: If mode is `draft_then_refine`, refine the draft with the primary agent and return a final result.
+
+#### Delegation Handoff Package (Required)
+
+Local models cannot call tools. The leading agent must provide a complete, minimal capsule.
+
+1. Task capsule: `task_type`, goal, success criteria, output format, and constraints (tests to update, style rules).
+2. Context payload: file paths plus the exact snippets from docdex_open; include symbol signatures/AST findings.
+3. Dependency notes: summarize impact analysis and any DAG ordering that affects the change.
+4. Boundaries: explicit files allowed to edit vs read-only; no new dependencies unless allowed.
+5. Guardrails: ask for clarification if context is insufficient; do not invent missing APIs; return only the requested format.
+
+### 5. Graph + AST Usage (Mandatory for Code Changes)
+
+For any code change, use both AST and graph tools to reduce drift and hidden coupling.
+
+1. Use `docdex_ast` or `docdex_symbols` to locate exact definitions and call sites.
+2. Call HTTP `/v1/graph/impact?file=...` before edits and summarize inbound/outbound deps.
+3. For multi-file changes, export the DAG (`/v1/dag/export`) and order edits by dependency direction.
+4. Use docdex_impact_diagnostics when imports are dynamic or unresolved.
+5. If graph endpoints are unavailable, state it and proceed cautiously with extra local search.

-###
+### 6. Handling Corrections (Learning)

 If the user says: "I told you, we do not use Moment.js here, use date-fns!"

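The "Delegation Handoff Package" rules above map directly onto the docdex_local_completion arguments; a sketch of one capsule, with the goal, file path, and boundary text invented purely for illustration.

```js
// Sketch only: a handoff capsule for docdex_local_completion following the rules
// above. Only the field names (task_type, instruction, context, mode) come from
// this diff; the concrete goal, paths, and boundaries are hypothetical.
const capsule = {
  task_type: "GENERATE_TESTS",
  instruction: [
    "Goal: add unit tests for parseConfig().",
    "Success criteria: cover the empty-file and invalid-TOML cases.",
    "Output format: one test file, no prose.",
    "Boundaries: edit tests/config.test.js only; add no new dependencies.",
    "If context is insufficient, ask for clarification; do not invent APIs.",
  ].join("\n"),
  context: "src/config.js (slice from docdex_open): ...", // exact snippet goes here
  mode: "draft_then_refine",
};
// The leading agent passes `capsule` as the tool-call arguments; shown as data only.
```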
@@ -107,21 +209,21 @@ If the user says: "I told you, we do not use Moment.js here, use date-fns!"
 - content: "Do not use Moment.js; prefer date-fns."
 - agent_id: "default" (or active agent ID)

-###
+### 7. Impact Analysis

 If the user asks: "Safe to delete getUser?"

 - Action: Call GET /v1/graph/impact?file=src/user.ts
 - Output: Analyze the inbound edges. If the list is not empty, it is unsafe.

-###
+### 8. Non-Repo Real-World Queries (Web First)

 If the user asks a non-repo, real-world question (weather, news, general facts), immediately call docdex_web_research with force_web=true.
 - Resolve relative dates ("yesterday", "last week") using system time by default.
 - Do not run docdex_search unless the user explicitly wants repo-local context.
 - Assume web access is allowed unless the user forbids it; if the web call fails, report the failure and ask for a source or permission.

-###
+### 9. Failure Handling (Missing Results or Errors)

 - Ensure project_root or repo_path is set, or call /v1/initialize to bind a default root.
 - Use docdex_repo_inspect to confirm repo identity and normalized root.
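Section 7's check is a single GET plus an inspection of inbound edges; a minimal sketch, assuming the endpoint returns a JSON body with an `inbound` array (the property name is not confirmed by this diff).

```js
// Sketch only: ask the daemon what depends on a file before deleting a symbol.
// Only the URL shape comes from the text above; `inbound` is an assumed property.
const DOCDEX_BASE = "http://127.0.0.1:28491";

async function isSafeToDelete(file) {
  const res = await fetch(`${DOCDEX_BASE}/v1/graph/impact?file=${encodeURIComponent(file)}`);
  if (!res.ok) throw new Error(`graph/impact failed: ${res.status}`);
  const impact = await res.json();
  const inbound = impact.inbound ?? [];   // files that import/call into this one
  return { safe: inbound.length === 0, inbound };
}
```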
@@ -151,3 +253,4 @@ Docdex adapts to the host.

 - Project Mapping: On constrained hardware, docdex uses a "Spotlight Heuristic" to show you only a skeletal file tree based on your role keywords, rather than the full file system.
 - LLM: It may be running a quantized model (e.g., phi3.5) or a heavy model (llama3.1:70b) depending on VRAM. Trust the daemon's token limits; it handles truncation.
+---- END OF DOCDEX INFO -----
package/lib/postinstall_setup.js
CHANGED
@@ -26,6 +26,7 @@ const SETUP_PENDING_MARKER = "setup_pending.json";
 const AGENTS_DOC_FILENAME = "agents.md";
 const DOCDEX_INFO_START_PREFIX = "---- START OF DOCDEX INFO V";
 const DOCDEX_INFO_END = "---- END OF DOCDEX INFO -----";
+const DOCDEX_INFO_END_LEGACY = "---- END OF DOCDEX INFO ----";

 function defaultConfigPath() {
   return path.join(os.homedir(), ".docdex", "config.toml");
@@ -353,9 +354,14 @@ function docdexBlockStart(version) {
   return `${DOCDEX_INFO_START_PREFIX}${version} ----`;
 }

+function docdexInfoEndPattern() {
+  return `(?:${escapeRegExp(DOCDEX_INFO_END)}|${escapeRegExp(DOCDEX_INFO_END_LEGACY)})`;
+}
+
 function buildDocdexInstructionBlock(instructions) {
   const next = normalizeInstructionText(instructions);
   if (!next) return "";
+  if (hasDocdexBlock(next)) return next;
   const version = resolvePackageVersion();
   return `${docdexBlockStart(version)}\n${next}\n${DOCDEX_INFO_END}`;
 }
@@ -363,9 +369,7 @@ function buildDocdexInstructionBlock(instructions) {
 function extractDocdexBlockBody(text) {
   const match = String(text || "").match(
     new RegExp(
-      `${escapeRegExp(DOCDEX_INFO_START_PREFIX)}[^\\r\\n]* ----\\r?\\n([\\s\\S]*?)\\r?\\n${escapeRegExp(
-        DOCDEX_INFO_END
-      )}`
+      `${escapeRegExp(DOCDEX_INFO_START_PREFIX)}[^\\r\\n]* ----\\r?\\n([\\s\\S]*?)\\r?\\n${docdexInfoEndPattern()}`
     )
   );
   return match ? normalizeInstructionText(match[1]) : "";
@@ -383,11 +387,17 @@ function hasDocdexBlockVersion(text, version) {
   return String(text || "").includes(docdexBlockStart(version));
 }

+function hasDocdexBlock(text) {
+  const source = String(text || "");
+  return (
+    source.includes(DOCDEX_INFO_START_PREFIX) &&
+    (source.includes(DOCDEX_INFO_END) || source.includes(DOCDEX_INFO_END_LEGACY))
+  );
+}
+
 function stripDocdexBlocks(text) {
   const re = new RegExp(
-    `${escapeRegExp(DOCDEX_INFO_START_PREFIX)}[^\\r\\n]* ----\\r?\\n[\\s\\S]*?\\r?\\n${escapeRegExp(
-      DOCDEX_INFO_END
-    )}\\r?\\n?`,
+    `${escapeRegExp(DOCDEX_INFO_START_PREFIX)}[^\\r\\n]* ----\\r?\\n[\\s\\S]*?\\r?\\n${docdexInfoEndPattern()}\\r?\\n?`,
     "g"
   );
   return String(text || "").replace(re, "").trim();
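What the legacy end marker buys is easiest to see with a tiny illustration; the sample strings are invented, and the helpers are assumed to be in scope inside postinstall_setup.js.

```js
// Sketch only: a block written by an older release ends with four trailing
// dashes; docdexInfoEndPattern() lets the strip/detect helpers accept both.
const legacyBlock = [
  "---- START OF DOCDEX INFO V0.2.24 ----",
  "old instructions",
  "---- END OF DOCDEX INFO ----", // legacy marker: four trailing dashes
].join("\n");

console.log(hasDocdexBlock(legacyBlock));                      // true
console.log(stripDocdexBlocks("# My notes\n" + legacyBlock));  // "# My notes"
```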
@@ -397,9 +407,7 @@ function stripDocdexBlocksExcept(text, version) {
   if (!version) return stripDocdexBlocks(text);
   const source = String(text || "");
   const re = new RegExp(
-    `${escapeRegExp(DOCDEX_INFO_START_PREFIX)}[^\\r\\n]* ----\\r?\\n[\\s\\S]*?\\r?\\n${escapeRegExp(
-      DOCDEX_INFO_END
-    )}\\r?\\n?`,
+    `${escapeRegExp(DOCDEX_INFO_START_PREFIX)}[^\\r\\n]* ----\\r?\\n[\\s\\S]*?\\r?\\n${docdexInfoEndPattern()}\\r?\\n?`,
     "g"
   );
   let result = "";
@@ -432,7 +440,7 @@ function stripLegacyDocdexBody(text, body) {
   if (!body) return String(text || "");
   const source = String(text || "").replace(/\r\n/g, "\n");
   const re = new RegExp(
-    `${escapeRegExp(DOCDEX_INFO_START_PREFIX)}[^\\n]* ----\\n[\\s\\S]*?\\n${
+    `${escapeRegExp(DOCDEX_INFO_START_PREFIX)}[^\\n]* ----\\n[\\s\\S]*?\\n${docdexInfoEndPattern()}\\n?`,
     "g"
   );
   let result = "";
@@ -493,6 +501,18 @@ function upsertPromptFile(pathname, instructions, { prepend = false } = {}) {
   return writeTextFile(pathname, merged);
 }

+function removePromptFile(pathname) {
+  if (!fs.existsSync(pathname)) return false;
+  const current = fs.readFileSync(pathname, "utf8");
+  const stripped = stripDocdexBlocks(current);
+  if (normalizeInstructionText(stripped) === normalizeInstructionText(current)) return false;
+  if (!stripped) {
+    fs.unlinkSync(pathname);
+    return true;
+  }
+  return writeTextFile(pathname, stripped);
+}
+
 function escapeRegExp(value) {
   return value.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
 }
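removePromptFile has two outcomes worth spelling out; a sketch with hypothetical file contents (fs, os, and path are already imported at the top of this module).

```js
// Sketch only: the two cleanup outcomes of removePromptFile().
const tmp = path.join(os.tmpdir(), "copilot-instructions.md");

// 1) File holds only a docdex block -> the whole file is unlinked.
fs.writeFileSync(
  tmp,
  "---- START OF DOCDEX INFO V0.2.24 ----\nold\n---- END OF DOCDEX INFO -----\n"
);
removePromptFile(tmp); // returns true; the file no longer exists

// 2) User content plus a docdex block -> only the block is stripped.
fs.writeFileSync(
  tmp,
  "My own rules\n---- START OF DOCDEX INFO V0.2.24 ----\nold\n---- END OF DOCDEX INFO -----\n"
);
removePromptFile(tmp); // file now contains just "My own rules"
```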
@@ -554,13 +574,7 @@ function upsertYamlInstruction(pathname, key, instructions) {
 }

 function upsertClaudeInstructions(pathname, instructions) {
-
-  if (typeof value !== "object" || value == null || Array.isArray(value)) return false;
-  const merged = mergeInstructionText(value.instructions, instructions);
-  if (!merged || merged === value.instructions) return false;
-  value.instructions = merged;
-  writeJson(pathname, value);
-  return true;
+  return upsertPromptFile(pathname, instructions);
 }

 function upsertContinueJsonInstructions(pathname, instructions) {
@@ -731,7 +745,7 @@ function rewriteContinueYamlRules(source, instructions, addDocdex) {

   const keptItems = items.filter((item) => {
     const text = item.join("\n");
-    return !(text
+    return !hasDocdexBlock(text);
   });

   if (addDocdex) {
@@ -786,45 +800,43 @@ function upsertVsCodeInstructionKey(value, key, instructions) {
   return true;
 }

-function
-
-const
-
-
-
-
-
-
-
-  value[key].push(location);
+function removeVsCodeInstructionKey(value, key, instructions, { legacyPath } = {}) {
+  if (typeof value[key] !== "string") return false;
+  const current = value[key];
+  const stripped = stripDocdexBlocks(current);
+  if (normalizeInstructionText(stripped) !== normalizeInstructionText(current)) {
+    if (!stripped) {
+      delete value[key];
+    } else {
+      value[key] = stripped;
+    }
     return true;
   }
-
-
-
+  const normalized = normalizeInstructionText(instructions);
+  if (current === normalized || (legacyPath && current === legacyPath)) {
+    delete value[key];
     return true;
   }
-
-  return true;
+  return false;
 }

-function upsertVsCodeInstructions(pathname, instructions,
+function upsertVsCodeInstructions(pathname, instructions, legacyPath) {
   const { value } = readJson(pathname);
   if (typeof value !== "object" || value == null || Array.isArray(value)) return false;
   const normalized = normalizeInstructionText(instructions);
   if (!normalized) return false;
   let updated = false;
-  if (upsertVsCodeInstructionKey(value, "
-    updated = true;
-  }
-  if (upsertVsCodeInstructionKey(value, "copilot.chat.codeGeneration.instructions", instructions)) {
+  if (upsertVsCodeInstructionKey(value, "chat.instructions", instructions)) {
     updated = true;
   }
-  if (value
-    value["github.copilot.chat.codeGeneration.useInstructionFiles"] = true;
+  if (removeVsCodeInstructionKey(value, "github.copilot.chat.codeGeneration.instructions", instructions)) {
     updated = true;
   }
-  if (
+  if (
+    removeVsCodeInstructionKey(value, "copilot.chat.codeGeneration.instructions", instructions, {
+      legacyPath
+    })
+  ) {
     updated = true;
   }
   if (!updated) return false;
@@ -1169,11 +1181,18 @@ function clientInstructionPaths() {
   const aiderConfig = path.join(home, ".aider.conf.yml");
   const gooseConfig = path.join(home, ".config", "goose", "config.yaml");
   const openInterpreterConfig = path.join(home, ".openinterpreter", "profiles", "default.yaml");
+  const geminiInstructions = path.join(userProfile, ".gemini", "GEMINI.md");
+  const claudeInstructions = path.join(userProfile, ".claude", "CLAUDE.md");
+  const cursorAgents = path.join(userProfile, ".cursor", "agents.md");
+  const cursorAgentsUpper = path.join(userProfile, ".cursor", "AGENTS.md");
   const codexAgents = path.join(userProfile, ".codex", "AGENTS.md");
   switch (process.platform) {
     case "win32":
       return {
-
+        gemini: geminiInstructions,
+        claude: claudeInstructions,
+        cursorAgents,
+        cursorAgentsUpper,
         continue: continueJson,
         continueYaml,
         continueYml,
@@ -1192,7 +1211,10 @@ function clientInstructionPaths() {
       };
     case "darwin":
       return {
-
+        gemini: geminiInstructions,
+        claude: claudeInstructions,
+        cursorAgents,
+        cursorAgentsUpper,
         continue: continueJson,
         continueYaml,
         continueYml,
@@ -1211,7 +1233,10 @@ function clientInstructionPaths() {
       };
     default:
       return {
-
+        gemini: geminiInstructions,
+        claude: claudeInstructions,
+        cursorAgents,
+        cursorAgentsUpper,
         continue: continueJson,
         continueYaml,
         continueYml,
@@ -1295,18 +1320,22 @@ function applyAgentInstructions({ logger } = {}) {
   };

   if (paths.vscodeGlobalInstructions) {
-    safeApply("vscode-global", () =>
-      upsertPromptFile(paths.vscodeGlobalInstructions, instructions, { prepend: true })
-    );
+    safeApply("vscode-global-cleanup", () => removePromptFile(paths.vscodeGlobalInstructions));
   }
   if (paths.vscodeInstructionsFile) {
-    safeApply("vscode-instructions-file", () =>
-
+    safeApply("vscode-instructions-file-cleanup", () =>
+      removePromptFile(paths.vscodeInstructionsFile)
     );
   }
-  if (paths.
+  if (paths.cursorAgents) {
+    safeApply("cursor-legacy-cleanup", () => removePromptFile(paths.cursorAgents));
+  }
+  if (paths.cursorAgentsUpper) {
+    safeApply("cursor-legacy-upper-cleanup", () => removePromptFile(paths.cursorAgentsUpper));
+  }
+  if (paths.vscodeSettings) {
     safeApply("vscode-settings", () =>
-      upsertVsCodeInstructions(paths.vscodeSettings, instructions, paths.
+      upsertVsCodeInstructions(paths.vscodeSettings, instructions, paths.vscodeGlobalInstructions)
     );
   }
   if (paths.windsurfGlobalRules) {
@@ -1321,6 +1350,9 @@ function applyAgentInstructions({ logger } = {}) {
   if (paths.claude) {
     safeApply("claude", () => upsertClaudeInstructions(paths.claude, instructions));
   }
+  if (paths.gemini) {
+    safeApply("gemini", () => upsertPromptFile(paths.gemini, instructions));
+  }
   const continueYamlExists =
     (paths.continueYaml && fs.existsSync(paths.continueYaml)) ||
     (paths.continueYml && fs.existsSync(paths.continueYml));