rex-claude 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +163 -0
- package/activity/activity.jsonl +401 -0
- package/activity/config.lua +3 -0
- package/activity/init.lua +49 -0
- package/dist/cli.js +204 -0
- package/dotfiles/CLAUDE.md +136 -0
- package/dotfiles/commands/clean.md +8 -0
- package/dotfiles/commands/doc.md +8 -0
- package/dotfiles/commands/review.md +15 -0
- package/dotfiles/commands/scaffold.md +11 -0
- package/dotfiles/commands/test.md +11 -0
- package/dotfiles/docs/cloudflare.md +62 -0
- package/dotfiles/docs/nextjs.md +79 -0
- package/dotfiles/docs/react.md +63 -0
- package/dotfiles/docs/tailwind.md +45 -0
- package/dotfiles/docs/telegram-bot.md +55 -0
- package/dotfiles/rules/api-design.md +63 -0
- package/dotfiles/rules/defensive-engineering.md +42 -0
- package/dotfiles/rules/docs-first.md +47 -0
- package/dotfiles/rules/frontend.md +41 -0
- package/dotfiles/rules/git-workflow.md +57 -0
- package/dotfiles/rules/never-assume.md +39 -0
- package/dotfiles/rules/security.md +46 -0
- package/dotfiles/rules/testing.md +33 -0
- package/dotfiles/settings.json +69 -0
- package/dotfiles/skills/build-validate/SKILL.md +16 -0
- package/dotfiles/skills/code-review/SKILL.md +18 -0
- package/dotfiles/skills/context-loader/SKILL.md +25 -0
- package/dotfiles/skills/debug-assist/SKILL.md +26 -0
- package/dotfiles/skills/deploy-checklist/SKILL.md +54 -0
- package/dotfiles/skills/dstudio-design-system/SKILL.md +120 -0
- package/dotfiles/skills/figma-workflow/SKILL.md +23 -0
- package/dotfiles/skills/fix-issue/SKILL.md +43 -0
- package/dotfiles/skills/one-shot/SKILL.md +18 -0
- package/dotfiles/skills/pr-review-loop/SKILL.md +41 -0
- package/dotfiles/skills/project-init/SKILL.md +45 -0
- package/dotfiles/skills/research/SKILL.md +17 -0
- package/dotfiles/skills/spec-interview/SKILL.md +20 -0
- package/dotfiles/skills/token-guard/SKILL.md +26 -0
- package/dotfiles/templates/CLAUDE.md.template +39 -0
- package/memory/package.json +24 -0
- package/memory/src/embed.ts +23 -0
- package/memory/src/ingest.ts +257 -0
- package/memory/src/search.ts +32 -0
- package/memory/src/server.ts +69 -0
- package/memory/tsconfig.json +14 -0
- package/package.json +39 -0
- package/tmux/.tmux.conf +73 -0
|
@@ -0,0 +1,257 @@
|
|
|
1
|
+
import { readFileSync, readdirSync, existsSync, statSync, mkdirSync } from "fs";
import { homedir } from "os";
import { join, basename } from "path";
import Database from "better-sqlite3";
import * as sqliteVec from "sqlite-vec";
import { embed, embeddingToBuffer, EMBEDDING_DIM } from "./embed.js";
|
|
6
|
+
|
|
7
|
+
// Location of the SQLite database file (memory/db/rex.sqlite, sibling of src/).
const DB_PATH = join(import.meta.dirname, "..", "db", "rex.sqlite");
// Cap on how many chunks a single session file may contribute.
const MAX_CHUNKS_PER_FILE = 50;
// Once accumulated text exceeds this many characters, a chunk is emitted.
const CHUNK_SIZE = 1000;
// Hard upper bound on a stored chunk; longer text is truncated.
const MAX_CHUNK_LENGTH = 2000;
// Messages shorter than this are ignored as noise.
const MIN_TEXT_LENGTH = 50;
// A trailing partial chunk is kept only if it is at least this long.
const MIN_FINAL_CHUNK = 100;
// Delay (ms) between embedding retry attempts.
const EMBED_RETRY_DELAY = 2000;
// Maximum embedding attempts before a chunk is skipped.
const EMBED_MAX_RETRIES = 3;

// Lazily-initialized singleton database handle (see getDb()).
let _db: Database.Database | null = null;
|
|
17
|
+
|
|
18
|
+
export function getDb(): Database.Database {
|
|
19
|
+
if (_db) return _db;
|
|
20
|
+
|
|
21
|
+
const dbDir = join(import.meta.dirname, "..", "db");
|
|
22
|
+
if (!existsSync(dbDir)) {
|
|
23
|
+
const { mkdirSync } = require("fs");
|
|
24
|
+
mkdirSync(dbDir, { recursive: true });
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
_db = new Database(DB_PATH);
|
|
28
|
+
sqliteVec.load(_db);
|
|
29
|
+
|
|
30
|
+
_db.pragma("journal_mode = WAL");
|
|
31
|
+
_db.defaultSafeIntegers(false);
|
|
32
|
+
|
|
33
|
+
_db.exec(`
|
|
34
|
+
CREATE TABLE IF NOT EXISTS memories (
|
|
35
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
36
|
+
content TEXT NOT NULL,
|
|
37
|
+
category TEXT NOT NULL DEFAULT 'general',
|
|
38
|
+
source TEXT,
|
|
39
|
+
project TEXT,
|
|
40
|
+
created_at TEXT DEFAULT (datetime('now'))
|
|
41
|
+
);
|
|
42
|
+
|
|
43
|
+
CREATE VIRTUAL TABLE IF NOT EXISTS memory_vec USING vec0(
|
|
44
|
+
embedding float[${EMBEDDING_DIM}]
|
|
45
|
+
);
|
|
46
|
+
|
|
47
|
+
CREATE TABLE IF NOT EXISTS ingest_log (
|
|
48
|
+
file_path TEXT PRIMARY KEY,
|
|
49
|
+
chunks_count INTEGER,
|
|
50
|
+
ingested_at TEXT DEFAULT (datetime('now'))
|
|
51
|
+
);
|
|
52
|
+
`);
|
|
53
|
+
|
|
54
|
+
return _db;
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
async function embedWithRetry(text: string): Promise<Float32Array> {
|
|
58
|
+
for (let attempt = 1; attempt <= EMBED_MAX_RETRIES; attempt++) {
|
|
59
|
+
try {
|
|
60
|
+
return await embed(text);
|
|
61
|
+
} catch (err) {
|
|
62
|
+
if (attempt === EMBED_MAX_RETRIES) throw err;
|
|
63
|
+
console.error(` Embed attempt ${attempt} failed, retrying in ${EMBED_RETRY_DELAY}ms...`);
|
|
64
|
+
await new Promise((r) => setTimeout(r, EMBED_RETRY_DELAY));
|
|
65
|
+
}
|
|
66
|
+
}
|
|
67
|
+
throw new Error("Unreachable");
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
export async function learn(fact: string, category: string = "general", source?: string, project?: string): Promise<void> {
|
|
71
|
+
const db = getDb();
|
|
72
|
+
|
|
73
|
+
let embedding: Float32Array;
|
|
74
|
+
try {
|
|
75
|
+
embedding = await embedWithRetry(fact);
|
|
76
|
+
} catch (err) {
|
|
77
|
+
console.error(` Skipping chunk (embedding failed): ${(err as Error).message}`);
|
|
78
|
+
return;
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
try {
|
|
82
|
+
const info = db.prepare("INSERT INTO memories (content, category, source, project) VALUES (?, ?, ?, ?)").run(fact, category, source ?? null, project ?? null);
|
|
83
|
+
db.prepare("INSERT INTO memory_vec (rowid, embedding) VALUES (CAST(? AS INTEGER), ?)").run(Number(info.lastInsertRowid), embeddingToBuffer(embedding));
|
|
84
|
+
} catch (err) {
|
|
85
|
+
console.error(` Skipping chunk (DB insert failed): ${(err as Error).message}`);
|
|
86
|
+
}
|
|
87
|
+
}
|
|
88
|
+
|
|
89
|
+
export async function getContext(projectPath: string): Promise<string> {
|
|
90
|
+
const db = getDb();
|
|
91
|
+
const projectName = basename(projectPath);
|
|
92
|
+
|
|
93
|
+
const projectMemories = db
|
|
94
|
+
.prepare("SELECT content, category FROM memories WHERE project = ? ORDER BY created_at DESC LIMIT 20")
|
|
95
|
+
.all(projectName) as Array<{ content: string; category: string }>;
|
|
96
|
+
|
|
97
|
+
let semanticResults: Array<{ content: string; category: string; score: number }> = [];
|
|
98
|
+
try {
|
|
99
|
+
semanticResults = await (await import("./search.js")).search(projectName, 5);
|
|
100
|
+
} catch {
|
|
101
|
+
// Ollama might be down — degrade gracefully
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
const sections: string[] = [];
|
|
105
|
+
|
|
106
|
+
if (projectMemories.length) {
|
|
107
|
+
sections.push("## Project memories\n" + projectMemories.map((m) => `- [${m.category}] ${m.content}`).join("\n"));
|
|
108
|
+
}
|
|
109
|
+
|
|
110
|
+
if (semanticResults.length) {
|
|
111
|
+
sections.push("## Related context\n" + semanticResults.map((r) => `- [${r.category}] (${r.score.toFixed(2)}) ${r.content}`).join("\n"));
|
|
112
|
+
}
|
|
113
|
+
|
|
114
|
+
return sections.join("\n\n") || "";
|
|
115
|
+
}
|
|
116
|
+
|
|
117
|
+
function extractTextFromMessage(msg: any): string {
|
|
118
|
+
if (!msg?.message?.content) return "";
|
|
119
|
+
if (typeof msg.message.content === "string") return msg.message.content;
|
|
120
|
+
if (!Array.isArray(msg.message.content)) return "";
|
|
121
|
+
return msg.message.content
|
|
122
|
+
.filter((c: any) => c?.type === "text" && typeof c?.text === "string")
|
|
123
|
+
.map((c: any) => c.text)
|
|
124
|
+
.join("\n");
|
|
125
|
+
}
|
|
126
|
+
|
|
127
|
+
function chunkText(lines: string[]): string[] {
|
|
128
|
+
const chunks: string[] = [];
|
|
129
|
+
let current = "";
|
|
130
|
+
|
|
131
|
+
for (const line of lines) {
|
|
132
|
+
try {
|
|
133
|
+
const msg = JSON.parse(line);
|
|
134
|
+
if (msg.type !== "human" && msg.type !== "assistant") continue;
|
|
135
|
+
|
|
136
|
+
const text = extractTextFromMessage(msg);
|
|
137
|
+
if (text.length < MIN_TEXT_LENGTH) continue;
|
|
138
|
+
|
|
139
|
+
current += text + "\n";
|
|
140
|
+
if (current.length > CHUNK_SIZE) {
|
|
141
|
+
chunks.push(current.slice(0, MAX_CHUNK_LENGTH));
|
|
142
|
+
current = "";
|
|
143
|
+
}
|
|
144
|
+
} catch {
|
|
145
|
+
// skip malformed JSON lines
|
|
146
|
+
}
|
|
147
|
+
}
|
|
148
|
+
|
|
149
|
+
if (current.length > MIN_FINAL_CHUNK) {
|
|
150
|
+
chunks.push(current.slice(0, MAX_CHUNK_LENGTH));
|
|
151
|
+
}
|
|
152
|
+
|
|
153
|
+
return chunks.slice(0, MAX_CHUNKS_PER_FILE);
|
|
154
|
+
}
|
|
155
|
+
|
|
156
|
+
async function checkOllama(): Promise<boolean> {
|
|
157
|
+
try {
|
|
158
|
+
const res = await fetch(`${process.env.OLLAMA_URL || "http://localhost:11434"}/api/tags`);
|
|
159
|
+
return res.ok;
|
|
160
|
+
} catch {
|
|
161
|
+
return false;
|
|
162
|
+
}
|
|
163
|
+
}
|
|
164
|
+
|
|
165
|
+
async function ingestSessions() {
|
|
166
|
+
// Pre-flight: check Ollama
|
|
167
|
+
if (!(await checkOllama())) {
|
|
168
|
+
console.error("ERROR: Ollama is not running. Start it with: ollama serve");
|
|
169
|
+
process.exit(1);
|
|
170
|
+
}
|
|
171
|
+
|
|
172
|
+
const sessionsDir = join(process.env.HOME || "~", ".claude", "projects");
|
|
173
|
+
if (!existsSync(sessionsDir)) {
|
|
174
|
+
console.error("No sessions directory found at", sessionsDir);
|
|
175
|
+
return;
|
|
176
|
+
}
|
|
177
|
+
|
|
178
|
+
const db = getDb();
|
|
179
|
+
|
|
180
|
+
// Use ingest_log to track already-processed files (more reliable than source column)
|
|
181
|
+
const alreadyIngested = new Set(
|
|
182
|
+
(db.prepare("SELECT file_path FROM ingest_log").all() as Array<{ file_path: string }>).map((r) => r.file_path)
|
|
183
|
+
);
|
|
184
|
+
|
|
185
|
+
let totalIngested = 0;
|
|
186
|
+
let totalSkipped = 0;
|
|
187
|
+
let totalErrors = 0;
|
|
188
|
+
|
|
189
|
+
let projectDirs: string[];
|
|
190
|
+
try {
|
|
191
|
+
projectDirs = readdirSync(sessionsDir).filter((d) => {
|
|
192
|
+
try {
|
|
193
|
+
return statSync(join(sessionsDir, d)).isDirectory();
|
|
194
|
+
} catch {
|
|
195
|
+
return false;
|
|
196
|
+
}
|
|
197
|
+
});
|
|
198
|
+
} catch (err) {
|
|
199
|
+
console.error("Cannot read sessions directory:", err);
|
|
200
|
+
return;
|
|
201
|
+
}
|
|
202
|
+
|
|
203
|
+
for (const projectDir of projectDirs) {
|
|
204
|
+
const projectPath = join(sessionsDir, projectDir);
|
|
205
|
+
|
|
206
|
+
let files: string[];
|
|
207
|
+
try {
|
|
208
|
+
files = readdirSync(projectPath).filter((f) => f.endsWith(".jsonl"));
|
|
209
|
+
} catch {
|
|
210
|
+
continue;
|
|
211
|
+
}
|
|
212
|
+
|
|
213
|
+
for (const file of files) {
|
|
214
|
+
const filePath = join(projectPath, file);
|
|
215
|
+
|
|
216
|
+
if (alreadyIngested.has(filePath)) {
|
|
217
|
+
totalSkipped++;
|
|
218
|
+
continue;
|
|
219
|
+
}
|
|
220
|
+
|
|
221
|
+
try {
|
|
222
|
+
const content = readFileSync(filePath, "utf-8");
|
|
223
|
+
const lines = content.split("\n").filter(Boolean);
|
|
224
|
+
const chunks = chunkText(lines);
|
|
225
|
+
|
|
226
|
+
if (chunks.length === 0) {
|
|
227
|
+
// Mark as processed even if empty (no need to re-parse)
|
|
228
|
+
db.prepare("INSERT OR IGNORE INTO ingest_log (file_path, chunks_count) VALUES (?, 0)").run(filePath);
|
|
229
|
+
continue;
|
|
230
|
+
}
|
|
231
|
+
|
|
232
|
+
let fileIngested = 0;
|
|
233
|
+
for (const chunk of chunks) {
|
|
234
|
+
await learn(chunk, "session", filePath, projectDir);
|
|
235
|
+
fileIngested++;
|
|
236
|
+
}
|
|
237
|
+
|
|
238
|
+
db.prepare("INSERT OR IGNORE INTO ingest_log (file_path, chunks_count) VALUES (?, ?)").run(filePath, fileIngested);
|
|
239
|
+
totalIngested += fileIngested;
|
|
240
|
+
|
|
241
|
+
if (fileIngested > 0) {
|
|
242
|
+
console.log(` ${file}: ${fileIngested} chunks`);
|
|
243
|
+
}
|
|
244
|
+
} catch (err) {
|
|
245
|
+
totalErrors++;
|
|
246
|
+
console.error(` Error ${file}: ${(err as Error).message}`);
|
|
247
|
+
}
|
|
248
|
+
}
|
|
249
|
+
}
|
|
250
|
+
|
|
251
|
+
console.log(`\nDone: ${totalIngested} ingested, ${totalSkipped} skipped (already done), ${totalErrors} errors`);
|
|
252
|
+
}
|
|
253
|
+
|
|
254
|
+
// Run CLI if called directly — the .ts suffix matches the source run under a
// TS loader, the .js suffix matches the compiled entry point.
if (process.argv[1]?.endsWith("ingest.ts") || process.argv[1]?.endsWith("ingest.js")) {
  ingestSessions().catch(console.error);
}
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
import { getDb } from "./ingest.js";
|
|
2
|
+
import { embed, embeddingToBuffer } from "./embed.js";
|
|
3
|
+
|
|
4
|
+
/** One hit returned by {@link search}. */
export interface SearchResult {
  // The stored memory text.
  content: string;
  // Memory category (e.g. "general", "session").
  category: string;
  // 1 - cosine distance between query and stored embedding; higher = closer.
  score: number;
  // Insertion timestamp as stored by SQLite (datetime('now') default).
  created_at: string;
}
|
|
10
|
+
|
|
11
|
+
export async function search(query: string, limit: number = 10): Promise<SearchResult[]> {
|
|
12
|
+
const db = getDb();
|
|
13
|
+
const queryEmbedding = await embed(query);
|
|
14
|
+
const buf = embeddingToBuffer(queryEmbedding);
|
|
15
|
+
|
|
16
|
+
const rows = db
|
|
17
|
+
.prepare(
|
|
18
|
+
`SELECT m.content, m.category, m.created_at, vec_distance_cosine(e.embedding, ?) as distance
|
|
19
|
+
FROM memory_vec e
|
|
20
|
+
JOIN memories m ON m.id = e.rowid
|
|
21
|
+
ORDER BY distance ASC
|
|
22
|
+
LIMIT CAST(? AS INTEGER)`
|
|
23
|
+
)
|
|
24
|
+
.all(buf, limit) as Array<{ content: string; category: string; created_at: string; distance: number }>;
|
|
25
|
+
|
|
26
|
+
return rows.map((r) => ({
|
|
27
|
+
content: r.content,
|
|
28
|
+
category: r.category,
|
|
29
|
+
score: 1 - r.distance,
|
|
30
|
+
created_at: r.created_at,
|
|
31
|
+
}));
|
|
32
|
+
}
|
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
|
|
2
|
+
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
|
|
3
|
+
import { z } from "zod";
|
|
4
|
+
import { search } from "./search.js";
|
|
5
|
+
import { learn, getContext } from "./ingest.js";
|
|
6
|
+
|
|
7
|
+
// MCP server exposing the REX memory tools (search / learn / context).
// Transport is attached later in main() over stdio.
const server = new McpServer({
  name: "rex-memory",
  version: "1.0.0",
});
|
|
11
|
+
|
|
12
|
+
server.tool(
|
|
13
|
+
"rex_search",
|
|
14
|
+
"Search past sessions and learned facts semantically",
|
|
15
|
+
{ query: z.string().describe("Natural language search query"), limit: z.number().optional().default(10) },
|
|
16
|
+
async ({ query, limit }) => {
|
|
17
|
+
const results = await search(query, limit);
|
|
18
|
+
return {
|
|
19
|
+
content: [
|
|
20
|
+
{
|
|
21
|
+
type: "text" as const,
|
|
22
|
+
text: results.length
|
|
23
|
+
? results.map((r) => `[${r.category}] (score: ${r.score.toFixed(3)})\n${r.content}`).join("\n---\n")
|
|
24
|
+
: "No relevant memories found.",
|
|
25
|
+
},
|
|
26
|
+
],
|
|
27
|
+
};
|
|
28
|
+
}
|
|
29
|
+
);
|
|
30
|
+
|
|
31
|
+
server.tool(
|
|
32
|
+
"rex_learn",
|
|
33
|
+
"Memorize a fact, pattern, or lesson learned",
|
|
34
|
+
{
|
|
35
|
+
fact: z.string().describe("The fact or pattern to remember"),
|
|
36
|
+
category: z.string().optional().default("general").describe("Category: pattern, debug, preference, architecture, lesson"),
|
|
37
|
+
},
|
|
38
|
+
async ({ fact, category }) => {
|
|
39
|
+
await learn(fact, category);
|
|
40
|
+
return {
|
|
41
|
+
content: [{ type: "text" as const, text: `Learned: [${category}] ${fact.slice(0, 80)}...` }],
|
|
42
|
+
};
|
|
43
|
+
}
|
|
44
|
+
);
|
|
45
|
+
|
|
46
|
+
server.tool(
|
|
47
|
+
"rex_context",
|
|
48
|
+
"Get relevant context for the current project",
|
|
49
|
+
{
|
|
50
|
+
project_path: z.string().describe("Absolute path to the project directory"),
|
|
51
|
+
},
|
|
52
|
+
async ({ project_path }) => {
|
|
53
|
+
const context = await getContext(project_path);
|
|
54
|
+
return {
|
|
55
|
+
content: [{ type: "text" as const, text: context || "No context found for this project." }],
|
|
56
|
+
};
|
|
57
|
+
}
|
|
58
|
+
);
|
|
59
|
+
|
|
60
|
+
async function main() {
|
|
61
|
+
const transport = new StdioServerTransport();
|
|
62
|
+
await server.connect(transport);
|
|
63
|
+
console.error("REX Memory MCP server running on stdio");
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
main().catch((err) => {
|
|
67
|
+
console.error("Fatal:", err);
|
|
68
|
+
process.exit(1);
|
|
69
|
+
});
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
{
|
|
2
|
+
"compilerOptions": {
|
|
3
|
+
"target": "ES2022",
|
|
4
|
+
"module": "Node16",
|
|
5
|
+
"moduleResolution": "Node16",
|
|
6
|
+
"outDir": "dist",
|
|
7
|
+
"rootDir": "src",
|
|
8
|
+
"strict": true,
|
|
9
|
+
"esModuleInterop": true,
|
|
10
|
+
"skipLibCheck": true,
|
|
11
|
+
"declaration": true
|
|
12
|
+
},
|
|
13
|
+
"include": ["src"]
|
|
14
|
+
}
|
package/package.json
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "rex-claude",
|
|
3
|
+
"version": "1.0.0",
|
|
4
|
+
"description": "REX — Config unifiée + MCP memory server + activity logger pour Claude Code",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"bin": {
|
|
7
|
+
"rex": "./dist/cli.js"
|
|
8
|
+
},
|
|
9
|
+
"files": [
|
|
10
|
+
"dist/",
|
|
11
|
+
"dotfiles/",
|
|
12
|
+
"memory/src/",
|
|
13
|
+
"memory/package.json",
|
|
14
|
+
"memory/package-lock.json",
|
|
15
|
+
"memory/tsconfig.json",
|
|
16
|
+
"activity/",
|
|
17
|
+
"tmux/"
|
|
18
|
+
],
|
|
19
|
+
"scripts": {
|
|
20
|
+
"build": "tsc",
|
|
21
|
+
"prepublishOnly": "npm run build"
|
|
22
|
+
},
|
|
23
|
+
"publishConfig": {
|
|
24
|
+
"access": "public"
|
|
25
|
+
},
|
|
26
|
+
"keywords": [
|
|
27
|
+
"claude-code",
|
|
28
|
+
"claude",
|
|
29
|
+
"mcp",
|
|
30
|
+
"dev-tools",
|
|
31
|
+
"dotfiles"
|
|
32
|
+
],
|
|
33
|
+
"author": "Kevin <kevin@dstudio.company>",
|
|
34
|
+
"license": "MIT",
|
|
35
|
+
"devDependencies": {
|
|
36
|
+
"@types/node": "^25.3.3",
|
|
37
|
+
"typescript": "^5.7.0"
|
|
38
|
+
}
|
|
39
|
+
}
|
package/tmux/.tmux.conf
ADDED
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
# REX tmux config — optimized for Claude Code agent teams

# Modern terminal + colors
# (the terminal-overrides entry enables 24-bit "true color" for xterm clients)
set -g default-terminal "tmux-256color"
set -ga terminal-overrides ",xterm-256color:Tc"

# Shell
set -g default-shell /bin/zsh

# Longer scrollback for agent output
set -g history-limit 50000

# Faster escape (important for Claude Code responsiveness)
set -sg escape-time 10

# Mouse support
set -g mouse on

# Start windows/panes at 1
set -g base-index 1
setw -g pane-base-index 1

# Renumber windows on close
set -g renumber-windows on

# Activity monitoring (useful for agent teams)
# visual-activity off: flag activity in the status line instead of a popup
setw -g monitor-activity on
set -g visual-activity off

# Status bar
set -g status-position bottom
set -g status-interval 5
set -g status-style "bg=colour235,fg=colour248"
set -g status-left "#[bg=colour31,fg=colour255,bold] #S #[default] "
set -g status-left-length 30
set -g status-right "#[fg=colour245]%H:%M #[fg=colour31]#h"
set -g status-right-length 50

# Window status
setw -g window-status-format " #I:#W "
setw -g window-status-current-format "#[bg=colour31,fg=colour255,bold] #I:#W "

# Pane borders
set -g pane-border-style "fg=colour238"
set -g pane-active-border-style "fg=colour31"

# Allow long-running processes (agent teams)
set -g remain-on-exit off

# Increase message display time
set -g display-time 2000
set -g display-panes-time 2000

# Focus events (needed for some editors/tools)
set -g focus-events on

# Prefix: Ctrl-a (easier than Ctrl-b)
unbind C-b
set -g prefix C-a
bind C-a send-prefix

# Split panes with | and -
# (-c keeps the new pane in the current pane's working directory)
bind | split-window -h -c "#{pane_current_path}"
bind - split-window -v -c "#{pane_current_path}"

# Navigate panes with Alt+arrow
bind -n M-Left select-pane -L
bind -n M-Right select-pane -R
bind -n M-Up select-pane -U
bind -n M-Down select-pane -D

# Reload config
bind r source-file ~/.tmux.conf \; display "Config reloaded"
|