@context-os/cli 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (31) hide show
  1. package/dist/packages/core/src/index.js +88 -0
  2. package/dist/workspace-cli/src/commands/archive.js +51 -0
  3. package/dist/workspace-cli/src/commands/context.js +35 -0
  4. package/dist/workspace-cli/src/commands/decide.js +47 -0
  5. package/dist/workspace-cli/src/commands/extract.js +50 -0
  6. package/dist/workspace-cli/src/commands/health.js +77 -0
  7. package/dist/workspace-cli/src/commands/init.js +74 -0
  8. package/dist/workspace-cli/src/commands/prune.js +33 -0
  9. package/dist/workspace-cli/src/commands/search.js +33 -0
  10. package/dist/workspace-cli/src/commands/status.js +48 -0
  11. package/dist/workspace-cli/src/commands/summary.js +26 -0
  12. package/dist/workspace-cli/src/commands/sync.js +43 -0
  13. package/dist/workspace-cli/src/commands/tag.js +44 -0
  14. package/dist/workspace-cli/src/commands/today.js +40 -0
  15. package/dist/workspace-cli/src/commands/validate.js +136 -0
  16. package/dist/workspace-cli/src/index.js +40 -0
  17. package/dist/workspace-cli/src/tests/smoke.test.js +28 -0
  18. package/dist/workspace-cli/src/utils.js +1 -0
  19. package/package.json +54 -0
  20. package/templates/dot-gitignore +12 -0
  21. package/templates/root/anti-patterns.md +19 -0
  22. package/templates/root/changelog.md +15 -0
  23. package/templates/root/decisions.md +37 -0
  24. package/templates/root/personality.md +32 -0
  25. package/templates/root/preferences.md +25 -0
  26. package/templates/root/skills.md +18 -0
  27. package/templates/root/soul.md +44 -0
  28. package/templates/schemas/context.schema.json +25 -0
  29. package/templates/schemas/decision.schema.json +24 -0
  30. package/templates/schemas/memory.schema.json +24 -0
  31. package/templates/schemas/soul.schema.json +27 -0
@@ -0,0 +1,88 @@
1
+ import path from 'node:path';
2
+ import fs from 'node:fs';
3
+ import { spawn } from 'node:child_process';
4
/**
 * Discovers the workspace root by looking for root/soul.md in parent directories.
 *
 * Fix: the previous loop stopped as soon as `current === root`, so the
 * filesystem root itself was never inspected and a workspace located at the
 * volume root could not be detected. The walk now includes the root directory
 * before falling back to the CWD.
 *
 * @returns {string} Symlink-resolved absolute path of the workspace root, or
 *   the real path of the current working directory when no marker is found.
 */
export function findWorkspaceRoot() {
    let current = process.cwd();
    const fsRoot = path.parse(current).root;
    for (;;) {
        if (fs.existsSync(path.join(current, "root", "soul.md"))) {
            return fs.realpathSync(current);
        }
        if (current === fsRoot) {
            break; // Every ancestor, including the filesystem root, was checked.
        }
        current = path.dirname(current);
    }
    return fs.realpathSync(process.cwd()); // Fallback to CWD
}
18
/**
 * Symlink-resolved absolute root of the active ContextOS workspace,
 * computed once at module load time.
 */
export const workspaceRoot = findWorkspaceRoot();
/**
 * Standard ContextOS "Buckets" for security isolation.
 * Every validated path must live inside one of these top-level directories.
 */
export const ALLOWED_BUCKETS = ["projects", "knowledge", "schemas", "archive", "log", "orgs", "root"];
31
/**
 * Validates that a path is within the workspace root and inside an allowed bucket.
 *
 * @param {string} requestedPath - Path (absolute or workspace-relative) to check.
 * @returns {{fullPath: string, relativePath: string}} The (symlink-resolved when
 *   it exists) absolute path and its path relative to the workspace root.
 * @throws {Error} When the path escapes the workspace root or every allowed bucket.
 */
export function validatePath(requestedPath) {
    const resolved = path.resolve(workspaceRoot, requestedPath);
    let fullPath;
    try {
        // Resolve symlinks so a link pointing outside the workspace is caught.
        fullPath = fs.realpathSync(resolved);
    }
    catch (e) {
        // Path does not exist yet; validate the unresolved location instead.
        fullPath = resolved;
    }
    // A relative path starting with ".." (or that stays absolute) escapes `base`.
    const escapes = (base, target) => {
        const rel = path.relative(base, target);
        return rel.startsWith("..") || path.isAbsolute(rel);
    };
    const relativePath = path.relative(workspaceRoot, fullPath);
    // Security check: must be within the workspace root
    if (escapes(workspaceRoot, fullPath)) {
        throw new Error(`Security violation: Path ${requestedPath} is outside the allowed ContextOS workspace root.`);
    }
    // Enterprise check: must be within an allowed bucket
    const insideSomeBucket = ALLOWED_BUCKETS.some((bucket) => !escapes(path.join(workspaceRoot, bucket), fullPath));
    if (!insideSomeBucket) {
        throw new Error(`Security violation: Path ${requestedPath} is outside the allowed bucket (projects, orgs, knowledge, schemas, etc).`);
    }
    return { fullPath, relativePath };
}
59
/**
 * Checks if a path is in a read-only bucket for agents.
 *
 * @param {string} filePath - Path to test; validated via validatePath first,
 *   so a path outside the workspace throws rather than returning false.
 * @returns {boolean} True when the file lives under knowledge/, schemas/ or root/.
 */
export function isReadOnly(filePath) {
    const { fullPath } = validatePath(filePath);
    for (const bucket of ["knowledge", "schemas", "root"]) {
        const rel = path.relative(path.join(workspaceRoot, bucket), fullPath);
        if (!rel.startsWith("..") && !path.isAbsolute(rel)) {
            return true;
        }
    }
    return false;
}
71
/**
 * Executes an atomic git transaction (Add + Commit).
 *
 * Fix: both child processes now register an "error" handler. Without one, a
 * spawn failure (e.g. the git binary not being on PATH) emitted an unhandled
 * "error" event and crashed the CLI instead of rejecting the promise.
 *
 * @param {string} filePath - Path to stage, relative to the workspace root.
 * @param {string} message - Commit message.
 * @returns {Promise<void>} Resolves after the commit attempt; a non-zero
 *   commit exit code (e.g. "nothing to commit") is deliberately tolerated.
 */
export async function gitCommit(filePath, message) {
    return new Promise((resolve, reject) => {
        const add = spawn("git", ["add", filePath], { cwd: workspaceRoot });
        add.on("error", reject); // spawn failure, e.g. git executable not found
        add.on("close", (code) => {
            if (code !== 0 && code !== null) {
                return reject(new Error(`Git add failed with code ${code}`));
            }
            const commit = spawn("git", ["commit", "-m", message], { cwd: workspaceRoot });
            commit.on("error", reject);
            commit.on("close", (code) => {
                // If code is not 0, it might be "nothing to commit" which is fine for our tools
                resolve();
            });
        });
    });
}
@@ -0,0 +1,51 @@
1
+ import chalk from "chalk";
2
+ import ora from "ora";
3
+ import fs from "fs-extra";
4
+ import path from "path";
5
+ import { gitCommit } from "../utils.js";
6
/**
 * Registers the `archive` command: demotes a project's lifecycle tags to
 * #cold, moves it to archive/projects/, and commits the move.
 *
 * @param {import("commander").Command} program - CLI program to extend.
 */
export function archiveCommand(program) {
    program
        .command("archive")
        .description("Move a completed project to the archive and extract learnings")
        .argument("<project>", "Project name to archive")
        .action(async (project) => {
            const spinner = ora(`Archiving project ${chalk.cyan(project)}...`).start();
            try {
                const workspaceRoot = process.cwd();
                const projectDir = path.join(workspaceRoot, "projects", project);
                const archiveDir = path.join(workspaceRoot, "archive", "projects", project);
                if (!(await fs.pathExists(projectDir))) {
                    spinner.fail(chalk.red(`Project ${project} not found in projects/`));
                    return;
                }
                spinner.text = `Applying #cold tags recursively to ${project}...`;
                // Demote every active lifecycle tag (#hot/#warm) to #cold in each markdown file.
                const entries = await fs.readdir(projectDir, { recursive: true });
                for (const entry of entries) {
                    const entryPath = path.join(projectDir, entry);
                    const stats = await fs.stat(entryPath);
                    if (!stats.isFile() || !entryPath.endsWith(".md")) {
                        continue;
                    }
                    const original = await fs.readFile(entryPath, "utf-8");
                    let updated = original;
                    for (const activeTag of ["#hot", "#warm"]) {
                        updated = updated.split(activeTag).join("#cold");
                    }
                    // Only rewrite files that actually contained an active tag.
                    if (updated !== original) {
                        await fs.writeFile(entryPath, updated, "utf-8");
                    }
                }
                spinner.text = `Moving ${project} to archive...`;
                await fs.ensureDir(path.dirname(archiveDir));
                await fs.move(projectDir, archiveDir, { overwrite: true });
                await gitCommit(archiveDir, `feat(cli): archive project ${project}`);
                spinner.succeed(chalk.green(`Project ${project} archived successfully to archive/projects/${project}`));
                console.log(chalk.yellow(`\n[Intelligence] Archive complete. Next step: 'workspace extract ${project}' to distill learnings.`));
            }
            catch (error) {
                spinner.fail(chalk.red(`Archive failed: ${error.message}`));
            }
        });
}
@@ -0,0 +1,35 @@
1
+ import chalk from "chalk";
2
+ import ora from "ora";
3
+ import fs from "fs-extra";
4
+ import path from "path";
5
/**
 * Registers the `context` command: prints the agent boot context for a
 * project by concatenating its key context files in priority order.
 *
 * @param {import("commander").Command} program - CLI program to extend.
 */
export function contextCommand(program) {
    program
        .command("context")
        .description("Print agent boot context for a project")
        .argument("<project>", "Project name")
        .action(async (project) => {
            const spinner = ora(`Loading context for ${chalk.cyan(project)}...`).start();
            try {
                const workspaceRoot = process.cwd();
                const projectDir = path.join(workspaceRoot, "projects", project);
                if (!(await fs.pathExists(projectDir))) {
                    spinner.fail(chalk.red(`Project ${project} not found.`));
                    return;
                }
                let aggregatedContext = `\n${chalk.bold("--- 🚀 BOOT CONTEXT: " + project + " ---")}\n`;
                // Missing boot files are silently skipped.
                for (const bootFile of ["CONTEXT.md", "memory.md", "tasks/active.md"]) {
                    const bootPath = path.join(projectDir, bootFile);
                    if (!(await fs.pathExists(bootPath))) {
                        continue;
                    }
                    const body = await fs.readFile(bootPath, "utf-8");
                    aggregatedContext += `\n${chalk.yellow("📁 " + bootFile)}\n${body}\n`;
                }
                spinner.succeed(chalk.green(`Loaded context for ${project}`));
                console.log(aggregatedContext);
            }
            catch (error) {
                spinner.fail(chalk.red(`Context failed: ${error.message}`));
            }
        });
}
@@ -0,0 +1,47 @@
1
+ import chalk from "chalk";
2
+ import ora from "ora";
3
+ import fs from "fs-extra";
4
+ import path from "path";
5
+ import { gitCommit } from "../utils.js";
6
/**
 * Registers the `decide` command: appends an Architectural Decision Record
 * (ADR) to <project>/decisions.md and commits the change.
 *
 * @param {import("commander").Command} program - CLI program to extend.
 */
export function decideCommand(program) {
    program
        .command("decide")
        .description("Log an architectural decision (ADR)")
        .argument("<project>", "Project name")
        .argument("<title>", "Decision title")
        .argument("<context>", "Context of the decision")
        .argument("<decision>", "The decision made")
        .argument("<rationale>", "Rationale for the decision")
        .action(async (project, title, context, decision, rationale) => {
            const spinner = ora(`Logging decision for ${chalk.cyan(project)}...`).start();
            try {
                const workspaceRoot = process.cwd();
                const projectDir = path.join(workspaceRoot, "projects", project);
                const decisionsFile = path.join(projectDir, "decisions.md");
                if (!(await fs.pathExists(projectDir))) {
                    spinner.fail(chalk.red(`Project ${project} does not exist.`));
                    return;
                }
                // ISO date (YYYY-MM-DD) plus a random, zero-padded 4-digit ADR id.
                const date = new Date().toISOString().slice(0, 10);
                const adrId = `ADR-${String(Math.floor(Math.random() * 10000)).padStart(4, "0")}`;
                const adrContent = `
## [${adrId}] ${title}

#hot

- **Date**: ${date}
- **Status**: Accepted
- **Context**: ${context}
- **Decision**: ${decision}
- **Rationale**: ${rationale}
\n---\n`;
                await fs.ensureDir(path.dirname(decisionsFile));
                await fs.appendFile(decisionsFile, adrContent, "utf-8");
                await gitCommit(decisionsFile, `feat(cli): log decision ${adrId} for ${project}`);
                spinner.succeed(chalk.green(`Logged decision ${adrId} in ${project}/decisions.md`));
            }
            catch (error) {
                spinner.fail(chalk.red(`Decide failed: ${error.message}`));
            }
        });
}
@@ -0,0 +1,50 @@
1
+ import chalk from "chalk";
2
+ import ora from "ora";
3
+ import fs from "fs-extra";
4
+ import path from "path";
5
/**
 * Registers the `extract` command: inspects a project's decision/memory files
 * (in projects/ or archive/projects/) and prints a distillation guide.
 *
 * Fix: step 3 of the printed guide read "workspace tag <file" — the closing
 * angle bracket of the placeholder was missing.
 *
 * @param {import("commander").Command} program - CLI program to extend.
 */
export function extractCommand(program) {
    program
        .command("extract")
        .description("Analyze project context and help distill learnings into the Knowledge Base")
        .argument("<project>", "Project name to analyze")
        .action(async (project) => {
            const spinner = ora(`Analyzing project ${chalk.cyan(project)} for intelligence distillation...`).start();
            try {
                const workspaceRoot = process.cwd();
                const projectDir = path.join(workspaceRoot, "projects", project);
                const archiveDir = path.join(workspaceRoot, "archive", "projects", project);
                // Check in projects/ or archive/projects/
                const targetPath = (await fs.pathExists(projectDir)) ? projectDir :
                    (await fs.pathExists(archiveDir)) ? archiveDir : null;
                if (!targetPath) {
                    spinner.fail(chalk.red(`Project ${project} not found in projects/ or archive/projects/`));
                    return;
                }
                const decisionsFile = path.join(targetPath, "decisions.md");
                const memoryFile = path.join(targetPath, "memory.md");
                const insights = [];
                if (await fs.pathExists(decisionsFile)) {
                    const content = await fs.readFile(decisionsFile, "utf-8");
                    // ADR entries are headed "[ADR-NNNN]"; count them for the report.
                    const decisionCount = (content.match(/\[ADR-\d+\]/g) || []).length;
                    insights.push(`${chalk.cyan(decisionCount)} architectural decisions found in decisions.md`);
                }
                else {
                    insights.push(chalk.yellow(`Warning: No decisions.md found for ADR extraction.`));
                }
                if (await fs.pathExists(memoryFile)) {
                    insights.push(`Project memory file found for context distillation.`);
                }
                spinner.succeed(chalk.green(`Analysis of ${project} complete.`));
                console.log(chalk.bold("\n--- Extraction Guide ---"));
                insights.forEach(insight => console.log(`- ${insight}`));
                console.log(chalk.cyan("\n[Intelligence Layer] Next steps:"));
                console.log(`1. Review the ADRs in ${targetPath}/decisions.md`);
                console.log(`2. Distill reusable patterns into knowledge/domains/`);
                console.log(`3. Use 'workspace tag <file>' to mark files as #warm or #permanent.`);
                console.log(`4. Archive project if not already done using 'workspace archive ${project}'.`);
            }
            catch (error) {
                spinner.fail(chalk.red(`Extraction guide failed: ${error.message}`));
            }
        });
}
@@ -0,0 +1,77 @@
1
+ import chalk from "chalk";
2
+ import ora from "ora";
3
+ import fs from "fs-extra";
4
+ import path from "path";
5
/**
 * Registers the `health` command: audits every project for required context
 * files (memory/changelog/decisions) and flags markdown files whose #hot tag
 * has not been touched in over a week.
 *
 * @param {import("commander").Command} program - CLI program to extend.
 */
export function healthCommand(program) {
    program
        .command("health")
        .description("Perform a workspace integrity audit")
        .action(async () => {
            const spinner = ora("Auditing workspace health...").start();
            try {
                const workspaceRoot = process.cwd();
                const projectsDir = path.join(workspaceRoot, "projects");
                const issuesFound = [];
                const staleFiles = [];
                if (!(await fs.pathExists(projectsDir))) {
                    spinner.fail(chalk.red("Projects directory not found."));
                    return;
                }
                const projectNames = await fs.readdir(projectsDir);
                const SEVEN_DAYS_MS = 7 * 24 * 60 * 60 * 1000;
                const now = Date.now();
                for (const project of projectNames) {
                    const projectPath = path.join(projectsDir, project);
                    if (!(await fs.stat(projectPath)).isDirectory()) {
                        continue;
                    }
                    // Every project must carry these three context files.
                    for (const required of ["memory.md", "changelog.md", "decisions.md"]) {
                        if (!(await fs.pathExists(path.join(projectPath, required)))) {
                            issuesFound.push(`[${project}] Missing ${required}`);
                        }
                    }
                    // Check for stale #hot files
                    const entries = await fs.readdir(projectPath, { recursive: true });
                    for (const entry of entries) {
                        const entryPath = path.join(projectPath, entry);
                        const entryStats = await fs.stat(entryPath);
                        if (!entryStats.isFile() || !entryPath.endsWith(".md")) {
                            continue;
                        }
                        const content = await fs.readFile(entryPath, "utf-8");
                        if (content.includes("#hot") && now - entryStats.mtimeMs > SEVEN_DAYS_MS) {
                            staleFiles.push(`[${project}] ${entry} has stale #hot tag (> 7 days since update)`);
                        }
                    }
                }
                if (issuesFound.length === 0 && staleFiles.length === 0) {
                    spinner.succeed(chalk.green("Workspace integrity audit complete: 0 issues found."));
                }
                else {
                    spinner.warn(chalk.yellow(`Workspace audit complete: Found ${issuesFound.length} issues and ${staleFiles.length} stale tags.`));
                    if (issuesFound.length > 0) {
                        console.log(chalk.red("\nCritical Issues:"));
                        issuesFound.forEach((issue) => console.log(`- ${issue}`));
                    }
                    if (staleFiles.length > 0) {
                        console.log(chalk.yellow("\nStale Context Warnings:"));
                        staleFiles.forEach((stale) => console.log(`- ${stale}`));
                        console.log(chalk.cyan("\nTip: Run 'workspace archive <project>' or 'workspace tag <file> #warm' to update lifecycle state."));
                    }
                }
                console.log(chalk.cyan("\n[Intelligence] Audit complete. Use 'workspace extract' to distill learnings from completed projects."));
            }
            catch (error) {
                spinner.fail(chalk.red(`Health audit failed: ${error.message}`));
            }
        });
}
@@ -0,0 +1,74 @@
1
+ import chalk from "chalk";
2
+ import ora from "ora";
3
+ import fs from "fs-extra";
4
+ import path from "path";
5
+ import { fileURLToPath } from "url";
6
+ import { gitCommit } from "../utils.js";
7
+ const __dirname = path.dirname(fileURLToPath(import.meta.url));
8
/**
 * Registers the `init` command: bootstraps a full ContextOS workspace on
 * first run (when root/soul.md is absent) and/or scaffolds a new project
 * under projects/<name>.
 *
 * Fix: the project success message read "successfullly" (typo).
 *
 * @param {import("commander").Command} program - CLI program to extend.
 */
export function initCommand(program) {
    program
        .command("init")
        .description("Initialize a new ContextOS project or full workspace")
        .argument("[name]", "Project name (optional for initial workspace setup)")
        .option("-o, --org <org>", "Organization name", "personal")
        .action(async (name, options) => {
            const spinner = ora("Checking workspace status...").start();
            try {
                const workspaceRoot = process.cwd();
                const soulPath = path.join(workspaceRoot, "root", "soul.md");
                // NOTE(review): the package ships templates at its root
                // (package/templates), but this resolves one level above the
                // compiled commands/ directory — confirm the build copies the
                // templates next to the compiled sources.
                const templatesDir = path.resolve(__dirname, "..", "templates");
                // 1. Workspace Bootstrapping (Zero-Clone Support)
                if (!(await fs.pathExists(soulPath))) {
                    spinner.text = chalk.yellow("No ContextOS workspace detected. Bootstrapping new workspace...");
                    const folders = ["root", "schemas", "projects", "knowledge"];
                    for (const folder of folders) {
                        const src = path.join(templatesDir, folder);
                        if (await fs.pathExists(src)) {
                            // Seed the bucket from the packaged template when available.
                            await fs.copy(src, path.join(workspaceRoot, folder));
                        }
                        else {
                            await fs.ensureDir(path.join(workspaceRoot, folder));
                        }
                    }
                    // Copy .gitignore (shipped as "dot-gitignore" so npm keeps it in the tarball)
                    const gitignoreSrc = path.join(templatesDir, "dot-gitignore");
                    if (await fs.pathExists(gitignoreSrc)) {
                        await fs.copy(gitignoreSrc, path.join(workspaceRoot, ".gitignore"));
                    }
                    spinner.info(chalk.green("Workspace structure created."));
                    spinner.start("Initializing project...");
                }
                // If no name provided, we just did the workspace setup
                if (!name) {
                    spinner.succeed(chalk.green("ContextOS workspace initialized successfully."));
                    return;
                }
                // 2. Project Initialization
                const projectDir = path.join(workspaceRoot, "projects", name);
                if (await fs.pathExists(projectDir)) {
                    spinner.fail(chalk.red(`Project directory already exists at ${projectDir}`));
                    return;
                }
                // Create structure
                await fs.ensureDir(projectDir);
                await fs.ensureDir(path.join(projectDir, "tasks"));
                await fs.ensureDir(path.join(projectDir, "decisions"));
                // Create blank context files; each starts life tagged #hot.
                const templateMap = {
                    "CONTEXT.md": "# Project Context\n\n#hot",
                    "memory.md": "# Project Memory\n\n#hot",
                    "phases.md": "# Project Phases\n\n#hot",
                    "SOUL.md": "# Project Soul\n\n#hot",
                    "HEARTBEAT.md": "# Project Heartbeat\n\n#hot"
                };
                for (const [file, content] of Object.entries(templateMap)) {
                    await fs.writeFile(path.join(projectDir, file), content);
                }
                await gitCommit(workspaceRoot, `feat(cli): initialize project ${name}`);
                spinner.succeed(chalk.green(`Project '${name}' initialized successfully.`));
            }
            catch (error) {
                spinner.fail(chalk.red(`Init failed: ${error.message}`));
            }
        });
}
@@ -0,0 +1,33 @@
1
+ import chalk from "chalk";
2
+ import ora from "ora";
3
+ import fs from "fs-extra";
4
+ import path from "path";
5
/**
 * Registers the `prune` command: empties the workspace tmp/ directory.
 *
 * Fix: removed the unused `dailyDir` local — daily logs are never touched by
 * prune, and the dead variable implied otherwise.
 *
 * @param {import("commander").Command} program - CLI program to extend.
 */
export function pruneCommand(program) {
    program
        .command("prune")
        .description("Remove stale logs and clean up the workspace")
        .action(async () => {
            const spinner = ora("Cleaning up workspace...").start();
            try {
                const workspaceRoot = process.cwd();
                const tmpDir = path.join(workspaceRoot, "tmp");
                let prunedCount = 0;
                // 1. Clean up tmp/
                if (await fs.pathExists(tmpDir)) {
                    const files = await fs.readdir(tmpDir);
                    for (const file of files) {
                        await fs.remove(path.join(tmpDir, file));
                        prunedCount++;
                    }
                }
                // 2. Identify redundant headers or stale session files
                // (Simplified for now - we'll just check if anything is tagged with #stale)
                spinner.succeed(chalk.green(`Workspace pruned: ${prunedCount} temporary items removed.`));
                console.log(chalk.yellow(`\n[Intelligence] Pruning complete. Next step: 'workspace archive' for finished projects.`));
            }
            catch (error) {
                spinner.fail(chalk.red(`Prune failed: ${error.message}`));
            }
        });
}
@@ -0,0 +1,33 @@
1
+ import chalk from "chalk";
2
+ import ora from "ora";
3
+ import { exec } from "child_process";
4
+ import { promisify } from "util";
5
+ const execAsync = promisify(exec);
6
/**
 * Registers the `search` command: greps the workspace for a query string.
 *
 * Fix (security): the query used to be interpolated into a double-quoted
 * shell string, so input containing `"`, `$`, or backticks was interpreted
 * by the shell (command injection). The query is now single-quoted with
 * embedded single quotes escaped, so the shell hands it to grep verbatim.
 *
 * @param {import("commander").Command} program - CLI program to extend.
 */
export function searchCommand(program) {
    program
        .command("search")
        .description("Search across the workspace")
        .argument("<query>", "Search query string")
        .action(async (query) => {
            const spinner = ora(`Searching for ${chalk.cyan(query)}...`).start();
            try {
                const workspaceRoot = process.cwd();
                // Single-quote for the shell: close quote, escaped quote, reopen.
                const shellQuoted = `'${query.replace(/'/g, `'\\''`)}'`;
                // Use grep -rnI as a robust default; cap output at 20 lines.
                const command = `grep -rnIE ${shellQuoted} . | head -n 20`;
                const { stdout } = await execAsync(command, { cwd: workspaceRoot });
                if (!stdout) {
                    spinner.info(chalk.yellow("No results found."));
                    return;
                }
                spinner.succeed(chalk.green("Search results:"));
                console.log(`\n${stdout}`);
            }
            catch (error) {
                // grep exits 1 when nothing matched — not an error for us.
                if (error.code === 1) {
                    spinner.info(chalk.yellow("No results found."));
                    return;
                }
                spinner.fail(chalk.red(`Search failed: ${error.message}`));
            }
        });
}
@@ -0,0 +1,48 @@
1
+ import chalk from "chalk";
2
+ import ora from "ora";
3
+ import fs from "fs-extra";
4
+ import path from "path";
5
/**
 * Registers the `status` command: lists projects with their memory.md
 * freshness and reports whether today's daily log exists.
 *
 * @param {import("commander").Command} program - CLI program to extend.
 */
export function statusCommand(program) {
    program
        .command("status")
        .description("Show workspace health and project status")
        .action(async () => {
            const spinner = ora("Checking workspace status...").start();
            try {
                const workspaceRoot = process.cwd();
                const projectsDir = path.join(workspaceRoot, "projects");
                if (!(await fs.pathExists(projectsDir))) {
                    spinner.fail(chalk.red("No projects directory found."));
                    return;
                }
                const projects = await fs.readdir(projectsDir);
                spinner.succeed(chalk.green(`Workspace: ${chalk.bold(path.basename(workspaceRoot))}`));
                console.log(chalk.cyan(`\n📁 Projects [${projects.length}]:`));
                for (const project of projects) {
                    const projectPath = path.join(projectsDir, project);
                    if (!(await fs.stat(projectPath)).isDirectory()) {
                        continue;
                    }
                    const memoryPath = path.join(projectPath, "memory.md");
                    // Default shown when the project has no memory file at all.
                    let memoryStatus = chalk.gray("(no memory)");
                    if (await fs.pathExists(memoryPath)) {
                        const memoryStat = await fs.stat(memoryPath);
                        memoryStatus = chalk.dim(`(last modified: ${memoryStat.mtime.toDateString()})`);
                    }
                    console.log(` - ${chalk.bold(project)} ${memoryStatus}`);
                }
                const date = new Date().toISOString().slice(0, 10);
                const dailyFile = path.join(workspaceRoot, "daily", `${date}.md`);
                console.log(chalk.cyan(`\n📝 Daily Log:`));
                if (await fs.pathExists(dailyFile)) {
                    console.log(` - ${chalk.green("ACTIVE")} ${chalk.dim(`(today: ${date}.md)`)}`);
                }
                else {
                    console.log(` - ${chalk.yellow("MISSING")} ${chalk.dim("(run 'workspace today' to initialize)")}`);
                }
            }
            catch (error) {
                spinner.fail(chalk.red(`Status failed: ${error.message}`));
            }
        });
}
@@ -0,0 +1,26 @@
1
+ import chalk from "chalk";
2
+ import ora from "ora";
3
+ import fs from "fs-extra";
4
+ import path from "path";
5
/**
 * Registers the `summary` command: scaffolds a dated summary markdown file
 * under docs/summaries/.
 *
 * @param {import("commander").Command} program - CLI program to extend.
 */
export function summaryCommand(program) {
    program
        .command("summary")
        .description("Generate weekly summary")
        .argument("<type>", "Summary type (e.g., week)")
        .action(async (type) => {
            const spinner = ora(`Generating ${type} summary...`).start();
            try {
                const workspaceRoot = process.cwd();
                const summaryDir = path.join(workspaceRoot, "docs", "summaries");
                const date = new Date().toISOString().slice(0, 10);
                const summaryFile = path.join(summaryDir, `${date}-${type}.md`);
                // "week" -> "Week", used in both the heading and the success message.
                const typeTitle = type.charAt(0).toUpperCase() + type.slice(1);
                await fs.ensureDir(summaryDir);
                const summaryContent = `# ${typeTitle} Summary — ${date}\n\n## 🚀 Highlights\n- \n\n## 📦 Shipped\n- \n\n## 📝 Strategic Context\n- \n`;
                await fs.writeFile(summaryFile, summaryContent);
                spinner.succeed(chalk.green(`${typeTitle} summary generated at ${summaryFile}`));
            }
            catch (error) {
                spinner.fail(chalk.red(`Summary failed: ${error.message}`));
            }
        });
}
@@ -0,0 +1,43 @@
1
+ import chalk from "chalk";
2
+ import ora from "ora";
3
+ import fs from "fs-extra";
4
+ import path from "path";
5
/**
 * Registers the `sync` command: stamps (or refreshes) a "Last Sync" note in
 * a project's memory.md, or performs a no-op global sync when no project is
 * given.
 *
 * @param {import("commander").Command} program - CLI program to extend.
 */
export function syncCommand(program) {
    program
        .command("sync")
        .description("Sync memory, changelog, and daily logs")
        .argument("[project]", "Project name to sync context for")
        .action(async (project) => {
            const spinner = ora("Syncing workspace context...").start();
            try {
                const workspaceRoot = process.cwd();
                const date = new Date().toISOString().split("T")[0];
                // Simulating sync logic: ensuring memory.md has a "Last Sync" timestamp
                if (!project) {
                    spinner.succeed(chalk.green("Global workspace sync complete."));
                    return;
                }
                const memoryPath = path.join(workspaceRoot, "projects", project, "memory.md");
                if (!(await fs.pathExists(memoryPath))) {
                    spinner.fail(chalk.red(`Memory file not found for ${project}`));
                    return;
                }
                let content = await fs.readFile(memoryPath, "utf-8");
                const syncMark = `\n> [!NOTE]\n> Last Sync: ${date} ${new Date().toLocaleTimeString()}\n`;
                if (content.includes("Last Sync:")) {
                    // Refresh the existing stamp in place.
                    content = content.replace(/> \[!NOTE\]\n> Last Sync: .*/, syncMark.trim());
                    await fs.writeFile(memoryPath, content);
                }
                else {
                    await fs.appendFile(memoryPath, syncMark);
                }
                spinner.succeed(chalk.green(`Synced memory for ${project}`));
            }
            catch (error) {
                spinner.fail(chalk.red(`Sync failed: ${error.message}`));
            }
        });
}
@@ -0,0 +1,44 @@
1
+ import chalk from "chalk";
2
+ import ora from "ora";
3
+ import fs from "fs-extra";
4
+ import path from "path";
5
+ import { gitCommit } from "../utils.js";
6
/**
 * Registers the `tag` command: strips any existing lifecycle tag from a
 * markdown file, inserts the new tag (after the H1 when present), and
 * commits the change.
 *
 * @param {import("commander").Command} program - CLI program to extend.
 */
export function tagCommand(program) {
    program
        .command("tag")
        .description("Update the lifecycle tag of a context file")
        .argument("<file>", "Path to the file to tag")
        .argument("<tag>", "The new tag (e.g., #hot, #warm, #cold, #permanent)")
        .action(async (file, tag) => {
            const spinner = ora(`Updating tag for ${chalk.cyan(file)} to ${chalk.yellow(tag)}...`).start();
            try {
                const workspaceRoot = process.cwd();
                const filePath = path.join(workspaceRoot, file);
                if (!(await fs.pathExists(filePath))) {
                    spinner.fail(chalk.red(`File ${file} not found.`));
                    return;
                }
                let content = await fs.readFile(filePath, "utf-8");
                // Remove existing lifecycle tags (and their trailing whitespace) first.
                for (const lifecycleTag of ["#hot", "#warm", "#cold", "#permanent"]) {
                    content = content.replace(new RegExp(`${lifecycleTag}(\\s*|\\n*)`, 'g'), '');
                }
                // Add new tag at the top (after h1 if exists, else very top)
                if (content.startsWith("# ")) {
                    const lines = content.split("\n");
                    lines.splice(1, 0, `\n${tag}\n`);
                    content = lines.join("\n");
                }
                else {
                    content = `${tag}\n\n${content}`;
                }
                await fs.writeFile(filePath, content, "utf-8");
                await gitCommit(file, `refactor(cli): update lifecycle tag of ${file} to ${tag}`);
                spinner.succeed(chalk.green(`File ${file} tagged with ${tag} successfully.`));
            }
            catch (error) {
                spinner.fail(chalk.red(`Tagging failed: ${error.message}`));
            }
        });
}
@@ -0,0 +1,40 @@
1
+ import chalk from "chalk";
2
+ import ora from "ora";
3
+ import fs from "fs-extra";
4
+ import path from "path";
5
+ import { gitCommit } from "../utils.js";
6
/**
 * Registers the `today` (alias `daily`) command: creates today's daily log
 * from config/templates/daily-log.md when present, else a built-in skeleton,
 * and commits it.
 *
 * Fix: `String.prototype.replace` with a string pattern substitutes only the
 * first occurrence, so a template using {{date}} more than once was only
 * partially filled in. `replaceAll` now substitutes every occurrence.
 *
 * @param {import("commander").Command} program - CLI program to extend.
 */
export function todayCommand(program) {
    program
        .command("today")
        .alias("daily")
        .description("Open or create today's daily log")
        .action(async () => {
            const spinner = ora("Setting up today's log...").start();
            try {
                const workspaceRoot = process.cwd();
                const date = new Date().toISOString().split("T")[0];
                const dailyDir = path.join(workspaceRoot, "daily");
                const dailyFile = path.join(dailyDir, `${date}.md`);
                await fs.ensureDir(dailyDir);
                if (!(await fs.pathExists(dailyFile))) {
                    const templatesDir = path.join(workspaceRoot, "config", "templates");
                    const logTemplatePath = path.join(templatesDir, "daily-log.md");
                    // Built-in fallback used when the workspace ships no template.
                    let templateContent = `# Daily Log — ${date}\n\n#hot\n\n## 🎯 Focus\n- \n\n## ✅ Completed\n- \n\n## 📝 Notes\n- \n`;
                    if (await fs.pathExists(logTemplatePath)) {
                        templateContent = await fs.readFile(logTemplatePath, "utf-8");
                        templateContent = templateContent.replaceAll("{{date}}", date);
                    }
                    await fs.writeFile(dailyFile, templateContent);
                    await gitCommit(dailyFile, `feat(cli): create daily log for ${date}`);
                    spinner.succeed(chalk.green(`Created today's log at ${dailyFile}`));
                }
                else {
                    spinner.info(chalk.blue(`Today's log already exists at ${dailyFile}`));
                }
                console.log(chalk.cyan(`\nRun 'workspace sync' after your session to finalize context.`));
            }
            catch (error) {
                spinner.fail(chalk.red(`Today failed: ${error.message}`));
            }
        });
}
@@ -0,0 +1,136 @@
1
+ import chalk from "chalk";
2
+ import ora from "ora";
3
+ import fs from "fs-extra";
4
+ import path from "path";
5
+ import AjvModule from "ajv";
6
+ import addFormatsModule from "ajv-formats";
7
+ import fm from "front-matter";
8
// ajv and ajv-formats are CommonJS packages; depending on the ESM interop
// path the usable export may sit on `.default`, so unwrap defensively.
const Ajv = AjvModule.default || AjvModule;
const addFormats = addFormatsModule.default || addFormatsModule;
// Single shared validator factory for this module. `allErrors: true` makes
// Ajv report every violation in a file instead of stopping at the first.
const ajv = new Ajv({ allErrors: true });
addFormats(ajv);
12
/**
 * Register the `validate` command: checks each project's markdown files
 * (SOUL.md, CONTEXT.md, memory.md, decisions.md) against the JSON schemas in
 * <workspaceRoot>/schemas. Exits 1 when any issue is found.
 *
 * @param {import("commander").Command} program - root commander instance.
 */
export function validateCommand(program) {
  program
    .command("validate")
    .description("Validate workspace files against JSON schemas")
    .action(async () => {
      const spinner = ora("Validating workspace integrity...").start();
      try {
        // Walk upward from cwd looking for the workspace marker root/soul.md.
        // FIX: stop when path.dirname() no longer changes, instead of
        // comparing against the POSIX literal "/" — a Windows drive root
        // ("C:\\") never equals "/" and the old loop would never terminate.
        const findWorkspaceRoot = () => {
          let current = process.cwd();
          while (current !== path.dirname(current)) {
            if (fs.existsSync(path.join(current, "root", "soul.md"))) {
              return current;
            }
            current = path.dirname(current);
          }
          // No marker found anywhere up the tree: fall back to cwd.
          return process.cwd();
        };
        const workspaceRoot = findWorkspaceRoot();
        const schemasDir = path.join(workspaceRoot, "schemas");
        const projectsDir = path.join(workspaceRoot, "projects");
        const starterDir = path.join(workspaceRoot, "workspace-starter");
        if (!(await fs.pathExists(schemasDir))) {
          spinner.fail(chalk.red(`Schemas directory not found at ${schemasDir}`));
          process.exit(1);
        }
        // Files validated in each project directory. `required` entries are
        // counted as issues when absent; the rest are checked only if present.
        const validFiles = [
          { name: "SOUL.md", schema: "soul.schema.json", required: true },
          { name: "CONTEXT.md", schema: "context.schema.json", required: true },
          { name: "memory.md", schema: "memory.schema.json" },
          { name: "decisions.md", schema: "decision.schema.json" }
        ];
        let totalIssues = 0;
        // Compile each schema at most once, however many projects use it.
        const compiledValidators = new Map();
        const getValidator = async (schemaFile) => {
          if (!compiledValidators.has(schemaFile)) {
            const schema = await fs.readJson(path.join(schemasDir, schemaFile));
            compiledValidators.set(schemaFile, ajv.compile(schema));
          }
          return compiledValidators.get(schemaFile);
        };
        // Find all project-like directories (projects/* and workspace-starter)
        const projectPaths = [];
        if (await fs.pathExists(projectsDir)) {
          const projects = await fs.readdir(projectsDir);
          for (const p of projects) {
            const fullPath = path.join(projectsDir, p);
            if ((await fs.stat(fullPath)).isDirectory()) {
              projectPaths.push(fullPath);
            }
          }
        }
        if (await fs.pathExists(starterDir)) {
          projectPaths.push(starterDir);
        }
        for (const projectPath of projectPaths) {
          const projectName = path.basename(projectPath);
          const filesInDir = await fs.readdir(projectPath);
          for (const config of validFiles) {
            // Case-insensitive search
            const fileName = filesInDir.find(f => f.toLowerCase() === config.name.toLowerCase());
            if (!fileName) {
              if (config.required) {
                totalIssues++;
                console.log(chalk.red(`\n❌ Missing required file in ${projectName}: ${config.name}`));
              }
              continue;
            }
            // FIX: previously a missing schema file made fs.readJson throw,
            // aborting the entire run with a generic error. Report it as an
            // issue for this file and keep validating everything else.
            if (!(await fs.pathExists(path.join(schemasDir, config.schema)))) {
              totalIssues++;
              console.log(chalk.red(`\n❌ Missing schema file: ${config.schema} (needed by ${projectName}/${fileName})`));
              continue;
            }
            const filePath = path.join(projectPath, fileName);
            const validate = await getValidator(config.schema);
            const content = await fs.readFile(filePath, "utf-8");
            const data = extractMetadata(content);
            const valid = validate(data);
            if (!valid) {
              totalIssues++;
              console.log(chalk.red(`\n❌ Schema error in ${projectName}/${fileName}:`));
              validate.errors?.forEach((err) => {
                console.log(chalk.yellow(`  - ${err.instancePath || 'root'} ${err.message}`));
              });
            }
          }
        }
        if (totalIssues === 0) {
          spinner.succeed(chalk.green("Workspace validation successful! All files conform to schema."));
        }
        else {
          spinner.fail(chalk.red(`Workspace validation failed with ${totalIssues} issues.`));
          process.exit(1);
        }
      }
      catch (error) {
        spinner.fail(chalk.red(`Validation error: ${error.message}`));
        process.exit(1);
      }
    });
}
102
/**
 * Robustly extract metadata from a markdown document.
 * YAML frontmatter (when the document opens with a "---" fence) takes
 * priority; "## " sections supplement any fields frontmatter left empty.
 *
 * @param {string} content - raw markdown text.
 * @returns {Object} map of frontmatter keys / section titles to values.
 */
function extractMetadata(content) {
  // Section titles whose bodies are parsed as bullet lists (string arrays);
  // every other section maps to its trimmed body text.
  const LIST_SECTIONS = ["Core Principles", "Behavioral Rules", "Goals", "Capabilities", "Constraints", "Tags", "Active Tasks", "Backlog"];
  let data = {};
  // Pass 1: frontmatter attributes.
  if (content.trim().startsWith("---")) {
    try {
      // front-matter is CJS; unwrap `.default` when the loader adds it.
      const parse = fm.default || fm;
      data = parse(content).attributes || {};
    }
    catch (e) {
      console.error(chalk.yellow(` ! Frontmatter parse failed, falling back to sections.`));
    }
  }
  // Pass 2: fill in fields that are still missing (or are empty arrays)
  // from the document's "## " sections.
  for (const section of content.split(/^## /m).slice(1)) {
    const [head, ...rest] = section.split("\n");
    const title = head.trim();
    const body = rest.join("\n").trim();
    const missing = !data[title] || (Array.isArray(data[title]) && data[title].length === 0);
    if (!missing) continue;
    data[title] = LIST_SECTIONS.includes(title)
      ? body.split("\n").map(l => l.replace(/^[-*]\s*/, "").trim()).filter(l => l.length > 0)
      : body;
  }
  return data;
}
@@ -0,0 +1,40 @@
1
+ #!/usr/bin/env node
2
+ import { Command } from "commander";
3
+ import { initCommand } from "./commands/init.js";
4
+ import { todayCommand } from "./commands/today.js";
5
+ import { statusCommand } from "./commands/status.js";
6
+ import { decideCommand } from "./commands/decide.js";
7
+ import { syncCommand } from "./commands/sync.js";
8
+ import { summaryCommand } from "./commands/summary.js";
9
+ import { contextCommand } from "./commands/context.js";
10
+ import { searchCommand } from "./commands/search.js";
11
+ import { archiveCommand } from "./commands/archive.js";
12
+ import { pruneCommand } from "./commands/prune.js";
13
+ import { healthCommand } from "./commands/health.js";
14
+ import { extractCommand } from "./commands/extract.js";
15
+ import { tagCommand } from "./commands/tag.js";
16
+ import { validateCommand } from "./commands/validate.js";
17
const program = new Command();

program
  .name("workspace")
  .description("ContextOS Developer Interface Layer")
  .version("1.0.0");

// Register every sub-command against the shared program instance; the array
// order determines the order commands appear in `--help` output.
const registrars = [
  initCommand,
  todayCommand,
  statusCommand,
  decideCommand,
  syncCommand,
  summaryCommand,
  contextCommand,
  searchCommand,
  archiveCommand,
  pruneCommand,
  healthCommand,
  extractCommand,
  tagCommand,
  validateCommand,
];
for (const register of registrars) {
  register(program);
}

program.parse(process.argv);

// Invoked with no arguments at all: print usage instead of exiting silently.
if (!process.argv.slice(2).length) {
  program.outputHelp();
}
@@ -0,0 +1,28 @@
1
+ import assert from "node:assert";
2
+ import { execSync } from "node:child_process";
3
+ import path from "node:path";
4
+ import { fileURLToPath } from 'node:url';
5
+ describe("CLI Experience Layer (Smoke Tests)", () => {
6
+ const __filename = fileURLToPath(import.meta.url);
7
+ const __dirname = path.dirname(__filename);
8
+ const cliPath = path.resolve(__dirname, "..", "index.js");
9
+ it("should display help information", () => {
10
+ const output = execSync(`node ${cliPath} --help`).toString();
11
+ assert.ok(output.includes("ContextOS Developer Interface Layer"));
12
+ });
13
+ it("should have all core commands registered", () => {
14
+ const output = execSync(`node ${cliPath} --help`).toString();
15
+ const expectedCommands = [
16
+ "init", "today", "status", "decide", "sync",
17
+ "summary", "context", "search", "archive",
18
+ "prune", "health", "extract", "tag", "validate"
19
+ ];
20
+ expectedCommands.forEach(cmd => {
21
+ assert.ok(output.includes(cmd));
22
+ });
23
+ });
24
+ it("should report version 1.0.0", () => {
25
+ const output = execSync(`node ${cliPath} --version`).toString();
26
+ assert.strictEqual(output.trim(), "1.0.0");
27
+ });
28
+ });
@@ -0,0 +1 @@
1
// Thin re-export so command modules (e.g. ../commands/*.js) can import
// gitCommit from a local "../utils.js" path instead of depending on
// @context-os/core directly.
export { gitCommit } from "@context-os/core";
package/package.json ADDED
@@ -0,0 +1,54 @@
1
+ {
2
+ "name": "@context-os/cli",
3
+ "version": "1.0.0",
4
+ "description": "ContextOS Developer Interface Layer",
5
+ "keywords": [
6
+ "context-os",
7
+ "mcp",
8
+ "ai-agents",
9
+ "workspace-intelligence"
10
+ ],
11
+ "homepage": "https://github.com/sairam0424/ContextOS",
12
+ "type": "module",
13
+ "publishConfig": {
14
+ "access": "public"
15
+ },
16
+ "main": "dist/index.js",
17
+ "bin": {
18
+ "context-os": "./dist/index.js"
19
+ },
20
+ "files": [
21
+ "dist",
22
+ "templates",
23
+ "README.md"
24
+ ],
25
+ "engines": {
26
+ "node": ">=18"
27
+ },
28
+ "scripts": {
29
+ "build": "tsc && node ../scripts/sync-templates.js",
30
+ "watch": "tsc -w",
31
+ "start": "node dist/index.js",
32
+ "test": "npm run build && mocha dist/tests/**/*.test.js"
33
+ },
34
+ "dependencies": {
35
+ "ajv": "^8.18.0",
36
+ "chalk": "^5.3.0",
37
+ "commander": "^12.1.0",
38
+ "conf": "^13.1.0",
39
+ "execa": "^9.5.2",
40
+ "front-matter": "^4.0.2",
41
+ "fs-extra": "^11.2.0",
42
+ "ora": "^8.0.1",
43
+ "@context-os/core": "1.0.0"
44
+ },
45
+ "devDependencies": {
46
+ "@types/chai": "^5.2.3",
47
+ "@types/fs-extra": "^11.0.4",
48
+ "@types/mocha": "^10.0.10",
49
+ "@types/node": "^22.13.10",
50
+ "chai": "^5.2.0",
51
+ "mocha": "^11.1.0",
52
+ "typescript": "^5.8.2"
53
+ }
54
+ }
@@ -0,0 +1,12 @@
1
+ node_modules/
2
+ .env
3
+ .DS_Store
4
+ *.log
5
+ dist/
6
+ build/
7
+ coverage/
8
+ .vscode/
9
+ .idea/
10
+
11
+ prompts/
12
+
@@ -0,0 +1,19 @@
1
+ # anti-patterns.md
2
+
3
+ ## Engineering Anti-Patterns
4
+
5
+ - **"Magic" Fixes**: Solving a problem without documenting the mechanism or updating common agent knowledge.
6
+ - **Flat Hierarchies**: Dumping configuration or data into the root without indexing it in the relevant scope ring.
7
+ - **Permission Overstretching**: Attempting to execute commands in restricted directories without verifying access.
8
+
9
+ ## Decision Anti-Patterns
10
+
11
+ - **Un-indexed ADRs**: Making architectural changes without updating the global `decisions.md`.
12
+ - **Placeholder Dependency**: Relying on "TBD" or generic statements for core configuration.
13
+ - **Context Siloing**: Keeping decisions or logic in memory rather than a persistent file.
14
+
15
+ ## Agent Interaction Anti-Patterns
16
+
17
+ - **Sycophancy**: Wasting context with polite but useless phrases.
18
+ - **Ambiguous Drafting**: Creating instructions that require more than one clarify-response cycle.
19
+ - **Instruction Skipping**: Skipping sections of a structured prompt or protocol.
@@ -0,0 +1,15 @@
1
+ # changelog.md
2
+
3
+ ## [2026-03-30] — v0.1.0
4
+
5
+ ### Added
6
+ - Initial **Identity Layer** in the `root/` directory.
7
+ - `soul.md`: Defined core identity and ranked values hierarchy.
8
+ - `personality.md`: Operationalized response and interaction preferences.
9
+ - `preferences.md`: Established standard tech stack and tool defaults.
10
+ - `skills.md`: Created an agent-visible capability inventory.
11
+ - `anti-patterns.md`: Documented negative constraints and architectural debt.
12
+ - `decisions.md`: Reformatted to formal ADR-style records (DEC-001 to DEC-003).
13
+
14
+ ### Notes
15
+ - This version marks the transition from structural scaffolding (Day 1) to a functional, value-driven identity (Day 2).
@@ -0,0 +1,37 @@
1
+ # decisions.md
2
+
3
+ ---
4
+
5
+ **ID**: DEC-001
6
+ **Date**: 2026-03-30
7
+ **Status**: ACCEPTED
8
+ **Title**: Initialize Workspace as Localized Context OS
9
+
10
+ **Context**: Need a structured, agent-readable workspace context system.
11
**Decision**: Create a file-first architecture with localized roots (the local workspace root directory, resolved per machine).
12
+ **Rationale**: System permissions prevent global directory creation in `~/`.
13
+ **Consequences**: Portability requires path normalization to the current workspace root.
14
+
15
+ ---
16
+
17
+ **ID**: DEC-002
18
+ **Date**: 2026-03-30
19
+ **Status**: ACCEPTED
20
+ **Title**: Implement Double-Hook Learning Loop
21
+
22
+ **Context**: Agents repeating mistakes across sessions.
23
+ **Decision**: Enforce a mandatory `AGENTS_LEARNING.md` read/write protocol.
24
+ **Rationale**: Continuous improvement (Reflexive Learning) is a core value.
25
+ **Consequences**: Increased context usage at the start of every session.
26
+
27
+ ---
28
+
29
+ **ID**: DEC-003
30
+ **Date**: 2026-03-30
31
+ **Status**: ACCEPTED
32
+ **Title**: Establish Ranked Identity Layer (Day 2)
33
+
34
+ **Context**: Need predictable agent behavior and strategic alignment.
35
+ **Decision**: Build a multi-file "Identity Layer" in `root/` with a ranked values hierarchy.
36
+ **Rationale**: Values-based conflict resolution provides a consistent decision model for agents.
37
+ **Consequences**: High-fidelity operational baseline established for all future tasks.
@@ -0,0 +1,32 @@
1
+ # personality.md
2
+
3
+ ## Response Preferences
4
+
5
+ - **Length**: Medium; concise, but never at the expense of necessary context.
6
+ - **Format**: Markdown with high reliance on hierarchical structures.
7
+ - **Depth**: Technical and deep; prioritize the "Why" and "Mechanism" over just the "Surface Result."
8
+
9
+ ## Tone Preferences
10
+
11
+ - **Voice**: Professional Agentic Architect.
12
+ - **Humor**: None preferred; focus on precision.
13
+ - **Technical level**: Expert/Architect.
14
+
15
+ ## Working Patterns
16
+
17
+ - **Focus style**: Comprehensive execution; build the whole feature, then fix the details.
18
+ - **Context switching**: Minimize switching; complete one scope ring before moving to another.
19
+ - **Review preference**: Detailed analysis of changes before and after implementation.
20
+
21
+ ## Agent Collaboration Rules
22
+
23
+ - **When stuck**: Analyze `AGENTS_LEARNING.md` for similar patterns. If no resolution: Ask for architectural intent.
24
+ - **Disagreements**: Prioritize `soul.md` values hierarchy.
25
+ - **Updates frequency**: After every completed task.
26
+
27
+ ## Hard Rules
28
+
29
+ - **No sycophancy**: Do not apologize for system errors; provide a technical root cause and a fix.
30
+ - **No filler language**: Eliminate "Sure," "I can," and "I will." Direct action is expected.
31
+ - **Be direct and useful**: If a task is inefficient, suggest a better way before starting.
32
+ - **The Double-Hook**: Never ignore any past agentic learning.
@@ -0,0 +1,25 @@
1
+ # preferences.md
2
+
3
+ ## Tech Stack Preferences
4
+
5
+ - **Frontend**: Not applicable at this stage; prioritizing Markdown for structure.
6
+ - **Backend**: Python for agents; Shell for system-level operations.
7
- **Infra**: Workspace root is the local repository root, resolved per machine; never a hardcoded user home path.
8
+
9
+ ## Package Managers
10
+
11
+ - **JS**: `npm` for standard tooling; `npx` for temporary operations.
12
+ - **Python**: `pip` with `pyproject.toml` or `requirements.txt` for dependencies.
13
+ - **Others**: Git for all version control.
14
+
15
+ ## Coding Preferences
16
+
17
+ - **Naming**: `kebab-case` for file names; `snake_case` for Python keys; `PascalCase` for classes.
18
+ - **Structure**: Modules must be self-contained; avoid global shared variables without explicit indexing.
19
+ - **Patterns**: First Principles—Solve the problem at the lowest level of the dependency chain first.
20
+
21
+ ## Forbidden / Avoided
22
+
23
+ - **Placeholder "TBD"**: Forbidden in finalized documents without a pending task reference.
24
+ - **Hardcoded Home Paths**: Use absolute paths tied to the localized workspace root.
25
+ - **Sycophancy**: Forbidden from agent communication.
@@ -0,0 +1,18 @@
1
+ # skills.md
2
+
3
+ ## Core Skills
4
+
5
+ - **Strategic Systems Architect**: Specialist level; ability to design complex, multi-agent context systems.
6
+ - **Agentic Infrastructure Designer**: Expert level; building the foundational layers of the Context OS.
7
+ - **Cognitive Model Designer**: Specialist level; translating human values into agent-readable structures.
8
+
9
+ ## Advanced Areas
10
+
11
+ - **Markdown-first Documentation**: High depth; creating structured, indexed documentation.
12
+ - **Git version control**: Standard; branch management for concurrent operational tasks.
13
+ - **Context Routing**: Advanced; implementing complex context loading and inheritance rules.
14
+
15
+ ## Learning Goals
16
+
17
+ - **Recursive Intelligence Improvement**: Increasing the fidelity and precision of the "Double-Hook" learning loop.
18
+ - **Cross-Agent Handoff Optimization**: Reducing friction during multi-agent session transitions.
@@ -0,0 +1,44 @@
1
+ # soul.md
2
+
3
+ _Last updated: 2026-03-30 | Version: 0.1.0_
4
+
5
+ ## Identity
6
+ The user is a **Strategic Systems Architect** focused on building high-leverage agentic ecosystems. This workspace is a laboratory for experimenting with "Context OS" principles and operationalizing AI-driven workflows.
7
+
8
+ ## Core Values (Ranked)
9
+ 1. **Agentic Autonomy**: Empower systems to act independently with high fidelity.
10
+ 2. **Structural Clarity**: Every piece of data must have a clear, indexed location.
11
+ 3. **Compound Knowledge**: Learnings must be captured and recycled to prevent regression.
12
+
13
+ ### Conflict Resolution
14
+ When **Agentic Autonomy** conflicts with **Structural Clarity**:
15
+ → **Structural Clarity** wins. An agent must not act if its actions compromise the integrity of the workspace.
16
+
17
+ ## Decision Framework
18
+ - **Primary method**: Leverage—Prioritize solutions that provide the greatest gain for the least recurring effort.
19
+ - **Secondary filter**: Reversibility—Adopt experimental patterns only if they can be easily rolled back or refactored.
20
+ - **Bias**: Quality over Speed—The foundational layer must be pixel-perfect before scaling.
21
+
22
+ ## Working Philosophy
23
+ - **Optimize for**: Predictability and long-term maintenance.
24
+ - **Avoid**: One-off "magic" fixes that aren't documented as patterns.
25
+ - **Default approach**: File-first configuration.
26
+
27
+ ## Communication Style
28
+ - **Tone**: Precise, technical, and objective.
29
+ - **Structure**: Bulleted, hierarchical, and concise.
30
+ - **Feedback style**: Direct and specific; focus on the "Why."
31
+ - **Pushback rule**: If a request violates a core value, an agent **must** push back and offer an alternative aligned with the system's mission.
32
+
33
+ ## Current Focus
34
+ - **Primary mission**: Building the Identity Layer of the Context OS.
35
+ - **Constraints**: System permissions restriction (must use localized roots).
36
+
37
+ ## Agent Instructions
38
+ - **Always**: Check `AGENTS_LEARNING.md` before execution.
39
+ - **Never**: Create files without a corresponding index update if applicable.
40
+ - **When unsure**: Ask for architectural confirmation before creating new root-level folders.
41
+
42
+ ## Anti-Patterns
43
+ - **The "TBD" Trap**: Creating placeholders without a clear path to resolution.
44
+ - **Context Siloing**: Keeping decisions in conversation memory rather than persistent files.
@@ -0,0 +1,25 @@
1
+ {
2
+ "$schema": "http://json-schema.org/draft-07/schema#",
3
+ "title": "ContextOS Context Schema",
4
+ "description": "Schema for project CONTEXT.md files.",
5
+ "type": "object",
6
+ "required": ["Overview", "Goals"],
7
+ "properties": {
8
+ "Overview": { "type": "string" },
9
+ "Goals": {
10
+ "type": "array",
11
+ "items": { "type": "string" },
12
+ "minItems": 1
13
+ },
14
+ "Stack": {
15
+ "type": "array",
16
+ "items": { "type": "string" }
17
+ },
18
+ "Metadata": {
19
+ "type": "object",
20
+ "properties": {
21
+ "Tags": { "type": "array", "items": { "type": "string" } }
22
+ }
23
+ }
24
+ }
25
+ }
@@ -0,0 +1,24 @@
1
+ {
2
+ "$schema": "http://json-schema.org/draft-07/schema#",
3
+ "title": "ContextOS Decision Schema",
4
+ "description": "Schema for project decisions.md files.",
5
+ "type": "object",
6
+ "properties": {
7
+ "Decisions": {
8
+ "type": "array",
9
+ "items": {
10
+ "type": "object",
11
+ "required": ["Id", "Title", "Date", "Status", "Context", "Decision", "Rationale"],
12
+ "properties": {
13
+ "Id": { "type": "string", "pattern": "^ADR-\\d{4}$" },
14
+ "Title": { "type": "string" },
15
+ "Date": { "type": "string", "format": "date" },
16
+ "Status": { "type": "string", "enum": ["Proposed", "Accepted", "Superseded", "Deprecated"] },
17
+ "Context": { "type": "string" },
18
+ "Decision": { "type": "string" },
19
+ "Rationale": { "type": "string" }
20
+ }
21
+ }
22
+ }
23
+ }
24
+ }
@@ -0,0 +1,24 @@
1
+ {
2
+ "$schema": "http://json-schema.org/draft-07/schema#",
3
+ "title": "ContextOS Memory Schema",
4
+ "description": "Schema for project memory.md files.",
5
+ "type": "object",
6
+ "required": ["Overview"],
7
+ "properties": {
8
+ "Overview": { "type": "string" },
9
+ "Learnings": {
10
+ "type": "array",
11
+ "items": { "type": "string" }
12
+ },
13
+ "Patterns": {
14
+ "type": "array",
15
+ "items": { "type": "string" }
16
+ },
17
+ "Metadata": {
18
+ "type": "object",
19
+ "properties": {
20
+ "Tags": { "type": "array", "items": { "type": "string" } }
21
+ }
22
+ }
23
+ }
24
+ }
@@ -0,0 +1,27 @@
1
+ {
2
+ "$schema": "http://json-schema.org/draft-07/schema#",
3
+ "title": "ContextOS Soul Schema",
4
+ "description": "Schema for project SOUL.md files defining identity and values.",
5
+ "type": "object",
6
+ "required": ["Identity", "Core Principles", "Behavioral Rules"],
7
+ "properties": {
8
+ "Identity": {
9
+ "type": "string",
10
+ "description": "High-level description of the project's purpose."
11
+ },
12
+ "Core Principles": {
13
+ "type": "array",
14
+ "items": { "type": "string" },
15
+ "minItems": 1
16
+ },
17
+ "Behavioral Rules": {
18
+ "type": "array",
19
+ "items": { "type": "string" },
20
+ "minItems": 1
21
+ },
22
+ "Constraints": {
23
+ "type": "array",
24
+ "items": { "type": "string" }
25
+ }
26
+ }
27
+ }