@neethan/joa 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +209 -0
- package/dist/cli/main.js +702 -0
- package/dist/cli/output.js +22 -0
- package/dist/core/bootstrap.js +39 -0
- package/dist/core/config.js +131 -0
- package/dist/core/context.js +1 -0
- package/dist/core/db.js +254 -0
- package/dist/core/entry.js +84 -0
- package/dist/core/errors.js +24 -0
- package/dist/core/formatters.js +54 -0
- package/dist/core/ids.js +26 -0
- package/dist/core/import.js +34 -0
- package/dist/core/index.js +19 -0
- package/dist/core/journal.js +65 -0
- package/dist/core/log.js +49 -0
- package/dist/core/query.js +114 -0
- package/dist/core/status.js +30 -0
- package/dist/core/sync.js +94 -0
- package/dist/core/time.js +50 -0
- package/dist/mcp/server.js +149 -0
- package/package.json +59 -0
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
// Error classes — re-exported so callers can catch by concrete type.
export {
    JoaError,
    ValidationError,
    InvalidThreadId,
    DatabaseError,
    JournalWriteError,
    ConfigError,
} from "./errors.js";

// Entry serialization.
export { serializeEntry } from "./entry.js";

// Bulk import of entries.
export { importEntries } from "./import.js";

// Core operations: write, read, health.
export { log } from "./log.js";
export { query } from "./query.js";
export { status } from "./status.js";

// Storage lifecycle: open the SQLite index and keep it in sync with JSONL journals.
export { openDatabase } from "./db.js";
export { checkAndSyncIfStale, rebuildIndex } from "./sync.js";

// Configuration loading and path resolution.
export {
    loadConfig,
    defaultConfig,
    getDevice,
    resolveDbPath,
    resolveJournalsPath,
} from "./config.js";

// ID generators (needed by MCP session management in 1B).
export { sessionId, entryId, threadId } from "./ids.js";

// Process bootstrap helpers.
export { bootstrap, validateAgentName } from "./bootstrap.js";
|
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
import { appendFile, mkdir, readFile, readdir } from "node:fs/promises";
|
|
2
|
+
import { join } from "node:path";
|
|
3
|
+
import { serializeEntry } from "./entry.js";
|
|
4
|
+
import { JournalWriteError } from "./errors.js";
|
|
5
|
+
import { todayDate } from "./time.js";
|
|
6
|
+
/**
 * Appends a single entry to the date-based JSONL file.
 * Uses O_APPEND semantics — atomic for entries < ~4KB on local POSIX filesystems.
 * Creates the journals directory if it does not exist.
 *
 * @param entry - the entry object to persist (serialized via serializeEntry)
 * @param journalsDir - directory holding the YYYY-MM-DD.jsonl files
 * @throws {JournalWriteError} when any filesystem step fails (original error in `cause`)
 */
export async function appendEntry(entry, journalsDir) {
    try {
        await mkdir(journalsDir, { recursive: true });
        const target = join(journalsDir, `${todayDate()}.jsonl`);
        const record = JSON.stringify(serializeEntry(entry));
        await appendFile(target, `${record}\n`, "utf8");
    }
    catch (cause) {
        throw new JournalWriteError("Failed to append entry to journal", { cause });
    }
}
|
|
23
|
+
/**
 * Lists all YYYY-MM-DD.jsonl files in journalsDir, sorted by date ascending.
 * Returns empty array if directory does not exist.
 *
 * @param journalsDir - directory to scan
 * @returns absolute paths of matching journal files, oldest first
 */
export async function listJournalFiles(journalsDir) {
    let names;
    try {
        names = await readdir(journalsDir);
    }
    catch {
        // Missing/unreadable directory simply means no journals yet.
        return [];
    }
    const datePattern = /^\d{4}-\d{2}-\d{2}\.jsonl$/;
    const matched = names.filter((name) => datePattern.test(name));
    // Lexicographic sort equals chronological sort for zero-padded dates.
    matched.sort();
    return matched.map((name) => join(journalsDir, name));
}
|
|
39
|
+
/**
 * Reads all valid entry rows from a single JSONL file.
 * Skips malformed lines (logs a warning to stderr). Never throws on parse errors.
 *
 * @param filePath - path to a .jsonl journal file
 * @returns parsed rows that have at least string `id` and `summary` fields
 */
export async function readJournalFile(filePath) {
    const raw = await readFile(filePath, "utf8");
    const result = [];
    for (const line of raw.split("\n")) {
        // Blank lines are harmless padding, not errors.
        if (line.trim().length === 0) {
            continue;
        }
        let parsed;
        try {
            parsed = JSON.parse(line);
        }
        catch {
            console.warn(`Skipping malformed JSONL line in ${filePath}: ${line.slice(0, 80)}`);
            continue;
        }
        const hasRequiredFields = typeof parsed === "object" &&
            parsed !== null &&
            typeof parsed.id === "string" &&
            typeof parsed.summary === "string";
        if (!hasRequiredFields) {
            console.warn(`Skipping invalid entry in ${filePath}: missing required fields: ${line.slice(0, 80)}`);
            continue;
        }
        result.push(parsed);
    }
    return result;
}
|
package/dist/core/log.js
ADDED
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
import { normalizeCategory, validateEntryInput } from "./entry.js";
|
|
2
|
+
import { entryId, threadId as generateThreadId } from "./ids.js";
|
|
3
|
+
import { appendEntry } from "./journal.js";
|
|
4
|
+
import { nowUtc } from "./time.js";
|
|
5
|
+
/**
 * Resolves the caller-supplied thread_id into its stored form:
 * absent → null, the sentinel "new" → a freshly generated id,
 * anything else is passed through (already validated by validateEntryInput).
 */
function resolveThreadId(tid) {
    if (tid == null) {
        return null;
    }
    return tid === "new" ? generateThreadId() : tid;
}
|
|
12
|
+
/**
 * Write a journal entry. JSONL first (source of truth), then SQLite (derived index).
 *
 * @param input - caller-supplied entry fields (category, summary, optional thread_id,
 *                detail, resources, tags, annotations)
 * @param ctx - logging context carrying sessionId/agent/device/defaultTags,
 *              the journals directory, and the db index handle
 * @returns { entry_id, thread_id, status, warning? } — `warning` is set to
 *          "index_sync_failed" when the SQLite write failed after a durable JSONL append
 */
export async function log(input, ctx) {
    // Validation happens before any I/O so bad input never touches disk.
    validateEntryInput(input);
    const entry = {
        id: entryId(),
        timestamp: nowUtc(),
        category: normalizeCategory(input.category),
        summary: input.summary.trim(),
        thread_id: resolveThreadId(input.thread_id),
        session_id: ctx.sessionId,
        agent: ctx.agent,
        device: ctx.device,
        resources: input.resources ?? [],
        // Caller tags and context defaults, deduplicated.
        tags: [...new Set([...(input.tags ?? []), ...ctx.defaultTags])],
        detail: input.detail ?? {},
        annotations: input.annotations ?? {},
    };
    // The JSONL append is the durability point; failures here propagate to the caller.
    await appendEntry(entry, ctx.journalsDir);
    const ok = { entry_id: entry.id, thread_id: entry.thread_id, status: "ok" };
    try {
        ctx.db.writeEntry(entry);
    }
    catch {
        // Entry is already durable in JSONL; the index recovers on next startup.
        return { ...ok, warning: "index_sync_failed" };
    }
    return ok;
}
|
|
@@ -0,0 +1,114 @@
|
|
|
1
|
+
import { deserializeEntry } from "./entry.js";
|
|
2
|
+
import { formatCompact, formatJson, formatMd } from "./formatters.js";
|
|
3
|
+
import { parseSince } from "./time.js";
|
|
4
|
+
const MAX_LIMIT = 1000;
const MAX_SEARCH_LENGTH = 500;
/** Truncate to at most `max` Unicode codepoints without splitting surrogate pairs. */
function unicodeTruncate(s, max) {
    // A string with <= max UTF-16 units cannot exceed max codepoints — fast path.
    if (s.length <= max) {
        return s;
    }
    let end = 0;
    let taken = 0;
    while (taken < max && end < s.length) {
        const cp = s.codePointAt(end) ?? 0;
        // Astral codepoints occupy two UTF-16 units; advance past the whole pair.
        end += cp > 0xffff ? 2 : 1;
        taken += 1;
    }
    return s.slice(0, end);
}
|
|
19
|
+
/** Escape FTS5 special characters by wrapping in double quotes. */
function escapeFts(query) {
    // Null bytes cause "unterminated string" errors inside FTS5 — drop them.
    const sanitized = query.replaceAll("\0", "");
    // Inner double quotes are escaped by doubling, per FTS5 string syntax.
    const doubled = sanitized.replaceAll('"', '""');
    return `"${doubled}"`;
}
|
|
25
|
+
/** Render thread-summary rows as a markdown bullet list (one section per thread). */
function formatThreadSummary(threads) {
    if (threads.length === 0) {
        return "No active threads.";
    }
    const sections = [];
    for (const t of threads) {
        // Agents list is optional; omit the parenthetical when absent.
        const agents = t.agents ? ` (${t.agents})` : "";
        sections.push(`- **${t.first_summary}**\n Thread: ${t.thread_id} \u00b7 ${t.entry_count} entries \u00b7 Last active: ${t.last_active_at}${agents}`);
    }
    return sections.join("\n\n");
}
|
|
35
|
+
/**
 * Query entries with optional preset, filters, and formatting.
 *
 * Precedence: preset defaults are applied first, then any direct params on
 * `input` override them, then the limit is clamped to MAX_LIMIT.
 *
 * @param input - preset, filters (thread_id, session_id, category, agent, device,
 *                tags, since, until), search, limit, format
 * @param ctx - read context carrying the db index handle
 * @param config - loaded config; `config.presets` supplies per-preset limits
 * @returns { entries, rendered, total, format } — `total` is the unlimited match count
 */
export function query(input, ctx, config) {
    const format = input.format ?? "md";
    const presetConfig = input.preset ? config.presets[input.preset] : undefined;
    // Threads preset is special — uses the thread_summary view, not the entries table,
    // so it returns early with an empty `entries` array and only rendered text.
    if (input.preset === "threads") {
        const limit = input.limit ?? presetConfig?.thread_limit ?? 20;
        const threads = ctx.db.queryThreadSummary(limit);
        const rendered = formatThreadSummary(threads);
        return {
            entries: [],
            rendered,
            total: threads.length,
            format,
        };
    }
    // Build query params from preset + direct input
    const params = {};
    // Apply preset defaults
    if (input.preset === "catchup") {
        // Catch-up window is fixed at the last 7 days.
        params.since = parseSince("7d");
        params.limit = input.limit ?? presetConfig?.default_limit ?? 50;
    }
    else if (input.preset === "timeline") {
        params.limit = input.limit ?? presetConfig?.default_limit ?? 50;
    }
    else if (input.preset === "decisions") {
        params.category = "decision";
        params.limit = input.limit ?? presetConfig?.default_limit ?? 50;
    }
    else if (input.preset === "changes") {
        params.category = "file change";
        params.limit = input.limit ?? presetConfig?.default_limit ?? 50;
    }
    // Direct params override preset defaults
    if (input.thread_id)
        params.thread_id = input.thread_id;
    if (input.session_id)
        params.session_id = input.session_id;
    // Categories are stored normalized (lowercase, trimmed); match that here.
    if (input.category)
        params.category = input.category.trim().toLowerCase();
    if (input.agent)
        params.agent = input.agent;
    if (input.device)
        params.device = input.device;
    if (input.tags)
        params.tags = input.tags;
    if (input.since)
        params.since = parseSince(input.since);
    if (input.until)
        params.until = parseSince(input.until);
    if (input.limit !== undefined)
        params.limit = input.limit;
    // Clamp whatever limit won (explicit, preset, or default 50) to the hard cap.
    params.limit = Math.min(params.limit ?? 50, MAX_LIMIT);
    // FTS search — escape special chars, truncate long queries (Unicode-safe)
    if (input.search) {
        const trimmed = unicodeTruncate(input.search, MAX_SEARCH_LENGTH);
        params.search = escapeFts(trimmed);
    }
    // Get total count (before limit) so callers can show "N of M".
    const countParams = { ...params };
    countParams.limit = undefined;
    const total = ctx.db.countEntries(countParams);
    // Query entries
    const rows = ctx.db.queryEntries(params);
    const entries = rows.map(deserializeEntry);
    // Format output
    let rendered;
    switch (format) {
        case "json":
            rendered = formatJson(entries);
            break;
        case "compact":
            rendered = formatCompact(entries);
            break;
        default:
            // "md" and any unexpected format fall back to markdown.
            rendered = formatMd(entries);
    }
    return { entries, rendered, total, format };
}
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
import { readdirSync } from "node:fs";
|
|
2
|
+
import { resolveDbPath, resolveJournalsPath } from "./config.js";
|
|
3
|
+
/**
 * Returns journal health and stats.
 *
 * @param ctx - context carrying the db index handle
 * @param config - loaded config used to resolve db/journals paths
 * @param sessionId - the current session id, echoed back in the result
 */
export function status(ctx, config, sessionId) {
    const entriesByCategory = ctx.db.countByCategory();
    let totalEntries = 0;
    for (const count of Object.values(entriesByCategory)) {
        totalEntries += count;
    }
    const range = ctx.db.getEntryTimestampRange();
    const dbPath = resolveDbPath(config);
    const journalsDir = resolveJournalsPath(config);
    // Count on-disk YYYY-MM-DD.jsonl files; a missing directory just means zero.
    let journalFiles = 0;
    try {
        const names = readdirSync(journalsDir);
        journalFiles = names.filter((name) => /^\d{4}-\d{2}-\d{2}\.jsonl$/.test(name)).length;
    }
    catch {
        // Directory doesn't exist yet
    }
    return {
        total_entries: totalEntries,
        entries_by_category: entriesByCategory,
        oldest_entry: range.oldest,
        newest_entry: range.newest,
        current_session_id: sessionId,
        db_path: dbPath,
        journals_dir: journalsDir,
        journal_files: journalFiles,
        db_healthy: ctx.db.isHealthy(),
        db_size_bytes: ctx.db.getDbSizeBytes(),
    };
}
|
|
@@ -0,0 +1,94 @@
|
|
|
1
|
+
import { statSync } from "node:fs";
|
|
2
|
+
import { deserializeEntry } from "./entry.js";
|
|
3
|
+
import { listJournalFiles, readJournalFile } from "./journal.js";
|
|
4
|
+
/**
 * Called on every process startup after openDatabase().
 * Checks JSONL file modification times against last_indexed_at in metadata.
 * Performs incremental rebuild if any file is newer.
 *
 * @param db - open index handle (getLastIndexedAt/writeEntries/setLastIndexedAt)
 * @param journalsDir - directory holding the YYYY-MM-DD.jsonl source files
 */
export async function checkAndSyncIfStale(db, journalsDir) {
    const lastIndexed = db.getLastIndexedAt();
    const files = await listJournalFiles(journalsDir);
    if (files.length === 0)
        return;
    // No recorded watermark means everything is stale (epoch 0).
    const lastIndexedMs = lastIndexed ? new Date(lastIndexed).getTime() : 0;
    const staleFiles = files.filter((f) => {
        try {
            const mtime = statSync(f).mtimeMs;
            return mtime > lastIndexedMs;
        }
        catch {
            // Unstat-able file: treat as not stale rather than fail the startup sync.
            return false;
        }
    });
    if (staleFiles.length === 0)
        return;
    // Capture max mtime of stale files BEFORE processing to avoid data loss window:
    // a file modified while we read it stays newer than the watermark we record,
    // so the next startup re-syncs it.
    let maxMtimeMs = 0;
    for (const f of staleFiles) {
        try {
            const mtime = statSync(f).mtimeMs;
            if (mtime > maxMtimeMs)
                maxMtimeMs = mtime;
        }
        catch {
            /* already filtered */
        }
    }
    for (const file of staleFiles) {
        const rows = await readJournalFile(file);
        const entries = [];
        for (const row of rows) {
            // A row that fails deserialization is skipped, never fatal —
            // the JSONL remains the source of truth either way.
            try {
                entries.push(deserializeEntry(row));
            }
            catch {
                console.warn(`Skipping entry during sync: ${row.id}`);
            }
        }
        if (entries.length > 0) {
            db.writeEntries(entries);
        }
    }
    // Use the max mtime instead of nowUtc() to avoid missing files modified during sync
    db.setLastIndexedAt(new Date(maxMtimeMs).toISOString());
}
|
|
56
|
+
/**
 * Full reconstruction of SQLite entries + FTS from all JSONL files.
 * For use when the index is corrupted or during joa rebuild.
 *
 * @param db - open index handle (clearEntries/writeEntries/rebuildFts/setLastIndexedAt)
 * @param journalsDir - directory holding the YYYY-MM-DD.jsonl source files
 */
export async function rebuildIndex(db, journalsDir) {
    db.clearEntries();
    const files = await listJournalFiles(journalsDir);
    // Snapshot the newest mtime BEFORE reading any file, so writes that land
    // mid-rebuild still look stale to the next startup check.
    let newestMtimeMs = 0;
    for (const path of files) {
        try {
            const mtime = statSync(path).mtimeMs;
            if (mtime > newestMtimeMs) {
                newestMtimeMs = mtime;
            }
        }
        catch {
            /* skip unreadable files */
        }
    }
    for (const path of files) {
        const rows = await readJournalFile(path);
        const batch = [];
        for (const row of rows) {
            try {
                batch.push(deserializeEntry(row));
            }
            catch {
                console.warn(`Skipping entry during rebuild: ${row.id}`);
            }
        }
        if (batch.length > 0) {
            db.writeEntries(batch);
        }
    }
    db.rebuildFts();
    // Only record a watermark when at least one file was stat-able.
    if (newestMtimeMs > 0) {
        db.setLastIndexedAt(new Date(newestMtimeMs).toISOString());
    }
}
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
import { ValidationError } from "./errors.js";
|
|
2
|
+
const RELATIVE_RE = /^(\d+)([dwm])$/;
/**
 * Parses a time string into an ISO 8601 UTC timestamp.
 * Accepts:
 *  - relative durations: "1d", "7d", "2w", "1m" ("m" is approximated as 30 days)
 *  - ISO dates ("YYYY-MM-DD") — interpreted as the start of that UTC day
 *  - ISO UTC datetimes ("YYYY-MM-DDTHH:mm:ssZ", optionally with .sss milliseconds)
 *
 * @param since - the time string to parse
 * @returns an ISO 8601 UTC timestamp string
 * @throws {ValidationError} on malformed input or impossible calendar dates
 *         (e.g. "2024-02-30"), which previously slipped past the shape regex and
 *         produced garbage timestamps that broke range comparisons in the index.
 */
export function parseSince(since) {
    const match = since.match(RELATIVE_RE);
    if (match) {
        const amount = Number.parseInt(match[1] ?? "0", 10);
        const unit = match[2] ?? "d";
        const DAY_MS = 24 * 60 * 60 * 1000;
        // Calendar-free approximations: a week is 7 days, a month is 30 days.
        const UNIT_MS = { d: DAY_MS, w: 7 * DAY_MS, m: 30 * DAY_MS };
        const ms = (UNIT_MS[unit] ?? 0) * amount;
        return new Date(Date.now() - ms).toISOString();
    }
    // ISO date "YYYY-MM-DD" → start of that UTC day
    if (/^\d{4}-\d{2}-\d{2}$/.test(since)) {
        const iso = `${since}T00:00:00.000Z`;
        // A real calendar date round-trips exactly; "2024-02-30" does not.
        const parsed = new Date(iso);
        if (Number.isNaN(parsed.getTime()) || parsed.toISOString() !== iso) {
            throw new ValidationError(`Invalid calendar date: "${since}"`);
        }
        return iso;
    }
    // ISO datetime — validate shape AND calendar validity before passing through
    if (!/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d{3})?Z$/.test(since) ||
        Number.isNaN(new Date(since).getTime())) {
        throw new ValidationError(`Invalid ISO 8601 timestamp: "${since}". Expected format: YYYY-MM-DDTHH:mm:ss.sssZ or YYYY-MM-DDTHH:mm:ssZ`);
    }
    return since;
}
|
|
39
|
+
/** Returns today's date as "YYYY-MM-DD" using local system time. */
export function todayDate() {
    const now = new Date();
    const pad = (n) => String(n).padStart(2, "0");
    // getMonth() is zero-based; shift to the human 1-12 range.
    return `${now.getFullYear()}-${pad(now.getMonth() + 1)}-${pad(now.getDate())}`;
}
|
|
47
|
+
/** Returns the current UTC timestamp as ISOTimestamp. */
export function nowUtc() {
    const current = new Date();
    return current.toISOString();
}
|
|
@@ -0,0 +1,149 @@
|
|
|
1
|
+
import { readFileSync } from "node:fs";
|
|
2
|
+
import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
|
|
3
|
+
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
|
|
4
|
+
import { z } from "zod";
|
|
5
|
+
import { bootstrap, log, query, status } from "../core/index.js";
|
|
6
|
+
// Read our own package.json (two levels up from dist/mcp/) for the version string
// reported in the MCP server identity below.
const pkg = JSON.parse(readFileSync(new URL("../../package.json", import.meta.url), "utf8"));
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------
|
|
10
|
+
/** Wrap plain text in the MCP tool-result content shape. */
function textContent(text) {
    const item = { type: "text", text };
    return { content: [item] };
}
|
|
13
|
+
/** Log a tool failure to stderr and build the MCP error result for it. */
function errorResponse(toolName, err) {
    // Non-Error throwables (strings, objects) are stringified rather than crashing.
    const message = err instanceof Error ? err.message : String(err);
    console.error(`${toolName} error: ${message}`);
    const body = textContent(`Error: ${message}`);
    return { ...body, isError: true };
}
|
|
18
|
+
// Agent name is set by the CLI dispatcher via JOA_MCP_AGENT env var
// before dynamic-importing this module (see src/cli/main.ts "mcp" case).
// Top-level await: the module is not usable until bootstrap completes.
const { config, readCtx, logCtx, sid } = await bootstrap({
    agent: process.env.JOA_MCP_AGENT ?? "mcp",
});
// Server identity reported to MCP clients; version comes from package.json above.
const server = new McpServer({
    name: "joa",
    version: pkg.version,
});
|
|
27
|
+
// ---------------------------------------------------------------------------
// joa_log — write a journal entry via the core log() operation.
// ---------------------------------------------------------------------------
server.registerTool("joa_log", {
    title: "Log Entry",
    description: "Log a journal entry to joa. Records observations, decisions, file changes, and other agent activity.",
    inputSchema: {
        category: z.string().describe("Entry category (e.g. observation, decision, file change)"),
        summary: z.string().describe("Short summary of the entry"),
        // "new" starts a thread; otherwise the id must carry the "th_" prefix.
        thread_id: z
            .union([z.literal("new"), z.string().startsWith("th_")])
            .nullable()
            .optional()
            .describe("Thread ID or 'new' to start a thread"),
        detail: z.record(z.string(), z.unknown()).optional().describe("Additional structured detail"),
        resources: z
            .array(z.string())
            .optional()
            .describe("File paths or URLs related to this entry"),
        tags: z.array(z.string()).optional().describe("Tags for categorization"),
        annotations: z.record(z.string(), z.unknown()).optional().describe("Metadata annotations"),
    },
}, async (args) => {
    try {
        // Delegate to the core log() with the write context created at bootstrap.
        const result = await log({
            category: args.category,
            summary: args.summary,
            thread_id: args.thread_id,
            detail: args.detail,
            resources: args.resources,
            tags: args.tags,
            annotations: args.annotations,
        }, logCtx);
        // Echo the ids back as JSON; include the warning only when one was raised.
        return textContent(JSON.stringify({
            entry_id: result.entry_id,
            thread_id: result.thread_id,
            status: result.status,
            ...(result.warning ? { warning: result.warning } : {}),
        }));
    }
    catch (err) {
        return errorResponse("joa_log", err);
    }
});
|
|
71
|
+
// ---------------------------------------------------------------------------
// joa_query — read entries via the core query() operation.
// ---------------------------------------------------------------------------
server.registerTool("joa_query", {
    title: "Query Entries",
    description: "Query journal entries from joa. Supports presets (catchup, threads, timeline, decisions, changes), full-text search, and filters.",
    inputSchema: {
        preset: z
            .enum(["catchup", "threads", "timeline", "decisions", "changes"])
            .optional()
            .describe("Query preset"),
        thread_id: z.string().optional().describe("Filter by thread ID"),
        session_id: z.string().optional().describe("Filter by session ID"),
        category: z.string().optional().describe("Filter by category"),
        agent: z.string().optional().describe("Filter by agent name"),
        device: z.string().optional().describe("Filter by device name"),
        search: z.string().optional().describe("Full-text search term"),
        tags: z.array(z.string()).optional().describe("Filter by tags (AND semantics)"),
        since: z.string().optional().describe("Time filter: 1d, 7d, 2w, 1m, or ISO date"),
        until: z.string().optional().describe("Time upper bound"),
        limit: z.number().int().min(1).max(1000).optional().describe("Max entries to return"),
        format: z.enum(["md", "json", "compact"]).optional().describe("Output format (default: md)"),
    },
}, async (args) => {
    try {
        // Delegate to core query() with the read context created at bootstrap.
        const result = query({
            preset: args.preset,
            thread_id: args.thread_id,
            session_id: args.session_id,
            category: args.category,
            agent: args.agent,
            device: args.device,
            search: args.search,
            tags: args.tags,
            since: args.since,
            until: args.until,
            limit: args.limit,
            format: args.format ?? "md",
        }, readCtx, config);
        let text = result.rendered;
        // When the limit truncated the result set, append a "Showing N of M" footer.
        if (result.total > result.entries.length) {
            text += `\n\n_Showing ${result.entries.length} of ${result.total} entries_`;
        }
        return textContent(text);
    }
    catch (err) {
        return errorResponse("joa_query", err);
    }
});
|
|
120
|
+
// ---------------------------------------------------------------------------
// joa_status — journal health snapshot via the core status() operation.
// ---------------------------------------------------------------------------
server.registerTool("joa_status", {
    title: "Journal Status",
    description: "Get journal health stats: entry count, categories, timestamps, DB health.",
    // No inputs: status is always a full snapshot for the current session.
    inputSchema: {},
}, async () => {
    try {
        const s = status(readCtx, config, sid);
        // Pretty-printed JSON so the stats are readable in MCP clients.
        return textContent(JSON.stringify(s, null, 2));
    }
    catch (err) {
        return errorResponse("joa_status", err);
    }
});
|
|
136
|
+
// ---------------------------------------------------------------------------
// Start transport
// ---------------------------------------------------------------------------
const transport = new StdioServerTransport();
await server.connect(transport);
// Log to stderr only: stdout carries the MCP stdio protocol stream.
console.error("joa MCP server started");
// Clean shutdown
function shutdown() {
    console.error("joa MCP server shutting down");
    // NOTE(review): only logCtx.db is closed here — presumably readCtx shares
    // the same handle; confirm, otherwise a second handle is leaked on exit.
    logCtx.db.close();
    process.exit(0);
}
process.on("SIGINT", shutdown);
process.on("SIGTERM", shutdown);
|
package/package.json
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@neethan/joa",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"type": "module",
|
|
5
|
+
"description": "Journal of Agents — persistent activity journal for AI agents",
|
|
6
|
+
"bin": {
|
|
7
|
+
"joa": "dist/cli/main.js"
|
|
8
|
+
},
|
|
9
|
+
"files": ["dist/"],
|
|
10
|
+
"engines": {
|
|
11
|
+
"node": ">=18"
|
|
12
|
+
},
|
|
13
|
+
"license": "MIT",
|
|
14
|
+
"author": "neethan <neethan@hallid.ai> (https://github.com/neethanwu)",
|
|
15
|
+
"homepage": "https://github.com/neethanwu/joa#readme",
|
|
16
|
+
"bugs": {
|
|
17
|
+
"url": "https://github.com/neethanwu/joa/issues"
|
|
18
|
+
},
|
|
19
|
+
"repository": {
|
|
20
|
+
"type": "git",
|
|
21
|
+
"url": "git+https://github.com/neethanwu/joa.git"
|
|
22
|
+
},
|
|
23
|
+
"keywords": [
|
|
24
|
+
"ai",
|
|
25
|
+
"agent",
|
|
26
|
+
"journal",
|
|
27
|
+
"mcp",
|
|
28
|
+
"memory",
|
|
29
|
+
"claude",
|
|
30
|
+
"cursor",
|
|
31
|
+
"llm",
|
|
32
|
+
"activity-log"
|
|
33
|
+
],
|
|
34
|
+
"scripts": {
|
|
35
|
+
"test": "bun test",
|
|
36
|
+
"build": "rm -rf dist && tsc -p tsconfig.build.json && test -s dist/cli/main.js && printf '#!/usr/bin/env node\\n' | cat - dist/cli/main.js > dist/cli/main.tmp && mv dist/cli/main.tmp dist/cli/main.js && chmod +x dist/cli/main.js",
|
|
37
|
+
"prepublishOnly": "bun run build",
|
|
38
|
+
"lint": "bunx biome check .",
|
|
39
|
+
"lint:fix": "bunx biome check --write .",
|
|
40
|
+
"typecheck": "bunx tsc --noEmit"
|
|
41
|
+
},
|
|
42
|
+
"dependencies": {
|
|
43
|
+
"@clack/prompts": "^1.0.1",
|
|
44
|
+
"@modelcontextprotocol/sdk": "^1.27.1",
|
|
45
|
+
"better-sqlite3": "^12.6.2",
|
|
46
|
+
"js-yaml": "^4.1.0",
|
|
47
|
+
"ulidx": "^2.3.0",
|
|
48
|
+
"zod": "^4.3.6"
|
|
49
|
+
},
|
|
50
|
+
"devDependencies": {
|
|
51
|
+
"@biomejs/biome": "^1.9.0",
|
|
52
|
+
"@types/better-sqlite3": "^7.6.13",
|
|
53
|
+
"@types/js-yaml": "^4.0.9",
|
|
54
|
+
"@types/node": "^25.3.1",
|
|
55
|
+
"bun-types": "^1.3.9",
|
|
56
|
+
"typescript": "^5.7.0"
|
|
57
|
+
},
|
|
58
|
+
"trustedDependencies": ["@biomejs/biome", "better-sqlite3"]
|
|
59
|
+
}
|