@biaoo/tiangong-wiki 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +167 -0
- package/README.zh-CN.md +167 -0
- package/SKILL.md +116 -0
- package/agents/openai.yaml +4 -0
- package/assets/config.example.env +18 -0
- package/assets/templates/achievement.md +32 -0
- package/assets/templates/bridge.md +33 -0
- package/assets/templates/concept.md +47 -0
- package/assets/templates/faq.md +31 -0
- package/assets/templates/lesson.md +31 -0
- package/assets/templates/method.md +31 -0
- package/assets/templates/misconception.md +35 -0
- package/assets/templates/person.md +31 -0
- package/assets/templates/research-note.md +34 -0
- package/assets/templates/resume.md +34 -0
- package/assets/templates/source-summary.md +35 -0
- package/assets/vllm/qwen3_5_openai_developer.jinja +182 -0
- package/assets/wiki.config.default.json +193 -0
- package/dist/commands/check-config.js +77 -0
- package/dist/commands/create.js +32 -0
- package/dist/commands/daemon.js +186 -0
- package/dist/commands/dashboard.js +112 -0
- package/dist/commands/doctor.js +22 -0
- package/dist/commands/export-graph.js +28 -0
- package/dist/commands/export-index.js +31 -0
- package/dist/commands/find.js +36 -0
- package/dist/commands/fts.js +32 -0
- package/dist/commands/graph.js +35 -0
- package/dist/commands/init.js +48 -0
- package/dist/commands/lint.js +35 -0
- package/dist/commands/list.js +28 -0
- package/dist/commands/page-info.js +24 -0
- package/dist/commands/search.js +32 -0
- package/dist/commands/setup.js +15 -0
- package/dist/commands/stat.js +20 -0
- package/dist/commands/sync.js +38 -0
- package/dist/commands/template.js +71 -0
- package/dist/commands/type.js +88 -0
- package/dist/commands/vault.js +64 -0
- package/dist/core/agent.js +201 -0
- package/dist/core/cli-env.js +129 -0
- package/dist/core/codex-workflow.js +233 -0
- package/dist/core/config.js +126 -0
- package/dist/core/db.js +292 -0
- package/dist/core/embedding.js +104 -0
- package/dist/core/frontmatter.js +287 -0
- package/dist/core/indexer.js +241 -0
- package/dist/core/onboarding.js +967 -0
- package/dist/core/page-files.js +91 -0
- package/dist/core/paths.js +161 -0
- package/dist/core/presenters.js +23 -0
- package/dist/core/query.js +58 -0
- package/dist/core/runtime.js +20 -0
- package/dist/core/sync.js +235 -0
- package/dist/core/synology.js +412 -0
- package/dist/core/template-evolution.js +38 -0
- package/dist/core/vault-processing.js +742 -0
- package/dist/core/vault.js +594 -0
- package/dist/core/workflow-context.js +188 -0
- package/dist/core/workflow-result.js +162 -0
- package/dist/core/workspace-bootstrap.js +30 -0
- package/dist/core/workspace-skills.js +220 -0
- package/dist/daemon/client.js +147 -0
- package/dist/daemon/server.js +807 -0
- package/dist/daemon/state.js +53 -0
- package/dist/dashboard/assets/index-1FgAUZ28.css +1 -0
- package/dist/dashboard/assets/index-6A0PWT4X.js +154 -0
- package/dist/dashboard/assets/jetbrains-mono-cyrillic-400-normal-BEIGL1Tu.woff2 +0 -0
- package/dist/dashboard/assets/jetbrains-mono-cyrillic-400-normal-ugxPyKxw.woff +0 -0
- package/dist/dashboard/assets/jetbrains-mono-cyrillic-500-normal-DJqRU3vO.woff +0 -0
- package/dist/dashboard/assets/jetbrains-mono-cyrillic-500-normal-DmUKJPL_.woff2 +0 -0
- package/dist/dashboard/assets/jetbrains-mono-cyrillic-700-normal-BWTpRfYl.woff2 +0 -0
- package/dist/dashboard/assets/jetbrains-mono-cyrillic-700-normal-CEoEElIJ.woff +0 -0
- package/dist/dashboard/assets/jetbrains-mono-greek-400-normal-B9oWc5Lo.woff +0 -0
- package/dist/dashboard/assets/jetbrains-mono-greek-400-normal-C190GLew.woff2 +0 -0
- package/dist/dashboard/assets/jetbrains-mono-greek-500-normal-D7SFKleX.woff +0 -0
- package/dist/dashboard/assets/jetbrains-mono-greek-500-normal-JpySY46c.woff2 +0 -0
- package/dist/dashboard/assets/jetbrains-mono-greek-700-normal-C6CZE3T8.woff2 +0 -0
- package/dist/dashboard/assets/jetbrains-mono-greek-700-normal-DEigVDxa.woff +0 -0
- package/dist/dashboard/assets/jetbrains-mono-latin-400-normal-6-qcROiO.woff +0 -0
- package/dist/dashboard/assets/jetbrains-mono-latin-400-normal-V6pRDFza.woff2 +0 -0
- package/dist/dashboard/assets/jetbrains-mono-latin-500-normal-BWZEU5yA.woff2 +0 -0
- package/dist/dashboard/assets/jetbrains-mono-latin-500-normal-CJOVTJB7.woff +0 -0
- package/dist/dashboard/assets/jetbrains-mono-latin-700-normal-BYuf6tUa.woff2 +0 -0
- package/dist/dashboard/assets/jetbrains-mono-latin-700-normal-D3wTyLJW.woff +0 -0
- package/dist/dashboard/assets/jetbrains-mono-latin-ext-400-normal-Bc8Ftmh3.woff2 +0 -0
- package/dist/dashboard/assets/jetbrains-mono-latin-ext-400-normal-fXTG6kC5.woff +0 -0
- package/dist/dashboard/assets/jetbrains-mono-latin-ext-500-normal-Cut-4mMH.woff2 +0 -0
- package/dist/dashboard/assets/jetbrains-mono-latin-ext-500-normal-ckzbgY84.woff +0 -0
- package/dist/dashboard/assets/jetbrains-mono-latin-ext-700-normal-CZipNAKV.woff2 +0 -0
- package/dist/dashboard/assets/jetbrains-mono-latin-ext-700-normal-CxPITLHs.woff +0 -0
- package/dist/dashboard/assets/jetbrains-mono-vietnamese-400-normal-CqNFfHCs.woff +0 -0
- package/dist/dashboard/assets/jetbrains-mono-vietnamese-500-normal-DNRqzVM1.woff +0 -0
- package/dist/dashboard/assets/jetbrains-mono-vietnamese-700-normal-BDLVIk2r.woff +0 -0
- package/dist/dashboard/assets/space-grotesk-latin-400-normal-BnQMeOim.woff +0 -0
- package/dist/dashboard/assets/space-grotesk-latin-400-normal-CJ-V5oYT.woff2 +0 -0
- package/dist/dashboard/assets/space-grotesk-latin-500-normal-CNSSEhBt.woff +0 -0
- package/dist/dashboard/assets/space-grotesk-latin-500-normal-lFbtlQH6.woff2 +0 -0
- package/dist/dashboard/assets/space-grotesk-latin-700-normal-CwsQ-cCU.woff +0 -0
- package/dist/dashboard/assets/space-grotesk-latin-700-normal-RjhwGPKo.woff2 +0 -0
- package/dist/dashboard/assets/space-grotesk-latin-ext-400-normal-CfP_5XZW.woff2 +0 -0
- package/dist/dashboard/assets/space-grotesk-latin-ext-400-normal-DRPE3kg4.woff +0 -0
- package/dist/dashboard/assets/space-grotesk-latin-ext-500-normal-3dgZTiw9.woff +0 -0
- package/dist/dashboard/assets/space-grotesk-latin-ext-500-normal-DUe3BAxM.woff2 +0 -0
- package/dist/dashboard/assets/space-grotesk-latin-ext-700-normal-BQnZhY3m.woff2 +0 -0
- package/dist/dashboard/assets/space-grotesk-latin-ext-700-normal-HVCqSBdx.woff +0 -0
- package/dist/dashboard/assets/space-grotesk-vietnamese-400-normal-B7xT_GF5.woff2 +0 -0
- package/dist/dashboard/assets/space-grotesk-vietnamese-400-normal-BIWiOVfw.woff +0 -0
- package/dist/dashboard/assets/space-grotesk-vietnamese-500-normal-BTqKIpxg.woff +0 -0
- package/dist/dashboard/assets/space-grotesk-vietnamese-500-normal-BmEvtly_.woff2 +0 -0
- package/dist/dashboard/assets/space-grotesk-vietnamese-700-normal-DMty7AZE.woff2 +0 -0
- package/dist/dashboard/assets/space-grotesk-vietnamese-700-normal-Duxec5Rn.woff +0 -0
- package/dist/dashboard/index.html +18 -0
- package/dist/index.js +86 -0
- package/dist/operations/dashboard.js +1231 -0
- package/dist/operations/export.js +110 -0
- package/dist/operations/query.js +649 -0
- package/dist/operations/type-template.js +210 -0
- package/dist/operations/write.js +143 -0
- package/dist/types/config.js +1 -0
- package/dist/types/page.js +1 -0
- package/dist/utils/case.js +22 -0
- package/dist/utils/errors.js +26 -0
- package/dist/utils/fs.js +77 -0
- package/dist/utils/output.js +33 -0
- package/dist/utils/process.js +60 -0
- package/dist/utils/segmenter.js +24 -0
- package/dist/utils/slug.js +10 -0
- package/dist/utils/time.js +24 -0
- package/package.json +64 -0
- package/references/cli-interface.md +312 -0
- package/references/env.md +122 -0
- package/references/template-design-guide.md +271 -0
- package/references/vault-to-wiki-instruction.md +110 -0
- package/references/wiki-maintenance-instruction.md +190 -0
|
@@ -0,0 +1,210 @@
|
|
|
1
|
+
import matter from "gray-matter";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import { getTemplate } from "../core/config.js";
|
|
4
|
+
import { EmbeddingClient } from "../core/embedding.js";
|
|
5
|
+
import { loadRuntimeConfig, openRuntimeDb } from "../core/runtime.js";
|
|
6
|
+
import { ensureDirSync, pathExistsSync, readTextFileSync, writeTextFileSync } from "../utils/fs.js";
|
|
7
|
+
import { AppError } from "../utils/errors.js";
|
|
8
|
+
// Map a vector distance onto a similarity score in (0, 1].
// Distance 0 yields 1; growing distances decay toward 0.
function distanceToSimilarity(distance) {
    const similarity = 1 / (1 + distance);
    return similarity;
}
|
|
11
|
+
// Split a comma-separated keyword string into trimmed, non-empty entries.
// null/undefined input yields an empty list.
function normalizeKeywords(raw) {
    const source = raw ?? "";
    const keywords = [];
    for (const piece of source.split(",")) {
        const trimmed = piece.trim();
        if (trimmed !== "") {
            keywords.push(trimmed);
        }
    }
    return keywords;
}
|
|
17
|
+
// Parse a user-supplied limit flag, falling back to `fallback` when the
// value is null/undefined. Throws AppError("config") unless the parsed
// result is a positive integer; `label` names the flag in the message.
function parsePositiveLimit(value, label, fallback) {
    const candidate = value ?? fallback;
    const parsed = Number.parseInt(String(candidate), 10);
    const isPositive = Number.isFinite(parsed) && parsed > 0;
    if (isPositive) {
        return parsed;
    }
    throw new AppError(`Invalid ${label} value: ${value}`, "config");
}
|
|
25
|
+
// Build the public descriptor for a page-type template definition.
// Column/edge definitions are reduced to their key names; the template's
// relative file path is anchored at `wikiRoot` to produce an absolute path.
function toTypeDescriptor(pageType, definition, wikiRoot) {
    const descriptor = {
        pageType,
        file: definition.file,
        filePath: path.resolve(wikiRoot, definition.file),
        columns: Object.keys(definition.columns),
        edges: Object.keys(definition.edges),
        summaryFields: definition.summaryFields,
    };
    return descriptor;
}
|
|
35
|
+
// Render a starter markdown template (YAML frontmatter + body) for a newly
// created page type. The frontmatter mirrors the fields expected on real
// pages.
//
// Fix: createdAt/updatedAt were hard-coded to the literal date "2026-04-06";
// they are now stamped with the current local date.
function templateSkeleton(pageType, title) {
    const now = new Date();
    const pad = (value) => String(value).padStart(2, "0");
    const today = `${now.getFullYear()}-${pad(now.getMonth() + 1)}-${pad(now.getDate())}`;
    return matter.stringify([
        "## Summary",
        "",
        "- Add a concise overview.",
        "",
        "## Details",
        "",
        "- Expand the template fields and sections for this page type.",
    ].join("\n"), {
        pageType,
        title,
        nodeId: "",
        status: "draft",
        visibility: "private",
        sourceRefs: [],
        relatedPages: [],
        tags: [],
        createdAt: today,
        updatedAt: today,
    });
}
|
|
57
|
+
// List every configured page type as type descriptors, sorted by name.
export function listTypes(env = process.env) {
    const { paths, config } = loadRuntimeConfig(env);
    const descriptors = [];
    for (const [pageType, definition] of Object.entries(config.templates)) {
        descriptors.push(toTypeDescriptor(pageType, definition, paths.wikiRoot));
    }
    descriptors.sort((a, b) => a.pageType.localeCompare(b.pageType));
    return descriptors;
}
|
|
63
|
+
// Return the full definition for one page type: the basic descriptor plus
// the raw columns/edges objects (not just their key names).
// Throws AppError("not_found") when the type is not configured.
export function showType(env = process.env, pageType) {
    const { paths, config } = loadRuntimeConfig(env);
    const definition = config.templates[pageType];
    if (definition === undefined) {
        throw new AppError(`Unknown type: ${pageType}`, "not_found");
    }
    const descriptor = toTypeDescriptor(pageType, definition, paths.wikiRoot);
    return {
        ...descriptor,
        columns: definition.columns,
        edges: definition.edges,
    };
}
|
|
75
|
+
// Recommend page types for new content by embedding the query text and
// voting over the nearest already-embedded pages (kNN over vec_pages).
//
// env     - process environment used to resolve embedding and DB settings.
// options - { text, keywords?, limit? }; `text` is the content to classify,
//           `keywords` an optional comma-separated string, `limit` the max
//           number of recommendations (default 5).
//
// Returns { query, recommendations }; each recommendation has a score
// (summed similarity across supporting neighbors), human-readable signal
// strings, and its top supporting pages. Throws AppError when embedding is
// not configured, no vectors exist yet, or no neighbors are found.
export async function recommendTypes(env = process.env, options) {
    const embeddingClient = EmbeddingClient.fromEnv(env);
    if (!embeddingClient) {
        throw new AppError("Embedding not configured", "not_configured");
    }
    const limit = parsePositiveLimit(options.limit, "--limit", 5);
    const keywords = normalizeKeywords(options.keywords);
    // Fold keywords into the embedded query text so they influence similarity.
    const queryText = [options.text.trim(), keywords.length > 0 ? `keywords: ${keywords.join(", ")}` : ""]
        .filter(Boolean)
        .join("\n\n");
    const [queryEmbedding] = await embeddingClient.embedBatch([queryText]);
    // Over-fetch neighbors (8x the requested limit, at least 24) so grouping
    // by page type still has enough support per type.
    const neighborLimit = Math.max(limit * 8, 24);
    const { db } = openRuntimeDb(env);
    try {
        const hasVectors = db.prepare("SELECT COUNT(*) AS count FROM vec_pages").get().count;
        if (hasVectors === 0) {
            throw new AppError("No page embeddings found. Run tiangong-wiki sync with embedding enabled first.", "not_configured");
        }
        // sqlite-vec style kNN query: MATCH binds the query vector, `k`
        // bounds the candidate search. The vector is passed as Float32Array.
        const rows = db
            .prepare(`
      SELECT
        pages.page_type AS pageType,
        pages.id AS pageId,
        pages.title AS title,
        vec_pages.distance AS distance
      FROM vec_pages
      JOIN pages ON pages.id = vec_pages.page_id
      WHERE vec_pages.embedding MATCH ?
        AND k = ?
      ORDER BY vec_pages.distance
      LIMIT ?
    `)
            .all(new Float32Array(queryEmbedding), neighborLimit, neighborLimit);
        if (rows.length === 0) {
            throw new AppError("No similar embedded pages found for type recommendation.", "runtime");
        }
        // Aggregate neighbor hits per page type.
        const grouped = new Map();
        for (const row of rows) {
            const similarity = distanceToSimilarity(Number(row.distance));
            const bucket = grouped.get(row.pageType) ?? {
                totalSimilarity: 0,
                maxSimilarity: 0,
                supportCount: 0,
                hits: [],
            };
            bucket.totalSimilarity += similarity;
            bucket.maxSimilarity = Math.max(bucket.maxSimilarity, similarity);
            bucket.supportCount += 1;
            bucket.hits.push({
                pageType: row.pageType,
                pageId: row.pageId,
                title: row.title,
                similarity,
            });
            grouped.set(row.pageType, bucket);
        }
        const recommendations = [...grouped.entries()]
            .map(([pageType, bucket]) => {
            // Keep only the three strongest supporting pages for display.
            const topHits = bucket.hits
                .sort((left, right) => right.similarity - left.similarity)
                .slice(0, 3);
            return {
                pageType,
                score: Number(bucket.totalSimilarity.toFixed(6)),
                signals: [
                    `supportCount:${bucket.supportCount}`,
                    `maxSimilarity:${bucket.maxSimilarity.toFixed(4)}`,
                    `avgSimilarity:${(bucket.totalSimilarity / bucket.supportCount).toFixed(4)}`,
                ],
                similarPages: topHits.map((hit) => `${hit.pageId}@${hit.similarity.toFixed(4)}`),
            };
        })
            // Highest total similarity first; page-type name breaks ties.
            .sort((left, right) => right.score - left.score || left.pageType.localeCompare(right.pageType))
            .slice(0, limit);
        return {
            query: { text: options.text, keywords },
            recommendations,
        };
    }
    finally {
        db.close();
    }
}
|
|
158
|
+
// List each configured page type with its template file, both as the
// config-relative path and as an absolute path under the wiki root.
export function listTemplates(env = process.env) {
    const { paths, config } = loadRuntimeConfig(env);
    const entries = Object.entries(config.templates);
    return entries.map(([pageType, definition]) => {
        return {
            pageType,
            file: definition.file,
            filePath: path.resolve(paths.wikiRoot, definition.file),
        };
    });
}
|
|
166
|
+
// Read the raw markdown template for one page type.
// getTemplate() validates that the type exists (it throws otherwise).
export function showTemplate(env = process.env, pageType) {
    const { paths, config } = loadRuntimeConfig(env);
    getTemplate(config, pageType);
    const filePath = path.resolve(paths.wikiRoot, config.templates[pageType].file);
    const content = readTextFileSync(filePath);
    return { pageType, filePath, content };
}
|
|
176
|
+
// Create a new page-type template: write a skeleton markdown file under
// templates/ and register the type in the wiki config file.
//
// options - { type, title }; `type` becomes both the pageType key and the
//           template file name, `title` seeds the skeleton frontmatter.
//
// Throws AppError("config") when the type is already configured or the
// template file already exists. Returns { pageType, templatePath, configPath }.
export function createTemplate(env = process.env, options) {
    const { paths, config } = loadRuntimeConfig(env);
    if (config.templates[options.type]) {
        throw new AppError(`Template already exists: ${options.type}`, "config");
    }
    // Normalize to forward slashes so the stored path is portable across OSes.
    const templateRelativePath = path.join("templates", `${options.type}.md`).split(path.sep).join("/");
    const templatePath = path.resolve(paths.wikiRoot, templateRelativePath);
    ensureDirSync(path.dirname(templatePath));
    if (pathExistsSync(templatePath)) {
        throw new AppError(`Template file already exists: ${templatePath}`, "config");
    }
    writeTextFileSync(templatePath, templateSkeleton(options.type, options.title));
    // Rebuild the config object key-by-key to keep a stable field order in
    // the serialized file. NOTE(review): any config fields other than those
    // listed here are dropped on rewrite — confirm that is intended.
    const updatedConfig = {
        schemaVersion: config.schemaVersion,
        customColumns: config.customColumns,
        defaultSummaryFields: config.defaultSummaryFields,
        vaultFileTypes: config.vaultFileTypes,
        commonEdges: config.commonEdges,
        templates: {
            ...config.templates,
            // New type starts empty; the user fills in columns/edges later.
            [options.type]: {
                file: templateRelativePath,
                columns: {},
                edges: {},
                summaryFields: [],
            },
        },
    };
    writeTextFileSync(paths.configPath, `${JSON.stringify(updatedConfig, null, 2)}\n`);
    return {
        pageType: options.type,
        templatePath,
        configPath: paths.configPath,
    };
}
|
|
@@ -0,0 +1,143 @@
|
|
|
1
|
+
import { getTemplate } from "../core/config.js";
|
|
2
|
+
import { resolveAgentSettings } from "../core/paths.js";
|
|
3
|
+
import { createPageFromTemplate } from "../core/page-files.js";
|
|
4
|
+
import { loadRuntimeConfig } from "../core/runtime.js";
|
|
5
|
+
import { syncWorkspace } from "../core/sync.js";
|
|
6
|
+
import { getVaultQueueItem, processVaultQueueBatch } from "../core/vault-processing.js";
|
|
7
|
+
import { AppError } from "../utils/errors.js";
|
|
8
|
+
// Validate sync-command flag combinations.
// --vault-file only makes sense together with --process, and --process
// cannot be narrowed by --path. Throws AppError("config") when violated.
function assertValidSyncCommandOptions(options) {
    const wantsProcessing = Boolean(options.process);
    if (options.vaultFileId && !wantsProcessing) {
        throw new AppError("--vault-file requires --process.", "config");
    }
    const hasTargetPaths = Boolean(options.targetPaths) && options.targetPaths.length > 0;
    if (wantsProcessing && hasTargetPaths) {
        throw new AppError("--process cannot be combined with --path.", "config");
    }
}
|
|
16
|
+
// Guard that agent-driven queue processing is enabled and usable.
//
// Throws AppError("config") when the agent is disabled or when the
// configured batch size is not positive.
//
// Fix: the batch-size check previously rejected only exactly 0, while the
// error message requires "WIKI_AGENT_BATCH_SIZE > 0" — negative values
// slipped through. The check now matches the stated contract.
function assertQueueProcessingEnabled(env) {
    const agentSettings = resolveAgentSettings(env, { strict: true });
    if (!agentSettings.enabled) {
        throw new AppError("Queue processing requires WIKI_AGENT_ENABLED=true.", "config");
    }
    if (agentSettings.batchSize <= 0) {
        throw new AppError("Queue processing requires WIKI_AGENT_BATCH_SIZE > 0.", "config");
    }
}
|
|
25
|
+
// Wrap a queue-processing batch result with request metadata for output.
// Throws AppError("config") if the queue reports itself disabled.
function buildQueueProcessResult(queueResult, meta = {}) {
    if (!queueResult.enabled) {
        throw new AppError("Queue processing is disabled.", "config");
    }
    const requestedFileId = meta.requestedFileId ?? null;
    const currentStatus = meta.currentStatus ?? null;
    const noopReason = meta.noopReason ?? null;
    return {
        ...queueResult,
        enabled: true,
        requestedFileId,
        currentStatus,
        noopReason,
    };
}
|
|
37
|
+
// Build the zeroed result shape for a queue item that needs no work
// because it is already terminal ("done" or "skipped").
function buildQueueProcessNoopResult(fileId, status) {
    const noopReason = status === "done" ? "already_done" : "already_skipped";
    return {
        enabled: true,
        requestedFileId: fileId,
        processed: 0,
        done: 0,
        skipped: 0,
        errored: 0,
        items: [],
        currentStatus: status,
        noopReason,
    };
}
|
|
50
|
+
// Run a workspace sync with the given environment merged into the options.
export async function runSync(env = process.env, options = {}) {
    const request = { ...options, env };
    return syncWorkspace(request);
}
|
|
56
|
+
// Process the vault queue after a sync completes.
//
// Without a specific vaultFileId, runs a normal batch and lets the queue
// pick its own items. With one, claims and processes exactly that item,
// translating every possible queue state into a result, a noop result, or
// an AppError. The item's status is re-read after the batch because other
// workers may change it concurrently.
async function processQueueAfterSync(env, options) {
    assertQueueProcessingEnabled(env);
    if (!options.vaultFileId) {
        return buildQueueProcessResult(await processVaultQueueBatch(env, {
            workflowRunner: options.workflowRunner,
        }));
    }
    const requestedFileId = options.vaultFileId;
    const queueItem = getVaultQueueItem(env, requestedFileId);
    if (!queueItem) {
        throw new AppError(`Vault queue item not found: ${requestedFileId}`, "not_found");
    }
    if (queueItem.status === "processing") {
        // Another worker holds the item; refuse rather than double-process.
        throw new AppError(`Vault queue item is already processing: ${requestedFileId}`, "runtime", {
            fileId: requestedFileId,
            status: queueItem.status,
        });
    }
    if (queueItem.status === "done" || queueItem.status === "skipped") {
        return buildQueueProcessNoopResult(requestedFileId, queueItem.status);
    }
    // Run a batch restricted to just this item.
    const queueResult = await processVaultQueueBatch(env, {
        maxItems: 1,
        filterFileIds: [requestedFileId],
        workflowRunner: options.workflowRunner,
    });
    if (queueResult.processed > 0) {
        return buildQueueProcessResult(queueResult, {
            requestedFileId,
            currentStatus: getVaultQueueItem(env, requestedFileId)?.status ?? null,
        });
    }
    // Nothing was processed: re-read the item to explain why.
    const current = getVaultQueueItem(env, requestedFileId);
    if (!current) {
        throw new AppError(`Vault queue item not found: ${requestedFileId}`, "not_found");
    }
    if (current.status === "done" || current.status === "skipped") {
        // A concurrent worker finished it between our check and the batch run.
        return buildQueueProcessNoopResult(requestedFileId, current.status);
    }
    if (current.status === "processing") {
        throw new AppError(`Vault queue item is already processing: ${requestedFileId}`, "runtime", {
            fileId: requestedFileId,
            status: current.status,
        });
    }
    // Item is still claimable yet the batch did not take it: surface as failure.
    throw new AppError(`Failed to claim vault queue item for processing: ${requestedFileId}`, "runtime", {
        fileId: requestedFileId,
        status: current.status,
    });
}
|
|
106
|
+
// CLI entry point for `sync`: validate flag combinations, run the sync,
// then optionally process the vault queue (--process).
// `queueProcess` is null when processing was not requested.
export async function runSyncCommand(env = process.env, options = {}) {
    assertValidSyncCommandOptions(options);
    const { targetPaths, force, skipEmbedding } = options;
    const syncResult = await runSync(env, { targetPaths, force, skipEmbedding });
    if (!options.process) {
        return { ...syncResult, queueProcess: null };
    }
    const queueProcess = await processQueueAfterSync(env, {
        vaultFileId: options.vaultFileId,
        workflowRunner: options.workflowRunner,
    });
    return { ...syncResult, queueProcess };
}
|
|
127
|
+
// Create a page from its type's template, then sync just that page so the
// index/database immediately reflect it.
export async function createPage(env = process.env, options) {
    const { paths, config } = loadRuntimeConfig(env);
    // Validates that the page type exists (throws otherwise).
    getTemplate(config, options.type);
    const created = createPageFromTemplate(paths, config, {
        pageType: options.type,
        title: options.title,
        nodeId: options.nodeId ?? undefined,
    });
    await syncWorkspace({ env, targetPaths: [created.pageId] });
    return { created: created.pageId, filePath: created.filePath };
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
// Convert camelCase (also space/hyphen separated) input to snake_case.
export function camelToSnake(input) {
    const underscored = input.replace(/([a-z0-9])([A-Z])/g, "$1_$2");
    return underscored.replace(/[-\s]+/g, "_").toLowerCase();
}
// Convert snake_case to camelCase.
export function snakeToCamel(input) {
    return input.replace(/_([a-z0-9])/g, (_match, char) => char.toUpperCase());
}
// Convert kebab-case to camelCase.
export function kebabToCamel(input) {
    return input.replace(/-([a-z0-9])/g, (_match, char) => char.toUpperCase());
}
// Convert kebab-case to snake_case.
export function kebabToSnake(input) {
    return input.replaceAll("-", "_");
}
// Turn an identifier (camel/snake/kebab) into a human-readable label.
// Preserves the original letter casing.
export function humanizeFieldName(input) {
    const spaced = input
        .replace(/([a-z0-9])([A-Z])/g, "$1 $2")
        .replace(/[_-]+/g, " ");
    return spaced.replace(/\s+/g, " ").trim();
}
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
// Application-level error carrying a category and a derived CLI exit code.
export class AppError extends Error {
    type;
    exitCode;
    details;
    // type: error category ("runtime", "config", "not_found", ...).
    // Configuration errors exit with code 2, everything else with 1.
    // `details` is optional structured context surfaced in CLI output.
    constructor(message, type = "runtime", details) {
        super(message);
        this.name = "AppError";
        this.type = type;
        this.exitCode = type === "config" ? 2 : 1;
        this.details = details;
    }
}
// Coerce any thrown value into an AppError without losing the message.
// Existing AppErrors pass through unchanged.
export function asAppError(error) {
    if (error instanceof AppError) {
        return error;
    }
    const message = error instanceof Error ? error.message : String(error);
    return new AppError(message, "runtime");
}
// Throw an AppError of the given type when `condition` is falsy.
export function assertCondition(condition, message, type = "runtime") {
    if (condition) {
        return;
    }
    throw new AppError(message, type);
}
|
package/dist/utils/fs.js
ADDED
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
import { createHash } from "node:crypto";
|
|
2
|
+
import { copyFileSync, existsSync, mkdirSync, readdirSync, readFileSync, statSync, writeFileSync, } from "node:fs";
|
|
3
|
+
import path from "node:path";
|
|
4
|
+
// Create a directory and any missing parents; no-op if it already exists.
export function ensureDirSync(dirPath) {
    mkdirSync(dirPath, { recursive: true });
}
// Whether a file or directory exists at the given path.
export function pathExistsSync(filePath) {
    return existsSync(filePath);
}
// Read a file as UTF-8 text.
export function readTextFileSync(filePath) {
    return readFileSync(filePath, "utf8");
}
// Write UTF-8 text, creating parent directories as needed.
export function writeTextFileSync(filePath, content) {
    ensureDirSync(path.dirname(filePath));
    writeFileSync(filePath, content, "utf8");
}
// Copy a file unless the target already exists. Returns true when copied.
export function copyFileIfMissingSync(sourcePath, targetPath) {
    if (existsSync(targetPath)) {
        return false;
    }
    ensureDirSync(path.dirname(targetPath));
    copyFileSync(sourcePath, targetPath);
    return true;
}
// Recursively copy everything inside sourceDir into targetDir,
// overwriting files that already exist at the destination.
export function copyDirectoryContentsSync(sourceDir, targetDir) {
    ensureDirSync(targetDir);
    const entries = readdirSync(sourceDir, { withFileTypes: true });
    for (const entry of entries) {
        const from = path.join(sourceDir, entry.name);
        const to = path.join(targetDir, entry.name);
        if (entry.isDirectory()) {
            copyDirectoryContentsSync(from, to);
        }
        else {
            ensureDirSync(path.dirname(to));
            copyFileSync(from, to);
        }
    }
}
// True when the directory is missing or contains no entries.
export function isDirectoryEmptySync(dirPath) {
    if (!existsSync(dirPath)) {
        return true;
    }
    return readdirSync(dirPath).length === 0;
}
// Recursively list files under rootDir, optionally filtered by extension
// (e.g. ".md"). Result is sorted for deterministic ordering; a missing
// root yields an empty list.
export function listFilesRecursiveSync(rootDir, extension) {
    if (!existsSync(rootDir)) {
        return [];
    }
    const found = [];
    const walk = (currentDir) => {
        for (const entry of readdirSync(currentDir, { withFileTypes: true })) {
            const entryPath = path.join(currentDir, entry.name);
            if (entry.isDirectory()) {
                walk(entryPath);
            }
            else if (!extension || path.extname(entry.name) === extension) {
                found.push(entryPath);
            }
        }
    };
    walk(rootDir);
    return found.sort();
}
// Hex-encoded SHA-256 digest of a buffer.
export function sha256Buffer(buffer) {
    return createHash("sha256").update(buffer).digest("hex");
}
// Hex-encoded SHA-256 digest of a UTF-8 string.
export function sha256Text(text) {
    return createHash("sha256").update(text, "utf8").digest("hex");
}
// Hex-encoded SHA-256 digest of a file's contents.
export function sha256FileSync(filePath) {
    return sha256Buffer(readFileSync(filePath));
}
// Size in bytes and modification time in milliseconds for a file.
export function fileStatSync(filePath) {
    const stats = statSync(filePath);
    return { size: stats.size, mtimeMs: stats.mtimeMs };
}
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
import { AppError, asAppError } from "./errors.js";
|
|
2
|
+
// Write a pretty-printed JSON payload followed by a newline.
export function writeJson(payload, stream = process.stdout) {
    const serialized = JSON.stringify(payload, null, 2);
    stream.write(`${serialized}\n`);
}
// Write text, appending a trailing newline only when one is missing.
export function writeText(text, stream = process.stdout) {
    const normalized = text.endsWith("\n") ? text : `${text}\n`;
    stream.write(normalized);
}
// Report an error as JSON on stderr and terminate with its exit code.
export function handleCliError(error) {
    const appError = asAppError(error);
    const payload = {
        error: appError.message,
        type: appError.type,
    };
    if (appError.details !== undefined) {
        payload.details = appError.details;
    }
    writeJson(payload, process.stderr);
    process.exit(appError.exitCode);
}
// Render an object as "key: value" lines; undefined renders as empty.
export function formatKeyValueLines(values) {
    const lines = [];
    for (const [key, value] of Object.entries(values)) {
        lines.push(`${key}: ${value === undefined ? "" : String(value)}`);
    }
    return lines.join("\n");
}
// Coerce an output-format flag to "json" or "text" (the default).
export function parseOutputFormat(value) {
    return value === "json" ? "json" : "text";
}
// Like parseOutputFormat, but rejects anything other than text/json/empty.
export function ensureTextOrJson(value) {
    if (value && value !== "text" && value !== "json") {
        throw new AppError(`Unsupported format: ${value}`, "config");
    }
    return parseOutputFormat(value);
}
|
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
import { spawn } from "node:child_process";
|
|
2
|
+
import { openSync } from "node:fs";
|
|
3
|
+
import { AppError } from "./errors.js";
|
|
4
|
+
// Reconstruct how the current CLI was launched so children can be respawned
// with the same entry point. When run through the tsx loader, both the
// loader script and the target script must be forwarded.
export function getCurrentInvocation() {
    const argv1 = process.argv[1];
    const argv2 = process.argv[2];
    if (!argv1) {
        return { command: process.execPath, args: [] };
    }
    // NOTE(review): substring match on "tsx" is a heuristic — any script
    // path containing "tsx" triggers the loader branch.
    if (argv1.includes("tsx") && argv2) {
        return { command: process.execPath, args: [argv1, argv2] };
    }
    return { command: process.execPath, args: [argv1] };
}
|
|
21
|
+
// Re-launch the current CLI entry point as a detached child process.
// When options.logFile is set, stdout/stderr are appended to that file;
// otherwise all stdio is discarded. Returns the child's pid.
export function spawnDetachedCurrentProcess(extraArgs, options = {}) {
    const invocation = getCurrentInvocation();
    let stdio = "ignore";
    if (options.logFile) {
        // Open the log twice so stdout and stderr get independent descriptors.
        stdio = ["ignore", openSync(options.logFile, "a"), openSync(options.logFile, "a")];
    }
    const child = spawn(invocation.command, [...invocation.args, ...extraArgs], {
        detached: true,
        stdio,
        env: options.env ?? process.env,
    });
    child.unref();
    return child.pid;
}
|
|
34
|
+
// Open a file path or URL with the platform's default handler
// (macOS `open`, Windows `cmd /c start`, otherwise `xdg-open`).
// The child is detached so the CLI can exit immediately.
// Throws AppError("runtime") when spawning fails synchronously.
export function openTarget(target) {
    let command;
    let args;
    switch (process.platform) {
        case "darwin":
            command = "open";
            args = [target];
            break;
        case "win32":
            command = "cmd";
            args = ["/c", "start", "", target];
            break;
        default:
            command = "xdg-open";
            args = [target];
            break;
    }
    try {
        const child = spawn(command, args, {
            detached: true,
            stdio: "ignore",
            shell: false,
        });
        child.unref();
    }
    catch (error) {
        throw new AppError(`Failed to open target ${target}: ${error instanceof Error ? error.message : String(error)}`, "runtime");
    }
}
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
// Word-segmentation helpers for full-text indexing of mixed CJK/Latin text.
const SEGMENTER = new Intl.Segmenter("zh-Hans", { granularity: "word" });
const CJK_PATTERN = /[\p{Script=Han}\p{Script=Hiragana}\p{Script=Katakana}\p{Script=Hangul}]/u;
const QUERY_TOKEN_PATTERN = /[\p{Letter}\p{Number}_-]+/gu;
// Trim the ends and collapse internal whitespace runs to single spaces.
function normalizeWhitespace(text) {
    return text.trim().replace(/\s+/g, " ");
}
// Segment text into space-separated word tokens for FTS storage.
// Falls back to the normalized input when no word-like tokens are found.
export function segmentForFts(text) {
    const normalized = normalizeWhitespace(text);
    if (normalized === "") {
        return "";
    }
    const tokens = [];
    for (const segment of SEGMENTER.segment(normalized)) {
        if (!segment.isWordLike) {
            continue;
        }
        const token = segment.segment.trim();
        if (token !== "") {
            tokens.push(token);
        }
    }
    return tokens.length > 0 ? tokens.join(" ") : normalized;
}
// Normalize a user FTS query: segment only tokens that contain CJK text,
// leaving plain Latin/number tokens untouched.
export function normalizeFtsQuery(query) {
    const normalized = normalizeWhitespace(query);
    if (normalized === "") {
        return "";
    }
    return normalized.replace(QUERY_TOKEN_PATTERN, (token) => (CJK_PATTERN.test(token) ? segmentForFts(token) : token));
}
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
// Zero-pad a non-negative number to two digits.
function pad(value) {
    return String(value).padStart(2, "0");
}
// Format a date as local ISO-8601 with an explicit numeric UTC offset,
// e.g. "2024-05-01T09:30:00+08:00".
export function toOffsetIso(date = new Date()) {
    const offsetMinutes = -date.getTimezoneOffset();
    const sign = offsetMinutes < 0 ? "-" : "+";
    const absolute = Math.abs(offsetMinutes);
    const offsetHours = Math.floor(absolute / 60);
    const offsetRemainder = absolute % 60;
    const datePart = `${date.getFullYear()}-${pad(date.getMonth() + 1)}-${pad(date.getDate())}`;
    const timePart = `T${pad(date.getHours())}:${pad(date.getMinutes())}:${pad(date.getSeconds())}`;
    return `${datePart}${timePart}${sign}${pad(offsetHours)}:${pad(offsetRemainder)}`;
}
// Format a date as local "YYYY-MM-DD".
export function toDateOnly(date = new Date()) {
    return `${date.getFullYear()}-${pad(date.getMonth() + 1)}-${pad(date.getDate())}`;
}
// Build a sortable sync identifier from a local timestamp,
// e.g. "sync-2024-05-01-093000-123".
export function makeSyncId(date = new Date()) {
    const day = `${date.getFullYear()}-${pad(date.getMonth() + 1)}-${pad(date.getDate())}`;
    const clock = `${pad(date.getHours())}${pad(date.getMinutes())}${pad(date.getSeconds())}`;
    const millis = String(date.getMilliseconds()).padStart(3, "0");
    return `sync-${day}-${clock}-${millis}`;
}
// Return a new Date advanced by the given number of seconds.
export function addSeconds(date, seconds) {
    return new Date(date.getTime() + seconds * 1000);
}
|