structured-context 0.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +348 -0
- package/dist/commands/diagram.d.ts +5 -0
- package/dist/commands/diagram.js +12 -0
- package/dist/commands/docs.d.ts +1 -0
- package/dist/commands/docs.js +67 -0
- package/dist/commands/dump.d.ts +2 -0
- package/dist/commands/dump.js +6 -0
- package/dist/commands/plugins.d.ts +1 -0
- package/dist/commands/plugins.js +23 -0
- package/dist/commands/render.d.ts +6 -0
- package/dist/commands/render.js +35 -0
- package/dist/commands/schemas.d.ts +6 -0
- package/dist/commands/schemas.js +268 -0
- package/dist/commands/show.d.ts +4 -0
- package/dist/commands/show.js +7 -0
- package/dist/commands/spaces.d.ts +1 -0
- package/dist/commands/spaces.js +36 -0
- package/dist/commands/template-sync.d.ts +3 -0
- package/dist/commands/template-sync.js +13 -0
- package/dist/commands/validate-file.d.ts +28 -0
- package/dist/commands/validate-file.js +133 -0
- package/dist/commands/validate.d.ts +16 -0
- package/dist/commands/validate.js +349 -0
- package/dist/config.d.ts +38 -0
- package/dist/config.js +179 -0
- package/dist/constants.d.ts +6 -0
- package/dist/constants.js +6 -0
- package/dist/filter/augment-nodes.d.ts +23 -0
- package/dist/filter/augment-nodes.js +95 -0
- package/dist/filter/expand-include.d.ts +62 -0
- package/dist/filter/expand-include.js +181 -0
- package/dist/filter/filter-nodes.d.ts +21 -0
- package/dist/filter/filter-nodes.js +73 -0
- package/dist/filter/parse-expression.d.ts +20 -0
- package/dist/filter/parse-expression.js +60 -0
- package/dist/index.d.ts +3 -0
- package/dist/index.js +161 -0
- package/dist/integrations/miro/cache.d.ts +21 -0
- package/dist/integrations/miro/cache.js +55 -0
- package/dist/integrations/miro/client.d.ts +99 -0
- package/dist/integrations/miro/client.js +118 -0
- package/dist/integrations/miro/layout.d.ts +28 -0
- package/dist/integrations/miro/layout.js +72 -0
- package/dist/integrations/miro/styles.d.ts +11 -0
- package/dist/integrations/miro/styles.js +65 -0
- package/dist/integrations/miro/sync.d.ts +8 -0
- package/dist/integrations/miro/sync.js +347 -0
- package/dist/plugin-api.d.ts +12 -0
- package/dist/plugin-api.js +7 -0
- package/dist/plugins/index.d.ts +3 -0
- package/dist/plugins/index.js +3 -0
- package/dist/plugins/loader.d.ts +21 -0
- package/dist/plugins/loader.js +104 -0
- package/dist/plugins/markdown/index.d.ts +48 -0
- package/dist/plugins/markdown/index.js +51 -0
- package/dist/plugins/markdown/parse-embedded.d.ts +90 -0
- package/dist/plugins/markdown/parse-embedded.js +663 -0
- package/dist/plugins/markdown/read-space.d.ts +7 -0
- package/dist/plugins/markdown/read-space.js +89 -0
- package/dist/plugins/markdown/render-bullets.d.ts +2 -0
- package/dist/plugins/markdown/render-bullets.js +42 -0
- package/dist/plugins/markdown/render-mermaid.d.ts +2 -0
- package/dist/plugins/markdown/render-mermaid.js +57 -0
- package/dist/plugins/markdown/template-sync.d.ts +16 -0
- package/dist/plugins/markdown/template-sync.js +294 -0
- package/dist/plugins/markdown/util.d.ts +19 -0
- package/dist/plugins/markdown/util.js +80 -0
- package/dist/plugins/util.d.ts +60 -0
- package/dist/plugins/util.js +7 -0
- package/dist/read/read-space.d.ts +2 -0
- package/dist/read/read-space.js +22 -0
- package/dist/read/resolve-graph-edges.d.ts +11 -0
- package/dist/read/resolve-graph-edges.js +201 -0
- package/dist/read/wikilink-utils.d.ts +16 -0
- package/dist/read/wikilink-utils.js +38 -0
- package/dist/render/registry.d.ts +13 -0
- package/dist/render/registry.js +22 -0
- package/dist/render/render.d.ts +4 -0
- package/dist/render/render.js +28 -0
- package/dist/schema/evaluate-rule.d.ts +30 -0
- package/dist/schema/evaluate-rule.js +82 -0
- package/dist/schema/metadata-contract.d.ts +538 -0
- package/dist/schema/metadata-contract.js +115 -0
- package/dist/schema/schema-refs.d.ts +22 -0
- package/dist/schema/schema-refs.js +168 -0
- package/dist/schema/schema.d.ts +27 -0
- package/dist/schema/schema.js +378 -0
- package/dist/schema/validate-graph.d.ts +24 -0
- package/dist/schema/validate-graph.js +141 -0
- package/dist/schema/validate-rules.d.ts +10 -0
- package/dist/schema/validate-rules.js +51 -0
- package/dist/schemas/_ost_strict.json +81 -0
- package/dist/schemas/_sctx_base.json +72 -0
- package/dist/schemas/general.json +261 -0
- package/dist/schemas/generated/_structured_context_schema_meta.json +191 -0
- package/dist/schemas/knowledge_wiki.json +206 -0
- package/dist/schemas/strict_ost.json +97 -0
- package/dist/space-graph.d.ts +28 -0
- package/dist/space-graph.js +82 -0
- package/dist/types.d.ts +145 -0
- package/dist/types.js +0 -0
- package/docs/concepts.md +391 -0
- package/docs/config.md +140 -0
- package/docs/rules.md +120 -0
- package/docs/schemas.md +340 -0
- package/package.json +69 -0
- package/schemas/_ost_strict.json +81 -0
- package/schemas/_sctx_base.json +72 -0
- package/schemas/general.json +261 -0
- package/schemas/generated/_structured_context_schema_meta.json +191 -0
- package/schemas/knowledge_wiki.json +206 -0
- package/schemas/strict_ost.json +97 -0
|
@@ -0,0 +1,89 @@
|
|
|
1
|
+
import { readFileSync } from 'node:fs';
|
|
2
|
+
import { basename, join, resolve } from 'node:path';
|
|
3
|
+
import { Glob } from 'bun';
|
|
4
|
+
import matter from 'gray-matter';
|
|
5
|
+
import { extractSchemaTypeNames } from '../../schema/schema';
|
|
6
|
+
import { extractEmbeddedNodes, ON_A_PAGE_TYPES } from './parse-embedded';
|
|
7
|
+
import { applyFieldMap, coerceDates, inferTypeFromPath } from './util';
|
|
8
|
+
/**
 * Read a single "space on a page" markdown file and extract its embedded nodes.
 *
 * Throws when the file declares a frontmatter `type` that is not one of
 * ON_A_PAGE_TYPES, or when the schema lacks `$metadata.hierarchy.levels`
 * (required to interpret headings as hierarchy levels).
 *
 * Returns the extracted nodes, the headings that terminated parsing, and
 * page-level diagnostics (including how many nodes appeared before the first
 * recognized heading).
 */
export function readSpaceOnAPage(context) {
    const { space, resolvedSchemaPath, schema } = context;
    const metadata = schema.metadata;
    const absolutePath = resolve(space.path);
    const { data: frontmatter, content: pageBody } = matter(readFileSync(absolutePath, 'utf-8'));
    const declaredType = frontmatter.type;
    if (declaredType !== undefined && !ON_A_PAGE_TYPES.includes(declaredType)) {
        throw new Error(`Expected a space_on_a_page file but got type "${declaredType}" in ${absolutePath}. Use a directory path to validate a space containing typed node files.`);
    }
    const levels = metadata.hierarchy?.levels;
    if (!levels || levels.length === 0) {
        throw new Error(`Schema at ${resolvedSchemaPath} must define "$metadata.hierarchy.levels" to read a space_on_a_page file.`);
    }
    const { nodes, preambleNodeCount, terminatedHeadings } = extractEmbeddedNodes(pageBody, {
        pageTitle: basename(absolutePath, '.md'),
        pageType: 'space_on_a_page',
        metadata,
    });
    return {
        nodes,
        parseIgnored: terminatedHeadings,
        diagnostics: { kind: 'page', preambleNodeCount },
    };
}
|
|
30
|
+
/**
 * Read every markdown file under a space directory (recursively, following
 * symlinks) and build the node list for validation/rendering.
 *
 * - Files inside the configured template directory are excluded.
 * - Files without frontmatter are recorded in `skipped`.
 * - Files whose type cannot be determined (even via type inference) are
 *   recorded in `nonSpace`.
 * - "on a page" files are skipped unless `options.includeOnAPageFiles` is set.
 * - Non-page node files may also contribute embedded nodes parsed from their
 *   markdown body; headings that terminated embedded parsing are recorded as
 *   `"<file> > <heading>"` entries in `nonSpace`.
 *
 * Returns `{ nodes, parseIgnored, diagnostics }` where `parseIgnored` is the
 * concatenation of `skipped` and `nonSpace`.
 */
export async function readSpaceDirectory(context, options) {
    const { space, schema: { metadata }, } = context;
    const directory = resolve(space.path);
    const mdCfg = context.pluginConfig;
    const fieldMap = mdCfg.fieldMap;
    const templateDir = mdCfg.templateDir;
    const absoluteTemplateDir = templateDir ? resolve(templateDir) : undefined;
    const typeInferenceCfg = mdCfg.typeInference;
    // Only compute schema type names when type inference is enabled.
    const knownTypes = typeInferenceCfg?.mode !== 'off' ? extractSchemaTypeNames(context.schema, context.schemaRefRegistry) : undefined;
    const files = await Array.fromAsync(new Glob('**/*.md').scan({ cwd: directory, followSymlinks: true }));
    const nodes = [];
    const skipped = [];
    const nonSpace = [];
    // True when absolutePath is the template dir itself or a path inside it.
    // A bare startsWith() would also match sibling directories that merely
    // share the prefix (e.g. "templates-old" when templateDir is "templates"),
    // silently dropping their node files — so require a path-separator (or
    // end-of-string) boundary after the prefix.
    const isInTemplateDir = (absolutePath) => {
        if (!absoluteTemplateDir || !absolutePath.startsWith(absoluteTemplateDir)) {
            return false;
        }
        const boundary = absolutePath.charAt(absoluteTemplateDir.length);
        return boundary === '' || boundary === '/' || boundary === '\\';
    };
    for (const file of files) {
        const absoluteFilePath = resolve(directory, file);
        if (isInTemplateDir(absoluteFilePath)) {
            continue; // templates are scaffolding, not space content
        }
        const content = readFileSync(join(directory, file), 'utf-8');
        const parsed = matter(content);
        if (!parsed.data || Object.keys(parsed.data).length === 0) {
            // No frontmatter at all — record and move on, not an error.
            skipped.push(file);
            continue;
        }
        // Remap file-local field names to canonical names, then normalize
        // YAML-parsed Date objects into ISO date strings.
        const data = coerceDates(applyFieldMap(parsed.data, fieldMap));
        if (!data.type && typeInferenceCfg && knownTypes) {
            data.type = inferTypeFromPath(file, typeInferenceCfg, knownTypes, context.schema.metadata.typeAliases);
        }
        if (!data.type) {
            nonSpace.push(file);
            continue;
        }
        if (ON_A_PAGE_TYPES.includes(data.type) && !options?.includeOnAPageFiles) {
            // "on a page" files are handled by readSpaceOnAPage unless the
            // caller explicitly asks for them here.
            continue;
        }
        const pageType = data.type;
        const fileBase = basename(file, '.md');
        const title = data.title ?? fileBase;
        nodes.push({
            label: file,
            title,
            schemaData: { title, ...data },
            // A node can be wiki-linked by its title or its file basename.
            linkTargets: [title, fileBase],
            type: pageType,
        });
        if (!ON_A_PAGE_TYPES.includes(pageType)) {
            // Regular node files may embed child nodes in their body.
            const { nodes: embedded, terminatedHeadings } = extractEmbeddedNodes(parsed.content, {
                pageTitle: fileBase,
                pageType,
                metadata,
                fieldMap,
            });
            nodes.push(...embedded);
            for (const heading of terminatedHeadings) {
                nonSpace.push(`${file} > ${heading}`);
            }
        }
    }
    return { nodes, parseIgnored: [...skipped, ...nonSpace], diagnostics: { kind: 'directory' } };
}
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
/**
 * Render the space graph as an indented bullet-list outline.
 *
 * Hierarchy roots are rendered depth-first; a node already rendered elsewhere
 * is re-listed with a `(*)` marker only when it has children (signalling a
 * collapsed subtree). Orphans (nodes missing a parent) and non-hierarchy
 * nodes get their own labelled sections at the end.
 */
export function renderBullets(graph) {
    const { hierarchyRoots, orphans, nonHierarchy, hierarchyChildren } = graph;
    const out = [];
    const visited = new Set();
    const emit = (node, depth) => {
        const prefix = ' '.repeat(depth);
        const childNodes = hierarchyChildren.get(node.title) ?? [];
        if (visited.has(node.title)) {
            // Already rendered under another parent; only re-list (marked)
            // when a subtree is being collapsed here.
            if (childNodes.length > 0) {
                out.push(`${prefix}- ${node.resolvedType}: ${node.title} (*)`);
            }
            return;
        }
        visited.add(node.title);
        out.push(`${prefix}- ${node.resolvedType}: ${node.title}`);
        childNodes.forEach((child) => emit(child, depth + 1));
    };
    hierarchyRoots.forEach((root) => emit(root, 0));
    if (orphans.length > 0) {
        out.push('', 'Orphans (missing parent):');
        orphans.forEach((orphan) => emit(orphan, 0));
    }
    if (nonHierarchy.length > 0) {
        out.push('', 'Other (not in hierarchy):');
        for (const node of nonHierarchy) {
            out.push(` - ${node.resolvedType}: ${node.title}`);
        }
    }
    return out.join('\n');
}
|
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
/**
 * Escape a string for use inside a double-quoted Mermaid node label.
 * A raw `"` would terminate the label; Mermaid accepts HTML entities inside
 * quoted strings, so encode it as `&quot;`. (The previous implementation
 * replaced `"` with `"` — a no-op that produced broken diagrams for any
 * title containing a quote.)
 */
function escapeMermaidString(str) {
    return str.replace(/"/g, '&quot;');
}
|
|
4
|
+
// Mermaid node identifiers tolerate only word characters and dashes;
// collapse every other character to an underscore.
function safeNodeId(id) {
    return id.replace(/[^\w-]/g, '_');
}
|
|
7
|
+
/**
 * Render the space graph as a Mermaid `graph TD` flowchart.
 *
 * Emits a fixed palette of classDef styles (per type name and per status
 * name), then walks the hierarchy roots depth-first emitting one node
 * definition per unique title plus parent-->child edges; edges are drawn only
 * to children that are part of the hierarchy. Orphans are grouped into an
 * "Orphans" subgraph at the end. Returns the Mermaid source as a string.
 */
export function renderMermaid(graph) {
    const { hierarchyRoots, orphans, hierarchyChildren: children, hierarchyTitles: hierarchyNodeSet } = graph;
    let mmd = 'graph TD\n';
    // Per-type styles.
    mmd += ' classDef vision fill:#ff9999,stroke:#ff0000,stroke-width:2px\n';
    mmd += ' classDef mission fill:#99ccff,stroke:#0066cc,stroke-width:2px\n';
    mmd += ' classDef goal fill:#99ff99,stroke:#00cc00,stroke-width:2px\n';
    mmd += ' classDef opportunity fill:#ffcc99,stroke:#cc9900,stroke-width:2px\n';
    mmd += ' classDef solution fill:#cc99ff,stroke:#6600cc,stroke-width:2px\n';
    // Per-status styles (dashed outline for the tentative statuses).
    mmd += ' classDef identified fill:#f0f0f0,stroke:#999999,stroke-dasharray: 5 5\n';
    mmd += ' classDef wondering fill:#fff0cc,stroke:#cccc00,stroke-dasharray: 5 5\n';
    mmd += ' classDef exploring fill:#ffcc99,stroke:#cc9900,stroke-dasharray: 5 5\n';
    mmd += ' classDef active fill:#99ff99,stroke:#00cc00,stroke-width:2px\n';
    mmd += ' classDef paused fill:#ffcc99,stroke:#cc9900,stroke-width:2px\n';
    mmd += ' classDef completed fill:#ccccff,stroke:#6666cc,stroke-width:2px\n';
    mmd += ' classDef archived fill:#e0e0e0,stroke:#999999,stroke-width:2px\n';
    // Titles already emitted; prevents duplicate node definitions when a node
    // is reachable from more than one parent.
    const addedNodes = new Set();
    // Emit one node definition, then edges + recursion for its hierarchy children.
    function addNodeAndChildren(node) {
        const nodeId = node.title;
        if (addedNodes.has(nodeId))
            return;
        addedNodes.add(nodeId);
        const type = node.resolvedType;
        const status = node.schemaData.status;
        const priority = node.schemaData.priority;
        // Priority, when present, is appended to the visible label.
        const label = priority ? `${nodeId} (${priority})` : nodeId;
        // NOTE(review): the classDefs above are declared per single name
        // ("goal", "active", …) but this assigns "<type>_<status>" — and when
        // status is absent it yields e.g. "goal_undefined". Confirm whether a
        // matching classDef is expected elsewhere or Mermaid's tolerance of
        // unknown classes is being relied upon.
        const className = `${type}_${status}`;
        const safeId = safeNodeId(nodeId);
        const escapedLabel = escapeMermaidString(label);
        mmd += ` ${safeId}["${escapedLabel}"]:::${className}\n`;
        const nodeChildren = children.get(nodeId) ?? [];
        for (const child of nodeChildren) {
            const childTitle = child.title;
            // Only draw edges into nodes that belong to the hierarchy.
            if (hierarchyNodeSet.has(childTitle)) {
                const safeChildId = safeNodeId(childTitle);
                mmd += ` ${safeId} --> ${safeChildId}\n`;
                addNodeAndChildren(child);
            }
        }
    }
    for (const root of hierarchyRoots) {
        addNodeAndChildren(root);
    }
    if (orphans.length > 0) {
        mmd += '\n subgraph Orphans\n';
        for (const orphan of orphans) {
            addNodeAndChildren(orphan);
        }
        mmd += ' end\n';
    }
    return mmd;
}
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
import type { AnySchemaObject } from 'ajv';
|
|
2
|
+
import type { HierarchyLevel, Relationship, SchemaWithMetadata } from '../../plugin-api';
|
|
3
|
+
import type { PluginContext, TemplateSyncOptions } from '../util';
|
|
4
|
+
/**
 * Template-facing view of one schema `oneOf` type variant: the fields a
 * template must or may contain, plus the relationship and hierarchy stubs to
 * scaffold for the type.
 */
export interface TypeVariant {
    /** Required field names for this type (excluding `type` and derived fields). */
    required: string[];
    /** Known fields for this type that are not required. */
    optional: string[];
    /** Merged property definitions, keyed by canonical field name. */
    properties: Record<string, AnySchemaObject>;
    /** First schema `examples` entry for the variant; seeds template frontmatter. */
    example: Record<string, string | number | boolean>;
    /** Variant-level description from the schema, if any. */
    description: string | undefined;
    /** Relationships in which this type is the parent. */
    relationships: Relationship[];
    /** Hierarchy child levels where this type is the parent and the level has templateFormat/matchers. */
    hierarchyChildren: HierarchyLevel[];
}
/** Extract the map of node type name → {@link TypeVariant} from the schema's `oneOf` variants. */
export declare function getTypeVariants(schema: SchemaWithMetadata, schemaRefRegistry: Map<string, AnySchemaObject>): Map<string, TypeVariant>;
/** Generate the full markdown template for a node type: commented frontmatter plus stub sections. */
export declare function generateNewContent(nodeType: string, variant: TypeVariant, schema: SchemaWithMetadata, schemaRefRegistry: Map<string, AnySchemaObject>, allVariants: Map<string, TypeVariant>, body?: string, fieldMap?: Record<string, string>): string;
/** Synchronize template files on disk with the schema's type variants. */
export declare function templateSync(context: PluginContext, options: TemplateSyncOptions): Promise<true | null>;
|
|
@@ -0,0 +1,294 @@
|
|
|
1
|
+
import { readFileSync, writeFileSync } from 'node:fs';
|
|
2
|
+
import { join } from 'node:path';
|
|
3
|
+
import { Glob } from 'bun';
|
|
4
|
+
import matter from 'gray-matter';
|
|
5
|
+
import yaml from 'js-yaml';
|
|
6
|
+
import { mergeVariantProperties, resolveRef } from '../../schema/schema-refs';
|
|
7
|
+
import { invertFieldMap } from './util';
|
|
8
|
+
// Fields derived from the filesystem — present at validation time but not written to frontmatter
// (getTypeVariants strips them from the required/optional lists so templates never emit them).
const DERIVED_FIELDS = new Set(['title', 'content']);
|
|
10
|
+
// Render an enum property's allowed values as an "a|b|c" placeholder;
// empty string when the definition has no enum.
function enumPlaceholder(def) {
    const allowed = def.enum;
    return allowed ? allowed.join('|') : '';
}
|
|
13
|
+
// Copy an example object, replacing each value whose (ref-resolved) property
// definition is an enum with an "a|b|c" placeholder listing the allowed values.
function withEnumPlaceholders(example, properties, schema, schemaRefRegistry) {
    const result = {};
    for (const [key, value] of Object.entries(example)) {
        const def = resolveRef(properties[key], schema, schemaRefRegistry);
        result[key] = def && 'enum' in def ? enumPlaceholder(def) : value;
    }
    return result;
}
|
|
19
|
+
// Build a commented-out YAML hint line for an optional field, e.g.
// `# status: identified|active # Current lifecycle state`.
// Placeholder value: enum list for enums, the rounded-up midpoint of the
// allowed range (defaults 1..5) for integers, `[]` for arrays, `""` otherwise.
function commentedHint(fieldName, propDef, schema, schemaRefRegistry) {
    const def = resolveRef(propDef, schema, schemaRefRegistry);
    const placeholder = (() => {
        if (def?.enum) {
            return enumPlaceholder(def);
        }
        switch (def?.type) {
            case 'integer':
                return String(Math.ceil(((def.minimum ?? 1) + (def.maximum ?? 5)) / 2));
            case 'array':
                return '[]';
            default:
                return '""';
        }
    })();
    const suffix = def?.description ? ` # ${def.description}` : '';
    return `# ${fieldName}: ${placeholder}${suffix}`;
}
|
|
38
|
+
/**
 * Build a map of node type name → TypeVariant from the schema's `oneOf`
 * variants. Skips the page-level pseudo-types ("dashboard", "ost_on_a_page")
 * and any variant lacking an `examples[0]` entry, since templates are seeded
 * from the example.
 */
export function getTypeVariants(schema, schemaRefRegistry) {
    const map = new Map();
    for (const variant of schema.oneOf) {
        const typeName = variant.properties?.type?.const;
        if (!typeName || typeName === 'dashboard' || typeName === 'ost_on_a_page')
            continue;
        if (!variant.examples?.[0])
            continue;
        // NOTE(review): assumes every surviving variant declares `required`;
        // a variant without it would throw here — confirm the schema loader
        // guarantees this.
        const required = variant.required.filter((k) => k !== 'type' && !DERIVED_FIELDS.has(k));
        // Merge $ref'd/base properties, then drop `type` and derived fields.
        const allProperties = Object.fromEntries(Object.entries(mergeVariantProperties(variant, schema, schemaRefRegistry).properties).filter(([k]) => k !== 'type' && !DERIVED_FIELDS.has(k)));
        const optional = Object.keys(allProperties).filter((k) => !required.includes(k));
        const example = variant.examples[0];
        const description = variant.description;
        const allRelationships = schema.metadata.relationships ?? [];
        // Only relationships where this type is the parent get stub sections.
        const typeRelationships = allRelationships.filter((rel) => rel.parent === typeName);
        const allLevels = schema.metadata.hierarchy?.levels ?? [];
        const typeIdx = allLevels.findIndex((l) => l.type === typeName);
        // Hierarchy levels below this type that carry template config
        // (templateFormat + matchers) — used to scaffold child stubs.
        const hierarchyChildren = typeIdx !== -1 && typeIdx < allLevels.length - 1
            ? allLevels.slice(typeIdx + 1).filter((l) => l.templateFormat && l.matchers)
            : [];
        map.set(typeName, {
            required,
            optional,
            properties: allProperties,
            example,
            description,
            relationships: typeRelationships,
            hierarchyChildren,
        });
    }
    return map;
}
|
|
70
|
+
/**
 * Generate the full markdown template for a node type: commented frontmatter
 * seeded from the schema example (with enum placeholders and per-field
 * description comments), hint lines for optional fields absent from the
 * example, and stub sections for relationships / hierarchy children not
 * already present in `body`.
 *
 * `fieldMap` maps file field names → canonical names; output frontmatter uses
 * file names, while property lookups use canonical names. Returns the full
 * file content ("---\n<frontmatter>\n---<body>").
 */
export function generateNewContent(nodeType, variant, schema, schemaRefRegistry, allVariants, body = '\nTODO\n', fieldMap = {}) {
    const { example, optional, properties, description } = variant;
    const exampleKeys = new Set(Object.keys(example));
    // fieldMap is file→canonical; invert once to get canonical→file for template output
    const canonicalToFile = invertFieldMap(fieldMap);
    const toFileKey = (k) => canonicalToFile[k] ?? k;
    const toCanonicalKey = (k) => fieldMap[k] ?? k;
    const exampleWithPlaceholders = withEnumPlaceholders(example, properties, schema, schemaRefRegistry);
    const remappedExample = Object.fromEntries(Object.entries(exampleWithPlaceholders).map(([k, v]) => [toFileKey(k), v]));
    let frontmatterYaml = yaml.dump(remappedExample, { lineWidth: -1 }).trim();
    // Append property descriptions as comments
    // YAML lines use file field names; look up properties using canonical key.
    const lines = frontmatterYaml.split('\n');
    const commentedLines = lines.map((line) => {
        // Only lines that start a `key:` mapping get a description appended.
        const match = line.match(/^([^:]+):/);
        if (match) {
            const fileKey = match[1].trim();
            const canonicalKey = toCanonicalKey(fileKey);
            const propDef = resolveRef(properties[canonicalKey], schema, schemaRefRegistry);
            const propDescription = propDef?.description;
            if (propDescription) {
                return `${line} # ${propDescription}`;
            }
        }
        return line;
    });
    frontmatterYaml = `# Template for a \`${nodeType}\`${schema.title ? ` from schema: ${schema.title}` : ''}\n${description ? `# ${description}\n` : ''}${commentedLines.join('\n')}`;
    // Optional fields not present in the example become commented-out hints.
    const hints = optional
        .filter((field) => !exampleKeys.has(field))
        .map((field) => commentedHint(toFileKey(field), properties[field], schema, schemaRefRegistry));
    const newFrontmatter = hints.length > 0 ? `${frontmatterYaml}\n${hints.join('\n')}` : frontmatterYaml;
    let relationshipStubs = '';
    // Stub sections for schema relationships where this type is the parent;
    // skipped when the body already contains the matcher heading.
    for (const rel of variant.relationships) {
        const matcher = rel.matchers?.[0] || rel.type;
        if (body.includes(`### ${matcher}`)) {
            continue;
        }
        const childVariant = allVariants.get(rel.type);
        const childExample = childVariant?.example || {};
        if (rel.templateFormat === 'table' && rel.embeddedTemplateFields) {
            // Markdown table: header row, separator row, one example row.
            const header = `| ${rel.embeddedTemplateFields.join(' | ')} |`;
            const sep = `| ${rel.embeddedTemplateFields.map(() => '---|').join('')}`;
            const exampleValues = rel.embeddedTemplateFields.map((field) => {
                const val = childExample[field];
                return val !== undefined ? String(val) : ' ';
            });
            const exampleRow = `| ${exampleValues.join(' | ')} |`;
            relationshipStubs += `\n### ${matcher}\n\n${header}\n${sep}\n${exampleRow}\n`;
        }
        else if (rel.templateFormat === 'heading') {
            let stub = `\n### ${matcher}\n\n`;
            if (childVariant) {
                // Include inline fields from example if it's a heading
                const fields = Object.entries(childExample)
                    .filter(([k]) => k !== 'type' && k !== 'title' && k !== 'parent')
                    .map(([k, v]) => `[${k}:: ${v}]`)
                    .join(' ');
                stub += `${fields}${fields ? ' ' : ''}TODO: Describe ${rel.type}\n`;
            }
            else {
                stub += `TODO: Describe ${rel.type}\n`;
            }
            relationshipStubs += stub;
        }
        else if (rel.templateFormat === 'list') {
            // List item with Dataview-style inline [key:: value] fields.
            let stub = `\n### ${matcher}\n\n- [type:: ${rel.type}] `;
            if (childVariant) {
                const fields = Object.entries(childExample)
                    .filter(([k]) => k !== 'type' && k !== 'title' && k !== 'parent')
                    .map(([k, v]) => `[${k}:: ${v}]`)
                    .join(' ');
                stub += `${fields}${fields ? ' ' : ''}TODO`;
            }
            else {
                stub += 'TODO';
            }
            relationshipStubs += `${stub}\n`;
        }
    }
    // Same stub generation for hierarchy child levels (table/list formats only).
    for (const level of variant.hierarchyChildren) {
        const matcher = level.matchers?.[0] || level.type;
        if (body.includes(`### ${matcher}`)) {
            continue;
        }
        const childVariant = allVariants.get(level.type);
        const childExample = childVariant?.example || {};
        if (level.templateFormat === 'table' && level.embeddedTemplateFields) {
            const header = `| ${level.embeddedTemplateFields.join(' | ')} |`;
            const sep = `| ${level.embeddedTemplateFields.map(() => '---|').join('')}`;
            const exampleValues = level.embeddedTemplateFields.map((field) => {
                const val = childExample[field];
                return val !== undefined ? String(val) : ' ';
            });
            const exampleRow = `| ${exampleValues.join(' | ')} |`;
            relationshipStubs += `\n### ${matcher}\n\n${header}\n${sep}\n${exampleRow}\n`;
        }
        else if (level.templateFormat === 'list') {
            let stub = `\n### ${matcher}\n\n- [type:: ${level.type}] `;
            if (childVariant) {
                const fields = Object.entries(childExample)
                    .filter(([k]) => k !== 'type' && k !== 'title' && k !== 'parent')
                    .map(([k, v]) => `[${k}:: ${v}]`)
                    .join(' ');
                stub += `${fields}${fields ? ' ' : ''}TODO`;
            }
            else {
                stub += 'TODO';
            }
            relationshipStubs += `${stub}\n`;
        }
    }
    // New stubs go before the existing body text.
    const finalBody = relationshipStubs ? `\n${relationshipStubs}${body.trimStart()}` : body;
    return `---\n${newFrontmatter}\n---${finalBody}`;
}
|
|
184
|
+
/**
 * Synchronize the markdown template files in the configured `templateDir`
 * with the schema's type variants: regenerate each matched template, warn on
 * filename-convention mismatches, and (with `options.createMissing`) scaffold
 * templates for types that have none. `options.dryRun` previews changes
 * (with a simple line diff) without writing. Exits the process when
 * `templateDir` is not configured. Resolves to `true` on completion.
 */
export async function templateSync(context, options) {
    const { templateDir, templatePrefix = '', fieldMap = {} } = context.pluginConfig;
    if (!templateDir) {
        console.error('Error: templateDir not set in markdown config for this space');
        process.exit(1);
    }
    const { schema, schemaRefRegistry } = context;
    const typeVariants = getTypeVariants(schema, schemaRefRegistry);
    const matchedTypes = new Set();
    const files = await Array.fromAsync(new Glob('*.md').scan({ cwd: templateDir, absolute: true }));
    const dryRun = options.dryRun ?? false;
    let filesModified = 0;
    let filesCreated = 0;
    console.log(`\n🔄 Template Sync`);
    console.log('━'.repeat(50));
    if (dryRun)
        console.log('(dry run — no files will be modified)\n');
    for (const file of files.sort()) {
        const filename = file.split('/').pop();
        const content = readFileSync(file, 'utf-8');
        const fmMatch = content.match(/^---\n[\s\S]*?\n---/);
        if (!fmMatch) {
            // No frontmatter — not a template we manage.
            continue;
        }
        const body = content.slice(fmMatch[0].length);
        const parsed = matter(content);
        const nodeType = parsed.data.type;
        if (!nodeType) {
            continue;
        }
        const variant = typeVariants.get(nodeType);
        if (!variant) {
            continue;
        }
        matchedTypes.add(nodeType);
        // Warning if filename doesn't match convention.
        // (Fixed: these log lines previously printed the literal "$(unknown)"
        // instead of interpolating the actual file name.)
        const expectedFilename = `${templatePrefix}${nodeType}.md`;
        if (filename !== expectedFilename) {
            console.log(`⚠ ${filename}: type "${nodeType}" should be named "${expectedFilename}"`);
        }
        const newContent = generateNewContent(nodeType, variant, schema, schemaRefRegistry, typeVariants, body, fieldMap);
        if (newContent === content) {
            console.log(`✓ ${filename}`);
        }
        else {
            console.log(`📝 ${filename}: updated`);
            if (dryRun) {
                // Simple line-based diff for preview
                const oldLines = content.split('\n');
                const newLines = newContent.split('\n');
                const maxLines = Math.max(oldLines.length, newLines.length);
                let inFrontmatter = true;
                for (let i = 0; i < maxLines; i++) {
                    const isFmEnd = inFrontmatter && oldLines[i] === '---' && i > 0;
                    if (isFmEnd)
                        inFrontmatter = false;
                    if (oldLines[i] !== newLines[i]) {
                        // Red removal / green addition via ANSI escapes.
                        if (i < oldLines.length)
                            console.log(`\x1b[31m- ${oldLines[i] || ''}\x1b[0m`);
                        if (i < newLines.length)
                            console.log(`\x1b[32m+ ${newLines[i] || ''}\x1b[0m`);
                    }
                    else if (i < 15) {
                        // Show some context but not too much
                        console.log(`  ${oldLines[i]}`);
                    }
                    if (isFmEnd) {
                        // After frontmatter ends, only continue if next line differs
                        if (i + 1 >= maxLines || oldLines[i + 1] === newLines[i + 1]) {
                            break;
                        }
                    }
                }
                console.log('');
            }
            if (!dryRun) {
                writeFileSync(file, newContent);
                filesModified++;
            }
        }
    }
    // Handle missing types
    const missingTypes = Array.from(typeVariants.keys()).filter((t) => !matchedTypes.has(t));
    if (missingTypes.length > 0) {
        console.log(`\nMissing templates for: ${missingTypes.join(', ')}`);
        if (options.createMissing) {
            for (const type of missingTypes) {
                const variant = typeVariants.get(type);
                // Default body ('\nTODO\n') is used for scaffolded templates.
                const newContent = generateNewContent(type, variant, schema, schemaRefRegistry, typeVariants, undefined, fieldMap);
                const newFilename = `${templatePrefix}${type}.md`;
                const newFilePath = join(templateDir, newFilename);
                console.log(`✨ ${newFilename}: creating`);
                if (!dryRun) {
                    writeFileSync(newFilePath, newContent);
                    filesCreated++;
                }
            }
        }
        else {
            console.log('(use --create-missing to scaffold them)');
        }
    }
    console.log(`\n${'━'.repeat(50)}`);
    if (dryRun) {
        console.log('No files modified or created (dry run)\n');
    }
    else {
        console.log(`${filesModified} file(s) updated, ${filesCreated} file(s) created\n`);
    }
    return true;
}
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import type { TypeInferenceConfig } from '.';
/**
 * Infer a node's type from its relative file path. When `config.folderMap`
 * is set, the longest matching folder-prefix entry wins (its value resolved
 * through `typeAliases` or checked against `knownTypes`); otherwise the
 * immediate parent folder name is matched case-insensitively against known
 * type names, then aliases. Returns undefined when nothing applies or the
 * mode is 'off'.
 */
export declare function inferTypeFromPath(filePath: string, config: TypeInferenceConfig, knownTypes: Set<string>, typeAliases: Record<string, string> | undefined): string | undefined;
/**
 * Coerce Date objects in frontmatter/YAML data to ISO date strings (YYYY-MM-DD).
 * gray-matter and js-yaml parse unquoted ISO dates (e.g. `date: 2026-03-31`) as
 * JavaScript Date objects, which are not valid JSON and fail string type validation.
 */
export declare function coerceDates(data: Record<string, unknown>): Record<string, unknown>;
/**
 * Apply field remapping to a data object.
 * Renames keys according to fieldMap (file field name → canonical field name).
 * Fields not in the map are passed through unchanged.
 */
export declare function applyFieldMap(data: Record<string, unknown>, fieldMap: Record<string, string> | undefined): Record<string, unknown>;
/**
 * Invert a fieldMap (file→canonical) to produce a reverse map (canonical→file).
 * Used for write operations (e.g. template-sync) to translate back to file field names.
 */
export declare function invertFieldMap(fieldMap: Record<string, string>): Record<string, string>;
|
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
import { posix } from 'node:path';
|
|
2
|
+
/**
 * Infer a node's type from its relative file path.
 *
 * With `config.folderMap`: normalize map keys to forward slashes without
 * trailing slashes, pick the longest key that equals the file's directory or
 * is an ancestor of it, then resolve the mapped value via `typeAliases` or
 * `knownTypes` (throwing when it resolves to neither). Without a folderMap:
 * match the immediate parent folder name case-insensitively against known
 * type names, then against aliases. Returns undefined for top-level files,
 * unmatched folders, or when the mode is 'off'.
 */
export function inferTypeFromPath(filePath, config, knownTypes, typeAliases) {
    if (config.mode === 'off') {
        return undefined;
    }
    const dir = posix.dirname(filePath.replace(/\\/g, '/'));
    if (dir === '.') {
        // Top-level files have no folder to infer from.
        return undefined;
    }
    if (config.folderMap) {
        const normalizedMap = Object.fromEntries(Object.entries(config.folderMap).map(([rawKey, mapped]) => [rawKey.replace(/\\/g, '/').replace(/\/+$/, ''), mapped]));
        let bestKey;
        for (const key of Object.keys(normalizedMap)) {
            const isAncestor = dir === key || dir.startsWith(`${key}/`);
            if (isAncestor && (!bestKey || key.length > bestKey.length)) {
                bestKey = key;
            }
        }
        if (!bestKey) {
            return undefined;
        }
        const mappedValue = normalizedMap[bestKey];
        const aliased = typeAliases?.[mappedValue];
        if (aliased !== undefined) {
            return aliased;
        }
        if (knownTypes.has(mappedValue)) {
            return mappedValue;
        }
        throw new Error(`typeInference.folderMap: "${mappedValue}" does not resolve to a known type or alias (from key "${bestKey}")`);
    }
    // No folderMap: case-insensitive match of the leaf folder name against
    // known type names first, then against aliases.
    const leaf = posix.basename(dir).toLowerCase();
    for (const candidate of knownTypes) {
        if (candidate.toLowerCase() === leaf) {
            return candidate;
        }
    }
    for (const [alias, canonical] of Object.entries(typeAliases ?? {})) {
        if (alias.toLowerCase() === leaf) {
            return canonical;
        }
    }
    return undefined;
}
|
|
40
|
+
/**
|
|
41
|
+
* Coerce Date objects in frontmatter/YAML data to ISO date strings (YYYY-MM-DD).
|
|
42
|
+
* gray-matter and js-yaml parse unquoted ISO dates (e.g. `date: 2026-03-31`) as
|
|
43
|
+
* JavaScript Date objects, which are not valid JSON and fail string type validation.
|
|
44
|
+
*/
|
|
45
|
+
export function coerceDates(data) {
|
|
46
|
+
const result = {};
|
|
47
|
+
for (const [key, value] of Object.entries(data)) {
|
|
48
|
+
if (value instanceof Date) {
|
|
49
|
+
result[key] = value.toISOString().slice(0, 10);
|
|
50
|
+
}
|
|
51
|
+
else if (value !== null && typeof value === 'object' && !Array.isArray(value)) {
|
|
52
|
+
result[key] = coerceDates(value);
|
|
53
|
+
}
|
|
54
|
+
else {
|
|
55
|
+
result[key] = value;
|
|
56
|
+
}
|
|
57
|
+
}
|
|
58
|
+
return result;
|
|
59
|
+
}
|
|
60
|
+
/**
|
|
61
|
+
* Apply field remapping to a data object.
|
|
62
|
+
* Renames keys according to fieldMap (file field name → canonical field name).
|
|
63
|
+
* Fields not in the map are passed through unchanged.
|
|
64
|
+
*/
|
|
65
|
+
export function applyFieldMap(data, fieldMap) {
|
|
66
|
+
if (!fieldMap || Object.keys(fieldMap).length === 0)
|
|
67
|
+
return data;
|
|
68
|
+
const result = {};
|
|
69
|
+
for (const [key, value] of Object.entries(data)) {
|
|
70
|
+
result[fieldMap[key] ?? key] = value;
|
|
71
|
+
}
|
|
72
|
+
return result;
|
|
73
|
+
}
|
|
74
|
+
/**
|
|
75
|
+
* Invert a fieldMap (file→canonical) to produce a reverse map (canonical→file).
|
|
76
|
+
* Used for write operations (e.g. template-sync) to translate back to file field names.
|
|
77
|
+
*/
|
|
78
|
+
export function invertFieldMap(fieldMap) {
|
|
79
|
+
return Object.fromEntries(Object.entries(fieldMap).map(([src, canonical]) => [canonical, src]));
|
|
80
|
+
}
|
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
import type { AnySchemaObject } from 'ajv';
|
|
2
|
+
import type { SpaceGraph } from '../space-graph';
|
|
3
|
+
import type { BaseNode, SpaceContext } from '../types';
|
|
4
|
+
export declare const PLUGIN_PREFIX = "sctx-";
|
|
5
|
+
export declare const CONFIG_PLUGINS_DIR = "plugins";
|
|
6
|
+
/** Normalize a plugin name to its canonical prefixed form. */
|
|
7
|
+
export declare function normalizePluginName(name: string): string;
|
|
8
|
+
export type PluginContext = SpaceContext & {
|
|
9
|
+
/** Validated config for this plugin invocation. */
|
|
10
|
+
pluginConfig: Record<string, unknown>;
|
|
11
|
+
};
|
|
12
|
+
export type ParseResult = {
|
|
13
|
+
nodes: BaseNode[];
|
|
14
|
+
/** Paths/items the plugin skipped during parsing, for any reason. */
|
|
15
|
+
parseIgnored: string[];
|
|
16
|
+
/** Plugin diagnostics: keyed scalar or list values. */
|
|
17
|
+
diagnostics: Record<string, number | string | string[]>;
|
|
18
|
+
};
|
|
19
|
+
export type ParseHook = (context: PluginContext) => Promise<ParseResult | null>;
|
|
20
|
+
export type TemplateSyncOptions = {
|
|
21
|
+
dryRun?: boolean;
|
|
22
|
+
createMissing?: boolean;
|
|
23
|
+
};
|
|
24
|
+
export type TemplateSyncHook = (context: PluginContext, options: TemplateSyncOptions) => Promise<true | null>;
|
|
25
|
+
/** A single render output format provided by a plugin. */
|
|
26
|
+
export type RenderFormat = {
|
|
27
|
+
/** Short name, unique within the plugin (e.g. 'bullets', 'mermaid'). */
|
|
28
|
+
name: string;
|
|
29
|
+
/** Human-readable description shown in `render list`. */
|
|
30
|
+
description: string;
|
|
31
|
+
};
|
|
32
|
+
/** Options passed to a render function. */
|
|
33
|
+
export type RenderOptions = {
|
|
34
|
+
/** The format name being rendered (e.g. 'bullets', 'mermaid'). */
|
|
35
|
+
format: string;
|
|
36
|
+
};
|
|
37
|
+
/**
|
|
38
|
+
* The render hook on a plugin: declares available formats and handles rendering.
|
|
39
|
+
* Unlike parse/templateSync (first-responder), render hooks are additive —
|
|
40
|
+
* all formats from all plugins are available simultaneously.
|
|
41
|
+
*/
|
|
42
|
+
export type RenderHook = {
|
|
43
|
+
formats: RenderFormat[];
|
|
44
|
+
render: (context: PluginContext, graph: SpaceGraph, options: RenderOptions) => Promise<string> | string;
|
|
45
|
+
};
|
|
46
|
+
/**
|
|
47
|
+
* Plugin contract:
|
|
48
|
+
* - A hook not implemented on the plugin → that plugin is skipped for that operation.
|
|
49
|
+
* - parse/templateSync return `T | null` → null means "didn't handle, try next plugin".
|
|
50
|
+
* - render is additive: all plugins' formats are registered and dispatched by name.
|
|
51
|
+
*/
|
|
52
|
+
export type StructuredContextPlugin = {
|
|
53
|
+
name: string;
|
|
54
|
+
/** JSON Schema used to validate the plugin's config block. Fields with `format: 'path'`
|
|
55
|
+
* are resolved relative to the config directory by `resolveConfigPaths` in the loader. */
|
|
56
|
+
configSchema: AnySchemaObject;
|
|
57
|
+
parse?: ParseHook;
|
|
58
|
+
templateSync?: TemplateSyncHook;
|
|
59
|
+
render?: RenderHook;
|
|
60
|
+
};
|