@iyulab/m3l 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +202 -0
- package/dist/cli.d.ts +14 -0
- package/dist/cli.js +150 -0
- package/dist/index.d.ts +23 -0
- package/dist/index.js +46 -0
- package/dist/lexer.d.ts +5 -0
- package/dist/lexer.js +421 -0
- package/dist/parser.d.ts +9 -0
- package/dist/parser.js +704 -0
- package/dist/reader.d.ts +21 -0
- package/dist/reader.js +84 -0
- package/dist/resolver.d.ts +6 -0
- package/dist/resolver.js +148 -0
- package/dist/types.d.ts +135 -0
- package/dist/types.js +1 -0
- package/dist/validator.d.ts +5 -0
- package/dist/validator.js +196 -0
- package/package.json +34 -0
package/dist/reader.d.ts
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
/** A single M3L source file loaded into memory. */
export interface M3LFile {
    /** Path of the file on disk (or a synthetic name for inline content). */
    path: string;
    /** Raw UTF-8 text of the file. */
    content: string;
}
/**
 * Read M3L files from a path (file or directory).
 * If path is a directory, scans for **\/*.m3l.md files.
 * If an m3l.config.yaml exists in the directory, uses its sources patterns.
 */
export declare function readM3LFiles(inputPath: string): Promise<M3LFile[]>;
/**
 * Wrap a string content as an M3LFile.
 */
export declare function readM3LString(content: string, filename?: string): M3LFile;
/**
 * Read project config from m3l.config.yaml if it exists.
 */
export declare function readProjectConfig(dirPath: string): Promise<{
    name?: string;
    version?: string;
} | null>;
|
package/dist/reader.js
ADDED
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
import { readFileSync, statSync, existsSync } from 'fs';
|
|
2
|
+
import { join, resolve as resolvePath } from 'path';
|
|
3
|
+
import fg from 'fast-glob';
|
|
4
|
+
/**
|
|
5
|
+
* Read M3L files from a path (file or directory).
|
|
6
|
+
* If path is a directory, scans for **\/*.m3l.md files.
|
|
7
|
+
* If an m3l.config.yaml exists in the directory, uses its sources patterns.
|
|
8
|
+
*/
|
|
9
|
+
/**
 * Read M3L files from a path (file or directory).
 * - A file path loads that single file.
 * - A directory containing m3l.config.yaml delegates to its sources patterns.
 * - Any other directory is scanned for M3L files.
 * @param inputPath file or directory path, relative or absolute
 * @returns the loaded files
 * @throws if the path exists but is neither a regular file nor a directory
 */
export async function readM3LFiles(inputPath) {
    const absPath = resolvePath(inputPath);
    const info = statSync(absPath);
    if (info.isFile())
        return [readSingleFile(absPath)];
    if (!info.isDirectory())
        throw new Error(`Path is neither a file nor a directory: ${absPath}`);
    // A project config, when present, decides which sources are included.
    const cfgPath = join(absPath, 'm3l.config.yaml');
    return existsSync(cfgPath) ? readFromConfig(cfgPath, absPath) : scanDirectory(absPath);
}
|
|
26
|
+
/**
|
|
27
|
+
* Wrap a string content as an M3LFile.
|
|
28
|
+
*/
|
|
29
|
+
/**
 * Wrap raw M3L text as an in-memory M3LFile.
 * @param content the M3L source text
 * @param filename synthetic path to attribute the content to
 */
export function readM3LString(content, filename = 'inline.m3l.md') {
    const file = { path: filename, content };
    return file;
}
|
|
32
|
+
// Load one file from disk as UTF-8 text and pair it with its path.
function readSingleFile(filePath) {
    return { path: filePath, content: readFileSync(filePath, 'utf-8') };
}
|
|
36
|
+
// Glob a directory tree for *.m3l.md / *.m3l files and load them in
// sorted (deterministic) order.
async function scanDirectory(dirPath) {
    const matches = await fg('**/*.{m3l.md,m3l}', {
        cwd: dirPath,
        absolute: true,
        onlyFiles: true,
    });
    matches.sort();
    return matches.map(f => readSingleFile(f));
}
|
|
45
|
+
/**
 * Read M3L files according to the `sources` glob patterns in m3l.config.yaml.
 *
 * @param configPath absolute path to the m3l.config.yaml file
 * @param baseDir directory the glob patterns are resolved against
 * @returns matched files, de-duplicated, in per-pattern sorted order
 */
async function readFromConfig(configPath, baseDir) {
    const yamlContent = readFileSync(configPath, 'utf-8');
    // Dynamic import yaml to parse config
    const { parse: parseYaml } = await import('yaml');
    const config = parseYaml(yamlContent);
    // An empty or comment-only YAML file parses to null, and a malformed
    // `sources` value (e.g. a plain string) must not be iterated
    // character-by-character — fall back to a full directory scan in
    // both cases.
    if (!Array.isArray(config?.sources) || config.sources.length === 0) {
        return scanDirectory(baseDir);
    }
    const allFiles = [];
    // Absolute paths already collected; patterns may overlap.
    const seen = new Set();
    for (const pattern of config.sources) {
        const files = await fg(pattern, {
            cwd: baseDir,
            absolute: true,
            onlyFiles: true,
        });
        for (const f of files.sort()) {
            if (!seen.has(f)) {
                seen.add(f);
                allFiles.push(readSingleFile(f));
            }
        }
    }
    return allFiles;
}
|
|
70
|
+
/**
|
|
71
|
+
* Read project config from m3l.config.yaml if it exists.
|
|
72
|
+
*/
|
|
73
|
+
/**
 * Load project metadata (name/version) from m3l.config.yaml, or null when
 * the directory has no config file.
 * @param dirPath directory expected to contain m3l.config.yaml
 */
export async function readProjectConfig(dirPath) {
    const cfgFile = join(resolvePath(dirPath), 'm3l.config.yaml');
    if (!existsSync(cfgFile)) {
        return null;
    }
    // The yaml parser is only loaded when a config actually exists.
    const { parse: parseYaml } = await import('yaml');
    const parsed = parseYaml(readFileSync(cfgFile, 'utf-8'));
    return { name: parsed?.name, version: parsed?.version };
}
|
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
import type { ParsedFile, M3LAST, ProjectInfo } from './types.js';
|
|
2
|
+
/**
|
|
3
|
+
* Resolve and merge multiple parsed file ASTs into a single M3LAST.
|
|
4
|
+
* Handles: inheritance resolution, duplicate detection, reference validation.
|
|
5
|
+
*/
|
|
6
|
+
export declare function resolve(files: ParsedFile[], project?: ProjectInfo): M3LAST;
|
package/dist/resolver.js
ADDED
|
@@ -0,0 +1,148 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Resolve and merge multiple parsed file ASTs into a single M3LAST.
|
|
3
|
+
* Handles: inheritance resolution, duplicate detection, reference validation.
|
|
4
|
+
*/
|
|
5
|
+
/**
 * Resolve and merge multiple parsed file ASTs into a single M3LAST.
 * Handles: inheritance resolution, duplicate detection, reference validation.
 *
 * @param files parsed per-file ASTs to merge
 * @param project optional project metadata; NOTE(review): the caller's object
 *   is mutated in place when a namespace is detected — confirm intended
 * @returns the merged AST including duplicate/inheritance diagnostics
 */
export function resolve(files, project) {
    const errors = [];
    const warnings = [];
    // Collect all elements from all files
    const allModels = [];
    const allEnums = [];
    const allInterfaces = [];
    const allViews = [];
    const sources = [];
    for (const file of files) {
        sources.push(file.source);
        allModels.push(...file.models);
        allEnums.push(...file.enums);
        allInterfaces.push(...file.interfaces);
        allViews.push(...file.views);
    }
    // Build name maps. Each item is duplicate-checked against allNamedMap
    // BEFORE being registered, so ordering of these loops matters.
    const modelMap = new Map();
    const enumMap = new Map();
    const interfaceMap = new Map();
    const allNamedMap = new Map();
    // Check for duplicate model names
    for (const model of allModels) {
        checkDuplicate(model.name, 'model', model, modelMap, allNamedMap, errors);
        modelMap.set(model.name, model);
        allNamedMap.set(model.name, { type: 'model', loc: { file: model.source, line: model.line } });
    }
    for (const en of allEnums) {
        checkDuplicate(en.name, 'enum', en, enumMap, allNamedMap, errors);
        enumMap.set(en.name, en);
        allNamedMap.set(en.name, { type: 'enum', loc: { file: en.source, line: en.line } });
    }
    for (const iface of allInterfaces) {
        checkDuplicate(iface.name, 'interface', iface, interfaceMap, allNamedMap, errors);
        interfaceMap.set(iface.name, iface);
        allNamedMap.set(iface.name, { type: 'interface', loc: { file: iface.source, line: iface.line } });
    }
    // Views are registered for reference resolution but NOT duplicate-checked
    // here — NOTE(review): confirm whether view-name collisions should also
    // produce E005.
    for (const view of allViews) {
        allNamedMap.set(view.name, { type: 'view', loc: { file: view.source, line: view.line } });
    }
    // Resolve inheritance (mutates each model's `fields` array in place)
    for (const model of allModels) {
        resolveInheritance(model, modelMap, interfaceMap, allNamedMap, errors);
    }
    // Check duplicate field names within each model. Runs AFTER inheritance,
    // so a clash between an inherited field and a model's own field is
    // reported here as well.
    for (const model of [...allModels, ...allViews]) {
        checkDuplicateFields(model, errors);
    }
    // Detect namespace from first file if available
    const projectInfo = project || {};
    if (!projectInfo.name) {
        const ns = files.find(f => f.namespace)?.namespace;
        if (ns)
            projectInfo.name = ns;
    }
    return {
        project: projectInfo,
        sources,
        models: allModels,
        enums: allEnums,
        interfaces: allInterfaces,
        views: allViews,
        errors,
        warnings,
    };
}
|
|
71
|
+
// Report an E005 diagnostic when `name` was already registered in the
// global name map, pointing back at the first definition site.
// (`map` is accepted for call-site symmetry but is not consulted here.)
function checkDuplicate(name, kind, item, map, allMap, errors) {
    const prior = allMap.get(name);
    if (!prior)
        return;
    errors.push({
        code: 'E005',
        severity: 'error',
        file: item.source,
        line: item.line,
        col: 1,
        message: `Duplicate ${kind} name "${name}" (first defined in ${prior.loc.file}:${prior.loc.line})`,
    });
}
|
|
84
|
+
/**
 * Flatten a model's `inherits` chain by prepending every inherited field
 * (depth-first, ancestors before their descendants) onto `model.fields`.
 *
 * - Cycles are broken via the `visiting` set: a name already on the DFS path
 *   is skipped silently.
 * - An unknown parent name raises E007 — unless the name exists in
 *   allNamedMap as some non-inheritable element (e.g. an enum or view),
 *   in which case it is skipped here without a diagnostic.
 * - Among inherited fields, the first occurrence of a name wins.
 *   NOTE(review): a field the model itself redefines is NOT filtered out,
 *   so it will later be flagged as a duplicate field — confirm whether
 *   child-overrides-parent was intended instead.
 */
function resolveInheritance(model, modelMap, interfaceMap, allNamedMap, errors) {
    if (model.inherits.length === 0)
        return;
    const inheritedFields = [];
    const resolved = new Set(); // parents fully processed
    const visiting = new Set(); // parents on the current DFS path (cycle guard)
    function collectFields(name, fromModel) {
        if (resolved.has(name) || visiting.has(name))
            return;
        visiting.add(name);
        // A parent may be either a model or an interface.
        const parent = modelMap.get(name) || interfaceMap.get(name);
        if (!parent) {
            if (!allNamedMap.has(name)) {
                errors.push({
                    code: 'E007',
                    severity: 'error',
                    file: fromModel.source,
                    line: fromModel.line,
                    col: 1,
                    message: `Unresolved inheritance reference "${name}" in model "${fromModel.name}"`,
                });
            }
            visiting.delete(name);
            return;
        }
        // Recursively resolve parent's parents first
        for (const grandparent of parent.inherits) {
            collectFields(grandparent, fromModel);
        }
        // Add parent's own fields (shallow copies; first occurrence wins)
        for (const field of parent.fields) {
            if (!inheritedFields.some(f => f.name === field.name)) {
                inheritedFields.push({ ...field });
            }
        }
        visiting.delete(name);
        resolved.add(name);
    }
    for (const parentName of model.inherits) {
        collectFields(parentName, model);
    }
    // Prepend inherited fields before model's own fields
    if (inheritedFields.length > 0) {
        model.fields = [...inheritedFields, ...model.fields];
    }
}
|
|
130
|
+
// Flag repeated field names within one model/view. The first occurrence of
// each name wins; every later one produces an E005 pointing back at it.
function checkDuplicateFields(model, errors) {
    const firstByName = new Map();
    for (const field of model.fields) {
        if (!firstByName.has(field.name)) {
            firstByName.set(field.name, field);
            continue;
        }
        const first = firstByName.get(field.name);
        errors.push({
            code: 'E005',
            severity: 'error',
            file: field.loc.file,
            line: field.loc.line,
            col: 1,
            message: `Duplicate field name "${field.name}" in ${model.type} "${model.name}" (first at line ${first.loc.line})`,
        });
    }
}
|
package/dist/types.d.ts
ADDED
|
@@ -0,0 +1,135 @@
|
|
|
1
|
+
/** Source location for error reporting */
export interface SourceLocation {
    file: string;
    line: number;
    col: number;
}
/** Line-level token categories produced by the lexer. */
export type TokenType = 'namespace' | 'model' | 'enum' | 'interface' | 'view' | 'section' | 'field' | 'nested_item' | 'blockquote' | 'horizontal_rule' | 'blank' | 'text';
/** One lexed line of an M3L document. */
export interface Token {
    type: TokenType;
    /** Original line text, unmodified. */
    raw: string;
    line: number;
    /** Leading indentation width. */
    indent: number;
    /** Token-type-specific payload extracted by the lexer. */
    data?: Record<string, unknown>;
}
/** How a field's value is produced: stored directly, or derived. */
export type FieldKind = 'stored' | 'computed' | 'lookup' | 'rollup';
/** A parsed `@name(args)` annotation on a field. */
export interface FieldAttribute {
    name: string;
    args?: unknown[];
}
/** One member of an enum (top-level or inline). */
export interface EnumValue {
    name: string;
    description?: string;
    type?: string;
    value?: unknown;
}
/** A single field definition, possibly with nested object fields. */
export interface FieldNode {
    name: string;
    /** Human-readable display label, if given. */
    label?: string;
    /** Declared type name (absent for untyped fields). */
    type?: string;
    /** Type parameters, e.g. length/precision arguments. */
    params?: (string | number)[];
    nullable: boolean;
    /** True when the field holds an array of `type`. */
    array: boolean;
    kind: FieldKind;
    default_value?: string;
    description?: string;
    attributes: FieldAttribute[];
    /** Framework-specific attribute strings carried through verbatim. */
    framework_attrs?: string[];
    /** Present when kind === 'lookup': dotted path starting at an FK field. */
    lookup?: {
        path: string;
    };
    /** Present when kind === 'rollup': aggregate over a target model's rows. */
    rollup?: {
        target: string;
        fk: string;
        aggregate: string;
        field?: string;
        where?: string;
    };
    /** Present when kind === 'computed'. */
    computed?: {
        expression: string;
    };
    /** Inline enum members, when the field declares an inline enum. */
    enum_values?: EnumValue[];
    /** Nested object fields (depth is style-checked by the validator, W002). */
    fields?: FieldNode[];
    loc: SourceLocation;
}
/** A model, interface, or view definition (views use the trailing members). */
export interface ModelNode {
    name: string;
    label?: string;
    type: 'model' | 'enum' | 'interface' | 'view';
    /** Path of the file this node was parsed from. */
    source: string;
    line: number;
    /** Names of inherited models/interfaces (flattened by the resolver). */
    inherits: string[];
    description?: string;
    fields: FieldNode[];
    /** Auxiliary sections: known keys plus free-form extras. */
    sections: {
        indexes: unknown[];
        relations: unknown[];
        behaviors: unknown[];
        metadata: Record<string, unknown>;
        [key: string]: unknown;
    };
    /** View-only: materialized-view flag. */
    materialized?: boolean;
    /** View-only: source query definition (checked as E004). */
    source_def?: ViewSourceDef;
    /** View-only: refresh policy. */
    refresh?: {
        strategy: string;
        interval?: string;
    };
    loc: SourceLocation;
}
/** Query definition backing a view. */
export interface ViewSourceDef {
    /** Name of the base model the view selects from. */
    from: string;
    joins?: {
        model: string;
        on: string;
    }[];
    where?: string;
    order_by?: string;
    group_by?: string[];
}
/** A top-level enum definition. */
export interface EnumNode {
    name: string;
    label?: string;
    type: 'enum';
    /** Path of the file this node was parsed from. */
    source: string;
    line: number;
    description?: string;
    values: EnumValue[];
    loc: SourceLocation;
}
/** Project-level metadata (from m3l.config.yaml or a namespace directive). */
export interface ProjectInfo {
    name?: string;
    version?: string;
}
/** One error or warning with its source position. */
export interface Diagnostic {
    code: string;
    severity: 'error' | 'warning';
    file: string;
    line: number;
    col: number;
    message: string;
}
/** Per-file parse result, before cross-file resolution. */
export interface ParsedFile {
    source: string;
    namespace?: string;
    models: ModelNode[];
    enums: EnumNode[];
    interfaces: ModelNode[];
    views: ModelNode[];
}
/** The merged, resolved AST for a whole project. */
export interface M3LAST {
    project: ProjectInfo;
    /** Paths of all contributing source files. */
    sources: string[];
    models: ModelNode[];
    enums: EnumNode[];
    interfaces: ModelNode[];
    views: ModelNode[];
    errors: Diagnostic[];
    warnings: Diagnostic[];
}
/** Options for validate(). */
export interface ValidateOptions {
    strict?: boolean;
}
/** Output of validate(). */
export interface ValidateResult {
    errors: Diagnostic[];
    warnings: Diagnostic[];
}
|
package/dist/types.js
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// Intentionally empty: the companion .d.ts declares only types, which are
// erased at compile time; this export keeps the emitted file an ES module.
export {};
|
|
@@ -0,0 +1,196 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Validate a resolved M3L AST for semantic errors and style warnings.
|
|
3
|
+
*/
|
|
4
|
+
/**
 * Validate a resolved M3L AST for semantic errors and style warnings.
 *
 * Always-on checks: E001 (rollup FK), E002 (lookup FK), E004 (view source
 * model), E005 (duplicate fields). With `options.strict`: W001 (line
 * length), W002 (nesting depth), W004 (lookup chain length).
 *
 * @param ast resolved AST; its existing errors/warnings are carried over
 * @param options `{ strict?: boolean }`
 * @returns fresh { errors, warnings } arrays; the AST itself is not mutated
 */
export function validate(ast, options = {}) {
    // Start from the resolver's diagnostics so callers get one combined list.
    const errors = [...ast.errors];
    const warnings = [...ast.warnings];
    // Views share the field-level checks with models.
    const allModels = [...ast.models, ...ast.views];
    const modelMap = new Map();
    for (const m of allModels) {
        modelMap.set(m.name, m);
    }
    // E001: @rollup FK missing @reference
    for (const model of allModels) {
        for (const field of model.fields) {
            if (field.kind === 'rollup' && field.rollup) {
                validateRollupReference(field, model, modelMap, errors);
            }
        }
    }
    // E002: @lookup path FK missing @reference
    for (const model of allModels) {
        for (const field of model.fields) {
            if (field.kind === 'lookup' && field.lookup) {
                validateLookupReference(field, model, modelMap, errors);
            }
        }
    }
    // E004: View @from references model not found
    for (const view of ast.views) {
        if (view.source_def?.from) {
            if (!modelMap.has(view.source_def.from)) {
                errors.push({
                    code: 'E004',
                    severity: 'error',
                    file: view.source,
                    line: view.line,
                    col: 1,
                    message: `View "${view.name}" references model "${view.source_def.from}" which is not defined`,
                });
            }
        }
    }
    // E005: Duplicate field names (already checked in resolver, but re-check
    // for safety). NOTE(review): a duplicate found by both passes appears
    // twice in `errors`, since resolver diagnostics were copied in above.
    for (const model of allModels) {
        const seen = new Set();
        for (const field of model.fields) {
            if (seen.has(field.name)) {
                errors.push({
                    code: 'E005',
                    severity: 'error',
                    file: field.loc.file,
                    line: field.loc.line,
                    col: 1,
                    message: `Duplicate field name "${field.name}" in ${model.type} "${model.name}"`,
                });
            }
            seen.add(field.name);
        }
    }
    // Strict mode warnings
    if (options.strict) {
        for (const model of allModels) {
            for (const field of model.fields) {
                // W001: Field line length > 80 chars
                // We check the source loc raw length — approximate using field attributes count
                checkFieldLineLength(field, model, warnings);
                // W003: Framework attrs without backtick (already processed in lexer, skip)
                // W004: Lookup chain > 3 hops
                if (field.kind === 'lookup' && field.lookup) {
                    const hops = field.lookup.path.split('.').length;
                    if (hops > 3) {
                        warnings.push({
                            code: 'W004',
                            severity: 'warning',
                            file: field.loc.file,
                            line: field.loc.line,
                            col: 1,
                            message: `Lookup chain "${field.lookup.path}" exceeds 3 hops (${hops} hops)`,
                        });
                    }
                }
            }
            // W002: Object nesting > 3 levels
            checkNestingDepth(model.fields, 1, model, warnings);
        }
        // W006: Inline enum missing values: key — currently a no-op: the
        // parser does not yet record whether the `values:` wrapper was used.
        for (const model of allModels) {
            for (const field of model.fields) {
                if (field.type === 'enum' && field.enum_values && field.enum_values.length > 0) {
                    // The lexer/parser would have marked whether values: key was used
                    // For now, we check based on presence — if enum_values exist without
                    // the values: wrapper, the parser still collects them.
                    // This warning is informational for style.
                    // We'll rely on a flag set during parsing in the future.
                }
            }
        }
    }
    return { errors, warnings };
}
|
|
101
|
+
// E001: the FK column a @rollup aggregates over must itself be marked as a
// foreign key (@reference/@fk) on the target model. A missing target model
// or missing FK field is reported by other checks, so both are skipped here.
function validateRollupReference(field, model, modelMap, errors) {
    const { target, fk } = field.rollup;
    const targetModel = modelMap.get(target);
    const fkField = targetModel?.fields.find(f => f.name === fk);
    if (!fkField)
        return;
    const marked = fkField.attributes.some(a => a.name === 'reference' || a.name === 'fk');
    if (marked)
        return;
    errors.push({
        code: 'E001',
        severity: 'error',
        file: field.loc.file,
        line: field.loc.line,
        col: 1,
        message: `@rollup on "${field.name}" targets "${target}.${fk}" which has no @reference or @fk attribute`,
    });
}
|
|
126
|
+
// E002: the first hop of a @lookup path must be an FK field on the current
// model carrying @reference/@fk. Single-segment paths and missing FK fields
// are left to other checks.
function validateLookupReference(field, model, modelMap, errors) {
    const [fkFieldName, ...rest] = field.lookup.path.split('.');
    if (rest.length === 0)
        return;
    const fkField = model.fields.find(f => f.name === fkFieldName);
    if (!fkField)
        return;
    const marked = fkField.attributes.some(a => a.name === 'reference' || a.name === 'fk');
    if (marked)
        return;
    errors.push({
        code: 'E002',
        severity: 'error',
        file: field.loc.file,
        line: field.loc.line,
        col: 1,
        message: `@lookup on "${field.name}" references FK "${fkFieldName}" which has no @reference or @fk attribute`,
    });
}
|
|
148
|
+
// W001 (strict): estimate the rendered source-line width of a field from its
// parsed parts and warn when it would exceed the 80-character guideline.
// The estimate is approximate — it reconstructs "- name(label): type ..."
// piecewise rather than reading the original raw line.
function checkFieldLineLength(field, model, warnings) {
    const parts = [field.name.length + 2]; // "- name"
    if (field.label)
        parts.push(field.label.length + 2); // "(label)"
    if (field.type) {
        parts.push(field.type.length + 2); // ": type"
        if (field.params)
            parts.push(field.params.join(',').length + 2);
    }
    if (field.nullable)
        parts.push(1);
    if (field.default_value)
        parts.push(field.default_value.length + 3);
    for (const attr of field.attributes) {
        parts.push(attr.name.length + 2); // " @name"
        if (attr.args)
            parts.push(attr.args.join(',').length + 2);
    }
    if (field.description)
        parts.push(field.description.length + 3);
    const approxLen = parts.reduce((total, n) => total + n, 0);
    if (approxLen > 80) {
        warnings.push({
            code: 'W001',
            severity: 'warning',
            file: field.loc.file,
            line: field.loc.line,
            col: 1,
            message: `Field "${field.name}" line length (~${approxLen} chars) exceeds 80 character guideline`,
        });
    }
}
|
|
180
|
+
// W002 (strict): walk nested object fields recursively; once a parent at
// depth >= 3 still contains children, warn that the tree goes beyond the
// 3-level guideline. Each offending parent is reported once.
function checkNestingDepth(fields, depth, model, warnings) {
    for (const field of fields) {
        const children = field.fields;
        if (!children || children.length === 0)
            continue;
        if (depth >= 3) {
            warnings.push({
                code: 'W002',
                severity: 'warning',
                file: field.loc.file,
                line: field.loc.line,
                col: 1,
                message: `Object nesting depth exceeds 3 levels at field "${field.name}" in "${model.name}"`,
            });
        }
        checkNestingDepth(children, depth + 1, model, warnings);
    }
}
|
package/package.json
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@iyulab/m3l",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "M3L parser and CLI tool — parse .m3l.md files into JSON AST",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"main": "dist/index.js",
|
|
7
|
+
"types": "dist/index.d.ts",
|
|
8
|
+
"bin": {
|
|
9
|
+
"m3l": "dist/cli.js"
|
|
10
|
+
},
|
|
11
|
+
"scripts": {
|
|
12
|
+
"build": "tsc",
|
|
13
|
+
"test": "vitest run",
|
|
14
|
+
"test:watch": "vitest",
|
|
15
|
+
"lint": "tsc --noEmit"
|
|
16
|
+
},
|
|
17
|
+
"dependencies": {
|
|
18
|
+
"commander": "^13.0.0",
|
|
19
|
+
"yaml": "^2.7.0",
|
|
20
|
+
"fast-glob": "^3.3.0"
|
|
21
|
+
},
|
|
22
|
+
"devDependencies": {
|
|
23
|
+
"typescript": "^5.7.0",
|
|
24
|
+
"vitest": "^3.0.0",
|
|
25
|
+
"@types/node": "^22.0.0"
|
|
26
|
+
},
|
|
27
|
+
"engines": {
|
|
28
|
+
"node": ">=20"
|
|
29
|
+
},
|
|
30
|
+
"files": [
|
|
31
|
+
"dist"
|
|
32
|
+
],
|
|
33
|
+
"license": "MIT"
|
|
34
|
+
}
|