rip-lang 3.14.5 → 3.15.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +9 -5
- package/bin/rip +5 -0
- package/docs/AGENTS.md +1 -1
- package/docs/RIP-LANG.md +17 -5
- package/docs/RIP-SCHEMA.md +4 -4
- package/docs/demo/README.md +43 -0
- package/docs/demo/components/_layout.rip +28 -0
- package/docs/demo/components/about.rip +36 -0
- package/docs/demo/components/card.rip +10 -0
- package/docs/demo/components/counter.rip +33 -0
- package/docs/demo/components/index.rip +30 -0
- package/docs/demo/components/todos.rip +48 -0
- package/docs/demo/css/styles.css +472 -0
- package/docs/dist/rip.js +3211 -4619
- package/docs/dist/rip.min.js +270 -683
- package/docs/dist/rip.min.js.br +0 -0
- package/docs/example/index.json +6 -6
- package/docs/extensions/duckdb/index.html +7 -5
- package/docs/extensions/duckdb/manifest.json +1 -1
- package/docs/extensions/duckdb/v1.5.2/linux_amd64/ripdb.duckdb_extension.gz +0 -0
- package/docs/extensions/duckdb/v1.5.2/osx_arm64/ripdb.duckdb_extension.gz +0 -0
- package/package.json +11 -3
- package/src/AGENTS.md +105 -9
- package/src/{ui.rip → app.rip} +24 -2
- package/src/browser.js +154 -37
- package/src/compiler.js +87 -9
- package/src/grammar/grammar.rip +1 -1
- package/src/grammar/solar.rip +0 -1
- package/src/lexer.js +25 -3
- package/src/parser.js +4 -4
- package/src/typecheck.js +3 -2
- package/src/types-emit.js +1021 -0
- package/src/types.js +11 -1035
- package/src/schema.js +0 -3389
package/src/schema.js
DELETED
|
@@ -1,3389 +0,0 @@
|
|
|
1
|
-
import { parser } from './parser.js';
|
|
2
|
-
|
|
3
|
-
// Schema System — inline `schema` declarations compile to runtime validator
|
|
4
|
-
// and ORM plans.
|
|
5
|
-
//
|
|
6
|
-
// Architecture (parallels types.js and components.js sidecars):
|
|
7
|
-
//
|
|
8
|
-
// installSchemaSupport(Lexer, CodeEmitter)
|
|
9
|
-
// Adds rewriteSchema() to Lexer.prototype and emitSchema() to
|
|
10
|
-
// CodeEmitter.prototype.
|
|
11
|
-
//
|
|
12
|
-
// rewriteSchema()
|
|
13
|
-
// Token-stream pass. Recognizes `schema [:kind] INDENT ... OUTDENT`
|
|
14
|
-
// blocks at expression-start positions, parses the body with a
|
|
15
|
-
// schema-specific sub-parser, and collapses the whole region into
|
|
16
|
-
// `SCHEMA SCHEMA_BODY` where SCHEMA_BODY carries a structured
|
|
17
|
-
// descriptor on its .data. The main Rip grammar only sees two
|
|
18
|
-
// tiny productions. Schema body syntax never reaches the main
|
|
19
|
-
// parser.
|
|
20
|
-
//
|
|
21
|
-
// emitSchema(head, rest, context)
|
|
22
|
-
// CodeEmitter dispatch. Reads the structured descriptor off the
|
|
23
|
-
// SCHEMA_BODY node's metadata and emits a `__schema({...})` runtime
|
|
24
|
-
// call. For Phase 1 the emission is a self-describing object; the
|
|
25
|
-
// runtime (__schema) lands in Phase 3.
|
|
26
|
-
//
|
|
27
|
-
// hasSchemas(source)
|
|
28
|
-
// Cheap regex probe for the presence of a schema declaration.
|
|
29
|
-
// Parallels hasTypeAnnotations. Used by typecheck.js and the LSP to
|
|
30
|
-
// skip work on files without schemas.
|
|
31
|
-
//
|
|
32
|
-
// Two body sub-modes:
|
|
33
|
-
//
|
|
34
|
-
// fielded — kinds :input, :shape, :model, :mixin. Permitted line forms:
|
|
35
|
-
// field IDENTIFIER[!|?|#]* TYPE [, constraints] [, attrs]
|
|
36
|
-
// directive @NAME [args]
|
|
37
|
-
// callable NAME: (-> | ~>) body
|
|
38
|
-
//
|
|
39
|
-
// enum — kind :enum. Permitted line forms:
|
|
40
|
-
// bare IDENTIFIER
|
|
41
|
-
// valued IDENTIFIER : Literal
|
|
42
|
-
//
|
|
43
|
-
// Anything else at schema top level is a schema-mode-aware compile error
|
|
44
|
-
// with a helpful message.
|
|
45
|
-
|
|
46
|
-
const VALID_KINDS = new Set(['input', 'shape', 'model', 'mixin', 'enum']);
|
|
47
|
-
const KIND_DEFAULT = 'input';
|
|
48
|
-
|
|
49
|
-
const HOOK_NAMES = new Set([
|
|
50
|
-
'beforeValidation', 'afterValidation',
|
|
51
|
-
'beforeSave', 'afterSave',
|
|
52
|
-
'beforeCreate', 'afterCreate',
|
|
53
|
-
'beforeUpdate', 'afterUpdate',
|
|
54
|
-
'beforeDestroy', 'afterDestroy',
|
|
55
|
-
]);
|
|
56
|
-
|
|
57
|
-
// Positions where `schema` can legitimately start an expression.
|
|
58
|
-
// If the prev token is one of these tags, the identifier `schema` is a
|
|
59
|
-
// candidate for retagging to SCHEMA.
|
|
60
|
-
const EXPR_START_PREV = new Set([
|
|
61
|
-
'TERMINATOR', 'INDENT', 'OUTDENT',
|
|
62
|
-
'=', '+=', '-=', '*=', '/=', '%=', '**=', '//=', '%%=',
|
|
63
|
-
'?=', '??=', '&&=', '||=', '&=', '|=', '^=', '<<=', '>>=', '>>>=',
|
|
64
|
-
'READONLY_ASSIGN', 'REACTIVE_ASSIGN', 'COMPUTED_ASSIGN',
|
|
65
|
-
'RETURN', 'THROW', 'YIELD', 'AWAIT', 'EXPORT',
|
|
66
|
-
',', '(', '[', '{', 'CALL_START', 'PARAM_START', 'INDEX_START',
|
|
67
|
-
'->', '=>', ':', 'WHEN', 'THEN', 'IF', 'UNLESS',
|
|
68
|
-
'UNARY', '!', 'NOT',
|
|
69
|
-
]);
|
|
70
|
-
|
|
71
|
-
// ============================================================================
|
|
72
|
-
// hasSchemas — fast probe
|
|
73
|
-
// ============================================================================
|
|
74
|
-
|
|
75
|
-
// True when source looks like it contains a schema declaration. We look
|
|
76
|
-
// for `schema` followed by either a `:kind` symbol or by a newline +
|
|
77
|
-
// deeper indent. Conservative: a false positive just means typecheck
|
|
78
|
-
// pays a bit more work, never wrong behavior.
|
|
79
|
-
export function hasSchemas(source) {
|
|
80
|
-
if (typeof source !== 'string') return false;
|
|
81
|
-
if (!/\bschema\b/.test(source)) return false;
|
|
82
|
-
return /(?:^|[\s=,(\[{:])schema(?:\s*:[A-Za-z_$][\w$]*|\s*\n[ \t]+\S)/m.test(source);
|
|
83
|
-
}
|
|
84
|
-
|
|
85
|
-
// ============================================================================
|
|
86
|
-
// installSchemaSupport — prototype installation
|
|
87
|
-
// ============================================================================
|
|
88
|
-
|
|
89
|
-
export function installSchemaSupport(Lexer, CodeEmitter) {
|
|
90
|
-
if (Lexer) {
|
|
91
|
-
Lexer.prototype.rewriteSchema = function() {
|
|
92
|
-
rewriteSchema(this);
|
|
93
|
-
};
|
|
94
|
-
// Captured body tokens need the tail rewriter passes before parsing.
|
|
95
|
-
// parseBodyTokens runs those passes on a fresh Lexer instance.
|
|
96
|
-
parseBodyTokens._LexerCtor = Lexer;
|
|
97
|
-
}
|
|
98
|
-
if (CodeEmitter) {
|
|
99
|
-
CodeEmitter.prototype.emitSchema = function(head, rest, context) {
|
|
100
|
-
return emitSchemaNode(this, head, rest, context);
|
|
101
|
-
};
|
|
102
|
-
CodeEmitter.prototype.getSchemaRuntime = function() {
|
|
103
|
-
return getSchemaRuntime();
|
|
104
|
-
};
|
|
105
|
-
}
|
|
106
|
-
}
|
|
107
|
-
|
|
108
|
-
// ============================================================================
|
|
109
|
-
// Lexer pass: rewriteSchema
|
|
110
|
-
// ============================================================================
|
|
111
|
-
|
|
112
|
-
// Known keys for the `schema.<key> = <value>` file-level pragma. Each
|
|
113
|
-
// pragma takes effect from its declaration forward and is scoped to the
|
|
114
|
-
// current compilation unit — schemas in other files are unaffected.
|
|
115
|
-
// Extend this map when new pragma keys land.
|
|
116
|
-
const SCHEMA_PRAGMA_KEYS = new Set(['defaultMaxString']);
|
|
117
|
-
|
|
118
|
-
function rewriteSchema(lexer) {
|
|
119
|
-
let tokens = lexer.tokens;
|
|
120
|
-
// File-scoped config, updated in-place as pragmas are encountered, then
|
|
121
|
-
// snapshotted into each schema descriptor at collapse time so post-pragma
|
|
122
|
-
// changes don't mutate earlier schemas retroactively.
|
|
123
|
-
let config = { defaultMaxString: null };
|
|
124
|
-
// Top-level INDENT/OUTDENT depth. Pragmas are file-level only so we
|
|
125
|
-
// reject them inside function / class / block bodies — otherwise a
|
|
126
|
-
// pragma nested in `foo = ->` would leak to module-scope schemas
|
|
127
|
-
// declared later on. Schemas themselves get collapsed out of the
|
|
128
|
-
// token stream before their internal INDENT/OUTDENT reach this
|
|
129
|
-
// counter, so depth reflects only user-written nesting.
|
|
130
|
-
let depth = 0;
|
|
131
|
-
let i = 0;
|
|
132
|
-
while (i < tokens.length) {
|
|
133
|
-
let t = tokens[i];
|
|
134
|
-
if (t[0] === 'INDENT') depth++;
|
|
135
|
-
else if (t[0] === 'OUTDENT') depth--;
|
|
136
|
-
let consumed = matchSchemaPragma(tokens, i, config, depth);
|
|
137
|
-
if (consumed > 0) {
|
|
138
|
-
tokens.splice(i, consumed);
|
|
139
|
-
continue;
|
|
140
|
-
}
|
|
141
|
-
if (isSchemaStart(tokens, i)) {
|
|
142
|
-
collapseSchemaAt(lexer, tokens, i, config);
|
|
143
|
-
}
|
|
144
|
-
i++;
|
|
145
|
-
}
|
|
146
|
-
}
|
|
147
|
-
|
|
148
|
-
// Recognize `schema.<key> = <value>` at statement position. Returns the
|
|
149
|
-
// number of tokens consumed (including any trailing TERMINATOR) when the
|
|
150
|
-
// pragma is applied, or 0 when the sequence isn't a pragma. Unknown keys
|
|
151
|
-
// and non-literal values error loudly — silently ignoring a typo like
|
|
152
|
-
// `schema.defaultMacString = 100` would bake a wrong value into every
|
|
153
|
-
// downstream schema.
|
|
154
|
-
function matchSchemaPragma(tokens, i, config, depth) {
|
|
155
|
-
let t = tokens[i];
|
|
156
|
-
if (!t || t[0] !== 'IDENTIFIER' || t[1] !== 'schema') return 0;
|
|
157
|
-
if (tokens[i + 1]?.[0] !== '.') return 0;
|
|
158
|
-
let keyTok = tokens[i + 2];
|
|
159
|
-
if (!keyTok || keyTok[0] !== 'PROPERTY') return 0;
|
|
160
|
-
if (tokens[i + 3]?.[0] !== '=') return 0;
|
|
161
|
-
// Pragmas must start a statement — the `schema` identifier must be
|
|
162
|
-
// preceded by nothing, TERMINATOR, INDENT, or OUTDENT so we don't
|
|
163
|
-
// accidentally rewrite `foo.schema.defaultMaxString = 100` or similar.
|
|
164
|
-
let prev = tokens[i - 1];
|
|
165
|
-
if (prev) {
|
|
166
|
-
let ptag = prev[0];
|
|
167
|
-
if (ptag !== 'TERMINATOR' && ptag !== 'INDENT' && ptag !== 'OUTDENT') return 0;
|
|
168
|
-
}
|
|
169
|
-
let key = keyTok[1];
|
|
170
|
-
if (!SCHEMA_PRAGMA_KEYS.has(key)) {
|
|
171
|
-
throw schemaError(keyTok,
|
|
172
|
-
`Unknown schema pragma 'schema.${key}'. Known pragmas: ${[...SCHEMA_PRAGMA_KEYS].join(', ')}.`);
|
|
173
|
-
}
|
|
174
|
-
if (depth > 0) {
|
|
175
|
-
throw schemaError(keyTok,
|
|
176
|
-
`Schema pragma 'schema.${key}' must be declared at file top level. It was found inside a nested block (function / class / if / loop body), where it would leak into later top-level schemas.`);
|
|
177
|
-
}
|
|
178
|
-
let valTok = tokens[i + 4];
|
|
179
|
-
if (!valTok || valTok[0] !== 'NUMBER') {
|
|
180
|
-
throw schemaError(valTok || keyTok,
|
|
181
|
-
`Pragma 'schema.${key}' requires a number literal. Example: schema.${key} = 100.`);
|
|
182
|
-
}
|
|
183
|
-
let n = Number(valTok[1]);
|
|
184
|
-
if (!Number.isFinite(n) || n < 0 || !Number.isInteger(n)) {
|
|
185
|
-
throw schemaError(valTok,
|
|
186
|
-
`Pragma 'schema.${key}' expects a non-negative integer (got ${valTok[1]}). Use 0 to disable.`);
|
|
187
|
-
}
|
|
188
|
-
// `0` means "no default cap" — explicit way to reset a pragma mid-file.
|
|
189
|
-
config[key] = n === 0 ? null : n;
|
|
190
|
-
// Consume trailing TERMINATOR so the pragma line leaves no blank statement behind.
|
|
191
|
-
let end = i + 5;
|
|
192
|
-
if (tokens[end]?.[0] === 'TERMINATOR') end++;
|
|
193
|
-
return end - i;
|
|
194
|
-
}
|
|
195
|
-
|
|
196
|
-
function isSchemaStart(tokens, i) {
|
|
197
|
-
let t = tokens[i];
|
|
198
|
-
if (!t || t[0] !== 'IDENTIFIER' || t[1] !== 'schema') return false;
|
|
199
|
-
// Skip property access — `x.schema` is lexed as PROPERTY, not IDENTIFIER.
|
|
200
|
-
// Still guard against generated IDENTIFIER tokens in odd positions.
|
|
201
|
-
let prev = tokens[i - 1];
|
|
202
|
-
if (prev) {
|
|
203
|
-
let ptag = prev[0];
|
|
204
|
-
if (ptag === '.' || ptag === '?.') return false;
|
|
205
|
-
if (prev[0] === 'IDENTIFIER' || prev[0] === 'PROPERTY' ||
|
|
206
|
-
prev[0] === ')' || prev[0] === ']' || prev[0] === '}' ||
|
|
207
|
-
prev[0] === 'STRING' || prev[0] === 'NUMBER') {
|
|
208
|
-
// `x schema` is an implicit call of x on schema — not a decl.
|
|
209
|
-
if (!EXPR_START_PREV.has(ptag)) return false;
|
|
210
|
-
}
|
|
211
|
-
}
|
|
212
|
-
// What follows determines the body form:
|
|
213
|
-
// SYMBOL? then INDENT — indented block body.
|
|
214
|
-
// SYMBOL? then `TERMINATOR ;` — inline body (one-liner), with field
|
|
215
|
-
// entries separated by more `;`
|
|
216
|
-
// terminators up to the newline.
|
|
217
|
-
let j = i + 1;
|
|
218
|
-
if (tokens[j]?.[0] === 'SYMBOL') j++;
|
|
219
|
-
if (tokens[j]?.[0] === 'TERMINATOR') {
|
|
220
|
-
if (tokens[j][1] === ';') return true;
|
|
221
|
-
j++;
|
|
222
|
-
}
|
|
223
|
-
return tokens[j]?.[0] === 'INDENT';
|
|
224
|
-
}
|
|
225
|
-
|
|
226
|
-
// Collapse `IDENTIFIER 'schema' [SYMBOL kind] [TERMINATOR] INDENT ... OUTDENT`
|
|
227
|
-
// at position i into `SCHEMA SCHEMA_BODY`. SCHEMA_BODY carries a structured
|
|
228
|
-
// descriptor on .data. `config` snapshots any `schema.<key>` pragmas in
|
|
229
|
-
// effect at this point so later pragma changes don't retroactively alter
|
|
230
|
-
// earlier schemas.
|
|
231
|
-
function collapseSchemaAt(lexer, tokens, i, config) {
|
|
232
|
-
let schemaTok = tokens[i];
|
|
233
|
-
let kindToken = null;
|
|
234
|
-
let kind = KIND_DEFAULT;
|
|
235
|
-
let j = i + 1;
|
|
236
|
-
|
|
237
|
-
if (tokens[j]?.[0] === 'SYMBOL') {
|
|
238
|
-
kindToken = tokens[j];
|
|
239
|
-
let k = kindToken[1];
|
|
240
|
-
if (!VALID_KINDS.has(k)) {
|
|
241
|
-
throw schemaError(kindToken,
|
|
242
|
-
`Unknown schema kind :${k}. Expected one of :input, :shape, :model, :mixin, :enum.`);
|
|
243
|
-
}
|
|
244
|
-
kind = k;
|
|
245
|
-
j++;
|
|
246
|
-
}
|
|
247
|
-
|
|
248
|
-
let bodyTokens;
|
|
249
|
-
let endIdx;
|
|
250
|
-
if (tokens[j]?.[0] === 'TERMINATOR' && tokens[j][1] === ';') {
|
|
251
|
-
// Inline one-liner: `schema [:kind]; field; field; ...` up to the
|
|
252
|
-
// next `\n` TERMINATOR at depth 0. The `;` separators are already
|
|
253
|
-
// TERMINATOR tokens, so splitBodyLines handles them unchanged.
|
|
254
|
-
// Arrows (`->`, `~>`, `!>`) would make the body ambiguous with
|
|
255
|
-
// subsequent `;`-separated fields, so methods/computed/hooks/
|
|
256
|
-
// transforms are rejected on the inline form.
|
|
257
|
-
let inlineStart = j + 1;
|
|
258
|
-
let end = inlineStart;
|
|
259
|
-
let depth = 0;
|
|
260
|
-
// Rip's lexer collapses `;\n` into a single `;`-valued TERMINATOR,
|
|
261
|
-
// so value-based "end of inline" detection alone misses trailing
|
|
262
|
-
// `X = schema :shape; name!;\ny = 1`. We track the inline body's
|
|
263
|
-
// starting row and break the moment a token's row advances past
|
|
264
|
-
// it at depth 0 — that captures both plain `\n` and the folded
|
|
265
|
-
// `;\n` case.
|
|
266
|
-
let startRow = tokens[inlineStart]?.loc?.r ?? null;
|
|
267
|
-
while (end < tokens.length) {
|
|
268
|
-
let tk = tokens[end];
|
|
269
|
-
let tag = tk[0];
|
|
270
|
-
if (depth === 0 && startRow != null && tk.loc && tk.loc.r > startRow) break;
|
|
271
|
-
if (tag === '(' || tag === '[' || tag === '{' ||
|
|
272
|
-
tag === 'CALL_START' || tag === 'INDEX_START' || tag === 'PARAM_START') depth++;
|
|
273
|
-
else if (tag === ')' || tag === ']' || tag === '}' ||
|
|
274
|
-
tag === 'CALL_END' || tag === 'INDEX_END' || tag === 'PARAM_END') depth--;
|
|
275
|
-
// Inline body ends at the first depth-0 newline OR at any
|
|
276
|
-
// INDENT/OUTDENT — INDENT would mean the user opened a block
|
|
277
|
-
// (incompatible with inline), and OUTDENT means we're exiting
|
|
278
|
-
// a surrounding block and must leave that token in place for
|
|
279
|
-
// the outer scanner's depth bookkeeping.
|
|
280
|
-
else if (depth === 0 && tag === 'TERMINATOR' && tk[1] !== ';') break;
|
|
281
|
-
else if (depth === 0 && (tag === 'INDENT' || tag === 'OUTDENT')) break;
|
|
282
|
-
// Arrows (`->` method/hook/transform, `~>` computed, `!>` eager
|
|
283
|
-
// derived) make field bodies ambiguous with subsequent
|
|
284
|
-
// `;`-separated entries on the same line, so reject them early
|
|
285
|
-
// with a clear message that points users at the indented form.
|
|
286
|
-
// `~>` lexes as EFFECT; `!>` lexes as UNARY_MATH '!' + COMPARE '>'.
|
|
287
|
-
else if (depth === 0 && tag === '->') {
|
|
288
|
-
throw schemaError(tk, `Inline schema body does not support '->' (method/hook/transform). Use the indented form.`);
|
|
289
|
-
}
|
|
290
|
-
else if (depth === 0 && tag === 'EFFECT') {
|
|
291
|
-
throw schemaError(tk, `Inline schema body does not support '~>' (computed getter). Use the indented form.`);
|
|
292
|
-
}
|
|
293
|
-
else if (depth === 0 && tag === 'UNARY_MATH' && tk[1] === '!' &&
|
|
294
|
-
tokens[end + 1]?.[0] === 'COMPARE' && tokens[end + 1][1] === '>') {
|
|
295
|
-
throw schemaError(tk, `Inline schema body does not support '!>' (eager derived). Use the indented form.`);
|
|
296
|
-
}
|
|
297
|
-
end++;
|
|
298
|
-
}
|
|
299
|
-
// A trailing TERMINATOR at the boundary (`;` that the lexer folded
|
|
300
|
-
// with `\n`, or a plain `\n` that happened to land inside our
|
|
301
|
-
// capture range) must remain in the token stream as a statement
|
|
302
|
-
// separator between this schema and whatever follows on the next
|
|
303
|
-
// line. Trim it out of the body / splice span so the parser
|
|
304
|
-
// keeps seeing it. splitBodyLines is safe with a body that
|
|
305
|
-
// doesn't end in TERMINATOR.
|
|
306
|
-
while (end > inlineStart && tokens[end - 1][0] === 'TERMINATOR') end--;
|
|
307
|
-
bodyTokens = tokens.slice(inlineStart, end);
|
|
308
|
-
endIdx = end;
|
|
309
|
-
// Empty inline body (`X = schema :shape;` with nothing after the
|
|
310
|
-
// leading `;`) is almost always a typo — an indented body that
|
|
311
|
-
// wasn't written, or a stray `;` on an otherwise complete decl.
|
|
312
|
-
// Fail loud rather than emit a schema with no entries.
|
|
313
|
-
if (!bodyTokens.length) {
|
|
314
|
-
throw schemaError(schemaTok,
|
|
315
|
-
`Inline schema body is empty. Either add '; field; …' entries after 'schema${kindToken ? ' :' + kind : ''};' or switch to the indented form.`);
|
|
316
|
-
}
|
|
317
|
-
} else {
|
|
318
|
-
if (tokens[j]?.[0] === 'TERMINATOR') j++;
|
|
319
|
-
if (tokens[j]?.[0] !== 'INDENT') {
|
|
320
|
-
throw schemaError(schemaTok,
|
|
321
|
-
`Expected indented schema body after 'schema${kindToken ? ' :' + kind : ''}'.`);
|
|
322
|
-
}
|
|
323
|
-
let indentIdx = j;
|
|
324
|
-
let outdentIdx = findMatchingOutdent(tokens, indentIdx);
|
|
325
|
-
if (outdentIdx < 0) {
|
|
326
|
-
throw schemaError(tokens[indentIdx], 'Unterminated schema body.');
|
|
327
|
-
}
|
|
328
|
-
bodyTokens = tokens.slice(indentIdx + 1, outdentIdx);
|
|
329
|
-
endIdx = outdentIdx + 1; // include the OUTDENT itself in the replaced span
|
|
330
|
-
}
|
|
331
|
-
|
|
332
|
-
let descriptor = parseSchemaBody(kind, bodyTokens, {
|
|
333
|
-
schemaLoc: schemaTok.loc,
|
|
334
|
-
kindLoc: kindToken?.loc ?? null,
|
|
335
|
-
kind,
|
|
336
|
-
// Snapshot pragmas in effect at this decl so later pragma writes
|
|
337
|
-
// don't retroactively change already-parsed schemas.
|
|
338
|
-
defaultMaxString: config?.defaultMaxString ?? null,
|
|
339
|
-
});
|
|
340
|
-
|
|
341
|
-
// Replace range `[i, endIdx-1]` with `SCHEMA SCHEMA_BODY`.
|
|
342
|
-
let schemaNewTok = mkToken('SCHEMA', 'schema', schemaTok);
|
|
343
|
-
let bodyNewTok = mkToken('SCHEMA_BODY', kind, schemaTok);
|
|
344
|
-
bodyNewTok.data = { descriptor };
|
|
345
|
-
tokens.splice(i, endIdx - i, schemaNewTok, bodyNewTok);
|
|
346
|
-
}
|
|
347
|
-
|
|
348
|
-
// ============================================================================
|
|
349
|
-
// Sub-parser — fielded and enum modes
|
|
350
|
-
// ============================================================================
|
|
351
|
-
|
|
352
|
-
function parseSchemaBody(kind, bodyTokens, ctx) {
|
|
353
|
-
let entries = [];
|
|
354
|
-
let lines = splitBodyLines(bodyTokens);
|
|
355
|
-
|
|
356
|
-
// Kind inference: a body whose first non-empty line begins with a
|
|
357
|
-
// SYMBOL token is unambiguously an enum. Promote the default :input
|
|
358
|
-
// kind to :enum so `schema\n :draft\n :active` needs no marker.
|
|
359
|
-
// Explicit `:input` or any other kind stays as written.
|
|
360
|
-
if (kind === KIND_DEFAULT && !ctx.kindLoc && lines.length > 0 &&
|
|
361
|
-
lines[0][0]?.[0] === 'SYMBOL') {
|
|
362
|
-
kind = 'enum';
|
|
363
|
-
ctx.kind = 'enum';
|
|
364
|
-
}
|
|
365
|
-
|
|
366
|
-
if (kind === 'enum') {
|
|
367
|
-
for (let line of lines) {
|
|
368
|
-
parseEnumLine(line, entries);
|
|
369
|
-
}
|
|
370
|
-
} else {
|
|
371
|
-
for (let line of lines) {
|
|
372
|
-
parseFieldedLine(kind, line, entries, ctx);
|
|
373
|
-
}
|
|
374
|
-
// Capability-matrix enforcement by kind. `@mixin` is allowed as a
|
|
375
|
-
// field-inclusion directive on every fielded kind because it adds
|
|
376
|
-
// fields (not behavior). Other directives are restricted per the
|
|
377
|
-
// matrix in the language reference.
|
|
378
|
-
if (kind === 'mixin') {
|
|
379
|
-
for (let e of entries) {
|
|
380
|
-
if (e.tag === 'method' || e.tag === 'computed' || e.tag === 'hook') {
|
|
381
|
-
throw schemaError({ loc: e.headerLoc || e.loc },
|
|
382
|
-
`:mixin schemas are fields-only. '${e.name}' is a ${e.tag}; move it to a :shape or :model.`);
|
|
383
|
-
}
|
|
384
|
-
if (e.tag === 'ensure') {
|
|
385
|
-
throw schemaError({ loc: e.headerLoc || e.loc },
|
|
386
|
-
`:mixin schemas don't accept @ensure refinements. Move the invariant to a :shape or :model that composes this mixin.`);
|
|
387
|
-
}
|
|
388
|
-
if (e.tag === 'directive' && e.name !== 'mixin') {
|
|
389
|
-
throw schemaError({ loc: e.loc },
|
|
390
|
-
`:mixin schemas only accept '@mixin Name' directives. '@${e.name}' is not allowed.`);
|
|
391
|
-
}
|
|
392
|
-
}
|
|
393
|
-
} else if (kind === 'input') {
|
|
394
|
-
// :input accepts fields, @mixin, and @ensure (cross-field predicates
|
|
395
|
-
// are a natural fit for form validation — "passwords must match").
|
|
396
|
-
// Other methods, computed getters, hooks, and non-mixin directives
|
|
397
|
-
// are rejected.
|
|
398
|
-
for (let e of entries) {
|
|
399
|
-
if (e.tag === 'method' || e.tag === 'computed' || e.tag === 'hook') {
|
|
400
|
-
throw schemaError({ loc: e.headerLoc || e.loc },
|
|
401
|
-
`:input schemas are fields-only. '${e.name}' is a ${e.tag}; use :shape or :model if you need behavior.`);
|
|
402
|
-
}
|
|
403
|
-
if (e.tag === 'directive' && e.name !== 'mixin') {
|
|
404
|
-
throw schemaError({ loc: e.loc },
|
|
405
|
-
`:input schemas only accept '@mixin Name' and '@ensure'. '@${e.name}' is not allowed.`);
|
|
406
|
-
}
|
|
407
|
-
}
|
|
408
|
-
} else if (kind === 'shape') {
|
|
409
|
-
// :shape accepts fields, methods, computed, and @mixin. Hooks
|
|
410
|
-
// and ORM-bound directives (timestamps, softDelete, index,
|
|
411
|
-
// belongs_to, has_many, has_one, link) are :model-only.
|
|
412
|
-
for (let e of entries) {
|
|
413
|
-
if (e.tag === 'hook') {
|
|
414
|
-
throw schemaError({ loc: e.headerLoc || e.loc },
|
|
415
|
-
`:shape schemas don't have lifecycle hooks. '${e.name}' runs only on :model; move it or remove it.`);
|
|
416
|
-
}
|
|
417
|
-
if (e.tag === 'directive' && e.name !== 'mixin') {
|
|
418
|
-
throw schemaError({ loc: e.loc },
|
|
419
|
-
`:shape schemas only accept '@mixin Name'. '@${e.name}' is :model-only.`);
|
|
420
|
-
}
|
|
421
|
-
}
|
|
422
|
-
}
|
|
423
|
-
}
|
|
424
|
-
|
|
425
|
-
return {
|
|
426
|
-
kind,
|
|
427
|
-
loc: ctx.schemaLoc,
|
|
428
|
-
kindLoc: ctx.kindLoc,
|
|
429
|
-
entries,
|
|
430
|
-
};
|
|
431
|
-
}
|
|
432
|
-
|
|
433
|
-
// Split top-level lines inside a schema body. Nested INDENT/OUTDENT stays
|
|
434
|
-
// inside its owning line (belongs to a callable body, multi-line
|
|
435
|
-
// constraints, etc.). Each returned line is the raw sub-stream of tokens
|
|
436
|
-
// for that line (no outer TERMINATORs).
|
|
437
|
-
function splitBodyLines(tokens) {
|
|
438
|
-
let lines = [];
|
|
439
|
-
let cur = [];
|
|
440
|
-
let depth = 0;
|
|
441
|
-
for (let t of tokens) {
|
|
442
|
-
let tag = t[0];
|
|
443
|
-
if (tag === 'INDENT') depth++;
|
|
444
|
-
if (tag === 'OUTDENT') depth--;
|
|
445
|
-
if (tag === 'TERMINATOR' && depth === 0) {
|
|
446
|
-
if (cur.length) { lines.push(cur); cur = []; }
|
|
447
|
-
continue;
|
|
448
|
-
}
|
|
449
|
-
cur.push(t);
|
|
450
|
-
}
|
|
451
|
-
if (cur.length) lines.push(cur);
|
|
452
|
-
return lines;
|
|
453
|
-
}
|
|
454
|
-
|
|
455
|
-
// Fielded body: field, directive, or callable.
|
|
456
|
-
// Field-line grammar (v2, locked):
|
|
457
|
-
//
|
|
458
|
-
// name[!|?|#]* [type] [range] [default] [regex] [attrs] [, -> transform]
|
|
459
|
-
//
|
|
460
|
-
// Invariants enforced here:
|
|
461
|
-
// 1. Line classification: IDENTIFIER-start = field; PROPERTY-start (the
|
|
462
|
-
// lexer absorbs trailing `:` into the identifier's tag) = callable.
|
|
463
|
-
// 2. Type slot is optional — default is `string`. Identifier types
|
|
464
|
-
// (`email`, `integer`, …), array suffix (`string[]`), and string-
|
|
465
|
-
// literal unions (`"M" | "F" | "U"`) are the three valid shapes.
|
|
466
|
-
// 3. Literal unions require 2+ members, all string literals, no mixing
|
|
467
|
-
// with identifier types or null. Nullability is carried by the `?`
|
|
468
|
-
// modifier, not by union membership.
|
|
469
|
-
// 4. The `->` transform is TERMINAL — nothing follows it on the line.
|
|
470
|
-
// 5. Comma before `->` is required when anything precedes the arrow
|
|
471
|
-
// (type, range, regex, default, attrs). Only the bare form
|
|
472
|
-
// `name! -> body` parses comma-less, because there's nothing to
|
|
473
|
-
// elide.
|
|
474
|
-
// 6. Each comma-separated rest part is one of: `[…]` default,
|
|
475
|
-
// `{…}` attrs, `/regex/` pattern, `n..n` range, `-> transform`.
|
|
476
|
-
// The head token uniquely identifies the form. Duplicates of any
|
|
477
|
-
// single form are rejected.
|
|
478
|
-
// VARCHAR-like primitive types — the `schema.defaultMaxString` pragma
|
|
479
|
-
// applies a default `max` to these when no explicit range/regex/literals
|
|
480
|
-
// are declared. `text` stays uncapped by design (it's the opt-out for
|
|
481
|
-
// long-form content); `uuid` has fixed length; `json`/`any` aren't strings.
|
|
482
|
-
const VARCHAR_TYPES = new Set(['string', 'email', 'url', 'phone', 'zip']);
|
|
483
|
-
|
|
484
|
-
function parseFieldedLine(kind, line, entries, ctx) {
|
|
485
|
-
let first = line[0];
|
|
486
|
-
if (!first) return;
|
|
487
|
-
|
|
488
|
-
// Directive: @NAME [args]
|
|
489
|
-
if (first[0] === '@') {
|
|
490
|
-
let nameTok = line[1];
|
|
491
|
-
if (!nameTok || (nameTok[0] !== 'IDENTIFIER' && nameTok[0] !== 'PROPERTY')) {
|
|
492
|
-
throw schemaError(first, "Expected directive name after '@'.");
|
|
493
|
-
}
|
|
494
|
-
let argTokens = line.slice(2);
|
|
495
|
-
let dname = nameTok[1];
|
|
496
|
-
|
|
497
|
-
// `@ensure` is a refinement directive with its own grammar — it takes
|
|
498
|
-
// either an inline `"msg", (args) -> body` or a bracketed array of
|
|
499
|
-
// those pairs. Emits one `tag: "ensure"` entry per refinement; the
|
|
500
|
-
// per-entry shape mirrors methods so compileCallableFn-style codegen
|
|
501
|
-
// can fire.
|
|
502
|
-
if (dname === 'ensure') {
|
|
503
|
-
let pairs = parseEnsurePairs(argTokens, first);
|
|
504
|
-
for (let p of pairs) {
|
|
505
|
-
entries.push({
|
|
506
|
-
tag: 'ensure',
|
|
507
|
-
name: 'ensure',
|
|
508
|
-
message: p.message,
|
|
509
|
-
paramTokens: p.paramTokens,
|
|
510
|
-
bodyTokens: p.bodyTokens,
|
|
511
|
-
loc: p.loc,
|
|
512
|
-
headerLoc: first.loc,
|
|
513
|
-
});
|
|
514
|
-
}
|
|
515
|
-
return;
|
|
516
|
-
}
|
|
517
|
-
|
|
518
|
-
// Pre-parse structured args so shadow-TS and runtime-codegen share
|
|
519
|
-
// the same descriptor shape. Relation and mixin directives get a
|
|
520
|
-
// `[{target, optional?}]` array; other directives leave `args` unset.
|
|
521
|
-
let args = null;
|
|
522
|
-
if (dname === 'belongs_to' || dname === 'has_many' || dname === 'has_one' ||
|
|
523
|
-
dname === 'one' || dname === 'many' || dname === 'mixin') {
|
|
524
|
-
let t0 = argTokens[0];
|
|
525
|
-
if (t0 && (t0[0] === 'IDENTIFIER' || t0[0] === 'PROPERTY')) {
|
|
526
|
-
let optional = t0.data?.predicate === true;
|
|
527
|
-
if (!optional && argTokens[1]?.[0] === '?') optional = true;
|
|
528
|
-
args = [{ target: t0[1], optional }];
|
|
529
|
-
}
|
|
530
|
-
}
|
|
531
|
-
entries.push({
|
|
532
|
-
tag: 'directive',
|
|
533
|
-
name: dname,
|
|
534
|
-
args,
|
|
535
|
-
argTokens,
|
|
536
|
-
loc: first.loc,
|
|
537
|
-
});
|
|
538
|
-
return;
|
|
539
|
-
}
|
|
540
|
-
|
|
541
|
-
// The identifier regex absorbs a trailing `:` by retagging the ident as
|
|
542
|
-
// PROPERTY and emitting a separate `:` token. So a line starting with
|
|
543
|
-
// PROPERTY is always a callable (`name: -> body` or `name: ~> body`);
|
|
544
|
-
// a line starting with IDENTIFIER is always a field.
|
|
545
|
-
if (first[0] === 'PROPERTY') {
|
|
546
|
-
parseCallableLine(kind, first, line, entries);
|
|
547
|
-
return;
|
|
548
|
-
}
|
|
549
|
-
if (first[0] !== 'IDENTIFIER') {
|
|
550
|
-
throw schemaError(first,
|
|
551
|
-
`Unexpected ${first[0]} at schema top level. Allowed: fields ('name! type'), directives ('@name'), methods ('name: -> body'), or computed getters ('name: ~> body').`);
|
|
552
|
-
}
|
|
553
|
-
|
|
554
|
-
let name = first[1];
|
|
555
|
-
|
|
556
|
-
// Guard: `name:` without the colon absorbed — shouldn't happen but
|
|
557
|
-
// produces a friendly error if it does.
|
|
558
|
-
if (line[1]?.[0] === ':') {
|
|
559
|
-
throw schemaError(line[1],
|
|
560
|
-
`Schema fields use 'name type' (space, no colon). For methods or computed use 'name: -> body' or 'name: ~> body'.`);
|
|
561
|
-
}
|
|
562
|
-
|
|
563
|
-
// Field: IDENTIFIER [modifiers] TYPE [, constraints] [, attrs]
|
|
564
|
-
let modifiers = collectModifiers(first);
|
|
565
|
-
let pos = 1;
|
|
566
|
-
|
|
567
|
-
// Adjacent `!`, `#`, `?` modifier tokens. `!` and `?` are absorbed into
|
|
568
|
-
// the IDENTIFIER's data by the main lexer. `#` arrives as a standalone
|
|
569
|
-
// token because the schema commentToken exception kicks in when `#` is
|
|
570
|
-
// adjacent to an identifier. A modifier must be unspaced from the
|
|
571
|
-
// token it follows, so we check the preceding token's `.spaced` flag
|
|
572
|
-
// (which the whitespace pass sets to true when whitespace follows).
|
|
573
|
-
while (pos < line.length) {
|
|
574
|
-
let tk = line[pos];
|
|
575
|
-
let adjacent = line[pos - 1] && !line[pos - 1].spaced;
|
|
576
|
-
if (!adjacent) break;
|
|
577
|
-
if (tk[0] === '#' || tk[0] === '?' || tk[0] === '!') {
|
|
578
|
-
modifiers.push(tk[0]);
|
|
579
|
-
pos++;
|
|
580
|
-
continue;
|
|
581
|
-
}
|
|
582
|
-
break;
|
|
583
|
-
}
|
|
584
|
-
|
|
585
|
-
// Reject a stray colon here — gives a clear diagnostic for the common
|
|
586
|
-
// mistake `name: type` instead of `name type`.
|
|
587
|
-
let typeFirst = line[pos];
|
|
588
|
-
if (typeFirst?.[0] === ':') {
|
|
589
|
-
throw schemaError(typeFirst,
|
|
590
|
-
`Schema fields use 'name type' (space, no colon). Got 'name:'. For methods/computed use 'name: -> body' or 'name: ~> body'.`);
|
|
591
|
-
}
|
|
592
|
-
|
|
593
|
-
// Type: IDENTIFIER (optionally followed by `[]` for array) OR a
|
|
594
|
-
// string-literal union like `"M" | "F" | "U"`. The type slot is
|
|
595
|
-
// OPTIONAL — if the next token isn't a type-starting token, the
|
|
596
|
-
// field defaults to `string` and we fall through to constraint
|
|
597
|
-
// parsing.
|
|
598
|
-
let typeName = 'string';
|
|
599
|
-
let literals = null;
|
|
600
|
-
if (typeFirst?.[0] === 'IDENTIFIER') {
|
|
601
|
-
typeName = typeFirst[1];
|
|
602
|
-
pos++;
|
|
603
|
-
} else if (typeFirst?.[0] === 'STRING') {
|
|
604
|
-
// Literal union: collect alternating STRING | STRING | STRING...
|
|
605
|
-
literals = [JSON.parse(typeFirst[1])];
|
|
606
|
-
pos++;
|
|
607
|
-
while (line[pos]?.[0] === '|' && line[pos + 1]?.[0] === 'STRING') {
|
|
608
|
-
pos++; // consume '|'
|
|
609
|
-
literals.push(JSON.parse(line[pos][1]));
|
|
610
|
-
pos++;
|
|
611
|
-
}
|
|
612
|
-
// Forbid mixing with identifier types or null/undefined.
|
|
613
|
-
if (line[pos]?.[0] === '|') {
|
|
614
|
-
let next = line[pos + 1];
|
|
615
|
-
let tag = next?.[0] ?? '<end>';
|
|
616
|
-
throw schemaError(next || line[pos],
|
|
617
|
-
`Literal unions contain string literals only. '${tag}' is not allowed as a union member. Use the '?' modifier for nullability.`);
|
|
618
|
-
}
|
|
619
|
-
if (literals.length < 2) {
|
|
620
|
-
throw schemaError(typeFirst,
|
|
621
|
-
`Literal union needs at least two string literals. Use '${JSON.stringify(literals[0])}' as a default with '[${JSON.stringify(literals[0])}]' instead.`);
|
|
622
|
-
}
|
|
623
|
-
typeName = 'literal-union';
|
|
624
|
-
}
|
|
625
|
-
let array = false;
|
|
626
|
-
// `string[]` tokenizes as IDENTIFIER INDEX_START INDEX_END (or `[` `]`
|
|
627
|
-
// depending on context; closeOpenIndexes retags the empty bracket pair
|
|
628
|
-
// as INDEX_START/INDEX_END when it follows an indexable token).
|
|
629
|
-
let openTag = line[pos]?.[0];
|
|
630
|
-
let closeTag = line[pos + 1]?.[0];
|
|
631
|
-
if ((openTag === '[' || openTag === 'INDEX_START') &&
|
|
632
|
-
(closeTag === ']' || closeTag === 'INDEX_END')) {
|
|
633
|
-
array = true;
|
|
634
|
-
pos += 2;
|
|
635
|
-
}
|
|
636
|
-
|
|
637
|
-
// Remaining tokens on the line are a mix of `[…]` constraints (default,
|
|
638
|
-
// regex), `{…}` attrs, and `n..n` range constraints. Each form is
|
|
639
|
-
// self-identifying by its head token shape. Raw token slices are
|
|
640
|
-
// captured here and semantic-parsed at compile time.
|
|
641
|
-
let rest = line.slice(pos);
|
|
642
|
-
|
|
643
|
-
// Comma-required rule: if a type was consumed and the next token is
|
|
644
|
-
// `->` (no comma separator), reject with a clear diagnostic. The
|
|
645
|
-
// comma is a structural boundary between the field declaration and
|
|
646
|
-
// the transform; skipping it makes `email!# email -> fn` read as
|
|
647
|
-
// if 'email' were an argument to the arrow, which it isn't.
|
|
648
|
-
let typeConsumed = typeFirst?.[0] === 'IDENTIFIER' || typeFirst?.[0] === 'STRING';
|
|
649
|
-
if (typeConsumed && rest[0]?.[0] === '->') {
|
|
650
|
-
throw schemaError(rest[0],
|
|
651
|
-
`Field '${name}' has a transform after the type; a comma is required before '->'. Write '${name} ${typeName}, -> …'.`);
|
|
652
|
-
}
|
|
653
|
-
let constraintTokens = null;
|
|
654
|
-
let attrsTokens = null;
|
|
655
|
-
let rangeTokens = null;
|
|
656
|
-
let regexToken = null;
|
|
657
|
-
let transformTokens = null;
|
|
658
|
-
|
|
659
|
-
if (rest.length > 0) {
|
|
660
|
-
// The leading comma is only required when a type was consumed. If
|
|
661
|
-
// the type slot was empty, constraints may follow the modifiers
|
|
662
|
-
// directly (`name? [1, 20]`). Both shapes produce the same parts.
|
|
663
|
-
if (rest[0]?.[0] === ',') {
|
|
664
|
-
rest = rest.slice(1);
|
|
665
|
-
}
|
|
666
|
-
// Split top-level by commas. Multi-line trailers (`name! type,\n
|
|
667
|
-
// [8, 100]`) introduce surrounding INDENT/OUTDENT tokens that
|
|
668
|
-
// don't affect semantics — strip them from each part so the head
|
|
669
|
-
// is the literal `[` or `{`.
|
|
670
|
-
let parts = splitTopLevelByComma(rest);
|
|
671
|
-
for (let i = 0; i < parts.length; i++) {
|
|
672
|
-
let part = parts[i];
|
|
673
|
-
// Strip leading INDENT/TERMINATOR so we can inspect the head token.
|
|
674
|
-
while (part.length && (part[0][0] === 'INDENT' || part[0][0] === 'TERMINATOR')) {
|
|
675
|
-
part = part.slice(1);
|
|
676
|
-
}
|
|
677
|
-
if (!part.length) continue;
|
|
678
|
-
|
|
679
|
-
// A `->` at the head of a part is the transform arrow — the
|
|
680
|
-
// preceding comma separated it out. `->` elsewhere in the part
|
|
681
|
-
// (after content) means the user wrote something like
|
|
682
|
-
// `email -> fn` without the separator; the comma is required
|
|
683
|
-
// as a structural boundary between the field declaration and
|
|
684
|
-
// the transform.
|
|
685
|
-
if (part[0][0] !== '->') {
|
|
686
|
-
let innerArrow = findTopLevelArrowIdx(part);
|
|
687
|
-
if (innerArrow > 0) {
|
|
688
|
-
throw schemaError(part[innerArrow],
|
|
689
|
-
`Field '${name}' has a transform after other content; a comma is required before '->'. Write 'name! <constraints>, -> <body>'.`);
|
|
690
|
-
}
|
|
691
|
-
}
|
|
692
|
-
let head = part[0];
|
|
693
|
-
// For non-transform parts, also strip trailing OUTDENT/TERMINATOR.
|
|
694
|
-
// Transform parts own their INDENT/OUTDENT wrapping — parseBodyTokens
|
|
695
|
-
// handles it.
|
|
696
|
-
if (head[0] !== '->') {
|
|
697
|
-
while (part.length && (part[part.length - 1][0] === 'OUTDENT' || part[part.length - 1][0] === 'TERMINATOR')) {
|
|
698
|
-
part = part.slice(0, -1);
|
|
699
|
-
}
|
|
700
|
-
if (!part.length) continue;
|
|
701
|
-
head = part[0];
|
|
702
|
-
}
|
|
703
|
-
if (head[0] === '[' || head[0] === 'INDEX_START') {
|
|
704
|
-
if (constraintTokens) {
|
|
705
|
-
throw schemaError(head,
|
|
706
|
-
`Field '${name}' has more than one '[…]' constraint. At most one default / regex bracket per field.`);
|
|
707
|
-
}
|
|
708
|
-
constraintTokens = part;
|
|
709
|
-
} else if (head[0] === '{') {
|
|
710
|
-
if (attrsTokens) {
|
|
711
|
-
throw schemaError(head,
|
|
712
|
-
`Field '${name}' has more than one '{…}' attrs bracket.`);
|
|
713
|
-
}
|
|
714
|
-
attrsTokens = part;
|
|
715
|
-
} else if (isRangeConstraintTokens(part)) {
|
|
716
|
-
if (rangeTokens) {
|
|
717
|
-
throw schemaError(head,
|
|
718
|
-
`Field '${name}' has more than one range constraint. Only one 'min..max' per field.`);
|
|
719
|
-
}
|
|
720
|
-
rangeTokens = part;
|
|
721
|
-
} else if (head[0] === 'REGEX' && part.length === 1) {
|
|
722
|
-
if (regexToken) {
|
|
723
|
-
throw schemaError(head,
|
|
724
|
-
`Field '${name}' has more than one regex constraint.`);
|
|
725
|
-
}
|
|
726
|
-
regexToken = head;
|
|
727
|
-
} else if (head[0] === '->') {
|
|
728
|
-
// Transform part. Must be the last comma-separated part on the
|
|
729
|
-
// line (transform is terminal).
|
|
730
|
-
if (i !== parts.length - 1) {
|
|
731
|
-
throw schemaError(head,
|
|
732
|
-
`Transform '-> …' must be the last element on the field line for '${name}'.`);
|
|
733
|
-
}
|
|
734
|
-
transformTokens = part.slice(1);
|
|
735
|
-
} else {
|
|
736
|
-
throw schemaError(head,
|
|
737
|
-
`Unexpected trailer for field '${name}'. Expected '[…]' default, '{…}' attrs, '/regex/', 'min..max' range, or '-> transform'.`);
|
|
738
|
-
}
|
|
739
|
-
}
|
|
740
|
-
}
|
|
741
|
-
|
|
742
|
-
// Array suffix is incompatible with literal-union types in v2.
|
|
743
|
-
if (array && literals) {
|
|
744
|
-
throw schemaError(typeFirst,
|
|
745
|
-
`Array-of-literal-union is not supported. Use 'string[]' if you need an array of strings.`);
|
|
746
|
-
}
|
|
747
|
-
|
|
748
|
-
// The `schema.defaultMaxString` pragma baked into this schema's ctx
|
|
749
|
-
// is a candidate for any VARCHAR-like primitive that isn't already
|
|
750
|
-
// narrowed by a regex or literal-union. The final "fill it in only
|
|
751
|
-
// if max is still absent" decision happens in mergeFieldConstraints
|
|
752
|
-
// so open-ended ranges (`5..` → only min) still get the pragma's max.
|
|
753
|
-
// Using `!= null` (not truthy) keeps future non-positive pragma
|
|
754
|
-
// values valid if more keys land here.
|
|
755
|
-
let defaultMax = null;
|
|
756
|
-
if (ctx?.defaultMaxString != null && !regexToken && !literals &&
|
|
757
|
-
VARCHAR_TYPES.has(typeName)) {
|
|
758
|
-
defaultMax = ctx.defaultMaxString;
|
|
759
|
-
}
|
|
760
|
-
|
|
761
|
-
entries.push({
|
|
762
|
-
tag: 'field',
|
|
763
|
-
name,
|
|
764
|
-
modifiers,
|
|
765
|
-
typeName,
|
|
766
|
-
array,
|
|
767
|
-
literals,
|
|
768
|
-
constraintTokens,
|
|
769
|
-
attrsTokens,
|
|
770
|
-
rangeTokens,
|
|
771
|
-
regexToken,
|
|
772
|
-
transformTokens,
|
|
773
|
-
defaultMax,
|
|
774
|
-
loc: first.loc,
|
|
775
|
-
});
|
|
776
|
-
}
|
|
777
|
-
|
|
778
|
-
// Locate a depth-zero `->` in a constraint part. Returns the arrow's index,
// or -1 when no top-level arrow exists. Lets callers split shapes like
// `8..100 -> transform` without requiring an intervening comma.
function findTopLevelArrowIdx(tokens) {
  const openers = ['(', '[', '{', 'CALL_START', 'INDEX_START', 'PARAM_START'];
  const closers = [')', ']', '}', 'CALL_END', 'INDEX_END', 'PARAM_END'];
  let nesting = 0;
  for (let idx = 0; idx < tokens.length; idx++) {
    const tag = tokens[idx][0];
    if (openers.includes(tag)) {
      nesting++;
    } else if (closers.includes(tag)) {
      nesting--;
    } else if (nesting === 0 && tag === '->') {
      return idx;
    }
  }
  return -1;
}
|
|
795
|
-
|
|
796
|
-
// Recognize a `min..max` range constraint. Each endpoint is an optional
// NUMBER with an optional leading `-`; either side may be omitted for an
// open-ended range (`..N` = max only, `N..` = min only), but a bare `..`
// with no endpoint at all is rejected, as is any trailing token. Operates
// on a top-level comma-split part; callers strip surrounding INDENT/OUTDENT.
function isRangeConstraintTokens(tokens) {
  let cursor = 0;
  // Consume one optional signed NUMBER; report whether one was present.
  const eatEndpoint = () => {
    if (tokens[cursor]?.[0] === '-' && tokens[cursor + 1]?.[0] === 'NUMBER') {
      cursor += 2;
      return true;
    }
    if (tokens[cursor]?.[0] === 'NUMBER') {
      cursor += 1;
      return true;
    }
    return false;
  };
  const hasMin = eatEndpoint();
  if (tokens[cursor]?.[0] !== '..') return false;
  cursor++;
  const hasMax = eatEndpoint();
  // At least one endpoint, and nothing left over.
  return (hasMin || hasMax) && cursor === tokens.length;
}
|
|
818
|
-
|
|
819
|
-
// Parse a top-level callable line (`name: -> …`, `name: ~> …`, `name: !> …`)
// into a method / computed / derived / hook entry pushed onto `entries`.
// `kind` is the schema kind (hooks only exist on :model schemas).
function parseCallableLine(kind, headerTok, line, entries) {
  const name = headerTok[1];
  const colonTok = line[1];
  if (!colonTok || colonTok[0] !== ':') {
    throw schemaError(headerTok,
      `Expected ':' after '${name}' before arrow.`);
  }
  // Three arrow forms:
  //   name: -> body — method / hook
  //   name: ~> body — lazy computed getter (EFFECT token)
  //   name: !> body — eager derived field; the lexer delivers it as an
  //                   unspaced UNARY_MATH '!' followed by COMPARE '>'
  const arrowTok = line[2];
  const nextTok = line[3];
  let arrow = null;
  let bodyStart = 3;
  if (arrowTok && arrowTok[0] === '->') {
    arrow = '->';
  } else if (arrowTok && arrowTok[0] === 'EFFECT') {
    arrow = '~>';
  } else if (arrowTok && arrowTok[0] === 'UNARY_MATH' && arrowTok[1] === '!' &&
             nextTok && nextTok[0] === 'COMPARE' && nextTok[1] === '>' &&
             !arrowTok.spaced) {
    arrow = '!>';
    bodyStart = 4;
  }
  if (arrow === null) {
    throw schemaError(colonTok,
      `Schema top-level '${name}:' must be followed by '->' (method/hook), '~>' (computed getter), or '!>' (eager derived).`);
  }
  const bodyTokens = line.slice(bodyStart);
  const isHook = HOOK_NAMES.has(name);
  // Classify the entry: arrow shape wins; a thin-arrow on a :model whose
  // name is a recognized hook becomes a hook, otherwise it's a method.
  let entryTag = 'method';
  if (arrow === '~>') {
    entryTag = 'computed';
  } else if (arrow === '!>') {
    entryTag = 'derived';
  } else if (kind === 'model' && isHook) {
    entryTag = 'hook';
  }
  entries.push({
    tag: entryTag,
    name,
    arrow,
    paramTokens: [],
    bodyTokens,
    headerLoc: headerTok.loc,
    arrowLoc: arrowTok.loc,
  });
}
|
|
873
|
-
|
|
874
|
-
// Parse `@ensure` arguments into refinement pairs. Two source forms:
//
//   inline: `@ensure "msg", (args) -> body`
//   array:  `@ensure [ "msg", (args) -> body
//                    , "msg", (args) -> body
//                    , ... ]`
//
// Both compile to the SAME entry shape — each pair becomes one
// `{tag: "ensure", message, paramTokens, bodyTokens}` entry, so downstream
// runtime code can't distinguish the two spellings.
//
// Rip treats `@ensure args...` as a call, so the arguments usually arrive
// wrapped in an implicit CALL_START/CALL_END pair; that wrapper is stripped
// before inspecting the array bracket or the inline string.
function parseEnsurePairs(argTokens, directiveTok) {
  const needArgs = "@ensure requires 'message, (x) -> body' or '[...]' array of pairs.";
  let tokens = argTokens;
  if (!tokens.length) {
    throw schemaError(directiveTok, needArgs);
  }
  // Strip the implicit call wrapper if present.
  if (tokens[0]?.[0] === 'CALL_START' &&
      tokens[tokens.length - 1]?.[0] === 'CALL_END') {
    tokens = tokens.slice(1, -1);
  }
  if (!tokens.length) {
    throw schemaError(directiveTok, needArgs);
  }

  const first = tokens[0];

  // Array form: tokens start with `[` (or INDEX_START).
  if (first[0] === '[' || first[0] === 'INDEX_START') {
    const inner = extractEnsureBracketInner(tokens, first);
    const elements = splitEnsureElements(inner);
    if (elements.length === 0) {
      throw schemaError(first, "@ensure [...] must contain at least one 'message, fn' pair.");
    }
    if (elements.length % 2 !== 0) {
      throw schemaError(first,
        `@ensure [...] must have pairs of 'message, fn' (got ${elements.length} elements; odd count).`);
    }
    const pairs = [];
    for (let i = 0; i < elements.length; i += 2) {
      pairs.push(extractEnsurePair(elements[i], elements[i + 1], first));
    }
    return pairs;
  }

  // Inline form: STRING, (args) -> body — exactly two comma-separated parts.
  const parts = splitTopLevelByComma(tokens);
  if (parts.length < 2) {
    throw schemaError(first,
      "@ensure inline form must be 'message, (x) -> body'. Did you forget the comma?");
  }
  if (parts.length > 2) {
    throw schemaError(first,
      `@ensure inline form takes exactly 'message, fn' (got ${parts.length} comma-separated parts). Use '@ensure [...]' for multiple refinements.`);
  }
  return [extractEnsurePair(parts[0], parts[1], first)];
}
|
|
936
|
-
|
|
937
|
-
// Walk a `[ ... ]` token run and return the slice between the brackets.
// Rejects any tokens after the closing bracket and throws on an unclosed
// `[`. When the bracket body is multi-line, Rip wraps the whole contents
// in one INDENT/OUTDENT pair; that outermost pair is stripped here because
// @ensure splits pairs at depth 0 and the wrap would hide every internal
// comma/newline.
function extractEnsureBracketInner(tokens, openTok) {
  // True when a single INDENT...OUTDENT pair spans the entire slice.
  const wrappedByIndentPair = (toks) => {
    if (toks.length < 2 ||
        toks[0][0] !== 'INDENT' ||
        toks[toks.length - 1][0] !== 'OUTDENT') {
      return false;
    }
    let wd = 0;
    for (let k = 0; k < toks.length; k++) {
      if (toks[k][0] === 'INDENT') {
        wd++;
      } else if (toks[k][0] === 'OUTDENT') {
        wd--;
        if (wd === 0) return k === toks.length - 1;
      }
    }
    return false;
  };

  let level = 0;
  let collected = [];
  for (let i = 0; i < tokens.length; i++) {
    const tok = tokens[i];
    const tag = tok[0];
    if (tag === '[' || tag === 'INDEX_START') {
      level++;
      // The opening bracket itself is not inner content.
      if (level === 1) continue;
    }
    if (tag === ']' || tag === 'INDEX_END') {
      level--;
      if (level === 0) {
        if (i < tokens.length - 1) {
          throw schemaError(tokens[i + 1],
            "@ensure [...] must be the only argument — extra tokens after ']'.");
        }
        if (wrappedByIndentPair(collected)) {
          collected = collected.slice(1, -1);
        }
        return collected;
      }
    }
    if (level >= 1) collected.push(tok);
  }
  throw schemaError(openTok, "@ensure: unclosed '['.");
}
|
|
981
|
-
|
|
982
|
-
// Split an @ensure array body into elements. Mirrors Rip's array-literal
// rule: both `,` and newlines (TERMINATOR) separate elements at depth 0,
// so users can write rows without trailing commas:
//
//   @ensure [
//     "msg1", (u) -> body
//     "msg2", (u) -> body    <-- no comma needed between pairs
//   ]
function splitEnsureElements(tokens) {
  const OPENERS = new Set(['(', '[', '{', 'CALL_START', 'INDEX_START', 'PARAM_START', 'INDENT']);
  const CLOSERS = new Set([')', ']', '}', 'CALL_END', 'INDEX_END', 'PARAM_END', 'OUTDENT']);
  const elements = [];
  let bucket = [];
  let nesting = 0;
  for (const tok of tokens) {
    const tag = tok[0];
    if (OPENERS.has(tag)) nesting++;
    if (CLOSERS.has(tag)) nesting--;
    if (nesting === 0 && (tag === ',' || tag === 'TERMINATOR')) {
      // Separator at depth zero — close out the current element (if any).
      if (bucket.length) {
        elements.push(bucket);
        bucket = [];
      }
      continue;
    }
    bucket.push(tok);
  }
  if (bucket.length) elements.push(bucket);
  return elements;
}
|
|
1011
|
-
|
|
1012
|
-
// Build one refinement pair from `messagePart` and `fnPart` (token slices
// already split by splitTopLevelByComma). Shape is validated at parse time
// so typos surface as targeted diagnostics instead of runtime "expected
// function" noise. Returns {message, paramTokens, bodyTokens, loc}.
function extractEnsurePair(messagePart, fnPart, refTok) {
  if (!messagePart || !messagePart.length) {
    throw schemaError(refTok, "@ensure: missing message (expected a string literal).");
  }
  if (messagePart.length !== 1 || messagePart[0][0] !== 'STRING') {
    throw schemaError(messagePart[0] || refTok,
      "@ensure: each refinement's first element must be a string literal message.");
  }
  const msgTok = messagePart[0];
  const message = JSON.parse(msgTok[1]);

  if (!fnPart || !fnPart.length) {
    throw schemaError(msgTok, "@ensure: missing function after message.");
  }
  // The fn part must open with `(` / PARAM_START and contain `->`. A bare
  // `-> true` with no parameter list is rejected — refinements declare the
  // object parameter explicitly.
  const opener = fnPart[0];
  if (opener[0] !== '(' && opener[0] !== 'PARAM_START') {
    throw schemaError(opener,
      "@ensure: expected '(args) -> body' after the message. Predicates must declare their parameter explicitly — '(u) -> ...'.");
  }
  // Walk to the matching close paren, collecting parameter tokens.
  const paramTokens = [];
  let nesting = 1;
  let cursor = 1;
  while (cursor < fnPart.length && nesting > 0) {
    const tok = fnPart[cursor];
    const tag = tok[0];
    if (tag === '(' || tag === 'PARAM_START') nesting++;
    if (tag === ')' || tag === 'PARAM_END') {
      nesting--;
      if (nesting === 0) {
        cursor++;
        break;
      }
    }
    paramTokens.push(tok);
    cursor++;
  }
  if (nesting !== 0) {
    throw schemaError(opener, "@ensure: unclosed '(' in predicate parameters.");
  }
  const arrowTok = fnPart[cursor];
  if (!arrowTok || arrowTok[0] !== '->') {
    throw schemaError(arrowTok || fnPart[cursor - 1] || msgTok,
      "@ensure: expected '->' after predicate parameters.");
  }
  const bodyTokens = fnPart.slice(cursor + 1);
  if (!bodyTokens.length) {
    throw schemaError(arrowTok, "@ensure: predicate function body is empty.");
  }
  return { message, paramTokens, bodyTokens, loc: msgTok.loc };
}
|
|
1067
|
-
|
|
1068
|
-
// Extract parameter names from a `(u)` or `(u, opts)` token slice. Only
// plain identifiers are accepted — no destructuring, defaults, or rest
// args; refinements don't need that complexity yet.
function ensureParamNames(paramTokens, refTok) {
  if (!paramTokens.length) return [];
  const pieces = splitTopLevelByComma(paramTokens);
  return pieces.map((piece) => {
    const meaningful = piece.filter((t) => t[0] !== 'TERMINATOR');
    if (meaningful.length !== 1 || meaningful[0][0] !== 'IDENTIFIER') {
      throw schemaError(meaningful[0] || refTok,
        "@ensure: predicate parameters must be plain identifiers.");
    }
    return meaningful[0][1];
  });
}
|
|
1083
|
-
|
|
1084
|
-
// Parse one :enum body line into an enum-member entry on `entries`.
//
// Member forms:
//   :admin       bare symbol  → maps to the name string "admin"
//   :pending 0   valued symbol → maps "pending" (and 0) to 0
//
// Values are any literal (number, string, boolean, null, regex). Mixing
// bare and valued members in one enum is permitted but unusual — the Map
// becomes heterogeneous (bare entries hold name strings, valued entries
// hold their literal). Keep members uniform if downstream consumers care.
function parseEnumLine(line, entries) {
  const first = line[0];
  if (!first) return;
  if (first[0] === '@') {
    // Directives are rejected inside enums with a pointer to the right home.
    const nameTok = line[1];
    const dname = nameTok && (nameTok[0] === 'IDENTIFIER' || nameTok[0] === 'PROPERTY')
      ? nameTok[1]
      : 'directive';
    throw schemaError(first,
      `:enum schemas don't accept '@${dname}'. Enums hold only :symbol members. Move the invariant to a :shape or :model that uses this enum as a field type.`);
  }
  if (first[0] !== 'SYMBOL') {
    throw schemaError(first,
      `Enum member must be a :symbol. Use ':${first[1] ?? 'name'}' for a bare member or ':${first[1] ?? 'name'} value' for a valued one.`);
  }
  const name = first[1];
  const valueTok = line[1];
  if (!valueTok) {
    // Bare member — value stays undefined.
    entries.push({ tag: 'enum-member', name, value: undefined, loc: first.loc });
    return;
  }
  if (valueTok[0] === ':') {
    throw schemaError(valueTok,
      `Enum member ':${name}' — drop the ':' before the value. Use ':${name} value'.`);
  }
  if (line.length > 2) {
    throw schemaError(line[2],
      `Extra tokens after enum member ':${name}' value.`);
  }
  entries.push({
    tag: 'enum-member',
    name,
    value: literalOf(valueTok),
    loc: first.loc,
  });
}
|
|
1128
|
-
|
|
1129
|
-
// ============================================================================
|
|
1130
|
-
// Codegen — emitSchema
|
|
1131
|
-
// ============================================================================
|
|
1132
|
-
|
|
1133
|
-
// Emit the runtime `__schema({...})` call for a `schema` declaration.
// rest[0] is the SCHEMA_BODY node; the parser metadata bridge wraps the
// token value in `new String()` and copies token.data fields onto it, so
// the lexer-built descriptor surfaces via readDescriptor.
function emitSchemaNode(emitter, head, rest, context) {
  const node = rest[0];
  const descriptor = readDescriptor(node);
  if (!descriptor) {
    throw new Error('schema: missing descriptor on SCHEMA_BODY token');
  }
  emitter.usesSchemas = true;

  // emitAssignment threads the binding name through `_schemaName`
  // (parallels `_componentName`). When present it is embedded so
  // SchemaError, the generated class name, and debug output share a
  // stable identity.
  const schemaName = emitter._schemaName || null;

  const fields = [`kind: ${JSON.stringify(descriptor.kind)}`];
  if (schemaName) fields.push(`name: ${JSON.stringify(schemaName)}`);
  const entryCode = descriptor.entries.map((e) => entryLiteral(emitter, e)).join(', ');
  fields.push(`entries: [${entryCode}]`);
  return `__schema({${fields.join(', ')}})`;
}
|
|
1154
|
-
|
|
1155
|
-
// Pull the schema descriptor off a SCHEMA_BODY node. It may live directly
// on the node or under node.data (depending on how the metadata bridge
// copied it); non-objects (including null) yield null.
function readDescriptor(node) {
  if (!node || typeof node !== 'object') return null;
  return node.descriptor || node.data?.descriptor || null;
}
|
|
1162
|
-
|
|
1163
|
-
// Render one parsed schema entry as a JS object-literal source string for
// the emitted `__schema({entries: [...]})` call. Dispatches on entry tag.
function entryLiteral(emitter, e) {
  switch (e.tag) {
    case 'field': {
      const props = [
        `tag: "field"`,
        `name: ${JSON.stringify(e.name)}`,
        `modifiers: ${JSON.stringify(e.modifiers)}`,
        `typeName: ${JSON.stringify(e.typeName)}`,
        `array: ${e.array ? 'true' : 'false'}`,
      ];
      if (e.literals) {
        props.push(`literals: ${JSON.stringify(e.literals)}`);
      }
      // Evaluate each captured constraint slice, then merge into one
      // normalized constraints literal (at most one of each kind exists —
      // the parse layer enforced that).
      const range = e.rangeTokens ? compileRangeTokens(e.rangeTokens, e) : null;
      const bracket = e.constraintTokens ? compileConstraintsLiteral(e.constraintTokens, e) : null;
      const regex = e.regexToken ? regexLiteralOf(e.regexToken) : null;
      const merged = mergeFieldConstraints(range, bracket, regex, e);
      if (merged) props.push(`constraints: ${merged}`);
      if (e.transformTokens) {
        props.push(`transform: ${compileTransformFn(emitter, e.transformTokens)}`);
      }
      return `{${props.join(', ')}}`;
    }
    case 'directive': {
      const props = [`tag: "directive"`, `name: ${JSON.stringify(e.name)}`];
      const args = compileDirectiveArgsLiteral(e.name, e.argTokens || []);
      if (args) props.push(`args: ${args}`);
      return `{${props.join(', ')}}`;
    }
    case 'ensure': {
      const fnCode = compileEnsureFn(emitter, e);
      return `{tag: "ensure", message: ${JSON.stringify(e.message)}, fn: ${fnCode}}`;
    }
    case 'computed':
    case 'method':
    case 'hook':
    case 'derived': {
      const fnCode = compileCallableFn(emitter, e);
      return `{tag: ${JSON.stringify(e.tag)}, name: ${JSON.stringify(e.name)}, fn: ${fnCode}}`;
    }
    case 'enum-member': {
      const props = [`tag: "enum-member"`, `name: ${JSON.stringify(e.name)}`];
      if (e.value !== undefined) props.push(`value: ${JSON.stringify(e.value)}`);
      return `{${props.join(', ')}}`;
    }
    default:
      return `{tag: "unknown"}`;
  }
}
|
|
1222
|
-
|
|
1223
|
-
// Compile a callable body (`-> body` or `~> body`) into a JS `function(...)`
// expression with dynamic `this`. Both computed getters and methods go
// through the Rip thin-arrow codegen, which produces `function() { ... }`
// (Rip `->` is NOT a JS arrow) — the right `this` semantics for
// instance-attached methods and proto getters.
function compileCallableFn(emitter, entry) {
  const bodySexpr = parseBodyTokens(entry.bodyTokens);
  // Empty body — emit a no-op.
  if (!bodySexpr) return `(function() {})`;
  // Zero-param thin-arrow; `emit` in value context yields a parenthesized
  // function expression.
  return emitter.emit(['->', [], bodySexpr], 'value');
}
|
|
1239
|
-
|
|
1240
|
-
// Compile an inline field transform body (`-> body`). The body sees the
// raw input object through Rip's implicit `it` parameter, so no explicit
// params are emitted. Transforms run on .parse() only, not on hydrate.
function compileTransformFn(emitter, bodyTokens) {
  const bodySexpr = parseBodyTokens(bodyTokens);
  if (!bodySexpr) return `(function() { return undefined; })`;
  return emitter.emit(['->', [], bodySexpr], 'value');
}
|
|
1251
|
-
|
|
1252
|
-
// Compile an `@ensure` predicate — `(args) -> body` — into a thin-arrow
// function expression with explicit params. Unlike transforms (implicit
// `it`), refinements name their parameter so the contract of "what the
// predicate sees" is visible at the call site.
function compileEnsureFn(emitter, entry) {
  const bodySexpr = parseBodyTokens(entry.bodyTokens);
  if (!bodySexpr) return `(function() { return undefined; })`;
  const params = ensureParamNames(entry.paramTokens, entry);
  return emitter.emit(['->', params, bodySexpr], 'value');
}
|
|
1265
|
-
|
|
1266
|
-
// ----------------------------------------------------------------------------
// Compile-time constraint + directive argument evaluation
// ----------------------------------------------------------------------------
//
// Constraints arrive as raw token slices captured during the lexer pass.
// This layer turns them into a normalized {min?, max?, default?, regex?}
// shape shared by runtime validation and DDL emission. Only
// literal-deterministic values are accepted — identifiers, calls, and
// arbitrary expressions are rejected.
//
// v2 constraint grammar (each form is self-identifying by token shape):
//   `min..max`   — range: string length / array length / numeric value
//   `[value]`    — default: a single literal payload in brackets
//   `/regex/`    — pattern: bare regex literal, no wrapping brackets
//   `{key: val}` — attrs: object literal for `unique`, `index`, etc.
//   `-> body`    — transform: terminal, comma-required before arrow
//                  when anything precedes (see parseFieldedLine)
//
// Pre-v2 multi-element bracket forms (`[n, n]`, `[n, n, n]`, `[/re/]`)
// are rejected with migration diagnostics pointing at the new syntax.
function compileConstraintsLiteral(tokens, fieldEntry) {
  // Drop the surrounding brackets, then split the payload at top level.
  const parts = splitTopLevelByComma(tokens.slice(1, -1));
  if (!parts.length) return { c: null };

  const values = parts.map(slice => evalLiteralTokens(slice, fieldEntry));
  const anchor = tokens[0]; // error location: the opening bracket

  // Single element: a default payload. Regexes belong outside brackets.
  if (values.length === 1) {
    const only = values[0];
    if (only instanceof RegExp) {
      throw schemaError(anchor,
        `Regex constraints are written bare, not in brackets. Replace '[${only}]' with '${only}'.`);
    }
    return { c: { default: only } };
  }

  // Multi-element brackets are pre-v2 forms: diagnose with a migration hint.
  const allNumbers = values.every(v => typeof v === 'number');
  if (values.length === 2 && allNumbers) {
    throw schemaError(anchor,
      `Size/value ranges use 'min..max' syntax, not brackets. Replace '[${values[0]}, ${values[1]}]' with '${values[0]}..${values[1]}'.`);
  }
  if (values.length === 3 && allNumbers) {
    throw schemaError(anchor,
      `Range + default is two separate constraints in v2. Replace '[${values[0]}, ${values[1]}, ${values[2]}]' with '${values[0]}..${values[1]}, [${values[2]}]'.`);
  }
  throw schemaError(anchor,
    `Constraint bracket takes a single default value in v2. Got ${values.length} elements.`);
}
|
|
1313
|
-
|
|
1314
|
-
// Extract a regex literal from a bare REGEX token. The lexer's raw text
// carries the surrounding `/.../` delimiters plus any trailing flags;
// both are validated here before constructing the RegExp.
function regexLiteralOf(tok) {
  const source = tok[1];
  const match = /^\/((?:\\.|[^\\/])+)\/([a-z]*)$/.exec(source);
  if (!match) {
    throw schemaError(tok, `Invalid regex literal ${JSON.stringify(source)}.`);
  }
  try {
    return new RegExp(match[1], match[2]);
  } catch (e) {
    // Shape was fine but the engine rejected it (bad group, bad flag, …).
    throw schemaError(tok, `Invalid regex '${source}': ${e.message}`);
  }
}
|
|
1326
|
-
|
|
1327
|
-
// Evaluate a range token slice into {min?, max?}. The caller has already
// confirmed the token shape via isRangeConstraintTokens. Open-ended forms
// omit the corresponding key entirely (rather than writing undefined) so
// downstream constraint serialization stays clean.
function compileRangeTokens(tokens, fieldEntry) {
  let pos = 0;

  // Consume one (optionally negated) numeric endpoint at `pos`.
  function readEndpoint() {
    let negate = false;
    if (tokens[pos]?.[0] === '-') {
      negate = true;
      pos++;
    }
    const numTok = tokens[pos++];
    const value = evalLiteralTokens([numTok], fieldEntry);
    if (typeof value !== 'number') {
      throw schemaError(numTok, `Range endpoints must be numeric literals.`);
    }
    return negate ? -value : value;
  }

  let min;
  if (tokens[pos]?.[0] !== '..') min = readEndpoint();
  pos++; // step over the `..` separator
  let max;
  if (pos < tokens.length) max = readEndpoint();

  // Reject syntactically-reversed ranges up front.
  if (min !== undefined && max !== undefined && min > max) {
    throw schemaError(tokens[0],
      `Range '${min}..${max}' is reversed. Write the smaller endpoint first.`);
  }

  const result = {};
  if (min !== undefined) result.min = min;
  if (max !== undefined) result.max = max;
  return result;
}
|
|
1357
|
-
|
|
1358
|
-
// Fold the optional range, bracket-default, and bare-regex constraint
// sources into one literal object. The sources write disjoint keys by
// construction: range → min/max, bracket → default, regex → regex.
function mergeFieldConstraints(range, bracketLiteral, regex, fieldEntry) {
  const merged = (bracketLiteral && bracketLiteral.c) || {};
  // Whether the range used the open-left shorthand (`..N`). The `!`
  // implicit-min sugar below keys off this *syntactic* fact, captured
  // before any merging, so a future sugar that also writes merged.min
  // can't accidentally trigger it.
  const rangeOmittedMin = Boolean(range) && range.min === undefined;

  if (range) {
    if (range.min !== undefined) merged.min = range.min;
    if (range.max !== undefined) merged.max = range.max;
    // `..N` combined with a `!` modifier reads as "required and
    // non-empty" — the default interpretation for required varchar-like
    // fields — so fill in min=1 when nothing else set a minimum.
    if (rangeOmittedMin && merged.min === undefined && fieldEntry?.modifiers?.includes('!')) {
      merged.min = 1;
    }
  }

  if (regex) merged.regex = regex;

  // The file-level `schema.defaultMaxString` pragma only fills max when
  // the field left it unbounded. parseFieldedLine already suppresses
  // defaultMax on regex / literal-union fields, so this covers the
  // open-right `N..` case (min set, max unbounded).
  if (fieldEntry?.defaultMax != null && merged.max === undefined) {
    merged.max = fieldEntry.defaultMax;
  }

  // Post-merge consistency check. Sugar (`!` implicit min=1) and the
  // pragma max can compose with an explicit endpoint into min > max —
  // e.g. `name! ..0` would naively emit {min: 1, max: 0}, a constraint
  // no value satisfies. The parse-time reversed-range check only sees
  // endpoints that were written literally, so re-validate here with an
  // error that names the actual sources of each side.
  if (merged.min !== undefined && merged.max !== undefined && merged.min > merged.max) {
    const minSrc = (range && range.min !== undefined) ? `range min ${range.min}` : 'implicit min=1 from `!`';
    const maxSrc = (range && range.max !== undefined)
      ? `range max ${range.max}`
      : `pragma defaultMaxString=${fieldEntry?.defaultMax}`;
    throw schemaError({ loc: fieldEntry?.loc },
      `Field '${fieldEntry?.name}' would have impossible constraints min=${merged.min} > max=${merged.max} after sugar is applied (${minSrc} vs ${maxSrc}). Write an explicit range or drop the conflicting pragma.`);
  }

  const hasAny = ['min', 'max', 'default', 'regex'].some(k => merged[k] !== undefined);
  return hasAny ? constraintLiteral(merged) : null;
}
|
|
1412
|
-
|
|
1413
|
-
// Serialize a normalized constraint object into a JS object-literal
// string (`{min: 1, max: 10}`), or null when no keys are present.
// Key order is fixed: min, max, default, regex.
function constraintLiteral(c) {
  const entries = [];
  for (const key of ['min', 'max', 'default']) {
    if (c[key] !== undefined) entries.push(`${key}: ${serializeLiteral(c[key])}`);
  }
  // Regexes serialize via their own literal form, not JSON.
  if (c.regex !== undefined) entries.push(`regex: ${c.regex.toString()}`);
  return entries.length ? `{${entries.join(', ')}}` : null;
}
|
|
1421
|
-
|
|
1422
|
-
// Render a single literal value as JS source text. Strings are
// JSON-quoted, numbers/booleans stringified, regexes use their literal
// form, and anything else falls back to JSON.
function serializeLiteral(v) {
  if (v === null) return 'null';
  if (v === undefined) return 'undefined';
  if (v instanceof RegExp) return v.toString();
  switch (typeof v) {
    case 'string':
      return JSON.stringify(v);
    case 'number':
    case 'boolean':
      return String(v);
    default:
      return JSON.stringify(v);
  }
}
|
|
1430
|
-
|
|
1431
|
-
// Compile directive args to a JS literal list string or null. Each
// directive has its own arg shape — parsing is centralized here so
// Layer 2 can rely on normalized structures.
//
// Returns:
//   relation directives  → '[{target: "Name"}]' or '[{target: "Name", optional: true}]'
//   @index               → '[{fields: [...]}]' or '[{fields: [...], unique: true}]'
//   @idStart             → '[{value: N}]'
//   everything else      → null (bare flag directives take no args)
// Throws a SchemaSyntaxError (via schemaError) on malformed args.
function compileDirectiveArgsLiteral(name, tokens) {
  // @idStart requires its arg, so validate before the generic empty-bail.
  if (name === 'idStart' && !tokens.length) {
    throw schemaError(null,
      '@idStart requires an integer literal, e.g. @idStart 10001.');
  }
  if (!tokens.length) return null;

  // Relation directives: `@belongs_to Org`, `@belongs_to Org?`,
  // `@has_many Order`, `@has_one Profile`, `@one X`, `@many X`.
  if (name === 'belongs_to' || name === 'has_many' || name === 'has_one' ||
      name === 'one' || name === 'many' || name === 'mixin') {
    let t0 = tokens[0];
    if (!t0 || (t0[0] !== 'IDENTIFIER' && t0[0] !== 'PROPERTY')) {
      throw schemaError(t0 || tokens[tokens.length - 1],
        `@${name} requires a target name.`);
    }
    let target = t0[1];
    // `@belongs_to User?` tokenizes as IDENTIFIER "User" with
    // data.predicate=true. A trailing `?` in a later token position is
    // also accepted for robustness.
    let optional = t0.data?.predicate === true;
    let pos = 1;
    if (!optional && tokens[pos]?.[0] === '?') { optional = true; pos++; }
    let parts = [`target: ${JSON.stringify(target)}`];
    if (optional) parts.push('optional: true');
    return `[{${parts.join(', ')}}]`;
  }

  // `@index field` or `@index [a, b]` or `@index [a, b] #` for unique.
  if (name === 'index') {
    let fields = [];
    let unique = false;
    let pos = 0;
    if (tokens[pos]?.[0] === 'IDENTIFIER' || tokens[pos]?.[0] === 'PROPERTY') {
      // Single bare field name.
      fields.push(tokens[pos][1]);
      pos++;
    } else if (tokens[pos]?.[0] === '[' || tokens[pos]?.[0] === 'INDEX_START') {
      // Bracketed field list: scan to the matching close bracket,
      // collecting the interior tokens (depth handles nested brackets).
      let inner = [];
      let depth = 1;
      pos++;
      while (pos < tokens.length && depth > 0) {
        let t = tokens[pos];
        if (t[0] === '[' || t[0] === 'INDEX_START') depth++;
        if (t[0] === ']' || t[0] === 'INDEX_END') {
          depth--;
          if (depth === 0) { pos++; break; }
        }
        inner.push(t);
        pos++;
      }
      // Each comma-separated part contributes its leading name token;
      // non-name parts are silently skipped.
      for (let part of splitTopLevelByComma(inner)) {
        if (part[0] && (part[0][0] === 'IDENTIFIER' || part[0][0] === 'PROPERTY')) {
          fields.push(part[0][1]);
        }
      }
    }
    // A trailing `#` marks the index unique.
    if (tokens[pos]?.[0] === '#') unique = true;
    let parts = [`fields: ${JSON.stringify(fields)}`];
    if (unique) parts.push('unique: true');
    return `[{${parts.join(', ')}}]`;
  }

  // @idStart N sets the seed value for the table's auto-id sequence.
  // Accepts a single integer literal (optionally negative). Consumed by
  // .toSQL(); models that never call .toSQL() simply ignore it.
  if (name === 'idStart') {
    let tok = tokens[0];
    let sign = 1;
    let numTok = tok;
    // Unary minus arrives as its own `-` token ahead of the NUMBER.
    if (tok && tok[0] === '-' && tokens[1] && tokens[1][0] === 'NUMBER') {
      sign = -1;
      numTok = tokens[1];
    }
    if (!numTok || numTok[0] !== 'NUMBER') {
      throw schemaError(tok || tokens[tokens.length - 1],
        '@idStart requires an integer literal, e.g. @idStart 10001.');
    }
    let n = sign * Number(numTok[1]);
    if (!Number.isInteger(n)) {
      throw schemaError(numTok,
        '@idStart requires an integer literal; got ' + numTok[1] + '.');
    }
    return '[{value: ' + n + '}]';
  }

  // Bare flag-like directives (@timestamps, @softDelete) don't take args.
  // Anything else — capture as raw literal tokens conservatively.
  return null;
}
|
|
1524
|
-
|
|
1525
|
-
// Evaluate a small token slice as a literal value. Accepts NUMBER,
// STRING, BOOL, NULL, UNDEFINED, REGEX, SYMBOL (which yields its name
// string — for enum-member defaults like `[:draft]`), plus unary minus
// on a NUMBER. Anything else throws: constraint values must be
// literal-deterministic, so identifier references are rejected.
function evalLiteralTokens(tokens, fieldEntry) {
  if (!tokens.length) {
    throw schemaError(null, 'Empty constraint value.');
  }
  const [head] = tokens;
  const tag = head[0];

  if (tokens.length === 1) {
    switch (tag) {
      case 'NUMBER': return Number(head[1]);
      case 'STRING': return JSON.parse(head[1]);
      case 'BOOL': return head[1] === 'true';
      case 'NULL': return null;
      case 'UNDEFINED': return undefined;
      case 'REGEX': return parseRegexLiteral(head[1]);
      case 'SYMBOL': return head[1];
    }
  }

  // Unary minus: `-` token followed by a NUMBER token.
  if (tokens.length === 2 && tag === '-' && tokens[1][0] === 'NUMBER') {
    return -Number(tokens[1][1]);
  }

  // Deterministic but not literal — IDENTIFIER references aren't supported.
  throw schemaError(head,
    `Constraint values must be literals (number, string, boolean, null, regex, :symbol). Got ${tag}.`);
}
|
|
1550
|
-
|
|
1551
|
-
// Parse a `/pattern/flags` string into a RegExp. A value that doesn't
// look like a slash-delimited literal is compiled as a bare pattern.
//
// Fix: the flag class now accepts the full modern set `dgimsuvy` —
// the previous `[gimsuy]` rejected `d` (hasIndices, ES2022) and `v`
// (unicodeSets, ES2024), so `/x/d` fell through to the bare-pattern
// branch and was silently compiled as the literal text `/x/d`.
// Existing inputs are unaffected (backward compatible).
function parseRegexLiteral(val) {
  let s = typeof val === 'string' ? val : String(val);
  let m = s.match(/^\/(.*)\/([dgimsuvy]*)$/s);
  return m ? new RegExp(m[1], m[2]) : new RegExp(s);
}
|
|
1556
|
-
|
|
1557
|
-
// Run the tail rewriter passes on a captured body token slice, then feed
// the result through parser.parse() via a temporary lex adapter. The
// returned s-expression is the parsed body — either a single statement or
// a block of statements — ready to wrap in `['->', [], body]`.
//
// Returns null for an empty/unparseable body. Mutates nothing outside
// itself: the global parser.lexer is swapped and restored in a finally.
function parseBodyTokens(bodyTokens) {
  if (!bodyTokens || !bodyTokens.length) return null;

  // The body tokens were captured by rewriteSchema BEFORE rewriteTypes,
  // tagPostfixConditionals, rewriteTaggedTemplates, addImplicitBracesAndParens,
  // and addImplicitCallCommas ran. Run those tail passes on a sub-lexer
  // whose `this.tokens` is the body slice.
  //
  // _LexerCtor is injected externally (by the lexer module) to avoid a
  // circular import; calling before wiring is a programming error.
  let LexerCtor = parseBodyTokens._LexerCtor;
  if (!LexerCtor) {
    throw new Error('schema: parseBodyTokens called before Lexer was wired');
  }
  let sub = Object.create(LexerCtor.prototype);
  let toks = bodyTokens.slice();
  // Multi-line callable bodies open with a matched INDENT ... OUTDENT pair
  // wrapping the statements. parser.parse() expects a Body (list of Lines),
  // not a leading INDENT, so strip the outer pair when the first INDENT's
  // matching OUTDENT is the last token.
  if (toks.length >= 2 && toks[0]?.[0] === 'INDENT') {
    let depth = 0;
    let lastOutdent = -1;
    for (let k = 0; k < toks.length; k++) {
      if (toks[k][0] === 'INDENT') depth++;
      else if (toks[k][0] === 'OUTDENT') {
        depth--;
        if (depth === 0) { lastOutdent = k; break; }
      }
    }
    // Only strip when the pair wraps the WHOLE slice; an interior
    // OUTDENT means the INDENT belongs to a nested construct.
    if (lastOutdent === toks.length - 1) {
      toks = toks.slice(1, -1);
    }
  }
  // Minimal lexer-instance state the tail passes expect to find.
  sub.tokens = toks;
  sub.seenFor = sub.seenImport = sub.seenExport = false;
  sub.ends = [];
  sub.indent = 0;
  sub.outdebt = 0;
  sub.indents = [];
  // Ensure a terminating TERMINATOR so parser.parse() sees a clean EOF.
  let lastTag = sub.tokens[sub.tokens.length - 1]?.[0];
  if (lastTag !== 'TERMINATOR') {
    sub.tokens.push(mkToken('TERMINATOR', '\n', bodyTokens[bodyTokens.length - 1]));
  }
  try {
    // Optional-call (`?.()`) so a Lexer build lacking a pass degrades
    // gracefully instead of throwing on a missing method.
    sub.rewriteTypes?.();
    sub.tagPostfixConditionals?.();
    sub.rewriteTaggedTemplates?.();
    sub.addImplicitBracesAndParens?.();
    sub.addImplicitCallCommas?.();
  } catch (e) {
    // If a tail pass throws, surface a clean schema error.
    throw schemaError(bodyTokens[0], `schema: failed to compile body: ${e.message}`);
  }
  // TYPE_DECL tokens are compile-time-only; the parser must not see them.
  let tokens = sub.tokens.filter(t => t[0] !== 'TYPE_DECL');

  // Swap parser.lexer for a replay adapter, parse, restore.
  let savedLexer = parser.lexer;
  parser.lexer = {
    tokens, pos: 0,
    setInput() {},
    // Jison-style lex(): returns the tag, exposes text/loc/line as
    // side channels; 1 signals EOF.
    lex() {
      if (this.pos >= this.tokens.length) return 1;
      let token = this.tokens[this.pos++];
      let val = token[1];
      if (token.data) {
        // Boxed String carries per-token metadata (data.*) into the
        // parser alongside the raw text.
        val = new String(val);
        Object.assign(val, token.data);
      }
      this.text = val;
      this.loc = token.loc;
      this.line = token.loc?.r;
      return token[0];
    },
  };
  let sexpr;
  try {
    sexpr = parser.parse('');
  } finally {
    // Always restore — a parse error must not leave the adapter behind.
    parser.lexer = savedLexer;
  }

  // sexpr is `['program', ...statements]`. Unwrap to a body we can feed
  // a thin-arrow AST. One statement → the statement itself. Multiple →
  // ['block', ...].
  if (!Array.isArray(sexpr) || sexpr[0] !== 'program') return null;
  let stmts = sexpr.slice(1);
  if (stmts.length === 0) return null;
  if (stmts.length === 1) return stmts[0];
  return ['block', ...stmts];
}
|
|
1650
|
-
|
|
1651
|
-
// ============================================================================
|
|
1652
|
-
// Helpers
|
|
1653
|
-
// ============================================================================
|
|
1654
|
-
|
|
1655
|
-
// Read the `!` / `?` modifier flags off an identifier token's metadata.
// The lexer records `!` as data.await and `?` as data.predicate; order
// in the returned list is always `!` before `?`.
function collectModifiers(identToken) {
  const data = identToken.data ?? {};
  const mods = [];
  if (data.await === true) mods.push('!');
  if (data.predicate === true) mods.push('?');
  return mods;
}
|
|
1662
|
-
|
|
1663
|
-
// Scan forward from `indentIdx` for the OUTDENT that balances the
// INDENT there, honoring nesting. Returns the matching index, or -1
// when the slice ends unbalanced.
function findMatchingOutdent(tokens, indentIdx) {
  let nesting = 0;
  for (let idx = indentIdx; idx < tokens.length; idx++) {
    const tag = tokens[idx][0];
    if (tag === 'INDENT') {
      nesting++;
    } else if (tag === 'OUTDENT' && --nesting === 0) {
      return idx;
    }
  }
  return -1;
}
|
|
1674
|
-
|
|
1675
|
-
// Split a token slice at commas that sit at nesting depth zero. Any
// bracket-like pair — explicit or lexer-generated (CALL_/INDEX_/PARAM_
// markers, INDENT/OUTDENT) — raises the depth, so commas inside nested
// structures stay with their part. Empty parts are dropped.
function splitTopLevelByComma(tokens) {
  const openers = new Set(['(', '[', '{', 'CALL_START', 'INDEX_START', 'PARAM_START', 'INDENT']);
  const closers = new Set([')', ']', '}', 'CALL_END', 'INDEX_END', 'PARAM_END', 'OUTDENT']);
  const parts = [];
  let current = [];
  let depth = 0;
  for (const tok of tokens) {
    const tag = tok[0];
    if (openers.has(tag)) depth++;
    if (closers.has(tag)) depth--;
    if (tag === ',' && depth === 0) {
      if (current.length) parts.push(current);
      current = [];
    } else {
      current.push(tok);
    }
  }
  if (current.length) parts.push(current);
  return parts;
}
|
|
1697
|
-
|
|
1698
|
-
// Decode a single token into its JS literal value. Unrecognized tags
// pass the raw token text through unchanged.
function literalOf(tok) {
  const [tag, val] = tok;
  switch (tag) {
    case 'NUMBER': return Number(val);
    case 'STRING': return JSON.parse(val);
    case 'BOOL': return val === 'true';
    case 'NULL': return null;
    case 'UNDEFINED': return undefined;
    default: return val;
  }
}
|
|
1707
|
-
|
|
1708
|
-
// Build a synthetic lexer token: a [tag, value] array carrying the
// standard metadata properties. Location is borrowed from `origin` when
// provided (falling back to a zero loc), and `generated: true` marks it
// as compiler-made rather than source-derived.
function mkToken(tag, value, origin) {
  const tok = Object.assign([tag, value], {
    pre: 0,
    data: null,
    loc: origin?.loc ?? { r: 0, c: 0, n: 0 },
    spaced: false,
    newLine: false,
    generated: true,
  });
  if (origin) tok.origin = origin;
  return tok;
}
|
|
1719
|
-
|
|
1720
|
-
// Construct (but do not throw) a schema compile-time error carrying the
// offending token's location plus stable machine-readable metadata
// (phase/code) for downstream diagnostics. A null token yields a 0,0 loc.
function schemaError(tok, message) {
  const loc = tok?.loc || { r: 0, c: 0 };
  return Object.assign(new Error(message), {
    name: 'SchemaSyntaxError',
    loc,
    line: loc.r,
    column: loc.c,
    phase: 'schema',
    code: 'E_SCHEMA',
  });
}
|
|
1731
|
-
|
|
1732
|
-
// ============================================================================
|
|
1733
|
-
// Runtime — injected into compiled output when the source uses `schema`
|
|
1734
|
-
// ============================================================================
|
|
1735
|
-
//
|
|
1736
|
-
// Four-layer architecture (D22):
|
|
1737
|
-
// Layer 1 — Descriptor: the object passed to `__schema({...})`. Raw
|
|
1738
|
-
// metadata from compiler, plus real functions for callables.
|
|
1739
|
-
// Layer 2 — Normalized: fields map / methods map / computed map / hooks
|
|
1740
|
-
// map / directives / enum members. Built lazily on first
|
|
1741
|
-
// downstream need. Collision and kind-legality checks live
|
|
1742
|
-
// here (Phase 4 tightens them).
|
|
1743
|
-
// Layer 3 — Validator plan: compiled validator tree. Built on first
|
|
1744
|
-
// `.parse` / `.safe` / `.ok`.
|
|
1745
|
-
// Layer 4 — ORM plan (Phase 4) and DDL plan (Phase 4) — not in Phase 3.
|
|
1746
|
-
//
|
|
1747
|
-
// Public API per kind (v1):
|
|
1748
|
-
// .parse(data) throws SchemaError on failure, returns value
|
|
1749
|
-
// .safe(data) {ok: true, value, errors: null} | {ok: false, value: null, errors: [...]}
|
|
1750
|
-
// .ok(data) boolean, fast path (no allocation)
|
|
1751
|
-
//
|
|
1752
|
-
// Result `value` shape:
|
|
1753
|
-
// :shape — generated class instance (fields enumerable own props,
|
|
1754
|
-
// methods non-enumerable prototype fns, computed non-enumerable
|
|
1755
|
-
// prototype getters)
|
|
1756
|
-
// :input — plain object (same class-instance plumbing; Phase 3 treats
|
|
1757
|
-
// :input like :shape sans methods for consistency)
|
|
1758
|
-
// :enum — the member value (or name when the enum is bare)
|
|
1759
|
-
// :mixin — non-instantiable; raises `Cannot parse :mixin`
|
|
1760
|
-
// :model — Phase 4 (the class additionally wires ORM methods)
|
|
1761
|
-
|
|
1762
|
-
// Schema runtime ABI version. Bump when the shape of a __schema({...})
// descriptor or any cross-bundle-visible runtime surface changes
// incompatibly. Two bundles that disagree on this number can't share
// one runtime, so a mismatch at load time throws rather than silently
// fragmenting. Tracks the runtime contract only — not the rip-lang
// product semver. Interpolated into the SCHEMA_RUNTIME template below,
// which compares it against globalThis.__ripSchema.__version.
const SCHEMA_RUNTIME_ABI_VERSION = 1;
|
|
1769
|
-
|
|
1770
|
-
const SCHEMA_RUNTIME = `
|
|
1771
|
-
// ---- Rip Schema Runtime ----------------------------------------------------
|
|
1772
|
-
// Four layers, lazy compilation:
|
|
1773
|
-
// 1 (descriptor) object passed to __schema({...}). Raw metadata.
|
|
1774
|
-
// 2 (normalized) fields/methods/computed/hooks/relations/constraints.
|
|
1775
|
-
// Collision checks. Table name derivation. Built once.
|
|
1776
|
-
// 3 (validator) compiled validator plan. Built on first .parse.
|
|
1777
|
-
// 4a (ORM plan) built on first .find/.create/.save.
|
|
1778
|
-
// 4b (DDL plan) built on first .toSQL(). Independent of 4a.
|
|
1779
|
-
//
|
|
1780
|
-
// Instance-singleton model:
|
|
1781
|
-
// The runtime installs itself on globalThis.__ripSchema the first time a
|
|
1782
|
-
// compiled bundle executes. Subsequent bundles that inject the same runtime
|
|
1783
|
-
// template detect the existing installation and bind to it instead of
|
|
1784
|
-
// re-running the body — giving every bundle a single shared registry,
|
|
1785
|
-
// adapter, and class identity. The IIFE wrapper below enforces that.
|
|
1786
|
-
|
|
1787
|
-
var { __schema, SchemaError, __SchemaRegistry, __schemaSetAdapter } = (function() {
|
|
1788
|
-
if (typeof globalThis !== 'undefined' && globalThis.__ripSchema) {
|
|
1789
|
-
if (globalThis.__ripSchema.__version !== ${SCHEMA_RUNTIME_ABI_VERSION}) {
|
|
1790
|
-
throw new Error(
|
|
1791
|
-
"rip-schema runtime version mismatch: loaded runtime is v" +
|
|
1792
|
-
globalThis.__ripSchema.__version +
|
|
1793
|
-
", but this bundle expects v" + ${SCHEMA_RUNTIME_ABI_VERSION} +
|
|
1794
|
-
". Two compiled Rip bundles with incompatible schema runtimes are loaded in the same process."
|
|
1795
|
-
);
|
|
1796
|
-
}
|
|
1797
|
-
return globalThis.__ripSchema;
|
|
1798
|
-
}
|
|
1799
|
-
|
|
1800
|
-
class SchemaError extends Error {
|
|
1801
|
-
constructor(issues, schemaName, schemaKind) {
|
|
1802
|
-
super(__schemaFormatIssues(issues, schemaName));
|
|
1803
|
-
this.name = 'SchemaError';
|
|
1804
|
-
this.issues = issues;
|
|
1805
|
-
this.schemaName = schemaName || null;
|
|
1806
|
-
this.schemaKind = schemaKind || null;
|
|
1807
|
-
}
|
|
1808
|
-
}
|
|
1809
|
-
|
|
1810
|
-
function __schemaFormatIssues(issues, name) {
|
|
1811
|
-
if (!issues || !issues.length) return 'SchemaError';
|
|
1812
|
-
const head = name ? name + ': ' : '';
|
|
1813
|
-
return head + issues.map(i => i.message || i.error || 'invalid').join('; ');
|
|
1814
|
-
}
|
|
1815
|
-
|
|
1816
|
-
// Reserved names are hoisted to module scope — they're pure data and
|
|
1817
|
-
// rebuilding them per _normalize() call wastes allocations. Static: names
|
|
1818
|
-
// that become class-level methods on :model (parse, find, toSQL, …).
|
|
1819
|
-
// Instance: names that become instance methods (save, destroy, toJSON, …).
|
|
1820
|
-
// A declared field, method, computed, or derived that collides with
|
|
1821
|
-
// either set on a :model raises a collision error during normalize.
|
|
1822
|
-
const __SCHEMA_RESERVED_STATIC = new Set([
|
|
1823
|
-
'parse','safe','ok','find','findMany','where','all','first','count','create','toSQL',
|
|
1824
|
-
]);
|
|
1825
|
-
const __SCHEMA_RESERVED_INSTANCE = new Set([
|
|
1826
|
-
'save','destroy','reload','ok','errors','toJSON',
|
|
1827
|
-
]);
|
|
1828
|
-
const __SCHEMA_RESERVED = new Set([...__SCHEMA_RESERVED_STATIC, ...__SCHEMA_RESERVED_INSTANCE]);
|
|
1829
|
-
|
|
1830
|
-
const __schemaTypes = {
|
|
1831
|
-
string: v => typeof v === 'string',
|
|
1832
|
-
number: v => typeof v === 'number' && !Number.isNaN(v),
|
|
1833
|
-
integer: v => Number.isInteger(v),
|
|
1834
|
-
boolean: v => typeof v === 'boolean',
|
|
1835
|
-
date: v => v instanceof Date && !Number.isNaN(v.getTime()),
|
|
1836
|
-
datetime: v => v instanceof Date && !Number.isNaN(v.getTime()),
|
|
1837
|
-
email: v => typeof v === 'string' && /^[^\\s@]+@[^\\s@]+\\.[^\\s@]+$/.test(v),
|
|
1838
|
-
url: v => typeof v === 'string' && /^https?:\\/\\/.+/.test(v),
|
|
1839
|
-
uuid: v => typeof v === 'string' && /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i.test(v),
|
|
1840
|
-
phone: v => typeof v === 'string' && /^[\\d\\s\\-+()]+$/.test(v),
|
|
1841
|
-
zip: v => typeof v === 'string' && /^\\d{5}(-\\d{4})?$/.test(v),
|
|
1842
|
-
text: v => typeof v === 'string',
|
|
1843
|
-
json: v => v !== undefined,
|
|
1844
|
-
any: () => true,
|
|
1845
|
-
};
|
|
1846
|
-
|
|
1847
|
-
function __schemaCheckValue(v, typeName) {
|
|
1848
|
-
const check = __schemaTypes[typeName];
|
|
1849
|
-
return check ? check(v) : true;
|
|
1850
|
-
}
|
|
1851
|
-
|
|
1852
|
-
// Validate a single value against a typeName, returning either null (ok)
|
|
1853
|
-
// or an array of issues relative to the value's own root. Primitive
|
|
1854
|
-
// typenames dispatch through the __schemaTypes map; typenames that
|
|
1855
|
-
// resolve to a registered :shape / :input / :model validate the value
|
|
1856
|
-
// as a nested object; typenames that resolve to a :enum enforce
|
|
1857
|
-
// membership. Unknown typenames stay permissive so forward-references
|
|
1858
|
-
// and cross-module names do not hard-fail — matches pre-registry behavior.
|
|
1859
|
-
function __schemaValidateValue(v, typeName) {
|
|
1860
|
-
const prim = __schemaTypes[typeName];
|
|
1861
|
-
if (prim) {
|
|
1862
|
-
return prim(v) ? null : [{field: '', error: 'type', message: 'must be ' + typeName}];
|
|
1863
|
-
}
|
|
1864
|
-
const subDef = __SchemaRegistry.get(typeName);
|
|
1865
|
-
if (!subDef) return null;
|
|
1866
|
-
if (subDef.kind === 'enum') {
|
|
1867
|
-
const errs = subDef._validateEnum(v, true);
|
|
1868
|
-
return errs.length ? [{field: '', error: 'enum', message: errs[0].message}] : null;
|
|
1869
|
-
}
|
|
1870
|
-
if (subDef.kind === 'mixin') {
|
|
1871
|
-
return [{field: '', error: 'type', message: ':mixin ' + typeName + ' is not usable as a field type'}];
|
|
1872
|
-
}
|
|
1873
|
-
if (v === null || typeof v !== 'object' || Array.isArray(v)) {
|
|
1874
|
-
return [{field: '', error: 'type', message: 'must be a ' + typeName + ' object'}];
|
|
1875
|
-
}
|
|
1876
|
-
const subErrs = subDef._validateFields(v, true);
|
|
1877
|
-
return subErrs.length ? subErrs : null;
|
|
1878
|
-
}
|
|
1879
|
-
|
|
1880
|
-
// Merge a child path segment into an existing field path. Produces
|
|
1881
|
-
// 'addr.street' for object descent, 'items[0].name' for array descent.
|
|
1882
|
-
function __schemaJoinField(head, child) {
|
|
1883
|
-
if (!child) return head;
|
|
1884
|
-
return head + (child.startsWith('[') ? child : '.' + child);
|
|
1885
|
-
}
|
|
1886
|
-
|
|
1887
|
-
// Rewrite a child issue's message so the leading "<childField> " token
|
|
1888
|
-
// (present on most leaf messages: "name is required", "id must be
|
|
1889
|
-
// integer") is replaced by the joined parent path — avoiding the
|
|
1890
|
-
// duplicated "items[1].id id must be integer" reading.
|
|
1891
|
-
function __schemaRewriteMessage(joinedField, childField, childMessage) {
|
|
1892
|
-
if (!childField) return joinedField + ' ' + childMessage;
|
|
1893
|
-
if (childMessage.startsWith(childField)) {
|
|
1894
|
-
return joinedField + childMessage.slice(childField.length);
|
|
1895
|
-
}
|
|
1896
|
-
return joinedField + ': ' + childMessage;
|
|
1897
|
-
}
|
|
1898
|
-
|
|
1899
|
-
// Naming utilities (snake_case column/table names, irregular plurals).
|
|
1900
|
-
function __schemaSnake(s) { return s.replace(/([a-z0-9])([A-Z])/g, '$1_$2').toLowerCase(); }
|
|
1901
|
-
const __SCHEMA_UNCOUNTABLE = new Set(['equipment','information','rice','money','species','series','fish','sheep','data']);
|
|
1902
|
-
const __SCHEMA_IRREGULAR = new Map([['person','people'],['man','men'],['woman','women'],['child','children'],['tooth','teeth'],['foot','feet'],['mouse','mice']]);
|
|
1903
|
-
function __schemaPluralize(w) {
|
|
1904
|
-
const lw = w.toLowerCase();
|
|
1905
|
-
if (__SCHEMA_UNCOUNTABLE.has(lw)) return w;
|
|
1906
|
-
if (__SCHEMA_IRREGULAR.has(lw)) return __SCHEMA_IRREGULAR.get(lw);
|
|
1907
|
-
// Preserve case of the input — pluralizer operates on the trailing form
|
|
1908
|
-
// but keeps the rest unchanged, so orderItem becomes orderItems
|
|
1909
|
-
// and User becomes Users.
|
|
1910
|
-
if (/[^aeiouy]y$/i.test(w)) return w.slice(0, -1) + 'ies';
|
|
1911
|
-
if (/(s|x|z|ch|sh)$/i.test(w)) return w + 'es';
|
|
1912
|
-
return w + 's';
|
|
1913
|
-
}
|
|
1914
|
-
function __schemaTableName(model) { return __schemaPluralize(__schemaSnake(model)); }
|
|
1915
|
-
function __schemaFkName(model) { return __schemaSnake(model) + '_id'; }
|
|
1916
|
-
|
|
1917
|
-
// ---- Registry ---------------------------------------------------------------
|
|
1918
|
-
// Process-global, resettable, with placeholder state for forward/circular
|
|
1919
|
-
// references. Duplicate registration of the same model name is a hard error.
|
|
1920
|
-
|
|
1921
|
-
const __SchemaRegistry = {
|
|
1922
|
-
_entries: new Map(),
|
|
1923
|
-
register(def) {
|
|
1924
|
-
// Named schemas of any kind land here. Relations look up :model,
|
|
1925
|
-
// @mixin Name looks up :mixin. Algebra (.extend etc.) accepts :shape
|
|
1926
|
-
// and derived shapes. Kind is checked at lookup time.
|
|
1927
|
-
if (!def.name) return;
|
|
1928
|
-
// Most recent registration wins. Recompilation produces a fresh
|
|
1929
|
-
// __SchemaDef with the same name; the registry rebinds. Cross-
|
|
1930
|
-
// module name collisions should be avoided — schema names are
|
|
1931
|
-
// app-global identifiers for relation resolution.
|
|
1932
|
-
this._entries.set(def.name, { def, kind: def.kind });
|
|
1933
|
-
},
|
|
1934
|
-
get(name) {
|
|
1935
|
-
const entry = this._entries.get(name);
|
|
1936
|
-
return entry ? entry.def : null;
|
|
1937
|
-
},
|
|
1938
|
-
getKind(name, kind) {
|
|
1939
|
-
const entry = this._entries.get(name);
|
|
1940
|
-
return entry && entry.kind === kind ? entry.def : null;
|
|
1941
|
-
},
|
|
1942
|
-
has(name) { return this._entries.has(name); },
|
|
1943
|
-
reset() { this._entries.clear(); },
|
|
1944
|
-
};
|
|
1945
|
-
|
|
1946
|
-
// ---- DB adapter seam --------------------------------------------------------
|
|
1947
|
-
// Default adapter uses fetch to rip-db /sql. Tests can swap with
|
|
1948
|
-
// __schemaSetAdapter(...) before running queries.
|
|
1949
|
-
|
|
1950
|
-
function __schemaDefaultAdapter() {
|
|
1951
|
-
const url = (typeof process !== 'undefined' && process.env?.DB_URL) || 'http://localhost:4213';
|
|
1952
|
-
return {
|
|
1953
|
-
async query(sql, params) {
|
|
1954
|
-
const body = params && params.length ? { sql, params } : { sql };
|
|
1955
|
-
const res = await fetch(url + '/sql', {
|
|
1956
|
-
method: 'POST',
|
|
1957
|
-
headers: { 'Content-Type': 'application/json' },
|
|
1958
|
-
body: JSON.stringify(body),
|
|
1959
|
-
});
|
|
1960
|
-
const data = await res.json();
|
|
1961
|
-
if (data.error) throw new Error(data.error);
|
|
1962
|
-
return data;
|
|
1963
|
-
}
|
|
1964
|
-
};
|
|
1965
|
-
}
|
|
1966
|
-
|
|
1967
|
-
let __schemaAdapter = __schemaDefaultAdapter();
|
|
1968
|
-
function __schemaSetAdapter(a) { __schemaAdapter = a; }
|
|
1969
|
-
|
|
1970
|
-
// ---- Query builder ----------------------------------------------------------
|
|
1971
|
-
|
|
1972
|
-
class __SchemaQuery {
|
|
1973
|
-
constructor(def, opts = {}) {
|
|
1974
|
-
this._def = def;
|
|
1975
|
-
this._clauses = [];
|
|
1976
|
-
this._params = [];
|
|
1977
|
-
this._limit = null;
|
|
1978
|
-
this._offset = null;
|
|
1979
|
-
this._order = null;
|
|
1980
|
-
this._includeDeleted = opts.includeDeleted === true;
|
|
1981
|
-
}
|
|
1982
|
-
where(cond, ...params) {
|
|
1983
|
-
if (typeof cond === 'string') {
|
|
1984
|
-
this._clauses.push(cond);
|
|
1985
|
-
this._params.push(...params);
|
|
1986
|
-
} else if (cond && typeof cond === 'object') {
|
|
1987
|
-
for (const [k, v] of Object.entries(cond)) {
|
|
1988
|
-
const col = __schemaSnake(k);
|
|
1989
|
-
if (v === null || v === undefined) {
|
|
1990
|
-
this._clauses.push('"' + col + '" IS NULL');
|
|
1991
|
-
} else {
|
|
1992
|
-
this._clauses.push('"' + col + '" = ?');
|
|
1993
|
-
this._params.push(v);
|
|
1994
|
-
}
|
|
1995
|
-
}
|
|
1996
|
-
}
|
|
1997
|
-
return this;
|
|
1998
|
-
}
|
|
1999
|
-
limit(n) { this._limit = n; return this; }
|
|
2000
|
-
offset(n) { this._offset = n; return this; }
|
|
2001
|
-
order(spec) { this._order = spec; return this; }
|
|
2002
|
-
orderBy(spec) { return this.order(spec); }
|
|
2003
|
-
_buildSQL() {
|
|
2004
|
-
const n = this._def._normalize();
|
|
2005
|
-
const table = n.tableName;
|
|
2006
|
-
const parts = ['SELECT * FROM "' + table + '"'];
|
|
2007
|
-
const where = [...this._clauses];
|
|
2008
|
-
if (!this._includeDeleted && n.softDelete) where.push('"deleted_at" IS NULL');
|
|
2009
|
-
if (where.length) parts.push('WHERE ' + where.join(' AND '));
|
|
2010
|
-
if (this._order) parts.push('ORDER BY ' + this._order);
|
|
2011
|
-
if (this._limit != null) parts.push('LIMIT ' + this._limit);
|
|
2012
|
-
if (this._offset != null) parts.push('OFFSET ' + this._offset);
|
|
2013
|
-
return parts.join(' ');
|
|
2014
|
-
}
|
|
2015
|
-
async all() {
|
|
2016
|
-
const sql = this._buildSQL();
|
|
2017
|
-
const res = await __schemaAdapter.query(sql, this._params);
|
|
2018
|
-
return (res.data || []).map(row => this._def._hydrate(res.columns, row));
|
|
2019
|
-
}
|
|
2020
|
-
async first() {
|
|
2021
|
-
this._limit = 1;
|
|
2022
|
-
const arr = await this.all();
|
|
2023
|
-
return arr[0] || null;
|
|
2024
|
-
}
|
|
2025
|
-
async count() {
|
|
2026
|
-
const n = this._def._normalize();
|
|
2027
|
-
const parts = ['SELECT COUNT(*) FROM "' + n.tableName + '"'];
|
|
2028
|
-
const where = [...this._clauses];
|
|
2029
|
-
if (!this._includeDeleted && n.softDelete) where.push('"deleted_at" IS NULL');
|
|
2030
|
-
if (where.length) parts.push('WHERE ' + where.join(' AND '));
|
|
2031
|
-
const res = await __schemaAdapter.query(parts.join(' '), this._params);
|
|
2032
|
-
return res.data?.[0]?.[0] || 0;
|
|
2033
|
-
}
|
|
2034
|
-
}
|
|
2035
|
-
|
|
2036
|
-
// ---- __SchemaDef ------------------------------------------------------------
|
|
2037
|
-
|
|
2038
|
-
class __SchemaDef {
|
|
2039
|
-
constructor(desc) {
|
|
2040
|
-
this._desc = desc;
|
|
2041
|
-
this.kind = desc.kind;
|
|
2042
|
-
this.name = desc.name || null;
|
|
2043
|
-
this._norm = null;
|
|
2044
|
-
this._klass = null;
|
|
2045
|
-
this._sourceModel = null;
|
|
2046
|
-
}
|
|
2047
|
-
|
|
2048
|
-
_normalize() {
|
|
2049
|
-
if (this._norm) return this._norm;
|
|
2050
|
-
|
|
2051
|
-
const fields = new Map();
|
|
2052
|
-
const methods = new Map();
|
|
2053
|
-
const computed = new Map();
|
|
2054
|
-
const derived = new Map();
|
|
2055
|
-
const hooks = new Map();
|
|
2056
|
-
const directives = [];
|
|
2057
|
-
const enumMembers = new Map();
|
|
2058
|
-
const relations = new Map();
|
|
2059
|
-
const ensures = [];
|
|
2060
|
-
let timestamps = false;
|
|
2061
|
-
let softDelete = false;
|
|
2062
|
-
|
|
2063
|
-
const collision = (n, where) => {
|
|
2064
|
-
throw new SchemaError(
|
|
2065
|
-
[{field: n, error: 'collision', message: n + ' collides with ' + where}],
|
|
2066
|
-
this.name, this.kind);
|
|
2067
|
-
};
|
|
2068
|
-
const noteCollision = (n) => {
|
|
2069
|
-
if (fields.has(n)) collision(n, 'field');
|
|
2070
|
-
if (methods.has(n)) collision(n, 'method');
|
|
2071
|
-
if (computed.has(n)) collision(n, 'computed');
|
|
2072
|
-
if (hooks.has(n)) collision(n, 'hook');
|
|
2073
|
-
if (relations.has(n)) collision(n, 'relation');
|
|
2074
|
-
if (this.kind === 'model' && __SCHEMA_RESERVED.has(n)) collision(n, 'reserved ORM name');
|
|
2075
|
-
};
|
|
2076
|
-
|
|
2077
|
-
for (const e of this._desc.entries) {
|
|
2078
|
-
switch (e.tag) {
|
|
2079
|
-
case 'field':
|
|
2080
|
-
noteCollision(e.name);
|
|
2081
|
-
fields.set(e.name, {
|
|
2082
|
-
name: e.name,
|
|
2083
|
-
required: e.modifiers.includes('!'),
|
|
2084
|
-
unique: e.modifiers.includes('#'),
|
|
2085
|
-
optional: e.modifiers.includes('?'),
|
|
2086
|
-
typeName: e.typeName,
|
|
2087
|
-
literals: e.literals || null,
|
|
2088
|
-
array: e.array === true,
|
|
2089
|
-
constraints: e.constraints || null,
|
|
2090
|
-
transform: e.transform || null,
|
|
2091
|
-
});
|
|
2092
|
-
break;
|
|
2093
|
-
case 'method':
|
|
2094
|
-
noteCollision(e.name);
|
|
2095
|
-
methods.set(e.name, e.fn);
|
|
2096
|
-
break;
|
|
2097
|
-
case 'computed':
|
|
2098
|
-
noteCollision(e.name);
|
|
2099
|
-
computed.set(e.name, e.fn);
|
|
2100
|
-
break;
|
|
2101
|
-
case 'derived':
|
|
2102
|
-
noteCollision(e.name);
|
|
2103
|
-
derived.set(e.name, e.fn);
|
|
2104
|
-
break;
|
|
2105
|
-
case 'hook':
|
|
2106
|
-
if (hooks.has(e.name)) collision(e.name, 'duplicate hook');
|
|
2107
|
-
hooks.set(e.name, e.fn);
|
|
2108
|
-
break;
|
|
2109
|
-
case 'directive': {
|
|
2110
|
-
directives.push({ name: e.name, args: e.args || [] });
|
|
2111
|
-
// @mixin is recorded but further handling is deferred to the
|
|
2112
|
-
// post-pass so we can dedupe diamond includes and detect
|
|
2113
|
-
// cycles with a full expansion stack. All other directives
|
|
2114
|
-
// get their relation / timestamps / softDelete processing now.
|
|
2115
|
-
if (e.name === 'mixin') break;
|
|
2116
|
-
if (e.name === 'timestamps') timestamps = true;
|
|
2117
|
-
if (e.name === 'softDelete') softDelete = true;
|
|
2118
|
-
const rel = __schemaNormalizeDirectiveRelation(e, this.name);
|
|
2119
|
-
if (rel) {
|
|
2120
|
-
noteCollision(rel.accessor);
|
|
2121
|
-
relations.set(rel.accessor, rel);
|
|
2122
|
-
}
|
|
2123
|
-
break;
|
|
2124
|
-
}
|
|
2125
|
-
case 'enum-member':
|
|
2126
|
-
enumMembers.set(e.name, e.value !== undefined ? e.value : e.name);
|
|
2127
|
-
break;
|
|
2128
|
-
case 'ensure':
|
|
2129
|
-
// @ensure entries are schema-level invariants (cross-field
|
|
2130
|
-
// predicates). Declaration order is preserved so diagnostics
|
|
2131
|
-
// come out in the order authored.
|
|
2132
|
-
ensures.push({ message: e.message, fn: e.fn });
|
|
2133
|
-
break;
|
|
2134
|
-
}
|
|
2135
|
-
}
|
|
2136
|
-
|
|
2137
|
-
// @mixin expansion (Phase 5). Depth-first, dedupes diamond includes
|
|
2138
|
-
// in the same host expansion, detects cycles with full chain.
|
|
2139
|
-
if (this.kind === 'model' || this.kind === 'shape' || this.kind === 'input' ||
|
|
2140
|
-
this.kind === 'mixin') {
|
|
2141
|
-
__schemaExpandMixins(this, fields, directives, {
|
|
2142
|
-
stack: [this.name || '<anon>'],
|
|
2143
|
-
seen: new Set([this.name || '<anon>']),
|
|
2144
|
-
onCollision: (name, src) => collision(name, 'mixin-included field from ' + src),
|
|
2145
|
-
});
|
|
2146
|
-
}
|
|
2147
|
-
|
|
2148
|
-
// Add implicit primary key for :model unless a field already marked primary.
|
|
2149
|
-
const primaryKey = 'id';
|
|
2150
|
-
const tableName = this.kind === 'model' ? __schemaTableName(this.name) : null;
|
|
2151
|
-
|
|
2152
|
-
this._norm = {
|
|
2153
|
-
fields, methods, computed, derived, hooks, directives, enumMembers, relations,
|
|
2154
|
-
ensures,
|
|
2155
|
-
timestamps, softDelete, primaryKey, tableName,
|
|
2156
|
-
};
|
|
2157
|
-
return this._norm;
|
|
2158
|
-
}
|
|
2159
|
-
|
|
2160
|
-
// Run eager-derived entries (!>) — one pass, in declaration order.
|
|
2161
|
-
//
|
|
2162
|
-
// Invariants worth keeping in mind here:
|
|
2163
|
-
// - Fires at parse/safe time AND at DB hydrate time (declared fields
|
|
2164
|
-
// are populated by then in both paths).
|
|
2165
|
-
// - NOT re-run on field mutation — the value is materialized once at
|
|
2166
|
-
// instance creation and stays. Use ~> for live recomputation.
|
|
2167
|
-
// - Stored as own enumerable properties, so they round-trip through
|
|
2168
|
-
// Object.keys and JSON.stringify. Excluded from DB persistence by
|
|
2169
|
-
// _getSaveableData (writes declared fields only).
|
|
2170
|
-
// - Thrown errors propagate. parse() wraps them into SchemaError
|
|
2171
|
-
// before surfacing; safe() captures into {error: 'derived'}
|
|
2172
|
-
// issues; hydrate lets them crash fast as data-integrity signals.
|
|
2173
|
-
_applyEagerDerived(inst) {
|
|
2174
|
-
const norm = this._normalize();
|
|
2175
|
-
if (!norm.derived.size) return;
|
|
2176
|
-
for (const [n, fn] of norm.derived) {
|
|
2177
|
-
const v = fn.call(inst);
|
|
2178
|
-
Object.defineProperty(inst, n, {
|
|
2179
|
-
value: v, enumerable: true, writable: true, configurable: true,
|
|
2180
|
-
});
|
|
2181
|
-
}
|
|
2182
|
-
}
|
|
2183
|
-
|
|
2184
|
-
// Run '@ensure' predicates — schema-level cross-field invariants —
|
|
2185
|
-
// against a fully-typed, fully-defaulted data object. Returns [] if
|
|
2186
|
-
// all pass, or an array of {field: '', error: 'ensure', message}
|
|
2187
|
-
// issues for every failing predicate.
|
|
2188
|
-
//
|
|
2189
|
-
// Naming: '_applyEnsures' mirrors '_applyTransforms' and
|
|
2190
|
-
// '_applyEagerDerived' — runtime method name matches the directive
|
|
2191
|
-
// it services. The industry term for this pattern is 'refinement'
|
|
2192
|
-
// (Zod's '.refine', design-by-contract postconditions); in Rip the
|
|
2193
|
-
// user-visible name is '@ensure' and the code tracks that.
|
|
2194
|
-
//
|
|
2195
|
-
// Semantics:
|
|
2196
|
-
// - Truthy return → pass; falsy → fail with the declared message.
|
|
2197
|
-
// - Thrown exception → fail with the declared message (the thrown
|
|
2198
|
-
// error's own message is used only if the @ensure declared no
|
|
2199
|
-
// message, which can't happen via the parser since message is
|
|
2200
|
-
// required — but downstream code-built defs might omit it).
|
|
2201
|
-
// - All @ensures run; declaration order preserved in output.
|
|
2202
|
-
// - Caller short-circuits: per-field validation errors skip this
|
|
2203
|
-
// step entirely (predicates assume field types are correct).
|
|
2204
|
-
// - Skipped on _hydrate — trusted DB data bypasses @ensures.
|
|
2205
|
-
_applyEnsures(data) {
|
|
2206
|
-
const norm = this._normalize();
|
|
2207
|
-
if (!norm.ensures.length) return [];
|
|
2208
|
-
const errs = [];
|
|
2209
|
-
for (const r of norm.ensures) {
|
|
2210
|
-
let ok = false;
|
|
2211
|
-
try {
|
|
2212
|
-
ok = !!r.fn(data);
|
|
2213
|
-
} catch (e) {
|
|
2214
|
-
errs.push({
|
|
2215
|
-
field: '', error: 'ensure',
|
|
2216
|
-
message: r.message || e?.message || 'ensure failed',
|
|
2217
|
-
});
|
|
2218
|
-
continue;
|
|
2219
|
-
}
|
|
2220
|
-
if (!ok) {
|
|
2221
|
-
errs.push({
|
|
2222
|
-
field: '', error: 'ensure',
|
|
2223
|
-
message: r.message || 'ensure failed',
|
|
2224
|
-
});
|
|
2225
|
-
}
|
|
2226
|
-
}
|
|
2227
|
-
return errs;
|
|
2228
|
-
}
|
|
2229
|
-
|
|
2230
|
-
_getClass() {
|
|
2231
|
-
if (this._klass) return this._klass;
|
|
2232
|
-
const norm = this._normalize();
|
|
2233
|
-
const name = this.name || 'Schema';
|
|
2234
|
-
const def = this;
|
|
2235
|
-
|
|
2236
|
-
const fieldNames = [...norm.fields.keys()];
|
|
2237
|
-
const klass = ({[name]: class {
|
|
2238
|
-
constructor(data, persisted = false) {
|
|
2239
|
-
// Internal state is non-enumerable so Object.keys(inst) lists
|
|
2240
|
-
// only declared fields that received a value.
|
|
2241
|
-
Object.defineProperty(this, '_dirty', { value: new Set(), enumerable: false, writable: false, configurable: true });
|
|
2242
|
-
Object.defineProperty(this, '_persisted', { value: persisted === true, enumerable: false, writable: true, configurable: true });
|
|
2243
|
-
Object.defineProperty(this, '_snapshot', { value: null, enumerable: false, writable: true, configurable: true });
|
|
2244
|
-
if (data && typeof data === 'object') {
|
|
2245
|
-
for (const k of fieldNames) {
|
|
2246
|
-
if (k in data && data[k] !== undefined) this[k] = data[k];
|
|
2247
|
-
}
|
|
2248
|
-
}
|
|
2249
|
-
}
|
|
2250
|
-
}})[name];
|
|
2251
|
-
|
|
2252
|
-
for (const [n, fn] of norm.methods) {
|
|
2253
|
-
Object.defineProperty(klass.prototype, n, {
|
|
2254
|
-
value: fn, writable: true, enumerable: false, configurable: true,
|
|
2255
|
-
});
|
|
2256
|
-
}
|
|
2257
|
-
for (const [n, fn] of norm.computed) {
|
|
2258
|
-
Object.defineProperty(klass.prototype, n, {
|
|
2259
|
-
get: fn, enumerable: false, configurable: true,
|
|
2260
|
-
});
|
|
2261
|
-
}
|
|
2262
|
-
|
|
2263
|
-
// Relation methods: user.organization(). Accepts no args; returns
|
|
2264
|
-
// a promise to a target-model instance (or array for has_many).
|
|
2265
|
-
for (const [acc, rel] of norm.relations) {
|
|
2266
|
-
Object.defineProperty(klass.prototype, acc, {
|
|
2267
|
-
enumerable: false, configurable: true,
|
|
2268
|
-
value: async function() { return __schemaResolveRelation(def, this, rel); },
|
|
2269
|
-
});
|
|
2270
|
-
}
|
|
2271
|
-
|
|
2272
|
-
// Instance ORM methods — only for :model kind.
|
|
2273
|
-
if (this.kind === 'model') {
|
|
2274
|
-
Object.defineProperty(klass.prototype, 'save', {
|
|
2275
|
-
enumerable: false, configurable: true, writable: true,
|
|
2276
|
-
value: async function() { return __schemaSave(def, this); },
|
|
2277
|
-
});
|
|
2278
|
-
Object.defineProperty(klass.prototype, 'destroy', {
|
|
2279
|
-
enumerable: false, configurable: true, writable: true,
|
|
2280
|
-
value: async function() { return __schemaDestroy(def, this); },
|
|
2281
|
-
});
|
|
2282
|
-
Object.defineProperty(klass.prototype, 'ok', {
|
|
2283
|
-
enumerable: false, configurable: true, writable: true,
|
|
2284
|
-
value: function() { return def._validateFields(this, false); },
|
|
2285
|
-
});
|
|
2286
|
-
Object.defineProperty(klass.prototype, 'errors', {
|
|
2287
|
-
enumerable: false, configurable: true, writable: true,
|
|
2288
|
-
value: function() { return def._validateFields(this, true); },
|
|
2289
|
-
});
|
|
2290
|
-
// toJSON mirrors the instance's own enumerable properties, which by
|
|
2291
|
-
// construction are: the primary key, declared fields, @timestamps
|
|
2292
|
-
// columns, @softDelete timestamp, @belongs_to FK columns, and any
|
|
2293
|
-
// !> eager-derived fields. Internal state (_dirty, _persisted,
|
|
2294
|
-
// _snapshot) is defined non-enumerable; methods and ~> computed
|
|
2295
|
-
// getters live on the prototype. So iterating own keys picks up
|
|
2296
|
-
// exactly the user-facing wire shape without special-casing each
|
|
2297
|
-
// category — and stays correct when new implicit columns get added
|
|
2298
|
-
// to the runtime.
|
|
2299
|
-
Object.defineProperty(klass.prototype, 'toJSON', {
|
|
2300
|
-
enumerable: false, configurable: true, writable: true,
|
|
2301
|
-
value: function() {
|
|
2302
|
-
const out = {};
|
|
2303
|
-
for (const k of Object.keys(this)) out[k] = this[k];
|
|
2304
|
-
return out;
|
|
2305
|
-
},
|
|
2306
|
-
});
|
|
2307
|
-
}
|
|
2308
|
-
|
|
2309
|
-
this._klass = klass;
|
|
2310
|
-
return klass;
|
|
2311
|
-
}
|
|
2312
|
-
|
|
2313
|
-
_hydrate(columns, row) {
|
|
2314
|
-
// DB rows are trusted: hydrate into a class instance without
|
|
2315
|
-
// revalidating. Column names arrive snake_case; declared fields live
|
|
2316
|
-
// under their camelCase names, and implicit columns (id, created_at,
|
|
2317
|
-
// updated_at, relation FKs) surface under their camelCase equivalents.
|
|
2318
|
-
// Each snake_case column name also aliases the camelCase property via
|
|
2319
|
-
// a non-enumerable accessor so order.user_id and order.userId read
|
|
2320
|
-
// the same slot — useful when DB column names leak into user code
|
|
2321
|
-
// via raw SQL helpers.
|
|
2322
|
-
const data = {};
|
|
2323
|
-
for (let i = 0; i < columns.length; i++) {
|
|
2324
|
-
data[__schemaCamel(columns[i].name)] = row[i];
|
|
2325
|
-
}
|
|
2326
|
-
const k = this._getClass();
|
|
2327
|
-
const inst = new k(data, true);
|
|
2328
|
-
for (const key of Object.keys(data)) {
|
|
2329
|
-
if (!(key in inst)) {
|
|
2330
|
-
Object.defineProperty(inst, key, {
|
|
2331
|
-
value: data[key], enumerable: true, writable: true, configurable: true,
|
|
2332
|
-
});
|
|
2333
|
-
}
|
|
2334
|
-
}
|
|
2335
|
-
for (let i = 0; i < columns.length; i++) {
|
|
2336
|
-
const snake = columns[i].name;
|
|
2337
|
-
const camel = __schemaCamel(snake);
|
|
2338
|
-
if (snake !== camel && !(snake in inst)) {
|
|
2339
|
-
Object.defineProperty(inst, snake, {
|
|
2340
|
-
enumerable: false, configurable: true,
|
|
2341
|
-
get() { return this[camel]; },
|
|
2342
|
-
set(v) { this[camel] = v; },
|
|
2343
|
-
});
|
|
2344
|
-
}
|
|
2345
|
-
}
|
|
2346
|
-
// Eager-derived fields re-run on hydrate — they're not persisted
|
|
2347
|
-
// and must be re-computed from the declared fields now present.
|
|
2348
|
-
this._applyEagerDerived(inst);
|
|
2349
|
-
return inst;
|
|
2350
|
-
}
|
|
2351
|
-
|
|
2352
|
-
_validateFields(data, collect) {
|
|
2353
|
-
const norm = this._normalize();
|
|
2354
|
-
const errors = collect ? [] : null;
|
|
2355
|
-
for (const [n, f] of norm.fields) {
|
|
2356
|
-
const v = data == null ? undefined : data[n];
|
|
2357
|
-
if (v === undefined || v === null) {
|
|
2358
|
-
if (f.required) {
|
|
2359
|
-
if (!collect) return false;
|
|
2360
|
-
errors.push({field: n, error: 'required', message: n + ' is required'});
|
|
2361
|
-
}
|
|
2362
|
-
continue;
|
|
2363
|
-
}
|
|
2364
|
-
if (f.array) {
|
|
2365
|
-
if (!Array.isArray(v)) {
|
|
2366
|
-
if (!collect) return false;
|
|
2367
|
-
errors.push({field: n, error: 'type', message: n + ' must be an array'});
|
|
2368
|
-
continue;
|
|
2369
|
-
}
|
|
2370
|
-
let bad = false;
|
|
2371
|
-
for (let i = 0; i < v.length; i++) {
|
|
2372
|
-
const issues = __schemaValidateValue(v[i], f.typeName);
|
|
2373
|
-
if (issues) {
|
|
2374
|
-
if (!collect) return false;
|
|
2375
|
-
const head = n + '[' + i + ']';
|
|
2376
|
-
for (const e of issues) {
|
|
2377
|
-
const joined = __schemaJoinField(head, e.field);
|
|
2378
|
-
errors.push({
|
|
2379
|
-
field: joined,
|
|
2380
|
-
error: e.error,
|
|
2381
|
-
message: __schemaRewriteMessage(joined, e.field, e.message),
|
|
2382
|
-
});
|
|
2383
|
-
}
|
|
2384
|
-
bad = true;
|
|
2385
|
-
}
|
|
2386
|
-
}
|
|
2387
|
-
if (bad) continue;
|
|
2388
|
-
} else if (f.typeName === 'literal-union') {
|
|
2389
|
-
if (!f.literals.includes(v)) {
|
|
2390
|
-
if (!collect) return false;
|
|
2391
|
-
errors.push({field: n, error: 'enum', message: n + ' must be one of ' + f.literals.map(l => JSON.stringify(l)).join(', ')});
|
|
2392
|
-
continue;
|
|
2393
|
-
}
|
|
2394
|
-
} else {
|
|
2395
|
-
const issues = __schemaValidateValue(v, f.typeName);
|
|
2396
|
-
if (issues) {
|
|
2397
|
-
if (!collect) return false;
|
|
2398
|
-
for (const e of issues) {
|
|
2399
|
-
const joined = __schemaJoinField(n, e.field);
|
|
2400
|
-
errors.push({
|
|
2401
|
-
field: joined,
|
|
2402
|
-
error: e.error,
|
|
2403
|
-
message: __schemaRewriteMessage(joined, e.field, e.message),
|
|
2404
|
-
});
|
|
2405
|
-
}
|
|
2406
|
-
continue;
|
|
2407
|
-
}
|
|
2408
|
-
}
|
|
2409
|
-
// Apply constraint checks.
|
|
2410
|
-
const c = f.constraints;
|
|
2411
|
-
if (c) {
|
|
2412
|
-
if (typeof v === 'string') {
|
|
2413
|
-
if (c.min != null && v.length < c.min) { if (!collect) return false; errors.push({field: n, error: 'min', message: n + ' must be at least ' + c.min + ' chars'}); }
|
|
2414
|
-
if (c.max != null && v.length > c.max) { if (!collect) return false; errors.push({field: n, error: 'max', message: n + ' must be at most ' + c.max + ' chars'}); }
|
|
2415
|
-
if (c.regex && !c.regex.test(v)) { if (!collect) return false; errors.push({field: n, error: 'pattern', message: n + ' is invalid'}); }
|
|
2416
|
-
} else if (typeof v === 'number') {
|
|
2417
|
-
if (c.min != null && v < c.min) { if (!collect) return false; errors.push({field: n, error: 'min', message: n + ' must be >= ' + c.min}); }
|
|
2418
|
-
if (c.max != null && v > c.max) { if (!collect) return false; errors.push({field: n, error: 'max', message: n + ' must be <= ' + c.max}); }
|
|
2419
|
-
}
|
|
2420
|
-
}
|
|
2421
|
-
}
|
|
2422
|
-
return collect ? errors : true;
|
|
2423
|
-
}
|
|
2424
|
-
|
|
2425
|
-
_applyDefaults(data) {
|
|
2426
|
-
const norm = this._normalize();
|
|
2427
|
-
for (const [n, f] of norm.fields) {
|
|
2428
|
-
if ((data[n] === undefined || data[n] === null) && f.constraints?.default !== undefined) {
|
|
2429
|
-
const d = f.constraints.default;
|
|
2430
|
-
data[n] = (typeof d === 'object' && d !== null && !(d instanceof RegExp))
|
|
2431
|
-
? structuredClone(d) : d;
|
|
2432
|
-
}
|
|
2433
|
-
}
|
|
2434
|
-
return data;
|
|
2435
|
-
}
|
|
2436
|
-
|
|
2437
|
-
// Inline field transforms run once during parse (and safe/ok), never
|
|
2438
|
-
// during DB hydrate. Each transform receives the whole raw input
|
|
2439
|
-
// object as 'it'; its return value becomes the field's candidate
|
|
2440
|
-
// value before default + validation. Transform errors surface as
|
|
2441
|
-
// {error: 'transform'} issues on the final result.
|
|
2442
|
-
_applyTransforms(raw, working) {
|
|
2443
|
-
const norm = this._normalize();
|
|
2444
|
-
const errors = [];
|
|
2445
|
-
for (const [n, f] of norm.fields) {
|
|
2446
|
-
if (!f.transform) continue;
|
|
2447
|
-
try {
|
|
2448
|
-
working[n] = f.transform(raw);
|
|
2449
|
-
} catch (e) {
|
|
2450
|
-
errors.push({field: n, error: 'transform', message: e?.message || String(e)});
|
|
2451
|
-
}
|
|
2452
|
-
}
|
|
2453
|
-
return errors;
|
|
2454
|
-
}
|
|
2455
|
-
|
|
2456
|
-
_validateEnum(data, collect) {
|
|
2457
|
-
const norm = this._normalize();
|
|
2458
|
-
for (const [n, v] of norm.enumMembers) {
|
|
2459
|
-
if (data === n || data === v) return collect ? [] : true;
|
|
2460
|
-
}
|
|
2461
|
-
if (!collect) return false;
|
|
2462
|
-
const members = [...norm.enumMembers.keys()].join(', ');
|
|
2463
|
-
return [{field: '', error: 'enum', message: (this.name || 'enum') + ' expected one of: ' + members}];
|
|
2464
|
-
}
|
|
2465
|
-
|
|
2466
|
-
_materializeEnum(data) {
|
|
2467
|
-
const norm = this._normalize();
|
|
2468
|
-
for (const [n, v] of norm.enumMembers) {
|
|
2469
|
-
if (data === n || data === v) return v;
|
|
2470
|
-
}
|
|
2471
|
-
return data;
|
|
2472
|
-
}
|
|
2473
|
-
|
|
2474
|
-
// Canonical field parse pipeline — run per-field in declaration order,
|
|
2475
|
-
// then an after-fields pass for eager-derived. This is the SINGLE
|
|
2476
|
-
// source of truth for parse-time field semantics; _hydrate bypasses
|
|
2477
|
-
// steps 1-5 entirely (DB rows arrive canonical) and picks up at step 7.
|
|
2478
|
-
//
|
|
2479
|
-
// 1. Obtain raw candidate — transform(raw) if declared, else raw[name]
|
|
2480
|
-
// 2. Apply default — if candidate missing/undefined
|
|
2481
|
-
// 3. Required check — optional/required/nullability
|
|
2482
|
-
// 4. Type validation — primitive / literal-union / array
|
|
2483
|
-
// 5. Constraint checks — range, regex, attrs
|
|
2484
|
-
// 6. Assign to instance — own enumerable property
|
|
2485
|
-
// 7. Eager-derived pass — run !> entries in declaration order
|
|
2486
|
-
//
|
|
2487
|
-
// Transforms (step 1) run on parse/safe/ok only. Hydrate skips them
|
|
2488
|
-
// because DB columns already hold the canonical values. Eager-derived
|
|
2489
|
-
// (step 7) fires on BOTH paths so hydrated instances have the same
|
|
2490
|
-
// shape as parsed ones.
|
|
2491
|
-
parse(data) {
|
|
2492
|
-
if (this.kind === 'mixin') {
|
|
2493
|
-
throw new Error(":mixin schema '" + (this.name || 'anon') + "' is not instantiable");
|
|
2494
|
-
}
|
|
2495
|
-
if (this.kind === 'enum') {
|
|
2496
|
-
const errs = this._validateEnum(data, true);
|
|
2497
|
-
if (errs.length) throw new SchemaError(errs, this.name, this.kind);
|
|
2498
|
-
return this._materializeEnum(data);
|
|
2499
|
-
}
|
|
2500
|
-
const raw = data || {};
|
|
2501
|
-
const working = { ...raw };
|
|
2502
|
-
const transformErrors = this._applyTransforms(raw, working);
|
|
2503
|
-
this._applyDefaults(working);
|
|
2504
|
-
const errs = transformErrors.concat(this._validateFields(working, true));
|
|
2505
|
-
if (errs.length) throw new SchemaError(errs, this.name, this.kind);
|
|
2506
|
-
// @ensure runs AFTER per-field validation so predicates can
|
|
2507
|
-
// assume declared fields are typed and defaulted. A field-level
|
|
2508
|
-
// failure short-circuits: we never reach this line with errs.
|
|
2509
|
-
const ensureErrs = this._applyEnsures(working);
|
|
2510
|
-
if (ensureErrs.length) throw new SchemaError(ensureErrs, this.name, this.kind);
|
|
2511
|
-
const klass = this._getClass();
|
|
2512
|
-
const inst = new klass(working, false);
|
|
2513
|
-
this._applyEagerDerived(inst);
|
|
2514
|
-
return inst;
|
|
2515
|
-
}
|
|
2516
|
-
|
|
2517
|
-
safe(data) {
|
|
2518
|
-
if (this.kind === 'mixin') {
|
|
2519
|
-
return {ok: false, value: null, errors: [{field: '', error: 'mixin', message: 'not instantiable'}]};
|
|
2520
|
-
}
|
|
2521
|
-
if (this.kind === 'enum') {
|
|
2522
|
-
const errs = this._validateEnum(data, true);
|
|
2523
|
-
if (errs.length) return {ok: false, value: null, errors: errs};
|
|
2524
|
-
return {ok: true, value: this._materializeEnum(data), errors: null};
|
|
2525
|
-
}
|
|
2526
|
-
const raw = data || {};
|
|
2527
|
-
const working = { ...raw };
|
|
2528
|
-
const transformErrors = this._applyTransforms(raw, working);
|
|
2529
|
-
this._applyDefaults(working);
|
|
2530
|
-
const errs = transformErrors.concat(this._validateFields(working, true));
|
|
2531
|
-
if (errs.length) return {ok: false, value: null, errors: errs};
|
|
2532
|
-
const ensureErrs = this._applyEnsures(working);
|
|
2533
|
-
if (ensureErrs.length) return {ok: false, value: null, errors: ensureErrs};
|
|
2534
|
-
const klass = this._getClass();
|
|
2535
|
-
const inst = new klass(working, false);
|
|
2536
|
-
try { this._applyEagerDerived(inst); }
|
|
2537
|
-
catch (e) {
|
|
2538
|
-
return {ok: false, value: null, errors: [{field: '', error: 'derived', message: e?.message || String(e)}]};
|
|
2539
|
-
}
|
|
2540
|
-
return {ok: true, value: inst, errors: null};
|
|
2541
|
-
}
|
|
2542
|
-
|
|
2543
|
-
ok(data) {
|
|
2544
|
-
if (this.kind === 'mixin') return false;
|
|
2545
|
-
if (this.kind === 'enum') return this._validateEnum(data, false);
|
|
2546
|
-
const raw = data || {};
|
|
2547
|
-
const working = { ...raw };
|
|
2548
|
-
const transformErrors = this._applyTransforms(raw, working);
|
|
2549
|
-
if (transformErrors.length) return false;
|
|
2550
|
-
this._applyDefaults(working);
|
|
2551
|
-
if (!this._validateFields(working, false)) return false;
|
|
2552
|
-
// Per-field validation passed — @ensure predicates are the final gate.
|
|
2553
|
-
return this._applyEnsures(working).length === 0;
|
|
2554
|
-
}
|
|
2555
|
-
|
|
2556
|
-
// ---- :model static ORM methods --------------------------------------------
|
|
2557
|
-
|
|
2558
|
-
async find(id) {
|
|
2559
|
-
this._assertModel('find');
|
|
2560
|
-
const norm = this._normalize();
|
|
2561
|
-
const soft = norm.softDelete ? ' AND "deleted_at" IS NULL' : '';
|
|
2562
|
-
const sql = 'SELECT * FROM "' + norm.tableName + '" WHERE "' + norm.primaryKey + '" = ?' + soft + ' LIMIT 1';
|
|
2563
|
-
const res = await __schemaAdapter.query(sql, [id]);
|
|
2564
|
-
if (!res.rows) return null;
|
|
2565
|
-
return this._hydrate(res.columns, res.data[0]);
|
|
2566
|
-
}
|
|
2567
|
-
|
|
2568
|
-
where(cond, ...params) {
|
|
2569
|
-
this._assertModel('where');
|
|
2570
|
-
return new __SchemaQuery(this).where(cond, ...params);
|
|
2571
|
-
}
|
|
2572
|
-
|
|
2573
|
-
all() {
|
|
2574
|
-
this._assertModel('all');
|
|
2575
|
-
return new __SchemaQuery(this).all();
|
|
2576
|
-
}
|
|
2577
|
-
|
|
2578
|
-
first() {
|
|
2579
|
-
this._assertModel('first');
|
|
2580
|
-
return new __SchemaQuery(this).first();
|
|
2581
|
-
}
|
|
2582
|
-
|
|
2583
|
-
count() {
|
|
2584
|
-
this._assertModel('count');
|
|
2585
|
-
return new __SchemaQuery(this).count();
|
|
2586
|
-
}
|
|
2587
|
-
|
|
2588
|
-
async create(data) {
|
|
2589
|
-
this._assertModel('create');
|
|
2590
|
-
// Input keys may be snake_case or camelCase; the runtime
|
|
2591
|
-
// canonicalizes to camelCase so instance properties line up with
|
|
2592
|
-
// declared field names.
|
|
2593
|
-
const klass = this._getClass();
|
|
2594
|
-
const canonical = {};
|
|
2595
|
-
if (data && typeof data === 'object') {
|
|
2596
|
-
for (const k of Object.keys(data)) canonical[__schemaCamel(k)] = data[k];
|
|
2597
|
-
}
|
|
2598
|
-
const inst = new klass(this._applyDefaults(canonical), false);
|
|
2599
|
-
// FK columns like user_id canonicalize to userId and need to
|
|
2600
|
-
// round-trip through the INSERT path, so attach them as own
|
|
2601
|
-
// properties even though they aren't declared fields.
|
|
2602
|
-
for (const [k, v] of Object.entries(canonical)) {
|
|
2603
|
-
if (!(k in inst)) {
|
|
2604
|
-
Object.defineProperty(inst, k, { value: v, enumerable: true, writable: true, configurable: true });
|
|
2605
|
-
}
|
|
2606
|
-
}
|
|
2607
|
-
await __schemaSave(this, inst);
|
|
2608
|
-
return inst;
|
|
2609
|
-
}
|
|
2610
|
-
|
|
2611
|
-
toSQL(options) {
|
|
2612
|
-
this._assertModel('toSQL');
|
|
2613
|
-
return __schemaToSQL(this, options);
|
|
2614
|
-
}
|
|
2615
|
-
|
|
2616
|
-
_assertModel(api) {
|
|
2617
|
-
if (this.kind !== 'model') {
|
|
2618
|
-
throw new Error('schema: .' + api + '() is :model-only (got :' + this.kind + ')');
|
|
2619
|
-
}
|
|
2620
|
-
}
|
|
2621
|
-
|
|
2622
|
-
// ---- Schema algebra (Phase 6) --------------------------------------------
|
|
2623
|
-
// Invariant: every algebra operation returns a :shape. Model algebra
|
|
2624
|
-
// strips ORM; :shape algebra drops behavior. Derived shapes preserve
|
|
2625
|
-
// field metadata (constraints, defaults, modifiers) from the source
|
|
2626
|
-
// normalized descriptor.
|
|
2627
|
-
|
|
2628
|
-
pick(...keys) {
|
|
2629
|
-
return __schemaDerive(this, (src) => {
|
|
2630
|
-
const names = __schemaFlatten(keys);
|
|
2631
|
-
const out = new Map();
|
|
2632
|
-
for (const k of names) {
|
|
2633
|
-
if (!src.has(k)) throw new Error("pick: unknown field '" + k + "' on " + (this.name || 'schema'));
|
|
2634
|
-
out.set(k, src.get(k));
|
|
2635
|
-
}
|
|
2636
|
-
return out;
|
|
2637
|
-
});
|
|
2638
|
-
}
|
|
2639
|
-
|
|
2640
|
-
omit(...keys) {
|
|
2641
|
-
return __schemaDerive(this, (src) => {
|
|
2642
|
-
const drop = new Set(__schemaFlatten(keys));
|
|
2643
|
-
const out = new Map();
|
|
2644
|
-
for (const [k, v] of src) if (!drop.has(k)) out.set(k, v);
|
|
2645
|
-
return out;
|
|
2646
|
-
});
|
|
2647
|
-
}
|
|
2648
|
-
|
|
2649
|
-
partial() {
|
|
2650
|
-
return __schemaDerive(this, (src) => {
|
|
2651
|
-
const out = new Map();
|
|
2652
|
-
for (const [k, v] of src) out.set(k, { ...v, required: false });
|
|
2653
|
-
return out;
|
|
2654
|
-
});
|
|
2655
|
-
}
|
|
2656
|
-
|
|
2657
|
-
required(...keys) {
|
|
2658
|
-
return __schemaDerive(this, (src) => {
|
|
2659
|
-
const req = new Set(__schemaFlatten(keys));
|
|
2660
|
-
const out = new Map();
|
|
2661
|
-
for (const [k, v] of src) out.set(k, { ...v, required: req.has(k) ? true : v.required });
|
|
2662
|
-
return out;
|
|
2663
|
-
});
|
|
2664
|
-
}
|
|
2665
|
-
|
|
2666
|
-
extend(other) {
|
|
2667
|
-
if (!(other instanceof __SchemaDef)) {
|
|
2668
|
-
throw new Error('extend(): argument must be a schema value');
|
|
2669
|
-
}
|
|
2670
|
-
return __schemaDerive(this, (src) => {
|
|
2671
|
-
const merged = new Map(src);
|
|
2672
|
-
const otherFields = other._normalize().fields;
|
|
2673
|
-
for (const [k, v] of otherFields) {
|
|
2674
|
-
if (merged.has(k)) {
|
|
2675
|
-
throw new Error("extend(): field '" + k + "' collides between " + (this.name || 'schema') + " and " + (other.name || 'other'));
|
|
2676
|
-
}
|
|
2677
|
-
merged.set(k, v);
|
|
2678
|
-
}
|
|
2679
|
-
return merged;
|
|
2680
|
-
});
|
|
2681
|
-
}
|
|
2682
|
-
}
|
|
2683
|
-
|
|
2684
|
-
function __schemaFlatten(keys) {
|
|
2685
|
-
const out = [];
|
|
2686
|
-
for (const k of keys) {
|
|
2687
|
-
if (typeof k === 'symbol') out.push(Symbol.keyFor(k) || k.description);
|
|
2688
|
-
else if (Array.isArray(k)) for (const kk of k) out.push(typeof kk === 'symbol' ? (Symbol.keyFor(kk) || kk.description) : kk);
|
|
2689
|
-
else out.push(k);
|
|
2690
|
-
}
|
|
2691
|
-
return out;
|
|
2692
|
-
}
|
|
2693
|
-
|
|
2694
|
-
// Schema algebra — .pick / .omit / .partial / .required / .extend all
|
|
2695
|
-
// land here. The v2 invariants encoded in this function:
|
|
2696
|
-
//
|
|
2697
|
-
// - Derived schemas are always kind: 'shape', regardless of source kind.
|
|
2698
|
-
// ORM surface on :model is dropped.
|
|
2699
|
-
// - Field semantics SURVIVE algebra: type, literals, constraints,
|
|
2700
|
-
// inline transforms. Transforms-survive means a derived schema can
|
|
2701
|
-
// still read raw-input keys that aren't in its declared output shape.
|
|
2702
|
-
// - Instance behavior DOES NOT survive: methods, computed (~>), eager
|
|
2703
|
-
// derived (!>), and hooks all get dropped because the rebuilt
|
|
2704
|
-
// descriptor has no callable entries.
|
|
2705
|
-
// - _sourceModel propagates through chained algebra so tooling can
|
|
2706
|
-
// trace derived shapes back to the origin :model.
|
|
2707
|
-
function __schemaDerive(source, transform) {
|
|
2708
|
-
const src = source._normalize().fields;
|
|
2709
|
-
const derivedFields = transform(src);
|
|
2710
|
-
const entries = [];
|
|
2711
|
-
for (const [, f] of derivedFields) {
|
|
2712
|
-
const mods = [];
|
|
2713
|
-
if (f.required) mods.push('!');
|
|
2714
|
-
if (f.unique) mods.push('#');
|
|
2715
|
-
if (f.optional && !f.required) mods.push('?');
|
|
2716
|
-
entries.push({
|
|
2717
|
-
tag: 'field', name: f.name, modifiers: mods,
|
|
2718
|
-
typeName: f.typeName, array: f.array,
|
|
2719
|
-
literals: f.literals || null,
|
|
2720
|
-
constraints: f.constraints,
|
|
2721
|
-
transform: f.transform || null,
|
|
2722
|
-
});
|
|
2723
|
-
}
|
|
2724
|
-
const name = (source.name || 'Schema') + 'Derived';
|
|
2725
|
-
const derived = new __SchemaDef({ kind: 'shape', name, entries });
|
|
2726
|
-
// sourceModel propagates through chained algebra. Tooling can follow
|
|
2727
|
-
// the chain back to the original :model for projection hints.
|
|
2728
|
-
derived._sourceModel = source._sourceModel || (source.kind === 'model' ? source : null);
|
|
2729
|
-
return derived;
|
|
2730
|
-
}
|
|
2731
|
-
|
|
2732
|
-
function __schemaCamel(col) { return String(col).replace(/_([a-z])/g, (_, c) => c.toUpperCase()); }
|
|
2733
|
-
|
|
2734
|
-
function __schemaNormalizeDirectiveRelation(directive, ownerModel) {
|
|
2735
|
-
const args = directive.args;
|
|
2736
|
-
if (!args || !args.length) return null;
|
|
2737
|
-
const a = args[0];
|
|
2738
|
-
const name = directive.name;
|
|
2739
|
-
if (name === 'belongs_to') {
|
|
2740
|
-
const targetLc = a.target[0].toLowerCase() + a.target.slice(1);
|
|
2741
|
-
return { kind: 'belongsTo', target: a.target, accessor: targetLc, foreignKey: __schemaFkName(a.target), optional: !!a.optional };
|
|
2742
|
-
}
|
|
2743
|
-
if (name === 'has_one' || name === 'one') {
|
|
2744
|
-
const targetLc = a.target[0].toLowerCase() + a.target.slice(1);
|
|
2745
|
-
return { kind: 'hasOne', target: a.target, accessor: targetLc, foreignKey: __schemaFkName(ownerModel), optional: !!a.optional };
|
|
2746
|
-
}
|
|
2747
|
-
if (name === 'has_many' || name === 'many') {
|
|
2748
|
-
const targetLc = a.target[0].toLowerCase() + a.target.slice(1);
|
|
2749
|
-
return { kind: 'hasMany', target: a.target, accessor: __schemaPluralize(targetLc), foreignKey: __schemaFkName(ownerModel), optional: !!a.optional };
|
|
2750
|
-
}
|
|
2751
|
-
return null;
|
|
2752
|
-
}
|
|
2753
|
-
|
|
2754
|
-
function __schemaExpandMixins(host, fields, directives, ctx) {
|
|
2755
|
-
for (const d of directives) {
|
|
2756
|
-
if (d.name !== 'mixin' || !d.args || !d.args[0]) continue;
|
|
2757
|
-
const target = d.args[0].target;
|
|
2758
|
-
if (!target) continue;
|
|
2759
|
-
if (ctx.stack.includes(target)) {
|
|
2760
|
-
throw new SchemaError(
|
|
2761
|
-
[{field: '', error: 'mixin-cycle', message: 'mixin cycle: ' + ctx.stack.concat(target).join(' -> ')}],
|
|
2762
|
-
host.name, host.kind);
|
|
2763
|
-
}
|
|
2764
|
-
if (ctx.seen.has(target)) continue;
|
|
2765
|
-
const mx = __SchemaRegistry.getKind(target, 'mixin');
|
|
2766
|
-
if (!mx) {
|
|
2767
|
-
throw new SchemaError(
|
|
2768
|
-
[{field: '', error: 'mixin-missing', message: 'unknown mixin: ' + target}],
|
|
2769
|
-
host.name, host.kind);
|
|
2770
|
-
}
|
|
2771
|
-
ctx.seen.add(target);
|
|
2772
|
-
ctx.stack.push(target);
|
|
2773
|
-
// Recurse into nested mixins first (depth-first).
|
|
2774
|
-
const childDirectives = mx._desc.entries.filter(e => e.tag === 'directive' && e.name === 'mixin')
|
|
2775
|
-
.map(e => ({ name: e.name, args: e.args || [] }));
|
|
2776
|
-
__schemaExpandMixins(host, fields, childDirectives, ctx);
|
|
2777
|
-
// Then contribute the mixin's own fields.
|
|
2778
|
-
for (const e of mx._desc.entries) {
|
|
2779
|
-
if (e.tag !== 'field') continue;
|
|
2780
|
-
if (fields.has(e.name)) {
|
|
2781
|
-
throw new SchemaError(
|
|
2782
|
-
[{field: e.name, error: 'mixin-collision', message: e.name + ' from mixin ' + target + ' collides with existing field'}],
|
|
2783
|
-
host.name, host.kind);
|
|
2784
|
-
}
|
|
2785
|
-
fields.set(e.name, {
|
|
2786
|
-
name: e.name,
|
|
2787
|
-
required: e.modifiers.includes('!'),
|
|
2788
|
-
unique: e.modifiers.includes('#'),
|
|
2789
|
-
optional: e.modifiers.includes('?'),
|
|
2790
|
-
typeName: e.typeName,
|
|
2791
|
-
literals: e.literals || null,
|
|
2792
|
-
array: e.array === true,
|
|
2793
|
-
constraints: e.constraints || null,
|
|
2794
|
-
transform: e.transform || null,
|
|
2795
|
-
});
|
|
2796
|
-
}
|
|
2797
|
-
ctx.stack.pop();
|
|
2798
|
-
}
|
|
2799
|
-
}
|
|
2800
|
-
|
|
2801
|
-
async function __schemaResolveRelation(def, inst, rel) {
|
|
2802
|
-
const target = __SchemaRegistry.get(rel.target);
|
|
2803
|
-
if (!target) throw new Error('schema: unknown relation target "' + rel.target + '" from ' + (def.name || 'anon'));
|
|
2804
|
-
const pk = def._normalize().primaryKey;
|
|
2805
|
-
if (rel.kind === 'belongsTo') {
|
|
2806
|
-
const fk = inst[__schemaCamel(rel.foreignKey)];
|
|
2807
|
-
return fk != null ? await target.find(fk) : null;
|
|
2808
|
-
}
|
|
2809
|
-
if (rel.kind === 'hasOne') {
|
|
2810
|
-
return await target.where({ [rel.foreignKey]: inst[pk] }).first();
|
|
2811
|
-
}
|
|
2812
|
-
if (rel.kind === 'hasMany') {
|
|
2813
|
-
return await target.where({ [rel.foreignKey]: inst[pk] }).all();
|
|
2814
|
-
}
|
|
2815
|
-
return null;
|
|
2816
|
-
}
|
|
2817
|
-
|
|
2818
|
-
// ---- Save / Destroy --------------------------------------------------------
|
|
2819
|
-
// Rails-style lifecycle (D18):
|
|
2820
|
-
// beforeValidation -> validate -> afterValidation ->
|
|
2821
|
-
// beforeSave -> (beforeCreate|beforeUpdate) -> INSERT/UPDATE ->
|
|
2822
|
-
// (afterCreate|afterUpdate) -> afterSave
|
|
2823
|
-
// Destroy:
|
|
2824
|
-
// beforeDestroy -> DELETE -> afterDestroy
|
|
2825
|
-
|
|
2826
|
-
async function __schemaRunHook(def, inst, name) {
|
|
2827
|
-
const fn = def._normalize().hooks.get(name);
|
|
2828
|
-
if (fn) await fn.call(inst);
|
|
2829
|
-
}
|
|
2830
|
-
|
|
2831
|
-
async function __schemaSave(def, inst) {
|
|
2832
|
-
const norm = def._normalize();
|
|
2833
|
-
const isNew = !inst._persisted;
|
|
2834
|
-
|
|
2835
|
-
await __schemaRunHook(def, inst, 'beforeValidation');
|
|
2836
|
-
const errs = def._validateFields(inst, true);
|
|
2837
|
-
if (errs.length) throw new SchemaError(errs, def.name, def.kind);
|
|
2838
|
-
await __schemaRunHook(def, inst, 'afterValidation');
|
|
2839
|
-
|
|
2840
|
-
await __schemaRunHook(def, inst, 'beforeSave');
|
|
2841
|
-
if (isNew) await __schemaRunHook(def, inst, 'beforeCreate');
|
|
2842
|
-
else await __schemaRunHook(def, inst, 'beforeUpdate');
|
|
2843
|
-
|
|
2844
|
-
if (isNew) {
|
|
2845
|
-
const cols = [], placeholders = [], values = [];
|
|
2846
|
-
for (const [n, f] of norm.fields) {
|
|
2847
|
-
const v = inst[n];
|
|
2848
|
-
if (v == null) continue;
|
|
2849
|
-
cols.push('"' + __schemaSnake(n) + '"');
|
|
2850
|
-
placeholders.push('?');
|
|
2851
|
-
values.push(__schemaSerialize(v, f));
|
|
2852
|
-
}
|
|
2853
|
-
// Include relation FKs. belongsTo FKs are camelCase properties on
|
|
2854
|
-
// the instance (e.g. organizationId for organization_id).
|
|
2855
|
-
for (const [, rel] of norm.relations) {
|
|
2856
|
-
if (rel.kind !== 'belongsTo') continue;
|
|
2857
|
-
const fkCamel = __schemaCamel(rel.foreignKey);
|
|
2858
|
-
const v = inst[fkCamel];
|
|
2859
|
-
if (v != null) {
|
|
2860
|
-
cols.push('"' + rel.foreignKey + '"');
|
|
2861
|
-
placeholders.push('?');
|
|
2862
|
-
values.push(v);
|
|
2863
|
-
}
|
|
2864
|
-
}
|
|
2865
|
-
const sql = 'INSERT INTO "' + norm.tableName + '" (' + cols.join(', ') + ') VALUES (' + placeholders.join(', ') + ') RETURNING *';
|
|
2866
|
-
const res = await __schemaAdapter.query(sql, values);
|
|
2867
|
-
if (res.data?.[0] && res.columns) {
|
|
2868
|
-
for (let i = 0; i < res.columns.length; i++) {
|
|
2869
|
-
const snake = res.columns[i].name;
|
|
2870
|
-
const key = __schemaCamel(snake);
|
|
2871
|
-
if (!(key in inst)) {
|
|
2872
|
-
Object.defineProperty(inst, key, { value: res.data[0][i], enumerable: true, writable: true, configurable: true });
|
|
2873
|
-
} else {
|
|
2874
|
-
inst[key] = res.data[0][i];
|
|
2875
|
-
}
|
|
2876
|
-
if (snake !== key && !(snake in inst)) {
|
|
2877
|
-
Object.defineProperty(inst, snake, {
|
|
2878
|
-
enumerable: false, configurable: true,
|
|
2879
|
-
get() { return this[key]; },
|
|
2880
|
-
set(v) { this[key] = v; },
|
|
2881
|
-
});
|
|
2882
|
-
}
|
|
2883
|
-
}
|
|
2884
|
-
}
|
|
2885
|
-
// Now that the RETURNING columns (id, @timestamps, FKs) are on the
|
|
2886
|
-
// instance, !> eager-derived fields can see them. Mirrors the hydrate
|
|
2887
|
-
// path, which runs _applyEagerDerived once all declared fields are
|
|
2888
|
-
// populated. Per-docs semantics ("materialize once, not reactive")
|
|
2889
|
-
// still hold — we're firing once, at end of construction, not on
|
|
2890
|
-
// subsequent mutations.
|
|
2891
|
-
def._applyEagerDerived(inst);
|
|
2892
|
-
inst._persisted = true;
|
|
2893
|
-
} else {
|
|
2894
|
-
const sets = [], values = [];
|
|
2895
|
-
for (const [n, f] of norm.fields) {
|
|
2896
|
-
sets.push('"' + __schemaSnake(n) + '" = ?');
|
|
2897
|
-
values.push(__schemaSerialize(inst[n], f));
|
|
2898
|
-
}
|
|
2899
|
-
if (sets.length) {
|
|
2900
|
-
const pk = norm.primaryKey;
|
|
2901
|
-
values.push(inst[pk]);
|
|
2902
|
-
const sql = 'UPDATE "' + norm.tableName + '" SET ' + sets.join(', ') + ' WHERE "' + pk + '" = ?';
|
|
2903
|
-
await __schemaAdapter.query(sql, values);
|
|
2904
|
-
}
|
|
2905
|
-
}
|
|
2906
|
-
inst._dirty.clear();
|
|
2907
|
-
|
|
2908
|
-
if (isNew) await __schemaRunHook(def, inst, 'afterCreate');
|
|
2909
|
-
else await __schemaRunHook(def, inst, 'afterUpdate');
|
|
2910
|
-
await __schemaRunHook(def, inst, 'afterSave');
|
|
2911
|
-
return inst;
|
|
2912
|
-
}
|
|
2913
|
-
|
|
2914
|
-
async function __schemaDestroy(def, inst) {
|
|
2915
|
-
if (!inst._persisted) return inst;
|
|
2916
|
-
const norm = def._normalize();
|
|
2917
|
-
await __schemaRunHook(def, inst, 'beforeDestroy');
|
|
2918
|
-
if (norm.softDelete) {
|
|
2919
|
-
const now = new Date().toISOString();
|
|
2920
|
-
await __schemaAdapter.query('UPDATE "' + norm.tableName + '" SET "deleted_at" = ? WHERE "' + norm.primaryKey + '" = ?', [now, inst[norm.primaryKey]]);
|
|
2921
|
-
inst.deletedAt = now;
|
|
2922
|
-
} else {
|
|
2923
|
-
await __schemaAdapter.query('DELETE FROM "' + norm.tableName + '" WHERE "' + norm.primaryKey + '" = ?', [inst[norm.primaryKey]]);
|
|
2924
|
-
inst._persisted = false;
|
|
2925
|
-
}
|
|
2926
|
-
await __schemaRunHook(def, inst, 'afterDestroy');
|
|
2927
|
-
return inst;
|
|
2928
|
-
}
|
|
2929
|
-
|
|
2930
|
-
function __schemaSerialize(v, field) {
|
|
2931
|
-
if (field && field.typeName === 'json' && v != null && typeof v === 'object') {
|
|
2932
|
-
return JSON.stringify(v);
|
|
2933
|
-
}
|
|
2934
|
-
return v;
|
|
2935
|
-
}
|
|
2936
|
-
|
|
2937
|
-
// ---- DDL emission (.toSQL) --------------------------------------------------
|
|
2938
|
-
// Layer 4b: runs on first .toSQL() call. Independent of ORM — scripts
|
|
2939
|
-
// that build schema from DDL never touch .find/.create.
|
|
2940
|
-
|
|
2941
|
-
const __SCHEMA_SQL_TYPES = {
|
|
2942
|
-
string: 'VARCHAR', text: 'TEXT', integer: 'INTEGER', number: 'DOUBLE',
|
|
2943
|
-
boolean: 'BOOLEAN', date: 'DATE', datetime: 'TIMESTAMP', email: 'VARCHAR',
|
|
2944
|
-
url: 'VARCHAR', uuid: 'UUID', phone: 'VARCHAR', zip: 'VARCHAR', json: 'JSON', any: 'JSON',
|
|
2945
|
-
};
|
|
2946
|
-
|
|
2947
|
-
function __schemaToSQL(def, options) {
|
|
2948
|
-
const opts = options || {};
|
|
2949
|
-
const { dropFirst = false, header } = opts;
|
|
2950
|
-
const norm = def._normalize();
|
|
2951
|
-
const blocks = [];
|
|
2952
|
-
if (header) blocks.push(header);
|
|
2953
|
-
|
|
2954
|
-
const table = norm.tableName;
|
|
2955
|
-
const seq = table + '_seq';
|
|
2956
|
-
if (dropFirst) {
|
|
2957
|
-
blocks.push('DROP TABLE IF EXISTS ' + table + ' CASCADE;\\nDROP SEQUENCE IF EXISTS ' + seq + ';');
|
|
2958
|
-
}
|
|
2959
|
-
|
|
2960
|
-
// Sequence seed: explicit option wins over @idStart directive wins over 1.
|
|
2961
|
-
// DuckDB 1.5.2 does not implement ALTER SEQUENCE ... RESTART WITH N, so the
|
|
2962
|
-
// baseline has to be set at creation — hence the knob lives here, not in a
|
|
2963
|
-
// post-create migration.
|
|
2964
|
-
let idStart = 1;
|
|
2965
|
-
for (const d of norm.directives) {
|
|
2966
|
-
if (d.name === 'idStart' && d.args?.[0] && Number.isInteger(d.args[0].value)) {
|
|
2967
|
-
idStart = d.args[0].value;
|
|
2968
|
-
}
|
|
2969
|
-
}
|
|
2970
|
-
if (opts.idStart !== undefined) {
|
|
2971
|
-
if (!Number.isInteger(opts.idStart)) {
|
|
2972
|
-
throw new Error('schema.toSQL(): idStart must be an integer; got ' + String(opts.idStart));
|
|
2973
|
-
}
|
|
2974
|
-
idStart = opts.idStart;
|
|
2975
|
-
}
|
|
2976
|
-
|
|
2977
|
-
const columns = [];
|
|
2978
|
-
const indexes = [];
|
|
2979
|
-
columns.push(' ' + norm.primaryKey + " INTEGER PRIMARY KEY DEFAULT nextval('" + seq + "')");
|
|
2980
|
-
|
|
2981
|
-
for (const [n, f] of norm.fields) {
|
|
2982
|
-
columns.push(__schemaColumnDDL(n, f));
|
|
2983
|
-
if (f.unique) {
|
|
2984
|
-
indexes.push('CREATE UNIQUE INDEX idx_' + table + '_' + __schemaSnake(n) + ' ON ' + table + ' ("' + __schemaSnake(n) + '");');
|
|
2985
|
-
}
|
|
2986
|
-
}
|
|
2987
|
-
|
|
2988
|
-
for (const [, rel] of norm.relations) {
|
|
2989
|
-
if (rel.kind !== 'belongsTo') continue;
|
|
2990
|
-
const refTable = __schemaTableName(rel.target);
|
|
2991
|
-
const notNull = rel.optional ? '' : ' NOT NULL';
|
|
2992
|
-
columns.push(' ' + rel.foreignKey + ' INTEGER' + notNull + ' REFERENCES ' + refTable + '(id)');
|
|
2993
|
-
}
|
|
2994
|
-
|
|
2995
|
-
if (norm.timestamps) {
|
|
2996
|
-
columns.push(' created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP');
|
|
2997
|
-
columns.push(' updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP');
|
|
2998
|
-
}
|
|
2999
|
-
if (norm.softDelete) {
|
|
3000
|
-
columns.push(' deleted_at TIMESTAMP');
|
|
3001
|
-
}
|
|
3002
|
-
|
|
3003
|
-
// @index directives
|
|
3004
|
-
for (const d of norm.directives) {
|
|
3005
|
-
if (d.name !== 'index') continue;
|
|
3006
|
-
const ixArgs = d.args?.[0] || {};
|
|
3007
|
-
const fields = (ixArgs.fields || []).map(__schemaSnake);
|
|
3008
|
-
if (!fields.length) continue;
|
|
3009
|
-
const u = ixArgs.unique ? 'UNIQUE ' : '';
|
|
3010
|
-
indexes.push('CREATE ' + u + 'INDEX idx_' + table + '_' + fields.join('_') + ' ON ' + table + ' (' + fields.map(f => '"' + f + '"').join(', ') + ');');
|
|
3011
|
-
}
|
|
3012
|
-
|
|
3013
|
-
blocks.push('CREATE SEQUENCE ' + seq + ' START ' + idStart + ';');
|
|
3014
|
-
blocks.push('CREATE TABLE ' + table + ' (\\n' + columns.join(',\\n') + '\\n);');
|
|
3015
|
-
if (indexes.length) blocks.push(indexes.join('\\n'));
|
|
3016
|
-
|
|
3017
|
-
return blocks.join('\\n\\n') + '\\n';
|
|
3018
|
-
}
|
|
3019
|
-
|
|
3020
|
-
function __schemaColumnDDL(name, field) {
|
|
3021
|
-
let base = __SCHEMA_SQL_TYPES[field.typeName] || 'VARCHAR';
|
|
3022
|
-
if (field.array) base = 'JSON';
|
|
3023
|
-
if (base === 'VARCHAR' && field.constraints?.max != null) {
|
|
3024
|
-
base = 'VARCHAR(' + field.constraints.max + ')';
|
|
3025
|
-
}
|
|
3026
|
-
const parts = [' ' + __schemaSnake(name) + ' ' + base];
|
|
3027
|
-
if (field.required) parts.push('NOT NULL');
|
|
3028
|
-
if (field.unique) parts.push('UNIQUE');
|
|
3029
|
-
if (field.constraints?.default !== undefined) {
|
|
3030
|
-
parts.push('DEFAULT ' + __schemaSQLDefault(field.constraints.default));
|
|
3031
|
-
}
|
|
3032
|
-
return parts.join(' ');
|
|
3033
|
-
}
|
|
3034
|
-
|
|
3035
|
-
function __schemaSQLDefault(v) {
|
|
3036
|
-
if (v === true) return 'true';
|
|
3037
|
-
if (v === false) return 'false';
|
|
3038
|
-
if (v === null) return 'NULL';
|
|
3039
|
-
if (typeof v === 'number') return String(v);
|
|
3040
|
-
if (typeof v === 'string') return "'" + v.replace(/'/g, "''") + "'";
|
|
3041
|
-
return "'" + String(v).replace(/'/g, "''") + "'";
|
|
3042
|
-
}
|
|
3043
|
-
|
|
3044
|
-
function __schema(descriptor) {
|
|
3045
|
-
const def = new __SchemaDef(descriptor);
|
|
3046
|
-
// Every user-declared named schema lands in the registry so
|
|
3047
|
-
// nested-typed fields (address! Address, items! OrderItem[],
|
|
3048
|
-
// role! Role) can resolve their type reference at validate time.
|
|
3049
|
-
// Algebra-derived schemas (.pick/.omit/.partial/…) bypass this
|
|
3050
|
-
// factory so their synthetic names don't shadow the source.
|
|
3051
|
-
if (def.name) __SchemaRegistry.register(def);
|
|
3052
|
-
return def;
|
|
3053
|
-
}
|
|
3054
|
-
|
|
3055
|
-
const exports = {
|
|
3056
|
-
__schema, SchemaError, __SchemaRegistry, __schemaSetAdapter,
|
|
3057
|
-
__version: ${SCHEMA_RUNTIME_ABI_VERSION},
|
|
3058
|
-
};
|
|
3059
|
-
if (typeof globalThis !== 'undefined') globalThis.__ripSchema = exports;
|
|
3060
|
-
return exports;
|
|
3061
|
-
})();
|
|
3062
|
-
|
|
3063
|
-
// === End Schema Runtime ===
|
|
3064
|
-
`;
|
|
3065
|
-
|
|
3066
|
-
function getSchemaRuntime() {
|
|
3067
|
-
return SCHEMA_RUNTIME.trimStart();
|
|
3068
|
-
}
|
|
3069
|
-
|
|
3070
|
-
// ============================================================================
|
|
3071
|
-
// Shadow TypeScript — Phase 3.5
|
|
3072
|
-
// ============================================================================
|
|
3073
|
-
//
|
|
3074
|
-
// Emits virtual `.d.ts` / `.ts` declarations for :input, :shape, and :enum
|
|
3075
|
-
// schemas so the TS language service can offer autocomplete and catch
|
|
3076
|
-
// AST-shape mistakes before Phase 4 layers in :model/ORM/algebra. Written
|
|
3077
|
-
// to mirror `emitComponentTypes()` in src/types.js — same prototype:
|
|
3078
|
-
// `emitSchemaTypes(sexpr, lines)` returns true when any schema declaration
|
|
3079
|
-
// was found (drives preamble injection), mutates `lines` with declarations.
|
|
3080
|
-
//
|
|
3081
|
-
// Type surface (locked with peer AI):
|
|
3082
|
-
//
|
|
3083
|
-
// interface Schema<T> {
|
|
3084
|
-
// parse(data: unknown): T;
|
|
3085
|
-
// safe(data: unknown): SchemaSafeResult<T>;
|
|
3086
|
-
// ok(data: unknown): boolean;
|
|
3087
|
-
// }
|
|
3088
|
-
//
|
|
3089
|
-
// `:input` emits declare const Foo: Schema<FooValue>;
|
|
3090
|
-
// `:shape` emits declare const Foo: Schema<FooInstance>; where
|
|
3091
|
-
// FooInstance = FooData & {methods/readonly getters}.
|
|
3092
|
-
// `:enum` emits declare const Role: { parse(...): Role; ok(d): d is Role; ... }
|
|
3093
|
-
//
|
|
3094
|
-
// Methods are typed `(...args: any[]) => unknown`. Computed are
|
|
3095
|
-
// `readonly name: unknown`. Body inference is out of scope for 3.5.
|
|
3096
|
-
|
|
3097
|
-
export const SCHEMA_INTRINSIC_DECLS = [
|
|
3098
|
-
'interface SchemaIssue { field: string; error: string; message: string; }',
|
|
3099
|
-
'type SchemaSafeResult<T> = { ok: true; value: T; errors: null } | { ok: false; value: null; errors: SchemaIssue[] };',
|
|
3100
|
-
// Base Schema interface. `Out` is the parsed value type; `In` is the
|
|
3101
|
-
// data shape (defaults to unknown). Algebra methods are parameterized
|
|
3102
|
-
// over `In` so chained operations on a typed :shape or :model derive
|
|
3103
|
-
// correctly; when `In` defaults to unknown, `keyof In` is `never` and
|
|
3104
|
-
// algebra methods don't autocomplete — which is the right behavior
|
|
3105
|
-
// for :input schemas where the input shape isn't statically known.
|
|
3106
|
-
'interface Schema<Out, In = unknown> {',
|
|
3107
|
-
' parse(data: In): Out;',
|
|
3108
|
-
' safe(data: In): SchemaSafeResult<Out>;',
|
|
3109
|
-
' ok(data: unknown): boolean;',
|
|
3110
|
-
' pick<K extends keyof In>(...keys: K[]): Schema<Pick<In, K>, Pick<In, K>>;',
|
|
3111
|
-
' omit<K extends keyof In>(...keys: K[]): Schema<Omit<In, K>, Omit<In, K>>;',
|
|
3112
|
-
' partial(): Schema<Partial<In>, Partial<In>>;',
|
|
3113
|
-
' required<K extends keyof In>(...keys: K[]): Schema<Omit<In, K> & Required<Pick<In, K>>, Omit<In, K> & Required<Pick<In, K>>>;',
|
|
3114
|
-
' extend<U>(other: Schema<U>): Schema<In & U, In & U>;',
|
|
3115
|
-
'}',
|
|
3116
|
-
// Chainable query builder for :model.
|
|
3117
|
-
'interface SchemaQuery<T> {',
|
|
3118
|
-
' all(): Promise<T[]>;',
|
|
3119
|
-
' first(): Promise<T | null>;',
|
|
3120
|
-
' count(): Promise<number>;',
|
|
3121
|
-
' limit(n: number): SchemaQuery<T>;',
|
|
3122
|
-
' offset(n: number): SchemaQuery<T>;',
|
|
3123
|
-
' order(spec: string): SchemaQuery<T>;',
|
|
3124
|
-
'}',
|
|
3125
|
-
// ModelSchema extends the base schema surface with ORM methods. Algebra
|
|
3126
|
-
// over `Data` (not `Instance`) so derived shapes reflect runtime
|
|
3127
|
-
// behavior-dropping semantics.
|
|
3128
|
-
'interface ModelSchema<Instance, Data = unknown> extends Schema<Instance, Data> {',
|
|
3129
|
-
' find(id: unknown): Promise<Instance | null>;',
|
|
3130
|
-
' findMany(ids: unknown[]): Promise<Instance[]>;',
|
|
3131
|
-
' where(cond: Record<string, unknown> | string, ...params: unknown[]): SchemaQuery<Instance>;',
|
|
3132
|
-
' all(limit?: number): Promise<Instance[]>;',
|
|
3133
|
-
' first(): Promise<Instance | null>;',
|
|
3134
|
-
' count(cond?: Record<string, unknown>): Promise<number>;',
|
|
3135
|
-
' create(data: Partial<Data>): Promise<Instance>;',
|
|
3136
|
-
' toSQL(options?: { dropFirst?: boolean; header?: string; idStart?: number }): string;',
|
|
3137
|
-
'}',
|
|
3138
|
-
];
|
|
3139
|
-
|
|
3140
|
-
const RIP_TYPE_TO_TS = {
|
|
3141
|
-
string: 'string',
|
|
3142
|
-
text: 'string',
|
|
3143
|
-
email: 'string',
|
|
3144
|
-
url: 'string',
|
|
3145
|
-
uuid: 'string',
|
|
3146
|
-
phone: 'string',
|
|
3147
|
-
zip: 'string',
|
|
3148
|
-
number: 'number',
|
|
3149
|
-
integer: 'number',
|
|
3150
|
-
boolean: 'boolean',
|
|
3151
|
-
date: 'Date',
|
|
3152
|
-
datetime: 'Date',
|
|
3153
|
-
json: 'unknown',
|
|
3154
|
-
any: 'any',
|
|
3155
|
-
};
|
|
3156
|
-
|
|
3157
|
-
function mapFieldType(entry) {
|
|
3158
|
-
if (entry.typeName === 'literal-union' && entry.literals?.length) {
|
|
3159
|
-
return entry.literals.map(l => JSON.stringify(l)).join(' | ');
|
|
3160
|
-
}
|
|
3161
|
-
let base = RIP_TYPE_TO_TS[entry.typeName] ?? entry.typeName;
|
|
3162
|
-
return entry.array ? `${base}[]` : base;
|
|
3163
|
-
}
|
|
3164
|
-
|
|
3165
|
-
// Extract descriptor from a SCHEMA_BODY s-expr node. Grammar reduces
|
|
3166
|
-
// `['schema', SCHEMA_BODY_VAL]` where the value is the String wrapper
|
|
3167
|
-
// carrying `.descriptor` via the metadata bridge.
|
|
3168
|
-
function descriptorFromSchemaNode(schemaNode) {
|
|
3169
|
-
if (!Array.isArray(schemaNode)) return null;
|
|
3170
|
-
let head = schemaNode[0]?.valueOf?.() ?? schemaNode[0];
|
|
3171
|
-
if (head !== 'schema') return null;
|
|
3172
|
-
let body = schemaNode[1];
|
|
3173
|
-
if (!body || typeof body !== 'object') return null;
|
|
3174
|
-
if (body.descriptor) return body.descriptor;
|
|
3175
|
-
if (body.data?.descriptor) return body.data.descriptor;
|
|
3176
|
-
return null;
|
|
3177
|
-
}
|
|
3178
|
-
|
|
3179
|
-
// Walk the parsed s-expression collecting every named schema declaration.
|
|
3180
|
-
// Mixins are emitted first so subsequent :shape/:model type aliases can
|
|
3181
|
-
// reference them in `& Timestamps`-style intersections. Within a group,
|
|
3182
|
-
// source order is preserved. Returns true when at least one schema was
|
|
3183
|
-
// found (drives intrinsic preamble injection).
|
|
3184
|
-
export function emitSchemaTypes(sexpr, lines) {
|
|
3185
|
-
const collected = [];
|
|
3186
|
-
collectSchemas(sexpr, collected);
|
|
3187
|
-
if (!collected.length) return false;
|
|
3188
|
-
|
|
3189
|
-
// Set of locally-known schema names (for relation-accessor type
|
|
3190
|
-
// resolution — same-file targets get typed, unknown targets degrade).
|
|
3191
|
-
const known = new Set(collected.map(c => c.name));
|
|
3192
|
-
const byName = new Map(collected.map(c => [c.name, c]));
|
|
3193
|
-
|
|
3194
|
-
// Mixin types first so type aliases down-file can reference them.
|
|
3195
|
-
for (const c of collected) {
|
|
3196
|
-
if (c.descriptor.kind === 'mixin') emitOneSchemaType(c, byName, known, lines);
|
|
3197
|
-
}
|
|
3198
|
-
for (const c of collected) {
|
|
3199
|
-
if (c.descriptor.kind !== 'mixin') emitOneSchemaType(c, byName, known, lines);
|
|
3200
|
-
}
|
|
3201
|
-
return true;
|
|
3202
|
-
}
|
|
3203
|
-
|
|
3204
|
-
function collectSchemas(sexpr, out) {
|
|
3205
|
-
if (!Array.isArray(sexpr)) return;
|
|
3206
|
-
const head = sexpr[0]?.valueOf?.() ?? sexpr[0];
|
|
3207
|
-
let exported = false;
|
|
3208
|
-
let assignNode = null;
|
|
3209
|
-
if (head === 'export' && Array.isArray(sexpr[1])) {
|
|
3210
|
-
const inner = sexpr[1];
|
|
3211
|
-
const innerHead = inner[0]?.valueOf?.() ?? inner[0];
|
|
3212
|
-
if (innerHead === '=') { exported = true; assignNode = inner; }
|
|
3213
|
-
else collectSchemas(sexpr[1], out);
|
|
3214
|
-
} else if (head === '=') {
|
|
3215
|
-
assignNode = sexpr;
|
|
3216
|
-
} else if (head === 'program' || head === 'block') {
|
|
3217
|
-
for (let i = 1; i < sexpr.length; i++) {
|
|
3218
|
-
if (Array.isArray(sexpr[i])) collectSchemas(sexpr[i], out);
|
|
3219
|
-
}
|
|
3220
|
-
}
|
|
3221
|
-
if (assignNode && Array.isArray(assignNode[2])) {
|
|
3222
|
-
const name = assignNode[1]?.valueOf?.() ?? assignNode[1];
|
|
3223
|
-
const descriptor = descriptorFromSchemaNode(assignNode[2]);
|
|
3224
|
-
if (typeof name === 'string' && descriptor) {
|
|
3225
|
-
out.push({ name, descriptor, exported });
|
|
3226
|
-
}
|
|
3227
|
-
}
|
|
3228
|
-
}
|
|
3229
|
-
|
|
3230
|
-
// Emit TypeScript declaration lines for one collected schema into `lines`.
// The shape of the output depends on descriptor.kind:
//   enum  — literal-union type alias plus a validator const
//   mixin — field-only type alias (declaration-time only, no value)
//   model — Data/Instance type pair plus a ModelSchema const
//   shape — Data type (+ Instance type when it has behavior) plus Schema const
//   other — treated as :input, a Value alias plus Schema const
// `byName` resolves mixin targets declared in this file; `known` is the
// set of same-file schema names used for relation accessor typing.
function emitOneSchemaType(collected, byName, known, lines) {
  const { name, descriptor, exported } = collected;
  const exp = exported ? 'export ' : '';
  const decl = exported ? '' : 'declare ';
  const entries = descriptor.entries;

  if (descriptor.kind === 'enum') {
    const literals = entries
      .filter((m) => m.tag === 'enum-member')
      .map((m) => {
        // Explicit value wins over the member name; strings are quoted,
        // everything else stringified as a literal.
        const raw = m.value !== undefined ? m.value : m.name;
        return typeof raw === 'string' ? JSON.stringify(raw) : String(raw);
      });
    const union = literals.length > 0 ? literals.join(' | ') : 'never';
    lines.push(`${exp}type ${name} = ${union};`);
    lines.push(`${exp}${decl}const ${name}: { parse(data: unknown): ${name}; safe(data: unknown): SchemaSafeResult<${name}>; ok(data: unknown): data is ${name}; };`);
    return;
  }

  if (descriptor.kind === 'mixin') {
    // :mixin is declaration-time-only; expose it as a field type alias
    // so hosts that `@mixin Foo` can intersect it into their Data type.
    // No value declaration — mixins aren't user-facing runtime values.
    lines.push(`${exp}type ${name} = { ${fieldPropList(descriptor).join('; ')} };`);
    return;
  }

  const props = fieldPropList(descriptor);
  const mixinRefs = mixinIntersections(descriptor, byName);
  // hooks are intentionally omitted — they fire automatically and
  // shouldn't appear in autocomplete.
  const methodSigs = entries
    .filter((m) => m.tag === 'method')
    .map((m) => `${m.name}: (...args: any[]) => unknown`);
  const computedSigs = entries
    .filter((m) => m.tag === 'computed')
    .map((m) => `readonly ${m.name}: unknown`);

  const base = `{ ${props.join('; ')} }`;
  const dataType = mixinRefs.length > 0 ? `${base} & ${mixinRefs.join(' & ')}` : base;

  if (descriptor.kind === 'model') {
    const dataName = `${name}Data`;
    const instName = `${name}Instance`;
    const extras = [
      ...computedSigs,
      ...methodSigs,
      ...modelRelationAccessors(descriptor, known),
      `save(): Promise<${instName}>`,
      `destroy(): Promise<${instName}>`,
      `ok(): boolean`,
      `errors(): SchemaIssue[]`,
      `toJSON(): ${dataName}`,
    ];
    lines.push(`${exp}type ${dataName} = ${dataType};`);
    lines.push(`${exp}type ${instName} = ${dataName} & { ${extras.join('; ')} };`);
    lines.push(`${exp}${decl}const ${name}: ModelSchema<${instName}, ${dataName}>;`);
    return;
  }

  if (descriptor.kind === 'shape') {
    const dataName = `${name}Data`;
    lines.push(`${exp}type ${dataName} = ${dataType};`);
    const behavior = [...computedSigs, ...methodSigs];
    if (behavior.length > 0) {
      const instName = `${name}Instance`;
      lines.push(`${exp}type ${instName} = ${dataName} & { ${behavior.join('; ')} };`);
      lines.push(`${exp}${decl}const ${name}: Schema<${instName}, ${dataName}>;`);
    } else {
      lines.push(`${exp}${decl}const ${name}: Schema<${dataName}, ${dataName}>;`);
    }
    return;
  }

  // :input — parse returns the Data shape directly (no behavior).
  const valueName = `${name}Value`;
  lines.push(`${exp}type ${valueName} = ${dataType};`);
  lines.push(`${exp}${decl}const ${name}: Schema<${valueName}, ${valueName}>;`);
}
// Return an array of mixin type-reference strings for `& Foo & Bar` joins.
// Only `@mixin` directives whose target resolves (via byName) to a schema
// collected in this file with kind 'mixin' are included — unresolved or
// non-mixin targets are silently dropped.
function mixinIntersections(descriptor, byName) {
  const refs = [];
  for (const entry of descriptor.entries) {
    if (entry.tag !== 'directive' || entry.name !== 'mixin') continue;
    const target = entry.args?.[0]?.target;
    if (!target) continue;
    const resolved = byName?.get(target);
    if (resolved && resolved.descriptor.kind === 'mixin') {
      refs.push(target);
    }
  }
  return refs;
}
// Emit relation accessor type declarations for :model instances. For
// targets declared in the same file we emit a typed Promise; for
// unknown (cross-file) targets we degrade to `Promise<unknown>` rather
// than emit an unresolved bare name.
//
// @param descriptor schema descriptor whose entries may hold relation
//                   directives (belongs_to / has_one / one / has_many / many)
// @param known      set of schema names declared in this file (`.has`)
// @returns string[] TS property-signature fragments, e.g.
//                   `user(): Promise<UserInstance | null>`
function modelRelationAccessors(descriptor, known) {
  const out = [];
  for (const e of descriptor.entries) {
    if (e.tag !== 'directive') continue;
    const args = e.args;
    if (!args || !args[0]) continue;
    const target = args[0].target;
    if (!target) continue;
    // NOTE(review): `args[0].optional` previously fed a ternary whose two
    // branches were identical, so it never affected the emitted type;
    // the dead conditional was removed without behavior change. If `?`
    // relations were meant to alter the accessor's return type, that fix
    // belongs here — confirm against the runtime ORM semantics.
    const targetLc = target[0].toLowerCase() + target.slice(1);
    const instName = `${target}Instance`;
    const isKnown = known && known.has(target);
    if (e.name === 'belongs_to' || e.name === 'has_one' || e.name === 'one') {
      // Singular relations resolve to a single instance or null.
      const retT = isKnown ? `${instName} | null` : 'unknown';
      out.push(`${targetLc}(): Promise<${retT}>`);
    } else if (e.name === 'has_many' || e.name === 'many') {
      // Plural relations resolve to an array; accessor name is pluralized.
      const retT = isKnown ? `${instName}[]` : 'unknown[]';
      out.push(`${__schemaClientPluralize(targetLc)}(): Promise<${retT}>`);
    }
  }
  return out;
}
// Minimal pluralizer for accessor names. Keep in sync with the runtime
// __schemaPluralize rules (same surface for declaration parity).
// Rules: consonant+y → ies; s/x/z/ch/sh → +es; otherwise → +s.
// (The unused lowercased copy of the input was removed — both regexes
// already match case-insensitively via the /i flag.)
function __schemaClientPluralize(w) {
  if (/[^aeiouy]y$/i.test(w)) return w.slice(0, -1) + 'ies';
  if (/(s|x|z|ch|sh)$/i.test(w)) return w + 'es';
  return w + 's';
}
// Build TS property signatures for a descriptor's `field` entries.
// A field is required (no `?`) only when its modifiers include `!`;
// the property's type comes from mapFieldType. Non-field entries
// (methods, computed, directives, hooks) are ignored.
function fieldPropList(descriptor) {
  return descriptor.entries
    .filter((entry) => entry.tag === 'field')
    .map((entry) => {
      const optionalMark = entry.modifiers.includes('!') ? '' : '?';
      return `${entry.name}${optionalMark}: ${mapFieldType(entry)}`;
    });
}
// Eagerly install the runtime on globalThis at module load so downstream
// compilation units emitted with `skipRuntimes: true` (a common test-harness
// setting) can pick up `{__schema, SchemaError}` without a separate bootstrap
// step. The same pattern is used by the reactive and component runtimes.
//
// `(0, eval)` is an *indirect* eval, which evaluates SCHEMA_RUNTIME in the
// global scope rather than this module's scope, so the runtime's
// declarations attach to globalThis instead of leaking into (or reading
// from) module locals. The `__ripSchema` guard makes the install
// idempotent across multiple module loads, and the empty catch keeps it
// best-effort — e.g. under a CSP that forbids eval, load proceeds and the
// runtime is presumably installed some other way (TODO confirm fallback).
if (typeof globalThis !== 'undefined' && !globalThis.__ripSchema) {
  try { (0, eval)(SCHEMA_RUNTIME); } catch {}
}

// SCHEMA_RUNTIME (defined earlier in this file) is exported so emitters
// can inline the runtime source into compiled output.
export { SCHEMA_RUNTIME };