@sqldoc/core 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +33 -0
- package/src/__tests__/ast/sqlparser-ts.test.ts +117 -0
- package/src/__tests__/blocks.test.ts +80 -0
- package/src/__tests__/compile.test.ts +407 -0
- package/src/__tests__/compiler/compile.test.ts +363 -0
- package/src/__tests__/lint-rules.test.ts +249 -0
- package/src/__tests__/lint.test.ts +270 -0
- package/src/__tests__/parser.test.ts +169 -0
- package/src/__tests__/tags.sql +15 -0
- package/src/__tests__/validator.test.ts +210 -0
- package/src/ast/adapter.ts +10 -0
- package/src/ast/index.ts +3 -0
- package/src/ast/sqlparser-ts.ts +218 -0
- package/src/ast/types.ts +28 -0
- package/src/blocks.ts +242 -0
- package/src/compiler/compile.ts +783 -0
- package/src/compiler/config.ts +102 -0
- package/src/compiler/index.ts +29 -0
- package/src/compiler/types.ts +320 -0
- package/src/index.ts +72 -0
- package/src/lint.ts +127 -0
- package/src/loader.ts +102 -0
- package/src/parser.ts +202 -0
- package/src/ts-import.ts +70 -0
- package/src/types.ts +111 -0
- package/src/utils.ts +31 -0
- package/src/validator.ts +324 -0
|
@@ -0,0 +1,783 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Core compiler pipeline: parse tags, build context, invoke generators.
|
|
3
|
+
*
|
|
4
|
+
* compile() takes pre-parsed inputs (source, plugins, statements, config)
|
|
5
|
+
* and orchestrates generateSQL/generateCode for each tag occurrence.
|
|
6
|
+
*
|
|
7
|
+
* Supports two compilation tiers:
|
|
8
|
+
* - Tier 1 (no Atlas): Uses block resolution from sqlparser-ts (VSCode, validation)
|
|
9
|
+
* - Tier 2 (Atlas): Uses Atlas-parsed schema with tags already matched to objects
|
|
10
|
+
*/
|
|
11
|
+
|
|
12
|
+
import type { SqlAstAdapter } from '../ast/adapter.ts'
|
|
13
|
+
import type { SqlCommentOn, SqlStatement } from '../ast/types.ts'
|
|
14
|
+
import type { TagBlock } from '../blocks.ts'
|
|
15
|
+
import { buildBlocks } from '../blocks.ts'
|
|
16
|
+
import { parse, parseArgs } from '../parser.ts'
|
|
17
|
+
import type { SqlTarget } from '../types.ts'
|
|
18
|
+
import type {
|
|
19
|
+
CodeOutput,
|
|
20
|
+
CompilerOutput,
|
|
21
|
+
DocsMeta,
|
|
22
|
+
NamespaceConfig,
|
|
23
|
+
NamespacePlugin,
|
|
24
|
+
ResolvedConfig,
|
|
25
|
+
SqlOutput,
|
|
26
|
+
TagContext,
|
|
27
|
+
TagOutput,
|
|
28
|
+
} from './types.ts'
|
|
29
|
+
|
|
30
|
+
// ── Internal Atlas types (mirrors @sqldoc/atlas without importing) ────
|
|
31
|
+
// Core must NOT depend on @sqldoc/atlas. These mirror the shapes for internal use.
|
|
32
|
+
// Schema fields use lowercase (matching marshal.go json tags).
|
|
33
|
+
// Attr variants (Tag, Comment, Check) use PascalCase (map[string]string in Go).
|
|
34
|
+
|
|
35
|
+
/** Mirrors Atlas's top-level realm: all schemas plus realm-level attrs. */
interface InternalAtlasRealm {
  schemas: InternalAtlasSchema[]
  attrs?: InternalAtlasAttr[]
}

/** One database schema as marshalled by Atlas (lowercase JSON field names, per marshal.go). */
interface InternalAtlasSchema {
  name: string
  tables?: InternalAtlasTable[]
  views?: InternalAtlasView[]
  attrs?: InternalAtlasAttr[]
}

/** A table with its columns; sqldoc tags live inside `attrs`. */
interface InternalAtlasTable {
  name: string
  columns?: InternalAtlasColumn[]
  attrs?: InternalAtlasAttr[]
}

/** A view; structurally identical to a table for tag-processing purposes here. */
interface InternalAtlasView {
  name: string
  columns?: InternalAtlasColumn[]
  attrs?: InternalAtlasAttr[]
}

/** A column; `type.raw` is preferred over `type.T` when deriving columnType. */
interface InternalAtlasColumn {
  name: string
  type?: { raw?: string; null?: boolean; T?: string }
  attrs?: InternalAtlasAttr[]
}

/** An attr is either a tag ({ Name, Args }, PascalCase from Go map[string]string) or some other Atlas attr shape. */
type InternalAtlasAttr = { Name: string; Args: string } | Record<string, unknown>
|
|
66
|
+
|
|
67
|
+
// ── Public API ───────────────────────────────────────────────────────
|
|
68
|
+
|
|
69
|
+
/**
 * Inputs to compile(). The caller does all up-front work: loading plugins,
 * parsing the SQL into statements via the adapter, and resolving config.
 */
export interface CompileOptions {
  /** SQL file content */
  source: string
  /** SQL file path (for import resolution) */
  filePath: string
  /** Loaded namespace plugins (already resolved, keyed by namespace name) */
  plugins: Map<string, NamespacePlugin>
  /** Parsed SQL statements from AST adapter */
  statements: SqlStatement[]
  /** The AST adapter instance (initialized) */
  adapter: SqlAstAdapter
  /** Project config */
  config: ResolvedConfig
  /** Atlas-parsed schema realm (Tier 2). When provided, uses Atlas tag-to-object matching instead of block resolution. */
  atlasRealm?: unknown
}
|
|
85
|
+
|
|
86
|
+
export function compile(options: CompileOptions): CompilerOutput {
|
|
87
|
+
const { source, filePath, plugins, statements, config, adapter, atlasRealm } = options
|
|
88
|
+
|
|
89
|
+
// Tier 2: Atlas realm provided — use Atlas tag-to-object matching
|
|
90
|
+
if (atlasRealm) {
|
|
91
|
+
const result = compileAtlas(
|
|
92
|
+
atlasRealm as InternalAtlasRealm,
|
|
93
|
+
filePath,
|
|
94
|
+
plugins,
|
|
95
|
+
statements,
|
|
96
|
+
config,
|
|
97
|
+
source,
|
|
98
|
+
adapter,
|
|
99
|
+
)
|
|
100
|
+
// Merge parser-derived tags that Atlas doesn't see (e.g. @lint.ignore)
|
|
101
|
+
// These tags have no SQL output so Atlas never encounters them
|
|
102
|
+
const tags = parse(source).tags
|
|
103
|
+
if (tags.length > 0) {
|
|
104
|
+
const docLines = source.split('\n')
|
|
105
|
+
const blocks = buildBlocks(tags, source, docLines, statements)
|
|
106
|
+
const parserFileTags = buildFileTags(blocks)
|
|
107
|
+
mergeParserTags(result.fileTags, parserFileTags)
|
|
108
|
+
}
|
|
109
|
+
return result
|
|
110
|
+
}
|
|
111
|
+
|
|
112
|
+
// Tier 1: No Atlas realm — use block resolution
|
|
113
|
+
return compileTier1(source, filePath, plugins, statements, config, adapter)
|
|
114
|
+
}
|
|
115
|
+
|
|
116
|
+
// ── Tier 1: Block resolution (existing logic) ─────────────────────────
|
|
117
|
+
|
|
118
|
+
/**
 * Tier 1 compilation: resolve tags to SQL objects purely from the parsed
 * source (block resolution, no Atlas schema).
 *
 * Accumulates plugin results into local arrays and returns a CompilerOutput
 * whose mergedSql is the source with generated SQL appended.
 */
function compileTier1(
  source: string,
  filePath: string,
  plugins: Map<string, NamespacePlugin>,
  statements: SqlStatement[],
  config: ResolvedConfig,
  adapter: SqlAstAdapter,
): CompilerOutput {
  const sqlOutputs: SqlOutput[] = []
  const codeOutputs: CodeOutput[] = []
  const docsMeta: DocsMeta[] = []
  const errors: Array<{ namespace: string; message: string }> = []

  // 1. Parse source to extract tags
  const { tags } = parse(source)
  if (tags.length === 0) {
    // Fast path: no tags means the source passes through unchanged.
    return { sourceFile: filePath, mergedSql: source, sqlOutputs, codeOutputs, errors, docsMeta, fileTags: [] }
  }

  // 2. Build tag blocks (group consecutive comment-line tags, resolve to SQL object)
  const docLines = source.split('\n')
  const blocks = buildBlocks(tags, source, docLines, statements)

  // 3. Build file-level tag summary (all tags across file grouped by object)
  const fileTags = buildFileTags(blocks)

  // 4. Process each block
  for (const block of blocks) {
    const { objectName, target, columnName, columnType, astNode } = block.ast

    // Group tags by namespace within this block
    for (const tag of block.tags) {
      const plugin = plugins.get(tag.namespace)
      if (!plugin) continue
      // onTag is the current hook name; generateSQL is the legacy alias.
      const tagHandler = plugin.onTag ?? plugin.generateSQL
      if (!tagHandler && !plugin.generateCode) continue

      // Build namespaceTags: all tags from same namespace on same object
      const namespaceTags = block.tags
        .filter((t) => t.namespace === tag.namespace)
        .map((t) => ({
          tag: t.tag,
          args: parsedArgsToValue(t.rawArgs),
        }))

      // Build siblingTags: all tags from ALL namespaces on same object
      const siblingTags = block.tags.map((t) => ({
        namespace: t.namespace,
        tag: t.tag,
        args: parsedArgsToValue(t.rawArgs),
      }))

      const ctx: TagContext = {
        target,
        objectName: objectName ?? 'unknown',
        columnName,
        columnType,
        tag: {
          name: tag.tag,
          args: parsedArgsToValue(tag.rawArgs),
        },
        namespaceTags,
        siblingTags,
        fileTags,
        astNode: astNode ?? null,
        fileStatements: statements,
        // Per-namespace config slice; empty object when not configured.
        config: (config.namespaces?.[tag.namespace] ?? {}) as NamespaceConfig,
        filePath,
      }

      invokePlugin(plugin, tagHandler, ctx, tag, sqlOutputs, codeOutputs, docsMeta, errors)
    }
  }

  // 5. Build merged SQL: original source + generated SQL appended
  const mergedSql = buildMergedOutput(source, sqlOutputs, adapter)

  return { sourceFile: filePath, mergedSql, sqlOutputs, codeOutputs, errors, docsMeta, fileTags }
}
|
|
197
|
+
|
|
198
|
+
// ── Tier 2: Atlas realm compilation ───────────────────────────────────
|
|
199
|
+
|
|
200
|
+
/**
 * Tier 2 compilation: walk the Atlas-parsed realm (schemas -> tables/views)
 * and invoke plugins for the tags Atlas attached to each object/column.
 * The original SQL source is still needed to produce the merged output.
 */
function compileAtlas(
  realm: InternalAtlasRealm,
  filePath: string,
  plugins: Map<string, NamespacePlugin>,
  statements: SqlStatement[],
  config: ResolvedConfig,
  source: string,
  adapter: SqlAstAdapter,
): CompilerOutput {
  const sqlOutputs: SqlOutput[] = []
  const codeOutputs: CodeOutput[] = []
  const docsMeta: DocsMeta[] = []
  const errors: Array<{ namespace: string; message: string }> = []

  // Collect all tag occurrences across the realm for fileTags building
  const allTagOccurrences: Array<{
    objectName: string
    target: SqlTarget
    namespace: string
    tag: string | null
    args: Record<string, unknown> | unknown[]
  }> = []

  for (const schema of realm.schemas) {
    // Process tables
    if (schema.tables) {
      for (const table of schema.tables) {
        processAtlasObject(
          table,
          'table',
          table.name,
          realm,
          filePath,
          plugins,
          statements,
          config,
          sqlOutputs,
          codeOutputs,
          docsMeta,
          errors,
          allTagOccurrences,
        )
      }
    }

    // Process views
    if (schema.views) {
      for (const view of schema.views) {
        processAtlasObject(
          view,
          'view',
          view.name,
          realm,
          filePath,
          plugins,
          statements,
          config,
          sqlOutputs,
          codeOutputs,
          docsMeta,
          errors,
          allTagOccurrences,
        )
      }
    }
  }

  // Build fileTags from collected tag occurrences
  const fileTags = buildAtlasFileTags(allTagOccurrences)

  // Build merged SQL output
  const mergedSql = buildMergedOutput(source, sqlOutputs, adapter)

  return { sourceFile: filePath, mergedSql, sqlOutputs, codeOutputs, errors, docsMeta, fileTags }
}
|
|
275
|
+
|
|
276
|
+
/**
 * Process a single Atlas object (table or view) and its columns for tag invocation.
 *
 * Object-level and column-level tag attrs are gathered up-front so every plugin
 * call sees the complete namespace/sibling tag picture; each tag is then
 * dispatched through invokePlugin. Results accumulate into the shared
 * sqlOutputs/codeOutputs/docsMeta/errors arrays, and each occurrence is
 * recorded in allTagOccurrences for the caller's fileTags build.
 */
function processAtlasObject(
  obj: InternalAtlasTable | InternalAtlasView,
  target: SqlTarget,
  objectName: string,
  realm: InternalAtlasRealm,
  filePath: string,
  plugins: Map<string, NamespacePlugin>,
  statements: SqlStatement[],
  config: ResolvedConfig,
  sqlOutputs: SqlOutput[],
  codeOutputs: CodeOutput[],
  docsMeta: DocsMeta[],
  errors: Array<{ namespace: string; message: string }>,
  allTagOccurrences: Array<{
    objectName: string
    target: SqlTarget
    namespace: string
    tag: string | null
    args: Record<string, unknown> | unknown[]
  }>,
): void {
  // Extract tags from object-level attrs
  const objectTags = findAtlasTags(obj.attrs)

  // Collect all tags on this object (object-level + column-level) for siblingTags
  const allObjectTagsParsed = objectTags.map((t) => {
    const split = splitTagName(t.Name)
    return { namespace: split.namespace, tag: split.tag, argsStr: t.Args }
  })

  // Also collect column tags for sibling awareness
  if (obj.columns) {
    for (const col of obj.columns) {
      const colTags = findAtlasTags(col.attrs)
      for (const ct of colTags) {
        const split = splitTagName(ct.Name)
        allObjectTagsParsed.push({ namespace: split.namespace, tag: split.tag, argsStr: ct.Args })
      }
    }
  }

  // Process object-level tags (table/view level)
  for (const atag of objectTags) {
    const { namespace, tag: tagName } = splitTagName(atag.Name)
    const plugin = plugins.get(namespace)
    if (!plugin) continue
    // onTag is the current hook name; generateSQL is the legacy alias.
    const tagHandler = plugin.onTag ?? plugin.generateSQL
    if (!tagHandler && !plugin.generateCode) continue

    const args = parseAtlasArgs(atag.Args)

    // Build namespaceTags: all tags from same namespace on this object
    const namespaceTags = allObjectTagsParsed
      .filter((t) => t.namespace === namespace)
      .map((t) => ({ tag: t.tag, args: parseAtlasArgs(t.argsStr) }))

    // Build siblingTags: all tags from ALL namespaces on this object
    const siblingTags = allObjectTagsParsed.map((t) => ({
      namespace: t.namespace,
      tag: t.tag,
      args: parseAtlasArgs(t.argsStr),
    }))

    // Track for fileTags
    allTagOccurrences.push({ objectName, target, namespace, tag: tagName, args })

    const ctx: TagContext = {
      target,
      objectName,
      tag: { name: tagName, args },
      namespaceTags,
      siblingTags,
      fileTags: [], // Placeholder — NOTE(review): nothing in this module backfills ctx.fileTags; confirm upstream consumers don't rely on it
      astNode: null,
      fileStatements: statements,
      config: (config.namespaces?.[namespace] ?? {}) as NamespaceConfig,
      filePath,
      atlasTable: obj,
      atlasRealm: realm,
    }

    invokePlugin(
      plugin,
      tagHandler,
      ctx,
      { namespace, tag: tagName, rawArgs: atag.Args },
      sqlOutputs,
      codeOutputs,
      docsMeta,
      errors,
    )
  }

  // Process column-level tags
  if (obj.columns) {
    for (const col of obj.columns) {
      const colTags = findAtlasTags(col.attrs)
      for (const atag of colTags) {
        const { namespace, tag: tagName } = splitTagName(atag.Name)
        const plugin = plugins.get(namespace)
        if (!plugin) continue
        const tagHandler = plugin.onTag ?? plugin.generateSQL
        if (!tagHandler && !plugin.generateCode) continue

        const args = parseAtlasArgs(atag.Args)
        // Prefer the raw type string; fall back to Atlas's canonical T.
        const columnType = col.type?.raw ?? col.type?.T

        // Build namespaceTags
        const namespaceTags = allObjectTagsParsed
          .filter((t) => t.namespace === namespace)
          .map((t) => ({ tag: t.tag, args: parseAtlasArgs(t.argsStr) }))

        // Build siblingTags
        const siblingTags = allObjectTagsParsed.map((t) => ({
          namespace: t.namespace,
          tag: t.tag,
          args: parseAtlasArgs(t.argsStr),
        }))

        // Track for fileTags — use table.column as objectName so templates can look up per-column
        allTagOccurrences.push({
          objectName: `${objectName}.${col.name}`,
          target: 'column',
          namespace,
          tag: tagName,
          args,
        })

        const ctx: TagContext = {
          target: 'column',
          objectName,
          columnName: col.name,
          columnType,
          tag: { name: tagName, args },
          namespaceTags,
          siblingTags,
          fileTags: [], // Placeholder — see note on the object-level ctx above... NOTE(review): not backfilled in this module
          astNode: null,
          fileStatements: statements,
          config: (config.namespaces?.[namespace] ?? {}) as NamespaceConfig,
          filePath,
          atlasTable: obj,
          atlasColumn: col,
          atlasRealm: realm,
        }

        invokePlugin(
          plugin,
          tagHandler,
          ctx,
          { namespace, tag: tagName, rawArgs: atag.Args },
          sqlOutputs,
          codeOutputs,
          docsMeta,
          errors,
        )
      }
    }
  }
}
|
|
437
|
+
|
|
438
|
+
// ── Shared plugin invocation ──────────────────────────────────────────
|
|
439
|
+
|
|
440
|
+
function invokePlugin(
|
|
441
|
+
plugin: NamespacePlugin,
|
|
442
|
+
tagHandler: ((ctx: TagContext) => SqlOutput[] | TagOutput | undefined) | undefined,
|
|
443
|
+
ctx: TagContext,
|
|
444
|
+
tag: { namespace: string; tag: string | null; rawArgs: string | null },
|
|
445
|
+
sqlOutputs: SqlOutput[],
|
|
446
|
+
codeOutputs: CodeOutput[],
|
|
447
|
+
docsMeta: DocsMeta[],
|
|
448
|
+
errors: Array<{ namespace: string; message: string }>,
|
|
449
|
+
): void {
|
|
450
|
+
// Call onTag (or legacy generateSQL)
|
|
451
|
+
if (tagHandler) {
|
|
452
|
+
try {
|
|
453
|
+
const result = tagHandler(ctx)
|
|
454
|
+
if (result) {
|
|
455
|
+
const tagLabel = tag.tag
|
|
456
|
+
? `@${tag.namespace}.${tag.tag}${tag.rawArgs ? `(${tag.rawArgs})` : ''}`
|
|
457
|
+
: `@${tag.namespace}${tag.rawArgs ? `(${tag.rawArgs})` : ''}`
|
|
458
|
+
|
|
459
|
+
// Support both SqlOutput[] and TagOutput return types
|
|
460
|
+
const sqlResults = Array.isArray(result) ? result : result.sql
|
|
461
|
+
if (sqlResults && sqlResults.length > 0) {
|
|
462
|
+
for (const out of sqlResults) {
|
|
463
|
+
if (!out.sourceTag) out.sourceTag = tagLabel
|
|
464
|
+
}
|
|
465
|
+
sqlOutputs.push(...sqlResults)
|
|
466
|
+
}
|
|
467
|
+
if (!Array.isArray(result) && result.docs) {
|
|
468
|
+
docsMeta.push(result.docs)
|
|
469
|
+
}
|
|
470
|
+
}
|
|
471
|
+
} catch (err: any) {
|
|
472
|
+
errors.push({
|
|
473
|
+
namespace: tag.namespace,
|
|
474
|
+
message: err?.message ?? String(err),
|
|
475
|
+
})
|
|
476
|
+
}
|
|
477
|
+
}
|
|
478
|
+
|
|
479
|
+
// Call generateCode
|
|
480
|
+
if (plugin.generateCode) {
|
|
481
|
+
try {
|
|
482
|
+
const result = plugin.generateCode(ctx)
|
|
483
|
+
if (result && result.length > 0) {
|
|
484
|
+
codeOutputs.push(...result)
|
|
485
|
+
}
|
|
486
|
+
} catch (err: any) {
|
|
487
|
+
errors.push({
|
|
488
|
+
namespace: tag.namespace,
|
|
489
|
+
message: err?.message ?? String(err),
|
|
490
|
+
})
|
|
491
|
+
}
|
|
492
|
+
}
|
|
493
|
+
}
|
|
494
|
+
|
|
495
|
+
// ── Merged SQL output ─────────────────────────────────────────────────
|
|
496
|
+
|
|
497
|
+
function buildMergedOutput(source: string, sqlOutputs: SqlOutput[], adapter: SqlAstAdapter): string {
|
|
498
|
+
const sourceComments = adapter.parseComments(source)
|
|
499
|
+
const generatedSql = sqlOutputs.map((o) => o.sql).join('\n')
|
|
500
|
+
const generatedComments = generatedSql.trim() ? adapter.parseComments(generatedSql) : []
|
|
501
|
+
return buildMergedSql(source, sqlOutputs, sourceComments, generatedComments)
|
|
502
|
+
}
|
|
503
|
+
|
|
504
|
+
/** Comment out COMMENT ON statements in source SQL using line numbers */
|
|
505
|
+
function commentOutSourceComments(source: string, comments: SqlCommentOn[]): string {
|
|
506
|
+
if (comments.length === 0) return source
|
|
507
|
+
|
|
508
|
+
const lines = source.split('\n')
|
|
509
|
+
const commentedLines = new Set<number>()
|
|
510
|
+
|
|
511
|
+
for (const c of comments) {
|
|
512
|
+
// c.line is 1-based, array is 0-based
|
|
513
|
+
const startLine = c.line - 1
|
|
514
|
+
for (let j = startLine; j < lines.length; j++) {
|
|
515
|
+
commentedLines.add(j)
|
|
516
|
+
if (lines[j].trim().endsWith(';')) break
|
|
517
|
+
}
|
|
518
|
+
}
|
|
519
|
+
|
|
520
|
+
return lines.map((line, i) => (commentedLines.has(i) ? `-- ${line}` : line)).join('\n')
|
|
521
|
+
}
|
|
522
|
+
|
|
523
|
+
/**
 * Assemble the final merged SQL document:
 *  1. consolidate COMMENT ON contents from source and generated SQL per target key,
 *  2. drop COMMENT ON entries from the generated outputs (re-emitted merged below),
 *  3. strip tag/import comments and comment out original COMMENT ON lines,
 *  4. append remaining generated SQL under a "-- Generated by sqldoc" banner,
 *     then the merged COMMENT ON statements.
 *
 * NOTE(review): step 3's filter uses substring matching (output.sql.includes(target));
 * a non-COMMENT output that merely mentions a target key would be dropped — confirm acceptable.
 * NOTE(review): sourceComments line numbers come from the original source, but
 * commentOutSourceComments is fed the stripped source; if stripping removed lines
 * above a COMMENT ON, offsets may be stale — verify against adapter.parseComments.
 */
function buildMergedSql(
  source: string,
  sqlOutputs: SqlOutput[],
  sourceComments: SqlCommentOn[],
  generatedComments: SqlCommentOn[],
): string {
  // 1. Build comment merge map: target -> content[]
  const commentMap = new Map<string, string[]>()
  for (const c of sourceComments) {
    commentMap.set(c.targetKey, [c.content])
  }
  for (const c of generatedComments) {
    const existing = commentMap.get(c.targetKey) ?? []
    existing.push(c.content)
    commentMap.set(c.targetKey, existing)
  }

  // 2. Build set of generated comment targetKeys for filtering
  const generatedTargets = new Set(generatedComments.map((c) => c.targetKey))

  // 3. Filter out COMMENT ON outputs from the generated SQL list
  const otherOutputs = sqlOutputs.filter((output) => {
    // Check if this output matches any generated comment target
    for (const target of generatedTargets) {
      if (output.sql.includes(target)) return false
    }
    return true
  })

  // 4. Strip @tag comments, @import lines, and comment out original COMMENT ON
  const strippedSource = stripTagsAndImports(source)
  const cleanSource = commentOutSourceComments(strippedSource, sourceComments).trimEnd()

  // 5. Build merged COMMENT ON statements
  const mergedComments: string[] = []
  for (const [target, contents] of commentMap) {
    const unique = [...new Set(contents)]
    // Multiple contents are joined with a literal backslash-n inside an
    // E'' escape-string (PostgreSQL-style syntax — confirm target dialect).
    const escaped = unique.map((c) => c.replace(/'/g, "''")).join('\\n')
    const sql =
      unique.length > 1
        ? `COMMENT ON ${target} IS E'${escaped}';`
        : `COMMENT ON ${target} IS '${unique[0].replace(/'/g, "''")}';`
    mergedComments.push(sql)
  }

  // Nothing generated at all: return the source untouched (not the stripped form).
  if (otherOutputs.length === 0 && mergedComments.length === 0) {
    return source
  }

  const parts = [cleanSource]
  parts.push('')
  parts.push('-- Generated by sqldoc')

  // Emit one "-- sqldoc: <tag>" header per run of outputs from the same tag.
  let lastSourceTag = ''
  for (const output of otherOutputs) {
    if (output.sourceTag && output.sourceTag !== lastSourceTag) {
      parts.push(`-- sqldoc: ${output.sourceTag}`)
      lastSourceTag = output.sourceTag
    }
    if (output.comment) {
      parts.push(`-- ${output.comment}`)
    }
    parts.push(output.sql)
  }

  for (const sql of mergedComments) {
    parts.push(sql)
  }

  parts.push('')
  return parts.join('\n')
}
|
|
595
|
+
|
|
596
|
+
// ── Atlas helpers ─────────────────────────────────────────────────────
|
|
597
|
+
|
|
598
|
+
/**
|
|
599
|
+
* Split an Atlas tag Name into namespace and tag name.
|
|
600
|
+
* - "audit.track" -> { namespace: "audit", tag: "track" }
|
|
601
|
+
* - "searchable" -> { namespace: "searchable", tag: null } ($self pattern)
|
|
602
|
+
*/
|
|
603
|
+
function splitTagName(name: string): { namespace: string; tag: string | null } {
|
|
604
|
+
const dotIdx = name.indexOf('.')
|
|
605
|
+
if (dotIdx === -1) return { namespace: name, tag: null }
|
|
606
|
+
return { namespace: name.substring(0, dotIdx), tag: name.substring(dotIdx + 1) }
|
|
607
|
+
}
|
|
608
|
+
|
|
609
|
+
/**
|
|
610
|
+
* Type guard: check if an Atlas attr is a tag (has Name + Args, no Expr).
|
|
611
|
+
* Mirrors isTag from @sqldoc/atlas without importing.
|
|
612
|
+
*/
|
|
613
|
+
function isAtlasTag(attr: InternalAtlasAttr): attr is { Name: string; Args: string } {
|
|
614
|
+
return (
|
|
615
|
+
typeof attr === 'object' &&
|
|
616
|
+
attr !== null &&
|
|
617
|
+
'Name' in attr &&
|
|
618
|
+
typeof (attr as any).Name === 'string' &&
|
|
619
|
+
'Args' in attr &&
|
|
620
|
+
typeof (attr as any).Args === 'string' &&
|
|
621
|
+
!('Expr' in attr)
|
|
622
|
+
)
|
|
623
|
+
}
|
|
624
|
+
|
|
625
|
+
/** Extract all tag attrs from a mixed Attrs array */
|
|
626
|
+
function findAtlasTags(attrs?: InternalAtlasAttr[]): Array<{ Name: string; Args: string }> {
|
|
627
|
+
if (!attrs) return []
|
|
628
|
+
return attrs.filter(isAtlasTag)
|
|
629
|
+
}
|
|
630
|
+
|
|
631
|
+
/** Parse Atlas tag args string into parsed values using the parser's parseArgs */
|
|
632
|
+
function parseAtlasArgs(argsStr: string): Record<string, unknown> | unknown[] {
|
|
633
|
+
if (!argsStr) return {}
|
|
634
|
+
return parseArgs(argsStr).values
|
|
635
|
+
}
|
|
636
|
+
|
|
637
|
+
/** Build fileTags from collected Atlas tag occurrences */
|
|
638
|
+
function buildAtlasFileTags(
|
|
639
|
+
occurrences: Array<{
|
|
640
|
+
objectName: string
|
|
641
|
+
target: SqlTarget
|
|
642
|
+
namespace: string
|
|
643
|
+
tag: string | null
|
|
644
|
+
args: Record<string, unknown> | unknown[]
|
|
645
|
+
}>,
|
|
646
|
+
): TagContext['fileTags'] {
|
|
647
|
+
const map = new Map<
|
|
648
|
+
string,
|
|
649
|
+
{
|
|
650
|
+
objectName: string
|
|
651
|
+
target: SqlTarget
|
|
652
|
+
tags: Array<{ namespace: string; tag: string | null; args: Record<string, unknown> | unknown[] }>
|
|
653
|
+
}
|
|
654
|
+
>()
|
|
655
|
+
|
|
656
|
+
for (const occ of occurrences) {
|
|
657
|
+
const key = `${occ.objectName}:${occ.target}`
|
|
658
|
+
if (!map.has(key)) {
|
|
659
|
+
map.set(key, {
|
|
660
|
+
objectName: occ.objectName,
|
|
661
|
+
target: occ.target,
|
|
662
|
+
tags: [],
|
|
663
|
+
})
|
|
664
|
+
}
|
|
665
|
+
map.get(key)!.tags.push({
|
|
666
|
+
namespace: occ.namespace,
|
|
667
|
+
tag: occ.tag,
|
|
668
|
+
args: occ.args,
|
|
669
|
+
})
|
|
670
|
+
}
|
|
671
|
+
|
|
672
|
+
return Array.from(map.values())
|
|
673
|
+
}
|
|
674
|
+
|
|
675
|
+
// ── Tier 1 helpers ───────────────────────────────────────────────────
|
|
676
|
+
|
|
677
|
+
function buildFileTags(blocks: TagBlock[]): TagContext['fileTags'] {
|
|
678
|
+
return blocks.map((block) => {
|
|
679
|
+
// Use table.column format for column targets (matches Atlas convention)
|
|
680
|
+
const objectName =
|
|
681
|
+
block.ast.target === 'column' && block.ast.columnName
|
|
682
|
+
? `${block.ast.objectName ?? 'unknown'}.${block.ast.columnName}`
|
|
683
|
+
: (block.ast.objectName ?? 'unknown')
|
|
684
|
+
|
|
685
|
+
return {
|
|
686
|
+
objectName,
|
|
687
|
+
target: block.ast.target,
|
|
688
|
+
tags: block.tags.map((t) => ({
|
|
689
|
+
namespace: t.namespace,
|
|
690
|
+
tag: t.tag,
|
|
691
|
+
args: parsedArgsToValue(t.rawArgs),
|
|
692
|
+
})),
|
|
693
|
+
}
|
|
694
|
+
})
|
|
695
|
+
}
|
|
696
|
+
|
|
697
|
+
function parsedArgsToValue(rawArgs: string | null): Record<string, unknown> | unknown[] {
|
|
698
|
+
if (rawArgs === null) return {}
|
|
699
|
+
const parsed = parseArgs(rawArgs)
|
|
700
|
+
return parsed.values
|
|
701
|
+
}
|
|
702
|
+
|
|
703
|
+
/**
|
|
704
|
+
* Merge parser-derived tags into Atlas-derived fileTags.
|
|
705
|
+
* Adds tags that Atlas doesn't see (e.g. @lint.ignore) to the right objects.
|
|
706
|
+
*/
|
|
707
|
+
function mergeParserTags(atlasFileTags: TagContext['fileTags'], parserFileTags: TagContext['fileTags']): void {
|
|
708
|
+
// Index existing objects by name
|
|
709
|
+
const byName = new Map<string, (typeof atlasFileTags)[0]>()
|
|
710
|
+
for (const obj of atlasFileTags) {
|
|
711
|
+
byName.set(obj.objectName, obj)
|
|
712
|
+
}
|
|
713
|
+
|
|
714
|
+
for (const pObj of parserFileTags) {
|
|
715
|
+
for (const pTag of pObj.tags) {
|
|
716
|
+
// Only add tags that Atlas doesn't already have (non-SQL-generating tags)
|
|
717
|
+
const existing = byName.get(pObj.objectName)
|
|
718
|
+
if (existing) {
|
|
719
|
+
// Check if this tag already exists
|
|
720
|
+
const alreadyHas = existing.tags.some((t) => t.namespace === pTag.namespace && t.tag === pTag.tag)
|
|
721
|
+
if (!alreadyHas) {
|
|
722
|
+
existing.tags.push(pTag)
|
|
723
|
+
}
|
|
724
|
+
} else {
|
|
725
|
+
// Object not in Atlas (e.g. it's on a function or something Atlas didn't process)
|
|
726
|
+
const newObj = { objectName: pObj.objectName, target: pObj.target, tags: [pTag] }
|
|
727
|
+
atlasFileTags.push(newObj)
|
|
728
|
+
byName.set(pObj.objectName, newObj)
|
|
729
|
+
}
|
|
730
|
+
}
|
|
731
|
+
}
|
|
732
|
+
}
|
|
733
|
+
|
|
734
|
+
/**
|
|
735
|
+
* Strip @tag comments and @import lines from the source SQL.
|
|
736
|
+
* - Lines that are entirely comment with @tags are removed
|
|
737
|
+
* - Inline @tags (after SQL on the same line) have the comment portion removed
|
|
738
|
+
* - Blank `--` comments left behind are removed
|
|
739
|
+
* - Consecutive blank lines are collapsed
|
|
740
|
+
*/
|
|
741
|
+
function stripTagsAndImports(source: string): string {
|
|
742
|
+
const TAG_RE = /@\w+(?:\.\w+)?(?:\([^)]*\))?/
|
|
743
|
+
const IMPORT_RE = /^\s*--\s*@import\s/
|
|
744
|
+
const lines = source.split('\n')
|
|
745
|
+
const result: string[] = []
|
|
746
|
+
|
|
747
|
+
for (const line of lines) {
|
|
748
|
+
// Remove @import lines entirely
|
|
749
|
+
if (IMPORT_RE.test(line)) continue
|
|
750
|
+
|
|
751
|
+
// Check if line has a comment with a tag
|
|
752
|
+
const commentIdx = line.indexOf('--')
|
|
753
|
+
if (commentIdx >= 0) {
|
|
754
|
+
const commentPart = line.substring(commentIdx)
|
|
755
|
+
if (TAG_RE.test(commentPart)) {
|
|
756
|
+
const sqlPart = line.substring(0, commentIdx).trimEnd()
|
|
757
|
+
if (sqlPart) {
|
|
758
|
+
// Inline tag — keep the SQL, remove the comment
|
|
759
|
+
result.push(sqlPart)
|
|
760
|
+
}
|
|
761
|
+
// Else: entire line was a tag comment — skip it
|
|
762
|
+
continue
|
|
763
|
+
}
|
|
764
|
+
}
|
|
765
|
+
|
|
766
|
+
// Remove blank comments (just `--` with optional whitespace)
|
|
767
|
+
if (/^\s*--\s*$/.test(line)) continue
|
|
768
|
+
|
|
769
|
+
result.push(line)
|
|
770
|
+
}
|
|
771
|
+
|
|
772
|
+
// Collapse consecutive blank lines to a single blank line
|
|
773
|
+
const collapsed: string[] = []
|
|
774
|
+
let lastBlank = false
|
|
775
|
+
for (const line of result) {
|
|
776
|
+
const isBlank = line.trim() === ''
|
|
777
|
+
if (isBlank && lastBlank) continue
|
|
778
|
+
collapsed.push(line)
|
|
779
|
+
lastBlank = isBlank
|
|
780
|
+
}
|
|
781
|
+
|
|
782
|
+
return collapsed.join('\n')
|
|
783
|
+
}
|