nx-md-parser 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,531 @@
1
+ /**
2
+ * Real-World Integration Example
3
+ *
4
+ * Shows how to use the markdown transformer in a production NX project
5
+ * with nx-helpers for advanced merging capabilities
6
+ */
7
+
8
+ import {
9
+ JSONTransformer,
10
+ Schema,
11
+ mergeTransformResults,
12
+ jsonToMarkdown,
13
+ mergeNoRedundancy,
14
+ mergeMultiple,
15
+ mergeWithRoles,
16
+ toCamelCase,
17
+ anyToMarkdownString
18
+ } from '../src';
19
+ import { json2mdWithTables } from 'nx-helpers';
20
+
21
+ // ============================================================================
22
+ // Example 1: Asset Analysis Pipeline
23
+ // ============================================================================
24
+
25
// Shape of one parsed analysis document: the asset under examination, the
// analysis narrative, and supporting metadata lists.
interface AssetAnalysis {
  asset: {
    type: string;
    name: string;
    ip: string;
  };
  analysis: {
    shortAnswer: string;
    fullAnswer: string;
  };
  metadata: {
    // Free-form bullet lists; merged as deduplicated unions when multiple
    // documents are combined (see processMultipleAnalyses).
    assumptions: string[];
    unknowns: string[];
    evidence: string[];
  };
}

// Runtime schema mirroring the AssetAnalysis interface above; handed to
// JSONTransformer so parsed markdown is validated against the same shape.
const assetAnalysisSchema = Schema.object({
  asset: Schema.object({
    type: Schema.string(),
    name: Schema.string(),
    ip: Schema.string(),
  }),
  analysis: Schema.object({
    shortAnswer: Schema.string(),
    fullAnswer: Schema.string(),
  }),
  metadata: Schema.object({
    assumptions: Schema.array(Schema.string()),
    unknowns: Schema.array(Schema.string()),
    evidence: Schema.array(Schema.string()),
  }),
});
58
+
59
+ /**
60
+ * Process multiple markdown analysis documents and merge them
61
+ */
62
+ export function processMultipleAnalyses(markdownDocs: string[]): AssetAnalysis | null {
63
+ const transformer = new JSONTransformer(assetAnalysisSchema);
64
+ const results = markdownDocs.map(md => transformer.transformMarkdown(md));
65
+
66
+ // Check for failures
67
+ const failed = results.filter(r => r.status === 'failed');
68
+ if (failed.length > 0) {
69
+ console.error('Some analyses failed:', failed.flatMap(f => f.errors));
70
+ return null;
71
+ }
72
+
73
+ // Merge all results using nx-helpers
74
+ // Arrays will be merged as UNION (deduplicated)
75
+ const merged = mergeMultiple<AssetAnalysis>(
76
+ ...results.map(r => r.result!)
77
+ );
78
+
79
+ return merged;
80
+ }
81
+
82
+ // ============================================================================
83
+ // Example 2: Configuration from Markdown
84
+ // ============================================================================
85
+
86
// Shape of the server configuration parsed from markdown (see configSchema
// below and loadConfigFromMarkdown / the require.main demo for usage).
interface ServerConfig {
  servers: Array<{
    name: string;
    ip: string;
    port: number;
    enabled: boolean;
  }>;
  settings: {
    timeout: number; // units not shown here — presumably seconds; confirm with consumers
    retries: number;
    logLevel: string;
  };
}

// Runtime schema mirroring ServerConfig; used to validate markdown-derived
// configuration before it is merged with defaults.
const configSchema = Schema.object({
  servers: Schema.array(
    Schema.object({
      name: Schema.string(),
      ip: Schema.string(),
      port: Schema.number(),
      enabled: Schema.boolean(),
    })
  ),
  settings: Schema.object({
    timeout: Schema.number(),
    retries: Schema.number(),
    logLevel: Schema.string(),
  }),
});
115
+
116
+ /**
117
+ * Parse server configuration from markdown and merge with defaults
118
+ */
119
+ export function loadConfigFromMarkdown(
120
+ markdownConfig: string,
121
+ defaultConfig: Partial<ServerConfig>
122
+ ): Partial<ServerConfig> {
123
+ const transformer = new JSONTransformer(configSchema);
124
+ const result = transformer.transformMarkdown(markdownConfig);
125
+
126
+ if (result.status === 'failed') {
127
+ throw new Error(`Config parsing failed: ${result.errors?.join(', ')}`);
128
+ }
129
+
130
+ // Merge with defaults using nx-helpers
131
+ // This ensures arrays are deduplicated and objects are deep-merged
132
+ return mergeNoRedundancy(defaultConfig, result.result!);
133
+ }
134
+
135
+ // ============================================================================
136
+ // Example 3: Multi-Source Data Aggregation
137
+ // ============================================================================
138
+
139
// Combined shape produced by aggregateProjectData. Each top-level key is
// contributed by a different markdown source (metadata.md, requirements.md,
// risks.md) and the pieces are deep-merged into one object.
interface ProjectData {
  metadata: {
    name: string;
    version: string;
    tags: string[];
  };
  requirements: {
    functional: string[];
    nonFunctional: string[];
  };
  risks: {
    identified: string[];
    mitigation: string[];
  };
}
154
+
155
+ /**
156
+ * Aggregate project data from multiple markdown sources
157
+ * (e.g., requirements.md, risks.md, metadata.md)
158
+ */
159
+ export function aggregateProjectData(sources: {
160
+ requirements?: string;
161
+ risks?: string;
162
+ metadata?: string;
163
+ }): ProjectData {
164
+ const metadataSchema = Schema.object({
165
+ metadata: Schema.object({
166
+ name: Schema.string(),
167
+ version: Schema.string(),
168
+ tags: Schema.array(Schema.string()),
169
+ }),
170
+ });
171
+
172
+ const requirementsSchema = Schema.object({
173
+ requirements: Schema.object({
174
+ functional: Schema.array(Schema.string()),
175
+ nonFunctional: Schema.array(Schema.string()),
176
+ }),
177
+ });
178
+
179
+ const risksSchema = Schema.object({
180
+ risks: Schema.object({
181
+ identified: Schema.array(Schema.string()),
182
+ mitigation: Schema.array(Schema.string()),
183
+ }),
184
+ });
185
+
186
+ const results: Partial<ProjectData>[] = [];
187
+
188
+ if (sources.metadata) {
189
+ const t = new JSONTransformer(metadataSchema);
190
+ const result = t.transformMarkdown(sources.metadata);
191
+ if (result.status !== 'failed') results.push(result.result!);
192
+ }
193
+
194
+ if (sources.requirements) {
195
+ const t = new JSONTransformer(requirementsSchema);
196
+ const result = t.transformMarkdown(sources.requirements);
197
+ if (result.status !== 'failed') results.push(result.result!);
198
+ }
199
+
200
+ if (sources.risks) {
201
+ const t = new JSONTransformer(risksSchema);
202
+ const result = t.transformMarkdown(sources.risks);
203
+ if (result.status !== 'failed') results.push(result.result!);
204
+ }
205
+
206
+ // Merge all using nx-helpers
207
+ return mergeMultiple<ProjectData>(...results);
208
+ }
209
+
210
+ // ============================================================================
211
+ // Example 4: Validation Pipeline with Error Handling
212
+ // ============================================================================
213
+
214
// Outcome envelope returned by validateAndTransform: a success flag plus the
// parsed data (present only on success), hard errors, and non-fatal warnings
// (auto-applied fixes are surfaced through `warnings`).
interface ValidationResult<T> {
  success: boolean;
  data?: T; // set only when success === true
  errors: string[];
  warnings: string[];
}
220
+
221
+ /**
222
+ * Transform markdown with comprehensive validation and error handling
223
+ */
224
+ export function validateAndTransform<T>(
225
+ markdown: string,
226
+ schema: any,
227
+ options?: {
228
+ requireValidation?: boolean; // If true, only accept "validated" status
229
+ logFixes?: boolean;
230
+ }
231
+ ): ValidationResult<T> {
232
+ const transformer = new JSONTransformer(schema);
233
+ const result = transformer.transformMarkdown(markdown);
234
+
235
+ if (result.status === 'failed') {
236
+ return {
237
+ success: false,
238
+ errors: result.errors || ['Unknown error'],
239
+ warnings: [],
240
+ };
241
+ }
242
+
243
+ const warnings: string[] = [];
244
+
245
+ if (result.status === 'fixed') {
246
+ if (options?.requireValidation) {
247
+ return {
248
+ success: false,
249
+ errors: ['Validation required but fixes were applied'],
250
+ warnings: result.fixes || [],
251
+ };
252
+ }
253
+
254
+ if (options?.logFixes) {
255
+ console.log('Applied fixes:', result.fixes);
256
+ }
257
+
258
+ warnings.push(...(result.fixes || []));
259
+ }
260
+
261
+ return {
262
+ success: true,
263
+ data: result.result as T,
264
+ errors: [],
265
+ warnings,
266
+ };
267
+ }
268
+
269
+ // ============================================================================
270
+ // Example 5: Incremental Updates with nx-helpers
271
+ // ============================================================================
272
+
273
+ /**
274
+ * Update existing configuration by merging new markdown data
275
+ * Uses nx-helpers to intelligently merge arrays and nested objects
276
+ */
277
+ export function incrementalConfigUpdate<T>(
278
+ currentConfig: T,
279
+ updateMarkdown: string,
280
+ schema: any
281
+ ): T {
282
+ const transformer = new JSONTransformer(schema);
283
+ const result = transformer.transformMarkdown(updateMarkdown);
284
+
285
+ if (result.status === 'failed') {
286
+ console.error('Update failed:', result.errors);
287
+ return currentConfig; // Return unchanged
288
+ }
289
+
290
+ // Use nx-helpers mergeNoRedundancy for intelligent merging
291
+ // - Arrays: merged as UNION (deduplicated)
292
+ // - Objects: deep merged
293
+ // - Primitives: new value wins
294
+ return mergeNoRedundancy(currentConfig, result.result!);
295
+ }
296
+
297
+ // ============================================================================
298
+ // Usage Examples
299
+ // ============================================================================
300
+
301
// Demo entry point: runs only when this file is executed directly, not when
// it is imported as a module. Exercises Examples 1 and 5 above.
// NOTE(review): the diff rendering stripped indentation, so the exact leading
// whitespace inside the template literals below should be confirmed against
// the published package — it is significant to the markdown parser.
if (require.main === module) {
  // Example 1: Multi-source aggregation
  const projectData = aggregateProjectData({
    metadata: `### Metadata
name: My Project
version: 1.0.0

### Tags
- typescript
- nx`,
    requirements: `### Requirements
#### Functional
- User authentication
- Data export

#### Non Functional
- Performance: < 100ms response
- Security: OAuth 2.0`,
    risks: `### Risks
#### Identified
- Database scalability
- Third-party API dependency

#### Mitigation
- Implement caching
- Add fallback mechanisms`,
  });

  console.log('Aggregated Project Data:');
  console.log(JSON.stringify(projectData, null, 2));

  // Example 2: Incremental update — start from an in-memory config...
  const currentConfig = {
    servers: [
      { name: 'server1', ip: '192.168.1.1', port: 8080, enabled: true },
    ],
    settings: {
      timeout: 30,
      retries: 3,
      logLevel: 'info',
    },
  };

  // ...and overlay a markdown-described update (adds server2, bumps timeout
  // and logLevel; retries is untouched).
  const updateMarkdown = `### Servers
- name: server2
ip: 192.168.1.2
port: 8081
enabled: true

### Settings
timeout: 60
logLevel: debug`;

  const updated = incrementalConfigUpdate(currentConfig, updateMarkdown, configSchema);

  console.log('\nIncremental Update Result:');
  console.log(JSON.stringify(updated, null, 2));
  // Arrays are merged (server1 + server2), objects deep-merged (timeout, logLevel updated)
}
360
+
361
+ // ============================================================================
362
+ // Example 6: JSON to Markdown Generation & Advanced nx-helpers Features
363
+ // ============================================================================
364
+
365
console.log('\n\n======================================================================');
console.log('EXAMPLE 6: JSON to Markdown Generation & Advanced nx-helpers Features');
console.log('======================================================================\n');

// NOTE(review): Examples 6-8 execute at module top level (i.e. on import),
// unlike the Example 1-5 demo above which is guarded by
// `require.main === module` — confirm this is intentional for an example file.

// Example data that might come from your markdown transformations
const projectData = {
  title: "nx-md-parser",
  version: "1.0.0",
  description: "A powerful markdown to JSON transformer",
  features: [
    "Schema validation",
    "Auto-fixing capabilities",
    "nx-helpers integration"
  ],
  metadata: {
    dependencies: {
      "nx-helpers": "^1.2.7",
      typescript: "^5.3.0"
    }
  }
};

// Simpler test data like in nx-helpers README
const simpleData = {
  title: "Project Alpha",
  metadata: { version: 1, tags: ["ai", "nx"] }
};

// 6A: Convert JSON back to markdown using our utility
console.log('6A: JSON to Markdown Conversion:');
console.log('----------------------------------');
console.log('Complex data:');
console.log(jsonToMarkdown(projectData));
console.log('\nSimple data (like nx-helpers example):');
console.log(jsonToMarkdown(simpleData));
console.log();

// 6B: Demonstrate toCamelCase utility
console.log('6B: CamelCase Conversion Examples:');
console.log('-----------------------------------');
console.log('toCamelCase("hello world"):', toCamelCase("hello world"));
console.log('toCamelCase("user-profile-settings"):', toCamelCase("user-profile-settings"));
console.log('toCamelCase("API_KEY"):', toCamelCase("API_KEY"));
console.log();

// 6C: Demonstrate mergeWithRoles for structured merging
console.log('6C: Advanced Merging with Roles:');
console.log('---------------------------------');
const roleBasedData = [
  { role: 'frontend', value: { framework: 'React', language: 'TypeScript' } },
  { role: 'backend', value: { framework: 'Node.js', database: 'PostgreSQL' } },
  { role: 'devops', value: { cloud: 'AWS', ci: 'GitHub Actions' } }
];

const mergedByRole = mergeWithRoles(roleBasedData);
console.log('Merged by role:', JSON.stringify(mergedByRole, null, 2));
console.log();

// 6D: Demonstrate anyToMarkdownString with different options
console.log('6D: Advanced Markdown Generation Options:');
console.log('------------------------------------------');
console.log('Default formatting:');
console.log(anyToMarkdownString(projectData));
console.log('\nWith custom heading level (starts at h3):');
console.log(anyToMarkdownString(projectData, { level: 3 }));
console.log('\nWith JSON fallback fencing:');
console.log(anyToMarkdownString({ complex: { nested: { data: 'value' } } }, { fenceJsonFallback: true }));
432
+
433
+ // ============================================================================
434
+ // Example 7: Role-Based Merging with mergeWithRoles
435
+ // ============================================================================
436
+
437
+ console.log('\n\n======================================================================');
438
+ console.log('EXAMPLE 7: Role-Based Merging with mergeWithRoles');
439
+ console.log('======================================================================\n');
440
+
441
+ // 7A: mergeWithRoles - merge data with specific roles
442
+ console.log('7A: Role-Based Data Merging:');
443
+ console.log('------------------------------');
444
+
445
+ const roleBasedItems = [
446
+ { role: 'user-profile', value: { name: 'Alice Johnson', email: 'alice@example.com' } },
447
+ { role: 'user-preferences', value: { theme: 'dark', notifications: true } },
448
+ { role: 'user-permissions', value: { canEdit: true, canDelete: false } },
449
+ { role: 'account-settings', value: { plan: 'premium', storage: '100GB' } }
450
+ ];
451
+
452
+ const mergedByRoleExample = mergeWithRoles(roleBasedItems);
453
+ console.log('Merged by role:', JSON.stringify(mergedByRoleExample, null, 2));
454
+ console.log();
455
+
456
+ // 7B: Comparison with regular merging
457
+ console.log('7B: Comparison with Regular Merging:');
458
+ console.log('-------------------------------------');
459
+
460
+ const regularMerge = mergeMultiple(
461
+ { name: 'Alice Johnson', email: 'alice@example.com' } as any,
462
+ { theme: 'dark', notifications: true } as any,
463
+ { canEdit: true, canDelete: false } as any,
464
+ { plan: 'premium', storage: '100GB' } as any
465
+ );
466
+
467
+ console.log('Regular mergeMultiple result:');
468
+ console.log(JSON.stringify(regularMerge, null, 2));
469
+ console.log();
470
+
471
+ // 7C: Advanced role-based merging with nested data
472
+ console.log('7C: Advanced Role-Based Merging (Nested Data):');
473
+ console.log('------------------------------------------------');
474
+
475
+ const advancedRoles = [
476
+ {
477
+ role: 'frontend-config',
478
+ value: {
479
+ framework: 'React',
480
+ styling: { library: 'styled-components', theme: 'dark' }
481
+ }
482
+ },
483
+ {
484
+ role: 'backend-config',
485
+ value: {
486
+ runtime: 'Node.js',
487
+ database: { type: 'PostgreSQL', connection: { host: 'localhost' } }
488
+ }
489
+ },
490
+ {
491
+ role: 'deployment-config',
492
+ value: {
493
+ environment: 'production',
494
+ scaling: { minInstances: 2, maxInstances: 10 }
495
+ }
496
+ }
497
+ ];
498
+
499
+ const advancedMerged = mergeWithRoles(advancedRoles);
500
+ console.log('Advanced role-based merge:');
501
+ console.log(JSON.stringify(advancedMerged, null, 2));
502
+
503
+ // ============================================================================
504
+ // Example 8: Schema Loading from Files (Bonus)
505
+ // ============================================================================
506
+
507
console.log('\n\n======================================================================');
console.log('EXAMPLE 8: Schema Loading from Files (Bonus)');
console.log('======================================================================\n');

// Note: This example demonstrates the API but requires a schema.json file
console.log('8A: Schema Loading API:');
console.log('------------------------');
console.log('// Create a schema.json file with your schema definition:');
// The template literal below is printed verbatim as a sample schema.json.
// NOTE(review): the diff rendering stripped indentation, so the literal's
// exact internal whitespace should be confirmed against the published package.
console.log(`{
"type": "object",
"properties": {
"title": { "type": "string" },
"tags": { "type": "array", "items": { "type": "string" } },
"active": { "type": "boolean" }
}
}`);

// Then load it:
// const transformer = createTransformerFromSchemaFile('./schema.json');
// const result = transformer.transformMarkdown(markdown);

console.log('\nSchema loading functions available:');
console.log('- loadSchemaFromFile(filePath)');
console.log('- createTransformerFromSchemaFile(schemaFilePath)');
console.log('\nThese functions use nx-helpers loadJson for robust file loading.');