@mitre/hdf-parsers 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.md +55 -0
- package/README.md +437 -0
- package/dist/flatten.d.ts +31 -0
- package/dist/flatten.d.ts.map +1 -0
- package/dist/flatten.js +235 -0
- package/dist/flatten.js.map +1 -0
- package/dist/index.d.ts +31 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +177 -0
- package/dist/index.js.map +1 -0
- package/package.json +56 -0
package/LICENSE.md
ADDED
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
# License
|
|
2
|
+
|
|
3
|
+
Copyright © 2025 The MITRE Corporation.
|
|
4
|
+
|
|
5
|
+
Approved for Public Release; Distribution Unlimited. Case Number 18-3678.
|
|
6
|
+
|
|
7
|
+
Licensed under the Apache License, Version 2.0 (the "License"); you may
|
|
8
|
+
not use this file except in compliance with the License. You may obtain a
|
|
9
|
+
copy of the License at
|
|
10
|
+
|
|
11
|
+
http://www.apache.org/licenses/LICENSE-2.0
|
|
12
|
+
|
|
13
|
+
Unless required by applicable law or agreed to in writing, software
|
|
14
|
+
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
|
15
|
+
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
|
16
|
+
License for the specific language governing permissions and limitations
|
|
17
|
+
under the License.
|
|
18
|
+
|
|
19
|
+
## Redistribution Terms
|
|
20
|
+
|
|
21
|
+
Redistribution and use in source and binary forms, with or without
|
|
22
|
+
modification, are permitted provided that the following conditions are
|
|
23
|
+
met:
|
|
24
|
+
|
|
25
|
+
- Redistributions of source code must retain the above copyright/digital
|
|
26
|
+
rights legend, this list of conditions and the following Notice.
|
|
27
|
+
- Redistributions in binary form must reproduce the above
|
|
28
|
+
copyright/digital rights legend, this list of conditions and the
|
|
29
|
+
following Notice in the documentation and/or other materials provided
|
|
30
|
+
with the distribution.
|
|
31
|
+
- Neither the name of The MITRE Corporation nor the names of its contributors
|
|
32
|
+
may be used to endorse or promote products derived from this software
|
|
33
|
+
without specific prior written permission.
|
|
34
|
+
|
|
35
|
+
## Notice
|
|
36
|
+
|
|
37
|
+
The MITRE Corporation grants permission to reproduce, distribute, modify, and
|
|
38
|
+
otherwise use this software to the extent permitted by the licensed terms
|
|
39
|
+
provided in the LICENSE file included with this project.
|
|
40
|
+
|
|
41
|
+
This software was produced by The MITRE Corporation for the U.S. Government
|
|
42
|
+
under contract. As such the U.S. Government has certain use and data
|
|
43
|
+
rights in this software. No use other than those granted to the U.S.
|
|
44
|
+
Government, or to those acting on behalf of the U.S. Government, under
|
|
45
|
+
these contract arrangements is authorized without the express written
|
|
46
|
+
permission of The MITRE Corporation.
|
|
47
|
+
|
|
48
|
+
Some files in this codebase were generated by generative AI, under the
|
|
49
|
+
direction and review of The MITRE Corporation employees, for the purpose of
|
|
50
|
+
development efficiency. All AI-generated code functionality was validated
|
|
51
|
+
by standard quality and assurance testing.
|
|
52
|
+
|
|
53
|
+
For further information, please contact The MITRE Corporation,
|
|
54
|
+
Contracts Management Office, 7515 Colshire Drive, McLean, VA 22102-7539,
|
|
55
|
+
(703) 983-6000.
|
package/README.md
ADDED
|
@@ -0,0 +1,437 @@
|
|
|
1
|
+
# @mitre/hdf-parsers
|
|
2
|
+
|
|
3
|
+
Parse and load Heimdall Data Format (HDF) documents with validation. Provides a simple, type-safe API for reading HDF Results and Baselines from JSON with automatic schema validation.
|
|
4
|
+
|
|
5
|
+
## Scope and Responsibilities
|
|
6
|
+
|
|
7
|
+
**hdf-parsers** provides validated parsing of HDF documents:
|
|
8
|
+
- Parse HDF Results and Baseline documents from JSON
|
|
9
|
+
- Automatic schema validation via hdf-validators
|
|
10
|
+
- Auto-detection of document type (Results vs Baseline)
|
|
11
|
+
- Type-safe output using hdf-schema types
|
|
12
|
+
- Detailed error reporting with validation messages
|
|
13
|
+
- Support for both TypeScript and Go implementations
|
|
14
|
+
|
|
15
|
+
### hdf-parsers vs. hdf-validators
|
|
16
|
+
|
|
17
|
+
| hdf-parsers | hdf-validators |
|
|
18
|
+
|-------------|----------------|
|
|
19
|
+
| Parse JSON → typed objects | Validate JSON against schema |
|
|
20
|
+
| "Load and validate this HDF file" | "Is this valid HDF?" |
|
|
21
|
+
| Returns typed HdfResults/HdfBaseline | Returns validation errors |
|
|
22
|
+
| One-step parse + validate | Schema validation only |
|
|
23
|
+
| Used by CLI commands and tools | Used internally by parsers |
|
|
24
|
+
|
|
25
|
+
**Example:**
|
|
26
|
+
- `validateResults(data)` → `{ valid: true, errors: [] }` (validators - just validates)
|
|
27
|
+
- `parseResults(json)` → `{ success: true, data: HdfResults }` (parsers - validates AND parses)
|
|
28
|
+
|
|
29
|
+
## Installation
|
|
30
|
+
|
|
31
|
+
```bash
|
|
32
|
+
npm install @mitre/hdf-parsers
|
|
33
|
+
```
|
|
34
|
+
|
|
35
|
+
## Usage
|
|
36
|
+
|
|
37
|
+
### TypeScript
|
|
38
|
+
|
|
39
|
+
```typescript
|
|
40
|
+
import { parseResults, parseBaseline, parse } from '@mitre/hdf-parsers';
|
|
41
|
+
|
|
42
|
+
// Parse HDF Results
|
|
43
|
+
const json = '{"baselines":[...],"targets":[],"statistics":{}}';
|
|
44
|
+
const result = parseResults(json);
|
|
45
|
+
|
|
46
|
+
if (result.success) {
|
|
47
|
+
console.log('Parsed HDF Results:', result.data);
|
|
48
|
+
console.log('Number of baselines:', result.data.baselines?.length);
|
|
49
|
+
} else {
|
|
50
|
+
console.error('Parse failed:', result.error);
|
|
51
|
+
}
|
|
52
|
+
```
|
|
53
|
+
|
|
54
|
+
```typescript
|
|
55
|
+
// Parse HDF Baseline
|
|
56
|
+
const baselineJson = '{"name":"My Baseline","requirements":[...],...}';
|
|
57
|
+
const baselineResult = parseBaseline(baselineJson);
|
|
58
|
+
|
|
59
|
+
if (baselineResult.success) {
|
|
60
|
+
console.log('Baseline name:', baselineResult.data.name);
|
|
61
|
+
console.log('Requirements:', baselineResult.data.requirements.length);
|
|
62
|
+
}
|
|
63
|
+
```
|
|
64
|
+
|
|
65
|
+
```typescript
|
|
66
|
+
// Auto-detect document type
|
|
67
|
+
const unknownJson = '...'; // Could be Results or Baseline
|
|
68
|
+
const autoResult = parse(unknownJson);
|
|
69
|
+
|
|
70
|
+
if (autoResult.success) {
|
|
71
|
+
console.log('Document type:', autoResult.type); // "results" or "baseline"
|
|
72
|
+
console.log('Parsed data:', autoResult.data);
|
|
73
|
+
}
|
|
74
|
+
```
|
|
75
|
+
|
|
76
|
+
```typescript
|
|
77
|
+
// Parse from Uint8Array (e.g., file reads)
|
|
78
|
+
import { readFileSync } from 'fs';
|
|
79
|
+
|
|
80
|
+
const bytes = readFileSync('scan-results.json');
|
|
81
|
+
const result = parseResults(bytes);
|
|
82
|
+
```
|
|
83
|
+
|
|
84
|
+
### Go
|
|
85
|
+
|
|
86
|
+
```go
|
|
87
|
+
package main
|
|
88
|
+
|
|
89
|
+
import (
|
|
90
|
+
"fmt"
|
|
91
|
+
"os"
|
|
92
|
+
|
|
93
|
+
parsers "github.com/mitre/hdf-parsers/go"
|
|
94
|
+
)
|
|
95
|
+
|
|
96
|
+
func main() {
|
|
97
|
+
// Read HDF file
|
|
98
|
+
data, err := os.ReadFile("results.json")
|
|
99
|
+
if err != nil {
|
|
100
|
+
panic(err)
|
|
101
|
+
}
|
|
102
|
+
|
|
103
|
+
// Parse HDF Results
|
|
104
|
+
result := parsers.ParseResults(data)
|
|
105
|
+
|
|
106
|
+
if result.Success {
|
|
107
|
+
fmt.Println("✓ Parsed HDF Results")
|
|
108
|
+
fmt.Printf("Baselines: %d\n", len(result.Data.Baselines))
|
|
109
|
+
} else {
|
|
110
|
+
fmt.Println("✗ Parse failed:")
|
|
111
|
+
fmt.Println(result.Error)
|
|
112
|
+
os.Exit(1)
|
|
113
|
+
}
|
|
114
|
+
}
|
|
115
|
+
```
|
|
116
|
+
|
|
117
|
+
```go
|
|
118
|
+
// Parse HDF Baseline
|
|
119
|
+
result := parsers.ParseBaseline(baselineData)
|
|
120
|
+
|
|
121
|
+
if result.Success {
|
|
122
|
+
fmt.Println("Baseline name:", result.Data.Name)
|
|
123
|
+
fmt.Printf("Requirements: %d\n", len(result.Data.Requirements))
|
|
124
|
+
}
|
|
125
|
+
```
|
|
126
|
+
|
|
127
|
+
```go
|
|
128
|
+
// Auto-detect document type
|
|
129
|
+
result := parsers.Parse(data)
|
|
130
|
+
|
|
131
|
+
if result.Success {
|
|
132
|
+
fmt.Println("Document type:", result.Type) // "results" or "baseline"
|
|
133
|
+
}
|
|
134
|
+
```
|
|
135
|
+
|
|
136
|
+
## API Reference
|
|
137
|
+
|
|
138
|
+
### TypeScript
|
|
139
|
+
|
|
140
|
+
#### `parseResults(input: string | Uint8Array): ParseResult<HdfResults>`
|
|
141
|
+
|
|
142
|
+
Parse HDF Results document from JSON string or bytes.
|
|
143
|
+
|
|
144
|
+
- **Parameters:**
|
|
145
|
+
- `input` - JSON string or Uint8Array to parse
|
|
146
|
+
- **Returns:** `ParseResult<HdfResults>` with parsed data or error
|
|
147
|
+
|
|
148
|
+
#### `parseBaseline(input: string | Uint8Array): ParseResult<HdfBaseline>`
|
|
149
|
+
|
|
150
|
+
Parse HDF Baseline document from JSON string or bytes.
|
|
151
|
+
|
|
152
|
+
- **Parameters:**
|
|
153
|
+
- `input` - JSON string or Uint8Array to parse
|
|
154
|
+
- **Returns:** `ParseResult<HdfBaseline>` with parsed data or error
|
|
155
|
+
|
|
156
|
+
#### `parse(input: string | Uint8Array): ParseResult<HdfResults | HdfBaseline>`
|
|
157
|
+
|
|
158
|
+
Parse HDF document with auto-detection of type (Results vs Baseline).
|
|
159
|
+
|
|
160
|
+
- **Parameters:**
|
|
161
|
+
- `input` - JSON string or Uint8Array to parse
|
|
162
|
+
- **Returns:** `ParseResult` with parsed data, type indicator, or error
|
|
163
|
+
|
|
164
|
+
#### `ParseResult<T>`
|
|
165
|
+
|
|
166
|
+
```typescript
|
|
167
|
+
interface ParseResult<T> {
|
|
168
|
+
success: boolean; // True if parsing succeeded
|
|
169
|
+
data?: T; // Parsed data (undefined if failed)
|
|
170
|
+
error?: string; // Error message (undefined if succeeded)
|
|
171
|
+
type?: 'results' | 'baseline'; // Document type (only for parse())
|
|
172
|
+
}
|
|
173
|
+
```
|
|
174
|
+
|
|
175
|
+
### Go
|
|
176
|
+
|
|
177
|
+
#### `ParseResults(input []byte) ResultsParseResult`
|
|
178
|
+
|
|
179
|
+
Parse HDF Results document from JSON bytes.
|
|
180
|
+
|
|
181
|
+
- **Parameters:**
|
|
182
|
+
- `input` - JSON bytes to parse
|
|
183
|
+
- **Returns:** `ResultsParseResult` with parsed data or error
|
|
184
|
+
|
|
185
|
+
#### `ParseBaseline(input []byte) BaselineParseResult`
|
|
186
|
+
|
|
187
|
+
Parse HDF Baseline document from JSON bytes.
|
|
188
|
+
|
|
189
|
+
- **Parameters:**
|
|
190
|
+
- `input` - JSON bytes to parse
|
|
191
|
+
- **Returns:** `BaselineParseResult` with parsed data or error
|
|
192
|
+
|
|
193
|
+
#### `Parse(input []byte) ParseResult`
|
|
194
|
+
|
|
195
|
+
Parse HDF document with auto-detection of type.
|
|
196
|
+
|
|
197
|
+
- **Parameters:**
|
|
198
|
+
- `input` - JSON bytes to parse
|
|
199
|
+
- **Returns:** `ParseResult` with parsed data, type indicator, or error
|
|
200
|
+
|
|
201
|
+
#### Parse Result Types
|
|
202
|
+
|
|
203
|
+
```go
|
|
204
|
+
type ResultsParseResult struct {
|
|
205
|
+
Success bool `json:"success"`
|
|
206
|
+
Data *hdf.HDFResults `json:"data,omitempty"`
|
|
207
|
+
Error string `json:"error,omitempty"`
|
|
208
|
+
}
|
|
209
|
+
|
|
210
|
+
type BaselineParseResult struct {
|
|
211
|
+
Success bool `json:"success"`
|
|
212
|
+
Data *hdf.HDFBaseline `json:"data,omitempty"`
|
|
213
|
+
Error string `json:"error,omitempty"`
|
|
214
|
+
}
|
|
215
|
+
|
|
216
|
+
type ParseResult struct {
|
|
217
|
+
Success bool `json:"success"`
|
|
218
|
+
Data interface{} `json:"data,omitempty"`
|
|
219
|
+
Error string `json:"error,omitempty"`
|
|
220
|
+
Type string `json:"type,omitempty"` // "results" or "baseline"
|
|
221
|
+
}
|
|
222
|
+
```
|
|
223
|
+
|
|
224
|
+
## Common Parse Errors
|
|
225
|
+
|
|
226
|
+
### Invalid JSON Syntax
|
|
227
|
+
|
|
228
|
+
```
|
|
229
|
+
error: "Invalid JSON: Unexpected token } in JSON at position 42"
|
|
230
|
+
```
|
|
231
|
+
|
|
232
|
+
Ensure the input is valid JSON. Check for:
|
|
233
|
+
- Missing or extra commas
|
|
234
|
+
- Unquoted property names
|
|
235
|
+
- Trailing commas in objects/arrays
|
|
236
|
+
|
|
237
|
+
### Schema Validation Failure
|
|
238
|
+
|
|
239
|
+
```
|
|
240
|
+
error: "Schema validation failed: baselines: is required"
|
|
241
|
+
```
|
|
242
|
+
|
|
243
|
+
The JSON is valid but doesn't match the HDF schema. Common issues:
|
|
244
|
+
- Missing required fields (baselines, name, requirements)
|
|
245
|
+
- Wrong field types (string instead of number)
|
|
246
|
+
- Invalid enum values (status must be passed/failed/error/etc.)
|
|
247
|
+
|
|
248
|
+
### Empty Input
|
|
249
|
+
|
|
250
|
+
```
|
|
251
|
+
error: "Input is empty"
|
|
252
|
+
```
|
|
253
|
+
|
|
254
|
+
Provide non-empty JSON content.
|
|
255
|
+
|
|
256
|
+
### Trailing Data
|
|
257
|
+
|
|
258
|
+
```
|
|
259
|
+
error: "Invalid JSON: unexpected trailing data after end of object"
|
|
260
|
+
```
|
|
261
|
+
|
|
262
|
+
The JSON has extra characters after the closing brace. Remove any trailing content.
|
|
263
|
+
|
|
264
|
+
## Use Cases
|
|
265
|
+
|
|
266
|
+
### CLI Commands
|
|
267
|
+
|
|
268
|
+
Parse HDF files for CLI operations:
|
|
269
|
+
|
|
270
|
+
```typescript
|
|
271
|
+
import { parseResults } from '@mitre/hdf-parsers';
|
|
272
|
+
import { readFileSync } from 'fs';
|
|
273
|
+
|
|
274
|
+
const data = readFileSync(inputFile, 'utf-8');
|
|
275
|
+
const result = parseResults(data);
|
|
276
|
+
|
|
277
|
+
if (!result.success) {
|
|
278
|
+
console.error(`Failed to parse ${inputFile}: ${result.error}`);
|
|
279
|
+
process.exit(1);
|
|
280
|
+
}
|
|
281
|
+
|
|
282
|
+
// Process the validated HDF data
|
|
283
|
+
processResults(result.data);
|
|
284
|
+
```
|
|
285
|
+
|
|
286
|
+
### Converter Input Validation
|
|
287
|
+
|
|
288
|
+
Validate HDF input before conversion:
|
|
289
|
+
|
|
290
|
+
```typescript
|
|
291
|
+
import { parseResults } from '@mitre/hdf-parsers';
|
|
292
|
+
|
|
293
|
+
export function convertHdfToCsv(hdfJson: string): string {
|
|
294
|
+
const result = parseResults(hdfJson);
|
|
295
|
+
|
|
296
|
+
if (!result.success) {
|
|
297
|
+
throw new Error(`Invalid HDF input: ${result.error}`);
|
|
298
|
+
}
|
|
299
|
+
|
|
300
|
+
// Convert validated data
|
|
301
|
+
return buildCsv(result.data);
|
|
302
|
+
}
|
|
303
|
+
```
|
|
304
|
+
|
|
305
|
+
### HTTP API Endpoints
|
|
306
|
+
|
|
307
|
+
Parse and validate HDF uploads:
|
|
308
|
+
|
|
309
|
+
```typescript
|
|
310
|
+
app.post('/api/upload', async (req, res) => {
|
|
311
|
+
const result = parseResults(req.body);
|
|
312
|
+
|
|
313
|
+
if (!result.success) {
|
|
314
|
+
return res.status(400).json({
|
|
315
|
+
error: 'Invalid HDF document',
|
|
316
|
+
details: result.error
|
|
317
|
+
});
|
|
318
|
+
}
|
|
319
|
+
|
|
320
|
+
// Store validated HDF data
|
|
321
|
+
await storeResults(result.data);
|
|
322
|
+
res.json({ success: true });
|
|
323
|
+
});
|
|
324
|
+
```
|
|
325
|
+
|
|
326
|
+
### Type-Safe Processing
|
|
327
|
+
|
|
328
|
+
Get type-safe HDF objects:
|
|
329
|
+
|
|
330
|
+
```typescript
|
|
331
|
+
import { parseResults } from '@mitre/hdf-parsers';
|
|
332
|
+
import type { HdfResults } from '@mitre/hdf-schema';
|
|
333
|
+
|
|
334
|
+
function processResults(data: HdfResults) {
|
|
335
|
+
// TypeScript knows the exact structure
|
|
336
|
+
for (const baseline of data.baselines ?? []) {
|
|
337
|
+
console.log(`Baseline: ${baseline.name}`);
|
|
338
|
+
|
|
339
|
+
for (const req of baseline.requirements ?? []) {
|
|
340
|
+
console.log(` Requirement ${req.id}: ${req.results?.length ?? 0} results`);
|
|
341
|
+
}
|
|
342
|
+
}
|
|
343
|
+
}
|
|
344
|
+
|
|
345
|
+
const result = parseResults(jsonData);
|
|
346
|
+
if (result.success) {
|
|
347
|
+
processResults(result.data); // Type-safe!
|
|
348
|
+
}
|
|
349
|
+
```
|
|
350
|
+
|
|
351
|
+
## Error Handling Best Practices
|
|
352
|
+
|
|
353
|
+
### Always Check success Flag
|
|
354
|
+
|
|
355
|
+
```typescript
|
|
356
|
+
const result = parseResults(data);
|
|
357
|
+
|
|
358
|
+
if (!result.success) {
|
|
359
|
+
// Handle error - data is undefined here
|
|
360
|
+
console.error(result.error);
|
|
361
|
+
return;
|
|
362
|
+
}
|
|
363
|
+
|
|
364
|
+
// TypeScript knows data exists here
|
|
365
|
+
console.log(result.data.baselines);
|
|
366
|
+
```
|
|
367
|
+
|
|
368
|
+
### Provide User-Friendly Error Messages
|
|
369
|
+
|
|
370
|
+
```typescript
|
|
371
|
+
const result = parseResults(userInput);
|
|
372
|
+
|
|
373
|
+
if (!result.success) {
|
|
374
|
+
if (result.error.includes('JSON')) {
|
|
375
|
+
console.error('File contains invalid JSON syntax');
|
|
376
|
+
} else if (result.error.includes('Schema validation')) {
|
|
377
|
+
console.error('File does not match HDF format');
|
|
378
|
+
} else {
|
|
379
|
+
console.error('Failed to parse HDF file');
|
|
380
|
+
}
|
|
381
|
+
|
|
382
|
+
console.error('Details:', result.error);
|
|
383
|
+
}
|
|
384
|
+
```
|
|
385
|
+
|
|
386
|
+
### Log Validation Errors for Debugging
|
|
387
|
+
|
|
388
|
+
```typescript
|
|
389
|
+
import { parseResults } from '@mitre/hdf-parsers';
|
|
390
|
+
import { validateResults } from '@mitre/hdf-validators';
|
|
391
|
+
|
|
392
|
+
// For detailed debugging, use validator directly
|
|
393
|
+
const validationResult = validateResults(jsonData);
|
|
394
|
+
|
|
395
|
+
if (!validationResult.valid) {
|
|
396
|
+
console.error('Validation errors:');
|
|
397
|
+
for (const error of validationResult.errors) {
|
|
398
|
+
console.error(` ${error.field}: ${error.message}`);
|
|
399
|
+
}
|
|
400
|
+
}
|
|
401
|
+
|
|
402
|
+
// For normal use, parser is simpler
|
|
403
|
+
const parseResult = parseResults(jsonData);
|
|
404
|
+
```
|
|
405
|
+
|
|
406
|
+
## Development
|
|
407
|
+
|
|
408
|
+
```bash
|
|
409
|
+
# Install dependencies
|
|
410
|
+
pnpm install
|
|
411
|
+
|
|
412
|
+
# Run TypeScript tests
|
|
413
|
+
pnpm test:ts
|
|
414
|
+
|
|
415
|
+
# Run Go tests
|
|
416
|
+
pnpm test:go
|
|
417
|
+
|
|
418
|
+
# Run all tests
|
|
419
|
+
pnpm test
|
|
420
|
+
|
|
421
|
+
# Run tests with coverage
|
|
422
|
+
pnpm test:coverage
|
|
423
|
+
|
|
424
|
+
# Build TypeScript package
|
|
425
|
+
pnpm build
|
|
426
|
+
|
|
427
|
+
# Lint code
|
|
428
|
+
pnpm lint
|
|
429
|
+
```
|
|
430
|
+
|
|
431
|
+
## Test Coverage
|
|
432
|
+
|
|
433
|
+
Both TypeScript and Go implementations maintain **>95% test coverage** with comprehensive parsing tests. Run `pnpm test:coverage` to view current coverage report.
|
|
434
|
+
|
|
435
|
+
## License
|
|
436
|
+
|
|
437
|
+
Apache-2.0 © MITRE Corporation
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
import type { HdfResults } from '@mitre/hdf-schema';
/** Output of flattenOverlays(): the flattened document plus merge bookkeeping. */
export interface FlattenResult {
    /** The input Results with each overlay/wrapper tree collapsed into its root baseline. */
    results: HdfResults;
    /** Counts, per-tree merge records, and warnings produced during flattening. */
    metadata: FlattenMetadata;
}
export interface FlattenMetadata {
    /** Number of baselines in the input document before flattening. */
    originalBaselineCount: number;
    /** Number of baselines remaining after flattening (one per root tree). */
    flattenedBaselineCount: number;
    /** One entry per tree that actually merged baselines (standalone baselines produce none). */
    merges: BaselineMerge[];
    /** Non-fatal issues: duplicate baseline names, dangling parent references, parent cycles. */
    warnings: string[];
}
export interface BaselineMerge {
    /** Name of the root baseline that survives the merge. */
    rootBaseline: string;
    /** Names of the non-root baselines whose requirements were folded into the root. */
    absorbedBaselines: string[];
    /** Total requirement count across the whole tree before merging. */
    controlsBefore: number;
    /** Requirement count after merging (deduplicated by requirement id). */
    controlsAfter: number;
    /** Tree shape: 'deep' = overlay chain, 'wide' = wrapper over independent bases, 'hybrid' = both. */
    pattern: 'deep' | 'wide' | 'hybrid';
}
/**
 * Flatten overlay/wrapper baselines in an HDF Results document.
 *
 * Handles:
 * - Deep nesting (overlay chains with shared control IDs via parentBaseline)
 * - Wide nesting (wrapper profiles aggregating independent bases)
 * - Hybrid (both patterns in one document)
 *
 * @param results - Parsed HDF Results (from parseResults() or equivalent)
 * @returns FlattenResult with flattened data and merge metadata
 */
export declare function flattenOverlays(results: HdfResults): FlattenResult;
//# sourceMappingURL=flatten.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"flatten.d.ts","sourceRoot":"","sources":["../typescript/flatten.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,UAAU,EAA2C,MAAM,mBAAmB,CAAC;AAI7F,MAAM,WAAW,aAAa;IAC5B,OAAO,EAAE,UAAU,CAAC;IACpB,QAAQ,EAAE,eAAe,CAAC;CAC3B;AAED,MAAM,WAAW,eAAe;IAC9B,qBAAqB,EAAE,MAAM,CAAC;IAC9B,sBAAsB,EAAE,MAAM,CAAC;IAC/B,MAAM,EAAE,aAAa,EAAE,CAAC;IACxB,QAAQ,EAAE,MAAM,EAAE,CAAC;CACpB;AAED,MAAM,WAAW,aAAa;IAC5B,YAAY,EAAE,MAAM,CAAC;IACrB,iBAAiB,EAAE,MAAM,EAAE,CAAC;IAC5B,cAAc,EAAE,MAAM,CAAC;IACvB,aAAa,EAAE,MAAM,CAAC;IACtB,OAAO,EAAE,MAAM,GAAG,MAAM,GAAG,QAAQ,CAAC;CACrC;AA+GD;;;;;;;;;;GAUG;AACH,wBAAgB,eAAe,CAAC,OAAO,EAAE,UAAU,GAAG,aAAa,CAwJlE"}
|
package/dist/flatten.js
ADDED
|
@@ -0,0 +1,235 @@
|
|
|
1
|
+
// ── Internal Helpers ───────────────────────────────────────
/** BFS from root, returns names in top-down order. Cycle-safe. */
function collectTree(root, childrenMap) {
    const ordered = [];
    const discovered = new Set();
    const pending = [root];
    while (pending.length > 0) {
        const current = pending.shift();
        if (discovered.has(current)) {
            continue;
        }
        discovered.add(current);
        ordered.push(current);
        const kids = childrenMap.get(current) ?? [];
        pending.push(...kids);
    }
    return ordered;
}
/** Classify merge pattern based on tree shape */
function detectPattern(root, childrenMap) {
    const directChildren = childrenMap.get(root) ?? [];
    // A single chain below the root (0 or 1 child) is a deep overlay.
    if (directChildren.length < 2) {
        return 'deep';
    }
    // Multiple children: hybrid if any child has its own subtree, else wide.
    const hasGrandchildren = directChildren.some(
        (child) => (childrenMap.get(child) ?? []).length > 0
    );
    return hasGrandchildren ? 'hybrid' : 'wide';
}
/** Merge incoming requirement fields onto existing */
function mergeRequirement(existing, incoming) {
    const combined = { ...existing };
    const incomingHasResults = Boolean(incoming.results && incoming.results.length > 0);

    // Impact: incoming always wins (required field, always present)
    combined.impact = incoming.impact;

    // Results: keep whichever is non-empty (base has them, overlay doesn't)
    if (incomingHasResults) {
        combined.results = incoming.results;
    }

    // Code: incoming wins if non-empty
    if (incoming.code && incoming.code.trim() !== '') {
        combined.code = incoming.code;
    }

    // Tags: shallow merge (incoming keys override)
    if (incoming.tags) {
        combined.tags = { ...existing.tags, ...incoming.tags };
    }

    // Severity: incoming wins if present, else keep existing
    if (incoming.severity !== undefined) {
        combined.severity = incoming.severity;
    }

    // EffectiveStatus: incoming wins only if it has results (otherwise its
    // effectiveStatus is a computed artifact from empty results, not intentional).
    // Overlays typically have empty results — the base has the real test results.
    if (incoming.effectiveStatus !== undefined && incomingHasResults) {
        combined.effectiveStatus = incoming.effectiveStatus;
    }

    // Descriptions: merge by label (incoming overrides same label)
    if (incoming.descriptions && incoming.descriptions.length > 0) {
        const byLabel = new Map();
        for (const desc of existing.descriptions ?? []) {
            byLabel.set(desc.label, desc);
        }
        for (const desc of incoming.descriptions) {
            byLabel.set(desc.label, desc);
        }
        combined.descriptions = [...byLabel.values()];
    }

    return combined;
}
/**
 * Resolve parentBaseline for a baseline.
 * InSpec parent_profile can use depends-name aliases (e.g., 'k8s' instead of
 * 'k8s-node-stig-baseline'). When the value isn't a direct profile name,
 * find who depends on this baseline — that's the actual parent.
 */
function resolveParentBaseline(b, byName, allBaselines) {
    const declared = b.parentBaseline;
    if (!declared) {
        return undefined;
    }
    // Fast path: the declared parent is a real profile name.
    if (byName.has(declared)) {
        return declared;
    }
    // Alias resolution: the actual parent is whichever profile lists this
    // baseline in its depends array.
    const actualParent = allBaselines.find(
        (candidate) => candidate.depends?.some((d) => d.name === b.name)
    );
    return actualParent?.name; // undefined → orphan
}
// ── Public API ──────────────────────────────────────────────
/**
 * Flatten overlay/wrapper baselines in an HDF Results document.
 *
 * Handles:
 * - Deep nesting (overlay chains with shared control IDs via parentBaseline)
 * - Wide nesting (wrapper profiles aggregating independent bases)
 * - Hybrid (both patterns in one document)
 *
 * @param results - Parsed HDF Results (from parseResults() or equivalent)
 * @returns FlattenResult with flattened data and merge metadata
 */
export function flattenOverlays(results) {
    const { baselines } = results;
    const warnings = [];
    const merges = [];
    // Nothing to flatten: normalize `baselines` to [] and report zero counts.
    if (!baselines || baselines.length === 0) {
        return {
            results: { ...results, baselines: [] },
            metadata: {
                originalBaselineCount: 0,
                flattenedBaselineCount: 0,
                merges: [],
                warnings: [],
            },
        };
    }
    // Index baselines by name (later duplicates win, with a warning)
    const byName = new Map();
    for (const b of baselines) {
        if (byName.has(b.name)) {
            warnings.push(`Duplicate baseline name "${b.name}" — later entry overwrites earlier`);
        }
        byName.set(b.name, b);
    }
    // Resolve parentBaseline aliases and build parent map, warning on
    // references that cannot be resolved to any baseline in the document.
    const resolvedParent = new Map();
    for (const b of baselines) {
        const parent = resolveParentBaseline(b, byName, baselines);
        resolvedParent.set(b.name, parent);
        if (b.parentBaseline && !parent) {
            warnings.push(`Baseline "${b.name}" references nonexistent parent "${b.parentBaseline}"`);
        }
    }
    // Build parent → children adjacency using resolved parents
    const childrenMap = new Map();
    for (const b of baselines) {
        const parent = resolvedParent.get(b.name);
        if (parent) {
            const list = childrenMap.get(parent) || [];
            list.push(b.name);
            childrenMap.set(parent, list);
        }
    }
    // Find roots: no resolved parent
    const roots = [];
    const visited = new Set();
    for (const b of baselines) {
        if (!resolvedParent.get(b.name)) {
            roots.push(b.name);
        }
    }
    // Mark reachable from roots (iterative DFS to avoid stack overflow on deep trees)
    function markReachable(start) {
        const stack = [start];
        while (stack.length > 0) {
            const name = stack.pop();
            if (visited.has(name))
                continue;
            visited.add(name);
            const children = childrenMap.get(name);
            if (children) {
                for (const child of children) {
                    stack.push(child);
                }
            }
        }
    }
    for (const r of roots) {
        markReachable(r);
    }
    // Detect cycles: unvisited baselines are in cycles. Promote one cycle
    // member to a root so its subtree is still processed (collectTree is
    // cycle-safe, so each member is merged exactly once).
    for (const b of baselines) {
        if (!visited.has(b.name)) {
            warnings.push(`Circular parentBaseline detected involving "${b.name}"`);
            roots.push(b.name);
            markReachable(b.name);
        }
    }
    // Process each root tree
    const flatBaselines = [];
    for (const rootName of roots) {
        const root = byName.get(rootName);
        const treeNames = collectTree(rootName, childrenMap);
        if (treeNames.length === 1) {
            // Standalone baseline — pass through unchanged (preserve depends)
            flatBaselines.push(root);
            continue;
        }
        // Bottom-up order: reverse top-down BFS, so bases are merged before
        // their overlays and overlay fields win in mergeRequirement.
        const bottomUp = [...treeNames].reverse();
        // Merge requirements across the tree, keyed by requirement id
        const merged = new Map();
        let controlsBefore = 0;
        const absorbed = [];
        for (const name of bottomUp) {
            const b = byName.get(name);
            // Defensive: the schema requires `requirements`, but don't crash
            // the whole flatten pass if a baseline arrives without it.
            const reqs = b.requirements ?? [];
            controlsBefore += reqs.length;
            if (name !== rootName) {
                absorbed.push(name);
            }
            for (const req of reqs) {
                if (merged.has(req.id)) {
                    merged.set(req.id, mergeRequirement(merged.get(req.id), req));
                }
                else {
                    merged.set(req.id, { ...req });
                }
            }
        }
        const mergedReqs = [...merged.values()];
        const pattern = detectPattern(rootName, childrenMap);
        merges.push({
            rootBaseline: rootName,
            absorbedBaselines: absorbed,
            controlsBefore,
            controlsAfter: mergedReqs.length,
            pattern,
        });
        // The flattened baseline keeps the root's identity; overlay links are
        // meaningless after merging, so drop them.
        const out = {
            ...root,
            requirements: mergedReqs,
        };
        delete out.parentBaseline;
        delete out.depends;
        flatBaselines.push(out);
    }
    return {
        results: { ...results, baselines: flatBaselines },
        metadata: {
            originalBaselineCount: baselines.length,
            flattenedBaselineCount: flatBaselines.length,
            merges,
            warnings,
        },
    };
}
//# sourceMappingURL=flatten.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"flatten.js","sourceRoot":"","sources":["../typescript/flatten.ts"],"names":[],"mappings":"AAwBA,8DAA8D;AAE9D,kEAAkE;AAClE,SAAS,WAAW,CAAC,IAAY,EAAE,WAAkC;IACnE,MAAM,KAAK,GAAa,EAAE,CAAC;IAC3B,MAAM,IAAI,GAAG,IAAI,GAAG,EAAU,CAAC;IAC/B,MAAM,KAAK,GAAG,CAAC,IAAI,CAAC,CAAC;IACrB,OAAO,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QACxB,MAAM,IAAI,GAAG,KAAK,CAAC,KAAK,EAAG,CAAC;QAC5B,IAAI,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC;YAAE,SAAS;QAC7B,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;QACf,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QACjB,KAAK,MAAM,KAAK,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC;YAClD,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QACpB,CAAC;IACH,CAAC;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,iDAAiD;AACjD,SAAS,aAAa,CAAC,IAAY,EAAE,WAAkC;IACrE,MAAM,YAAY,GAAG,WAAW,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC;IACjD,IAAI,YAAY,CAAC,MAAM,IAAI,CAAC,EAAE,CAAC;QAC7B,OAAO,MAAM,CAAC;IAChB,CAAC;IACD,KAAK,MAAM,KAAK,IAAI,YAAY,EAAE,CAAC;QACjC,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YAC9C,OAAO,QAAQ,CAAC;QAClB,CAAC;IACH,CAAC;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,sDAAsD;AACtD,SAAS,gBAAgB,CACvB,QAA8B,EAC9B,QAA8B;IAE9B,MAAM,MAAM,GAAyB,EAAE,GAAG,QAAQ,EAAE,CAAC;IAErD,gEAAgE;IAChE,MAAM,CAAC,MAAM,GAAG,QAAQ,CAAC,MAAM,CAAC;IAEhC,wEAAwE;IACxE,IAAI,QAAQ,CAAC,OAAO,IAAI,QAAQ,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QACpD,MAAM,CAAC,OAAO,GAAG,QAAQ,CAAC,OAAO,CAAC;IACpC,CAAC;IAED,mCAAmC;IACnC,IAAI,QAAQ,CAAC,IAAI,IAAI,QAAQ,CAAC,IAAI,CAAC,IAAI,EAAE,KAAK,EAAE,EAAE,CAAC;QACjD,MAAM,CAAC,IAAI,GAAG,QAAQ,CAAC,IAAI,CAAC;IAC9B,CAAC;IAED,+CAA+C;IAC/C,IAAI,QAAQ,CAAC,IAAI,EAAE,CAAC;QAClB,MAAM,CAAC,IAAI,GAAG,EAAE,GAAG,QAAQ,CAAC,IAAI,EAAE,GAAG,QAAQ,CAAC,IAAI,EAAE,CAAC;IACvD,CAAC;IAED,yDAAyD;IACzD,IAAI,QAAQ,CAAC,QAAQ,KAAK,SAAS,EAAE,CAAC;QACpC,MAAM,CAAC,QAAQ,GAAG,QAAQ,CAAC,QAAQ,CAAC;IACtC,CAAC;IAED,uEAAuE;IACvE,+EAA+E;IAC/E,8EAA8E;IAC9E,IAAI,QAAQ,CAAC,eAAe,KAAK,SAAS,IAAI,QAAQ,CAAC,OAAO,IAAI,QAAQ,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QAC9F,MAAM,CAAC,eAAe,GAAG,QAAQ,CAAC,eAAe,CAAC;IACpD,CA
AC;IAED,+DAA+D;IAC/D,IAAI,QAAQ,CAAC,YAAY,IAAI,QAAQ,CAAC,YAAY,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QAC9D,MAAM,OAAO,GAAG,IAAI,GAAG,CACrB,CAAC,QAAQ,CAAC,YAAY,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CACrD,CAAC;QACF,KAAK,MAAM,CAAC,IAAI,QAAQ,CAAC,YAAY,EAAE,CAAC;YACtC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;QAC1B,CAAC;QACD,MAAM,CAAC,YAAY,GAAG,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC,CAAC;IAC9C,CAAC;IAED,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;GAKG;AACH,SAAS,qBAAqB,CAC5B,CAAoB,EACpB,MAAsC,EACtC,YAAiC;IAEjC,IAAI,CAAC,CAAC,CAAC,cAAc;QAAE,OAAO,SAAS,CAAC;IACxC,IAAI,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,cAAc,CAAC;QAAE,OAAO,CAAC,CAAC,cAAc,CAAC;IAE1D,gFAAgF;IAChF,KAAK,MAAM,SAAS,IAAI,YAAY,EAAE,CAAC;QACrC,IAAI,SAAS,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC;YACtD,OAAO,SAAS,CAAC,IAAI,CAAC;QACxB,CAAC;IACH,CAAC;IACD,OAAO,SAAS,CAAC,CAAC,SAAS;AAC7B,CAAC;AAED,+DAA+D;AAE/D;;;;;;;;;;GAUG;AACH,MAAM,UAAU,eAAe,CAAC,OAAmB;IACjD,MAAM,EAAE,SAAS,EAAE,GAAG,OAAO,CAAC;IAC9B,MAAM,QAAQ,GAAa,EAAE,CAAC;IAC9B,MAAM,MAAM,GAAoB,EAAE,CAAC;IAEnC,IAAI,CAAC,SAAS,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QACzC,OAAO;YACL,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE,SAAS,EAAE,EAAE,EAAE;YACtC,QAAQ,EAAE;gBACR,qBAAqB,EAAE,CAAC;gBACxB,sBAAsB,EAAE,CAAC;gBACzB,MAAM,EAAE,EAAE;gBACV,QAAQ,EAAE,EAAE;aACb;SACF,CAAC;IACJ,CAAC;IAED,0BAA0B;IAC1B,MAAM,MAAM,GAAG,IAAI,GAAG,EAA6B,CAAC;IACpD,KAAK,MAAM,CAAC,IAAI,SAAS,EAAE,CAAC;QAC1B,IAAI,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC;YACvB,QAAQ,CAAC,IAAI,CAAC,4BAA4B,CAAC,CAAC,IAAI,oCAAoC,CAAC,CAAC;QACxF,CAAC;QACD,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC;IACxB,CAAC;IAED,sDAAsD;IACtD,MAAM,cAAc,GAAG,IAAI,GAAG,EAA8B,CAAC;IAC7D,KAAK,MAAM,CAAC,IAAI,SAAS,EAAE,CAAC;QAC1B,MAAM,MAAM,GAAG,qBAAqB,CAAC,CAAC,EAAE,MAAM,EAAE,SAAS,CAAC,CAAC;QAC3D,cAAc,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;QACnC,IAAI,CAAC,CAAC,cAAc,IAAI,CAAC,MAAM,EAAE,CAAC;YAChC,QAAQ,CAAC,IAAI,CACX,aAAa,CAAC,CAAC,IAAI,oCAAoC,CAAC
,CAAC,cAAc,GAAG,CAC3E,CAAC;QACJ,CAAC;IACH,CAAC;IAED,2DAA2D;IAC3D,MAAM,WAAW,GAAG,IAAI,GAAG,EAAoB,CAAC;IAChD,KAAK,MAAM,CAAC,IAAI,SAAS,EAAE,CAAC;QAC1B,MAAM,MAAM,GAAG,cAAc,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;QAC1C,IAAI,MAAM,EAAE,CAAC;YACX,MAAM,IAAI,GAAG,WAAW,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,CAAC;YAC3C,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;YAClB,WAAW,CAAC,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC;QAChC,CAAC;IACH,CAAC;IAED,iCAAiC;IACjC,MAAM,KAAK,GAAa,EAAE,CAAC;IAC3B,MAAM,OAAO,GAAG,IAAI,GAAG,EAAU,CAAC;IAElC,KAAK,MAAM,CAAC,IAAI,SAAS,EAAE,CAAC;QAC1B,IAAI,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC;YAChC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;QACrB,CAAC;IACH,CAAC;IAED,kFAAkF;IAClF,SAAS,aAAa,CAAC,KAAa;QAClC,MAAM,KAAK,GAAG,CAAC,KAAK,CAAC,CAAC;QACtB,OAAO,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YACxB,MAAM,IAAI,GAAG,KAAK,CAAC,GAAG,EAAG,CAAC;YAC1B,IAAI,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC;gBAAE,SAAS;YAChC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;YAClB,MAAM,QAAQ,GAAG,WAAW,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;YACvC,IAAI,QAAQ,EAAE,CAAC;gBACb,KAAK,MAAM,KAAK,IAAI,QAAQ,EAAE,CAAC;oBAC7B,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;gBACpB,CAAC;YACH,CAAC;QACH,CAAC;IACH,CAAC;IACD,KAAK,MAAM,CAAC,IAAI,KAAK,EAAE,CAAC;QACtB,aAAa,CAAC,CAAC,CAAC,CAAC;IACnB,CAAC;IAED,mDAAmD;IACnD,KAAK,MAAM,CAAC,IAAI,SAAS,EAAE,CAAC;QAC1B,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC;YACzB,QAAQ,CAAC,IAAI,CAAC,+CAA+C,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC;YACxE,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;YACnB,aAAa,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;QACxB,CAAC;IACH,CAAC;IAED,yBAAyB;IACzB,MAAM,aAAa,GAAwB,EAAE,CAAC;IAE9C,KAAK,MAAM,QAAQ,IAAI,KAAK,EAAE,CAAC;QAC7B,MAAM,IAAI,GAAG,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAE,CAAC;QACnC,MAAM,SAAS,GAAG,WAAW,CAAC,QAAQ,EAAE,WAAW,CAAC,CAAC;QAErD,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YAC3B,kEAAkE;YAClE,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;YACzB,SAAS;QACX,CAAC;QAED,wCAAwC;QACxC,MAAM,QAAQ,GAAG,CAAC,GAAG,SAAS,CAAC,CAAC,OAAO,EAAE,CAAC;QAE1C,qCAAqC;QACrC,MAAM,MAAM,GAAG,IAAI,GAAG,EAAgC,CAAC;QACvD,IAAI,cAAc,GAAG,
CAAC,CAAC;QACvB,MAAM,QAAQ,GAAa,EAAE,CAAC;QAE9B,KAAK,MAAM,IAAI,IAAI,QAAQ,EAAE,CAAC;YAC5B,MAAM,CAAC,GAAG,MAAM,CAAC,GAAG,CAAC,IAAI,CAAE,CAAC;YAC5B,cAAc,IAAI,CAAC,CAAC,YAAY,CAAC,MAAM,CAAC;YACxC,IAAI,IAAI,KAAK,QAAQ,EAAE,CAAC;gBACtB,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;YACtB,CAAC;YACD,KAAK,MAAM,GAAG,IAAI,CAAC,CAAC,YAAY,EAAE,CAAC;gBACjC,IAAI,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,CAAC;oBACvB,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,EAAE,gBAAgB,CAAC,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAE,EAAE,GAAG,CAAC,CAAC,CAAC;gBACjE,CAAC;qBAAM,CAAC;oBACN,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,EAAE,EAAE,GAAG,GAAG,EAAE,CAAC,CAAC;gBACjC,CAAC;YACH,CAAC;QACH,CAAC;QAED,MAAM,UAAU,GAAG,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,CAAC;QACxC,MAAM,OAAO,GAAG,aAAa,CAAC,QAAQ,EAAE,WAAW,CAAC,CAAC;QAErD,MAAM,CAAC,IAAI,CAAC;YACV,YAAY,EAAE,QAAQ;YACtB,iBAAiB,EAAE,QAAQ;YAC3B,cAAc;YACd,aAAa,EAAE,UAAU,CAAC,MAAM;YAChC,OAAO;SACR,CAAC,CAAC;QAEH,MAAM,GAAG,GAAsB;YAC7B,GAAG,IAAI;YACP,YAAY,EAAE,UAAU;SACzB,CAAC;QACF,OAAO,GAAG,CAAC,cAAc,CAAC;QAC1B,OAAO,GAAG,CAAC,OAAO,CAAC;QACnB,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC1B,CAAC;IAED,OAAO;QACL,OAAO,EAAE,EAAE,GAAG,OAAO,EAAE,SAAS,EAAE,aAAa,EAAE;QACjD,QAAQ,EAAE;YACR,qBAAqB,EAAE,SAAS,CAAC,MAAM;YACvC,sBAAsB,EAAE,aAAa,CAAC,MAAM;YAC5C,MAAM;YACN,QAAQ;SACT;KACF,CAAC;AACJ,CAAC"}
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
import type { HdfResults, HdfBaseline } from '@mitre/hdf-schema';
export { flattenOverlays } from './flatten.js';
export type { FlattenResult, FlattenMetadata, BaselineMerge } from './flatten.js';
/**
 * Result of a parsing operation.
 *
 * Exactly one of `data` / `error` is populated, keyed off `success`.
 */
export interface ParseResult<T> {
    /** True when JSON parsing and schema validation both succeeded. */
    success: boolean;
    /** The parsed document; set only when `success` is true. */
    data?: T;
    /** Human-readable failure description; set only when `success` is false. */
    error?: string;
    /** Detected document type; set only by the auto-detecting `parse()`. */
    type?: 'results' | 'baseline';
}
/**
 * Parse HDF Results document from string or bytes
 * @param input - JSON string or Uint8Array to parse
 * @returns ParseResult with parsed data or error
 */
export declare function parseResults(input: string | Uint8Array): ParseResult<HdfResults>;
/**
 * Parse HDF Baseline document from string or bytes
 * @param input - JSON string or Uint8Array to parse
 * @returns ParseResult with parsed data or error
 */
export declare function parseBaseline(input: string | Uint8Array): ParseResult<HdfBaseline>;
/**
 * Parse HDF document with auto-detection of type
 * @param input - JSON string or Uint8Array to parse
 * @returns ParseResult with parsed data, type indicator, or error
 */
export declare function parse(input: string | Uint8Array): ParseResult<HdfResults | HdfBaseline>;
//# sourceMappingURL=index.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../typescript/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,UAAU,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,cAAc,CAAC;AAC/C,YAAY,EAAE,aAAa,EAAE,eAAe,EAAE,aAAa,EAAE,MAAM,cAAc,CAAC;AAGlF;;GAEG;AACH,MAAM,WAAW,WAAW,CAAC,CAAC;IAC5B,OAAO,EAAE,OAAO,CAAC;IACjB,IAAI,CAAC,EAAE,CAAC,CAAC;IACT,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,IAAI,CAAC,EAAE,SAAS,GAAG,UAAU,CAAC;CAC/B;AAED;;;;GAIG;AACH,wBAAgB,YAAY,CAAC,KAAK,EAAE,MAAM,GAAG,UAAU,GAAG,WAAW,CAAC,UAAU,CAAC,CA+ChF;AAED;;;;GAIG;AACH,wBAAgB,aAAa,CAAC,KAAK,EAAE,MAAM,GAAG,UAAU,GAAG,WAAW,CAAC,WAAW,CAAC,CA8ClF;AAED;;;;GAIG;AACH,wBAAgB,KAAK,CAAC,KAAK,EAAE,MAAM,GAAG,UAAU,GAAG,WAAW,CAAC,UAAU,GAAG,WAAW,CAAC,CAqEvF"}
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,177 @@
|
|
|
1
|
+
export { flattenOverlays } from './flatten.js';
|
|
2
|
+
import { validateResults, validateBaseline, validate as autoValidate } from '@mitre/hdf-validators';
|
|
3
|
+
/**
 * Parse HDF Results document from string or bytes.
 *
 * @param input - JSON string or Uint8Array (UTF-8 encoded) to parse
 * @returns ParseResult with parsed data or error; never throws
 */
export function parseResults(input) {
    // Accept raw bytes as well as strings; decode bytes as UTF-8.
    const jsonStr = typeof input === 'string' ? input : new TextDecoder().decode(input);
    // Reject empty / whitespace-only input up front with a clear message.
    if (jsonStr.trim().length === 0) {
        return {
            success: false,
            error: 'Input is empty'
        };
    }
    // Parse JSON. Note: JSON.parse already rejects trailing garbage after
    // the top-level value (per ECMA-262), so no separate trailing-data
    // check is needed. The previous re-serialize-and-compare heuristic
    // falsely rejected valid JSON whose canonical form differs from the
    // input (e.g. 1e3 -> 1000, "\u0041" -> "A", duplicate keys).
    let data;
    try {
        data = JSON.parse(jsonStr);
    }
    catch (err) {
        return {
            success: false,
            error: `Invalid JSON: ${err instanceof Error ? err.message : String(err)}`
        };
    }
    // Validate the parsed document against the HDF Results schema.
    const validationResult = validateResults(data);
    if (!validationResult.valid) {
        return {
            success: false,
            error: `Schema validation failed: ${validationResult.getErrorMessage()}`
        };
    }
    return {
        success: true,
        data: data
    };
}
|
|
52
|
+
/**
 * Parse HDF Baseline document from string or bytes.
 *
 * @param input - JSON string or Uint8Array (UTF-8 encoded) to parse
 * @returns ParseResult with parsed data or error; never throws
 */
export function parseBaseline(input) {
    // Accept raw bytes as well as strings; decode bytes as UTF-8.
    const jsonStr = typeof input === 'string' ? input : new TextDecoder().decode(input);
    // Reject empty / whitespace-only input up front with a clear message.
    if (jsonStr.trim().length === 0) {
        return {
            success: false,
            error: 'Input is empty'
        };
    }
    // Parse JSON. Note: JSON.parse already rejects trailing garbage after
    // the top-level value (per ECMA-262), so no separate trailing-data
    // check is needed. The previous re-serialize-and-compare heuristic
    // falsely rejected valid JSON whose canonical form differs from the
    // input (e.g. 1e3 -> 1000, "\u0041" -> "A", duplicate keys).
    let data;
    try {
        data = JSON.parse(jsonStr);
    }
    catch (err) {
        return {
            success: false,
            error: `Invalid JSON: ${err instanceof Error ? err.message : String(err)}`
        };
    }
    // Validate the parsed document against the HDF Baseline schema.
    const validationResult = validateBaseline(data);
    if (!validationResult.valid) {
        return {
            success: false,
            error: `Schema validation failed: ${validationResult.getErrorMessage()}`
        };
    }
    return {
        success: true,
        data: data
    };
}
|
|
100
|
+
/**
 * Parse HDF document with auto-detection of type.
 *
 * @param input - JSON string or Uint8Array (UTF-8 encoded) to parse
 * @returns ParseResult with parsed data, type indicator, or error; never throws
 */
export function parse(input) {
    // Accept raw bytes as well as strings; decode bytes as UTF-8.
    const jsonStr = typeof input === 'string' ? input : new TextDecoder().decode(input);
    // Reject empty / whitespace-only input up front with a clear message.
    if (jsonStr.trim().length === 0) {
        return {
            success: false,
            error: 'Input is empty'
        };
    }
    // Parse JSON. Note: JSON.parse already rejects trailing garbage after
    // the top-level value (per ECMA-262), so no separate trailing-data
    // check is needed. The previous re-serialize-and-compare heuristic
    // falsely rejected valid JSON whose canonical form differs from the
    // input (e.g. 1e3 -> 1000, "\u0041" -> "A", duplicate keys).
    let data;
    try {
        data = JSON.parse(jsonStr);
    }
    catch (err) {
        return {
            success: false,
            error: `Invalid JSON: ${err instanceof Error ? err.message : String(err)}`
        };
    }
    // Auto-validate: accepts the document if it matches either schema.
    const validationResult = autoValidate(data);
    if (!validationResult.valid) {
        return {
            success: false,
            error: `Schema validation failed: ${validationResult.getErrorMessage()}`
        };
    }
    // Determine type based on structure.
    if (typeof data === 'object' && data !== null) {
        const obj = data;
        // HDF Results has 'baselines' array at root.
        if ('baselines' in obj) {
            return {
                success: true,
                data: data,
                type: 'results'
            };
        }
        // HDF Baseline has 'name' and 'requirements' at root.
        if ('name' in obj && 'requirements' in obj) {
            return {
                success: true,
                data: data,
                type: 'baseline'
            };
        }
    }
    return {
        success: false,
        error: 'Unable to determine HDF document type'
    };
}
|
|
168
|
+
/**
 * Check whether two JSON strings are equivalent modulo whitespace.
 *
 * Heuristic only: every whitespace run is stripped from both strings
 * before comparison, including whitespace inside string literals, so
 * this is not a strict structural JSON comparison.
 *
 * @param {string} a - first JSON string
 * @param {string} b - second JSON string
 * @returns {boolean} true when both strings match after whitespace removal
 */
function isWhitespaceEquivalent(a, b) {
    const strippedA = a.replace(/\s+/g, '');
    const strippedB = b.replace(/\s+/g, '');
    return strippedA === strippedB;
}
|
|
177
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["../typescript/index.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,eAAe,EAAE,MAAM,cAAc,CAAC;AAE/C,OAAO,EAAE,eAAe,EAAE,gBAAgB,EAAE,QAAQ,IAAI,YAAY,EAAE,MAAM,uBAAuB,CAAC;AAYpG;;;;GAIG;AACH,MAAM,UAAU,YAAY,CAAC,KAA0B;IACrD,yCAAyC;IACzC,MAAM,OAAO,GAAG,OAAO,KAAK,KAAK,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,WAAW,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;IAEpF,wBAAwB;IACxB,IAAI,OAAO,CAAC,IAAI,EAAE,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QAChC,OAAO;YACL,OAAO,EAAE,KAAK;YACd,KAAK,EAAE,gBAAgB;SACxB,CAAC;IACJ,CAAC;IAED,aAAa;IACb,IAAI,IAAa,CAAC;IAClB,IAAI,CAAC;QACH,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;IAC7B,CAAC;IAAC,OAAO,GAAG,EAAE,CAAC;QACb,OAAO;YACL,OAAO,EAAE,KAAK;YACd,KAAK,EAAE,iBAAiB,GAAG,YAAY,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE;SAC3E,CAAC;IACJ,CAAC;IAED,oEAAoE;IACpE,mDAAmD;IACnD,MAAM,UAAU,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC;IACxC,MAAM,YAAY,GAAG,OAAO,CAAC,IAAI,EAAE,CAAC;IACpC,IAAI,UAAU,CAAC,MAAM,KAAK,YAAY,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAAC,UAAU,EAAE,YAAY,CAAC,EAAE,CAAC;QACnG,OAAO;YACL,OAAO,EAAE,KAAK;YACd,KAAK,EAAE,4DAA4D;SACpE,CAAC;IACJ,CAAC;IAED,0BAA0B;IAC1B,MAAM,gBAAgB,GAAG,eAAe,CAAC,IAAI,CAAC,CAAC;IAC/C,IAAI,CAAC,gBAAgB,CAAC,KAAK,EAAE,CAAC;QAC5B,OAAO;YACL,OAAO,EAAE,KAAK;YACd,KAAK,EAAE,6BAA6B,gBAAgB,CAAC,eAAe,EAAE,EAAE;SACzE,CAAC;IACJ,CAAC;IAED,OAAO;QACL,OAAO,EAAE,IAAI;QACb,IAAI,EAAE,IAAkB;KACzB,CAAC;AACJ,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,aAAa,CAAC,KAA0B;IACtD,yCAAyC;IACzC,MAAM,OAAO,GAAG,OAAO,KAAK,KAAK,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,WAAW,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;IAEpF,wBAAwB;IACxB,IAAI,OAAO,CAAC,IAAI,EAAE,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QAChC,OAAO;YACL,OAAO,EAAE,KAAK;YACd,KAAK,EAAE,gBAAgB;SACxB,CAAC;IACJ,CAAC;IAED,aAAa;IACb,IAAI,IAAa,CAAC;IAClB,IAAI,CAAC;QACH,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;IAC7B,CAAC;IAAC,OAAO,GAAG,EAAE,CAAC;QACb,OAAO;YACL,OAAO,EAAE,KAAK;YACd,KAAK,EAAE,iBAAiB,GAAG,YAAY,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,C
AAC,EAAE;SAC3E,CAAC;IACJ,CAAC;IAED,6BAA6B;IAC7B,MAAM,UAAU,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC;IACxC,MAAM,YAAY,GAAG,OAAO,CAAC,IAAI,EAAE,CAAC;IACpC,IAAI,UAAU,CAAC,MAAM,KAAK,YAAY,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAAC,UAAU,EAAE,YAAY,CAAC,EAAE,CAAC;QACnG,OAAO;YACL,OAAO,EAAE,KAAK;YACd,KAAK,EAAE,4DAA4D;SACpE,CAAC;IACJ,CAAC;IAED,0BAA0B;IAC1B,MAAM,gBAAgB,GAAG,gBAAgB,CAAC,IAAI,CAAC,CAAC;IAChD,IAAI,CAAC,gBAAgB,CAAC,KAAK,EAAE,CAAC;QAC5B,OAAO;YACL,OAAO,EAAE,KAAK;YACd,KAAK,EAAE,6BAA6B,gBAAgB,CAAC,eAAe,EAAE,EAAE;SACzE,CAAC;IACJ,CAAC;IAED,OAAO;QACL,OAAO,EAAE,IAAI;QACb,IAAI,EAAE,IAAmB;KAC1B,CAAC;AACJ,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,KAAK,CAAC,KAA0B;IAC9C,yCAAyC;IACzC,MAAM,OAAO,GAAG,OAAO,KAAK,KAAK,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,WAAW,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;IAEpF,wBAAwB;IACxB,IAAI,OAAO,CAAC,IAAI,EAAE,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QAChC,OAAO;YACL,OAAO,EAAE,KAAK;YACd,KAAK,EAAE,gBAAgB;SACxB,CAAC;IACJ,CAAC;IAED,aAAa;IACb,IAAI,IAAa,CAAC;IAClB,IAAI,CAAC;QACH,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;IAC7B,CAAC;IAAC,OAAO,GAAG,EAAE,CAAC;QACb,OAAO;YACL,OAAO,EAAE,KAAK;YACd,KAAK,EAAE,iBAAiB,GAAG,YAAY,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE;SAC3E,CAAC;IACJ,CAAC;IAED,6BAA6B;IAC7B,MAAM,UAAU,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC;IACxC,MAAM,YAAY,GAAG,OAAO,CAAC,IAAI,EAAE,CAAC;IACpC,IAAI,UAAU,CAAC,MAAM,KAAK,YAAY,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAAC,UAAU,EAAE,YAAY,CAAC,EAAE,CAAC;QACnG,OAAO;YACL,OAAO,EAAE,KAAK;YACd,KAAK,EAAE,4DAA4D;SACpE,CAAC;IACJ,CAAC;IAED,gCAAgC;IAChC,MAAM,gBAAgB,GAAG,YAAY,CAAC,IAAI,CAAC,CAAC;IAC5C,IAAI,CAAC,gBAAgB,CAAC,KAAK,EAAE,CAAC;QAC5B,OAAO;YACL,OAAO,EAAE,KAAK;YACd,KAAK,EAAE,6BAA6B,gBAAgB,CAAC,eAAe,EAAE,EAAE;SACzE,CAAC;IACJ,CAAC;IAED,oCAAoC;IACpC,IAAI,OAAO,IAAI,KAAK,QAAQ,IAAI,IAAI,KAAK,IAAI,EAAE,CAAC;QAC9C,MAAM,GAAG,GAAG,IAA+B,CAAC;QAE5C,4CAA4C;QAC5C,IAAI,WAAW,IAAI,GAAG,EAAE,CAAC;YACvB,OAAO;gBACL,OAAO,EAAE,IAAI;gBACb,IAAI,EAAE,IAAkB;gBACxB,IAAI,EAAE,SAAS;aAChB,CAAC;QACJ,CAAC;QAED,qDAAqD;QACrD,IAAI,MAAM,IAAI,GAAG,IAAI,cAAc,I
AAI,GAAG,EAAE,CAAC;YAC3C,OAAO;gBACL,OAAO,EAAE,IAAI;gBACb,IAAI,EAAE,IAAmB;gBACzB,IAAI,EAAE,UAAU;aACjB,CAAC;QACJ,CAAC;IACH,CAAC;IAED,OAAO;QACL,OAAO,EAAE,KAAK;QACd,KAAK,EAAE,uCAAuC;KAC/C,CAAC;AACJ,CAAC;AAED;;;GAGG;AACH,SAAS,sBAAsB,CAAC,CAAS,EAAE,CAAS;IAClD,oCAAoC;IACpC,MAAM,mBAAmB,GAAG,CAAC,CAAS,EAAU,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC;IACzE,OAAO,mBAAmB,CAAC,CAAC,CAAC,KAAK,mBAAmB,CAAC,CAAC,CAAC,CAAC;AAC3D,CAAC"}
|
package/package.json
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@mitre/hdf-parsers",
|
|
3
|
+
"version": "2.0.0",
|
|
4
|
+
"description": "Parse and load HDF documents with validation",
|
|
5
|
+
"publishConfig": {
|
|
6
|
+
"access": "public"
|
|
7
|
+
},
|
|
8
|
+
"type": "module",
|
|
9
|
+
"main": "./dist/index.js",
|
|
10
|
+
"types": "./dist/index.d.ts",
|
|
11
|
+
"exports": {
|
|
12
|
+
".": {
|
|
13
|
+
"import": "./dist/index.js",
|
|
14
|
+
"types": "./dist/index.d.ts"
|
|
15
|
+
}
|
|
16
|
+
},
|
|
17
|
+
"files": [
|
|
18
|
+
"dist"
|
|
19
|
+
],
|
|
20
|
+
"repository": {
|
|
21
|
+
"type": "git",
|
|
22
|
+
"url": "https://github.com/mitre/hdf-libs.git",
|
|
23
|
+
"directory": "hdf-parsers"
|
|
24
|
+
},
|
|
25
|
+
"author": "MITRE Corporation",
|
|
26
|
+
"license": "Apache-2.0",
|
|
27
|
+
"dependencies": {
|
|
28
|
+
"@mitre/hdf-schema": "2.0.0",
|
|
29
|
+
"@mitre/hdf-validators": "2.0.0"
|
|
30
|
+
},
|
|
31
|
+
"engines": {
|
|
32
|
+
"node": ">=20.0.0"
|
|
33
|
+
},
|
|
34
|
+
"keywords": [
|
|
35
|
+
"hdf",
|
|
36
|
+
"heimdall",
|
|
37
|
+
"parser",
|
|
38
|
+
"json"
|
|
39
|
+
],
|
|
40
|
+
"devDependencies": {
|
|
41
|
+
"@stryker-mutator/core": "^9.5.1",
|
|
42
|
+
"@stryker-mutator/vitest-runner": "^9.5.1"
|
|
43
|
+
},
|
|
44
|
+
"scripts": {
|
|
45
|
+
"build": "pnpm clean && tsc",
|
|
46
|
+
"clean": "rimraf dist",
|
|
47
|
+
"test": "pnpm run test:ts && pnpm run test:go",
|
|
48
|
+
"test:ts": "vitest run",
|
|
49
|
+
"test:go": "cd go && go test ./...",
|
|
50
|
+
"test:watch": "vitest",
|
|
51
|
+
"test:coverage": "vitest run --coverage",
|
|
52
|
+
"type-check": "tsc --noEmit",
|
|
53
|
+
"lint": "eslint typescript",
|
|
54
|
+
"lint:fix": "eslint typescript --fix"
|
|
55
|
+
}
|
|
56
|
+
}
|