@zenoaihq/tson 1.0.2 → 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.ts CHANGED
@@ -21,17 +21,27 @@ type TSONObject = {
  * Any valid TSON value
  */
  type TSONValue = TSONPrimitive | TSONArray | TSONObject;
+ /**
+ * Schema info for nested object/array structures
+ * - schema: nested schema keys (or null if no schema)
+ * - count: array count (or null if single object)
+ */
+ interface SchemaInfo {
+ schema: string[] | null;
+ count: number | null;
+ }
  /**
  * Schema map for nested object structures
- * Maps field names to their nested schemas (or null if no schema)
+ * Maps field names to their schema info
  */
- type SchemaMap = Record<string, string[] | null>;
+ type SchemaMap = Record<string, SchemaInfo>;
  /**
  * Result of parsing a key with optional schema notation
  */
  interface KeySchema {
  keyName: string;
  schema: string[] | null;
+ count: number | null;
  }
  /**
  * Result of parsing keys with optional row count
@@ -93,6 +103,154 @@ declare function loads(s: string): TSONValue;
  */
  declare function load(filePath: string): Promise<TSONValue>;
 
+ /**
+ * TSON Prettify
+ *
+ * Format TSON strings for human readability.
+ * CSV-like format: schema on top, one row/value per line.
+ */
+ /**
+ * Format a compact TSON string for human readability.
+ *
+ * Uses CSV-like formatting:
+ * - Schema declaration on first line
+ * - Each row/value on its own line with leading delimiter
+ *
+ * @example
+ * prettify('{@id,name#2|1,Alice|2,Bob}')
+ * // Returns: '{@id,name#2\n|1,Alice\n|2,Bob}'
+ *
+ * @param tsonStr - Compact TSON string
+ * @param indent - Indentation string (default: 2 spaces)
+ * @returns Pretty-formatted TSON string
+ */
+ declare function prettify(tsonStr: string, indent?: string): string;
+ /**
+ * Remove all formatting from a TSON string.
+ *
+ * @param tsonStr - Pretty-formatted TSON string
+ * @returns Compact single-line TSON string
+ */
+ declare function minify(tsonStr: string): string;
+
+ /**
+ * TSON File Utilities
+ *
+ * Convenience functions for working with TSON and JSON files.
+ * Includes support for:
+ * - Loading JSON/TSON files
+ * - Saving to TSON with formatting options
+ *
+ * Note: These functions are Node.js only (use dynamic imports for fs).
+ */
+ type FormatOption = 'compact' | 'pretty';
+ /**
+ * Load a JSON file and return JavaScript data structure.
+ *
+ * @example
+ * const data = await loadJson('data.json');
+ *
+ * @param filepath - Path to JSON file
+ * @returns Parsed JavaScript object
+ */
+ declare function loadJson(filepath: string): Promise<unknown>;
+ /**
+ * Load a TSON file and return JavaScript data structure.
+ *
+ * @example
+ * const data = await loadTson('data.tson');
+ *
+ * @param filepath - Path to TSON file
+ * @returns Parsed JavaScript object
+ */
+ declare function loadTson(filepath: string): Promise<unknown>;
+ /**
+ * Save JavaScript data to a TSON file.
+ *
+ * @example
+ * await saveTson(data, 'output.tson');
+ * await saveTson(data, 'output.tson', { format: 'pretty' });
+ *
+ * @param data - JavaScript object to serialize
+ * @param filepath - Output file path
+ * @param options - Formatting options
+ */
+ declare function saveTson(data: unknown, filepath: string, options?: {
+ format?: FormatOption;
+ indent?: string;
+ }): Promise<void>;
+ /**
+ * Save a TSON string directly to file.
+ *
+ * @example
+ * await saveTsonString(tsonData, 'output.tson');
+ * await saveTsonString(tsonData, 'output.tson', { format: 'pretty' });
+ *
+ * @param tsonStr - TSON formatted string
+ * @param filepath - Output file path
+ * @param options - Formatting options
+ */
+ declare function saveTsonString(tsonStr: string, filepath: string, options?: {
+ format?: FormatOption | null;
+ indent?: string;
+ }): Promise<void>;
+ /**
+ * Convert a JSON file to TSON format.
+ *
+ * @example
+ * const tsonStr = await jsonToTson('data.json');
+ * await jsonToTson('data.json', 'data.tson');
+ *
+ * @param inputPath - Path to input JSON file
+ * @param outputPath - Path to output TSON file (optional)
+ * @param options - Formatting options
+ * @returns TSON string representation
+ */
+ declare function jsonToTson(inputPath: string, outputPath?: string, options?: {
+ format?: FormatOption;
+ }): Promise<string>;
+ /**
+ * Convert a TSON file to JSON format.
+ *
+ * @example
+ * const jsonStr = await tsonToJson('data.tson');
+ * await tsonToJson('data.tson', 'data.json');
+ *
+ * @param inputPath - Path to input TSON file
+ * @param outputPath - Path to output JSON file (optional)
+ * @param options - Formatting options
+ * @returns JSON string representation
+ */
+ declare function tsonToJson(inputPath: string, outputPath?: string, options?: {
+ indent?: number;
+ }): Promise<string>;
+ /**
+ * Read a TSON file as raw string (without parsing).
+ *
+ * Useful when you want to prettify/minify without deserializing.
+ *
+ * @param filepath - Path to TSON file
+ * @returns Raw TSON string
+ */
+ declare function readTsonString(filepath: string): Promise<string>;
+ /**
+ * Prettify a TSON file in place or to a new file.
+ *
+ * @param inputPath - Path to input TSON file
+ * @param outputPath - Path to output file (default: overwrite input)
+ * @param indent - Indentation string (default: 2 spaces)
+ * @returns Prettified TSON string
+ */
+ declare function prettifyFile(inputPath: string, outputPath?: string, indent?: string): Promise<string>;
+ /**
+ * Minify a TSON file in place or to a new file.
+ *
+ * @param inputPath - Path to input TSON file
+ * @param outputPath - Path to output file (default: overwrite input)
+ * @returns Minified TSON string
+ */
+ declare function minifyFile(inputPath: string, outputPath?: string): Promise<string>;
+
  /**
  * TSON Utility Functions
  *
@@ -154,20 +312,28 @@ declare function isUniformObjectArray(data: unknown): data is TSONObject[];
  */
  declare function splitByDelimiter(text: string, delimiter: string): string[];
  /**
- * Parse a key which may include nested schema notation.
+ * Parse a key which may include nested schema notation and optional array count.
+ *
+ * The array count can be specified INSIDE the parentheses to avoid ambiguity:
+ * - `key(@schema#N)` means key is an array of N objects with the given schema
+ * - `key(@schema)` means key is a single object with the given schema
  *
  * Examples:
- * "name" -> { keyName: "name", schema: null }
- * "address(@city,zip)" -> { keyName: "address", schema: ["city", "zip"] }
- * "location(@coords(@lat,lng))" -> { keyName: "location", schema: ["coords(@lat,lng)"] }
+ * "name" -> { keyName: "name", schema: null, count: null }
+ * "address(@city,zip)" -> { keyName: "address", schema: ["city", "zip"], count: null }
+ * "characters(@name,role#2)" -> { keyName: "characters", schema: ["name", "role"], count: 2 }
  */
  declare function parseKeySchema(keyString: string): KeySchema;
  /**
- * Build a mapping of field names to their nested schemas.
+ * Build a mapping of field names to their nested schemas and array counts.
  *
  * Example:
- * ["id", "address(@city,zip)"]
- * -> { id: null, address: ["city", "zip"] }
+ * ["id", "address(@city,zip)", "items(@x,y#2)"]
+ * -> {
+ * id: { schema: null, count: null },
+ * address: { schema: ["city", "zip"], count: null },
+ * items: { schema: ["x", "y"], count: 2 }
+ * }
  */
  declare function buildSchemaMap(keys: string[]): SchemaMap;
  /**
@@ -177,4 +343,4 @@ declare function buildSchemaMap(keys: string[]): SchemaMap;
  */
  declare function parseKeys(keysStr: string): ParsedKeys;
 
- export { type KeySchema, type ParsedKeys, type SchemaMap, type TSONArray, type TSONObject, type TSONPrimitive, type TSONValue, buildSchemaMap, dump, dumps, escapeString, formatPrimitive, isUniformObjectArray, load, loads, looksLikeNumber, needsQuoting, parseKeySchema, parseKeys, parsePrimitive, splitByDelimiter, unescapeString };
+ export { type KeySchema, type ParsedKeys, type SchemaInfo, type SchemaMap, type TSONArray, type TSONObject, type TSONPrimitive, type TSONValue, buildSchemaMap, dump, dumps, escapeString, formatPrimitive, isUniformObjectArray, jsonToTson, load, loadJson, loadTson, loads, looksLikeNumber, minify, minifyFile, needsQuoting, parseKeySchema, parseKeys, parsePrimitive, prettify, prettifyFile, readTsonString, saveTson, saveTsonString, splitByDelimiter, tsonToJson, unescapeString };
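
The declaration changes above boil down to one new export (SchemaInfo) and a new `count` field on KeySchema and on every SchemaMap entry. A minimal consumer-side sketch, using the inputs from the docstring examples above; the commented results follow from the new type shapes rather than from captured package output:

import { parseKeySchema, buildSchemaMap, type KeySchema, type SchemaMap } from "@zenoaihq/tson";

// `#N` inside the parentheses marks an array of N objects; KeySchema now reports it as `count`.
const single: KeySchema = parseKeySchema("address(@city,zip)");
// -> { keyName: "address", schema: ["city", "zip"], count: null }
const repeated: KeySchema = parseKeySchema("characters(@name,role#2)");
// -> { keyName: "characters", schema: ["name", "role"], count: 2 }

// Shape change for SchemaMap consumers: values are SchemaInfo objects, no longer `string[] | null`.
const map: SchemaMap = buildSchemaMap(["id", "address(@city,zip)", "items(@x,y#2)"]);
// -> { id:      { schema: null,            count: null },
//      address: { schema: ["city", "zip"], count: null },
//      items:   { schema: ["x", "y"],      count: 2 } }
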
package/dist/index.js CHANGED
@@ -190,10 +190,10 @@ function splitByDelimiter(text, delimiter) {
  function parseKeySchema(keyString) {
  const trimmed = keyString.trim();
  if (trimmed.startsWith('"') && trimmed.endsWith('"')) {
- return { keyName: unescapeString(trimmed.slice(1, -1)), schema: null };
+ return { keyName: unescapeString(trimmed.slice(1, -1)), schema: null, count: null };
  }
  if (!trimmed.includes("(")) {
- return { keyName: trimmed, schema: null };
+ return { keyName: trimmed, schema: null, count: null };
  }
  const parenIdx = trimmed.indexOf("(");
  let keyName = trimmed.slice(0, parenIdx).trim();
@@ -207,14 +207,54 @@ function parseKeySchema(keyString) {
  if (schemaStr.startsWith("@")) {
  schemaStr = schemaStr.slice(1);
  }
+ let nestedCount = null;
+ const hashIdx = findTrailingHash(schemaStr);
+ if (hashIdx !== -1) {
+ const countPart = schemaStr.slice(hashIdx + 1).trim();
+ const parsedCount = parseInt(countPart, 10);
+ if (!isNaN(parsedCount)) {
+ nestedCount = parsedCount;
+ schemaStr = schemaStr.slice(0, hashIdx).trim();
+ }
+ }
  const schemaKeys = splitByDelimiter(schemaStr, ",");
- return { keyName, schema: schemaKeys };
+ return { keyName, schema: schemaKeys, count: nestedCount };
+ }
+ function findTrailingHash(schemaStr) {
+ let inQuotes = false;
+ let depthParen = 0;
+ for (let i = schemaStr.length - 1; i >= 0; i--) {
+ const char = schemaStr[i];
+ if (char === '"') {
+ let backslashCount = 0;
+ for (let j = i - 1; j >= 0; j--) {
+ if (schemaStr[j] === "\\") {
+ backslashCount++;
+ } else {
+ break;
+ }
+ }
+ if (backslashCount % 2 === 0) {
+ inQuotes = !inQuotes;
+ }
+ }
+ if (!inQuotes) {
+ if (char === ")") {
+ depthParen++;
+ } else if (char === "(") {
+ depthParen--;
+ } else if (char === "#" && depthParen === 0) {
+ return i;
+ }
+ }
+ }
+ return -1;
  }
  function buildSchemaMap(keys) {
  const schemaMap = {};
  for (const key of keys) {
- const { keyName, schema } = parseKeySchema(key);
- schemaMap[keyName] = schema;
+ const { keyName, schema, count } = parseKeySchema(key);
+ schemaMap[keyName] = { schema, count };
  }
  return schemaMap;
  }
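
For reference, a hedged sketch of how the new trailing-count detection behaves, inferred from parseKeySchema and findTrailingHash above (the inputs are illustrative, not taken from the package's tests). findTrailingHash scans the schema string right-to-left and only accepts a `#` that sits outside double quotes and outside nested parentheses, so nested schema notation is left intact for the recursive parse:

import { parseKeySchema } from "@zenoaihq/tson";

// Top-level '#': recognised as the array count.
parseKeySchema("items(@x,y#2)");
// -> { keyName: "items", schema: ["x", "y"], count: 2 }

// A '#' inside nested parentheses is ignored at this level; only the outer '#2' counts here,
// and the nested "coords(@lat,lng#3)" entry is handled when that key is parsed in turn.
parseKeySchema("location(@coords(@lat,lng#3)#2)");
// -> { keyName: "location", schema: ["coords(@lat,lng#3)"], count: 2 }
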
@@ -329,7 +369,9 @@ function serializeTabular(arr) {
  keyStr = `"${escapeString(keyStr)}"`;
  }
  if (key in nestedSchemas) {
- const schemaKeys = nestedSchemas[key];
+ const schemaInfo = nestedSchemas[key];
+ const schemaKeys = schemaInfo.schema;
+ const nestedCount = schemaInfo.count;
  const formattedSchemaKeys = schemaKeys.map((sk) => {
  if (needsQuoting(sk)) {
  return `"${escapeString(sk)}"`;
@@ -337,7 +379,11 @@ function serializeTabular(arr) {
  return sk;
  });
  const schemaStr = formattedSchemaKeys.join(",");
- keyStr = `${keyStr}(@${schemaStr})`;
+ if (nestedCount !== null) {
+ keyStr = `${keyStr}(@${schemaStr}#${nestedCount})`;
+ } else {
+ keyStr = `${keyStr}(@${schemaStr})`;
+ }
  }
  keyParts.push(keyStr);
  }
@@ -348,7 +394,12 @@ function serializeTabular(arr) {
  for (const key of keys) {
  const value = obj[key];
  if (key in nestedSchemas) {
- valueParts.push(serializeSchematizedObject(value, nestedSchemas[key]));
+ const schemaInfo = nestedSchemas[key];
+ if (schemaInfo.count !== null) {
+ valueParts.push(serializeSchematizedArray(value, schemaInfo.schema));
+ } else {
+ valueParts.push(serializeSchematizedObject(value, schemaInfo.schema));
+ }
  } else {
  valueParts.push(serializeValue(value));
  }
@@ -362,19 +413,49 @@ function detectNestedSchemas(arr, keys) {
  const nestedSchemas = {};
  for (const key of keys) {
  const values = arr.map((obj) => obj[key]);
- if (!values.every((v) => typeof v === "object" && v !== null && !Array.isArray(v))) {
- continue;
- }
- if (values.length === 0) {
+ if (values.every((v) => typeof v === "object" && v !== null && !Array.isArray(v))) {
+ if (values.length === 0) {
+ continue;
+ }
+ const firstKeys = Object.keys(values[0]);
+ const allSame = values.slice(1).every((v) => {
+ const objKeys = Object.keys(v);
+ return objKeys.length === firstKeys.length && objKeys.every((k, i) => k === firstKeys[i]);
+ });
+ if (allSame) {
+ nestedSchemas[key] = { schema: firstKeys, count: null };
+ }
  continue;
  }
- const firstKeys = Object.keys(values[0]);
- const allSame = values.slice(1).every((v) => {
- const objKeys = Object.keys(v);
- return objKeys.length === firstKeys.length && objKeys.every((k, i) => k === firstKeys[i]);
- });
- if (allSame) {
- nestedSchemas[key] = firstKeys;
+ if (values.every((v) => Array.isArray(v))) {
+ if (values.length === 0) {
+ continue;
+ }
+ let allUniform = true;
+ let firstSchema = null;
+ let arrayCount = null;
+ for (const v of values) {
+ const arr2 = v;
+ if (!isUniformObjectArray(arr2)) {
+ allUniform = false;
+ break;
+ }
+ if (arr2.length === 0) {
+ continue;
+ }
+ const vSchema = Object.keys(arr2[0]);
+ const vCount = arr2.length;
+ if (firstSchema === null) {
+ firstSchema = vSchema;
+ arrayCount = vCount;
+ } else if (vSchema.length !== firstSchema.length || !vSchema.every((k, i) => k === firstSchema[i]) || vCount !== arrayCount) {
+ allUniform = false;
+ break;
+ }
+ }
+ if (allUniform && firstSchema !== null) {
+ nestedSchemas[key] = { schema: firstSchema, count: arrayCount };
+ }
  }
  }
  return nestedSchemas;
@@ -390,6 +471,16 @@ function serializeSchematizedObject(obj, schema) {
  }
  return "{" + valueParts.join(",") + "}";
  }
+ function serializeSchematizedArray(arr, schema) {
+ if (arr.length === 0) {
+ return "[]";
+ }
+ const objParts = [];
+ for (const obj of arr) {
+ objParts.push(serializeSchematizedObject(obj, schema));
+ }
+ return "[" + objParts.join(",") + "]";
+ }
 
  // src/deserializer.ts
  function loads(s) {
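
The serializer changes above are what let a field that is a uniform array of objects (same keys and the same length in every row) be hoisted into the schema. A sketch of the effect, with the emitted string reconstructed from serializeTabular/detectNestedSchemas rather than captured from the package, so treat it as approximate:

import { dumps, loads } from "@zenoaihq/tson";

const party = [
  { id: 1, members: [{ name: "Ana", role: "tank" }, { name: "Bo", role: "heal" }] },
  { id: 2, members: [{ name: "Cy", role: "dps" }, { name: "Di", role: "dps" }] },
];

// detectNestedSchemas records { schema: ["name", "role"], count: 2 } for `members`,
// so the key is emitted as members(@name,role#2) and each value as a bracketed list
// of schematized objects, roughly:
//   {@id,members(@name,role#2)#2|1,[{Ana,tank},{Bo,heal}]|2,[{Cy,dps},{Di,dps}]}
const tson = dumps(party);

// On the way back, parseSchematizedArray enforces the declared count, so the
// round trip should reproduce the original structure.
const roundTripped = loads(tson);
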
@@ -442,8 +533,14 @@ function parseKeyedObject(content) {
  }
  const keysPart = parts[0];
  const { keys, count } = parseKeys(keysPart);
+ if (keys.length === 0 && count !== null) {
+ return Array.from({ length: count }, () => ({}));
+ }
  const schemaMap = buildSchemaMap(keys);
  const fieldNames = keys.map((k) => parseKeySchema(k).keyName);
+ if (keys.length === 0) {
+ return {};
+ }
  if (parts.length === 1) {
  throw new Error("Invalid object format: missing values");
  }
@@ -466,9 +563,11 @@ function parseSingleObject(fieldNames, valuesStr, schemaMap) {
  for (let i = 0; i < fieldNames.length; i++) {
  const fieldName = fieldNames[i];
  const valueStr = values[i];
- const schema = schemaMap[fieldName];
+ const schemaInfo = schemaMap[fieldName];
+ const schema = schemaInfo?.schema;
+ const count = schemaInfo?.count;
  if (schema) {
- obj[fieldName] = parseSchematizedValue(valueStr, schema);
+ obj[fieldName] = parseSchematizedValue(valueStr, schema, count);
  } else {
  obj[fieldName] = parseValue(valueStr);
  }
@@ -491,7 +590,36 @@ function parseTabularArray(fieldNames, rowParts, schemaMap, expectedCount) {
  }
  return result;
  }
- function parseSchematizedValue(valueStr, schema) {
+ function parseSchematizedValue(valueStr, schema, expectedCount = null) {
+ const trimmed = valueStr.trim();
+ if (expectedCount !== null) {
+ return parseSchematizedArray(trimmed, schema, expectedCount);
+ }
+ return parseSingleSchematizedObject(trimmed, schema);
+ }
+ function parseSchematizedArray(valueStr, schema, expectedCount) {
+ const trimmed = valueStr.trim();
+ if (!trimmed.startsWith("[") || !trimmed.endsWith("]")) {
+ throw new Error(`Schematized array must be wrapped in brackets: ${valueStr}`);
+ }
+ const content = trimmed.slice(1, -1).trim();
+ if (content.length === 0) {
+ return [];
+ }
+ const parts = splitByDelimiter(content, ",");
+ if (parts.length !== expectedCount) {
+ throw new Error(
+ `Array count mismatch: expected ${expectedCount} objects but got ${parts.length}`
+ );
+ }
+ const result = [];
+ for (const part of parts) {
+ const obj = parseSingleSchematizedObject(part.trim(), schema);
+ result.push(obj);
+ }
+ return result;
+ }
+ function parseSingleSchematizedObject(valueStr, schema) {
  const trimmed = valueStr.trim();
  if (!trimmed.startsWith("{") || !trimmed.endsWith("}")) {
  throw new Error(`Schematized value must be wrapped in braces: ${valueStr}`);
@@ -511,12 +639,14 @@ function parseSchematizedValue(valueStr, schema) {
  const obj = {};
  for (let i = 0; i < fieldNames.length; i++) {
  const fieldName = fieldNames[i];
- const valueStr2 = values[i];
- const nestedSchema = nestedSchemaMap[fieldName];
+ const valStr = values[i];
+ const schemaInfo = nestedSchemaMap[fieldName];
+ const nestedSchema = schemaInfo?.schema;
+ const nestedCount = schemaInfo?.count;
  if (nestedSchema) {
- obj[fieldName] = parseSchematizedValue(valueStr2, nestedSchema);
+ obj[fieldName] = parseSchematizedValue(valStr, nestedSchema, nestedCount);
  } else {
- obj[fieldName] = parseValue(valueStr2);
+ obj[fieldName] = parseValue(valStr);
  }
  }
  return obj;
@@ -540,6 +670,203 @@ function parseArray(text) {
  return result;
  }
 
- export { buildSchemaMap, dump, dumps, escapeString, formatPrimitive, isUniformObjectArray, load, loads, looksLikeNumber, needsQuoting, parseKeySchema, parseKeys, parsePrimitive, splitByDelimiter, unescapeString };
+ // src/prettify.ts
+ function prettify(tsonStr, indent = "  ") {
+ if (!tsonStr || !tsonStr.trim()) {
+ return tsonStr;
+ }
+ return prettifyValue(tsonStr.trim(), indent, 0);
+ }
+ function minify(tsonStr) {
+ if (!tsonStr) {
+ return tsonStr;
+ }
+ const lines = tsonStr.split("\n");
+ const result = [];
+ for (const line of lines) {
+ result.push(line.trim());
+ }
+ return result.join("");
+ }
+ function prettifyValue(text, indent, depth) {
+ text = text.trim();
+ if (!text) {
+ return text;
+ }
+ if (text.startsWith("{@")) {
+ return prettifyObject(text, indent, depth);
+ }
+ if (text.startsWith("{")) {
+ return text;
+ }
+ if (text.startsWith("[")) {
+ return prettifyArray(text, indent, depth);
+ }
+ return text;
+ }
+ function prettifyObject(text, indent, depth) {
+ if (!text.startsWith("{@") || !text.endsWith("}")) {
+ return text;
+ }
+ const content = text.slice(2, -1);
+ if (!content) {
+ return "{@}";
+ }
+ const parts = splitTopLevel(content, "|");
+ if (parts.length === 0) {
+ return text;
+ }
+ const schema = parts[0];
+ const valueRows = parts.slice(1);
+ const nextIndent = indent.repeat(depth + 1);
+ if (valueRows.length === 0) {
+ return `{@${schema}}`;
+ }
+ const lines = [`{@${schema}`];
+ for (const row of valueRows) {
+ lines.push(`${nextIndent}|${row}`);
+ }
+ lines[lines.length - 1] = lines[lines.length - 1] + "}";
+ return lines.join("\n");
+ }
+ function prettifyArray(text, indent, depth) {
+ if (!text.startsWith("[") || !text.endsWith("]")) {
+ return text;
+ }
+ const content = text.slice(1, -1);
+ if (!content) {
+ return "[]";
+ }
+ const elements = splitTopLevel(content, ",");
+ if (elements.length <= 1) {
+ return text;
+ }
+ const nextIndent = indent.repeat(depth + 1);
+ const lines = [];
+ for (let i = 0; i < elements.length; i++) {
+ const prettified = prettifyValue(elements[i].trim(), indent, depth + 1);
+ if (i === 0) {
+ lines.push(`[${prettified}`);
+ } else {
+ lines.push(`${nextIndent},${prettified}`);
+ }
+ }
+ lines[lines.length - 1] = lines[lines.length - 1] + "]";
+ return lines.join("\n");
+ }
+ function splitTopLevel(text, delimiter) {
+ const parts = [];
+ const current = [];
+ let depth = 0;
+ let inString = false;
+ let escapeNext = false;
+ for (let i = 0; i < text.length; i++) {
+ const char = text[i];
+ if (escapeNext) {
+ current.push(char);
+ escapeNext = false;
+ continue;
+ }
+ if (char === "\\") {
+ current.push(char);
+ escapeNext = true;
+ continue;
+ }
+ if (char === '"') {
+ inString = !inString;
+ current.push(char);
+ continue;
+ }
+ if (inString) {
+ current.push(char);
+ continue;
+ }
+ if (char === "{" || char === "[") {
+ depth++;
+ current.push(char);
+ } else if (char === "}" || char === "]") {
+ depth--;
+ current.push(char);
+ } else if (char === delimiter && depth === 0) {
+ parts.push(current.join(""));
+ current.length = 0;
+ } else {
+ current.push(char);
+ }
+ }
+ if (current.length > 0) {
+ parts.push(current.join(""));
+ }
+ return parts;
+ }
+
+ // src/fileio.ts
+ async function loadJson(filepath) {
+ const fs = await import('fs/promises');
+ const content = await fs.readFile(filepath, "utf-8");
+ return JSON.parse(content);
+ }
+ async function loadTson(filepath) {
+ const fs = await import('fs/promises');
+ const content = await fs.readFile(filepath, "utf-8");
+ return loads(content);
+ }
+ async function saveTson(data, filepath, options = {}) {
+ const fs = await import('fs/promises');
+ let tsonStr = dumps(data);
+ if (options.format === "pretty") {
+ tsonStr = prettify(tsonStr, options.indent || "  ");
+ }
+ await fs.writeFile(filepath, tsonStr, "utf-8");
+ }
+ async function saveTsonString(tsonStr, filepath, options = {}) {
+ const fs = await import('fs/promises');
+ if (options.format === "pretty") {
+ tsonStr = prettify(tsonStr, options.indent || "  ");
+ } else if (options.format === "compact") {
+ tsonStr = minify(tsonStr);
+ }
+ await fs.writeFile(filepath, tsonStr, "utf-8");
+ }
+ async function jsonToTson(inputPath, outputPath, options = {}) {
+ const data = await loadJson(inputPath);
+ let tsonStr = dumps(data);
+ if (options.format === "pretty") {
+ tsonStr = prettify(tsonStr);
+ }
+ if (outputPath) {
+ await saveTsonString(tsonStr, outputPath);
+ }
+ return tsonStr;
+ }
+ async function tsonToJson(inputPath, outputPath, options = {}) {
+ const data = await loadTson(inputPath);
+ const jsonStr = JSON.stringify(data, null, options.indent ?? 2);
+ if (outputPath) {
+ const fs = await import('fs/promises');
+ await fs.writeFile(outputPath, jsonStr, "utf-8");
+ }
+ return jsonStr;
+ }
+ async function readTsonString(filepath) {
+ const fs = await import('fs/promises');
+ return fs.readFile(filepath, "utf-8");
+ }
+ async function prettifyFile(inputPath, outputPath, indent = "  ") {
+ const tsonStr = await readTsonString(inputPath);
+ const prettyStr = prettify(tsonStr, indent);
+ const target = outputPath || inputPath;
+ await saveTsonString(prettyStr, target);
+ return prettyStr;
+ }
+ async function minifyFile(inputPath, outputPath) {
+ const tsonStr = await readTsonString(inputPath);
+ const compactStr = minify(tsonStr);
+ const target = outputPath || inputPath;
+ await saveTsonString(compactStr, target);
+ return compactStr;
+ }
+
+ export { buildSchemaMap, dump, dumps, escapeString, formatPrimitive, isUniformObjectArray, jsonToTson, load, loadJson, loadTson, loads, looksLikeNumber, minify, minifyFile, needsQuoting, parseKeySchema, parseKeys, parsePrimitive, prettify, prettifyFile, readTsonString, saveTson, saveTsonString, splitByDelimiter, tsonToJson, unescapeString };
  //# sourceMappingURL=index.js.map
  //# sourceMappingURL=index.js.map
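
The new formatting and file helpers exported above are thin wrappers; a short usage sketch based on their docstrings (file paths are illustrative, and the file helpers are Node.js-only because they import fs/promises dynamically):

import { prettify, minify, saveTson, loadTson, jsonToTson } from "@zenoaihq/tson";

// prettify puts the schema on the first line and each row on its own line;
// minify trims every line and re-joins them, so the pair round-trips.
const compact = '{@id,name#2|1,Alice|2,Bob}';
const pretty = prettify(compact);
console.log(minify(pretty) === compact); // true

// In an ES module, top-level await works for the async file helpers.
await saveTson({ id: 1, name: "Alice" }, "out.tson", { format: "pretty" });
const data = await loadTson("out.tson");
const tsonStr = await jsonToTson("data.json", "data.tson", { format: "pretty" });
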