jsii-rosetta 1.78.1 → 1.80.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/lib/json.d.ts ADDED
@@ -0,0 +1,28 @@
1
+ /// <reference types="node" />
2
+ import { Readable, Writable } from 'node:stream';
3
+ /**
4
+ * Asynchronously parses a single JSON value from the provided reader. The JSON
5
+ * text might be longer than what could fit in a single string value, since the
6
+ * processing is done in a streaming manner.
7
+ *
8
+ * Prefer using JSON.parse if you know the entire JSON text is always small
9
+ * enough to fit in a string value, as this would have better performance.
10
+ *
11
+ * @param reader the reader from which to consume JSON text.
12
+ *
13
+  * @returns the parsed JSON value as a JavaScript value.
14
+ */
15
+ export declare function parse(reader: Readable): Promise<any>;
16
+ /**
17
+ * Serializes a possibly large object into the provided writer. The object may
18
+ * be large enough that the JSON text cannot fit in a single string value.
19
+ *
20
+ * Prefer using JSON.stringify if you know the object is always small enough
21
+ * that the JSON text can fit in a single string value, as this would have
22
+ * better performance.
23
+ *
24
+ * @param value the value to be serialized.
25
+  * @param writer the writer to which to write the JSON text.
26
+ */
27
+ export declare function stringify(value: any, writer: Writable): Promise<void>;
28
+ //# sourceMappingURL=json.d.ts.map
package/lib/json.js ADDED
@@ -0,0 +1,49 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.stringify = exports.parse = void 0;
4
+ const node_stream_1 = require("node:stream");
5
+ const node_util_1 = require("node:util");
6
+ const stream_json_1 = require("stream-json");
7
+ const Assembler = require("stream-json/Assembler");
8
+ const Disassembler_1 = require("stream-json/Disassembler");
9
+ const Stringer_1 = require("stream-json/Stringer");
10
+ // NB: In node 15+, there is a node:stream.promises object that has this built-in.
11
+ const asyncPipeline = (0, node_util_1.promisify)(node_stream_1.pipeline);
12
+ /**
13
+ * Asynchronously parses a single JSON value from the provided reader. The JSON
14
+ * text might be longer than what could fit in a single string value, since the
15
+ * processing is done in a streaming manner.
16
+ *
17
+ * Prefer using JSON.parse if you know the entire JSON text is always small
18
+ * enough to fit in a string value, as this would have better performance.
19
+ *
20
+ * @param reader the reader from which to consume JSON text.
21
+ *
22
+  * @returns the parsed JSON value as a JavaScript value.
23
+ */
24
+ async function parse(reader) {
25
+ const assembler = new Assembler();
26
+ const jsonParser = (0, stream_json_1.parser)();
27
+ assembler.connectTo(jsonParser);
28
+ return asyncPipeline(reader, jsonParser).then(() => assembler.current);
29
+ }
30
+ exports.parse = parse;
31
+ /**
32
+ * Serializes a possibly large object into the provided writer. The object may
33
+ * be large enough that the JSON text cannot fit in a single string value.
34
+ *
35
+ * Prefer using JSON.stringify if you know the object is always small enough
36
+ * that the JSON text can fit in a single string value, as this would have
37
+ * better performance.
38
+ *
39
+ * @param value the value to be serialized.
40
+  * @param writer the writer to which to write the JSON text.
41
+ */
42
+ async function stringify(value, writer) {
43
+ const reader = new node_stream_1.Readable({ objectMode: true });
44
+ reader.push(value);
45
+ reader.push(null);
46
+ return asyncPipeline(reader, (0, Disassembler_1.disassembler)(), (0, Stringer_1.stringer)(), writer);
47
+ }
48
+ exports.stringify = stringify;
49
+ //# sourceMappingURL=json.js.map
@@ -4,6 +4,7 @@ exports.TranslatedSnippet = exports.LanguageTablet = exports.CURRENT_SCHEMA_VERS
4
4
  const fs_1 = require("fs");
5
5
  const path = require("path");
6
6
  const zlib = require("zlib");
7
+ const json_1 = require("../json");
7
8
  const logging = require("../logging");
8
9
  const snippet_1 = require("../snippet");
9
10
  const util_1 = require("../util");
@@ -124,14 +125,17 @@ class LanguageTablet {
124
125
  * compressed and decompress accordingly.
125
126
  */
126
127
  async load(filename) {
127
- let data = await fs_1.promises.readFile(filename);
128
- // Gzip objects start with 1f 8b 08
129
- if (data[0] === 0x1f && data[1] === 0x8b && data[2] === 0x08) {
130
- // This is a gz object, so we decompress it now...
131
- data = zlib.gunzipSync(data);
128
+ let readStream;
129
+ if (await isGzipped(filename)) {
130
+ const gunzip = zlib.createGunzip();
131
+ (0, fs_1.createReadStream)(filename).pipe(gunzip, { end: true });
132
+ readStream = gunzip;
132
133
  this.compressedSource = true;
133
134
  }
134
- const obj = JSON.parse(data.toString('utf-8'));
135
+ else {
136
+ readStream = (0, fs_1.createReadStream)(filename);
137
+ }
138
+ const obj = await (0, json_1.parse)(readStream);
135
139
  if (!obj.toolVersion || !obj.snippets) {
136
140
  throw new Error(`File '${filename}' does not seem to be a Tablet file`);
137
141
  }
@@ -154,11 +158,10 @@ class LanguageTablet {
154
158
  */
155
159
  async save(filename, compress = false) {
156
160
  await fs_1.promises.mkdir(path.dirname(filename), { recursive: true });
157
- let schema = Buffer.from(JSON.stringify(this.toSchema(), null, 2));
158
- if (compress) {
159
- schema = zlib.gzipSync(schema);
160
- }
161
- await fs_1.promises.writeFile(filename, schema);
161
+ const writeStream = (0, fs_1.createWriteStream)(filename, { flags: 'w' });
162
+ const gzip = compress ? zlib.createGzip() : undefined;
163
+ gzip?.pipe(writeStream, { end: true });
164
+ return (0, json_1.stringify)(this.toSchema(), gzip ?? writeStream);
162
165
  }
163
166
  toSchema() {
164
167
  return {
@@ -262,4 +265,15 @@ class TranslatedSnippet {
262
265
  }
263
266
  }
264
267
  exports.TranslatedSnippet = TranslatedSnippet;
268
+ async function isGzipped(filename) {
269
+ const openFile = await fs_1.promises.open(filename, 'r');
270
+ try {
271
+ // Assumes that we can always read 3 bytes if there's that many in the file...
272
+ const { bytesRead, buffer } = await openFile.read(Buffer.alloc(4), 0, 3, 0);
273
+ return bytesRead >= 3 && buffer[0] === 0x1f && buffer[1] === 0x8b && buffer[2] === 0x08;
274
+ }
275
+ finally {
276
+ await openFile.close();
277
+ }
278
+ }
265
279
  //# sourceMappingURL=tablets.js.map
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "jsii-rosetta",
3
- "version": "1.78.1",
3
+ "version": "1.80.0",
4
4
  "description": "Translate TypeScript code snippets to other languages",
5
5
  "main": "lib/index.js",
6
6
  "bin": {
@@ -16,27 +16,29 @@
16
16
  "package": "package-js"
17
17
  },
18
18
  "devDependencies": {
19
- "@types/commonmark": "^0.27.5",
19
+ "@types/commonmark": "^0.27.6",
20
20
  "@types/mock-fs": "^4.13.1",
21
- "@types/workerpool": "^6.1.1",
21
+ "@types/stream-json": "^1.7.3",
22
+ "@types/workerpool": "^6.4.0",
22
23
  "@types/semver": "^7.3.13",
23
- "jsii-build-tools": "1.78.1",
24
+ "jsii-build-tools": "1.80.0",
24
25
  "jsii-calc": "3.20.120",
25
26
  "memory-streams": "^0.1.3",
26
27
  "mock-fs": "^5.2.0"
27
28
  },
28
29
  "dependencies": {
29
- "@jsii/check-node": "1.78.1",
30
- "@jsii/spec": "1.78.1",
30
+ "@jsii/check-node": "1.80.0",
31
+ "@jsii/spec": "1.80.0",
31
32
  "commonmark": "^0.30.0",
32
33
  "typescript": "~3.9.10",
33
34
  "@xmldom/xmldom": "^0.8.6",
34
35
  "workerpool": "^6.4.0",
35
36
  "yargs": "^16.2.0",
37
+ "stream-json": "^1.7.5",
36
38
  "semver": "^7.3.8",
37
39
  "semver-intersect": "^1.4.0",
38
40
  "fast-glob": "^3.2.12",
39
- "jsii": "1.78.1"
41
+ "jsii": "1.80.0"
40
42
  },
41
43
  "license": "Apache-2.0",
42
44
  "author": {
@@ -0,0 +1,2 @@
1
+ export {};
2
+ //# sourceMappingURL=json.test.d.ts.map
@@ -0,0 +1,96 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ const node_buffer_1 = require("node:buffer");
4
+ const node_stream_1 = require("node:stream");
5
+ const json_1 = require("../lib/json");
6
+ describe(json_1.parse, () => {
7
+ test('small value', async () => {
8
+ const value = { foo: 'bar', baz: 123 };
9
+ const jsonText = JSON.stringify(value);
10
+ const readable = new node_stream_1.PassThrough();
11
+ readable.end(jsonText);
12
+ expect(await (0, json_1.parse)(readable)).toEqual(value);
13
+ });
14
+ test('value is too large to fit in a single string', async () => {
15
+ // We'll leverage the fact JSON can contain multiple definitions of the same key multiple times...
16
+ const expected = { foo: 'bar', baz: 123, bool: true, null: null, long: 'X'.repeat(102400) };
17
+ const readable = node_stream_1.Readable.from((function* () {
18
+ const chunks = Object.entries(expected).map(([key, value]) => ` ${JSON.stringify(key)}: ${JSON.stringify(value)}`);
19
+ yield '{\n';
20
+ let counter = 2;
21
+ let emitComma = false;
22
+ while (counter < node_buffer_1.kStringMaxLength) {
23
+ for (const chunk of chunks) {
24
+ if (emitComma) {
25
+ yield ',\n';
26
+ counter += 2;
27
+ }
28
+ yield chunk;
29
+ counter += chunk.length;
30
+ emitComma = true;
31
+ }
32
+ }
33
+ yield '\n}\n';
34
+ })());
35
+ const actual = await (0, json_1.parse)(readable);
36
+ expect(actual).toEqual(expected);
37
+ });
38
+ test('invalid JSON input', () => {
39
+ const readable = new node_stream_1.PassThrough();
40
+ readable.end('{"bad": "JSON",');
41
+ return expect((0, json_1.parse)(readable)).rejects.toThrowErrorMatchingInlineSnapshot(`"Parser cannot parse input: expected an object key"`);
42
+ });
43
+ });
44
+ describe(json_1.stringify, () => {
45
+ test('small value', async () => {
46
+ const value = { foo: 'bar', baz: 123 };
47
+ const jsonText = JSON.stringify(value);
48
+ const chunks = new Array();
49
+ const writable = new node_stream_1.Writable({
50
+ write: (chunk, _encoding, callback) => {
51
+ chunks.push(Buffer.from(chunk));
52
+ callback(null);
53
+ },
54
+ });
55
+ await (0, json_1.stringify)(value, writable);
56
+ expect(Buffer.concat(chunks).toString('utf-8')).toBe(jsonText);
57
+ });
58
+ test('value too large for JSON text to fit in a string', async () => {
59
+ const value = { key: 'X'.repeat(node_buffer_1.kStringMaxLength) };
60
+ const chunks = new Array();
61
+ const writable = new node_stream_1.Writable({
62
+ write: (chunk, _encoding, callback) => {
63
+ chunks.push(Buffer.from(chunk));
64
+ callback(null);
65
+ },
66
+ });
67
+ await (0, json_1.stringify)(value, writable);
68
+ expect(headBytes(chunks, 10).toString('utf-8')).toBe('{"key":"XX');
69
+ expect(tailBytes(chunks, 10).toString('utf-8')).toBe('XXXXXXXX"}');
70
+ });
71
+ });
72
+ function headBytes(chunks, count) {
73
+ if (chunks.length === 0) {
74
+ return Buffer.alloc(0);
75
+ }
76
+ const [head, ...tail] = chunks;
77
+ const headSlice = head.slice(0, count);
78
+ if (headSlice.length === count) {
79
+ return headSlice;
80
+ }
81
+ const tailSlice = headBytes(tail, count - headSlice.length);
82
+ return Buffer.concat([headSlice, tailSlice]);
83
+ }
84
+ function tailBytes(chunks, count) {
85
+ if (chunks.length === 0) {
86
+ return Buffer.alloc(0);
87
+ }
88
+ const tail = chunks[chunks.length - 1];
89
+ const tailSlice = tail.slice(Math.max(0, tail.length - count), tail.length);
90
+ if (tailSlice.length === count) {
91
+ return tailSlice;
92
+ }
93
+ const headSlice = tailBytes(chunks.slice(0, chunks.length - 1), count - tailSlice.length);
94
+ return Buffer.concat([headSlice, tailSlice]);
95
+ }
96
+ //# sourceMappingURL=json.test.js.map