@agoric/internal 0.4.0-upgrade-14-dev-0169c7e.0 → 0.4.0-upgrade-16-dev-07b0130.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (86)
  1. package/README.md +7 -2
  2. package/exported.js +2 -0
  3. package/package.json +32 -16
  4. package/src/batched-deliver.d.ts +9 -6
  5. package/src/batched-deliver.d.ts.map +1 -1
  6. package/src/batched-deliver.js +9 -3
  7. package/src/callback.d.ts +23 -16
  8. package/src/callback.d.ts.map +1 -1
  9. package/src/callback.js +35 -39
  10. package/src/chain-storage-paths.d.ts +2 -3
  11. package/src/chain-storage-paths.d.ts.map +1 -1
  12. package/src/chain-storage-paths.js +2 -3
  13. package/src/config.d.ts +23 -12
  14. package/src/config.d.ts.map +1 -1
  15. package/src/config.js +22 -10
  16. package/src/debug.d.ts +1 -1
  17. package/src/index.d.ts +3 -0
  18. package/src/index.js +7 -1
  19. package/src/install-ses-debug.d.ts +2 -0
  20. package/src/install-ses-debug.d.ts.map +1 -0
  21. package/src/install-ses-debug.js +6 -0
  22. package/src/lib-chainStorage.d.ts +42 -52
  23. package/src/lib-chainStorage.d.ts.map +1 -1
  24. package/src/lib-chainStorage.js +82 -74
  25. package/src/lib-nodejs/engine-gc.d.ts +3 -0
  26. package/src/lib-nodejs/engine-gc.d.ts.map +1 -0
  27. package/src/lib-nodejs/engine-gc.js +22 -0
  28. package/src/lib-nodejs/gc-and-finalize.d.ts +2 -0
  29. package/src/lib-nodejs/gc-and-finalize.d.ts.map +1 -0
  30. package/src/lib-nodejs/gc-and-finalize.js +91 -0
  31. package/src/lib-nodejs/spawnSubprocessWorker.d.ts +15 -0
  32. package/src/lib-nodejs/spawnSubprocessWorker.d.ts.map +1 -0
  33. package/src/lib-nodejs/spawnSubprocessWorker.js +89 -0
  34. package/src/lib-nodejs/waitUntilQuiescent.d.ts +2 -0
  35. package/src/lib-nodejs/waitUntilQuiescent.d.ts.map +1 -0
  36. package/src/lib-nodejs/waitUntilQuiescent.js +18 -0
  37. package/src/lib-nodejs/worker-protocol.d.ts +4 -0
  38. package/src/lib-nodejs/worker-protocol.d.ts.map +1 -0
  39. package/src/lib-nodejs/worker-protocol.js +54 -0
  40. package/src/magic-cookie-test-only.js +2 -2
  41. package/src/marshal.d.ts +20 -0
  42. package/src/marshal.d.ts.map +1 -0
  43. package/src/marshal.js +138 -0
  44. package/src/method-tools.d.ts +1 -0
  45. package/src/method-tools.d.ts.map +1 -1
  46. package/src/method-tools.js +29 -16
  47. package/src/netstring.d.ts +24 -0
  48. package/src/netstring.d.ts.map +1 -0
  49. package/src/netstring.js +124 -0
  50. package/src/node/buffer-line-transform.d.ts +17 -13
  51. package/src/node/buffer-line-transform.d.ts.map +1 -1
  52. package/src/node/buffer-line-transform.js +11 -8
  53. package/src/node/fs-stream.d.ts.map +1 -1
  54. package/src/node/fs-stream.js +2 -3
  55. package/src/node/utils.d.ts +9 -0
  56. package/src/node/utils.d.ts.map +1 -0
  57. package/src/node/utils.js +46 -0
  58. package/src/priority-senders.d.ts +1 -1
  59. package/src/priority-senders.d.ts.map +1 -1
  60. package/src/priority-senders.js +7 -3
  61. package/src/queue.d.ts +1 -1
  62. package/src/queue.d.ts.map +1 -1
  63. package/src/queue.js +7 -8
  64. package/src/scratch.d.ts +1 -1
  65. package/src/scratch.d.ts.map +1 -1
  66. package/src/storage-test-utils.d.ts +43 -81
  67. package/src/storage-test-utils.d.ts.map +1 -1
  68. package/src/storage-test-utils.js +103 -40
  69. package/src/tagged.d.ts +155 -0
  70. package/src/testing-utils.d.ts.map +1 -1
  71. package/src/testing-utils.js +7 -5
  72. package/src/tokens.d.ts +34 -0
  73. package/src/tokens.d.ts.map +1 -0
  74. package/src/tokens.js +35 -0
  75. package/src/typeGuards.d.ts +2 -0
  76. package/src/typeGuards.d.ts.map +1 -1
  77. package/src/typeGuards.js +8 -0
  78. package/src/types.d.ts +46 -0
  79. package/src/types.js +2 -0
  80. package/src/upgrade-api.d.ts +13 -4
  81. package/src/upgrade-api.d.ts.map +1 -1
  82. package/src/upgrade-api.js +26 -18
  83. package/src/utils.d.ts +26 -31
  84. package/src/utils.d.ts.map +1 -1
  85. package/src/utils.js +53 -227
  86. package/CHANGELOG.md +0 -137
package/src/lib-nodejs/spawnSubprocessWorker.js ADDED
@@ -0,0 +1,89 @@
+ // this file is loaded by the controller, in the start compartment
+ import { spawn } from 'child_process';
+ import { makePromiseKit } from '@endo/promise-kit';
+ import { NonNullish } from '@agoric/assert';
+ import { arrayEncoderStream, arrayDecoderStream } from './worker-protocol.js';
+ import {
+   netstringEncoderStream,
+   netstringDecoderStream,
+ } from '../netstring.js';
+
+ // Start a subprocess from a given executable, and arrange a bidirectional
+ // message channel with a "supervisor" within that process. Return a {
+ // toChild, fromChild } pair of Streams which accept/emit hardened Arrays of
+ // JSON-serializable data.
+
+ // eslint-disable-next-line no-unused-vars
+ function parentLog(first, ...args) {
+   // console.error(`--parent: ${first}`, ...args);
+ }
+
+ /** @typedef {import('child_process').IOType} IOType */
+ /** @typedef {import('stream').Writable} Writable */
+
+ // we send on fd3, and receive on fd4. We pass fd1/2 (stdout/err) through, so
+ // console log/err from the child shows up normally. We don't use Node's
+ // built-in serialization feature ('ipc') because the child process won't
+ // always be Node.
+ /** @type {IOType[]} */
+ const stdio = harden(['inherit', 'inherit', 'inherit', 'pipe', 'pipe']);
+
+ export function startSubprocessWorker(
+   execPath,
+   procArgs = [],
+   { netstringMaxChunkSize = undefined } = {},
+ ) {
+   const proc = spawn(execPath, procArgs, { stdio });
+
+   const toChild = arrayEncoderStream();
+   toChild
+     .pipe(netstringEncoderStream())
+     .pipe(/** @type {Writable} */ (proc.stdio[3]));
+   // proc.stdio[4].setEncoding('utf-8');
+   const fromChild = NonNullish(proc.stdio[4])
+     .pipe(netstringDecoderStream(netstringMaxChunkSize))
+     .pipe(arrayDecoderStream());
+
+   // fromChild.addListener('data', data => parentLog(`fd4 data`, data));
+   // toChild.write('hello child');
+
+   const pk = makePromiseKit();
+
+   proc.once('exit', code => {
+     parentLog('child exit', code);
+     pk.resolve(code);
+   });
+   proc.once('error', e => {
+     parentLog('child error', e);
+     pk.reject(e);
+   });
+   parentLog(`waiting on child`);
+
+   function terminate() {
+     proc.kill();
+   }
+
+   // the Transform objects don't like being hardened, so we wrap the methods
+   // that get used
+   /* @type {typeof fromChild} */
+   const wrappedFromChild = {
+     on: (...args) =>
+       fromChild.on(
+         .../** @type {Parameters<(typeof fromChild)['on']>} */ (args),
+       ),
+   };
+   /* @type {typeof toChild} */
+   const wrappedToChild = {
+     write: (...args) =>
+       toChild.write(
+         .../** @type {Parameters<(typeof toChild)['write']>} */ (args),
+       ),
+   };
+
+   return harden({
+     fromChild: wrappedFromChild,
+     toChild: wrappedToChild,
+     terminate,
+     done: pk.promise,
+   });
+ }
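For orientation, a minimal usage sketch (not from the package): it assumes a SES environment where `harden` is installed, that the deep-import path below is reachable, and a hypothetical `supervisor.js` child that speaks the fd3/fd4 netstring protocol.

  import { startSubprocessWorker } from '@agoric/internal/src/lib-nodejs/spawnSubprocessWorker.js';

  const { toChild, fromChild, terminate, done } = startSubprocessWorker(
    process.execPath,
    ['supervisor.js'], // hypothetical child entry point
    { netstringMaxChunkSize: 10_000_000 },
  );
  fromChild.on('data', msg => console.log('from child', msg)); // msg is a decoded Array
  toChild.write(harden(['ping'])); // hardened, JSON-serializable Array
  // later: terminate(); await done;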
package/src/lib-nodejs/waitUntilQuiescent.d.ts ADDED
@@ -0,0 +1,2 @@
+ export function waitUntilQuiescent(): Promise<void>;
+ //# sourceMappingURL=waitUntilQuiescent.d.ts.map
package/src/lib-nodejs/waitUntilQuiescent.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"waitUntilQuiescent.d.ts","sourceRoot":"","sources":["waitUntilQuiescent.js"],"names":[],"mappings":"AAMA,oDAWC"}
package/src/lib-nodejs/waitUntilQuiescent.js ADDED
@@ -0,0 +1,18 @@
+ /* global setImmediate */
+ import { makePromiseKit } from '@endo/promise-kit';
+
+ // This can only be imported from the Start Compartment, where 'setImmediate'
+ // is available.
+
+ export function waitUntilQuiescent() {
+   // the delivery might cause some number of (native) Promises to be
+   // created and resolved, so we use the IO queue to detect when the
+   // Promise queue is empty. The IO queue (setImmediate and setTimeout) is
+   // lower-priority than the Promise queue on browsers and Node 11, but on
+   // Node 10 it is higher. So this trick requires Node 11.
+   // https://jsblog.insiderattack.net/new-changes-to-timers-and-microtasks-from-node-v11-0-0-and-above-68d112743eb3
+   /** @type {import('@endo/promise-kit').PromiseKit<void>} */
+   const { promise: queueEmptyP, resolve } = makePromiseKit();
+   setImmediate(() => resolve());
+   return queueEmptyP;
+ }
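A sketch of the intended pattern, with a hypothetical `dispatch(delivery)` standing in for whatever work schedules promise reactions:

  import { waitUntilQuiescent } from '@agoric/internal/src/lib-nodejs/waitUntilQuiescent.js';

  async function deliverAndSettle(dispatch, delivery) {
    dispatch(delivery); // may fan out into many promise callbacks
    await waitUntilQuiescent(); // setImmediate fires only after the promise queue drains
    // post-delivery state can be inspected here
  }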
package/src/lib-nodejs/worker-protocol.d.ts ADDED
@@ -0,0 +1,4 @@
+ export function arrayEncoderStream(): Transform;
+ export function arrayDecoderStream(): Transform;
+ import { Transform } from 'stream';
+ //# sourceMappingURL=worker-protocol.d.ts.map
package/src/lib-nodejs/worker-protocol.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"worker-protocol.d.ts","sourceRoot":"","sources":["worker-protocol.js"],"names":[],"mappings":"AAMA,gDAqBC;AAED,gDAwBC;0BApDyB,QAAQ"}
package/src/lib-nodejs/worker-protocol.js ADDED
@@ -0,0 +1,54 @@
+ /* global Buffer */
+ import { Transform } from 'stream';
+
+ // Transform objects which convert from hardened Arrays of JSON-serializable
+ // data into Buffers suitable for netstring conversion.
+
+ export function arrayEncoderStream() {
+   /**
+    * @param {any} object
+    * @param {BufferEncoding} encoding
+    * @param {any} callback
+    * @this {{ push: (b: Buffer) => void }}
+    */
+   function transform(object, encoding, callback) {
+     if (!Array.isArray(object)) {
+       throw Error('stream requires Arrays');
+     }
+     let err;
+     try {
+       this.push(Buffer.from(JSON.stringify(object)));
+     } catch (e) {
+       err = e;
+     }
+     callback(err);
+   }
+   // Array in, Buffer out, hence writableObjectMode
+   return new Transform({ transform, writableObjectMode: true });
+ }
+
+ export function arrayDecoderStream() {
+   /**
+    * @param {Buffer} buf
+    * @param {BufferEncoding} encoding
+    * @param {any} callback
+    * @this {{ push: (b: Buffer) => void }}
+    */
+   function transform(buf, encoding, callback) {
+     let err;
+     try {
+       if (!Buffer.isBuffer(buf)) {
+         throw Error('stream expects Buffers');
+       }
+       this.push(JSON.parse(buf.toString()));
+     } catch (e) {
+       err = e;
+     }
+     // this Transform is a one-to-one conversion of Buffer into Array, so we
+     // always consume the input each time we're called
+     callback(err);
+   }
+
+   // Buffer in, Array out, hence readableObjectMode
+   return new Transform({ transform, readableObjectMode: true });
+ }
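To see how these Transforms compose with the netstring codec added further below, here is a minimal in-process round trip; this is only an illustration, since the SDK itself runs the two halves in separate processes across fd3/fd4:

  import { arrayEncoderStream, arrayDecoderStream } from './worker-protocol.js';
  import { netstringEncoderStream, netstringDecoderStream } from '../netstring.js';

  const sender = arrayEncoderStream(); // Array -> JSON Buffer
  const receiver = arrayDecoderStream(); // JSON Buffer -> Array
  sender
    .pipe(netstringEncoderStream()) // Buffer -> `${length}:${payload},`
    .pipe(netstringDecoderStream()) // one Buffer back per complete netstring
    .pipe(receiver);
  receiver.on('data', arr => console.log(arr)); // ['hello', 42]
  sender.write(['hello', 42]);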
package/src/magic-cookie-test-only.js
@@ -3,8 +3,8 @@
  const cookie = harden({});

  /**
- * Facilitate static analysis to prevent
- * demo/test facilities from being bundled in production.
+ * Facilitate static analysis to prevent demo/test facilities from being bundled
+ * in production.
  */
  export const notForProductionUse = () => {
    return cookie;
package/src/marshal.d.ts ADDED
@@ -0,0 +1,20 @@
+ export function makeBoardRemote({ boardId, iface }: {
+     boardId: string | null;
+     iface?: string;
+ }): BoardRemote;
+ export function slotToBoardRemote(boardId: string, iface: string): BoardRemote;
+ export function boardSlottingMarshaller(slotToVal?: ((slot: string, iface: string) => any) | undefined): Omit<import("@endo/marshal").Marshal<string | null>, "serialize" | "unserialize">;
+ export function unmarshalFromVstorage(data: Map<string, string>, key: string, fromCapData: ReturnType<typeof import("@endo/marshal").makeMarshal>["fromCapData"], index: number): any;
+ export function makeHistoryReviver(entries: [string, string][], slotToVal?: ((slot: string, iface?: string) => any) | undefined): {
+     getItem: (key: string) => any;
+     children: (prefix: string) => string[];
+     has: (k: string) => boolean;
+ };
+ /**
+  * Should be a union with Remotable, but that's `any`, making this type
+  * meaningless
+  */
+ export type BoardRemote = {
+     getBoardId: () => string | null;
+ };
+ //# sourceMappingURL=marshal.d.ts.map
package/src/marshal.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"marshal.d.ts","sourceRoot":"","sources":["marshal.js"],"names":[],"mappings":"AAkBO,oDAHI;IAAE,OAAO,EAAE,MAAM,GAAG,IAAI,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CAAE,GACxC,WAAW,CAKvB;AAMM,2CAHI,MAAM,SACN,MAAM,eAGoB;AAqB9B,4DANW,MAAM,SAAS,MAAM,KAAK,GAAG,gBAClC,IAAI,CAChB,OAAW,eAAe,EAAE,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC,EAClD,WAAe,GAAG,aAAa,CAC5B,CAMH;AA6BM,4CATI,GAAG,CAAC,MAAM,EAAE,MAAM,CAAC,OACnB,MAAM,eACN,UAAU,CACpB,cAAkB,eAAe,EAAE,WAAW,CAC3C,CAAC,aAAa,CAAC,SACR,MAAM,GAEJ,GAAG,CAwBf;AASM,4CAHI,CAAC,MAAM,EAAE,MAAM,CAAC,EAAE,sBACX,MAAM,UAAU,MAAM,KAAK,GAAG;mBAQlC,MAAM;uBAEN,MAAM;aAYN,MAAM;EAInB;;;;;0BA9HY;IAAE,UAAU,EAAE,MAAM,MAAM,GAAG,IAAI,CAAA;CAAE"}
package/src/marshal.js ADDED
@@ -0,0 +1,138 @@
+ // @ts-check
+ import { Far } from '@endo/far';
+ import { makeMarshal } from '@endo/marshal';
+ import { isStreamCell } from './lib-chainStorage.js';
+
+ const { Fail } = assert;
+
+ /**
+  * Should be a union with Remotable, but that's `any`, making this type
+  * meaningless
+  *
+  * @typedef {{ getBoardId: () => string | null }} BoardRemote
+  */
+
+ /**
+  * @param {{ boardId: string | null; iface?: string }} slotInfo
+  * @returns {BoardRemote}
+  */
+ export const makeBoardRemote = ({ boardId, iface }) => {
+   const nonalleged = iface ? iface.replace(/^Alleged: /, '') : '';
+   return Far(`BoardRemote${nonalleged}`, { getBoardId: () => boardId });
+ };
+
+ /**
+  * @param {string} boardId
+  * @param {string} iface
+  */
+ export const slotToBoardRemote = (boardId, iface) =>
+   makeBoardRemote({ boardId, iface });
+
+ /** @param {BoardRemote | object} val */
+ const boardValToSlot = val => {
+   if ('getBoardId' in val) {
+     return val.getBoardId();
+   }
+   throw Fail`unknown obj in boardSlottingMarshaller.valToSlot ${val}`;
+ };
+
+ /**
+  * A marshaller which can serialize getBoardId() -bearing Remotables. This
+  * allows the caller to pick their slots. The deserializer is configurable: the
+  * default cannot handle Remotable-bearing data.
+  *
+  * @param {(slot: string, iface: string) => any} [slotToVal]
+  * @returns {Omit<
+  *   import('@endo/marshal').Marshal<string | null>,
+  *   'serialize' | 'unserialize'
+  * >}
+  */
+ export const boardSlottingMarshaller = (slotToVal = undefined) => {
+   return makeMarshal(boardValToSlot, slotToVal, {
+     serializeBodyFormat: 'smallcaps',
+   });
+ };
+
+ // TODO: Consolidate with `insistCapData` functions from swingset-liveslots,
+ // swingset-xsnap-supervisor, etc.
+ /**
+  * @param {unknown} data
+  * @returns {asserts data is import('@endo/marshal').CapData<string>}
+  */
+ const assertCapData = data => {
+   assert.typeof(data, 'object');
+   assert(data);
+   assert.typeof(data.body, 'string');
+   assert(Array.isArray(data.slots));
+   // XXX check that the .slots array elements are actually strings
+ };
+ harden(assertCapData);
+
+ /**
+  * Read and unmarshal a value from a map representation of vstorage data
+  *
+  * @param {Map<string, string>} data
+  * @param {string} key
+  * @param {ReturnType<
+  *   typeof import('@endo/marshal').makeMarshal
+  * >['fromCapData']} fromCapData
+  * @param {number} index index of the desired value in a deserialized stream
+  *   cell
+  * @returns {any}
+  */
+ export const unmarshalFromVstorage = (data, key, fromCapData, index) => {
+   const serialized = data.get(key) || Fail`no data for ${key}`;
+   assert.typeof(serialized, 'string');
+   assert.typeof(index, 'number');
+
+   const streamCell = JSON.parse(serialized);
+   if (!isStreamCell(streamCell)) {
+     throw Fail`not a StreamCell: ${streamCell}`;
+   }
+
+   const { values } = streamCell;
+   values.length > 0 || Fail`no StreamCell values: ${streamCell}`;
+
+   const marshalled = values.at(index);
+   assert.typeof(marshalled, 'string');
+
+   /** @type {import('@endo/marshal').CapData<string>} */
+   const capData = harden(JSON.parse(marshalled));
+   assertCapData(capData);
+
+   const unmarshalled = fromCapData(capData);
+   return unmarshalled;
+ };
+ harden(unmarshalFromVstorage);
+
+ /**
+  * Provide access to object graphs serialized in vstorage.
+  *
+  * @param {[string, string][]} entries
+  * @param {(slot: string, iface?: string) => any} [slotToVal]
+  */
+ export const makeHistoryReviver = (entries, slotToVal = undefined) => {
+   const board = boardSlottingMarshaller(slotToVal);
+   const vsMap = new Map(entries);
+   /** @param {...unknown} args } */
+   const fromCapData = (...args) =>
+     Reflect.apply(board.fromCapData, board, args);
+   /** @param {string} key } */
+   const getItem = key => unmarshalFromVstorage(vsMap, key, fromCapData, -1);
+   /** @param {string} prefix } */
+   const children = prefix => {
+     prefix.endsWith('.') || Fail`prefix must end with '.'`;
+     return harden([
+       ...new Set(
+         entries
+           .map(([k, _]) => k)
+           .filter(k => k.startsWith(prefix))
+           .map(k => k.slice(prefix.length).split('.')[0]),
+       ),
+     ]);
+   };
+   /** @param {string} k } */
+   const has = k => vsMap.get(k) !== undefined;
+
+   return harden({ getItem, children, has });
+ };
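A rough usage sketch of the reviver path, assuming a SES environment (`harden` available) and the deep-import path; the vstorage key and the fabricated StreamCell below are purely illustrative:

  import {
    boardSlottingMarshaller,
    makeHistoryReviver,
    slotToBoardRemote,
  } from '@agoric/internal/src/marshal.js';

  // Fabricate one entry shaped like vstorage data: a StreamCell whose single
  // value is a CapData string produced by the board-slotting marshaller.
  const m = boardSlottingMarshaller(slotToBoardRemote);
  const capData = m.toCapData(harden({ answer: 42 }));
  const cell = JSON.stringify({ blockHeight: '123', values: [JSON.stringify(capData)] });

  const reviver = makeHistoryReviver([['published.example.thing', cell]], slotToBoardRemote);
  reviver.children('published.example.'); // ['thing']
  reviver.getItem('published.example.thing'); // { answer: 42 } (latest value in the cell)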
package/src/method-tools.d.ts
@@ -1,3 +1,4 @@
  export function getMethodNames<K extends PropertyKey>(val: Record<K, any>): K[];
+ export function getStringMethodNames<K extends PropertyKey>(val: Record<K, any>): string[];
  export function bindAllMethods<T extends Record<PropertyKey, any>>(obj: T): T;
  //# sourceMappingURL=method-tools.d.ts.map
package/src/method-tools.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"method-tools.d.ts","sourceRoot":"","sources":["method-tools.js"],"names":[],"mappings":"AAyCO,gFAoBN;AAgCM,8EAeJ"}
+ {"version":3,"file":"method-tools.d.ts","sourceRoot":"","sources":["method-tools.js"],"names":[],"mappings":"AA0CO,+BAJoB,CAAC,SAAd,WAAY,OACf,MAAM,CAAC,CAAC,EAAE,GAAG,CAAC,GACZ,CAAC,EAAE,CAsBf;AAUM,qCAJoB,CAAC,SAAd,WAAY,OACf,MAAM,CAAC,CAAC,EAAE,GAAG,CAAC,GACZ,MAAM,EAAE,CAKlB;AA+BI,+BAJiC,CAAC,SAA3B,MAAM,CAAC,WAAW,EAAE,GAAG,CAAE,OAC5B,CAAC,GACC,CAAC,CAiBX"}
package/src/method-tools.js
@@ -2,7 +2,8 @@
  import { isObject } from '@endo/marshal';

  /**
- * @file method-tools use dynamic property lookup, which is not Jessie-compatible
+ * @file method-tools use dynamic property lookup, which is not
+ * Jessie-compatible
  */

  const { getPrototypeOf, create, fromEntries, getOwnPropertyDescriptors } =
@@ -12,8 +13,8 @@ const { ownKeys, apply } = Reflect;
  /**
  * Prioritize symbols as earlier than strings.
  *
- * @param {string|symbol} a
- * @param {string|symbol} b
+ * @param {string | symbol} a
+ * @param {string | symbol} b
  * @returns {-1 | 0 | 1}
  */
  const compareStringified = (a, b) => {
@@ -62,25 +63,37 @@ export const getMethodNames = val => {
  };
  harden(getMethodNames);

+ /**
+ * The subset of `getMethodNames` containing only string names, without symbols
+ *
+ * @template {PropertyKey} K
+ * @param {Record<K, any>} val
+ * @returns {string[]}
+ */
+ export const getStringMethodNames = val =>
+   /** @type {string[]} */ (
+     getMethodNames(val).filter(name => typeof name === 'string')
+   );
+
  /**
  * TODO This function exists only to ease the
  * https://github.com/Agoric/agoric-sdk/pull/5970 transition, from all methods
- * being own properties to methods being inherited from a common prototype.
- * This transition breaks two patterns used in prior code: autobinding,
- * and enumerating methods by enumerating own properties. For both, the
- * preferred repairs are
- * * autobinding: Replace, for example,
- * `foo(obj.method)` with `foo(arg => `obj.method(arg))`. IOW, stop relying
- * on expressions like `obj.method` to extract a method still bound to the
- * state of `obj` because, for virtual and durable objects,
- * they no longer will after #5970.
- * * method enumeration: Replace, for example
- * `Reflect.ownKeys(obj)` with `getMethodNames(obj)`.
+ * being own properties to methods being inherited from a common prototype. This
+ * transition breaks two patterns used in prior code: autobinding, and
+ * enumerating methods by enumerating own properties. For both, the preferred
+ * repairs are
+ *
+ * - autobinding: Replace, for example, `foo(obj.method)` with `foo(arg =>
+ *   `obj.method(arg))`. IOW, stop relying on expressions like `obj.method`to
+ *   extract a method still bound to the state of`obj` because, for virtual and
+ *   durable objects, they no longer will after #5970.
+ * - method enumeration: Replace, for example `Reflect.ownKeys(obj)` with
+ *   `getMethodNames(obj)`.
  *
  * Once all problematic cases have been converted in this manner, this
  * `bindAllMethods` hack can and TODO should be deleted. However, we currently
- * have no reliable static way to track down and fix all autobinding sites.
- * For those objects that have not yet been fully repaired by the above two
+ * have no reliable static way to track down and fix all autobinding sites. For
+ * those objects that have not yet been fully repaired by the above two
  * techniques, `bindAllMethods` creates an object that acts much like the
  * pre-#5970 objects, with all their methods as instance-bound own properties.
  * It does this by making a new object inheriting from `obj` where the new
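For example (illustrative names, in a SES environment where `harden` is available), the new helper simply drops the symbol keys from the `getMethodNames` result:

  const greeter = harden({
    hello: () => 'hi',
    [Symbol.for('extra')]: () => 'also a method',
  });
  getMethodNames(greeter); // [Symbol.for('extra'), 'hello'] -- symbols sort before strings
  getStringMethodNames(greeter); // ['hello']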
package/src/netstring.d.ts ADDED
@@ -0,0 +1,24 @@
+ /**
+  * @param {Buffer} data
+  * @returns {Buffer} netstring-wrapped
+  */
+ export function encode(data: Buffer): Buffer;
+ export function netstringEncoderStream(): Transform;
+ /**
+  * @param {Buffer} data containing zero or more netstrings and maybe some
+  *   leftover bytes
+  * @param {number} [optMaxChunkSize]
+  * @returns {{ leftover: Buffer; payloads: Buffer[] }} zero or more decoded
+  *   Buffers, one per netstring,
+  */
+ export function decode(data: Buffer, optMaxChunkSize?: number | undefined): {
+     leftover: Buffer;
+     payloads: Buffer[];
+ };
+ /**
+  * @param {number} [optMaxChunkSize]
+  * @returns {Transform}
+  */
+ export function netstringDecoderStream(optMaxChunkSize?: number | undefined): Transform;
+ import { Transform } from 'stream';
+ //# sourceMappingURL=netstring.d.ts.map
package/src/netstring.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"netstring.d.ts","sourceRoot":"","sources":["netstring.js"],"names":[],"mappings":"AASA;;;GAGG;AACH,6BAHW,MAAM,GACJ,MAAM,CAMlB;AAGD,oDAsBC;AAED;;;;;;GAMG;AACH,6BANW,MAAM,yCAGJ;IAAE,QAAQ,EAAE,MAAM,CAAC;IAAC,QAAQ,EAAE,MAAM,EAAE,CAAA;CAAE,CAmCpD;AAED;;;GAGG;AAEH,8EAHa,SAAS,CAoCrB;0BAvHyB,QAAQ"}
package/src/netstring.js ADDED
@@ -0,0 +1,124 @@
+ /* global Buffer */
+ import { Fail } from '@agoric/assert';
+
+ // adapted from 'netstring-stream', https://github.com/tlivings/netstring-stream/
+ import { Transform } from 'stream';
+
+ const COLON = 58;
+ const COMMA = 44;
+
+ /**
+  * @param {Buffer} data
+  * @returns {Buffer} netstring-wrapped
+  */
+ export function encode(data) {
+   const prefix = Buffer.from(`${data.length}:`);
+   const suffix = Buffer.from(',');
+   return Buffer.concat([prefix, data, suffix]);
+ }
+
+ // input is a sequence of strings, output is a byte pipe
+ export function netstringEncoderStream() {
+   /**
+    * @param {Buffer} chunk
+    * @param {BufferEncoding} encoding
+    * @param {any} callback
+    * @this {{ push: (b: Buffer) => void }}
+    */
+   function transform(chunk, encoding, callback) {
+     if (!Buffer.isBuffer(chunk)) {
+       throw Error('stream requires Buffers');
+     }
+     let err;
+     try {
+       this.push(encode(chunk));
+     } catch (e) {
+       err = e;
+     }
+     callback(err);
+   }
+   // (maybe empty) Buffer in, Buffer out. We use writableObjectMode to
+   // indicate that empty input buffers are important
+   return new Transform({ transform, writableObjectMode: true });
+ }
+
+ /**
+  * @param {Buffer} data containing zero or more netstrings and maybe some
+  *   leftover bytes
+  * @param {number} [optMaxChunkSize]
+  * @returns {{ leftover: Buffer; payloads: Buffer[] }} zero or more decoded
+  *   Buffers, one per netstring,
+  */
+ export function decode(data, optMaxChunkSize) {
+   // TODO: it would be more efficient to accumulate pending data in an array,
+   // rather than doing a concat each time
+   let start = 0;
+   const payloads = [];
+
+   for (;;) {
+     const colon = data.indexOf(COLON, start);
+     if (colon === -1) {
+       break; // still waiting for `${LENGTH}:`
+     }
+     const sizeString = data.toString('utf-8', start, colon);
+     const size = parseInt(sizeString, 10);
+     if (!(size > -1)) {
+       // reject NaN, all negative numbers
+       Fail`unparsable size ${sizeString}, should be integer`;
+     }
+     if (optMaxChunkSize) {
+       size <= optMaxChunkSize ||
+         Fail`size ${size} exceeds limit of ${optMaxChunkSize}`;
+     }
+     if (data.length < colon + 1 + size + 1) {
+       break; // still waiting for `${DATA}.`
+     }
+     data[colon + 1 + size] === COMMA ||
+       Fail`malformed netstring: not terminated by comma`;
+     payloads.push(data.subarray(colon + 1, colon + 1 + size));
+     start = colon + 1 + size + 1;
+   }
+
+   const leftover = data.subarray(start);
+   return { leftover, payloads };
+ }
+
+ /**
+  * @param {number} [optMaxChunkSize]
+  * @returns {Transform}
+  */
+ // input is a byte pipe, output is a sequence of Buffers
+ export function netstringDecoderStream(optMaxChunkSize) {
+   let buffered = Buffer.from('');
+   /**
+    * @param {Buffer} chunk
+    * @param {BufferEncoding} encoding
+    * @param {any} callback
+    * @this {{ push: (b: Buffer) => void }}
+    */
+   function transform(chunk, encoding, callback) {
+     if (!Buffer.isBuffer(chunk)) {
+       throw Error('stream requires Buffers');
+     }
+     buffered = Buffer.concat([buffered, chunk]);
+     let err;
+     try {
+       const { leftover, payloads } = decode(buffered, optMaxChunkSize);
+       buffered = leftover;
+       for (let i = 0; i < payloads.length; i += 1) {
+         this.push(payloads[i]);
+       }
+     } catch (e) {
+       err = e;
+     }
+     // we buffer all data internally, to accommodate netstrings larger than
+     // Transform's default buffer size, and callback() indicates that we've
+     // consumed the input
+     callback(err);
+   }
+
+   // Buffer in, Buffer out, except that each output Buffer is precious, even
+   // empty ones, and without readableObjectMode the Stream will discard empty
+   // buffers
+   return new Transform({ transform, readableObjectMode: true });
+ }
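The wire format handled above is classic netstring framing, `${length}:${payload},`. A small standalone check (Node Buffers only; the deep-import path is assumed):

  import { encode, decode } from '@agoric/internal/src/netstring.js';

  const wire = encode(Buffer.from('hello')); // '5:hello,'
  const { payloads, leftover } = decode(Buffer.concat([wire, Buffer.from('3:ab')]));
  payloads[0].toString(); // 'hello'
  leftover.toString(); // '3:ab' -- incomplete frame, kept for the next call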
package/src/node/buffer-line-transform.d.ts
@@ -1,41 +1,45 @@
- /// <reference types="node" />
  /**
  * @typedef {object} BufferLineTransformOptions
- * @property {Buffer | string | number} [break] line break matcher for Buffer.indexOf() (default: 10)
- * @property {BufferEncoding} [breakEncoding] if break is a string, the encoding to use
+ * @property {Buffer | string | number} [break] line break matcher for
+ *   Buffer.indexOf() (default: 10)
+ * @property {BufferEncoding} [breakEncoding] if break is a string, the encoding
+ *   to use
  */
  export default class BufferLineTransform extends Transform {
      /**
-      * The BufferLineTransform is reading String or Buffer content from a Readable stream
-      * and writing each line as a Buffer in object mode
+      * The BufferLineTransform is reading String or Buffer content from a Readable
+      * stream and writing each line as a Buffer in object mode
       *
-      * @param {import('node:stream').TransformOptions & BufferLineTransformOptions} [options]
+      * @param {import('node:stream').TransformOptions &
+      *   BufferLineTransformOptions} [options]
       */
      constructor(options?: (import("stream").TransformOptions & BufferLineTransformOptions) | undefined);
      _breakValue: string | number | Buffer;
      _breakEncoding: BufferEncoding | undefined;
      _breakLength: number;
-     /** @type {Array<Buffer>} */
-     _chunks: Array<Buffer>;
+     /** @type {Buffer[]} */
+     _chunks: Buffer[];
      /**
-      * @override
       * @param {any} chunk
       * @param {BufferEncoding | 'buffer'} encoding
       * @param {import('node:stream').TransformCallback} cb
+      * @override
       */
-     override _transform(chunk: any, encoding: BufferEncoding | 'buffer', cb: import('node:stream').TransformCallback): void;
+     override _transform(chunk: any, encoding: BufferEncoding | "buffer", cb: import("node:stream").TransformCallback): void;
      /** @param {Buffer} line */
      _writeItem(line: Buffer): void;
  }
  export type BufferLineTransformOptions = {
      /**
-      * line break matcher for Buffer.indexOf() (default: 10)
+      * line break matcher for
+      * Buffer.indexOf() (default: 10)
       */
      break?: string | number | Buffer | undefined;
      /**
-      * if break is a string, the encoding to use
+      * if break is a string, the encoding
+      * to use
       */
      breakEncoding?: BufferEncoding | undefined;
  };
- import { Transform } from "stream";
+ import { Transform } from 'node:stream';
  //# sourceMappingURL=buffer-line-transform.d.ts.map
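A usage sketch for the class these declarations describe, assuming the deep-import path and an illustrative file name:

  import { createReadStream } from 'node:fs';
  import BufferLineTransform from '@agoric/internal/src/node/buffer-line-transform.js';

  const lines = createReadStream('chain.slog').pipe(new BufferLineTransform());
  lines.on('data', line => {
    // one 'data' event per line, delivered as a Buffer (break defaults to 0x0a)
    process.stdout.write(`${line.toString()}\n`);
  });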
package/src/node/buffer-line-transform.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"buffer-line-transform.d.ts","sourceRoot":"","sources":["buffer-line-transform.js"],"names":[],"mappings":";AAKA;;;;GAIG;AAEH;IACE;;;;;OAKG;IACH,oGAuBC;IAhBC,sCAAmC;IACnC,2CAAmC;IAWnC,qBAA+B;IAE/B,4BAA4B;IAC5B,SADW,MAAM,MAAM,CAAC,CACP;IAGnB;;;;;OAKG;IACH,2BAJW,GAAG,YACH,cAAc,GAAG,QAAQ,MACzB,OAAO,aAAa,EAAE,iBAAiB,QAgDjD;IAeD,2BAA2B;IAC3B,iBADY,MAAM,QAOjB;CACF"}
+ {"version":3,"file":"buffer-line-transform.d.ts","sourceRoot":"","sources":["buffer-line-transform.js"],"names":[],"mappings":"AAKA;;;;;;GAMG;AAEH;IACE;;;;;;OAMG;IACH,oGAuBC;IAhBC,sCAAmC;IACnC,2CAAmC;IAWnC,qBAA+B;IAE/B,uBAAuB;IACvB,SADW,MAAM,EAAE,CACF;IAGnB;;;;;OAKG;IACH,2BALW,GAAG,YACH,cAAc,GAAG,QAAQ,MACzB,OAAO,aAAa,EAAE,iBAAiB,QAiDjD;IAeD,2BAA2B;IAC3B,iBADY,MAAM,QAOjB;CACF;;;;;;;;;;;;;0BAtHyB,aAAa"}