ya-struct 0.0.1 → 0.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.eslintrc +9 -9
- package/.github/workflows/CI.yml +1 -1
- package/.prettierrc +5 -0
- package/lib/builder.js +44 -86
- package/lib/index.js +94 -85
- package/lib/marshaller.js +88 -0
- package/lib/refbuf.js +1 -1
- package/lib/types/basic.js +200 -0
- package/lib/types/ctypes.js +160 -0
- package/package.json +7 -9
- package/test/abi.js +124 -64
- package/test/basic.js +51 -41
- package/test/ctypes.js +166 -0
- package/test/ref.js +15 -11
- package/.jsbeautifyrc +0 -24
package/.eslintrc
CHANGED
@@ -1,20 +1,18 @@
 {
-    "parser": "@babel/eslint-parser",
     "parserOptions": {
-        "ecmaVersion":
+        "ecmaVersion": 2021,
         "sourceType": "module",
-        "requireConfigFile": false
-        "babelOptions": {
-            "plugins": ["@babel/syntax-top-level-await"]
-        }
+        "requireConfigFile": false
     },
     "env": {
         "node": true,
-        "
+        "es2020": true
     },
     "rules": {
         "global-require": "off",
-        "quote-props": "warn",
+        "quote-props": ["warn", "consistent-as-needed"],
+        "comma-dangle": ["error", {"arrays": "always-multiline", "objects": "always-multiline", "functions": "never"}],
+        "id-length": ["error", {"min": 3, "properties": "always", "exceptions": ["i", "os", "fs"]}],
         "quotes": ["error", "double", { "allowTemplateLiterals": true }],
         "no-plusplus": "error",
         "no-nested-ternary": "error",

@@ -25,6 +23,8 @@
         "no-delete-var": "error",
         "no-param-reassign": "error",
         "no-return-assign": "error",
+        "no-import-assign": "error",
+        "no-multi-assign": "error",
         "keyword-spacing": "error",
         "max-len": [ "warn", { "code": 140 } ],
         "max-params": ["error", 4],

@@ -51,6 +51,7 @@
         "no-new-object": "error",
         "no-new-wrappers": "error",
         "no-useless-concat": "error",
+        "no-unused-vars": ["error", {"ignoreRestSiblings": true}],
         "array-bracket-newline": ["error", "consistent"],
         "func-names": ["error", "never"],
         "func-style": ["error", "expression", { "allowArrowFunctions": true }],

@@ -58,7 +59,6 @@
         "arrow-parens": "error",
         "no-confusing-arrow": "error",
         "prefer-const": "error",
-        "prefer-destructuring": ["error", {"object": true, "array": false}],
         "rest-spread-spacing": ["error", "never"],
         "template-curly-spacing": ["error", "never"],
         "prefer-rest-params": "error",
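The lint configuration drops the Babel parser and tightens several stylistic rules. A minimal illustrative snippet, not taken from the package, that satisfies the newly added id-length, comma-dangle, and quote-props settings:

    // identifiers need at least 3 characters ("i", "os", "fs" are excepted),
    // multiline arrays and objects need trailing commas,
    // and property quoting must stay consistent (unquoted where possible)
    const sizes = [
        1,
        2,
        4,
    ];

    const lookup = {
        small: sizes[0],
        large: sizes[2],
    };

    for (let idx = 0; idx < sizes.length; idx += 1) {
        console.log(lookup.small + sizes[idx]);
    }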
package/.github/workflows/CI.yml
CHANGED
package/.prettierrc
ADDED
package/lib/builder.js
CHANGED
@@ -1,104 +1,62 @@
-
+import ctypes from "./types/ctypes.js";
+import basicTypes from "./types/basic.js";
+import marshallerFactory from "./marshaller.js";

-const
-
-    "
-
-
-    "UInt32BE": 4,
-    "BigUInt64LE": 8,
-    "BigUInt64BE": 8,
-
-    "Int8": 1,
-    "Int16LE": 2,
-    "Int16BE": 2,
-    "Int32LE": 4,
-    "Int32BE": 4,
-    "BigInt64LE": 8,
-    "BigInt64BE": 8
-};
-
-const dataModelMaps = {
-    "LP64": {
-        "Pointer": "BigUInt64"
-    }
-};
-
-const createFieldsViaBuilder = ({ builder, "abi": { endianness, dataModel } = {} }) => {
-    let fields = {};
+const createFieldsViaBuilder = ({
+    builder,
+    abi: { endianness = "LE", dataModel = "LP64", compiler = "gcc" } = {},
+}) => {
+    let fieldDefinitions = {};

     let currentOffset = 0;

-    const
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        } else {
-            buffer[`write${type}`](Number(value), offset);
-        }
-        },
-        "bufferType": type,
-        offset,
-        size
-    }
-    });
+    const fieldBuilderForAbi = ({ abi }) => {
+        let result = {};
+        Object.keys(abi).forEach((typeName) => {
+            result = {
+                ...result,
+                [typeName]: (name) => {
+                    const def = abi[typeName]({ offset: currentOffset });
+
+                    fieldDefinitions = {
+                        ...fieldDefinitions,
+                        [name]: def,
+                    };
+
+                    currentOffset = def.offset + def.size;
+                },
+            };
+        });

-
-    };
+        return result;
     };

-
-
-        fieldObject = Object.assign({}, fieldObject, {
-            [type]: standardField(type)
-        });
-    });
+    const basicTypesAbi = basicTypes.abi({ dataModel, compiler, endianness });
+    const cTypesAbi = ctypes.abi({ dataModel, compiler, endianness });

-
-
-
-
-        "BigInt64",
-        "UInt16",
-        "UInt32",
-        "BigUInt64"
-    ].forEach((endianType) => {
-        fieldObject = Object.assign({}, fieldObject, {
-            [endianType]: standardField(`${endianType}${endianness}`)
-        });
-    });
-    }
-
-    const dataModelMap = dataModelMaps[dataModel] || {};
-    Object.keys(dataModelMap).forEach((key) => {
-        fieldObject = Object.assign({}, fieldObject, {
-            [key]: fieldObject[dataModelMap[key]]
-        });
-    });
+    const field = {
+        ...fieldBuilderForAbi({ abi: basicTypesAbi }),
+        CTypes: fieldBuilderForAbi({ abi: cTypesAbi }),
+    };

-    builder({
+    builder({ field });

     const size = currentOffset;

+    const { marshal, unmarshal } = marshallerFactory.create({
+        fieldDefinitions,
+        size,
+    });
+
     return {
-        fields,
-        size
+        fields: fieldDefinitions,
+        size,
+
+        marshal,
+        unmarshal,
     };
 };

 export default {
-    createFieldsViaBuilder
+    createFieldsViaBuilder,
 };
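builder.js no longer hard-codes a buffer-type size table and a single LP64 pointer alias; field definitions now come from the ABI factories in types/basic.js and types/ctypes.js, and reading/writing is delegated to the new marshaller module. A rough usage sketch of the reworked createFieldsViaBuilder follows; the concrete type names exposed on field and field.CTypes are defined in types/basic.js and types/ctypes.js, which this diff does not show, so UInt32 and int below are assumptions.

    // Hedged sketch only; UInt32 and CTypes.int are assumed type-factory names.
    import fieldBuilder from "./builder.js";

    const layout = fieldBuilder.createFieldsViaBuilder({
        builder: ({ field }) => {
            field.UInt32("flags");       // assumed basic-type factory
            field.CTypes.int("count");   // assumed C-type factory
        },
        abi: { endianness: "LE", dataModel: "LP64", compiler: "gcc" },
    });

    // layout.fields.flags holds the { offset, size, ... } definition produced by
    // the ABI factory, layout.size is the offset after the last field, and
    // layout.marshal / layout.unmarshal convert field data to and from Buffers.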
package/lib/index.js
CHANGED
@@ -1,29 +1,11 @@
-import
+import b2a from "buffer2address";
 import os from "os";

 import fieldBuilder from "./builder.js";
-import refbuf from "./refbuf.js";

 const defineWithBuilderAndAbi = ({ builder, abi }) => {
-    const { fields, size
-
-    const offsetof = (fieldName) => {
-        const field = fields[fieldName];
-        if (!field) {
-            throw new Error(`field "${fieldName}" not found`);
-        }
-
-        return field.offset;
-    };
-
-    const sizeof = (fieldName) => {
-        const field = fields[fieldName];
-        if (!field) {
-            throw new Error(`field "${fieldName}" not found`);
-        }
-
-        return field.size;
-    };
+    const { fields, size, marshal, unmarshal } =
+        fieldBuilder.createFieldsViaBuilder({ builder, abi });

     const parse = (buf) => {
         if (!Buffer.isBuffer(buf)) {

@@ -31,22 +13,22 @@ const defineWithBuilderAndAbi = ({ builder, abi }) => {
         }

         if (buf.length < size) {
-            throw new Error(
+            throw new Error(
+                `given buffer is too small for structure (has ${buf.length} bytes, needs ${size} bytes)`
+            );
         }

-
-
-        Object.keys(fields).forEach((name) => {
-            const field = fields[name];
-
-            result = Object.assign({}, result, {
-                [name]: field.readFrom({ "buffer": buf })
-            });
-        });
-
-        return result;
+        return unmarshal({ buffer: buf });
     };

+    let emptyData = {};
+    Object.keys(fields).forEach((fieldName) => {
+        emptyData = {
+            ...emptyData,
+            [fieldName]: 0n,
+        };
+    });
+
     const format = (data) => {
         if (typeof data !== "object") {
             throw new Error(`given argument is not a object`);

@@ -60,91 +42,118 @@
             const value = data[fieldName];
             if (Buffer.isBuffer(value)) {
                 buffers = Object.assign({}, buffers, {
-                    [fieldName]: value
+                    [fieldName]: value,
                 });
                 links = [...links, value];
             } else if (typeof value === "bigint") {
                 primitives = Object.assign({}, primitives, {
-                    [fieldName]: value
+                    [fieldName]: value,
                 });
             } else {
-                throw new Error(
+                throw new Error(
+                    `only Buffer and BigInt supported, "${fieldName}" was of type "${typeof value}"`
+                );
             }
         });

-
-
-        Object.keys(primitives).forEach((fieldName) => {
-            const value = data[fieldName];
-            const field = fields[fieldName];
-            field.writeTo({ "buffer": result, value });
-        });
+        let bufferDataToMarshal = {};

         Object.keys(buffers).forEach((fieldName) => {
-            const value = buffer2address(data[fieldName]);
-
-
+            const value = b2a.buffer2address(data[fieldName]);
+
+            bufferDataToMarshal = {
+                ...bufferDataToMarshal,
+                [fieldName]: value,
+            };
         });

-        return
+        return marshal({
+            data: {
+                ...emptyData,
+                ...primitives,
+                ...bufferDataToMarshal,
+            },
+            links,
+        });
     };

     return {
-
-        sizeof,
+        fields,
         size,

         parse,
-        format
+        format,
     };
 };

-const
-
-
-
-
-
-
-
-
-
-
-    return {
-        define
-    };
+const hostDataModels = {
+    x64: {
+        win32: "LLP64",
+        linux: "LP64",
+    },
+    arm: {
+        linux: "ILP32",
+    },
+    arm64: {
+        linux: "LP64",
+    },
 };

-const
-
-
-
-    } else if (process.platform === "linux") {
-        return "LP64";
-    } else {
-        throw new Error(`unsupported platform ${process.platform}`);
-    }
-    } else {
-        throw new Error(`unsupported CPU architecture ${process.arch}`);
+const findDataModelFor = ({ arch, platform }) => {
+    const archDataModels = hostDataModels[arch];
+    if (!archDataModels) {
+        throw new Error(`unsupported CPU architecture ${arch}`);
     }
-};

-const
-
-
+    const dataModel = archDataModels[platform];
+    if (!dataModel) {
+        throw new Error(`unsupported platform ${platform}`);
+    }
+
+    return dataModel;
+};

-
-
-
+const findHostDataModel = () => {
+    return findDataModelFor({
+        arch: process.arch,
+        platform: process.platform,
     });
 };

+const findLikelyHostCompiler = () => {
+    return "gcc";
+};
+
 const define = (builder) => {
-
+    const abi = ({ endianness, dataModel, compiler }) => {
+        return defineWithBuilderAndAbi({
+            builder,
+            abi: {
+                endianness,
+                dataModel,
+                compiler,
+            },
+        });
+    };
+
+    const forHost = () => {
+        const endianness = os.endianness();
+        const dataModel = findHostDataModel();
+        const compiler = findLikelyHostCompiler();
+
+        return abi({
+            endianness,
+            dataModel,
+            compiler,
+        });
+    };
+
+    return {
+        abi,
+        forHost,
+    };
 };

 export default {
     define,
-    abi,
-    forHost
 };
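index.js replaces the old offsetof/sizeof helpers and per-field readFrom/writeTo calls with the marshaller, and the exported define() now returns an object with abi() and forHost() instead of module-level abi/forHost functions. A hedged sketch of the new entry-point flow, assuming the package's default export is this module (package.json is not shown here); the C-type names in the builder callback are assumptions, as above:

    import yaStruct from "ya-struct";

    const stat = yaStruct.define(({ field }) => {
        field.CTypes.int("st_mode");    // assumed type-factory name
        field.CTypes.long("st_size");   // assumed type-factory name
    }).forHost();                       // endianness from os.endianness(),
                                        // data model from process.arch/process.platform,
                                        // compiler assumed to be gcc

    // format() accepts BigInt values (and Buffers, which are converted to addresses
    // via buffer2address and kept alive as links); parse() reads such a Buffer back
    // into BigInt fields.
    const buf = stat.format({ st_mode: 0o644n, st_size: 0n });
    const obj = stat.parse(buf);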
package/lib/marshaller.js
ADDED

@@ -0,0 +1,88 @@
+import refbuf from "./refbuf.js";
+
+const createInterpretingMarshaller = ({ fieldDefinitions, size }) => {
+    const findReadWriteMethodName = ({ signed, fieldSizeInBits, endianness }) => {
+        const bigOrNot = fieldSizeInBits === 64 ? "Big" : "";
+        const signedOrNot = signed ? "" : "U";
+        const intType = `Int${fieldSizeInBits}`;
+
+        return `${bigOrNot}${signedOrNot}${intType}${endianness || ""}`;
+    };
+
+    const marshal = ({ data, links }) => {
+        const result = refbuf.create({ links, size });
+
+        Object.keys(fieldDefinitions).forEach((fieldName) => {
+            const {
+                signed,
+                offset,
+                size: fieldSize,
+                endianness,
+            } = fieldDefinitions[fieldName];
+
+            const fieldSizeInBits = fieldSize * 8;
+            const writeFuncName = `write${findReadWriteMethodName({
+                signed,
+                fieldSizeInBits,
+                endianness,
+            })}`;
+            const valueToWrite = writeFuncName.includes("Big")
+                ? BigInt(data[fieldName])
+                : Number(data[fieldName]);
+
+            if (!result[writeFuncName]) {
+                throw Error(`can't marshal "${fieldName}" ${JSON.stringify(fieldDefinitions[fieldName])}`);
+            }
+
+            result[writeFuncName](valueToWrite, offset);
+        });
+
+        return result;
+    };
+
+    const unmarshal = ({ buffer }) => {
+        let result = {};
+
+        Object.keys(fieldDefinitions).forEach((fieldName) => {
+            const {
+                signed,
+                offset,
+                size: fieldSize,
+                endianness,
+            } = fieldDefinitions[fieldName];
+
+            const fieldSizeInBits = fieldSize * 8;
+            const readFuncName = `read${findReadWriteMethodName({
+                signed,
+                fieldSizeInBits,
+                endianness,
+            })}`;
+
+            if (!buffer[readFuncName]) {
+                throw Error(`can't unmarshal "${fieldName}" ${JSON.stringify(fieldDefinitions[fieldName])}`);
+            }
+
+            const val = buffer[readFuncName](offset);
+
+            result = {
+                ...result,
+                [fieldName]: BigInt(val),
+            };
+        });
+
+        return result;
+    };
+
+    return {
+        marshal,
+        unmarshal,
+    };
+};
+
+const create = ({ fieldDefinitions, size }) => {
+    return createInterpretingMarshaller({ fieldDefinitions, size });
+};
+
+export default {
+    create,
+};
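The new marshaller derives Buffer read/write method names from each field definition, so a signed 8-byte little-endian field uses writeBigInt64LE/readBigInt64LE and an unsigned 4-byte big-endian field uses writeUInt32BE/readUInt32BE. A small self-contained sketch with hand-written field definitions (real definitions are produced by the type modules, not written by hand):

    import marshallerFactory from "./marshaller.js";

    // Hand-written field definitions for illustration; the shape
    // ({ signed, offset, size, endianness }) matches what marshaller.js
    // destructures, but the offsets and names here are hypothetical.
    const { marshal, unmarshal } = marshallerFactory.create({
        fieldDefinitions: {
            flags: { signed: false, offset: 0, size: 4, endianness: "LE" }, // writeUInt32LE
            stamp: { signed: true, offset: 8, size: 8, endianness: "LE" },  // writeBigInt64LE
        },
        size: 16,
    });

    const buf = marshal({ data: { flags: 1n, stamp: -5n }, links: [] });
    const obj = unmarshal({ buffer: buf });  // { flags: 1n, stamp: -5n }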
package/lib/refbuf.js
CHANGED