ya-struct 0.0.1 → 0.0.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/.eslintrc CHANGED
@@ -1,20 +1,18 @@
  {
- "parser": "@babel/eslint-parser",
  "parserOptions": {
- "ecmaVersion": 8,
+ "ecmaVersion": 2021,
  "sourceType": "module",
- "requireConfigFile": false,
- "babelOptions": {
- "plugins": ["@babel/syntax-top-level-await"]
- }
+ "requireConfigFile": false
  },
  "env": {
  "node": true,
- "es6": true
+ "es2020": true
  },
  "rules": {
  "global-require": "off",
- "quote-props": "warn",
+ "quote-props": ["warn", "consistent-as-needed"],
+ "comma-dangle": ["error", {"arrays": "always-multiline", "objects": "always-multiline", "functions": "never"}],
+ "id-length": ["error", {"min": 3, "properties": "always", "exceptions": ["i", "os", "fs"]}],
  "quotes": ["error", "double", { "allowTemplateLiterals": true }],
  "no-plusplus": "error",
  "no-nested-ternary": "error",
@@ -25,6 +23,8 @@
  "no-delete-var": "error",
  "no-param-reassign": "error",
  "no-return-assign": "error",
+ "no-import-assign": "error",
+ "no-multi-assign": "error",
  "keyword-spacing": "error",
  "max-len": [ "warn", { "code": 140 } ],
  "max-params": ["error", 4],
@@ -51,6 +51,7 @@
  "no-new-object": "error",
  "no-new-wrappers": "error",
  "no-useless-concat": "error",
+ "no-unused-vars": ["error", {"ignoreRestSiblings": true}],
  "array-bracket-newline": ["error", "consistent"],
  "func-names": ["error", "never"],
  "func-style": ["error", "expression", { "allowArrowFunctions": true }],
@@ -58,7 +59,6 @@
  "arrow-parens": "error",
  "no-confusing-arrow": "error",
  "prefer-const": "error",
- "prefer-destructuring": ["error", {"object": true, "array": false}],
  "rest-spread-spacing": ["error", "never"],
  "template-curly-spacing": ["error", "never"],
  "prefer-rest-params": "error",
@@ -10,7 +10,7 @@ jobs:
  strategy:
  matrix:
  arch: [ amd64, arm64v8, arm32v7 ]
- node: [ 12, 13, 14, 15 ]
+ node: [ 15, 16 ]
  fail-fast: false

  steps:
package/.prettierrc ADDED
@@ -0,0 +1,5 @@
+ {
+ "trailingComma": "es5",
+ "tabWidth": 2,
+ "singleQuote": false
+ }
package/lib/builder.js CHANGED
@@ -1,104 +1,62 @@
- /* global BigInt */
+ import ctypes from "./types/ctypes.js";
+ import basicTypes from "./types/basic.js";
+ import marshallerFactory from "./marshaller.js";

- const bufferTypeSizes = {
- "UInt8": 1,
- "UInt16LE": 2,
- "UInt16BE": 2,
- "UInt32LE": 4,
- "UInt32BE": 4,
- "BigUInt64LE": 8,
- "BigUInt64BE": 8,
-
- "Int8": 1,
- "Int16LE": 2,
- "Int16BE": 2,
- "Int32LE": 4,
- "Int32BE": 4,
- "BigInt64LE": 8,
- "BigInt64BE": 8
- };
-
- const dataModelMaps = {
- "LP64": {
- "Pointer": "BigUInt64"
- }
- };
-
- const createFieldsViaBuilder = ({ builder, "abi": { endianness, dataModel } = {} }) => {
- let fields = {};
+ const createFieldsViaBuilder = ({
+ builder,
+ abi: { endianness = "LE", dataModel = "LP64", compiler = "gcc" } = {},
+ }) => {
+ let fieldDefinitions = {};

  let currentOffset = 0;

- const standardField = (type) => {
- return (name) => {
- const offset = currentOffset;
- const size = bufferTypeSizes[type];
- if (size === undefined) {
- throw new Error(`could not map unknown type "${type}"`);
- }
-
- fields = Object.assign({}, fields, {
- [name]: {
- name,
- "readFrom": ({ buffer }) => {
- return BigInt(buffer[`read${type}`](offset));
- },
- "writeTo": ({ buffer, value }) => {
- if (type.indexOf("Big") >= 0) {
- buffer[`write${type}`](value, offset);
- } else {
- buffer[`write${type}`](Number(value), offset);
- }
- },
- "bufferType": type,
- offset,
- size
- }
- });
+ const fieldBuilderForAbi = ({ abi }) => {
+ let result = {};
+ Object.keys(abi).forEach((typeName) => {
+ result = {
+ ...result,
+ [typeName]: (name) => {
+ const def = abi[typeName]({ offset: currentOffset });
+
+ fieldDefinitions = {
+ ...fieldDefinitions,
+ [name]: def,
+ };
+
+ currentOffset = def.offset + def.size;
+ },
+ };
+ });

- currentOffset += size;
- };
+ return result;
  };

- let fieldObject = {};
- Object.keys(bufferTypeSizes).forEach((type) => {
- fieldObject = Object.assign({}, fieldObject, {
- [type]: standardField(type)
- });
- });
+ const basicTypesAbi = basicTypes.abi({ dataModel, compiler, endianness });
+ const cTypesAbi = ctypes.abi({ dataModel, compiler, endianness });

- if (endianness === "LE" || endianness === "BE") {
- [
- "Int16",
- "Int32",
- "BigInt64",
- "UInt16",
- "UInt32",
- "BigUInt64"
- ].forEach((endianType) => {
- fieldObject = Object.assign({}, fieldObject, {
- [endianType]: standardField(`${endianType}${endianness}`)
- });
- });
- }
-
- const dataModelMap = dataModelMaps[dataModel] || {};
- Object.keys(dataModelMap).forEach((key) => {
- fieldObject = Object.assign({}, fieldObject, {
- [key]: fieldObject[dataModelMap[key]]
- });
- });
+ const field = {
+ ...fieldBuilderForAbi({ abi: basicTypesAbi }),
+ CTypes: fieldBuilderForAbi({ abi: cTypesAbi }),
+ };

- builder({ "field": fieldObject });
+ builder({ field });

  const size = currentOffset;

+ const { marshal, unmarshal } = marshallerFactory.create({
+ fieldDefinitions,
+ size,
+ });
+
  return {
- fields,
- size
+ fields: fieldDefinitions,
+ size,
+
+ marshal,
+ unmarshal,
  };
  };

  export default {
- createFieldsViaBuilder
+ createFieldsViaBuilder,
  };
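
For orientation, a minimal sketch of how the reworked createFieldsViaBuilder would be called after this change. The UInt32/BigUInt64 type names are assumptions carried over from the 0.0.1 type table; the names actually exported by ./types/basic.js and the members of field.CTypes are not visible in this diff.

import fieldBuilder from "./builder.js";

const { fields, size, marshal, unmarshal } = fieldBuilder.createFieldsViaBuilder({
  builder: ({ field }) => {
    field.UInt32("flags");     // assumed basic type name
    field.BigUInt64("handle"); // assumed basic type name
  },
  abi: { endianness: "LE", dataModel: "LP64", compiler: "gcc" }, // the defaults shown above
});
// fields maps each name to its { offset, size, ... } definition, size is the
// accumulated struct size, and marshal/unmarshal come from the new marshaller.js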
package/lib/index.js CHANGED
@@ -1,29 +1,11 @@
- import buffer2address from "buffer2address";
+ import b2a from "buffer2address";
  import os from "os";

  import fieldBuilder from "./builder.js";
- import refbuf from "./refbuf.js";

  const defineWithBuilderAndAbi = ({ builder, abi }) => {
- const { fields, size } = fieldBuilder.createFieldsViaBuilder({ builder, abi });
-
- const offsetof = (fieldName) => {
- const field = fields[fieldName];
- if (!field) {
- throw new Error(`field "${fieldName}" not found`);
- }
-
- return field.offset;
- };
-
- const sizeof = (fieldName) => {
- const field = fields[fieldName];
- if (!field) {
- throw new Error(`field "${fieldName}" not found`);
- }
-
- return field.size;
- };
+ const { fields, size, marshal, unmarshal } =
+ fieldBuilder.createFieldsViaBuilder({ builder, abi });

  const parse = (buf) => {
  if (!Buffer.isBuffer(buf)) {
@@ -31,22 +13,22 @@ const defineWithBuilderAndAbi = ({ builder, abi }) => {
  }

  if (buf.length < size) {
- throw new Error(`given buffer is too small for structure (has ${buf.length} bytes, needs ${size} bytes)`);
+ throw new Error(
+ `given buffer is too small for structure (has ${buf.length} bytes, needs ${size} bytes)`
+ );
  }

- let result = {};
-
- Object.keys(fields).forEach((name) => {
- const field = fields[name];
-
- result = Object.assign({}, result, {
- [name]: field.readFrom({ "buffer": buf })
- });
- });
-
- return result;
+ return unmarshal({ buffer: buf });
  };

+ let emptyData = {};
+ Object.keys(fields).forEach((fieldName) => {
+ emptyData = {
+ ...emptyData,
+ [fieldName]: 0n,
+ };
+ });
+
  const format = (data) => {
  if (typeof data !== "object") {
  throw new Error(`given argument is not a object`);
@@ -60,91 +42,118 @@ const defineWithBuilderAndAbi = ({ builder, abi }) => {
  const value = data[fieldName];
  if (Buffer.isBuffer(value)) {
  buffers = Object.assign({}, buffers, {
- [fieldName]: value
+ [fieldName]: value,
  });
  links = [...links, value];
  } else if (typeof value === "bigint") {
  primitives = Object.assign({}, primitives, {
- [fieldName]: value
+ [fieldName]: value,
  });
  } else {
- throw new Error("only Buffer and BigInt supported");
+ throw new Error(
+ `only Buffer and BigInt supported, "${fieldName}" was of type "${typeof value}"`
+ );
  }
  });

- const result = refbuf.create({ links, size });
-
- Object.keys(primitives).forEach((fieldName) => {
- const value = data[fieldName];
- const field = fields[fieldName];
- field.writeTo({ "buffer": result, value });
- });
+ let bufferDataToMarshal = {};

  Object.keys(buffers).forEach((fieldName) => {
- const value = buffer2address(data[fieldName]);
- const field = fields[fieldName];
- field.writeTo({ "buffer": result, value });
+ const value = b2a.buffer2address(data[fieldName]);
+
+ bufferDataToMarshal = {
+ ...bufferDataToMarshal,
+ [fieldName]: value,
+ };
  });

- return result;
+ return marshal({
+ data: {
+ ...emptyData,
+ ...primitives,
+ ...bufferDataToMarshal,
+ },
+ links,
+ });
  };

  return {
- offsetof,
- sizeof,
+ fields,
  size,

  parse,
- format
+ format,
  };
  };

- const abi = ({ endianness, dataModel }) => {
- const define = (builder) => {
- return defineWithBuilderAndAbi({
- builder,
- "abi": {
- endianness,
- dataModel
- }
- });
- };
-
- return {
- define
- };
+ const hostDataModels = {
+ x64: {
+ win32: "LLP64",
+ linux: "LP64",
+ },
+ arm: {
+ linux: "ILP32",
+ },
+ arm64: {
+ linux: "LP64",
+ },
  };

- const findHostDataModel = () => {
- if (process.arch === "x64") {
- if (process.platform === "win32") {
- return "LLP64";
- } else if (process.platform === "linux") {
- return "LP64";
- } else {
- throw new Error(`unsupported platform ${process.platform}`);
- }
- } else {
- throw new Error(`unsupported CPU architecture ${process.arch}`);
+ const findDataModelFor = ({ arch, platform }) => {
+ const archDataModels = hostDataModels[arch];
+ if (!archDataModels) {
+ throw new Error(`unsupported CPU architecture ${arch}`);
  }
- };

- const forHost = () => {
- const dataModel = findHostDataModel();
- const endianness = os.endianness();
+ const dataModel = archDataModels[platform];
+ if (!dataModel) {
+ throw new Error(`unsupported platform ${platform}`);
+ }
+
+ return dataModel;
+ };

- return abi({
- dataModel,
- endianness
+ const findHostDataModel = () => {
+ return findDataModelFor({
+ arch: process.arch,
+ platform: process.platform,
  });
  };

+ const findLikelyHostCompiler = () => {
+ return "gcc";
+ };
+
  const define = (builder) => {
- return defineWithBuilderAndAbi({ builder });
+ const abi = ({ endianness, dataModel, compiler }) => {
+ return defineWithBuilderAndAbi({
+ builder,
+ abi: {
+ endianness,
+ dataModel,
+ compiler,
+ },
+ });
+ };
+
+ const forHost = () => {
+ const endianness = os.endianness();
+ const dataModel = findHostDataModel();
+ const compiler = findLikelyHostCompiler();
+
+ return abi({
+ endianness,
+ dataModel,
+ compiler,
+ });
+ };
+
+ return {
+ abi,
+ forHost,
+ };
  };

  export default {
  define,
- abi,
- forHost
  };
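
For orientation, a minimal sketch of the reworked public API, assuming the package's default export is the define shown above; the struct and field names are illustrative, and the basic type names are assumed to match 0.0.1.

import yaStruct from "ya-struct";

// define() now returns { abi, forHost } instead of the removed top-level
// abi()/forHost() helpers.
const timeval = yaStruct.define(({ field }) => {
  field.BigInt64("tv_sec");  // assumed basic type name
  field.BigInt64("tv_usec"); // assumed basic type name
}).forHost();

// format() fills unset fields with 0n, converts Buffer values to addresses via
// buffer2address, and marshals the rest as BigInt; parse() returns every field
// as a BigInt.
const buf = timeval.format({ tv_sec: 1650000000n });
const { tv_sec, tv_usec } = timeval.parse(buf); // tv_usec === 0n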
package/lib/marshaller.js ADDED
@@ -0,0 +1,88 @@
+ import refbuf from "./refbuf.js";
+
+ const createInterpretingMarshaller = ({ fieldDefinitions, size }) => {
+ const findReadWriteMethodName = ({ signed, fieldSizeInBits, endianness }) => {
+ const bigOrNot = fieldSizeInBits === 64 ? "Big" : "";
+ const signedOrNot = signed ? "" : "U";
+ const intType = `Int${fieldSizeInBits}`;
+
+ return `${bigOrNot}${signedOrNot}${intType}${endianness || ""}`;
+ };
+
+ const marshal = ({ data, links }) => {
+ const result = refbuf.create({ links, size });
+
+ Object.keys(fieldDefinitions).forEach((fieldName) => {
+ const {
+ signed,
+ offset,
+ size: fieldSize,
+ endianness,
+ } = fieldDefinitions[fieldName];
+
+ const fieldSizeInBits = fieldSize * 8;
+ const writeFuncName = `write${findReadWriteMethodName({
+ signed,
+ fieldSizeInBits,
+ endianness,
+ })}`;
+ const valueToWrite = writeFuncName.includes("Big")
+ ? BigInt(data[fieldName])
+ : Number(data[fieldName]);
+
+ if (!result[writeFuncName]) {
+ throw Error(`can't marshal "${fieldName}" ${JSON.stringify(fieldDefinitions[fieldName])}`);
+ }
+
+ result[writeFuncName](valueToWrite, offset);
+ });
+
+ return result;
+ };
+
+ const unmarshal = ({ buffer }) => {
+ let result = {};
+
+ Object.keys(fieldDefinitions).forEach((fieldName) => {
+ const {
+ signed,
+ offset,
+ size: fieldSize,
+ endianness,
+ } = fieldDefinitions[fieldName];
+
+ const fieldSizeInBits = fieldSize * 8;
+ const readFuncName = `read${findReadWriteMethodName({
+ signed,
+ fieldSizeInBits,
+ endianness,
+ })}`;
+
+ if (!buffer[readFuncName]) {
+ throw Error(`can't unmarshal "${fieldName}" ${JSON.stringify(fieldDefinitions[fieldName])}`);
+ }
+
+ const val = buffer[readFuncName](offset);
+
+ result = {
+ ...result,
+ [fieldName]: BigInt(val),
+ };
+ });
+
+ return result;
+ };
+
+ return {
+ marshal,
+ unmarshal,
+ };
+ };
+
+ const create = ({ fieldDefinitions, size }) => {
+ return createInterpretingMarshaller({ fieldDefinitions, size });
+ };
+
+ export default {
+ create,
+ };
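
The interpreting marshaller above derives Node's Buffer accessor names from field metadata (signed, size, endianness) instead of storing per-field readFrom/writeTo closures as builder.js did in 0.0.1. For example:

findReadWriteMethodName({ signed: false, fieldSizeInBits: 64, endianness: "LE" })
// => "BigUInt64LE", so marshal() calls buffer.writeBigUInt64LE(value, offset)
//    and unmarshal() calls buffer.readBigUInt64LE(offset)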
package/lib/refbuf.js CHANGED
@@ -7,5 +7,5 @@ const create = ({ links, size }) => {
  };

  export default {
- create
+ create,
  };