koffi 1.2.0-alpha.4 → 1.2.0-alpha.5
- package/README.md +22 -0
- package/package.json +1 -1
- package/src/abi_arm32.cc +10 -24
- package/src/abi_arm64.cc +8 -21
- package/src/abi_riscv64.cc +8 -21
- package/src/abi_x64_sysv.cc +7 -18
- package/src/abi_x64_win.cc +8 -20
- package/src/abi_x86.cc +20 -21
- package/src/abi_x86_fwd.S +11 -5
- package/src/abi_x86_fwd.asm +5 -7
- package/src/call.cc +8 -11
- package/src/call.hh +22 -14
- package/src/ffi.cc +104 -11
- package/src/ffi.hh +12 -4
package/README.md
CHANGED
@@ -8,6 +8,7 @@
   * [Variadic functions](#variadic-functions)
   * [Asynchronous calls](#asynchronous-calls)
   * [Callbacks](#callbacks)
+  * [Memory settings](#memory-settings)
 - [Benchmarks](#benchmarks)
   * [atoi results](#atoi-results)
   * [Raylib results](#raylib-results)
@@ -342,6 +343,27 @@ console.log(ret);
 // This example prints "Hello Niels!" first, and then prints 42
 ```
 
+## Memory settings
+
+For synchronous/normal calls, Koffi uses two preallocated memory blocks, one to construct the C stack and the other to allocate strings and big objects/structs. Unless very big strings or objects (at least more than one page of memory) are used, no extra allocation is needed during calls or callbacks.
+
+The size (in bytes) of these preallocated blocks can be changed. Use `koffi.config()` to get an object with the settings, and `koffi.config(obj)` to apply new settings.
+
+```js
+let config = koffi.config();
+console.log(config);
+
+// {
+//     sync_stack_size: 1048576,
+//     sync_heap_size: 2097152,
+//     async_stack_size: 524288,
+//     async_heap_size: 1048576,
+//     resident_async_pools: 2
+// }
+```
+
+The same is true for asynchronous calls. When an asynchronous call is made, Koffi will allocate new blocks unless there is an unused set of blocks still available. Once the asynchronous call is finished, these blocks are freed if there are more than `resident_async_pools` sets of blocks left around.
+
 # Benchmarks
 
 In order to run it, go to `koffi/benchmark` and run `../../cnoke/cnoke.js` (or `node ..\..\cnoke\cnoke.js` on Windows) before doing anything else.
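The README addition above only demonstrates reading the settings. As a minimal sketch based on the `koffi.config()` API introduced in this release (the sizes and pool count below are illustrative values, not recommendations), new settings are applied by passing an object with just the keys to override, before any library is loaded:

```js
const koffi = require('koffi');

// Settings can only be changed before the first library is loaded
koffi.config({
    sync_stack_size: 2 * 1024 * 1024, // 2 MiB; accepted sizes range from 1 KiB to 16 MiB
    resident_async_pools: 4           // Keep up to 4 asynchronous memory pools resident
});

console.log(koffi.config());

const lib = koffi.load('libm.so.6'); // Linux example
```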
package/package.json
CHANGED
package/src/abi_arm32.cc
CHANGED
@@ -231,8 +231,7 @@ bool CallData::Prepare(const Napi::CallbackInfo &info)
     if (RG_UNLIKELY(!AllocStack(8 * 8, 8, &vec_ptr)))
         return false;
     if (func->ret.use_memory) {
-        return false;
+        return_ptr = AllocHeap(func->ret.type->size, 16);
         *(uint8_t **)(gpr_ptr++) = return_ptr;
     }
 
@@ -351,8 +350,7 @@ bool CallData::Prepare(const Napi::CallbackInfo &info)
         } else if (IsObject(value) && param.type->ref->primitive == PrimitiveKind::Record) {
             Napi::Object obj = value.As<Napi::Object>();
 
-            return false;
+            ptr = AllocHeap(param.type->ref->size, 16);
 
             if (param.directions & 1) {
                 if (!PushObject(obj, param.type->ref, ptr))
@@ -452,11 +450,9 @@ bool CallData::Prepare(const Napi::CallbackInfo &info)
         if (value.IsFunction()) {
             Napi::Function func = value.As<Napi::Function>();
 
-            if (RG_UNLIKELY(
+            ptr = ReserveTrampoline(param.type->proto, func);
+            if (RG_UNLIKELY(!ptr))
                 return false;
-
-            ptr = GetTrampoline(idx, param.type->proto);
         } else if (CheckValueTag(instance, value, param.type)) {
             ptr = value.As<Napi::External<void>>().Data();
         } else if (IsNullOrUndefined(value)) {
@@ -570,10 +566,6 @@ void CallData::Relay(Size idx, uint8_t *own_sp, uint8_t *caller_sp, BackRegister
     const FunctionInfo *proto = instance->trampolines[idx].proto;
     Napi::Function func = instance->trampolines[idx].func;
 
-    // Allow reuse of static trampoline
-    instance->free_trampolines |= 1u << idx;
-    used_trampolines &= ~(1u << idx);
-
     uint64_t *vec_ptr = (uint64_t *)own_sp;
     uint32_t *gpr_ptr = (uint32_t *)(vec_ptr + 8);
     uint32_t *args_ptr = (uint32_t *)caller_sp;
@@ -690,12 +682,9 @@ void CallData::Relay(Size idx, uint8_t *own_sp, uint8_t *caller_sp, BackRegister
             gpr_ptr = AlignUp(gpr_ptr, align);
 
             if (param.type->size > gpr_size) {
-                // But the object is split between the GPRs and the caller stack.
-                if (RG_UNLIKELY(!AllocHeap(param.type->size, 16, &ptr)))
-                    return;
+                // XXX: Expensive, can we do better?
+                // The problem is that the object is split between the GPRs and the caller stack.
+                uint8_t *ptr = AllocHeap(param.type->size, 16);
 
                 memcpy(ptr, gpr_ptr, gpr_size);
                 memcpy(ptr + gpr_size, args_ptr, param.type->size - gpr_size);
@@ -855,8 +844,7 @@ void CallData::Relay(Size idx, uint8_t *own_sp, uint8_t *caller_sp, BackRegister
         } else if (IsObject(value) && type->ref->primitive == PrimitiveKind::Record) {
             Napi::Object obj = value.As<Napi::Object>();
 
-            return;
+            ptr = AllocHeap(type->ref->size, 16);
 
             if (!PushObject(obj, type->ref, ptr))
                 return;
@@ -920,11 +908,9 @@ void CallData::Relay(Size idx, uint8_t *own_sp, uint8_t *caller_sp, BackRegister
         if (value.IsFunction()) {
             Napi::Function func = value.As<Napi::Function>();
 
-            if (RG_UNLIKELY(
+            ptr = ReserveTrampoline(type->proto, func);
+            if (RG_UNLIKELY(!ptr))
                 return;
-
-            ptr = GetTrampoline(idx, type->proto);
         } else if (CheckValueTag(instance, value, type)) {
             ptr = value.As<Napi::External<uint8_t>>().Data();
         } else if (IsNullOrUndefined(value)) {
package/src/abi_arm64.cc
CHANGED
@@ -221,8 +221,7 @@ bool CallData::Prepare(const Napi::CallbackInfo &info)
     if (RG_UNLIKELY(!AllocStack(9 * 8, 8, &gpr_ptr)))
         return false;
     if (func->ret.use_memory) {
-        return false;
+        return_ptr = AllocHeap(func->ret.type->size, 16);
         gpr_ptr[8] = (uint64_t)return_ptr;
     }
 
@@ -338,8 +337,7 @@ bool CallData::Prepare(const Napi::CallbackInfo &info)
         } else if (IsObject(value) && param.type->ref->primitive == PrimitiveKind::Record) {
             Napi::Object obj = value.As<Napi::Object>();
 
-            return false;
+            ptr = AllocHeap(param.type->ref->size, 16);
 
             if (param.directions & 1) {
                 if (!PushObject(obj, param.type->ref, ptr))
@@ -394,9 +392,7 @@ bool CallData::Prepare(const Napi::CallbackInfo &info)
                 args_ptr += (param.type->size + 7) / 8;
             }
         } else {
-            uint8_t *ptr;
-            if (RG_UNLIKELY(!AllocHeap(param.type->size, 16, &ptr)))
-                return false;
+            uint8_t *ptr = AllocHeap(param.type->size, 16);
 
             if (param.gpr_count) {
                 RG_ASSERT(param.gpr_count == 1);
@@ -460,11 +456,9 @@ bool CallData::Prepare(const Napi::CallbackInfo &info)
         if (value.IsFunction()) {
            Napi::Function func = value.As<Napi::Function>();
 
-            if (RG_UNLIKELY(
+            ptr = ReserveTrampoline(param.type->proto, func);
+            if (RG_UNLIKELY(!ptr))
                 return false;
-
-            ptr = GetTrampoline(idx, param.type->proto);
         } else if (CheckValueTag(instance, value, param.type)) {
             ptr = value.As<Napi::External<void>>().Data();
         } else if (IsNullOrUndefined(value)) {
@@ -589,10 +583,6 @@ void CallData::Relay(Size idx, uint8_t *own_sp, uint8_t *caller_sp, BackRegister
     const FunctionInfo *proto = instance->trampolines[idx].proto;
     Napi::Function func = instance->trampolines[idx].func;
 
-    // Allow reuse of static trampoline
-    instance->free_trampolines |= 1u << idx;
-    used_trampolines &= ~(1u << idx);
-
     uint64_t *gpr_ptr = (uint64_t *)own_sp;
     uint64_t *vec_ptr = gpr_ptr + 9;
     uint64_t *args_ptr = (uint64_t *)caller_sp;
@@ -928,8 +918,7 @@ void CallData::Relay(Size idx, uint8_t *own_sp, uint8_t *caller_sp, BackRegister
         } else if (IsObject(value) && type->ref->primitive == PrimitiveKind::Record) {
             Napi::Object obj = value.As<Napi::Object>();
 
-            return;
+            ptr = AllocHeap(type->ref->size, 16);
 
             if (!PushObject(obj, type->ref, ptr))
                 return;
@@ -987,11 +976,9 @@ void CallData::Relay(Size idx, uint8_t *own_sp, uint8_t *caller_sp, BackRegister
         if (value.IsFunction()) {
             Napi::Function func = value.As<Napi::Function>();
 
-            if (RG_UNLIKELY(
+            ptr = ReserveTrampoline(type->proto, func);
+            if (RG_UNLIKELY(!ptr))
                 return;
-
-            ptr = GetTrampoline(idx, type->proto);
         } else if (CheckValueTag(instance, value, type)) {
             ptr = value.As<Napi::External<uint8_t>>().Data();
         } else if (IsNullOrUndefined(value)) {
package/src/abi_riscv64.cc
CHANGED
@@ -180,8 +180,7 @@ bool CallData::Prepare(const Napi::CallbackInfo &info)
     if (RG_UNLIKELY(!AllocStack(8 * 8, 8, &vec_ptr)))
         return false;
     if (func->ret.use_memory) {
-        return false;
+        return_ptr = AllocHeap(func->ret.type->size, 16);
         *(uint8_t **)(gpr_ptr++) = return_ptr;
     }
 
@@ -266,8 +265,7 @@ bool CallData::Prepare(const Napi::CallbackInfo &info)
         } else if (IsObject(value) && param.type->ref->primitive == PrimitiveKind::Record) {
            Napi::Object obj = value.As<Napi::Object>();
 
-            return false;
+            ptr = AllocHeap(param.type->ref->size, 16);
 
            if (param.directions & 1) {
                if (!PushObject(obj, param.type->ref, ptr))
@@ -324,9 +322,7 @@ bool CallData::Prepare(const Napi::CallbackInfo &info)
                 args_ptr += (param.type->size + 7) / 8;
             }
         } else {
-            uint8_t *ptr;
-            if (RG_UNLIKELY(!AllocHeap(param.type->size, 16, &ptr)))
-                return false;
+            uint8_t *ptr = AllocHeap(param.type->size, 16);
 
             if (param.gpr_count) {
                 RG_ASSERT(param.gpr_count == 1);
@@ -383,11 +379,9 @@ bool CallData::Prepare(const Napi::CallbackInfo &info)
         if (value.IsFunction()) {
             Napi::Function func = value.As<Napi::Function>();
 
-            if (RG_UNLIKELY(
+            ptr = ReserveTrampoline(param.type->proto, func);
+            if (RG_UNLIKELY(!ptr))
                 return false;
-
-            ptr = GetTrampoline(idx, param.type->proto);
         } else if (CheckValueTag(instance, value, param.type)) {
             ptr = value.As<Napi::External<void>>().Data();
         } else if (IsNullOrUndefined(value)) {
@@ -513,10 +507,6 @@ void CallData::Relay(Size idx, uint8_t *own_sp, uint8_t *caller_sp, BackRegister
     const FunctionInfo *proto = instance->trampolines[idx].proto;
     Napi::Function func = instance->trampolines[idx].func;
 
-    // Allow reuse of static trampoline
-    instance->free_trampolines |= 1u << idx;
-    used_trampolines &= ~(1u << idx);
-
     uint64_t *gpr_ptr = (uint64_t *)own_sp;
     uint64_t *vec_ptr = gpr_ptr + 8;
     uint64_t *args_ptr = (uint64_t *)caller_sp;
@@ -747,8 +737,7 @@ void CallData::Relay(Size idx, uint8_t *own_sp, uint8_t *caller_sp, BackRegister
         } else if (IsObject(value) && type->ref->primitive == PrimitiveKind::Record) {
             Napi::Object obj = value.As<Napi::Object>();
 
-            return;
+            ptr = AllocHeap(type->ref->size, 16);
 
             if (!PushObject(obj, type->ref, ptr))
                 return;
@@ -805,11 +794,9 @@ void CallData::Relay(Size idx, uint8_t *own_sp, uint8_t *caller_sp, BackRegister
         if (value.IsFunction()) {
             Napi::Function func = value.As<Napi::Function>();
 
-            if (RG_UNLIKELY(
+            ptr = ReserveTrampoline(type->proto, func);
+            if (RG_UNLIKELY(!ptr))
                 return;
-
-            ptr = GetTrampoline(idx, type->proto);
         } else if (CheckValueTag(instance, value, type)) {
             ptr = value.As<Napi::External<uint8_t>>().Data();
         } else if (IsNullOrUndefined(value)) {
package/src/abi_x64_sysv.cc
CHANGED
@@ -260,8 +260,7 @@ bool CallData::Prepare(const Napi::CallbackInfo &info)
     if (RG_UNLIKELY(!AllocStack(6 * 8, 8, &gpr_ptr)))
         return false;
     if (func->ret.use_memory) {
-        return false;
+        return_ptr = AllocHeap(func->ret.type->size, 16);
         *(uint8_t **)(gpr_ptr++) = return_ptr;
     }
 
@@ -346,8 +345,7 @@ bool CallData::Prepare(const Napi::CallbackInfo &info)
         } else if (IsObject(value) && param.type->ref->primitive == PrimitiveKind::Record) {
             Napi::Object obj = value.As<Napi::Object>();
 
-            return false;
+            ptr = AllocHeap(param.type->ref->size, 16);
 
             if (param.directions & 1) {
                 if (!PushObject(obj, param.type->ref, ptr))
@@ -437,11 +435,9 @@ bool CallData::Prepare(const Napi::CallbackInfo &info)
         if (value.IsFunction()) {
             Napi::Function func = value.As<Napi::Function>();
 
-            if (RG_UNLIKELY(
+            ptr = ReserveTrampoline(param.type->proto, func);
+            if (RG_UNLIKELY(!ptr))
                 return false;
-
-            ptr = GetTrampoline(idx, param.type->proto);
         } else if (CheckValueTag(instance, value, param.type)) {
             ptr = value.As<Napi::External<void>>().Data();
         } else if (IsNullOrUndefined(value)) {
@@ -562,10 +558,6 @@ void CallData::Relay(Size idx, uint8_t *own_sp, uint8_t *caller_sp, BackRegister
     const FunctionInfo *proto = instance->trampolines[idx].proto;
     Napi::Function func = instance->trampolines[idx].func;
 
-    // Allow reuse of static trampoline
-    instance->free_trampolines |= 1u << idx;
-    used_trampolines &= ~(1u << idx);
-
     uint64_t *gpr_ptr = (uint64_t *)own_sp;
     uint64_t *xmm_ptr = gpr_ptr + 6;
     uint64_t *args_ptr = (uint64_t *)caller_sp;
@@ -785,8 +777,7 @@ void CallData::Relay(Size idx, uint8_t *own_sp, uint8_t *caller_sp, BackRegister
         } else if (IsObject(value) && type->ref->primitive == PrimitiveKind::Record) {
             Napi::Object obj = value.As<Napi::Object>();
 
-            return;
+            ptr = AllocHeap(type->ref->size, 16);
 
             if (!PushObject(obj, type->ref, ptr))
                 return;
@@ -860,11 +851,9 @@ void CallData::Relay(Size idx, uint8_t *own_sp, uint8_t *caller_sp, BackRegister
         if (value.IsFunction()) {
             Napi::Function func = value.As<Napi::Function>();
 
-            if (RG_UNLIKELY(
+            ptr = ReserveTrampoline(type->proto, func);
+            if (RG_UNLIKELY(!ptr))
                 return;
-
-            ptr = GetTrampoline(idx, type->proto);
         } else if (CheckValueTag(instance, value, type)) {
             ptr = value.As<Napi::External<uint8_t>>().Data();
         } else if (IsNullOrUndefined(value)) {
package/src/abi_x64_win.cc
CHANGED
@@ -105,8 +105,7 @@ bool CallData::Prepare(const Napi::CallbackInfo &info)
     if (RG_UNLIKELY(!AllocStack(func->args_size, 16, &args_ptr)))
         return false;
     if (!func->ret.regular) {
-        return false;
+        return_ptr = AllocHeap(func->ret.type->size, 16);
         *(uint8_t **)(args_ptr++) = return_ptr;
     }
 
@@ -192,8 +191,7 @@ bool CallData::Prepare(const Napi::CallbackInfo &info)
         } else if (IsObject(value) && param.type->ref->primitive == PrimitiveKind::Record) {
             Napi::Object obj = value.As<Napi::Object>();
 
-            return false;
+            ptr = AllocHeap(param.type->ref->size, 16);
 
             if (param.directions & 1) {
                 if (!PushObject(obj, param.type->ref, ptr))
@@ -227,8 +225,7 @@ bool CallData::Prepare(const Napi::CallbackInfo &info)
             if (param.regular) {
                 ptr = (uint8_t *)(args_ptr++);
             } else {
-                return false;
+                ptr = AllocHeap(param.type->size, 16);
                 *(uint8_t **)(args_ptr++) = ptr;
             }
 
@@ -263,11 +260,9 @@ bool CallData::Prepare(const Napi::CallbackInfo &info)
         if (value.IsFunction()) {
             Napi::Function func = value.As<Napi::Function>();
 
-            if (RG_UNLIKELY(
+            ptr = ReserveTrampoline(param.type->proto, func);
+            if (RG_UNLIKELY(!ptr))
                 return false;
-
-            ptr = GetTrampoline(idx, param.type->proto);
         } else if (CheckValueTag(instance, value, param.type)) {
             ptr = value.As<Napi::External<uint8_t>>().Data();
         } else if (IsNullOrUndefined(value)) {
@@ -373,10 +368,6 @@ void CallData::Relay(Size idx, uint8_t *own_sp, uint8_t *caller_sp, BackRegister
     const FunctionInfo *proto = instance->trampolines[idx].proto;
     Napi::Function func = instance->trampolines[idx].func;
 
-    // Allow reuse of static trampoline
-    instance->free_trampolines |= 1u << idx;
-    used_trampolines &= ~(1u << idx);
-
     uint64_t *gpr_ptr = (uint64_t *)own_sp;
     uint64_t *xmm_ptr = gpr_ptr + 4;
     uint64_t *args_ptr = (uint64_t *)caller_sp;
@@ -586,8 +577,7 @@ void CallData::Relay(Size idx, uint8_t *own_sp, uint8_t *caller_sp, BackRegister
         } else if (IsObject(value) && type->ref->primitive == PrimitiveKind::Record) {
             Napi::Object obj = value.As<Napi::Object>();
 
-            return;
+            ptr = AllocHeap(type->ref->size, 16);
 
             if (!PushObject(obj, type->ref, ptr))
                 return;
@@ -643,11 +633,9 @@ void CallData::Relay(Size idx, uint8_t *own_sp, uint8_t *caller_sp, BackRegister
         if (value.IsFunction()) {
             Napi::Function func = value.As<Napi::Function>();
 
-            if (RG_UNLIKELY(
+            ptr = ReserveTrampoline(type->proto, func);
+            if (RG_UNLIKELY(!ptr))
                 return;
-
-            ptr = GetTrampoline(idx, type->proto);
         } else if (CheckValueTag(instance, value, type)) {
             ptr = value.As<Napi::External<uint8_t>>().Data();
         } else if (IsNullOrUndefined(value)) {
package/src/abi_x86.cc
CHANGED
@@ -28,6 +28,9 @@ struct BackRegisters {
     double d;
     float f;
     bool is_double;
+#ifndef _WIN32
+    bool ret4;
+#endif
 };
 
 extern "C" uint64_t ForwardCallG(const void *func, uint8_t *sp, uint8_t **out_old_sp);
@@ -152,8 +155,7 @@ bool CallData::Prepare(const Napi::CallbackInfo &info)
         args_ptr += 4;
     }
     if (!func->ret.trivial) {
-        return false;
+        return_ptr = AllocHeap(func->ret.type->size, 16);
         *((func->ret.fast ? fast_ptr : args_ptr)++) = (uint32_t)return_ptr;
     }
 
@@ -256,8 +258,7 @@ bool CallData::Prepare(const Napi::CallbackInfo &info)
         } else if (IsObject(value) && param.type->ref->primitive == PrimitiveKind::Record) {
             Napi::Object obj = value.As<Napi::Object>();
 
-            return false;
+            ptr = AllocHeap(param.type->ref->size, 16);
 
             if (param.directions & 1) {
                 if (!PushObject(obj, param.type->ref, ptr))
@@ -294,7 +295,7 @@ bool CallData::Prepare(const Napi::CallbackInfo &info)
             if (!PushObject(obj, param.type, ptr))
                 return false;
         } else {
-            uint8_t *ptr = (uint8_t *)
+            uint8_t *ptr = (uint8_t *)args_ptr;
             if (!PushObject(obj, param.type, ptr))
                 return false;
             args_ptr = (uint32_t *)AlignUp(ptr + param.type->size, 4);
@@ -326,11 +327,9 @@ bool CallData::Prepare(const Napi::CallbackInfo &info)
         if (value.IsFunction()) {
             Napi::Function func = value.As<Napi::Function>();
 
-            if (RG_UNLIKELY(
+            ptr = ReserveTrampoline(param.type->proto, func);
+            if (RG_UNLIKELY(!ptr))
                 return false;
-
-            ptr = GetTrampoline(idx, param.type->proto);
         } else if (CheckValueTag(instance, value, param.type)) {
             ptr = value.As<Napi::External<uint8_t>>().Data();
         } else if (IsNullOrUndefined(value)) {
@@ -437,15 +436,15 @@ void CallData::Relay(Size idx, uint8_t *own_sp, uint8_t *caller_sp, BackRegister
     const FunctionInfo *proto = instance->trampolines[idx].proto;
     Napi::Function func = instance->trampolines[idx].func;
 
-    // Allow reuse of static trampoline
-    instance->free_trampolines |= 1u << idx;
-    used_trampolines &= ~(1u << idx);
-
     uint32_t *args_ptr = (uint32_t *)caller_sp;
 
     uint8_t *return_ptr = !proto->ret.trivial ? (uint8_t *)args_ptr[0] : nullptr;
     args_ptr += !proto->ret.trivial;
 
+#ifndef _WIN32
+    out_reg->ret4 = !!return_ptr;
+#endif
+
     LocalArray<napi_value, MaxParameters> arguments;
 
     // Convert to JS arguments
@@ -540,10 +539,12 @@ void CallData::Relay(Size idx, uint8_t *own_sp, uint8_t *caller_sp, BackRegister
             case PrimitiveKind::Record: {
                 RG_ASSERT(!param.fast);
 
-                uint8_t *ptr =
-
+                uint8_t *ptr = (uint8_t *)args_ptr;
+
                 Napi::Object obj2 = PopObject(ptr, param.type);
                 arguments.Append(obj2);
+
+                args_ptr = (uint32_t *)AlignUp(ptr + param.type->size, 4);
             } break;
             case PrimitiveKind::Array: { RG_UNREACHABLE(); } break;
             case PrimitiveKind::Float32: {
@@ -643,8 +644,7 @@ void CallData::Relay(Size idx, uint8_t *own_sp, uint8_t *caller_sp, BackRegister
         } else if (IsObject(value) && type->ref->primitive == PrimitiveKind::Record) {
             Napi::Object obj = value.As<Napi::Object>();
 
-            return;
+            ptr = AllocHeap(type->ref->size, 16);
 
             if (!PushObject(obj, type->ref, ptr))
                 return;
@@ -681,6 +681,7 @@ void CallData::Relay(Size idx, uint8_t *own_sp, uint8_t *caller_sp, BackRegister
                 }
 
                 out_reg->f = CopyNumber<float>(value);
+                out_reg->is_double = false;
             } break;
             case PrimitiveKind::Float64: {
                 if (RG_UNLIKELY(!value.IsNumber() && !value.IsBigInt())) {
@@ -697,11 +698,9 @@ void CallData::Relay(Size idx, uint8_t *own_sp, uint8_t *caller_sp, BackRegister
         if (value.IsFunction()) {
            Napi::Function func = value.As<Napi::Function>();
 
-            if (RG_UNLIKELY(
+            ptr = ReserveTrampoline(type->proto, func);
+            if (RG_UNLIKELY(!ptr))
                 return;
-
-            ptr = GetTrampoline(idx, type->proto);
         } else if (CheckValueTag(instance, value, type)) {
             ptr = value.As<Napi::External<uint8_t>>().Data();
         } else if (IsNullOrUndefined(value)) {
package/src/abi_x86_fwd.S
CHANGED
@@ -139,9 +139,16 @@ ForwardCallRD:
         call *RelayCallBack@GOT(%ecx)
         movl 16(%esp), %eax
         movl 20(%esp), %edx
+        cmpb $0, 37(%esp)
+        jne 2f
+1:
         addl $44, %esp
         .cfi_def_cfa esp, 4
         ret
+2:
+        addl $44, %esp
+        .cfi_def_cfa esp, 4
+        ret $4
         .cfi_endproc
 .endm
 
@@ -166,12 +173,11 @@
         jne 2f
 1:
         flds 32(%esp)
+        addl $44, %esp
+        .cfi_def_cfa esp, 4
+        ret
 2:
         fldl 24(%esp)
-3:
-        movl 16(%esp), %eax
-        movl 20(%esp), %edx
         addl $44, %esp
         .cfi_def_cfa esp, 4
         ret
@@ -267,7 +273,7 @@ CallSwitchStack:
         andl $-16, %eax
         movl %eax, 4(%ecx)
         movl 20(%esp), %esp
-        subl $
+        subl $28, %esp
         movl 8(%ebx), %eax
         movl %eax, 0(%esp)
        movl 12(%ebx), %eax
package/src/abi_x86_fwd.asm
CHANGED
@@ -138,8 +138,8 @@ trampoline macro ID
         lea eax, dword ptr [esp+16]
         mov dword ptr [esp+12], eax
         call RelayCallBack
-        mov eax, dword ptr[esp+16]
-        mov edx, dword ptr[esp+20]
+        mov eax, dword ptr [esp+16]
+        mov edx, dword ptr [esp+20]
         add esp, 44
         ret
 endm
@@ -158,16 +158,14 @@ trampoline_x87 macro ID
         lea eax, dword ptr [esp+16]
         mov dword ptr [esp+12], eax
         call RelayCallBack
-        cmp byte ptr[esp+36], 0
+        cmp byte ptr [esp+36], 0
         jne l2
 l1:
         fld dword ptr [esp+32]
+        add esp, 44
+        ret
 l2:
         fld qword ptr [esp+24]
-l3:
-        mov eax, dword ptr[esp+16]
-        mov edx, dword ptr[esp+20]
         add esp, 44
         ret
 endm
package/src/call.cc
CHANGED
@@ -298,11 +298,9 @@ bool CallData::PushObject(const Napi::Object &obj, const TypeInfo *type, uint8_t
         if (value.IsFunction()) {
             Napi::Function func = value.As<Napi::Function>();
 
-            if (RG_UNLIKELY(
+            ptr = ReserveTrampoline(type->proto, func);
+            if (RG_UNLIKELY(!ptr))
                 return false;
-
-            ptr = GetTrampoline(idx, member.type->proto);
         } else if (CheckValueTag(instance, value, member.type)) {
             Napi::External external = value.As<Napi::External<void>>();
             ptr = external.Data();
@@ -483,11 +481,9 @@ bool CallData::PushArray(const Napi::Value &obj, const TypeInfo *type, uint8_t *
         if (value.IsFunction()) {
             Napi::Function func = value.As<Napi::Function>();
 
-            if (RG_UNLIKELY(
+            ptr = ReserveTrampoline(type->proto, func);
+            if (RG_UNLIKELY(!ptr))
                 return false;
-
-            ptr = GetTrampoline(idx, type->ref->proto);
         } else if (CheckValueTag(instance, value, type->ref)) {
             Napi::External external = value.As<Napi::External<void>>();
             ptr = external.Data();
@@ -567,13 +563,13 @@ bool CallData::PushArray(const Napi::Value &obj, const TypeInfo *type, uint8_t *
     return true;
 }
 
-Size CallData::ReserveTrampoline(const FunctionInfo *proto, Napi::Function func)
+void *CallData::ReserveTrampoline(const FunctionInfo *proto, Napi::Function func)
 {
     uint32_t idx = CountTrailingZeros(instance->free_trampolines);
 
     if (RG_UNLIKELY(idx >= MaxTrampolines)) {
         ThrowError<Napi::Error>(env, "Too many callbacks are in use (max = %1)", MaxTrampolines);
-        return
+        return nullptr;
     }
 
     instance->free_trampolines &= ~(1u << idx);
@@ -582,7 +578,8 @@ Size CallData::ReserveTrampoline(const FunctionInfo *proto, Napi::Function func)
     instance->trampolines[idx].proto = proto;
     instance->trampolines[idx].func = func;
 
+    void *trampoline = GetTrampoline(idx, proto);
+    return trampoline;
 }
 
 void CallData::PopObject(Napi::Object obj, const uint8_t *origin, const TypeInfo *type, int16_t realign)
package/src/call.hh
CHANGED
@@ -41,6 +41,7 @@ class alignas(8) CallData {
     InstanceMemory *mem;
     Span<uint8_t> old_stack_mem;
     Span<uint8_t> old_heap_mem;
+
     uint32_t used_trampolines = 0;
 
     LocalArray<OutObject, MaxOutParameters> out_objects;
@@ -73,10 +74,10 @@ public:
     void DumpForward() const;
 
 private:
-    template <typename T
+    template <typename T>
     bool AllocStack(Size size, Size align, T **out_ptr);
-    template <typename T =
+    template <typename T = uint8_t>
+    T *AllocHeap(Size size, Size align);
 
     const char *PushString(const Napi::Value &value);
     const char16_t *PushString16(const Napi::Value &value);
@@ -87,7 +88,7 @@ private:
     Napi::Object PopObject(const uint8_t *origin, const TypeInfo *type, int16_t realign = 0);
     Napi::Value PopArray(const uint8_t *origin, const TypeInfo *type, int16_t realign = 0);
 
+    void *ReserveTrampoline(const FunctionInfo *proto, Napi::Function func);
 };
 
 template <typename T>
@@ -113,25 +114,32 @@ inline bool CallData::AllocStack(Size size, Size align, T **out_ptr)
 }
 
 template <typename T>
-inline
+inline T *CallData::AllocHeap(Size size, Size align)
 {
     uint8_t *ptr = AlignUp(mem->heap.ptr, align);
     Size delta = size + (ptr - mem->heap.ptr);
 
-    if (
+    if (RG_LIKELY(size < 4096 && delta <= mem->heap.len)) {
+#ifdef RG_DEBUG
+        memset(mem->heap.ptr, 0, (size_t)delta);
+#endif
+
+        mem->heap.ptr += delta;
+        mem->heap.len -= delta;
 
+        return ptr;
+    } else {
 #ifdef RG_DEBUG
+        int flags = (int)Allocator::Flag::Zero;
+#else
+        int flags = 0;
 #endif
 
+        ptr = (uint8_t *)Allocator::Allocate(&call_alloc, size + align, flags);
+        ptr = AlignUp(ptr, align);
 
+        return ptr;
+    }
 }
 
 void *GetTrampoline(Size idx, const FunctionInfo *proto);
package/src/ffi.cc
CHANGED
@@ -43,6 +43,96 @@ namespace RG {
 // Value does not matter, the tag system uses memory addresses
 const int TypeInfoMarker = 0xDEADBEEF;
 
+static bool ChangeMemorySize(Napi::Value value, Size *out_size)
+{
+    const Size MinSize = Kibibytes(1);
+    const Size MaxSize = Mebibytes(16);
+
+    Napi::Env env = value.Env();
+
+    if (!value.IsNumber()) {
+        ThrowError<Napi::TypeError>(env, "Unexpected %1 value for memory size, expected number");
+        return env.Null();
+    }
+
+    int64_t size = value.As<Napi::Number>().Int64Value();
+
+    if (size < MinSize || size > MaxSize) {
+        ThrowError<Napi::Error>(env, "Memory size must be between %1 and %2", FmtMemSize(MinSize), FmtMemSize(MaxSize));
+        return false;
+    }
+
+    *out_size = (Size)size;
+    return true;
+}
+
+static Napi::Value GetSetConfig(const Napi::CallbackInfo &info)
+{
+    Napi::Env env = info.Env();
+    InstanceData *instance = env.GetInstanceData<InstanceData>();
+
+    if (info.Length()) {
+        if (instance->memories.len) {
+            ThrowError<Napi::Error>(env, "Cannot change Koffi configuration once a library has been loaded");
+            return env.Null();
+        }
+
+        if (!info[0].IsObject()) {
+            ThrowError<Napi::TypeError>(env, "Unexpected %1 value for config, expected object", GetValueType(instance, info[0]));
+            return env.Null();
+        }
+
+        Napi::Object obj = info[0].As<Napi::Object>();
+        Napi::Array keys = obj.GetPropertyNames();
+
+        for (uint32_t i = 0; i < keys.Length(); i++) {
+            std::string key = ((Napi::Value)keys[i]).As<Napi::String>();
+            Napi::Value value = obj[key];
+
+            if (key == "sync_stack_size") {
+                if (!ChangeMemorySize(value, &instance->sync_stack_size))
+                    return env.Null();
+            } else if (key == "sync_heap_size") {
+                if (!ChangeMemorySize(value, &instance->sync_heap_size))
+                    return env.Null();
+            } else if (key == "async_stack_size") {
+                if (!ChangeMemorySize(value, &instance->async_stack_size))
+                    return env.Null();
+            } else if (key == "async_heap_size") {
+                if (!ChangeMemorySize(value, &instance->async_heap_size))
+                    return env.Null();
+            } else if (key == "resident_async_pools") {
+                if (!value.IsNumber()) {
+                    ThrowError<Napi::TypeError>(env, "Unexpected %1 value for resident_async_pools, expected number");
+                    return env.Null();
+                }
+
+                int64_t n = value.As<Napi::Number>().Int64Value();
+
+                if (n < 0 || n > RG_LEN(instance->memories.data)) {
+                    ThrowError<Napi::Error>(env, "Parameter resident_async_pools must be between 0 and %1", RG_LEN(instance->memories.data));
+                    return env.Null();
+                }
+
+                RG_STATIC_ASSERT(DefaultResidentAsyncPools <= RG_LEN(instance->memories.data));
+            } else {
+                ThrowError<Napi::Error>(env, "Unexpected config member '%1'", key.c_str());
+                return env.Null();
+            }
+        }
+    }
+
+    Napi::Object obj = Napi::Object::New(env);
+
+    obj.Set("sync_stack_size", instance->sync_stack_size);
+    obj.Set("sync_heap_size", instance->sync_heap_size);
+    obj.Set("async_stack_size", instance->async_stack_size);
+    obj.Set("async_heap_size", instance->async_heap_size);
+    obj.Set("resident_async_pools", instance->resident_async_pools);
+
+    return obj;
+}
+
 static Napi::Value CreateStructType(const Napi::CallbackInfo &info, bool pad)
 {
     Napi::Env env = info.Env();
@@ -509,7 +599,7 @@ static Napi::Value GetTypeDefinition(const Napi::CallbackInfo &info)
     return type->defn.Value();
 }
 
-static InstanceMemory *AllocateMemory(InstanceData *instance)
+static InstanceMemory *AllocateMemory(InstanceData *instance, Size stack_size, Size heap_size)
 {
     for (Size i = 1; i < instance->memories.len; i++) {
         InstanceMemory *mem = instance->memories[i];
@@ -520,7 +610,7 @@ static InstanceMemory *AllocateMemory(InstanceData *instance)
 
     InstanceMemory *mem = new InstanceMemory();
 
-    mem->stack.len =
+    mem->stack.len = stack_size;
 #if defined(_WIN32)
     mem->stack.ptr = (uint8_t *)VirtualAlloc(nullptr, mem->stack.len, MEM_COMMIT | MEM_RESERVE, PAGE_READWRITE);
 #elif defined(__APPLE__)
@@ -530,7 +620,7 @@ static InstanceMemory *AllocateMemory(InstanceData *instance)
 #endif
     RG_CRITICAL(mem->stack.ptr, "Failed to allocate %1 of memory", mem->stack.len);
 
-    mem->heap.len =
+    mem->heap.len = heap_size;
 #ifdef _WIN32
     mem->heap.ptr = (uint8_t *)VirtualAlloc(nullptr, mem->heap.len, MEM_COMMIT | MEM_RESERVE, PAGE_READWRITE);
 #else
@@ -538,7 +628,7 @@ static InstanceMemory *AllocateMemory(InstanceData *instance)
 #endif
     RG_CRITICAL(mem->heap.ptr, "Failed to allocate %1 of memory", mem->heap.len);
 
-    if (instance->memories.
+    if (instance->memories.len <= instance->resident_async_pools) {
         instance->memories.Append(mem);
     } else {
         mem->temporary = true;
@@ -710,7 +800,7 @@ static Napi::Value TranslateAsyncCall(const Napi::CallbackInfo &info)
         return env.Null();
     }
 
-    InstanceMemory *mem = AllocateMemory(instance);
+    InstanceMemory *mem = AllocateMemory(instance, instance->async_stack_size, instance->async_heap_size);
     AsyncCall *async = new AsyncCall(env, instance, func, mem, callback);
 
     if (async->Prepare(info) && instance->debug) {
@@ -817,6 +907,10 @@ static Napi::Value LoadSharedLibrary(const Napi::CallbackInfo &info)
         return env.Null();
     }
 
+    if (!instance->memories.len) {
+        AllocateMemory(instance, instance->sync_stack_size, instance->sync_heap_size);
+    }
+
     // Load shared library
     void *module = nullptr;
 #ifdef _WIN32
@@ -1032,12 +1126,6 @@ InstanceMemory::~InstanceMemory()
 #endif
 }
 
-InstanceData::InstanceData()
-{
-    AllocateMemory(this);
-    RG_ASSERT(memories.len == 1);
-}
-
 InstanceData::~InstanceData()
 {
     for (InstanceMemory *mem: memories) {
@@ -1048,16 +1136,21 @@ InstanceData::~InstanceData()
 template <typename Func>
 static void SetExports(Napi::Env env, Func func)
 {
+    func("config", Napi::Function::New(env, GetSetConfig));
+
     func("struct", Napi::Function::New(env, CreatePaddedStructType));
     func("pack", Napi::Function::New(env, CreatePackedStructType));
     func("handle", Napi::Function::New(env, CreateHandleType));
     func("pointer", Napi::Function::New(env, CreatePointerType));
     func("array", Napi::Function::New(env, CreateArrayType));
     func("callback", Napi::Function::New(env, CreateCallbackType));
+
     func("sizeof", Napi::Function::New(env, GetTypeSize));
     func("alignof", Napi::Function::New(env, GetTypeAlign));
     func("introspect", Napi::Function::New(env, GetTypeDefinition));
+
     func("load", Napi::Function::New(env, LoadSharedLibrary));
+
     func("in", Napi::Function::New(env, MarkIn));
     func("out", Napi::Function::New(env, MarkOut));
     func("inout", Napi::Function::New(env, MarkInOut));
package/src/ffi.hh
CHANGED
@@ -19,8 +19,11 @@
 
 namespace RG {
 
-static const Size
-static const Size
+static const Size DefaultSyncStackSize = Mebibytes(1);
+static const Size DefaultSyncHeapSize = Mebibytes(2);
+static const Size DefaultAsyncStackSize = Kibibytes(512);
+static const Size DefaultAsyncHeapSize = Mebibytes(1);
+static const int DefaultResidentAsyncPools = 2;
 
 static const Size MaxParameters = 32;
 static const Size MaxOutParameters = 8;
@@ -205,7 +208,6 @@ struct TrampolineInfo {
 };
 
 struct InstanceData {
-    InstanceData();
     ~InstanceData();
 
     BucketArray<TypeInfo> types;
@@ -215,12 +217,18 @@ struct InstanceData {
     bool debug;
     uint64_t tag_lower;
 
-    LocalArray<InstanceMemory *,
+    LocalArray<InstanceMemory *, 16> memories;
 
     TrampolineInfo trampolines[MaxTrampolines];
     uint32_t free_trampolines = UINT32_MAX;
 
     BlockAllocator str_alloc;
+
+    Size sync_stack_size = DefaultSyncStackSize;
+    Size sync_heap_size = DefaultSyncHeapSize;
+    Size async_stack_size = DefaultAsyncStackSize;
+    Size async_heap_size = DefaultAsyncHeapSize;
+    int resident_async_pools = DefaultResidentAsyncPools;
 };
 RG_STATIC_ASSERT(MaxTrampolines <= 32);
 