hzt_asc 1.0.1 → 1.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CMakeLists.txt +8 -4
- package/ask.cc +248 -51
- package/ask.h +17 -0
- package/base64.cc +118 -0
- package/base64.h +9 -0
- package/{so → lib}/libarcsoft_face.so +0 -0
- package/linux_so/libarcsoft_face.so +0 -0
- package/{so → linux_so}/libarcsoft_face_engine.so +0 -0
- package/package.json +1 -1
- package/sample/demo.js +70 -26
package/CMakeLists.txt
CHANGED
@@ -8,11 +8,14 @@ find_package( OpenCV REQUIRED )
 include_directories( ${OpenCV_INCLUDE_DIRS} )
 include_directories(${CMAKE_JS_INC})

+#find_library(SERIALDEVICE_LIB arcsoft_face)
+
 # specify the header include directories
 include_directories(./)
 include_directories(./inc)
 # specify the static and shared library directories
-link_directories(
+link_directories(linux_so)
+link_directories(lib)
 aux_source_directory(${CMAKE_CURRENT_SOURCE_DIR} SRC)

 add_library(${PROJECT_NAME} SHARED ${SRC} ${CMAKE_JS_SRC})
@@ -20,15 +23,16 @@ add_library(${PROJECT_NAME} SHARED ${SRC} ${CMAKE_JS_SRC})



-
 # link the library files
-target_link_libraries(${PROJECT_NAME} ${
+target_link_libraries(${PROJECT_NAME} ${CMAKE_JS_LIB})

 target_link_libraries(${PROJECT_NAME} arcsoft_face)
+#link_directories(./so)
 target_link_libraries(${PROJECT_NAME} arcsoft_face_engine)

-
+
 target_link_libraries(${PROJECT_NAME} curl)
+target_link_libraries(${PROJECT_NAME} ${OpenCV_LIBS} )

 set_target_properties(${PROJECT_NAME} PROPERTIES PREFIX "" SUFFIX ".node")
 # Include Node-API wrappers
package/ask.cc
CHANGED
@@ -17,6 +17,8 @@
 #include <string.h>
 #include <time.h>
 #include <algorithm>
+
+#include "base64.h"
 using namespace std;
 using namespace cv;
 #define SafeFree(p) { if ((p)) free(p); (p) = NULL; }
@@ -26,8 +28,6 @@ using namespace cv;
 #define FACENUM 5


-
-
 Napi::Value errorData(Napi::Env env, string msg) {
    Napi::Object obj = Napi::Object::New(env);
    obj.Set(Napi::String::New(env, "errorMsg"), msg);
@@ -45,9 +45,11 @@ Napi::Object ASK::Init(Napi::Env env, Napi::Object exports) {
    //InstanceMethod("RegisterFaceFeature", &ASK::RegisterFaceFeature),
    //InstanceMethod("SearchFaceFeature", &ASK::SearchFaceFeature),
    InstanceMethod("ImageFaceCompare", &ASK::ImageFaceCompare),
+   InstanceMethod("RegisterFaces", &ASK::RegisterFaces),
    InstanceMethod("FaceDetectUrl", &ASK::faceDetectUrl),
    InstanceMethod("FaceFeatureExtractUrl", &ASK::faceFeatureExtractUrl),
-   InstanceMethod("ImageFaceCompareUrl", &ASK::ImageFaceCompareUrl)
+   InstanceMethod("ImageFaceCompareUrl", &ASK::ImageFaceCompareUrl),
+   InstanceMethod("ImageFaceCompareUrl2", &ASK::ImageFaceCompareUrl2)
  });
  exports.Set("ASK", func);
  return exports;
@@ -90,6 +92,7 @@ ASK::ASK(const Napi::CallbackInfo& info) : Napi::ObjectWrap<ASK>(info) {

 ASK::~ASK()
 {
+   delete []mFeatureInfoList;
    ASFUninitEngine(this->handle);
    printf("ASK end\n");
 }
@@ -197,17 +200,32 @@ Napi::Value ASK::faceFeatureExtract(const Napi::CallbackInfo& info) {
    }
    Napi::Array list = Napi::Array::New(env, detectedFaces.faceNum);
    for(int i = 0; i < detectedFaces.faceNum; ++i) {
-       ASF_SingleFaceInfo sData;
+       ASF_SingleFaceInfo sData = {0};
        getDetectFaceInfo(detectedFaces, sData, i);
        ASF_FaceFeature feature = { 0 };
        res = ASFFaceFeatureExtractEx(this->handle, &offscreen, &sData, &feature);
+       //cout<<&(feature.feature)<endl;
        Napi::Object data = Napi::Object::New(env);
        data.Set(Napi::String::New(env, "left"), sData.faceRect.left);
        data.Set(Napi::String::New(env, "top"), sData.faceRect.top);
        data.Set(Napi::String::New(env, "right"), sData.faceRect.right);
        data.Set(Napi::String::New(env, "bottom"), sData.faceRect.bottom);
-
-
+       // reinterpret_cast<MByte*>(buf2.Data());
+       // char* ff = encode(reinterpret_cast<const char*>(feature.feature));
+       // cout << ff << endl;
+
+       uint8* cc = base64_encode(reinterpret_cast<const uint8*>(feature.feature), feature.featureSize);
+       //cout << "c++base64: " << cc << endl;
+
+       data.Set(Napi::String::New(env, "feature64"), Napi::String::New(env, reinterpret_cast<const char*>(cc), feature.featureSize/3*4));
+
+       // Base64Encoder encoder;
+       // const string &encodedText = encoder.encode(reinterpret_cast<const char*>(feature.feature));
+       // cout << encodedText << endl;
+
+       // data.Set(Napi::String::New(env, "featureSize"), feature.featureSize);
+       // data.Set(Napi::String::New(env, "feature"), Napi::ArrayBuffer::New(env, feature.feature, feature.featureSize));
+       // cout << "addr: "<< &(feature.feature) << endl;
        list[i] = data;

    }
@@ -247,8 +265,12 @@ Napi::Value ASK::faceFeatureExtractUrl(const Napi::CallbackInfo& info) {
        data.Set(Napi::String::New(env, "top"), sData.faceRect.top);
        data.Set(Napi::String::New(env, "right"), sData.faceRect.right);
        data.Set(Napi::String::New(env, "bottom"), sData.faceRect.bottom);
-
-
+       uint8* cc = base64_encode(reinterpret_cast<const uint8*>(feature.feature), feature.featureSize);
+       //cout << "c++base64: " << cc << endl;
+       data.Set(Napi::String::New(env, "feature64"), Napi::String::New(env, reinterpret_cast<const char*>(cc), feature.featureSize/3*4));
+       // data.Set(Napi::String::New(env, "featureSize"), feature.featureSize);
+       // data.Set(Napi::String::New(env, "feature"), Napi::ArrayBuffer::New(env, feature.feature, feature.featureSize));
+       // cout << "addr: "<< &(feature.feature) << endl;
        list[i] = data;

    }
@@ -300,6 +322,12 @@ Napi::Value ASK::faceFeatureInfo(const Napi::CallbackInfo& info) {
    return obj;
 }

+void strToFeatureInfo(string& feature, ASF_FaceFeature& featureInfo) {
+   uint8* feature11 = base64_decode(reinterpret_cast<const uint8 *>(feature.c_str()), feature.size());
+   featureInfo.featureSize = feature.size() / 4 * 3;
+   featureInfo.feature = reinterpret_cast<MByte*>(feature11);
+}
+

 Napi::Value ASK::faceFeatureCompare(const Napi::CallbackInfo& info) {
    Napi::Env env = info.Env();
@@ -308,47 +336,31 @@ Napi::Value ASK::faceFeatureCompare(const Napi::CallbackInfo& info) {
        Napi::Error::New(info.Env(), "Expected exactly one argument").ThrowAsJavaScriptException();
        return errorData(env, "input expected"); //info.Env().Undefined();
    }
-   if (!info[0].
-       Napi::Error::New(info.Env(), "Expected an
+   if (!info[0].IsString()) {
+       Napi::Error::New(info.Env(), "Expected an String").ThrowAsJavaScriptException();
        return errorData(env, "input expected"); //info.Env().Undefined();
    }

-   if (!info[1].
-       Napi::Error::New(info.Env(), "Expected an
+   if (!info[1].IsString()) {
+       Napi::Error::New(info.Env(), "Expected an String").ThrowAsJavaScriptException();
        return errorData(env, "input expected"); //info.Env().Undefined();
    }

-
-
-
+   string feature1 = info[0].As<Napi::String>().ToString();
+   string feature2 = info[1].As<Napi::String>().ToString();
    ASF_FaceFeature featureInfo1 = {};
-   featureInfo1.featureSize = buf.ByteLength();
-   featureInfo1.feature = reinterpret_cast<MByte*>(buf.Data());
-
-   Napi::ArrayBuffer buf2 = info[1].As<Napi::ArrayBuffer>();
-   //printf("buf2 byte size %d \n", buf2.ByteLength());
    ASF_FaceFeature featureInfo2 = {};
-
-
+
+   strToFeatureInfo(feature1, featureInfo1);
+   strToFeatureInfo(feature2, featureInfo2);
+
    MFloat confidenceLevel;
    MRESULT res = ASFFaceFeatureCompare(this->handle, &featureInfo1, &featureInfo2, &confidenceLevel);
    //cout << "result: " << res << ", score: " << confidenceLevel << endl;
    return Napi::Number::New(env, confidenceLevel);
-}

+}

-typedef struct __face_score
-{
-   MInt32 searchId;
-   MFloat score;
-   string name;
-} MFaceSore, *PMFaceSore;
-
-typedef struct{
-   MInt32 searchId; // unique identifier
-   LPASF_FaceFeature feature; // face feature value
-   MPCChar tag; // remark
-}ASF_FaceFeatureInfo, *LPASF_FaceFeatureInfo;

 bool compareScore(MFaceSore& a, MFaceSore& b){
    if (b.score > a.score) return false;
@@ -387,10 +399,17 @@ Napi::Value ASK::ImageFaceCompare(const Napi::CallbackInfo& info) {
        MInt32 searchId = obj.Get(static_cast<napi_value>(Napi::String::New(info.Env(),"searchId"))).As<Napi::Number>().Uint32Value();
        std::string* tag = new std::string();
        *tag = obj.Get(static_cast<napi_value>(Napi::String::New(info.Env(),"tag"))).As<Napi::String>().ToString();
-
+
        ASF_FaceFeature* featureInfo = new ASF_FaceFeature();
-
-
+
+       std::string* feature = new std::string();
+       *feature = obj.Get(static_cast<napi_value>(Napi::String::New(info.Env(),"feature"))).As<Napi::String>().ToString();
+       strToFeatureInfo(*feature, *featureInfo);
+
+       // Napi::ArrayBuffer buf = obj.Get(static_cast<napi_value>(Napi::String::New(info.Env(),"feature"))).As<Napi::ArrayBuffer>();
+       // featureInfo->featureSize = buf.ByteLength();
+       // featureInfo->feature = reinterpret_cast<MByte*>(buf.Data());
+
        ASF_FaceFeatureInfo* info = new ASF_FaceFeatureInfo();
        info->searchId = searchId;
        info->tag = (*tag).c_str();
@@ -409,6 +428,7 @@ Napi::Value ASK::ImageFaceCompare(const Napi::CallbackInfo& info) {
        for(int j = 0; j < len; ++j) {
            MFloat confidenceLevel;
            MRESULT res1 = ASFFaceFeatureCompare(this->handle, &feature, featureInfoList[j].feature, &confidenceLevel);
+           cout << confidenceLevel <<endl;
            MFaceSore* pFace = new MFaceSore();
            pFace->searchId = featureInfoList[j].searchId;
            pFace->score = confidenceLevel;
@@ -440,6 +460,103 @@ Napi::Value ASK::ImageFaceCompare(const Napi::CallbackInfo& info) {
    return obj;
 }

+Napi::Value ASK::RegisterFaces(const Napi::CallbackInfo& info) {
+   Napi::Env env = info.Env();
+   int length = info.Length();
+   if (length < 1) {
+       Napi::TypeError::New(env, "input expected").ThrowAsJavaScriptException();
+       return errorData(env, "input expected");
+   }
+   //input arguments
+   if (!info[0].IsArray()) {
+       Napi::TypeError::New(env, "array expected").ThrowAsJavaScriptException();
+       return errorData(env, "array expected");
+   }
+   Napi::Array arr = info[0].As<Napi::Array>();
+   MUInt32 len = arr.Length();
+   ASF_FaceFeatureInfo* featureInfoList = new ASF_FaceFeatureInfo[len];
+   for(int i = 0; i < len; ++i) {
+       Napi::Object obj = static_cast<Napi::Value>(arr[i]).As<Napi::Object>().ToObject();
+       MInt32 searchId = obj.Get(static_cast<napi_value>(Napi::String::New(info.Env(),"searchId"))).As<Napi::Number>().Uint32Value();
+       std::string* tag = new std::string();
+       *tag = obj.Get(static_cast<napi_value>(Napi::String::New(info.Env(),"tag"))).As<Napi::String>().ToString();
+       //Napi::ArrayBuffer buf = obj.Get(static_cast<napi_value>(Napi::String::New(info.Env(),"feature"))).As<Napi::ArrayBuffer>();
+       ASF_FaceFeature* featureInfo = new ASF_FaceFeature();
+       std::string* feature = new std::string();
+       *feature = obj.Get(static_cast<napi_value>(Napi::String::New(info.Env(),"feature"))).As<Napi::String>().ToString();
+       strToFeatureInfo(*feature, *featureInfo);
+       featureInfoList[i].searchId = searchId;
+       featureInfoList[i].tag = (*tag).c_str();
+       featureInfoList[i].feature = featureInfo;
+   }
+   mFeatureInfoList = featureInfoList;
+   mFeatureLen = len;
+   return Napi::Number::New(env, 1);
+}
+
+Napi::Value ASK::ImageFaceCompareUrl2(const Napi::CallbackInfo& info) {
+   Napi::Env env = info.Env();
+   int length = info.Length();
+   if (length < 1) {
+       Napi::TypeError::New(env, "input expected").ThrowAsJavaScriptException();
+       return errorData(env, "input expected");
+   }
+   string filePath = info[0].As<Napi::String>().ToString();
+   ASVLOFFSCREEN offscreen = { 0 };
+   MUInt8* imageData;
+   processFileUrl(filePath.c_str(), imageData, offscreen);
+   ASF_MultiFaceInfo detectedFaces = { 0 };
+   MRESULT res = ASFDetectFacesEx(this->handle, &offscreen, &detectedFaces);
+   Napi::Object obj = Napi::Object::New(env);
+   obj.Set(Napi::String::New(env, "faceNum"), detectedFaces.faceNum);
+   if (0 == detectedFaces.faceNum) {
+       SafeArrayDelete(imageData);
+       return obj;
+   }
+   //compare
+   Napi::Array list = Napi::Array::New(env, detectedFaces.faceNum);
+   for(int i = 0; i < detectedFaces.faceNum; ++i) {
+       ASF_SingleFaceInfo sData;
+       getDetectFaceInfo(detectedFaces, sData, i);
+       ASF_FaceFeature feature = { 0 };
+       res = ASFFaceFeatureExtractEx(handle, &offscreen, &sData, &feature);
+       MFaceSore* faceScores = new MFaceSore[mFeatureLen];
+       for(int j = 0; j < mFeatureLen; ++j) {
+           MFloat confidenceLevel;
+           MRESULT res1 = ASFFaceFeatureCompare(this->handle, &feature, mFeatureInfoList[j].feature, &confidenceLevel);
+           faceScores[j].searchId = mFeatureInfoList[j].searchId;
+           faceScores[j].score = confidenceLevel;
+           faceScores[j].name = mFeatureInfoList[j].tag;
+       }
+       sort(faceScores, faceScores + mFeatureLen, compareScore);
+       int ll = mFeatureLen > 5? 5 : mFeatureLen;
+       Napi::Array mFaceScores = Napi::Array::New(env, ll);
+       for(int n = 0; n < ll; ++n) {
+           Napi::Object faceScore = Napi::Object::New(env);
+           faceScore.Set(Napi::String::New(env, "searchId"), faceScores[n].searchId);
+           faceScore.Set(Napi::String::New(env, "score"), faceScores[n].score);
+           faceScore.Set(Napi::String::New(env, "name"), faceScores[n].name);
+           mFaceScores[n] = faceScore;
+       }
+       Napi::Object data = Napi::Object::New(env);
+       data.Set(Napi::String::New(env, "left"), sData.faceRect.left);
+       data.Set(Napi::String::New(env, "top"), sData.faceRect.top);
+       data.Set(Napi::String::New(env, "right"), sData.faceRect.right);
+       data.Set(Napi::String::New(env, "bottom"), sData.faceRect.bottom);
+       data.Set(Napi::String::New(env, "faceScores"), mFaceScores);
+       list[i] = data;
+       delete []faceScores;
+   }
+   obj.Set(Napi::String::New(env, "faceRects"), list);
+   SafeArrayDelete(imageData);
+   // for(int i = 0; i < len; ++i) {
+   //     delete featureInfoList[i].feature;
+   // }
+   //delete []featureInfoList;
+   return obj;
+}
+
+
 Napi::Value ASK::ImageFaceCompareUrl(const Napi::CallbackInfo& info) {
    Napi::Env env = info.Env();
    int length = info.Length();
@@ -456,6 +573,7 @@ Napi::Value ASK::ImageFaceCompareUrl(const Napi::CallbackInfo& info) {
    Napi::Object obj = Napi::Object::New(env);
    obj.Set(Napi::String::New(env, "faceNum"), detectedFaces.faceNum);
    if (0 == detectedFaces.faceNum) {
+       SafeArrayDelete(imageData);
        return obj;
    }

@@ -472,15 +590,18 @@ Napi::Value ASK::ImageFaceCompareUrl(const Napi::CallbackInfo& info) {
        MInt32 searchId = obj.Get(static_cast<napi_value>(Napi::String::New(info.Env(),"searchId"))).As<Napi::Number>().Uint32Value();
        std::string* tag = new std::string();
        *tag = obj.Get(static_cast<napi_value>(Napi::String::New(info.Env(),"tag"))).As<Napi::String>().ToString();
-       Napi::ArrayBuffer buf = obj.Get(static_cast<napi_value>(Napi::String::New(info.Env(),"feature"))).As<Napi::ArrayBuffer>();
+       //Napi::ArrayBuffer buf = obj.Get(static_cast<napi_value>(Napi::String::New(info.Env(),"feature"))).As<Napi::ArrayBuffer>();
        ASF_FaceFeature* featureInfo = new ASF_FaceFeature();
-
-
-
-
-
-       info
-       featureInfoList[i] =
+       std::string* feature = new std::string();
+       *feature = obj.Get(static_cast<napi_value>(Napi::String::New(info.Env(),"feature"))).As<Napi::String>().ToString();
+       strToFeatureInfo(*feature, *featureInfo);
+       // featureInfo->featureSize = buf.ByteLength();
+       // featureInfo->feature = reinterpret_cast<MByte*>(buf.Data());;
+       //ASF_FaceFeatureInfo* info = new ASF_FaceFeatureInfo();
+       featureInfoList[i].searchId = searchId;
+       featureInfoList[i].tag = (*tag).c_str();
+       featureInfoList[i].feature = featureInfo;
+       //featureInfoList[i] = *info;
    }

    //compare
@@ -494,11 +615,12 @@ Napi::Value ASK::ImageFaceCompareUrl(const Napi::CallbackInfo& info) {
        for(int j = 0; j < len; ++j) {
            MFloat confidenceLevel;
            MRESULT res1 = ASFFaceFeatureCompare(this->handle, &feature, featureInfoList[j].feature, &confidenceLevel);
-
-           pFace
-
-
-           faceScores[j] =
+           //cout << confidenceLevel <<endl;
+           //MFaceSore* pFace = new MFaceSore();
+           faceScores[j].searchId = featureInfoList[j].searchId;
+           faceScores[j].score = confidenceLevel;
+           faceScores[j].name = featureInfoList[j].tag;
+           // faceScores[j] = *pFace;
        }
        sort(faceScores, faceScores + len, compareScore);
        int ll = len > 5? 5 : len;
@@ -521,6 +643,9 @@ Napi::Value ASK::ImageFaceCompareUrl(const Napi::CallbackInfo& info) {
    }
    obj.Set(Napi::String::New(env, "faceRects"), list);
    SafeArrayDelete(imageData);
+   for(int i = 0; i < len; ++i) {
+       delete featureInfoList[i].feature;
+   }
    delete []featureInfoList;
    return obj;
 }
@@ -611,6 +736,7 @@ int ASK::ColorSpaceConversion(MInt32 width, MInt32 height, MInt32 format, MUInt8
 //read the file, correct the image and convert the colour channels
 void ASK::processFile(const char* filePath, MUInt8* imageData, ASVLOFFSCREEN& offscreen) {
    cv::Mat src = imread(filePath);
+   cout<<filePath<<endl;
    int Width = src.cols;
    int Height = src.rows;
    cout << "Width:" << Width << ",Height:"<<Height<<endl;
@@ -787,3 +913,74 @@ void ASK::processFileUrl(const char* url, MUInt8* imageData, ASVLOFFSCREEN& off
 // SafeArrayDelete(imageData);
 // return obj;
 // }
+
+
+
+
+// #include <b64/cencode.h>
+// #include <b64/cdecode.h>
+
+// #include <stdio.h>
+// #include <stdlib.h>
+// #include <string.h>
+
+// #include <assert.h>
+
+// /* arbitrary buffer size */
+// #define SIZE 100
+
+// char* encode(const char* input)
+// {
+//     /* set up a destination buffer large enough to hold the encoded data */
+//     char* output = (char*)malloc(1376);
+//     /* keep track of our encoded position */
+//     char* c = output;
+//     /* store the number of bytes encoded by a single call */
+//     int cnt = 0;
+//     /* we need an encoder state */
+//     base64_encodestate s;
+
+//     /*---------- START ENCODING ----------*/
+//     /* initialise the encoder state */
+//     base64_init_encodestate(&s);
+//     /* gather data from the input and send it to the output */
+//     cnt = base64_encode_block(input, strlen(input), c, &s);
+//     c += cnt;
+//     /* since we have encoded the entire input string, we know that
+//        there is no more input data; finalise the encoding */
+//     cnt = base64_encode_blockend(c, &s);
+//     c += cnt;
+//     /*---------- STOP ENCODING ----------*/
+
+//     /* we want to print the encoded data, so null-terminate it: */
+//     *c = 0;
+
+//     return output;
+// }
+
+// char* decode(const char* input)
+// {
+//     /* set up a destination buffer large enough to hold the encoded data */
+//     char* output = (char*)malloc(1032);
+//     /* keep track of our decoded position */
+//     char* c = output;
+//     /* store the number of bytes decoded by a single call */
+//     int cnt = 0;
+//     /* we need a decoder state */
+//     base64_decodestate s;
+
+//     /*---------- START DECODING ----------*/
+//     /* initialise the decoder state */
+//     base64_init_decodestate(&s);
+//     /* decode the input data */
+//     cnt = base64_decode_block(input, strlen(input), c, &s);
+//     c += cnt;
+//     /* note: there is no base64_decode_blockend! */
+//     /*---------- STOP DECODING ----------*/
+
+//     /* we want to print the decoded data, so null-terminate it: */
+//     *c = 0;
+
+//     return output;
+// }
+
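
The net effect of the ask.cc changes above is that face features now cross the JS/C++ boundary as base64 strings ("feature64") instead of ArrayBuffers; strToFeatureInfo() decodes them back into ASF_FaceFeature structs on the way in. A minimal sketch of the new call flow from Node, assuming an engine instance `ask` constructed as in sample/demo.js (paths and URLs here are placeholders):

// extraction returns one entry per detected face, each carrying a base64 feature string
const a = ask.FaceFeatureExtractUrl("http://example.com/face-a.jpg");
const b = ask.FaceFeatureExtract("/path/to/face-b.jpg");
const fa = a.faceRects[0].feature64;
const fb = b.faceRects[0].feature64;

// FaceFeatureCompare now takes the two strings directly; the addon base64-decodes
// them before calling ASFFaceFeatureCompare and returns the confidence level
const score = ask.FaceFeatureCompare(fa, fb);
console.log("confidence", score);
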
package/ask.h
CHANGED
@@ -6,6 +6,19 @@
 #include "amcomdef.h"
 #include "asvloffscreen.h"
 #include "merror.h"
+#include <string>
+typedef struct __face_score
+{
+   MInt32 searchId;
+   MFloat score;
+   std::string name;
+} MFaceSore, *PMFaceSore;
+
+typedef struct{
+   MInt32 searchId; // unique identifier
+   LPASF_FaceFeature feature; // face feature value
+   MPCChar tag; // remark
+}ASF_FaceFeatureInfo, *LPASF_FaceFeatureInfo;

 class ASK : public Napi::ObjectWrap<ASK> {
   public:
@@ -28,6 +41,8 @@ class ASK : public Napi::ObjectWrap<ASK> {
    Napi::Value faceDetectUrl(const Napi::CallbackInfo& info);
    Napi::Value faceFeatureExtractUrl(const Napi::CallbackInfo& info);
    Napi::Value ImageFaceCompareUrl(const Napi::CallbackInfo& info);
+   Napi::Value ImageFaceCompareUrl2(const Napi::CallbackInfo& info);
+   Napi::Value RegisterFaces(const Napi::CallbackInfo& info);

   private:
    void getSdkRelatedInfo();
@@ -40,6 +55,8 @@ class ASK : public Napi::ObjectWrap<ASK> {
    // void mFaceFeatureExtract(ASVLOFFSCREEN& offscree, ASF_MultiFaceInfo* detectedFaces);

    MHandle handle;
+   ASF_FaceFeatureInfo* mFeatureInfoList;
+   MUInt32 mFeatureLen;
 };

 #endif
package/base64.cc
ADDED
@@ -0,0 +1,118 @@
+#include "base64.h"
+#include <vector>
+#include <fstream>
+#include <iostream>
+#include <string>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <time.h>
+#include <algorithm>
+#include <assert.h>
+using namespace std;
+
+// encoding alphabet
+static uint8 alphabet_map[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
+// decoding table
+static uint8 reverse_map[] =
+{
+    255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
+    255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
+    255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 62, 255, 255, 255, 63,
+    52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 255, 255, 255, 255, 255, 255,
+    255, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
+    15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 255, 255, 255, 255, 255,
+    255, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40,
+    41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 255, 255, 255, 255, 255
+};
+
+/*
+ * Encode
+ * Takes the address of the data to encode and its length
+ * Returns: the encoded data
+ */
+uint8* base64_encode(const uint8 *text, uint32 text_len)
+{
+    // compute the length of the encoded output:
+    // Base64 splits every 3 bytes (24 bits) into four 6-bit groups with the top two bits zeroed,
+    // turning each group into a value 0-63 that is looked up in the encoding alphabet
+    int encode_length = text_len/3*4;
+    if(text_len%3>0)
+    {
+        encode_length += 4;
+    }
+
+    // allocate memory for the encoded output
+    uint8 *encode = (uint8*)malloc(encode_length);
+
+    // encode
+    uint32 i, j;
+    for (i = 0, j = 0; i+3 <= text_len; i+=3)
+    {
+        encode[j++] = alphabet_map[text[i]>>2];                           // top 6 bits of byte 1
+        encode[j++] = alphabet_map[((text[i]<<4)&0x30)|(text[i+1]>>4)];   // low 2 bits of byte 1 + top 4 bits of byte 2
+        encode[j++] = alphabet_map[((text[i+1]<<2)&0x3c)|(text[i+2]>>6)]; // low 4 bits of byte 2 + top 2 bits of byte 3
+        encode[j++] = alphabet_map[text[i+2]&0x3f];                       // low 6 bits of byte 3
+    }
+
+    // pad the final group when fewer than 3 bytes remain
+    if (i < text_len)
+    {
+        uint32 tail = text_len - i;
+        if (tail == 1)
+        {
+            encode[j++] = alphabet_map[text[i]>>2];
+            encode[j++] = alphabet_map[(text[i]<<4)&0x30];
+            encode[j++] = '=';
+            encode[j++] = '=';
+        }
+        else //tail==2
+        {
+            encode[j++] = alphabet_map[text[i]>>2];
+            encode[j++] = alphabet_map[((text[i]<<4)&0x30)|(text[i+1]>>4)];
+            encode[j++] = alphabet_map[(text[i+1]<<2)&0x3c];
+            encode[j++] = '=';
+        }
+    }
+    return encode;
+}
+
+
+
+uint8* base64_decode(const uint8 *code, uint32 code_len)
+{
+    // encoded base64 data is always a multiple of 4 bytes long
+    assert((code_len&0x03) == 0); // abort if the length is not a multiple of 4
+
+    // allocate memory for the decoded output
+    uint8 *plain = (uint8*)malloc(code_len/4*3);
+
+    // decode
+    uint32 i, j = 0;
+    uint8 quad[4];
+    for (i = 0; i < code_len; i+=4)
+    {
+        for (uint32 k = 0; k < 4; k++)
+        {
+            quad[k] = reverse_map[code[i+k]]; // map each character of the 4-character group back to its 6-bit value
+        }
+
+        assert(quad[0]<64 && quad[1]<64);
+
+        plain[j++] = (quad[0]<<2)|(quad[1]>>4); // 6 bits of char 1 + top 2 bits of char 2
+
+        if (quad[2] >= 64)
+            break;
+        else if (quad[3] >= 64)
+        {
+            plain[j++] = (quad[1]<<4)|(quad[2]>>2); // low 4 bits of char 2 + top 4 bits of char 3
+            break;
+        }
+        else
+        {
+            plain[j++] = (quad[1]<<4)|(quad[2]>>2);
+            plain[j++] = (quad[2]<<6)|quad[3]; // low 2 bits of char 3 + 6 bits of char 4
+        }
+    }
+    return plain;
+}
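
Note that ask.cc reports the encoded string length as featureSize/3*4, which matches base64_encode() (and standard base64) only when featureSize is a multiple of 3; the commented-out malloc(1032)/malloc(1376) sizes elsewhere in this diff suggest that holds for the ArcSoft feature blob. A hedged consistency check from Node, with feature64 obtained as in the earlier sketch:

// decoding with Node's built-in base64 should recover the original featureSize bytes
const raw = Buffer.from(feature64, "base64");
console.log(raw.length);                            // expected: featureSize (1032 per the commented-out sizes)
console.log(raw.toString("base64") === feature64);  // true while featureSize % 3 === 0 (no '=' padding needed)
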
package/base64.h
ADDED
(contents not rendered in this view)

package/{so → lib}/libarcsoft_face.so
package/linux_so/libarcsoft_face.so
package/{so → linux_so}/libarcsoft_face_engine.so
Binary files; no textual diff
package/package.json
CHANGED
package/sample/demo.js
CHANGED
@@ -15,49 +15,93 @@ const ACTIVEKEY = "82K1-118E-813R-CJQ3";

 let start = Date.now();
 let ask = new addon.ASK(APPID, SDKKEY, ACTIVEKEY);
+let ask2 = new addon.ASK(APPID, SDKKEY, ACTIVEKEY);
 printMem()
 console.log("init end", Date.now() - start);

-for(let i = 0; i < 1; ++i) {
-
-
-
+for(let i = 0; i < 1; ++i) {
+    let num = ask.FaceDetect("/home/haizitong/workspace/hztFace/72b57b47372c490a88663c334e530530.png")
+    console.log("FaceDetect", num)
+    printMem()

-
-
-}
+    let det = ask.FaceDetectUrl("http://min.haizitong.com/2/ali/i/a5f91d52783d49989777e8b82b545e2c");
+    console.log("det", det);
+}
+
+let urls = [
+    "http://min.haizitong.com/2/ali/i/ba676af499174fbab46525605974ca79",
+    "http://min.haizitong.com/2/ali/i/4449177b33604d45ad96497c95a00284",
+    "http://min.haizitong.com/2/ali/i/a6daaf38d3924b64a26ba603ab155744",
+    "http://min.haizitong.com/2/ali/i/e537426869fd4809a9e8e8dd481b1432"
+];

 // console.log(num)
 console.log("detect end", Date.now() - start);
 let feature = null;
+let feature2 = null;
+let s1, s2;
 for(let i = 0; i < 1; ++i) {
-    let num2 = ask.
+    let num2 = ask.FaceFeatureExtractUrl("http://min.haizitong.com/2/ali/i/90780254ca34403cbb542951431e59dc");
     console.log("FaceFeatureExtract", num2);
     if (num2.faceNum && num2.faceRects.length) {
-        feature = num2.faceRects[0].
+        feature = num2.faceRects[0].feature64;
+        // let str = Buffer.from(feature).toString('base64');
+        // //console.log("str", str, str.length);
+        // s1 = str;
     }
-    let num22 = ask.
-    console.log("
+    let num22 = ask.FaceFeatureExtract(path.join(__dirname, '../images/20200829105722.jpg'));
+    console.log("FaceFeatureExtract", num22);
+    if (num22.faceNum && num22.faceRects.length) {
+        feature2 = num22.faceRects[0].feature64;
+        // let str = Buffer.from(feature2).toString('base64');
+        // console.log("str", str, str.length);
+        // s2 = str;
+    }
+    //console.log("FaceFeatureExtract2", num22);
    printMem()
+    console.log("xx", s1 == s2, feature == feature2)
 }
 // console.log("extract end", Date.now() - start);

-
-
-
-
-
-
-
-
-
-
-//
-
-//
-//
+
+function toArrayBuffer(buf) {
+    var ab = new ArrayBuffer(buf.length);
+    var view = new Uint8Array(ab);
+    for (var i = 0; i < buf.length; ++i) {
+        view[i] = buf[i];
+    }
+    return ab;
+}
+
+//FaceFeatureCompare
+if (feature) {
+    // let str = Buffer.from(feature).toString('base64');
+    // console.log("str", feature, str, str.length);
+
+    // let str2 = Buffer.from(feature2).toString('base64');
+    // console.log("str2", feature2, str2, str2.length, str2 == str);
+
+
+    //console.log("xxx", s1, s2);
+
+    // let ss1 = Buffer.from(s1, 'base64');
+    // let ss2 = Buffer.from(s2, 'base64');
+    // console.log("xx", s1 == s2, feature == feature2)
+
+    //let compareResult = ask.FaceFeatureCompare(toArrayBuffer(ss1), toArrayBuffer(ss2));
+    let compareResult = ask.FaceFeatureCompare(feature, feature2);
+    console.log("FaceFeatureCompare", compareResult);
+    //console.log("FaceFeatureCompare end", Date.now() - start);
+    let list = [
+        {searchId: 1, tag: "wo", feature: feature2},
+        {searchId: 2, tag: "wo2", feature},
+    ];
+    let imageResult = ask.ImageFaceCompare(path.join(__dirname, '../images/20200829105722.jpg'), list, 1);
+    console.log("imageResult", JSON.stringify(imageResult, null, 4));
+    console.log("ImageFaceCompare end", Date.now() - start);

-
+
+}


 // let num5 = ask.FaceFeatureInfo(path.join(__dirname, '../images/20200829105722.jpg'))