hzt_asc 1.0.4 → 1.0.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/ask.cc +211 -227
- package/ask.h +11 -6
- package/package.json +1 -1
- package/sample/demo.js +1458 -71
package/ask.cc
CHANGED
@@ -92,7 +92,7 @@ ASK::ASK(const Napi::CallbackInfo& info) : Napi::ObjectWrap<ASK>(info) {
 
 ASK::~ASK()
 {
-    delete []mFeatureInfoList;
+    // delete []mFeatureInfoList;
     ASFUninitEngine(this->handle);
     printf("ASK end\n");
 }
@@ -117,13 +117,13 @@ Napi::Value ASK::faceDetect(const Napi::CallbackInfo& info) {
         return errorData(env, "input expected"); //Napi::Number::New(info.Env(), -1);
     }
     string filePath = info[0].As<Napi::String>().ToString();
+    int Width, Height;
+    MUInt8* imageData = processFile(filePath.c_str(), Width, Height);
     ASVLOFFSCREEN offscreen = { 0 };
-
-    processFile(filePath.c_str(), imageData, offscreen);
+    ColorSpaceConversion(Width, Height, ASVL_PAF_NV21, imageData, offscreen);
     ASF_MultiFaceInfo detectedFaces = { 0 };
     MRESULT res = ASFDetectFacesEx(this->handle, &offscreen, &detectedFaces);
-
-
+
     Napi::Object obj = Napi::Object::New(env);
     obj.Set(Napi::String::New(env, "faceNum"), detectedFaces.faceNum);
     Napi::Array list = Napi::Array::New(env, detectedFaces.faceNum);
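Every handler touched in this diff now follows the same pattern: `processFile`/`processFileUrl` return a malloc'd YUV buffer and report the picture size through out-parameters, `ColorSpaceConversion` wraps that buffer into the `ASVLOFFSCREEN` handed to the ArcSoft calls, and `SafeFree(imageData)` releases the buffer on every return path. The standalone sketch below only illustrates that ownership rule; `fakeProcessFile` is a made-up stand-in for the real loader, and the `unique_ptr`-with-free-deleter form is just an equivalent way to express "free exactly once on every path", not what the package itself does.

```cpp
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <memory>

// Made-up stand-in for processFile()/processFileUrl(): returns a malloc'd YUV buffer
// and reports the picture size through out-parameters; the caller owns the buffer.
unsigned char* fakeProcessFile(int& picWidth, int& picHeight) {
    picWidth = 640;
    picHeight = 480;
    int len = picWidth * picHeight * 3 / 2;   // same I420/NV21 size used in the diff
    unsigned char* data = (unsigned char*)std::malloc(len);
    std::memset(data, 0, len);
    return data;
}

int main() {
    int Width = 0, Height = 0;
    // The handlers call SafeFree(imageData) by hand before each return; a unique_ptr
    // with a free-style deleter states the same contract in one place.
    std::unique_ptr<unsigned char, void (*)(void*)> imageData(
        fakeProcessFile(Width, Height), std::free);
    std::printf("loaded %dx%d image buffer\n", Width, Height);
    return 0;   // buffer released exactly once, whatever path returns
}
```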
@@ -138,11 +138,10 @@ Napi::Value ASK::faceDetect(const Napi::CallbackInfo& info) {
         list[i] = data;
     }
     obj.Set(Napi::String::New(env, "faceRects"), list);
-
+    SafeFree(imageData);
     return obj;
 }
 
-
 Napi::Value ASK::faceDetectUrl(const Napi::CallbackInfo& info) {
     Napi::Env env = info.Env();
     int length = info.Length();
@@ -151,13 +150,13 @@ Napi::Value ASK::faceDetectUrl(const Napi::CallbackInfo& info) {
         return errorData(env, "input expected"); //Napi::Number::New(info.Env(), -1);
     }
     string filePath = info[0].As<Napi::String>().ToString();
+    int Width, Height;
+    MUInt8* imageData = processFileUrl(filePath.c_str(), Width, Height);
     ASVLOFFSCREEN offscreen = { 0 };
-
-    processFileUrl(filePath.c_str(), imageData, offscreen);
+    ColorSpaceConversion(Width, Height, ASVL_PAF_NV21, imageData, offscreen);
     ASF_MultiFaceInfo detectedFaces = { 0 };
     MRESULT res = ASFDetectFacesEx(this->handle, &offscreen, &detectedFaces);
-
-
+
     Napi::Object obj = Napi::Object::New(env);
     obj.Set(Napi::String::New(env, "faceNum"), detectedFaces.faceNum);
     Napi::Array list = Napi::Array::New(env, detectedFaces.faceNum);
@@ -172,12 +171,10 @@ Napi::Value ASK::faceDetectUrl(const Napi::CallbackInfo& info) {
         list[i] = data;
     }
     obj.Set(Napi::String::New(env, "faceRects"), list);
-
+    SafeFree(imageData);
     return obj;
 }
 
-
-
 Napi::Value ASK::faceFeatureExtract(const Napi::CallbackInfo& info) {
     Napi::Env env = info.Env();
     int length = info.Length();
@@ -187,15 +184,17 @@ Napi::Value ASK::faceFeatureExtract(const Napi::CallbackInfo& info) {
     }
     string filePath = info[0].As<Napi::String>().ToString();
 
+    int Width, Height;
+    MUInt8* imageData = processFile(filePath.c_str(), Width, Height);
     ASVLOFFSCREEN offscreen = { 0 };
-
-    processFile(filePath.c_str(), imageData, offscreen);
+    ColorSpaceConversion(Width, Height, ASVL_PAF_NV21, imageData, offscreen);
     ASF_MultiFaceInfo detectedFaces = { 0 };
     MRESULT res = ASFDetectFacesEx(this->handle, &offscreen, &detectedFaces);
 
     Napi::Object obj = Napi::Object::New(env);
     obj.Set(Napi::String::New(env, "faceNum"), detectedFaces.faceNum);
     if (0 == detectedFaces.faceNum) {
+        SafeFree(imageData);
         return obj;
     }
     Napi::Array list = Napi::Array::New(env, detectedFaces.faceNum);
@@ -230,7 +229,7 @@ Napi::Value ASK::faceFeatureExtract(const Napi::CallbackInfo& info) {
 
     }
     obj.Set(Napi::String::New(env, "faceRects"), list);
-
+    SafeFree(imageData);
     return obj;
 }
 
@@ -243,15 +242,18 @@ Napi::Value ASK::faceFeatureExtractUrl(const Napi::CallbackInfo& info) {
     }
     string filePath = info[0].As<Napi::String>().ToString();
 
+    int Width, Height;
+    MUInt8* imageData = processFileUrl(filePath.c_str(), Width, Height);
     ASVLOFFSCREEN offscreen = { 0 };
-
-
+    ColorSpaceConversion(Width, Height, ASVL_PAF_NV21, imageData, offscreen);
+
     ASF_MultiFaceInfo detectedFaces = { 0 };
     MRESULT res = ASFDetectFacesEx(this->handle, &offscreen, &detectedFaces);
 
     Napi::Object obj = Napi::Object::New(env);
     obj.Set(Napi::String::New(env, "faceNum"), detectedFaces.faceNum);
     if (0 == detectedFaces.faceNum) {
+        SafeFree(imageData);
         return obj;
     }
     Napi::Array list = Napi::Array::New(env, detectedFaces.faceNum);
@@ -275,11 +277,10 @@ Napi::Value ASK::faceFeatureExtractUrl(const Napi::CallbackInfo& info) {
 
     }
     obj.Set(Napi::String::New(env, "faceRects"), list);
-
+    SafeFree(imageData);
     return obj;
 }
 
-
 Napi::Value ASK::faceFeatureInfo(const Napi::CallbackInfo& info) {
     Napi::Env env = info.Env();
     int length = info.Length();
@@ -288,9 +289,10 @@ Napi::Value ASK::faceFeatureInfo(const Napi::CallbackInfo& info) {
         return errorData(env, "input expected"); //Napi::Number::New(info.Env(), -1);
     }
     string filePath = info[0].As<Napi::String>().ToString();
+    int Width, Height;
+    MUInt8* imageData = processFile(filePath.c_str(), Width, Height);
     ASVLOFFSCREEN offscreen = { 0 };
-
-    processFile(filePath.c_str(), imageData, offscreen);
+    ColorSpaceConversion(Width, Height, ASVL_PAF_NV21, imageData, offscreen);
     ASF_MultiFaceInfo detectedFaces = { 0 };
     MRESULT res = ASFDetectFacesEx(this->handle, &offscreen, &detectedFaces);
 
@@ -300,6 +302,7 @@ Napi::Value ASK::faceFeatureInfo(const Napi::CallbackInfo& info) {
     Napi::Object obj = Napi::Object::New(env);
     obj.Set(Napi::String::New(env, "faceNum"), detectedFaces.faceNum);
     if (0 == detectedFaces.faceNum) {
+        SafeFree(imageData);
         return obj;
     }
     // calculate age
@@ -318,7 +321,7 @@ Napi::Value ASK::faceFeatureInfo(const Napi::CallbackInfo& info) {
         genderList[i] = *(gender.genderArray + i);
     }
     obj.Set(Napi::String::New(env, "genders"), genderList);
-
+    SafeFree(imageData);
     return obj;
 }
 
@@ -328,7 +331,6 @@ void strToFeatureInfo(string& feature, ASF_FaceFeature& featureInfo) {
     featureInfo.feature = reinterpret_cast<MByte*>(feature11);
 }
 
-
 Napi::Value ASK::faceFeatureCompare(const Napi::CallbackInfo& info) {
     Napi::Env env = info.Env();
     //printf("AcceptArrayBuffer from js %d \n", info.Length());
@@ -361,10 +363,10 @@ Napi::Value ASK::faceFeatureCompare(const Napi::CallbackInfo& info) {
 
 }
 
-
-
-
-    return
+bool compareScore(const MFaceSore& a, const MFaceSore& b){
+    // if (b.score > a.score) return false;
+    // return true;
+    return a.score > b.score;//
 }
 
 Napi::Value ASK::ImageFaceCompare(const Napi::CallbackInfo& info) {
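The rewritten `compareScore` above is the comparator later passed to `sort` in `faceReg`; returning `a.score > b.score` orders candidate matches by descending confidence and, unlike the commented-out `return true` variant, is a strict weak ordering, which `std::sort` requires. A small self-contained sketch of the same idea (the `searchId`/`score`/`name` fields mirror the `MFaceSore` usage in this diff; the struct name and sample values are made up):

```cpp
#include <algorithm>
#include <iostream>
#include <string>
#include <vector>

// Standalone stand-in for MFaceSore, using the fields this diff reads from it.
struct FaceScore { int searchId; float score; std::string name; };

static bool compareScore(const FaceScore& a, const FaceScore& b) {
    return a.score > b.score;   // descending by confidence; a strict weak ordering
}

int main() {
    std::vector<FaceScore> scores = { {1, 0.62f, "a"}, {2, 0.91f, "b"}, {3, 0.17f, "c"} };
    std::sort(scores.begin(), scores.end(), compareScore);
    int top = scores.size() > 3 ? 3 : (int)scores.size();   // same top-3 cut as faceReg
    for (int n = 0; n < top; ++n)
        std::cout << scores[n].name << " " << scores[n].score << "\n";
    return 0;
}
```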
@@ -375,14 +377,17 @@ Napi::Value ASK::ImageFaceCompare(const Napi::CallbackInfo& info) {
         return errorData(env, "input expected");
     }
     string filePath = info[0].As<Napi::String>().ToString();
+    int Width, Height;
+    MUInt8* imageData = processFile(filePath.c_str(), Width, Height);
     ASVLOFFSCREEN offscreen = { 0 };
-
-
+    ColorSpaceConversion(Width, Height, ASVL_PAF_NV21, imageData, offscreen);
+
     ASF_MultiFaceInfo detectedFaces = { 0 };
     MRESULT res = ASFDetectFacesEx(this->handle, &offscreen, &detectedFaces);
     Napi::Object obj = Napi::Object::New(env);
     obj.Set(Napi::String::New(env, "faceNum"), detectedFaces.faceNum);
     if (0 == detectedFaces.faceNum) {
+        SafeFree(imageData);
         return obj;
     }
 
@@ -412,7 +417,7 @@ Napi::Value ASK::ImageFaceCompare(const Napi::CallbackInfo& info) {
 
         ASF_FaceFeatureInfo* info = new ASF_FaceFeatureInfo();
         info->searchId = searchId;
-        info->tag = (*tag).c_str();
+        info->tag = *tag; //(*tag).c_str();
         info->feature = featureInfo;
         featureInfoList[i] = *info;
     }
@@ -455,7 +460,7 @@ Napi::Value ASK::ImageFaceCompare(const Napi::CallbackInfo& info) {
         delete []faceScores;
     }
     obj.Set(Napi::String::New(env, "faceRects"), list);
-
+    SafeFree(imageData);
     delete []featureInfoList;
     return obj;
 }
@@ -472,69 +477,66 @@ Napi::Value ASK::RegisterFaces(const Napi::CallbackInfo& info) {
         Napi::TypeError::New(env, "array expected").ThrowAsJavaScriptException();
         return errorData(env, "array expected");
     }
-    Napi::Array arr = info[
+    Napi::Array arr = info[0].As<Napi::Array>();
+    m_vecFeatureInfoList.clear();
+
     MUInt32 len = arr.Length();
-    ASF_FaceFeatureInfo* featureInfoList = new ASF_FaceFeatureInfo[len];
+    //ASF_FaceFeatureInfo* featureInfoList = new ASF_FaceFeatureInfo[len];
     for(int i = 0; i < len; ++i) {
         Napi::Object obj = static_cast<Napi::Value>(arr[i]).As<Napi::Object>().ToObject();
         MInt32 searchId = obj.Get(static_cast<napi_value>(Napi::String::New(info.Env(),"searchId"))).As<Napi::Number>().Uint32Value();
-        std::string* tag = new std::string();
-        *tag = obj.Get(static_cast<napi_value>(Napi::String::New(info.Env(),"tag"))).As<Napi::String>().ToString();
-        //Napi::ArrayBuffer buf = obj.Get(static_cast<napi_value>(Napi::String::New(info.Env(),"feature"))).As<Napi::ArrayBuffer>();
+        // std::string* tag = new std::string();
+        // *tag = obj.Get(static_cast<napi_value>(Napi::String::New(info.Env(),"tag"))).As<Napi::String>().ToString();
+        // //Napi::ArrayBuffer buf = obj.Get(static_cast<napi_value>(Napi::String::New(info.Env(),"feature"))).As<Napi::ArrayBuffer>();
         ASF_FaceFeature* featureInfo = new ASF_FaceFeature();
-        std::string* feature = new std::string();
-        *feature = obj.Get(static_cast<napi_value>(Napi::String::New(info.Env(),"feature"))).As<Napi::String>().ToString();
-
-
-        featureInfoList[i].
-        featureInfoList[i].
-
-
-
+        // std::string* feature = new std::string();
+        // *feature = obj.Get(static_cast<napi_value>(Napi::String::New(info.Env(),"feature"))).As<Napi::String>().ToString();
+        std::string strFeature = obj.Get(static_cast<napi_value>(Napi::String::New(info.Env(),"feature"))).As<Napi::String>().ToString();
+        strToFeatureInfo(strFeature, *featureInfo);
+        // featureInfoList[i].searchId = searchId;
+        // featureInfoList[i].tag = *tag; //(*tag).c_str();
+        // featureInfoList[i].feature = featureInfo;
+        ASF_FaceFeatureInfo fInfo = {0};
+        fInfo.searchId = searchId;
+        fInfo.tag = obj.Get(static_cast<napi_value>(Napi::String::New(env,"tag"))).As<Napi::String>().ToString();
+        fInfo.feature = featureInfo;
+        m_vecFeatureInfoList.push_back(fInfo);
+    }
+    // mFeatureInfoList = featureInfoList;
+    // mFeatureLen = len;
+    return Napi::Number::New(env, 1);
 }
 
-Napi::
-    Napi::Env env = info.Env();
-    int length = info.Length();
-    if (length < 1) {
-        Napi::TypeError::New(env, "input expected").ThrowAsJavaScriptException();
-        return errorData(env, "input expected");
-    }
-    string filePath = info[0].As<Napi::String>().ToString();
-    ASVLOFFSCREEN offscreen = { 0 };
-    MUInt8* imageData;
-    processFileUrl(filePath.c_str(), imageData, offscreen);
-    ASF_MultiFaceInfo detectedFaces = { 0 };
-    MRESULT res = ASFDetectFacesEx(this->handle, &offscreen, &detectedFaces);
-    Napi::Object obj = Napi::Object::New(env);
-    obj.Set(Napi::String::New(env, "faceNum"), detectedFaces.faceNum);
-    if (0 == detectedFaces.faceNum) {
-        SafeArrayDelete(imageData);
-        return obj;
-    }
-    //compare
+Napi::Array ASK::faceReg(std::vector<ASF_FaceFeatureInfo>& vecFeatureInfoList, ASF_MultiFaceInfo& detectedFaces, Napi::Env& env, ASVLOFFSCREEN& offscreen) {
     Napi::Array list = Napi::Array::New(env, detectedFaces.faceNum);
     for(int i = 0; i < detectedFaces.faceNum; ++i) {
         ASF_SingleFaceInfo sData;
         getDetectFaceInfo(detectedFaces, sData, i);
         ASF_FaceFeature feature = { 0 };
-        res = ASFFaceFeatureExtractEx(handle, &offscreen, &sData, &feature);
-        MFaceSore
-        for(int j = 0; j <
+        MRESULT res = ASFFaceFeatureExtractEx(handle, &offscreen, &sData, &feature);
+        vector<MFaceSore> vecfaceScores;
+        for(int j = 0; j < vecFeatureInfoList.size(); ++j) {
+            ASF_FaceFeatureInfo& fInfo = vecFeatureInfoList[j];
            MFloat confidenceLevel;
-            MRESULT res1 = ASFFaceFeatureCompare(this->handle, &feature,
-
-
-
+            MRESULT res1 = ASFFaceFeatureCompare(this->handle, &feature, fInfo.feature, &confidenceLevel);
+            //cout << " confidenceLevel: " << confidenceLevel;
+            MFaceSore sInfo;
+            sInfo.searchId = fInfo.searchId;
+            sInfo.score = confidenceLevel;
+            sInfo.name = fInfo.tag;
+            vecfaceScores.push_back(sInfo);
        }
-        sort(
-        int ll =
+        sort(vecfaceScores.begin(), vecfaceScores.end(), compareScore);
+        int ll = vecfaceScores.size() > 3? 3 : vecfaceScores.size();
         Napi::Array mFaceScores = Napi::Array::New(env, ll);
+        //cout << " i=" << i << ", len=" << ll << ",mFeatureLen=" << vecFeatureInfoList.size() << endl;
         for(int n = 0; n < ll; ++n) {
+            MFaceSore& sInfo = vecfaceScores[n];
             Napi::Object faceScore = Napi::Object::New(env);
-            faceScore.Set(Napi::String::New(env, "searchId"),
-            faceScore.Set(Napi::String::New(env, "score"),
-            faceScore.Set(Napi::String::New(env, "name"),
+            faceScore.Set(Napi::String::New(env, "searchId"), sInfo.searchId);
+            faceScore.Set(Napi::String::New(env, "score"), sInfo.score);
+            faceScore.Set(Napi::String::New(env, "name"), sInfo.name);
+            //cout << "name:" << sInfo.name << ",searchId:" << sInfo.searchId << ",score=" << sInfo.score << endl;
             mFaceScores[n] = faceScore;
         }
         Napi::Object data = Napi::Object::New(env);
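In this hunk `RegisterFaces` parses the JavaScript array once into the member vector `m_vecFeatureInfoList` (each entry carries a `searchId`, a `tag`, and a heap-allocated `ASF_FaceFeature` produced by `strToFeatureInfo`), and the new `faceReg` helper scores every detected face against that list and keeps the best three candidates per face. A rough, self-contained sketch of that register-then-match flow; plain `std::string` features and a fake `similarity` function stand in for the SDK feature type and `ASFFaceFeatureCompare`, and all names and values here are illustrative only:

```cpp
#include <algorithm>
#include <iostream>
#include <string>
#include <vector>

// Stand-ins for the diff's types: a registered entry and a per-candidate score.
struct FeatureInfo { int searchId; std::string tag; std::string feature; };
struct FaceScore   { int searchId; float score; std::string name; };

// Fake similarity; the package calls ASFFaceFeatureCompare() from the ArcSoft SDK here.
static float similarity(const std::string& a, const std::string& b) {
    return a == b ? 1.0f : 0.0f;
}

static bool compareScore(const FaceScore& a, const FaceScore& b) { return a.score > b.score; }

// Mirrors faceReg's inner loop: score one detected face against every registered feature,
// sort descending by confidence, and keep at most the top three candidates.
std::vector<FaceScore> topMatches(const std::string& face, const std::vector<FeatureInfo>& registered) {
    std::vector<FaceScore> scores;
    for (const FeatureInfo& f : registered)
        scores.push_back({f.searchId, similarity(face, f.feature), f.tag});
    std::sort(scores.begin(), scores.end(), compareScore);
    if (scores.size() > 3) scores.resize(3);
    return scores;
}

int main() {
    // "Registered" list, as RegisterFaces would build it from {searchId, tag, feature} objects.
    std::vector<FeatureInfo> reg = { {1, "alice", "AAA"}, {2, "bob", "BBB"}, {3, "carol", "CCC"}, {4, "dave", "AAA"} };
    for (const FaceScore& s : topMatches("AAA", reg))
        std::cout << s.searchId << " " << s.name << " " << s.score << "\n";
    return 0;
}
```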
@@ -544,18 +546,41 @@ Napi::Value ASK::ImageFaceCompareUrl2(const Napi::CallbackInfo& info) {
         data.Set(Napi::String::New(env, "bottom"), sData.faceRect.bottom);
         data.Set(Napi::String::New(env, "faceScores"), mFaceScores);
         list[i] = data;
-
+        vecfaceScores.clear();
+    }
+    //obj.Set(Napi::String::New(env, "faceRects"), list);
+    return list;
+}
+
+Napi::Value ASK::ImageFaceCompareUrl2(const Napi::CallbackInfo& info) {
+    Napi::Env env = info.Env();
+    int length = info.Length();
+    if (length < 1) {
+        Napi::TypeError::New(env, "input expected").ThrowAsJavaScriptException();
+        return errorData(env, "input expected");
     }
+    string filePath = info[0].As<Napi::String>().ToString();
+    int Width, Height;
+    MUInt8* imageData = processFileUrl(filePath.c_str(), Width, Height);
+    ASVLOFFSCREEN offscreen = { 0 };
+    ColorSpaceConversion(Width, Height, ASVL_PAF_NV21, imageData, offscreen);
+
+    ASF_MultiFaceInfo detectedFaces = { 0 };
+    MRESULT res = ASFDetectFacesEx(this->handle, &offscreen, &detectedFaces);
+    Napi::Object obj = Napi::Object::New(env);
+    obj.Set(Napi::String::New(env, "faceNum"), detectedFaces.faceNum);
+    if (0 == detectedFaces.faceNum) {
+        SafeFree(imageData);
+        return obj;
+    }
+    Napi::Array list = faceReg(this->m_vecFeatureInfoList, detectedFaces, env, offscreen);
     obj.Set(Napi::String::New(env, "faceRects"), list);
-
-    // for(int i = 0; i < len; ++i) {
-    //     delete featureInfoList[i].feature;
-    // }
-    //delete []featureInfoList;
+    SafeFree(imageData);
     return obj;
 }
 
 
+
 Napi::Value ASK::ImageFaceCompareUrl(const Napi::CallbackInfo& info) {
     Napi::Env env = info.Env();
     int length = info.Length();
@@ -564,15 +589,17 @@ Napi::Value ASK::ImageFaceCompareUrl(const Napi::CallbackInfo& info) {
         return errorData(env, "input expected");
     }
     string filePath = info[0].As<Napi::String>().ToString();
+    int Width, Height;
+    MUInt8* imageData = processFileUrl(filePath.c_str(), Width, Height);
     ASVLOFFSCREEN offscreen = { 0 };
-
-
+    ColorSpaceConversion(Width, Height, ASVL_PAF_NV21, imageData, offscreen);
+
     ASF_MultiFaceInfo detectedFaces = { 0 };
     MRESULT res = ASFDetectFacesEx(this->handle, &offscreen, &detectedFaces);
     Napi::Object obj = Napi::Object::New(env);
     obj.Set(Napi::String::New(env, "faceNum"), detectedFaces.faceNum);
     if (0 == detectedFaces.faceNum) {
-
+        SafeFree(imageData);
         return obj;
     }
 
@@ -583,69 +610,26 @@ Napi::Value ASK::ImageFaceCompareUrl(const Napi::CallbackInfo& info) {
     }
     Napi::Array arr = info[1].As<Napi::Array>();
     MUInt32 len = arr.Length();
-
+
+    std::vector<ASF_FaceFeatureInfo> vecFeatureInfoList;
     for(int i = 0; i < len; ++i) {
         Napi::Object obj = static_cast<Napi::Value>(arr[i]).As<Napi::Object>().ToObject();
         MInt32 searchId = obj.Get(static_cast<napi_value>(Napi::String::New(info.Env(),"searchId"))).As<Napi::Number>().Uint32Value();
-        std::string
-        *tag = obj.Get(static_cast<napi_value>(Napi::String::New(info.Env(),"tag"))).As<Napi::String>().ToString();
-        //Napi::ArrayBuffer buf = obj.Get(static_cast<napi_value>(Napi::String::New(info.Env(),"feature"))).As<Napi::ArrayBuffer>();
+        std::string strFeature = obj.Get(static_cast<napi_value>(Napi::String::New(info.Env(),"feature"))).As<Napi::String>().ToString();
         ASF_FaceFeature* featureInfo = new ASF_FaceFeature();
-
-
-
-
-
-
-
-
-        featureInfoList[i].feature = featureInfo;
-        //featureInfoList[i] = *info;
-    }
-
-    //compare
-    Napi::Array list = Napi::Array::New(env, detectedFaces.faceNum);
-    for(int i = 0; i < detectedFaces.faceNum; ++i) {
-        ASF_SingleFaceInfo sData;
-        getDetectFaceInfo(detectedFaces, sData, i);
-        ASF_FaceFeature feature = { 0 };
-        res = ASFFaceFeatureExtractEx(handle, &offscreen, &sData, &feature);
-        MFaceSore* faceScores = new MFaceSore[len];
-        for(int j = 0; j < len; ++j) {
-            MFloat confidenceLevel;
-            MRESULT res1 = ASFFaceFeatureCompare(this->handle, &feature, featureInfoList[j].feature, &confidenceLevel);
-            //cout << confidenceLevel <<endl;
-            //MFaceSore* pFace = new MFaceSore();
-            faceScores[j].searchId = featureInfoList[j].searchId;
-            faceScores[j].score = confidenceLevel;
-            faceScores[j].name = featureInfoList[j].tag;
-            // faceScores[j] = *pFace;
-        }
-        sort(faceScores, faceScores + len, compareScore);
-        int ll = len > 5? 5 : len;
-        Napi::Array mFaceScores = Napi::Array::New(env, ll);
-        for(int n = 0; n < ll; ++n) {
-            Napi::Object faceScore = Napi::Object::New(env);
-            faceScore.Set(Napi::String::New(env, "searchId"), faceScores[n].searchId);
-            faceScore.Set(Napi::String::New(env, "score"), faceScores[n].score);
-            faceScore.Set(Napi::String::New(env, "name"), faceScores[n].name);
-            mFaceScores[n] = faceScore;
-        }
-        Napi::Object data = Napi::Object::New(env);
-        data.Set(Napi::String::New(env, "left"), sData.faceRect.left);
-        data.Set(Napi::String::New(env, "top"), sData.faceRect.top);
-        data.Set(Napi::String::New(env, "right"), sData.faceRect.right);
-        data.Set(Napi::String::New(env, "bottom"), sData.faceRect.bottom);
-        data.Set(Napi::String::New(env, "faceScores"), mFaceScores);
-        list[i] = data;
-        delete []faceScores;
-    }
+        strToFeatureInfo(strFeature, *featureInfo);
+        ASF_FaceFeatureInfo fInfo = {0};
+        fInfo.searchId = searchId;
+        fInfo.tag = obj.Get(static_cast<napi_value>(Napi::String::New(env,"tag"))).As<Napi::String>().ToString();
+        fInfo.feature = featureInfo;
+        vecFeatureInfoList.push_back(fInfo);
+    }
+    Napi::Array list = faceReg(vecFeatureInfoList, detectedFaces, env, offscreen);
     obj.Set(Napi::String::New(env, "faceRects"), list);
-
+    SafeFree(imageData);
     for(int i = 0; i < len; ++i) {
-        delete
+        delete vecFeatureInfoList[i].feature;
     }
-    delete []featureInfoList;
     return obj;
 }
 
@@ -732,8 +716,9 @@ int ASK::ColorSpaceConversion(MInt32 width, MInt32 height, MInt32 format, MUInt8
     return 1;
 }
 
-
-
+
+
+MUInt8* ASK::processFile(const char* filePath, int& picWidth, int& picHeight) {
     cv::Mat src = imread(filePath);
     cout<<filePath<<endl;
     int Width = src.cols;
@@ -741,24 +726,29 @@ void ASK::processFile(const char* filePath, MUInt8* imageData, ASVLOFFSCREEN& offscreen) {
     cout << "Width:" << Width << ",Height:"<<Height<<endl;
     int wScore = Width % 4;
     int hScore = Height % 2;
+    cv::Mat dest;
     if (wScore != 0 || hScore != 0) {
         Width -= wScore;
         Height -= hScore;
         cv::Mat dst;
         cv::resize(src, dst, cv::Size(Width, Height));
-        src.release();
-
+        src.release();
+        cvtColor(dst, dest, COLOR_BGR2YUV_I420);
+        dst.release();
+    } else {
+        cvtColor(src, dest, COLOR_BGR2YUV_I420);
     }
-    cv::Mat dest;
-    cvtColor(src, dest, COLOR_BGR2YUV_I420);
     int len = Height*Width*3/2;
-    imageData = (MUInt8*)malloc(len);
+    MUInt8* imageData = (MUInt8*)malloc(len);
     memset(imageData, 0, len);
     memcpy(imageData, dest.data, len);
     src.release();
     dest.release();
-
+    picHeight = Height;
+    picWidth = Width;
+    return imageData;
 }
+
 //curl writefunction to be passed as a parameter
 // we can't ever expect to get the whole image in one piece,
 // every router / hub is entitled to fragment it into parts
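`processFile` (and `processFileUrl` below) now return the malloc'd YUV buffer and report the possibly trimmed size through `picWidth`/`picHeight`: the width is cut down to a multiple of 4 and the height to a multiple of 2 before the BGR-to-I420 conversion, and the resulting buffer holds `width*height*3/2` bytes. A tiny standalone sketch of that size arithmetic only (the sample dimensions are made up):

```cpp
#include <cstdio>

// Size handling used by the new processFile/processFileUrl: trim to even/4-aligned
// dimensions, then allocate width*height*3/2 bytes for the planar YUV image.
int main() {
    int Width = 1023, Height = 767;       // example source size (made up)
    Width  -= Width % 4;                  // -> 1020
    Height -= Height % 2;                 // -> 766
    int len = Height * Width * 3 / 2;     // bytes malloc'd for the I420/NV21 buffer
    std::printf("%dx%d -> %d bytes\n", Width, Height, len);
    return 0;
}
```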
@@ -790,30 +780,35 @@ cv::Mat curlImg(const char *img_url, int timeout=100000)
     cout<<res<<"xxxx" << stream.size() << " " << stream.max_size()<<endl;
     return imdecode(stream, -1); // 'keep-as-is'
 }
-
+
+MUInt8* ASK::processFileUrl(const char* url, int& picWidth, int& picHeight) {
     cv::Mat src = curlImg(url);
     int Width = src.cols;
     int Height = src.rows;
     cout << "Width:" << Width << ",Height:"<<Height<<endl;
     int wScore = Width % 4;
     int hScore = Height % 2;
+    cv::Mat dest;
     if (wScore != 0 || hScore != 0) {
         Width -= wScore;
         Height -= hScore;
         cv::Mat dst;
         cv::resize(src, dst, cv::Size(Width, Height));
         src.release();
-
+        cvtColor(dst, dest, COLOR_BGR2YUV_I420);
+        dst.release();
+    } else {
+        cvtColor(src, dest, COLOR_BGR2YUV_I420);
     }
-    cv::Mat dest;
-    cvtColor(src, dest, COLOR_BGR2YUV_I420);
     int len = Height*Width*3/2;
-    imageData = (MUInt8*)malloc(len);
+    MUInt8* imageData = (MUInt8*)malloc(len);
     memset(imageData, 0, len);
     memcpy(imageData, dest.data, len);
     src.release();
     dest.release();
-
+    picHeight = Height;
+    picWidth = Width;
+    return imageData;
 }
 
 
@@ -826,8 +821,65 @@ void ASK::processFileUrl(const char* url, MUInt8* imageData, ASVLOFFSCREEN& offscreen) {
 
 
 
+// // Read the file, fix up the image dimensions, and convert the color space
+// void ASK::processFile(const char* filePath, MUInt8* imageData, ASVLOFFSCREEN& offscreen) {
+//     cv::Mat src = imread(filePath);
+//     cout<<filePath<<endl;
+//     int Width = src.cols;
+//     int Height = src.rows;
+//     cout << "Width:" << Width << ",Height:"<<Height<<endl;
+//     int wScore = Width % 4;
+//     int hScore = Height % 2;
+//     if (wScore != 0 || hScore != 0) {
+//         Width -= wScore;
+//         Height -= hScore;
+//         cv::Mat dst;
+//         cv::resize(src, dst, cv::Size(Width, Height));
+//         src.release();
+//         src = dst;
+//     }
+//     cv::Mat dest;
+//     cvtColor(src, dest, COLOR_BGR2YUV_I420);
+//     int len = Height*Width*3/2;
+//     imageData = (MUInt8*)malloc(len);
+//     //imageData = new MUInt8[len];
+//     memset(imageData, 0, len);
+//     memcpy(imageData, dest.data, len);
+//     //free(imageData);
+//     src.release();
+//     dest.release();
+//     ColorSpaceConversion(Width, Height, ASVL_PAF_NV21, imageData, offscreen);
+// }
 
-
+// void ASK::processFileUrl(const char* url, MUInt8* imageData, ASVLOFFSCREEN& offscreen) {
+//     cv::Mat src = curlImg(url);
+//     int Width = src.cols;
+//     int Height = src.rows;
+//     cout << "Width:" << Width << ",Height:"<<Height<<endl;
+//     int wScore = Width % 4;
+//     int hScore = Height % 2;
+//     cv::Mat dest;
+//     if (wScore != 0 || hScore != 0) {
+//         Width -= wScore;
+//         Height -= hScore;
+//         cv::Mat dst;
+//         cv::resize(src, dst, cv::Size(Width, Height));
+//         src.release();
+//         //src = dst;
+//         cvtColor(dst, dest, COLOR_BGR2YUV_I420);
+//         dst.release();
+//     } else {
+//         cvtColor(src, dest, COLOR_BGR2YUV_I420);
+//     }
+
+//     int len = Height*Width*3/2;
+//     imageData = (MUInt8*)malloc(len);
+//     memset(imageData, 0, len);
+//     memcpy(imageData, dest.data, len);
+//     src.release();
+//     dest.release();
+//     ColorSpaceConversion(Width, Height, ASVL_PAF_NV21, imageData, offscreen);
+// }
 
 
 
@@ -915,71 +967,3 @@ void ASK::processFileUrl(const char* url, MUInt8* imageData, ASVLOFFSCREEN& offscreen) {
 
 
 
-
-// #include <b64/cencode.h>
-// #include <b64/cdecode.h>
-
-// #include <stdio.h>
-// #include <stdlib.h>
-// #include <string.h>
-
-// #include <assert.h>
-
-// /* arbitrary buffer size */
-// #define SIZE 100
-
-// char* encode(const char* input)
-// {
-//     /* set up a destination buffer large enough to hold the encoded data */
-//     char* output = (char*)malloc(1376);
-//     /* keep track of our encoded position */
-//     char* c = output;
-//     /* store the number of bytes encoded by a single call */
-//     int cnt = 0;
-//     /* we need an encoder state */
-//     base64_encodestate s;
-
-//     /*---------- START ENCODING ----------*/
-//     /* initialise the encoder state */
-//     base64_init_encodestate(&s);
-//     /* gather data from the input and send it to the output */
-//     cnt = base64_encode_block(input, strlen(input), c, &s);
-//     c += cnt;
-//     /* since we have encoded the entire input string, we know that
-//        there is no more input data; finalise the encoding */
-//     cnt = base64_encode_blockend(c, &s);
-//     c += cnt;
-//     /*---------- STOP ENCODING ----------*/
-
-//     /* we want to print the encoded data, so null-terminate it: */
-//     *c = 0;
-
-//     return output;
-// }
-
-// char* decode(const char* input)
-// {
-//     /* set up a destination buffer large enough to hold the encoded data */
-//     char* output = (char*)malloc(1032);
-//     /* keep track of our decoded position */
-//     char* c = output;
-//     /* store the number of bytes decoded by a single call */
-//     int cnt = 0;
-//     /* we need a decoder state */
-//     base64_decodestate s;
-
-//     /*---------- START DECODING ----------*/
-//     /* initialise the decoder state */
-//     base64_init_decodestate(&s);
-//     /* decode the input data */
-//     cnt = base64_decode_block(input, strlen(input), c, &s);
-//     c += cnt;
-//     /* note: there is no base64_decode_blockend! */
-//     /*---------- STOP DECODING ----------*/
-
-//     /* we want to print the decoded data, so null-terminate it: */
-//     *c = 0;
-
-//     return output;
-// }
-