hzt_asc 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/ArcFace64.dat +1 -0
- package/CMakeLists.txt +46 -0
- package/ask.cc +543 -0
- package/ask.h +40 -0
- package/askface.cc +8 -0
- package/images/20200829105722.jpg +0 -0
- package/images/demo.png +0 -0
- package/images/demo2.png +0 -0
- package/images/test.png +0 -0
- package/inc/amcomdef.h +98 -0
- package/inc/arcsoft_face_sdk.h +378 -0
- package/inc/asvloffscreen.h +183 -0
- package/inc/merror.h +141 -0
- package/index.js +2 -0
- package/package.json +22 -0
- package/sample/ArcFace64.dat +1 -0
- package/sample/demo.js +74 -0
- package/so/libarcsoft_face.so +0 -0
- package/so/libarcsoft_face_engine.so +0 -0
package/ArcFace64.dat
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
EWEPEPEOGMGTELIZJUGECKIUJDBCJTCNISGPBNHLJTJUBHEWGNAKGEGAIOHJDQAJGNCFDRFZJEDMJTIBIVDTAFIVJOHDHLHQFUEOGPITCBHREAFKAYFKIHIEBODIBGGUGQHEBGCTGBCXJTCLFBGSJJFUBUCJEFEMCCHADVBJHKIYGFCYHSAPGFFOHSDWBRAUJBFXIEBUIZFVJAGCFFJJCVCJBTFZDRJLBTFZGHBIDVJVEMEOANDVDYCDFCJPAVERIMDLFHFEEZBXALBIGHDAGTDYIDDUFBDWGZELAPICDAAVGBJDGIFVHWFUBKCQFBESAOAWERGIAPCPDOIIDDAJGRJLBYHIEWCIEXBEDGEVHIFOETAXGFAYABAMGSDABGHLBAJFBPBOASEOFQJIJTIWGYHVEFBXHNFQEUGKIBHVCSHNBTDUGNGSBHCNBECQDVFGEEEHCDDEELHTJPJJHGCOHSANFPGZEJIQAUINCKAFAAEBCVETFSFHILFEEDAXENBSAXHEDVAVGDFSGEBDHCDSAOICGLENARHDBJGQGZBPJFBSEWCFHDJSARINIQGXAVGCHNCMDXDRHDELBIIQBNJVHBCYDOGXGHCJGQCPFTJJFKHFJJGLECDNDNIYINABGUDNGQEIBLHUIIDEIIHZHCFCBCHTARHWDYELECDYEJIIDUIPFVEGAIIBAGCCAKHECPEMCOCGHZFKAIBHBOJQCFDRAWCUBTCQACGUBYDSBLGOAZCPIYBNILAQIECFFCFHFXDPFUBZDVDDBKEZCMAEFAGZDIEHERELGJEEBTEAAIIHHGEBCXCTBHIIFPDZEFCUBMGYIXIKCHGZFTESHMAHIEHLHWEWAQGXAMEQGIBZBEIEANHFFJDPBMCVIKBMESEUFYELASJNGSBRHIBFFGFUADAQDIJREFBYALBNJREZIFGSBDFYBFCQIOFFCXFAAYGSEBGNBGHXCSAQHAIGBVJQHSAQBOCEEKGFBXDBAGDKHRDGBCBBHOJLAKGCBBDUJKHTBNISFICADWHSHZDBAGGGHDJDDDAMAUEZAQBTBFCCBCHTCGBLHSBQHQFZBHJIIFAOERATAXEIDSDYJTEKCKHHDWFTGBGCFJCWBZFHJKCSGCCOBJFGFUHFGLITDLCXFYCFIXGPHTFBHPDSISEUIECDJDFCFGAECKGBBSDWCTJRISGCDTHUHRAH
|
package/CMakeLists.txt
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
# Build script for the hzt_asc_v3 Node.js native addon (built via cmake-js).
# Links OpenCV plus the bundled ArcSoft face SDK and emits a loadable .node module.
cmake_minimum_required(VERSION 3.3)
cmake_policy(SET CMP0042 NEW)

project(hzt_asc_v3)

set(CMAKE_CXX_STANDARD 11)
set(CMAKE_CXX_STANDARD_REQUIRED ON)

find_package(OpenCV REQUIRED)

# The ArcSoft SDK ships as prebuilt shared objects under ./so
# (headers live in ./inc); no imported targets are provided.
link_directories(${CMAKE_CURRENT_SOURCE_DIR}/so)

# NOTE: aux_source_directory globs at configure time — re-run CMake after
# adding source files. An explicit source list would be preferable.
aux_source_directory(${CMAKE_CURRENT_SOURCE_DIR} SRC)

add_library(${PROJECT_NAME} SHARED ${SRC} ${CMAKE_JS_SRC})

# Target-scoped usage requirements instead of directory-scoped commands.
target_include_directories(${PROJECT_NAME} PRIVATE
  ${OpenCV_INCLUDE_DIRS}
  ${CMAKE_JS_INC}
  ${CMAKE_CURRENT_SOURCE_DIR}
  ${CMAKE_CURRENT_SOURCE_DIR}/inc
)

target_link_libraries(${PROJECT_NAME} PRIVATE
  ${OpenCV_LIBS}
  arcsoft_face
  arcsoft_face_engine
  ${CMAKE_JS_LIB}
)

# Node loads addons as "<name>.node" with no "lib" prefix.
set_target_properties(${PROJECT_NAME} PROPERTIES PREFIX "" SUFFIX ".node")

# Locate the node-addon-api headers through the local node installation.
execute_process(
  COMMAND node -p "require('node-addon-api').include"
  WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
  OUTPUT_VARIABLE NODE_ADDON_API_DIR
)
string(REGEX REPLACE "[\r\n\"]" "" NODE_ADDON_API_DIR "${NODE_ADDON_API_DIR}")
target_include_directories(${PROJECT_NAME} PRIVATE ${NODE_ADDON_API_DIR})

# Pin the N-API ABI version the addon is compiled against.
target_compile_definitions(${PROJECT_NAME} PRIVATE NAPI_VERSION=3)

#set_property(TARGET ${PROJECT_NAME} PROPERTY POSITION_INDEPENDENT_CODE ON)
|
package/ask.cc
ADDED
|
@@ -0,0 +1,543 @@
|
|
|
1
|
+
#include "ask.h"
|
|
2
|
+
#include <opencv2/opencv.hpp>
|
|
3
|
+
#include <opencv2/highgui/highgui.hpp>
|
|
4
|
+
#include <opencv2/imgproc/imgproc.hpp>
|
|
5
|
+
#include <vector>
|
|
6
|
+
#include <fstream>
|
|
7
|
+
#include <iostream>
|
|
8
|
+
#include "arcsoft_face_sdk.h"
|
|
9
|
+
#include "amcomdef.h"
|
|
10
|
+
#include "asvloffscreen.h"
|
|
11
|
+
#include "merror.h"
|
|
12
|
+
#include <string>
|
|
13
|
+
#include <stdio.h>
|
|
14
|
+
#include <stdlib.h>
|
|
15
|
+
#include <string.h>
|
|
16
|
+
#include <time.h>
|
|
17
|
+
#include <algorithm>
|
|
18
|
+
using namespace std;
|
|
19
|
+
using namespace cv;
|
|
20
|
+
#define SafeFree(p) { if ((p)) free(p); (p) = NULL; }
|
|
21
|
+
#define SafeArrayDelete(p) { if ((p)) delete [] (p); (p) = NULL; }
|
|
22
|
+
#define SafeDelete(p) { if ((p)) delete (p); (p) = NULL; }
|
|
23
|
+
#define NSCALE 16
|
|
24
|
+
#define FACENUM 5
|
|
25
|
+
// Build the uniform error payload returned to JavaScript callers:
// a plain object of the shape { errorMsg: <msg> }.
Napi::Value errorData(Napi::Env env, string msg) {
    Napi::Object result = Napi::Object::New(env);
    result.Set(Napi::String::New(env, "errorMsg"), msg);
    return result;
}
|
|
30
|
+
|
|
31
|
+
// Register the ASK class (and its JS-facing instance methods) on the
// module exports object.
Napi::Object ASK::Init(Napi::Env env, Napi::Object exports) {
    Napi::Function askClass = DefineClass(env, "ASK", {
        InstanceMethod("FaceDetect", &ASK::faceDetect),
        InstanceMethod("FaceFeatureExtract", &ASK::faceFeatureExtract),
        InstanceMethod("FaceFeatureCompare", &ASK::faceFeatureCompare),
        InstanceMethod("FaceFeatureInfo", &ASK::faceFeatureInfo),
        InstanceMethod("ImageFaceCompare", &ASK::ImageFaceCompare),
    });
    exports.Set("ASK", askClass);
    return exports;
}
|
|
45
|
+
|
|
46
|
+
// Constructor: expects three string arguments (APPID, SDKKEY, ACTIVEKEY).
// Performs ArcSoft online activation and initializes a still-image detection
// engine. On failure the engine handle stays NULL and later SDK calls will
// return error codes; no exception is thrown beyond the arity check.
ASK::ASK(const Napi::CallbackInfo& info) : Napi::ObjectWrap<ASK>(info) {
    Napi::Env env = info.Env();
    int length = info.Length();
    if (length != 3) {
        Napi::TypeError::New(env, "input expected").ThrowAsJavaScriptException();
        return;
    }
    // APPID/SDKKEY/ACTIVEKEY credentials issued by ArcSoft.
    string APPID = info[0].As<Napi::String>().ToString();
    string SDKKEY = info[1].As<Napi::String>().ToString();
    string ACTIVEKEY = info[2].As<Napi::String>().ToString();
    printf("\n************* Face Recognition *****************\n");
    MRESULT res = MOK;
    MPChar appId = (MPChar)APPID.data();
    MPChar sdkKey = (MPChar)SDKKEY.data();
    // NOTE(review): activeKey is accepted but never passed to the SDK —
    // confirm whether this SDK version's activation call should take it.
    MPChar activeKey = (MPChar)ACTIVEKEY.data();
    (void)activeKey;
    // Online activation; "already activated" is an acceptable outcome.
    res = ASFOnlineActivation(appId, sdkKey);
    if (MOK != res && MERR_ASF_ALREADY_ACTIVATED != res) {
        printf("ASFOnlineActivation fail: %d\n", res);
    } else {
        // BUGFIX: log message typo "sucess" -> "success".
        printf("ASFOnlineActivation success: %d\n", res);
    }
    // Initialize the engine: image mode, upright faces only, smallest
    // detectable face 1/NSCALE of the long edge, at most FACENUM faces.
    MHandle handle = NULL;
    MInt32 mask = ASF_FACE_DETECT | ASF_FACERECOGNITION | ASF_FACE3DANGLE; // ASF_AGE | ASF_GENDER | ASF_LIVENESS
    res = ASFInitEngine(ASF_DETECT_MODE_IMAGE, ASF_OP_0_ONLY, NSCALE, FACENUM, mask, &handle);
    if (res != MOK) {
        printf("ASFInitEngine fail: %d\n", res);
    } else {
        printf("ASFInitEngine success: %d\n", res);
    }
    this->handle = handle;
}
|
|
80
|
+
|
|
81
|
+
// Destructor: release the ArcSoft engine owned by this wrapper instance.
ASK::~ASK() {
    ASFUninitEngine(this->handle);
    printf("ASK end\n");
}
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
// Copy the i-th entry of a multi-face detection result into a
// single-face record (bounding rectangle plus face orientation).
void getDetectFaceInfo(ASF_MultiFaceInfo& detectedFaces, ASF_SingleFaceInfo& info, int i) {
    const MRECT& rect = detectedFaces.faceRect[i];
    info.faceRect.left   = rect.left;
    info.faceRect.top    = rect.top;
    info.faceRect.right  = rect.right;
    info.faceRect.bottom = rect.bottom;
    info.faceOrient = detectedFaces.faceOrient[i];
}
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
// FaceDetect(filePath) -> { faceNum, faceRects: [{left, top, right, bottom}] }
// Runs ArcSoft face detection on the given image file.
Napi::Value ASK::faceDetect(const Napi::CallbackInfo& info) {
    Napi::Env env = info.Env();
    int length = info.Length();
    if (length != 1 || !info[0].IsString()) {
        Napi::TypeError::New(env, "input expected").ThrowAsJavaScriptException();
        return errorData(env, "input expected");
    }
    string filePath = info[0].As<Napi::String>().ToString();
    ASVLOFFSCREEN offscreen = { 0 };
    // BUGFIX: imageData was uninitialized. processFile() takes this pointer
    // BY VALUE, so the buffer it allocates never comes back through it, and
    // the old SafeArrayDelete(imageData) deleted a garbage pointer (UB).
    // The real allocation is reachable via offscreen.ppu8Plane[0] and was
    // obtained with malloc, so it must be released with free().
    MUInt8* imageData = NULL;
    processFile(filePath.c_str(), imageData, offscreen);
    ASF_MultiFaceInfo detectedFaces = { 0 };
    MRESULT res = ASFDetectFacesEx(this->handle, &offscreen, &detectedFaces);
    (void)res;

    Napi::Object obj = Napi::Object::New(env);
    obj.Set(Napi::String::New(env, "faceNum"), detectedFaces.faceNum);
    Napi::Array list = Napi::Array::New(env, detectedFaces.faceNum);
    for (int i = 0; i < detectedFaces.faceNum; ++i) {
        ASF_SingleFaceInfo sData;
        getDetectFaceInfo(detectedFaces, sData, i);
        Napi::Object data = Napi::Object::New(env);
        data.Set(Napi::String::New(env, "left"), sData.faceRect.left);
        data.Set(Napi::String::New(env, "top"), sData.faceRect.top);
        data.Set(Napi::String::New(env, "right"), sData.faceRect.right);
        data.Set(Napi::String::New(env, "bottom"), sData.faceRect.bottom);
        list[i] = data;
    }
    obj.Set(Napi::String::New(env, "faceRects"), list);
    SafeArrayDelete(imageData);        // no-op today (imageData stays NULL — see note above)
    SafeFree(offscreen.ppu8Plane[0]);  // BUGFIX: release the pixel buffer malloc'd in processFile
    return obj;
}
|
|
131
|
+
|
|
132
|
+
|
|
133
|
+
|
|
134
|
+
// FaceFeatureExtract(filePath) ->
//   { faceNum, faceRects: [{left, top, right, bottom, featureSize, feature}] }
// Detects faces in an image file and extracts a recognition feature
// (as an ArrayBuffer) for each detected face.
Napi::Value ASK::faceFeatureExtract(const Napi::CallbackInfo& info) {
    Napi::Env env = info.Env();
    int length = info.Length();
    if (length != 1 || !info[0].IsString()) {
        Napi::TypeError::New(env, "input expected").ThrowAsJavaScriptException();
        return errorData(env, "input expected");
    }
    string filePath = info[0].As<Napi::String>().ToString();

    ASVLOFFSCREEN offscreen = { 0 };
    // BUGFIX: was uninitialized; the pixel buffer is only reachable via
    // offscreen.ppu8Plane[0] (see faceDetect for details).
    MUInt8* imageData = NULL;
    processFile(filePath.c_str(), imageData, offscreen);
    ASF_MultiFaceInfo detectedFaces = { 0 };
    MRESULT res = ASFDetectFacesEx(this->handle, &offscreen, &detectedFaces);

    Napi::Object obj = Napi::Object::New(env);
    obj.Set(Napi::String::New(env, "faceNum"), detectedFaces.faceNum);
    if (0 == detectedFaces.faceNum) {
        SafeFree(offscreen.ppu8Plane[0]);  // BUGFIX: this early return used to leak the pixel buffer
        return obj;
    }
    Napi::Array list = Napi::Array::New(env, detectedFaces.faceNum);
    for (int i = 0; i < detectedFaces.faceNum; ++i) {
        ASF_SingleFaceInfo sData;
        getDetectFaceInfo(detectedFaces, sData, i);
        ASF_FaceFeature feature = { 0 };
        res = ASFFaceFeatureExtractEx(this->handle, &offscreen, &sData, &feature);
        Napi::Object data = Napi::Object::New(env);
        data.Set(Napi::String::New(env, "left"), sData.faceRect.left);
        data.Set(Napi::String::New(env, "top"), sData.faceRect.top);
        data.Set(Napi::String::New(env, "right"), sData.faceRect.right);
        data.Set(Napi::String::New(env, "bottom"), sData.faceRect.bottom);
        data.Set(Napi::String::New(env, "featureSize"), feature.featureSize);
        // BUGFIX: copy the feature bytes into a JS-owned ArrayBuffer.
        // feature.feature points at SDK-internal storage; presumably it is
        // reused/invalidated by subsequent SDK calls, so wrapping it directly
        // (as before) handed JavaScript a pointer that could go stale.
        Napi::ArrayBuffer fbuf = Napi::ArrayBuffer::New(env, feature.featureSize);
        memcpy(fbuf.Data(), feature.feature, feature.featureSize);
        data.Set(Napi::String::New(env, "feature"), fbuf);
        list[i] = data;
    }
    obj.Set(Napi::String::New(env, "faceRects"), list);
    SafeArrayDelete(imageData);        // no-op today (imageData stays NULL)
    SafeFree(offscreen.ppu8Plane[0]);  // BUGFIX: release the malloc'd pixel buffer
    return obj;
}
|
|
174
|
+
|
|
175
|
+
|
|
176
|
+
|
|
177
|
+
// FaceFeatureInfo(filePath) -> { faceNum, ages: [...], genders: [...] }
// Detects faces, runs the attribute-analysis pass, and reports per-face
// age and gender arrays.
Napi::Value ASK::faceFeatureInfo(const Napi::CallbackInfo& info) {
    Napi::Env env = info.Env();
    int length = info.Length();
    if (length != 1 || !info[0].IsString()) {
        Napi::TypeError::New(env, "input expected").ThrowAsJavaScriptException();
        return errorData(env, "input expected");
    }
    string filePath = info[0].As<Napi::String>().ToString();
    ASVLOFFSCREEN offscreen = { 0 };
    // BUGFIX: was uninitialized; pixel buffer reachable only via
    // offscreen.ppu8Plane[0] (see faceDetect).
    MUInt8* imageData = NULL;
    processFile(filePath.c_str(), imageData, offscreen);
    ASF_MultiFaceInfo detectedFaces = { 0 };
    MRESULT res = ASFDetectFacesEx(this->handle, &offscreen, &detectedFaces);
    (void)res;

    // Attribute pass for the detected faces.
    // NOTE(review): the engine was initialized WITHOUT ASF_AGE/ASF_GENDER/
    // ASF_LIVENESS in its init mask (see the constructor) — confirm this
    // process call can actually succeed with that configuration.
    MInt32 processMask = ASF_AGE | ASF_GENDER | ASF_LIVENESS;
    ASFProcessEx(this->handle, &offscreen, &detectedFaces, processMask);

    Napi::Object obj = Napi::Object::New(env);
    obj.Set(Napi::String::New(env, "faceNum"), detectedFaces.faceNum);
    if (0 == detectedFaces.faceNum) {
        SafeFree(offscreen.ppu8Plane[0]);  // BUGFIX: this early return used to leak the pixel buffer
        return obj;
    }
    // Per-face ages.
    ASF_AgeInfo age = {0};
    ASFGetAge(this->handle, &age);
    Napi::Array ageList = Napi::Array::New(env, age.num);
    for (int i = 0; i < age.num; ++i) {
        ageList[i] = *(age.ageArray + i);
    }
    obj.Set(Napi::String::New(env, "ages"), ageList);
    // Per-face genders.
    ASF_GenderInfo gender = {0};
    ASFGetGender(this->handle, &gender);
    Napi::Array genderList = Napi::Array::New(env, gender.num);
    for (int i = 0; i < gender.num; ++i) {
        genderList[i] = *(gender.genderArray + i);
    }
    obj.Set(Napi::String::New(env, "genders"), genderList);
    SafeArrayDelete(imageData);        // no-op today (imageData stays NULL)
    SafeFree(offscreen.ppu8Plane[0]);  // BUGFIX: release the malloc'd pixel buffer
    return obj;
}
|
|
218
|
+
|
|
219
|
+
|
|
220
|
+
// FaceFeatureCompare(featureA, featureB) -> similarity score (Number).
// Both arguments are feature ArrayBuffers previously produced by
// FaceFeatureExtract. Returns 0.0 if the SDK comparison fails.
Napi::Value ASK::faceFeatureCompare(const Napi::CallbackInfo& info) {
    Napi::Env env = info.Env();
    if (info.Length() != 2) {
        // BUGFIX: the message claimed "exactly one argument" while the
        // check (correctly) requires two.
        Napi::Error::New(info.Env(), "Expected exactly two arguments").ThrowAsJavaScriptException();
        return errorData(env, "input expected");
    }
    if (!info[0].IsArrayBuffer()) {
        Napi::Error::New(info.Env(), "Expected an ArrayBuffer").ThrowAsJavaScriptException();
        return errorData(env, "input expected");
    }
    if (!info[1].IsArrayBuffer()) {
        Napi::Error::New(info.Env(), "Expected an ArrayBuffer").ThrowAsJavaScriptException();
        return errorData(env, "input expected");
    }

    // Wrap the raw JS buffers in SDK feature descriptors (no copy; the
    // buffers stay alive for the duration of this call).
    Napi::ArrayBuffer buf = info[0].As<Napi::ArrayBuffer>();
    ASF_FaceFeature featureInfo1 = {};
    featureInfo1.featureSize = buf.ByteLength();
    featureInfo1.feature = reinterpret_cast<MByte*>(buf.Data());

    Napi::ArrayBuffer buf2 = info[1].As<Napi::ArrayBuffer>();
    ASF_FaceFeature featureInfo2 = {};
    featureInfo2.featureSize = buf2.ByteLength();
    featureInfo2.feature = reinterpret_cast<MByte*>(buf2.Data());

    // BUGFIX: initialize so a failed compare cannot return stack garbage.
    MFloat confidenceLevel = 0.0f;
    MRESULT res = ASFFaceFeatureCompare(this->handle, &featureInfo1, &featureInfo2, &confidenceLevel);
    (void)res;
    return Napi::Number::New(env, confidenceLevel);
}
|
|
254
|
+
|
|
255
|
+
|
|
256
|
+
// One candidate match produced by ImageFaceCompare: a registered entry's
// id and tag together with its similarity score against a detected face.
// NOTE(review): "Sore" looks like a typo for "Score"; the name is kept
// because it is referenced throughout this file.
typedef struct __face_score
{
    MInt32 searchId;  // caller-supplied unique id of the registered feature
    MFloat score;     // similarity confidence from ASFFaceFeatureCompare
    string name;      // caller-supplied tag / display name
} MFaceSore, *PMFaceSore;
|
|
262
|
+
|
|
263
|
+
// A registered face entry handed in from JavaScript (ImageFaceCompare):
// an id, the extracted feature vector, and a free-form tag.
typedef struct{
    MInt32 searchId;            // unique identifier
    LPASF_FaceFeature feature;  // face feature vector (non-owning pointer)
    MPCChar tag;                // remark / display name (non-owning pointer)
}ASF_FaceFeatureInfo, *LPASF_FaceFeatureInfo;
|
|
268
|
+
|
|
269
|
+
// Comparator for sorting match candidates by descending score.
// BUGFIX: the original returned true whenever a.score >= b.score, i.e. it
// returned true for equal elements. That violates the strict weak ordering
// std::sort requires, which is undefined behavior (can crash or corrupt
// memory in common implementations).
bool compareScore(MFaceSore& a, MFaceSore& b){
    return a.score > b.score;
}
|
|
273
|
+
|
|
274
|
+
// ImageFaceCompare(filePath, registeredList, ...) ->
//   { faceNum, faceRects: [{left, top, right, bottom,
//                           faceScores: [{searchId, score, name}]}] }
// For every face detected in the image, extracts its feature and compares
// it against each registered feature; reports the top (up to 5) matches
// per face, sorted by descending score.
Napi::Value ASK::ImageFaceCompare(const Napi::CallbackInfo& info) {
    Napi::Env env = info.Env();
    int length = info.Length();
    if (length < 3) {
        Napi::TypeError::New(env, "input expected").ThrowAsJavaScriptException();
        return errorData(env, "input expected");
    }
    string filePath = info[0].As<Napi::String>().ToString();
    ASVLOFFSCREEN offscreen = { 0 };
    // BUGFIX: was uninitialized; pixel buffer reachable only via
    // offscreen.ppu8Plane[0] (see faceDetect).
    MUInt8* imageData = NULL;
    processFile(filePath.c_str(), imageData, offscreen);
    ASF_MultiFaceInfo detectedFaces = { 0 };
    MRESULT res = ASFDetectFacesEx(this->handle, &offscreen, &detectedFaces);
    Napi::Object obj = Napi::Object::New(env);
    obj.Set(Napi::String::New(env, "faceNum"), detectedFaces.faceNum);
    if (0 == detectedFaces.faceNum) {
        SafeFree(offscreen.ppu8Plane[0]);  // BUGFIX: early return used to leak the pixel buffer
        return obj;
    }

    // Parse the registered-feature list from JS.
    if (!info[1].IsArray()) {
        Napi::TypeError::New(env, "array expected").ThrowAsJavaScriptException();
        SafeFree(offscreen.ppu8Plane[0]);  // BUGFIX: early return used to leak the pixel buffer
        return errorData(env, "array expected");
    }
    Napi::Array arr = info[1].As<Napi::Array>();
    MUInt32 len = arr.Length();
    // BUGFIX: the original leaked a heap std::string, ASF_FaceFeature and
    // ASF_FaceFeatureInfo per entry (and an MFaceSore per comparison).
    // Value containers now own everything for the duration of the call;
    // tag/feature pointers point into these vectors, which outlive all uses.
    std::vector<std::string> tags(len);
    std::vector<ASF_FaceFeature> features(len);
    std::vector<ASF_FaceFeatureInfo> featureInfoList(len);
    for (MUInt32 i = 0; i < len; ++i) {
        Napi::Object entry = static_cast<Napi::Value>(arr[i]).As<Napi::Object>().ToObject();
        tags[i] = entry.Get("tag").As<Napi::String>().ToString();
        // Feature bytes are borrowed from the JS ArrayBuffer (alive for this call).
        Napi::ArrayBuffer buf = entry.Get("feature").As<Napi::ArrayBuffer>();
        features[i].featureSize = buf.ByteLength();
        features[i].feature = reinterpret_cast<MByte*>(buf.Data());
        featureInfoList[i].searchId = entry.Get("searchId").As<Napi::Number>().Uint32Value();
        featureInfoList[i].tag = tags[i].c_str();
        featureInfoList[i].feature = &features[i];
    }

    // Compare every detected face against every registered feature.
    Napi::Array list = Napi::Array::New(env, detectedFaces.faceNum);
    for (int i = 0; i < detectedFaces.faceNum; ++i) {
        ASF_SingleFaceInfo sData;
        getDetectFaceInfo(detectedFaces, sData, i);
        ASF_FaceFeature feature = { 0 };
        res = ASFFaceFeatureExtractEx(this->handle, &offscreen, &sData, &feature);
        std::vector<MFaceSore> faceScores(len);
        for (MUInt32 j = 0; j < len; ++j) {
            // BUGFIX: initialize so a failed compare cannot leave garbage.
            MFloat confidenceLevel = 0.0f;
            ASFFaceFeatureCompare(this->handle, &feature, featureInfoList[j].feature, &confidenceLevel);
            faceScores[j].searchId = featureInfoList[j].searchId;
            faceScores[j].score = confidenceLevel;
            faceScores[j].name = featureInfoList[j].tag;
        }
        sort(faceScores.begin(), faceScores.end(), compareScore);
        // Report at most the top 5 candidates.
        size_t ll = faceScores.size() > 5 ? 5 : faceScores.size();
        Napi::Array mFaceScores = Napi::Array::New(env, ll);
        for (size_t n = 0; n < ll; ++n) {
            Napi::Object faceScore = Napi::Object::New(env);
            faceScore.Set(Napi::String::New(env, "searchId"), faceScores[n].searchId);
            faceScore.Set(Napi::String::New(env, "score"), faceScores[n].score);
            faceScore.Set(Napi::String::New(env, "name"), faceScores[n].name);
            mFaceScores[n] = faceScore;
        }
        Napi::Object data = Napi::Object::New(env);
        data.Set(Napi::String::New(env, "left"), sData.faceRect.left);
        data.Set(Napi::String::New(env, "top"), sData.faceRect.top);
        data.Set(Napi::String::New(env, "right"), sData.faceRect.right);
        data.Set(Napi::String::New(env, "bottom"), sData.faceRect.bottom);
        data.Set(Napi::String::New(env, "faceScores"), mFaceScores);
        list[i] = data;
    }
    obj.Set(Napi::String::New(env, "faceRects"), list);
    SafeArrayDelete(imageData);        // no-op today (imageData stays NULL)
    SafeFree(offscreen.ppu8Plane[0]);  // BUGFIX: release the malloc'd pixel buffer
    return obj;
}
|
|
358
|
+
|
|
359
|
+
|
|
360
|
+
|
|
361
|
+
////获取激活信息(采集离线激活信息、激活文件信息、SDK版本信息)
|
|
362
|
+
void ASK::getSdkRelatedInfo() {
|
|
363
|
+
printf("\n************* ArcFace SDK Info *****************\n");
|
|
364
|
+
MRESULT res = MOK;
|
|
365
|
+
//采集当前设备信息,用于离线激活
|
|
366
|
+
// char* deviceInfo = NULL;
|
|
367
|
+
// res = ASFGetActiveDeviceInfo(&deviceInfo);
|
|
368
|
+
// if (res != MOK) {
|
|
369
|
+
// printf("ASFGetActiveDeviceInfo fail: %d\n", res);
|
|
370
|
+
// } else {
|
|
371
|
+
// printf("ASFGetActiveDeviceInfo sucess: %s\n", deviceInfo);
|
|
372
|
+
// }
|
|
373
|
+
//获取激活文件信息
|
|
374
|
+
ASF_ActiveFileInfo activeFileInfo = { 0 };
|
|
375
|
+
res = ASFGetActiveFileInfo(&activeFileInfo);
|
|
376
|
+
if (res != MOK){
|
|
377
|
+
printf("ASFGetActiveFileInfo fail: %d\n", res);
|
|
378
|
+
}else{
|
|
379
|
+
//这里仅获取了有效期时间,还需要其他信息直接打印即可
|
|
380
|
+
char startDateTime[32];
|
|
381
|
+
timestampToTime(activeFileInfo.startTime, startDateTime, 32);
|
|
382
|
+
printf("startTime: %s\n", startDateTime);
|
|
383
|
+
char endDateTime[32];
|
|
384
|
+
timestampToTime(activeFileInfo.endTime, endDateTime, 32);
|
|
385
|
+
printf("endTime: %s\n", endDateTime);
|
|
386
|
+
}
|
|
387
|
+
//SDK版本信息
|
|
388
|
+
const ASF_VERSION version = ASFGetVersion();
|
|
389
|
+
printf("\nVersion:%s\n", version.Version);
|
|
390
|
+
printf("BuildDate:%s\n", version.BuildDate);
|
|
391
|
+
printf("CopyRight:%s\n", version.CopyRight);
|
|
392
|
+
}
|
|
393
|
+
|
|
394
|
+
//时间戳转换为日期格式
|
|
395
|
+
// Convert a decimal Unix-seconds string into "YYYY-mm-dd HH:MM:SS" (UTC),
// writing at most dateTimeSize bytes into dateTime.
// BUGFIX: guard against gmtime() returning NULL (out-of-range timestamp),
// which previously made strftime dereference a null pointer; on failure
// dateTime is set to the empty string instead.
void ASK::timestampToTime(char* timeStamp, char* dateTime, int dateTimeSize){
    time_t tTimeStamp = atoll(timeStamp);
    struct tm* pTm = gmtime(&tTimeStamp);
    if (pTm == NULL) {
        if (dateTimeSize > 0) dateTime[0] = '\0';
        return;
    }
    strftime(dateTime, dateTimeSize, "%Y-%m-%d %H:%M:%S", pTm);
}
|
|
400
|
+
|
|
401
|
+
//图像颜色格式转换
|
|
402
|
+
// Fill an ASVLOFFSCREEN descriptor (format, dimensions, per-plane pitches
// and plane pointers) for the raw buffer imgData in the given pixel format.
// No pixel data is copied: offscreen keeps pointing into imgData, which must
// stay alive while offscreen is in use.
// Returns 1 on success, 0 for an unsupported format.
int ASK::ColorSpaceConversion(MInt32 width, MInt32 height, MInt32 format, MUInt8* imgData, ASVLOFFSCREEN& offscreen){
    offscreen.u32PixelArrayFormat = (unsigned int)format;
    offscreen.i32Width = width;
    offscreen.i32Height = height;

    switch (offscreen.u32PixelArrayFormat)
    {
    case ASVL_PAF_RGB24_B8G8R8:
        // Packed BGR: one plane, 3 bytes per pixel.
        offscreen.pi32Pitch[0] = offscreen.i32Width * 3;
        offscreen.ppu8Plane[0] = imgData;
        break;
    case ASVL_PAF_I420:
        // Planar YUV 4:2:0: full-resolution Y plane followed by
        // quarter-resolution U and V planes (half pitch each).
        offscreen.pi32Pitch[0] = width;
        offscreen.pi32Pitch[1] = width >> 1;
        offscreen.pi32Pitch[2] = width >> 1;
        offscreen.ppu8Plane[0] = imgData;
        offscreen.ppu8Plane[1] = offscreen.ppu8Plane[0] + offscreen.i32Height*offscreen.i32Width;
        offscreen.ppu8Plane[2] = offscreen.ppu8Plane[0] + offscreen.i32Height*offscreen.i32Width * 5 / 4;
        break;
    case ASVL_PAF_NV12:
    case ASVL_PAF_NV21:
        // Semi-planar 4:2:0: Y plane plus one interleaved chroma plane
        // starting right after the Y plane.
        offscreen.pi32Pitch[0] = offscreen.i32Width;
        offscreen.pi32Pitch[1] = offscreen.pi32Pitch[0];
        offscreen.ppu8Plane[0] = imgData;
        offscreen.ppu8Plane[1] = offscreen.ppu8Plane[0] + offscreen.pi32Pitch[0] * offscreen.i32Height;
        break;
    case ASVL_PAF_YUYV:
    case ASVL_PAF_DEPTH_U16:
        // Packed 2-bytes-per-pixel formats: a single plane.
        offscreen.pi32Pitch[0] = offscreen.i32Width * 2;
        offscreen.ppu8Plane[0] = imgData;
        break;
    case ASVL_PAF_GRAY:
        // Single 8-bit luminance plane.
        offscreen.pi32Pitch[0] = offscreen.i32Width;
        offscreen.ppu8Plane[0] = imgData;
        break;
    default:
        // Unsupported format: offscreen is left partially filled.
        return 0;
    }
    return 1;
}
|
|
442
|
+
|
|
443
|
+
//读取文件 图片修正 并颜色通道转换
|
|
444
|
+
// Read an image file, trim it so width % 4 == 0 and height % 2 == 0
// (via a resize), convert BGR -> YUV 4:2:0, and describe the resulting raw
// buffer to the SDK through ColorSpaceConversion / offscreen.
//
// NOTE(review): imageData is taken BY VALUE, so the buffer malloc'd below
// never reaches the caller's pointer. Callers that delete their own
// (uninitialized) imageData invoke undefined behavior, and the actual
// allocation is only reachable — and must be free()d — through
// offscreen.ppu8Plane[0]. Changing the parameter to MUInt8*& would fix this
// but requires a matching change to the declaration in ask.h.
// NOTE(review): cvtColor produces I420 layout (planar U then V) yet the
// buffer is declared to the SDK as NV21 (interleaved VU). The luma plane is
// identical but the chroma layout differs — confirm which format the engine
// actually expects.
void ASK::processFile(const char* filePath, MUInt8* imageData, ASVLOFFSCREEN& offscreen) {
    cv::Mat src = imread(filePath);
    int Width = src.cols;
    int Height = src.rows;
    // The SDK needs width divisible by 4 and height divisible by 2;
    // shave off the remainder by resizing slightly smaller.
    int wScore = Width % 4;
    int hScore = Height % 2;
    if (wScore != 0 || hScore != 0) {
        Width -= wScore;
        Height -= hScore;
        cv::Mat dst;
        cv::resize(src, dst, cv::Size(Width, Height));
        src.release();
        src = dst;
    }
    cv::Mat dest;
    cvtColor(src, dest, COLOR_BGR2YUV_I420);
    int len = Height*Width*3/2;  // 4:2:0 sampling = 1.5 bytes per pixel
    imageData = (MUInt8*)malloc(len);
    memset(imageData, 0, len);
    memcpy(imageData, dest.data, len);
    src.release();
    dest.release();
    ColorSpaceConversion(Width, Height, ASVL_PAF_NV21, imageData, offscreen);
}
|
|
468
|
+
|
|
469
|
+
|
|
470
|
+
// Napi::Value ASK::RegisterFaceFeature(const Napi::CallbackInfo& info) {
|
|
471
|
+
// Napi::Env env = info.Env();
|
|
472
|
+
// int length = info.Length();
|
|
473
|
+
// if (info.Length() <= 0 || !info[0].IsArray()) {
|
|
474
|
+
// Napi::TypeError::New(env, "array expected").ThrowAsJavaScriptException();
|
|
475
|
+
// return errorData(env, "array expected");
|
|
476
|
+
// }
|
|
477
|
+
// Napi::Array arr = info[0].As<Napi::Array>();
|
|
478
|
+
// MUInt32 len = arr.Length();
|
|
479
|
+
// ASF_FaceFeatureInfo* featureInfoList = new ASF_FaceFeatureInfo[len];
|
|
480
|
+
// for(int i = 0; i < len; ++i) {
|
|
481
|
+
// Napi::Object obj = static_cast<Napi::Value>(arr[i]).As<Napi::Object>().ToObject();
|
|
482
|
+
// MInt32 searchId = obj.Get(static_cast<napi_value>(Napi::String::New(info.Env(),"searchId"))).As<Napi::Number>().Uint32Value();
|
|
483
|
+
// std::string* tag = new std::string();
|
|
484
|
+
// *tag = obj.Get(static_cast<napi_value>(Napi::String::New(info.Env(),"tag"))).As<Napi::String>().ToString();
|
|
485
|
+
// Napi::ArrayBuffer buf = obj.Get(static_cast<napi_value>(Napi::String::New(info.Env(),"feature"))).As<Napi::ArrayBuffer>();
|
|
486
|
+
// ASF_FaceFeature* featureInfo = new ASF_FaceFeature();
|
|
487
|
+
// featureInfo->featureSize = buf.ByteLength();
|
|
488
|
+
// featureInfo->feature = reinterpret_cast<MByte*>(buf.Data());;
|
|
489
|
+
// ASF_FaceFeatureInfo* info = new ASF_FaceFeatureInfo();
|
|
490
|
+
// info->searchId = searchId;
|
|
491
|
+
// info->tag = tag->c_str();
|
|
492
|
+
// info->feature = featureInfo;
|
|
493
|
+
// featureInfoList[i] = *info;
|
|
494
|
+
// }
|
|
495
|
+
// MRESULT res = ASFRegisterFaceFeature(this->handle, featureInfoList, len);
|
|
496
|
+
// delete []featureInfoList;
|
|
497
|
+
// return Napi::Number::New(env, res);
|
|
498
|
+
// }
|
|
499
|
+
|
|
500
|
+
// Napi::Value ASK::SearchFaceFeature(const Napi::CallbackInfo& info) {
|
|
501
|
+
// Napi::Env env = info.Env();
|
|
502
|
+
// int length = info.Length();
|
|
503
|
+
// if (length != 1 || !info[0].IsString() ) {
|
|
504
|
+
// Napi::TypeError::New(env, "input expected").ThrowAsJavaScriptException();
|
|
505
|
+
// return errorData(env, "input expected"); //Napi::Number::New(info.Env(), -1);
|
|
506
|
+
// }
|
|
507
|
+
// string filePath = info[0].As<Napi::String>().ToString();
|
|
508
|
+
|
|
509
|
+
// ASVLOFFSCREEN offscreen = { 0 };
|
|
510
|
+
// MUInt8* imageData;
|
|
511
|
+
// processFile(filePath.c_str(), imageData, offscreen);
|
|
512
|
+
// ASF_MultiFaceInfo detectedFaces = { 0 };
|
|
513
|
+
// MRESULT res = ASFDetectFacesEx(this->handle, &offscreen, &detectedFaces);
|
|
514
|
+
|
|
515
|
+
// Napi::Object obj = Napi::Object::New(env);
|
|
516
|
+
// obj.Set(Napi::String::New(env, "faceNum"), detectedFaces.faceNum);
|
|
517
|
+
// if (0 == detectedFaces.faceNum) {
|
|
518
|
+
// return obj;
|
|
519
|
+
// }
|
|
520
|
+
// Napi::Array list = Napi::Array::New(env, detectedFaces.faceNum);
|
|
521
|
+
// for(int i = 0; i < detectedFaces.faceNum; ++i) {
|
|
522
|
+
// ASF_SingleFaceInfo sData;
|
|
523
|
+
// getDetectFaceInfo(detectedFaces, sData, i);
|
|
524
|
+
// ASF_FaceFeature feature = { 0 };
|
|
525
|
+
// res = ASFFaceFeatureExtractEx(handle, &offscreen, &sData, ASF_REGISTER, 0, &feature);
|
|
526
|
+
|
|
527
|
+
// Napi::Object data = Napi::Object::New(env);
|
|
528
|
+
// data.Set(Napi::String::New(env, "left"), sData.faceRect.left);
|
|
529
|
+
// data.Set(Napi::String::New(env, "top"), sData.faceRect.top);
|
|
530
|
+
// data.Set(Napi::String::New(env, "right"), sData.faceRect.right);
|
|
531
|
+
// data.Set(Napi::String::New(env, "bottom"), sData.faceRect.bottom);
|
|
532
|
+
// MFloat confidenceLevel;
|
|
533
|
+
// ASF_FaceFeatureInfo result = {0};
|
|
534
|
+
// MRESULT re = ASFFaceFeatureCompare_Search(this->handle, &feature, &confidenceLevel, &result);
|
|
535
|
+
// data.Set(Napi::String::New(env, "score"), confidenceLevel);
|
|
536
|
+
// data.Set(Napi::String::New(env, "searchId"), result.searchId);
|
|
537
|
+
// data.Set(Napi::String::New(env, "name"), (string)(result.tag));
|
|
538
|
+
// list[i] = data;
|
|
539
|
+
// }
|
|
540
|
+
// obj.Set(Napi::String::New(env, "faceRects"), list);
|
|
541
|
+
// SafeArrayDelete(imageData);
|
|
542
|
+
// return obj;
|
|
543
|
+
// }
|
package/ask.h
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
#ifndef ASK_H
#define ASK_H

#include <napi.h>
#include "arcsoft_face_sdk.h"
#include "amcomdef.h"
#include "asvloffscreen.h"
#include "merror.h"

// N-API wrapper class around the ArcSoft face SDK. Each instance owns a
// single SDK engine handle, created in the constructor and released in the
// destructor.
class ASK : public Napi::ObjectWrap<ASK> {
public:
    // Registers the class and its instance methods on the module exports.
    static Napi::Object Init(Napi::Env env, Napi::Object exports);
    // Expects three strings: activates the SDK and initializes the engine.
    ASK(const Napi::CallbackInfo& info); //APPID/SDKKEY/ACTIVEKEY
    ~ASK();

    // Detect faces in an image file; returns { faceNum, faceRects }.
    Napi::Value faceDetect(const Napi::CallbackInfo& info);
    // Detect faces and extract a recognition feature per face.
    Napi::Value faceFeatureExtract(const Napi::CallbackInfo& info);
    // Compare two feature ArrayBuffers; returns a similarity score.
    Napi::Value faceFeatureCompare(const Napi::CallbackInfo& info);
    // Detect faces and report age/gender attribute arrays.
    Napi::Value faceFeatureInfo(const Napi::CallbackInfo& info);

    // V4.1Pro
    // Napi::Value RegisterFaceFeature(const Napi::CallbackInfo& info);
    // Napi::Value SearchFaceFeature(const Napi::CallbackInfo& info);

    // Compare every face in an image against a caller-supplied feature list.
    Napi::Value ImageFaceCompare(const Napi::CallbackInfo& info);

private:
    // Prints activation-file validity and SDK version diagnostics.
    void getSdkRelatedInfo();
    // Converts a Unix-seconds string into "YYYY-mm-dd HH:MM:SS" (UTC).
    void timestampToTime(char* timeStamp, char* dateTime, int dateTimeSize);
    // Fills an ASVLOFFSCREEN descriptor (pitches/planes) for the format.
    int ColorSpaceConversion(MInt32 width, MInt32 height, MInt32 format, MUInt8* imgData, ASVLOFFSCREEN& offscreen);
    // Loads an image, normalizes dimensions, converts to YUV 4:2:0 and
    // fills `offscreen`.
    // NOTE(review): imageData is passed by value, so the allocation made
    // inside never reaches the caller — the buffer is only reachable via
    // offscreen.ppu8Plane[0]; do not rely on imageData at call sites.
    void processFile(const char* filePath, MUInt8* imageData, ASVLOFFSCREEN& offscreen);

    // void mFaceDetect(ASVLOFFSCREEN& offscree, ASF_MultiFaceInfo* detectedFaces);
    // void mFaceFeatureExtract(ASVLOFFSCREEN& offscree, ASF_MultiFaceInfo* detectedFaces);

    // ArcSoft engine handle; NULL when initialization failed.
    MHandle handle;
};

#endif
|
package/askface.cc
ADDED
|
Binary file
|
package/images/demo.png
ADDED
|
Binary file
|
package/images/demo2.png
ADDED
|
Binary file
|
package/images/test.png
ADDED
|
Binary file
|