capacitor-plugin-faceantispoofing 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CapacitorPluginFaceantispoofing.podspec +20 -0
- package/Package.swift +28 -0
- package/README.md +175 -0
- package/android/build.gradle +64 -0
- package/android/src/main/AndroidManifest.xml +2 -0
- package/android/src/main/assets/FaceAntiSpoofing.tflite +0 -0
- package/android/src/main/assets/onet.tflite +0 -0
- package/android/src/main/assets/pnet.tflite +0 -0
- package/android/src/main/assets/rnet.tflite +0 -0
- package/android/src/main/java/io/github/asephermann/plugins/faceantispoofing/FaceAntiSpoofing.java +112 -0
- package/android/src/main/java/io/github/asephermann/plugins/faceantispoofing/FaceAntiSpoofingPlugin.java +178 -0
- package/android/src/main/java/io/github/asephermann/plugins/faceantispoofing/MyUtil.java +174 -0
- package/android/src/main/java/io/github/asephermann/plugins/faceantispoofing/mtcnn/Align.java +28 -0
- package/android/src/main/java/io/github/asephermann/plugins/faceantispoofing/mtcnn/Box.java +73 -0
- package/android/src/main/java/io/github/asephermann/plugins/faceantispoofing/mtcnn/MTCNN.java +268 -0
- package/android/src/main/java/io/github/asephermann/plugins/faceantispoofing/mtcnn/Utils.java +25 -0
- package/android/src/main/res/.gitkeep +0 -0
- package/dist/docs.json +104 -0
- package/dist/esm/definitions.d.ts +22 -0
- package/dist/esm/definitions.js +2 -0
- package/dist/esm/definitions.js.map +1 -0
- package/dist/esm/index.d.ts +4 -0
- package/dist/esm/index.js +7 -0
- package/dist/esm/index.js.map +1 -0
- package/dist/esm/web.d.ts +5 -0
- package/dist/esm/web.js +14 -0
- package/dist/esm/web.js.map +1 -0
- package/dist/plugin.cjs.js +28 -0
- package/dist/plugin.cjs.js.map +1 -0
- package/dist/plugin.js +31 -0
- package/dist/plugin.js.map +1 -0
- package/ios/Sources/FaceAntiSpoofingPlugin/Align.swift +41 -0
- package/ios/Sources/FaceAntiSpoofingPlugin/Box.swift +70 -0
- package/ios/Sources/FaceAntiSpoofingPlugin/FaceAntiSpoofing.swift +105 -0
- package/ios/Sources/FaceAntiSpoofingPlugin/FaceAntiSpoofing.tflite +0 -0
- package/ios/Sources/FaceAntiSpoofingPlugin/FaceAntiSpoofingPlugin.swift +166 -0
- package/ios/Sources/FaceAntiSpoofingPlugin/MTCNN.swift +407 -0
- package/ios/Sources/FaceAntiSpoofingPlugin/Tools.swift +103 -0
- package/ios/Sources/FaceAntiSpoofingPlugin/onet.tflite +0 -0
- package/ios/Sources/FaceAntiSpoofingPlugin/pnet.tflite +0 -0
- package/ios/Sources/FaceAntiSpoofingPlugin/rnet.tflite +0 -0
- package/ios/Tests/FaceAntiSpoofingPluginTests/FaceAntiSpoofingTests.swift +15 -0
- package/package.json +80 -0
|
@@ -0,0 +1,407 @@
|
|
|
1
|
+
import Foundation
|
|
2
|
+
import UIKit
|
|
3
|
+
import TensorFlowLite
|
|
4
|
+
|
|
5
|
+
public class MTCNN {
|
|
6
|
+
private static let pnetModelFileName = "pnet"
|
|
7
|
+
private static let rnetModelFileName = "rnet"
|
|
8
|
+
private static let onetModelFileName = "onet"
|
|
9
|
+
private static let modelFileType = "tflite"
|
|
10
|
+
|
|
11
|
+
private static let factor: Float = 0.709
|
|
12
|
+
private static let pNetThreshold: Float = 0.6
|
|
13
|
+
private static let rNetThreshold: Float = 0.7
|
|
14
|
+
private static let oNetThreshold: Float = 0.7
|
|
15
|
+
|
|
16
|
+
private var pnetInterpreter: Interpreter?
|
|
17
|
+
private var rnetInterpreter: Interpreter?
|
|
18
|
+
private var onetInterpreter: Interpreter?
|
|
19
|
+
|
|
20
|
+
public init() throws {
|
|
21
|
+
var options = InterpreterOptions()
|
|
22
|
+
options.numberOfThreads = 4
|
|
23
|
+
|
|
24
|
+
let pnetModelPath = Tools.filePathForResourceName(name: Self.pnetModelFileName, extension: Self.modelFileType)
|
|
25
|
+
let rnetModelPath = Tools.filePathForResourceName(name: Self.rnetModelFileName, extension: Self.modelFileType)
|
|
26
|
+
let onetModelPath = Tools.filePathForResourceName(name: Self.onetModelFileName, extension: Self.modelFileType)
|
|
27
|
+
|
|
28
|
+
pnetInterpreter = try Interpreter(modelPath: pnetModelPath, options: options)
|
|
29
|
+
rnetInterpreter = try Interpreter(modelPath: rnetModelPath, options: options)
|
|
30
|
+
onetInterpreter = try Interpreter(modelPath: onetModelPath, options: options)
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
public func detectFaces(image: UIImage, minFaceSize: CGFloat) -> [Box] {
|
|
34
|
+
var boxes = pNet(image: image, minSize: minFaceSize)
|
|
35
|
+
squareLimit(boxes: &boxes, w: Int(image.size.width), h: Int(image.size.height))
|
|
36
|
+
|
|
37
|
+
boxes = rNet(image: image, boxes: boxes)
|
|
38
|
+
squareLimit(boxes: &boxes, w: Int(image.size.width), h: Int(image.size.height))
|
|
39
|
+
|
|
40
|
+
boxes = oNet(image: image, boxes: boxes)
|
|
41
|
+
|
|
42
|
+
return boxes
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
private func squareLimit(boxes: inout [Box], w: Int, h: Int) {
|
|
46
|
+
for i in 0..<boxes.count {
|
|
47
|
+
boxes[i].toSquareShape()
|
|
48
|
+
boxes[i].limitSquare(w: w, h: h)
|
|
49
|
+
}
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
// MARK: - PNet
|
|
53
|
+
|
|
54
|
+
private func pNet(image: UIImage, minSize: CGFloat) -> [Box] {
|
|
55
|
+
let whMin = min(image.size.width, image.size.height)
|
|
56
|
+
var currentFaceSize = minSize
|
|
57
|
+
var totalBoxes: [Box] = []
|
|
58
|
+
|
|
59
|
+
while currentFaceSize <= whMin {
|
|
60
|
+
let scale = 12.0 / currentFaceSize
|
|
61
|
+
|
|
62
|
+
guard let img = Tools.scaleImage(image: image, toScale: scale) else {
|
|
63
|
+
currentFaceSize /= Self.factor
|
|
64
|
+
continue
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
let w = Int(img.size.width)
|
|
68
|
+
let h = Int(img.size.height)
|
|
69
|
+
|
|
70
|
+
let outW = Int(ceil(CGFloat(w) * 0.5 - 5) + 0.5)
|
|
71
|
+
let outH = Int(ceil(CGFloat(h) * 0.5 - 5) + 0.5)
|
|
72
|
+
|
|
73
|
+
guard let (prob1, conv4_2) = pNetForward(image: img, w: w, h: h, outW: outW, outH: outH) else {
|
|
74
|
+
currentFaceSize /= Self.factor
|
|
75
|
+
continue
|
|
76
|
+
}
|
|
77
|
+
|
|
78
|
+
var curBoxes: [Box] = []
|
|
79
|
+
generateBoxes(prob1: prob1, conv4_2: conv4_2, scale: scale, boxes: &curBoxes)
|
|
80
|
+
|
|
81
|
+
nms(boxes: &curBoxes, threshold: 0.5, method: "Union")
|
|
82
|
+
|
|
83
|
+
for box in curBoxes {
|
|
84
|
+
if !box.deleted {
|
|
85
|
+
totalBoxes.append(box)
|
|
86
|
+
}
|
|
87
|
+
}
|
|
88
|
+
|
|
89
|
+
currentFaceSize /= Self.factor
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
nms(boxes: &totalBoxes, threshold: 0.7, method: "Union")
|
|
93
|
+
boundingBoxRegression(boxes: &totalBoxes)
|
|
94
|
+
|
|
95
|
+
return updateBoxes(boxes: totalBoxes)
|
|
96
|
+
}
|
|
97
|
+
|
|
98
|
+
private func pNetForward(image: UIImage, w: Int, h: Int, outW: Int, outH: Int) -> ([[Float]], [[Float]])? {
|
|
99
|
+
guard let interpreter = pnetInterpreter else { return nil }
|
|
100
|
+
|
|
101
|
+
guard let floats = normalizeImage(image: image, w: w, h: h) else { return nil }
|
|
102
|
+
let transposed = transpose(data: floats, h: h, w: w, c: 3)
|
|
103
|
+
|
|
104
|
+
let inputData = Data(bytes: transposed, count: w * h * 3 * MemoryLayout<Float>.size)
|
|
105
|
+
|
|
106
|
+
do {
|
|
107
|
+
try interpreter.resizeInput(at: 0, to: [1, w, h, 3])
|
|
108
|
+
try interpreter.allocateTensors()
|
|
109
|
+
try interpreter.copy(inputData, toInputAt: 0)
|
|
110
|
+
try interpreter.invoke()
|
|
111
|
+
|
|
112
|
+
guard let prob1Data = try interpreter.output(at: 0) as? [Float],
|
|
113
|
+
let conv4_2Data = try interpreter.output(at: 1) as? [Float] else {
|
|
114
|
+
return nil
|
|
115
|
+
}
|
|
116
|
+
|
|
117
|
+
let prob1 = reshapeTo2D(data: prob1Data, rows: outW, cols: outH * 2)
|
|
118
|
+
let conv4_2 = reshapeTo2D(data: conv4_2Data, rows: outW, cols: outH * 4)
|
|
119
|
+
|
|
120
|
+
let prob1T = transpose2D(data: prob1, rows: outW, cols: outH * 2)
|
|
121
|
+
let conv4_2T = transpose2D(data: conv4_2, rows: outW, cols: outH * 4)
|
|
122
|
+
|
|
123
|
+
return (prob1T, conv4_2T)
|
|
124
|
+
} catch {
|
|
125
|
+
print("PNet forward error: \(error)")
|
|
126
|
+
return nil
|
|
127
|
+
}
|
|
128
|
+
}
|
|
129
|
+
|
|
130
|
+
private func generateBoxes(prob1: [[Float]], conv4_2: [[Float]], scale: CGFloat, boxes: inout [Box]) {
|
|
131
|
+
let h = prob1.count
|
|
132
|
+
let w = prob1[0].count / 2
|
|
133
|
+
|
|
134
|
+
for y in 0..<h {
|
|
135
|
+
for x in 0..<w {
|
|
136
|
+
let score = prob1[y][x * 2 + 1]
|
|
137
|
+
if score > Self.pNetThreshold {
|
|
138
|
+
let box = Box()
|
|
139
|
+
box.score = score
|
|
140
|
+
box.box[0] = Int(round(CGFloat(x * 2) / scale))
|
|
141
|
+
box.box[1] = Int(round(CGFloat(y * 2) / scale))
|
|
142
|
+
box.box[2] = Int(round(CGFloat(x * 2 + 11) / scale))
|
|
143
|
+
box.box[3] = Int(round(CGFloat(y * 2 + 11) / scale))
|
|
144
|
+
for i in 0..<4 {
|
|
145
|
+
box.bbr[i] = conv4_2[y][x * 4 + i]
|
|
146
|
+
}
|
|
147
|
+
boxes.append(box)
|
|
148
|
+
}
|
|
149
|
+
}
|
|
150
|
+
}
|
|
151
|
+
}
|
|
152
|
+
|
|
153
|
+
// MARK: - RNet
|
|
154
|
+
|
|
155
|
+
private func rNet(image: UIImage, boxes: [Box]) -> [Box] {
|
|
156
|
+
guard let interpreter = rnetInterpreter else { return boxes }
|
|
157
|
+
|
|
158
|
+
let num = boxes.count
|
|
159
|
+
var rNetIn = [[Float]]()
|
|
160
|
+
|
|
161
|
+
for i in 0..<num {
|
|
162
|
+
guard let cropped = crop(image: image, with: boxes[i], andScale: 24) else { continue }
|
|
163
|
+
if let normalized = normalizeImage(image: cropped, w: 24, h: 24) {
|
|
164
|
+
let transposed = transpose(data: normalized, h: 24, w: 24, c: 3)
|
|
165
|
+
rNetIn.append(transposed)
|
|
166
|
+
}
|
|
167
|
+
}
|
|
168
|
+
|
|
169
|
+
guard let (prob1, conv5_2) = rNetForward(interpreter: interpreter, input: rNetIn, num: num) else {
|
|
170
|
+
return boxes
|
|
171
|
+
}
|
|
172
|
+
|
|
173
|
+
var result = boxes
|
|
174
|
+
for i in 0..<num {
|
|
175
|
+
result[i].score = prob1[i][1]
|
|
176
|
+
for j in 0..<4 {
|
|
177
|
+
result[i].bbr[j] = conv5_2[i][j]
|
|
178
|
+
}
|
|
179
|
+
}
|
|
180
|
+
|
|
181
|
+
for i in 0..<result.count {
|
|
182
|
+
if result[i].score < Self.rNetThreshold {
|
|
183
|
+
result[i].deleted = true
|
|
184
|
+
}
|
|
185
|
+
}
|
|
186
|
+
|
|
187
|
+
nms(boxes: &result, threshold: 0.7, method: "Union")
|
|
188
|
+
boundingBoxRegression(boxes: &result)
|
|
189
|
+
|
|
190
|
+
return updateBoxes(boxes: result)
|
|
191
|
+
}
|
|
192
|
+
|
|
193
|
+
private func rNetForward(interpreter: Interpreter, input: [[Float]], num: Int) -> ([[Float]], [[Float]])? {
|
|
194
|
+
var prob1 = [[Float]](repeating: [Float](repeating: 0, count: 2), count: num)
|
|
195
|
+
var conv5_2 = [[Float]](repeating: [Float](repeating: 0, count: 4), count: num)
|
|
196
|
+
|
|
197
|
+
do {
|
|
198
|
+
try interpreter.allocateTensors()
|
|
199
|
+
|
|
200
|
+
for i in 0..<num {
|
|
201
|
+
let inputData = Data(bytes: input[i], count: 24 * 24 * 3 * MemoryLayout<Float>.size)
|
|
202
|
+
try interpreter.copy(inputData, toInputAt: 0)
|
|
203
|
+
try interpreter.invoke()
|
|
204
|
+
|
|
205
|
+
guard let prob1Data = try interpreter.output(at: 0) as? [Float],
|
|
206
|
+
let conv5_2Data = try interpreter.output(at: 1) as? [Float] else {
|
|
207
|
+
continue
|
|
208
|
+
}
|
|
209
|
+
|
|
210
|
+
prob1[i] = [prob1Data[0], prob1Data[1]]
|
|
211
|
+
conv5_2[i] = [conv5_2Data[0], conv5_2Data[1], conv5_2Data[2], conv5_2Data[3]]
|
|
212
|
+
}
|
|
213
|
+
|
|
214
|
+
return (prob1, conv5_2)
|
|
215
|
+
} catch {
|
|
216
|
+
print("RNet forward error: \(error)")
|
|
217
|
+
return nil
|
|
218
|
+
}
|
|
219
|
+
}
|
|
220
|
+
|
|
221
|
+
// MARK: - ONet
|
|
222
|
+
|
|
223
|
+
private func oNet(image: UIImage, boxes: [Box]) -> [Box] {
|
|
224
|
+
guard let interpreter = onetInterpreter else { return boxes }
|
|
225
|
+
|
|
226
|
+
let num = boxes.count
|
|
227
|
+
var oNetIn = [[Float]]()
|
|
228
|
+
|
|
229
|
+
for i in 0..<num {
|
|
230
|
+
guard let cropped = crop(image: image, with: boxes[i], andScale: 48) else { continue }
|
|
231
|
+
if let normalized = normalizeImage(image: cropped, w: 48, h: 48) {
|
|
232
|
+
let transposed = transpose(data: normalized, h: 48, w: 48, c: 3)
|
|
233
|
+
oNetIn.append(transposed)
|
|
234
|
+
}
|
|
235
|
+
}
|
|
236
|
+
|
|
237
|
+
guard let (prob1, conv6_2, conv6_3) = oNetForward(interpreter: interpreter, input: oNetIn, num: num) else {
|
|
238
|
+
return boxes
|
|
239
|
+
}
|
|
240
|
+
|
|
241
|
+
var result = boxes
|
|
242
|
+
for i in 0..<num {
|
|
243
|
+
result[i].score = prob1[i][1]
|
|
244
|
+
for j in 0..<4 {
|
|
245
|
+
result[i].bbr[j] = conv6_2[i][j]
|
|
246
|
+
}
|
|
247
|
+
for j in 0..<5 {
|
|
248
|
+
let x = Int(round(CGFloat(result[i].left()) + CGFloat(conv6_3[i][j]) * CGFloat(result[i].width())))
|
|
249
|
+
let y = Int(round(CGFloat(result[i].top()) + CGFloat(conv6_3[i][j + 5]) * CGFloat(result[i].height())))
|
|
250
|
+
result[i].landmark[j] = CGPoint(x: x, y: y)
|
|
251
|
+
}
|
|
252
|
+
}
|
|
253
|
+
|
|
254
|
+
for i in 0..<result.count {
|
|
255
|
+
if result[i].score < Self.oNetThreshold {
|
|
256
|
+
result[i].deleted = true
|
|
257
|
+
}
|
|
258
|
+
}
|
|
259
|
+
|
|
260
|
+
boundingBoxRegression(boxes: &result)
|
|
261
|
+
nms(boxes: &result, threshold: 0.7, method: "Min")
|
|
262
|
+
|
|
263
|
+
return updateBoxes(boxes: result)
|
|
264
|
+
}
|
|
265
|
+
|
|
266
|
+
private func oNetForward(interpreter: Interpreter, input: [[Float]], num: Int) -> ([[Float]], [[Float]], [[Float]])? {
|
|
267
|
+
var prob1 = [[Float]](repeating: [Float](repeating: 0, count: 2), count: num)
|
|
268
|
+
var conv6_2 = [[Float]](repeating: [Float](repeating: 0, count: 4), count: num)
|
|
269
|
+
var conv6_3 = [[Float]](repeating: [Float](repeating: 0, count: 10), count: num)
|
|
270
|
+
|
|
271
|
+
do {
|
|
272
|
+
try interpreter.allocateTensors()
|
|
273
|
+
|
|
274
|
+
for i in 0..<num {
|
|
275
|
+
let inputData = Data(bytes: input[i], count: 48 * 48 * 3 * MemoryLayout<Float>.size)
|
|
276
|
+
try interpreter.copy(inputData, toInputAt: 0)
|
|
277
|
+
try interpreter.invoke()
|
|
278
|
+
|
|
279
|
+
guard let prob1Data = try interpreter.output(at: 0) as? [Float],
|
|
280
|
+
let conv6_2Data = try interpreter.output(at: 1) as? [Float],
|
|
281
|
+
let conv6_3Data = try interpreter.output(at: 2) as? [Float] else {
|
|
282
|
+
continue
|
|
283
|
+
}
|
|
284
|
+
|
|
285
|
+
prob1[i] = [prob1Data[0], prob1Data[1]]
|
|
286
|
+
conv6_2[i] = [conv6_2Data[0], conv6_2Data[1], conv6_2Data[2], conv6_2Data[3]]
|
|
287
|
+
for j in 0..<10 {
|
|
288
|
+
conv6_3[i][j] = conv6_3Data[j]
|
|
289
|
+
}
|
|
290
|
+
}
|
|
291
|
+
|
|
292
|
+
return (prob1, conv6_2, conv6_3)
|
|
293
|
+
} catch {
|
|
294
|
+
print("ONet forward error: \(error)")
|
|
295
|
+
return nil
|
|
296
|
+
}
|
|
297
|
+
}
|
|
298
|
+
|
|
299
|
+
// MARK: - Utility functions
|
|
300
|
+
|
|
301
|
+
private func nms(boxes: inout [Box], threshold: Float, method: String) {
|
|
302
|
+
for i in 0..<boxes.count {
|
|
303
|
+
let box = boxes[i]
|
|
304
|
+
if !box.deleted {
|
|
305
|
+
for j in (i + 1)..<boxes.count {
|
|
306
|
+
let box2 = boxes[j]
|
|
307
|
+
if !box2.deleted {
|
|
308
|
+
let x1 = CGFloat(max(box.box[0], box2.box[0]))
|
|
309
|
+
let y1 = CGFloat(max(box.box[1], box2.box[1]))
|
|
310
|
+
let x2 = CGFloat(min(box.box[2], box2.box[2]))
|
|
311
|
+
let y2 = CGFloat(min(box.box[3], box2.box[3]))
|
|
312
|
+
|
|
313
|
+
if x2 < x1 || y2 < y1 { continue }
|
|
314
|
+
|
|
315
|
+
let areaIoU = Int((x2 - x1 + 1) * (y2 - y1 + 1))
|
|
316
|
+
var iou: Float = 0
|
|
317
|
+
|
|
318
|
+
if method == "Union" {
|
|
319
|
+
iou = Float(areaIoU) / Float(box.area() + box2.area() - areaIoU)
|
|
320
|
+
} else if method == "Min" {
|
|
321
|
+
iou = Float(areaIoU) / Float(min(box.area(), box2.area()))
|
|
322
|
+
}
|
|
323
|
+
|
|
324
|
+
if iou >= threshold {
|
|
325
|
+
if box.score > box2.score {
|
|
326
|
+
box2.deleted = true
|
|
327
|
+
} else {
|
|
328
|
+
box.deleted = true
|
|
329
|
+
}
|
|
330
|
+
}
|
|
331
|
+
}
|
|
332
|
+
}
|
|
333
|
+
}
|
|
334
|
+
}
|
|
335
|
+
}
|
|
336
|
+
|
|
337
|
+
private func boundingBoxRegression(boxes: inout [Box]) {
|
|
338
|
+
for i in 0..<boxes.count {
|
|
339
|
+
boxes[i].calibrate()
|
|
340
|
+
}
|
|
341
|
+
}
|
|
342
|
+
|
|
343
|
+
private func updateBoxes(boxes: [Box]) -> [Box] {
|
|
344
|
+
return boxes.filter { !$0.deleted }
|
|
345
|
+
}
|
|
346
|
+
|
|
347
|
+
private func crop(image: UIImage, with box: Box, andScale size: Int) -> UIImage? {
|
|
348
|
+
let rect = box.transform2Rect()
|
|
349
|
+
guard let cropped = Tools.cropImage(image: image, toRect: rect) else { return nil }
|
|
350
|
+
return Tools.scaleImage(image: cropped, toSize: CGSize(width: size, height: size))
|
|
351
|
+
}
|
|
352
|
+
|
|
353
|
+
private func normalizeImage(image: UIImage, w: Int, h: Int) -> [Float]? {
|
|
354
|
+
guard let image_data = Tools.convertUIImageToBitmapRGBA8(image: image) else { return nil }
|
|
355
|
+
|
|
356
|
+
let inputMean: Float = 127.5
|
|
357
|
+
let inputStd: Float = 128.0
|
|
358
|
+
var floats = [Float](repeating: 0, count: w * h * 3)
|
|
359
|
+
|
|
360
|
+
var k = 0
|
|
361
|
+
let size = w * h * 4
|
|
362
|
+
for j in 0..<size {
|
|
363
|
+
if j % 4 == 3 { continue }
|
|
364
|
+
let val = Float(image_data[j])
|
|
365
|
+
let r = ((val > 16) & 0xFF) - inputMean
|
|
366
|
+
let g = ((val > 8) & 0xFF) - inputMean
|
|
367
|
+
let b = (val & 0xFF) - inputMean
|
|
368
|
+
floats[k] = r / inputStd
|
|
369
|
+
k += 1
|
|
370
|
+
}
|
|
371
|
+
image_data.deallocate()
|
|
372
|
+
|
|
373
|
+
return floats
|
|
374
|
+
}
|
|
375
|
+
|
|
376
|
+
private func transpose(data: [Float], h: Int, w: Int, c: Int) -> [Float] {
|
|
377
|
+
var result = [Float](repeating: 0, count: w * h * c)
|
|
378
|
+
for i in 0..<h {
|
|
379
|
+
for j in 0..<w {
|
|
380
|
+
for z in 0..<c {
|
|
381
|
+
result[(j * h + i) * c + z] = data[(i * w + j) * c + z]
|
|
382
|
+
}
|
|
383
|
+
}
|
|
384
|
+
}
|
|
385
|
+
return result
|
|
386
|
+
}
|
|
387
|
+
|
|
388
|
+
private func transpose2D(data: [[Float]], rows: Int, cols: Int) -> [[Float]] {
|
|
389
|
+
var result = [[Float]](repeating: [Float](repeating: 0, count: cols), count: rows)
|
|
390
|
+
for i in 0..<rows {
|
|
391
|
+
for j in 0..<cols {
|
|
392
|
+
result[j][i] = data[i][j]
|
|
393
|
+
}
|
|
394
|
+
}
|
|
395
|
+
return result
|
|
396
|
+
}
|
|
397
|
+
|
|
398
|
+
private func reshapeTo2D(data: [Float], rows: Int, cols: Int) -> [[Float]] {
|
|
399
|
+
var result = [[Float]](repeating: [Float](repeating: 0, count: cols), count: rows)
|
|
400
|
+
for i in 0..<rows {
|
|
401
|
+
for j in 0..<cols {
|
|
402
|
+
result[i][j] = data[i * cols + j]
|
|
403
|
+
}
|
|
404
|
+
}
|
|
405
|
+
return result
|
|
406
|
+
}
|
|
407
|
+
}
|
|
@@ -0,0 +1,103 @@
|
|
|
1
|
+
import Foundation
|
|
2
|
+
import UIKit
|
|
3
|
+
|
|
4
|
+
public class Tools {
    /// Returns the bundle path of a packaged resource.
    /// Crashes if the resource is missing — that is a packaging error, not a
    /// recoverable runtime condition.
    public static func filePathForResourceName(name: String, extension ext: String) -> String {
        let bundle = Bundle(for: FaceAntiSpoofingPlugin.self)
        guard let resourcePath = bundle.path(forResource: name, ofType: ext) else {
            fatalError("Resource not found: \(name).\(ext)")
        }
        return resourcePath
    }

    /// Renders `image` into an RGBA8 bitmap and returns a caller-owned buffer
    /// of width * height * 4 bytes. The caller must `deallocate()` it.
    public static func convertUIImageToBitmapRGBA8(image: UIImage) -> UnsafeMutablePointer<UInt8>? {
        guard let cgImage = image.cgImage else {
            return nil
        }

        let width = cgImage.width
        let height = cgImage.height
        let byteCount = width * height * 4

        var bitmapInfo = CGImageAlphaInfo.premultipliedLast.rawValue
        bitmapInfo |= CGBitmapInfo.byteOrder32Big.rawValue

        guard let context = CGContext(
            data: nil,
            width: width,
            height: height,
            bitsPerComponent: 8,
            bytesPerRow: width * 4,
            space: CGColorSpaceCreateDeviceRGB(),
            bitmapInfo: bitmapInfo
        ) else {
            return nil
        }

        context.draw(cgImage, in: CGRect(x: 0, y: 0, width: width, height: height))

        guard let data = context.data else {
            return nil
        }

        // Copy out of the context-owned buffer. The original returned a
        // pointer into `context.data`, which dangles as soon as the context is
        // released — and callers then deallocate() memory they do not own.
        // Mirrors the copy already done by convertUIImageToBitmapGray below.
        let buffer = UnsafeMutablePointer<UInt8>.allocate(capacity: byteCount)
        memcpy(buffer, data, byteCount)
        return buffer
    }

    /// Renders `image` into an 8-bit grayscale bitmap and returns a
    /// caller-owned buffer of width * height bytes. Caller must deallocate.
    public static func convertUIImageToBitmapGray(image: UIImage) -> UnsafeMutablePointer<UInt8>? {
        guard let cgImage = image.cgImage else {
            return nil
        }

        let width = cgImage.width
        let height = cgImage.height

        guard let context = CGContext(
            data: nil,
            width: width,
            height: height,
            bitsPerComponent: 8,
            bytesPerRow: width,
            space: CGColorSpaceCreateDeviceGray(),
            bitmapInfo: 0
        ) else {
            return nil
        }

        context.draw(cgImage, in: CGRect(x: 0, y: 0, width: width, height: height))

        guard let data = context.data else {
            return nil
        }

        // Copy into a caller-owned allocation (context buffer dies with the
        // context).
        let result = UnsafeMutablePointer<UInt8>.allocate(capacity: width * height)
        memcpy(result, data, width * height)

        return result
    }

    /// Redraws `image` at exactly `size` points (scale factor 1).
    public static func scaleImage(image: UIImage, toSize size: CGSize) -> UIImage? {
        UIGraphicsBeginImageContextWithOptions(size, false, 1.0)
        defer { UIGraphicsEndImageContext() }
        image.draw(in: CGRect(origin: .zero, size: size))
        return UIGraphicsGetImageFromCurrentImageContext()
    }

    /// Scales both image dimensions by `scale`.
    public static func scaleImage(image: UIImage, toScale scale: CGFloat) -> UIImage? {
        let newSize = CGSize(width: image.size.width * scale, height: image.size.height * scale)
        return scaleImage(image: image, toSize: newSize)
    }

    /// Crops `image` to `rect`.
    /// NOTE(review): `CGImage.cropping(to:)` works in pixel coordinates; if a
    /// UIImage ever arrives with scale != 1 the rect would be off — confirm
    /// callers always supply scale-1 images.
    public static func cropImage(image: UIImage, toRect rect: CGRect) -> UIImage? {
        guard let cgImage = image.cgImage?.cropping(to: rect) else {
            return nil
        }
        return UIImage(cgImage: cgImage)
    }
}
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
import XCTest
|
|
2
|
+
@testable import FaceAntiSpoofingPlugin
|
|
3
|
+
|
|
4
|
+
class FaceAntiSpoofingTests: XCTestCase {
    /// The echo implementation must return its input unchanged.
    func testEcho() {
        let implementation = FaceAntiSpoofing()
        let input = "Hello, World!"

        let echoed = implementation.echo(input)

        XCTAssertEqual(echoed, input)
    }
}
|
package/package.json
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "capacitor-plugin-faceantispoofing",
|
|
3
|
+
"version": "0.0.1",
|
|
4
|
+
"description": "Capacitor plugin for Passive Liveness Detection and Face Anti-Spoofing",
|
|
5
|
+
"main": "dist/plugin.cjs.js",
|
|
6
|
+
"module": "dist/esm/index.js",
|
|
7
|
+
"types": "dist/esm/index.d.ts",
|
|
8
|
+
"unpkg": "dist/plugin.js",
|
|
9
|
+
"files": [
|
|
10
|
+
"android/src/main/",
|
|
11
|
+
"android/build.gradle",
|
|
12
|
+
"dist/",
|
|
13
|
+
"ios/Sources",
|
|
14
|
+
"ios/Tests",
|
|
15
|
+
"Package.swift",
|
|
16
|
+
"CapacitorPluginFaceantispoofing.podspec"
|
|
17
|
+
],
|
|
18
|
+
"author": "Asep Herman Nursalam",
|
|
19
|
+
"license": "MIT",
|
|
20
|
+
"repository": {
|
|
21
|
+
"type": "git",
|
|
22
|
+
"url": "git+https://github.com/asephermann/capacitor-plugin-faceantispoofing.git"
|
|
23
|
+
},
|
|
24
|
+
"bugs": {
|
|
25
|
+
"url": "https://github.com/asephermann/capacitor-plugin-faceantispoofing/issues"
|
|
26
|
+
},
|
|
27
|
+
"keywords": [
|
|
28
|
+
"capacitor",
|
|
29
|
+
"plugin",
|
|
30
|
+
"native"
|
|
31
|
+
],
|
|
32
|
+
"scripts": {
|
|
33
|
+
"verify": "npm run verify:ios && npm run verify:android && npm run verify:web",
|
|
34
|
+
"verify:ios": "xcodebuild -scheme CapacitorPluginFaceantispoofing -destination generic/platform=iOS",
|
|
35
|
+
"verify:android": "cd android && ./gradlew clean build test && cd ..",
|
|
36
|
+
"verify:web": "npm run build",
|
|
37
|
+
"lint": "npm run eslint && npm run prettier -- --check && npm run swiftlint -- lint",
|
|
38
|
+
"fmt": "npm run eslint -- --fix && npm run prettier -- --write && npm run swiftlint -- --fix --format",
|
|
39
|
+
"eslint": "eslint . --ext ts",
|
|
40
|
+
"prettier": "prettier \"**/*.{css,html,ts,js,java}\" --plugin=prettier-plugin-java",
|
|
41
|
+
"swiftlint": "node-swiftlint",
|
|
42
|
+
"docgen": "docgen --api FaceAntiSpoofingPlugin --output-readme README.md --output-json dist/docs.json",
|
|
43
|
+
"build": "npm run clean && npm run docgen && tsc && rollup -c rollup.config.mjs",
|
|
44
|
+
"clean": "rimraf ./dist",
|
|
45
|
+
"watch": "tsc --watch",
|
|
46
|
+
"prepublishOnly": "npm run build"
|
|
47
|
+
},
|
|
48
|
+
"devDependencies": {
|
|
49
|
+
"@capacitor/android": "^8.0.0",
|
|
50
|
+
"@capacitor/core": "^8.0.0",
|
|
51
|
+
"@capacitor/docgen": "^0.3.1",
|
|
52
|
+
"@capacitor/ios": "^8.0.0",
|
|
53
|
+
"@ionic/eslint-config": "^0.4.0",
|
|
54
|
+
"@ionic/prettier-config": "^4.0.0",
|
|
55
|
+
"@ionic/swiftlint-config": "^2.0.0",
|
|
56
|
+
"eslint": "^8.57.1",
|
|
57
|
+
"prettier": "^3.6.2",
|
|
58
|
+
"prettier-plugin-java": "^2.7.7",
|
|
59
|
+
"rimraf": "^6.1.0",
|
|
60
|
+
"rollup": "^4.53.2",
|
|
61
|
+
"swiftlint": "^2.0.0",
|
|
62
|
+
"typescript": "^5.9.3"
|
|
63
|
+
},
|
|
64
|
+
"peerDependencies": {
|
|
65
|
+
"@capacitor/core": ">=8.0.0"
|
|
66
|
+
},
|
|
67
|
+
"prettier": "@ionic/prettier-config",
|
|
68
|
+
"swiftlint": "@ionic/swiftlint-config",
|
|
69
|
+
"eslintConfig": {
|
|
70
|
+
"extends": "@ionic/eslint-config/recommended"
|
|
71
|
+
},
|
|
72
|
+
"capacitor": {
|
|
73
|
+
"ios": {
|
|
74
|
+
"src": "ios"
|
|
75
|
+
},
|
|
76
|
+
"android": {
|
|
77
|
+
"src": "android"
|
|
78
|
+
}
|
|
79
|
+
}
|
|
80
|
+
}
|