@camstack/addon-pipeline 0.1.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/audio-analyzer/index.js +723 -0
- package/dist/audio-analyzer/index.js.map +1 -0
- package/dist/audio-analyzer/index.mjs +683 -0
- package/dist/audio-analyzer/index.mjs.map +1 -0
- package/dist/audio-codec-nodeav/index.js +467 -0
- package/dist/audio-codec-nodeav/index.js.map +1 -0
- package/dist/audio-codec-nodeav/index.mjs +467 -0
- package/dist/audio-codec-nodeav/index.mjs.map +1 -0
- package/dist/decoder-nodeav/index.js +929 -0
- package/dist/decoder-nodeav/index.js.map +1 -0
- package/dist/decoder-nodeav/index.mjs +907 -0
- package/dist/decoder-nodeav/index.mjs.map +1 -0
- package/dist/detection-pipeline/index.js +5766 -0
- package/dist/detection-pipeline/index.js.map +1 -0
- package/dist/detection-pipeline/index.mjs +5725 -0
- package/dist/detection-pipeline/index.mjs.map +1 -0
- package/dist/index-D_cl0Qqb.js +5791 -0
- package/dist/index-D_cl0Qqb.js.map +1 -0
- package/dist/index-UbcdLS7a.mjs +5790 -0
- package/dist/index-UbcdLS7a.mjs.map +1 -0
- package/dist/motion-wasm/index.js +476 -0
- package/dist/motion-wasm/index.js.map +1 -0
- package/dist/motion-wasm/index.mjs +454 -0
- package/dist/motion-wasm/index.mjs.map +1 -0
- package/dist/pipeline-runner/index.js +1669 -0
- package/dist/pipeline-runner/index.js.map +1 -0
- package/dist/pipeline-runner/index.mjs +1647 -0
- package/dist/pipeline-runner/index.mjs.map +1 -0
- package/dist/stream-broker/@mf-types/compiled-types/stream-broker/widgets/StreamBrokerPanel.d.ts +21 -0
- package/dist/stream-broker/@mf-types/compiled-types/stream-broker/widgets/index.d.ts +13 -0
- package/dist/stream-broker/@mf-types/widgets.d.ts +2 -0
- package/dist/stream-broker/@mf-types.d.ts +3 -0
- package/dist/stream-broker/@mf-types.zip +0 -0
- package/dist/stream-broker/__mfe_internal__addon_stream_broker_widgets__loadShare___mf_0_camstack_mf_1_sdk__loadShare__.mjs-h5aXOPSA.mjs +12 -0
- package/dist/stream-broker/__mfe_internal__addon_stream_broker_widgets__loadShare___mf_0_camstack_mf_1_types__loadShare__.mjs-C-URP6DW.mjs +17 -0
- package/dist/stream-broker/__mfe_internal__addon_stream_broker_widgets__loadShare___mf_0_camstack_mf_1_ui_mf_2_library__loadShare__.mjs-69eEmXwl.mjs +20 -0
- package/dist/stream-broker/__mfe_internal__addon_stream_broker_widgets__loadShare___mf_0_tanstack_mf_1_react_mf_2_query__loadShare__.mjs-U1EUeEPs.mjs +104 -0
- package/dist/stream-broker/__mfe_internal__addon_stream_broker_widgets__loadShare___mf_0_trpc_mf_1_client__loadShare__.mjs-DeouEaSs.mjs +85 -0
- package/dist/stream-broker/__mfe_internal__addon_stream_broker_widgets__loadShare___mf_0_trpc_mf_1_react_mf_2_query__loadShare__.mjs-DHUwjbb9.mjs +62 -0
- package/dist/stream-broker/__mfe_internal__addon_stream_broker_widgets__loadShare__react__loadShare__.mjs-DePVYdid.mjs +85 -0
- package/dist/stream-broker/__mfe_internal__addon_stream_broker_widgets__loadShare__react__loadShare__.mjs_commonjs-proxy-CBlCGyx5.mjs +29 -0
- package/dist/stream-broker/__mfe_internal__addon_stream_broker_widgets__loadShare__react_mf_1_jsx_mf_2_runtime__loadShare__.mjs-gBEZsQrp.mjs +36 -0
- package/dist/stream-broker/__mfe_internal__addon_stream_broker_widgets__loadShare__react_mf_2_dom__loadShare__.mjs-DYEKzzY-.mjs +45 -0
- package/dist/stream-broker/__mfe_internal__addon_stream_broker_widgets__loadShare__react_mf_2_dom__loadShare__.mjs_commonjs-proxy-DZchZKbW.mjs +6 -0
- package/dist/stream-broker/__mfe_internal__addon_stream_broker_widgets__loadShare__react_mf_2_dom_mf_1_client__loadShare__.mjs-DICOtMTl.mjs +34 -0
- package/dist/stream-broker/_stub.js +752 -0
- package/dist/stream-broker/_virtual_mf-localSharedImportMap___mfe_internal__addon_stream_broker_widgets-D6o1e2ed.mjs +156 -0
- package/dist/stream-broker/client-BK73l2KT.mjs +10063 -0
- package/dist/stream-broker/getErrorShape-BPSzUA7W-TlK8ipWe.mjs +211 -0
- package/dist/stream-broker/hostInit-RCeroTVY.mjs +168 -0
- package/dist/stream-broker/index-BYclbfM0.mjs +15806 -0
- package/dist/stream-broker/index-BhXZh4lQ.mjs +1617 -0
- package/dist/stream-broker/index-BxHaCH3N.mjs +725 -0
- package/dist/stream-broker/index-D2-K2YJ7.mjs +19268 -0
- package/dist/stream-broker/index-IUYKHbxX.mjs +185 -0
- package/dist/stream-broker/index-Ss9m7Jum.mjs +2603 -0
- package/dist/stream-broker/index-ns1fRD30.mjs +435 -0
- package/dist/stream-broker/index-xncRG7-x.mjs +2713 -0
- package/dist/stream-broker/index.js +11171 -0
- package/dist/stream-broker/index.js.map +1 -0
- package/dist/stream-broker/index.mjs +11130 -0
- package/dist/stream-broker/index.mjs.map +1 -0
- package/dist/stream-broker/jsx-runtime-ZdY5pIZz.mjs +55 -0
- package/dist/stream-broker/remoteEntry.js +2973 -0
- package/dist/stream-broker/virtualExposes-pCd777Rp.mjs +42 -0
- package/package.json +258 -0
- package/python/__pycache__/inference_pool.cpython-313.pyc +0 -0
- package/python/inference_pool.py +1088 -0
- package/python/postprocessors/__init__.py +24 -0
- package/python/postprocessors/__pycache__/__init__.cpython-312.pyc +0 -0
- package/python/postprocessors/__pycache__/__init__.cpython-313.pyc +0 -0
- package/python/postprocessors/__pycache__/_safety.cpython-313.pyc +0 -0
- package/python/postprocessors/__pycache__/arcface.cpython-312.pyc +0 -0
- package/python/postprocessors/__pycache__/arcface.cpython-313.pyc +0 -0
- package/python/postprocessors/__pycache__/ctc.cpython-312.pyc +0 -0
- package/python/postprocessors/__pycache__/ctc.cpython-313.pyc +0 -0
- package/python/postprocessors/__pycache__/saliency.cpython-312.pyc +0 -0
- package/python/postprocessors/__pycache__/saliency.cpython-313.pyc +0 -0
- package/python/postprocessors/__pycache__/scrfd.cpython-312.pyc +0 -0
- package/python/postprocessors/__pycache__/scrfd.cpython-313.pyc +0 -0
- package/python/postprocessors/__pycache__/softmax.cpython-312.pyc +0 -0
- package/python/postprocessors/__pycache__/softmax.cpython-313.pyc +0 -0
- package/python/postprocessors/__pycache__/yamnet.cpython-312.pyc +0 -0
- package/python/postprocessors/__pycache__/yamnet.cpython-313.pyc +0 -0
- package/python/postprocessors/__pycache__/yolo.cpython-312.pyc +0 -0
- package/python/postprocessors/__pycache__/yolo.cpython-313.pyc +0 -0
- package/python/postprocessors/__pycache__/yolo_seg.cpython-312.pyc +0 -0
- package/python/postprocessors/__pycache__/yolo_seg.cpython-313.pyc +0 -0
- package/python/postprocessors/arcface.py +31 -0
- package/python/postprocessors/ctc.py +68 -0
- package/python/postprocessors/saliency.py +44 -0
- package/python/postprocessors/scrfd.py +212 -0
- package/python/postprocessors/softmax.py +43 -0
- package/python/postprocessors/yamnet.py +41 -0
- package/python/postprocessors/yolo.py +278 -0
- package/python/postprocessors/yolo_seg.py +247 -0
- package/python/requirements-coreml.txt +4 -0
- package/python/requirements-onnxruntime.txt +3 -0
- package/python/requirements-openvino.txt +3 -0
- package/python/requirements.txt +9 -0
- package/swift/audio-analyzer/apple-sound-classifier +0 -0
- package/swift/audio-analyzer/apple-sound-classifier.swift +213 -0
- package/swift/detection-pipeline/apple-sound-classifier +0 -0
- package/swift/detection-pipeline/apple-sound-classifier.swift +196 -0
- package/wasm/assembly/index.ts +290 -0
- package/wasm/assembly/tsconfig.json +4 -0
- package/wasm/motion.wasm +0 -0
|
@@ -0,0 +1,213 @@
|
|
|
1
|
+
#!/usr/bin/env swift
//
// apple-sound-classifier.swift
//
// Lightweight Swift CLI that reads raw Float32 audio from stdin,
// runs Apple SoundAnalysis (SNClassifySoundRequest), and writes
// JSON classification results to stdout.
//
// Protocol (binary IPC, same as PythonInferenceEngine):
// stdin → [4 bytes LE uint32 length][Float32 audio samples]
// stdout → [4 bytes LE uint32 length][JSON bytes]
//
// The analyzer persists across chunks, accumulating audio for the
// SoundAnalysis time window (~1.5s). Results are emitted when
// the analyzer has enough data to classify.
//
// Build: swiftc -O -o apple-sound-classifier apple-sound-classifier.swift
// Usage: ./apple-sound-classifier --sample-rate=16000

import Foundation
import SoundAnalysis

// ---------------------------------------------------------------------------
// Argument parsing
// ---------------------------------------------------------------------------

// Defaults; each may be overridden by a --flag=value argument below.
var sampleRate: Double = 16000   // Hz of the incoming mono Float32 stream
var topK: Int = 10               // max classifications returned per result
var minScore: Double = 0.05      // confidence floor for reported classes

// Parse --key=value flags. Unknown flags are ignored; unparsable values
// silently fall back to the defaults above.
for arg in CommandLine.arguments.dropFirst() {
    if arg.hasPrefix("--sample-rate=") {
        sampleRate = Double(arg.split(separator: "=").last ?? "16000") ?? 16000
    } else if arg.hasPrefix("--top-k=") {
        topK = Int(arg.split(separator: "=").last ?? "10") ?? 10
    } else if arg.hasPrefix("--min-score=") {
        minScore = Double(arg.split(separator: "=").last ?? "0.05") ?? 0.05
    }
}
|
|
40
|
+
|
|
41
|
+
// ---------------------------------------------------------------------------
|
|
42
|
+
// SoundAnalysis setup
|
|
43
|
+
// ---------------------------------------------------------------------------
|
|
44
|
+
|
|
45
|
+
/// Observer for SNClassifySoundRequest results on the persistent analyzer.
///
/// SoundAnalysis delivers `request(_:didProduce:)` / error callbacks on an
/// internal serial queue while the main loop calls `startTiming()` and
/// `consumeResult()` from the main thread, so all mutable state is guarded
/// by `lock`. (The original accessed `results`/`gotResult`/`inferenceMs`
/// from both threads with no synchronization — a data race.)
class AudioClassifier: NSObject, SNResultsObserving {
    private var results: [[String: Any]] = []
    private var inferenceMs: Double = 0
    private var gotResult = false
    private var startTime: CFAbsoluteTime = 0
    // Guards results / inferenceMs / gotResult across the SoundAnalysis
    // queue and the main polling thread.
    private let lock = NSLock()

    /// Clear state left over from the previous chunk and start the latency clock.
    func startTiming() {
        lock.lock()
        defer { lock.unlock() }
        results = []
        inferenceMs = 0
        gotResult = false
        startTime = CFAbsoluteTimeGetCurrent()
    }

    /// Called on the SoundAnalysis queue whenever a classification window
    /// completes; keeps classes at or above the `minScore` floor.
    func request(_ request: SNRequest, didProduce result: SNResult) {
        guard let classResult = result as? SNClassificationResult else { return }
        lock.lock()
        defer { lock.unlock() }
        for classification in classResult.classifications {
            if classification.confidence >= minScore {
                results.append([
                    "className": classification.identifier,
                    // Round to 3 decimal places for compact JSON.
                    "score": round(classification.confidence * 1000) / 1000,
                ])
            }
        }
        gotResult = true
        inferenceMs = (CFAbsoluteTimeGetCurrent() - startTime) * 1000
    }

    /// Called on the SoundAnalysis queue on failure; logs to stderr and marks
    /// a (result-less) completion so the caller does not stall forever.
    func request(_ request: SNRequest, didFailWithError error: Error) {
        fputs("[apple-sound-classifier] Error: \(error.localizedDescription)\n", stderr)
        lock.lock()
        gotResult = true
        lock.unlock()
    }

    func requestDidComplete(_ request: SNRequest) {
        // Not used for streaming — results come via didProduce
    }

    /// Returns and clears the pending result — top-K classifications sorted by
    /// descending score plus the measured latency — or nil when the analyzer
    /// has not produced a window since the last call.
    func consumeResult() -> (classifications: [[String: Any]], inferenceMs: Double)? {
        lock.lock()
        defer { lock.unlock() }
        guard gotResult else { return nil }
        let sorted = results.sorted {
            ($0["score"] as? Double ?? 0) > ($1["score"] as? Double ?? 0)
        }
        let out = (Array(sorted.prefix(topK)), inferenceMs)
        results = []
        gotResult = false
        return out
    }
}
|
|
92
|
+
|
|
93
|
+
// ---------------------------------------------------------------------------
|
|
94
|
+
// Binary IPC: read [4B length][payload] from stdin
|
|
95
|
+
// ---------------------------------------------------------------------------
|
|
96
|
+
|
|
97
|
+
/// Reads one length-prefixed frame from stdin.
/// Wire format: 4-byte little-endian uint32 length, then the payload.
/// Returns nil on EOF or a truncated stream; an empty Data for length 0.
func readFrame() -> Data? {
    var header = [UInt8](repeating: 0, count: 4)
    guard fread(&header, 1, 4, stdin) == 4 else { return nil } // EOF

    // Assemble the little-endian length byte by byte.
    var byteCount: UInt32 = 0
    for (index, byte) in header.enumerated() {
        byteCount |= UInt32(byte) << (8 * index)
    }

    guard byteCount > 0 else { return Data() }

    var body = [UInt8](repeating: 0, count: Int(byteCount))
    guard fread(&body, 1, Int(byteCount), stdin) == Int(byteCount) else { return nil }
    return Data(body)
}
|
|
114
|
+
|
|
115
|
+
/// Writes one length-prefixed frame to stdout and flushes immediately so the
/// parent process can react without buffering delays.
/// Wire format mirrors readFrame: 4-byte LE uint32 length, then the payload.
func writeFrame(_ data: Data) {
    var header = UInt32(data.count).littleEndian
    _ = withUnsafeBytes(of: &header) { raw in
        fwrite(raw.baseAddress!, 1, 4, stdout)
    }
    _ = data.withUnsafeBytes { raw in
        fwrite(raw.baseAddress!, 1, data.count, stdout)
    }
    fflush(stdout)
}
|
|
125
|
+
|
|
126
|
+
// ---------------------------------------------------------------------------
|
|
127
|
+
// Main loop — persistent analyzer
|
|
128
|
+
// ---------------------------------------------------------------------------
|
|
129
|
+
|
|
130
|
+
let classifier = AudioClassifier()
// Mono Float32 at the configured rate — must match the raw samples the
// parent process streams over stdin.
let format = AVAudioFormat(standardFormatWithSampleRate: sampleRate, channels: 1)!

// Create persistent analyzer + request
let analyzer = SNAudioStreamAnalyzer(format: format)
// Monotonically increasing frame cursor so the analyzer can stitch
// successive chunks into one continuous stream.
var framePosition: AVAudioFramePosition = 0

do {
    let request = try SNClassifySoundRequest(classifierIdentifier: .version1)
    // ~1.5 s analysis window, 50% overlap between consecutive windows.
    request.windowDuration = CMTimeMakeWithSeconds(1.5, preferredTimescale: 48000)
    request.overlapFactor = 0.5
    try analyzer.add(request, withObserver: classifier)
} catch {
    fputs("[apple-sound-classifier] Setup error: \(error.localizedDescription)\n", stderr)
    exit(1)
}

// Signal ready
let readyMsg = try! JSONSerialization.data(withJSONObject: [
    "status": "ready",
    "backend": "apple-soundanalysis",
    "platform": "darwin",
])
writeFrame(readyMsg)

// Exactly one response frame is written for every request frame read, so
// the parent's request/response pairing stays in lockstep.
while let frameData = readFrame() {
    // Convert raw bytes to Float32 samples
    let float32Count = frameData.count / MemoryLayout<Float32>.size
    guard float32Count > 0 else {
        // Undersized payload — reply with an empty result rather than stalling.
        let errorResult = try! JSONSerialization.data(withJSONObject: [
            "classifications": [] as [Any],
            "inferenceMs": 0,
        ] as [String: Any])
        writeFrame(errorResult)
        continue
    }

    classifier.startTiming()

    let samples: [Float] = frameData.withUnsafeBytes { ptr in
        let floatPtr = ptr.bindMemory(to: Float32.self)
        return Array(floatPtr)
    }

    // Create audio buffer and feed to persistent analyzer
    let frameCount = AVAudioFrameCount(samples.count)
    guard let buffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: frameCount) else {
        fputs("[apple-sound-classifier] Failed to create audio buffer\n", stderr)
        let errorResult = try! JSONSerialization.data(withJSONObject: [
            "classifications": [] as [Any],
            "inferenceMs": 0,
        ] as [String: Any])
        writeFrame(errorResult)
        continue
    }
    buffer.frameLength = frameCount
    let channelData = buffer.floatChannelData![0]
    for i in 0..<Int(frameCount) {
        channelData[i] = samples[i]
    }

    // Feed chunk to the persistent analyzer (accumulates audio over time)
    analyzer.analyze(buffer, atAudioFramePosition: framePosition)
    framePosition += AVAudioFramePosition(frameCount)

    // Check if analyzer produced a result (it will once enough audio is accumulated)
    // Give it a brief moment to process on the internal queue
    // NOTE(review): the fixed 5 ms sleep is a heuristic handoff to the
    // SoundAnalysis queue; a result arriving after the sleep appears to be
    // cleared by the next startTiming() rather than reported — confirm intended.
    Thread.sleep(forTimeInterval: 0.005)

    if let (classifications, inferenceMs) = classifier.consumeResult() {
        let result = try! JSONSerialization.data(withJSONObject: [
            "classifications": classifications,
            // One decimal place of latency is enough for monitoring.
            "inferenceMs": round(inferenceMs * 10) / 10,
        ] as [String: Any])
        writeFrame(result)
    } else {
        // Not enough audio accumulated yet — return empty
        let result = try! JSONSerialization.data(withJSONObject: [
            "classifications": [] as [Any],
            "inferenceMs": 0,
        ] as [String: Any])
        writeFrame(result)
    }
}
|
|
Binary file
|
|
@@ -0,0 +1,196 @@
|
|
|
1
|
+
#!/usr/bin/env swift
//
// apple-sound-classifier.swift
//
// Lightweight Swift CLI that reads raw Float32 audio from stdin,
// runs Apple SoundAnalysis (SNClassifySoundRequest), and writes
// JSON classification results to stdout.
//
// Protocol (binary IPC, same as PythonInferenceEngine):
// stdin → [4 bytes LE uint32 length][Float32 audio samples]
// stdout → [4 bytes LE uint32 length][JSON bytes]
//
// The process stays alive and handles multiple requests.
// Send EOF on stdin to exit.
//
// Build: swiftc -O -o apple-sound-classifier apple-sound-classifier.swift
// Usage: ./apple-sound-classifier --sample-rate=16000

import Foundation
import SoundAnalysis

// ---------------------------------------------------------------------------
// Argument parsing
// ---------------------------------------------------------------------------

// Defaults; each may be overridden by a --flag=value argument below.
var sampleRate: Double = 16000   // Hz of the incoming mono Float32 stream
var topK: Int = 10               // max classifications returned per result
var minScore: Double = 0.05      // confidence floor for reported classes

// Parse --key=value flags. Unknown flags are ignored; unparsable values
// silently fall back to the defaults above.
for arg in CommandLine.arguments.dropFirst() {
    if arg.hasPrefix("--sample-rate=") {
        sampleRate = Double(arg.split(separator: "=").last ?? "16000") ?? 16000
    } else if arg.hasPrefix("--top-k=") {
        topK = Int(arg.split(separator: "=").last ?? "10") ?? 10
    } else if arg.hasPrefix("--min-score=") {
        minScore = Double(arg.split(separator: "=").last ?? "0.05") ?? 0.05
    }
}
|
|
39
|
+
|
|
40
|
+
// ---------------------------------------------------------------------------
|
|
41
|
+
// SoundAnalysis setup
|
|
42
|
+
// ---------------------------------------------------------------------------
|
|
43
|
+
|
|
44
|
+
/// Collects SNClassifySoundRequest output for one analysis pass.
///
/// `didProduce` appends every classification at or above the score floor;
/// the caller's `completeAnalysis()` eventually triggers
/// `requestDidComplete` (or `didFailWithError`), which signals the
/// semaphore that `waitForResult()` blocks on.
class AudioClassifier: NSObject, SNResultsObserving {
    private var results: [[String: Any]] = []
    private var inferenceMs: Double = 0
    private let semaphore = DispatchSemaphore(value: 0)
    private var startTime: CFAbsoluteTime = 0

    /// Clear state from the previous chunk and start the latency clock.
    func startTiming() {
        results = []
        inferenceMs = 0
        startTime = CFAbsoluteTimeGetCurrent()
    }

    func request(_ request: SNRequest, didProduce result: SNResult) {
        guard let classified = result as? SNClassificationResult else { return }
        // Keep only classes at or above the score floor, rounding the
        // confidence to 3 decimal places for compact JSON.
        let kept = classified.classifications
            .filter { $0.confidence >= minScore }
            .map { c -> [String: Any] in
                [
                    "className": c.identifier,
                    "score": round(c.confidence * 1000) / 1000,
                ]
            }
        results.append(contentsOf: kept)
    }

    func request(_ request: SNRequest, didFailWithError error: Error) {
        fputs("[apple-sound-classifier] Error: \(error.localizedDescription)\n", stderr)
        semaphore.signal()
    }

    func requestDidComplete(_ request: SNRequest) {
        inferenceMs = (CFAbsoluteTimeGetCurrent() - startTime) * 1000
        semaphore.signal()
    }

    /// Block until the current analysis pass finishes, then return the top-K
    /// classifications sorted by descending score plus the measured latency.
    func waitForResult() -> (classifications: [[String: Any]], inferenceMs: Double) {
        semaphore.wait()
        let ranked = results.sorted { lhs, rhs in
            (lhs["score"] as? Double ?? 0) > (rhs["score"] as? Double ?? 0)
        }
        return (Array(ranked.prefix(topK)), inferenceMs)
    }
}
|
|
87
|
+
|
|
88
|
+
// ---------------------------------------------------------------------------
|
|
89
|
+
// Binary IPC: read [4B length][payload] from stdin
|
|
90
|
+
// ---------------------------------------------------------------------------
|
|
91
|
+
|
|
92
|
+
/// Reads a single [4-byte LE uint32 length][payload] frame from stdin.
/// nil means EOF or a short read (truncated stream); length 0 yields Data().
func readFrame() -> Data? {
    var prefix = [UInt8](repeating: 0, count: 4)
    if fread(&prefix, 1, 4, stdin) != 4 { return nil } // EOF

    // Fold the four little-endian bytes (most significant last) into a UInt32.
    let length = prefix.reversed().reduce(UInt32(0)) { ($0 << 8) | UInt32($1) }

    if length == 0 { return Data() }

    var bytes = [UInt8](repeating: 0, count: Int(length))
    if fread(&bytes, 1, Int(length), stdin) != Int(length) { return nil }
    return Data(bytes)
}
|
|
109
|
+
|
|
110
|
+
/// Emits one frame on stdout: 4-byte little-endian length prefix, then the
/// payload, then an explicit flush so the parent sees it immediately.
func writeFrame(_ data: Data) {
    var prefix = UInt32(data.count).littleEndian
    _ = withUnsafeBytes(of: &prefix) { fwrite($0.baseAddress!, 1, 4, stdout) }
    _ = data.withUnsafeBytes { fwrite($0.baseAddress!, 1, data.count, stdout) }
    fflush(stdout)
}
|
|
120
|
+
|
|
121
|
+
// ---------------------------------------------------------------------------
|
|
122
|
+
// Main loop
|
|
123
|
+
// ---------------------------------------------------------------------------
|
|
124
|
+
|
|
125
|
+
let classifier = AudioClassifier()

// The analysis format never changes between chunks (mono Float32 at the
// configured sample rate), so build it once up front instead of once per
// request frame as before — AVAudioFormat construction is loop-invariant.
let format = AVAudioFormat(standardFormatWithSampleRate: sampleRate, channels: 1)!

// Signal ready
let readyMsg = try! JSONSerialization.data(withJSONObject: [
    "status": "ready",
    "backend": "apple-soundanalysis",
    "platform": "darwin",
])
writeFrame(readyMsg)

// One response frame is written for every request frame read, keeping the
// parent's request/response pairing in lockstep. EOF on stdin ends the loop.
while let frameData = readFrame() {
    classifier.startTiming()

    // Convert raw bytes to Float32 samples
    let float32Count = frameData.count / MemoryLayout<Float32>.size
    guard float32Count > 0 else {
        // Undersized payload — reply with an empty result rather than stalling.
        let errorResult = try! JSONSerialization.data(withJSONObject: [
            "classifications": [] as [Any],
            "inferenceMs": 0,
        ] as [String: Any])
        writeFrame(errorResult)
        continue
    }

    let samples: [Float] = frameData.withUnsafeBytes { ptr in
        let floatPtr = ptr.bindMemory(to: Float32.self)
        return Array(floatPtr)
    }

    // Copy the samples into an AVAudioPCMBuffer for SoundAnalysis.
    let frameCount = AVAudioFrameCount(samples.count)
    guard let buffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: frameCount) else {
        fputs("[apple-sound-classifier] Failed to create audio buffer\n", stderr)
        let errorResult = try! JSONSerialization.data(withJSONObject: [
            "classifications": [] as [Any],
            "inferenceMs": 0,
        ] as [String: Any])
        writeFrame(errorResult)
        continue
    }
    buffer.frameLength = frameCount
    let channelData = buffer.floatChannelData![0]
    for i in 0..<Int(frameCount) {
        channelData[i] = samples[i]
    }

    // Run SoundAnalysis: a fresh analyzer per chunk, completed synchronously
    // so waitForResult() is guaranteed a completion (or failure) signal.
    do {
        let analyzer = SNAudioStreamAnalyzer(format: format)
        let request = try SNClassifySoundRequest(classifierIdentifier: .version1)
        try analyzer.add(request, withObserver: classifier)
        analyzer.analyze(buffer, atAudioFramePosition: 0)
        analyzer.completeAnalysis()

        let (classifications, inferenceMs) = classifier.waitForResult()

        let result = try! JSONSerialization.data(withJSONObject: [
            "classifications": classifications,
            // One decimal place of latency is enough for monitoring.
            "inferenceMs": round(inferenceMs * 10) / 10,
        ] as [String: Any])
        writeFrame(result)
    } catch {
        fputs("[apple-sound-classifier] Analysis error: \(error.localizedDescription)\n", stderr)
        let errorResult = try! JSONSerialization.data(withJSONObject: [
            "classifications": [] as [Any],
            "inferenceMs": 0,
            "error": error.localizedDescription,
        ] as [String: Any])
        writeFrame(errorResult)
    }
}
|
|
@@ -0,0 +1,290 @@
|
|
|
1
|
+
/**
 * WASM Motion Detection — raw pointer approach (no managed arrays)
 *
 * All input/output via linear memory offsets. JS caller writes frames
 * to known offsets, calls detectMotion, reads results from known offsets.
 */

// Layout in linear memory (set by init):
// [0..SIZE) = prev frame (gray8)
// [SIZE..2*SIZE) = curr frame (gray8)
// [2*SIZE..3*SIZE) = blur buffer A
// [3*SIZE..4*SIZE) = blur buffer B
// [4*SIZE..5*SIZE) = diff mask
// [5*SIZE..5*SIZE+4*SIZE) = label map (i32)
// [after labels] = parent buf, remap buf, region output

const MAX_REGIONS: i32 = 256   // hard cap on connected-component labels
const REGION_STRIDE: i32 = 5   // i32 slots per region output record

// Frame dimensions and pixel count, set once by init().
let W: i32 = 0
let H: i32 = 0
let SIZE: i32 = 0

// Offsets into linear memory (all assigned in init)
let prevOff: i32 = 0   // previous gray8 frame
let currOff: i32 = 0   // current gray8 frame
let blurAOff: i32 = 0  // blur scratch buffer A
let blurBOff: i32 = 0  // blur scratch buffer B
let diffOff: i32 = 0   // binary motion mask (0/255)
let labelOff: i32 = 0  // per-pixel i32 label map
let parentOff: i32 = 0 // union-find parent table
let remapOff: i32 = 0  // raw label → compact id remap table
let regionOff: i32 = 0 // region output records
|
|
34
|
+
|
|
35
|
+
/** Call once with frame dimensions. Returns offset for prev frame. */
|
|
36
|
+
export function init(w: i32, h: i32): i32 {
|
|
37
|
+
W = w
|
|
38
|
+
H = h
|
|
39
|
+
SIZE = w * h
|
|
40
|
+
// All buffers in linear memory, sequentially
|
|
41
|
+
prevOff = 1024 // start after stack
|
|
42
|
+
currOff = prevOff + SIZE
|
|
43
|
+
blurAOff = currOff + SIZE
|
|
44
|
+
blurBOff = blurAOff + SIZE
|
|
45
|
+
diffOff = blurBOff + SIZE
|
|
46
|
+
labelOff = diffOff + SIZE
|
|
47
|
+
parentOff = labelOff + SIZE * 4 // i32 per pixel
|
|
48
|
+
remapOff = parentOff + (MAX_REGIONS + 1) * 4
|
|
49
|
+
regionOff = remapOff + (MAX_REGIONS + 1) * 4
|
|
50
|
+
|
|
51
|
+
// Ensure enough memory
|
|
52
|
+
const needed = regionOff + MAX_REGIONS * REGION_STRIDE * 4 + 1024
|
|
53
|
+
const pages = (needed + 65535) >> 16 // 64KB pages
|
|
54
|
+
const currentPages = memory.size()
|
|
55
|
+
if (pages > currentPages) {
|
|
56
|
+
memory.grow(pages - currentPages)
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
return prevOff
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
/** Get offset for writing current frame (valid after init()) */
export function getCurrOffset(): i32 { return currOff }
/** Get offset for writing previous frame (valid after init()) */
export function getPrevOffset(): i32 { return prevOff }
/** Get offset for reading region results (valid after init()) */
export function getRegionOffset(): i32 { return regionOff }
|
|
68
|
+
|
|
69
|
+
// ── Box blur (separable) ──
|
|
70
|
+
|
|
71
|
+
function boxBlurH(srcOff: i32, dstOff: i32, w: i32, h: i32, r: i32): void {
|
|
72
|
+
const inv: f32 = 1.0 / <f32>(r + r + 1)
|
|
73
|
+
for (let y: i32 = 0; y < h; y++) {
|
|
74
|
+
const row = y * w
|
|
75
|
+
let sum: i32 = 0
|
|
76
|
+
for (let k: i32 = -r; k <= r; k++) {
|
|
77
|
+
const cx = k < 0 ? 0 : (k >= w ? w - 1 : k)
|
|
78
|
+
sum += <i32>load<u8>(srcOff + row + cx)
|
|
79
|
+
}
|
|
80
|
+
store<u8>(dstOff + row, <u8>(<f32>sum * inv))
|
|
81
|
+
for (let x: i32 = 1; x < w; x++) {
|
|
82
|
+
const addX = x + r < w ? x + r : w - 1
|
|
83
|
+
const remX = x - r - 1 >= 0 ? x - r - 1 : 0
|
|
84
|
+
sum += <i32>load<u8>(srcOff + row + addX) - <i32>load<u8>(srcOff + row + remX)
|
|
85
|
+
store<u8>(dstOff + row + x, <u8>(<f32>sum * inv))
|
|
86
|
+
}
|
|
87
|
+
}
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
function boxBlurV(srcOff: i32, dstOff: i32, w: i32, h: i32, r: i32): void {
|
|
91
|
+
const inv: f32 = 1.0 / <f32>(r + r + 1)
|
|
92
|
+
for (let x: i32 = 0; x < w; x++) {
|
|
93
|
+
let sum: i32 = 0
|
|
94
|
+
for (let k: i32 = -r; k <= r; k++) {
|
|
95
|
+
const cy = k < 0 ? 0 : (k >= h ? h - 1 : k)
|
|
96
|
+
sum += <i32>load<u8>(srcOff + cy * w + x)
|
|
97
|
+
}
|
|
98
|
+
store<u8>(dstOff + x, <u8>(<f32>sum * inv))
|
|
99
|
+
for (let y: i32 = 1; y < h; y++) {
|
|
100
|
+
const addY = y + r < h ? y + r : h - 1
|
|
101
|
+
const remY = y - r - 1 >= 0 ? y - r - 1 : 0
|
|
102
|
+
sum += <i32>load<u8>(srcOff + addY * w + x) - <i32>load<u8>(srcOff + remY * w + x)
|
|
103
|
+
store<u8>(dstOff + y * w + x, <u8>(<f32>sum * inv))
|
|
104
|
+
}
|
|
105
|
+
}
|
|
106
|
+
}
|
|
107
|
+
|
|
108
|
+
/** Full separable box blur: horizontal pass into tmp, then vertical pass into dst. */
function blur(srcOff: i32, dstOff: i32, tmpOff: i32, w: i32, h: i32, r: i32): void {
  boxBlurH(srcOff, tmpOff, w, h, r)
  boxBlurV(tmpOff, dstOff, w, h, r)
}
|
|
112
|
+
|
|
113
|
+
// ── Diff + threshold ──
|
|
114
|
+
|
|
115
|
+
function diffThreshold(aOff: i32, bOff: i32, outOff: i32, size: i32, thresh: i32): i32 {
|
|
116
|
+
let count: i32 = 0
|
|
117
|
+
for (let i: i32 = 0; i < size; i++) {
|
|
118
|
+
const d = <i32>load<u8>(aOff + i) - <i32>load<u8>(bOff + i)
|
|
119
|
+
const ad = d < 0 ? -d : d
|
|
120
|
+
if (ad > thresh) {
|
|
121
|
+
store<u8>(outOff + i, 255)
|
|
122
|
+
count++
|
|
123
|
+
} else {
|
|
124
|
+
store<u8>(outOff + i, 0)
|
|
125
|
+
}
|
|
126
|
+
}
|
|
127
|
+
return count
|
|
128
|
+
}
|
|
129
|
+
|
|
130
|
+
// ── Dilate ──
|
|
131
|
+
|
|
132
|
+
/**
 * In-place binary dilation of the 0/255 mask at `off` by a (2r+1)x(2r+1)
 * box, done separably: rows first, then columns. Each axis makes two
 * sweeps — forward marks zeros within r AFTER a set pixel, backward marks
 * zeros within r BEFORE one — which together dilate by ±r on that axis.
 * `last` is only updated from pixels that were already set when the sweep
 * reached them, so a sweep never chains off pixels it has just written.
 */
function dilate(off: i32, w: i32, h: i32, r: i32): void {
  // Horizontal dilation, row by row.
  for (let y: i32 = 0; y < h; y++) {
    let last: i32 = -r - 1 // sentinel: no set pixel seen yet
    for (let x: i32 = 0; x < w; x++) {
      const idx = off + y * w + x
      if (load<u8>(idx)) { last = x }
      else if (x - last <= r) { store<u8>(idx, 255) }
    }
    last = w + r + 1 // sentinel for the reverse sweep
    for (let x: i32 = w - 1; x >= 0; x--) {
      const idx = off + y * w + x
      if (load<u8>(idx)) { last = x }
      else if (last - x <= r) { store<u8>(idx, 255) }
    }
  }
  // Vertical dilation, column by column (operates on the row-dilated mask).
  for (let x: i32 = 0; x < w; x++) {
    let last: i32 = -r - 1
    for (let y: i32 = 0; y < h; y++) {
      const idx = off + y * w + x
      if (load<u8>(idx)) { last = y }
      else if (y - last <= r) { store<u8>(idx, 255) }
    }
    last = h + r + 1
    for (let y: i32 = h - 1; y >= 0; y--) {
      const idx = off + y * w + x
      if (load<u8>(idx)) { last = y }
      else if (last - y <= r) { store<u8>(idx, 255) }
    }
  }
}
|
|
162
|
+
|
|
163
|
+
// ── Union-Find CCL ──
|
|
164
|
+
|
|
165
|
+
/**
 * Union-find lookup over the parent table at parentOff: returns the root
 * of x's set. Compresses as it walks — each step re-points `cur` at its
 * grandparent and jumps there, flattening the tree as a side effect.
 */
function ufFind(x: i32): i32 {
  let cur = x
  while (load<i32>(parentOff + (cur << 2)) !== cur) {
    // p = parent[parent[cur]] — cur's grandparent.
    const p = load<i32>(parentOff + (load<i32>(parentOff + (cur << 2)) << 2))
    store<i32>(parentOff + (cur << 2), p)
    cur = p
  }
  return cur
}
|
|
174
|
+
|
|
175
|
+
/** Merge the sets containing `a` and `b` by pointing b's root at a's root. */
function ufUnion(a: i32, b: i32): void {
  const rootA = ufFind(a)
  const rootB = ufFind(b)
  if (rootA === rootB) return
  store<i32>(parentOff + (rootB << 2), rootA)
}
/**
 * Two-pass connected-component labeling (4-connectivity) of the binary
 * mask at `maskOff`, writing dense i32 labels (1..N, 0 = background)
 * to `labelOff_`. Pass 1 assigns provisional labels and records
 * equivalences in the union-find forest; pass 2 collapses them into a
 * contiguous 1..N range. Returns N, the number of components.
 */
function labelCCL(maskOff: i32, labelOff_: i32, w: i32, h: i32): i32 {
  // Reset the union-find forest: every provisional label is its own root.
  for (let i: i32 = 0; i <= MAX_REGIONS; i++) store<i32>(parentOff + (i << 2), i)
  let fresh: i32 = 1 // next unused provisional label

  // Pass 1: raster scan, inheriting labels from the left/up neighbors.
  for (let y: i32 = 0; y < h; y++) {
    for (let x: i32 = 0; x < w; x++) {
      const idx = y * w + x
      const cell = labelOff_ + (idx << 2)
      if (!load<u8>(maskOff + idx)) {
        store<i32>(cell, 0)
        continue
      }
      const left: i32 = x > 0 ? load<i32>(labelOff_ + ((idx - 1) << 2)) : 0
      const up: i32 = y > 0 ? load<i32>(labelOff_ + (((y - 1) * w + x) << 2)) : 0

      if (left === 0 && up === 0) {
        // Isolated so far: mint a new provisional label; once the label
        // space is exhausted, overflow pixels all share the last slot.
        if (fresh < MAX_REGIONS) {
          store<i32>(cell, fresh)
          fresh++
        } else {
          store<i32>(cell, MAX_REGIONS - 1)
        }
      } else {
        // Prefer the left label; record an equivalence when both exist.
        const chosen = left > 0 ? left : up
        store<i32>(cell, chosen)
        if (left > 0 && up > 0 && left !== up) ufUnion(left, up)
      }
    }
  }

  // Pass 2a: map each provisional label's root to a dense id 1..distinct.
  let distinct: i32 = 0
  for (let i: i32 = 0; i <= MAX_REGIONS; i++) store<i32>(remapOff + (i << 2), 0)
  for (let i: i32 = 1; i < fresh; i++) {
    const root = ufFind(i)
    let dense = load<i32>(remapOff + (root << 2))
    if (dense === 0) {
      distinct++
      dense = distinct
      store<i32>(remapOff + (root << 2), dense)
    }
    store<i32>(remapOff + (i << 2), dense)
  }

  // Pass 2b: rewrite every foreground pixel through the dense map.
  const total = w * h
  for (let i: i32 = 0; i < total; i++) {
    const cell = labelOff_ + (i << 2)
    const provisional = load<i32>(cell)
    if (provisional > 0) store<i32>(cell, load<i32>(remapOff + (provisional << 2)))
  }
  return distinct
}
// ── Main ──
/**
 * Detect motion between frames at prevOff and currOff.
 * Caller must write gray pixels to getPrevOffset() and getCurrOffset() first.
 * Returns number of regions. Read bboxes from getRegionOffset():
 * [x, y, w, h, pixelCount] × N as i32 array.
 *
 * Pipeline: blur both frames → absolute-difference threshold → dilate →
 * connected-component labeling → per-component bounding boxes.
 * minArea here is only a cheap early-out on the total changed-pixel
 * count; per-region minArea filtering is intentionally left to JS.
 */
export function detectMotion(
  threshold: i32 = 25,
  blurRadius: i32 = 1,
  dilateRadius: i32 = 4,
  minArea: i32 = 200,
): i32 {
  const w = W, h = H, size = SIZE

  // Blur prev → blurA and curr → blurB. diffOff doubles as blur scratch;
  // it is fully overwritten by diffThreshold below, so no stale data survives.
  blur(prevOff, blurAOff, diffOff, w, h, blurRadius)
  blur(currOff, blurBOff, diffOff, w, h, blurRadius)

  // Absolute difference + threshold → binary mask in diffOff.
  const changed = diffThreshold(blurAOff, blurBOff, diffOff, size, threshold)
  // Early-out: total changed pixels can't possibly form a region ≥ minArea.
  if (changed < minArea) return 0

  // Dilate to close small gaps so nearby blobs merge.
  if (dilateRadius > 0) dilate(diffOff, w, h, dilateRadius)

  // Connected components → dense labels 1..numComp.
  const numComp = labelCCL(diffOff, labelOff, w, h)
  if (numComp === 0) return 0
  // labelCCL caps labels below MAX_REGIONS; clamp defensively so the
  // region-table writes below can never overrun it.
  const comps = numComp < MAX_REGIONS ? numComp : MAX_REGIONS

  // Extract bounding boxes in ONE pass over the label image instead of
  // one full-image scan per component (was O(size × numComp), now O(size)).
  // Region slot c-1 accumulates [minX, minY, maxX, maxY, px] for label c.
  for (let c: i32 = 0; c < comps; c++) {
    const off = regionOff + c * REGION_STRIDE * 4
    store<i32>(off, w)      // minX sentinel
    store<i32>(off + 4, h)  // minY sentinel
    store<i32>(off + 8, 0)  // maxX
    store<i32>(off + 12, 0) // maxY
    store<i32>(off + 16, 0) // pixel count
  }
  for (let y: i32 = 0; y < h; y++) {
    for (let x: i32 = 0; x < w; x++) {
      const l = load<i32>(labelOff + ((y * w + x) << 2))
      if (l <= 0 || l > comps) continue
      const off = regionOff + (l - 1) * REGION_STRIDE * 4
      if (x < load<i32>(off)) store<i32>(off, x)
      if (y < load<i32>(off + 4)) store<i32>(off + 4, y)
      if (x > load<i32>(off + 8)) store<i32>(off + 8, x)
      if (y > load<i32>(off + 12)) store<i32>(off + 12, y)
      store<i32>(off + 16, load<i32>(off + 16) + 1)
    }
  }

  // Rewrite [minX, minY, maxX, maxY, px] → [x, y, w, h, px] in place,
  // compacting out empty slots (defensive: labels are dense, so every
  // slot should have px > 0; in-place is safe since dst index ≤ src index).
  let regionCount: i32 = 0
  for (let c: i32 = 0; c < comps; c++) {
    const src = regionOff + c * REGION_STRIDE * 4
    const px = load<i32>(src + 16)
    if (px <= 0) continue
    const dst = regionOff + regionCount * REGION_STRIDE * 4
    const minX = load<i32>(src)
    const minY = load<i32>(src + 4)
    store<i32>(dst, minX)
    store<i32>(dst + 4, minY)
    store<i32>(dst + 8, load<i32>(src + 8) - minX + 1)
    store<i32>(dst + 12, load<i32>(src + 12) - minY + 1)
    store<i32>(dst + 16, px)
    regionCount++
  }
  return regionCount
}