opencv-contrib-python-headless 4.13.0.90-cp37-abi3-macosx_14_0_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (133)
  1. cv2/.dylibs/libaom.3.13.1.dylib +0 -0
  2. cv2/.dylibs/libavif.16.3.0.dylib +0 -0
  3. cv2/.dylibs/libdav1d.7.dylib +0 -0
  4. cv2/.dylibs/libvmaf.3.dylib +0 -0
  5. cv2/Error/__init__.pyi +118 -0
  6. cv2/LICENSE-3RD-PARTY.txt +3513 -0
  7. cv2/LICENSE.txt +21 -0
  8. cv2/__init__.py +181 -0
  9. cv2/__init__.pyi +6858 -0
  10. cv2/aruco/__init__.pyi +410 -0
  11. cv2/barcode/__init__.pyi +40 -0
  12. cv2/bgsegm/__init__.pyi +202 -0
  13. cv2/bioinspired/__init__.pyi +121 -0
  14. cv2/ccm/__init__.pyi +167 -0
  15. cv2/colored_kinfu/__init__.pyi +96 -0
  16. cv2/config-3.py +24 -0
  17. cv2/config.py +5 -0
  18. cv2/cuda/__init__.pyi +553 -0
  19. cv2/cv2.abi3.so +0 -0
  20. cv2/data/__init__.py +3 -0
  21. cv2/data/haarcascade_eye.xml +12213 -0
  22. cv2/data/haarcascade_eye_tree_eyeglasses.xml +22619 -0
  23. cv2/data/haarcascade_frontalcatface.xml +14382 -0
  24. cv2/data/haarcascade_frontalcatface_extended.xml +13394 -0
  25. cv2/data/haarcascade_frontalface_alt.xml +24350 -0
  26. cv2/data/haarcascade_frontalface_alt2.xml +20719 -0
  27. cv2/data/haarcascade_frontalface_alt_tree.xml +96484 -0
  28. cv2/data/haarcascade_frontalface_default.xml +33314 -0
  29. cv2/data/haarcascade_fullbody.xml +17030 -0
  30. cv2/data/haarcascade_lefteye_2splits.xml +7390 -0
  31. cv2/data/haarcascade_license_plate_rus_16stages.xml +1404 -0
  32. cv2/data/haarcascade_lowerbody.xml +14056 -0
  33. cv2/data/haarcascade_profileface.xml +29690 -0
  34. cv2/data/haarcascade_righteye_2splits.xml +7407 -0
  35. cv2/data/haarcascade_russian_plate_number.xml +2656 -0
  36. cv2/data/haarcascade_smile.xml +6729 -0
  37. cv2/data/haarcascade_upperbody.xml +28134 -0
  38. cv2/datasets/__init__.pyi +80 -0
  39. cv2/detail/__init__.pyi +627 -0
  40. cv2/dnn/__init__.pyi +549 -0
  41. cv2/dnn_superres/__init__.pyi +37 -0
  42. cv2/dpm/__init__.pyi +10 -0
  43. cv2/dynafu/__init__.pyi +43 -0
  44. cv2/face/__init__.pyi +220 -0
  45. cv2/fisheye/__init__.pyi +88 -0
  46. cv2/flann/__init__.pyi +65 -0
  47. cv2/ft/__init__.pyi +98 -0
  48. cv2/gapi/__init__.py +323 -0
  49. cv2/gapi/__init__.pyi +349 -0
  50. cv2/gapi/core/__init__.pyi +7 -0
  51. cv2/gapi/core/cpu/__init__.pyi +9 -0
  52. cv2/gapi/core/fluid/__init__.pyi +9 -0
  53. cv2/gapi/core/ocl/__init__.pyi +9 -0
  54. cv2/gapi/ie/__init__.pyi +51 -0
  55. cv2/gapi/ie/detail/__init__.pyi +12 -0
  56. cv2/gapi/imgproc/__init__.pyi +5 -0
  57. cv2/gapi/imgproc/fluid/__init__.pyi +9 -0
  58. cv2/gapi/oak/__init__.pyi +37 -0
  59. cv2/gapi/onnx/__init__.pyi +55 -0
  60. cv2/gapi/onnx/ep/__init__.pyi +63 -0
  61. cv2/gapi/ot/__init__.pyi +32 -0
  62. cv2/gapi/ot/cpu/__init__.pyi +9 -0
  63. cv2/gapi/ov/__init__.pyi +74 -0
  64. cv2/gapi/own/__init__.pyi +5 -0
  65. cv2/gapi/own/detail/__init__.pyi +10 -0
  66. cv2/gapi/render/__init__.pyi +5 -0
  67. cv2/gapi/render/ocv/__init__.pyi +9 -0
  68. cv2/gapi/streaming/__init__.pyi +42 -0
  69. cv2/gapi/video/__init__.pyi +10 -0
  70. cv2/gapi/wip/__init__.pyi +43 -0
  71. cv2/gapi/wip/draw/__init__.pyi +119 -0
  72. cv2/gapi/wip/gst/__init__.pyi +17 -0
  73. cv2/gapi/wip/onevpl/__init__.pyi +16 -0
  74. cv2/hfs/__init__.pyi +53 -0
  75. cv2/img_hash/__init__.pyi +116 -0
  76. cv2/instr/__init__.pyi +24 -0
  77. cv2/intensity_transform/__init__.pyi +27 -0
  78. cv2/ipp/__init__.pyi +14 -0
  79. cv2/kinfu/__init__.pyi +133 -0
  80. cv2/kinfu/detail/__init__.pyi +7 -0
  81. cv2/large_kinfu/__init__.pyi +73 -0
  82. cv2/legacy/__init__.pyi +93 -0
  83. cv2/line_descriptor/__init__.pyi +112 -0
  84. cv2/linemod/__init__.pyi +151 -0
  85. cv2/load_config_py2.py +6 -0
  86. cv2/load_config_py3.py +9 -0
  87. cv2/mat_wrapper/__init__.py +40 -0
  88. cv2/mcc/__init__.pyi +109 -0
  89. cv2/misc/__init__.py +1 -0
  90. cv2/misc/version.py +5 -0
  91. cv2/ml/__init__.pyi +696 -0
  92. cv2/motempl/__init__.pyi +29 -0
  93. cv2/multicalib/__init__.pyi +10 -0
  94. cv2/ocl/__init__.pyi +252 -0
  95. cv2/ogl/__init__.pyi +51 -0
  96. cv2/omnidir/__init__.pyi +68 -0
  97. cv2/optflow/__init__.pyi +286 -0
  98. cv2/parallel/__init__.pyi +6 -0
  99. cv2/phase_unwrapping/__init__.pyi +41 -0
  100. cv2/plot/__init__.pyi +64 -0
  101. cv2/ppf_match_3d/__init__.pyi +91 -0
  102. cv2/py.typed +0 -0
  103. cv2/quality/__init__.pyi +149 -0
  104. cv2/rapid/__init__.pyi +91 -0
  105. cv2/reg/__init__.pyi +210 -0
  106. cv2/rgbd/__init__.pyi +449 -0
  107. cv2/saliency/__init__.pyi +117 -0
  108. cv2/samples/__init__.pyi +12 -0
  109. cv2/segmentation/__init__.pyi +39 -0
  110. cv2/signal/__init__.pyi +14 -0
  111. cv2/stereo/__init__.pyi +88 -0
  112. cv2/structured_light/__init__.pyi +94 -0
  113. cv2/text/__init__.pyi +204 -0
  114. cv2/typing/__init__.py +180 -0
  115. cv2/utils/__init__.py +14 -0
  116. cv2/utils/__init__.pyi +110 -0
  117. cv2/utils/fs/__init__.pyi +6 -0
  118. cv2/utils/logging/__init__.pyi +22 -0
  119. cv2/utils/nested/__init__.pyi +31 -0
  120. cv2/version.py +5 -0
  121. cv2/videoio_registry/__init__.pyi +31 -0
  122. cv2/videostab/__init__.pyi +16 -0
  123. cv2/wechat_qrcode/__init__.pyi +23 -0
  124. cv2/xfeatures2d/__init__.pyi +537 -0
  125. cv2/ximgproc/__init__.pyi +746 -0
  126. cv2/ximgproc/segmentation/__init__.pyi +116 -0
  127. cv2/xphoto/__init__.pyi +142 -0
  128. opencv_contrib_python_headless-4.13.0.90.dist-info/LICENSE-3RD-PARTY.txt +3513 -0
  129. opencv_contrib_python_headless-4.13.0.90.dist-info/LICENSE.txt +21 -0
  130. opencv_contrib_python_headless-4.13.0.90.dist-info/METADATA +300 -0
  131. opencv_contrib_python_headless-4.13.0.90.dist-info/RECORD +133 -0
  132. opencv_contrib_python_headless-4.13.0.90.dist-info/WHEEL +6 -0
  133. opencv_contrib_python_headless-4.13.0.90.dist-info/top_level.txt +1 -0
cv2/gapi/__init__.py ADDED
@@ -0,0 +1,323 @@
+ __all__ = ['op', 'kernel']
+
+ import sys
+ import cv2 as cv
+
+ # NB: Register function in specific module
+ def register(mname):
+     def parameterized(func):
+         sys.modules[mname].__dict__[func.__name__] = func
+         return func
+     return parameterized
+
+
+ @register('cv2.gapi')
+ def networks(*args):
+     return cv.gapi_GNetPackage(list(map(cv.detail.strip, args)))
+
+
+ @register('cv2.gapi')
+ def compile_args(*args):
+     return list(map(cv.GCompileArg, args))
+
+
+ @register('cv2')
+ def GIn(*args):
+     return [*args]
+
+
+ @register('cv2')
+ def GOut(*args):
+     return [*args]
+
+
+ @register('cv2')
+ def gin(*args):
+     return [*args]
+
+
+ @register('cv2.gapi')
+ def descr_of(*args):
+     return [*args]
+
+
+ @register('cv2')
+ class GOpaque():
+     # NB: Inheritance from c++ class cause segfault.
+     # So just aggregate cv.GOpaqueT instead of inheritance
+     def __new__(cls, argtype):
+         return cv.GOpaqueT(argtype)
+
+     class Bool():
+         def __new__(self):
+             return cv.GOpaqueT(cv.gapi.CV_BOOL)
+
+     class Int():
+         def __new__(self):
+             return cv.GOpaqueT(cv.gapi.CV_INT)
+
+     class Int64():
+         def __new__(self):
+             return cv.GOpaqueT(cv.gapi.CV_INT64)
+
+     class UInt64():
+         def __new__(self):
+             return cv.GOpaqueT(cv.gapi.CV_UINT64)
+
+     class Double():
+         def __new__(self):
+             return cv.GOpaqueT(cv.gapi.CV_DOUBLE)
+
+     class Float():
+         def __new__(self):
+             return cv.GOpaqueT(cv.gapi.CV_FLOAT)
+
+     class String():
+         def __new__(self):
+             return cv.GOpaqueT(cv.gapi.CV_STRING)
+
+     class Point():
+         def __new__(self):
+             return cv.GOpaqueT(cv.gapi.CV_POINT)
+
+     class Point2f():
+         def __new__(self):
+             return cv.GOpaqueT(cv.gapi.CV_POINT2F)
+
+     class Point3f():
+         def __new__(self):
+             return cv.GOpaqueT(cv.gapi.CV_POINT3F)
+
+     class Size():
+         def __new__(self):
+             return cv.GOpaqueT(cv.gapi.CV_SIZE)
+
+     class Rect():
+         def __new__(self):
+             return cv.GOpaqueT(cv.gapi.CV_RECT)
+
+     class Prim():
+         def __new__(self):
+             return cv.GOpaqueT(cv.gapi.CV_DRAW_PRIM)
+
+     class Any():
+         def __new__(self):
+             return cv.GOpaqueT(cv.gapi.CV_ANY)
+
+ @register('cv2')
+ class GArray():
+     # NB: Inheritance from c++ class cause segfault.
+     # So just aggregate cv.GArrayT instead of inheritance
+     def __new__(cls, argtype):
+         return cv.GArrayT(argtype)
+
+     class Bool():
+         def __new__(self):
+             return cv.GArrayT(cv.gapi.CV_BOOL)
+
+     class Int():
+         def __new__(self):
+             return cv.GArrayT(cv.gapi.CV_INT)
+
+     class Int64():
+         def __new__(self):
+             return cv.GArrayT(cv.gapi.CV_INT64)
+
+     class UInt64():
+         def __new__(self):
+             return cv.GArrayT(cv.gapi.CV_UINT64)
+
+     class Double():
+         def __new__(self):
+             return cv.GArrayT(cv.gapi.CV_DOUBLE)
+
+     class Float():
+         def __new__(self):
+             return cv.GArrayT(cv.gapi.CV_FLOAT)
+
+     class String():
+         def __new__(self):
+             return cv.GArrayT(cv.gapi.CV_STRING)
+
+     class Point():
+         def __new__(self):
+             return cv.GArrayT(cv.gapi.CV_POINT)
+
+     class Point2f():
+         def __new__(self):
+             return cv.GArrayT(cv.gapi.CV_POINT2F)
+
+     class Point3f():
+         def __new__(self):
+             return cv.GArrayT(cv.gapi.CV_POINT3F)
+
+     class Size():
+         def __new__(self):
+             return cv.GArrayT(cv.gapi.CV_SIZE)
+
+     class Rect():
+         def __new__(self):
+             return cv.GArrayT(cv.gapi.CV_RECT)
+
+     class Scalar():
+         def __new__(self):
+             return cv.GArrayT(cv.gapi.CV_SCALAR)
+
+     class Mat():
+         def __new__(self):
+             return cv.GArrayT(cv.gapi.CV_MAT)
+
+     class GMat():
+         def __new__(self):
+             return cv.GArrayT(cv.gapi.CV_GMAT)
+
+     class Prim():
+         def __new__(self):
+             return cv.GArray(cv.gapi.CV_DRAW_PRIM)
+
+     class Any():
+         def __new__(self):
+             return cv.GArray(cv.gapi.CV_ANY)
+
+
+ # NB: Top lvl decorator takes arguments
+ def op(op_id, in_types, out_types):
+
+     garray_types= {
+         cv.GArray.Bool: cv.gapi.CV_BOOL,
+         cv.GArray.Int: cv.gapi.CV_INT,
+         cv.GArray.Int64: cv.gapi.CV_INT64,
+         cv.GArray.UInt64: cv.gapi.CV_UINT64,
+         cv.GArray.Double: cv.gapi.CV_DOUBLE,
+         cv.GArray.Float: cv.gapi.CV_FLOAT,
+         cv.GArray.String: cv.gapi.CV_STRING,
+         cv.GArray.Point: cv.gapi.CV_POINT,
+         cv.GArray.Point2f: cv.gapi.CV_POINT2F,
+         cv.GArray.Point3f: cv.gapi.CV_POINT3F,
+         cv.GArray.Size: cv.gapi.CV_SIZE,
+         cv.GArray.Rect: cv.gapi.CV_RECT,
+         cv.GArray.Scalar: cv.gapi.CV_SCALAR,
+         cv.GArray.Mat: cv.gapi.CV_MAT,
+         cv.GArray.GMat: cv.gapi.CV_GMAT,
+         cv.GArray.Prim: cv.gapi.CV_DRAW_PRIM,
+         cv.GArray.Any: cv.gapi.CV_ANY
+     }
+
+     gopaque_types= {
+         cv.GOpaque.Size: cv.gapi.CV_SIZE,
+         cv.GOpaque.Rect: cv.gapi.CV_RECT,
+         cv.GOpaque.Bool: cv.gapi.CV_BOOL,
+         cv.GOpaque.Int: cv.gapi.CV_INT,
+         cv.GOpaque.Int64: cv.gapi.CV_INT64,
+         cv.GOpaque.UInt64: cv.gapi.CV_UINT64,
+         cv.GOpaque.Double: cv.gapi.CV_DOUBLE,
+         cv.GOpaque.Float: cv.gapi.CV_FLOAT,
+         cv.GOpaque.String: cv.gapi.CV_STRING,
+         cv.GOpaque.Point: cv.gapi.CV_POINT,
+         cv.GOpaque.Point2f: cv.gapi.CV_POINT2F,
+         cv.GOpaque.Point3f: cv.gapi.CV_POINT3F,
+         cv.GOpaque.Size: cv.gapi.CV_SIZE,
+         cv.GOpaque.Rect: cv.gapi.CV_RECT,
+         cv.GOpaque.Prim: cv.gapi.CV_DRAW_PRIM,
+         cv.GOpaque.Any: cv.gapi.CV_ANY
+     }
+
+     type2str = {
+         cv.gapi.CV_BOOL: 'cv.gapi.CV_BOOL' ,
+         cv.gapi.CV_INT: 'cv.gapi.CV_INT' ,
+         cv.gapi.CV_INT64: 'cv.gapi.CV_INT64' ,
+         cv.gapi.CV_UINT64: 'cv.gapi.CV_UINT64' ,
+         cv.gapi.CV_DOUBLE: 'cv.gapi.CV_DOUBLE' ,
+         cv.gapi.CV_FLOAT: 'cv.gapi.CV_FLOAT' ,
+         cv.gapi.CV_STRING: 'cv.gapi.CV_STRING' ,
+         cv.gapi.CV_POINT: 'cv.gapi.CV_POINT' ,
+         cv.gapi.CV_POINT2F: 'cv.gapi.CV_POINT2F' ,
+         cv.gapi.CV_POINT3F: 'cv.gapi.CV_POINT3F' ,
+         cv.gapi.CV_SIZE: 'cv.gapi.CV_SIZE',
+         cv.gapi.CV_RECT: 'cv.gapi.CV_RECT',
+         cv.gapi.CV_SCALAR: 'cv.gapi.CV_SCALAR',
+         cv.gapi.CV_MAT: 'cv.gapi.CV_MAT',
+         cv.gapi.CV_GMAT: 'cv.gapi.CV_GMAT',
+         cv.gapi.CV_DRAW_PRIM: 'cv.gapi.CV_DRAW_PRIM'
+     }
+
+     # NB: Second lvl decorator takes class to decorate
+     def op_with_params(cls):
+         if not in_types:
+             raise Exception('{} operation should have at least one input!'.format(cls.__name__))
+
+         if not out_types:
+             raise Exception('{} operation should have at least one output!'.format(cls.__name__))
+
+         for i, t in enumerate(out_types):
+             if t not in [cv.GMat, cv.GScalar, *garray_types, *gopaque_types]:
+                 raise Exception('{} unsupported output type: {} in position: {}'
+                                 .format(cls.__name__, t.__name__, i))
+
+         def on(*args):
+             if len(in_types) != len(args):
+                 raise Exception('Invalid number of input elements!\nExpected: {}, Actual: {}'
+                                 .format(len(in_types), len(args)))
+
+             for i, (t, a) in enumerate(zip(in_types, args)):
+                 if t in garray_types:
+                     if not isinstance(a, cv.GArrayT):
+                         raise Exception("{} invalid type for argument {}.\nExpected: {}, Actual: {}"
+                                         .format(cls.__name__, i, cv.GArrayT.__name__, type(a).__name__))
+
+                     elif a.type() != garray_types[t]:
+                         raise Exception("{} invalid GArrayT type for argument {}.\nExpected: {}, Actual: {}"
+                                         .format(cls.__name__, i, type2str[garray_types[t]], type2str[a.type()]))
+
+                 elif t in gopaque_types:
+                     if not isinstance(a, cv.GOpaqueT):
+                         raise Exception("{} invalid type for argument {}.\nExpected: {}, Actual: {}"
+                                         .format(cls.__name__, i, cv.GOpaqueT.__name__, type(a).__name__))
+
+                     elif a.type() != gopaque_types[t]:
+                         raise Exception("{} invalid GOpaque type for argument {}.\nExpected: {}, Actual: {}"
+                                         .format(cls.__name__, i, type2str[gopaque_types[t]], type2str[a.type()]))
+
+                 else:
+                     if t != type(a):
+                         raise Exception('{} invalid input type for argument {}.\nExpected: {}, Actual: {}'
+                                         .format(cls.__name__, i, t.__name__, type(a).__name__))
+
+             op = cv.gapi.__op(op_id, cls.outMeta, *args)
+
+             out_protos = []
+             for i, out_type in enumerate(out_types):
+                 if out_type == cv.GMat:
+                     out_protos.append(op.getGMat())
+                 elif out_type == cv.GScalar:
+                     out_protos.append(op.getGScalar())
+                 elif out_type in gopaque_types:
+                     out_protos.append(op.getGOpaque(gopaque_types[out_type]))
+                 elif out_type in garray_types:
+                     out_protos.append(op.getGArray(garray_types[out_type]))
+                 else:
+                     raise Exception("""In {}: G-API operation can't produce the output with type: {} in position: {}"""
+                                     .format(cls.__name__, out_type.__name__, i))
+
+             return tuple(out_protos) if len(out_protos) != 1 else out_protos[0]
+
+         # NB: Extend operation class
+         cls.id = op_id
+         cls.on = staticmethod(on)
+         return cls
+
+     return op_with_params
+
+
+ def kernel(op_cls):
+     # NB: Second lvl decorator takes class to decorate
+     def kernel_with_params(cls):
+         # NB: Add new members to kernel class
+         cls.id = op_cls.id
+         cls.outMeta = op_cls.outMeta
+         return cls
+
+     return kernel_with_params
+
+
+ cv.gapi.wip.GStreamerPipeline = cv.gapi_wip_gst_GStreamerPipeline
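Note: the module above is what wires `op` and `kernel` into the `cv2.gapi` namespace: `op` attaches an `id` and a type-checked `on()` factory to an operation class, and `kernel` copies that `id` plus the operation's `outMeta` onto an implementation class. As a rough illustration of the intended usage (the operation id, class names and the add-one logic below are made up for this note; the packaging calls `cv.gapi.kernels` / `cv.gapi.compile_args` follow OpenCV's own G-API Python samples and are not part of this file):

import cv2 as cv
import numpy as np

# Declare the operation interface: one GMat in, one GMat out.
@cv.gapi.op('custom.add1', in_types=[cv.GMat], out_types=[cv.GMat])
class GAdd1:
    @staticmethod
    def outMeta(desc):
        # Element-wise op: output metadata matches the input descriptor.
        return desc

# Provide a Python implementation for that operation.
@cv.gapi.kernel(GAdd1)
class GAdd1Impl:
    @staticmethod
    def run(img):
        return img + 1

# Build a graph with the custom op and run it.
g_in = cv.GMat()
g_out = GAdd1.on(g_in)                      # 'on' was installed by @cv.gapi.op
comp = cv.GComputation(cv.GIn(g_in), cv.GOut(g_out))
out = comp.apply(cv.gin(np.zeros((4, 4), np.uint8)),
                 args=cv.gapi.compile_args(cv.gapi.kernels(GAdd1Impl)))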
cv2/gapi/__init__.pyi ADDED
@@ -0,0 +1,349 @@
+ __all__: list[str] = []
+
+ import cv2
+ import cv2.typing
+ import typing as _typing
+
+
+ from cv2.gapi import core as core
+ from cv2.gapi import ie as ie
+ from cv2.gapi import imgproc as imgproc
+ from cv2.gapi import oak as oak
+ from cv2.gapi import onnx as onnx
+ from cv2.gapi import ot as ot
+ from cv2.gapi import ov as ov
+ from cv2.gapi import own as own
+ from cv2.gapi import render as render
+ from cv2.gapi import streaming as streaming
+ from cv2.gapi import video as video
+ from cv2.gapi import wip as wip
+
+
+ # Enumerations
+ StereoOutputFormat_DEPTH_FLOAT16: int
+ STEREO_OUTPUT_FORMAT_DEPTH_FLOAT16: int
+ StereoOutputFormat_DEPTH_FLOAT32: int
+ STEREO_OUTPUT_FORMAT_DEPTH_FLOAT32: int
+ StereoOutputFormat_DISPARITY_FIXED16_11_5: int
+ STEREO_OUTPUT_FORMAT_DISPARITY_FIXED16_11_5: int
+ StereoOutputFormat_DISPARITY_FIXED16_12_4: int
+ STEREO_OUTPUT_FORMAT_DISPARITY_FIXED16_12_4: int
+ StereoOutputFormat_DEPTH_16F: int
+ STEREO_OUTPUT_FORMAT_DEPTH_16F: int
+ StereoOutputFormat_DEPTH_32F: int
+ STEREO_OUTPUT_FORMAT_DEPTH_32F: int
+ StereoOutputFormat_DISPARITY_16Q_10_5: int
+ STEREO_OUTPUT_FORMAT_DISPARITY_16Q_10_5: int
+ StereoOutputFormat_DISPARITY_16Q_11_4: int
+ STEREO_OUTPUT_FORMAT_DISPARITY_16Q_11_4: int
+ StereoOutputFormat = int
+ """One of [StereoOutputFormat_DEPTH_FLOAT16, STEREO_OUTPUT_FORMAT_DEPTH_FLOAT16, StereoOutputFormat_DEPTH_FLOAT32, STEREO_OUTPUT_FORMAT_DEPTH_FLOAT32, StereoOutputFormat_DISPARITY_FIXED16_11_5, STEREO_OUTPUT_FORMAT_DISPARITY_FIXED16_11_5, StereoOutputFormat_DISPARITY_FIXED16_12_4, STEREO_OUTPUT_FORMAT_DISPARITY_FIXED16_12_4, StereoOutputFormat_DEPTH_16F, STEREO_OUTPUT_FORMAT_DEPTH_16F, StereoOutputFormat_DEPTH_32F, STEREO_OUTPUT_FORMAT_DEPTH_32F, StereoOutputFormat_DISPARITY_16Q_10_5, STEREO_OUTPUT_FORMAT_DISPARITY_16Q_10_5, StereoOutputFormat_DISPARITY_16Q_11_4, STEREO_OUTPUT_FORMAT_DISPARITY_16Q_11_4]"""
+
+ CV_BOOL: int
+ CV_INT: int
+ CV_INT64: int
+ CV_UINT64: int
+ CV_DOUBLE: int
+ CV_FLOAT: int
+ CV_STRING: int
+ CV_POINT: int
+ CV_POINT2F: int
+ CV_POINT3F: int
+ CV_SIZE: int
+ CV_RECT: int
+ CV_SCALAR: int
+ CV_MAT: int
+ CV_GMAT: int
+ CV_DRAW_PRIM: int
+ CV_ANY: int
+ ArgType = int
+ """One of [CV_BOOL, CV_INT, CV_INT64, CV_UINT64, CV_DOUBLE, CV_FLOAT, CV_STRING, CV_POINT, CV_POINT2F, CV_POINT3F, CV_SIZE, CV_RECT, CV_SCALAR, CV_MAT, CV_GMAT, CV_DRAW_PRIM, CV_ANY]"""
+
+
+
+ # Classes
+ class GNetParam:
+     ...
+
+ class GNetPackage:
+     # Functions
+     @_typing.overload
+     def __init__(self) -> None: ...
+     @_typing.overload
+     def __init__(self, nets: _typing.Sequence[GNetParam]) -> None: ...
+
+
+
+ # Functions
+ def BGR2Gray(src: cv2.GMat) -> cv2.GMat: ...
+
+ def BGR2I420(src: cv2.GMat) -> cv2.GMat: ...
+
+ def BGR2LUV(src: cv2.GMat) -> cv2.GMat: ...
+
+ def BGR2RGB(src: cv2.GMat) -> cv2.GMat: ...
+
+ def BGR2YUV(src: cv2.GMat) -> cv2.GMat: ...
+
+ def BayerGR2RGB(src_gr: cv2.GMat) -> cv2.GMat: ...
+
+ def Canny(image: cv2.GMat, threshold1: float, threshold2: float, apertureSize: int = ..., L2gradient: bool = ...) -> cv2.GMat: ...
+
+ def I4202BGR(src: cv2.GMat) -> cv2.GMat: ...
+
+ def I4202RGB(src: cv2.GMat) -> cv2.GMat: ...
+
+ def LUT(src: cv2.GMat, lut: cv2.typing.MatLike) -> cv2.GMat: ...
+
+ def LUV2BGR(src: cv2.GMat) -> cv2.GMat: ...
+
+ def Laplacian(src: cv2.GMat, ddepth: int, ksize: int = ..., scale: float = ..., delta: float = ..., borderType: int = ...) -> cv2.GMat: ...
+
+ def NV12toBGR(src_y: cv2.GMat, src_uv: cv2.GMat) -> cv2.GMat: ...
+
+ def NV12toGray(src_y: cv2.GMat, src_uv: cv2.GMat) -> cv2.GMat: ...
+
+ def NV12toRGB(src_y: cv2.GMat, src_uv: cv2.GMat) -> cv2.GMat: ...
+
+ @_typing.overload
+ def RGB2Gray(src: cv2.GMat) -> cv2.GMat: ...
+ @_typing.overload
+ def RGB2Gray(src: cv2.GMat, rY: float, gY: float, bY: float) -> cv2.GMat: ...
+
+ def RGB2HSV(src: cv2.GMat) -> cv2.GMat: ...
+
+ def RGB2I420(src: cv2.GMat) -> cv2.GMat: ...
+
+ def RGB2Lab(src: cv2.GMat) -> cv2.GMat: ...
+
+ def RGB2YUV(src: cv2.GMat) -> cv2.GMat: ...
+
+ def RGB2YUV422(src: cv2.GMat) -> cv2.GMat: ...
+
+ def Sobel(src: cv2.GMat, ddepth: int, dx: int, dy: int, ksize: int = ..., scale: float = ..., delta: float = ..., borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...
+
+ def SobelXY(src: cv2.GMat, ddepth: int, order: int, ksize: int = ..., scale: float = ..., delta: float = ..., borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> tuple[cv2.GMat, cv2.GMat]: ...
+
+ def YUV2BGR(src: cv2.GMat) -> cv2.GMat: ...
+
+ def YUV2RGB(src: cv2.GMat) -> cv2.GMat: ...
+
+ def absDiff(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
+
+ def absDiffC(src: cv2.GMat, c: cv2.GScalar) -> cv2.GMat: ...
+
+ def add(src1: cv2.GMat, src2: cv2.GMat, ddepth: int = ...) -> cv2.GMat: ...
+
+ @_typing.overload
+ def addC(src1: cv2.GMat, c: cv2.GScalar, ddepth: int = ...) -> cv2.GMat: ...
+ @_typing.overload
+ def addC(c: cv2.GScalar, src1: cv2.GMat, ddepth: int = ...) -> cv2.GMat: ...
+
+ def addWeighted(src1: cv2.GMat, alpha: float, src2: cv2.GMat, beta: float, gamma: float, ddepth: int = ...) -> cv2.GMat: ...
+
+ def bilateralFilter(src: cv2.GMat, d: int, sigmaColor: float, sigmaSpace: float, borderType: int = ...) -> cv2.GMat: ...
+
+ @_typing.overload
+ def bitwise_and(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
+ @_typing.overload
+ def bitwise_and(src1: cv2.GMat, src2: cv2.GScalar) -> cv2.GMat: ...
+
+ def bitwise_not(src: cv2.GMat) -> cv2.GMat: ...
+
+ @_typing.overload
+ def bitwise_or(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
+ @_typing.overload
+ def bitwise_or(src1: cv2.GMat, src2: cv2.GScalar) -> cv2.GMat: ...
+
+ @_typing.overload
+ def bitwise_xor(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
+ @_typing.overload
+ def bitwise_xor(src1: cv2.GMat, src2: cv2.GScalar) -> cv2.GMat: ...
+
+ def blur(src: cv2.GMat, ksize: cv2.typing.Size, anchor: cv2.typing.Point = ..., borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...
+
+ @_typing.overload
+ def boundingRect(src: cv2.GMat) -> cv2.GOpaqueT: ...
+ @_typing.overload
+ def boundingRect(src: cv2.GArrayT) -> cv2.GOpaqueT: ...
+ @_typing.overload
+ def boundingRect(src: cv2.GArrayT) -> cv2.GOpaqueT: ...
+
+ def boxFilter(src: cv2.GMat, dtype: int, ksize: cv2.typing.Size, anchor: cv2.typing.Point = ..., normalize: bool = ..., borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...
+
+ def cartToPolar(x: cv2.GMat, y: cv2.GMat, angleInDegrees: bool = ...) -> tuple[cv2.GMat, cv2.GMat]: ...
+
+ @_typing.overload
+ def cmpEQ(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
+ @_typing.overload
+ def cmpEQ(src1: cv2.GMat, src2: cv2.GScalar) -> cv2.GMat: ...
+
+ @_typing.overload
+ def cmpGE(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
+ @_typing.overload
+ def cmpGE(src1: cv2.GMat, src2: cv2.GScalar) -> cv2.GMat: ...
+
+ @_typing.overload
+ def cmpGT(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
+ @_typing.overload
+ def cmpGT(src1: cv2.GMat, src2: cv2.GScalar) -> cv2.GMat: ...
+
+ @_typing.overload
+ def cmpLE(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
+ @_typing.overload
+ def cmpLE(src1: cv2.GMat, src2: cv2.GScalar) -> cv2.GMat: ...
+
+ @_typing.overload
+ def cmpLT(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
+ @_typing.overload
+ def cmpLT(src1: cv2.GMat, src2: cv2.GScalar) -> cv2.GMat: ...
+
+ @_typing.overload
+ def cmpNE(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
+ @_typing.overload
+ def cmpNE(src1: cv2.GMat, src2: cv2.GScalar) -> cv2.GMat: ...
+
+ def combine(lhs: cv2.GKernelPackage, rhs: cv2.GKernelPackage) -> cv2.GKernelPackage: ...
+
+ @_typing.overload
+ def concatHor(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
+ @_typing.overload
+ def concatHor(v: _typing.Sequence[cv2.GMat]) -> cv2.GMat: ...
+
+ @_typing.overload
+ def concatVert(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
+ @_typing.overload
+ def concatVert(v: _typing.Sequence[cv2.GMat]) -> cv2.GMat: ...
+
+ def convertTo(src: cv2.GMat, rdepth: int, alpha: float = ..., beta: float = ...) -> cv2.GMat: ...
+
+ def copy(in_: cv2.GMat) -> cv2.GMat: ...
+
+ def countNonZero(src: cv2.GMat) -> cv2.GOpaqueT: ...
+
+ def crop(src: cv2.GMat, rect: cv2.typing.Rect) -> cv2.GMat: ...
+
+ def dilate(src: cv2.GMat, kernel: cv2.typing.MatLike, anchor: cv2.typing.Point = ..., iterations: int = ..., borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...
+
+ def dilate3x3(src: cv2.GMat, iterations: int = ..., borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...
+
+ def div(src1: cv2.GMat, src2: cv2.GMat, scale: float, ddepth: int = ...) -> cv2.GMat: ...
+
+ def divC(src: cv2.GMat, divisor: cv2.GScalar, scale: float, ddepth: int = ...) -> cv2.GMat: ...
+
+ def divRC(divident: cv2.GScalar, src: cv2.GMat, scale: float, ddepth: int = ...) -> cv2.GMat: ...
+
+ def equalizeHist(src: cv2.GMat) -> cv2.GMat: ...
+
+ def erode(src: cv2.GMat, kernel: cv2.typing.MatLike, anchor: cv2.typing.Point = ..., iterations: int = ..., borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...
+
+ def erode3x3(src: cv2.GMat, iterations: int = ..., borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...
+
+ def filter2D(src: cv2.GMat, ddepth: int, kernel: cv2.typing.MatLike, anchor: cv2.typing.Point = ..., delta: cv2.typing.Scalar = ..., borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...
+
+ def flip(src: cv2.GMat, flipCode: int) -> cv2.GMat: ...
+
+ def gaussianBlur(src: cv2.GMat, ksize: cv2.typing.Size, sigmaX: float, sigmaY: float = ..., borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...
+
+ def goodFeaturesToTrack(image: cv2.GMat, maxCorners: int, qualityLevel: float, minDistance: float, mask: cv2.typing.MatLike | None = ..., blockSize: int = ..., useHarrisDetector: bool = ..., k: float = ...) -> cv2.GArrayT: ...
+
+ def inRange(src: cv2.GMat, threshLow: cv2.GScalar, threshUp: cv2.GScalar) -> cv2.GMat: ...
+
+ @_typing.overload
+ def infer(name: str, inputs: cv2.GInferInputs) -> cv2.GInferOutputs: ...
+ @_typing.overload
+ def infer(name: str, roi: cv2.GOpaqueT, inputs: cv2.GInferInputs) -> cv2.GInferOutputs: ...
+ @_typing.overload
+ def infer(name: str, rois: cv2.GArrayT, inputs: cv2.GInferInputs) -> cv2.GInferListOutputs: ...
+
+ def infer2(name: str, in_: cv2.GMat, inputs: cv2.GInferListInputs) -> cv2.GInferListOutputs: ...
+
+ def integral(src: cv2.GMat, sdepth: int = ..., sqdepth: int = ...) -> tuple[cv2.GMat, cv2.GMat]: ...
+
+ @_typing.overload
+ def kmeans(data: cv2.GMat, K: int, bestLabels: cv2.GMat, criteria: cv2.typing.TermCriteria, attempts: int, flags: cv2.KmeansFlags) -> tuple[cv2.GOpaqueT, cv2.GMat, cv2.GMat]: ...
+ @_typing.overload
+ def kmeans(data: cv2.GMat, K: int, criteria: cv2.typing.TermCriteria, attempts: int, flags: cv2.KmeansFlags) -> tuple[cv2.GOpaqueT, cv2.GMat, cv2.GMat]: ...
+ @_typing.overload
+ def kmeans(data: cv2.GArrayT, K: int, bestLabels: cv2.GArrayT, criteria: cv2.typing.TermCriteria, attempts: int, flags: cv2.KmeansFlags) -> tuple[cv2.GOpaqueT, cv2.GArrayT, cv2.GArrayT]: ...
+ @_typing.overload
+ def kmeans(data: cv2.GArrayT, K: int, bestLabels: cv2.GArrayT, criteria: cv2.typing.TermCriteria, attempts: int, flags: cv2.KmeansFlags) -> tuple[cv2.GOpaqueT, cv2.GArrayT, cv2.GArrayT]: ...
+
+ def mask(src: cv2.GMat, mask: cv2.GMat) -> cv2.GMat: ...
+
+ def max(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
+
+ def mean(src: cv2.GMat) -> cv2.GScalar: ...
+
+ def medianBlur(src: cv2.GMat, ksize: int) -> cv2.GMat: ...
+
+ def merge3(src1: cv2.GMat, src2: cv2.GMat, src3: cv2.GMat) -> cv2.GMat: ...
+
+ def merge4(src1: cv2.GMat, src2: cv2.GMat, src3: cv2.GMat, src4: cv2.GMat) -> cv2.GMat: ...
+
+ def min(src1: cv2.GMat, src2: cv2.GMat) -> cv2.GMat: ...
+
+ def morphologyEx(src: cv2.GMat, op: cv2.MorphTypes, kernel: cv2.typing.MatLike, anchor: cv2.typing.Point = ..., iterations: int = ..., borderType: cv2.BorderTypes = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...
+
+ def mul(src1: cv2.GMat, src2: cv2.GMat, scale: float = ..., ddepth: int = ...) -> cv2.GMat: ...
+
+ @_typing.overload
+ def mulC(src: cv2.GMat, multiplier: float, ddepth: int = ...) -> cv2.GMat: ...
+ @_typing.overload
+ def mulC(src: cv2.GMat, multiplier: cv2.GScalar, ddepth: int = ...) -> cv2.GMat: ...
+ @_typing.overload
+ def mulC(multiplier: cv2.GScalar, src: cv2.GMat, ddepth: int = ...) -> cv2.GMat: ...
+
+ def normInf(src: cv2.GMat) -> cv2.GScalar: ...
+
+ def normL1(src: cv2.GMat) -> cv2.GScalar: ...
+
+ def normL2(src: cv2.GMat) -> cv2.GScalar: ...
+
+ def normalize(src: cv2.GMat, alpha: float, beta: float, norm_type: int, ddepth: int = ...) -> cv2.GMat: ...
+
+ @_typing.overload
+ def parseSSD(in_: cv2.GMat, inSz: cv2.GOpaqueT, confidenceThreshold: float = ..., filterLabel: int = ...) -> tuple[cv2.GArrayT, cv2.GArrayT]: ...
+ @_typing.overload
+ def parseSSD(in_: cv2.GMat, inSz: cv2.GOpaqueT, confidenceThreshold: float, alignmentToSquare: bool, filterOutOfBounds: bool) -> cv2.GArrayT: ...
+
+ def parseYolo(in_: cv2.GMat, inSz: cv2.GOpaqueT, confidenceThreshold: float = ..., nmsThreshold: float = ..., anchors: _typing.Sequence[float] = ...) -> tuple[cv2.GArrayT, cv2.GArrayT]: ...
+
+ def phase(x: cv2.GMat, y: cv2.GMat, angleInDegrees: bool = ...) -> cv2.GMat: ...
+
+ def polarToCart(magnitude: cv2.GMat, angle: cv2.GMat, angleInDegrees: bool = ...) -> tuple[cv2.GMat, cv2.GMat]: ...
+
+ def remap(src: cv2.GMat, map1: cv2.typing.MatLike, map2: cv2.typing.MatLike, interpolation: int, borderMode: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...
+
+ def resize(src: cv2.GMat, dsize: cv2.typing.Size, fx: float = ..., fy: float = ..., interpolation: int = ...) -> cv2.GMat: ...
+
+ def select(src1: cv2.GMat, src2: cv2.GMat, mask: cv2.GMat) -> cv2.GMat: ...
+
+ def sepFilter(src: cv2.GMat, ddepth: int, kernelX: cv2.typing.MatLike, kernelY: cv2.typing.MatLike, anchor: cv2.typing.Point, delta: cv2.typing.Scalar, borderType: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...
+
+ def split3(src: cv2.GMat) -> tuple[cv2.GMat, cv2.GMat, cv2.GMat]: ...
+
+ def split4(src: cv2.GMat) -> tuple[cv2.GMat, cv2.GMat, cv2.GMat, cv2.GMat]: ...
+
+ def sqrt(src: cv2.GMat) -> cv2.GMat: ...
+
+ def sub(src1: cv2.GMat, src2: cv2.GMat, ddepth: int = ...) -> cv2.GMat: ...
+
+ def subC(src: cv2.GMat, c: cv2.GScalar, ddepth: int = ...) -> cv2.GMat: ...
+
+ def subRC(c: cv2.GScalar, src: cv2.GMat, ddepth: int = ...) -> cv2.GMat: ...
+
+ def sum(src: cv2.GMat) -> cv2.GScalar: ...
+
+ @_typing.overload
+ def threshold(src: cv2.GMat, thresh: cv2.GScalar, maxval: cv2.GScalar, type: int) -> cv2.GMat: ...
+ @_typing.overload
+ def threshold(src: cv2.GMat, maxval: cv2.GScalar, type: int) -> tuple[cv2.GMat, cv2.GScalar]: ...
+
+ def transpose(src: cv2.GMat) -> cv2.GMat: ...
+
+ def warpAffine(src: cv2.GMat, M: cv2.typing.MatLike, dsize: cv2.typing.Size, flags: int = ..., borderMode: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...
+
+ def warpPerspective(src: cv2.GMat, M: cv2.typing.MatLike, dsize: cv2.typing.Size, flags: int = ..., borderMode: int = ..., borderValue: cv2.typing.Scalar = ...) -> cv2.GMat: ...
+
+
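Note: the stub above only declares signatures; at runtime these functions build graph nodes rather than process pixels immediately. A minimal sketch of the intended usage, assuming a plain BGR input (the image size and contents below are placeholders):

import cv2 as cv
import numpy as np

# Describe the pipeline once on placeholder GMat objects.
g_in = cv.GMat()
g_blur = cv.gapi.gaussianBlur(g_in, (3, 3), 1.0)
g_gray = cv.gapi.BGR2Gray(g_blur)
comp = cv.GComputation(cv.GIn(g_in), cv.GOut(g_gray))

# Only apply() touches real data; with a single output it returns that output directly.
img = np.zeros((240, 320, 3), dtype=np.uint8)   # placeholder input
gray = comp.apply(cv.gin(img))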
cv2/gapi/core/__init__.pyi ADDED
@@ -0,0 +1,7 @@
+ __all__: list[str] = []
+
+ from cv2.gapi.core import cpu as cpu
+ from cv2.gapi.core import fluid as fluid
+ from cv2.gapi.core import ocl as ocl
+
+
@@ -0,0 +1,9 @@
+ __all__: list[str] = []
+
+ import cv2
+
+
+ # Functions
+ def kernels() -> cv2.GKernelPackage: ...
+
+
@@ -0,0 +1,9 @@
+ __all__: list[str] = []
+
+ import cv2
+
+
+ # Functions
+ def kernels() -> cv2.GKernelPackage: ...
+
+
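Note: the kernels() helpers stubbed in the two hunks above belong to the per-backend core modules (cpu/fluid/ocl in the file list); each returns a cv2.GKernelPackage. Such a package is passed through cv2.gapi.compile_args to steer a graph toward a particular backend, and cv2.gapi.combine merges packages when several are needed. A hedged sketch, reusing the graph-building pattern from the previous example (backend choice and input data are assumptions for illustration):

import cv2 as cv
import numpy as np

g_in = cv.GMat()
g_out = cv.gapi.bitwise_not(g_in)
comp = cv.GComputation(cv.GIn(g_in), cv.GOut(g_out))

# Assumption: prefer the Fluid implementations of the core kernels for this graph.
pkg = cv.gapi.core.fluid.kernels()
out = comp.apply(cv.gin(np.zeros((64, 64), np.uint8)),
                 args=cv.gapi.compile_args(pkg))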