pixeltable 0.2.6__py3-none-any.whl → 0.2.8__py3-none-any.whl
This diff shows the content of the publicly released package versions as they appear in their public registry. It is provided for informational purposes only.
Potentially problematic release.
This version of pixeltable might be problematic.
- pixeltable/__init__.py +3 -1
- pixeltable/__version__.py +2 -2
- pixeltable/catalog/column.py +8 -2
- pixeltable/catalog/insertable_table.py +32 -17
- pixeltable/catalog/table.py +167 -12
- pixeltable/catalog/table_version.py +185 -106
- pixeltable/datatransfer/__init__.py +1 -0
- pixeltable/datatransfer/label_studio.py +452 -0
- pixeltable/datatransfer/remote.py +85 -0
- pixeltable/env.py +148 -69
- pixeltable/exprs/column_ref.py +2 -2
- pixeltable/exprs/comparison.py +39 -1
- pixeltable/exprs/data_row.py +7 -0
- pixeltable/exprs/expr.py +11 -12
- pixeltable/exprs/function_call.py +0 -3
- pixeltable/exprs/globals.py +14 -2
- pixeltable/exprs/similarity_expr.py +5 -3
- pixeltable/ext/functions/whisperx.py +30 -0
- pixeltable/ext/functions/yolox.py +16 -0
- pixeltable/func/aggregate_function.py +2 -2
- pixeltable/func/expr_template_function.py +3 -1
- pixeltable/func/udf.py +2 -2
- pixeltable/functions/fireworks.py +9 -4
- pixeltable/functions/huggingface.py +25 -1
- pixeltable/functions/openai.py +15 -10
- pixeltable/functions/together.py +11 -6
- pixeltable/functions/util.py +0 -43
- pixeltable/functions/video.py +46 -8
- pixeltable/globals.py +20 -2
- pixeltable/index/__init__.py +1 -0
- pixeltable/index/base.py +6 -1
- pixeltable/index/btree.py +54 -0
- pixeltable/index/embedding_index.py +4 -1
- pixeltable/io/__init__.py +1 -0
- pixeltable/io/globals.py +58 -0
- pixeltable/iterators/base.py +4 -4
- pixeltable/iterators/document.py +26 -15
- pixeltable/iterators/video.py +9 -1
- pixeltable/metadata/__init__.py +2 -2
- pixeltable/metadata/converters/convert_14.py +13 -0
- pixeltable/metadata/schema.py +9 -6
- pixeltable/plan.py +9 -5
- pixeltable/store.py +14 -21
- pixeltable/tool/create_test_db_dump.py +14 -0
- pixeltable/type_system.py +14 -4
- pixeltable/utils/coco.py +94 -0
- pixeltable-0.2.8.dist-info/METADATA +137 -0
- {pixeltable-0.2.6.dist-info → pixeltable-0.2.8.dist-info}/RECORD +50 -45
- pixeltable/func/nos_function.py +0 -202
- pixeltable/utils/clip.py +0 -18
- pixeltable-0.2.6.dist-info/METADATA +0 -131
- {pixeltable-0.2.6.dist-info → pixeltable-0.2.8.dist-info}/LICENSE +0 -0
- {pixeltable-0.2.6.dist-info → pixeltable-0.2.8.dist-info}/WHEEL +0 -0
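
The per-file counts above summarize the change set; the RECORD diff and the deleted files follow below. As a quick way to tell which of the two wheels is installed in a given environment, a small check along these lines can be used. This is only a sketch: `importlib.metadata` is standard library, while `pxt.init()` is the smoke test quoted from the 0.2.6 METADATA at the end of this diff and is assumed to still be available in 0.2.8.

```python
# Sketch: confirm which pixeltable wheel is installed, then run the README's smoke test.
from importlib.metadata import version

import pixeltable as pxt

print(version("pixeltable"))  # '0.2.6' before the upgrade, '0.2.8' after
pxt.init()                    # from the README's "To verify that it's working" snippet (assumed unchanged in 0.2.8)
```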
{pixeltable-0.2.6.dist-info → pixeltable-0.2.8.dist-info}/RECORD
@@ -1,21 +1,24 @@
-pixeltable/__init__.py,sha256=
-pixeltable/__version__.py,sha256=
+pixeltable/__init__.py,sha256=DzVevwic1g8Tp4QYrcCIzPFFwPZ66KGWeYprlRC9Uwc,1142
+pixeltable/__version__.py,sha256=AyN0bhxu_oExUztSHc2d8uAemad8-aDrT7QgYAM_JCs,112
 pixeltable/catalog/__init__.py,sha256=E41bxaPeQIcgRYzTWc2vkDOboQhRymrJf4IcHQO7o_8,453
 pixeltable/catalog/catalog.py,sha256=0TYWB1R6YBp9qCkWF7kCcX2Yw70UuburKKIemv5L1Js,7908
-pixeltable/catalog/column.py,sha256=
+pixeltable/catalog/column.py,sha256=J8irt6PfT1ofC3wVKi-hDGjNUZ1Ceq2qzbmZyEw-ddA,8335
 pixeltable/catalog/dir.py,sha256=pG1nMpG123POo6WMSHhAmnwXOQ26uUJfUcbzL-Jb4ws,919
 pixeltable/catalog/globals.py,sha256=yLEGNbsSnLzjWNHVJacfjA9hbw13Q6QXLOSCRmdTlq0,943
-pixeltable/catalog/insertable_table.py,sha256=
+pixeltable/catalog/insertable_table.py,sha256=vMa_XUWT3DG3ZlxkScDZ9-mYNw31G8XB4ODUlxXt7NU,8927
 pixeltable/catalog/named_function.py,sha256=a96gnKtx-nz5_MzDIiD4t4Hxqdjkg9ZtijRQxvWA5WQ,1147
 pixeltable/catalog/path.py,sha256=QgccEi_QOfaKt8YsR2zLtd_z7z7QQkU_1kprJFi2SPQ,1677
 pixeltable/catalog/path_dict.py,sha256=xfvxg1Ze5jZCARUGASF2DRbQPh7pRVTYhuJ_u82gYUo,5941
 pixeltable/catalog/schema_object.py,sha256=-UxmPLbuEBqJiJi_GGRbFdr7arAFxTqs4bt6TFmSt3M,1059
-pixeltable/catalog/table.py,sha256=
-pixeltable/catalog/table_version.py,sha256=
+pixeltable/catalog/table.py,sha256=50UOSt7zltvthygiiXbgP-XMOiYUpcIaEXiM1uJFcaA,38220
+pixeltable/catalog/table_version.py,sha256=BeP-4Io6euT6hOQXVJwpZNjZ6ZNehqOH6S98zvQsU9E,52751
 pixeltable/catalog/table_version_path.py,sha256=2Ofzd0n36flcNm86KWwIWDBAfgnV5Z-FxAHdMSPgMLc,5482
 pixeltable/catalog/view.py,sha256=BIL3s4DV3tWbOcqtqnhn46B2UvLaBhppfJUlNEt5nec,9734
 pixeltable/dataframe.py,sha256=lzSzR7mi9C4BO39fNXYo64k3KxILyG_Z7eET6DXTgKY,31922
-pixeltable/
+pixeltable/datatransfer/__init__.py,sha256=cRWdQ_LUNkJgmionI1RrYD71A1CSI92P4o8_XXOnFmU,27
+pixeltable/datatransfer/label_studio.py,sha256=3DLsqfIUNVG9xVRVUU4NayLuC-xUTIM1Sz92kGvrTUc,19643
+pixeltable/datatransfer/remote.py,sha256=t-VeDIq62mX67xBoHLi8voa4V5XqMkr-8UZ-8DhIgk0,3100
+pixeltable/env.py,sha256=OEZv6NS8Z41rdCx73Md5j78ImnKaZf3YhdAexFJR7gw,21381
 pixeltable/exceptions.py,sha256=MSP9zeL0AmXT93XqjdvgGN4rzno1_KRrGriq6hpemnw,376
 pixeltable/exec/__init__.py,sha256=RK7SKvrQ7Ky3G_LXDP4Bf7lHmMM_uYZl8dJaZYs0FjY,454
 pixeltable/exec/aggregation_node.py,sha256=cf6rVAgrGh_uaMrCIgXJIwQTmbcboJlnrH_MmPIQSd0,3321
@@ -32,14 +35,14 @@ pixeltable/exprs/__init__.py,sha256=7dwrdk-NpF66OT-m5yNtFEhq-o1T476dnXHjluw2K1s,
 pixeltable/exprs/arithmetic_expr.py,sha256=sWBYCBKI6IHj9ASwDcm2BlkQ5gleVtKtmpiPvzFNBJM,4386
 pixeltable/exprs/array_slice.py,sha256=VmWc6iFusrM85MjyEBBCfXG1Jnt8-Gr6-J88BXxNoOE,2131
 pixeltable/exprs/column_property_ref.py,sha256=0PHiBys0fxe2LgjaMId5UHob4E-ZggyPLnnW41RgA0E,2706
-pixeltable/exprs/column_ref.py,sha256=
-pixeltable/exprs/comparison.py,sha256=
+pixeltable/exprs/column_ref.py,sha256=5GoddEyH1nLeeKYqX2g2WOBoCqnmghwt3Hg3D6JdzvI,5430
+pixeltable/exprs/comparison.py,sha256=hP3M_lMWcFgENBICFosZPw2lRm1R6_qM_O9bKPmWJGI,4789
 pixeltable/exprs/compound_predicate.py,sha256=Gh22MKi625m5A_RunVRd-a1XFi-fitikqBVz2VNXKrs,3830
-pixeltable/exprs/data_row.py,sha256=
-pixeltable/exprs/expr.py,sha256=
+pixeltable/exprs/data_row.py,sha256=TN4WeAz7D3xiSCDYgCinkVpTaSkkJhGgNkNqviBHVqs,8425
+pixeltable/exprs/expr.py,sha256=NvkJmxAbAxfQYfoegXyBaD0_fm-3lnotUrvf_R6Z0Gk,24163
 pixeltable/exprs/expr_set.py,sha256=Q64Q2yI0CTq2Ma_E-BUYlMotSstVuMm4OFZnBCedHRk,1222
-pixeltable/exprs/function_call.py,sha256=
-pixeltable/exprs/globals.py,sha256=
+pixeltable/exprs/function_call.py,sha256=d3TXU2MbfgQyth1-GEeZJcEZs_BX9548gN5TCwGLNX0,17118
+pixeltable/exprs/globals.py,sha256=KhK4xwkLHv4NsXXcLdjRu2OFSvEnlC7GG-8Gs_IbQtI,1858
 pixeltable/exprs/image_member_access.py,sha256=KSYdTIaLh53dNRjv3SJFchPMPo7o5diJSQkV1NsyB4Y,3547
 pixeltable/exprs/in_predicate.py,sha256=burxrBCH1MXqU-wrNWJvD0PRGzJdWy85intOSftQK54,3696
 pixeltable/exprs/inline_array.py,sha256=293WuUEhYXrcp8-AnPDVIWQBPQMrPviB88A619Ls_Es,4499
@@ -52,58 +55,60 @@ pixeltable/exprs/object_ref.py,sha256=eTcx84aWRI59fIiGvbdv3_cfL0XW4xEFQ4lwpLpJkM
 pixeltable/exprs/predicate.py,sha256=OSDgjfSqiK7J_5GZMUXMvjfyomKEGi0JNxeB073SGXw,1859
 pixeltable/exprs/row_builder.py,sha256=cpQa7GHR2dZYxhCAwZBfz-MqO0oP-NS44mAYoVUOt7A,15662
 pixeltable/exprs/rowid_ref.py,sha256=74w4rEy21YysTVbyKNc3op-pYFqDAx8VJdtl7ZPpxHs,4268
-pixeltable/exprs/similarity_expr.py,sha256=
+pixeltable/exprs/similarity_expr.py,sha256=IvSeUFMjyj-ZFZTae8UJKa2YlDGn6512RyTIvLIcG7w,2829
 pixeltable/exprs/type_cast.py,sha256=JMg8p1qYoFfiAXfJPSbTEnfrK7lRO_JMaqlPHOrhNQU,1793
 pixeltable/exprs/variable.py,sha256=Kg_O4ytcHYZFijIyMHYBJn063cTKU1-YE583FAz8Qaw,1361
 pixeltable/ext/__init__.py,sha256=0uugfuME1FybVo-MdxaVNGagRjhcvNTnv5MZUem6Cyo,269
-pixeltable/ext/functions/
+pixeltable/ext/functions/whisperx.py,sha256=CnpSPZJgufXa01vgUubVkyxQuZIdublJzkwbm5kS1YQ,1078
+pixeltable/ext/functions/yolox.py,sha256=JCG57Kbu0hNc1wRGDY3Mhhr6LQUY0fOgSSh-JV1plkw,3644
 pixeltable/func/__init__.py,sha256=LCB5iB2aZyMrX-hn_oNBYnB1SE60t50hE23av_v2F50,348
-pixeltable/func/aggregate_function.py,sha256=
+pixeltable/func/aggregate_function.py,sha256=fB2kwLaeBKWCAOihOUdACR-2XDbMN14zJQIpQBVP0Bk,9398
 pixeltable/func/callable_function.py,sha256=nEEmXFvd8TW9TBPbDnC3q8phj9ARokAsB-OJ1_hTkGo,4612
-pixeltable/func/expr_template_function.py,sha256=
+pixeltable/func/expr_template_function.py,sha256=7PNIzqkNOsCfU0KXCzrh_fczKMpZihu6FHV4NOgeVDM,4349
 pixeltable/func/function.py,sha256=fANPfafLwY0Mq6CF21VYbuF-hRxxsPLHn5waoj1mOGY,5611
 pixeltable/func/function_registry.py,sha256=1ibSQxEPm3Zd3r497vSlckQiDG9sfCnyJx3zcSm9t7c,11456
 pixeltable/func/globals.py,sha256=sEwn6lGgHMp6VQORb_P5qRd_-Q2_bUSqvqM9-XPN_ec,1483
-pixeltable/func/nos_function.py,sha256=HzIKK4XjTo1E6pML-EbhuX3u_LYibFWUuTkIxoIih7c,9650
 pixeltable/func/signature.py,sha256=erOPFuSuaxkXnRyFd3nCYLuprUWcYFox3Hk3ZKUPWfM,6697
-pixeltable/func/udf.py,sha256=
+pixeltable/func/udf.py,sha256=eu6Dt1SGlMvIBDNUxSc467-VBns5X12UU47FihlO_to,6540
 pixeltable/functions/__init__.py,sha256=uO-XB4QUbx3Jjs9GoaTXoJY2jn0AuXTL32YLkL_3_CI,3297
 pixeltable/functions/eval.py,sha256=_2FANDJqwtIDzTxtcKc0Yacf7b4LTAjyy2fPDw1FG_s,8404
-pixeltable/functions/fireworks.py,sha256=
-pixeltable/functions/huggingface.py,sha256
+pixeltable/functions/fireworks.py,sha256=dVGFZ6Kb2sahyySoLkSatLHsSnEXjAvLc0_hCHXUxXg,985
+pixeltable/functions/huggingface.py,sha256=SlzPc01TS4oeFhJlsbwYCtG3h_LF1r-CnWisIfO37qc,7224
 pixeltable/functions/image.py,sha256=xR_S_0BuX6Ycc5E366GpOfP0JptD7beQwHE_fLl8ZVM,431
-pixeltable/functions/openai.py,sha256=
+pixeltable/functions/openai.py,sha256=lDmp9v7k-TCHUyMsPVTIwfjygWEihrlhnnJuOhvek1I,8046
 pixeltable/functions/pil/image.py,sha256=6eNdMy2lZliFb8Lw12aBRUaShH07VEsFmhHSG21Jjt4,5992
 pixeltable/functions/string.py,sha256=RYOgZwifjC943YloEMi3PdflnjFqOYB2FddrUvzgtXs,516
-pixeltable/functions/together.py,sha256=
-pixeltable/functions/util.py,sha256=
-pixeltable/functions/video.py,sha256=
-pixeltable/globals.py,sha256
-pixeltable/index/__init__.py,sha256=
-pixeltable/index/base.py,sha256=
-pixeltable/index/
-pixeltable/
+pixeltable/functions/together.py,sha256=Iu2_pZjOH6jXq6Sdqmu8LqtG1ELTtBEQ4eSOdYkig9c,4249
+pixeltable/functions/util.py,sha256=DW7SwsftTRYo5h6iYl2Ec3WGmUA1D1Hrv4bPt-j7fKM,262
+pixeltable/functions/video.py,sha256=wanwNQ1dyo5mP0NZ5F6gf6MEMUX40lcHXkz04WyOsbA,3876
+pixeltable/globals.py,sha256=-2_ndBGunTXYm0A99nocSfKXRsQtiiesKVl2pBv0We4,14157
+pixeltable/index/__init__.py,sha256=XBwetNQQwnz0fiKwonOKhyy_U32l_cjt77kNvEIdjWs,102
+pixeltable/index/base.py,sha256=YAQ5Dz1mfI0dfu9rxWHWroE8TjB90yKfPtXAzoADq38,1568
+pixeltable/index/btree.py,sha256=NE4GYhcJWYJhdKyeHI0sQBlFvUaIgGOF9KLyCZOfFjE,1822
+pixeltable/index/embedding_index.py,sha256=AYphEggN-0B4GNrm4nMmi46CEtrQw5tguyk67BK2sWo,7627
+pixeltable/io/__init__.py,sha256=Io5ZLrcvRPeqRQwnU2iODvWMqkYroWErkbp7dLxE4Kk,197
+pixeltable/io/globals.py,sha256=ArnuWVhdKHT9ds84PZBl0-fszmEu-W62P4Su21c9oN4,2642
 pixeltable/io/hf_datasets.py,sha256=h5M1NkXOvEU8kaeT3AON1A18Vmhnc1lVo5a3TZ5AAic,8004
 pixeltable/io/pandas.py,sha256=cDHUDW2CGiBbsEJB9zE5vkXopTKxDdI-CZxNcp0OnIk,6478
 pixeltable/io/parquet.py,sha256=Z1b92gsPeCBf4P9_jgWWHAEHtu51nhuC8nSJgoKiywQ,8150
 pixeltable/iterators/__init__.py,sha256=kokLguXBY_nxBTqUiXZVvCxTv-vGsX4cK8tgIbsW5G8,108
-pixeltable/iterators/base.py,sha256=
-pixeltable/iterators/document.py,sha256=
-pixeltable/iterators/video.py,sha256=
-pixeltable/metadata/__init__.py,sha256=
+pixeltable/iterators/base.py,sha256=cnEh1tNN2JAxRzrLTg3dhun3N1oNQ8vifCm6ts3_UiE,1687
+pixeltable/iterators/document.py,sha256=netSCJatG8NcgbHZ69BvQVICdAorQlYi8OlcpqwLQD4,19436
+pixeltable/iterators/video.py,sha256=xtxODL1AfZwTfHVzWekhTCLA8gwTJIvJFdxC0KecD9Q,3836
+pixeltable/metadata/__init__.py,sha256=beGPpClpNaN7seM_AeKli5R33TSIkb7_mIBWoExT_5M,2228
 pixeltable/metadata/converters/convert_10.py,sha256=0mSGCn7vqtef63riPi9msUaaUvsSQIj-NFj9QFDYPdA,733
 pixeltable/metadata/converters/convert_12.py,sha256=g9rHTcKlDQZbM3_k4eBv0FBdWmQXHWCnMwx1_l6KpMI,107
 pixeltable/metadata/converters/convert_13.py,sha256=FEgOH5PKf05xVoCaioDDDHOSuoWPyBzodojmsSMMZ5U,1366
-pixeltable/metadata/
-pixeltable/
-pixeltable/
-pixeltable/
+pixeltable/metadata/converters/convert_14.py,sha256=UAWHEipZ-NrQtI5zZN1u9C5AD24ZpYXsdpC3Te0t-qE,402
+pixeltable/metadata/schema.py,sha256=ZYBbt_jESRrX7BAx35xKY1CpIgRuJnd2LJYo4MrPnn0,8399
+pixeltable/plan.py,sha256=A_kPsX3bjLyfYbeQ6eCgDbrb_Oldk4w8HdFRqRSDpPY,32653
+pixeltable/store.py,sha256=foQe9y8rRbl35f3naL7rbYVrD8LO00cmD53vWP2A9XI,18850
+pixeltable/tool/create_test_db_dump.py,sha256=17MdBqsSNj7j61w0Re9pS4aDIEdML_4hnE-uZJcEW4I,7537
 pixeltable/tool/create_test_video.py,sha256=OLfccymYReIpzE8osZn4rQvLXxxiPC_l0vc06U74hVM,2899
-pixeltable/type_system.py,sha256=
+pixeltable/type_system.py,sha256=nljZs4O_dsVFMs4aB3z7Szc9LgFtl5eOuloxJkk7tPE,29503
 pixeltable/utils/__init__.py,sha256=UYlrf6TIWJT0g-Hac0b34-dEk478B5Qx8dGco34YlIk,439
 pixeltable/utils/arrow.py,sha256=83_7aG5UR2qtTktw_otLkQs-RQbLk0VVM0JLJkbweNU,3692
-pixeltable/utils/
-pixeltable/utils/coco.py,sha256=mk1cxjKYQC0ABm2ZQ9SNu9MvBPECmmKvnASpxnFXdL0,5604
+pixeltable/utils/coco.py,sha256=ISpFBhR4eO1jOcg_SPb0thVI4KdS6H0RyNQauZIA5A4,7287
 pixeltable/utils/documents.py,sha256=Q7e5U2Hk0go83MdKzD_MIiMscwbcFsLMgRw2IU_vQF4,2213
 pixeltable/utils/filecache.py,sha256=UoNONG2VaAc2IBB0e3sQdsvyOPOes2XSDc5_CsA4qek,7839
 pixeltable/utils/help.py,sha256=cCnxJ4VP9MJ57iDqExmnDcM-JG3a1lw_q7g-D7bpSVI,252
@@ -113,7 +118,7 @@ pixeltable/utils/pytorch.py,sha256=BR4tgfUWw-2rwWTOgzXj5qdMBpe1Arpp5SK4ax6jjpk,3
 pixeltable/utils/s3.py,sha256=rkanuhk9DWvSfmbOLQW1j1Iov4sl2KhxGGKN-AJ8LSE,432
 pixeltable/utils/sql.py,sha256=5n5_OmXAGtqFdL6z5XvgnU-vlx6Ba6f1WJrO1ZwUle8,765
 pixeltable/utils/transactional_directory.py,sha256=UGzCrGtLR3hEEf8sYGuWBzLVFAEQml3vdIavigWeTBM,1349
-pixeltable-0.2.
-pixeltable-0.2.
-pixeltable-0.2.
-pixeltable-0.2.
+pixeltable-0.2.8.dist-info/LICENSE,sha256=0UNMmwuqWPC0xDY1NWMm4uNJ2_MyA1pnTNRgQTvuBiQ,746
+pixeltable-0.2.8.dist-info/METADATA,sha256=VKFyaYflvsFwUgiC1Y2iukgPHGI4W9sQpnLmUCRdMiQ,9806
+pixeltable-0.2.8.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+pixeltable-0.2.8.dist-info/RECORD,,
pixeltable/func/nos_function.py DELETED
@@ -1,202 +0,0 @@
-from typing import Optional, Any, Dict, List, Tuple
-import inspect
-import logging
-import sys
-
-import numpy as np
-
-from .signature import Signature, Parameter
-from .batched_function import BatchedFunction
-import pixeltable.env as env
-import pixeltable.type_system as ts
-import pixeltable.exceptions as excs
-
-
-_logger = logging.getLogger('pixeltable')
-
-class NOSFunction(BatchedFunction):
-    def __init__(self, model_spec: 'nos.common.ModelSpec', self_path: str):
-        return_type, param_types = self._convert_nos_signature(model_spec.signature)
-        param_names = list(model_spec.signature.get_inputs_spec().keys())
-        params = [
-            Parameter(name, col_type, inspect.Parameter.POSITIONAL_OR_KEYWORD, is_batched=False)
-            for name, col_type in zip(param_names, param_types)
-        ]
-        signature = Signature(return_type, params)
-
-        # construct inspect.Signature
-        py_params = [
-            inspect.Parameter(name, inspect.Parameter.POSITIONAL_OR_KEYWORD)
-            for name, col_type in zip(param_names, param_types)
-        ]
-        py_signature = inspect.Signature(py_params)
-        super().__init__(signature, py_signature=py_signature, self_path=self_path)
-
-        self.model_spec = model_spec
-        self.nos_param_names = model_spec.signature.get_inputs_spec().keys()
-        self.scalar_nos_param_names = []
-
-        # for models on images
-        self.img_param_pos: Optional[int] = None  # position of the image parameter in the function signature
-        # for multi-resolution image models
-        import nos
-        self.img_batch_params: List[nos.common.ObjectTypeInfo] = []
-        self.img_resolutions: List[int] = []  # for multi-resolution models
-        self.batch_size: Optional[int] = None
-        self.img_size: Optional[Tuple[int, int]] = None  # W, H
-
-        # try to determine batch_size and img_size
-        batch_size = sys.maxsize
-        for pos, (param_name, type_info) in enumerate(model_spec.signature.get_inputs_spec().items()):
-            if isinstance(type_info, list):
-                assert isinstance(type_info[0].base_spec(), nos.common.ImageSpec)
-                # this is a multi-resolution image model
-                self.img_batch_params = type_info
-                self.img_param_pos = pos
-                self.img_resolutions = [
-                    info.base_spec().shape[0] * info.base_spec().shape[1] for info in self.img_batch_params
-                ]
-            else:
-                if not type_info.is_batched():
-                    self.scalar_nos_param_names.append(param_name)
-                else:
-                    batch_size = min(batch_size, type_info.batch_size())
-
-                if isinstance(type_info.base_spec(), nos.common.ImageSpec):
-                    # this is a single-resolution image model
-                    if type_info.base_spec().shape is not None:
-                        self.img_size = (type_info.base_spec().shape[1], type_info.base_spec().shape[0])
-                    self.img_param_pos = pos
-
-        if batch_size != sys.maxsize:
-            self.batch_size = batch_size
-
-    def _convert_nos_type(
-            self, type_info: 'nos.common.spec.ObjectTypeInfo', ignore_shape: bool = False
-    ) -> ts.ColumnType:
-        """Convert ObjectTypeInfo to ColumnType"""
-        import nos
-        if type_info.base_spec() is None:
-            if type_info.base_type() == str:
-                return ts.StringType()
-            if type_info.base_type() == int:
-                return ts.IntType()
-            if type_info.base_type() == float:
-                return ts.FloatType()
-            if type_info.base_type() == bool:
-                return ts.BoolType()
-            else:
-                raise excs.Error(f'Cannot convert {type_info} to ColumnType')
-        elif isinstance(type_info.base_spec(), nos.common.ImageSpec):
-            size = None
-            if not ignore_shape and type_info.base_spec().shape is not None:
-                size = (type_info.base_spec().shape[1], type_info.base_spec().shape[0])
-            # TODO: set mode
-            return ts.ImageType(size=size)
-        elif isinstance(type_info.base_spec(), nos.common.TensorSpec):
-            return ts.ArrayType(shape=type_info.base_spec().shape, dtype=ts.FloatType())
-        else:
-            raise excs.Error(f'Cannot convert {type_info} to ColumnType')
-
-    def _convert_nos_signature(
-            self, sig: 'nos.common.spec.FunctionSignature') -> Tuple[ts.ColumnType, List[ts.ColumnType]]:
-        if len(sig.get_outputs_spec()) > 1:
-            return_type = ts.JsonType()
-        else:
-            return_type = self._convert_nos_type(list(sig.get_outputs_spec().values())[0])
-        param_types: List[ts.ColumnType] = []
-        for _, type_info in sig.get_inputs_spec().items():
-            # if there are multiple input shapes we leave them out of the ColumnType and deal with them in FunctionCall
-            if isinstance(type_info, list):
-                param_types.append(self._convert_nos_type(type_info[0], ignore_shape=True))
-            else:
-                param_types.append(self._convert_nos_type(type_info, ignore_shape=False))
-        return return_type, param_types
-
-    def is_multi_res_model(self) -> bool:
-        return self.img_param_pos is not None and len(self.img_batch_params) > 0
-
-    def get_batch_size(self, *args: Any, **kwargs: Any) -> Optional[int]:
-        if self.batch_size is not None or len(self.img_batch_params) == 0 or len(args) == 0:
-            return self.batch_size
-
-        # return batch size appropriate for the given image size
-        img_arg = args[self.img_param_pos]
-        input_res = img_arg.size[0] * img_arg.size[1]
-        batch_size, _ = self._select_model_res(input_res)
-        return batch_size
-
-    def _select_model_res(self, input_res: int) -> Tuple[int, Tuple[int, int]]:
-        """Select the model resolution that is closest to the input resolution
-        Returns: batch size, image size
-        """
-        deltas = [abs(res - input_res) for res in self.img_resolutions]
-        idx = deltas.index(min(deltas))
-        type_info = self.img_batch_params[idx]
-        return type_info.batch_size(), (type_info.base_spec().shape[1], type_info.base_spec().shape[0])
-
-    def invoke(self, arg_batches: List[List[Any]], kwarg_batches: Dict[str, List[Any]]) -> List[Any]:
-        # check that scalar args are constant
-
-        num_batch_rows = len(arg_batches[0])
-        # if we need to rescale image args, and we're doing object detection, we need to rescale the
-        # bounding boxes as well
-        scale_factors = np.ndarray((num_batch_rows, 2), dtype=np.float32)
-
-        target_res: Optional[Tuple[int, int]] = None
-        if self.img_param_pos is not None:
-            # for now, NOS will only receive RGB images
-            arg_batches[self.img_param_pos] = \
-                [img.convert('RGB') if img.mode != 'RGB' else img for img in arg_batches[self.img_param_pos]]
-            if self.is_multi_res_model():
-                # we need to select the resolution that is closest to the input resolution
-                sample_img = arg_batches[self.img_param_pos][0]
-                _, target_res = self._select_model_res(sample_img.size[0] * sample_img.size[1])
-            else:
-                target_res = self.img_size
-
-            if target_res is not None:
-                # we need to record the scale factors and resize the images;
-                # keep in mind that every image could have a different resolution
-                scale_factors[:, 0] = \
-                    [img.size[0] / target_res[0] for img in arg_batches[self.img_param_pos]]
-                scale_factors[:, 1] = \
-                    [img.size[1] / target_res[1] for img in arg_batches[self.img_param_pos]]
-                arg_batches[self.img_param_pos] = [
-                    # only resize if necessary
-                    img.resize(target_res) if img.size != target_res else img
-                    for img in arg_batches[self.img_param_pos]
-                ]
-
-        kwargs = {param_name: args for param_name, args in zip(self.nos_param_names, arg_batches)}
-        # fix up scalar parameters
-        kwargs.update(
-            {param_name: kwargs[param_name][0] for param_name in self.scalar_nos_param_names})
-        _logger.debug(
-            f'Running NOS task {self.model_spec.task}: '
-            f'batch_size={num_batch_rows} target_res={target_res}')
-        result = env.Env.get().nos_client.Run(
-            task=self.model_spec.task, model_name=self.model_spec.name, **kwargs)
-
-        import nos
-        if self.model_spec.task == nos.common.TaskType.OBJECT_DETECTION_2D and target_res is not None:
-            # we need to rescale the bounding boxes
-            result_bboxes = []  # workaround: result['bboxes'][*] is immutable
-            for i, bboxes in enumerate(result['bboxes']):
-                bboxes = np.copy(bboxes)
-                nos_batch_row_idx = i
-                bboxes[:, 0] *= scale_factors[nos_batch_row_idx, 0]
-                bboxes[:, 1] *= scale_factors[nos_batch_row_idx, 1]
-                bboxes[:, 2] *= scale_factors[nos_batch_row_idx, 0]
-                bboxes[:, 3] *= scale_factors[nos_batch_row_idx, 1]
-                result_bboxes.append(bboxes)
-            result['bboxes'] = result_bboxes
-
-        if len(result) == 1:
-            key = list(result.keys())[0]
-            row_results = result[key]
-        else:
-            # we rearrange result into one dict per row
-            row_results = [{k: v[i].tolist() for k, v in result.items()} for i in range(num_batch_rows)]
-        return row_results
-
pixeltable/utils/clip.py DELETED
@@ -1,18 +0,0 @@
-import numpy as np
-import PIL.Image
-
-import pixeltable.func as func
-from pixeltable.env import Env
-
-
-def embed_image(img: PIL.Image.Image) -> np.ndarray:
-    from pixeltable.functions.nos.image_embedding import openai_clip
-    model_info = openai_clip.model_spec
-    result = Env.get().nos_client.Run(task=model_info.task, model_name=model_info.name, images=[img.resize((224, 224))])
-    return result['embedding'].squeeze(0)
-
-def embed_text(text: str) -> np.ndarray:
-    from pixeltable.functions.nos.text_embedding import openai_clip
-    model_info = openai_clip.model_spec
-    result = Env.get().nos_client.Run(task=model_info.task, model_name=model_info.name, texts=[text])
-    return result['embedding'].squeeze(0)
pixeltable-0.2.6.dist-info/METADATA DELETED
@@ -1,131 +0,0 @@
-Metadata-Version: 2.1
-Name: pixeltable
-Version: 0.2.6
-Summary: Pixeltable: The Multimodal AI Data Plane
-Author: Marcel Kornacker
-Author-email: marcelk@gmail.com
-Requires-Python: >=3.9,<4.0
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.9
-Classifier: Programming Language :: Python :: 3.10
-Classifier: Programming Language :: Python :: 3.11
-Classifier: Programming Language :: Python :: 3.12
-Requires-Dist: av (>=10.0.0)
-Requires-Dist: beautifulsoup4 (>=4.0.0,<5.0.0)
-Requires-Dist: cloudpickle (>=2.2.1,<3.0.0)
-Requires-Dist: ftfy (>=6.2.0,<7.0.0)
-Requires-Dist: jinja2 (>=3.1.3,<4.0.0)
-Requires-Dist: jmespath (>=1.0.1,<2.0.0)
-Requires-Dist: mistune (>=3.0.2,<4.0.0)
-Requires-Dist: numpy (>=1.25)
-Requires-Dist: opencv-python-headless (>=4.7.0.68,<5.0.0.0)
-Requires-Dist: pandas (>=2.0,<3.0)
-Requires-Dist: pgserver (==0.1.2)
-Requires-Dist: pgvector (>=0.2.1,<0.3.0)
-Requires-Dist: pillow (>=9.3.0)
-Requires-Dist: psutil (>=5.9.5,<6.0.0)
-Requires-Dist: psycopg2-binary (>=2.9.5,<3.0.0)
-Requires-Dist: pymupdf (>=1.24.1,<2.0.0)
-Requires-Dist: pyyaml (>=6.0.1,<7.0.0)
-Requires-Dist: requests (>=2.31.0,<3.0.0)
-Requires-Dist: setuptools (==69.1.1)
-Requires-Dist: sqlalchemy-utils (>=0.41.1,<0.42.0)
-Requires-Dist: sqlalchemy[mypy] (>=2.0.23,<3.0.0)
-Requires-Dist: tenacity (>=8.2,<9.0)
-Requires-Dist: tqdm (>=4.64.1,<5.0.0)
-Description-Content-Type: text/markdown
-
-<div align="center">
-<img src="https://raw.githubusercontent.com/pixeltable/pixeltable/master/docs/pixeltable-banner.png" alt="Pixeltable" width="45%" />
-
-# Unifying Data, Models, and Orchestration for AI Products
-
-[](https://opensource.org/licenses/Apache-2.0)
-
-[](https://github.com/pixeltable/pixeltable/actions)
-
-[Installation](https://pixeltable.github.io/pixeltable/getting-started/) | [Documentation](https://pixeltable.github.io/pixeltable/)
-</div>
-
-Pixeltable is a Python library that lets AI engineers and data scientists focus on exploration, modeling, and app development without dealing with the customary data plumbing.
-
-## What problems does Pixeltable solve?
-
-Today’s solutions for AI app development require extensive custom coding and infrastructure
-plumbing. Tracking lineage and versions between and across data transformations, models, and
-deployment is cumbersome. Pixeltable is a replacement for traditional data plumbing, providing
-a unified plane for data, models, and orchestration. It removes the data plumbing overhead in
-building and productionizing AI applications.
-
-## ⚡Quick Start
-Learn the basics of Pixeltable through interactive examples. View the notebooks on Google Colab or Kaggle, for free.
-
-### Pixeltable Basics
-In this tutorial, we'll survey how to create tables, populate them with data, and enhance them with built-in and user-defined transformations and AI operations.
-
-[](https://kaggle.com/kernels/welcome?src=https://github.com/pixeltable/pixeltable/blob/master/docs/tutorials/pixeltable-basics.ipynb)
-<a target="_blank" href="https://colab.research.google.com/github/pixeltable/pixeltable/blob/master/docs/tutorials/pixeltable-basics.ipynb"> <img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/> </a>
-
-
-## 💾 Installation
-Pixeltable works with Python 3.9, 3.10, 3.11, or 3.12 running on Linux, MacOS, or Windows.
-
-```
-pip install pixeltable
-```
-
-To verify that it's working:
-
-```
-import pixeltable as pxt
-pxt.init()
-```
-
-For more detailed installation instructions, see the
-[Getting Started with Pixeltable](https://pixeltable.github.io/pixeltable/getting-started/)
-guide. Then, check out the
-[Pixeltable Basics](https://pixeltable.github.io/pixeltable/tutorials/pixeltable-basics/)
-tutorial for a tour of its most important features.
-
-## Why should you use Pixeltable?
-
-- It gives you transparency and reproducibility
-  - All generated data is automatically recorded and versioned
-  - You will never need to re-run a workload because you lost track of the input data
-- It saves you money
-  - All data changes are automatically incremental
-  - You never need to re-run pipelines from scratch because you’re adding data
-- It integrates with any existing Python code or libraries
-  - Bring your ever-changing code and workloads
-  - You choose the models, tools, and AI practices (e.g., your embedding model for a vector index); Pixeltable orchestrates the data
-
-## Example Use Cases
-
-* Interact with video data at the frame level without having to think about frame extraction,
-  intermediate file storage, or storage space explosion.
-* Augment your data incrementally and interactively with built-in functions and UDFs, such as
-  image transformations, model inference, and visualizations, without having to think about data pipelines,
-  incremental updates, or capturing function output.
-* Interact with all the data relevant to your AI application (video, images, documents, audio, structured data, JSON) through
-  a simple dataframe-style API directly in Python. This includes:
-    * similarity search on embeddings, supported by high-dimensional vector indexing
-    * path expressions and transformations on JSON data
-    * PIL and OpenCV image operations
-    * assembling frames into videos
-* Perform keyword and image similarity search at the video frame level without having to worry about frame
-  storage.
-* Access all Pixeltable-resident data directly as a PyTorch dataset in your training scripts.
-* Understand the compute and storage costs of your data at the granularity of individual augmentations and
-  get cost projections before adding new data and new augmentations.
-* Rely on Pixeltable's automatic versioning and snapshot functionality to protect against regressions
-  and to ensure reproducibility.
-
-## Contributions & Feedback
-
-Are you experiencing issues or bugs with Pixeltable? File an [Issue](https://github.com/pixeltable/pixeltable/issues).
-</br>Do you want to contribute? Feel free to open a [PR](https://github.com/pixeltable/pixeltable/pulls).
-
-## :classical_building: License
-
-This library is licensed under the Apache 2.0 License.
-
File without changes
|
|
File without changes
|