clarifai 9.7.1__py3-none-any.whl → 9.7.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- clarifai/auth/__init__.py +6 -0
- clarifai/auth/helper.py +35 -36
- clarifai/auth/register.py +23 -0
- clarifai/{client → auth}/stub.py +10 -10
- clarifai/client/__init__.py +1 -4
- clarifai/client/app.py +483 -0
- clarifai/client/auth/__init__.py +4 -0
- clarifai/client/{abc.py → auth/abc.py} +2 -2
- clarifai/client/auth/helper.py +377 -0
- clarifai/client/auth/register.py +23 -0
- {clarifai_utils/client → clarifai/client/auth}/stub.py +10 -10
- clarifai/client/base.py +112 -0
- clarifai/client/dataset.py +290 -0
- clarifai/client/input.py +730 -0
- clarifai/client/lister.py +41 -0
- clarifai/client/model.py +218 -0
- clarifai/client/module.py +82 -0
- clarifai/client/user.py +125 -0
- clarifai/client/workflow.py +194 -0
- clarifai/datasets/upload/base.py +66 -0
- clarifai/datasets/upload/examples/README.md +31 -0
- clarifai/datasets/upload/examples/image_classification/cifar10/dataset.py +42 -0
- clarifai/datasets/upload/examples/image_classification/food-101/dataset.py +39 -0
- clarifai/datasets/upload/examples/text_classification/imdb_dataset/dataset.py +37 -0
- clarifai/{data_upload/datasets → datasets/upload}/features.py +4 -12
- clarifai/datasets/upload/image.py +156 -0
- clarifai/datasets/upload/loaders/README.md +49 -0
- clarifai/{data_upload/datasets/zoo → datasets/upload/loaders}/coco_captions.py +24 -21
- {clarifai_utils/data_upload/datasets/zoo → clarifai/datasets/upload/loaders}/coco_detection.py +46 -42
- clarifai/datasets/upload/loaders/coco_segmentation.py +166 -0
- clarifai/{data_upload/datasets/zoo → datasets/upload/loaders}/imagenet_classification.py +22 -12
- clarifai/{data_upload/datasets/zoo → datasets/upload/loaders}/xview_detection.py +44 -53
- clarifai/datasets/upload/text.py +50 -0
- clarifai/datasets/upload/utils.py +62 -0
- clarifai/errors.py +90 -0
- clarifai/urls/helper.py +16 -17
- clarifai/utils/logging.py +40 -0
- clarifai/utils/misc.py +33 -0
- clarifai/versions.py +6 -0
- {clarifai-9.7.1.dist-info → clarifai-9.7.2.dist-info}/LICENSE +1 -1
- clarifai-9.7.2.dist-info/METADATA +179 -0
- clarifai-9.7.2.dist-info/RECORD +350 -0
- clarifai_utils/auth/__init__.py +6 -0
- clarifai_utils/auth/helper.py +35 -36
- clarifai_utils/auth/register.py +23 -0
- clarifai_utils/auth/stub.py +127 -0
- clarifai_utils/client/__init__.py +1 -4
- clarifai_utils/client/app.py +483 -0
- clarifai_utils/client/auth/__init__.py +4 -0
- clarifai_utils/client/{abc.py → auth/abc.py} +2 -2
- clarifai_utils/client/auth/helper.py +377 -0
- clarifai_utils/client/auth/register.py +23 -0
- clarifai_utils/client/auth/stub.py +127 -0
- clarifai_utils/client/base.py +112 -0
- clarifai_utils/client/dataset.py +290 -0
- clarifai_utils/client/input.py +730 -0
- clarifai_utils/client/lister.py +41 -0
- clarifai_utils/client/model.py +218 -0
- clarifai_utils/client/module.py +82 -0
- clarifai_utils/client/user.py +125 -0
- clarifai_utils/client/workflow.py +194 -0
- clarifai_utils/datasets/upload/base.py +66 -0
- clarifai_utils/datasets/upload/examples/README.md +31 -0
- clarifai_utils/datasets/upload/examples/image_classification/cifar10/dataset.py +42 -0
- clarifai_utils/datasets/upload/examples/image_classification/food-101/dataset.py +39 -0
- clarifai_utils/datasets/upload/examples/text_classification/imdb_dataset/dataset.py +37 -0
- clarifai_utils/{data_upload/datasets → datasets/upload}/features.py +4 -12
- clarifai_utils/datasets/upload/image.py +156 -0
- clarifai_utils/datasets/upload/loaders/README.md +49 -0
- clarifai_utils/{data_upload/datasets/zoo → datasets/upload/loaders}/coco_captions.py +24 -21
- {clarifai/data_upload/datasets/zoo → clarifai_utils/datasets/upload/loaders}/coco_detection.py +46 -42
- clarifai_utils/datasets/upload/loaders/coco_segmentation.py +166 -0
- clarifai_utils/{data_upload/datasets/zoo → datasets/upload/loaders}/imagenet_classification.py +22 -12
- clarifai_utils/{data_upload/datasets/zoo → datasets/upload/loaders}/xview_detection.py +44 -53
- clarifai_utils/datasets/upload/text.py +50 -0
- clarifai_utils/datasets/upload/utils.py +62 -0
- clarifai_utils/errors.py +90 -0
- clarifai_utils/urls/helper.py +16 -17
- clarifai_utils/utils/logging.py +40 -0
- clarifai_utils/utils/misc.py +33 -0
- clarifai_utils/versions.py +6 -0
- clarifai/data_upload/README.md +0 -63
- clarifai/data_upload/convert_csv.py +0 -182
- clarifai/data_upload/datasets/base.py +0 -87
- clarifai/data_upload/datasets/image.py +0 -253
- clarifai/data_upload/datasets/text.py +0 -60
- clarifai/data_upload/datasets/zoo/README.md +0 -55
- clarifai/data_upload/datasets/zoo/coco_segmentation.py +0 -160
- clarifai/data_upload/examples/README.md +0 -5
- clarifai/data_upload/examples/image_classification/cifar10/dataset.py +0 -40
- clarifai/data_upload/examples/image_classification/food-101/dataset.py +0 -39
- clarifai/data_upload/examples/image_classification/food-101/images/beignets/1036242.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/beignets/1114182.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/beignets/2012944.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/beignets/2464389.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/beignets/478632.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/hamburger/1061270.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/hamburger/1202261.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/hamburger/1381751.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/hamburger/3289634.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/hamburger/862025.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/prime_rib/102197.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/prime_rib/2749372.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/prime_rib/2938268.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/prime_rib/3590861.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/prime_rib/746716.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/ramen/2955110.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/ramen/3208966.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/ramen/3270629.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/ramen/3424562.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/ramen/544680.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/annotations/2007_000464.xml +0 -39
- clarifai/data_upload/examples/image_detection/voc/annotations/2008_000853.xml +0 -28
- clarifai/data_upload/examples/image_detection/voc/annotations/2008_003182.xml +0 -54
- clarifai/data_upload/examples/image_detection/voc/annotations/2008_008526.xml +0 -67
- clarifai/data_upload/examples/image_detection/voc/annotations/2009_004315.xml +0 -28
- clarifai/data_upload/examples/image_detection/voc/annotations/2009_004382.xml +0 -28
- clarifai/data_upload/examples/image_detection/voc/annotations/2011_000430.xml +0 -28
- clarifai/data_upload/examples/image_detection/voc/annotations/2011_001610.xml +0 -46
- clarifai/data_upload/examples/image_detection/voc/annotations/2011_006412.xml +0 -99
- clarifai/data_upload/examples/image_detection/voc/annotations/2012_000690.xml +0 -43
- clarifai/data_upload/examples/image_detection/voc/dataset.py +0 -76
- clarifai/data_upload/examples/image_detection/voc/images/2007_000464.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/images/2008_000853.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/images/2008_003182.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/images/2008_008526.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/images/2009_004315.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/images/2009_004382.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/images/2011_000430.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/images/2011_001610.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/images/2011_006412.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/images/2012_000690.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/annotations/instances_val2017_subset.json +0 -5342
- clarifai/data_upload/examples/image_segmentation/coco/dataset.py +0 -107
- clarifai/data_upload/examples/image_segmentation/coco/images/000000074646.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/images/000000086956.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/images/000000166563.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/images/000000176857.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/images/000000182202.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/images/000000193245.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/images/000000384850.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/images/000000409630.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/images/000000424349.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/images/000000573008.jpg +0 -0
- clarifai/data_upload/examples/text_classification/imdb_dataset/dataset.py +0 -40
- clarifai/data_upload/examples.py +0 -17
- clarifai/data_upload/upload.py +0 -356
- clarifai/dataset_export/dataset_export_inputs.py +0 -205
- clarifai/listing/concepts.py +0 -37
- clarifai/listing/datasets.py +0 -37
- clarifai/listing/inputs.py +0 -111
- clarifai/listing/installed_module_versions.py +0 -40
- clarifai/listing/lister.py +0 -200
- clarifai/listing/models.py +0 -46
- clarifai/listing/module_versions.py +0 -42
- clarifai/listing/modules.py +0 -36
- clarifai/runners/base.py +0 -140
- clarifai/runners/example.py +0 -36
- clarifai-9.7.1.dist-info/METADATA +0 -99
- clarifai-9.7.1.dist-info/RECORD +0 -456
- clarifai_utils/data_upload/README.md +0 -63
- clarifai_utils/data_upload/convert_csv.py +0 -182
- clarifai_utils/data_upload/datasets/base.py +0 -87
- clarifai_utils/data_upload/datasets/image.py +0 -253
- clarifai_utils/data_upload/datasets/text.py +0 -60
- clarifai_utils/data_upload/datasets/zoo/README.md +0 -55
- clarifai_utils/data_upload/datasets/zoo/coco_segmentation.py +0 -160
- clarifai_utils/data_upload/examples/README.md +0 -5
- clarifai_utils/data_upload/examples/image_classification/cifar10/dataset.py +0 -40
- clarifai_utils/data_upload/examples/image_classification/food-101/dataset.py +0 -39
- clarifai_utils/data_upload/examples/image_classification/food-101/images/beignets/1036242.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/beignets/1114182.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/beignets/2012944.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/beignets/2464389.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/beignets/478632.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/hamburger/1061270.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/hamburger/1202261.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/hamburger/1381751.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/hamburger/3289634.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/hamburger/862025.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/prime_rib/102197.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/prime_rib/2749372.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/prime_rib/2938268.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/prime_rib/3590861.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/prime_rib/746716.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/ramen/2955110.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/ramen/3208966.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/ramen/3270629.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/ramen/3424562.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/ramen/544680.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/__init__.py +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/__init__.py +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2007_000464.xml +0 -39
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2008_000853.xml +0 -28
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2008_003182.xml +0 -54
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2008_008526.xml +0 -67
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2009_004315.xml +0 -28
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2009_004382.xml +0 -28
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2011_000430.xml +0 -28
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2011_001610.xml +0 -46
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2011_006412.xml +0 -99
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2012_000690.xml +0 -43
- clarifai_utils/data_upload/examples/image_detection/voc/dataset.py +0 -76
- clarifai_utils/data_upload/examples/image_detection/voc/images/2007_000464.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/images/2008_000853.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/images/2008_003182.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/images/2008_008526.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/images/2009_004315.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/images/2009_004382.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/images/2011_000430.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/images/2011_001610.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/images/2011_006412.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/images/2012_000690.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/__init__.py +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/__init__.py +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/annotations/instances_val2017_subset.json +0 -5342
- clarifai_utils/data_upload/examples/image_segmentation/coco/dataset.py +0 -107
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000074646.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000086956.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000166563.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000176857.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000182202.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000193245.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000384850.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000409630.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000424349.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000573008.jpg +0 -0
- clarifai_utils/data_upload/examples/text_classification/__init__.py +0 -0
- clarifai_utils/data_upload/examples/text_classification/imdb_dataset/__init__.py +0 -0
- clarifai_utils/data_upload/examples/text_classification/imdb_dataset/dataset.py +0 -40
- clarifai_utils/data_upload/examples.py +0 -17
- clarifai_utils/data_upload/upload.py +0 -356
- clarifai_utils/dataset_export/dataset_export_inputs.py +0 -205
- clarifai_utils/listing/__init__.py +0 -0
- clarifai_utils/listing/concepts.py +0 -37
- clarifai_utils/listing/datasets.py +0 -37
- clarifai_utils/listing/inputs.py +0 -111
- clarifai_utils/listing/installed_module_versions.py +0 -40
- clarifai_utils/listing/lister.py +0 -200
- clarifai_utils/listing/models.py +0 -46
- clarifai_utils/listing/module_versions.py +0 -42
- clarifai_utils/listing/modules.py +0 -36
- clarifai_utils/runners/__init__.py +0 -0
- clarifai_utils/runners/base.py +0 -140
- clarifai_utils/runners/example.py +0 -36
- /clarifai/{data_upload/__init__.py → cli.py} +0 -0
- /clarifai/{data_upload/datasets → datasets}/__init__.py +0 -0
- /clarifai/{data_upload/datasets/zoo → datasets/upload}/__init__.py +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/__init__.py +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/__init__.py +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/cifar_small_test.csv +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/cifar_small_train.csv +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_700.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_701.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_702.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_703.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_704.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_705.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_706.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_707.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_708.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_709.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/__init__.py +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/1420783.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/3287885.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/3617075.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/38052.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/39147.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/139558.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/1636096.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/2480925.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/3385808.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/3647386.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/1826869.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/2243245.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/259212.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/2842688.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/3035414.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/1545393.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/2427642.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/3520891.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/377566.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/503504.jpg +0 -0
- /clarifai/{data_upload/examples/image_detection → datasets/upload/examples/text_classification}/__init__.py +0 -0
- /clarifai/{data_upload/examples/image_detection/voc → datasets/upload/examples/text_classification/imdb_dataset}/__init__.py +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/text_classification/imdb_dataset/test.csv +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/text_classification/imdb_dataset/train.csv +0 -0
- /clarifai/{data_upload/examples/image_segmentation → datasets/upload/loaders}/__init__.py +0 -0
- /clarifai/{data_upload/examples/image_segmentation/coco → utils}/__init__.py +0 -0
- {clarifai-9.7.1.dist-info → clarifai-9.7.2.dist-info}/WHEEL +0 -0
- {clarifai-9.7.1.dist-info → clarifai-9.7.2.dist-info}/entry_points.txt +0 -0
- {clarifai-9.7.1.dist-info → clarifai-9.7.2.dist-info}/top_level.txt +0 -0
- /clarifai/data_upload/examples/text_classification/__init__.py → /clarifai_utils/cli.py +0 -0
- {clarifai/data_upload/examples/text_classification/imdb_dataset → clarifai_utils/datasets}/__init__.py +0 -0
- {clarifai/listing → clarifai_utils/datasets/upload}/__init__.py +0 -0
- {clarifai/runners → clarifai_utils/datasets/upload/examples/image_classification}/__init__.py +0 -0
- /clarifai_utils/{data_upload → datasets/upload/examples/image_classification/cifar10}/__init__.py +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/cifar_small_test.csv +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/cifar_small_train.csv +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_700.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_701.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_702.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_703.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_704.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_705.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_706.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_707.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_708.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_709.jpg +0 -0
- /clarifai_utils/{data_upload/datasets → datasets/upload/examples/image_classification/food-101}/__init__.py +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/1420783.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/3287885.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/3617075.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/38052.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/39147.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/139558.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/1636096.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/2480925.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/3385808.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/3647386.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/1826869.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/2243245.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/259212.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/2842688.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/3035414.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/1545393.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/2427642.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/3520891.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/377566.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/503504.jpg +0 -0
- /clarifai_utils/{data_upload/datasets/zoo → datasets/upload/examples/text_classification}/__init__.py +0 -0
- /clarifai_utils/{data_upload/examples/image_classification → datasets/upload/examples/text_classification/imdb_dataset}/__init__.py +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/text_classification/imdb_dataset/test.csv +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/text_classification/imdb_dataset/train.csv +0 -0
- /clarifai_utils/{data_upload/examples/image_classification/cifar10 → datasets/upload/loaders}/__init__.py +0 -0
- /clarifai_utils/{data_upload/examples/image_classification/food-101 → utils}/__init__.py +0 -0
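The bulk of this release is a restructuring: everything under clarifai/data_upload/ moves to clarifai/datasets/upload/, the dataset zoo becomes datasets/upload/loaders/, and the client grows clarifai/client/auth/ plus new app, dataset, input, model, module, user and workflow modules. A minimal import sketch of what the renames above imply, assuming the module names map one-to-one onto the new file locations (verify against the published 9.7.2 package):

# 9.7.1 paths (removed in this diff)
# from clarifai.data_upload.datasets.features import VisualSegmentationFeatures
# from clarifai.auth.helper import ClarifaiAuthHelper

# 9.7.2 paths implied by the renamed files listed above
from clarifai.datasets.upload.features import VisualSegmentationFeatures
from clarifai.client.auth.helper import ClarifaiAuthHelper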
@@ -1,107 +0,0 @@
-#! COCO 2017 Image Segmentation dataset
-
-import gc
-import os
-from functools import reduce
-
-import cv2
-import numpy as np
-from pycocotools import mask as maskUtils
-from pycocotools.coco import COCO
-
-from clarifai.data_upload.datasets.features import VisualSegmentationFeatures
-
-
-class COCOSegmentationDataset:
-  """COCO 2017 Image Segmentation Dataset.
-  url: https://cocodataset.org/#download
-  """
-
-  def __init__(self, split: str = "train"):
-    """
-    Inititalize dataset params.
-    Args:
-      split: "train" or "test"
-    """
-    self.split = split
-    self.image_dir = {"train": os.path.join(os.path.dirname(__file__), "images")}
-    self.annotations_file = {
-        "train":
-            os.path.join(os.path.dirname(__file__), "annotations/instances_val2017_subset.json")
-    }
-
-  def dataloader(self):
-    """
-    Transform COCO 2017 segmentation dataset into clarifai proto compatible
-    format to uplaod
-    Returns:
-      VisualSegmentationFeatures type generator.
-    """
-    coco = COCO(self.annotations_file[self.split])
-    categories = coco.loadCats(coco.getCatIds())
-    cat_id_map = {category["id"]: category["name"] for category in categories}
-    cat_img_ids = {}
-    for cat_id in list(cat_id_map.keys()):
-      cat_img_ids[cat_id] = coco.getImgIds(catIds=[cat_id])
-
-    img_ids = []
-    for i in list(cat_img_ids.values()):
-      img_ids.extend(i)
-
-    # Get the image information for the specified image IDs
-    image_info = coco.loadImgs(img_ids)
-    # Extract the file names from the image information
-    image_filenames = {img_id: info['file_name'] for info, img_id in zip(image_info, img_ids)}
-    #get annotations for each image id
-    for _id in set(img_ids):
-      annots = []  # polygons
-      class_names = []
-      labels = [i for i in list(filter(lambda x: _id in cat_img_ids[x], cat_img_ids))]
-      image_path = os.path.join(self.image_dir[self.split], image_filenames[_id])
-
-      image_height, image_width = cv2.imread(image_path).shape[:2]
-      for cat_id in labels:
-        annot_ids = coco.getAnnIds(imgIds=_id, catIds=[cat_id])
-
-        if len(annot_ids) > 0:
-          img_annotations = coco.loadAnns(annot_ids)
-          for ann in img_annotations:
-            # get polygons
-            if type(ann['segmentation']) == list:
-              for seg in ann['segmentation']:
-                poly = np.array(seg).reshape((int(len(seg) / 2), 2))
-                poly[:, 0], poly[:, 1] = poly[:, 0] / image_width, poly[:, 1] / image_height
-                annots.append(poly.tolist())  #[[x=col, y=row],...]
-                class_names.append(cat_id_map[cat_id])
-            else:  # seg: {"counts":[...]}
-              if type(ann['segmentation']['counts']) == list:
-                rle = maskUtils.frPyObjects([ann['segmentation']], image_height, image_width)
-              else:
-                rle = ann['segmentation']
-              mask = maskUtils.decode(rle)  #binary mask
-              #convert mask to polygons and add to annots
-              contours, _ = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
-              polygons = []
-              for cont in contours:
-                if cont.size >= 6:
-                  polygons.append(cont.astype(float).flatten().tolist())
-              # store polygons in (x,y) pairs
-              polygons_flattened = reduce(lambda x, y: x + y, polygons)
-              del polygons
-              del contours
-              del mask
-              gc.collect()
-
-              polygons = np.array(polygons_flattened).reshape((int(len(polygons_flattened) / 2),
-                                                               2))
-              polygons[:, 0] = polygons[:, 0] / image_width
-              polygons[:, 1] = polygons[:, 1] / image_height
-
-              annots.append(polygons.tolist())  #[[x=col, y=row],...,[x=col, y=row]]
-              class_names.append(cat_id_map[cat_id])
-        else:  # if no annotations for given image_id-cat_id pair
-          continue
-      assert len(class_names) == len(annots), f"Num classes must match num annotations\
-      for a single image. Found {len(class_names)} classes and {len(annots)} polygons."
-
-      yield VisualSegmentationFeatures(image_path, class_names, annots, id=_id)
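For reference, the heart of the removed loader above is coordinate normalization: every COCO polygon point is divided by the image width (x) and height (y) before being yielded in VisualSegmentationFeatures. A standalone illustration with made-up numbers (not taken from the diff):

import numpy as np

image_width, image_height = 640, 480
seg = [320.0, 240.0, 480.0, 120.0, 160.0, 360.0]  # COCO-style flat list [x1, y1, x2, y2, ...]

poly = np.array(seg).reshape((len(seg) // 2, 2))  # -> [[x, y], ...]
poly[:, 0], poly[:, 1] = poly[:, 0] / image_width, poly[:, 1] / image_height
print(poly.tolist())  # [[0.5, 0.5], [0.75, 0.25], [0.25, 0.75]]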
Binary file (×10)
File without changes (×2)
@@ -1,40 +0,0 @@
-#! IMDB 50k Movie Reviews dataset
-
-import csv
-import os
-
-from clarifai.data_upload.datasets.features import TextFeatures
-
-
-class IMDBMovieReviewsDataset:
-  """IMDB 50K Movie Reviews Dataset."""
-
-  def __init__(self, split: str = "train"):
-    """
-    Initialize dataset params.
-    Args:
-      data_dir: the local dataset directory.
-      split: "train" or "test"
-    """
-    self.split = split
-    self.data_dirs = {
-        "train": os.path.join(os.path.dirname(__file__), "train.csv"),
-        "test": os.path.join(os.path.dirname(__file__), "test.csv")
-    }
-
-  def dataloader(self):
-    """
-    Transform text data into clarifai proto compatible
-    format for upload.
-    Returns:
-      TextFeatures type generator.
-    """
-    ## Your preprocessing code here
-    with open(self.data_dirs[self.split]) as _file:
-      reader = csv.reader(_file)
-      next(reader, None)  # skip header
-      for review in reader:
-        yield TextFeatures(
-            text=review[0],  # text,
-            labels=review[1],  # sentiment,
-            id=None)
@@ -1,17 +0,0 @@
-#! Execute dataset upload using the `from_module` upload feature
-
-from clarifai.data_upload.upload import UploadConfig
-
-text_upload_obj = UploadConfig(
-    user_id="",
-    app_id="",
-    pat="",
-    dataset_id="",
-    task="visual_clf",
-    from_module="./examples/image_classification/cifar10",
-    split="train")
-## change the task and from_module arguments in UploadConfig() to upload
-## example food-101 dataset
-
-if __name__ == "__main__":
-  text_upload_obj.upload_to_clarifai()
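The removed example above drives the uploader from a local module directory via `from_module`. The same removed UploadConfig also accepted a `from_zoo` name matching a loader filename under data_upload/datasets/zoo/ (without the .py extension), per its docstring further down. A sketch of that variant with placeholder credentials, shown only to document the 9.7.1 behaviour being deleted:

from clarifai.data_upload.upload import UploadConfig

zoo_upload_obj = UploadConfig(
    user_id="",
    app_id="",
    pat="",
    dataset_id="",
    task="visual_segmentation",
    from_zoo="coco_segmentation",  # module name in datasets/zoo/, minus .py
    split="train")

if __name__ == "__main__":
  zoo_upload_obj.upload_to_clarifai()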
@@ -1,356 +0,0 @@
-#! Clarifai data upload
-
-import importlib
-import inspect
-import os
-import sys
-import time
-import uuid
-from concurrent.futures import ThreadPoolExecutor, as_completed
-from multiprocessing import cpu_count
-from typing import Iterator, List, Optional, Tuple, Union
-
-from clarifai_grpc.grpc.api import resources_pb2, service_pb2, service_pb2_grpc
-from clarifai_grpc.grpc.api.status import status_code_pb2, status_pb2
-from google.protobuf.json_format import MessageToDict
-from tqdm import tqdm
-from clarifai.auth.helper import ClarifaiAuthHelper
-from clarifai.client import create_stub
-from clarifai.data_upload.datasets.base import Chunker
-from clarifai.data_upload.datasets.image import (VisualClassificationDataset,
-                                                 VisualDetectionDataset, VisualSegmentationDataset)
-from clarifai.data_upload.datasets.text import TextClassificationDataset
-
-
-def load_dataset(module_dir: Union[str, os.PathLike], split: str) -> Iterator:
-  """
-  Validate and import dataset module data generator.
-  Args:
-    `module_dir`: relative path to the module directory
-    The directory must contain a `dataset.py` script and the data itself.
-    `split`: "train" or "val"/"test" dataset split
-  Module Directory Structure:
-  ---------------------------
-      <folder_name>/
-      ├──__init__.py
-      ├──<Your local dir dataset>/
-      └──dataset.py
-  dataset.py must implement a class named following the convention,
-  <dataset_name>Dataset and this class must have a dataloader()
-  generator method
-  """
-  sys.path.append(str(module_dir))
-
-  if not os.path.exists(os.path.join(module_dir, "__init__.py")):
-    with open(os.path.join(module_dir, "__init__.py"), "w"):
-      pass
-
-  import dataset  # dataset module
-
-  # get main module class
-  main_module_cls = None
-  for name, obj in dataset.__dict__.items():
-    if inspect.isclass(obj) and "Dataset" in name:
-      main_module_cls = obj
-    else:
-      continue
-
-  return main_module_cls(split).dataloader()
-
-
-def load_zoo_dataset(name: str, split: str) -> Iterator:
-  """
-  Get dataset generator object from dataset zoo.
-  Args:
-    `name`: dataset module name in datasets/zoo/.
-    `split`: "train" or "val"/"test" dataset split
-  Returns:
-    Data generator object
-  """
-  zoo_dataset = importlib.import_module(f"datasets.zoo.{name}")
-  # get main module class
-  main_module_cls = None
-  for name, obj in zoo_dataset.__dict__.items():
-    if inspect.isclass(obj) and "Dataset" in name:
-      main_module_cls = obj
-    else:
-      continue
-
-  return main_module_cls(split).dataloader()
-
-
-class UploadConfig:
-
-  def __init__(
-      self,
-      user_id: str,
-      app_id: str,
-      pat: str,
-      dataset_id: str,
-      task: str,
-      from_module: Optional[Union[str, os.PathLike]] = None,
-      from_zoo: Optional[str] = None,  # load dataset from zoo
-      split: str = "train",  # train or test/val
-      chunk_size: int = 128,
-      base_url: str = None):
-    """
-    Initialize upload configs.
-    Args:
-      `user_id`: Clarifai user id.
-      `app_id`: Clarifai app id.
-      `pat`: Clarifai PAT(Personal Access Token).
-      `dataset_id`: Clarifai dataset id (where data is to be uploaded).
-      `task`: either of `visual_clf`, `visual_detection`, `visual_segmentation` or `text_clf`.
-      `from_module`: Path to dataset module directory.
-        Should be left as None if `from_zoo` is to be used.
-      `from_zoo`: Name of dataset to upload from the zoo.
-        The name must match the dataset module name excluding the file extension.
-        Should be left as None if `from_module` is to be used.
-      `split`: Dataset split to upload. Either of train or test/val
-      `chunk_size`: size of chunks for parallel data upload.
-    """
-    self.USER_ID = user_id
-    self.APP_ID = app_id
-    self.PAT = pat
-    self.dataset_id = dataset_id
-    self.task = task
-    self.module_dir = from_module
-    self.zoo_dataset = from_zoo
-    self.split = split
-    self.chunk_size = min(128, chunk_size)  # limit max protos in a req
-    self.num_workers: int = min(10, cpu_count())  #15 req/sec rate limit
-    self.annot_num_workers = 4
-    self.max_retires = 10
-    self.__base: str = ""
-    if not base_url:
-      self.__base = "https://api.clarifai.com"
-    else:
-      self.__base = base_url
-
-    auth_helper = ClarifaiAuthHelper(
-        user_id=self.USER_ID, app_id=self.APP_ID, pat=self.PAT, base=self.__base)
-    self.STUB: service_pb2_grpc.V2Stub = create_stub(auth_helper)
-    self.user_app_id = auth_helper.get_user_app_id_proto()
-
-  def _upload_inputs(self, batch_input: List[resources_pb2.Input]) -> str:
-    """
-    Upload inputs to clarifai platform dataset.
-    Args:
-      batch_input: input batch protos
-    Returns:
-      input_job_id: Upload Input Job ID
-    """
-    input_job_id = uuid.uuid4().hex  # generate a unique id for this job
-    response = self.STUB.PostInputs(
-        service_pb2.PostInputsRequest(
-            user_app_id=self.user_app_id, inputs=batch_input, inputs_add_job_id=input_job_id),)
-    if response.status.code != status_code_pb2.SUCCESS:
-      try:
-        print(f"Post inputs failed, status: {response.inputs[0].status.details}")
-      except:
-        print(f"Post inputs failed, status: {response.status.details}")
-
-    return input_job_id
-
-  def _upload_annotations(self, batch_annot: List[resources_pb2.Annotation]
-                         ) -> Union[List[resources_pb2.Annotation], List[None]]:
-    """
-    Upload image annotations to clarifai detection dataset
-    Args:
-      batch_annot: annot batch protos
-    Returns:
-      retry_upload: failed annot upload
-    """
-    retry_upload = []  # those that fail to upload are stored for retries
-    response = self.STUB.PostAnnotations(
-        service_pb2.PostAnnotationsRequest(user_app_id=self.user_app_id, annotations=batch_annot),)
-
-    if response.status.code != status_code_pb2.SUCCESS:
-      try:
-        print(f"Post annotations failed, status: {response.annotations[0].status.details}")
-      except:
-        print(f"Post annotations failed, status: {response.status.details}")
-      finally:
-        retry_upload.extend(batch_annot)
-
-    return retry_upload
-
-  def _concurrent_annot_upload(self, annots: List[List[resources_pb2.Annotation]]
-                              ) -> Union[List[resources_pb2.Annotation], List[None]]:
-    """
-    Uploads annotations concurrently.
-    Args:
-      annots: annot protos
-    Returns:
-      retry_annot_upload: All failed annot protos during upload
-    """
-    annot_threads = []
-    retry_annot_upload = []
-
-    with ThreadPoolExecutor(max_workers=self.annot_num_workers) as executor:  # limit annot workers
-      annot_threads = [
-          executor.submit(self._upload_annotations, inp_batch) for inp_batch in annots
-      ]
-
-      for job in as_completed(annot_threads):
-        result = job.result()
-        if result:
-          retry_annot_upload.extend(result)
-
-    return retry_annot_upload
-
-  def _backoff_iterator(self) -> None:
-    """
-    Return iterator for exponential backoff intervals.
-    """
-    yield 0.1
-    for i in range(5, 11):
-      yield 0.01 * (2**i)
-    while True:
-      yield 0.01 * (2**10)  #10 sec
-
-  def _wait_for_inputs(self, input_job_id: str) -> bool:
-    """
-    Wait for inputs to be processed. Cancel Job if timeout > 30 minutes.
-    Args:
-      input_job_id: Upload Input Job ID
-    Returns:
-      True if inputs are processed, False otherwise
-    """
-    backoff_iterator = self._backoff_iterator()
-    max_retries = self.max_retires
-    start_time = time.time()
-    while True:
-      response = self.STUB.GetInputsAddJob(
-          service_pb2.GetInputsAddJobRequest(user_app_id=self.user_app_id, id=input_job_id),)
-
-      if time.time() - start_time > 60 * 30 or max_retries == 0:  # 30 minutes timeout
-        self.STUB.CancelInputsAddJob(
-            service_pb2.CancelInputsAddJobRequest(user_app_id=self.user_app_id, id=input_job_id),
-        )  #Cancel Job
-        return False
-      if response.status.code != status_code_pb2.SUCCESS:
-        max_retries -= 1
-        print(f"Get input job failed, status: {response.status.details}\n")
-        continue
-      if response.inputs_add_job.progress.in_progress_count == 0 and response.inputs_add_job.progress.pending_count == 0:
-        return True
-      else:
-        time.sleep(next(backoff_iterator))
-
-  def _delete_failed_inputs(self, input_ids: List[str]) -> Tuple[List[str], List[str]]:
-    """
-    Delete failed input ids from clarifai platform dataset.
-    Args:
-      input_ids: batch input ids
-    Returns:
-      success_inputs: upload success input ids
-      failed_inputs: upload failed input ids
-    """
-    success_status = status_pb2.Status(code=status_code_pb2.INPUT_DOWNLOAD_SUCCESS)
-    response = self.STUB.ListInputs(
-        service_pb2.ListInputsRequest(
-            ids=input_ids,
-            per_page=len(input_ids),
-            user_app_id=self.user_app_id,
-            status=success_status),)
-    response_dict = MessageToDict(response)
-    success_inputs = response_dict.get('inputs', [])
-
-    success_input_ids = [input.get('id') for input in success_inputs]
-    failed_input_ids = list(set(input_ids) - set(success_input_ids))
-    #delete failed inputs
-    self.STUB.DeleteInputs(
-        service_pb2.DeleteInputsRequest(user_app_id=self.user_app_id, ids=failed_input_ids),)
-
-    return success_input_ids, failed_input_ids
-
-  def _upload_inputs_annotations(
-      self, batch_input_ids: List[str]) -> Tuple[List[str], List[resources_pb2.Annotation]]:
-    """
-    Uploads batch of inputs and annotations concurrently to clarifai platform dataset.
-    Args:
-      batch_input_ids: batch input ids
-    Returns:
-      failed_input_ids: failed input ids
-      retry_annot_protos: failed annot protos
-    """
-    input_protos, _ = self.dataset_obj.get_protos(batch_input_ids)
-    input_job_id = self._upload_inputs(input_protos)
-    retry_annot_protos = []
-
-    self._wait_for_inputs(input_job_id)
-    success_input_ids, failed_input_ids = self._delete_failed_inputs(batch_input_ids)
-
-    if self.task in ["visual_detection", "visual_segmentation"]:
-      _, annotation_protos = self.dataset_obj.get_protos(success_input_ids)
-      chunked_annotation_protos = Chunker(annotation_protos, self.chunk_size).chunk()
-      retry_annot_protos.extend(self._concurrent_annot_upload(chunked_annotation_protos))
-
-    return failed_input_ids, retry_annot_protos
-
-  def _retry_uploads(self, failed_input_ids: List[str],
-                     retry_annot_protos: List[resources_pb2.Annotation]) -> None:
-    """
-    Retry failed uploads.
-    Args:
-      failed_input_ids: failed input ids
-      retry_annot_protos: failed annot protos
-    """
-    if failed_input_ids:
-      self._upload_inputs_annotations(failed_input_ids)
-    if retry_annot_protos:
-      chunked_annotation_protos = Chunker(retry_annot_protos, self.chunk_size).chunk()
-      _ = self._concurrent_annot_upload(chunked_annotation_protos)
-
-  def _data_upload(self, input_ids: List[str]) -> None:
-    """
-    Uploads inputs and annotations to clarifai platform dataset.
-    Args:
-      input_ids: input ids
-    """
-    chunk_input_ids = Chunker(input_ids, self.chunk_size).chunk()
-    with ThreadPoolExecutor(max_workers=self.num_workers) as executor:
-      with tqdm(total=len(chunk_input_ids), desc='Uploading Dataset') as progress:
-        # Submit all jobs to the executor and store the returned futures
-        futures = [
-            executor.submit(self._upload_inputs_annotations, batch_input_ids)
-            for batch_input_ids in chunk_input_ids
-        ]
-
-        for job in as_completed(futures):
-          retry_input_proto, retry_annot_protos = job.result()
-          self._retry_uploads(retry_input_proto, retry_annot_protos)
-          progress.update()
-
-  def upload_to_clarifai(self):
-    """
-    Execute data upload.
-    """
-    datagen_object = None
-    if self.module_dir is None and self.zoo_dataset is None:
-      raise Exception("One of `from_module` and `from_zoo` must be \
-      specified. Both can't be None or defined at the same time.")
-    elif self.module_dir is not None and self.zoo_dataset is not None:
-      raise Exception("Use either of `from_module` or `from_zoo` \
-      but NOT both.")
-    elif self.module_dir is not None:
-      datagen_object = load_dataset(self.module_dir, self.split)
-    else:
-      datagen_object = load_zoo_dataset(self.zoo_dataset, self.split)
-
-    if self.task == "text_clf":
-      self.dataset_obj = TextClassificationDataset(datagen_object, self.dataset_id, self.split)
-      self._data_upload(self.dataset_obj.input_ids)
-
-    elif self.task == "visual_detection":
-      self.dataset_obj = VisualDetectionDataset(datagen_object, self.dataset_id, self.split)
-      self._data_upload(self.dataset_obj.input_ids)  # TODO: get_img_ids or get_input_ids
-
-    elif self.task == "visual_segmentation":
-      self.dataset_obj = VisualSegmentationDataset(datagen_object, self.dataset_id, self.split)
-      self._data_upload(self.dataset_obj.input_ids)
-
-    else:  # visual-classification & visual-captioning
-      self.dataset_obj = VisualClassificationDataset(datagen_object, self.dataset_id, self.split)
-      self._data_upload(self.dataset_obj.input_ids)