clarifai 9.7.1__py3-none-any.whl → 9.7.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- clarifai/auth/__init__.py +6 -0
- clarifai/auth/helper.py +35 -36
- clarifai/auth/register.py +23 -0
- clarifai/{client → auth}/stub.py +10 -10
- clarifai/client/__init__.py +1 -4
- clarifai/client/app.py +483 -0
- clarifai/client/auth/__init__.py +4 -0
- clarifai/client/{abc.py → auth/abc.py} +2 -2
- clarifai/client/auth/helper.py +377 -0
- clarifai/client/auth/register.py +23 -0
- {clarifai_utils/client → clarifai/client/auth}/stub.py +10 -10
- clarifai/client/base.py +112 -0
- clarifai/client/dataset.py +290 -0
- clarifai/client/input.py +730 -0
- clarifai/client/lister.py +41 -0
- clarifai/client/model.py +218 -0
- clarifai/client/module.py +82 -0
- clarifai/client/user.py +125 -0
- clarifai/client/workflow.py +194 -0
- clarifai/datasets/upload/base.py +66 -0
- clarifai/datasets/upload/examples/README.md +31 -0
- clarifai/datasets/upload/examples/image_classification/cifar10/dataset.py +42 -0
- clarifai/datasets/upload/examples/image_classification/food-101/dataset.py +39 -0
- clarifai/datasets/upload/examples/text_classification/imdb_dataset/dataset.py +37 -0
- clarifai/{data_upload/datasets → datasets/upload}/features.py +4 -12
- clarifai/datasets/upload/image.py +156 -0
- clarifai/datasets/upload/loaders/README.md +49 -0
- clarifai/{data_upload/datasets/zoo → datasets/upload/loaders}/coco_captions.py +24 -21
- {clarifai_utils/data_upload/datasets/zoo → clarifai/datasets/upload/loaders}/coco_detection.py +46 -42
- clarifai/datasets/upload/loaders/coco_segmentation.py +166 -0
- clarifai/{data_upload/datasets/zoo → datasets/upload/loaders}/imagenet_classification.py +22 -12
- clarifai/{data_upload/datasets/zoo → datasets/upload/loaders}/xview_detection.py +44 -53
- clarifai/datasets/upload/text.py +50 -0
- clarifai/datasets/upload/utils.py +62 -0
- clarifai/errors.py +90 -0
- clarifai/urls/helper.py +16 -17
- clarifai/utils/logging.py +40 -0
- clarifai/utils/misc.py +33 -0
- clarifai/versions.py +6 -0
- {clarifai-9.7.1.dist-info → clarifai-9.7.2.dist-info}/LICENSE +1 -1
- clarifai-9.7.2.dist-info/METADATA +179 -0
- clarifai-9.7.2.dist-info/RECORD +350 -0
- clarifai_utils/auth/__init__.py +6 -0
- clarifai_utils/auth/helper.py +35 -36
- clarifai_utils/auth/register.py +23 -0
- clarifai_utils/auth/stub.py +127 -0
- clarifai_utils/client/__init__.py +1 -4
- clarifai_utils/client/app.py +483 -0
- clarifai_utils/client/auth/__init__.py +4 -0
- clarifai_utils/client/{abc.py → auth/abc.py} +2 -2
- clarifai_utils/client/auth/helper.py +377 -0
- clarifai_utils/client/auth/register.py +23 -0
- clarifai_utils/client/auth/stub.py +127 -0
- clarifai_utils/client/base.py +112 -0
- clarifai_utils/client/dataset.py +290 -0
- clarifai_utils/client/input.py +730 -0
- clarifai_utils/client/lister.py +41 -0
- clarifai_utils/client/model.py +218 -0
- clarifai_utils/client/module.py +82 -0
- clarifai_utils/client/user.py +125 -0
- clarifai_utils/client/workflow.py +194 -0
- clarifai_utils/datasets/upload/base.py +66 -0
- clarifai_utils/datasets/upload/examples/README.md +31 -0
- clarifai_utils/datasets/upload/examples/image_classification/cifar10/dataset.py +42 -0
- clarifai_utils/datasets/upload/examples/image_classification/food-101/dataset.py +39 -0
- clarifai_utils/datasets/upload/examples/text_classification/imdb_dataset/dataset.py +37 -0
- clarifai_utils/{data_upload/datasets → datasets/upload}/features.py +4 -12
- clarifai_utils/datasets/upload/image.py +156 -0
- clarifai_utils/datasets/upload/loaders/README.md +49 -0
- clarifai_utils/{data_upload/datasets/zoo → datasets/upload/loaders}/coco_captions.py +24 -21
- {clarifai/data_upload/datasets/zoo → clarifai_utils/datasets/upload/loaders}/coco_detection.py +46 -42
- clarifai_utils/datasets/upload/loaders/coco_segmentation.py +166 -0
- clarifai_utils/{data_upload/datasets/zoo → datasets/upload/loaders}/imagenet_classification.py +22 -12
- clarifai_utils/{data_upload/datasets/zoo → datasets/upload/loaders}/xview_detection.py +44 -53
- clarifai_utils/datasets/upload/text.py +50 -0
- clarifai_utils/datasets/upload/utils.py +62 -0
- clarifai_utils/errors.py +90 -0
- clarifai_utils/urls/helper.py +16 -17
- clarifai_utils/utils/logging.py +40 -0
- clarifai_utils/utils/misc.py +33 -0
- clarifai_utils/versions.py +6 -0
- clarifai/data_upload/README.md +0 -63
- clarifai/data_upload/convert_csv.py +0 -182
- clarifai/data_upload/datasets/base.py +0 -87
- clarifai/data_upload/datasets/image.py +0 -253
- clarifai/data_upload/datasets/text.py +0 -60
- clarifai/data_upload/datasets/zoo/README.md +0 -55
- clarifai/data_upload/datasets/zoo/coco_segmentation.py +0 -160
- clarifai/data_upload/examples/README.md +0 -5
- clarifai/data_upload/examples/image_classification/cifar10/dataset.py +0 -40
- clarifai/data_upload/examples/image_classification/food-101/dataset.py +0 -39
- clarifai/data_upload/examples/image_classification/food-101/images/beignets/1036242.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/beignets/1114182.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/beignets/2012944.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/beignets/2464389.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/beignets/478632.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/hamburger/1061270.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/hamburger/1202261.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/hamburger/1381751.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/hamburger/3289634.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/hamburger/862025.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/prime_rib/102197.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/prime_rib/2749372.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/prime_rib/2938268.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/prime_rib/3590861.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/prime_rib/746716.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/ramen/2955110.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/ramen/3208966.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/ramen/3270629.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/ramen/3424562.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/ramen/544680.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/annotations/2007_000464.xml +0 -39
- clarifai/data_upload/examples/image_detection/voc/annotations/2008_000853.xml +0 -28
- clarifai/data_upload/examples/image_detection/voc/annotations/2008_003182.xml +0 -54
- clarifai/data_upload/examples/image_detection/voc/annotations/2008_008526.xml +0 -67
- clarifai/data_upload/examples/image_detection/voc/annotations/2009_004315.xml +0 -28
- clarifai/data_upload/examples/image_detection/voc/annotations/2009_004382.xml +0 -28
- clarifai/data_upload/examples/image_detection/voc/annotations/2011_000430.xml +0 -28
- clarifai/data_upload/examples/image_detection/voc/annotations/2011_001610.xml +0 -46
- clarifai/data_upload/examples/image_detection/voc/annotations/2011_006412.xml +0 -99
- clarifai/data_upload/examples/image_detection/voc/annotations/2012_000690.xml +0 -43
- clarifai/data_upload/examples/image_detection/voc/dataset.py +0 -76
- clarifai/data_upload/examples/image_detection/voc/images/2007_000464.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/images/2008_000853.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/images/2008_003182.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/images/2008_008526.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/images/2009_004315.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/images/2009_004382.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/images/2011_000430.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/images/2011_001610.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/images/2011_006412.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/images/2012_000690.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/annotations/instances_val2017_subset.json +0 -5342
- clarifai/data_upload/examples/image_segmentation/coco/dataset.py +0 -107
- clarifai/data_upload/examples/image_segmentation/coco/images/000000074646.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/images/000000086956.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/images/000000166563.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/images/000000176857.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/images/000000182202.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/images/000000193245.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/images/000000384850.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/images/000000409630.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/images/000000424349.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/images/000000573008.jpg +0 -0
- clarifai/data_upload/examples/text_classification/imdb_dataset/dataset.py +0 -40
- clarifai/data_upload/examples.py +0 -17
- clarifai/data_upload/upload.py +0 -356
- clarifai/dataset_export/dataset_export_inputs.py +0 -205
- clarifai/listing/concepts.py +0 -37
- clarifai/listing/datasets.py +0 -37
- clarifai/listing/inputs.py +0 -111
- clarifai/listing/installed_module_versions.py +0 -40
- clarifai/listing/lister.py +0 -200
- clarifai/listing/models.py +0 -46
- clarifai/listing/module_versions.py +0 -42
- clarifai/listing/modules.py +0 -36
- clarifai/runners/base.py +0 -140
- clarifai/runners/example.py +0 -36
- clarifai-9.7.1.dist-info/METADATA +0 -99
- clarifai-9.7.1.dist-info/RECORD +0 -456
- clarifai_utils/data_upload/README.md +0 -63
- clarifai_utils/data_upload/convert_csv.py +0 -182
- clarifai_utils/data_upload/datasets/base.py +0 -87
- clarifai_utils/data_upload/datasets/image.py +0 -253
- clarifai_utils/data_upload/datasets/text.py +0 -60
- clarifai_utils/data_upload/datasets/zoo/README.md +0 -55
- clarifai_utils/data_upload/datasets/zoo/coco_segmentation.py +0 -160
- clarifai_utils/data_upload/examples/README.md +0 -5
- clarifai_utils/data_upload/examples/image_classification/cifar10/dataset.py +0 -40
- clarifai_utils/data_upload/examples/image_classification/food-101/dataset.py +0 -39
- clarifai_utils/data_upload/examples/image_classification/food-101/images/beignets/1036242.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/beignets/1114182.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/beignets/2012944.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/beignets/2464389.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/beignets/478632.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/hamburger/1061270.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/hamburger/1202261.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/hamburger/1381751.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/hamburger/3289634.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/hamburger/862025.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/prime_rib/102197.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/prime_rib/2749372.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/prime_rib/2938268.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/prime_rib/3590861.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/prime_rib/746716.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/ramen/2955110.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/ramen/3208966.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/ramen/3270629.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/ramen/3424562.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/ramen/544680.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/__init__.py +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/__init__.py +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2007_000464.xml +0 -39
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2008_000853.xml +0 -28
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2008_003182.xml +0 -54
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2008_008526.xml +0 -67
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2009_004315.xml +0 -28
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2009_004382.xml +0 -28
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2011_000430.xml +0 -28
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2011_001610.xml +0 -46
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2011_006412.xml +0 -99
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2012_000690.xml +0 -43
- clarifai_utils/data_upload/examples/image_detection/voc/dataset.py +0 -76
- clarifai_utils/data_upload/examples/image_detection/voc/images/2007_000464.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/images/2008_000853.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/images/2008_003182.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/images/2008_008526.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/images/2009_004315.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/images/2009_004382.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/images/2011_000430.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/images/2011_001610.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/images/2011_006412.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/images/2012_000690.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/__init__.py +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/__init__.py +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/annotations/instances_val2017_subset.json +0 -5342
- clarifai_utils/data_upload/examples/image_segmentation/coco/dataset.py +0 -107
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000074646.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000086956.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000166563.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000176857.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000182202.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000193245.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000384850.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000409630.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000424349.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000573008.jpg +0 -0
- clarifai_utils/data_upload/examples/text_classification/__init__.py +0 -0
- clarifai_utils/data_upload/examples/text_classification/imdb_dataset/__init__.py +0 -0
- clarifai_utils/data_upload/examples/text_classification/imdb_dataset/dataset.py +0 -40
- clarifai_utils/data_upload/examples.py +0 -17
- clarifai_utils/data_upload/upload.py +0 -356
- clarifai_utils/dataset_export/dataset_export_inputs.py +0 -205
- clarifai_utils/listing/__init__.py +0 -0
- clarifai_utils/listing/concepts.py +0 -37
- clarifai_utils/listing/datasets.py +0 -37
- clarifai_utils/listing/inputs.py +0 -111
- clarifai_utils/listing/installed_module_versions.py +0 -40
- clarifai_utils/listing/lister.py +0 -200
- clarifai_utils/listing/models.py +0 -46
- clarifai_utils/listing/module_versions.py +0 -42
- clarifai_utils/listing/modules.py +0 -36
- clarifai_utils/runners/__init__.py +0 -0
- clarifai_utils/runners/base.py +0 -140
- clarifai_utils/runners/example.py +0 -36
- /clarifai/{data_upload/__init__.py → cli.py} +0 -0
- /clarifai/{data_upload/datasets → datasets}/__init__.py +0 -0
- /clarifai/{data_upload/datasets/zoo → datasets/upload}/__init__.py +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/__init__.py +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/__init__.py +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/cifar_small_test.csv +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/cifar_small_train.csv +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_700.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_701.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_702.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_703.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_704.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_705.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_706.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_707.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_708.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_709.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/__init__.py +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/1420783.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/3287885.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/3617075.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/38052.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/39147.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/139558.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/1636096.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/2480925.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/3385808.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/3647386.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/1826869.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/2243245.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/259212.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/2842688.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/3035414.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/1545393.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/2427642.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/3520891.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/377566.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/503504.jpg +0 -0
- /clarifai/{data_upload/examples/image_detection → datasets/upload/examples/text_classification}/__init__.py +0 -0
- /clarifai/{data_upload/examples/image_detection/voc → datasets/upload/examples/text_classification/imdb_dataset}/__init__.py +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/text_classification/imdb_dataset/test.csv +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/text_classification/imdb_dataset/train.csv +0 -0
- /clarifai/{data_upload/examples/image_segmentation → datasets/upload/loaders}/__init__.py +0 -0
- /clarifai/{data_upload/examples/image_segmentation/coco → utils}/__init__.py +0 -0
- {clarifai-9.7.1.dist-info → clarifai-9.7.2.dist-info}/WHEEL +0 -0
- {clarifai-9.7.1.dist-info → clarifai-9.7.2.dist-info}/entry_points.txt +0 -0
- {clarifai-9.7.1.dist-info → clarifai-9.7.2.dist-info}/top_level.txt +0 -0
- /clarifai/data_upload/examples/text_classification/__init__.py → /clarifai_utils/cli.py +0 -0
- {clarifai/data_upload/examples/text_classification/imdb_dataset → clarifai_utils/datasets}/__init__.py +0 -0
- {clarifai/listing → clarifai_utils/datasets/upload}/__init__.py +0 -0
- {clarifai/runners → clarifai_utils/datasets/upload/examples/image_classification}/__init__.py +0 -0
- /clarifai_utils/{data_upload → datasets/upload/examples/image_classification/cifar10}/__init__.py +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/cifar_small_test.csv +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/cifar_small_train.csv +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_700.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_701.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_702.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_703.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_704.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_705.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_706.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_707.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_708.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_709.jpg +0 -0
- /clarifai_utils/{data_upload/datasets → datasets/upload/examples/image_classification/food-101}/__init__.py +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/1420783.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/3287885.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/3617075.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/38052.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/39147.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/139558.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/1636096.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/2480925.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/3385808.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/3647386.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/1826869.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/2243245.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/259212.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/2842688.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/3035414.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/1545393.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/2427642.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/3520891.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/377566.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/503504.jpg +0 -0
- /clarifai_utils/{data_upload/datasets/zoo → datasets/upload/examples/text_classification}/__init__.py +0 -0
- /clarifai_utils/{data_upload/examples/image_classification → datasets/upload/examples/text_classification/imdb_dataset}/__init__.py +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/text_classification/imdb_dataset/test.csv +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/text_classification/imdb_dataset/train.csv +0 -0
- /clarifai_utils/{data_upload/examples/image_classification/cifar10 → datasets/upload/loaders}/__init__.py +0 -0
- /clarifai_utils/{data_upload/examples/image_classification/food-101 → utils}/__init__.py +0 -0
|
@@ -0,0 +1,290 @@
|
|
|
1
|
+
from concurrent.futures import ThreadPoolExecutor, as_completed
|
|
2
|
+
from multiprocessing import cpu_count
|
|
3
|
+
from typing import List, Tuple, TypeVar, Union
|
|
4
|
+
|
|
5
|
+
from clarifai_grpc.grpc.api import resources_pb2, service_pb2
|
|
6
|
+
from clarifai_grpc.grpc.api.status import status_code_pb2, status_pb2
|
|
7
|
+
from google.protobuf.json_format import MessageToDict
|
|
8
|
+
from tqdm import tqdm
|
|
9
|
+
|
|
10
|
+
from clarifai.client.base import BaseClient
|
|
11
|
+
from clarifai.client.input import Inputs
|
|
12
|
+
from clarifai.client.lister import Lister
|
|
13
|
+
from clarifai.datasets.upload.image import (VisualClassificationDataset, VisualDetectionDataset,
|
|
14
|
+
VisualSegmentationDataset)
|
|
15
|
+
from clarifai.datasets.upload.text import TextClassificationDataset
|
|
16
|
+
from clarifai.datasets.upload.utils import load_dataloader, load_module_dataloader
|
|
17
|
+
from clarifai.errors import UserError
|
|
18
|
+
from clarifai.urls.helper import ClarifaiUrlHelper
|
|
19
|
+
from clarifai.utils.misc import Chunker
|
|
20
|
+
|
|
21
|
+
# Type variable constrained to the concrete dataset implementations that the
# upload helpers below know how to drive.
ClarifaiDatasetType = TypeVar('ClarifaiDatasetType', VisualClassificationDataset,
                              VisualDetectionDataset, VisualSegmentationDataset,
                              TextClassificationDataset)
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class Dataset(Lister, BaseClient):
|
|
27
|
+
"""Dataset is a class that provides access to Clarifai API endpoints related to Dataset information."""
|
|
28
|
+
|
|
29
|
+
def __init__(self, url_init: str = "", dataset_id: str = "", **kwargs):
|
|
30
|
+
"""Initializes a Dataset object.
|
|
31
|
+
|
|
32
|
+
Args:
|
|
33
|
+
url_init (str): The URL to initialize the dataset object.
|
|
34
|
+
dataset_id (str): The Dataset ID within the App to interact with.
|
|
35
|
+
**kwargs: Additional keyword arguments to be passed to the ClarifaiAuthHelper.
|
|
36
|
+
"""
|
|
37
|
+
if url_init != "" and dataset_id != "":
|
|
38
|
+
raise UserError("You can only specify one of url_init or dataset_id.")
|
|
39
|
+
if url_init == "" and dataset_id == "":
|
|
40
|
+
raise UserError("You must specify one of url_init or dataset_id.")
|
|
41
|
+
if url_init != "":
|
|
42
|
+
user_id, app_id, _, dataset_id, _ = ClarifaiUrlHelper.split_clarifai_url(url_init)
|
|
43
|
+
kwargs = {'user_id': user_id, 'app_id': app_id}
|
|
44
|
+
self.kwargs = {**kwargs, 'id': dataset_id}
|
|
45
|
+
self.dataset_info = resources_pb2.Dataset(**self.kwargs)
|
|
46
|
+
# Related to Dataset Upload
|
|
47
|
+
self.num_workers: int = min(10, cpu_count()) #15 req/sec rate limit
|
|
48
|
+
self.annot_num_workers = 4
|
|
49
|
+
self.max_retires = 10
|
|
50
|
+
self.chunk_size = 128 # limit max protos in a req
|
|
51
|
+
self.task = None # Upload dataset type
|
|
52
|
+
self.input_object = Inputs(user_id=self.user_id, app_id=self.app_id)
|
|
53
|
+
BaseClient.__init__(self, user_id=self.user_id, app_id=self.app_id)
|
|
54
|
+
Lister.__init__(self)
|
|
55
|
+
|
|
56
|
+
def _concurrent_annot_upload(self, annots: List[List[resources_pb2.Annotation]]
|
|
57
|
+
) -> Union[List[resources_pb2.Annotation], List[None]]:
|
|
58
|
+
"""Uploads annotations concurrently.
|
|
59
|
+
|
|
60
|
+
Args:
|
|
61
|
+
annots: annot protos
|
|
62
|
+
|
|
63
|
+
Returns:
|
|
64
|
+
retry_annot_upload: All failed annot protos during upload
|
|
65
|
+
"""
|
|
66
|
+
annot_threads = []
|
|
67
|
+
retry_annot_upload = []
|
|
68
|
+
|
|
69
|
+
with ThreadPoolExecutor(max_workers=self.annot_num_workers) as executor: # limit annot workers
|
|
70
|
+
annot_threads = [
|
|
71
|
+
executor.submit(self.input_object.upload_annotations, inp_batch, False)
|
|
72
|
+
for inp_batch in annots
|
|
73
|
+
]
|
|
74
|
+
|
|
75
|
+
for job in as_completed(annot_threads):
|
|
76
|
+
result = job.result()
|
|
77
|
+
if result:
|
|
78
|
+
retry_annot_upload.extend(result)
|
|
79
|
+
|
|
80
|
+
return retry_annot_upload
|
|
81
|
+
|
|
82
|
+
def _delete_failed_inputs(self, batch_input_ids: List[int],
                          dataset_obj: ClarifaiDatasetType) -> Tuple[List[int], List[int]]:
  """Delete failed input ids from clarifai platform dataset.

  Lists the batch's inputs filtered by download-success status, treats every
  id missing from that listing as failed, and removes the failed ones from
  the platform.

  Args:
    batch_input_ids: batch input ids (local indices into dataset_obj).
    dataset_obj: ClarifaiDataset object.

  Returns:
    success_inputs: upload success input ids (local indices).
    failed_inputs: upload failed input ids (local indices).
  """
  success_status = status_pb2.Status(code=status_code_pb2.INPUT_DOWNLOAD_SUCCESS)
  # Map platform input id -> local batch index so results can be translated back.
  input_ids = {dataset_obj.all_input_ids[idx]: idx for idx in batch_input_ids}
  response = self._grpc_request(
      self.STUB.ListInputs,
      service_pb2.ListInputsRequest(
          ids=list(input_ids.keys()),
          per_page=len(input_ids),
          user_app_id=self.user_app_id,
          status=success_status),
  )
  response_dict = MessageToDict(response)
  success_inputs = response_dict.get('inputs', [])

  success_input_ids = [inp.get('id') for inp in success_inputs]
  failed_input_ids = list(set(input_ids) - set(success_input_ids))
  # Delete failed inputs; skip the RPC entirely when nothing failed.
  if failed_input_ids:
    self._grpc_request(
        self.STUB.DeleteInputs,
        service_pb2.DeleteInputsRequest(user_app_id=self.user_app_id, ids=failed_input_ids),
    )
  return [input_ids[i] for i in success_input_ids], [input_ids[i] for i in failed_input_ids]
|
|
115
|
+
|
|
116
|
+
def _upload_inputs_annotations(self, batch_input_ids: List[int], dataset_obj: ClarifaiDatasetType
|
|
117
|
+
) -> Tuple[List[int], List[resources_pb2.Annotation]]:
|
|
118
|
+
"""Uploads batch of inputs and annotations concurrently to clarifai platform dataset.
|
|
119
|
+
|
|
120
|
+
Args:
|
|
121
|
+
batch_input_ids: batch input ids
|
|
122
|
+
dataset_obj: ClarifaiDataset object
|
|
123
|
+
|
|
124
|
+
Returns:
|
|
125
|
+
failed_input_ids: failed input ids
|
|
126
|
+
retry_annot_protos: failed annot protos
|
|
127
|
+
"""
|
|
128
|
+
input_protos, _ = dataset_obj.get_protos(batch_input_ids)
|
|
129
|
+
input_job_id = self.input_object.upload_inputs(inputs=input_protos, show_log=False)
|
|
130
|
+
retry_annot_protos = []
|
|
131
|
+
|
|
132
|
+
self.input_object._wait_for_inputs(input_job_id)
|
|
133
|
+
success_input_ids, failed_input_ids = self._delete_failed_inputs(batch_input_ids, dataset_obj)
|
|
134
|
+
|
|
135
|
+
if self.task in ["visual_detection", "visual_segmentation"]:
|
|
136
|
+
_, annotation_protos = dataset_obj.get_protos(success_input_ids)
|
|
137
|
+
chunked_annotation_protos = Chunker(annotation_protos, self.chunk_size).chunk()
|
|
138
|
+
retry_annot_protos.extend(self._concurrent_annot_upload(chunked_annotation_protos))
|
|
139
|
+
|
|
140
|
+
return failed_input_ids, retry_annot_protos
|
|
141
|
+
|
|
142
|
+
def _retry_uploads(self, failed_input_ids: List[int],
|
|
143
|
+
retry_annot_protos: List[resources_pb2.Annotation],
|
|
144
|
+
dataset_obj: ClarifaiDatasetType) -> None:
|
|
145
|
+
"""Retry failed uploads.
|
|
146
|
+
|
|
147
|
+
Args:
|
|
148
|
+
failed_input_ids: failed input ids
|
|
149
|
+
retry_annot_protos: failed annot protos
|
|
150
|
+
dataset_obj: ClarifaiDataset object
|
|
151
|
+
"""
|
|
152
|
+
if failed_input_ids:
|
|
153
|
+
self._upload_inputs_annotations(failed_input_ids, dataset_obj)
|
|
154
|
+
if retry_annot_protos:
|
|
155
|
+
chunked_annotation_protos = Chunker(retry_annot_protos, self.chunk_size).chunk()
|
|
156
|
+
_ = self._concurrent_annot_upload(chunked_annotation_protos)
|
|
157
|
+
|
|
158
|
+
def _data_upload(self, dataset_obj: ClarifaiDatasetType) -> None:
  """Push every input (and its annotations) of the dataset to the platform.

  Args:
    dataset_obj: ClarifaiDataset object wrapping the local data.
  """
  batches = Chunker(list(range(len(dataset_obj))), self.chunk_size).chunk()
  with ThreadPoolExecutor(max_workers=self.num_workers) as pool, \
       tqdm(total=len(batches), desc='Uploading Dataset') as progress:
    # Fan out one upload job per chunk, then retry failures as jobs complete.
    pending = [
        pool.submit(self._upload_inputs_annotations, batch, dataset_obj) for batch in batches
    ]
    for future in as_completed(pending):
      failed_ids, failed_annots = future.result()
      self._retry_uploads(failed_ids, failed_annots, dataset_obj)
      progress.update()
|
|
178
|
+
|
|
179
|
+
def upload_dataset(self,
                   task: str,
                   split: str,
                   module_dir: str = None,
                   dataset_loader: str = None,
                   chunk_size: int = 128) -> None:
  """Uploads a dataset to the app.

  Args:
    task (str): task type(text_clf, visual_classification, visual_detection, visual_segmentation, visual_captioning)
    split (str): split type(train, test, val)
    module_dir (str): path to the module directory
    dataset_loader (str): name of the dataset loader
    chunk_size (int): chunk size for concurrent upload of inputs and annotations

  Raises:
    UserError: if neither or both of `module_dir` and `dataset_loader` are given.
  """
  # Cap the chunk size at the instance default to respect the per-request proto limit.
  self.chunk_size = min(self.chunk_size, chunk_size)
  self.task = task
  datagen_object = None

  # Exactly one data source must be provided.
  if module_dir is None and dataset_loader is None:
    raise UserError("One of `module_dir` and `dataset_loader` must be \
    specified. Both can't be None or defined at the same time.")
  elif module_dir is not None and dataset_loader is not None:
    raise UserError("Use either of `module_dir` or `dataset_loader` \
    but NOT both.")
  elif module_dir is not None:
    datagen_object = load_module_dataloader(module_dir, split)
  else:
    datagen_object = load_dataloader(dataset_loader, split)

  if self.task == "text_clf":
    dataset_obj = TextClassificationDataset(datagen_object, self.id, split)

  elif self.task == "visual_detection":
    dataset_obj = VisualDetectionDataset(datagen_object, self.id, split)

  elif self.task == "visual_segmentation":
    dataset_obj = VisualSegmentationDataset(datagen_object, self.id, split)

  else:  # visual_classification & visual_captioning
    dataset_obj = VisualClassificationDataset(datagen_object, self.id, split)

  self._data_upload(dataset_obj)
|
|
222
|
+
|
|
223
|
+
def upload_from_csv(self,
                    csv_path: str,
                    input_type: str = 'text',
                    labels: bool = True,
                    chunk_size: int = 128) -> None:
  """Uploads dataset from a csv file.

  Args:
    csv_path (str): path to the csv file
    input_type (str): type of the dataset(text, image)
    labels (bool): True if csv file has labels column
    chunk_size (int): chunk size for concurrent upload of inputs and annotations

  Example:
    >>> from clarifai.client.dataset import Dataset
    >>> dataset = Dataset(user_id = 'user_id', app_id = 'demo_app', dataset_id = 'demo_dataset')
    >>> dataset.upload_from_csv(csv_path='csv_path', labels=True)

  Note: csv file should have either one(input) or two columns(input, labels).

  Raises:
    UserError: on an invalid input_type, or for the not-yet-supported image type.
  """
  if input_type not in ['image', 'text']:
    raise UserError('Invalid input type it should be image or text')
  if input_type == 'image':
    # Previously this path passed validation and then silently did nothing
    # (TODO never implemented); fail loudly until image-from-csv is supported.
    raise UserError('Uploading images from a csv file is not yet supported')
  # Cap the chunk size to respect the per-request proto limit.
  chunk_size = min(128, chunk_size)
  input_protos = self.input_object.get_text_input_from_csv(
      csv_path=csv_path, dataset_id=self.id, labels=labels)
  self.input_object._bulk_upload(inputs=input_protos, chunk_size=chunk_size)
|
|
250
|
+
|
|
251
|
+
def upload_from_folder(self,
                       folder_path: str,
                       input_type: str,
                       labels: bool = False,
                       chunk_size: int = 128) -> None:
  """Upload dataset from folder.

  Args:
    folder_path (str): Path to the folder containing images.
    input_type (str): type of the dataset(text, image)
    labels (bool): True if folder name is the label for the inputs
    chunk_size (int): chunk size for concurrent upload of inputs and annotations

  Example:
    >>> from clarifai.client.dataset import Dataset
    >>> dataset = Dataset(user_id = 'user_id', app_id = 'demo_app', dataset_id = 'demo_dataset')
    >>> dataset.upload_from_folder(folder_path='folder_path', input_type='text', labels=True)

  Note: The filename is used as the input_id.
  """
  if input_type not in ['image', 'text']:
    raise UserError('Invalid input type it should be image or text')
  # input_type is validated above, so a plain if/else covers both cases.
  if input_type == 'image':
    protos = self.input_object.get_image_inputs_from_folder(
        folder_path=folder_path, dataset_id=self.id, labels=labels)
  else:
    protos = self.input_object.get_text_inputs_from_folder(
        folder_path=folder_path, dataset_id=self.id, labels=labels)
  self.input_object._bulk_upload(inputs=protos, chunk_size=chunk_size)
|
|
280
|
+
|
|
281
|
+
def __getattr__(self, name):
|
|
282
|
+
return getattr(self.dataset_info, name)
|
|
283
|
+
|
|
284
|
+
def __str__(self):
|
|
285
|
+
init_params = [param for param in self.kwargs.keys()]
|
|
286
|
+
attribute_strings = [
|
|
287
|
+
f"{param}={getattr(self.dataset_info, param)}" for param in init_params
|
|
288
|
+
if hasattr(self.dataset_info, param)
|
|
289
|
+
]
|
|
290
|
+
return f"Dataset Details: \n{', '.join(attribute_strings)}\n"
|