clarifai 9.7.1__py3-none-any.whl → 9.7.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- clarifai/auth/__init__.py +6 -0
- clarifai/auth/helper.py +35 -36
- clarifai/auth/register.py +23 -0
- clarifai/{client → auth}/stub.py +10 -10
- clarifai/client/__init__.py +1 -4
- clarifai/client/app.py +483 -0
- clarifai/client/auth/__init__.py +4 -0
- clarifai/client/{abc.py → auth/abc.py} +2 -2
- clarifai/client/auth/helper.py +377 -0
- clarifai/client/auth/register.py +23 -0
- {clarifai_utils/client → clarifai/client/auth}/stub.py +10 -10
- clarifai/client/base.py +112 -0
- clarifai/client/dataset.py +290 -0
- clarifai/client/input.py +730 -0
- clarifai/client/lister.py +41 -0
- clarifai/client/model.py +218 -0
- clarifai/client/module.py +82 -0
- clarifai/client/user.py +125 -0
- clarifai/client/workflow.py +194 -0
- clarifai/datasets/upload/base.py +66 -0
- clarifai/datasets/upload/examples/README.md +31 -0
- clarifai/datasets/upload/examples/image_classification/cifar10/dataset.py +42 -0
- clarifai/datasets/upload/examples/image_classification/food-101/dataset.py +39 -0
- clarifai/datasets/upload/examples/text_classification/imdb_dataset/dataset.py +37 -0
- clarifai/{data_upload/datasets → datasets/upload}/features.py +4 -12
- clarifai/datasets/upload/image.py +156 -0
- clarifai/datasets/upload/loaders/README.md +49 -0
- clarifai/{data_upload/datasets/zoo → datasets/upload/loaders}/coco_captions.py +24 -21
- {clarifai_utils/data_upload/datasets/zoo → clarifai/datasets/upload/loaders}/coco_detection.py +46 -42
- clarifai/datasets/upload/loaders/coco_segmentation.py +166 -0
- clarifai/{data_upload/datasets/zoo → datasets/upload/loaders}/imagenet_classification.py +22 -12
- clarifai/{data_upload/datasets/zoo → datasets/upload/loaders}/xview_detection.py +44 -53
- clarifai/datasets/upload/text.py +50 -0
- clarifai/datasets/upload/utils.py +62 -0
- clarifai/errors.py +90 -0
- clarifai/urls/helper.py +16 -17
- clarifai/utils/logging.py +40 -0
- clarifai/utils/misc.py +33 -0
- clarifai/versions.py +6 -0
- {clarifai-9.7.1.dist-info → clarifai-9.7.2.dist-info}/LICENSE +1 -1
- clarifai-9.7.2.dist-info/METADATA +179 -0
- clarifai-9.7.2.dist-info/RECORD +350 -0
- clarifai_utils/auth/__init__.py +6 -0
- clarifai_utils/auth/helper.py +35 -36
- clarifai_utils/auth/register.py +23 -0
- clarifai_utils/auth/stub.py +127 -0
- clarifai_utils/client/__init__.py +1 -4
- clarifai_utils/client/app.py +483 -0
- clarifai_utils/client/auth/__init__.py +4 -0
- clarifai_utils/client/{abc.py → auth/abc.py} +2 -2
- clarifai_utils/client/auth/helper.py +377 -0
- clarifai_utils/client/auth/register.py +23 -0
- clarifai_utils/client/auth/stub.py +127 -0
- clarifai_utils/client/base.py +112 -0
- clarifai_utils/client/dataset.py +290 -0
- clarifai_utils/client/input.py +730 -0
- clarifai_utils/client/lister.py +41 -0
- clarifai_utils/client/model.py +218 -0
- clarifai_utils/client/module.py +82 -0
- clarifai_utils/client/user.py +125 -0
- clarifai_utils/client/workflow.py +194 -0
- clarifai_utils/datasets/upload/base.py +66 -0
- clarifai_utils/datasets/upload/examples/README.md +31 -0
- clarifai_utils/datasets/upload/examples/image_classification/cifar10/dataset.py +42 -0
- clarifai_utils/datasets/upload/examples/image_classification/food-101/dataset.py +39 -0
- clarifai_utils/datasets/upload/examples/text_classification/imdb_dataset/dataset.py +37 -0
- clarifai_utils/{data_upload/datasets → datasets/upload}/features.py +4 -12
- clarifai_utils/datasets/upload/image.py +156 -0
- clarifai_utils/datasets/upload/loaders/README.md +49 -0
- clarifai_utils/{data_upload/datasets/zoo → datasets/upload/loaders}/coco_captions.py +24 -21
- {clarifai/data_upload/datasets/zoo → clarifai_utils/datasets/upload/loaders}/coco_detection.py +46 -42
- clarifai_utils/datasets/upload/loaders/coco_segmentation.py +166 -0
- clarifai_utils/{data_upload/datasets/zoo → datasets/upload/loaders}/imagenet_classification.py +22 -12
- clarifai_utils/{data_upload/datasets/zoo → datasets/upload/loaders}/xview_detection.py +44 -53
- clarifai_utils/datasets/upload/text.py +50 -0
- clarifai_utils/datasets/upload/utils.py +62 -0
- clarifai_utils/errors.py +90 -0
- clarifai_utils/urls/helper.py +16 -17
- clarifai_utils/utils/logging.py +40 -0
- clarifai_utils/utils/misc.py +33 -0
- clarifai_utils/versions.py +6 -0
- clarifai/data_upload/README.md +0 -63
- clarifai/data_upload/convert_csv.py +0 -182
- clarifai/data_upload/datasets/base.py +0 -87
- clarifai/data_upload/datasets/image.py +0 -253
- clarifai/data_upload/datasets/text.py +0 -60
- clarifai/data_upload/datasets/zoo/README.md +0 -55
- clarifai/data_upload/datasets/zoo/coco_segmentation.py +0 -160
- clarifai/data_upload/examples/README.md +0 -5
- clarifai/data_upload/examples/image_classification/cifar10/dataset.py +0 -40
- clarifai/data_upload/examples/image_classification/food-101/dataset.py +0 -39
- clarifai/data_upload/examples/image_classification/food-101/images/beignets/1036242.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/beignets/1114182.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/beignets/2012944.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/beignets/2464389.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/beignets/478632.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/hamburger/1061270.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/hamburger/1202261.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/hamburger/1381751.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/hamburger/3289634.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/hamburger/862025.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/prime_rib/102197.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/prime_rib/2749372.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/prime_rib/2938268.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/prime_rib/3590861.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/prime_rib/746716.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/ramen/2955110.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/ramen/3208966.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/ramen/3270629.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/ramen/3424562.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/ramen/544680.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/annotations/2007_000464.xml +0 -39
- clarifai/data_upload/examples/image_detection/voc/annotations/2008_000853.xml +0 -28
- clarifai/data_upload/examples/image_detection/voc/annotations/2008_003182.xml +0 -54
- clarifai/data_upload/examples/image_detection/voc/annotations/2008_008526.xml +0 -67
- clarifai/data_upload/examples/image_detection/voc/annotations/2009_004315.xml +0 -28
- clarifai/data_upload/examples/image_detection/voc/annotations/2009_004382.xml +0 -28
- clarifai/data_upload/examples/image_detection/voc/annotations/2011_000430.xml +0 -28
- clarifai/data_upload/examples/image_detection/voc/annotations/2011_001610.xml +0 -46
- clarifai/data_upload/examples/image_detection/voc/annotations/2011_006412.xml +0 -99
- clarifai/data_upload/examples/image_detection/voc/annotations/2012_000690.xml +0 -43
- clarifai/data_upload/examples/image_detection/voc/dataset.py +0 -76
- clarifai/data_upload/examples/image_detection/voc/images/2007_000464.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/images/2008_000853.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/images/2008_003182.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/images/2008_008526.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/images/2009_004315.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/images/2009_004382.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/images/2011_000430.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/images/2011_001610.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/images/2011_006412.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/images/2012_000690.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/annotations/instances_val2017_subset.json +0 -5342
- clarifai/data_upload/examples/image_segmentation/coco/dataset.py +0 -107
- clarifai/data_upload/examples/image_segmentation/coco/images/000000074646.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/images/000000086956.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/images/000000166563.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/images/000000176857.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/images/000000182202.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/images/000000193245.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/images/000000384850.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/images/000000409630.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/images/000000424349.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/images/000000573008.jpg +0 -0
- clarifai/data_upload/examples/text_classification/imdb_dataset/dataset.py +0 -40
- clarifai/data_upload/examples.py +0 -17
- clarifai/data_upload/upload.py +0 -356
- clarifai/dataset_export/dataset_export_inputs.py +0 -205
- clarifai/listing/concepts.py +0 -37
- clarifai/listing/datasets.py +0 -37
- clarifai/listing/inputs.py +0 -111
- clarifai/listing/installed_module_versions.py +0 -40
- clarifai/listing/lister.py +0 -200
- clarifai/listing/models.py +0 -46
- clarifai/listing/module_versions.py +0 -42
- clarifai/listing/modules.py +0 -36
- clarifai/runners/base.py +0 -140
- clarifai/runners/example.py +0 -36
- clarifai-9.7.1.dist-info/METADATA +0 -99
- clarifai-9.7.1.dist-info/RECORD +0 -456
- clarifai_utils/data_upload/README.md +0 -63
- clarifai_utils/data_upload/convert_csv.py +0 -182
- clarifai_utils/data_upload/datasets/base.py +0 -87
- clarifai_utils/data_upload/datasets/image.py +0 -253
- clarifai_utils/data_upload/datasets/text.py +0 -60
- clarifai_utils/data_upload/datasets/zoo/README.md +0 -55
- clarifai_utils/data_upload/datasets/zoo/coco_segmentation.py +0 -160
- clarifai_utils/data_upload/examples/README.md +0 -5
- clarifai_utils/data_upload/examples/image_classification/cifar10/dataset.py +0 -40
- clarifai_utils/data_upload/examples/image_classification/food-101/dataset.py +0 -39
- clarifai_utils/data_upload/examples/image_classification/food-101/images/beignets/1036242.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/beignets/1114182.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/beignets/2012944.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/beignets/2464389.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/beignets/478632.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/hamburger/1061270.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/hamburger/1202261.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/hamburger/1381751.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/hamburger/3289634.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/hamburger/862025.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/prime_rib/102197.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/prime_rib/2749372.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/prime_rib/2938268.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/prime_rib/3590861.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/prime_rib/746716.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/ramen/2955110.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/ramen/3208966.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/ramen/3270629.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/ramen/3424562.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/ramen/544680.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/__init__.py +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/__init__.py +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2007_000464.xml +0 -39
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2008_000853.xml +0 -28
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2008_003182.xml +0 -54
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2008_008526.xml +0 -67
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2009_004315.xml +0 -28
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2009_004382.xml +0 -28
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2011_000430.xml +0 -28
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2011_001610.xml +0 -46
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2011_006412.xml +0 -99
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2012_000690.xml +0 -43
- clarifai_utils/data_upload/examples/image_detection/voc/dataset.py +0 -76
- clarifai_utils/data_upload/examples/image_detection/voc/images/2007_000464.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/images/2008_000853.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/images/2008_003182.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/images/2008_008526.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/images/2009_004315.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/images/2009_004382.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/images/2011_000430.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/images/2011_001610.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/images/2011_006412.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/images/2012_000690.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/__init__.py +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/__init__.py +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/annotations/instances_val2017_subset.json +0 -5342
- clarifai_utils/data_upload/examples/image_segmentation/coco/dataset.py +0 -107
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000074646.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000086956.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000166563.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000176857.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000182202.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000193245.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000384850.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000409630.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000424349.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000573008.jpg +0 -0
- clarifai_utils/data_upload/examples/text_classification/__init__.py +0 -0
- clarifai_utils/data_upload/examples/text_classification/imdb_dataset/__init__.py +0 -0
- clarifai_utils/data_upload/examples/text_classification/imdb_dataset/dataset.py +0 -40
- clarifai_utils/data_upload/examples.py +0 -17
- clarifai_utils/data_upload/upload.py +0 -356
- clarifai_utils/dataset_export/dataset_export_inputs.py +0 -205
- clarifai_utils/listing/__init__.py +0 -0
- clarifai_utils/listing/concepts.py +0 -37
- clarifai_utils/listing/datasets.py +0 -37
- clarifai_utils/listing/inputs.py +0 -111
- clarifai_utils/listing/installed_module_versions.py +0 -40
- clarifai_utils/listing/lister.py +0 -200
- clarifai_utils/listing/models.py +0 -46
- clarifai_utils/listing/module_versions.py +0 -42
- clarifai_utils/listing/modules.py +0 -36
- clarifai_utils/runners/__init__.py +0 -0
- clarifai_utils/runners/base.py +0 -140
- clarifai_utils/runners/example.py +0 -36
- /clarifai/{data_upload/__init__.py → cli.py} +0 -0
- /clarifai/{data_upload/datasets → datasets}/__init__.py +0 -0
- /clarifai/{data_upload/datasets/zoo → datasets/upload}/__init__.py +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/__init__.py +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/__init__.py +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/cifar_small_test.csv +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/cifar_small_train.csv +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_700.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_701.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_702.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_703.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_704.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_705.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_706.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_707.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_708.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_709.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/__init__.py +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/1420783.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/3287885.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/3617075.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/38052.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/39147.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/139558.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/1636096.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/2480925.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/3385808.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/3647386.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/1826869.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/2243245.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/259212.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/2842688.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/3035414.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/1545393.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/2427642.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/3520891.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/377566.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/503504.jpg +0 -0
- /clarifai/{data_upload/examples/image_detection → datasets/upload/examples/text_classification}/__init__.py +0 -0
- /clarifai/{data_upload/examples/image_detection/voc → datasets/upload/examples/text_classification/imdb_dataset}/__init__.py +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/text_classification/imdb_dataset/test.csv +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/text_classification/imdb_dataset/train.csv +0 -0
- /clarifai/{data_upload/examples/image_segmentation → datasets/upload/loaders}/__init__.py +0 -0
- /clarifai/{data_upload/examples/image_segmentation/coco → utils}/__init__.py +0 -0
- {clarifai-9.7.1.dist-info → clarifai-9.7.2.dist-info}/WHEEL +0 -0
- {clarifai-9.7.1.dist-info → clarifai-9.7.2.dist-info}/entry_points.txt +0 -0
- {clarifai-9.7.1.dist-info → clarifai-9.7.2.dist-info}/top_level.txt +0 -0
- /clarifai/data_upload/examples/text_classification/__init__.py → /clarifai_utils/cli.py +0 -0
- {clarifai/data_upload/examples/text_classification/imdb_dataset → clarifai_utils/datasets}/__init__.py +0 -0
- {clarifai/listing → clarifai_utils/datasets/upload}/__init__.py +0 -0
- {clarifai/runners → clarifai_utils/datasets/upload/examples/image_classification}/__init__.py +0 -0
- /clarifai_utils/{data_upload → datasets/upload/examples/image_classification/cifar10}/__init__.py +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/cifar_small_test.csv +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/cifar_small_train.csv +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_700.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_701.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_702.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_703.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_704.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_705.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_706.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_707.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_708.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_709.jpg +0 -0
- /clarifai_utils/{data_upload/datasets → datasets/upload/examples/image_classification/food-101}/__init__.py +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/1420783.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/3287885.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/3617075.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/38052.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/39147.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/139558.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/1636096.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/2480925.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/3385808.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/3647386.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/1826869.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/2243245.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/259212.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/2842688.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/3035414.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/1545393.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/2427642.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/3520891.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/377566.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/503504.jpg +0 -0
- /clarifai_utils/{data_upload/datasets/zoo → datasets/upload/examples/text_classification}/__init__.py +0 -0
- /clarifai_utils/{data_upload/examples/image_classification → datasets/upload/examples/text_classification/imdb_dataset}/__init__.py +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/text_classification/imdb_dataset/test.csv +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/text_classification/imdb_dataset/train.csv +0 -0
- /clarifai_utils/{data_upload/examples/image_classification/cifar10 → datasets/upload/loaders}/__init__.py +0 -0
- /clarifai_utils/{data_upload/examples/image_classification/food-101 → utils}/__init__.py +0 -0
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
from typing import Any, Callable, Dict, Generator
|
|
2
|
+
|
|
3
|
+
from clarifai_grpc.grpc.api.status import status_code_pb2
|
|
4
|
+
from google.protobuf.json_format import MessageToDict
|
|
5
|
+
|
|
6
|
+
from clarifai.client.base import BaseClient
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class Lister(BaseClient):
  """Lister class for obtaining paginated results from the Clarifai API."""

  def __init__(self, page_size: int = 16):
    """Initializes a Lister.

    Args:
        page_size (int): Default number of items to request per page.
    """
    self.default_page_size = page_size

  def list_all_pages_generator(
      self, endpoint: Callable, proto_message: Any,
      request_data: Dict[str, Any]) -> Generator[Dict[str, Any], None, None]:
    """Lists all pages of a resource.

    Args:
        endpoint (Callable): The endpoint to call.
        proto_message (Any): The proto message to use.
        request_data (dict): The request data to use.

    Yields:
        response_dict: The next item in the listing.
    """
    # Copy so pagination does not mutate the caller's request_data dict.
    request_data = dict(request_data)
    page = 1
    while True:
      request_data['page'] = page
      response = self._grpc_request(endpoint, proto_message(**request_data))
      dict_response = MessageToDict(response, preserving_proto_field_name=True)
      if response.status.code != status_code_pb2.SUCCESS:
        raise Exception(f"Listing failed with response {response!r}")
      # A response with only one key (the 'status' field) carries no items:
      # the listing is exhausted.
      if len(dict_response) == 1:
        break
      # NOTE(review): assumes the listed resources live under the second key of
      # the response, and that the per-item resource name is that key minus a
      # trailing 's' — confirm against the API's response shapes.
      listing_resource = list(dict_response.keys())[1]
      for item in dict_response[listing_resource]:
        yield self.process_response_keys(item, listing_resource[:-1])
      page += 1
|
clarifai/client/model.py
ADDED
|
@@ -0,0 +1,218 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import time
|
|
3
|
+
from typing import Dict, List
|
|
4
|
+
|
|
5
|
+
from clarifai_grpc.grpc.api import resources_pb2, service_pb2
|
|
6
|
+
from clarifai_grpc.grpc.api.resources_pb2 import Input
|
|
7
|
+
from clarifai_grpc.grpc.api.status import status_code_pb2
|
|
8
|
+
|
|
9
|
+
from clarifai.client.base import BaseClient
|
|
10
|
+
from clarifai.client.lister import Lister
|
|
11
|
+
from clarifai.errors import UserError
|
|
12
|
+
from clarifai.urls.helper import ClarifaiUrlHelper
|
|
13
|
+
from clarifai.utils.logging import get_logger
|
|
14
|
+
from clarifai.utils.misc import BackoffIterator
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class Model(Lister, BaseClient):
|
|
18
|
+
"""Model is a class that provides access to Clarifai API endpoints related to Model information."""
|
|
19
|
+
|
|
20
|
+
def __init__(self,
             url_init: str = "",
             model_id: str = "",
             model_version: Dict = None,
             output_config: Dict = None,
             **kwargs):
  """Initializes a Model object.

  Args:
      url_init (str): The URL to initialize the model object.
      model_id (str): The Model ID to interact with.
      model_version (dict): The Model Version to interact with.
          Defaults to {'id': ""} when not given.
      output_config (dict): The output config to interact with.
          Defaults to {'min_value': 0} when not given.
          min_value (float): The minimum value of the prediction confidence to filter.
          max_concepts (int): The maximum number of concepts to return.
          select_concepts (list[Concept]): The concepts to select.
          sample_ms (int): The number of milliseconds to sample.
      **kwargs: Additional keyword arguments to be passed to the ClarifaiAuthHelper.

  Raises:
      UserError: If both or neither of url_init and model_id are given.
  """
  # Use None sentinels instead of mutable default arguments: a dict default
  # would be shared across all calls and could be mutated by one of them.
  if model_version is None:
    model_version = {'id': ""}
  if output_config is None:
    output_config = {'min_value': 0}
  # Exactly one of url_init / model_id must identify the model.
  if url_init != "" and model_id != "":
    raise UserError("You can only specify one of url_init or model_id.")
  if url_init == "" and model_id == "":
    raise UserError("You must specify one of url_init or model_id.")
  if url_init != "":
    # Derive user/app/model identifiers from the Clarifai URL; the URL's
    # version (if any) overrides the model_version argument.
    user_id, app_id, _, model_id, model_version_id = ClarifaiUrlHelper.split_clarifai_url(
        url_init)
    model_version = {'id': model_version_id}
    kwargs = {'user_id': user_id, 'app_id': app_id}
  self.kwargs = {**kwargs, 'id': model_id, 'model_version': model_version,
                 'output_info': {'output_config': output_config}}
  self.model_info = resources_pb2.Model(**self.kwargs)
  self.logger = get_logger(logger_level="INFO")
  # NOTE(review): self.user_id / self.app_id appear to be resolved via the
  # base-class attribute machinery from self.kwargs — confirm in BaseClient.
  BaseClient.__init__(self, user_id=self.user_id, app_id=self.app_id)
  Lister.__init__(self)
|
|
54
|
+
|
|
55
|
+
def predict(self, inputs: List[Input]):
|
|
56
|
+
"""Predicts the model based on the given inputs.
|
|
57
|
+
|
|
58
|
+
Args:
|
|
59
|
+
inputs (list[Input]): The inputs to predict, must be less than 128.
|
|
60
|
+
"""
|
|
61
|
+
if len(inputs) > 128:
|
|
62
|
+
raise UserError("Too many inputs. Max is 128.") # TODO Use Chunker for inputs len > 128
|
|
63
|
+
|
|
64
|
+
request = service_pb2.PostModelOutputsRequest(
|
|
65
|
+
user_app_id=self.user_app_id,
|
|
66
|
+
model_id=self.id,
|
|
67
|
+
version_id=self.model_version.id,
|
|
68
|
+
inputs=inputs,
|
|
69
|
+
model=self.model_info)
|
|
70
|
+
|
|
71
|
+
start_time = time.time()
|
|
72
|
+
backoff_iterator = BackoffIterator()
|
|
73
|
+
while True:
|
|
74
|
+
response = self._grpc_request(self.STUB.PostModelOutputs, request)
|
|
75
|
+
|
|
76
|
+
if response.outputs and \
|
|
77
|
+
response.outputs[0].status.code == status_code_pb2.MODEL_DEPLOYING and \
|
|
78
|
+
time.time() - start_time < 60 * 10: # 10 minutes
|
|
79
|
+
self.logger.info(f"{self.id} model is still deploying, please wait...")
|
|
80
|
+
time.sleep(next(backoff_iterator))
|
|
81
|
+
continue
|
|
82
|
+
|
|
83
|
+
if response.status.code != status_code_pb2.SUCCESS:
|
|
84
|
+
raise Exception(f"Model Predict failed with response {response.status!r}")
|
|
85
|
+
else:
|
|
86
|
+
break
|
|
87
|
+
|
|
88
|
+
return response
|
|
89
|
+
|
|
90
|
+
def predict_by_filepath(self, filepath: str, input_type: str):
|
|
91
|
+
"""Predicts the model based on the given filepath.
|
|
92
|
+
|
|
93
|
+
Args:
|
|
94
|
+
filepath (str): The filepath to predict.
|
|
95
|
+
input_type (str): The type of input. Can be 'image', 'text', 'video' or 'audio.
|
|
96
|
+
|
|
97
|
+
Example:
|
|
98
|
+
>>> from clarifai.client.model import Model
|
|
99
|
+
>>> model = Model("model_url") # Example URL: https://clarifai.com/clarifai/main/models/general-image-recognition
|
|
100
|
+
or
|
|
101
|
+
>>> model = Model(model_id='model_id', user_id='user_id', app_id='app_id')
|
|
102
|
+
>>> model_prediction = model.predict_by_filepath('/path/to/image.jpg', 'image')
|
|
103
|
+
>>> model_prediction = model.predict_by_filepath('/path/to/text.txt', 'text')
|
|
104
|
+
"""
|
|
105
|
+
if input_type not in ['image', 'text', 'video', 'audio']:
|
|
106
|
+
raise UserError('Invalid input type it should be image, text, video or audio.')
|
|
107
|
+
if not os.path.isfile(filepath):
|
|
108
|
+
raise UserError('Invalid filepath.')
|
|
109
|
+
|
|
110
|
+
with open(filepath, "rb") as f:
|
|
111
|
+
file_bytes = f.read()
|
|
112
|
+
|
|
113
|
+
return self.predict_by_bytes(file_bytes, input_type)
|
|
114
|
+
|
|
115
|
+
def predict_by_bytes(self, input_bytes: bytes, input_type: str):
|
|
116
|
+
"""Predicts the model based on the given bytes.
|
|
117
|
+
|
|
118
|
+
Args:
|
|
119
|
+
input_bytes (bytes): File Bytes to predict on.
|
|
120
|
+
input_type (str): The type of input. Can be 'image', 'text', 'video' or 'audio'.
|
|
121
|
+
|
|
122
|
+
Example:
|
|
123
|
+
>>> from clarifai.client.model import Model
|
|
124
|
+
>>> model = Model("https://clarifai.com/anthropic/completion/models/claude-v2")
|
|
125
|
+
>>> model_prediction = model.predict_by_bytes(b'Write a tweet on future of AI', 'text')
|
|
126
|
+
"""
|
|
127
|
+
if input_type not in {'image', 'text', 'video', 'audio'}:
|
|
128
|
+
raise UserError('Invalid input type it should be image, text, video or audio.')
|
|
129
|
+
if not isinstance(input_bytes, bytes):
|
|
130
|
+
raise UserError('Invalid bytes.')
|
|
131
|
+
# TODO will obtain proto from input class
|
|
132
|
+
if input_type == "image":
|
|
133
|
+
input_proto = resources_pb2.Input(
|
|
134
|
+
data=resources_pb2.Data(image=resources_pb2.Image(base64=input_bytes)))
|
|
135
|
+
elif input_type == "text":
|
|
136
|
+
input_proto = resources_pb2.Input(
|
|
137
|
+
data=resources_pb2.Data(text=resources_pb2.Text(raw=input_bytes)))
|
|
138
|
+
elif input_type == "video":
|
|
139
|
+
input_proto = resources_pb2.Input(
|
|
140
|
+
data=resources_pb2.Data(video=resources_pb2.Video(base64=input_bytes)))
|
|
141
|
+
elif input_type == "audio":
|
|
142
|
+
input_proto = resources_pb2.Input(
|
|
143
|
+
data=resources_pb2.Data(audio=resources_pb2.Audio(base64=input_bytes)))
|
|
144
|
+
|
|
145
|
+
return self.predict(inputs=[input_proto])
|
|
146
|
+
|
|
147
|
+
def predict_by_url(self, url: str, input_type: str):
|
|
148
|
+
"""Predicts the model based on the given URL.
|
|
149
|
+
|
|
150
|
+
Args:
|
|
151
|
+
url (str): The URL to predict.
|
|
152
|
+
input_type (str): The type of input. Can be 'image', 'text', 'video' or 'audio.
|
|
153
|
+
|
|
154
|
+
Example:
|
|
155
|
+
>>> from clarifai.client.model import Model
|
|
156
|
+
>>> model = Model("model_url") # Example URL: https://clarifai.com/clarifai/main/models/general-image-recognition
|
|
157
|
+
or
|
|
158
|
+
>>> model = Model(model_id='model_id', user_id='user_id', app_id='app_id')
|
|
159
|
+
>>> model_prediction = model.predict_by_url('url', 'image')
|
|
160
|
+
"""
|
|
161
|
+
if input_type not in {'image', 'text', 'video', 'audio'}:
|
|
162
|
+
raise UserError('Invalid input type it should be image, text, video or audio.')
|
|
163
|
+
# TODO will be obtain proto from input class
|
|
164
|
+
if input_type == "image":
|
|
165
|
+
input_proto = resources_pb2.Input(
|
|
166
|
+
data=resources_pb2.Data(image=resources_pb2.Image(url=url)))
|
|
167
|
+
elif input_type == "text":
|
|
168
|
+
input_proto = resources_pb2.Input(data=resources_pb2.Data(text=resources_pb2.Text(url=url)))
|
|
169
|
+
elif input_type == "video":
|
|
170
|
+
input_proto = resources_pb2.Input(
|
|
171
|
+
data=resources_pb2.Data(video=resources_pb2.Video(url=url)))
|
|
172
|
+
elif input_type == "audio":
|
|
173
|
+
input_proto = resources_pb2.Input(
|
|
174
|
+
data=resources_pb2.Data(audio=resources_pb2.Audio(url=url)))
|
|
175
|
+
|
|
176
|
+
return self.predict(inputs=[input_proto])
|
|
177
|
+
|
|
178
|
+
def list_versions(self) -> List['Model']:
|
|
179
|
+
"""Lists all the versions for the model.
|
|
180
|
+
|
|
181
|
+
Returns:
|
|
182
|
+
List[Model]: A list of Model objects for the versions of the model.
|
|
183
|
+
|
|
184
|
+
Example:
|
|
185
|
+
>>> from clarifai.client.model import Model
|
|
186
|
+
>>> model = Model("model_url") # Example URL: https://clarifai.com/clarifai/main/models/general-image-recognition
|
|
187
|
+
or
|
|
188
|
+
>>> model = Model(model_id='model_id', user_id='user_id', app_id='app_id')
|
|
189
|
+
>>> all_model_versions = model.list_versions()
|
|
190
|
+
"""
|
|
191
|
+
request_data = dict(
|
|
192
|
+
user_app_id=self.user_app_id,
|
|
193
|
+
model_id=self.id,
|
|
194
|
+
per_page=self.default_page_size,
|
|
195
|
+
)
|
|
196
|
+
all_model_versions_info = list(
|
|
197
|
+
self.list_all_pages_generator(self.STUB.ListModelVersions,
|
|
198
|
+
service_pb2.ListModelVersionsRequest, request_data))
|
|
199
|
+
|
|
200
|
+
for model_version_info in all_model_versions_info:
|
|
201
|
+
model_version_info['id'] = model_version_info['model_version_id']
|
|
202
|
+
del model_version_info['model_version_id']
|
|
203
|
+
|
|
204
|
+
return [
|
|
205
|
+
Model(model_id=self.id, **dict(self.kwargs, model_version=model_version_info))
|
|
206
|
+
for model_version_info in all_model_versions_info
|
|
207
|
+
]
|
|
208
|
+
|
|
209
|
+
def __getattr__(self, name):
|
|
210
|
+
return getattr(self.model_info, name)
|
|
211
|
+
|
|
212
|
+
def __str__(self):
|
|
213
|
+
init_params = [param for param in self.kwargs.keys()]
|
|
214
|
+
attribute_strings = [
|
|
215
|
+
f"{param}={getattr(self.model_info, param)}" for param in init_params
|
|
216
|
+
if hasattr(self.model_info, param)
|
|
217
|
+
]
|
|
218
|
+
return f"Model Details: \n{', '.join(attribute_strings)}\n"
|
|
@@ -0,0 +1,82 @@
|
|
|
1
|
+
from typing import Dict, List
|
|
2
|
+
|
|
3
|
+
from clarifai_grpc.grpc.api import resources_pb2, service_pb2
|
|
4
|
+
|
|
5
|
+
from clarifai.client.base import BaseClient
|
|
6
|
+
from clarifai.client.lister import Lister
|
|
7
|
+
from clarifai.errors import UserError
|
|
8
|
+
from clarifai.urls.helper import ClarifaiUrlHelper
|
|
9
|
+
from clarifai.utils.logging import get_logger
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class Module(Lister, BaseClient):
  """Module is a class that provides access to Clarifai API endpoints related to Module information."""

  def __init__(self,
               url_init: str = "",
               module_id: str = "",
               module_version: Dict = None,
               **kwargs):
    """Initializes a Module object.

    Args:
        url_init (str): The URL to initialize the module object.
        module_id (str): The Module ID to interact with.
        module_version (dict): The Module Version to interact with. Defaults to {'id': ""}.
        **kwargs: Additional keyword arguments to be passed to the ClarifaiAuthHelper.

    Raises:
        UserError: If both or neither of url_init and module_id are specified.
    """
    # None sentinel instead of a mutable default argument, which would be
    # shared across every call to __init__.
    if module_version is None:
      module_version = {'id': ""}
    if url_init != "" and module_id != "":
      raise UserError("You can only specify one of url_init or module_id.")
    if url_init == "" and module_id == "":
      raise UserError("You must specify one of url_init or module_id.")
    if url_init != "":
      # A module UI URL carries user, app, module and version IDs; these
      # override any user_id/app_id passed via kwargs.
      user_id, app_id, module_id, module_version_id = ClarifaiUrlHelper.split_module_ui_url(
          url_init)
      module_version = {'id': module_version_id}
      kwargs = {'user_id': user_id, 'app_id': app_id}

    self.kwargs = {**kwargs, 'id': module_id, 'module_version': module_version}
    self.module_info = resources_pb2.Module(**self.kwargs)
    self.logger = get_logger(logger_level="INFO")
    BaseClient.__init__(self, user_id=self.user_id, app_id=self.app_id)
    Lister.__init__(self)

  def list_versions(self) -> List['Module']:
    """Lists all the module versions for the module.

    Returns:
        List[Module]: A list of Module objects for versions of the module.

    Example:
        >>> from clarifai.client.module import Module
        >>> module = Module(module_id='module_id', user_id='user_id', app_id='app_id')
        >>> all_module_versions = module.list_versions()
    """
    request_data = dict(
        user_app_id=self.user_app_id,
        module_id=self.id,
        per_page=self.default_page_size,
    )
    all_module_versions_info = list(
        self.list_all_pages_generator(self.STUB.ListModuleVersions,
                                      service_pb2.ListModuleVersionsRequest, request_data))

    # Rename the paginator's 'module_version_id' key to the 'id' key the
    # Module constructor expects for the module_version dict.
    for module_version_info in all_module_versions_info:
      module_version_info['id'] = module_version_info['module_version_id']
      del module_version_info['module_version_id']

    return [
        Module(module_id=self.id, **dict(self.kwargs, module_version=module_version_info))
        for module_version_info in all_module_versions_info
    ]

  def __getattr__(self, name):
    # Delegate unknown attribute lookups to the underlying Module proto.
    return getattr(self.module_info, name)

  def __str__(self):
    init_params = [param for param in self.kwargs.keys()]
    attribute_strings = [
        f"{param}={getattr(self.module_info, param)}" for param in init_params
        if hasattr(self.module_info, param)
    ]
    return f"Module Details: \n{', '.join(attribute_strings)}\n"
|
clarifai/client/user.py
ADDED
|
@@ -0,0 +1,125 @@
|
|
|
1
|
+
from typing import Any, Dict, List
|
|
2
|
+
|
|
3
|
+
from clarifai_grpc.grpc.api import resources_pb2, service_pb2 # noqa: F401
|
|
4
|
+
from clarifai_grpc.grpc.api.status import status_code_pb2
|
|
5
|
+
|
|
6
|
+
from clarifai.client.app import App
|
|
7
|
+
from clarifai.client.base import BaseClient
|
|
8
|
+
from clarifai.client.lister import Lister
|
|
9
|
+
from clarifai.utils.logging import get_logger
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class User(Lister, BaseClient):
  """User is a class that provides access to Clarifai API endpoints related to user information."""

  def __init__(self, user_id: str = "", **kwargs):
    """Initializes an User object.

    Args:
        user_id (str): The user ID for the user to interact with.
        **kwargs: Additional keyword arguments to be passed to the ClarifaiAuthHelper.
    """
    self.kwargs = {**kwargs, 'id': user_id}
    self.user_info = resources_pb2.User(**self.kwargs)
    self.logger = get_logger(logger_level="INFO", name=__name__)
    BaseClient.__init__(self, user_id=self.id, app_id="")
    Lister.__init__(self)

  def list_apps(self, filter_by: Dict[str, Any] = None) -> List[App]:
    """Lists all the apps for the user.

    Args:
        filter_by (dict): A dictionary of filters to be applied to the list of apps.

    Returns:
        list of App: A list of App objects for the user.

    Example:
        >>> from clarifai.client.user import User
        >>> apps = User("user_id").list_apps()
    """
    # None sentinel instead of a mutable default argument, which would be
    # shared across every call to list_apps.
    if filter_by is None:
      filter_by = {}
    request_data = dict(user_app_id=self.user_app_id, per_page=self.default_page_size, **filter_by)
    all_apps_info = list(
        self.list_all_pages_generator(self.STUB.ListApps, service_pb2.ListAppsRequest,
                                      request_data))

    return [App(**app_info) for app_info in all_apps_info]

  def create_app(self, app_id: str, base_workflow: str = 'Language-Understanding',
                 **kwargs) -> App:
    """Creates an app for the user.

    Args:
        app_id (str): The app ID for the app to create.
        base_workflow (str): The base workflow to use for the app.(Examples: 'Universal', 'Empty', 'General')
        **kwargs: Additional keyword arguments to be passed to the App.

    Returns:
        App: An App object for the specified app ID.

    Raises:
        Exception: If the app creation request fails.

    Example:
        >>> from clarifai.client.user import User
        >>> client = User(user_id="user_id")
        >>> app = client.create_app(app_id="app_id",base_workflow="Universal")
    """
    # Base workflows live in the public clarifai/main app.
    workflow = resources_pb2.Workflow(id=base_workflow, app_id="main", user_id="clarifai")
    request = service_pb2.PostAppsRequest(
        user_app_id=self.user_app_id,
        apps=[resources_pb2.App(id=app_id, default_workflow=workflow, **kwargs)])
    response = self._grpc_request(self.STUB.PostApps, request)
    if response.status.code != status_code_pb2.SUCCESS:
      raise Exception(response.status)
    self.logger.info("\nApp created\n%s", response.status)
    # NOTE(review): kwargs passed here were also used as App proto fields
    # above; confirm the App client constructor accepts the same keys.
    kwargs.update({'user_id': self.id})
    return App(app_id=app_id, **kwargs)

  def app(self, app_id: str, **kwargs) -> App:
    """Returns an App object for the specified app ID.

    Args:
        app_id (str): The app ID for the app to interact with.
        **kwargs: Additional keyword arguments to be passed to the App.

    Returns:
        App: An App object for the specified app ID.

    Raises:
        Exception: If the app does not exist or the request fails.

    Example:
        >>> from clarifai.client.user import User
        >>> app = User("user_id").app("app_id")
    """
    request = service_pb2.GetAppRequest(
        user_app_id=resources_pb2.UserAppIDSet(user_id=self.id, app_id=app_id))
    response = self._grpc_request(self.STUB.GetApp, request)
    if response.status.code != status_code_pb2.SUCCESS:
      raise Exception(response.status)

    kwargs['user_id'] = self.id
    return App(app_id=app_id, **kwargs)

  def delete_app(self, app_id: str) -> None:
    """Deletes an app for the user.

    Args:
        app_id (str): The app ID for the app to delete.

    Raises:
        Exception: If the app deletion request fails.

    Example:
        >>> from clarifai.client.user import User
        >>> user = User("user_id").delete_app("app_id")
    """
    request = service_pb2.DeleteAppRequest(
        user_app_id=resources_pb2.UserAppIDSet(user_id=self.id, app_id=app_id))
    response = self._grpc_request(self.STUB.DeleteApp, request)
    if response.status.code != status_code_pb2.SUCCESS:
      raise Exception(response.status)
    self.logger.info("\nApp Deleted\n%s", response.status)

  def __getattr__(self, name):
    # Delegate unknown attribute lookups to the underlying User proto.
    return getattr(self.user_info, name)

  def __str__(self):
    init_params = [param for param in self.kwargs.keys()]
    attribute_strings = [
        f"{param}={getattr(self.user_info, param)}" for param in init_params
        if hasattr(self.user_info, param)
    ]
    return f"Clarifai User Details: \n{', '.join(attribute_strings)}\n"
|
|
@@ -0,0 +1,194 @@
|
|
|
1
|
+
import os
|
|
2
|
+
from typing import Dict, List
|
|
3
|
+
|
|
4
|
+
from clarifai_grpc.grpc.api import resources_pb2, service_pb2
|
|
5
|
+
from clarifai_grpc.grpc.api.resources_pb2 import Input
|
|
6
|
+
from clarifai_grpc.grpc.api.status import status_code_pb2
|
|
7
|
+
|
|
8
|
+
from clarifai.client.base import BaseClient
|
|
9
|
+
from clarifai.client.lister import Lister
|
|
10
|
+
from clarifai.errors import UserError
|
|
11
|
+
from clarifai.urls.helper import ClarifaiUrlHelper
|
|
12
|
+
from clarifai.utils.logging import get_logger
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class Workflow(Lister, BaseClient):
  """Workflow is a class that provides access to Clarifai API endpoints related to Workflow information."""

  def __init__(self,
               url_init: str = "",
               workflow_id: str = "",
               workflow_version: Dict = None,
               output_config: Dict = None,
               **kwargs):
    """Initializes a Workflow object.

    Args:
        url_init (str): The URL to initialize the workflow object.
        workflow_id (str): The Workflow ID to interact with.
        workflow_version (dict): The Workflow Version to interact with. Defaults to {'id': ""}.
        output_config (dict): The output config to interact with. Defaults to {'min_value': 0}.
            min_value (float): The minimum value of the prediction confidence to filter.
            max_concepts (int): The maximum number of concepts to return.
            select_concepts (list[Concept]): The concepts to select.
            sample_ms (int): The number of milliseconds to sample.
        **kwargs: Additional keyword arguments to be passed to the ClarifaiAuthHelper.

    Raises:
        UserError: If both or neither of url_init and workflow_id are specified.
    """
    # None sentinels instead of mutable default arguments, which would be
    # shared across every call to __init__.
    if workflow_version is None:
      workflow_version = {'id': ""}
    if output_config is None:
      output_config = {'min_value': 0}
    if url_init != "" and workflow_id != "":
      raise UserError("You can only specify one of url_init or workflow_id.")
    if url_init == "" and workflow_id == "":
      raise UserError("You must specify one of url_init or workflow_id.")
    if url_init != "":
      # A full Clarifai URL carries user, app, workflow and version IDs;
      # these override any user_id/app_id passed via kwargs.
      user_id, app_id, _, workflow_id, workflow_version_id = ClarifaiUrlHelper.split_clarifai_url(
          url_init)
      workflow_version = {'id': workflow_version_id}
      kwargs = {'user_id': user_id, 'app_id': app_id}
    self.kwargs = {**kwargs, 'id': workflow_id, 'version': workflow_version}
    self.output_config = output_config
    self.workflow_info = resources_pb2.Workflow(**self.kwargs)
    self.logger = get_logger(logger_level="INFO")
    BaseClient.__init__(self, user_id=self.user_id, app_id=self.app_id)
    Lister.__init__(self)

  def predict(self, inputs: List[Input]):
    """Predicts the workflow based on the given inputs.

    Args:
        inputs (list[Input]): The inputs to predict.

    Returns:
        The PostWorkflowResults response on success.

    Raises:
        UserError: If more than 128 inputs are given.
        Exception: If the prediction request fails.
    """
    if len(inputs) > 128:
      raise UserError("Too many inputs. Max is 128.")  # TODO Use Chunker for inputs len > 128
    request = service_pb2.PostWorkflowResultsRequest(
        user_app_id=self.user_app_id,
        workflow_id=self.id,
        version_id=self.version.id,
        inputs=inputs,
        output_config=self.output_config)

    response = self._grpc_request(self.STUB.PostWorkflowResults, request)
    if response.status.code != status_code_pb2.SUCCESS:
      raise Exception(f"Workflow Predict failed with response {response.status!r}")

    return response

  def predict_by_filepath(self, filepath: str, input_type: str):
    """Predicts the workflow based on the given filepath.

    Args:
        filepath (str): The filepath to predict.
        input_type (str): The type of input. Can be 'image', 'text', 'video' or 'audio'.

    Raises:
        UserError: If the input type is invalid or the filepath does not exist.

    Example:
        >>> from clarifai.client.workflow import Workflow
        >>> workflow = Workflow("workflow_url") # Example: https://clarifai.com/clarifai/main/workflows/Face-Sentiment
                    or
        >>> workflow = Workflow(user_id='user_id', app_id='app_id', workflow_id='workflow_id')
        >>> workflow_prediction = workflow.predict_by_filepath('filepath', 'image')
    """
    if input_type not in {'image', 'text', 'video', 'audio'}:
      raise UserError('Invalid input type it should be image, text, video or audio.')
    if not os.path.isfile(filepath):
      raise UserError('Invalid filepath.')

    with open(filepath, "rb") as f:
      file_bytes = f.read()

    return self.predict_by_bytes(file_bytes, input_type)

  def predict_by_bytes(self, input_bytes: bytes, input_type: str):
    """Predicts the workflow based on the given bytes.

    Args:
        input_bytes (bytes): Bytes to predict on.
        input_type (str): The type of input. Can be 'image', 'text', 'video' or 'audio'.

    Raises:
        UserError: If the input type is invalid or input_bytes is not bytes.
    """
    if input_type not in {'image', 'text', 'video', 'audio'}:
      raise UserError('Invalid input type it should be image, text, video or audio.')
    if not isinstance(input_bytes, bytes):
      raise UserError('Invalid bytes.')

    if input_type == "image":
      input_proto = resources_pb2.Input(
          data=resources_pb2.Data(image=resources_pb2.Image(base64=input_bytes)))
    elif input_type == "text":
      input_proto = resources_pb2.Input(
          data=resources_pb2.Data(text=resources_pb2.Text(raw=input_bytes)))
    elif input_type == "video":
      input_proto = resources_pb2.Input(
          data=resources_pb2.Data(video=resources_pb2.Video(base64=input_bytes)))
    elif input_type == "audio":
      input_proto = resources_pb2.Input(
          data=resources_pb2.Data(audio=resources_pb2.Audio(base64=input_bytes)))

    return self.predict(inputs=[input_proto])

  def predict_by_url(self, url: str, input_type: str):
    """Predicts the workflow based on the given URL.

    Args:
        url (str): The URL to predict.
        input_type (str): The type of input. Can be 'image', 'text', 'video' or 'audio'.

    Raises:
        UserError: If the input type is invalid.

    Example:
        >>> from clarifai.client.workflow import Workflow
        >>> workflow = Workflow("workflow_url") # Example: https://clarifai.com/clarifai/main/workflows/Face-Sentiment
                    or
        >>> workflow = Workflow(user_id='user_id', app_id='app_id', workflow_id='workflow_id')
        >>> workflow_prediction = workflow.predict_by_url('url', 'image')
    """
    if input_type not in {'image', 'text', 'video', 'audio'}:
      raise UserError('Invalid input type it should be image, text, video or audio.')

    if input_type == "image":
      input_proto = resources_pb2.Input(
          data=resources_pb2.Data(image=resources_pb2.Image(url=url)))
    elif input_type == "text":
      input_proto = resources_pb2.Input(data=resources_pb2.Data(text=resources_pb2.Text(url=url)))
    elif input_type == "video":
      input_proto = resources_pb2.Input(
          data=resources_pb2.Data(video=resources_pb2.Video(url=url)))
    elif input_type == "audio":
      input_proto = resources_pb2.Input(
          data=resources_pb2.Data(audio=resources_pb2.Audio(url=url)))

    return self.predict(inputs=[input_proto])

  def list_versions(self) -> List['Workflow']:
    """Lists all the versions of the workflow.

    Returns:
        list[Workflow]: A list of Workflow objects.

    Example:
        >>> from clarifai.client.workflow import Workflow
        >>> workflow = Workflow(user_id='user_id', app_id='app_id', workflow_id='workflow_id')
        >>> workflow_versions = workflow.list_versions()
    """
    request_data = dict(
        user_app_id=self.user_app_id,
        workflow_id=self.id,
        per_page=self.default_page_size,
    )
    all_workflow_versions_info = list(
        self.list_all_pages_generator(self.STUB.ListWorkflowVersions,
                                      service_pb2.ListWorkflowVersionsRequest, request_data))

    # Rename the paginator's 'workflow_version_id' key to the 'id' key the
    # Workflow constructor expects for the version dict.
    for workflow_version_info in all_workflow_versions_info:
      workflow_version_info['id'] = workflow_version_info['workflow_version_id']
      del workflow_version_info['workflow_version_id']

    return [
        Workflow(workflow_id=self.id, **dict(self.kwargs, version=workflow_version_info))
        for workflow_version_info in all_workflow_versions_info
    ]

  def __getattr__(self, name):
    # Delegate unknown attribute lookups to the underlying Workflow proto.
    return getattr(self.workflow_info, name)

  def __str__(self):
    init_params = [param for param in self.kwargs.keys()]
    attribute_strings = [
        f"{param}={getattr(self.workflow_info, param)}" for param in init_params
        if hasattr(self.workflow_info, param)
    ]
    return f"Workflow Details: \n{', '.join(attribute_strings)}\n"
|