clarifai 9.7.0__py3-none-any.whl → 9.7.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- clarifai/auth/__init__.py +6 -0
- clarifai/auth/helper.py +35 -36
- clarifai/auth/register.py +23 -0
- clarifai/{client → auth}/stub.py +10 -10
- clarifai/client/__init__.py +1 -4
- clarifai/client/app.py +483 -0
- clarifai/client/auth/__init__.py +4 -0
- clarifai/client/{abc.py → auth/abc.py} +2 -2
- clarifai/client/auth/helper.py +377 -0
- clarifai/client/auth/register.py +23 -0
- {clarifai_utils/client → clarifai/client/auth}/stub.py +10 -10
- clarifai/client/base.py +112 -0
- clarifai/client/dataset.py +290 -0
- clarifai/client/input.py +730 -0
- clarifai/client/lister.py +41 -0
- clarifai/client/model.py +218 -0
- clarifai/client/module.py +82 -0
- clarifai/client/user.py +125 -0
- clarifai/client/workflow.py +194 -0
- clarifai/datasets/upload/base.py +66 -0
- clarifai/datasets/upload/examples/README.md +31 -0
- clarifai/datasets/upload/examples/image_classification/cifar10/dataset.py +42 -0
- clarifai/datasets/upload/examples/image_classification/food-101/dataset.py +39 -0
- clarifai/datasets/upload/examples/text_classification/imdb_dataset/dataset.py +37 -0
- clarifai/{data_upload/datasets → datasets/upload}/features.py +4 -12
- clarifai/datasets/upload/image.py +156 -0
- clarifai/datasets/upload/loaders/README.md +49 -0
- clarifai/{data_upload/datasets/zoo → datasets/upload/loaders}/coco_captions.py +24 -21
- {clarifai_utils/data_upload/datasets/zoo → clarifai/datasets/upload/loaders}/coco_detection.py +46 -42
- clarifai/datasets/upload/loaders/coco_segmentation.py +166 -0
- clarifai/{data_upload/datasets/zoo → datasets/upload/loaders}/imagenet_classification.py +22 -12
- clarifai/{data_upload/datasets/zoo → datasets/upload/loaders}/xview_detection.py +44 -53
- clarifai/datasets/upload/text.py +50 -0
- clarifai/datasets/upload/utils.py +62 -0
- clarifai/errors.py +90 -0
- clarifai/urls/helper.py +16 -17
- clarifai/utils/logging.py +40 -0
- clarifai/utils/misc.py +33 -0
- clarifai/versions.py +6 -0
- {clarifai-9.7.0.dist-info → clarifai-9.7.2.dist-info}/LICENSE +1 -1
- clarifai-9.7.2.dist-info/METADATA +179 -0
- clarifai-9.7.2.dist-info/RECORD +350 -0
- clarifai_utils/auth/__init__.py +6 -0
- clarifai_utils/auth/helper.py +35 -36
- clarifai_utils/auth/register.py +23 -0
- clarifai_utils/auth/stub.py +127 -0
- clarifai_utils/client/__init__.py +1 -4
- clarifai_utils/client/app.py +483 -0
- clarifai_utils/client/auth/__init__.py +4 -0
- clarifai_utils/client/{abc.py → auth/abc.py} +2 -2
- clarifai_utils/client/auth/helper.py +377 -0
- clarifai_utils/client/auth/register.py +23 -0
- clarifai_utils/client/auth/stub.py +127 -0
- clarifai_utils/client/base.py +112 -0
- clarifai_utils/client/dataset.py +290 -0
- clarifai_utils/client/input.py +730 -0
- clarifai_utils/client/lister.py +41 -0
- clarifai_utils/client/model.py +218 -0
- clarifai_utils/client/module.py +82 -0
- clarifai_utils/client/user.py +125 -0
- clarifai_utils/client/workflow.py +194 -0
- clarifai_utils/datasets/upload/base.py +66 -0
- clarifai_utils/datasets/upload/examples/README.md +31 -0
- clarifai_utils/datasets/upload/examples/image_classification/cifar10/dataset.py +42 -0
- clarifai_utils/datasets/upload/examples/image_classification/food-101/dataset.py +39 -0
- clarifai_utils/datasets/upload/examples/text_classification/imdb_dataset/dataset.py +37 -0
- clarifai_utils/{data_upload/datasets → datasets/upload}/features.py +4 -12
- clarifai_utils/datasets/upload/image.py +156 -0
- clarifai_utils/datasets/upload/loaders/README.md +49 -0
- clarifai_utils/{data_upload/datasets/zoo → datasets/upload/loaders}/coco_captions.py +24 -21
- {clarifai/data_upload/datasets/zoo → clarifai_utils/datasets/upload/loaders}/coco_detection.py +46 -42
- clarifai_utils/datasets/upload/loaders/coco_segmentation.py +166 -0
- clarifai_utils/{data_upload/datasets/zoo → datasets/upload/loaders}/imagenet_classification.py +22 -12
- clarifai_utils/{data_upload/datasets/zoo → datasets/upload/loaders}/xview_detection.py +44 -53
- clarifai_utils/datasets/upload/text.py +50 -0
- clarifai_utils/datasets/upload/utils.py +62 -0
- clarifai_utils/errors.py +90 -0
- clarifai_utils/urls/helper.py +16 -17
- clarifai_utils/utils/logging.py +40 -0
- clarifai_utils/utils/misc.py +33 -0
- clarifai_utils/versions.py +6 -0
- clarifai/data_upload/README.md +0 -63
- clarifai/data_upload/convert_csv.py +0 -182
- clarifai/data_upload/datasets/base.py +0 -87
- clarifai/data_upload/datasets/image.py +0 -253
- clarifai/data_upload/datasets/text.py +0 -60
- clarifai/data_upload/datasets/zoo/README.md +0 -55
- clarifai/data_upload/datasets/zoo/coco_segmentation.py +0 -160
- clarifai/data_upload/examples/README.md +0 -5
- clarifai/data_upload/examples/image_classification/cifar10/dataset.py +0 -40
- clarifai/data_upload/examples/image_classification/food-101/dataset.py +0 -39
- clarifai/data_upload/examples/image_classification/food-101/images/beignets/1036242.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/beignets/1114182.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/beignets/2012944.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/beignets/2464389.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/beignets/478632.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/hamburger/1061270.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/hamburger/1202261.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/hamburger/1381751.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/hamburger/3289634.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/hamburger/862025.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/prime_rib/102197.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/prime_rib/2749372.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/prime_rib/2938268.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/prime_rib/3590861.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/prime_rib/746716.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/ramen/2955110.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/ramen/3208966.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/ramen/3270629.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/ramen/3424562.jpg +0 -0
- clarifai/data_upload/examples/image_classification/food-101/images/ramen/544680.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/annotations/2007_000464.xml +0 -39
- clarifai/data_upload/examples/image_detection/voc/annotations/2008_000853.xml +0 -28
- clarifai/data_upload/examples/image_detection/voc/annotations/2008_003182.xml +0 -54
- clarifai/data_upload/examples/image_detection/voc/annotations/2008_008526.xml +0 -67
- clarifai/data_upload/examples/image_detection/voc/annotations/2009_004315.xml +0 -28
- clarifai/data_upload/examples/image_detection/voc/annotations/2009_004382.xml +0 -28
- clarifai/data_upload/examples/image_detection/voc/annotations/2011_000430.xml +0 -28
- clarifai/data_upload/examples/image_detection/voc/annotations/2011_001610.xml +0 -46
- clarifai/data_upload/examples/image_detection/voc/annotations/2011_006412.xml +0 -99
- clarifai/data_upload/examples/image_detection/voc/annotations/2012_000690.xml +0 -43
- clarifai/data_upload/examples/image_detection/voc/dataset.py +0 -76
- clarifai/data_upload/examples/image_detection/voc/images/2007_000464.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/images/2008_000853.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/images/2008_003182.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/images/2008_008526.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/images/2009_004315.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/images/2009_004382.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/images/2011_000430.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/images/2011_001610.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/images/2011_006412.jpg +0 -0
- clarifai/data_upload/examples/image_detection/voc/images/2012_000690.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/annotations/instances_val2017_subset.json +0 -5342
- clarifai/data_upload/examples/image_segmentation/coco/dataset.py +0 -107
- clarifai/data_upload/examples/image_segmentation/coco/images/000000074646.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/images/000000086956.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/images/000000166563.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/images/000000176857.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/images/000000182202.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/images/000000193245.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/images/000000384850.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/images/000000409630.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/images/000000424349.jpg +0 -0
- clarifai/data_upload/examples/image_segmentation/coco/images/000000573008.jpg +0 -0
- clarifai/data_upload/examples/text_classification/imdb_dataset/dataset.py +0 -40
- clarifai/data_upload/examples.py +0 -17
- clarifai/data_upload/upload.py +0 -356
- clarifai/dataset_export/dataset_export_inputs.py +0 -205
- clarifai/listing/concepts.py +0 -37
- clarifai/listing/datasets.py +0 -37
- clarifai/listing/inputs.py +0 -111
- clarifai/listing/installed_module_versions.py +0 -40
- clarifai/listing/lister.py +0 -200
- clarifai/listing/models.py +0 -46
- clarifai/listing/module_versions.py +0 -42
- clarifai/listing/modules.py +0 -36
- clarifai/runners/base.py +0 -140
- clarifai/runners/example.py +0 -36
- clarifai-9.7.0.dist-info/METADATA +0 -99
- clarifai-9.7.0.dist-info/RECORD +0 -456
- clarifai_utils/data_upload/README.md +0 -63
- clarifai_utils/data_upload/convert_csv.py +0 -182
- clarifai_utils/data_upload/datasets/base.py +0 -87
- clarifai_utils/data_upload/datasets/image.py +0 -253
- clarifai_utils/data_upload/datasets/text.py +0 -60
- clarifai_utils/data_upload/datasets/zoo/README.md +0 -55
- clarifai_utils/data_upload/datasets/zoo/coco_segmentation.py +0 -160
- clarifai_utils/data_upload/examples/README.md +0 -5
- clarifai_utils/data_upload/examples/image_classification/cifar10/dataset.py +0 -40
- clarifai_utils/data_upload/examples/image_classification/food-101/dataset.py +0 -39
- clarifai_utils/data_upload/examples/image_classification/food-101/images/beignets/1036242.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/beignets/1114182.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/beignets/2012944.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/beignets/2464389.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/beignets/478632.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/hamburger/1061270.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/hamburger/1202261.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/hamburger/1381751.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/hamburger/3289634.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/hamburger/862025.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/prime_rib/102197.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/prime_rib/2749372.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/prime_rib/2938268.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/prime_rib/3590861.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/prime_rib/746716.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/ramen/2955110.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/ramen/3208966.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/ramen/3270629.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/ramen/3424562.jpg +0 -0
- clarifai_utils/data_upload/examples/image_classification/food-101/images/ramen/544680.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/__init__.py +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/__init__.py +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2007_000464.xml +0 -39
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2008_000853.xml +0 -28
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2008_003182.xml +0 -54
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2008_008526.xml +0 -67
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2009_004315.xml +0 -28
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2009_004382.xml +0 -28
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2011_000430.xml +0 -28
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2011_001610.xml +0 -46
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2011_006412.xml +0 -99
- clarifai_utils/data_upload/examples/image_detection/voc/annotations/2012_000690.xml +0 -43
- clarifai_utils/data_upload/examples/image_detection/voc/dataset.py +0 -76
- clarifai_utils/data_upload/examples/image_detection/voc/images/2007_000464.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/images/2008_000853.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/images/2008_003182.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/images/2008_008526.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/images/2009_004315.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/images/2009_004382.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/images/2011_000430.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/images/2011_001610.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/images/2011_006412.jpg +0 -0
- clarifai_utils/data_upload/examples/image_detection/voc/images/2012_000690.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/__init__.py +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/__init__.py +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/annotations/instances_val2017_subset.json +0 -5342
- clarifai_utils/data_upload/examples/image_segmentation/coco/dataset.py +0 -107
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000074646.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000086956.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000166563.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000176857.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000182202.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000193245.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000384850.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000409630.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000424349.jpg +0 -0
- clarifai_utils/data_upload/examples/image_segmentation/coco/images/000000573008.jpg +0 -0
- clarifai_utils/data_upload/examples/text_classification/__init__.py +0 -0
- clarifai_utils/data_upload/examples/text_classification/imdb_dataset/__init__.py +0 -0
- clarifai_utils/data_upload/examples/text_classification/imdb_dataset/dataset.py +0 -40
- clarifai_utils/data_upload/examples.py +0 -17
- clarifai_utils/data_upload/upload.py +0 -356
- clarifai_utils/dataset_export/dataset_export_inputs.py +0 -205
- clarifai_utils/listing/__init__.py +0 -0
- clarifai_utils/listing/concepts.py +0 -37
- clarifai_utils/listing/datasets.py +0 -37
- clarifai_utils/listing/inputs.py +0 -111
- clarifai_utils/listing/installed_module_versions.py +0 -40
- clarifai_utils/listing/lister.py +0 -200
- clarifai_utils/listing/models.py +0 -46
- clarifai_utils/listing/module_versions.py +0 -42
- clarifai_utils/listing/modules.py +0 -36
- clarifai_utils/runners/__init__.py +0 -0
- clarifai_utils/runners/base.py +0 -140
- clarifai_utils/runners/example.py +0 -36
- /clarifai/{data_upload/__init__.py → cli.py} +0 -0
- /clarifai/{data_upload/datasets → datasets}/__init__.py +0 -0
- /clarifai/{data_upload/datasets/zoo → datasets/upload}/__init__.py +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/__init__.py +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/__init__.py +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/cifar_small_test.csv +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/cifar_small_train.csv +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_700.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_701.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_702.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_703.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_704.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_705.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_706.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_707.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_708.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_709.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/__init__.py +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/1420783.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/3287885.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/3617075.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/38052.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/39147.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/139558.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/1636096.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/2480925.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/3385808.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/3647386.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/1826869.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/2243245.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/259212.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/2842688.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/3035414.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/1545393.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/2427642.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/3520891.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/377566.jpg +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/503504.jpg +0 -0
- /clarifai/{data_upload/examples/image_detection → datasets/upload/examples/text_classification}/__init__.py +0 -0
- /clarifai/{data_upload/examples/image_detection/voc → datasets/upload/examples/text_classification/imdb_dataset}/__init__.py +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/text_classification/imdb_dataset/test.csv +0 -0
- /clarifai/{data_upload → datasets/upload}/examples/text_classification/imdb_dataset/train.csv +0 -0
- /clarifai/{data_upload/examples/image_segmentation → datasets/upload/loaders}/__init__.py +0 -0
- /clarifai/{data_upload/examples/image_segmentation/coco → utils}/__init__.py +0 -0
- {clarifai-9.7.0.dist-info → clarifai-9.7.2.dist-info}/WHEEL +0 -0
- {clarifai-9.7.0.dist-info → clarifai-9.7.2.dist-info}/entry_points.txt +0 -0
- {clarifai-9.7.0.dist-info → clarifai-9.7.2.dist-info}/top_level.txt +0 -0
- /clarifai/data_upload/examples/text_classification/__init__.py → /clarifai_utils/cli.py +0 -0
- {clarifai/data_upload/examples/text_classification/imdb_dataset → clarifai_utils/datasets}/__init__.py +0 -0
- {clarifai/listing → clarifai_utils/datasets/upload}/__init__.py +0 -0
- {clarifai/runners → clarifai_utils/datasets/upload/examples/image_classification}/__init__.py +0 -0
- /clarifai_utils/{data_upload → datasets/upload/examples/image_classification/cifar10}/__init__.py +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/cifar_small_test.csv +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/cifar_small_train.csv +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_700.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_701.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_702.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_703.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_704.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_705.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_706.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_707.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_708.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/cifar10/images/test_batch_709.jpg +0 -0
- /clarifai_utils/{data_upload/datasets → datasets/upload/examples/image_classification/food-101}/__init__.py +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/1420783.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/3287885.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/3617075.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/38052.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/beignets/39147.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/139558.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/1636096.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/2480925.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/3385808.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/hamburger/3647386.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/1826869.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/2243245.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/259212.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/2842688.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/prime_rib/3035414.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/1545393.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/2427642.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/3520891.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/377566.jpg +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/image_classification/food-101/images/ramen/503504.jpg +0 -0
- /clarifai_utils/{data_upload/datasets/zoo → datasets/upload/examples/text_classification}/__init__.py +0 -0
- /clarifai_utils/{data_upload/examples/image_classification → datasets/upload/examples/text_classification/imdb_dataset}/__init__.py +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/text_classification/imdb_dataset/test.csv +0 -0
- /clarifai_utils/{data_upload → datasets/upload}/examples/text_classification/imdb_dataset/train.csv +0 -0
- /clarifai_utils/{data_upload/examples/image_classification/cifar10 → datasets/upload/loaders}/__init__.py +0 -0
- /clarifai_utils/{data_upload/examples/image_classification/food-101 → utils}/__init__.py +0 -0
clarifai/errors.py
ADDED
|
@@ -0,0 +1,90 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
|
|
2
|
+
import json
|
|
3
|
+
import time
|
|
4
|
+
|
|
5
|
+
import requests # noqa
|
|
6
|
+
from google.protobuf.json_format import MessageToDict
|
|
7
|
+
|
|
8
|
+
from clarifai.versions import CLIENT_VERSION, OS_VER, PYTHON_VERSION
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class TokenError(Exception):
  """Raised when an authentication token is missing, expired, or otherwise invalid."""
  pass
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class ApiError(Exception):
  """API server error.

  Captures the failed call's context (resource path, request params, HTTP
  method and the server response) and formats a detailed, timestamped
  message including client/Python/OS version information.

  NOTE(review): ``response`` is converted with ``MessageToDict``, so despite
  the historical ``requests.Response`` annotation it must be a protobuf
  message (e.g. a gRPC response object) — confirm at call sites.
  """

  def __init__(self, resource: str, params: dict, method: str, response=None) -> None:
    """
    Args:
      resource: the resource path the request was made against.
      params: the request parameters (serialized into the error message).
      method: the HTTP method used (e.g. "POST").
      response: the server response, if one was received; a protobuf
        message convertible by MessageToDict.
    """
    self.resource = resource
    self.params = params
    self.method = method
    self.response = response

    # Defaults used when no response (or no status field) is available.
    self.error_code = 'N/A'
    self.error_desc = 'N/A'
    self.error_details = 'N/A'
    response_json = 'N/A'

    if response is not None:
      response_json_dict = MessageToDict(response)

      # Use .get consistently so a response without a 'status' field does
      # not raise KeyError while constructing the error message.
      status = response_json_dict.get('status', {})
      self.error_code = status.get('code', None)
      self.error_desc = status.get('description', None)
      self.error_details = status.get('details', None)
      response_json = json.dumps(status, indent=2)

    current_ts_str = str(time.time())

    # The 'baseurl' key previously computed here was never referenced by the
    # template below, so the dead _base_url() call has been removed.
    msg = """%(method)s %(resource)s FAILED(%(time_ts)s). error_code: %(error_code)s, error_description: %(error_desc)s, error_details: %(error_details)s
 >> Python client %(client_version)s with Python %(python_version)s on %(os_version)s
 >> %(method)s %(resource)s
 >> REQUEST(%(time_ts)s) %(request)s
 >> RESPONSE(%(time_ts)s) %(response)s""" % {
        'method': method,
        'resource': resource,
        'error_code': self.error_code,
        'error_desc': self.error_desc,
        'error_details': self.error_details,
        'request': json.dumps(params, indent=2),
        'response': response_json,
        'time_ts': current_ts_str,
        'client_version': CLIENT_VERSION,
        'python_version': PYTHON_VERSION,
        'os_version': OS_VER
    }

    super(ApiError, self).__init__(msg)
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
class ApiClientError(Exception):
  """ API Client Error — raised for failures originating in the client itself rather than the server. """
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
class UserError(Exception):
  """ User Error — raised when the caller supplies invalid arguments or misuses the API. """
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
class AuthError(Exception):
  """Raised when a client has missing or invalid authentication."""
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
def _base_url(url: str) -> str:
  """
  Extracts the base URL from the url, which is everything before the 4th slash character.
  https://www.clarifai.com/v2/models/1/output -> https://www.clarifai.com/v2/

  Returns '' when the base cannot be determined (e.g. url is not a string).
  """
  try:
    # _find_nth returns -1 when fewer than 4 slashes exist, in which case
    # the slice url[:0] yields '' — the same fallback as the except path.
    return url[:_find_nth(url, '/', 4) + 1]
  except Exception:
    # Narrowed from a bare `except:` which also swallowed SystemExit and
    # KeyboardInterrupt; any genuine failure here still maps to ''.
    return ''
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
def _find_nth(haystack: str, needle: str, n: int) -> int:
|
|
86
|
+
start = haystack.find(needle)
|
|
87
|
+
while start >= 0 and n > 1:
|
|
88
|
+
start = haystack.find(needle, start + len(needle))
|
|
89
|
+
n -= 1
|
|
90
|
+
return start
|
clarifai/urls/helper.py
CHANGED
|
@@ -6,9 +6,9 @@ class ClarifaiUrlHelper(object):
|
|
|
6
6
|
|
|
7
7
|
def __init__(self, auth, module_manager_imv_id="module_manager_install"):
|
|
8
8
|
"""
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
9
|
+
Args:
|
|
10
|
+
auth: a ClarifaiAuthHelper object.
|
|
11
|
+
"""
|
|
12
12
|
self._auth = auth
|
|
13
13
|
self._module_manager_imv_id = module_manager_imv_id
|
|
14
14
|
|
|
@@ -39,12 +39,12 @@ class ClarifaiUrlHelper(object):
|
|
|
39
39
|
def clarifai_url(self, user_id, app_id, resource_type, resource_id, version_id: str = None):
|
|
40
40
|
"""This is the path to the resource in community.
|
|
41
41
|
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
42
|
+
Args:
|
|
43
|
+
user_id: the author of the resource.
|
|
44
|
+
app_id: the author's app the resource was created in.
|
|
45
|
+
resource_type: the type of resource. One of "modules", "models", "concepts", "inputs", "workflows", "tasks", "installed_module_versions"
|
|
46
|
+
resource_id: the resource ID
|
|
47
|
+
version_id: the version of the resource.
|
|
48
48
|
"""
|
|
49
49
|
if resource_type not in [
|
|
50
50
|
"modules", "models", "concepts", "inputs", "workflows", "tasks",
|
|
@@ -85,15 +85,14 @@ class ClarifaiUrlHelper(object):
|
|
|
85
85
|
|
|
86
86
|
@classmethod
|
|
87
87
|
def split_module_ui_url(cls, install):
|
|
88
|
-
"""Takes in a path like https://clarifai.com/zeiler/app/modules/module1/versions/2
|
|
89
|
-
to split it apart into it's IDs.
|
|
88
|
+
"""Takes in a path like https://clarifai.com/zeiler/app/modules/module1/versions/2 to split it apart into its IDs.
|
|
90
89
|
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
|
|
90
|
+
Returns:
|
|
91
|
+
user_id: the author of the module.
|
|
92
|
+
app_id: the author's app the module was created in.
|
|
93
|
+
module_id: the module ID
|
|
94
|
+
module_version_id: the version of the module.
|
|
95
|
+
"""
|
|
97
96
|
user_id, app_id, resource_type, resource_id, resource_version_id = cls.split_clarifai_url(
|
|
98
97
|
install)
|
|
99
98
|
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
import logging
from typing import Optional

from rich.logging import RichHandler
from rich.table import Table
from rich.traceback import install

# Side effect at import time: replaces the default traceback rendering with
# rich's formatted tracebacks, process-wide.
install()
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def table_from_dict(data, column_names, title="") -> Table:
  """Build a rich Table from a list of dicts, one row per dict.

  Missing keys render as empty cells.
  """
  table = Table(title=title, show_header=True, header_style="bold blue")
  for name in column_names:
    table.add_column(name)
  for record in data:
    table.add_row(*(record.get(name, "") for name in column_names))
  return table
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def _get_library_name() -> str:
|
|
23
|
+
return __name__.split(".")[0]
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def _configure_logger(logger_level: str = "ERROR") -> None:
  """Configure the root logger to emit through a RichHandler.

  NOTE(review): logging.basicConfig is a no-op when the root logger already
  has handlers, so only the first call takes effect.
  """
  rich_handler = RichHandler(rich_tracebacks=True)
  logging.basicConfig(
      level=logger_level, datefmt='%Y-%m-%d %H:%M:%S', handlers=[rich_handler])
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def get_logger(logger_level: str = "ERROR", name: Optional[str] = None) -> logging.Logger:
  """Return a logger with the specified name, configuring root logging first.

  When `name` is omitted, the library's top-level package name is used.
  """
  _configure_logger(logger_level)
  return logging.getLogger(name if name is not None else _get_library_name())
|
clarifai/utils/misc.py
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
from typing import List
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
class Chunker:
  """Partition an input sequence into consecutive fixed-size chunks."""

  def __init__(self, seq: List, size: int) -> None:
    self.seq = seq
    self.size = size

  def chunk(self) -> List[List]:
    """Return the sequence as a list of slices of at most `size` items each."""
    step = self.size
    return [self.seq[start:start + step] for start in range(0, len(self.seq), step)]
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class BackoffIterator:
  """Iterator that returns a sequence of backoff sleep values in seconds.

  Yields 0.1 first, then doubles from 0.64 up to a 10.24-second cap, which is
  then repeated forever. (The original bound of ``count < 7`` let the value
  overshoot to 20.48s — twice the documented 10-second cap — before settling;
  the bound is now ``count < 6`` so the sequence rises monotonically to the cap.)
  """

  def __init__(self):
    # Number of values served so far; drives the exponential schedule.
    self.count = 0

  def __iter__(self):
    return self

  def __next__(self):
    if self.count < 1:
      self.count += 1
      return 0.1
    elif self.count < 6:
      self.count += 1
      # Exponents 6..10 -> 0.64, 1.28, 2.56, 5.12, 10.24 seconds.
      return 0.01 * (2**(self.count + 4))
    else:
      return 0.01 * (2**10)  # ~10 seconds, repeated forever
|
clarifai/versions.py
ADDED
|
@@ -0,0 +1,179 @@
|
|
|
1
|
+
Metadata-Version: 2.1
|
|
2
|
+
Name: clarifai
|
|
3
|
+
Version: 9.7.2
|
|
4
|
+
Summary: Clarifai Python SDK
|
|
5
|
+
Home-page: https://github.com/Clarifai/clarifai-python
|
|
6
|
+
Author: Clarifai
|
|
7
|
+
Author-email: support@clarifai.com
|
|
8
|
+
License: Apache 2.0
|
|
9
|
+
Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
|
|
10
|
+
Classifier: Programming Language :: Python :: 3
|
|
11
|
+
Classifier: License :: OSI Approved :: Apache Software License
|
|
12
|
+
Classifier: Operating System :: OS Independent
|
|
13
|
+
Requires-Python: >=3.8
|
|
14
|
+
Description-Content-Type: text/markdown
|
|
15
|
+
License-File: LICENSE
|
|
16
|
+
Requires-Dist: clarifai-grpc (>=9.7.3)
|
|
17
|
+
Requires-Dist: tritonclient (==2.34.0)
|
|
18
|
+
Requires-Dist: packaging
|
|
19
|
+
|
|
20
|
+

|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
# Clarifai API Python Client
|
|
25
|
+
|
|
26
|
+
This is the official Python client for interacting with our powerful recognition [API](https://docs.clarifai.com).
|
|
27
|
+
The Clarifai Python SDK offers a comprehensive set of tools to integrate Clarifai's AI-powered image, video, and text recognition capabilities into your applications. With just a few lines of code, you can leverage cutting-edge artificial intelligence to unlock valuable insights from visual and textual content.
|
|
28
|
+
|
|
29
|
+
* Try the Clarifai demo at: https://clarifai.com/demo
|
|
30
|
+
* Sign up for a free account at: https://clarifai.com/developer/account/signup/
|
|
31
|
+
* Read the developer guide at: https://clarifai.com/developer/guide/
|
|
32
|
+
|
|
33
|
+
## Getting started
|
|
34
|
+
Clarifai uses Personal Access Tokens(PATs) to validate requests. You can create and manage PATs under your Clarifai account security settings.
|
|
35
|
+
|
|
36
|
+
Export your PAT as an environment variable. Then, import and initialize the API Client.
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
```cmd
|
|
40
|
+
export CLARIFAI_PAT={your personal access token}
|
|
41
|
+
```
|
|
42
|
+
|
|
43
|
+
```python
|
|
44
|
+
# Note: CLARIFAI_PAT must be set as env variable.
|
|
45
|
+
from clarifai.client.user import User
|
|
46
|
+
client = User(user_id="user_id")
|
|
47
|
+
|
|
48
|
+
# Get all apps
|
|
49
|
+
apps = client.list_apps()
|
|
50
|
+
|
|
51
|
+
# Create app and dataset
|
|
52
|
+
app = client.create_app(app_id="demo_app", base_workflow="Universal")
|
|
53
|
+
dataset = app.create_dataset(dataset_id="demo_dataset")
|
|
54
|
+
|
|
55
|
+
# execute data upload to Clarifai app dataset
|
|
56
|
+
dataset.upload_dataset(task='visual_segmentation', split="train", dataset_loader='coco_segmentation')
|
|
57
|
+
|
|
58
|
+
#upload text from csv
|
|
59
|
+
dataset.upload_from_csv(csv_path='csv_path', labels=True)
|
|
60
|
+
|
|
61
|
+
#upload data from folder
|
|
62
|
+
dataset.upload_from_folder(folder_path='folder_path', input_type='text', labels=True)
|
|
63
|
+
```
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
## Interacting with Inputs
|
|
67
|
+
|
|
68
|
+
```python
|
|
69
|
+
from clarifai.client.user import User
|
|
70
|
+
app = User(user_id="user_id").app(app_id="app_id")
|
|
71
|
+
input_obj = app.inputs()
|
|
72
|
+
|
|
73
|
+
#input upload from url
|
|
74
|
+
input_obj.upload_from_url(input_id = 'demo', image_url='https://samples.clarifai.com/metro-north.jpg')
|
|
75
|
+
|
|
76
|
+
#input upload from filename
|
|
77
|
+
input_obj.upload_from_file(input_id = 'demo', video_file='demo.mp4')
|
|
78
|
+
|
|
79
|
+
#listing inputs
|
|
80
|
+
input_obj.list_inputs()
|
|
81
|
+
|
|
82
|
+
# text upload
|
|
83
|
+
input_obj.upload_text(input_id = 'demo', raw_text = 'This is a test')
|
|
84
|
+
```
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
## Interacting with Models
|
|
88
|
+
|
|
89
|
+
### Model Predict
|
|
90
|
+
```python
|
|
91
|
+
# Note: CLARIFAI_PAT must be set as env variable.
|
|
92
|
+
from clarifai.client.model import Model
|
|
93
|
+
|
|
94
|
+
# Model Predict
|
|
95
|
+
model = Model("https://clarifai.com/anthropic/completion/models/claude-v2").predict_by_bytes(b"Write a tweet on future of AI", "text")
|
|
96
|
+
|
|
97
|
+
model = Model(user_id="user_id", app_id="app_id", model_id="model_id")
|
|
98
|
+
model_prediction = model.predict_by_url(url="url", input_type="image") # Supports image, text, audio, video
|
|
99
|
+
|
|
100
|
+
# Customizing Model Inference Output
|
|
101
|
+
model = Model(user_id="user_id", app_id="app_id", model_id="model_id",
|
|
102
|
+
output_config={"min_value": 0.98}) # Return predictions having prediction confidence > 0.98
|
|
103
|
+
model_prediction = model.predict_by_filepath(filepath="local_filepath", input_type="text") # Supports image, text, audio, video
|
|
104
|
+
|
|
105
|
+
model = Model(user_id="user_id", app_id="app_id", model_id="model_id",
|
|
106
|
+
output_config={"sample_ms": 2000}) # Return predictions for specified interval
|
|
107
|
+
model_prediction = model.predict_by_url(url="VIDEO_URL", input_type="video")
|
|
108
|
+
```
|
|
109
|
+
### Models Listing
|
|
110
|
+
```python
|
|
111
|
+
# Note: CLARIFAI_PAT must be set as env variable.
|
|
112
|
+
|
|
113
|
+
# List all model versions
|
|
114
|
+
all_model_versions = model.list_versions()
|
|
115
|
+
|
|
116
|
+
# Go to specific model version
|
|
117
|
+
model_v1 = client.app("app_id").model(model_id="model_id", model_version_id="model_version_id")
|
|
118
|
+
|
|
119
|
+
# List all models in an app
|
|
120
|
+
all_models = app.list_models()
|
|
121
|
+
|
|
122
|
+
# List all models in community filtered by model_type, description
|
|
123
|
+
all_llm_community_models = App().list_models(filter_by={"query": "LLM",
|
|
124
|
+
"model_type_id": "text-to-text"}, only_in_app=False)
|
|
125
|
+
```
|
|
126
|
+
|
|
127
|
+
## Interacting with Workflows
|
|
128
|
+
|
|
129
|
+
### Workflow Predict
|
|
130
|
+
```python
|
|
131
|
+
# Note: CLARIFAI_PAT must be set as env variable.
|
|
132
|
+
from clarifai.client.workflow import Workflow
|
|
133
|
+
|
|
134
|
+
# Workflow Predict
|
|
135
|
+
workflow = Workflow("workflow_url") # Example: https://clarifai.com/clarifai/main/workflows/Face-Sentiment
|
|
136
|
+
workflow_prediction = workflow.predict_by_url(url="url", input_type="image") # Supports image, text, audio, video
|
|
137
|
+
|
|
138
|
+
# Customizing Workflow Inference Output
|
|
139
|
+
workflow = Workflow(user_id="user_id", app_id="app_id", workflow_id="workflow_id",
|
|
140
|
+
output_config={"min_value": 0.98}) # Return predictions having prediction confidence > 0.98
|
|
141
|
+
workflow_prediction = workflow.predict_by_filepath(filepath="local_filepath", input_type="text") # Supports image, text, audio, video
|
|
142
|
+
```
|
|
143
|
+
|
|
144
|
+
### Workflows Listing
|
|
145
|
+
```python
|
|
146
|
+
# Note: CLARIFAI_PAT must be set as env variable.
|
|
147
|
+
|
|
148
|
+
# List all workflow versions
|
|
149
|
+
all_workflow_versions = workflow.list_versions()
|
|
150
|
+
|
|
151
|
+
# Go to specific workflow version
|
|
152
|
+
workflow_v1 = Workflow(workflow_id="workflow_id", workflow_version=dict(id="workflow_version_id"), app_id="app_id", user_id="user_id")
|
|
153
|
+
|
|
154
|
+
# List all workflows in an app
|
|
155
|
+
all_workflows = app.list_workflows()
|
|
156
|
+
|
|
157
|
+
# List all workflows in the community, filtered by description
|
|
158
|
+
all_face_community_workflows = App().list_workflows(filter_by={"query": "face"}, only_in_app=False) # Get all face related workflows
|
|
159
|
+
```
|
|
160
|
+
|
|
161
|
+
## Interacting with Modules
|
|
162
|
+
|
|
163
|
+
```python
|
|
164
|
+
# Note: CLARIFAI_PAT must be set as env variable.
|
|
165
|
+
from clarifai.client.app import App
|
|
166
|
+
app = App(user_id="user_id", app_id="app_id")
|
|
167
|
+
|
|
168
|
+
# create a new module
|
|
169
|
+
module = app.create_module(module_id="module_id", description="module_description")
|
|
170
|
+
|
|
171
|
+
# List all modules in an app
|
|
172
|
+
all_modules = app.list_modules()
|
|
173
|
+
|
|
174
|
+
# List all module versions
|
|
175
|
+
all_module_versions = module.list_versions()
|
|
176
|
+
|
|
177
|
+
# Delete a module
|
|
178
|
+
app.delete_module(module_id="module_id")
|
|
179
|
+
```
|