clarifai 9.10.2__py3-none-any.whl → 9.10.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- clarifai/client/__init__.py +3 -2
- clarifai/client/app.py +39 -23
- clarifai/client/base.py +6 -6
- clarifai/client/dataset.py +113 -55
- clarifai/client/input.py +47 -55
- clarifai/client/model.py +27 -25
- clarifai/client/module.py +13 -11
- clarifai/client/runner.py +5 -3
- clarifai/client/search.py +7 -3
- clarifai/client/user.py +14 -8
- clarifai/client/workflow.py +22 -20
- clarifai/constants/dataset.py +22 -0
- clarifai/datasets/upload/base.py +9 -7
- clarifai/datasets/upload/features.py +3 -3
- clarifai/datasets/upload/image.py +49 -50
- clarifai/datasets/upload/loaders/coco_captions.py +26 -80
- clarifai/datasets/upload/loaders/coco_detection.py +56 -115
- clarifai/datasets/upload/loaders/coco_segmentation.py +69 -137
- clarifai/datasets/upload/loaders/imagenet_classification.py +2 -3
- clarifai/datasets/upload/loaders/xview_detection.py +3 -3
- clarifai/datasets/upload/text.py +16 -16
- clarifai/datasets/upload/utils.py +196 -21
- clarifai/utils/misc.py +21 -0
- clarifai/versions.py +1 -1
- {clarifai-9.10.2.dist-info → clarifai-9.10.3.dist-info}/METADATA +3 -3
- clarifai-9.10.3.dist-info/RECORD +96 -0
- clarifai-9.10.3.dist-info/top_level.txt +1 -0
- clarifai/auth/__init__.py +0 -6
- clarifai/auth/helper.py +0 -367
- clarifai/auth/register.py +0 -23
- clarifai/auth/stub.py +0 -127
- clarifai/datasets/upload/examples/README.md +0 -31
- clarifai/datasets/upload/examples/image_classification/__init__.py +0 -0
- clarifai/datasets/upload/examples/image_classification/cifar10/__init__.py +0 -0
- clarifai/datasets/upload/examples/image_classification/cifar10/cifar_small_test.csv +0 -10
- clarifai/datasets/upload/examples/image_classification/cifar10/cifar_small_train.csv +0 -10
- clarifai/datasets/upload/examples/image_classification/cifar10/dataset.py +0 -46
- clarifai/datasets/upload/examples/image_classification/cifar10/images/test_batch_700.jpg +0 -0
- clarifai/datasets/upload/examples/image_classification/cifar10/images/test_batch_701.jpg +0 -0
- clarifai/datasets/upload/examples/image_classification/cifar10/images/test_batch_702.jpg +0 -0
- clarifai/datasets/upload/examples/image_classification/cifar10/images/test_batch_703.jpg +0 -0
- clarifai/datasets/upload/examples/image_classification/cifar10/images/test_batch_704.jpg +0 -0
- clarifai/datasets/upload/examples/image_classification/cifar10/images/test_batch_705.jpg +0 -0
- clarifai/datasets/upload/examples/image_classification/cifar10/images/test_batch_706.jpg +0 -0
- clarifai/datasets/upload/examples/image_classification/cifar10/images/test_batch_707.jpg +0 -0
- clarifai/datasets/upload/examples/image_classification/cifar10/images/test_batch_708.jpg +0 -0
- clarifai/datasets/upload/examples/image_classification/cifar10/images/test_batch_709.jpg +0 -0
- clarifai/datasets/upload/examples/image_classification/food-101/__init__.py +0 -0
- clarifai/datasets/upload/examples/image_classification/food-101/dataset.py +0 -39
- clarifai/datasets/upload/examples/image_classification/food-101/images/beignets/1420783.jpg +0 -0
- clarifai/datasets/upload/examples/image_classification/food-101/images/beignets/3287885.jpg +0 -0
- clarifai/datasets/upload/examples/image_classification/food-101/images/beignets/3617075.jpg +0 -0
- clarifai/datasets/upload/examples/image_classification/food-101/images/beignets/38052.jpg +0 -0
- clarifai/datasets/upload/examples/image_classification/food-101/images/beignets/39147.jpg +0 -0
- clarifai/datasets/upload/examples/image_classification/food-101/images/hamburger/139558.jpg +0 -0
- clarifai/datasets/upload/examples/image_classification/food-101/images/hamburger/1636096.jpg +0 -0
- clarifai/datasets/upload/examples/image_classification/food-101/images/hamburger/2480925.jpg +0 -0
- clarifai/datasets/upload/examples/image_classification/food-101/images/hamburger/3385808.jpg +0 -0
- clarifai/datasets/upload/examples/image_classification/food-101/images/hamburger/3647386.jpg +0 -0
- clarifai/datasets/upload/examples/image_classification/food-101/images/prime_rib/1826869.jpg +0 -0
- clarifai/datasets/upload/examples/image_classification/food-101/images/prime_rib/2243245.jpg +0 -0
- clarifai/datasets/upload/examples/image_classification/food-101/images/prime_rib/259212.jpg +0 -0
- clarifai/datasets/upload/examples/image_classification/food-101/images/prime_rib/2842688.jpg +0 -0
- clarifai/datasets/upload/examples/image_classification/food-101/images/prime_rib/3035414.jpg +0 -0
- clarifai/datasets/upload/examples/image_classification/food-101/images/ramen/1545393.jpg +0 -0
- clarifai/datasets/upload/examples/image_classification/food-101/images/ramen/2427642.jpg +0 -0
- clarifai/datasets/upload/examples/image_classification/food-101/images/ramen/3520891.jpg +0 -0
- clarifai/datasets/upload/examples/image_classification/food-101/images/ramen/377566.jpg +0 -0
- clarifai/datasets/upload/examples/image_classification/food-101/images/ramen/503504.jpg +0 -0
- clarifai/datasets/upload/examples/text_classification/__init__.py +0 -0
- clarifai/datasets/upload/examples/text_classification/imdb_dataset/__init__.py +0 -0
- clarifai/datasets/upload/examples/text_classification/imdb_dataset/dataset.py +0 -42
- clarifai/datasets/upload/examples/text_classification/imdb_dataset/test.csv +0 -201
- clarifai/datasets/upload/examples/text_classification/imdb_dataset/train.csv +0 -201
- clarifai/datasets/upload/loaders/README.md +0 -49
- clarifai/models/model_serving/README.md +0 -155
- clarifai/models/model_serving/docs/custom_config.md +0 -33
- clarifai/models/model_serving/docs/dependencies.md +0 -11
- clarifai/models/model_serving/docs/inference_parameters.md +0 -134
- clarifai/models/model_serving/docs/model_types.md +0 -20
- clarifai/models/model_serving/docs/output.md +0 -28
- clarifai/models/model_serving/examples/README.md +0 -7
- clarifai/models/model_serving/examples/image_classification/README.md +0 -9
- clarifai/models/model_serving/examples/image_classification/age_vit/1/vit-age-classifier/README.md +0 -11
- clarifai/models/model_serving/examples/image_classification/age_vit/1/vit-age-classifier/config.json +0 -42
- clarifai/models/model_serving/examples/image_classification/age_vit/1/vit-age-classifier/preprocessor_config.json +0 -15
- clarifai/models/model_serving/examples/image_classification/age_vit/config.pbtxt +0 -23
- clarifai/models/model_serving/examples/image_classification/age_vit/labels.txt +0 -9
- clarifai/models/model_serving/examples/image_classification/age_vit/requirements.txt +0 -7
- clarifai/models/model_serving/examples/text_classification/README.md +0 -9
- clarifai/models/model_serving/examples/text_classification/xlm-roberta/1/twitter-xlm-roberta-base-sentiment/README.md +0 -12
- clarifai/models/model_serving/examples/text_classification/xlm-roberta/1/twitter-xlm-roberta-base-sentiment/config.json +0 -34
- clarifai/models/model_serving/examples/text_classification/xlm-roberta/1/twitter-xlm-roberta-base-sentiment/special_tokens_map.json +0 -1
- clarifai/models/model_serving/examples/text_classification/xlm-roberta/config.pbtxt +0 -21
- clarifai/models/model_serving/examples/text_classification/xlm-roberta/labels.txt +0 -3
- clarifai/models/model_serving/examples/text_classification/xlm-roberta/requirements.txt +0 -7
- clarifai/models/model_serving/examples/text_embedding/README.md +0 -9
- clarifai/models/model_serving/examples/text_to_image/README.md +0 -9
- clarifai/models/model_serving/examples/text_to_image/sd-v1.5/1/__init__.py +0 -0
- clarifai/models/model_serving/examples/text_to_image/sd-v1.5/1/inference.py +0 -52
- clarifai/models/model_serving/examples/text_to_image/sd-v1.5/1/model.py +0 -60
- clarifai/models/model_serving/examples/text_to_image/sd-v1.5/config.pbtxt +0 -22
- clarifai/models/model_serving/examples/text_to_image/sd-v1.5/requirements.txt +0 -6
- clarifai/models/model_serving/examples/text_to_text/README.md +0 -10
- clarifai/models/model_serving/examples/text_to_text/bart-summarize/config.pbtxt +0 -20
- clarifai/models/model_serving/examples/text_to_text/bart-summarize/requirements.txt +0 -4
- clarifai/models/model_serving/examples/visual_detection/README.md +0 -11
- clarifai/models/model_serving/examples/visual_detection/yolov5x/config.pbtxt +0 -36
- clarifai/models/model_serving/examples/visual_detection/yolov5x/labels.txt +0 -80
- clarifai/models/model_serving/examples/visual_detection/yolov5x/requirements.txt +0 -12
- clarifai/models/model_serving/examples/visual_embedding/README.md +0 -9
- clarifai/models/model_serving/examples/visual_embedding/vit-base/config.pbtxt +0 -22
- clarifai/models/model_serving/examples/visual_embedding/vit-base/requirements.txt +0 -5
- clarifai/models/model_serving/examples/visual_segmentation/README.md +0 -9
- clarifai/models/model_serving/examples/visual_segmentation/segformer-b2/config.pbtxt +0 -24
- clarifai/models/model_serving/examples/visual_segmentation/segformer-b2/labels.txt +0 -18
- clarifai/models/model_serving/examples/visual_segmentation/segformer-b2/requirements.txt +0 -5
- clarifai/models/model_serving/model_config/model_types_config/multimodal-embedder.yaml +0 -24
- clarifai/models/model_serving/model_config/model_types_config/text-classifier.yaml +0 -18
- clarifai/models/model_serving/model_config/model_types_config/text-embedder.yaml +0 -18
- clarifai/models/model_serving/model_config/model_types_config/text-to-image.yaml +0 -18
- clarifai/models/model_serving/model_config/model_types_config/text-to-text.yaml +0 -18
- clarifai/models/model_serving/model_config/model_types_config/visual-classifier.yaml +0 -18
- clarifai/models/model_serving/model_config/model_types_config/visual-detector.yaml +0 -28
- clarifai/models/model_serving/model_config/model_types_config/visual-embedder.yaml +0 -18
- clarifai/models/model_serving/model_config/model_types_config/visual-segmenter.yaml +0 -18
- clarifai/modules/README.md +0 -5
- clarifai/modules/style.css +0 -217
- clarifai-9.10.2.dist-info/RECORD +0 -386
- clarifai-9.10.2.dist-info/top_level.txt +0 -2
- clarifai_utils/__init__.py +0 -0
- clarifai_utils/auth/__init__.py +0 -6
- clarifai_utils/auth/helper.py +0 -367
- clarifai_utils/auth/register.py +0 -23
- clarifai_utils/auth/stub.py +0 -127
- clarifai_utils/cli.py +0 -0
- clarifai_utils/client/__init__.py +0 -16
- clarifai_utils/client/app.py +0 -684
- clarifai_utils/client/auth/__init__.py +0 -4
- clarifai_utils/client/auth/helper.py +0 -367
- clarifai_utils/client/auth/register.py +0 -23
- clarifai_utils/client/auth/stub.py +0 -127
- clarifai_utils/client/base.py +0 -131
- clarifai_utils/client/dataset.py +0 -442
- clarifai_utils/client/input.py +0 -892
- clarifai_utils/client/lister.py +0 -54
- clarifai_utils/client/model.py +0 -575
- clarifai_utils/client/module.py +0 -94
- clarifai_utils/client/runner.py +0 -161
- clarifai_utils/client/search.py +0 -254
- clarifai_utils/client/user.py +0 -253
- clarifai_utils/client/workflow.py +0 -223
- clarifai_utils/constants/model.py +0 -4
- clarifai_utils/constants/search.py +0 -2
- clarifai_utils/datasets/__init__.py +0 -0
- clarifai_utils/datasets/export/__init__.py +0 -0
- clarifai_utils/datasets/export/inputs_annotations.py +0 -222
- clarifai_utils/datasets/upload/__init__.py +0 -0
- clarifai_utils/datasets/upload/base.py +0 -66
- clarifai_utils/datasets/upload/examples/README.md +0 -31
- clarifai_utils/datasets/upload/examples/image_classification/__init__.py +0 -0
- clarifai_utils/datasets/upload/examples/image_classification/cifar10/__init__.py +0 -0
- clarifai_utils/datasets/upload/examples/image_classification/cifar10/cifar_small_test.csv +0 -10
- clarifai_utils/datasets/upload/examples/image_classification/cifar10/cifar_small_train.csv +0 -10
- clarifai_utils/datasets/upload/examples/image_classification/cifar10/dataset.py +0 -46
- clarifai_utils/datasets/upload/examples/image_classification/cifar10/images/test_batch_700.jpg +0 -0
- clarifai_utils/datasets/upload/examples/image_classification/cifar10/images/test_batch_701.jpg +0 -0
- clarifai_utils/datasets/upload/examples/image_classification/cifar10/images/test_batch_702.jpg +0 -0
- clarifai_utils/datasets/upload/examples/image_classification/cifar10/images/test_batch_703.jpg +0 -0
- clarifai_utils/datasets/upload/examples/image_classification/cifar10/images/test_batch_704.jpg +0 -0
- clarifai_utils/datasets/upload/examples/image_classification/cifar10/images/test_batch_705.jpg +0 -0
- clarifai_utils/datasets/upload/examples/image_classification/cifar10/images/test_batch_706.jpg +0 -0
- clarifai_utils/datasets/upload/examples/image_classification/cifar10/images/test_batch_707.jpg +0 -0
- clarifai_utils/datasets/upload/examples/image_classification/cifar10/images/test_batch_708.jpg +0 -0
- clarifai_utils/datasets/upload/examples/image_classification/cifar10/images/test_batch_709.jpg +0 -0
- clarifai_utils/datasets/upload/examples/image_classification/food-101/__init__.py +0 -0
- clarifai_utils/datasets/upload/examples/image_classification/food-101/dataset.py +0 -39
- clarifai_utils/datasets/upload/examples/image_classification/food-101/images/beignets/1420783.jpg +0 -0
- clarifai_utils/datasets/upload/examples/image_classification/food-101/images/beignets/3287885.jpg +0 -0
- clarifai_utils/datasets/upload/examples/image_classification/food-101/images/beignets/3617075.jpg +0 -0
- clarifai_utils/datasets/upload/examples/image_classification/food-101/images/beignets/38052.jpg +0 -0
- clarifai_utils/datasets/upload/examples/image_classification/food-101/images/beignets/39147.jpg +0 -0
- clarifai_utils/datasets/upload/examples/image_classification/food-101/images/hamburger/139558.jpg +0 -0
- clarifai_utils/datasets/upload/examples/image_classification/food-101/images/hamburger/1636096.jpg +0 -0
- clarifai_utils/datasets/upload/examples/image_classification/food-101/images/hamburger/2480925.jpg +0 -0
- clarifai_utils/datasets/upload/examples/image_classification/food-101/images/hamburger/3385808.jpg +0 -0
- clarifai_utils/datasets/upload/examples/image_classification/food-101/images/hamburger/3647386.jpg +0 -0
- clarifai_utils/datasets/upload/examples/image_classification/food-101/images/prime_rib/1826869.jpg +0 -0
- clarifai_utils/datasets/upload/examples/image_classification/food-101/images/prime_rib/2243245.jpg +0 -0
- clarifai_utils/datasets/upload/examples/image_classification/food-101/images/prime_rib/259212.jpg +0 -0
- clarifai_utils/datasets/upload/examples/image_classification/food-101/images/prime_rib/2842688.jpg +0 -0
- clarifai_utils/datasets/upload/examples/image_classification/food-101/images/prime_rib/3035414.jpg +0 -0
- clarifai_utils/datasets/upload/examples/image_classification/food-101/images/ramen/1545393.jpg +0 -0
- clarifai_utils/datasets/upload/examples/image_classification/food-101/images/ramen/2427642.jpg +0 -0
- clarifai_utils/datasets/upload/examples/image_classification/food-101/images/ramen/3520891.jpg +0 -0
- clarifai_utils/datasets/upload/examples/image_classification/food-101/images/ramen/377566.jpg +0 -0
- clarifai_utils/datasets/upload/examples/image_classification/food-101/images/ramen/503504.jpg +0 -0
- clarifai_utils/datasets/upload/examples/text_classification/__init__.py +0 -0
- clarifai_utils/datasets/upload/examples/text_classification/imdb_dataset/__init__.py +0 -0
- clarifai_utils/datasets/upload/examples/text_classification/imdb_dataset/dataset.py +0 -42
- clarifai_utils/datasets/upload/examples/text_classification/imdb_dataset/test.csv +0 -201
- clarifai_utils/datasets/upload/examples/text_classification/imdb_dataset/train.csv +0 -201
- clarifai_utils/datasets/upload/features.py +0 -44
- clarifai_utils/datasets/upload/image.py +0 -165
- clarifai_utils/datasets/upload/loaders/README.md +0 -49
- clarifai_utils/datasets/upload/loaders/__init__.py +0 -0
- clarifai_utils/datasets/upload/loaders/coco_captions.py +0 -103
- clarifai_utils/datasets/upload/loaders/coco_detection.py +0 -134
- clarifai_utils/datasets/upload/loaders/coco_segmentation.py +0 -166
- clarifai_utils/datasets/upload/loaders/imagenet_classification.py +0 -59
- clarifai_utils/datasets/upload/loaders/xview_detection.py +0 -148
- clarifai_utils/datasets/upload/text.py +0 -53
- clarifai_utils/datasets/upload/utils.py +0 -63
- clarifai_utils/errors.py +0 -89
- clarifai_utils/models/__init__.py +0 -0
- clarifai_utils/models/api.py +0 -283
- clarifai_utils/models/model_serving/README.md +0 -155
- clarifai_utils/models/model_serving/__init__.py +0 -12
- clarifai_utils/models/model_serving/cli/__init__.py +0 -12
- clarifai_utils/models/model_serving/cli/deploy_cli.py +0 -123
- clarifai_utils/models/model_serving/cli/model_zip.py +0 -61
- clarifai_utils/models/model_serving/cli/repository.py +0 -87
- clarifai_utils/models/model_serving/constants.py +0 -1
- clarifai_utils/models/model_serving/docs/custom_config.md +0 -33
- clarifai_utils/models/model_serving/docs/dependencies.md +0 -11
- clarifai_utils/models/model_serving/docs/inference_parameters.md +0 -134
- clarifai_utils/models/model_serving/docs/model_types.md +0 -20
- clarifai_utils/models/model_serving/docs/output.md +0 -28
- clarifai_utils/models/model_serving/examples/README.md +0 -7
- clarifai_utils/models/model_serving/examples/image_classification/README.md +0 -9
- clarifai_utils/models/model_serving/examples/image_classification/age_vit/1/__init__.py +0 -0
- clarifai_utils/models/model_serving/examples/image_classification/age_vit/1/inference.py +0 -56
- clarifai_utils/models/model_serving/examples/image_classification/age_vit/1/model.py +0 -61
- clarifai_utils/models/model_serving/examples/image_classification/age_vit/1/vit-age-classifier/README.md +0 -11
- clarifai_utils/models/model_serving/examples/image_classification/age_vit/1/vit-age-classifier/config.json +0 -42
- clarifai_utils/models/model_serving/examples/image_classification/age_vit/1/vit-age-classifier/preprocessor_config.json +0 -15
- clarifai_utils/models/model_serving/examples/image_classification/age_vit/config.pbtxt +0 -23
- clarifai_utils/models/model_serving/examples/image_classification/age_vit/labels.txt +0 -9
- clarifai_utils/models/model_serving/examples/image_classification/age_vit/requirements.txt +0 -7
- clarifai_utils/models/model_serving/examples/text_classification/README.md +0 -9
- clarifai_utils/models/model_serving/examples/text_classification/xlm-roberta/1/__init__.py +0 -0
- clarifai_utils/models/model_serving/examples/text_classification/xlm-roberta/1/inference.py +0 -55
- clarifai_utils/models/model_serving/examples/text_classification/xlm-roberta/1/model.py +0 -61
- clarifai_utils/models/model_serving/examples/text_classification/xlm-roberta/1/twitter-xlm-roberta-base-sentiment/README.md +0 -12
- clarifai_utils/models/model_serving/examples/text_classification/xlm-roberta/1/twitter-xlm-roberta-base-sentiment/config.json +0 -34
- clarifai_utils/models/model_serving/examples/text_classification/xlm-roberta/1/twitter-xlm-roberta-base-sentiment/special_tokens_map.json +0 -1
- clarifai_utils/models/model_serving/examples/text_classification/xlm-roberta/config.pbtxt +0 -21
- clarifai_utils/models/model_serving/examples/text_classification/xlm-roberta/labels.txt +0 -3
- clarifai_utils/models/model_serving/examples/text_classification/xlm-roberta/requirements.txt +0 -7
- clarifai_utils/models/model_serving/examples/text_embedding/README.md +0 -9
- clarifai_utils/models/model_serving/examples/text_to_image/README.md +0 -9
- clarifai_utils/models/model_serving/examples/text_to_image/sd-v1.5/1/__init__.py +0 -0
- clarifai_utils/models/model_serving/examples/text_to_image/sd-v1.5/1/inference.py +0 -52
- clarifai_utils/models/model_serving/examples/text_to_image/sd-v1.5/1/model.py +0 -60
- clarifai_utils/models/model_serving/examples/text_to_image/sd-v1.5/config.pbtxt +0 -22
- clarifai_utils/models/model_serving/examples/text_to_image/sd-v1.5/requirements.txt +0 -6
- clarifai_utils/models/model_serving/examples/text_to_text/README.md +0 -10
- clarifai_utils/models/model_serving/examples/text_to_text/bart-summarize/1/__init__.py +0 -0
- clarifai_utils/models/model_serving/examples/text_to_text/bart-summarize/1/inference.py +0 -47
- clarifai_utils/models/model_serving/examples/text_to_text/bart-summarize/1/model.py +0 -60
- clarifai_utils/models/model_serving/examples/text_to_text/bart-summarize/config.pbtxt +0 -20
- clarifai_utils/models/model_serving/examples/text_to_text/bart-summarize/requirements.txt +0 -4
- clarifai_utils/models/model_serving/examples/visual_detection/README.md +0 -11
- clarifai_utils/models/model_serving/examples/visual_detection/yolov5x/1/inference.py +0 -72
- clarifai_utils/models/model_serving/examples/visual_detection/yolov5x/1/model.py +0 -61
- clarifai_utils/models/model_serving/examples/visual_detection/yolov5x/config.pbtxt +0 -36
- clarifai_utils/models/model_serving/examples/visual_detection/yolov5x/labels.txt +0 -80
- clarifai_utils/models/model_serving/examples/visual_detection/yolov5x/requirements.txt +0 -12
- clarifai_utils/models/model_serving/examples/visual_embedding/README.md +0 -9
- clarifai_utils/models/model_serving/examples/visual_embedding/vit-base/1/__init__.py +0 -0
- clarifai_utils/models/model_serving/examples/visual_embedding/vit-base/1/inference.py +0 -51
- clarifai_utils/models/model_serving/examples/visual_embedding/vit-base/1/model.py +0 -60
- clarifai_utils/models/model_serving/examples/visual_embedding/vit-base/config.pbtxt +0 -22
- clarifai_utils/models/model_serving/examples/visual_embedding/vit-base/requirements.txt +0 -5
- clarifai_utils/models/model_serving/examples/visual_segmentation/README.md +0 -9
- clarifai_utils/models/model_serving/examples/visual_segmentation/segformer-b2/1/__init__.py +0 -0
- clarifai_utils/models/model_serving/examples/visual_segmentation/segformer-b2/1/inference.py +0 -55
- clarifai_utils/models/model_serving/examples/visual_segmentation/segformer-b2/1/model.py +0 -60
- clarifai_utils/models/model_serving/examples/visual_segmentation/segformer-b2/config.pbtxt +0 -24
- clarifai_utils/models/model_serving/examples/visual_segmentation/segformer-b2/labels.txt +0 -18
- clarifai_utils/models/model_serving/examples/visual_segmentation/segformer-b2/requirements.txt +0 -5
- clarifai_utils/models/model_serving/model_config/__init__.py +0 -14
- clarifai_utils/models/model_serving/model_config/config.py +0 -302
- clarifai_utils/models/model_serving/model_config/inference_parameter.py +0 -124
- clarifai_utils/models/model_serving/model_config/model_types_config/multimodal-embedder.yaml +0 -24
- clarifai_utils/models/model_serving/model_config/model_types_config/text-classifier.yaml +0 -18
- clarifai_utils/models/model_serving/model_config/model_types_config/text-embedder.yaml +0 -18
- clarifai_utils/models/model_serving/model_config/model_types_config/text-to-image.yaml +0 -18
- clarifai_utils/models/model_serving/model_config/model_types_config/text-to-text.yaml +0 -18
- clarifai_utils/models/model_serving/model_config/model_types_config/visual-classifier.yaml +0 -18
- clarifai_utils/models/model_serving/model_config/model_types_config/visual-detector.yaml +0 -28
- clarifai_utils/models/model_serving/model_config/model_types_config/visual-embedder.yaml +0 -18
- clarifai_utils/models/model_serving/model_config/model_types_config/visual-segmenter.yaml +0 -18
- clarifai_utils/models/model_serving/model_config/serializer.py +0 -134
- clarifai_utils/models/model_serving/models/__init__.py +0 -12
- clarifai_utils/models/model_serving/models/default_test.py +0 -275
- clarifai_utils/models/model_serving/models/inference.py +0 -42
- clarifai_utils/models/model_serving/models/model_types.py +0 -265
- clarifai_utils/models/model_serving/models/output.py +0 -124
- clarifai_utils/models/model_serving/models/pb_model.py +0 -74
- clarifai_utils/models/model_serving/models/test.py +0 -64
- clarifai_utils/models/model_serving/pb_model_repository.py +0 -101
- clarifai_utils/modules/README.md +0 -5
- clarifai_utils/modules/__init__.py +0 -0
- clarifai_utils/modules/css.py +0 -60
- clarifai_utils/modules/pages.py +0 -42
- clarifai_utils/modules/style.css +0 -217
- clarifai_utils/runners/__init__.py +0 -0
- clarifai_utils/runners/example.py +0 -33
- clarifai_utils/schema/search.py +0 -69
- clarifai_utils/urls/helper.py +0 -103
- clarifai_utils/utils/__init__.py +0 -0
- clarifai_utils/utils/logging.py +0 -90
- clarifai_utils/utils/misc.py +0 -33
- clarifai_utils/utils/model_train.py +0 -157
- clarifai_utils/versions.py +0 -6
- clarifai_utils/workflows/__init__.py +0 -0
- clarifai_utils/workflows/export.py +0 -68
- clarifai_utils/workflows/utils.py +0 -59
- clarifai_utils/workflows/validate.py +0 -67
- {clarifai-9.10.2.dist-info → clarifai-9.10.3.dist-info}/LICENSE +0 -0
- {clarifai-9.10.2.dist-info → clarifai-9.10.3.dist-info}/WHEEL +0 -0
- {clarifai-9.10.2.dist-info → clarifai-9.10.3.dist-info}/entry_points.txt +0 -0
|
@@ -1,59 +0,0 @@
|
|
|
1
|
-
#! ImageNet Classification dataset
|
|
2
|
-
|
|
3
|
-
import os
|
|
4
|
-
|
|
5
|
-
from clarifai.datasets.upload.base import ClarifaiDataLoader
|
|
6
|
-
from ..features import VisualClassificationFeatures
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
class ImageNetDataLoader(ClarifaiDataLoader):
|
|
10
|
-
"""ImageNet Dataset."""
|
|
11
|
-
|
|
12
|
-
def __init__(self, split: str = "train"):
|
|
13
|
-
"""
|
|
14
|
-
Initialize dataset params.
|
|
15
|
-
Args:
|
|
16
|
-
data_dir: the local dataset directory.
|
|
17
|
-
split: "train" or "test"
|
|
18
|
-
"""
|
|
19
|
-
self.split = split
|
|
20
|
-
self.data_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)),
|
|
21
|
-
"data") # data storage directory
|
|
22
|
-
self.label_map = dict()
|
|
23
|
-
self.concepts = []
|
|
24
|
-
self.image_paths = []
|
|
25
|
-
|
|
26
|
-
self.load_data()
|
|
27
|
-
|
|
28
|
-
def load_data(self):
|
|
29
|
-
#Creating label map
|
|
30
|
-
with open(os.path.join(self.data_dir, "LOC_synset_mapping.txt")) as _file:
|
|
31
|
-
for _id in _file:
|
|
32
|
-
#Removing the spaces,upper quotes and Converting to set to remove repetitions. Then converting to list for compatibility.
|
|
33
|
-
self.label_map[_id.split(" ")[0]] = list({
|
|
34
|
-
"".join(("".join((label.rstrip().lstrip().split(" ")))).split("'"))
|
|
35
|
-
for label in _id[_id.find(" ") + 1:].split(",")
|
|
36
|
-
})
|
|
37
|
-
|
|
38
|
-
for _folder in os.listdir(os.path.join(self.data_dir, self.split)):
|
|
39
|
-
try:
|
|
40
|
-
concept = self.label_map[_folder] #concepts
|
|
41
|
-
except Exception:
|
|
42
|
-
continue
|
|
43
|
-
folder_path = os.path.join(self.data_dir, self.split) + "/" + _folder
|
|
44
|
-
for _img in os.listdir(folder_path):
|
|
45
|
-
if _img.lower().endswith(('.png', '.jpg', '.jpeg', '.tiff')):
|
|
46
|
-
self.concepts.append(concept)
|
|
47
|
-
self.image_paths.append(folder_path + "/" + _img)
|
|
48
|
-
|
|
49
|
-
assert len(self.concepts) == len(self.image_paths)
|
|
50
|
-
"Number of concepts and images are not equal"
|
|
51
|
-
|
|
52
|
-
def __len__(self):
|
|
53
|
-
return len(self.image_paths)
|
|
54
|
-
|
|
55
|
-
def __getitem__(self, idx):
|
|
56
|
-
return VisualClassificationFeatures(
|
|
57
|
-
image_path=self.image_paths[idx],
|
|
58
|
-
label=self.concepts[idx],
|
|
59
|
-
id=self.image_paths[idx].split('.')[0].split('/')[-1])
|
|
@@ -1,148 +0,0 @@
|
|
|
1
|
-
import glob
|
|
2
|
-
import json
|
|
3
|
-
import os
|
|
4
|
-
from collections import defaultdict
|
|
5
|
-
from concurrent.futures import ThreadPoolExecutor
|
|
6
|
-
from multiprocessing import cpu_count
|
|
7
|
-
from typing import DefaultDict, Dict, List
|
|
8
|
-
|
|
9
|
-
import cv2
|
|
10
|
-
from tqdm import tqdm
|
|
11
|
-
|
|
12
|
-
from clarifai.datasets.upload.base import ClarifaiDataLoader
|
|
13
|
-
|
|
14
|
-
from ..features import VisualDetectionFeatures
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
class xviewDetectionDataLoader(ClarifaiDataLoader):
|
|
18
|
-
"""xview Image Detection Dataset"""
|
|
19
|
-
|
|
20
|
-
xview_concepts = [
|
|
21
|
-
'Fixed-wing Aircraft', 'Small Aircraft', 'Cargo Plane', 'Helicopter', 'Passenger Vehicle',
|
|
22
|
-
'Small Car', 'Bus', 'Pickup Truck', 'Utility Truck', 'Truck', 'Cargo Truck', 'Truck w-Box',
|
|
23
|
-
'Truck Tractor', 'Trailer', 'Truck w-Flatbed', 'Truck w-Liquid', 'Crane Truck',
|
|
24
|
-
'Railway Vehicle', 'Passenger Car', 'Cargo Car', 'Flat Car', 'Tank car', 'Locomotive',
|
|
25
|
-
'Maritime Vessel', 'Motorboat', 'Sailboat', 'Tugboat', 'Barge', 'Fishing Vessel', 'Ferry',
|
|
26
|
-
'Yacht', 'Container Ship', 'Oil Tanker', 'Engineering Vehicle', 'Tower crane',
|
|
27
|
-
'Container Crane', 'Reach Stacker', 'Straddle Carrier', 'Mobile Crane', 'Dump Truck',
|
|
28
|
-
'Haul Truck', 'Scraper-Tractor', 'Front loader-Bulldozer', 'Excavator', 'Cement Mixer',
|
|
29
|
-
'Ground Grader', 'Hut-Tent', 'Shed', 'Building', 'Aircraft Hangar', 'Damaged Building',
|
|
30
|
-
'Facility', 'Construction Site', 'Vehicle Lot', 'Helipad', 'Storage Tank',
|
|
31
|
-
'Shipping container lot', 'Shipping Container', 'Pylon', 'Tower'
|
|
32
|
-
]
|
|
33
|
-
|
|
34
|
-
def __init__(self, split: str = "train") -> None:
|
|
35
|
-
"""Initialize and Compress xview dataset.
|
|
36
|
-
Args:
|
|
37
|
-
split: "train"
|
|
38
|
-
"""
|
|
39
|
-
|
|
40
|
-
self.data_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "data")
|
|
41
|
-
self.img_dir = os.path.join(self.data_dir, "train_images")
|
|
42
|
-
self.img_comp_dir = os.path.join(self.data_dir, "train_images_comp")
|
|
43
|
-
self.label_file = os.path.join(self.data_dir, "xview_train.geojson")
|
|
44
|
-
|
|
45
|
-
self.preprocess()
|
|
46
|
-
self.all_data = self.xview_data_parser()
|
|
47
|
-
|
|
48
|
-
self.load_data()
|
|
49
|
-
|
|
50
|
-
def compress_tiff(self, img_path: str) -> None:
|
|
51
|
-
"""Compress tiff image"""
|
|
52
|
-
img_comp_path = os.path.join(self.img_comp_dir, os.path.basename(img_path))
|
|
53
|
-
img_arr = cv2.imread(img_path)
|
|
54
|
-
cv2.imwrite(
|
|
55
|
-
img_comp_path, img_arr, params=(cv2.IMWRITE_TIFF_COMPRESSION, 8)) # 8: Adobe Deflate
|
|
56
|
-
|
|
57
|
-
def preprocess(self):
|
|
58
|
-
"""Compress the tiff images to comply with clarifai grpc image encoding limit(<20MB) Uses ADOBE_DEFLATE compression algorithm"""
|
|
59
|
-
all_img_ids = glob.glob(os.path.join(self.img_dir, "*.tif"))
|
|
60
|
-
|
|
61
|
-
if not os.path.exists(self.img_comp_dir):
|
|
62
|
-
os.mkdir(self.img_comp_dir)
|
|
63
|
-
|
|
64
|
-
num_workers = cpu_count()
|
|
65
|
-
futures = []
|
|
66
|
-
|
|
67
|
-
with ThreadPoolExecutor(max_workers=num_workers) as executor:
|
|
68
|
-
with tqdm(total=len(all_img_ids), desc="compressing ...") as progress:
|
|
69
|
-
|
|
70
|
-
for img_path in all_img_ids:
|
|
71
|
-
future = executor.submit(self.compress_tiff, img_path)
|
|
72
|
-
future.add_done_callback(lambda _: progress.update())
|
|
73
|
-
futures.append(future)
|
|
74
|
-
|
|
75
|
-
results = []
|
|
76
|
-
for future in futures:
|
|
77
|
-
result = future.result()
|
|
78
|
-
results.append(result)
|
|
79
|
-
|
|
80
|
-
def xview_classes2indices(self, classes: List) -> List:
|
|
81
|
-
"""remap xview classes 11-94 to 0-59"""
|
|
82
|
-
indices = [
|
|
83
|
-
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 0, 1, 2, -1, 3, -1, 4, 5, 6, 7, 8, -1, 9, 10,
|
|
84
|
-
11, 12, 13, 14, 15, -1, -1, 16, 17, 18, 19, 20, 21, 22, -1, 23, 24, 25, -1, 26, 27, -1, 28,
|
|
85
|
-
-1, 29, 30, 31, 32, 33, 34, 35, 36, 37, -1, 38, 39, 40, 41, 42, 43, 44, 45, -1, -1, -1, -1,
|
|
86
|
-
46, 47, 48, 49, -1, 50, 51, -1, 52, -1, -1, -1, 53, 54, -1, 55, -1, -1, 56, -1, 57, -1, 58,
|
|
87
|
-
59
|
|
88
|
-
]
|
|
89
|
-
return [indices[int(c)] for c in classes]
|
|
90
|
-
|
|
91
|
-
def xview_indices2concepts(self, indices: List) -> List:
  """Translate contiguous class indices into clarifai concept names."""
  return list(map(self.xview_concepts.__getitem__, indices))
|
|
94
|
-
|
|
95
|
-
def xview_data_parser(self) -> DefaultDict[str, Dict[List, List]]:
  """Parse the xview geojson label file into a nested dict keyed by image
  id, each value holding parallel ``bboxes`` and ``concepts`` lists.
  """
  parsed = defaultdict(lambda: dict(bboxes=[], concepts=[]))

  with open(self.label_file) as f:
    geojson_data = json.loads(f.read())

  features = geojson_data['features']
  for feature in tqdm(features, total=len(features), desc="Parsing geojson data"):
    props = feature['properties']
    image_id = props['image_id'].split(".")[0]
    # bounds_imcoords is a comma-separated "xmin,ymin,xmax,ymax" string.
    bbox = [int(coord) for coord in props['bounds_imcoords'].split(",")]
    concept = self.xview_indices2concepts(self.xview_classes2indices([props['type_id']]))

    parsed[image_id]['bboxes'].append(bbox)
    parsed[image_id]['concepts'].append(concept[0])

  return parsed
|
|
114
|
-
|
|
115
|
-
def load_data(self):
  """Populate ``self.image_paths`` with every compressed tiff path
  found in ``self.img_comp_dir``.
  """
  self.image_paths = glob.glob(os.path.join(self.img_comp_dir, "*.tif"))
|
|
120
|
-
|
|
121
|
-
def __len__(self):
  """Dataset size: the number of (compressed) image paths."""
  image_count = len(self.image_paths)
  return image_count
|
|
123
|
-
|
|
124
|
-
def __getitem__(self, index: int):
  """Get dataset item for a given index.

  Bounding boxes are normalized to [0, 1] relative to the image size;
  degenerate boxes (zero/negative extent after clipping) are dropped
  together with their class labels.

  Returns:
    VisualDetectionFeatures type.
  """
  image_path = self.image_paths[index]
  _id = os.path.splitext(os.path.basename(image_path))[0]

  image_height, image_width = cv2.imread(image_path).shape[:2]
  annots = []
  class_names = []
  for bbox, concept in zip(self.all_data[_id]['bboxes'], self.all_data[_id]['concepts']):
    x_min = max(min(bbox[0] / image_width, 1.0), 0.0)  #left_col
    y_min = max(min(bbox[1] / image_height, 1.0), 0.0)  #top_row
    x_max = max(min(bbox[2] / image_width, 1.0), 0.0)  #right_col
    y_max = max(min(bbox[3] / image_height, 1.0), 0.0)  #bottom_row
    if (x_min >= x_max) or (y_min >= y_max):
      continue
    # Was a no-op list comprehension ([x for x in [...]]); a plain list
    # literal is equivalent.
    annots.append([x_min, y_min, x_max, y_max])
    class_names.append(concept)

  assert len(class_names) == len(annots), f"Num classes must match num bbox annotations\
  for a single image. Found {len(class_names)} classes and {len(annots)} bboxes."

  return VisualDetectionFeatures(image_path, class_names, annots, id=_id)
|
|
@@ -1,53 +0,0 @@
|
|
|
1
|
-
from concurrent.futures import ThreadPoolExecutor
|
|
2
|
-
from typing import Iterator, List, Tuple
|
|
3
|
-
|
|
4
|
-
from clarifai_grpc.grpc.api import resources_pb2
|
|
5
|
-
from google.protobuf.struct_pb2 import Struct
|
|
6
|
-
|
|
7
|
-
from .base import ClarifaiDataset
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
class TextClassificationDataset(ClarifaiDataset):
  """Upload text classification datasets to clarifai datasets."""

  def __init__(self, datagen_object: Iterator, dataset_id: str, split: str) -> None:
    super().__init__(datagen_object, dataset_id, split)

  def _extract_protos(self, batch_input_ids: List[int]
                     ) -> Tuple[List[resources_pb2.Input], List[resources_pb2.Annotation]]:
    """Create input text and annotation protos for batch of input ids.

    Args:
      batch_input_ids: List of input IDs to retrieve the protos for.

    Returns:
      input_protos: List of input protos.
      annotation_protos: List of annotation protos (left empty here; the
        labels travel on the input proto itself).
    """
    input_protos, annotation_protos = [], []

    def process_datagen_item(idx):
      # `idx` renamed from `id` to stop shadowing the builtin.
      datagen_item = self.datagen_object[idx]
      metadata = Struct()
      text = datagen_item.text
      labels = datagen_item.labels if isinstance(
          datagen_item.labels, list) else [datagen_item.labels]  # clarifai concept
      input_id = f"{self.dataset_id}-{self.split}-{idx}" if datagen_item.id is None else f"{self.dataset_id}-{self.split}-{str(datagen_item.id)}"
      if datagen_item.metadata is not None:
        metadata.update(datagen_item.metadata)
      else:
        metadata.update({"split": self.split})

      self.all_input_ids[idx] = input_id
      # list.append is atomic under the GIL, so the worker threads can
      # safely share `input_protos`.
      input_protos.append(
          self.input_object.get_text_input(
              input_id=input_id,
              raw_text=text,
              dataset_id=self.dataset_id,
              labels=labels,
              metadata=metadata))

    with ThreadPoolExecutor(max_workers=4) as executor:
      futures = [executor.submit(process_datagen_item, idx) for idx in batch_input_ids]
      # result() re-raises any exception raised inside a worker.
      for job in futures:
        job.result()

    return input_protos, annotation_protos
|
|
@@ -1,63 +0,0 @@
|
|
|
1
|
-
import importlib
|
|
2
|
-
import inspect
|
|
3
|
-
import os
|
|
4
|
-
from typing import Union
|
|
5
|
-
|
|
6
|
-
from clarifai.datasets.upload.base import ClarifaiDataLoader
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
def load_module_dataloader(module_dir: Union[str, os.PathLike], split: str) -> "ClarifaiDataLoader":
  """Validate and import a dataset module's data loader.

  Args:
    `module_dir`: relative path to the module directory
      The directory must contain a `dataset.py` script and the data itself.
    `split`: "train" or "val"/"test" dataset split

  Module Directory Structure:
  ---------------------------
  <folder_name>/
  ├──__init__.py
  ├──<Your local dir dataset>/
  └──dataset.py

  dataset.py must implement a class named following the convention,
  <dataset_name>DataLoader and this class must inherit from base ClarifaiDataLoader()

  Raises:
    ImportError: if `dataset.py` cannot be located in `module_dir`.
    ValueError: if `dataset.py` defines no `*DataLoader` class.
  """
  # `import importlib` alone does not guarantee the `util` submodule is
  # loaded; import it explicitly.
  import importlib.util

  module_path = os.path.join(module_dir, "dataset.py")
  spec = importlib.util.spec_from_file_location("dataset", module_path)

  if not spec:
    raise ImportError(f"Module not found at {module_path}")

  # Load the module using the spec and execute it so its classes exist.
  dataset = importlib.util.module_from_spec(spec)
  spec.loader.exec_module(dataset)

  # Pick the loader class *defined in* dataset.py — a bare name scan would
  # also match the ClarifaiDataLoader base class when it is imported into
  # the module's namespace. Last matching definition wins, as before.
  main_module_cls = None
  for name, obj in dataset.__dict__.items():
    if inspect.isclass(obj) and "DataLoader" in name and obj.__module__ == "dataset":
      main_module_cls = obj

  if main_module_cls is None:
    # Fail loudly instead of the original `'NoneType' is not callable`.
    raise ValueError(f"No `*DataLoader` class found in {module_path}")

  return main_module_cls(split)
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
def load_dataloader(name: str, split: str) -> "ClarifaiDataLoader":
  """Get dataset generator object from dataset loaders.

  Args:
    `name`: dataset module name in datasets/upload/loaders/.
    `split`: "train" or "val"/"test" dataset split

  Returns:
    Data generator object

  Raises:
    ValueError: if the loader module defines no `*DataLoader` class.
  """
  loader_dataset = importlib.import_module(f"clarifai.datasets.upload.loaders.{name}")

  # Scan for the loader class. The loop variable is `cls_name`, not `name`:
  # the original reused `name` and thereby clobbered its own parameter.
  # Last matching class wins, as before.
  main_module_cls = None
  for cls_name, obj in loader_dataset.__dict__.items():
    if inspect.isclass(obj) and "DataLoader" in cls_name:
      main_module_cls = obj

  if main_module_cls is None:
    # Fail loudly instead of the original `'NoneType' is not callable`.
    raise ValueError(f"No `*DataLoader` class found in loader module {name!r}")

  return main_module_cls(split)
|
clarifai_utils/errors.py
DELETED
|
@@ -1,89 +0,0 @@
|
|
|
1
|
-
# -*- coding: utf-8 -*-
|
|
2
|
-
import json
|
|
3
|
-
import time
|
|
4
|
-
|
|
5
|
-
import requests # noqa
|
|
6
|
-
from google.protobuf.json_format import MessageToDict
|
|
7
|
-
|
|
8
|
-
from clarifai.versions import CLIENT_VERSION, OS_VER, PYTHON_VERSION
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
class TokenError(Exception):
  """Token error."""
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
class ApiError(Exception):
  """ API Server error.

  Captures the failed request (resource, params, method) plus the decoded
  error status, and renders a detailed multi-line message.
  """

  def __init__(self, resource: str, params: dict, method: str,
               response: requests.Response = None) -> None:
    # NOTE(review): despite the annotation, `response` is decoded with
    # protobuf's MessageToDict below, so callers appear to pass a proto
    # message rather than a requests.Response — confirm against call sites.
    self.resource = resource
    self.params = params
    self.method = method
    self.response = response

    self.error_code = 'N/A'
    self.error_desc = 'N/A'
    self.error_details = 'N/A'
    response_json = 'N/A'

    if response is not None:
      response_json_dict = MessageToDict(response)
      status = response_json_dict.get('status', {})

      # Default to 'N/A' (not None) when a field is absent, so the rendered
      # message stays consistent with the no-response case; also avoid the
      # KeyError the original hit on a missing 'status' key.
      self.error_code = status.get('code', 'N/A')
      self.error_desc = status.get('description', 'N/A')
      self.error_details = status.get('details', 'N/A')
      response_json = json.dumps(status, indent=2)

    current_ts_str = str(time.time())

    msg = """%(method)s %(resource)s FAILED(%(time_ts)s). error_code: %(error_code)s, error_description: %(error_desc)s, error_details: %(error_details)s
>> Python client %(client_version)s with Python %(python_version)s on %(os_version)s
>> %(method)s %(resource)s
>> REQUEST(%(time_ts)s) %(request)s
>> RESPONSE(%(time_ts)s) %(response)s""" % {
        'method': method,
        'resource': resource,
        'error_code': self.error_code,
        'error_desc': self.error_desc,
        'error_details': self.error_details,
        'request': json.dumps(params, indent=2),
        'response': response_json,
        'time_ts': current_ts_str,
        'client_version': CLIENT_VERSION,
        'python_version': PYTHON_VERSION,
        'os_version': OS_VER
    }

    super(ApiError, self).__init__(msg)
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
class ApiClientError(Exception):
  """Client-side API error."""
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
class UserError(Exception):
  """Error caused by invalid user input or usage."""
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
class AuthError(Exception):
  """Raised when a client has missing or invalid authentication."""
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
def _base_url(url: str) -> str:
  """
  Extracts the base URL from the url, which is everything before the 4th slash character.
  https://www.clarifai.com/v2/models/1/output -> https://www.clarifai.com/v2/
  """
  try:
    cut = _find_nth(url, '/', 4) + 1
    return url[:cut]
  except Exception:
    # Defensive only: _find_nth returns -1 for missing slashes (yielding
    # ''), so for string input this branch is not expected to trigger.
    return ''
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
def _find_nth(haystack: str, needle: str, n: int) -> int:
|
|
85
|
-
start = haystack.find(needle)
|
|
86
|
-
while start >= 0 and n > 1:
|
|
87
|
-
start = haystack.find(needle, start + len(needle))
|
|
88
|
-
n -= 1
|
|
89
|
-
return start
|
|
File without changes
|