label-studio-sdk 0.0.34__py3-none-any.whl → 1.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of label-studio-sdk might be problematic.
- label_studio_sdk/__init__.py +232 -9
- label_studio_sdk/_extensions/label_studio_tools/__init__.py +0 -0
- label_studio_sdk/_extensions/label_studio_tools/core/__init__.py +0 -0
- label_studio_sdk/_extensions/label_studio_tools/core/label_config.py +163 -0
- label_studio_sdk/_extensions/label_studio_tools/core/utils/__init__.py +0 -0
- label_studio_sdk/_extensions/label_studio_tools/core/utils/exceptions.py +2 -0
- label_studio_sdk/_extensions/label_studio_tools/core/utils/io.py +228 -0
- label_studio_sdk/_extensions/label_studio_tools/core/utils/params.py +45 -0
- label_studio_sdk/_extensions/label_studio_tools/etl/__init__.py +1 -0
- label_studio_sdk/_extensions/label_studio_tools/etl/beam.py +34 -0
- label_studio_sdk/_extensions/label_studio_tools/etl/example.py +17 -0
- label_studio_sdk/_extensions/label_studio_tools/etl/registry.py +67 -0
- label_studio_sdk/_extensions/label_studio_tools/postprocessing/__init__.py +0 -0
- label_studio_sdk/_extensions/label_studio_tools/postprocessing/video.py +97 -0
- label_studio_sdk/_extensions/pager_ext.py +49 -0
- label_studio_sdk/_legacy/__init__.py +11 -0
- label_studio_sdk/_legacy/client.py +471 -0
- label_studio_sdk/_legacy/objects.py +74 -0
- label_studio_sdk/{project.py → _legacy/project.py} +2 -2
- label_studio_sdk/{schema → _legacy/schema}/label_config_schema.json +14 -14
- label_studio_sdk/actions/__init__.py +27 -0
- label_studio_sdk/actions/client.py +223 -0
- label_studio_sdk/actions/types/__init__.py +25 -0
- label_studio_sdk/actions/types/actions_create_request_filters.py +43 -0
- label_studio_sdk/actions/types/actions_create_request_filters_conjunction.py +5 -0
- label_studio_sdk/actions/types/actions_create_request_filters_items_item.py +49 -0
- label_studio_sdk/actions/types/actions_create_request_filters_items_item_filter.py +31 -0
- label_studio_sdk/actions/types/actions_create_request_filters_items_item_operator.py +23 -0
- label_studio_sdk/actions/types/actions_create_request_id.py +19 -0
- label_studio_sdk/actions/types/actions_create_request_ordering_item.py +31 -0
- label_studio_sdk/actions/types/actions_create_request_selected_items.py +10 -0
- label_studio_sdk/actions/types/actions_create_request_selected_items_excluded.py +39 -0
- label_studio_sdk/actions/types/actions_create_request_selected_items_included.py +39 -0
- label_studio_sdk/annotations/__init__.py +2 -0
- label_studio_sdk/annotations/client.py +750 -0
- label_studio_sdk/base_client.py +183 -0
- label_studio_sdk/client.py +17 -463
- label_studio_sdk/converter/__init__.py +7 -0
- label_studio_sdk/converter/audio.py +56 -0
- label_studio_sdk/converter/brush.py +452 -0
- label_studio_sdk/converter/converter.py +1175 -0
- label_studio_sdk/converter/exports/__init__.py +0 -0
- label_studio_sdk/converter/exports/csv.py +82 -0
- label_studio_sdk/converter/exports/csv2.py +103 -0
- label_studio_sdk/converter/funsd.py +85 -0
- label_studio_sdk/converter/imports/__init__.py +0 -0
- label_studio_sdk/converter/imports/coco.py +314 -0
- label_studio_sdk/converter/imports/colors.py +198 -0
- label_studio_sdk/converter/imports/label_config.py +45 -0
- label_studio_sdk/converter/imports/pathtrack.py +269 -0
- label_studio_sdk/converter/imports/yolo.py +236 -0
- label_studio_sdk/converter/main.py +202 -0
- label_studio_sdk/converter/utils.py +473 -0
- label_studio_sdk/core/__init__.py +33 -0
- label_studio_sdk/core/api_error.py +15 -0
- label_studio_sdk/core/client_wrapper.py +55 -0
- label_studio_sdk/core/datetime_utils.py +28 -0
- label_studio_sdk/core/file.py +38 -0
- label_studio_sdk/core/http_client.py +447 -0
- label_studio_sdk/core/jsonable_encoder.py +99 -0
- label_studio_sdk/core/pagination.py +87 -0
- label_studio_sdk/core/pydantic_utilities.py +28 -0
- label_studio_sdk/core/query_encoder.py +33 -0
- label_studio_sdk/core/remove_none_from_dict.py +11 -0
- label_studio_sdk/core/request_options.py +32 -0
- label_studio_sdk/environment.py +7 -0
- label_studio_sdk/errors/__init__.py +6 -0
- label_studio_sdk/errors/bad_request_error.py +8 -0
- label_studio_sdk/errors/internal_server_error.py +8 -0
- label_studio_sdk/export_storage/__init__.py +28 -0
- label_studio_sdk/export_storage/azure/__init__.py +5 -0
- label_studio_sdk/export_storage/azure/client.py +888 -0
- label_studio_sdk/export_storage/azure/types/__init__.py +6 -0
- label_studio_sdk/export_storage/azure/types/azure_create_response.py +67 -0
- label_studio_sdk/export_storage/azure/types/azure_update_response.py +67 -0
- label_studio_sdk/export_storage/client.py +107 -0
- label_studio_sdk/export_storage/gcs/__init__.py +5 -0
- label_studio_sdk/export_storage/gcs/client.py +888 -0
- label_studio_sdk/export_storage/gcs/types/__init__.py +6 -0
- label_studio_sdk/export_storage/gcs/types/gcs_create_response.py +67 -0
- label_studio_sdk/export_storage/gcs/types/gcs_update_response.py +67 -0
- label_studio_sdk/export_storage/local/__init__.py +5 -0
- label_studio_sdk/export_storage/local/client.py +834 -0
- label_studio_sdk/export_storage/local/types/__init__.py +6 -0
- label_studio_sdk/export_storage/local/types/local_create_response.py +57 -0
- label_studio_sdk/export_storage/local/types/local_update_response.py +57 -0
- label_studio_sdk/export_storage/redis/__init__.py +5 -0
- label_studio_sdk/export_storage/redis/client.py +918 -0
- label_studio_sdk/export_storage/redis/types/__init__.py +6 -0
- label_studio_sdk/export_storage/redis/types/redis_create_response.py +72 -0
- label_studio_sdk/export_storage/redis/types/redis_update_response.py +72 -0
- label_studio_sdk/export_storage/s3/__init__.py +5 -0
- label_studio_sdk/export_storage/s3/client.py +1008 -0
- label_studio_sdk/export_storage/s3/types/__init__.py +6 -0
- label_studio_sdk/export_storage/s3/types/s3create_response.py +89 -0
- label_studio_sdk/export_storage/s3/types/s3update_response.py +89 -0
- label_studio_sdk/export_storage/types/__init__.py +5 -0
- label_studio_sdk/export_storage/types/export_storage_list_types_response_item.py +30 -0
- label_studio_sdk/files/__init__.py +2 -0
- label_studio_sdk/files/client.py +556 -0
- label_studio_sdk/import_storage/__init__.py +28 -0
- label_studio_sdk/import_storage/azure/__init__.py +5 -0
- label_studio_sdk/import_storage/azure/client.py +988 -0
- label_studio_sdk/import_storage/azure/types/__init__.py +6 -0
- label_studio_sdk/import_storage/azure/types/azure_create_response.py +82 -0
- label_studio_sdk/import_storage/azure/types/azure_update_response.py +82 -0
- label_studio_sdk/import_storage/client.py +107 -0
- label_studio_sdk/import_storage/gcs/__init__.py +5 -0
- label_studio_sdk/import_storage/gcs/client.py +988 -0
- label_studio_sdk/import_storage/gcs/types/__init__.py +6 -0
- label_studio_sdk/import_storage/gcs/types/gcs_create_response.py +82 -0
- label_studio_sdk/import_storage/gcs/types/gcs_update_response.py +82 -0
- label_studio_sdk/import_storage/local/__init__.py +5 -0
- label_studio_sdk/import_storage/local/client.py +836 -0
- label_studio_sdk/import_storage/local/types/__init__.py +6 -0
- label_studio_sdk/import_storage/local/types/local_create_response.py +57 -0
- label_studio_sdk/import_storage/local/types/local_update_response.py +57 -0
- label_studio_sdk/import_storage/redis/__init__.py +5 -0
- label_studio_sdk/import_storage/redis/client.py +924 -0
- label_studio_sdk/import_storage/redis/types/__init__.py +6 -0
- label_studio_sdk/import_storage/redis/types/redis_create_response.py +72 -0
- label_studio_sdk/import_storage/redis/types/redis_update_response.py +72 -0
- label_studio_sdk/import_storage/s3/__init__.py +5 -0
- label_studio_sdk/import_storage/s3/client.py +1138 -0
- label_studio_sdk/import_storage/s3/types/__init__.py +6 -0
- label_studio_sdk/import_storage/s3/types/s3create_response.py +109 -0
- label_studio_sdk/import_storage/s3/types/s3update_response.py +109 -0
- label_studio_sdk/import_storage/types/__init__.py +5 -0
- label_studio_sdk/import_storage/types/import_storage_list_types_response_item.py +30 -0
- label_studio_sdk/label_interface/base.py +10 -0
- label_studio_sdk/label_interface/control_tags.py +109 -71
- label_studio_sdk/label_interface/data_examples.json +96 -0
- label_studio_sdk/label_interface/interface.py +102 -53
- label_studio_sdk/label_interface/object_tags.py +8 -13
- label_studio_sdk/label_interface/region.py +33 -1
- label_studio_sdk/ml/__init__.py +19 -0
- label_studio_sdk/ml/client.py +981 -0
- label_studio_sdk/ml/types/__init__.py +17 -0
- label_studio_sdk/ml/types/ml_create_request_auth_method.py +5 -0
- label_studio_sdk/ml/types/ml_create_response.py +78 -0
- label_studio_sdk/ml/types/ml_create_response_auth_method.py +5 -0
- label_studio_sdk/ml/types/ml_update_request_auth_method.py +5 -0
- label_studio_sdk/ml/types/ml_update_response.py +78 -0
- label_studio_sdk/ml/types/ml_update_response_auth_method.py +5 -0
- label_studio_sdk/predictions/__init__.py +2 -0
- label_studio_sdk/predictions/client.py +638 -0
- label_studio_sdk/projects/__init__.py +6 -0
- label_studio_sdk/projects/client.py +1055 -0
- label_studio_sdk/projects/client_ext.py +19 -0
- label_studio_sdk/projects/exports/__init__.py +2 -0
- label_studio_sdk/projects/exports/client.py +930 -0
- label_studio_sdk/projects/types/__init__.py +7 -0
- label_studio_sdk/projects/types/projects_create_response.py +96 -0
- label_studio_sdk/projects/types/projects_import_tasks_response.py +71 -0
- label_studio_sdk/projects/types/projects_list_response.py +33 -0
- label_studio_sdk/py.typed +0 -0
- label_studio_sdk/tasks/__init__.py +5 -0
- label_studio_sdk/tasks/client.py +835 -0
- label_studio_sdk/tasks/client_ext.py +18 -0
- label_studio_sdk/tasks/types/__init__.py +6 -0
- label_studio_sdk/tasks/types/tasks_list_request_fields.py +5 -0
- label_studio_sdk/tasks/types/tasks_list_response.py +48 -0
- label_studio_sdk/types/__init__.py +115 -0
- label_studio_sdk/types/annotation.py +116 -0
- label_studio_sdk/types/annotation_filter_options.py +42 -0
- label_studio_sdk/types/annotation_last_action.py +19 -0
- label_studio_sdk/types/azure_blob_export_storage.py +112 -0
- label_studio_sdk/types/azure_blob_export_storage_status.py +7 -0
- label_studio_sdk/types/azure_blob_import_storage.py +113 -0
- label_studio_sdk/types/azure_blob_import_storage_status.py +7 -0
- label_studio_sdk/types/base_task.py +113 -0
- label_studio_sdk/types/base_user.py +42 -0
- label_studio_sdk/types/converted_format.py +36 -0
- label_studio_sdk/types/converted_format_status.py +5 -0
- label_studio_sdk/types/export.py +48 -0
- label_studio_sdk/types/export_convert.py +32 -0
- label_studio_sdk/types/export_create.py +54 -0
- label_studio_sdk/types/export_create_status.py +5 -0
- label_studio_sdk/types/export_status.py +5 -0
- label_studio_sdk/types/file_upload.py +30 -0
- label_studio_sdk/types/filter.py +53 -0
- label_studio_sdk/types/filter_group.py +35 -0
- label_studio_sdk/types/gcs_export_storage.py +112 -0
- label_studio_sdk/types/gcs_export_storage_status.py +7 -0
- label_studio_sdk/types/gcs_import_storage.py +113 -0
- label_studio_sdk/types/gcs_import_storage_status.py +7 -0
- label_studio_sdk/types/local_files_export_storage.py +97 -0
- label_studio_sdk/types/local_files_export_storage_status.py +7 -0
- label_studio_sdk/types/local_files_import_storage.py +92 -0
- label_studio_sdk/types/local_files_import_storage_status.py +7 -0
- label_studio_sdk/types/ml_backend.py +89 -0
- label_studio_sdk/types/ml_backend_auth_method.py +5 -0
- label_studio_sdk/types/ml_backend_state.py +5 -0
- label_studio_sdk/types/prediction.py +78 -0
- label_studio_sdk/types/project.py +198 -0
- label_studio_sdk/types/project_import.py +63 -0
- label_studio_sdk/types/project_import_status.py +5 -0
- label_studio_sdk/types/project_label_config.py +32 -0
- label_studio_sdk/types/project_sampling.py +7 -0
- label_studio_sdk/types/project_skip_queue.py +5 -0
- label_studio_sdk/types/redis_export_storage.py +117 -0
- label_studio_sdk/types/redis_export_storage_status.py +7 -0
- label_studio_sdk/types/redis_import_storage.py +112 -0
- label_studio_sdk/types/redis_import_storage_status.py +7 -0
- label_studio_sdk/types/s3export_storage.py +134 -0
- label_studio_sdk/types/s3export_storage_status.py +7 -0
- label_studio_sdk/types/s3import_storage.py +140 -0
- label_studio_sdk/types/s3import_storage_status.py +7 -0
- label_studio_sdk/types/serialization_option.py +36 -0
- label_studio_sdk/types/serialization_options.py +45 -0
- label_studio_sdk/types/task.py +157 -0
- label_studio_sdk/types/task_filter_options.py +49 -0
- label_studio_sdk/types/user_simple.py +37 -0
- label_studio_sdk/types/view.py +55 -0
- label_studio_sdk/types/webhook.py +67 -0
- label_studio_sdk/types/webhook_actions_item.py +21 -0
- label_studio_sdk/types/webhook_serializer_for_update.py +67 -0
- label_studio_sdk/types/webhook_serializer_for_update_actions_item.py +21 -0
- label_studio_sdk/users/__init__.py +5 -0
- label_studio_sdk/users/client.py +830 -0
- label_studio_sdk/users/types/__init__.py +6 -0
- label_studio_sdk/users/types/users_get_token_response.py +36 -0
- label_studio_sdk/users/types/users_reset_token_response.py +36 -0
- label_studio_sdk/version.py +4 -0
- label_studio_sdk/views/__init__.py +35 -0
- label_studio_sdk/views/client.py +564 -0
- label_studio_sdk/views/types/__init__.py +33 -0
- label_studio_sdk/views/types/views_create_request_data.py +43 -0
- label_studio_sdk/views/types/views_create_request_data_filters.py +43 -0
- label_studio_sdk/views/types/views_create_request_data_filters_conjunction.py +5 -0
- label_studio_sdk/views/types/views_create_request_data_filters_items_item.py +49 -0
- label_studio_sdk/views/types/views_create_request_data_filters_items_item_filter.py +31 -0
- label_studio_sdk/views/types/views_create_request_data_filters_items_item_operator.py +23 -0
- label_studio_sdk/views/types/views_create_request_data_ordering_item.py +31 -0
- label_studio_sdk/views/types/views_update_request_data.py +43 -0
- label_studio_sdk/views/types/views_update_request_data_filters.py +43 -0
- label_studio_sdk/views/types/views_update_request_data_filters_conjunction.py +5 -0
- label_studio_sdk/views/types/views_update_request_data_filters_items_item.py +49 -0
- label_studio_sdk/views/types/views_update_request_data_filters_items_item_filter.py +31 -0
- label_studio_sdk/views/types/views_update_request_data_filters_items_item_operator.py +23 -0
- label_studio_sdk/views/types/views_update_request_data_ordering_item.py +31 -0
- label_studio_sdk/webhooks/__init__.py +5 -0
- label_studio_sdk/webhooks/client.py +636 -0
- label_studio_sdk/webhooks/types/__init__.py +5 -0
- label_studio_sdk/webhooks/types/webhooks_update_request_actions_item.py +21 -0
- label_studio_sdk-1.0.1.dist-info/METADATA +163 -0
- label_studio_sdk-1.0.1.dist-info/RECORD +256 -0
- {label_studio_sdk-0.0.34.dist-info → label_studio_sdk-1.0.1.dist-info}/WHEEL +1 -2
- label_studio_sdk/objects.py +0 -35
- label_studio_sdk-0.0.34.dist-info/LICENSE +0 -201
- label_studio_sdk-0.0.34.dist-info/METADATA +0 -24
- label_studio_sdk-0.0.34.dist-info/RECORD +0 -37
- label_studio_sdk-0.0.34.dist-info/top_level.txt +0 -2
- tests/test_client.py +0 -37
- tests/test_export.py +0 -105
- tests/test_interface/__init__.py +0 -1
- tests/test_interface/configs.py +0 -137
- tests/test_interface/mockups.py +0 -22
- tests/test_interface/test_compat.py +0 -64
- tests/test_interface/test_control_tags.py +0 -55
- tests/test_interface/test_data_generation.py +0 -45
- tests/test_interface/test_lpi.py +0 -15
- tests/test_interface/test_main.py +0 -196
- tests/test_interface/test_object_tags.py +0 -36
- tests/test_interface/test_region.py +0 -36
- tests/test_interface/test_validate_summary.py +0 -35
- tests/test_interface/test_validation.py +0 -59
- {tests → label_studio_sdk/_extensions}/__init__.py +0 -0
- /label_studio_sdk/{exceptions.py → _legacy/exceptions.py} +0 -0
- /label_studio_sdk/{users.py → _legacy/users.py} +0 -0
- /label_studio_sdk/{utils.py → _legacy/utils.py} +0 -0
- /label_studio_sdk/{workspaces.py → _legacy/workspaces.py} +0 -0
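
The listing above shows the hand-written 0.0.x client moving under label_studio_sdk/_legacy/ while a Fern-generated client takes its place (base_client.py, client.py, plus per-resource packages such as projects/, tasks/, and annotations/). A hedged sketch of what the new entry point looks like — the LabelStudio class name and the projects.list() call are assumed from this generated layout rather than shown in the diff, so check the 1.0.1 METADATA/README before relying on them:

    from label_studio_sdk.client import LabelStudio  # assumed entry point of the generated client

    ls = LabelStudio(base_url="http://localhost:8080", api_key="<your-api-token>")
    # Assumed: projects.list() returns a pager backed by core/pagination.py.
    for project in ls.projects.list():
        print(project.id, project.title)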
label_studio_sdk/converter/utils.py
@@ -0,0 +1,473 @@
+import argparse
+import datetime
+import hashlib
+import io
+import logging
+import math
+import os
+import re
+import shutil
+import urllib
+import wave
+from collections import defaultdict
+from copy import deepcopy
+from operator import itemgetter
+from urllib.parse import urlparse
+
+import numpy as np
+import requests
+from PIL import Image
+from label_studio_sdk._extensions.label_studio_tools.core.utils.params import get_env
+from lxml import etree
+from nltk.tokenize.treebank import TreebankWordTokenizer
+
+logger = logging.getLogger(__name__)
+
+_LABEL_TAGS = {"Label", "Choice"}
+_NOT_CONTROL_TAGS = {
+    "Filter",
+}
+LOCAL_FILES_DOCUMENT_ROOT = get_env(
+    "LOCAL_FILES_DOCUMENT_ROOT", default=os.path.abspath(os.sep)
+)
+
+TreebankWordTokenizer.PUNCTUATION = [
+    (re.compile(r"([:,])([^\d])"), r" \1 \2"),
+    (re.compile(r"([:,])$"), r" \1 "),
+    (re.compile(r"\.\.\."), r" ... "),
+    (re.compile(r"[;@#$/%&]"), r" \g<0> "),
+    (
+        re.compile(r'([^\.])(\.)([\]\)}>"\']*)\s*$'),
+        r"\1 \2\3 ",
+    ),  # Handles the final period.
+    (re.compile(r"[?!]"), r" \g<0> "),
+    (re.compile(r"([^'])' "), r"\1 ' "),
+]
+
+
+class ExpandFullPath(argparse.Action):
+    def __call__(self, parser, namespace, values, option_string=None):
+        setattr(namespace, self.dest, os.path.abspath(os.path.expanduser(values)))
+
+
+def tokenize(text):
+    tok_start = 0
+    out = []
+    for tok in text.split():
+        if len(tok):
+            out.append((tok, tok_start))
+            tok_start += len(tok) + 1
+        else:
+            tok_start += 1
+    return out
+
+
+def create_tokens_and_tags(text, spans):
+    # tokens_and_idx = tokenize(text) # This function doesn't work properly if text contains multiple whitespaces...
+    token_index_tuples = [
+        token for token in TreebankWordTokenizer().span_tokenize(text)
+    ]
+    tokens_and_idx = [(text[start:end], start) for start, end in token_index_tuples]
+    if spans and all(
+        [
+            span.get("start") is not None and span.get("end") is not None
+            for span in spans
+        ]
+    ):
+        spans = list(sorted(spans, key=itemgetter("start")))
+        span = spans.pop(0)
+        span_start = span["start"]
+        span_end = span["end"] - 1
+        prefix = "B-"
+        tokens, tags = [], []
+        for token, token_start in tokens_and_idx:
+            tokens.append(token)
+            token_end = (
+                token_start + len(token) - 1
+            )  # "- 1" - This substraction is wrong. token already uses the index E.g. "Hello" is 0-4
+            token_start_ind = token_start  # It seems like the token start is too early.. for whichever reason
+
+            # if for some reason end of span is missed.. pop the new span (Which is quite probable due to this method)
+            # Attention it seems like span['end'] is the index of first char afterwards. In case the whitespace is part of the
+            # labell we need to subtract one. Otherwise next token won't trigger the span update.. only the token after next..
+            if token_start_ind > span_end:
+                while spans:
+                    span = spans.pop(0)
+                    span_start = span["start"]
+                    span_end = span["end"] - 1
+                    prefix = "B-"
+                    if token_start <= span_end:
+                        break
+            # Add tag "O" for spans that:
+            # - are empty
+            # - span start has passed over token_end
+            # - do not have any label (None or empty list)
+            if not span or token_end < span_start or not span.get("labels"):
+                tags.append("O")
+            elif span_start <= token_end and span_end >= token_start_ind:
+                tags.append(prefix + span["labels"][0])
+                prefix = "I-"
+            else:
+                tags.append("O")
+    else:
+        tokens = [token for token, _ in tokens_and_idx]
+        tags = ["O"] * len(tokens)
+
+    return tokens, tags
+
+
+def _get_upload_dir(project_dir=None, upload_dir=None):
+    """Return either upload_dir, or path by LS_UPLOAD_DIR, or project_dir/upload"""
+    if upload_dir:
+        return upload_dir
+    upload_dir = os.environ.get("LS_UPLOAD_DIR")
+    if not upload_dir and project_dir:
+        upload_dir = os.path.join(project_dir, "upload")
+        if not os.path.exists(upload_dir):
+            upload_dir = None
+    if not upload_dir:
+        raise FileNotFoundError(
+            "Can't find upload dir: either LS_UPLOAD_DIR or project should be passed to converter"
+        )
+    return upload_dir
+
+
+def download(
+    url,
+    output_dir,
+    filename=None,
+    project_dir=None,
+    return_relative_path=False,
+    upload_dir=None,
+    download_resources=True,
+):
+    is_local_file = url.startswith("/data/") and "?d=" in url
+    is_uploaded_file = url.startswith("/data/upload")
+
+    if is_uploaded_file:
+        upload_dir = _get_upload_dir(project_dir, upload_dir)
+        filename = urllib.parse.unquote(url.replace("/data/upload/", ""))
+        filepath = os.path.join(upload_dir, filename)
+        logger.debug(
+            f"Copy {filepath} to {output_dir}".format(
+                filepath=filepath, output_dir=output_dir
+            )
+        )
+        if download_resources:
+            shutil.copy(filepath, output_dir)
+        if return_relative_path:
+            return os.path.join(
+                os.path.basename(output_dir), os.path.basename(filename)
+            )
+        return filepath
+
+    if is_local_file:
+        filename, dir_path = url.split("/data/", 1)[-1].split("?d=")
+        dir_path = str(urllib.parse.unquote(dir_path))
+        filepath = os.path.join(LOCAL_FILES_DOCUMENT_ROOT, dir_path)
+        if not os.path.exists(filepath):
+            raise FileNotFoundError(filepath)
+        if download_resources:
+            shutil.copy(filepath, output_dir)
+        return filepath
+
+    if filename is None:
+        basename, ext = os.path.splitext(os.path.basename(urlparse(url).path))
+        filename = f"{basename}{ext}"
+        filepath = os.path.join(output_dir, filename)
+        if os.path.exists(filepath):
+            filename = (
+                basename
+                + "_"
+                + hashlib.md5(
+                    url.encode() + str(datetime.datetime.now().timestamp()).encode()
+                ).hexdigest()[:4]
+                + ext
+            )
+
+    filepath = os.path.join(output_dir, filename)
+    if not os.path.exists(filepath):
+        logger.info("Download {url} to {filepath}".format(url=url, filepath=filepath))
+        if download_resources:
+            r = requests.get(url)
+            r.raise_for_status()
+            with io.open(filepath, mode="wb") as fout:
+                fout.write(r.content)
+    if return_relative_path:
+        return os.path.join(os.path.basename(output_dir), os.path.basename(filename))
+    return filepath
+
+
+def get_image_size(image_path):
+    return Image.open(image_path).size
+
+
+def get_image_size_and_channels(image_path):
+    i = Image.open(image_path)
+    w, h = i.size
+    c = len(i.getbands())
+    return w, h, c
+
+
+def get_audio_duration(audio_path):
+    with wave.open(audio_path, mode="r") as f:
+        return f.getnframes() / float(f.getframerate())
+
+
+def ensure_dir(dir_path):
+    if not os.path.exists(dir_path):
+        os.makedirs(dir_path)
+
+
+def parse_config(config_string):
+    """
+    :param config_string: Label config string
+    :return: structured config of the form:
+    {
+        "<ControlTag>.name": {
+            "type": "ControlTag",
+            "to_name": ["<ObjectTag1>.name", "<ObjectTag2>.name"],
+            "inputs: [
+                {"type": "ObjectTag1", "value": "<ObjectTag1>.value"},
+                {"type": "ObjectTag2", "value": "<ObjectTag2>.value"}
+            ],
+            "labels": ["Label1", "Label2", "Label3"] // taken from "alias" if exists or "value"
+    }
+    """
+    if not config_string:
+        return {}
+
+    def _is_input_tag(tag):
+        return tag.attrib.get("name") and tag.attrib.get("value")
+
+    def _is_output_tag(tag):
+        return (
+            tag.attrib.get("name")
+            and tag.attrib.get("toName")
+            and tag.tag not in _NOT_CONTROL_TAGS
+        )
+
+    def _get_parent_output_tag_name(tag, outputs):
+        # Find parental <Choices> tag for nested tags like <Choices><View><View><Choice>...
+        parent = tag
+        while True:
+            parent = parent.getparent()
+            if parent is None:
+                return
+            name = parent.attrib.get("name")
+            if name in outputs:
+                return name
+
+    try:
+        xml_tree = etree.fromstring(config_string)
+    except etree.XMLSyntaxError as e:
+        raise ValueError(str(e))
+
+    inputs, outputs, labels = {}, {}, defaultdict(dict)
+    for tag in xml_tree.iter():
+        if _is_output_tag(tag):
+            tag_info = {"type": tag.tag, "to_name": tag.attrib["toName"].split(",")}
+            # Grab conditionals if any
+            conditionals = {}
+            if tag.attrib.get("perRegion") == "true":
+                if tag.attrib.get("whenTagName"):
+                    conditionals = {"type": "tag", "name": tag.attrib["whenTagName"]}
+                elif tag.attrib.get("whenLabelValue"):
+                    conditionals = {
+                        "type": "label",
+                        "name": tag.attrib["whenLabelValue"],
+                    }
+                elif tag.attrib.get("whenChoiceValue"):
+                    conditionals = {
+                        "type": "choice",
+                        "name": tag.attrib["whenChoiceValue"],
+                    }
+            if conditionals:
+                tag_info["conditionals"] = conditionals
+            outputs[tag.attrib["name"]] = tag_info
+        elif _is_input_tag(tag):
+            inputs[tag.attrib["name"]] = {
+                "type": tag.tag,
+                "value": tag.attrib["value"].lstrip("$"),
+            }
+        if tag.tag not in _LABEL_TAGS:
+            continue
+        parent_name = _get_parent_output_tag_name(tag, outputs)
+        if parent_name is not None:
+            actual_value = tag.attrib.get("alias") or tag.attrib.get("value")
+            if not actual_value:
+                logger.debug(
+                    'Inspecting tag {tag_name}... found no "value" or "alias" attributes.'.format(
+                        tag_name=etree.tostring(tag, encoding="unicode").strip()[:50]
+                    )
+                )
+            else:
+                labels[parent_name][actual_value] = dict(tag.attrib)
+    for output_tag, tag_info in outputs.items():
+        tag_info["inputs"] = []
+        for input_tag_name in tag_info["to_name"]:
+            if input_tag_name not in inputs:
+                logger.debug(
+                    f"to_name={input_tag_name} is specified for output tag name={output_tag}, "
+                    "but we can't find it among input tags"
+                )
+                continue
+            tag_info["inputs"].append(inputs[input_tag_name])
+        tag_info["labels"] = list(labels[output_tag])
+        tag_info["labels_attrs"] = labels[output_tag]
+    return outputs
+
+
+def get_polygon_area(x, y):
+    """https://en.wikipedia.org/wiki/Shoelace_formula"""
+
+    assert len(x) == len(y)
+
+    return float(0.5 * np.abs(np.dot(x, np.roll(y, 1)) - np.dot(y, np.roll(x, 1))))
+
+
+def get_polygon_bounding_box(x, y):
+    assert len(x) == len(y)
+
+    x1, y1, x2, y2 = min(x), min(y), max(x), max(y)
+    return [x1, y1, x2 - x1, y2 - y1]
+
+
+def get_annotator(item, default=None, int_id=False):
+    """Get annotator id or email from annotation"""
+    annotator = item["completed_by"]
+    if isinstance(annotator, dict):
+        annotator = annotator.get("email", default)
+        return annotator
+
+    if isinstance(annotator, int) and int_id:
+        return annotator
+
+    return str(annotator)
+
+
+def get_json_root_type(filename):
+    char = "x"
+    with open(filename, "r", encoding="utf-8") as f:
+        # Read the file character by character
+        while char != "":
+            char = f.read(1)
+
+            # Skip any whitespace
+            if char.isspace():
+                continue
+
+            # If the first non-whitespace character is '{', it's a dict
+            if char == "{":
+                return "dict"
+
+            # If the first non-whitespace character is '[', it's an array
+            if char == "[":
+                return "list"
+
+            # If neither, the JSON file is invalid
+            return "invalid"
+
+    # If the file is empty, return "empty"
+    return "empty"
+
+
+def prettify_result(v):
+    """
+    :param v: list of regions or results
+    :return: label name as is if there is only 1 item in result `v`, else list of label names
+    """
+    out = []
+    tag_type = None
+    for i in v:
+        j = deepcopy(i)
+        tag_type = j.pop("type")
+        if tag_type == "Choices" and len(j["choices"]) == 1:
+            out.append(j["choices"][0])
+        elif tag_type == "TextArea" and len(j["text"]) == 1:
+            out.append(j["text"][0])
+        else:
+            out.append(j)
+    return out[0] if tag_type in ("Choices", "TextArea") and len(out) == 1 else out
+
+
+def convert_annotation_to_yolo(label):
+    """
+    Convert LS annotation to Yolo format.
+
+    Args:
+        label (dict): Dictionary containing annotation information including:
+            - width (float): Width of the object.
+            - height (float): Height of the object.
+            - x (float): X-coordinate of the top-left corner of the object.
+            - y (float): Y-coordinate of the top-left corner of the object.
+
+    Returns:
+        tuple or None: If the conversion is successful, returns a tuple (x, y, w, h) representing
+        the coordinates and dimensions of the object in Yolo format, where (x, y) are the center
+        coordinates of the object, and (w, h) are the width and height of the object respectively.
+    """
+
+    if not ("x" in label and "y" in label and "width" in label and "height" in label):
+        return None
+
+    w = label["width"]
+    h = label["height"]
+
+    x = (label["x"] + w / 2) / 100
+    y = (label["y"] + h / 2) / 100
+    w = w / 100
+    h = h / 100
+
+    return x, y, w, h
+
+
+def convert_annotation_to_yolo_obb(label):
+    """
+    Convert LS annotation to Yolo OBB format.
+
+    Args:
+        label (dict): Dictionary containing annotation information including:
+            - original_width (int): Original width of the image.
+            - original_height (int): Original height of the image.
+            - x (float): X-coordinate of the top-left corner of the object in percentage of the original width.
+            - y (float): Y-coordinate of the top-left corner of the object in percentage of the original height.
+            - width (float): Width of the object in percentage of the original width.
+            - height (float): Height of the object in percentage of the original height.
+            - rotation (float, optional): Rotation angle of the object in degrees (default is 0).
+
+    Returns:
+        list of tuple or None: List of tuples containing the coordinates of the object in Yolo OBB format.
+        Each tuple represents a corner of the bounding box in the order:
+        (top-left, top-right, bottom-right, bottom-left).
+    """
+
+    if not (
+        "original_width" in label
+        and "original_height" in label
+        and "x" in label
+        and "y" in label
+        and "width" in label
+        and "height" in label
+        and "rotation" in label
+    ):
+        return None
+
+    org_width, org_height = label["original_width"], label["original_height"]
+    x = label["x"] / 100 * org_width
+    y = label["y"] / 100 * org_height
+    w = label["width"] / 100 * org_width
+    h = label["height"] / 100 * org_height
+
+    rotation = math.radians(label.get("rotation", 0))
+    cos, sin = math.cos(rotation), math.sin(rotation)
+
+    coords = [
+        (x, y),
+        (x + w * cos, y + w * sin),
+        (x + w * cos - h * sin, y + w * sin + h * cos),
+        (x - h * sin, y + h * cos),
+    ]
+
+    # Normalize coordinates
+    return [(coord[0] / org_width, coord[1] / org_height) for coord in coords]
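
The hunk above carries the converter's geometry helpers. A minimal sketch of what convert_annotation_to_yolo and get_polygon_area compute, assuming the module imports as label_studio_sdk.converter.utils per the path above (sample values made up):

    from label_studio_sdk.converter.utils import convert_annotation_to_yolo, get_polygon_area

    # Label Studio rectangle results store x/y/width/height as percentages of the image.
    region = {"x": 10.0, "y": 20.0, "width": 30.0, "height": 40.0}

    # YOLO format wants the normalized center point plus normalized width/height.
    print(convert_annotation_to_yolo(region))  # -> (0.25, 0.4, 0.3, 0.4)

    # Shoelace-formula area of a unit square given its polygon vertices.
    print(get_polygon_area([0, 1, 1, 0], [0, 0, 1, 1]))  # -> 1.0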
label_studio_sdk/core/__init__.py
@@ -0,0 +1,33 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from .api_error import ApiError
+from .client_wrapper import AsyncClientWrapper, BaseClientWrapper, SyncClientWrapper
+from .datetime_utils import serialize_datetime
+from .file import File, convert_file_dict_to_httpx_tuples
+from .http_client import AsyncHttpClient, HttpClient
+from .jsonable_encoder import jsonable_encoder
+from .pagination import AsyncPager, SyncPager
+from .pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
+from .query_encoder import encode_query
+from .remove_none_from_dict import remove_none_from_dict
+from .request_options import RequestOptions
+
+__all__ = [
+    "ApiError",
+    "AsyncClientWrapper",
+    "AsyncHttpClient",
+    "AsyncPager",
+    "BaseClientWrapper",
+    "File",
+    "HttpClient",
+    "RequestOptions",
+    "SyncClientWrapper",
+    "SyncPager",
+    "convert_file_dict_to_httpx_tuples",
+    "deep_union_pydantic_dicts",
+    "encode_query",
+    "jsonable_encoder",
+    "pydantic_v1",
+    "remove_none_from_dict",
+    "serialize_datetime",
+]
label_studio_sdk/core/api_error.py
@@ -0,0 +1,15 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+
+class ApiError(Exception):
+    status_code: typing.Optional[int]
+    body: typing.Any
+
+    def __init__(self, *, status_code: typing.Optional[int] = None, body: typing.Any = None):
+        self.status_code = status_code
+        self.body = body
+
+    def __str__(self) -> str:
+        return f"status_code: {self.status_code}, body: {self.body}"
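
ApiError appears to be the base error type used by the generated clients (bad_request_error.py and internal_server_error.py are added alongside it). A minimal sketch of what it carries:

    from label_studio_sdk.core.api_error import ApiError

    # ApiError is a plain exception holding the HTTP status code and raw response body.
    err = ApiError(status_code=404, body={"detail": "Not found."})
    print(str(err))  # -> status_code: 404, body: {'detail': 'Not found.'}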
label_studio_sdk/core/client_wrapper.py
@@ -0,0 +1,55 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+import httpx
+
+from .http_client import AsyncHttpClient, HttpClient
+
+
+class BaseClientWrapper:
+    def __init__(self, *, api_key: str, base_url: str, timeout: typing.Optional[float] = None):
+        self.api_key = api_key
+        self._base_url = base_url
+        self._timeout = timeout
+
+    def get_headers(self) -> typing.Dict[str, str]:
+        headers: typing.Dict[str, str] = {
+            "X-Fern-Language": "Python",
+            "X-Fern-SDK-Name": "label-studio-sdk",
+            "X-Fern-SDK-Version": "0.0.121",
+        }
+        headers["Authorization"] = f"Token {self.api_key}"
+        return headers
+
+    def get_base_url(self) -> str:
+        return self._base_url
+
+    def get_timeout(self) -> typing.Optional[float]:
+        return self._timeout
+
+
+class SyncClientWrapper(BaseClientWrapper):
+    def __init__(
+        self, *, api_key: str, base_url: str, timeout: typing.Optional[float] = None, httpx_client: httpx.Client
+    ):
+        super().__init__(api_key=api_key, base_url=base_url, timeout=timeout)
+        self.httpx_client = HttpClient(
+            httpx_client=httpx_client,
+            base_headers=self.get_headers(),
+            base_timeout=self.get_timeout(),
+            base_url=self.get_base_url(),
+        )
+
+
+class AsyncClientWrapper(BaseClientWrapper):
+    def __init__(
+        self, *, api_key: str, base_url: str, timeout: typing.Optional[float] = None, httpx_client: httpx.AsyncClient
+    ):
+        super().__init__(api_key=api_key, base_url=base_url, timeout=timeout)
+        self.httpx_client = AsyncHttpClient(
+            httpx_client=httpx_client,
+            base_headers=self.get_headers(),
+            base_timeout=self.get_timeout(),
+            base_url=self.get_base_url(),
+        )
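
These wrappers hold the per-request defaults (auth header, base URL, timeout) that the generated resource clients share, and are presumably constructed by base_client.py rather than by hand. A minimal sketch of building one directly, with placeholder token and URL:

    import httpx

    from label_studio_sdk.core.client_wrapper import SyncClientWrapper

    wrapper = SyncClientWrapper(
        api_key="<your-api-token>",
        base_url="http://localhost:8080",
        timeout=30.0,
        httpx_client=httpx.Client(),
    )
    # Requests sent through wrapper.httpx_client carry these defaults,
    # including the "Authorization: Token <your-api-token>" header.
    print(wrapper.get_headers()["Authorization"])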
label_studio_sdk/core/datetime_utils.py
@@ -0,0 +1,28 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+
+
+def serialize_datetime(v: dt.datetime) -> str:
+    """
+    Serialize a datetime including timezone info.
+
+    Uses the timezone info provided if present, otherwise uses the current runtime's timezone info.
+
+    UTC datetimes end in "Z" while all other timezones are represented as offset from UTC, e.g. +05:00.
+    """
+
+    def _serialize_zoned_datetime(v: dt.datetime) -> str:
+        if v.tzinfo is not None and v.tzinfo.tzname(None) == dt.timezone.utc.tzname(None):
+            # UTC is a special case where we use "Z" at the end instead of "+00:00"
+            return v.isoformat().replace("+00:00", "Z")
+        else:
+            # Delegate to the typical +/- offset format
+            return v.isoformat()
+
+    if v.tzinfo is not None:
+        return _serialize_zoned_datetime(v)
+    else:
+        local_tz = dt.datetime.now().astimezone().tzinfo
+        localized_dt = v.replace(tzinfo=local_tz)
+        return _serialize_zoned_datetime(localized_dt)
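
The serializer's two branches are easiest to see side by side (minimal sketch; the second output depends on the machine's local timezone):

    import datetime as dt

    from label_studio_sdk.core.datetime_utils import serialize_datetime

    # Explicit UTC -> trailing "Z" instead of "+00:00".
    print(serialize_datetime(dt.datetime(2024, 5, 1, 12, 0, tzinfo=dt.timezone.utc)))
    # -> 2024-05-01T12:00:00Z

    # Naive datetime -> the current runtime's local offset is attached.
    print(serialize_datetime(dt.datetime(2024, 5, 1, 12, 0)))
    # -> e.g. 2024-05-01T12:00:00+02:00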
label_studio_sdk/core/file.py
@@ -0,0 +1,38 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+# File typing inspired by the flexibility of types within the httpx library
+# https://github.com/encode/httpx/blob/master/httpx/_types.py
+FileContent = typing.Union[typing.IO[bytes], bytes, str]
+File = typing.Union[
+    # file (or bytes)
+    FileContent,
+    # (filename, file (or bytes))
+    typing.Tuple[typing.Optional[str], FileContent],
+    # (filename, file (or bytes), content_type)
+    typing.Tuple[typing.Optional[str], FileContent, typing.Optional[str]],
+    # (filename, file (or bytes), content_type, headers)
+    typing.Tuple[typing.Optional[str], FileContent, typing.Optional[str], typing.Mapping[str, str]],
+]
+
+
+def convert_file_dict_to_httpx_tuples(
+    d: typing.Dict[str, typing.Union[File, typing.List[File]]]
+) -> typing.List[typing.Tuple[str, File]]:
+    """
+    The format we use is a list of tuples, where the first element is the
+    name of the file and the second is the file object. Typically HTTPX wants
+    a dict, but to be able to send lists of files, you have to use the list
+    approach (which also works for non-lists)
+    https://github.com/encode/httpx/pull/1032
+    """
+
+    httpx_tuples = []
+    for key, file_like in d.items():
+        if isinstance(file_like, list):
+            for file_like_item in file_like:
+                httpx_tuples.append((key, file_like_item))
+        else:
+            httpx_tuples.append((key, file_like))
+    return httpx_tuples
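
convert_file_dict_to_httpx_tuples flattens a files dict into the tuple list httpx expects, expanding lists into repeated fields. A minimal sketch (field names and payloads made up):

    from label_studio_sdk.core.file import convert_file_dict_to_httpx_tuples

    files = {
        "file": ("tasks.json", b"[]", "application/json"),
        "attachments": [b"first", b"second"],  # a list expands into repeated form fields
    }
    print(convert_file_dict_to_httpx_tuples(files))
    # -> [('file', ('tasks.json', b'[]', 'application/json')),
    #     ('attachments', b'first'), ('attachments', b'second')]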