unitlab 1.7.8.tar.gz → 1.8.0.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {unitlab-1.7.8 → unitlab-1.8.0}/PKG-INFO +1 -1
- {unitlab-1.7.8 → unitlab-1.8.0}/setup.py +4 -1
- {unitlab-1.7.8 → unitlab-1.8.0}/src/unitlab/client.py +16 -3
- unitlab-1.8.0/src/unitlab/core.py +242 -0
- {unitlab-1.7.8 → unitlab-1.8.0}/src/unitlab/run.py +10 -0
- {unitlab-1.7.8 → unitlab-1.8.0}/src/unitlab.egg-info/PKG-INFO +1 -1
- {unitlab-1.7.8 → unitlab-1.8.0}/src/unitlab.egg-info/requires.txt +3 -0
- unitlab-1.7.8/src/unitlab/core.py +0 -175
- {unitlab-1.7.8 → unitlab-1.8.0}/LICENSE.md +0 -0
- {unitlab-1.7.8 → unitlab-1.8.0}/MANIFEST.in +0 -0
- {unitlab-1.7.8 → unitlab-1.8.0}/README.md +0 -0
- {unitlab-1.7.8 → unitlab-1.8.0}/setup.cfg +0 -0
- {unitlab-1.7.8 → unitlab-1.8.0}/src/unitlab/__init__.py +0 -0
- {unitlab-1.7.8 → unitlab-1.8.0}/src/unitlab/exceptions.py +0 -0
- {unitlab-1.7.8 → unitlab-1.8.0}/src/unitlab/pretty.py +0 -0
- {unitlab-1.7.8 → unitlab-1.8.0}/src/unitlab.egg-info/SOURCES.txt +0 -0
- {unitlab-1.7.8 → unitlab-1.8.0}/src/unitlab.egg-info/dependency_links.txt +0 -0
- {unitlab-1.7.8 → unitlab-1.8.0}/src/unitlab.egg-info/entry_points.txt +0 -0
- {unitlab-1.7.8 → unitlab-1.8.0}/src/unitlab.egg-info/top_level.txt +0 -0
{unitlab-1.7.8 → unitlab-1.8.0}/setup.py

```diff
@@ -2,7 +2,7 @@ from setuptools import find_packages, setup
 
 setup(
     name="unitlab",
-    version="1.7.8",
+    version="1.8.0",
     license="MIT",
     author="Unitlab Inc.",
     author_email="team@unitlab.ai",
@@ -30,6 +30,9 @@ setup(
         "requests",
         "prettytable",
         "tqdm",
+        "numpy",
+        "opencv-python",
+        "Pillow",
     ],
     entry_points={
         "console_scripts": ["unitlab=unitlab.run:main"],
```
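
The three new install requirements line up with the new `datasource_result` renderer added in `core.py` below, which passes image data between Pillow, NumPy, and OpenCV. A minimal sketch of that interop (illustrative only, not code from the package):

```python
# Illustrative sketch of why numpy, opencv-python, and Pillow arrive together:
# the new renderer round-trips Pillow -> NumPy -> OpenCV, as core.py does.
import cv2
import numpy as np
from PIL import Image

img = Image.new("RGB", (4, 4), (255, 0, 0))    # a Pillow image
arr = np.array(img)                            # -> NumPy array, shape (4, 4, 3)
rgba = cv2.cvtColor(arr, cv2.COLOR_RGB2RGBA)   # -> OpenCV adds an alpha channel
print(rgba.shape)                              # (4, 4, 4)
```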
{unitlab-1.7.8 → unitlab-1.8.0}/src/unitlab/client.py

```diff
@@ -2,7 +2,7 @@ import asyncio
 import errno
 import glob
 import os
-import uuid
+import re
 
 import aiohttp
 import requests
@@ -255,13 +255,26 @@ class UnitlabClient:
         Returns:
             Writes the data to a json file.
         """
-        with self.api_session.get(
+        response = self.api_session.get(
             url=ENPOINTS["task_download_data"].format(task_id),
             headers=self._get_auth_header(),
+        )
+        response.raise_for_status()
+        with self.api_session.get(
+            url=response.json()["file"],
             stream=True,
         ) as r:
             r.raise_for_status()
-            filename = f"task-{task_id}-{uuid.uuid4().hex[:8]}.json"
+            if "Content-Disposition" in r.headers.keys():
+                content_disposition = r.headers["Content-Disposition"]
+                filename_match = re.search('filename="(.+)"', content_disposition)
+                if filename_match:
+                    filename = filename_match.group(1)
+                else:
+                    filename = f"task-data-{task_id}.json"
+            else:
+                filename = f"task-data-{task_id}.json"
+
             with open(filename, "wb") as f:
                 for chunk in r.iter_content(chunk_size=1024 * 1024):
                     f.write(chunk)
```
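
The download now makes two requests: one to the API for the file URL, then a streamed fetch of that URL, deriving the local filename from the `Content-Disposition` header when present. A standalone sketch of the header parsing added above (the header value here is a made-up example):

```python
# Standalone check of the Content-Disposition parsing used above
# (the header value is a made-up example).
import re

content_disposition = 'attachment; filename="labels-export.json"'
match = re.search('filename="(.+)"', content_disposition)
filename = match.group(1) if match else "task-data-fallback.json"
print(filename)  # labels-export.json
```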
unitlab-1.8.0/src/unitlab/core.py

```diff
@@ -0,0 +1,242 @@
+import argparse
+import asyncio
+import glob
+import os
+import re
+from io import BytesIO
+from uuid import UUID
+
+import aiohttp
+import cv2
+import numpy as np
+import requests
+import tqdm
+from PIL import Image, ImageColor, ImageDraw
+
+from unitlab import pretty
+
+BASE_URL = "https://api-dev.unitlab.ai/api/cli"
+
+ENPOINTS = {
+    "ai_model_list": BASE_URL + "/task-parent/",
+    "ai_model_detail": BASE_URL + "/task-parent/{}/",
+    "task_list": BASE_URL + "/task/",
+    "task_detail": BASE_URL + "/task/{}/",
+    "task_data_sources": BASE_URL + "/task/{}/datasource/",
+    "task_members": BASE_URL + "/task/{}/members/",
+    "task_statistics": BASE_URL + "/task/{}/statistics/",
+    "task_upload_datasources": BASE_URL + "/task/upload-datasource/",
+    "task_download_data": BASE_URL + "/task/{}/download-data/",
+    "datasource_result": BASE_URL + "/datasource/{}/result/",
+}
+
+api_key_template = {
+    "type": str,
+    "dest": "api_key",
+    "nargs": "?",
+    "required": True,
+    "help": "The api-key that obtained from unitlab.ai",
+}
+
+
+def get_headers(namespace):
+    return {"Authorization": f"Api-Key {namespace.api_key}"}
+
+
+def validate_uuid(uuid):
+    try:
+        UUID(uuid, version=4)
+    except ValueError:
+        raise argparse.ArgumentTypeError("Invalid UUID")
+    return uuid
+
+
+def ai_model_list(namespace):
+    r = requests.get(
+        url=ENPOINTS[namespace.func.__name__],
+        headers=get_headers(namespace),
+    )
+    r.raise_for_status()
+    pretty.print_ai_model(r.json(), many=True)
+
+
+def ai_model_detail(namespace):
+    r = requests.get(
+        url=ENPOINTS[namespace.func.__name__].format(namespace.uuid),
+        headers=get_headers(namespace),
+    )
+    r.raise_for_status()
+    pretty.print_ai_model(r.json(), many=False)
+
+
+def task_list(namespace):
+    r = requests.get(
+        url=ENPOINTS[namespace.func.__name__],
+        headers=get_headers(namespace),
+    )
+    r.raise_for_status()
+    pretty.print_task(r.json(), many=True)
+
+
+def task_detail(namespace):
+    r = requests.get(
+        url=ENPOINTS[namespace.func.__name__].format(namespace.uuid),
+        headers=get_headers(namespace),
+    )
+    r.raise_for_status()
+    pretty.print_task(r.json(), many=False)
+
+
+def task_data_sources(namespace):
+    r = requests.get(
+        url=ENPOINTS[namespace.func.__name__].format(namespace.uuid),
+        headers=get_headers(namespace),
+    )
+    r.raise_for_status()
+    pretty.print_data_sources(r.json())
+
+
+def task_members(namespace):
+    r = requests.get(
+        url=ENPOINTS[namespace.func.__name__].format(namespace.uuid),
+        headers=get_headers(namespace),
+    )
+    r.raise_for_status()
+    pretty.print_members(r.json())
+
+
+def task_statistics(namespace):
+    r = requests.get(
+        url=ENPOINTS[namespace.func.__name__].format(namespace.uuid),
+        headers=get_headers(namespace),
+    )
+    pretty.print_task_statistics(r.json())
+
+
+def task_upload_datasources(namespace):
+    if not os.path.exists(namespace.input_dir):
+        raise ValueError(f"Directory {namespace.input_dir} does not exist.")
+
+    async def upload_images(session, endpoint, task_id, images, progress_bar):
+        for file_path in images:
+            with open(file_path, "rb") as image:
+                async with session.post(
+                    endpoint,
+                    data=aiohttp.FormData(fields={"task": task_id, "image": image}),
+                ) as response:
+                    if response.status != 201:
+                        raise Exception(
+                            f"Failed to upload file {file_path}. HTTP status code: {response.status}"
+                        )
+                    progress_bar.update(os.path.getsize(file_path))
+                    await response.read()
+
+    async def upload_images_in_batches(
+        folder: str, api_key: str, task_id: str, batch_size=1000
+    ):
+        images = [
+            image
+            for images_list in [
+                glob.glob(os.path.join(folder, "") + extension)
+                for extension in ["*jpg", "*png"]
+            ]
+            for image in images_list
+        ]
+        total_size = sum(os.path.getsize(f) for f in images)
+        endpoint = ENPOINTS[namespace.func.__name__]
+
+        with tqdm.tqdm(total=total_size, unit="B", unit_scale=True) as progress_bar:
+            async with aiohttp.ClientSession(
+                headers={"Authorization": f"Api-Key {api_key}"}
+            ) as session:
+                for i in range(0, len(images), batch_size):
+                    batch_images = images[i : i + batch_size]
+                    tasks = [
+                        asyncio.create_task(
+                            upload_images(
+                                session, endpoint, task_id, batch_images, progress_bar
+                            )
+                        )
+                    ]
+                    await asyncio.gather(*tasks)
+
+    try:
+        asyncio.run(
+            upload_images_in_batches(
+                namespace.input_dir, namespace.api_key, namespace.uuid, batch_size=1000
+            )
+        )
+    except Exception as e:
+        print(str(e))
+
+
+def task_download_data(namespace):
+    response = requests.get(
+        url=ENPOINTS[namespace.func.__name__].format(namespace.uuid),
+        headers=get_headers(namespace),
+    )
+    response.raise_for_status()
+    session = requests.Session()
+    with session.get(
+        url=response.json()["file"],
+        stream=True,
+    ) as r:
+        r.raise_for_status()
+        if "Content-Disposition" in r.headers.keys():
+            content_disposition = r.headers["Content-Disposition"]
+            filename_match = re.search('filename="(.+)"', content_disposition)
+            if filename_match:
+                filename = filename_match.group(1)
+            else:
+                filename = f"task-data-{namespace.uuid}.json"
+        else:
+            filename = f"task-data-{namespace.uuid}.json"
+
+        with open(filename, "wb") as f:
+            for chunk in r.iter_content(chunk_size=1024 * 1024):
+                f.write(chunk)
+
+
+def datasource_result(namespace):
+    r = requests.get(
+        url=ENPOINTS[namespace.func.__name__].format(namespace.uuid),
+        headers=get_headers(namespace),
+    )
+    r.raise_for_status()
+    if r.status_code == 204:
+        print("No results yet")
+        return
+    result = r.json()
+    if result["render_type"] == "img_segmentation":
+        bitmap = np.array(Image.open(BytesIO(requests.get(result["source"]).content)))
+        classes = result["classes"]
+        bitmap = bitmap[:, :, 0] if len(bitmap.shape) > 2 else bitmap
+        bitmap = np.array(bitmap)
+        label_values = [list(ImageColor.getcolor(l["color"], "RGB")) for l in classes]
+        label_values.insert(0, [0, 0, 0])  # Add background color
+        label_values = np.array(label_values)
+        thumbnail = label_values[bitmap.astype(int)]
+        thumbnail = np.array(thumbnail).astype(np.uint8)
+        thumbnail = cv2.cvtColor(thumbnail, cv2.COLOR_RGB2RGBA)
+        thumbnail[thumbnail[:, :, 3] == 255, 3] = 128
+        thumbnail = Image.fromarray(thumbnail)
+        return thumbnail.show()
+    elif result["render_type"] == "img_bbox":
+        rotation = result.get("image", {}).get("rotation", 0)
+        boxes = result["bboxes"]
+        canvas = Image.open(BytesIO(requests.get(result["source"]).content))
+        if rotation:
+            canvas = canvas.rotate(rotation, expand=False)
+        for line in boxes:
+            for box in line:
+                poly = box["box"]
+                class_ = box["class"]
+                label_values = [
+                    list(ImageColor.getcolor(l["color"], "RGB"))
+                    for l in result["classes"]
+                ]
+                class_color = label_values[class_]
+                poly = [(p[0], p[1]) for p in poly]
+                ImageDraw.Draw(canvas).polygon(poly, outline=tuple(class_color))
+        return canvas.show()
+    print("No results yet")
```
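
In the `img_segmentation` branch, `label_values[bitmap.astype(int)]` is NumPy fancy indexing: each pixel's class index selects a row of the color palette, with `[0, 0, 0]` prepended for the background. A toy example of the same lookup (palette and bitmap values are invented):

```python
# Toy palette lookup mirroring the img_segmentation branch
# (colors and bitmap are invented values).
import numpy as np

label_values = np.array([
    [0, 0, 0],      # background, inserted at index 0
    [255, 0, 0],    # class 1
    [0, 255, 0],    # class 2
])
bitmap = np.array([[0, 1],
                   [2, 1]])                   # per-pixel class indices
thumbnail = label_values[bitmap.astype(int)]  # shape (2, 2, 3): RGB per pixel
print(thumbnail.astype(np.uint8))
```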
{unitlab-1.7.8 → unitlab-1.8.0}/src/unitlab/run.py

```diff
@@ -82,6 +82,16 @@ def main():
     )
     parser_task_download_labeled_data.set_defaults(func=core.task_download_data)
 
+    # DataSource Result
+    parser_datasource_result = subparsers.add_parser(
+        "datasource-result", help="Get datasource result"
+    )
+    parser_datasource_result.add_argument(
+        "-id", "--uuid", type=core.validate_uuid, required=True, help="DataSource uuid"
+    )
+    parser_datasource_result.add_argument("-k", "--api_key", **core.api_key_template)
+    parser_datasource_result.set_defaults(func=core.datasource_result)
+
     # AI Model List
     parser_ai_model_list = subparsers.add_parser(
         "ai-model-list", help="Get AI model list"
```
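
With this wiring, the new subcommand is invoked as `unitlab datasource-result -id <datasource-uuid> -k <api-key>`. Because each handler reads its endpoint from `namespace.func.__name__`, the handlers can also be driven directly with an `argparse.Namespace`; a hypothetical sketch (placeholder key and UUID):

```python
# Hypothetical direct call into the new handler (placeholder credentials);
# core.datasource_result looks up its endpoint via namespace.func.__name__.
import argparse
from unitlab import core

ns = argparse.Namespace(
    api_key="YOUR_API_KEY",                       # placeholder
    uuid="00000000-0000-4000-8000-000000000000",  # placeholder v4 UUID
)
ns.func = core.datasource_result
ns.func(ns)  # fetches /datasource/{uuid}/result/ and renders the annotation
```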
unitlab-1.7.8/src/unitlab/core.py

```diff
@@ -1,175 +0,0 @@
-import argparse
-import asyncio
-import errno
-import glob
-import logging
-import os
-import uuid
-from uuid import UUID
-
-import aiohttp
-import requests
-import tqdm
-
-from unitlab import pretty
-
-BASE_URL = "https://api-dev.unitlab.ai/api/cli"
-
-ENPOINTS = {
-    "ai_model_list": BASE_URL + "/task-parent/",
-    "ai_model_detail": BASE_URL + "/task-parent/{}/",
-    "task_list": BASE_URL + "/task/",
-    "task_detail": BASE_URL + "/task/{}/",
-    "task_data_sources": BASE_URL + "/task/{}/datasource/",
-    "task_members": BASE_URL + "/task/{}/members/",
-    "task_statistics": BASE_URL + "/task/{}/statistics/",
-    "task_upload_datasources": BASE_URL + "/task/upload-datasource/",
-    "task_download_data": BASE_URL + "/task/{}/download-data/",
-}
-
-api_key_template = {
-    "type": str,
-    "dest": "api_key",
-    "nargs": "?",
-    "required": True,
-    "help": "The api-key that obtained from unitlab.ai",
-}
-
-
-def get_headers(namespace):
-    return {"Authorization": f"Api-Key {namespace.api_key}"}
-
-
-def validate_uuid(uuid):
-    try:
-        UUID(uuid, version=4)
-    except ValueError:
-        raise argparse.ArgumentTypeError("Invalid UUID")
-    return uuid
-
-
-def ai_model_list(namespace):
-    r = requests.get(
-        url=ENPOINTS[namespace.func.__name__],
-        headers=get_headers(namespace),
-    )
-    r.raise_for_status()
-    pretty.print_ai_model(r.json(), many=True)
-
-
-def ai_model_detail(namespace):
-    r = requests.get(
-        url=ENPOINTS[namespace.func.__name__].format(namespace.uuid),
-        headers=get_headers(namespace),
-    )
-    r.raise_for_status()
-    pretty.print_ai_model(r.json(), many=False)
-
-
-def task_list(namespace):
-    r = requests.get(
-        url=ENPOINTS[namespace.func.__name__],
-        headers=get_headers(namespace),
-    )
-    r.raise_for_status()
-    pretty.print_task(r.json(), many=True)
-
-
-def task_detail(namespace):
-    r = requests.get(
-        url=ENPOINTS[namespace.func.__name__].format(namespace.uuid),
-        headers=get_headers(namespace),
-    )
-    r.raise_for_status()
-    pretty.print_task(r.json(), many=False)
-
-
-def task_data_sources(namespace):
-    r = requests.get(
-        url=ENPOINTS[namespace.func.__name__].format(namespace.uuid),
-        headers=get_headers(namespace),
-    )
-    r.raise_for_status()
-    pretty.print_data_sources(r.json())
-
-
-def task_members(namespace):
-    r = requests.get(
-        url=ENPOINTS[namespace.func.__name__].format(namespace.uuid),
-        headers=get_headers(namespace),
-    )
-    r.raise_for_status()
-    pretty.print_members(r.json())
-
-
-def task_statistics(namespace):
-    r = requests.get(
-        url=ENPOINTS[namespace.func.__name__].format(namespace.uuid),
-        headers=get_headers(namespace),
-    )
-    pretty.print_task_statistics(r.json())
-
-
-def task_upload_datasources(namespace):
-    logging.basicConfig(level=logging.INFO, format=None)
-
-    try:
-        os.makedirs(namespace.input_dir)
-    except OSError as e:
-        if e.errno != errno.EEXIST:
-            raise
-
-    async def post_image(session: aiohttp.ClientSession, image: str, task_id: str):
-        with open(image, "rb") as img:
-            await session.request(
-                "POST",
-                url=ENPOINTS[namespace.func.__name__],
-                data=aiohttp.FormData(fields={"task": task_id, "image": img}),
-            )
-        return os.path.getsize(image)
-
-    async def data_upload(folder: str, api_key: str, task_id: str):
-        async with aiohttp.ClientSession(
-            headers={"Authorization": f"Api-Key {api_key}"}
-        ) as session:
-            total_bytes = 0
-            tasks = []
-            images = [
-                image
-                for images_list in [
-                    glob.glob(os.path.join(folder, "") + extension)
-                    for extension in ["*jpg", "*png"]
-                ]
-                for image in images_list
-            ]
-            for image in images:
-                total_bytes += os.path.getsize(image)
-            for image in images:
-                tasks.append(post_image(session=session, image=image, task_id=task_id))
-
-            pbar = tqdm.tqdm(
-                total=total_bytes,
-                unit="B",
-                unit_scale=True,
-                unit_divisor=1024,
-                ncols=80,
-            )
-            for f in asyncio.as_completed(tasks):
-                value = await f
-                pbar.update(value)
-
-    asyncio.run(data_upload(namespace.input_dir, namespace.api_key, namespace.uuid))
-
-
-def task_download_data(namespace):
-    session = requests.Session()
-    with session.get(
-        url=ENPOINTS[namespace.func.__name__].format(namespace.uuid),
-        headers=get_headers(namespace),
-        stream=True,
-    ) as r:
-        r.raise_for_status()
-        filename = f"task-{namespace.uuid}-{uuid.uuid4().hex[:8]}.json"
-        with open(filename, "wb") as f:
-            for chunk in r.iter_content(chunk_size=1024 * 1024):
-                f.write(chunk)
```