arkindex-base-worker 0.4.0rc1__tar.gz → 0.4.0rc2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/PKG-INFO +3 -3
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/arkindex_base_worker.egg-info/PKG-INFO +3 -3
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/arkindex_base_worker.egg-info/SOURCES.txt +1 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/arkindex_base_worker.egg-info/requires.txt +2 -2
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/arkindex_worker/worker/__init__.py +23 -109
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/arkindex_worker/worker/base.py +8 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/arkindex_worker/worker/dataset.py +70 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/arkindex_worker/worker/element.py +17 -0
- arkindex_base_worker-0.4.0rc2/arkindex_worker/worker/process.py +63 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/pyproject.toml +3 -3
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/tests/__init__.py +1 -1
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/tests/conftest.py +10 -3
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/tests/test_dataset_worker.py +5 -2
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/tests/test_elements_worker/test_elements.py +269 -2
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/LICENSE +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/README.md +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/arkindex_base_worker.egg-info/dependency_links.txt +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/arkindex_base_worker.egg-info/top_level.txt +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/arkindex_worker/__init__.py +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/arkindex_worker/cache.py +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/arkindex_worker/image.py +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/arkindex_worker/models.py +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/arkindex_worker/utils.py +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/arkindex_worker/worker/classification.py +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/arkindex_worker/worker/corpus.py +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/arkindex_worker/worker/entity.py +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/arkindex_worker/worker/image.py +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/arkindex_worker/worker/metadata.py +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/arkindex_worker/worker/task.py +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/arkindex_worker/worker/training.py +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/arkindex_worker/worker/transcription.py +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/arkindex_worker/worker/version.py +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/hooks/pre_gen_project.py +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/setup.cfg +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/tests/test_base_worker.py +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/tests/test_cache.py +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/tests/test_element.py +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/tests/test_elements_worker/__init__.py +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/tests/test_elements_worker/test_classifications.py +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/tests/test_elements_worker/test_cli.py +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/tests/test_elements_worker/test_corpus.py +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/tests/test_elements_worker/test_dataset.py +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/tests/test_elements_worker/test_entities.py +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/tests/test_elements_worker/test_image.py +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/tests/test_elements_worker/test_metadata.py +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/tests/test_elements_worker/test_task.py +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/tests/test_elements_worker/test_training.py +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/tests/test_elements_worker/test_transcriptions.py +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/tests/test_elements_worker/test_worker.py +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/tests/test_image.py +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/tests/test_merge.py +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/tests/test_utils.py +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/worker-demo/tests/__init__.py +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/worker-demo/tests/conftest.py +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/worker-demo/tests/test_worker.py +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/worker-demo/worker_demo/__init__.py +0 -0
- {arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/worker-demo/worker_demo/worker.py +0 -0
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.1
|
|
2
2
|
Name: arkindex-base-worker
|
|
3
|
-
Version: 0.4.
|
|
3
|
+
Version: 0.4.0rc2
|
|
4
4
|
Summary: Base Worker to easily build Arkindex ML workflows
|
|
5
5
|
Author-email: Teklia <contact@teklia.com>
|
|
6
6
|
Maintainer-email: Teklia <contact@teklia.com>
|
|
@@ -40,7 +40,7 @@ Classifier: Programming Language :: Python :: 3.11
|
|
|
40
40
|
Requires-Python: >=3.10
|
|
41
41
|
Description-Content-Type: text/markdown
|
|
42
42
|
License-File: LICENSE
|
|
43
|
-
Requires-Dist: humanize==4.
|
|
43
|
+
Requires-Dist: humanize==4.10.0
|
|
44
44
|
Requires-Dist: peewee~=3.17
|
|
45
45
|
Requires-Dist: Pillow==10.4.0
|
|
46
46
|
Requires-Dist: python-gnupg==0.5.2
|
|
@@ -49,7 +49,7 @@ Requires-Dist: teklia-toolbox==0.1.5
|
|
|
49
49
|
Requires-Dist: zstandard==0.22.0
|
|
50
50
|
Provides-Extra: docs
|
|
51
51
|
Requires-Dist: black==24.4.2; extra == "docs"
|
|
52
|
-
Requires-Dist: mkdocs-material==9.5.
|
|
52
|
+
Requires-Dist: mkdocs-material==9.5.33; extra == "docs"
|
|
53
53
|
Requires-Dist: mkdocstrings-python==1.10.8; extra == "docs"
|
|
54
54
|
Provides-Extra: tests
|
|
55
55
|
Requires-Dist: pytest==8.3.2; extra == "tests"
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.1
|
|
2
2
|
Name: arkindex-base-worker
|
|
3
|
-
Version: 0.4.
|
|
3
|
+
Version: 0.4.0rc2
|
|
4
4
|
Summary: Base Worker to easily build Arkindex ML workflows
|
|
5
5
|
Author-email: Teklia <contact@teklia.com>
|
|
6
6
|
Maintainer-email: Teklia <contact@teklia.com>
|
|
@@ -40,7 +40,7 @@ Classifier: Programming Language :: Python :: 3.11
|
|
|
40
40
|
Requires-Python: >=3.10
|
|
41
41
|
Description-Content-Type: text/markdown
|
|
42
42
|
License-File: LICENSE
|
|
43
|
-
Requires-Dist: humanize==4.
|
|
43
|
+
Requires-Dist: humanize==4.10.0
|
|
44
44
|
Requires-Dist: peewee~=3.17
|
|
45
45
|
Requires-Dist: Pillow==10.4.0
|
|
46
46
|
Requires-Dist: python-gnupg==0.5.2
|
|
@@ -49,7 +49,7 @@ Requires-Dist: teklia-toolbox==0.1.5
|
|
|
49
49
|
Requires-Dist: zstandard==0.22.0
|
|
50
50
|
Provides-Extra: docs
|
|
51
51
|
Requires-Dist: black==24.4.2; extra == "docs"
|
|
52
|
-
Requires-Dist: mkdocs-material==9.5.
|
|
52
|
+
Requires-Dist: mkdocs-material==9.5.33; extra == "docs"
|
|
53
53
|
Requires-Dist: mkdocstrings-python==1.10.8; extra == "docs"
|
|
54
54
|
Provides-Extra: tests
|
|
55
55
|
Requires-Dist: pytest==8.3.2; extra == "tests"
|
|
@@ -20,6 +20,7 @@ arkindex_worker/worker/element.py
|
|
|
20
20
|
arkindex_worker/worker/entity.py
|
|
21
21
|
arkindex_worker/worker/image.py
|
|
22
22
|
arkindex_worker/worker/metadata.py
|
|
23
|
+
arkindex_worker/worker/process.py
|
|
23
24
|
arkindex_worker/worker/task.py
|
|
24
25
|
arkindex_worker/worker/training.py
|
|
25
26
|
arkindex_worker/worker/transcription.py
|
{arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/arkindex_worker/worker/__init__.py
RENAMED
|
@@ -4,12 +4,10 @@ Base classes to implement Arkindex workers.
|
|
|
4
4
|
|
|
5
5
|
import contextlib
|
|
6
6
|
import json
|
|
7
|
-
import os
|
|
8
7
|
import sys
|
|
9
8
|
import uuid
|
|
10
|
-
from
|
|
11
|
-
from
|
|
12
|
-
from enum import Enum
|
|
9
|
+
from collections.abc import Iterable
|
|
10
|
+
from itertools import chain
|
|
13
11
|
from pathlib import Path
|
|
14
12
|
|
|
15
13
|
from apistar.exceptions import ErrorResponse
|
|
@@ -21,47 +19,27 @@ from arkindex_worker.utils import pluralize
|
|
|
21
19
|
from arkindex_worker.worker.base import BaseWorker
|
|
22
20
|
from arkindex_worker.worker.classification import ClassificationMixin
|
|
23
21
|
from arkindex_worker.worker.corpus import CorpusMixin
|
|
24
|
-
from arkindex_worker.worker.dataset import
|
|
22
|
+
from arkindex_worker.worker.dataset import (
|
|
23
|
+
DatasetMixin,
|
|
24
|
+
DatasetState,
|
|
25
|
+
MissingDatasetArchive,
|
|
26
|
+
)
|
|
25
27
|
from arkindex_worker.worker.element import ElementMixin
|
|
26
28
|
from arkindex_worker.worker.entity import EntityMixin
|
|
27
29
|
from arkindex_worker.worker.image import ImageMixin
|
|
28
30
|
from arkindex_worker.worker.metadata import MetaDataMixin, MetaType # noqa: F401
|
|
31
|
+
from arkindex_worker.worker.process import ActivityState, ProcessMode
|
|
29
32
|
from arkindex_worker.worker.task import TaskMixin
|
|
30
33
|
from arkindex_worker.worker.transcription import TranscriptionMixin
|
|
31
34
|
from arkindex_worker.worker.version import WorkerVersionMixin
|
|
32
35
|
|
|
33
36
|
|
|
34
|
-
class ActivityState(Enum):
|
|
35
|
-
"""
|
|
36
|
-
Processing state of an element.
|
|
37
|
-
"""
|
|
38
|
-
|
|
39
|
-
Queued = "queued"
|
|
40
|
-
"""
|
|
41
|
-
The element has not yet been processed by a worker.
|
|
42
|
-
"""
|
|
43
|
-
|
|
44
|
-
Started = "started"
|
|
45
|
-
"""
|
|
46
|
-
The element is being processed by a worker.
|
|
47
|
-
"""
|
|
48
|
-
|
|
49
|
-
Processed = "processed"
|
|
50
|
-
"""
|
|
51
|
-
The element has been successfully processed by a worker.
|
|
52
|
-
"""
|
|
53
|
-
|
|
54
|
-
Error = "error"
|
|
55
|
-
"""
|
|
56
|
-
An error occurred while processing this element.
|
|
57
|
-
"""
|
|
58
|
-
|
|
59
|
-
|
|
60
37
|
class ElementsWorker(
|
|
38
|
+
ElementMixin,
|
|
39
|
+
DatasetMixin,
|
|
61
40
|
BaseWorker,
|
|
62
41
|
ClassificationMixin,
|
|
63
42
|
CorpusMixin,
|
|
64
|
-
ElementMixin,
|
|
65
43
|
TranscriptionMixin,
|
|
66
44
|
WorkerVersionMixin,
|
|
67
45
|
EntityMixin,
|
|
@@ -96,22 +74,7 @@ class ElementsWorker(
|
|
|
96
74
|
|
|
97
75
|
self._worker_version_cache = {}
|
|
98
76
|
|
|
99
|
-
def
|
|
100
|
-
"""Define specific ``argparse`` arguments for this worker"""
|
|
101
|
-
self.parser.add_argument(
|
|
102
|
-
"--elements-list",
|
|
103
|
-
help="JSON elements list to use",
|
|
104
|
-
type=open,
|
|
105
|
-
default=os.environ.get("TASK_ELEMENTS"),
|
|
106
|
-
)
|
|
107
|
-
self.parser.add_argument(
|
|
108
|
-
"--element",
|
|
109
|
-
type=str,
|
|
110
|
-
nargs="+",
|
|
111
|
-
help="One or more Arkindex element ID",
|
|
112
|
-
)
|
|
113
|
-
|
|
114
|
-
def get_elements(self) -> Iterable[CachedElement] | list[str]:
|
|
77
|
+
def get_elements(self) -> Iterable[CachedElement] | list[str] | list[Element]:
|
|
115
78
|
"""
|
|
116
79
|
List the elements to be processed, either from the CLI arguments or
|
|
117
80
|
the cache database when enabled.
|
|
@@ -143,15 +106,20 @@ class ElementsWorker(
|
|
|
143
106
|
)
|
|
144
107
|
if self.use_cache and cache_query.exists():
|
|
145
108
|
return cache_query
|
|
146
|
-
# Process elements from JSON file
|
|
147
109
|
elif self.args.elements_list:
|
|
110
|
+
# Process elements from JSON file
|
|
148
111
|
data = json.load(self.args.elements_list)
|
|
149
112
|
assert isinstance(data, list), "Elements list must be a list"
|
|
150
113
|
assert len(data), "No elements in elements list"
|
|
151
114
|
out += list(filter(None, [element.get("id") for element in data]))
|
|
152
|
-
# Add any extra element from CLI
|
|
153
115
|
elif self.args.element:
|
|
116
|
+
# Add any extra element from CLI
|
|
154
117
|
out += self.args.element
|
|
118
|
+
elif self.process_mode == ProcessMode.Dataset or self.args.set:
|
|
119
|
+
# Elements from datasets
|
|
120
|
+
return list(
|
|
121
|
+
chain.from_iterable(map(self.list_set_elements, self.list_sets()))
|
|
122
|
+
)
|
|
155
123
|
|
|
156
124
|
invalid_element_ids = list(filter(invalid_element_id, out))
|
|
157
125
|
assert (
|
|
@@ -166,7 +134,8 @@ class ElementsWorker(
|
|
|
166
134
|
Whether or not WorkerActivity support has been enabled on the DataImport
|
|
167
135
|
used to run this worker.
|
|
168
136
|
"""
|
|
169
|
-
if self.is_read_only:
|
|
137
|
+
if self.is_read_only or self.process_mode == ProcessMode.Dataset:
|
|
138
|
+
# Worker activities are also disabled when running an ElementsWorker in a Dataset process.
|
|
170
139
|
return False
|
|
171
140
|
assert (
|
|
172
141
|
self.process_information
|
|
@@ -200,7 +169,7 @@ class ElementsWorker(
|
|
|
200
169
|
for i, item in enumerate(elements, start=1):
|
|
201
170
|
element = None
|
|
202
171
|
try:
|
|
203
|
-
if
|
|
172
|
+
if isinstance(item, CachedElement | Element):
|
|
204
173
|
# Just use the result of get_elements as the element
|
|
205
174
|
element = item
|
|
206
175
|
else:
|
|
@@ -316,29 +285,7 @@ class ElementsWorker(
|
|
|
316
285
|
return True
|
|
317
286
|
|
|
318
287
|
|
|
319
|
-
|
|
320
|
-
values = value.split(":")
|
|
321
|
-
if len(values) != 2:
|
|
322
|
-
raise ArgumentTypeError(
|
|
323
|
-
f"'{value}' is not in the correct format `<dataset_id>:<set_name>`"
|
|
324
|
-
)
|
|
325
|
-
|
|
326
|
-
dataset_id, set_name = values
|
|
327
|
-
try:
|
|
328
|
-
dataset_id = uuid.UUID(dataset_id)
|
|
329
|
-
return (dataset_id, set_name)
|
|
330
|
-
except (TypeError, ValueError) as e:
|
|
331
|
-
raise ArgumentTypeError(f"'{dataset_id}' should be a valid UUID") from e
|
|
332
|
-
|
|
333
|
-
|
|
334
|
-
class MissingDatasetArchive(Exception):
|
|
335
|
-
"""
|
|
336
|
-
Exception raised when the compressed archive associated to
|
|
337
|
-
a dataset isn't found in its task artifacts.
|
|
338
|
-
"""
|
|
339
|
-
|
|
340
|
-
|
|
341
|
-
class DatasetWorker(BaseWorker, DatasetMixin, TaskMixin):
|
|
288
|
+
class DatasetWorker(DatasetMixin, BaseWorker, TaskMixin):
|
|
342
289
|
"""
|
|
343
290
|
Base class for ML workers that operate on Arkindex dataset sets.
|
|
344
291
|
|
|
@@ -361,19 +308,6 @@ class DatasetWorker(BaseWorker, DatasetMixin, TaskMixin):
|
|
|
361
308
|
# Set as an instance variable as dataset workers might use it to easily extract its content
|
|
362
309
|
self.downloaded_dataset_artifact: Path | None = None
|
|
363
310
|
|
|
364
|
-
def add_arguments(self):
|
|
365
|
-
"""Define specific ``argparse`` arguments for this worker"""
|
|
366
|
-
self.parser.add_argument(
|
|
367
|
-
"--set",
|
|
368
|
-
type=check_dataset_set,
|
|
369
|
-
nargs="+",
|
|
370
|
-
help="""
|
|
371
|
-
One or more Arkindex dataset sets, format is <dataset_uuid>:<set_name>
|
|
372
|
-
(e.g.: "12341234-1234-1234-1234-123412341234:train")
|
|
373
|
-
""",
|
|
374
|
-
default=[],
|
|
375
|
-
)
|
|
376
|
-
|
|
377
311
|
def cleanup_downloaded_artifact(self) -> None:
|
|
378
312
|
"""
|
|
379
313
|
Cleanup the downloaded dataset artifact if any
|
|
@@ -421,30 +355,10 @@ class DatasetWorker(BaseWorker, DatasetMixin, TaskMixin):
|
|
|
421
355
|
:param set: The set to process.
|
|
422
356
|
"""
|
|
423
357
|
|
|
424
|
-
def list_sets(self) -> Iterator[Set]:
|
|
425
|
-
"""
|
|
426
|
-
List the sets to be processed, either from the CLI arguments or using the
|
|
427
|
-
[list_process_sets][arkindex_worker.worker.dataset.DatasetMixin.list_process_sets] method.
|
|
428
|
-
|
|
429
|
-
:returns: An iterator of ``Set`` objects.
|
|
430
|
-
"""
|
|
431
|
-
if not self.is_read_only:
|
|
432
|
-
yield from self.list_process_sets()
|
|
433
|
-
|
|
434
|
-
datasets: dict[uuid.UUID, Dataset] = {}
|
|
435
|
-
for dataset_id, set_name in self.args.set:
|
|
436
|
-
# Retrieving dataset information is not already cached
|
|
437
|
-
if dataset_id not in datasets:
|
|
438
|
-
datasets[dataset_id] = Dataset(
|
|
439
|
-
**self.api_client.request("RetrieveDataset", id=dataset_id)
|
|
440
|
-
)
|
|
441
|
-
|
|
442
|
-
yield Set(name=set_name, dataset=datasets[dataset_id])
|
|
443
|
-
|
|
444
358
|
def run(self):
|
|
445
359
|
"""
|
|
446
360
|
Implements an Arkindex worker that goes through each dataset set returned by
|
|
447
|
-
[list_sets][arkindex_worker.worker.
|
|
361
|
+
[list_sets][arkindex_worker.worker.dataset.DatasetMixin.list_sets].
|
|
448
362
|
|
|
449
363
|
It calls [process_set][arkindex_worker.worker.DatasetWorker.process_set],
|
|
450
364
|
catching exceptions.
|
{arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/arkindex_worker/worker/base.py
RENAMED
|
@@ -24,6 +24,7 @@ from arkindex_worker.cache import (
|
|
|
24
24
|
merge_parents_cache,
|
|
25
25
|
)
|
|
26
26
|
from arkindex_worker.utils import close_delete_file, extract_tar_zst_archive
|
|
27
|
+
from arkindex_worker.worker.process import ProcessMode
|
|
27
28
|
from teklia_toolbox.requests import get_arkindex_client
|
|
28
29
|
|
|
29
30
|
|
|
@@ -156,6 +157,13 @@ class BaseWorker:
|
|
|
156
157
|
raise Exception("Missing ARKINDEX_CORPUS_ID environment variable")
|
|
157
158
|
return self._corpus_id
|
|
158
159
|
|
|
160
|
+
@property
|
|
161
|
+
def process_mode(self) -> ProcessMode | None:
|
|
162
|
+
"""Mode of the process being run. Returns None when read-only."""
|
|
163
|
+
if self.is_read_only:
|
|
164
|
+
return
|
|
165
|
+
return ProcessMode(self.process_information["mode"])
|
|
166
|
+
|
|
159
167
|
@property
|
|
160
168
|
def is_read_only(self) -> bool:
|
|
161
169
|
"""
|
{arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/arkindex_worker/worker/dataset.py
RENAMED
|
@@ -2,6 +2,8 @@
|
|
|
2
2
|
BaseWorker methods for datasets.
|
|
3
3
|
"""
|
|
4
4
|
|
|
5
|
+
import uuid
|
|
6
|
+
from argparse import ArgumentTypeError
|
|
5
7
|
from collections.abc import Iterator
|
|
6
8
|
from enum import Enum
|
|
7
9
|
|
|
@@ -36,7 +38,55 @@ class DatasetState(Enum):
|
|
|
36
38
|
"""
|
|
37
39
|
|
|
38
40
|
|
|
41
|
+
class MissingDatasetArchive(Exception):
|
|
42
|
+
"""
|
|
43
|
+
Exception raised when the compressed archive associated to
|
|
44
|
+
a dataset isn't found in its task artifacts.
|
|
45
|
+
"""
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
def check_dataset_set(value: str) -> tuple[uuid.UUID, str]:
|
|
49
|
+
"""The `--set` argument should have the following format:
|
|
50
|
+
<dataset_id>:<set_name>
|
|
51
|
+
|
|
52
|
+
Args:
|
|
53
|
+
value (str): Provided argument.
|
|
54
|
+
|
|
55
|
+
Raises:
|
|
56
|
+
ArgumentTypeError: When the value is invalid.
|
|
57
|
+
|
|
58
|
+
Returns:
|
|
59
|
+
tuple[uuid.UUID, str]: The ID of the dataset parsed as UUID and the name of the set.
|
|
60
|
+
"""
|
|
61
|
+
values = value.split(":")
|
|
62
|
+
if len(values) != 2:
|
|
63
|
+
raise ArgumentTypeError(
|
|
64
|
+
f"'{value}' is not in the correct format `<dataset_id>:<set_name>`"
|
|
65
|
+
)
|
|
66
|
+
|
|
67
|
+
dataset_id, set_name = values
|
|
68
|
+
try:
|
|
69
|
+
dataset_id = uuid.UUID(dataset_id)
|
|
70
|
+
return (dataset_id, set_name)
|
|
71
|
+
except (TypeError, ValueError) as e:
|
|
72
|
+
raise ArgumentTypeError(f"'{dataset_id}' should be a valid UUID") from e
|
|
73
|
+
|
|
74
|
+
|
|
39
75
|
class DatasetMixin:
|
|
76
|
+
def add_arguments(self) -> None:
|
|
77
|
+
"""Define specific ``argparse`` arguments for the worker using this mixin"""
|
|
78
|
+
self.parser.add_argument(
|
|
79
|
+
"--set",
|
|
80
|
+
type=check_dataset_set,
|
|
81
|
+
nargs="+",
|
|
82
|
+
help="""
|
|
83
|
+
One or more Arkindex dataset sets, format is <dataset_uuid>:<set_name>
|
|
84
|
+
(e.g.: "12341234-1234-1234-1234-123412341234:train")
|
|
85
|
+
""",
|
|
86
|
+
default=[],
|
|
87
|
+
)
|
|
88
|
+
super().add_arguments()
|
|
89
|
+
|
|
40
90
|
def list_process_sets(self) -> Iterator[Set]:
|
|
41
91
|
"""
|
|
42
92
|
List dataset sets associated to the worker's process. This helper is not available in developer mode.
|
|
@@ -73,6 +123,26 @@ class DatasetMixin:
|
|
|
73
123
|
|
|
74
124
|
return map(lambda result: Element(**result["element"]), results)
|
|
75
125
|
|
|
126
|
+
def list_sets(self) -> Iterator[Set]:
|
|
127
|
+
"""
|
|
128
|
+
List the sets to be processed, either from the CLI arguments or using the
|
|
129
|
+
[list_process_sets][arkindex_worker.worker.dataset.DatasetMixin.list_process_sets] method.
|
|
130
|
+
|
|
131
|
+
:returns: An iterator of ``Set`` objects.
|
|
132
|
+
"""
|
|
133
|
+
if not self.is_read_only:
|
|
134
|
+
yield from self.list_process_sets()
|
|
135
|
+
|
|
136
|
+
datasets: dict[uuid.UUID, Dataset] = {}
|
|
137
|
+
for dataset_id, set_name in self.args.set:
|
|
138
|
+
# Retrieving dataset information if not already cached
|
|
139
|
+
if dataset_id not in datasets:
|
|
140
|
+
datasets[dataset_id] = Dataset(
|
|
141
|
+
**self.api_client.request("RetrieveDataset", id=dataset_id)
|
|
142
|
+
)
|
|
143
|
+
|
|
144
|
+
yield Set(name=set_name, dataset=datasets[dataset_id])
|
|
145
|
+
|
|
76
146
|
@unsupported_cache
|
|
77
147
|
def update_dataset_state(self, dataset: Dataset, state: DatasetState) -> Dataset:
|
|
78
148
|
"""
|
{arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/arkindex_worker/worker/element.py
RENAMED
|
@@ -2,6 +2,7 @@
|
|
|
2
2
|
ElementsWorker methods for elements and element types.
|
|
3
3
|
"""
|
|
4
4
|
|
|
5
|
+
import os
|
|
5
6
|
from collections.abc import Iterable
|
|
6
7
|
from operator import attrgetter
|
|
7
8
|
from typing import NamedTuple
|
|
@@ -38,6 +39,22 @@ class MissingTypeError(Exception):
|
|
|
38
39
|
|
|
39
40
|
|
|
40
41
|
class ElementMixin:
|
|
42
|
+
def add_arguments(self):
|
|
43
|
+
"""Define specific ``argparse`` arguments for the worker using this mixin"""
|
|
44
|
+
self.parser.add_argument(
|
|
45
|
+
"--elements-list",
|
|
46
|
+
help="JSON elements list to use",
|
|
47
|
+
type=open,
|
|
48
|
+
default=os.environ.get("TASK_ELEMENTS"),
|
|
49
|
+
)
|
|
50
|
+
self.parser.add_argument(
|
|
51
|
+
"--element",
|
|
52
|
+
type=str,
|
|
53
|
+
nargs="+",
|
|
54
|
+
help="One or more Arkindex element ID",
|
|
55
|
+
)
|
|
56
|
+
super().add_arguments()
|
|
57
|
+
|
|
41
58
|
def list_corpus_types(self):
|
|
42
59
|
"""
|
|
43
60
|
Loads available element types in corpus.
|
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
from enum import Enum
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
class ActivityState(Enum):
|
|
5
|
+
"""
|
|
6
|
+
Processing state of an element.
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
Queued = "queued"
|
|
10
|
+
"""
|
|
11
|
+
The element has not yet been processed by a worker.
|
|
12
|
+
"""
|
|
13
|
+
|
|
14
|
+
Started = "started"
|
|
15
|
+
"""
|
|
16
|
+
The element is being processed by a worker.
|
|
17
|
+
"""
|
|
18
|
+
|
|
19
|
+
Processed = "processed"
|
|
20
|
+
"""
|
|
21
|
+
The element has been successfully processed by a worker.
|
|
22
|
+
"""
|
|
23
|
+
|
|
24
|
+
Error = "error"
|
|
25
|
+
"""
|
|
26
|
+
An error occurred while processing this element.
|
|
27
|
+
"""
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
class ProcessMode(Enum):
|
|
31
|
+
"""
|
|
32
|
+
Mode of the process of the worker.
|
|
33
|
+
"""
|
|
34
|
+
|
|
35
|
+
Files = "files"
|
|
36
|
+
"""
|
|
37
|
+
Processes of files (images, PDFs, IIIF, ...) imports.
|
|
38
|
+
"""
|
|
39
|
+
|
|
40
|
+
Workers = "workers"
|
|
41
|
+
"""
|
|
42
|
+
Processes of worker executions.
|
|
43
|
+
"""
|
|
44
|
+
|
|
45
|
+
Template = "template"
|
|
46
|
+
"""
|
|
47
|
+
Process templates.
|
|
48
|
+
"""
|
|
49
|
+
|
|
50
|
+
S3 = "s3"
|
|
51
|
+
"""
|
|
52
|
+
Processes of imports from an S3-compatible storage.
|
|
53
|
+
"""
|
|
54
|
+
|
|
55
|
+
Local = "local"
|
|
56
|
+
"""
|
|
57
|
+
Local processes.
|
|
58
|
+
"""
|
|
59
|
+
|
|
60
|
+
Dataset = "dataset"
|
|
61
|
+
"""
|
|
62
|
+
Dataset processes.
|
|
63
|
+
"""
|
|
@@ -4,11 +4,11 @@ build-backend = "setuptools.build_meta"
|
|
|
4
4
|
|
|
5
5
|
[project]
|
|
6
6
|
name = "arkindex-base-worker"
|
|
7
|
-
version = "0.4.
|
|
7
|
+
version = "0.4.0rc2"
|
|
8
8
|
description = "Base Worker to easily build Arkindex ML workflows"
|
|
9
9
|
license = { file = "LICENSE" }
|
|
10
10
|
dependencies = [
|
|
11
|
-
"humanize==4.
|
|
11
|
+
"humanize==4.10.0",
|
|
12
12
|
"peewee~=3.17",
|
|
13
13
|
"Pillow==10.4.0",
|
|
14
14
|
"python-gnupg==0.5.2",
|
|
@@ -44,7 +44,7 @@ Authors = "https://teklia.com"
|
|
|
44
44
|
[project.optional-dependencies]
|
|
45
45
|
docs = [
|
|
46
46
|
"black==24.4.2",
|
|
47
|
-
"mkdocs-material==9.5.
|
|
47
|
+
"mkdocs-material==9.5.33",
|
|
48
48
|
"mkdocstrings-python==1.10.8",
|
|
49
49
|
]
|
|
50
50
|
tests = [
|
|
@@ -23,10 +23,15 @@ from arkindex_worker.cache import (
|
|
|
23
23
|
init_cache_db,
|
|
24
24
|
)
|
|
25
25
|
from arkindex_worker.models import Artifact, Dataset, Set
|
|
26
|
-
from arkindex_worker.worker import
|
|
26
|
+
from arkindex_worker.worker import (
|
|
27
|
+
BaseWorker,
|
|
28
|
+
DatasetWorker,
|
|
29
|
+
ElementsWorker,
|
|
30
|
+
ProcessMode,
|
|
31
|
+
)
|
|
27
32
|
from arkindex_worker.worker.dataset import DatasetState
|
|
28
33
|
from arkindex_worker.worker.transcription import TextOrientation
|
|
29
|
-
from tests import CORPUS_ID,
|
|
34
|
+
from tests import CORPUS_ID, SAMPLES_DIR
|
|
30
35
|
|
|
31
36
|
__yaml_cache = {}
|
|
32
37
|
|
|
@@ -601,7 +606,9 @@ def mock_dataset_worker(monkeypatch, mocker, _mock_worker_run_api):
|
|
|
601
606
|
|
|
602
607
|
dataset_worker = DatasetWorker()
|
|
603
608
|
dataset_worker.configure()
|
|
604
|
-
|
|
609
|
+
|
|
610
|
+
# Update process mode
|
|
611
|
+
dataset_worker.process_information["mode"] = ProcessMode.Dataset
|
|
605
612
|
|
|
606
613
|
assert not dataset_worker.is_read_only
|
|
607
614
|
|
{arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/tests/test_dataset_worker.py
RENAMED
|
@@ -6,8 +6,11 @@ import pytest
|
|
|
6
6
|
from apistar.exceptions import ErrorResponse
|
|
7
7
|
|
|
8
8
|
from arkindex_worker.models import Dataset, Set
|
|
9
|
-
from arkindex_worker.worker import
|
|
10
|
-
|
|
9
|
+
from arkindex_worker.worker.dataset import (
|
|
10
|
+
DatasetState,
|
|
11
|
+
MissingDatasetArchive,
|
|
12
|
+
check_dataset_set,
|
|
13
|
+
)
|
|
11
14
|
from tests import FIXTURES_DIR, PROCESS_ID
|
|
12
15
|
from tests.test_elements_worker import BASE_API_CALLS
|
|
13
16
|
|
|
@@ -17,7 +17,9 @@ from arkindex_worker.cache import (
|
|
|
17
17
|
from arkindex_worker.models import Element
|
|
18
18
|
from arkindex_worker.utils import DEFAULT_BATCH_SIZE
|
|
19
19
|
from arkindex_worker.worker import ElementsWorker
|
|
20
|
+
from arkindex_worker.worker.dataset import DatasetState
|
|
20
21
|
from arkindex_worker.worker.element import MissingTypeError
|
|
22
|
+
from arkindex_worker.worker.process import ProcessMode
|
|
21
23
|
from tests import CORPUS_ID
|
|
22
24
|
|
|
23
25
|
from . import BASE_API_CALLS
|
|
@@ -208,10 +210,12 @@ def test_get_elements_element_arg_not_uuid(mocker, mock_elements_worker):
|
|
|
208
210
|
"arkindex_worker.worker.base.argparse.ArgumentParser.parse_args",
|
|
209
211
|
return_value=Namespace(
|
|
210
212
|
element=["volumeid", "pageid"],
|
|
213
|
+
config={},
|
|
211
214
|
verbose=False,
|
|
212
215
|
elements_list=None,
|
|
213
216
|
database=None,
|
|
214
|
-
dev=
|
|
217
|
+
dev=True,
|
|
218
|
+
set=[],
|
|
215
219
|
),
|
|
216
220
|
)
|
|
217
221
|
|
|
@@ -232,10 +236,12 @@ def test_get_elements_element_arg(mocker, mock_elements_worker):
|
|
|
232
236
|
"11111111-1111-1111-1111-111111111111",
|
|
233
237
|
"22222222-2222-2222-2222-222222222222",
|
|
234
238
|
],
|
|
239
|
+
config={},
|
|
235
240
|
verbose=False,
|
|
236
241
|
elements_list=None,
|
|
237
242
|
database=None,
|
|
238
|
-
dev=
|
|
243
|
+
dev=True,
|
|
244
|
+
set=[],
|
|
239
245
|
),
|
|
240
246
|
)
|
|
241
247
|
|
|
@@ -250,6 +256,264 @@ def test_get_elements_element_arg(mocker, mock_elements_worker):
|
|
|
250
256
|
]
|
|
251
257
|
|
|
252
258
|
|
|
259
|
+
def test_get_elements_dataset_set_arg(responses, mocker, mock_elements_worker):
|
|
260
|
+
mocker.patch(
|
|
261
|
+
"arkindex_worker.worker.base.argparse.ArgumentParser.parse_args",
|
|
262
|
+
return_value=Namespace(
|
|
263
|
+
element=[],
|
|
264
|
+
config={},
|
|
265
|
+
verbose=False,
|
|
266
|
+
elements_list=None,
|
|
267
|
+
database=None,
|
|
268
|
+
dev=True,
|
|
269
|
+
set=[(UUID("11111111-1111-1111-1111-111111111111"), "train")],
|
|
270
|
+
),
|
|
271
|
+
)
|
|
272
|
+
|
|
273
|
+
# Mock RetrieveDataset call
|
|
274
|
+
responses.add(
|
|
275
|
+
responses.GET,
|
|
276
|
+
"http://testserver/api/v1/datasets/11111111-1111-1111-1111-111111111111/",
|
|
277
|
+
status=200,
|
|
278
|
+
json={
|
|
279
|
+
"id": "11111111-1111-1111-1111-111111111111",
|
|
280
|
+
"name": "My dataset",
|
|
281
|
+
"description": "A dataset about cats.",
|
|
282
|
+
"sets": ["train", "dev", "test"],
|
|
283
|
+
"state": DatasetState.Complete.value,
|
|
284
|
+
},
|
|
285
|
+
content_type="application/json",
|
|
286
|
+
)
|
|
287
|
+
|
|
288
|
+
# Mock ListSetElements call
|
|
289
|
+
element = {
|
|
290
|
+
"id": "22222222-2222-2222-2222-222222222222",
|
|
291
|
+
"type": "page",
|
|
292
|
+
"name": "1",
|
|
293
|
+
"corpus": {
|
|
294
|
+
"id": "11111111-1111-1111-1111-111111111111",
|
|
295
|
+
},
|
|
296
|
+
"thumbnail_url": "http://example.com",
|
|
297
|
+
"zone": {
|
|
298
|
+
"id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
|
|
299
|
+
"polygon": [[0, 0], [0, 0], [0, 0]],
|
|
300
|
+
"image": {
|
|
301
|
+
"id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
|
|
302
|
+
"path": "string",
|
|
303
|
+
"width": 0,
|
|
304
|
+
"height": 0,
|
|
305
|
+
"url": "http://example.com",
|
|
306
|
+
"s3_url": "string",
|
|
307
|
+
"status": "checked",
|
|
308
|
+
"server": {
|
|
309
|
+
"display_name": "string",
|
|
310
|
+
"url": "http://example.com",
|
|
311
|
+
"max_width": 2147483647,
|
|
312
|
+
"max_height": 2147483647,
|
|
313
|
+
},
|
|
314
|
+
},
|
|
315
|
+
"url": "http://example.com",
|
|
316
|
+
},
|
|
317
|
+
"rotation_angle": 0,
|
|
318
|
+
"mirrored": False,
|
|
319
|
+
"created": "2019-08-24T14:15:22Z",
|
|
320
|
+
"classes": [
|
|
321
|
+
{
|
|
322
|
+
"id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
|
|
323
|
+
"ml_class": {
|
|
324
|
+
"id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
|
|
325
|
+
"name": "string",
|
|
326
|
+
},
|
|
327
|
+
"state": "pending",
|
|
328
|
+
"confidence": 0,
|
|
329
|
+
"high_confidence": True,
|
|
330
|
+
"worker_run": {
|
|
331
|
+
"id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
|
|
332
|
+
"summary": "string",
|
|
333
|
+
},
|
|
334
|
+
}
|
|
335
|
+
],
|
|
336
|
+
"metadata": [
|
|
337
|
+
{
|
|
338
|
+
"id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
|
|
339
|
+
"type": "text",
|
|
340
|
+
"name": "string",
|
|
341
|
+
"value": "string",
|
|
342
|
+
"dates": [{"type": "exact", "year": 0, "month": 1, "day": 1}],
|
|
343
|
+
}
|
|
344
|
+
],
|
|
345
|
+
"transcriptions": [
|
|
346
|
+
{
|
|
347
|
+
"id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
|
|
348
|
+
"text": "string",
|
|
349
|
+
"confidence": 0,
|
|
350
|
+
"orientation": "horizontal-lr",
|
|
351
|
+
"worker_run": {
|
|
352
|
+
"id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
|
|
353
|
+
"summary": "string",
|
|
354
|
+
},
|
|
355
|
+
}
|
|
356
|
+
],
|
|
357
|
+
"has_children": True,
|
|
358
|
+
"worker_run": {
|
|
359
|
+
"id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
|
|
360
|
+
"summary": "string",
|
|
361
|
+
},
|
|
362
|
+
"confidence": 1,
|
|
363
|
+
}
|
|
364
|
+
responses.add(
|
|
365
|
+
responses.GET,
|
|
366
|
+
"http://testserver/api/v1/datasets/11111111-1111-1111-1111-111111111111/elements/?set=train&with_count=true",
|
|
367
|
+
status=200,
|
|
368
|
+
json={
|
|
369
|
+
"next": None,
|
|
370
|
+
"previous": None,
|
|
371
|
+
"results": [
|
|
372
|
+
{
|
|
373
|
+
"set": "train",
|
|
374
|
+
"element": element,
|
|
375
|
+
}
|
|
376
|
+
],
|
|
377
|
+
"count": 1,
|
|
378
|
+
},
|
|
379
|
+
content_type="application/json",
|
|
380
|
+
)
|
|
381
|
+
|
|
382
|
+
worker = ElementsWorker()
|
|
383
|
+
worker.configure()
|
|
384
|
+
|
|
385
|
+
elt_list = worker.get_elements()
|
|
386
|
+
|
|
387
|
+
assert elt_list == [
|
|
388
|
+
Element(**element),
|
|
389
|
+
]
|
|
390
|
+
|
|
391
|
+
|
|
392
|
+
def test_get_elements_dataset_set_api(responses, mocker, mock_elements_worker):
    """get_elements() on a Dataset-mode process fetches elements through the API.

    Mocks the two API calls involved (ListProcessSets, then ListSetElements for
    the set's dataset) and checks that the worker returns the mocked element
    wrapped in an ``Element``.
    """
    # Mock ListProcessSets call
    # The process exposes a single "train" set belonging to dataset 1111....
    responses.add(
        responses.GET,
        "http://testserver/api/v1/process/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeffff/sets/",
        status=200,
        json={
            "next": None,
            "previous": None,
            "results": [
                {
                    "id": "33333333-3333-3333-3333-333333333333",
                    "dataset": {"id": "11111111-1111-1111-1111-111111111111"},
                    "set_name": "train",
                }
            ],
            "count": 1,
        },
        content_type="application/json",
    )

    # Mock ListSetElements call
    # Full element payload as returned by the API (values mirror the API
    # schema examples; the exact contents are only compared, not interpreted).
    element = {
        "id": "22222222-2222-2222-2222-222222222222",
        "type": "page",
        "name": "1",
        "corpus": {
            "id": "11111111-1111-1111-1111-111111111111",
        },
        "thumbnail_url": "http://example.com",
        "zone": {
            "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
            "polygon": [[0, 0], [0, 0], [0, 0]],
            "image": {
                "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
                "path": "string",
                "width": 0,
                "height": 0,
                "url": "http://example.com",
                "s3_url": "string",
                "status": "checked",
                "server": {
                    "display_name": "string",
                    "url": "http://example.com",
                    "max_width": 2147483647,
                    "max_height": 2147483647,
                },
            },
            "url": "http://example.com",
        },
        "rotation_angle": 0,
        "mirrored": False,
        "created": "2019-08-24T14:15:22Z",
        "classes": [
            {
                "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
                "ml_class": {
                    "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
                    "name": "string",
                },
                "state": "pending",
                "confidence": 0,
                "high_confidence": True,
                "worker_run": {
                    "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
                    "summary": "string",
                },
            }
        ],
        "metadata": [
            {
                "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
                "type": "text",
                "name": "string",
                "value": "string",
                "dates": [{"type": "exact", "year": 0, "month": 1, "day": 1}],
            }
        ],
        "transcriptions": [
            {
                "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
                "text": "string",
                "confidence": 0,
                "orientation": "horizontal-lr",
                "worker_run": {
                    "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
                    "summary": "string",
                },
            }
        ],
        "has_children": True,
        "worker_run": {
            "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
            "summary": "string",
        },
        "confidence": 1,
    }
    # The dataset's "train" set returns exactly one (set, element) pair.
    responses.add(
        responses.GET,
        "http://testserver/api/v1/datasets/11111111-1111-1111-1111-111111111111/elements/?set=train&with_count=true",
        status=200,
        json={
            "next": None,
            "previous": None,
            "results": [
                {
                    "set": "train",
                    "element": element,
                }
            ],
            "count": 1,
        },
        content_type="application/json",
    )

    # Update ProcessMode to Dataset
    # so that get_elements() takes the API-listing code path instead of the
    # elements-list / database paths.
    mock_elements_worker.process_information["mode"] = ProcessMode.Dataset

    elt_list = mock_elements_worker.get_elements()

    # The raw API payload must be exposed as an Element instance.
    assert elt_list == [
        Element(**element),
    ]
|
|
515
|
+
|
|
516
|
+
|
|
253
517
|
def test_get_elements_both_args_error(mocker, mock_elements_worker, tmp_path):
|
|
254
518
|
elements_path = tmp_path / "elements.json"
|
|
255
519
|
elements_path.write_text(
|
|
@@ -270,6 +534,7 @@ def test_get_elements_both_args_error(mocker, mock_elements_worker, tmp_path):
|
|
|
270
534
|
elements_list=elements_path.open(),
|
|
271
535
|
database=None,
|
|
272
536
|
dev=False,
|
|
537
|
+
set=[],
|
|
273
538
|
),
|
|
274
539
|
)
|
|
275
540
|
|
|
@@ -295,6 +560,7 @@ def test_database_arg(mocker, mock_elements_worker, tmp_path):
|
|
|
295
560
|
elements_list=None,
|
|
296
561
|
database=database_path,
|
|
297
562
|
dev=False,
|
|
563
|
+
set=[],
|
|
298
564
|
),
|
|
299
565
|
)
|
|
300
566
|
|
|
@@ -319,6 +585,7 @@ def test_database_arg_cache_missing_version_table(
|
|
|
319
585
|
elements_list=None,
|
|
320
586
|
database=database_path,
|
|
321
587
|
dev=False,
|
|
588
|
+
set=[],
|
|
322
589
|
),
|
|
323
590
|
)
|
|
324
591
|
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/arkindex_worker/worker/corpus.py
RENAMED
|
File without changes
|
{arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/arkindex_worker/worker/entity.py
RENAMED
|
File without changes
|
{arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/arkindex_worker/worker/image.py
RENAMED
|
File without changes
|
{arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/arkindex_worker/worker/metadata.py
RENAMED
|
File without changes
|
{arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/arkindex_worker/worker/task.py
RENAMED
|
File without changes
|
{arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/arkindex_worker/worker/training.py
RENAMED
|
File without changes
|
|
File without changes
|
{arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/arkindex_worker/worker/version.py
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/worker-demo/tests/__init__.py
RENAMED
|
File without changes
|
{arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/worker-demo/tests/conftest.py
RENAMED
|
File without changes
|
{arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/worker-demo/tests/test_worker.py
RENAMED
|
File without changes
|
{arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/worker-demo/worker_demo/__init__.py
RENAMED
|
File without changes
|
{arkindex_base_worker-0.4.0rc1 → arkindex_base_worker-0.4.0rc2}/worker-demo/worker_demo/worker.py
RENAMED
|
File without changes
|