logs-py 1.14__py3-none-any.whl → 2.1__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries, and is provided for informational purposes only.
Potentially problematic release: this version of logs-py was flagged as potentially problematic.
- LOGS/Auxiliary/Constants.py +2 -2
- LOGS/Auxiliary/DateTimeConverter.py +1 -1
- LOGS/Auxiliary/Exceptions.py +1 -1
- LOGS/Auxiliary/MinimalModelGenerator.py +28 -6
- LOGS/Auxiliary/Tools.py +4 -0
- LOGS/Entities/AutoloadClientInfo.py +6 -8
- LOGS/Entities/AutoloadStatus.py +3 -3
- LOGS/Entities/{AutoloadSource.py → Bridge.py} +11 -10
- LOGS/Entities/BridgeMinimal.py +8 -0
- LOGS/Entities/BridgeRelations.py +23 -0
- LOGS/Entities/BridgeRequestParameter.py +19 -0
- LOGS/Entities/{AutoloadSourceType.py → BridgeType.py} +1 -1
- LOGS/Entities/Bridges.py +12 -0
- LOGS/Entities/{AutoloadConfiguration.py → DataSource.py} +97 -23
- LOGS/Entities/DataSourceMinimal.py +8 -0
- LOGS/Entities/DataSourceRelations.py +23 -0
- LOGS/Entities/DataSourceRequestParameter.py +44 -0
- LOGS/Entities/{AutoloadConfigurationStatus.py → DataSourceStatus.py} +1 -1
- LOGS/Entities/DataSources.py +12 -0
- LOGS/Entities/Dataset.py +83 -71
- LOGS/Entities/DatasetCreator.py +38 -18
- LOGS/Entities/DatasetMatchTypes.py +38 -6
- LOGS/Entities/DatasetRelations.py +13 -2
- LOGS/Entities/DatasetRequestParameter.py +19 -7
- LOGS/Entities/Datasets.py +1 -1
- LOGS/Entities/Document.py +2 -2
- LOGS/Entities/Experiment.py +9 -24
- LOGS/Entities/ExperimentRelations.py +23 -0
- LOGS/Entities/ExperimentRequestParameter.py +27 -2
- LOGS/Entities/Format.py +27 -61
- LOGS/Entities/FormatFormat.py +8 -6
- LOGS/Entities/FormatInstrument.py +2 -2
- LOGS/Entities/FormatMetaData.py +13 -5
- LOGS/Entities/FormatMethod.py +34 -3
- LOGS/Entities/FormatMinimal.py +11 -1
- LOGS/Entities/FormatRequestParameter.py +8 -3
- LOGS/Entities/FormatVendor.py +14 -1
- LOGS/Entities/FormatVendorRequestParameter.py +1 -0
- LOGS/Entities/ICustomSchemaRequest.py +7 -0
- LOGS/Entities/IRelatedEntityRequest.py +9 -0
- LOGS/Entities/Instrument.py +16 -2
- LOGS/Entities/InstrumentRelations.py +23 -0
- LOGS/Entities/InstrumentRequestParameter.py +34 -2
- LOGS/Entities/LabNotebookEntry.py +67 -7
- LOGS/Entities/LabNotebookEntryRelations.py +81 -0
- LOGS/Entities/LabNotebookEntryRequestParameter.py +57 -2
- LOGS/Entities/Method.py +24 -2
- LOGS/Entities/MethodRelations.py +35 -0
- LOGS/Entities/MethodRequestParameter.py +35 -1
- LOGS/Entities/Origin.py +2 -2
- LOGS/Entities/OriginRequestParameter.py +3 -3
- LOGS/Entities/Person.py +34 -3
- LOGS/Entities/PersonRelations.py +36 -3
- LOGS/Entities/PersonRequestParameter.py +50 -10
- LOGS/Entities/Project.py +15 -2
- LOGS/Entities/ProjectRelations.py +11 -0
- LOGS/Entities/ProjectRequestParameter.py +44 -3
- LOGS/Entities/Role.py +45 -16
- LOGS/Entities/RoleRelations.py +23 -0
- LOGS/Entities/RoleRequestParameter.py +28 -1
- LOGS/Entities/Sample.py +26 -32
- LOGS/Entities/SampleRelations.py +4 -2
- LOGS/Entities/SampleRequestParameter.py +5 -10
- LOGS/Entities/__init__.py +8 -7
- LOGS/Entity/Entity.py +30 -6
- LOGS/Entity/EntityRelations.py +3 -2
- LOGS/Entity/EntityRequestParameter.py +10 -5
- LOGS/Entity/EntityWithIntId.py +1 -1
- LOGS/Entity/SerializeableContent.py +10 -4
- LOGS/Interfaces/ICreationRecord.py +12 -3
- LOGS/Interfaces/IModificationRecord.py +10 -1
- LOGS/Interfaces/IProjectBased.py +30 -0
- LOGS/Interfaces/IRelatedEntity.py +34 -0
- LOGS/Interfaces/IRelationModel.py +6 -0
- LOGS/Interfaces/IRelationRequest.py +12 -0
- LOGS/Interfaces/ITypedEntity.py +28 -0
- LOGS/Interfaces/IUniqueEntity.py +2 -3
- LOGS/LOGS.py +128 -40
- LOGS/LOGSConnection.py +2 -1
- {logs_py-1.14.dist-info → logs_py-2.1.dist-info}/METADATA +1 -1
- {logs_py-1.14.dist-info → logs_py-2.1.dist-info}/RECORD +84 -70
- LOGS/Entities/AutoloadConfigurationMinimal.py +0 -8
- LOGS/Entities/AutoloadConfigurationRequestParameter.py +0 -17
- LOGS/Entities/AutoloadConfigurations.py +0 -16
- LOGS/Entities/AutoloadSourceMinimal.py +0 -8
- LOGS/Entities/AutoloadSourceRequestParameter.py +0 -13
- LOGS/Entities/AutoloadSources.py +0 -12
- /LOGS/Interfaces/{ICustomFields.py → ICustomField.py} +0 -0
- {logs_py-1.14.dist-info → logs_py-2.1.dist-info}/WHEEL +0 -0
- {logs_py-1.14.dist-info → logs_py-2.1.dist-info}/top_level.txt +0 -0
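Taken together, the file list shows the main 2.x theme: the autoload machinery is renamed, with AutoloadSource* modules becoming Bridge* and AutoloadConfiguration* modules becoming DataSource*, each gaining Minimal, Relations, RequestParameter and iterator companions. A minimal import-migration sketch follows; the module paths are taken from the list above, but the class names Bridge and DataSource are assumed to mirror the file names and are not verified against the released wheel.

# Hypothetical 1.x -> 2.x import migration (class names assumed from the renamed file names)
# 1.x (modules removed in 2.x):
#   from LOGS.Entities.AutoloadSource import AutoloadSource
#   from LOGS.Entities.AutoloadConfiguration import AutoloadConfiguration
# 2.x (renamed modules from the file list above):
from LOGS.Entities.Bridge import Bridge                                  # was AutoloadSource.py
from LOGS.Entities.BridgeMinimal import BridgeMinimal                    # new module
from LOGS.Entities.DataSource import DataSource                          # was AutoloadConfiguration.py
from LOGS.Entities.DataSourceRequestParameter import DataSourceRequestParameter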
LOGS/Entities/Dataset.py
CHANGED
@@ -2,6 +2,8 @@ import os
 from datetime import datetime
 from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Union, cast
 
+from numpy import deprecate
+
 from LOGS.Auxiliary.Constants import Constants
 from LOGS.Auxiliary.Decorators import Endpoint, UiEndpoint
 from LOGS.Auxiliary.Exceptions import (
@@ -10,7 +12,7 @@ from LOGS.Auxiliary.Exceptions import (
     LOGSException,
 )
 from LOGS.Auxiliary.MinimalModelGenerator import (
-
+    BridgeMinimalFromDict,
     DatasetTypeMinimalFromDict,
     ExperimentMinimalFromDict,
     FormatMinimalFromDict,
@@ -30,15 +32,19 @@ from LOGS.Entities.FileEntry import FileEntry
 from LOGS.Entities.HierarchyNode import HierarchyNode
 from LOGS.Entities.ParserLog import ParserLog
 from LOGS.Entities.Track import Track
-from LOGS.Entity.EntityWithIntId import
+from LOGS.Entity.EntityWithIntId import IEntityWithIntId
 from LOGS.Entity.SerializeableContent import SerializeableContent
 from LOGS.Interfaces.INamedEntity import INamedEntity
 from LOGS.Interfaces.IOwnedEntity import IOwnedEntity
+from LOGS.Interfaces.IProjectBased import IProjectBased
+from LOGS.Interfaces.IRelatedEntity import IRelatedEntity
+from LOGS.Interfaces.ISoftDeletable import ISoftDeletable
+from LOGS.Interfaces.ITypedEntity import ITypedEntity
 from LOGS.Interfaces.IUniqueEntity import IUniqueEntity
 from LOGS.LOGSConnection import LOGSConnection, ResponseTypes
 
 if TYPE_CHECKING:
-    from LOGS.Entities.
+    from LOGS.Entities.BridgeMinimal import BridgeMinimal
     from LOGS.Entities.EquipmentMinimal import EquipmentMinimal
     from LOGS.Entities.ExperimentMinimal import ExperimentMinimal
     from LOGS.Entities.FormatMinimal import FormatMinimal
@@ -60,33 +66,39 @@ class ParsedMetadata(SerializeableContent):
 @UiEndpoint("#data")
 class Dataset(
     INamedEntity,
-
+    IProjectBased,
     IOwnedEntity,
-
+    IEntityWithIntId,
+    IRelatedEntity[DatasetRelations],
+    ITypedEntity,
+    ISoftDeletable,
+    IUniqueEntity,
 ):
     _noInfo = True
     _noParameters = True
+    _relationType = type(DatasetRelations)
 
-
+    _legacyId: Optional[str]
+    _type: Optional[DatasetTypeMinimal]
     _format: Optional["FormatMinimal"]
     _acquisitionDate: Optional[datetime]
     _path: Optional[str]
+    _sourceBaseDirectory: Optional[str]
+    _sourceRelativeDirectory: Optional[str]
     _method: Optional["MethodMinimal"]
     _experiment: Optional["ExperimentMinimal"]
     _claimed: Optional[bool]
     _notes: Optional[str]
     _dateAdded: Optional[datetime]
-
+    _isViewableEntity: Optional[bool]
     _other: Optional[str]
     _instrument: Optional["InstrumentMinimal"]
     _sample: Optional["SampleMinimal"]
-
+    _bridge: Optional["BridgeMinimal"]
     _operators: Optional[List["PersonMinimal"]]
     _equipments: Optional[List["EquipmentMinimal"]]
     _projects: Optional[List["ProjectMinimal"]]
-    _customFields: Optional[Dict[str, Any]]
     _parsingState: Optional[ParsingStates]
-    _relations: Optional[DatasetRelations]
     _parsedMetadata: Optional[ParsedMetadata]
     _parameters: Optional[Dict[str, Any]]
     _formatVersion: Optional[int]
@@ -95,7 +107,6 @@ class Dataset(
     _datatracks: Optional[List[Datatrack]]
     _tracksHierarchy: Optional[HierarchyNode]
     _files: Optional[List[FileEntry]]
-    _formatId: Optional[str]
     _parameterHelper: Optional[ParameterHelper]
 
     def __init__(
@@ -106,7 +117,8 @@ class Dataset(
         files: Optional[Sequence[Constants.FILE_TYPE]] = None,
         format: Optional[Union[str, "FormatMinimal"]] = None,
     ):
-        self.
+        self._legacyId = None
+        self._type = None
         self._format = None
         self._acquisitionDate = None
         self._path = None
@@ -115,17 +127,16 @@ class Dataset(
         self._claimed = None
         self._notes = None
         self._dateAdded = None
+        self._isViewableEntity = None
         self._isDeleted = None
         self._other = None
         self._instrument = None
         self._sample = None
-        self.
+        self._bridge = None
         self._operators = None
         self._equipments = None
         self._projects = None
-        self._customFields = None
         self._parsingState = None
-        self._relations = None
         self._parsedMetadata = None
         self._parameters = None
         self._formatVersion = None
@@ -197,22 +208,6 @@ class Dataset(
 
         super().fromDict(ref=ref, formatDict=formatDict)
 
-    # def _getConnectionData(self):
-    #     if not self._endpoint:
-    #         raise NotImplementedError(
-    #             "Endpoint missing for of entity type %a."
-    #             % (
-    #                 type(self).__name__
-    #                 if type(self).__name__ != Entity.__name__
-    #                 else "unknown"
-    #             )
-    #         )
-
-    #     if not self.id:
-    #         raise EntityNotFoundException(self)
-
-    #     return self._getConnection(), self._endpoint, self.id
-
     def fetchParameters(self):
         connection, endpoint, id = self._getConnectionData()
 
@@ -310,12 +305,12 @@ class Dataset(
         return self._parameterHelper.get(key, removeUnit)
 
     @property
-    def
-        return self.
+    def type(self) -> Optional[DatasetTypeMinimal]:
+        return self._type
 
-    @
-    def
-        self.
+    @type.setter
+    def type(self, value):
+        self._type = DatasetTypeMinimalFromDict(
             value, "datasetType", connection=self.connection
         )
 
@@ -387,14 +382,6 @@ class Dataset(
     def other(self, value):
         self._other = self.checkAndConvertNullable(value, str, "other")
 
-    @property
-    def customFields(self) -> Optional[Dict[str, Any]]:
-        return self._customFields
-
-    @customFields.setter
-    def customFields(self, value):
-        self._customFields = self.checkAndConvertNullable(value, dict, "customFields")
-
     @property
     def parsingState(self) -> Optional[ParsingStates]:
         return self._parsingState
@@ -405,15 +392,6 @@ class Dataset(
             ParsingStates, self.checkAndConvertNullable(value, str, "parsingState")
         )
 
-    @property
-    def relations(self) -> Optional[DatasetRelations]:
-        return self._relations
-
-    @relations.setter
-    def relations(self, value):
-        if value:
-            self._relations = self.checkAndConvert(value, DatasetRelations, "relations")
-
     @property
     def parsedMetadata(self) -> Optional[ParsedMetadata]:
         return self._parsedMetadata
@@ -461,23 +439,23 @@ class Dataset(
         return self._tracksHierarchy
 
     @property
-    def
-        return self.
+    def bridge(self) -> Optional["BridgeMinimal"]:
+        return self._bridge
 
-    @
-    def
-        self.
-            value, "
+    @bridge.setter
+    def bridge(self, value):
+        self._bridge = BridgeMinimalFromDict(
+            value, "bridge", connection=self.connection
         )
 
     @property
-    def
-        return self.
+    def bridgeId(self) -> Optional[int]:
+        return self._bridge.id if self._bridge else None
 
-    @
-    def
-        self.
-            value, "
+    @bridgeId.setter
+    def bridgeId(self, value):
+        self._bridge = BridgeMinimalFromDict(
+            value, "bridge", connection=self.connection
         )
 
     @property
@@ -516,12 +494,6 @@ class Dataset(
     def methodId(self) -> Optional[int]:
         return self._method.id if self._method else None
 
-    @methodId.setter
-    def methodId(self, value):
-        self._method = MethodMinimalFromDict(
-            value, "method", connection=self.connection
-        )
-
     @property
     def experiment(self) -> Optional["ExperimentMinimal"]:
         return self._experiment
@@ -615,3 +587,43 @@ class Dataset(
         self._instrument = InstrumentMinimalFromDict(
             value, "instrument", connection=self.connection
         )
+
+    @property
+    def legacyId(self) -> Optional[str]:
+        return self._legacyId
+
+    @legacyId.setter
+    def legacyId(self, value):
+        self._legacyId = self.checkAndConvertNullable(value, str, "legacyId")
+
+    @property
+    def sourceBaseDirectory(self) -> Optional[str]:
+        return self._sourceBaseDirectory
+
+    @sourceBaseDirectory.setter
+    def sourceBaseDirectory(self, value):
+        self._sourceBaseDirectory = self.checkAndConvertNullable(
+            value, str, "sourceBaseDirectory"
+        )
+
+    @property
+    def sourceRelativeDirectory(self) -> Optional[str]:
+        return self._sourceRelativeDirectory
+
+    @sourceRelativeDirectory.setter
+    def sourceRelativeDirectory(self, value):
+        self._sourceRelativeDirectory = self.checkAndConvertNullable(
+            value, str, "sourceRelativeDirectory"
+        )
+
+    @property
+    @deprecate(new_name="attachment")
+    def isViewableEntity(self) -> Optional[bool]:
+        return self._isViewableEntity
+
+    @isViewableEntity.setter
+    @deprecate(new_name="attachment")
+    def isViewableEntity(self, value):
+        self._isViewableEntity = self.checkAndConvertNullable(
+            value, bool, "isViewableEntity"
+        )
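For orientation, a short sketch of how the reshaped Dataset surface reads after this change. The attribute names are taken directly from the diff above; how a Dataset instance is obtained (e.g. through the LOGS client) is assumed and not shown here.

# Sketch only: attribute names come from the Dataset diff above.
def describe_dataset(dataset):
    print(dataset.type)                      # now a typed DatasetTypeMinimal property
    print(dataset.bridge, dataset.bridgeId)  # replaces the old autoload-source fields
    print(dataset.sourceBaseDirectory, dataset.sourceRelativeDirectory)  # new in 2.x
    print(dataset.legacyId)                  # new in 2.x
    # Removed in 2.x: dataset.customFields and the writable dataset.relations setter;
    # relations are now supplied through IRelatedEntity[DatasetRelations].
    # dataset.isViewableEntity still exists but is marked deprecated (new name: "attachment").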
LOGS/Entities/DatasetCreator.py
CHANGED
@@ -1,6 +1,8 @@
 from enum import Enum
 from typing import TYPE_CHECKING, Any, List, Optional
 
+from numpy import deprecate
+
 from LOGS.Auxiliary.Decorators import Endpoint
 from LOGS.Auxiliary.Exceptions import EntityCreatingException, LOGSException
 from LOGS.Auxiliary.Tools import Tools
@@ -25,50 +27,68 @@ class DatasetUploadRequest(SerializeableClass):
     _typeMapper = {"files": FileEntry}
 
     def __init__(self, ref: Any = None):
-        self.parserId: str = ""
-        self.files: List[FileEntry] = []
-        self.autoloadBaseDir: str = ""
-        self.relativeDir: str = ""
-        self.sourceType: DatasetSourceType = DatasetSourceType.APIUpload
-        self.datasetType: Optional[str] = None
         self.name: Optional[str] = None
-        self.
+        self.formatId: str = ""
         self.methodId: Optional[int] = None
         self.instrumentId: Optional[int] = None
         self.experimentId: Optional[int] = None
         self.sampleId: Optional[int] = None
+        self.ownerId: Optional[int] = None
         self.projectIds: Optional[List[int]] = None
         self.organizationIds: Optional[List[int]] = None
         self.operatorIds: Optional[List[int]] = None
        self.equipmentIds: Optional[List[int]] = None
-        self.
+        self.autoloadBaseDir: str = ""
+        self.relativeDir: str = ""
+        self.dataSourceId: Optional[str] = None
         self.isViewableEntity: Optional[bool] = None
+        self.files: List[FileEntry] = []
+
         if isinstance(ref, Dataset) and ref.format:
             self.name = ref.name
-
-
+
+            if ref.name:
+                self.name = ref.name
+            if ref.format:
+                self.formatId = ref.format.id
             if ref.owner:
                 self.ownerId = ref.owner.id
             if ref._files:
                 self.files = ref._files
             if ref.method:
-                self.methodId = ref.
+                self.methodId = ref.method.id
             if ref.instrument:
-                self.instrumentId = ref.
+                self.instrumentId = ref.instrument.id
             if ref.experiment:
-                self.
-            if ref.
-                self.sampleId = ref.
+                self.experimentId = ref.experiment.id
+            if ref.sampleId:
+                self.sampleId = ref.sampleId
+            if ref.owner:
+                self.ownerId = ref.owner.id
             if ref.projects:
-                self.projectIds = ref.
+                self.projectIds = [p.id for p in ref.projects]
             if ref.operators:
-                self.operatorIds = ref.
+                self.operatorIds = [o.id for o in ref.operators]
             if ref.equipments:
-                self.equipmentIds = ref.
+                self.equipmentIds = [e.id for e in ref.equipments]
+            if ref.isViewableEntity:
+                self.isViewableEntity = ref.isViewableEntity
+            if ref._files:
+                self.files = ref._files
             ref = None
 
         super().__init__(ref)
 
+    @property
+    @deprecate(new_name="formatId")
+    def parserId(self):
+        return self.formatId
+
+    @parserId.setter
+    @deprecate(new_name="formatId")
+    def paserId(self, value):
+        self.formatId = value
+
 
 @Endpoint("datasets")
 class DatasetCreator(EntityConnector):

LOGS/Entities/DatasetMatchTypes.py
CHANGED

@@ -1,6 +1,8 @@
 import uuid
 from typing import Any, List, Optional
 
+from numpy import deprecate
+
 from LOGS.Auxiliary.Tools import Tools
 from LOGS.Entities.FileEntry import FileEntry, FileFragment
 from LOGS.Entity.SerializeableContent import SerializeableClass
@@ -20,12 +22,22 @@ class DatasetForSearch(SerializeableClass):
 
     def __init__(self, ref: Any = None):
         self.id: str = ""
-        self.
+        self.formatId: str = ""
         self.checkUpdatable: bool = True
         self.files: List[FileEntry] = []
         super().__init__(ref)
         self.id = uuid.uuid4().hex
 
+    @property
+    @deprecate(new_name="formatId")
+    def parserId(self):
+        return self.formatId
+
+    @parserId.setter
+    @deprecate(new_name="formatId")
+    def paserId(self, value):
+        self.formatId = value
+
 
 class DatasetsUpdatableFiles(SerializeableClass):
     _typeMapper = {"files": FileEntry}
@@ -70,7 +82,7 @@ class MatchedDataset(SerializeableClass):
 
     def __init__(self, ref: Any = None):
         self.id: str = ""
-        self.
+        self.formatId: str = ""
         self.name: str = ""
         self.parentMissing: bool = False
         self.parentPath: str = ""
@@ -85,18 +97,28 @@ class MatchedDataset(SerializeableClass):
         if len(self.files):
             return "<%s %a %s>" % (
                 type(self).__name__,
-                self.
+                self.formatId,
                 Tools.numberPlural("file", len(self.files)),
             )
         else:
             return "<%s>" % (type(self).__name__)
 
+    @property
+    @deprecate(new_name="formatId")
+    def parserId(self):
+        return self.formatId
+
+    @parserId.setter
+    @deprecate(new_name="formatId")
+    def paserId(self, value):
+        self.formatId = value
+
 
 class DatasetMatch(SerializeableClass):
     _typeMapper = {"datasets": MatchedDataset}
 
     def __init__(self, ref: Any = None):
-        self.
+        self.fromatId: str = ""
         self.parserName: str = ""
         self.datasets: List[MatchedDataset] = []
         super().__init__(ref)
@@ -106,15 +128,25 @@ class DatasetMatch(SerializeableClass):
             yield dataset
 
     def __str__(self):
-        if self.
+        if self.fromatId and len(self.datasets):
             return "<%s %a(%d)>" % (
                 type(self).__name__,
-                self.
+                self.fromatId,
                 len(self.datasets),
             )
         else:
             return "<%s>" % (type(self).__name__)
 
+    @property
+    @deprecate(new_name="formatId")
+    def parserId(self):
+        return self.formatId
+
+    @parserId.setter
+    @deprecate(new_name="formatId")
+    def paserId(self, value):
+        self.formatId = value
+
 
 class MatchResult(SerializeableClass):
     _typeMapper = {"matches": DatasetMatch, "missingFragments": FileFragment}

LOGS/Entities/DatasetRelations.py
CHANGED

@@ -1,18 +1,21 @@
 from typing import TYPE_CHECKING, Optional
 
-from LOGS.Entities.Documents import Documents
 from LOGS.Entity.EntityRelation import EntityRelation
 from LOGS.Entity.EntityRelations import EntityRelations
 
 if TYPE_CHECKING:
     from LOGS.Entities.Document import Document
+    from LOGS.Entities.Documents import Documents
+    from LOGS.Entities.LabNotebookEntry import LabNotebookEntry
+    from LOGS.Entities.Project import Project
 
 
 class DatasetRelations(EntityRelations):
     """Relations of a Dataset with other entities"""
 
     _documents: Optional[EntityRelation["Document"]] = None
-    _labNotebookEntries: Optional[EntityRelation] = None
+    _labNotebookEntries: Optional[EntityRelation["LabNotebookEntry"]] = None
+    _projects: Optional[EntityRelation["Project"]] = None
 
     @property
     def documents(self) -> Optional[EntityRelation["Document"]]:
@@ -31,3 +34,11 @@ class DatasetRelations(EntityRelations):
         self._labNotebookEntries = self.checkAndConvertNullable(
             value, EntityRelation, "labNotebookEntries"
         )
+
+    @property
+    def projects(self) -> Optional[EntityRelation]:
+        return self._projects
+
+    @projects.setter
+    def projects(self, value):
+        self._projects = self.checkAndConvertNullable(value, EntityRelation, "projects")

LOGS/Entities/DatasetRequestParameter.py
CHANGED

@@ -4,9 +4,13 @@ from enum import Enum
 from typing import Any, Dict, List, Literal, Optional, Sequence
 
 from LOGS.Auxiliary.Constants import Constants
+from LOGS.Entities.ICustomSchemaRequest import ICustomSchemaRequest
+from LOGS.Entities.IRelatedEntityRequest import IRelatedEntityRequest
 from LOGS.Entity.EntityRequestParameter import EntityRequestParameter
-from LOGS.Interfaces.
+from LOGS.Interfaces.INamedEntity import INamedEntityRequest
 from LOGS.Interfaces.IOwnedEntity import IOwnedEntityRequest
+from LOGS.Interfaces.ISoftDeletable import ISoftDeletableRequest
+from LOGS.Interfaces.ITypedEntity import ITypedEntityRequest
 from LOGS.Interfaces.IUniqueEntity import IUniqueEntityRequest
 
 ParsingStates = Literal[
@@ -37,16 +41,22 @@ class DatasetOrder(Enum):
     PARSING_STATE_DESC = "PARSING_STATE_DESC"
     PARSERID_ASC = "PARSERID_ASC"
     PARSERID_DESC = "PARSERID_DESC"
-
-
+    FORMAT_ID_ASC = "FORMAT_ID_ASC"
+    FORMAT_ID_DESC = "FORMAT_ID_DESC"
+    TYPE_ASC = "TYPE_ASC"
+    TYPE_DESC = "YPE_DESC"
 
 
 @dataclass
 class DatasetRequestParameter(
-    EntityRequestParameter,
-
+    EntityRequestParameter[DatasetOrder],
+    IRelatedEntityRequest,
+    ITypedEntityRequest,
+    ISoftDeletableRequest,
+    ICustomSchemaRequest,
     IOwnedEntityRequest,
-
+    INamedEntityRequest,
+    IUniqueEntityRequest,
 ):
     includeParameters: Optional[bool] = None
     methodIds: Optional[List[int]] = None
@@ -69,7 +79,8 @@ class DatasetRequestParameter(
     participatedPersonIds: Optional[List[int]] = None
     pathContains: Optional[str] = None
     parsingState: Optional[List[ParsingStates]] = None
-
+    bridgeIds: Optional[List[int]] = None
+    dataSourceIds: Optional[List[int]] = None
     orderBy: Optional[DatasetOrder] = None
     typeIds: Optional[List[str]] = None
     hashes: Optional[List[str]] = None
@@ -80,3 +91,4 @@ class DatasetRequestParameter(
     includeUnclaimed: Optional[Optional[bool]] = None
     files: Optional[Sequence[Constants.FILE_TYPE]] = None
     parameters: Optional[Dict[str, Any]] = None
+    isReferencedByLabNotebook: Optional[Optional[bool]] = None
LOGS/Entities/Datasets.py
CHANGED
@@ -77,7 +77,7 @@ class Datasets(EntityIterator[Dataset, DatasetRequestParameter]):
         for formatId in formatIds:
             dataset = DatasetForSearch()
             dataset.checkUpdatable = checkUpdatable
-            dataset.
+            dataset.formatId = formatId
             dataset.files.extend(fileList)
             request.datasets.append(dataset)
         return request
LOGS/Entities/Document.py
CHANGED
@@ -5,7 +5,7 @@ from LOGS.Auxiliary.Decorators import Endpoint
 from LOGS.Auxiliary.MinimalModelGenerator import MinimalFromList
 from LOGS.Entities.DocumentRelations import DocumentRelations
 from LOGS.Entity.EntityMinimalWithIntId import EntityMinimalWithIntId
-from LOGS.Entity.EntityWithIntId import
+from LOGS.Entity.EntityWithIntId import IEntityWithIntId
 from LOGS.Entity.SerializeableContent import SerializeableContent
 from LOGS.Interfaces.IOwnedEntity import IOwnedEntity
 from LOGS.LOGSConnection import LOGSConnection
@@ -57,7 +57,7 @@ class DocumentFile(SerializeableContent):
 
 
 @Endpoint("documents")
-class Document(
+class Document(IEntityWithIntId, IOwnedEntity):
     _name: Optional[str]
     _creationDate: Optional[datetime]
     _modificationDate: Optional[datetime]
LOGS/Entities/Experiment.py
CHANGED
@@ -2,25 +2,30 @@ from typing import Optional
 
 from LOGS.Auxiliary.Decorators import Endpoint
 from LOGS.Auxiliary.MinimalModelGenerator import MethodMinimalFromDict
+from LOGS.Entities.ExperimentRelations import ExperimentRelations
 from LOGS.Entities.MethodMinimal import MethodMinimal
-from LOGS.Entity.EntityWithIntId import
+from LOGS.Entity.EntityWithIntId import IEntityWithIntId
 from LOGS.Interfaces.ICreationRecord import ICreationRecord
 from LOGS.Interfaces.IModificationRecord import IModificationRecord
 from LOGS.Interfaces.INamedEntity import INamedEntity
 from LOGS.Interfaces.IOwnedEntity import IOwnedEntity
+from LOGS.Interfaces.IRelatedEntity import IRelatedEntity
 from LOGS.Interfaces.IUniqueEntity import IUniqueEntity
 from LOGS.LOGSConnection import LOGSConnection
 
 
 @Endpoint("experiments")
 class Experiment(
+    IEntityWithIntId,
     INamedEntity,
-    EntityWithIntId,
     IUniqueEntity,
-    IOwnedEntity,
-    IModificationRecord,
     ICreationRecord,
+    IModificationRecord,
+    IOwnedEntity,
+    IRelatedEntity[ExperimentRelations],
 ):
+    _relationType = type(ExperimentRelations)
+
     _method: Optional[MethodMinimal]
     _notes: Optional[str]
 
@@ -34,8 +39,6 @@ class Experiment(
 
         self._method = None
         self._notes = None
-        self._date_added = None
-        self._date_last_modified = None
 
         super().__init__(ref=ref, id=id, connection=connection)
 
@@ -64,21 +67,3 @@ class Experiment(
     @notes.setter
     def notes(self, value):
         self._notes = self.checkAndConvertNullable(value, str, "notes")
-
-    @property
-    def date_added(self) -> Optional[str]:
-        return self._date_added
-
-    @date_added.setter
-    def date_added(self, value):
-        self._date_added = self.checkAndConvertNullable(value, str, "date_added")
-
-    @property
-    def date_last_modified(self) -> Optional[str]:
-        return self._date_last_modified
-
-    @date_last_modified.setter
-    def date_last_modified(self, value):
-        self._date_last_modified = self.checkAndConvertNullable(
-            value, str, "date_last_modified"
-        )
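The Experiment change follows the same pattern as Dataset: the hand-rolled date_added/date_last_modified string properties are removed, with creation and modification metadata expected to come from the ICreationRecord/IModificationRecord mixins and related entities from IRelatedEntity[ExperimentRelations]. A migration sketch; the exact attribute names exposed by the record mixins are not visible in this diff, so they are referenced only generically.

# Sketch: 1.x code relying on the removed properties needs to move off them.
def migrate_experiment_access(experiment):
    # 1.x: experiment.date_added / experiment.date_last_modified (plain strings)
    # 2.x: removed; use the creation/modification fields provided by the
    #      ICreationRecord / IModificationRecord mixins instead (names not shown here).
    print(experiment.name, experiment.notes)  # unchanged properties per the diff above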