logs-py 4.0.7 (logs_py-4.0.7-py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- LOGS/Auxiliary/CheckClassName.py +1075 -0
- LOGS/Auxiliary/Constants.py +99 -0
- LOGS/Auxiliary/CustomEntityClassGenerator.py +254 -0
- LOGS/Auxiliary/CustomFieldClassGenerator.py +115 -0
- LOGS/Auxiliary/CustomFieldValueTypeChecker.py +168 -0
- LOGS/Auxiliary/CustomSectionClassGenerator.py +113 -0
- LOGS/Auxiliary/CustomTypeClassGenerator.py +147 -0
- LOGS/Auxiliary/DateTimeConverter.py +66 -0
- LOGS/Auxiliary/Decorators.py +109 -0
- LOGS/Auxiliary/Exceptions.py +341 -0
- LOGS/Auxiliary/LOGSErrorResponse.py +89 -0
- LOGS/Auxiliary/MinimalModelGenerator.py +236 -0
- LOGS/Auxiliary/ParameterHelper.py +56 -0
- LOGS/Auxiliary/ReplaceMessage.py +13 -0
- LOGS/Auxiliary/Tools.py +432 -0
- LOGS/Auxiliary/__init__.py +15 -0
- LOGS/Converter/Conversion.py +248 -0
- LOGS/Converter/Converter.py +96 -0
- LOGS/Converter/ConverterParameter.py +88 -0
- LOGS/Converter/DateTimeRange.py +58 -0
- LOGS/Converter/ExportParameters.py +89 -0
- LOGS/Converter/__init__.py +13 -0
- LOGS/Entities/Attachment.py +84 -0
- LOGS/Entities/AttachmentMinimal.py +8 -0
- LOGS/Entities/AttachmentRequestParameter.py +42 -0
- LOGS/Entities/Attachments.py +53 -0
- LOGS/Entities/AutoloadFileInfo.py +12 -0
- LOGS/Entities/AutoloadStatusError.py +7 -0
- LOGS/Entities/AxisNaming.py +33 -0
- LOGS/Entities/AxisZoom.py +33 -0
- LOGS/Entities/Bridge.py +165 -0
- LOGS/Entities/BridgeClientInfo.py +93 -0
- LOGS/Entities/BridgeMinimal.py +8 -0
- LOGS/Entities/BridgeRequestParameter.py +49 -0
- LOGS/Entities/BridgeType.py +7 -0
- LOGS/Entities/Bridges.py +12 -0
- LOGS/Entities/CustomField.py +243 -0
- LOGS/Entities/CustomFieldMinimal.py +8 -0
- LOGS/Entities/CustomFieldModels.py +111 -0
- LOGS/Entities/CustomFieldRequestParameter.py +69 -0
- LOGS/Entities/CustomFieldSearchQuery.py +40 -0
- LOGS/Entities/CustomFields.py +12 -0
- LOGS/Entities/CustomType.py +212 -0
- LOGS/Entities/CustomTypeMinimal.py +8 -0
- LOGS/Entities/CustomTypeRequestParameter.py +60 -0
- LOGS/Entities/CustomTypeSection.py +63 -0
- LOGS/Entities/CustomTypes.py +12 -0
- LOGS/Entities/DataFormat.py +97 -0
- LOGS/Entities/DataFormatInstrument.py +18 -0
- LOGS/Entities/DataFormatInstrumentMinimal.py +8 -0
- LOGS/Entities/DataFormatInstrumentRequestParameter.py +17 -0
- LOGS/Entities/DataFormatInstruments.py +16 -0
- LOGS/Entities/DataFormatMinimal.py +18 -0
- LOGS/Entities/DataFormatRequestParameter.py +21 -0
- LOGS/Entities/DataFormats.py +12 -0
- LOGS/Entities/DataSource.py +218 -0
- LOGS/Entities/DataSourceConnectionStatus.py +12 -0
- LOGS/Entities/DataSourceMinimal.py +8 -0
- LOGS/Entities/DataSourceRequestParameter.py +57 -0
- LOGS/Entities/DataSourceStatus.py +108 -0
- LOGS/Entities/DataSourceStatusIterator.py +16 -0
- LOGS/Entities/DataSourceStatusRequestParameter.py +31 -0
- LOGS/Entities/DataSources.py +12 -0
- LOGS/Entities/Dataset.py +439 -0
- LOGS/Entities/DatasetBase.py +196 -0
- LOGS/Entities/DatasetCreator.py +148 -0
- LOGS/Entities/DatasetInfo.py +147 -0
- LOGS/Entities/DatasetMatchTypes.py +157 -0
- LOGS/Entities/DatasetMatching.py +196 -0
- LOGS/Entities/DatasetMinimal.py +8 -0
- LOGS/Entities/DatasetModels.py +33 -0
- LOGS/Entities/DatasetRequestParameter.py +92 -0
- LOGS/Entities/DatasetTemplate.py +23 -0
- LOGS/Entities/DatasetUploadParameter.py +14 -0
- LOGS/Entities/Datasets.py +142 -0
- LOGS/Entities/Datatrack.py +179 -0
- LOGS/Entities/DatatrackFormattedTable.py +25 -0
- LOGS/Entities/DatatrackGeneric.py +34 -0
- LOGS/Entities/DatatrackImage.py +25 -0
- LOGS/Entities/DatatrackNumericArray.py +30 -0
- LOGS/Entities/DatatrackNumericMatrix.py +98 -0
- LOGS/Entities/Entities.py +71 -0
- LOGS/Entities/EntitiesRequestParameter.py +18 -0
- LOGS/Entities/EntityOriginWriteModelWithId.py +15 -0
- LOGS/Entities/FileEntry.py +138 -0
- LOGS/Entities/FileExcludePattern.py +8 -0
- LOGS/Entities/FormatMetaData.py +56 -0
- LOGS/Entities/FormattedTable/DatatypeFormattedTable.py +135 -0
- LOGS/Entities/FormattedTable/DatatypeFormattedTableCell.py +108 -0
- LOGS/Entities/FormattedTable/DatatypeFormattedTableSettings.py +11 -0
- LOGS/Entities/FormattedTable/__init__.py +9 -0
- LOGS/Entities/HierarchyLeaf.py +15 -0
- LOGS/Entities/HierarchyNode.py +40 -0
- LOGS/Entities/ILiteraryTypedEntity.py +19 -0
- LOGS/Entities/InventoryItem.py +102 -0
- LOGS/Entities/InventoryItemMinimal.py +25 -0
- LOGS/Entities/InventoryItemRequestParameter.py +58 -0
- LOGS/Entities/InventoryItems.py +12 -0
- LOGS/Entities/LabNotebook.py +33 -0
- LOGS/Entities/LabNotebookEntries.py +16 -0
- LOGS/Entities/LabNotebookEntry.py +106 -0
- LOGS/Entities/LabNotebookEntryContent/BasicAttribute.py +15 -0
- LOGS/Entities/LabNotebookEntryContent/EntityAttribute.py +85 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentBlockquote.py +13 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentBulletList.py +17 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentCallout.py +40 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentContentPlaceholderNode.py +31 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentConverter.py +207 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentDocument.py +8 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentEntity.py +13 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentEntityMention.py +31 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentHeading.py +33 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentHorizontalRule.py +12 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentItem.py +37 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentListItem.py +49 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentOrderedList.py +31 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentParagraph.py +13 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentTable.py +17 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentTableCell.py +40 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentTableRow.py +8 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentTaskList.py +17 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentTaskListItem.py +31 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentText.py +33 -0
- LOGS/Entities/LabNotebookEntryContent/IEntryContentWithAttribute.py +23 -0
- LOGS/Entities/LabNotebookEntryContent/IEntryContentWithContent.py +38 -0
- LOGS/Entities/LabNotebookEntryContent/IEntryContentWithTextAttribute.py +16 -0
- LOGS/Entities/LabNotebookEntryContent/TextAttribute.py +46 -0
- LOGS/Entities/LabNotebookEntryContent/TextMarkAtributes.py +64 -0
- LOGS/Entities/LabNotebookEntryContent/TextMarkConverter.py +45 -0
- LOGS/Entities/LabNotebookEntryContent/TextMarks.py +71 -0
- LOGS/Entities/LabNotebookEntryContent/__init__.py +34 -0
- LOGS/Entities/LabNotebookEntryMinimal.py +8 -0
- LOGS/Entities/LabNotebookEntryRequestParameter.py +59 -0
- LOGS/Entities/LabNotebookExperiment.py +58 -0
- LOGS/Entities/LabNotebookExperimentMinimal.py +8 -0
- LOGS/Entities/LabNotebookExperimentRequestParameter.py +52 -0
- LOGS/Entities/LabNotebookExperiments.py +16 -0
- LOGS/Entities/LabNotebookMinimal.py +8 -0
- LOGS/Entities/LabNotebookModels.py +14 -0
- LOGS/Entities/LabNotebookRequestParameter.py +42 -0
- LOGS/Entities/LabNotebookTemplate.py +42 -0
- LOGS/Entities/LabNotebookTemplateMinimal.py +8 -0
- LOGS/Entities/LabNotebookTemplateRequestParameter.py +38 -0
- LOGS/Entities/LabNotebookTemplates.py +16 -0
- LOGS/Entities/LabNotebooks.py +12 -0
- LOGS/Entities/Method.py +66 -0
- LOGS/Entities/MethodMinimal.py +8 -0
- LOGS/Entities/MethodRequestParameter.py +16 -0
- LOGS/Entities/Methods.py +12 -0
- LOGS/Entities/Origin.py +53 -0
- LOGS/Entities/OriginMinimal.py +8 -0
- LOGS/Entities/OriginRequestParameter.py +28 -0
- LOGS/Entities/Origins.py +12 -0
- LOGS/Entities/ParserLog.py +49 -0
- LOGS/Entities/Permission.py +9 -0
- LOGS/Entities/Person.py +145 -0
- LOGS/Entities/PersonCategory.py +12 -0
- LOGS/Entities/PersonMinimal.py +8 -0
- LOGS/Entities/PersonRequestParameter.py +58 -0
- LOGS/Entities/Persons.py +12 -0
- LOGS/Entities/Project.py +52 -0
- LOGS/Entities/ProjectMinimal.py +8 -0
- LOGS/Entities/ProjectPersonPermission.py +102 -0
- LOGS/Entities/ProjectRequestParameter.py +58 -0
- LOGS/Entities/Projects.py +12 -0
- LOGS/Entities/Role.py +94 -0
- LOGS/Entities/RoleMinimal.py +8 -0
- LOGS/Entities/RoleRequestParameter.py +40 -0
- LOGS/Entities/Roles.py +12 -0
- LOGS/Entities/RunState.py +9 -0
- LOGS/Entities/Sample.py +53 -0
- LOGS/Entities/SampleMinimal.py +8 -0
- LOGS/Entities/SampleRequestParameter.py +54 -0
- LOGS/Entities/Samples.py +12 -0
- LOGS/Entities/SharedContent.py +87 -0
- LOGS/Entities/SharedContentMinimal.py +8 -0
- LOGS/Entities/SharedContentRequestParameter.py +38 -0
- LOGS/Entities/SharedContents.py +12 -0
- LOGS/Entities/Signature.py +60 -0
- LOGS/Entities/Track.py +93 -0
- LOGS/Entities/TrackData.py +20 -0
- LOGS/Entities/TrackImage.py +21 -0
- LOGS/Entities/TrackImageData.py +20 -0
- LOGS/Entities/TrackMatrix.py +28 -0
- LOGS/Entities/TrackMatrixData.py +22 -0
- LOGS/Entities/TrackSettings.py +55 -0
- LOGS/Entities/TrackTable.py +21 -0
- LOGS/Entities/TrackTableData.py +22 -0
- LOGS/Entities/TrackXY.py +40 -0
- LOGS/Entities/TrackXYComplex.py +51 -0
- LOGS/Entities/TrackXYComplexData.py +50 -0
- LOGS/Entities/TrackXYData.py +31 -0
- LOGS/Entities/Vendor.py +40 -0
- LOGS/Entities/VendorMinimal.py +8 -0
- LOGS/Entities/VendorRequestParameter.py +17 -0
- LOGS/Entities/Vendors.py +12 -0
- LOGS/Entities/__init__.py +118 -0
- LOGS/Entity/ConnectedEntity.py +170 -0
- LOGS/Entity/Entity.py +203 -0
- LOGS/Entity/EntityConnector.py +70 -0
- LOGS/Entity/EntityIterator.py +263 -0
- LOGS/Entity/EntityMinimal.py +141 -0
- LOGS/Entity/EntityMinimalWithIntId.py +36 -0
- LOGS/Entity/EntityMinimalWithStrId.py +36 -0
- LOGS/Entity/EntityMinimalWithType.py +47 -0
- LOGS/Entity/EntityRequestParameter.py +104 -0
- LOGS/Entity/EntitySortBy.py +69 -0
- LOGS/Entity/EntityWithIntId.py +26 -0
- LOGS/Entity/EntityWithStrId.py +26 -0
- LOGS/Entity/IGenericEntityOrderBy.py +55 -0
- LOGS/Entity/IdIterator.py +207 -0
- LOGS/Entity/SerializableContent.py +834 -0
- LOGS/Entity/__init__.py +23 -0
- LOGS/Interfaces/ICustomFieldValue.py +92 -0
- LOGS/Interfaces/ICustomSectionValue.py +161 -0
- LOGS/Interfaces/ICustomTypeValue.py +152 -0
- LOGS/Interfaces/ICustomValue.py +28 -0
- LOGS/Interfaces/IEntityInterface.py +7 -0
- LOGS/Interfaces/IEntryRecord.py +57 -0
- LOGS/Interfaces/IHierarchicalEntity.py +41 -0
- LOGS/Interfaces/IHierarchyType.py +63 -0
- LOGS/Interfaces/ILockableEntity.py +52 -0
- LOGS/Interfaces/IModificationRecord.py +56 -0
- LOGS/Interfaces/INamedEntity.py +25 -0
- LOGS/Interfaces/IOwnedEntity.py +27 -0
- LOGS/Interfaces/IPaginationRequest.py +11 -0
- LOGS/Interfaces/IPermissionedEntity.py +72 -0
- LOGS/Interfaces/IProjectBased.py +27 -0
- LOGS/Interfaces/ISessionedEntity.py +59 -0
- LOGS/Interfaces/ISignableEntity.py +49 -0
- LOGS/Interfaces/ISoftDeletable.py +28 -0
- LOGS/Interfaces/ITypedEntity.py +129 -0
- LOGS/Interfaces/IUniqueEntity.py +61 -0
- LOGS/Interfaces/IVersionedEntity.py +39 -0
- LOGS/Interfaces/__init__.py +7 -0
- LOGS/LOGS.py +1436 -0
- LOGS/LOGSConnection.py +647 -0
- LOGS/LOGSOptions.py +11 -0
- LOGS/Parameters/Color.py +92 -0
- LOGS/Parameters/ParameterBase.py +55 -0
- LOGS/Parameters/ParameterConverter.py +24 -0
- LOGS/Parameters/ParameterElement.py +99 -0
- LOGS/Parameters/ParameterList.py +52 -0
- LOGS/Parameters/ParameterTable.py +64 -0
- LOGS/Parameters/__init__.py +13 -0
- LOGS/ServerMetaData.py +120 -0
- LOGS/__init__.py +12 -0
- logs_py-4.0.7.dist-info/METADATA +51 -0
- logs_py-4.0.7.dist-info/RECORD +251 -0
- logs_py-4.0.7.dist-info/WHEEL +5 -0
- logs_py-4.0.7.dist-info/top_level.txt +1 -0
@@ -0,0 +1,196 @@
+import os
+from typing import Any, Dict, List, Optional, cast
+
+from LOGS.Auxiliary.Exceptions import (
+    EntityFetchingException,
+    EntityIncompleteException,
+    LOGSException,
+)
+from LOGS.Auxiliary.Tools import Tools
+from LOGS.Entities.DataFormatMinimal import DataFormatMinimal
+from LOGS.Entities.DatasetInfo import DatasetInfo
+from LOGS.Entities.DatasetRequestParameter import ParsingStates
+from LOGS.Entities.Datatrack import Datatrack
+from LOGS.Entities.FileEntry import FileEntry
+from LOGS.Entities.HierarchyNode import HierarchyNode
+from LOGS.Entities.ParserLog import ParserLog
+from LOGS.Entities.Track import Track
+from LOGS.Entity.EntityWithIntId import IEntityWithIntId
+from LOGS.Interfaces.INamedEntity import INamedEntity
+from LOGS.LOGSConnection import ResponseTypes
+
+
+class DatasetBase(
+    IEntityWithIntId,
+    INamedEntity,
+):
+    # private attributes
+    _files: Optional[List[FileEntry]] = None
+
+    # state of additionally fetched data
+    _noInfo = True
+
+    # fields
+    _path: Optional[str] = None
+
+    # special fetched fields
+    ## Getter and setter implemented in inherited classes
+    _formatVersion: Optional[int] = None
+    _tracks: Optional[List[Track]] = None
+    _datatracks: Optional[List[Datatrack]] = None
+    _tracksHierarchy: Optional[HierarchyNode] = None
+
+    ## Getter and setter implemented in this class
+    _parsingState: Optional[ParsingStates] = None
+    _parserLogs: Optional[List[ParserLog]] = None
+    _zipSize: Optional[int] = None
+    _format: Optional["DataFormatMinimal"] = None
+
+    def fetchZipSize(self):
+        connection, endpoint, id = self._getConnectionData()
+
+        zip, responseError = connection.getEndpoint(
+            endpoint + ["zip_size"], parameters={"ids": [self.id]}
+        )
+        if responseError:
+            raise EntityFetchingException(entity=self, responseError=responseError)
+
+        if isinstance(zip, dict) and "size" in zip:
+            self._zipSize = zip["size"]
+
+    def _getDataDir(self):
+        if self.cacheDir:
+            if not os.path.isdir(self.cacheDir):
+                raise LOGSException(
+                    f"Specified cache directory '{self.cacheDir}' cannot be opened or is not a directory."
+                )
+            return self.cacheDir
+        return None
+
+    def fetchInfo(self):
+        connection, endpoint, id = self._getConnectionData()
+
+        data, responseError = connection.getEndpoint(endpoint + [id, "info"])
+        if responseError:
+            raise EntityFetchingException(entity=self, responseError=responseError)
+
+        dataDir = self._getDataDir()
+        if dataDir and not os.path.exists(dataDir):
+            os.mkdir(dataDir)
+
+        self._setInfo(cast(dict, data))
+        self._noInfo = False
+        if self._datatracks:
+            for datatrack in self._datatracks:
+                datatrack._endpoint = (
+                    endpoint + [str(id), "datatrack"] if endpoint else None
+                )
+
+    def _setInfo(self, data: dict):
+        info = DatasetInfo(data)
+        self._formatVersion = info.formatVersion
+        self._parserLogs = info.parserLogs
+        self._tracks = info.tracks
+        self._datatracks = info.datatracks
+        self._tracksHierarchy = info.tracksHierarchy
+        self._parsingState = info.parsingState
+
+        dataDir = self._getDataDir()
+
+        trackLookup: Dict[str, Datatrack] = {}
+        if self._datatracks:
+            for datatrack in self._datatracks:
+                datatrack._setConnection(self._getConnection())
+                datatrack.cacheDir = dataDir
+                if datatrack.id:
+                    trackLookup[datatrack.id] = datatrack
+
+        if self._tracks:
+            for track in self._tracks:
+                track._setConnection(self._getConnection())
+                track.cacheDir = dataDir
+                if track._dataIds:
+                    track.datatracks = cast(
+                        Any,
+                        {
+                            k: (trackLookup[v] if v in trackLookup else None)
+                            for k, v in track._dataIds.items()
+                        },
+                    )
+
+    def download(
+        self,
+        directory: Optional[str] = None,
+        fileName: Optional[str] = None,
+        overwrite=False,
+    ):
+        connection, endpoint, id = self._getConnectionData()
+
+        if not directory:
+            directory = os.curdir
+
+        if not fileName:
+            fileName = self.name if self.name and self.name != "" else "Dataset"
+            fileName += ".zip"
+
+        path = os.path.join(directory, Tools.sanitizeFileName(fileName=fileName))
+
+        if overwrite:
+            if os.path.exists(path) and not os.path.isfile(path):
+                raise LOGSException("Path %a is not a file" % path)
+        else:
+            if os.path.exists(path):
+                raise LOGSException("File %a already exists" % path)
+
+        data, responseError = connection.getEndpoint(
+            endpoint + [id, "files", "zip"], responseType=ResponseTypes.RAW
+        )
+        if responseError:
+            raise EntityFetchingException(entity=self, responseError=responseError)
+
+        with open(path, mode="wb") as localFile:
+            localFile.write(cast(bytes, data))
+
+        return path
+
+    @property
+    def format(self) -> Optional["DataFormatMinimal"]:
+        return self._format
+
+    @property
+    def path(self) -> Optional[str]:
+        return self._path
+
+    @path.setter
+    def path(self, value):
+        self._path = self.checkAndConvertNullable(value, str, "path")
+
+    @property
+    def parserLogs(self) -> Optional[List[ParserLog]]:
+        if self._noInfo:
+            raise EntityIncompleteException(
+                self,
+                parameterName="parserLogs",
+                functionName=f"{self.fetchInfo.__name__}()",
+            )
+        return self._parserLogs
+
+    @property
+    def parsingState(self) -> Optional[ParsingStates]:
+        return self._parsingState
+
+    @parsingState.setter
+    def parsingState(self, value):
+        self._parsingState = cast(
+            ParsingStates, self.checkAndConvertNullable(value, str, "parsingState")
+        )
+
+    @property
+    def zipSize(self) -> Optional[int]:
+        if self._zipSize is None:
+            raise EntityIncompleteException(
+                self,
+                parameterName="zipSize",
+                functionName=f"{self.fetchZipSize.__name__}()",
+            )
+        return self._zipSize
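Judging by the class name and the +196 line count in the file list above, this hunk is LOGS/Entities/DatasetBase.py. As orientation, the lazy-fetch pattern it adds is typically reached roughly as sketched below; the client entry point (LOGS, logs.dataset) is an assumption inferred from LOGS/LOGS.py in this wheel, and the URL, API key, and dataset id are placeholders, so treat this as a sketch rather than verified documentation.

# Sketch only: entry points are assumed, URL/key/id are placeholders.
from LOGS import LOGS

logs = LOGS("https://logs.example.com/api", "MY_API_KEY")  # hypothetical constructor arguments
dataset = logs.dataset(42)                                 # hypothetical single-entity accessor

dataset.fetchInfo()        # fills tracks/datatracks/parserLogs from the "<id>/info" endpoint
print(dataset.parserLogs)  # raises EntityIncompleteException unless fetchInfo() ran first

dataset.fetchZipSize()     # queries the "zip_size" endpoint for this id
print(dataset.zipSize)     # raises EntityIncompleteException unless fetchZipSize() ran first

path = dataset.download(directory=".", overwrite=True)    # writes "<name>.zip" and returns the path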
@@ -0,0 +1,148 @@
+from typing import Any, Dict, List, Optional, Union
+
+from LOGS.Auxiliary.Decorators import Endpoint
+from LOGS.Auxiliary.Exceptions import EntityCreatingException, LOGSException
+from LOGS.Auxiliary.Tools import Tools
+from LOGS.Entities.Attachment import Attachment
+from LOGS.Entities.Dataset import Dataset
+from LOGS.Entities.DatasetModels import ViewableEntityTypes
+from LOGS.Entities.FileEntry import FileEntry
+from LOGS.Entity.ConnectedEntity import ConnectedEntity
+from LOGS.Entity.EntityConnector import EntityConnector
+from LOGS.LOGSConnection import LOGSConnection, MultipartEntry
+
+
+class DatasetUploadRequest(ConnectedEntity):
+    _typeMapper = {"files": FileEntry}
+    _dataset: Optional[Union[Dataset, Attachment]] = None
+
+    _viewableEntityTypes: Optional[List[ViewableEntityTypes]] = None
+    _isViewableEntity: Optional[bool] = None
+    _files: Optional[List[FileEntry]] = None
+    _filePathsAreAbsolute: Optional[bool] = True
+
+    def __init__(
+        self,
+        ref=None,
+        dataset: Optional[Union[Dataset, Attachment]] = None,
+        connection: Optional[LOGSConnection] = None,
+    ):
+        super().__init__(ref=ref, connection=connection)
+
+        if not dataset:
+            return
+        self._dataset = dataset
+
+        if not isinstance(dataset, (Dataset, Attachment)):
+            raise ValueError(
+                f"Dataset parameter must be of type Dataset or Attachment in the {type(self).__name__} constructor."
+            )
+        self._files = dataset._files
+        self._format = dataset._format
+        if isinstance(dataset, Attachment):
+            self._isViewableEntity = True
+            self._viewableEntityTypes = dataset._viewableEntityTypes
+
+    def toDict(self) -> Dict[str, Any]:
+        result = self._dataset.toDict() if self._dataset else {}
+        result.update(super().toDict())
+        return result
+
+    @property
+    def files(self) -> Optional[List[FileEntry]]:
+        return self._files
+
+    @files.setter
+    def files(self, value):
+        self._files = self.checkListAndConvertNullable(value, FileEntry, "files")
+
+    @property
+    def filePathsAreAbsolute(self) -> Optional[bool]:
+        return self._filePathsAreAbsolute
+
+    @filePathsAreAbsolute.setter
+    def filePathsAreAbsolute(self, value):
+        self._filePathsAreAbsolute = self.checkAndConvertNullable(
+            value, bool, "filePathsAreAbsolute"
+        )
+
+    @property
+    def isViewableEntity(self) -> Optional[bool]:
+        return self._isViewableEntity
+
+    @isViewableEntity.setter
+    def isViewableEntity(self, value):
+        self._isViewableEntity = self.checkAndConvertNullable(
+            value, bool, "isViewableEntity"
+        )
+
+    @property
+    def viewableEntityTypes(self) -> Optional[List[ViewableEntityTypes]]:
+        return self._viewableEntityTypes
+
+    @viewableEntityTypes.setter
+    def viewableEntityTypes(self, value):
+        self._viewableEntityTypes = self.checkListAndConvertNullable(
+            value, ViewableEntityTypes, "viewableEntityTypes"
+        )
+
+
+@Endpoint("datasets")
+class DatasetCreator(EntityConnector):
+    _request: DatasetUploadRequest = DatasetUploadRequest()
+    _formatId: str
+    _files: List[FileEntry]
+
+    def __init__(self, connection: LOGSConnection, dataset: Union[Dataset, Attachment]):
+        self._connection = connection
+
+        if not dataset:
+            raise LOGSException("Cannot not create empty dataset")
+        if not dataset._files:
+            raise LOGSException("Cannot not create dataset without files")
+        if not dataset.format or not dataset.format.id:
+            raise LOGSException("Cannot not create dataset without a format field")
+
+        self._formatId = dataset.format.id
+        self._files = dataset._files
+        dataset._setConnection(self._connection)
+        self._request = self._getDatasetUploadRequest(dataset=dataset)
+
+    def create(self):
+        connection, endpoint = self._getConnectionData()
+
+        multipart = [
+            MultipartEntry(
+                name="Dataset", fileName=None, content=self._request.toDict()
+            )
+        ]
+        multipart.extend(
+            [
+                MultipartEntry(name="files", fileName=file.id, content=file)
+                for file in self._files
+            ]
+        )
+
+        data, responseError = connection.postMultipartEndpoint(
+            endpoint=endpoint + ["create"], data=multipart
+        )
+        if responseError:
+            raise EntityCreatingException(responseError=responseError)
+
+        return Tools.checkAndConvert(data, dict, "dataset creation result")
+
+    def _getDatasetUploadRequest(self, dataset: Union[Dataset, Attachment]):
+        # print("\n".join([f.fullPath for f in fileList]))
+        if not self._files:
+            raise LOGSException("Cannot not create dataset without files")
+        if not self._formatId:
+            raise LOGSException("Cannot not create dataset without a formatId")
+
+        for file in self._files:
+            file.addMtime()
+
+        request = DatasetUploadRequest(
+            dataset=dataset, connection=self._getConnection()
+        )
+
+        return request
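This hunk matches LOGS/Entities/DatasetCreator.py (+148 in the file list). Its create() method sends everything as one multipart POST to the datasets/create endpoint: a part named "Dataset" carrying the serialized DatasetUploadRequest, followed by one part named "files" per FileEntry. A condensed sketch of that layout follows; 'connection', 'request', and 'files' stand in for objects DatasetCreator builds internally and are not part of any public API.

# Sketch only: placeholders mirror the part layout built in DatasetCreator.create().
from LOGS.LOGSConnection import MultipartEntry

multipart = [MultipartEntry(name="Dataset", fileName=None, content=request.toDict())]
multipart += [MultipartEntry(name="files", fileName=f.id, content=f) for f in files]

data, responseError = connection.postMultipartEndpoint(
    endpoint=["datasets", "create"], data=multipart
)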
@@ -0,0 +1,147 @@
+from typing import List, Optional, TypeVar, Union, cast
+
+from LOGS.Entities.DatasetRequestParameter import ParsingStates
+from LOGS.Entities.Datatrack import Datatrack
+from LOGS.Entities.DatatrackFormattedTable import DatatrackFormattedTable
+from LOGS.Entities.DatatrackGeneric import DatatrackGeneric
+from LOGS.Entities.DatatrackImage import DatatrackImage
+from LOGS.Entities.DatatrackNumericArray import DatatrackNumericArray
+from LOGS.Entities.DatatrackNumericMatrix import DatatrackNumericMatrix
+from LOGS.Entities.HierarchyNode import HierarchyNode
+from LOGS.Entities.ParserLog import ParserLog
+from LOGS.Entities.Track import Track
+from LOGS.Entities.TrackImage import TrackImage
+from LOGS.Entities.TrackMatrix import TrackMatrix
+from LOGS.Entities.TrackTable import TrackTable
+from LOGS.Entities.TrackXY import TrackXY
+from LOGS.Entities.TrackXYComplex import TrackXYComplex
+from LOGS.Entity.SerializableContent import SerializableContent
+
+TRACKS = Union[Track, TrackXY]
+DATATRACKS = Union[Datatrack, DatatrackNumericArray]
+_T = TypeVar("_T", Track, TrackXY)
+
+
+class DatasetInfo(SerializableContent):
+    _name: Optional[str] = None
+    _type: Optional[str] = None
+    _formatVersion: Optional[int] = None
+    _parsingState: Optional[ParsingStates] = None
+    _parserLogs: Optional[List[ParserLog]] = None
+    _tracks: Optional[List[TRACKS]] = None
+    _datatracks: Optional[List[DATATRACKS]] = None
+    _tracksHierarchy: Optional[HierarchyNode] = None
+
+    @property
+    def name(self) -> Optional[str]:
+        return self._name
+
+    @name.setter
+    def name(self, value):
+        self._name = self.checkAndConvertNullable(value, str, "name")
+
+    @property
+    def type(self) -> Optional[str]:
+        return self._type
+
+    @type.setter
+    def type(self, value):
+        self._type = self.checkAndConvertNullable(value, str, "type")
+
+    @property
+    def parsingState(self) -> Optional[ParsingStates]:
+        return self._parsingState
+
+    @parsingState.setter
+    def parsingState(self, value):
+        self._parsingState = cast(
+            ParsingStates, self.checkAndConvertNullable(value, str, "parsingState")
+        )
+
+    @property
+    def formatVersion(self) -> Optional[int]:
+        return self._formatVersion
+
+    @formatVersion.setter
+    def formatVersion(self, value):
+        self._formatVersion = self.checkAndConvertNullable(value, int, "formatVersion")
+
+    @property
+    def parserLogs(self) -> Optional[List[ParserLog]]:
+        return self._parserLogs
+
+    @parserLogs.setter
+    def parserLogs(self, value):
+        self._parserLogs = self.checkListAndConvertNullable(
+            value, ParserLog, "parserLogs"
+        )
+
+    @classmethod
+    def _trackConverter(cls, value: dict) -> TRACKS:
+        if isinstance(value, dict) and "type" in value:
+            # print("_trackConverter", value["type"])
+            if value["type"] == "XY_real":
+                return TrackXY(value)
+            elif value["type"] == "XY_complex":
+                return TrackXYComplex(value)
+            elif value["type"] == "matrix_real":
+                return TrackMatrix(value)
+            elif value["type"] == "image":
+                return TrackImage(value)
+            elif value["type"] == "table":
+                return TrackTable(value)
+            else:
+                return Track(value)
+        else:
+            return Track(value)
+
+    @classmethod
+    def _datatrackConverter(cls, value: dict) -> DATATRACKS:
+        if isinstance(value, dict) and "type" in value:
+            # print("_datatrackConverter", value["type"])
+            if value["type"] == "numeric_array":
+                return DatatrackNumericArray(value)
+            elif value["type"] == "numeric_matrix":
+                return DatatrackNumericMatrix(value)
+            elif value["type"] == "image":
+                return DatatrackImage(value)
+            elif value["type"] == "formatted_table":
+                return DatatrackFormattedTable(value)
+            else:
+                return DatatrackGeneric(value)
+        else:
+            return DatatrackGeneric(value)
+
+    @property
+    def tracks(self) -> Optional[List[Track]]:
+        return self._tracks
+
+    @tracks.setter
+    def tracks(self, value):
+        self._tracks = self.checkListAndConvertNullable(
+            value, Track, "tracks", converter=self._trackConverter
+        )
+
+    @property
+    def datatracks(self) -> Optional[List[Datatrack]]:
+        return self._datatracks
+
+    @datatracks.setter
+    def datatracks(self, value):
+        if value is None:
+            return None
+
+        if not isinstance(value, list):
+            value = [value]
+
+        self._datatracks = [self._datatrackConverter(v) for v in value]
+
+    @property
+    def tracksHierarchy(self) -> Optional[HierarchyNode]:
+        return self._tracksHierarchy
+
+    @tracksHierarchy.setter
+    def tracksHierarchy(self, value):
+        self._tracksHierarchy = self.checkAndConvertNullable(
+            value, HierarchyNode, "tracksHierarchy"
+        )
@@ -0,0 +1,157 @@
+import uuid
+from typing import Any, List, Optional
+
+from deprecation import deprecated  # type: ignore
+
+from LOGS.Auxiliary.Tools import Tools
+from LOGS.Entities.FileEntry import FileEntry, FileFragment
+from LOGS.Entity.SerializableContent import SerializableClass
+
+
+class MatchRequest(SerializableClass):
+    _typeMapper = {"files": FileEntry}
+
+    def __init__(self, ref: Any = None):
+        self.files: List[FileEntry] = []
+        self.formatIds: Optional[List[str]] = None
+        super().__init__(ref)
+
+
+class DatasetForSearch(SerializableClass):
+    _typeMapper = {"files": FileEntry}
+
+    def __init__(self, ref: Any = None):
+        self.id: str = ""
+        self.formatId: str = ""
+        self.checkUpdatable: bool = True
+        self.files: List[FileEntry] = []
+        super().__init__(ref)
+        self.id = uuid.uuid4().hex
+
+    @property
+    @deprecated(details="Please use property 'formatId'")
+    def parserId(self):
+        return self.formatId
+
+    @parserId.setter
+    @deprecated(details="Please use property 'formatId'")
+    def paserId(self, value):
+        self.formatId = value
+
+
+class DatasetsUpdatableFiles(SerializableClass):
+    _typeMapper = {"files": FileEntry}
+
+    def __init__(
+        self,
+        ref: Any = None,
+        datasetId: Optional[int] = None,
+        files: Optional[List[FileEntry]] = None,
+    ):
+        self.datasetId: int = 0
+        self.files: List[FileEntry] = []
+        super().__init__(ref)
+        if datasetId is not None:
+            self.datasetId = datasetId
+        if files is not None:
+            self.files = files
+
+
+class DatasetSearchResult(SerializableClass):
+    _typeMapper: dict = {"files": FileEntry}
+
+    def __init__(self, ref: Any = None):
+        self.id: str = ""
+        self.errors: List[str] = []
+        self.logsId: Optional[int] = None
+        self.isUpdateable: Optional[bool] = None
+        self.files: List[FileEntry] = []
+        super().__init__(ref)
+
+
+class DatasetSearchRequest(SerializableClass):
+    _typeMapper = {"datasets": DatasetForSearch}
+
+    def __init__(self, ref: Any = None):
+        self.datasets: List[DatasetForSearch] = []
+        super().__init__(ref)
+
+
+class MatchedDataset(SerializableClass):
+    _typeMapper = {"files": FileEntry}
+
+    def __init__(self, ref: Any = None):
+        self.id: str = ""
+        self.formatId: str = ""
+        self.name: str = ""
+        self.parentMissing: bool = False
+        self.parentPath: str = ""
+        self.files: List[FileEntry] = []
+        super().__init__(ref)
+
+    def __iter__(self):
+        for file in self.files:
+            yield file
+
+    def __str__(self):
+        if len(self.files):
+            return "<%s %a %s>" % (
+                type(self).__name__,
+                self.formatId,
+                Tools.numberPlural("file", len(self.files)),
+            )
+        else:
+            return "<%s>" % (type(self).__name__)
+
+    @property
+    @deprecated(details="Please use property 'formatId'")
+    def parserId(self):
+        return self.formatId
+
+    @parserId.setter
+    @deprecated(details="Please use property 'formatId'")
+    def paserId(self, value):
+        self.formatId = value
+
+
+class DatasetMatch(SerializableClass):
+    _typeMapper = {"datasets": MatchedDataset}
+
+    def __init__(self, ref: Any = None):
+        self.fromatId: str = ""
+        self.parserName: str = ""
+        self.datasets: List[MatchedDataset] = []
+        super().__init__(ref)
+
+    def __iter__(self):
+        for dataset in self.datasets:
+            yield dataset
+
+    def __str__(self):
+        if self.fromatId and len(self.datasets):
+            return "<%s %a(%d)>" % (
+                type(self).__name__,
+                self.fromatId,
+                len(self.datasets),
+            )
+        else:
+            return "<%s>" % (type(self).__name__)
+
+    @property
+    @deprecated(details="Please use property 'formatId'")
+    def parserId(self):
+        return self.formatId
+
+    @parserId.setter
+    @deprecated(details="Please use property 'formatId'")
+    def parserId(self, value):
+        self.formatId = value
+
+
+class MatchResult(SerializableClass):
+    _typeMapper = {"matches": DatasetMatch, "missingFragments": FileFragment}
+
+    def __init__(self, ref: Any = None):
+        self.matches: List[DatasetMatch] = []
+        self.missingFragments: List[FileFragment] = []
+        super().__init__(ref)