logs-py 4.0.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- LOGS/Auxiliary/CheckClassName.py +1075 -0
- LOGS/Auxiliary/Constants.py +99 -0
- LOGS/Auxiliary/CustomEntityClassGenerator.py +254 -0
- LOGS/Auxiliary/CustomFieldClassGenerator.py +115 -0
- LOGS/Auxiliary/CustomFieldValueTypeChecker.py +168 -0
- LOGS/Auxiliary/CustomSectionClassGenerator.py +113 -0
- LOGS/Auxiliary/CustomTypeClassGenerator.py +147 -0
- LOGS/Auxiliary/DateTimeConverter.py +66 -0
- LOGS/Auxiliary/Decorators.py +109 -0
- LOGS/Auxiliary/Exceptions.py +341 -0
- LOGS/Auxiliary/LOGSErrorResponse.py +89 -0
- LOGS/Auxiliary/MinimalModelGenerator.py +236 -0
- LOGS/Auxiliary/ParameterHelper.py +56 -0
- LOGS/Auxiliary/ReplaceMessage.py +13 -0
- LOGS/Auxiliary/Tools.py +432 -0
- LOGS/Auxiliary/__init__.py +15 -0
- LOGS/Converter/Conversion.py +248 -0
- LOGS/Converter/Converter.py +96 -0
- LOGS/Converter/ConverterParameter.py +88 -0
- LOGS/Converter/DateTimeRange.py +58 -0
- LOGS/Converter/ExportParameters.py +89 -0
- LOGS/Converter/__init__.py +13 -0
- LOGS/Entities/Attachment.py +84 -0
- LOGS/Entities/AttachmentMinimal.py +8 -0
- LOGS/Entities/AttachmentRequestParameter.py +42 -0
- LOGS/Entities/Attachments.py +53 -0
- LOGS/Entities/AutoloadFileInfo.py +12 -0
- LOGS/Entities/AutoloadStatusError.py +7 -0
- LOGS/Entities/AxisNaming.py +33 -0
- LOGS/Entities/AxisZoom.py +33 -0
- LOGS/Entities/Bridge.py +165 -0
- LOGS/Entities/BridgeClientInfo.py +93 -0
- LOGS/Entities/BridgeMinimal.py +8 -0
- LOGS/Entities/BridgeRequestParameter.py +49 -0
- LOGS/Entities/BridgeType.py +7 -0
- LOGS/Entities/Bridges.py +12 -0
- LOGS/Entities/CustomField.py +243 -0
- LOGS/Entities/CustomFieldMinimal.py +8 -0
- LOGS/Entities/CustomFieldModels.py +111 -0
- LOGS/Entities/CustomFieldRequestParameter.py +69 -0
- LOGS/Entities/CustomFieldSearchQuery.py +40 -0
- LOGS/Entities/CustomFields.py +12 -0
- LOGS/Entities/CustomType.py +212 -0
- LOGS/Entities/CustomTypeMinimal.py +8 -0
- LOGS/Entities/CustomTypeRequestParameter.py +60 -0
- LOGS/Entities/CustomTypeSection.py +63 -0
- LOGS/Entities/CustomTypes.py +12 -0
- LOGS/Entities/DataFormat.py +97 -0
- LOGS/Entities/DataFormatInstrument.py +18 -0
- LOGS/Entities/DataFormatInstrumentMinimal.py +8 -0
- LOGS/Entities/DataFormatInstrumentRequestParameter.py +17 -0
- LOGS/Entities/DataFormatInstruments.py +16 -0
- LOGS/Entities/DataFormatMinimal.py +18 -0
- LOGS/Entities/DataFormatRequestParameter.py +21 -0
- LOGS/Entities/DataFormats.py +12 -0
- LOGS/Entities/DataSource.py +218 -0
- LOGS/Entities/DataSourceConnectionStatus.py +12 -0
- LOGS/Entities/DataSourceMinimal.py +8 -0
- LOGS/Entities/DataSourceRequestParameter.py +57 -0
- LOGS/Entities/DataSourceStatus.py +108 -0
- LOGS/Entities/DataSourceStatusIterator.py +16 -0
- LOGS/Entities/DataSourceStatusRequestParameter.py +31 -0
- LOGS/Entities/DataSources.py +12 -0
- LOGS/Entities/Dataset.py +439 -0
- LOGS/Entities/DatasetBase.py +196 -0
- LOGS/Entities/DatasetCreator.py +148 -0
- LOGS/Entities/DatasetInfo.py +147 -0
- LOGS/Entities/DatasetMatchTypes.py +157 -0
- LOGS/Entities/DatasetMatching.py +196 -0
- LOGS/Entities/DatasetMinimal.py +8 -0
- LOGS/Entities/DatasetModels.py +33 -0
- LOGS/Entities/DatasetRequestParameter.py +92 -0
- LOGS/Entities/DatasetTemplate.py +23 -0
- LOGS/Entities/DatasetUploadParameter.py +14 -0
- LOGS/Entities/Datasets.py +142 -0
- LOGS/Entities/Datatrack.py +179 -0
- LOGS/Entities/DatatrackFormattedTable.py +25 -0
- LOGS/Entities/DatatrackGeneric.py +34 -0
- LOGS/Entities/DatatrackImage.py +25 -0
- LOGS/Entities/DatatrackNumericArray.py +30 -0
- LOGS/Entities/DatatrackNumericMatrix.py +98 -0
- LOGS/Entities/Entities.py +71 -0
- LOGS/Entities/EntitiesRequestParameter.py +18 -0
- LOGS/Entities/EntityOriginWriteModelWithId.py +15 -0
- LOGS/Entities/FileEntry.py +138 -0
- LOGS/Entities/FileExcludePattern.py +8 -0
- LOGS/Entities/FormatMetaData.py +56 -0
- LOGS/Entities/FormattedTable/DatatypeFormattedTable.py +135 -0
- LOGS/Entities/FormattedTable/DatatypeFormattedTableCell.py +108 -0
- LOGS/Entities/FormattedTable/DatatypeFormattedTableSettings.py +11 -0
- LOGS/Entities/FormattedTable/__init__.py +9 -0
- LOGS/Entities/HierarchyLeaf.py +15 -0
- LOGS/Entities/HierarchyNode.py +40 -0
- LOGS/Entities/ILiteraryTypedEntity.py +19 -0
- LOGS/Entities/InventoryItem.py +102 -0
- LOGS/Entities/InventoryItemMinimal.py +25 -0
- LOGS/Entities/InventoryItemRequestParameter.py +58 -0
- LOGS/Entities/InventoryItems.py +12 -0
- LOGS/Entities/LabNotebook.py +33 -0
- LOGS/Entities/LabNotebookEntries.py +16 -0
- LOGS/Entities/LabNotebookEntry.py +106 -0
- LOGS/Entities/LabNotebookEntryContent/BasicAttribute.py +15 -0
- LOGS/Entities/LabNotebookEntryContent/EntityAttribute.py +85 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentBlockquote.py +13 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentBulletList.py +17 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentCallout.py +40 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentContentPlaceholderNode.py +31 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentConverter.py +207 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentDocument.py +8 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentEntity.py +13 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentEntityMention.py +31 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentHeading.py +33 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentHorizontalRule.py +12 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentItem.py +37 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentListItem.py +49 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentOrderedList.py +31 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentParagraph.py +13 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentTable.py +17 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentTableCell.py +40 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentTableRow.py +8 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentTaskList.py +17 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentTaskListItem.py +31 -0
- LOGS/Entities/LabNotebookEntryContent/EntryContentText.py +33 -0
- LOGS/Entities/LabNotebookEntryContent/IEntryContentWithAttribute.py +23 -0
- LOGS/Entities/LabNotebookEntryContent/IEntryContentWithContent.py +38 -0
- LOGS/Entities/LabNotebookEntryContent/IEntryContentWithTextAttribute.py +16 -0
- LOGS/Entities/LabNotebookEntryContent/TextAttribute.py +46 -0
- LOGS/Entities/LabNotebookEntryContent/TextMarkAtributes.py +64 -0
- LOGS/Entities/LabNotebookEntryContent/TextMarkConverter.py +45 -0
- LOGS/Entities/LabNotebookEntryContent/TextMarks.py +71 -0
- LOGS/Entities/LabNotebookEntryContent/__init__.py +34 -0
- LOGS/Entities/LabNotebookEntryMinimal.py +8 -0
- LOGS/Entities/LabNotebookEntryRequestParameter.py +59 -0
- LOGS/Entities/LabNotebookExperiment.py +58 -0
- LOGS/Entities/LabNotebookExperimentMinimal.py +8 -0
- LOGS/Entities/LabNotebookExperimentRequestParameter.py +52 -0
- LOGS/Entities/LabNotebookExperiments.py +16 -0
- LOGS/Entities/LabNotebookMinimal.py +8 -0
- LOGS/Entities/LabNotebookModels.py +14 -0
- LOGS/Entities/LabNotebookRequestParameter.py +42 -0
- LOGS/Entities/LabNotebookTemplate.py +42 -0
- LOGS/Entities/LabNotebookTemplateMinimal.py +8 -0
- LOGS/Entities/LabNotebookTemplateRequestParameter.py +38 -0
- LOGS/Entities/LabNotebookTemplates.py +16 -0
- LOGS/Entities/LabNotebooks.py +12 -0
- LOGS/Entities/Method.py +66 -0
- LOGS/Entities/MethodMinimal.py +8 -0
- LOGS/Entities/MethodRequestParameter.py +16 -0
- LOGS/Entities/Methods.py +12 -0
- LOGS/Entities/Origin.py +53 -0
- LOGS/Entities/OriginMinimal.py +8 -0
- LOGS/Entities/OriginRequestParameter.py +28 -0
- LOGS/Entities/Origins.py +12 -0
- LOGS/Entities/ParserLog.py +49 -0
- LOGS/Entities/Permission.py +9 -0
- LOGS/Entities/Person.py +145 -0
- LOGS/Entities/PersonCategory.py +12 -0
- LOGS/Entities/PersonMinimal.py +8 -0
- LOGS/Entities/PersonRequestParameter.py +58 -0
- LOGS/Entities/Persons.py +12 -0
- LOGS/Entities/Project.py +52 -0
- LOGS/Entities/ProjectMinimal.py +8 -0
- LOGS/Entities/ProjectPersonPermission.py +102 -0
- LOGS/Entities/ProjectRequestParameter.py +58 -0
- LOGS/Entities/Projects.py +12 -0
- LOGS/Entities/Role.py +94 -0
- LOGS/Entities/RoleMinimal.py +8 -0
- LOGS/Entities/RoleRequestParameter.py +40 -0
- LOGS/Entities/Roles.py +12 -0
- LOGS/Entities/RunState.py +9 -0
- LOGS/Entities/Sample.py +53 -0
- LOGS/Entities/SampleMinimal.py +8 -0
- LOGS/Entities/SampleRequestParameter.py +54 -0
- LOGS/Entities/Samples.py +12 -0
- LOGS/Entities/SharedContent.py +87 -0
- LOGS/Entities/SharedContentMinimal.py +8 -0
- LOGS/Entities/SharedContentRequestParameter.py +38 -0
- LOGS/Entities/SharedContents.py +12 -0
- LOGS/Entities/Signature.py +60 -0
- LOGS/Entities/Track.py +93 -0
- LOGS/Entities/TrackData.py +20 -0
- LOGS/Entities/TrackImage.py +21 -0
- LOGS/Entities/TrackImageData.py +20 -0
- LOGS/Entities/TrackMatrix.py +28 -0
- LOGS/Entities/TrackMatrixData.py +22 -0
- LOGS/Entities/TrackSettings.py +55 -0
- LOGS/Entities/TrackTable.py +21 -0
- LOGS/Entities/TrackTableData.py +22 -0
- LOGS/Entities/TrackXY.py +40 -0
- LOGS/Entities/TrackXYComplex.py +51 -0
- LOGS/Entities/TrackXYComplexData.py +50 -0
- LOGS/Entities/TrackXYData.py +31 -0
- LOGS/Entities/Vendor.py +40 -0
- LOGS/Entities/VendorMinimal.py +8 -0
- LOGS/Entities/VendorRequestParameter.py +17 -0
- LOGS/Entities/Vendors.py +12 -0
- LOGS/Entities/__init__.py +118 -0
- LOGS/Entity/ConnectedEntity.py +170 -0
- LOGS/Entity/Entity.py +203 -0
- LOGS/Entity/EntityConnector.py +70 -0
- LOGS/Entity/EntityIterator.py +263 -0
- LOGS/Entity/EntityMinimal.py +141 -0
- LOGS/Entity/EntityMinimalWithIntId.py +36 -0
- LOGS/Entity/EntityMinimalWithStrId.py +36 -0
- LOGS/Entity/EntityMinimalWithType.py +47 -0
- LOGS/Entity/EntityRequestParameter.py +104 -0
- LOGS/Entity/EntitySortBy.py +69 -0
- LOGS/Entity/EntityWithIntId.py +26 -0
- LOGS/Entity/EntityWithStrId.py +26 -0
- LOGS/Entity/IGenericEntityOrderBy.py +55 -0
- LOGS/Entity/IdIterator.py +207 -0
- LOGS/Entity/SerializableContent.py +834 -0
- LOGS/Entity/__init__.py +23 -0
- LOGS/Interfaces/ICustomFieldValue.py +92 -0
- LOGS/Interfaces/ICustomSectionValue.py +161 -0
- LOGS/Interfaces/ICustomTypeValue.py +152 -0
- LOGS/Interfaces/ICustomValue.py +28 -0
- LOGS/Interfaces/IEntityInterface.py +7 -0
- LOGS/Interfaces/IEntryRecord.py +57 -0
- LOGS/Interfaces/IHierarchicalEntity.py +41 -0
- LOGS/Interfaces/IHierarchyType.py +63 -0
- LOGS/Interfaces/ILockableEntity.py +52 -0
- LOGS/Interfaces/IModificationRecord.py +56 -0
- LOGS/Interfaces/INamedEntity.py +25 -0
- LOGS/Interfaces/IOwnedEntity.py +27 -0
- LOGS/Interfaces/IPaginationRequest.py +11 -0
- LOGS/Interfaces/IPermissionedEntity.py +72 -0
- LOGS/Interfaces/IProjectBased.py +27 -0
- LOGS/Interfaces/ISessionedEntity.py +59 -0
- LOGS/Interfaces/ISignableEntity.py +49 -0
- LOGS/Interfaces/ISoftDeletable.py +28 -0
- LOGS/Interfaces/ITypedEntity.py +129 -0
- LOGS/Interfaces/IUniqueEntity.py +61 -0
- LOGS/Interfaces/IVersionedEntity.py +39 -0
- LOGS/Interfaces/__init__.py +7 -0
- LOGS/LOGS.py +1436 -0
- LOGS/LOGSConnection.py +647 -0
- LOGS/LOGSOptions.py +11 -0
- LOGS/Parameters/Color.py +92 -0
- LOGS/Parameters/ParameterBase.py +55 -0
- LOGS/Parameters/ParameterConverter.py +24 -0
- LOGS/Parameters/ParameterElement.py +99 -0
- LOGS/Parameters/ParameterList.py +52 -0
- LOGS/Parameters/ParameterTable.py +64 -0
- LOGS/Parameters/__init__.py +13 -0
- LOGS/ServerMetaData.py +120 -0
- LOGS/__init__.py +12 -0
- logs_py-4.0.7.dist-info/METADATA +51 -0
- logs_py-4.0.7.dist-info/RECORD +251 -0
- logs_py-4.0.7.dist-info/WHEEL +5 -0
- logs_py-4.0.7.dist-info/top_level.txt +1 -0

LOGS/Entities/DatasetMatching.py
@@ -0,0 +1,196 @@
+from dataclasses import dataclass, field
+from typing import Dict, List, Literal, Optional, Sequence, Union
+
+from LOGS.Auxiliary.Constants import Constants
+from LOGS.Auxiliary.Decorators import Endpoint
+from LOGS.Auxiliary.Exceptions import LOGSException
+from LOGS.Auxiliary.Tools import Tools
+from LOGS.Entities.DatasetMatchTypes import MatchRequest, MatchResult
+from LOGS.Entities.FileEntry import FileEntry
+from LOGS.Entity.EntityConnector import EntityConnector
+from LOGS.LOGSConnection import LOGSConnection
+
+
+@dataclass
+class DirectoryTreeNode:
+    name: str = ""
+    path: str = "/"
+    type: Literal["dir", "file"] = "dir"
+    content: Dict[str, "DirectoryTreeNode"] = field(default_factory=dict)
+    fileCount: int = 0
+
+    def print(self, countOnly=False, indent=""):
+        if self.type == "file":
+            if not countOnly:
+                print("%s%s (f)" % (indent, self.name))
+            return
+        print(
+            "%s%s (%s)%s"
+            % (
+                indent,
+                self.name,
+                Tools.getHumanReadableSize(self.fileCount),
+                ": " + str(len(self.content.keys())) if countOnly else "",
+            )
+        )
+        for entry in self.content.values():
+            entry.print(indent=indent + "..", countOnly=countOnly)
+
+    def splitTreeByFileCount(self, maxCount: int, parentPath="", level=0):
+        forrest: List[DirectoryTreeNode] = []
+
+        path = parentPath + "/" + self.name
+        if path == "/":
+            path = ""
+        # print(
+        #     "." * level + self.name,
+        #     self.fileCount,
+        #     "<",
+        #     maxSize,
+        #     "->",
+        #     self.fileCount <= maxSize,
+        #     "=>",
+        #     path,
+        # )
+        if self.fileCount <= maxCount:
+            self.path = path
+            return [self]
+        else:
+            files = [item for item in self.content.values() if item.type == "file"]
+            if len(files) > 0:
+                forrest.append(
+                    DirectoryTreeNode(
+                        name=self.name,
+                        path=path,
+                        content={f.name: f for f in files},
+                        fileCount=len(files),
+                    )
+                )
+
+            for item in self.content.values():
+                if item.type == "dir":
+                    forrest.extend(
+                        item.splitTreeByFileCount(
+                            maxCount=maxCount, parentPath=path, level=level + 1
+                        )
+                    )
+
+        return forrest
+
+
+@Endpoint("data_formats")
+class DatasetMatching(EntityConnector):
+    _request: MatchRequest = MatchRequest()
+    _formatIds: Optional[List[str]] = None
+    _matchResult: Optional[MatchResult] = None
+    _files: List[List[FileEntry]] = []
+    _maxFileCountInDirectory = 20000
+
+    def __init__(
+        self,
+        connection: LOGSConnection,
+        files: Union[Constants.FILE_TYPE, Sequence[Constants.FILE_TYPE]],
+        formatIds: Optional[List[str]] = None,
+        ignoreReadErrors=False,
+    ):
+        self._connection = connection
+        self._formatIds = formatIds
+        self._files = self.splitFileList(
+            FileEntry.entriesFromFiles(files, ignoreReadErrors)
+        )
+
+    @classmethod
+    def fileListToTree(cls, files: List[FileEntry]):
+        root = DirectoryTreeNode(name="")
+        i = 0
+        for file in files:
+            path = file.path.split("/")
+            rootPath = path.pop(0)
+            fileName = path.pop()
+            # fileSize = file.size if file.size else 0
+            if rootPath != "":
+                continue
+            current = root
+            for i, p in enumerate(path):
+                current.fileCount += 1
+                if p not in current.content:
+                    current.content[p] = DirectoryTreeNode(
+                        name=p, path="/".join(f for f in path[:i])
+                    )
+
+                current = current.content[p]
+            current.fileCount += 1
+            current.content[fileName] = DirectoryTreeNode(
+                name=fileName, path=file.path, type="file", fileCount=1
+            )
+
+        return root
+
+    @classmethod
+    def TreeToFileList(cls, root: DirectoryTreeNode):
+        if root.type == "file":
+            file = FileEntry()
+            file.fullPath = root.path
+            file.id = root.path
+            file.path = root.path
+            return [file]
+
+        files: List[FileEntry] = []
+        for item in root.content.values():
+            files.extend(cls.TreeToFileList(item))
+
+        return files
+
+    def splitFileList(self, files: List[FileEntry]):
+        root = self.fileListToTree(files)
+        forrest = root.splitTreeByFileCount(maxCount=self._maxFileCountInDirectory)
+        return [self.TreeToFileList(tree) for tree in forrest]
+
+    def __iter__(self):
+        for files in self._files:
+            self._request = MatchRequest()
+            self._request.formatIds = self._formatIds
+            self._request.files = files
+
+            self._match()
+            if not self._matchResult:
+                return None
+            for match in self._matchResult.matches:
+                for dataset in match:
+                    yield dataset
+
+    def _match(self):
+        connection, endpoint = self._getConnectionData()
+
+        # print(
+        #     ">>> request",
+        #     len(self._request.files),
+        #     len(str(self._request.toDict()).encode("utf-8")),
+        #     "->",
+        #     len(str(self._request.toDict()).encode("utf-8")) / len(self._request.files),
+        #     "<",
+        #     30000000,
+        # )
+        data, error = connection.postEndpoint(
+            endpoint=endpoint + ["match"], data=self._request.toDict()
+        )
+        if error:
+            raise LOGSException("Could not match dataset files: %a" % error)
+
+        self._matchResult = MatchResult(data)
+
+        if self._matchResult.missingFragments:
+            lookUp = {e.id: e for e in self._request.files}
+            for fileFragment in self._matchResult.missingFragments:
+                if fileFragment.id not in lookUp:
+                    continue
+
+                file = lookUp[fileFragment.id]
+                file.addFragment(fileFragment.fragments)
+
+            data, error = connection.postEndpoint(
+                endpoint=endpoint + ["match"], data=self._request.toDict()
+            )
+            if error:
+                raise LOGSException("Could not match dataset files: %a" % error)
+            self._matchResult = MatchResult(data)
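
The matcher above never submits more than _maxFileCountInDirectory files per request: splitTreeByFileCount() turns one directory tree into a forest of smaller trees. Below is a minimal sketch of that helper in isolation; the three-file tree and all values are invented for illustration, and only DirectoryTreeNode itself comes from the module shown above (import path taken from the file list).

from LOGS.Entities.DatasetMatching import DirectoryTreeNode

# Invented example tree: two files at the root plus a subdirectory with one file.
leaf = DirectoryTreeNode(name="scan.raw", path="/sub/scan.raw", type="file", fileCount=1)
sub = DirectoryTreeNode(name="sub", path="/sub", content={"scan.raw": leaf}, fileCount=1)
a = DirectoryTreeNode(name="a.txt", path="/a.txt", type="file", fileCount=1)
b = DirectoryTreeNode(name="b.txt", path="/b.txt", type="file", fileCount=1)
root = DirectoryTreeNode(name="", content={"a.txt": a, "b.txt": b, "sub": sub}, fileCount=3)

# With maxCount=2 the root (3 files) exceeds the limit, so it is split into one
# node holding the root-level files and one node per small-enough subdirectory.
forest = root.splitTreeByFileCount(maxCount=2)
print(len(forest))  # 2
for node in forest:
    node.print(countOnly=True)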

LOGS/Entities/DatasetModels.py
@@ -0,0 +1,33 @@
+from enum import Enum
+from typing import List, Optional
+
+from LOGS.Auxiliary.Tools import Tools
+from LOGS.Entity.SerializableContent import SerializableClass
+
+
+class DatasetSourceType(Enum):
+    ManualUpload = 0
+    SFTPAutoload = 1
+    ClientAutoload = 2
+    APIUpload = 3
+
+
+class ParsedMetadata(SerializableClass):
+    Parameters: bool = False
+    Tracks: bool = False
+    TrackCount: int = False
+    TrackViewerTypes: List[str] = []
+
+
+class DatasetSource(SerializableClass):
+    id: Optional[int] = None
+    type: Optional[DatasetSourceType] = None
+    name: Optional[str] = None
+
+    def __str__(self):
+        return Tools.ObjectToString(self)
+
+
+class ViewableEntityTypes(Enum):
+    ELN = "ELN"
+    CustomField = "CustomField"

LOGS/Entities/DatasetRequestParameter.py
@@ -0,0 +1,92 @@
+from dataclasses import dataclass, field
+from datetime import datetime
+from enum import Enum
+from typing import Any, Dict, List, Optional, Sequence, Type, cast
+
+from typing_extensions import Self
+
+from LOGS.Auxiliary.Constants import Constants
+from LOGS.Entity.EntityRequestParameter import EntityRequestParameter
+from LOGS.Entity.IGenericEntityOrderBy import (
+    IEntryRecordSortingOptions,
+    IGenericEntitySortingOptions,
+    IModificationRecordSortingOptions,
+    INamedEntitySortingOptions,
+    ITypedEntitySortingOptions,
+)
+from LOGS.Interfaces.IEntryRecord import IEntryRecordRequest
+from LOGS.Interfaces.ILockableEntity import ILockableEntityRequest
+from LOGS.Interfaces.IModificationRecord import IModificationRecordRequest
+from LOGS.Interfaces.INamedEntity import INamedEntityRequest
+from LOGS.Interfaces.IOwnedEntity import IOwnedEntityRequest
+from LOGS.Interfaces.IPermissionedEntity import IPermissionedEntityRequest
+from LOGS.Interfaces.IProjectBased import IProjectBasedRequest
+from LOGS.Interfaces.ISignableEntity import ISignableEntityRequest
+from LOGS.Interfaces.ISoftDeletable import ISoftDeletableRequest
+from LOGS.Interfaces.ITypedEntity import ITypedEntityRequest
+from LOGS.Interfaces.IUniqueEntity import IUniqueEntityRequest
+
+
+class ParsingStates(Enum):
+    ParsedSuccessfully = "ParsedSuccessfully"
+    NotParsable = "NotParsable"
+    ParsingFailed = "ParsingFailed"
+    NotYetParsed = "NotYetParsed"
+
+
+class DatasetSortingOptions(
+    IGenericEntitySortingOptions,
+    INamedEntitySortingOptions,
+    IEntryRecordSortingOptions,
+    IModificationRecordSortingOptions,
+    ITypedEntitySortingOptions,
+):
+    CREATION_DATE: Self = cast(Self, "CREATION_DATE")
+    PARSING_STATE: Self = cast(Self, "PARSING_STATE")
+    FORMAT_ID: Self = cast(Self, "FORMAT_ID")
+    OWNER: Self = cast(Self, "OWNER")
+
+
+@dataclass
+class DatasetRequestParameter(
+    EntityRequestParameter[DatasetSortingOptions],
+    IPermissionedEntityRequest,
+    IUniqueEntityRequest,
+    INamedEntityRequest,
+    IOwnedEntityRequest,
+    IProjectBasedRequest,
+    IEntryRecordRequest,
+    IModificationRecordRequest,
+    ISoftDeletableRequest,
+    ILockableEntityRequest,
+    ISignableEntityRequest,
+    ITypedEntityRequest,
+):
+    _orderByType: Type[DatasetSortingOptions] = field(
+        default=DatasetSortingOptions, init=False
+    )
+    includeSource: Optional[bool] = None
+    hasCustomImport: Optional[bool] = None
+    creationDateFrom: Optional[datetime] = None
+    creationDateTo: Optional[datetime] = None
+    autoloadServerIds: Optional[List[int]] = None
+    bridgeIds: Optional[List[int]] = None
+    dataSourceIds: Optional[List[int]] = None
+    excludeUndeleted: Optional[bool] = None
+    files: Optional[Sequence[Constants.FILE_TYPE]] = None
+    formatIds: Optional[List[str]] = None
+    hashes: Optional[List[str]] = None
+    includeUnclaimed: Optional[Optional[bool]] = None
+    isClaimed: Optional[Optional[bool]] = None
+    isReferencedByLabNotebook: Optional[Optional[bool]] = None
+    parameters: Optional[Dict[str, Any]] = None
+    parsingState: Optional[List[ParsingStates]] = None
+    pathContains: Optional[str] = None
+    searchTermIncludeParameters: Optional[bool] = None
+    searchTermIncludePaths: Optional[bool] = None
+
+    # Additional fetch options
+    includeParameters: Optional[bool] = None
+    includeInfo: Optional[bool] = None
+    includeZipSize: Optional[bool] = None
+    includeExports: Optional[bool] = None
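
A hedged sketch of how these filter fields might be populated. The attribute values are invented, and the example assumes the parameter object can be created without arguments and is handed to a dataset iterator elsewhere; that wiring is not part of this hunk.

from datetime import datetime

from LOGS.Entities.DatasetRequestParameter import (
    DatasetRequestParameter,
    ParsingStates,
)

parameters = DatasetRequestParameter()
parameters.formatIds = ["example_format"]  # invented format id
parameters.creationDateFrom = datetime(2024, 1, 1)
parameters.parsingState = [ParsingStates.ParsedSuccessfully]
# The "additional fetch options" are read by Datasets.__next__ (see the
# Datasets.py hunk below) to trigger extra per-dataset fetches.
parameters.includeParameters = True
parameters.includeInfo = True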

LOGS/Entities/DatasetTemplate.py
@@ -0,0 +1,23 @@
+from typing import TYPE_CHECKING, Optional
+
+from LOGS.Auxiliary.Tools import Tools
+from LOGS.Entity.ConnectedEntity import ConnectedEntity
+from LOGS.Interfaces.IOwnedEntity import IOwnedEntity
+from LOGS.Interfaces.IProjectBased import IProjectBased
+from LOGS.Interfaces.ITypedEntity import ITypedEntity
+
+if TYPE_CHECKING:
+    pass
+
+
+class DatasetTemplate(ConnectedEntity, ITypedEntity, IOwnedEntity, IProjectBased):
+    _id: int = 0
+    _datasetTemplates: Optional[list] = None
+
+    @property
+    def id(self) -> int:
+        return self._id
+
+    @id.setter
+    def id(self, value):
+        self._id = Tools.checkAndConvert(value, int, "id")

LOGS/Entities/DatasetUploadParameter.py
@@ -0,0 +1,14 @@
+from dataclasses import dataclass
+from typing import List, Optional
+
+
+@dataclass
+class DatasetUploadParameter:
+    name: Optional[str] = None
+    methodId: Optional[int] = None
+    instrumentId: Optional[int] = None
+    experimentId: Optional[int] = None
+    sampleId: Optional[int] = None
+    projectIds: Optional[List[int]] = None
+    organizationIds: Optional[List[int]] = None
+    operatorIds: Optional[List[int]] = None
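
Since DatasetUploadParameter is a plain dataclass with all-optional fields, upload metadata can be built with keyword arguments; the name and ids below are invented placeholders.

from LOGS.Entities.DatasetUploadParameter import DatasetUploadParameter

upload = DatasetUploadParameter(
    name="Example HSQC run",  # invented
    methodId=1,
    sampleId=42,
    projectIds=[3, 7],
)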

LOGS/Entities/Datasets.py
@@ -0,0 +1,142 @@
+import os
+from typing import List, Optional, Sequence, cast
+
+from LOGS.Auxiliary.Constants import Constants
+from LOGS.Auxiliary.Decorators import Endpoint
+from LOGS.Auxiliary.Exceptions import LOGSException
+from LOGS.Auxiliary.Tools import Tools
+from LOGS.Entities.Dataset import Dataset
+from LOGS.Entities.DatasetMatchTypes import (
+    DatasetForSearch,
+    DatasetSearchRequest,
+    DatasetSearchResult,
+)
+from LOGS.Entities.DatasetRequestParameter import DatasetRequestParameter
+from LOGS.Entities.FileEntry import FileEntry
+from LOGS.Entity.EntityIterator import EntityIterator
+from LOGS.LOGSConnection import ResponseTypes
+
+
+@Endpoint("datasets")
+class Datasets(EntityIterator[Dataset, DatasetRequestParameter]):
+    """LOGS connected Dataset iterator"""
+
+    _generatorType = Dataset
+    _parameterType = DatasetRequestParameter
+
+    def download(
+        self,
+        directory: Optional[str] = None,
+        fileName: Optional[str] = None,
+        overwrite=False,
+    ) -> str:
+        connection, endpoint = self._getConnectionData()
+
+        if not directory:
+            directory = os.curdir
+
+        path = os.path.join(
+            directory,
+            Tools.sanitizeFileName(fileName=fileName, defaultName="Dataset.zip"),
+        )
+
+        if overwrite:
+            if os.path.exists(path) and not os.path.isfile(path):
+                raise LOGSException("Path %a is not a file" % path)
+        else:
+            if os.path.exists(path):
+                raise LOGSException("File %a already exists" % path)
+
+        data, error = connection.postEndpoint(
+            endpoint=endpoint + ["zip"],
+            data=self._parameters.toDict(),
+            responseType=ResponseTypes.RAW,
+        )
+        if error:
+            raise LOGSException(
+                "Could not fetch datasets zip file: %a" % error.errorString()
+            )
+
+        with open(path, mode="wb") as localFile:
+            localFile.write(cast(bytes, data))
+
+        return path
+
+    def _getDatasetSearchRequest(
+        self,
+        files: Sequence[Constants.FILE_TYPE],
+        formatIds: List[str],
+        checkUpdatable=True,
+    ):
+        fileList = FileEntry.entriesFromFiles(files)
+        for file in fileList:
+            file.addHash()
+        # print("\n".join([f.fullPath for f in fileList]))
+
+        request = DatasetSearchRequest()
+
+        request.datasets = []
+        for formatId in formatIds:
+            dataset = DatasetForSearch()
+            dataset.checkUpdatable = checkUpdatable
+            dataset.formatId = formatId
+            dataset.files.extend(fileList)
+            request.datasets.append(dataset)
+        return request
+
+    def findDatasetByFiles(
+        self,
+        files: Sequence[Constants.FILE_TYPE],
+        formatIds: List[str],
+        checkUpdatable=True,
+    ):
+        request = self._getDatasetSearchRequest(files, formatIds, checkUpdatable)
+        connection, endpoint = self._getConnectionData()
+
+        data, errors = connection.postEndpoint(
+            endpoint=endpoint + ["find"], data=request.toDict()
+        )
+        if errors:
+            raise LOGSException("Could not find dataset by files: %a" % errors)
+
+        return Tools.checkListAndConvert(data, DatasetSearchResult, "files search")
+
+    def __iter__(self):
+        if self._parameters:
+            parameters = cast(DatasetRequestParameter, self._parameters)
+
+            if parameters.files:
+                if not parameters.formatIds:
+                    typeName = type(parameters).__name__
+                    raise LOGSException(
+                        "%s.formatIds must be defined when %s.files is used."
+                        % (typeName, typeName)
+                    )
+
+                results = self.findDatasetByFiles(
+                    parameters.files, parameters.formatIds, False
+                )
+
+                if len(results) > 0:
+                    if parameters.ids is None:
+                        parameters.ids = []
+
+                    cast(List[int], parameters.ids).extend(
+                        [r.logsId for r in results if r.logsId]
+                    )
+        self._initEntityIterator()
+        return self
+
+    def __next__(self):
+        dataset = super().__next__()
+
+        if self._parameters.includeParameters:
+            dataset.fetchParameters()
+        if self._parameters.includeInfo:
+            dataset.fetchInfo()
+        if self._parameters.includeZipSize:
+            dataset.fetchZipSize()
+        if self._parameters.includeExports:
+            dataset.fetchExports()
+
+        return dataset
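
A sketch of how the iterator above could be consumed once a connected Datasets instance exists; how that instance is constructed (the EntityIterator base and LOGS client wiring) is outside this hunk, so the object is taken as given.

from LOGS.Entities.Datasets import Datasets


def summarize_and_archive(datasets: Datasets) -> str:
    # Each __next__ call may trigger the extra fetches controlled by the
    # includeParameters/includeInfo/includeZipSize/includeExports parameters.
    for dataset in datasets:
        print(dataset)

    # Request all matching datasets as a single zip archive in the current
    # working directory (see download() above).
    return datasets.download(fileName="datasets.zip", overwrite=True)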

LOGS/Entities/Datatrack.py
@@ -0,0 +1,179 @@
+import os
+from typing import Any, List, Literal, Optional, cast
+
+from LOGS.Auxiliary.Exceptions import EntityIncompleteException, LOGSException
+from LOGS.Entity.ConnectedEntity import ConnectedEntity
+from LOGS.LOGSConnection import LOGSConnection, ResponseTypes
+
+_NumberTypeType = Literal["int", "float", "double"]
+_DatatrackType = Literal[
+    "binary", "char", "formatted_table", "image", "numeric_array", "numeric_matrix"
+]
+_CodecType = Literal["char", "jpeg", "points", "generator"]
+
+
+class Datatrack(ConnectedEntity):
+    _type: Optional[_DatatrackType] = None
+    _codec: Optional[_CodecType] = None
+    _id: Optional[str] = None
+    _count: Optional[int] = None
+    _size: Optional[List[int]] = None
+    _min: Optional[List[float]] = None
+    _max: Optional[List[float]] = None
+    _numberType: Optional[_NumberTypeType] = None
+    _data: Optional[Any] = None
+    _incomplete = True
+
+    def __init__(self, ref=None, connection: Optional[LOGSConnection] = None):
+        self._noSerialize += ["data"]
+        super().__init__(ref=ref, connection=connection)
+
+    def _getConnectionData(self):
+        if not self._endpoint:
+            raise NotImplementedError(
+                "Endpoint missing for of entity type %a." % (type(self).__name__)
+            )
+
+        if not self.id:
+            raise LOGSException("%s id is not defined." % type(self).__name__)
+
+        return self._getConnection(), self._endpoint, self.id
+
+    def _fetchDataFromCache(self):
+        cacheFile = self._getCacheFile()
+        if cacheFile is None:
+            raise LOGSException("Cache directory not defined.")
+
+        if not os.path.exists(cacheFile):
+            return None
+
+        with open(cacheFile, "rb") as f:
+            return f.read()
+
+    def _getCacheFile(self):
+        if self.cacheDir is None:
+            return None
+
+        return os.path.join(self.cacheDir, self.cacheId + ".cache")
+
+    def _storeDataInCache(self, data):
+        cacheFile = self._getCacheFile()
+        if cacheFile is None:
+            raise LOGSException("Cache directory not defined.")
+        with open(cacheFile, "wb") as f:
+            f.write(data)
+
+    def clearCache(self):
+        cacheFile = self._getCacheFile()
+        if cacheFile is not None:
+            if os.path.exists(cacheFile):
+                os.remove(cacheFile)
+
+    def _fetchData(self):
+        data = None
+        if self.cacheDir:
+            data = self._fetchDataFromCache()
+
+        if data is None:
+            connection, endpoint, id = self._getConnectionData()
+
+            data, responseError = connection.getEndpoint(
+                endpoint + [id], responseType=ResponseTypes.RAW
+            )
+            if responseError:
+                raise LOGSException(
+                    "Could not fetch %s: %s"
+                    % (type(self).__name__, responseError.errorString()),
+                    responseError=responseError,
+                )
+
+            if self.cacheDir:
+                self._storeDataInCache(data)
+
+        self._data = data
+
+    def fetchFull(self, cacheDir: Optional[str] = None):
+        self._fetchData()
+        self._incomplete = False
+
+    def __iter__(self):
+        if self._incomplete:
+            raise EntityIncompleteException(self)
+        if self._data is not None:
+            for x in self._data:
+                yield x
+
+    @property
+    def type(self) -> Optional[_DatatrackType]:
+        return self._type
+
+    @property
+    def codec(self) -> Optional[_CodecType]:
+        return self._codec
+
+    @codec.setter
+    def codec(self, value):
+        self._codec = cast(Any, self.checkAndConvertNullable(value, str, "codec"))
+
+    @property
+    def id(self) -> Optional[str]:
+        return self._id
+
+    @id.setter
+    def id(self, value):
+        self._id = self.checkAndConvertNullable(value, str, "id")
+
+    @property
+    def count(self) -> Optional[int]:
+        return self._count
+
+    @count.setter
+    def count(self, value):
+        self._count = self.checkAndConvertNullable(value, int, "count")
+
+    @property
+    def size(self) -> Optional[List[int]]:
+        return self._size
+
+    @size.setter
+    def size(self, value):
+        self._size = self.checkListAndConvertNullable(value, int, "size")
+
+    @property
+    def min(self) -> Optional[List[float]]:
+        return self._min
+
+    @min.setter
+    def min(self, value):
+        self._min = self.checkListAndConvertNullable(value, float, "min")
+
+    @property
+    def max(self) -> Optional[List[float]]:
+        return self._max
+
+    @max.setter
+    def max(self, value):
+        self._max = self.checkListAndConvertNullable(value, float, "max")
+
+    @property
+    def numberType(self) -> Optional[_NumberTypeType]:
+        return self._numberType
+
+    @numberType.setter
+    def numberType(self, value):
+        self._numberType = cast(
+            Any, self.checkAndConvertNullable(value, str, "numberType")
+        )
+
+    @property
+    def data(self) -> Optional[Any]:
+        raise NotImplementedError(
+            "Field 'data' of %a class not implemented." % type(self).__name__
+        )
+
+    @property
+    def cacheId(self) -> str:
+        if self._cacheId is None:
+            return f"{self.type}_{self.id}"
+        else:
+            return self._cacheId
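
A sketch of the lazy-loading pattern implemented above: a Datatrack starts with _incomplete = True and refuses iteration until fetchFull() has pulled (and optionally cached) its raw data. How a track is obtained from a Dataset is not part of this hunk, so the object is taken as given here.

from LOGS.Auxiliary.Exceptions import EntityIncompleteException
from LOGS.Entities.Datatrack import Datatrack


def read_track(track: Datatrack):
    try:
        values = list(track)  # raises while the track is still incomplete
    except EntityIncompleteException:
        track.fetchFull()  # downloads the raw data; writes the cache when cacheDir is set
        values = list(track)

    print(track.type, track.codec, track.count)
    return values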