hardpy-0.19.1-py3-none-any.whl → hardpy-0.20.0-py3-none-any.whl
This diff compares the contents of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
- hardpy/__init__.py +2 -0
- hardpy/cli/cli.py +6 -0
- hardpy/common/config.py +19 -0
- hardpy/hardpy_panel/api.py +62 -1
- hardpy/hardpy_panel/frontend/dist/assets/{allPaths-C_-7WXHD.js → allPaths-BXbcAtew.js} +1 -1
- hardpy/hardpy_panel/frontend/dist/assets/{allPathsLoader-DgH0Xily.js → allPathsLoader-lJLHMNNZ.js} +2 -2
- hardpy/hardpy_panel/frontend/dist/assets/{browser-ponyfill-BbOvdqIF.js → browser-ponyfill-DzwgrUwX.js} +1 -1
- hardpy/hardpy_panel/frontend/dist/assets/{index-DEJb2W0B.js → index-CVhA7vmQ.js} +158 -158
- hardpy/hardpy_panel/frontend/dist/assets/{splitPathsBySizeLoader-o5HCcdVL.js → splitPathsBySizeLoader-BdwEQHyO.js} +1 -1
- hardpy/hardpy_panel/frontend/dist/index.html +1 -1
- hardpy/pytest_hardpy/db/__init__.py +0 -2
- hardpy/pytest_hardpy/db/runstore.py +378 -10
- hardpy/pytest_hardpy/db/statestore.py +390 -5
- hardpy/pytest_hardpy/db/tempstore.py +219 -17
- hardpy/pytest_hardpy/plugin.py +2 -2
- hardpy/pytest_hardpy/result/__init__.py +2 -0
- hardpy/pytest_hardpy/result/report_loader/json_loader.py +49 -0
- hardpy/pytest_hardpy/result/report_synchronizer/synchronizer.py +25 -9
- {hardpy-0.19.1.dist-info → hardpy-0.20.0.dist-info}/METADATA +18 -3
- {hardpy-0.19.1.dist-info → hardpy-0.20.0.dist-info}/RECORD +23 -23
- hardpy/pytest_hardpy/db/base_store.py +0 -179
- {hardpy-0.19.1.dist-info → hardpy-0.20.0.dist-info}/WHEEL +0 -0
- {hardpy-0.19.1.dist-info → hardpy-0.20.0.dist-info}/entry_points.txt +0 -0
- {hardpy-0.19.1.dist-info → hardpy-0.20.0.dist-info}/licenses/LICENSE +0 -0
hardpy/hardpy_panel/frontend/dist/assets/{splitPathsBySizeLoader-o5HCcdVL.js → splitPathsBySizeLoader-BdwEQHyO.js}

@@ -1 +1 @@
-import{_ as o,a as _,b as i,p as c,I as u}from"./index-
+import{_ as o,a as _,b as i,p as c,I as u}from"./index-CVhA7vmQ.js";var p=function(n,s){return o(void 0,void 0,void 0,function(){var a,r;return _(this,function(e){switch(e.label){case 0:return a=c(n),s!==u.STANDARD?[3,2]:[4,i(()=>import("./index-DLOviMB1.js").then(t=>t.I),[])];case 1:return r=e.sent(),[3,4];case 2:return[4,i(()=>import("./index-B-fsa5Ru.js").then(t=>t.I),[])];case 3:r=e.sent(),e.label=4;case 4:return[2,r[a]]}})})};export{p as splitPathsBySizeLoader};
hardpy/hardpy_panel/frontend/dist/index.html

@@ -25,7 +25,7 @@
       Learn how to configure a non-root public URL by running `npm run build`.
     -->
     <title>HardPy Operator Panel</title>
-    <script type="module" crossorigin src="/assets/index-
+    <script type="module" crossorigin src="/assets/index-CVhA7vmQ.js"></script>
     <link rel="stylesheet" crossorigin href="/assets/index-B7T9xvaW.css">
   </head>
   <body>
hardpy/pytest_hardpy/db/__init__.py

@@ -1,7 +1,6 @@
 # Copyright (c) 2025 Everypin
 # GNU General Public License v3.0 (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
 
-from hardpy.pytest_hardpy.db.base_store import BaseStore
 from hardpy.pytest_hardpy.db.const import DatabaseField
 from hardpy.pytest_hardpy.db.runstore import RunStore
 from hardpy.pytest_hardpy.db.schema import ResultRunStore, ResultStateStore
@@ -16,7 +15,6 @@ from hardpy.pytest_hardpy.db.statestore import StateStore
 from hardpy.pytest_hardpy.db.tempstore import TempStore
 
 __all__ = [
-    "BaseStore",
    "Chart",
    "DatabaseField",
    "Instrument",
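With `BaseStore` dropped from `hardpy.pytest_hardpy.db`, the package's public surface exposes only the concrete stores and schema models. A minimal consumer-side sketch of imports that remain valid after this change, limited to names visible in the diff above:

```python
# Imports still available from hardpy.pytest_hardpy.db in 0.20.0;
# BaseStore is no longer exported.
from hardpy.pytest_hardpy.db import (
    DatabaseField,
    ResultRunStore,
    ResultStateStore,
    RunStore,
    StateStore,
    TempStore,
)
```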
hardpy/pytest_hardpy/db/runstore.py

@@ -1,28 +1,396 @@
 # Copyright (c) 2024 Everypin
 # GNU General Public License v3.0 (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
 
+from __future__ import annotations
+
+import json
+from abc import ABC, abstractmethod
+from json import dumps
 from logging import getLogger
+from pathlib import Path
+from typing import TYPE_CHECKING, Any
 
-from
+from glom import PathAccessError, assign, glom
 
+from hardpy.common.config import ConfigManager, StorageType
 from hardpy.common.singleton import SingletonMeta
-from hardpy.pytest_hardpy.db.
+from hardpy.pytest_hardpy.db.const import DatabaseField as DF  # noqa: N817
 from hardpy.pytest_hardpy.db.schema import ResultRunStore
 
+if TYPE_CHECKING:
+    from pycouchdb.client import Database  # type: ignore[import-untyped]
+    from pydantic import BaseModel
+
+
+def _create_default_doc_structure(doc_id: str, doc_id_for_rev: str) -> dict:
+    """Create default document structure with standard fields.
+
+    Args:
+        doc_id (str): Document ID to use
+        doc_id_for_rev (str): Document ID for _rev field (for JSON compatibility)
+
+    Returns:
+        dict: Default document structure
+    """
+    return {
+        "_id": doc_id,
+        "_rev": doc_id_for_rev,
+        DF.MODULES: {},
+        DF.DUT: {
+            DF.TYPE: None,
+            DF.NAME: None,
+            DF.REVISION: None,
+            DF.SERIAL_NUMBER: None,
+            DF.PART_NUMBER: None,
+            DF.SUB_UNITS: [],
+            DF.INFO: {},
+        },
+        DF.TEST_STAND: {
+            DF.HW_ID: None,
+            DF.NAME: None,
+            DF.REVISION: None,
+            DF.TIMEZONE: None,
+            DF.LOCATION: None,
+            DF.NUMBER: None,
+            DF.INSTRUMENTS: [],
+            DF.DRIVERS: {},
+            DF.INFO: {},
+        },
+        DF.PROCESS: {
+            DF.NAME: None,
+            DF.NUMBER: None,
+            DF.INFO: {},
+        },
+    }
+
+
+class RunStoreInterface(ABC):
+    """Interface for run storage implementations."""
+
+    @abstractmethod
+    def get_field(self, key: str) -> Any:  # noqa: ANN401
+        """Get field from the run store.
+
+        Args:
+            key (str): Field key, supports nested access with dots
+
+        Returns:
+            Any: Field value
+        """
 
-
-
+    @abstractmethod
+    def update_doc_value(self, key: str, value: Any) -> None:  # noqa: ANN401
+        """Update document value in memory (does not persist).
 
-
+        Args:
+            key (str): Field key, supports nested access with dots
+            value (Any): Value to set
+        """
+
+    @abstractmethod
+    def update_db(self) -> None:
+        """Persist in-memory document to storage backend."""
+
+    @abstractmethod
+    def update_doc(self) -> None:
+        """Reload document from storage backend to memory."""
+
+    @abstractmethod
+    def get_document(self) -> BaseModel:
+        """Get full document with schema validation.
+
+        Returns:
+            BaseModel: Validated document model
+        """
+
+    @abstractmethod
+    def clear(self) -> None:
+        """Clear storage and reset to initial state."""
+
+    @abstractmethod
+    def compact(self) -> None:
+        """Optimize storage (implementation-specific, may be no-op)."""
+
+
+class JsonRunStore(RunStoreInterface):
+    """JSON file-based run storage implementation.
+
+    Stores test run data using JSON files.
     """
 
     def __init__(self) -> None:
-
+        config_manager = ConfigManager()
+        self._store_name = "runstore"
+        config_storage_path = Path(config_manager.config.database.storage_path)
+        if config_storage_path.is_absolute():
+            self._storage_dir = config_storage_path / "storage" / self._store_name
+        else:
+            self._storage_dir = Path(
+                config_manager.tests_path
+                / config_manager.config.database.storage_path
+                / "storage"
+                / self._store_name,
+            )
+        self._storage_dir.mkdir(parents=True, exist_ok=True)
+        self._doc_id = config_manager.config.database.doc_id
+        self._file_path = self._storage_dir / f"{self._doc_id}.json"
         self._log = getLogger(__name__)
+        self._schema: type[BaseModel] = ResultRunStore
+        self._doc: dict = self._init_doc()
+
+    def get_field(self, key: str) -> Any:  # noqa: ANN401
+        """Get field value from document using dot notation.
+
+        Args:
+            key (str): Field key, supports nested access with dots
+
+        Returns:
+            Any: Field value, or None if path does not exist
+        """
+        try:
+            return glom(self._doc, key)
+        except PathAccessError:
+            return None
+
+    def update_doc_value(self, key: str, value: Any) -> None:  # noqa: ANN401
+        """Update document value in memory (does not persist).
+
+        Args:
+            key (str): Field key, supports nested access with dots
+            value (Any): Value to set
+        """
+        try:
+            dumps(value)
+        except Exception:  # noqa: BLE001
+            value = dumps(value, default=str)
+
+        if "." in key:
+            assign(self._doc, key, value, missing=dict)
+        else:
+            self._doc[key] = value
+
+    def update_db(self) -> None:
+        """Persist in-memory document to JSON file with atomic write."""
+        self._storage_dir.mkdir(parents=True, exist_ok=True)
+        temp_file = self._file_path.with_suffix(".tmp")
+
+        try:
+            with temp_file.open("w") as f:
+                json.dump(self._doc, f, indent=2, default=str)
+            temp_file.replace(self._file_path)
+        except Exception as exc:
+            self._log.error(f"Error writing to storage file: {exc}")
+            if temp_file.exists():
+                temp_file.unlink()
+            raise
+
+    def update_doc(self) -> None:
+        """Reload document from JSON file to memory."""
+        if self._file_path.exists():
+            try:
+                with self._file_path.open("r") as f:
+                    self._doc = json.load(f)
+            except json.JSONDecodeError as exc:
+                self._log.error(f"Error reading storage file: {exc}")
+            except Exception as exc:
+                self._log.error(f"Error reading storage file: {exc}")
+                raise
+
+    def get_document(self) -> BaseModel:
+        """Get full document with schema validation.
+
+        Returns:
+            BaseModel: Validated document model
+        """
+        self.update_doc()
+        return self._schema(**self._doc)
+
+    def clear(self) -> None:
+        """Clear storage by resetting to initial state (in-memory only)."""
+        self._doc = _create_default_doc_structure(self._doc_id, self._doc_id)
+
+    def compact(self) -> None:
+        """Optimize storage (no-op for JSON file storage)."""
+
+    def _init_doc(self) -> dict:
+        """Initialize or load document structure."""
+        if self._file_path.exists():
+            try:
+                with self._file_path.open("r") as f:
+                    doc = json.load(f)
+
+                if DF.MODULES not in doc:
+                    doc[DF.MODULES] = {}
+
+                return doc
+            except json.JSONDecodeError:
+                self._log.warning(f"Corrupted storage file {self._file_path},"
+                                  f" creating new")
+            except Exception as exc:  # noqa: BLE001
+                self._log.warning(f"Error loading storage file: {exc}, creating new")
+
+        return _create_default_doc_structure(self._doc_id, self._doc_id)
+
+
+class CouchDBRunStore(RunStoreInterface):
+    """CouchDB-based run storage implementation.
+
+    Stores test run data using CouchDB.
+    Clears the storage on initialization to start fresh.
+    """
+
+    def __init__(self) -> None:
+        from pycouchdb import Server as DbServer  # type: ignore[import-untyped]
+        from pycouchdb.exceptions import (  # type: ignore[import-untyped]
+            Conflict,
+            GenericError,
+        )
+        from requests.exceptions import ConnectionError  # noqa: A004
+
+        config_manager = ConfigManager()
+        config = config_manager.config
+        self._db_srv = DbServer(config.database.url)
+        self._db_name = "runstore"
+        self._doc_id = config.database.doc_id
+        self._log = getLogger(__name__)
+        self._schema: type[BaseModel] = ResultRunStore
+
+        # Initialize database
+        try:
+            self._db: Database = self._db_srv.create(self._db_name)  # type: ignore[name-defined]
+        except Conflict:
+            self._db = self._db_srv.database(self._db_name)
+        except GenericError as exc:
+            msg = f"Error initializing database {exc}"
+            raise RuntimeError(msg) from exc
+        except ConnectionError as exc:
+            msg = f"Error initializing database: {exc}"
+            raise RuntimeError(msg) from exc
+
+        self._doc: dict = self._init_doc()
+
+        # Clear the runstore on initialization for CouchDB
+        try:
+            self.clear()
+        except Exception:  # noqa: BLE001
+            self._log.debug("Runstore storage will be created for the first time")
+
+    def get_field(self, key: str) -> Any:  # noqa: ANN401
+        """Get field from the run store.
+
+        Args:
+            key (str): Field key, supports nested access with dots
+
+        Returns:
+            Any: Field value, or None if path does not exist
+        """
+        try:
+            return glom(self._doc, key)
+        except PathAccessError:
+            return None
+
+    def update_doc_value(self, key: str, value: Any) -> None:  # noqa: ANN401
+        """Update document value in memory (does not persist).
+
+        Args:
+            key (str): Field key, supports nested access with dots
+            value (Any): Value to set
+        """
+        try:
+            dumps(value)
+        except Exception:  # noqa: BLE001
+            value = dumps(value, default=str)
+
+        if "." in key:
+            assign(self._doc, key, value, missing=dict)
+        else:
+            self._doc[key] = value
+
+    def update_db(self) -> None:
+        """Persist in-memory document to storage backend."""
+        from pycouchdb.exceptions import Conflict  # type: ignore[import-untyped]
+
+        try:
+            self._doc = self._db.save(self._doc)
+        except Conflict:
+            self._doc["_rev"] = self._db.get(self._doc_id)["_rev"]
+            self._doc = self._db.save(self._doc)
+
+    def update_doc(self) -> None:
+        """Reload document from storage backend to memory."""
+        self._doc["_rev"] = self._db.get(self._doc_id)["_rev"]
+        self._doc = self._db.get(self._doc_id)
+
+    def get_document(self) -> BaseModel:
+        """Get full document with schema validation.
+
+        Returns:
+            BaseModel: Validated document model
+        """
+        self._doc = self._db.get(self._doc_id)
+        return self._schema(**self._doc)
+
+    def clear(self) -> None:
+        """Clear storage and reset to initial state."""
+        from pycouchdb.exceptions import (  # type: ignore[import-untyped]
+            Conflict,
+            NotFound,
+        )
+
         try:
-            # Clear the runstore database before each launch
             self._db.delete(self._doc_id)
         except (Conflict, NotFound):
-            self._log.debug("
-            self._doc
-
+            self._log.debug("Database will be created for the first time")
+            self._doc = self._init_doc()
+
+    def compact(self) -> None:
+        """Optimize storage (implementation-specific, may be no-op)."""
+        self._db.compact()
+
+    def _init_doc(self) -> dict:
+        """Initialize or load document structure."""
+        from pycouchdb.exceptions import NotFound  # type: ignore[import-untyped]
+
+        try:
+            doc = self._db.get(self._doc_id)
+        except NotFound:
+            # CouchDB doesn't need _rev field in the default structure
+            default = _create_default_doc_structure(self._doc_id, self._doc_id)
+            del default["_rev"]  # CouchDB manages _rev automatically
+            return default
+
+        if DF.MODULES not in doc:
+            doc[DF.MODULES] = {}
+
+        return doc
+
+
+class RunStore(metaclass=SingletonMeta):
+    """HardPy run storage factory for test run data.
+
+    Creates appropriate storage backend based on configuration:
+    - JSON file storage when storage_type is "json"
+    - CouchDB storage when storage_type is "couchdb"
+
+    Save state and case artifact. Supports multiple storage backends
+    through the factory pattern.
+
+    Note: This class acts as a factory. When instantiated, it returns
+    the appropriate concrete implementation (JsonRunStore or CouchDBRunStore).
+    """
+
+    def __new__(cls) -> RunStoreInterface:  # type: ignore[misc]
+        """Create and return the appropriate storage implementation.
+
+        Returns:
+            RunStoreInterface: Concrete storage implementation based on config
+        """
+        config = ConfigManager()
+        storage_type = config.config.database.storage_type
+
+        if storage_type == StorageType.JSON:
+            return JsonRunStore()
+        if storage_type == StorageType.COUCHDB:
+            return CouchDBRunStore()
+        msg = f"Unknown storage type: {storage_type}"
+        raise ValueError(msg)