psr-factory 5.0.0b69__py3-none-manylinux_2_28_x86_64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of psr-factory might be problematic.
- psr/apps/__init__.py +7 -0
- psr/apps/apps.py +225 -0
- psr/apps/version.py +5 -0
- psr/execqueue/client.py +126 -0
- psr/execqueue/config.py +52 -0
- psr/execqueue/db.py +286 -0
- psr/execqueue/server.py +689 -0
- psr/execqueue/watcher.py +146 -0
- psr/factory/__init__.py +7 -0
- psr/factory/api.py +2745 -0
- psr/factory/factory.pmd +7322 -0
- psr/factory/factory.pmk +19461 -0
- psr/factory/factorylib.py +410 -0
- psr/factory/libfactory.so +0 -0
- psr/factory/py.typed +0 -0
- psr/factory/samples/__init__.py +2 -0
- psr/factory/samples/sddp_case01.py +166 -0
- psr/factory/samples/sddp_case21.py +242 -0
- psr/outputs/__init__.py +5 -0
- psr/outputs/outputs.py +179 -0
- psr/outputs/resample.py +289 -0
- psr/psrfcommon/__init__.py +6 -0
- psr/psrfcommon/psrfcommon.py +57 -0
- psr/psrfcommon/tempfile.py +118 -0
- psr/runner/__init__.py +7 -0
- psr/runner/runner.py +743 -0
- psr/runner/version.py +5 -0
- psr_factory-5.0.0b69.dist-info/METADATA +47 -0
- psr_factory-5.0.0b69.dist-info/RECORD +32 -0
- psr_factory-5.0.0b69.dist-info/WHEEL +5 -0
- psr_factory-5.0.0b69.dist-info/licenses/LICENSE.txt +21 -0
- psr_factory-5.0.0b69.dist-info/top_level.txt +1 -0
psr/execqueue/db.py
ADDED
@@ -0,0 +1,286 @@
import datetime
from enum import Enum
from typing import (
    List,
    Optional
)

from sqlalchemy import (
    create_engine,
    Column,
    Integer,
    String,
    ForeignKey,
    Text,
    TIMESTAMP,
)
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, sessionmaker

from psr.execqueue.config import *

class CloudStatus(Enum):
    RUNNING = 1
    FINISHED = 3
    ERROR = 4
    RESULTS_AVAILABLE = 5
    LOGS_AVAILABLE_ERROR = 6


DB_NAME = "app.db"


LOCAL_EXECUTION_RUNNING = 0
LOCAL_EXECUTION_FINISHED = 1
LOCAL_EXECUTION_ERROR = 2


def get_db_path():
    return os.path.join(STORAGE_PATH, DB_NAME)


Base = declarative_base()


class Case(Base):
    __tablename__ = 'cases'

    case_id = Column(String(26), primary_key=True)
    upload_time = Column(TIMESTAMP, default=datetime.datetime.utcnow)
    checksum = Column(Text)
    removed = Column(Integer, default=0)

    local_executions = relationship("LocalExecution", back_populates="case")
    cloud_uploads = relationship("CloudUpload", back_populates="case")
    cloud_executions = relationship("CloudExecution", back_populates="case")


class LocalExecution(Base):
    __tablename__ = 'local_executions'

    execution_id = Column(String(26), primary_key=True)
    case_id = Column(String(26), ForeignKey('cases.case_id'))
    start_time = Column(TIMESTAMP, default=datetime.datetime.utcnow)
    finish_time = Column(TIMESTAMP)
    status = Column(Integer, default=LOCAL_EXECUTION_RUNNING)
    # Module-related fields
    is_module = Column(Integer, default=0)  # 0 = no, 1 = yes
    module = Column(String(128))  # optional module name

    case = relationship("Case", back_populates="local_executions")


class CloudUpload(Base):
    __tablename__ = 'cloud_uploads'

    cloud_upload_id = Column(String(26), primary_key=True)
    case_id = Column(String(26), ForeignKey('cases.case_id'))
    start_time = Column(TIMESTAMP, default=datetime.datetime.utcnow)

    case = relationship("Case", back_populates="cloud_uploads")
    cloud_executions = relationship("CloudExecution", back_populates="cloud_upload")


class CloudExecution(Base):
    __tablename__ = 'cloud_executions'

    repository_id = Column(Integer, primary_key=True)
    cloud_upload_id = Column(String(26), ForeignKey('cloud_uploads.cloud_upload_id'))
    case_id = Column(String(26), ForeignKey('cases.case_id'))
    start_time = Column(TIMESTAMP, default=datetime.datetime.utcnow)
    archived = Column(Integer, default=0)
    status = Column(Integer, default=0)

    case = relationship("Case", back_populates="cloud_executions")
    cloud_upload = relationship("CloudUpload", back_populates="cloud_executions")


def initialize():
    # Create the SQLite database and create the tables
    _db_path = get_db_path()
    _create_db = not os.path.exists(_db_path)
    engine = create_engine(f'sqlite:///{_db_path}')
    Session = sessionmaker(bind=engine)
    session = Session()

    if _create_db:
        Base.metadata.create_all(engine)
        _first_time_setup(session)
    else:
        # Basic migration: add columns if missing
        # Note: SQLite supports limited ALTERs; use simple try/except for idempotency
        from sqlalchemy import inspect
        inspector = inspect(engine)
        cols = {c['name'] for c in inspector.get_columns('local_executions')}
        with engine.connect() as conn:
            if 'is_module' not in cols:
                try:
                    conn.execute("ALTER TABLE local_executions ADD COLUMN is_module INTEGER DEFAULT 0")
                except Exception:
                    pass
            if 'module' not in cols:
                try:
                    conn.execute("ALTER TABLE local_executions ADD COLUMN module VARCHAR(128)")
                except Exception:
                    pass

    return session, engine


def close(session):
    session.close()


def _first_time_setup(session):
    pass


def register_case(session, case_id, checksum):
    case = Case(case_id=case_id,
                checksum=checksum,
                upload_time=datetime.datetime.utcnow()
                )
    session.add(case)
    session.commit()
    # registry.configure()

    return case


def register_local_execution(session, case_id: str, execution_id: str, *, is_module: int = 0, module: Optional[str] = None):
    case = session.query(Case).filter(Case.case_id == case_id).first()
    local_execution = LocalExecution(
        execution_id=execution_id,
        case_id=case_id,
        start_time=datetime.datetime.utcnow(),
        is_module=is_module,
        module=module,
    )
    case.local_executions.append(local_execution)
    session.commit()
    return local_execution


def get_case_id_from_execution_id(session, execution_id: str) -> Optional[str]:
    local_execution = session.query(LocalExecution).filter(LocalExecution.execution_id == execution_id).first()
    return local_execution.case_id if local_execution else None


def update_local_execution_status(session, execution_id: str, status: int) -> bool:
    local_execution = session.query(LocalExecution).filter(LocalExecution.execution_id == execution_id).first()
    if local_execution:
        if status not in [LOCAL_EXECUTION_FINISHED, LOCAL_EXECUTION_ERROR,
                          LOCAL_EXECUTION_RUNNING]:
            raise ValueError("Wrong status for update.")
        local_execution.status = status
        if status in [LOCAL_EXECUTION_FINISHED, LOCAL_EXECUTION_ERROR]:
            local_execution.finish_time = datetime.datetime.utcnow()
        session.commit()
        return True
    return True


def update_cloud_execution_status(session, repository_id: int, status: int) -> bool:
    cloud_execution = session.query(CloudExecution).filter(CloudExecution.repository_id == repository_id).first()
    if cloud_execution:
        if CloudStatus(status) not in CloudStatus:
            raise ValueError("Wrong status for update.")
        cloud_execution.status = status
        session.commit()
        return True
    return False


def get_local_execution_status(session, execution_id: str) -> Optional[int]:
    local_execution = session.query(LocalExecution).filter(LocalExecution.execution_id == execution_id).first()
    return local_execution.status if local_execution else None


def any_running_modules_for_case(session, case_id: str) -> bool:
    return session.query(LocalExecution).filter(
        LocalExecution.case_id == case_id,
        LocalExecution.is_module == 1,
        LocalExecution.status == LOCAL_EXECUTION_RUNNING
    ).count() > 0


def any_failed_modules_for_case(session, case_id: str) -> bool:
    return session.query(LocalExecution).filter(
        LocalExecution.case_id == case_id,
        LocalExecution.is_module == 1,
        LocalExecution.status == LOCAL_EXECUTION_ERROR
    ).count() > 0


def last_module_execution_for_case(session, case_id: str, module: Optional[str] = None) -> Optional[LocalExecution]:
    q = session.query(LocalExecution).filter(
        LocalExecution.case_id == case_id,
        LocalExecution.is_module == 1
    )
    if module:
        q = q.filter(LocalExecution.module == module)
    return q.order_by(LocalExecution.start_time.desc()).first()

def get_distinct_module_names_for_case(session, case_id: str) -> List[str]:
    rows = session.query(LocalExecution.module).filter(
        LocalExecution.case_id == case_id,
        LocalExecution.is_module == 1,
        LocalExecution.module.isnot(None)
    ).distinct().all()
    # rows is a list of tuples [(module,), ...]
    return [r[0] for r in rows if r and r[0]]


def register_cloud_upload(session, case_id: str, cloud_upload_id: str):
    case = session.query(Case).filter(Case.case_id == case_id).first()
    cloud_upload = CloudUpload(cloud_upload_id=cloud_upload_id,
                               case_id=case_id,
                               start_time=datetime.datetime.utcnow()
                               )
    case.cloud_uploads.append(cloud_upload)
    session.commit()
    return cloud_upload


def register_cloud_execution(session, repository_id: int, cloud_upload_id: str, case_id: str):
    cloud_upload = session.query(CloudUpload).filter(CloudUpload.cloud_upload_id == cloud_upload_id).first()
    cloud_execution = CloudExecution(repository_id=repository_id,
                                     cloud_upload_id=cloud_upload_id,
                                     case_id=case_id,
                                     start_time=datetime.datetime.utcnow(),
                                     status=CloudStatus.RUNNING.value
                                     )
    cloud_upload.cloud_executions.append(cloud_execution)
    session.commit()
    return cloud_execution


def get_case_id_from_cloud_execution_id(session, repository_id: int) -> Optional[str]:
    cloud_execution = session.query(CloudExecution).filter(CloudExecution.repository_id == repository_id).first()
    return cloud_execution.case_id if cloud_execution else None


def get_case_id_from_repository_id(session, repository_id: int) -> Optional[str]:
    cloud_execution = session.query(CloudExecution).filter(CloudExecution.repository_id == repository_id).first()
    return cloud_execution.case_id if cloud_execution else None

def get_repository_id_from_cloud_upload_id(session, cloud_upload_id: str) -> Optional[int]:
    cloud_execution = session.query(CloudExecution).filter(CloudExecution.cloud_upload_id == cloud_upload_id).first()
    return cloud_execution.repository_id if cloud_execution else None

def get_repository_ids_from_case_id(session, case_id: str) -> List[int]:
    cloud_executions = session.query(CloudExecution).filter(CloudExecution.case_id == case_id).all()
    return [ce.repository_id for ce in cloud_executions]

def get_runing_cloud_executions(session) -> List[CloudExecution]:
    return session.query(CloudExecution).filter(CloudExecution.status == CloudStatus.RUNNING.value).all()

def get_cloud_execution_status(session, repository_id: int) -> Optional[int]:
    cloud_execution = session.query(CloudExecution).filter(CloudExecution.repository_id == repository_id).first()
    return cloud_execution.status if cloud_execution else None

def get_cloud_finished_executions(session) -> List[CloudExecution]:
    return session.query(CloudExecution).filter(CloudExecution.status == CloudStatus.FINISHED.value).all()

def get_cloud_failed_executions(session) -> List[CloudExecution]:
    return session.query(CloudExecution).filter(CloudExecution.status == CloudStatus.ERROR.value).all()
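For orientation, a minimal usage sketch of this module follows. It is not part of the package: it assumes STORAGE_PATH is supplied by psr.execqueue.config, and the case identifier, execution identifier, checksum, and module name below are made-up placeholders.

# Hypothetical usage sketch of psr.execqueue.db (illustration only).
import psr.execqueue.db as db

session, engine = db.initialize()  # creates app.db under STORAGE_PATH on first run

try:
    # Register an uploaded case (26-character id, as suggested by String(26)).
    case = db.register_case(session,
                            case_id="01HTEXAMPLECASEID000000000",
                            checksum="example-checksum")

    # Record a module run tied to that case.
    execution = db.register_local_execution(
        session,
        case_id=case.case_id,
        execution_id="01HTEXAMPLEEXECID000000000",
        is_module=1,
        module="example-module",
    )

    # ... run the job, then record its outcome.
    db.update_local_execution_status(session, execution.execution_id,
                                     db.LOCAL_EXECUTION_FINISHED)
    print(db.any_running_modules_for_case(session, case.case_id))  # expected: False
finally:
    db.close(session)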