psr-factory 5.0.0b10__py3-none-win_amd64.whl → 5.0.0b13__py3-none-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,383 @@
1
+ import hashlib
2
+ import queue
3
+ import shutil
4
+ import sys
5
+ import threading
6
+ import time
7
+ import zipfile
8
+ from dotenv import load_dotenv
9
+ from flask import (
10
+ Flask,
11
+ request,
12
+ jsonify,
13
+ send_file
14
+ )
15
+ import ulid
16
+
17
+
18
+ import psr.runner
19
+ import psr.cloud
20
+
21
+ from psr.execqueue.config import *
22
+ from psr.execqueue import db
23
+
24
# Work queues feeding the background worker threads started further below.
_execution_queue = queue.Queue()      # (execution_id, case_id) pairs for local SDDP runs
_cloud_upload_queue = queue.Queue()   # (cloud_upload_id, case_id) pairs for PSR Cloud runs


# Ensure every working directory exists before any request is served.
# The *_FOLDER constants (and `os` itself) come from the star import of
# psr.execqueue.config above -- TODO confirm.
os.makedirs(UPLOADS_FOLDER, exist_ok=True)
os.makedirs(LOCAL_RESULTS_FOLDER, exist_ok=True)
os.makedirs(CLOUD_RESULTS_FOLDER, exist_ok=True)
os.makedirs(TEMPORARY_UPLOAD_FOLDER, exist_ok=True)


# Pull environment variables (cloud credentials, etc.) from a .env file.
load_dotenv()


# Connect to PSR Cloud at import time; abort startup on bad credentials.
try:
    client = psr.cloud.Client(cluster=psrcloud_cluster, verbose=True)
except psr.cloud.CloudInputError as e:
    print(f"Error connecting to PSR Cloud. Check user credentials: {e}")
    exit(1)

# Map of cloud execution ids to case data; not referenced anywhere in this
# file -- NOTE(review): looks unused, confirm before removing.
_cloud_execution_case_map = {}

app = Flask(__name__)
# Database session shared by handlers and worker threads; assigned in the
# __main__ block via initialize_db().
session = None
47
+
48
+
49
def get_file_checksum(file_path: str) -> str:
    """Return the hex MD5 digest of the file at *file_path*.

    The file is read in fixed-size chunks so arbitrarily large uploads do
    not need to fit in memory at once (the original read the whole file in
    one call).  MD5 is used purely as a content fingerprint for case
    bookkeeping, not for any security purpose.
    """
    digest = hashlib.md5()
    with open(file_path, 'rb') as file:
        # iter() with a sentinel yields chunks until read() returns b"".
        for chunk in iter(lambda: file.read(1024 * 1024), b''):
            digest.update(chunk)
    return digest.hexdigest()
52
+
53
+
54
def run_local_case(execution_id: str, case_path: str):
    """Run SDDP locally on the extracted case at *case_path* and record the
    outcome for *execution_id* in the database.

    Blocking; called from the local-queue worker thread.  Only RuntimeError
    from the runner is treated as a run failure -- other exceptions
    propagate to the caller.
    """
    global session
    success = False
    try:
        # `sddp_path` presumably comes from the config star import -- TODO
        # confirm.  parallel_run=False: one case at a time on this host.
        psr.runner.run_sddp(case_path, sddp_path, parallel_run=False)
        success = True
    except RuntimeError as e:
        print(f"Error running {execution_id}: {e}")

    # Persist final status regardless of success or failure.
    status = db.LOCAL_EXECUTION_FINISHED if success else db.LOCAL_EXECUTION_ERROR
    db.update_local_execution_status(session, execution_id, status)
65
+
66
+
67
def initialize_db():
    """Create and return the database session used by the HTTP handlers
    and the background worker threads."""
    db_session, _engine = db.initialize()  # the engine handle is not needed here
    return db_session
70
+
71
+
72
def run_cloud_case(execution_id: str, case_path: str,
                   program: str = "SDDP",
                   program_version: str = "17.3.9",
                   number_of_processes: int = 64,
                   repository_duration: int = 1) -> str:
    """Submit the case at *case_path* to PSR Cloud and return the cloud
    case id as a string.

    The execution parameters that were hard-coded are now keyword
    arguments with the same values as defaults, so existing callers are
    unaffected while other programs/versions can be requested.

    :param execution_id: local id used to label the cloud run.
    :param case_path: folder containing the extracted case inputs.
    :return: the cloud-assigned case id, stringified.
    """
    global client
    # Build the cloud run description and submit it.
    case = psr.cloud.Case(
        name="LSEG Server " + execution_id,
        data_path=case_path,
        program=program,
        program_version=program_version,
        execution_type="Default",
        memory_per_process_ratio='2:1',
        price_optimized=False,
        number_of_processes=number_of_processes,
        repository_duration=repository_duration,
    )
    case_id = client.run_case(case)

    return str(case_id)
89
+
90
+
91
def process_local_execution_queue():
    """Worker loop: take (execution_id, case_id) pairs off the local queue,
    extract the uploaded zip and run SDDP on it.

    Runs forever in the daemon thread started immediately below.
    """
    global session
    while True:
        execution_id, case_id = _execution_queue.get()
        try:
            print(f"Processing {execution_id}...")
            # Extract the uploaded archive into a per-execution folder.
            execution_extraction_path = os.path.join(LOCAL_RESULTS_FOLDER, execution_id)
            os.makedirs(execution_extraction_path, exist_ok=True)

            zip_upload_path = os.path.join(UPLOADS_FOLDER, case_id + ".zip")

            with zipfile.ZipFile(zip_upload_path, 'r') as zip_ref:
                zip_ref.extractall(execution_extraction_path)
            # Run SDDP on the extracted case (blocking; records status in DB).
            run_local_case(execution_id, execution_extraction_path)

        except Exception as e:
            # Broad catch keeps the worker thread alive on per-item failures.
            print(f"Error processing {execution_id}: {e}")
        finally:
            _execution_queue.task_done()


# Start the local-execution worker as a daemon so it dies with the process.
threading.Thread(target=process_local_execution_queue, daemon=True).start()
115
+
116
+
117
def process_cloud_execution_queue():
    """Worker loop: take (cloud_upload_id, case_id) pairs off the cloud
    queue, submit the case to PSR Cloud and register the run in the DB.

    Runs forever in the daemon thread started immediately below.
    """
    global client
    global session
    while True:
        cloud_upload_id, case_id = _cloud_upload_queue.get()
        try:
            print(f"Processing {cloud_upload_id}...")
            # Extract the uploaded archive into a temporary folder that is
            # used only for the cloud submission.
            zip_upload_path = os.path.join(UPLOADS_FOLDER, case_id + ".zip")
            tmp_extraction_path = os.path.join(TEMPORARY_UPLOAD_FOLDER, cloud_upload_id)

            os.makedirs(tmp_extraction_path, exist_ok=True)
            with zipfile.ZipFile(zip_upload_path, 'r') as zip_ref:
                zip_ref.extractall(tmp_extraction_path)

            # Submit to PSR Cloud; returns the repository id of the run.
            repository_id = run_cloud_case(cloud_upload_id, tmp_extraction_path)

            # The temporary upload copy is no longer needed.
            shutil.rmtree(tmp_extraction_path)

            # Extract the same zip again under the repository id --
            # presumably so the input files sit next to the results that
            # will later be downloaded there -- TODO confirm intent.
            execution_extraction_path = os.path.join(CLOUD_RESULTS_FOLDER, repository_id)
            os.makedirs(execution_extraction_path, exist_ok=True)
            with zipfile.ZipFile(zip_upload_path, 'r') as zip_ref:
                zip_ref.extractall(execution_extraction_path)

            db.register_cloud_execution(session, repository_id, cloud_upload_id, case_id)

        except Exception as e:
            # Broad catch keeps the worker thread alive on per-item failures.
            print(f"Error processing {cloud_upload_id}: {e}")
        finally:
            _cloud_upload_queue.task_done()


# Start the cloud-submission worker as a daemon so it dies with the process.
threading.Thread(target=process_cloud_execution_queue, daemon=True).start()
152
+
153
+
154
def monitor_cloud_runs():
    """Background loop: poll PSR Cloud for every execution still marked as
    running, record terminal states in the DB, and download the results of
    successfully finished runs.

    Runs forever in the daemon thread started immediately below.  Each
    cycle is wrapped in a try/except: previously a single transient cloud
    or DB error escaped the loop and silently killed the monitor thread
    for the remainder of the process lifetime.
    """
    global client
    global session

    # Let the initial burst of cloud uploads drain before polling statuses.
    while not _cloud_upload_queue.empty():
        time.sleep(10)

    while True:
        try:
            if session:
                # Check the status of every execution still marked running.
                for cloud_execution in db.get_runing_cloud_executions(session):
                    case_id = cloud_execution.repository_id
                    print(f"Checking status of {case_id}...")
                    status, status_msg = client.get_status(case_id)
                    if status in psr.cloud.FAULTY_TERMINATION_STATUS:
                        print(f"Execution {case_id} finished with errors")
                        db.update_cloud_execution_status(session, case_id, db.CloudStatus.ERROR.value)
                    elif status == psr.cloud.ExecutionStatus.SUCCESS:
                        print(f"Execution {case_id} finished successfully")
                        db.update_cloud_execution_status(session, case_id, db.CloudStatus.FINISHED.value)

                # Download results of executions that finished successfully.
                for cloud_execution in db.get_cloud_finished_executions(session):
                    repository_id = cloud_execution.repository_id
                    print(f"Downloading results for {repository_id}...")
                    result_path = os.path.join(CLOUD_RESULTS_FOLDER, str(repository_id))
                    client.download_results(repository_id, result_path)
                    db.update_cloud_execution_status(session, repository_id, db.CloudStatus.RESULTS_AVAILABLE.value)
            else:
                print("Database not initialized. Retrying in 30s...")
        except Exception as e:
            # A transient error must not kill the monitor thread.
            print(f"Error monitoring cloud runs: {e}")
        # Sleep every cycle so the loop never busy-spins.
        time.sleep(30)


# Start the cloud monitor as a daemon so it dies with the process.
threading.Thread(target=monitor_cloud_runs, daemon=True).start()
188
+
189
@app.route('/upload', methods=['POST'])
def upload_file():
    """Receive a zipped case, store it under a fresh ULID case id and
    register it (with its checksum) in the database."""
    global session
    uploaded = request.files.get('file')
    if uploaded is None:
        return jsonify({'error': 'No file part'}), 400
    if uploaded.filename == '':
        return jsonify({'error': 'No selected file'}), 400

    # Persist the archive under a unique, sortable case id.
    case_id = str(ulid.ULID())
    zip_path = os.path.join(UPLOADS_FOLDER, f"{case_id}.zip")
    uploaded.save(zip_path)

    checksum = get_file_checksum(zip_path)
    db.register_case(session, case_id, checksum)

    return jsonify({'case_id': case_id}), 200
207
+
208
+
209
# route to run an uploaded file
@app.route('/run', methods=['POST'])
def run_endpoint():
    """Queue a previously uploaded case for execution, locally or on
    PSR Cloud depending on the `cloud_execution` form flag."""
    global session
    use_cloud = request.form.get('cloud_execution', 'false').lower() == 'true'
    case_id = request.form.get('case_id')

    if not case_id:
        return jsonify({'error': 'Case ID not provided'}), 400

    zip_case_path = os.path.join(UPLOADS_FOLDER, f"{case_id}.zip")
    if not os.path.exists(zip_case_path):
        return jsonify({'error': 'Upload file for this case ID not found'}), 404

    if not use_cloud:
        # Local run: hand off to the local worker thread.
        execution_id = str(ulid.ULID())
        _execution_queue.put((execution_id, case_id))
        db.register_local_execution(session, case_id, execution_id)
        return jsonify({'case_id': case_id, 'execution_id': execution_id}), 200

    # Cloud run: hand off to the cloud-submission worker thread.
    cloud_upload_id = str(ulid.ULID())
    _cloud_upload_queue.put((cloud_upload_id, case_id))
    db.register_cloud_upload(session, case_id, cloud_upload_id)
    return jsonify({'case_id': case_id, 'cloud_upload_id': cloud_upload_id}), 200
237
+
238
+
239
@app.route('/upload_and_run', methods=['POST'])
def upload_and_run_file():
    """Receive a zipped case and immediately queue it for execution,
    combining the /upload and /run endpoints in one request."""
    global session
    uploaded = request.files.get('file')
    if uploaded is None:
        return jsonify({'error': 'No file part'}), 400
    if uploaded.filename == '':
        return jsonify({'error': 'No selected file'}), 400

    run_on_cloud = request.form.get('cloud_execution', 'false').lower() == 'true'

    # Store the upload under a fresh case id and register its checksum.
    case_id = str(ulid.ULID())
    zip_path = os.path.join(UPLOADS_FOLDER, f"{case_id}.zip")
    uploaded.save(zip_path)
    db.register_case(session, case_id, get_file_checksum(zip_path))

    if run_on_cloud:
        cloud_upload_id = str(ulid.ULID())
        _cloud_upload_queue.put((cloud_upload_id, case_id))
        db.register_cloud_upload(session, case_id, cloud_upload_id)
        return jsonify({'case_id': case_id, 'cloud_upload_id': cloud_upload_id}), 200

    execution_id = str(ulid.ULID())
    _execution_queue.put((execution_id, case_id))
    db.register_local_execution(session, case_id, execution_id)
    return jsonify({'case_id': case_id, 'execution_id': execution_id}), 200
266
+
267
+
268
@app.route('/status/<execution_id>', methods=['GET'])
def get_status(execution_id):
    """
    Get the status of an execution
    ---
    tags:
      - Execution
    parameters:
      - name: execution_id
        in: path
        type: string
        required: true
        description: The ID of the execution
    responses:
      200:
        description: Execution status
        schema:
          type: string
      404:
        description: Execution ID not found
    """
    global client
    global session
    # Bug fix: this is a GET route, so request.form is always empty and the
    # flags never reached the handler.  request.values reads the query
    # string first and falls back to form data, keeping old clients working.
    cloud_execution = request.values.get('cloud_execution', 'false').lower() == 'true'
    return_status_id = request.values.get('return_status_id', 'false').lower() == 'true'

    if cloud_execution:
        # Map the public cloud upload id to the cloud repository id.
        repository_id = db.get_repository_id_from_cloud_upload_id(session, execution_id)
        if repository_id is None:
            return jsonify({'error': 'Execution ID not found in Cloud'}), 404
        status = db.get_cloud_execution_status(session, repository_id)
        if return_status_id:
            return jsonify({'status_id': status}), 200
        if status == db.CloudStatus.ERROR.value:
            return jsonify({'status': 'Execution finished with errors'}), 200
        elif status == db.CloudStatus.RUNNING.value:
            return jsonify({'status': 'Execution not finished yet'}), 200
        elif status == db.CloudStatus.FINISHED.value:
            return jsonify({'status': 'Execution finished, but download not yet downloaded from Cloud server'}), 200
        elif status == db.CloudStatus.RESULTS_AVAILABLE.value:
            return jsonify({'status': 'Execution finished and results are available to download'}), 200
        # Bug fix: an unknown status previously fell through and returned
        # None, which Flask turns into a 500.  Report the raw id instead.
        return jsonify({'status_id': status}), 200

    # Bug fix: local executions previously had no branch at all here (the
    # handler returned None -> 500).  Report the stored local status.
    status = db.get_local_execution_status(session, execution_id)
    if status is None:
        return jsonify({'error': 'Execution ID not found'}), 404
    if return_status_id:
        return jsonify({'status_id': status}), 200
    if status == db.LOCAL_EXECUTION_ERROR:
        return jsonify({'status': 'Execution finished with errors'}), 200
    if status == db.LOCAL_EXECUTION_FINISHED:
        return jsonify({'status': 'Execution finished and results are available to download'}), 200
    return jsonify({'status': 'Execution not finished yet'}), 200
309
+
310
@app.route('/results/<execution_id>', methods=['GET'])
def get_results(execution_id: str):
    """List the result files of a finished execution.

    For cloud runs *execution_id* is the cloud upload id; it is mapped to
    the repository id under which the results folder is stored on disk.
    NOTE(review): the 401/402/403 codes are non-standard for these cases
    but are kept because existing clients may depend on them.
    """
    global session
    global client
    # Bug fix: this is a GET route, so request.form is always empty and the
    # flag never reached the handler; request.values also covers form data.
    cloud_execution = request.values.get('cloud_execution', 'false').lower() == 'true'

    if cloud_execution:
        repository_id = db.get_repository_id_from_cloud_upload_id(session, execution_id)
        if repository_id is None:
            # Bug fix: the status code was missing here -- a trailing comma
            # returned a 1-tuple, which Flask rejects with a 500.
            return jsonify({'error': 'Execution ID not found in Cloud'}), 404
        # Bug fix: the status table is keyed by repository id (as in
        # get_status), not by the cloud upload id.
        status = db.get_cloud_execution_status(session, repository_id)

        # Bug fix: statuses are stored as .value ints elsewhere, so compare
        # against the enum values, not the enum members.
        if status == db.CloudStatus.ERROR.value:
            return jsonify({'error': 'Execution Finished with errors'}), 401
        elif status == db.CloudStatus.RUNNING.value:
            return jsonify({'error': 'Execution not finished yet'}), 402
        elif status == db.CloudStatus.FINISHED.value:
            return jsonify({'error': 'Results not available yet'}), 403
        else:
            # Results available: list the downloaded result folder.
            result_path = os.path.join(CLOUD_RESULTS_FOLDER, str(repository_id))
            if not os.path.exists(result_path):
                return jsonify({'error': 'Execution result folder not found'}), 404
            result_files = os.listdir(result_path)
            return jsonify({'execution_id': repository_id, 'files': result_files}), 200
    else:
        status = db.get_local_execution_status(session, execution_id)
        if status == db.LOCAL_EXECUTION_ERROR:
            return jsonify({'error': 'Execution finished with errors'}), 401
        if status != db.LOCAL_EXECUTION_FINISHED:
            return jsonify({'error': 'Execution not finished yet'}), 402
        result_path = os.path.join(LOCAL_RESULTS_FOLDER, execution_id)
        if not os.path.exists(result_path):
            return jsonify({'error': 'Execution result folder not found'}), 404
        result_files = os.listdir(result_path)
        return jsonify({'execution_id': execution_id, 'files': result_files}), 200
346
+
347
+
348
@app.route('/results/<execution_id>/<file>', methods=['GET'])
def download_file(execution_id: str, file):
    """Send one result file of an execution to the client as an attachment."""
    global session

    # Bug fix: this is a GET route, so request.form is always empty and the
    # flag never reached the handler; request.values also covers form data.
    cloud_execution = request.values.get('cloud_execution', 'false').lower() == 'true'

    # Security: reject any path component in the file name.  Flask's default
    # converter blocks "/", but on Windows (this is a win_amd64 package) a
    # name like "..\\secret" would otherwise escape the results folder.
    if os.path.basename(file) != file or file in ('.', '..'):
        return jsonify({'error': 'File not found'}), 404

    if cloud_execution:
        repository_id = db.get_repository_id_from_cloud_upload_id(session, execution_id)
        result_path = os.path.join(CLOUD_RESULTS_FOLDER, str(repository_id))
    else:
        result_path = os.path.join(LOCAL_RESULTS_FOLDER, execution_id)
    if not os.path.exists(result_path):
        return jsonify({'error': 'Execution result folder not found'}), 404

    file_path = os.path.join(result_path, file)
    if not os.path.exists(file_path):
        return jsonify({'error': 'File not found'}), 404

    try:
        return send_file(file_path, as_attachment=True)
    except Exception as e:
        # send_file failures (permissions, races) are reported to the client.
        return jsonify({'error': str(e)}), 500
370
+
371
+
372
if __name__ == '__main__':
    print("Starting server...")
    # Create the DB session shared by request handlers and worker threads.
    session = initialize_db()
    try:
        # FLASK_DEBUG, settings and DEFAULT_PORT presumably come from the
        # psr.execqueue.config star import -- TODO confirm.
        # use_reloader=False prevents the worker threads from being started
        # twice by Flask's reloader process.
        app.run(debug=FLASK_DEBUG,
                port=settings.get("port", DEFAULT_PORT),
                threaded=True,
                use_reloader=False,)
    except Exception as e:
        print(f"Error starting server: {e}")
        sys.exit(1)
383
+
@@ -0,0 +1,128 @@
1
+ import os
2
+ import time
3
+ import shutil
4
+ import logging
5
+ import sqlite3
6
+ from datetime import datetime
7
+ from dotenv import load_dotenv
8
+ import psr.execqueue.client as execqueue
9
+
10
# Read the watcher's configuration from the environment / a .env file.
load_dotenv()
SERVER_URL = os.getenv("SERVER_URL", "http://127.0.0.1:5000")  # execqueue server endpoint
WATCH_DIR = os.getenv("WATCH_DIR")          # folder scanned for new .zip cases (required)
PROCESSED_DIR = os.getenv("PROCESSED_DIR")  # where submitted archives are moved (required)
RESULTS_DIR = os.getenv("RESULTS_DIR", "results")  # where downloaded results are written
SLEEP_SECONDS = int(os.getenv("WATCHER_SLEEP", "30"))  # poll interval, seconds
DB_PATH = os.getenv("WATCHER_DB_PATH", "watcher.sqlite")  # bookkeeping SQLite file
17
+
18
+
19
def _init_db():
    """Create the watcher's bookkeeping table if it does not exist yet."""
    conn = sqlite3.connect(DB_PATH)
    try:
        # connection.execute() opens an implicit cursor for the DDL.
        conn.execute(
            """
            CREATE TABLE IF NOT EXISTS processed_files (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                filename TEXT NOT NULL,
                cloud_upload_id TEXT NOT NULL,
                processed_at TEXT NOT NULL,
                downloaded INTEGER NOT NULL DEFAULT 0
            )
            """
        )
        conn.commit()
    finally:
        conn.close()
33
+
34
+
35
def _log_to_db(filename, cloud_upload_id):
    """Record a freshly submitted zip so its results can be fetched later."""
    row = (filename, cloud_upload_id, datetime.now().isoformat(), 0)
    conn = sqlite3.connect(DB_PATH)
    try:
        conn.execute(
            "INSERT INTO processed_files (filename, cloud_upload_id, processed_at, downloaded) VALUES (?, ?, ?, ?)",
            row,
        )
        conn.commit()
    finally:
        conn.close()
44
+
45
+
46
def _process_zip_files():
    """Scan WATCH_DIR for .zip cases, upload and launch each one on the
    cloud, record it in the local DB, and move the archive to PROCESSED_DIR.
    """
    for filename in os.listdir(WATCH_DIR):
        if not filename.lower().endswith('.zip'):
            continue
        zip_path = os.path.join(WATCH_DIR, filename)
        if not os.path.isfile(zip_path):
            # Robustness: ignore directories that happen to end in ".zip".
            continue
        logging.info(f"zip file found: {zip_path}")

        case_id = execqueue.upload_case_file(zip_path, SERVER_URL)
        if not case_id:
            logging.error(f"Failed uploading file {zip_path}")
            continue

        cloud_upload_id = execqueue.run_case(case_id, SERVER_URL, cloud_execution=True)
        if not cloud_upload_id:
            logging.error(f"Failed executing case {case_id} with {zip_path}")
            continue

        # Bug fix: the log messages contained a broken "(unknown)"
        # placeholder instead of the actual file name.
        logging.info(f"File {filename} uploaded and execution started. Cloud Upload ID: {cloud_upload_id}")
        _log_to_db(filename, cloud_upload_id)
        dest_path = os.path.join(PROCESSED_DIR, filename)
        shutil.move(zip_path, dest_path)
        logging.info(f"File {filename} moved to {PROCESSED_DIR}")
67
+
68
+
69
def _check_and_download_results():
    """Poll the server for every submitted case not yet downloaded and
    fetch the result files of those that finished.

    Status ids are compared as "5"/"4" (string or int).  These presumably
    map to the server's CloudStatus values (5 = results available,
    4 = error) -- TODO confirm against psr.execqueue.db.CloudStatus.
    """
    conn = sqlite3.connect(DB_PATH)
    cursor = conn.cursor()
    # Only rows still flagged downloaded=0 are pending.
    cursor.execute("SELECT id, filename, cloud_upload_id FROM processed_files WHERE downloaded=0")
    rows = cursor.fetchall()
    for row in rows:
        record_id, filename, cloud_upload_id = row
        status = execqueue.get_execution_status(cloud_upload_id, SERVER_URL, cloud_execution=True, return_status_id=True)
        if status == "5" or status == 5:
            files = execqueue.get_results(cloud_upload_id, SERVER_URL, cloud_execution=True)
            if files:
                # Download into "<zip-basename>-<upload-id>" under RESULTS_DIR.
                base_filename = os.path.splitext(filename)[0]
                download_folder_name = f"{base_filename}-{cloud_upload_id}"
                download_path = os.path.join(RESULTS_DIR, download_folder_name)
                os.makedirs(download_path, exist_ok=True)
                for file in files:
                    execqueue.download_execution_file(cloud_upload_id, SERVER_URL, file, download_path,
                                                      cloud_execution=True)
                # Update downloaded flag
                cursor.execute("UPDATE processed_files SET downloaded=1 WHERE id=?", (record_id,))
                conn.commit()
                logging.info(f"Results of {cloud_upload_id} downloaded to {download_path}")
        elif status == "4" or status == 4:
            logging.info(f"Execution {cloud_upload_id} is finished with errors.")
            # NOTE(review): downloaded=4 overloads the boolean flag to mark
            # errored runs; the downloaded=0 filter above then skips them.
            cursor.execute("UPDATE processed_files SET downloaded=4 WHERE id=?", (record_id,))
            conn.commit()
    conn.close()
96
+
97
+
98
if __name__ == "__main__":
    # WATCH_DIR and PROCESSED_DIR are mandatory; the rest have defaults.
    if not WATCH_DIR or not PROCESSED_DIR:
        print("WATCH_DIR and PROCESSED_DIR must be set as environment variables or in a .env file")
        exit(1)

    # Log to watcher.log in the current working directory.
    LOG_FILE = os.path.join(os.getcwd(), "watcher.log")
    logging.basicConfig(
        filename=LOG_FILE,
        level=logging.INFO,
        format='%(asctime)s - %(levelname)s - %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S'
    )
    # log to standard output as well
    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.INFO)
    logging.getLogger().addHandler(console_handler)

    # Make sure every working directory exists before the first scan.
    os.makedirs(WATCH_DIR, exist_ok=True)
    os.makedirs(PROCESSED_DIR, exist_ok=True)
    os.makedirs(RESULTS_DIR, exist_ok=True)
    _init_db()
    logging.info(f"Case watcher started. Monitoring directory for SDDP cases: {WATCH_DIR}")

    # Main loop: fetch finished results first, then submit any new zips;
    # a broad catch keeps the watcher alive across transient failures.
    while True:
        try:
            _check_and_download_results()
            _process_zip_files()

        except Exception as e:
            logging.error(f"Watcher error: {e}", exc_info=True)
        time.sleep(SLEEP_SECONDS)
psr/factory/__init__.py CHANGED
@@ -2,6 +2,6 @@
2
2
  # Unauthorized copying of this file, via any medium is strictly prohibited
3
3
  # Proprietary and confidential
4
4
 
5
- __version__ = "5.0.0b10"
5
+ __version__ = "5.0.0b13"
6
6
 
7
7
  from .api import *
psr/factory/api.py CHANGED
@@ -173,18 +173,6 @@ def build_version() -> str:
173
173
  return _version_short()
174
174
 
175
175
 
176
- def diagnostics() -> str:
177
- """Run diagnostics and returns the results as text.
178
- Use for troubleshoot and debug purposes."""
179
- _check_loaded()
180
- size = 1000
181
- buffer = ctypes.create_string_buffer(size)
182
- status = factorylib.lib.psrd_diagnostics(buffer, size)
183
- if status == 0:
184
- return _from_c_str(buffer.value)
185
- return ""
186
-
187
-
188
176
  def get_log_level() -> LogLevel:
189
177
  """Get log level."""
190
178
  _check_loaded()
@@ -206,14 +194,57 @@ def set_log_level(log_level: LogLevel):
206
194
  raise FactoryException("Error setting log level")
207
195
 
208
196
 
197
+ def get_log_file_path() -> str:
198
+ """Get log file path."""
199
+ _check_loaded()
200
+ error = Error()
201
+ size = 1000
202
+ buffer = ctypes.create_string_buffer(size)
203
+ code = factorylib.lib.psrd_get_log_file_path(buffer, size, error.handler())
204
+ if code != 0:
205
+ raise FactoryException("Error getting log file path")
206
+ return _from_c_str(buffer.value)
207
+
208
+
209
209
  def set_debug_mode(value: Union[bool, int]):
210
+ warnings.warn(DeprecationWarning("set_debug_mode is deprecated, use set_diagnostics_mode instead."))
211
+ set_diagnostics_mode(value)
212
+
213
+
214
+ def set_diagnostics_mode(value: Union[bool, int]):
210
215
  """Set debug mode."""
211
216
  _check_loaded()
212
217
  if isinstance(value, bool):
213
218
  value = 1 if value else 0
214
- code = factorylib.lib.psrd_set_debug_mode(value)
219
+ code = factorylib.lib.psrd_set_diagnostics_mode(value)
215
220
  if code != 0:
216
- raise FactoryException("Error setting debug mode")
221
+ raise FactoryException("Error setting diagnostics mode")
222
+
223
+
224
+ def diagnostics() -> str:
225
+ global _initialized
226
+ global _initialized_lock
227
+ with _initialized_lock:
228
+ """Get diagnostics information."""
229
+ py_diagnostics = f"Python version: {sys.version}\n" \
230
+ f"Python encoding: {sys.getdefaultencoding()}\n" \
231
+ f"Python locale: {locale.getlocale()}\n" \
232
+ f"Operating system: {sys.platform}\n" \
233
+ f"Operating system encoding: {locale.getpreferredencoding()}\n" \
234
+ f"Module path: {os.path.abspath(os.path.dirname(__file__))}\n" \
235
+ f"Working directory: {os.getcwd()}\n"
236
+
237
+ _check_loaded()
238
+ error = Error()
239
+ size = 10000
240
+ buffer = ctypes.create_string_buffer(size)
241
+ module_path = os.path.dirname(__file__)
242
+ factorylib.lib.psrd_diagnostics(_c_str(module_path), _bytes(module_path),
243
+ buffer, size, error.handler())
244
+ if error.code != 0:
245
+ raise FactoryException(error.what)
246
+ _initialized = True
247
+ return py_diagnostics + _from_c_str(buffer.value)
217
248
 
218
249
 
219
250
  def get_setting(key: str) -> Union[str, int, float, bool]:
@@ -2282,9 +2313,9 @@ def _initialize():
2282
2313
  raise FactoryException(_err.what)
2283
2314
 
2284
2315
  # Where to look for pmd and pmk files
2285
- common_path = os.path.dirname(__file__)
2286
- factorylib.lib.psrd_initialize(_c_str(common_path),
2287
- _bytes(common_path),
2316
+ module_path = os.path.dirname(__file__)
2317
+ factorylib.lib.psrd_initialize(_c_str(module_path),
2318
+ _bytes(module_path),
2288
2319
  _err.handler())
2289
2320
  if _err.code != 0:
2290
2321
  raise FactoryException(_err.what)
psr/factory/factory.dll CHANGED
Binary file
psr/factory/factory.pmd CHANGED
@@ -981,6 +981,13 @@ DEFINE_MODEL MODL:Estima_Configuration
981
981
  PARM INTEGER MarkovInitialWeek
982
982
  END_MODEL
983
983
 
984
+ DEFINE_CLASS PSRClusterMarkov
985
+ PARM STRING Name @id
986
+
987
+ VECTOR REFERENCE HydroStations PSRGaugingStation
988
+ VECTOR REAL HydroStationWeights
989
+ END_CLASS
990
+
984
991
  //----------------------------------------------------------------------
985
992
  // Start of Factory runtime models
986
993
  //----------------------------------------------------------------------
@@ -4447,6 +4454,8 @@ DEFINE_MODEL MODL:SDDP_V10.2_Termica
4447
4454
  PARM INTEGER MaxStartUps
4448
4455
  PARM INTEGER MaxShutDowns
4449
4456
  PARM REAL ShutDownCost
4457
+ PARM REAL RampUpCost
4458
+ PARM REAL RampDownCost
4450
4459
 
4451
4460
  MERGE_MODEL SDDP_MAINTENANCE_PER_STAGE
4452
4461
  MERGE_MODEL SDDP_MAINTENANCE_PER_HOUR
@@ -6626,6 +6635,7 @@ DEFINE_MODEL MODL:ePSR_CasoOperacao
6626
6635
  PARM INTEGER Crn
6627
6636
  PARM INTEGER Itr
6628
6637
  PARM INTEGER Cnv
6638
+ PARM INTEGER TpPl
6629
6639
  PARM REAL PMn
6630
6640
  PARM REAL PVt
6631
6641
  PARM STRING EtFCF
psr/factory/factory.pmk CHANGED
@@ -12230,25 +12230,27 @@ DEFINE_MASK CSVDATA SDDP_v14.0_opectr
12230
12230
 
12231
12231
  DEFINE_HEADER
12232
12232
  $version=1
12233
- !Code,Name........,RampUp,RampDown,MinUptime,MinDowntime,MaxStartUps,MaxShutDowns,ShutdownCost,WarmStartUpCost,ColdStartUpCost,PartialCoolingTime,TotalCoolingTime,ConstantGen,MaxUpTime
12233
+ !Code,Name........,RampUp,RampDown,MinUptime,MinDowntime,MaxStartUps,MaxShutDowns,ShutdownCost,WarmStartUpCost,ColdStartUpCost,PartialCoolingTime,TotalCoolingTime,ConstantGen,MaxUpTime,RampUpCost,RampDownCost
12234
12234
  END_HEADER
12235
12235
 
12236
12236
  DEFINE_DATA
12237
- Code INTEGER,1
12238
- Name STRING,2
12239
- RampUp REAL,3 AUTOSET(model.parm("RampUp"))
12240
- RampDown REAL,4 AUTOSET(model.parm("RampDown"))
12241
- MinUptime REAL,5 AUTOSET(model.parm("MinUptime"))
12242
- MinDowntime REAL,6 AUTOSET(model.parm("MinDowntime"))
12243
- MaxStartUps INTEGER,7 AUTOSET(model.parm("MaxStartUps"))
12244
- MaxShutDowns INTEGER,8 AUTOSET(model.parm("MaxShutDowns"))
12245
- ShutDownCost REAL,9 AUTOSET(model.parm("ShutDownCost"))
12237
+ Code INTEGER,1
12238
+ Name STRING,2
12239
+ RampUp REAL,3 AUTOSET(model.parm("RampUp"))
12240
+ RampDown REAL,4 AUTOSET(model.parm("RampDown"))
12241
+ MinUptime REAL,5 AUTOSET(model.parm("MinUptime"))
12242
+ MinDowntime REAL,6 AUTOSET(model.parm("MinDowntime"))
12243
+ MaxStartUps INTEGER,7 AUTOSET(model.parm("MaxStartUps"))
12244
+ MaxShutDowns INTEGER,8 AUTOSET(model.parm("MaxShutDowns"))
12245
+ ShutDownCost REAL,9 AUTOSET(model.parm("ShutDownCost"))
12246
12246
  StartUpWarmCost REAL,10 AUTOSET(model.parm("StartUpWarmCost"))
12247
12247
  StartUpColdCost REAL,11 AUTOSET(model.parm("StartUpColdCost"))
12248
12248
  PartialCoolingTime REAL,12 AUTOSET(model.parm("PartialCoolingTime"))
12249
12249
  TotalCoolingTime REAL,13 AUTOSET(model.parm("TotalCoolingTime"))
12250
- ConstantGen INTEGER,14 AUTOSET(model.parm("ConstantGen"))
12251
- MaxUptime REAL,15 AUTOSET(model.parm("MaxUptime"))
12250
+ ConstantGen INTEGER,14 AUTOSET(model.parm("ConstantGen"))
12251
+ MaxUptime REAL,15 AUTOSET(model.parm("MaxUptime"))
12252
+ RampUpCost REAL,16 AUTOSET(model.parm("RampUpCost"))
12253
+ RampDownCost REAL,17 AUTOSET(model.parm("RampDownCost"))
12252
12254
 
12253
12255
  END_DATA
12254
12256