psr-factory 5.0.0b69__py3-none-manylinux_2_28_x86_64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of psr-factory might be problematic. Click here for more details.
- psr/apps/__init__.py +7 -0
- psr/apps/apps.py +225 -0
- psr/apps/version.py +5 -0
- psr/execqueue/client.py +126 -0
- psr/execqueue/config.py +52 -0
- psr/execqueue/db.py +286 -0
- psr/execqueue/server.py +689 -0
- psr/execqueue/watcher.py +146 -0
- psr/factory/__init__.py +7 -0
- psr/factory/api.py +2745 -0
- psr/factory/factory.pmd +7322 -0
- psr/factory/factory.pmk +19461 -0
- psr/factory/factorylib.py +410 -0
- psr/factory/libfactory.so +0 -0
- psr/factory/py.typed +0 -0
- psr/factory/samples/__init__.py +2 -0
- psr/factory/samples/sddp_case01.py +166 -0
- psr/factory/samples/sddp_case21.py +242 -0
- psr/outputs/__init__.py +5 -0
- psr/outputs/outputs.py +179 -0
- psr/outputs/resample.py +289 -0
- psr/psrfcommon/__init__.py +6 -0
- psr/psrfcommon/psrfcommon.py +57 -0
- psr/psrfcommon/tempfile.py +118 -0
- psr/runner/__init__.py +7 -0
- psr/runner/runner.py +743 -0
- psr/runner/version.py +5 -0
- psr_factory-5.0.0b69.dist-info/METADATA +47 -0
- psr_factory-5.0.0b69.dist-info/RECORD +32 -0
- psr_factory-5.0.0b69.dist-info/WHEEL +5 -0
- psr_factory-5.0.0b69.dist-info/licenses/LICENSE.txt +21 -0
- psr_factory-5.0.0b69.dist-info/top_level.txt +1 -0
psr/execqueue/watcher.py
ADDED
|
@@ -0,0 +1,146 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import time
|
|
3
|
+
import shutil
|
|
4
|
+
import logging
|
|
5
|
+
import sqlite3
|
|
6
|
+
from datetime import datetime
|
|
7
|
+
from dotenv import load_dotenv
|
|
8
|
+
import psr.execqueue.client as execqueue
|
|
9
|
+
|
|
10
|
+
# Load configuration from a .env file (if present) into the process environment.
load_dotenv()
# Base URL of the execution-queue server that cases are submitted to.
SERVER_URL = os.getenv("SERVER_URL", "http://127.0.0.1:5000")
# Directory monitored for incoming .zip case files (required; no default).
WATCH_DIR = os.getenv("WATCH_DIR")
# Directory where successfully submitted .zip files are moved (required; no default).
PROCESSED_DIR = os.getenv("PROCESSED_DIR")
# Directory where downloaded result files are written.
RESULTS_DIR = os.getenv("RESULTS_DIR", "results")
# Seconds to sleep between iterations of the main polling loop.
SLEEP_SECONDS = int(os.getenv("WATCHER_SLEEP", "10"))
# Path of the SQLite database used to track submitted and downloaded cases.
DB_PATH = os.getenv("WATCHER_DB_PATH", "watcher.sqlite")
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def _init_db():
|
|
20
|
+
conn = sqlite3.connect(DB_PATH)
|
|
21
|
+
cursor = conn.cursor()
|
|
22
|
+
cursor.execute("""
|
|
23
|
+
CREATE TABLE IF NOT EXISTS processed_files (
|
|
24
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
25
|
+
filename TEXT NOT NULL,
|
|
26
|
+
cloud_upload_id TEXT NOT NULL,
|
|
27
|
+
processed_at TEXT NOT NULL,
|
|
28
|
+
downloaded INTEGER NOT NULL DEFAULT 0
|
|
29
|
+
)
|
|
30
|
+
""")
|
|
31
|
+
conn.commit()
|
|
32
|
+
conn.close()
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def _log_to_db(filename, cloud_upload_id):
|
|
36
|
+
conn = sqlite3.connect(DB_PATH)
|
|
37
|
+
cursor = conn.cursor()
|
|
38
|
+
cursor.execute(
|
|
39
|
+
"INSERT INTO processed_files (filename, cloud_upload_id, processed_at, downloaded) VALUES (?, ?, ?, ?)",
|
|
40
|
+
(filename, cloud_upload_id, datetime.now().isoformat(), 0)
|
|
41
|
+
)
|
|
42
|
+
conn.commit()
|
|
43
|
+
conn.close()
|
|
44
|
+
|
|
45
|
+
def _is_file_locked(filepath):
|
|
46
|
+
"""Returns True if the file is locked by another process (e.g., still being copied)."""
|
|
47
|
+
if not os.path.exists(filepath):
|
|
48
|
+
return True
|
|
49
|
+
try:
|
|
50
|
+
# Try to open for exclusive writing
|
|
51
|
+
with open(filepath, 'rb+') as f:
|
|
52
|
+
pass
|
|
53
|
+
return False
|
|
54
|
+
except (OSError, PermissionError):
|
|
55
|
+
return True
|
|
56
|
+
|
|
57
|
+
def _process_zip_files():
    """Submit every unlocked .zip file found in WATCH_DIR to the server.

    For each archive: upload it, start a cloud execution, record the
    resulting cloud upload id in the tracking database, and move the
    archive to PROCESSED_DIR. Files that are locked (still being copied)
    or that fail any step are skipped and retried on the next cycle.
    """
    for filename in os.listdir(WATCH_DIR):
        if not filename.lower().endswith('.zip'):
            continue
        zip_path = os.path.join(WATCH_DIR, filename)

        # Skip files still being written/copied into the watch directory.
        if _is_file_locked(zip_path):
            logging.info(f"Skipping {zip_path}: file is locked or being copied.")
            continue

        logging.info(f"zip file found: {zip_path}")

        case_id = execqueue.upload_case_file(zip_path, SERVER_URL)
        if not case_id:
            logging.error(f"Failed uploading file {zip_path}")
            continue

        cloud_upload_id = execqueue.run_case(case_id, SERVER_URL, cloud_execution=True)
        if not cloud_upload_id:
            logging.error(f"Failed executing case {case_id} with {zip_path}")
            continue

        # FIX: these two messages previously logged the literal text
        # "(unknown)" instead of interpolating the file name.
        logging.info(f"File {filename} uploaded and execution started. Cloud Upload ID: {cloud_upload_id}")
        _log_to_db(filename, cloud_upload_id)
        dest_path = os.path.join(PROCESSED_DIR, filename)
        shutil.move(zip_path, dest_path)
        logging.info(f"File {filename} moved to {PROCESSED_DIR}")
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
def _check_and_download_results():
    """Poll the execution status of every not-yet-downloaded case and fetch
    results for the ones that have finished.

    Rows with ``downloaded=0`` are read from the tracking database; when a
    case reaches a terminal status its result files are downloaded into a
    per-case folder under RESULTS_DIR and the row is flagged ``downloaded=1``.
    """
    conn = sqlite3.connect(DB_PATH)
    # try/finally: the original leaked the connection if a poll or download
    # raised before conn.close() was reached.
    try:
        cursor = conn.cursor()
        cursor.execute("SELECT id, filename, cloud_upload_id FROM processed_files WHERE downloaded=0")
        for record_id, filename, cloud_upload_id in cursor.fetchall():
            status_id, status_msg = execqueue.get_execution_status(cloud_upload_id, SERVER_URL, cloud_execution=True)
            logging.info(f"Execution status for {cloud_upload_id}: {status_id} - {status_msg}")
            if status_id is None:
                logging.error(f"Failed to get status for {cloud_upload_id}. Skipping download.")
                continue
            # Statuses 5 and 6 are treated as terminal/downloadable here.
            # NOTE(review): confirm their meaning against the server's status enum.
            if status_id in (5, 6):
                files = execqueue.get_results(cloud_upload_id, SERVER_URL, cloud_execution=True)
                if files:
                    base_filename = os.path.splitext(filename)[0]
                    download_folder_name = f"{base_filename}-{cloud_upload_id}"
                    download_path = os.path.join(RESULTS_DIR, download_folder_name)
                    os.makedirs(download_path, exist_ok=True)
                    for file in files:
                        execqueue.download_execution_file(cloud_upload_id, SERVER_URL, file, download_path,
                                                          cloud_execution=True)
                    # Flag the row so it is not polled again.
                    cursor.execute("UPDATE processed_files SET downloaded=1 WHERE id=?", (record_id,))
                    conn.commit()
                    logging.info(f"Results of {cloud_upload_id} downloaded to {download_path}")
    finally:
        conn.close()
|
|
114
|
+
|
|
115
|
+
|
|
116
|
+
if __name__ == "__main__":
    if not WATCH_DIR or not PROCESSED_DIR:
        print("WATCH_DIR and PROCESSED_DIR must be set as environment variables or in a .env file")
        # FIX: use SystemExit instead of the site-module exit() helper,
        # which is not guaranteed to be available in non-interactive runs.
        raise SystemExit(1)

    # Log to watcher.log in the current working directory.
    LOG_FILE = os.path.join(os.getcwd(), "watcher.log")
    logging.basicConfig(
        filename=LOG_FILE,
        level=logging.INFO,
        format='%(asctime)s - %(levelname)s - %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S'
    )
    # log to standard output as well
    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.INFO)
    logging.getLogger().addHandler(console_handler)

    # Create all working directories up front so the loop can assume they exist.
    os.makedirs(WATCH_DIR, exist_ok=True)
    os.makedirs(PROCESSED_DIR, exist_ok=True)
    os.makedirs(RESULTS_DIR, exist_ok=True)
    _init_db()
    logging.info(f"Case watcher started. Monitoring directory for SDDP cases: {WATCH_DIR}")

    while True:
        try:
            # Check finished executions first, then submit any new cases.
            _check_and_download_results()
            _process_zip_files()

        except Exception as e:
            # Broad catch is intentional at this top-level loop boundary:
            # the watcher must survive transient server/filesystem errors.
            logging.error(f"Watcher error: {e}", exc_info=True)
        time.sleep(SLEEP_SECONDS)
|