yowasp-yosys 0.55.0.3.post946.dev0-py3-none-any.whl → 0.56.0.0.post964-py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as published to their respective public registries, and is provided for informational purposes only.
- yowasp_yosys/sby.py +39 -8
- yowasp_yosys/share/include/kernel/constids.inc +1 -0
- yowasp_yosys/share/include/kernel/io.h +382 -8
- yowasp_yosys/share/include/kernel/json.h +2 -2
- yowasp_yosys/share/include/kernel/register.h +42 -4
- yowasp_yosys/share/include/kernel/rtlil.h +5 -1
- yowasp_yosys/share/include/kernel/satgen.h +3 -1
- yowasp_yosys/share/include/kernel/yosys_common.h +9 -0
- yowasp_yosys/share/include/passes/techmap/libparse.h +235 -0
- yowasp_yosys/share/python3/sby_autotune.py +1 -1
- yowasp_yosys/share/python3/sby_cmdline.py +13 -0
- yowasp_yosys/share/python3/sby_core.py +208 -85
- yowasp_yosys/share/python3/sby_design.py +4 -0
- yowasp_yosys/share/python3/sby_engine_abc.py +15 -4
- yowasp_yosys/share/python3/sby_engine_aiger.py +14 -9
- yowasp_yosys/share/python3/sby_engine_btor.py +15 -4
- yowasp_yosys/share/python3/sby_engine_smtbmc.py +40 -27
- yowasp_yosys/share/python3/sby_status.py +388 -115
- yowasp_yosys/yosys.wasm +0 -0
- {yowasp_yosys-0.55.0.3.post946.dev0.dist-info → yowasp_yosys-0.56.0.0.post964.dist-info}/METADATA +1 -1
- {yowasp_yosys-0.55.0.3.post946.dev0.dist-info → yowasp_yosys-0.56.0.0.post964.dist-info}/RECORD +24 -23
- {yowasp_yosys-0.55.0.3.post946.dev0.dist-info → yowasp_yosys-0.56.0.0.post964.dist-info}/WHEEL +0 -0
- {yowasp_yosys-0.55.0.3.post946.dev0.dist-info → yowasp_yosys-0.56.0.0.post964.dist-info}/entry_points.txt +0 -0
- {yowasp_yosys-0.55.0.3.post946.dev0.dist-info → yowasp_yosys-0.56.0.0.post964.dist-info}/top_level.txt +0 -0
yowasp_yosys/share/python3/sby_status.py
CHANGED
@@ -4,6 +4,8 @@ import sqlite3
 import os
 import time
 import json
+import click
+import re
 from collections import defaultdict
 from functools import wraps
 from pathlib import Path
@@ -13,69 +15,130 @@ from sby_design import SbyProperty, pretty_path

 Fn = TypeVar("Fn", bound=Callable[..., Any])

+SQLSCRIPT = """\
+CREATE TABLE task (
+    id INTEGER PRIMARY KEY,
+    workdir TEXT,
+    name TEXT,
+    mode TEXT,
+    created REAL
+);
+CREATE TABLE task_status (
+    id INTEGER PRIMARY KEY,
+    task INTEGER,
+    status TEXT,
+    data TEXT,
+    created REAL,
+    FOREIGN KEY(task) REFERENCES task(id)
+);
+CREATE TABLE task_property (
+    id INTEGER PRIMARY KEY,
+    task INTEGER,
+    src TEXT,
+    name TEXT,
+    hdlname TEXT,
+    kind TEXT,
+    created REAL,
+    FOREIGN KEY(task) REFERENCES task(id)
+);
+CREATE TABLE task_property_status (
+    id INTEGER PRIMARY KEY,
+    task_property INTEGER,
+    task_trace INTEGER,
+    status TEXT,
+    data TEXT,
+    created REAL,
+    FOREIGN KEY(task_property) REFERENCES task_property(id),
+    FOREIGN KEY(task_trace) REFERENCES task_trace(id)
+);
+CREATE TABLE task_trace (
+    id INTEGER PRIMARY KEY,
+    task INTEGER,
+    trace TEXT,
+    path TEXT,
+    kind TEXT,
+    engine_case TEXT,
+    created REAL,
+    FOREIGN KEY(task) REFERENCES task(id)
+);"""

 def transaction(method: Fn) -> Fn:
     @wraps(method)
     def wrapper(self: SbyStatusDb, *args: Any, **kwargs: Any) -> Any:
-        if self.
+        if self.con.in_transaction:
             return method(self, *args, **kwargs)

         try:
-            self.
-
-
-
-            self.db.execute("commit")
-            self._transaction_active = False
-            self.log_debug(f"comitted {method.__name__!r} transaction")
-            return result
-        except sqlite3.OperationalError as err:
-            self.log_debug(f"failed {method.__name__!r} transaction {err}")
-            self.db.rollback()
-            self._transaction_active = False
+            with self.con:
+                self.log_debug(f"begin {method.__name__!r} transaction")
+                self.db.execute("begin")
+                result = method(self, *args, **kwargs)
         except Exception as err:
             self.log_debug(f"failed {method.__name__!r} transaction {err}")
-
-
-
-
-
-
-            )
-            self.db.execute("begin immediate")
-            self._transaction_active = True
-            result = method(self, *args, **kwargs)
-            self.db.execute("commit")
-            self._transaction_active = False
+            if not isinstance(err, sqlite3.OperationalError):
+                raise
+            if re.match(r"table \w+ has no column named \w+", err.args[0]):
+                err.add_note("SBY status database can be reset with --statusreset")
+                raise
+        else:
             self.log_debug(f"comitted {method.__name__!r} transaction")
             return result
+
+        try:
+            with self.con:
+                self.log_debug(
+                    f"retrying {method.__name__!r} transaction once in immediate mode"
+                )
+                self.db.execute("begin immediate")
+                result = method(self, *args, **kwargs)
         except Exception as err:
             self.log_debug(f"failed {method.__name__!r} transaction {err}")
-            self.db.rollback()
-            self._transaction_active = False
             raise
+        else:
+            self.log_debug(f"comitted {method.__name__!r} transaction")
+            return result

     return wrapper  # type: ignore

+class FileInUseError(Exception):
+    def __init__(self, *args, file: Path|str = "file"):
+        super().__init__(f"Found {file}, try again later", *args)
+

 class SbyStatusDb:
-    def __init__(self, path: Path, task, timeout: float = 5.0):
+    def __init__(self, path: Path, task, timeout: float = 5.0, live_formats = []):
         self.debug = False
         self.task = task
-        self.
-
-        setup = not os.path.exists(path)
+        self.live_formats = live_formats

-        self.
+        self.con = sqlite3.connect(path, isolation_level=None, timeout=timeout)
+        self.db = self.con.cursor()
         self.db.row_factory = sqlite3.Row
-
-
+        err_count = 0
+        err_max = 3
+        while True:
+            try:
+                self.db.execute("PRAGMA journal_mode=WAL")
+                self.db.execute("PRAGMA synchronous=0")
+                self.db.execute("PRAGMA foreign_keys=ON")
+            except sqlite3.OperationalError as err:
+                if "database is locked" not in err.args[0]:
+                    raise
+                err_count += 1
+                if err_count > err_max:
+                    err.add_note(f"Failed to acquire lock after {err_count} attempts, aborting")
+                    raise
+                backoff = err_count / 10.0
+                self.log_debug(f"Database locked, retrying in {backoff}s")
+                time.sleep(backoff)
+            else:
+                break

-
-            self._setup()
+        self._setup()

         if task is not None:
-            self.
+            self.start_time = time.time()
+            self.task_id = self.create_task(workdir=task.workdir, name=task.name, mode=task.opt_mode, now=self.start_time)

     def log_debug(self, *args):
         if self.debug:
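Editor's note: the new `__init__` above opens the status database in autocommit mode (`isolation_level=None`), switches it to WAL journaling, and retries with a linear backoff while another SBY process holds the lock. A minimal, self-contained sketch of that pattern follows; the helper name `open_status_db` and the path `example_status.sqlite` are illustrative only, not part of the package.

    # Sketch only: mirrors the lock-retry idea from SbyStatusDb.__init__ above.
    import sqlite3
    import time

    def open_status_db(path, retries=3):
        con = sqlite3.connect(path, isolation_level=None, timeout=5.0)
        cur = con.cursor()
        for attempt in range(1, retries + 1):
            try:
                cur.execute("PRAGMA journal_mode=WAL")   # allow concurrent readers and a writer
                cur.execute("PRAGMA foreign_keys=ON")
                return con, cur
            except sqlite3.OperationalError as err:
                if "database is locked" not in err.args[0] or attempt == retries:
                    raise
                time.sleep(attempt / 10.0)               # linear backoff, as in the diff

    con, cur = open_status_db("example_status.sqlite")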
@@ -86,59 +149,25 @@ class SbyStatusDb:

     @transaction
     def _setup(self):
-
-
-                id INTEGER PRIMARY KEY,
-                workdir TEXT,
-                mode TEXT,
-                created REAL
-            );
-            CREATE TABLE task_status (
-                id INTEGER PRIMARY KEY,
-                task INTEGER,
-                status TEXT,
-                data TEXT,
-                created REAL,
-                FOREIGN KEY(task) REFERENCES task(id)
-            );
-            CREATE TABLE task_property (
-                id INTEGER PRIMARY KEY,
-                task INTEGER,
-                src TEXT,
-                name TEXT,
-                created REAL,
-                FOREIGN KEY(task) REFERENCES task(id)
-            );
-            CREATE TABLE task_property_status (
-                id INTEGER PRIMARY KEY,
-                task_property INTEGER,
-                status TEXT,
-                data TEXT,
-                created REAL,
-                FOREIGN KEY(task_property) REFERENCES task_property(id)
-            );
-            CREATE TABLE task_property_data (
-                id INTEGER PRIMARY KEY,
-                task_property INTEGER,
-                kind TEXT,
-                data TEXT,
-                created REAL,
-                FOREIGN KEY(task_property) REFERENCES task_property(id)
-            );
-        """
-        for statement in script.split(";\n"):
-            statement = statement.strip()
+        for statement in SQLSCRIPT.split(";\n"):
+            statement = statement.strip().replace("CREATE TABLE", "CREATE TABLE IF NOT EXISTS")
             if statement:
                 self.db.execute(statement)

+    def test_schema(self) -> bool:
+        schema = self.db.execute("SELECT sql FROM sqlite_master;").fetchall()
+        schema_script = '\n'.join(str(sql[0] + ';') for sql in schema)
+        self._tables = re.findall(r"CREATE TABLE (\w+) \(", schema_script)
+        return schema_script != SQLSCRIPT
+
     @transaction
-    def create_task(self, workdir: str, mode: str) -> int:
+    def create_task(self, workdir: str, name: str, mode: str, now:float) -> int:
         return self.db.execute(
             """
-            INSERT INTO task (workdir, mode, created)
-            VALUES (:workdir, :mode, :now)
+            INSERT INTO task (workdir, name, mode, created)
+            VALUES (:workdir, :name, :mode, :now)
             """,
-            dict(workdir=workdir, mode=mode, now=
+            dict(workdir=workdir, name=name, mode=mode, now=now),
         ).lastrowid

     @transaction
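Editor's note: `_setup()` now builds the schema from the module-level `SQLSCRIPT` and rewrites each statement to `CREATE TABLE IF NOT EXISTS`, so it can run safely against an already-initialized database. A condensed sketch of that idempotent setup, using an in-memory database and a single made-up table:

    # Sketch only: the idempotent _setup() pattern from the hunk above.
    import sqlite3

    SQLSCRIPT = """\
    CREATE TABLE example (
        id INTEGER PRIMARY KEY,
        name TEXT
    );"""

    con = sqlite3.connect(":memory:")
    for _ in range(2):  # the second pass is a no-op thanks to IF NOT EXISTS
        for statement in SQLSCRIPT.split(";\n"):
            statement = statement.strip().replace("CREATE TABLE", "CREATE TABLE IF NOT EXISTS")
            if statement:
                con.execute(statement)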
@@ -150,14 +179,16 @@ class SbyStatusDb:
         now = time.time()
         self.db.executemany(
             """
-            INSERT INTO task_property (name, src, task, created)
-            VALUES (:name, :src, :task, :now)
+            INSERT INTO task_property (name, src, hdlname, task, kind, created)
+            VALUES (:name, :src, :hdlname, :task, :kind, :now)
             """,
             [
                 dict(
                     name=json.dumps(prop.path),
                     src=prop.location or "",
+                    hdlname=prop.hdlname,
                     task=task_id,
+                    kind=prop.kind,
                     now=now,
                 )
                 for prop in properties
@@ -195,55 +226,67 @@ class SbyStatusDb:
     def set_task_property_status(
         self,
         property: SbyProperty,
-
+        trace_id: Optional[int] = None,
         data: Any = None,
     ):
-        if status is None:
-            status = property.status
-
         now = time.time()
         self.db.execute(
             """
             INSERT INTO task_property_status (
-                task_property, status, data, created
+                task_property, task_trace, status, data, created
             )
             VALUES (
                 (SELECT id FROM task_property WHERE task = :task AND name = :name),
-                :status, :data, :now
+                :trace_id, :status, :data, :now
             )
             """,
             dict(
                 task=self.task_id,
+                trace_id=trace_id,
                 name=json.dumps(property.path),
-                status=status,
+                status=property.status,
                 data=json.dumps(data),
                 now=now,
             ),
         )

+        if self.live_formats:
+            row = self.get_status_data_joined(self.db.lastrowid)
+            for fmt in self.live_formats:
+                fmtline = format_status_data_fmtline(row, fmt)
+                self.task.log(f"{click.style(fmt, fg='yellow')}: {fmtline}")
+
     @transaction
-    def
+    def add_task_trace(
+        self,
+        trace: str,
+        path: str,
+        kind: str,
+        engine_case: Optional[str] = None,
+        task_id: Optional[int] = None,
+    ):
+        if task_id is None:
+            task_id = self.task_id
         now = time.time()
-        self.db.execute(
+        return self.db.execute(
             """
-            INSERT INTO
-
+            INSERT INTO task_trace (
+                trace, task, path, engine_case, kind, created
             )
             VALUES (
-
-                :kind, :data, :now
+                :trace, :task, :path, :engine_case, :kind, :now
             )
             """,
             dict(
-
-
+                trace=trace,
+                task=task_id,
+                path=path,
+                engine_case=engine_case,
                 kind=kind,
-
-
-
-            )
+                now=now
+            )
+        ).lastrowid

-    @transaction
     def all_tasks(self):
         rows = self.db.execute(
             """
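Editor's note: the `live_formats` hook added above re-reads the row it just wrote and immediately prints it once per requested format. A rough stand-alone sketch of that flow; the `format_line` helper and the sample row are inventions for illustration and stand in for the module's `format_status_data_fmtline` and the joined database row.

    # Sketch only: echo a freshly recorded property status in each "live" format.
    import json

    def format_line(row, fmt):
        # stand-in for format_status_data_fmtline in the diff
        if fmt == "jsonl":
            return json.dumps(row)
        return ",".join(str(row[k]) for k in ("task_name", "name", "status"))

    live_formats = ["csv", "jsonl"]
    row = {"task_name": "bmc", "name": "top.assert_ok", "status": "PASS"}
    for fmt in live_formats:
        print(f"{fmt}: {format_line(row, fmt)}")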
@@ -253,7 +296,18 @@ class SbyStatusDb:

         return {row["id"]: dict(row) for row in rows}

-
+    def all_tasks_status(self):
+        rows = self.db.execute(
+            """
+            SELECT task.id, task.name, task.created,
+                task_status.status, task_status.created as 'status_created'
+            FROM task
+            LEFT JOIN task_status ON task_status.task=task.id
+            """
+        ).fetchall()
+
+        return {row["id"]: dict(row) for row in rows}
+
     def all_task_properties(self):
         rows = self.db.execute(
             """
@@ -264,12 +318,10 @@ class SbyStatusDb:
         def get_result(row):
             row = dict(row)
             row["name"] = tuple(json.loads(row.get("name", "[]")))
-            row["data"] = json.loads(row.get("data", "null"))
             return row

         return {row["id"]: get_result(row) for row in rows}

-    @transaction
     def all_task_property_statuses(self):
         rows = self.db.execute(
             """
@@ -285,7 +337,6 @@ class SbyStatusDb:

         return {row["id"]: get_result(row) for row in rows}

-    @transaction
     def all_status_data(self):
         return (
             self.all_tasks(),
@@ -294,20 +345,35 @@ class SbyStatusDb:
         )

     @transaction
+    def _reset(self):
+        hard_reset = self.test_schema()
+        # table names can't be parameters, so we need to use f-strings
+        # but it is safe to use here because it comes from the regex "\w+"
+        for table in self._tables:
+            if hard_reset:
+                self.log_debug(f"dropping {table}")
+                self.db.execute(f"DROP TABLE {table}")
+            else:
+                self.log_debug(f"clearing {table}")
+                self.db.execute(f"DELETE FROM {table}")
+        if hard_reset:
+            self._setup()
+
     def reset(self):
-        self.db.execute("
-        self.
-        self.db.execute("
-        self.db.execute("""DELETE FROM task_status""")
-        self.db.execute("""DELETE FROM task""")
+        self.db.execute("PRAGMA foreign_keys=OFF")
+        self._reset()
+        self.db.execute("PRAGMA foreign_keys=ON")

-    def print_status_summary(self):
+    def print_status_summary(self, latest: bool):
         tasks, task_properties, task_property_statuses = self.all_status_data()
+        latest_task_ids = filter_latest_task_ids(tasks)
         properties = defaultdict(set)

         uniquify_paths = defaultdict(dict)

         def add_status(task_property, status):
+            if latest and task_property["task"] not in latest_task_ids:
+                return

             display_name = task_property["name"]
             if display_name[-1].startswith("$"):
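Editor's note: `reset()` now chooses between a hard reset (drop and recreate, when `test_schema()` reports drift from `SQLSCRIPT`) and a soft reset (clear rows only). A condensed sketch of that decision, run against an in-memory database with a one-table schema of its own:

    # Sketch only: hard reset (DROP) on schema drift, soft reset (DELETE) otherwise.
    import sqlite3

    SQLSCRIPT = 'CREATE TABLE example (\n    id INTEGER PRIMARY KEY\n);'
    con = sqlite3.connect(":memory:")
    con.execute(SQLSCRIPT)

    stored = "\n".join(sql + ";" for (sql,) in con.execute("SELECT sql FROM sqlite_master"))
    hard_reset = stored != SQLSCRIPT  # True once the shipped schema has changed
    tables = con.execute("SELECT name FROM sqlite_master WHERE type='table'").fetchall()
    for (table,) in tables:
        con.execute(f"DROP TABLE {table}" if hard_reset else f"DELETE FROM {table}")
    if hard_reset:
        con.execute(SQLSCRIPT)  # recreate from the current schema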
@@ -334,6 +400,107 @@ class SbyStatusDb:
         for display_name, statuses in sorted(properties.items()):
             print(pretty_path(display_name), combine_statuses(statuses))

+    def print_task_summary(self):
+        tasks = self.all_tasks_status()
+        task_status = defaultdict(set)
+        for task in tasks.values():
+            task_status[task["name"]].add(task["status"] or "UNKNOWN")
+        for task_name, statuses in sorted(task_status.items()):
+            print(task_name, combine_statuses(statuses))
+
+    def get_status_data_joined(self, status_id: int):
+        row = self.db.execute(
+            """
+            SELECT task.name as 'task_name', task.mode, task.workdir, task.created, task_property.kind,
+                task_property.src as 'location', task_property.name, task_property.hdlname, task_property_status.status,
+                task_property_status.data, task_property_status.created as 'status_created',
+                task_property_status.id, task_trace.path, task_trace.kind as trace_kind
+            FROM task
+            INNER JOIN task_property ON task_property.task=task.id
+            INNER JOIN task_property_status ON task_property_status.task_property=task_property.id
+            LEFT JOIN task_trace ON task_property_status.task_trace=task_trace.id
+            WHERE task_property_status.id=:status_id;
+            """,
+            dict(status_id=status_id)
+        ).fetchone()
+        return parse_status_data_row(row)
+
+    def all_status_data_joined(self):
+        rows = self.db.execute(
+            """
+            SELECT task.id as 'task_id', task.name as 'task_name', task.mode, task.workdir, task.created, task_property.kind,
+                task_property.src as 'location', task_property.name, task_property.hdlname, task_property_status.status,
+                task_property_status.data, task_property_status.created as 'status_created',
+                task_property_status.id, task_trace.path, task_trace.kind as trace_kind
+            FROM task
+            INNER JOIN task_property ON task_property.task=task.id
+            INNER JOIN task_property_status ON task_property_status.task_property=task_property.id
+            LEFT JOIN task_trace ON task_property_status.task_trace=task_trace.id;
+            """
+        ).fetchall()
+
+        return {row["id"]: parse_status_data_row(row) for row in rows}
+
+    def print_status_summary_fmt(self, tasknames: list[str], status_format: str, latest: bool):
+        # get all statuses
+        all_properties = self.all_status_data_joined()
+        latest_task_ids = filter_latest_task_ids(self.all_tasks())
+
+        # print header
+        header = format_status_data_fmtline(None, status_format)
+        if header:
+            print(header)
+
+        # find summary for each task/property combo
+        prop_map: dict[(str, str, str), dict[str, (int, int)]] = {}
+        for row, prop_status in all_properties.items():
+            if tasknames and prop_status['task_name'] not in tasknames:
+                continue
+            if latest and prop_status['task_id'] not in latest_task_ids:
+                continue
+            status = prop_status['status']
+            this_depth = prop_status['data'].get('step')
+            this_kind = prop_status['trace_kind']
+            key = (prop_status['task_name'], prop_status['hdlname'])
+            try:
+                prop_status_map = prop_map[key]
+            except KeyError:
+                prop_map[key] = prop_status_map = {}
+
+            try:
+                current_depth, _, current_kind = prop_status_map[status]
+            except KeyError:
+                prop_status_map[status] = (this_depth, row, this_kind)
+                continue
+
+            update_map = False
+            if current_depth is None and current_kind is None:
+                # no depth or kind to compare, just take latest data
+                update_map = True
+            elif this_depth is not None and this_depth != current_depth:
+                if current_depth is None:
+                    # always prefer a known depth to an unknown
+                    update_map = True
+                elif status == 'FAIL' and this_depth < current_depth:
+                    # earliest fail
+                    update_map = True
+                elif status != 'FAIL' and this_depth > current_depth:
+                    # latest non-FAIL
+                    update_map = True
+            elif this_kind in ['fst', 'vcd']:
+                # prefer traces over witness files
+                update_map = True
+            if update_map:
+                prop_status_map[status] = (this_depth, row, this_kind)
+
+        for prop in prop_map.values():
+            # ignore UNKNOWNs if there are other statuses
+            if len(prop) > 1 and "UNKNOWN" in prop:
+                del prop["UNKNOWN"]
+
+            for _, row, _ in prop.values():
+                line = format_status_data_fmtline(all_properties[row], status_format)
+                print(line)

 def combine_statuses(statuses):
     statuses = set(statuses)
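Editor's note: the per-property selection rule in `print_status_summary_fmt` keeps the shallowest step for a FAIL, the deepest step otherwise, and prefers rows that carry a vcd/fst trace. The `better` helper below is an illustration of those `update_map` rules, not a function in the module; plain tuples stand in for the joined database rows.

    # Sketch only: which of two recorded statuses should win in the summary?
    def better(current, candidate, status):
        cur_depth, _, cur_kind = current
        new_depth, _, new_kind = candidate
        if cur_depth is None and cur_kind is None:
            return True                                    # nothing to compare against
        if new_depth is not None and new_depth != cur_depth:
            if cur_depth is None:
                return True                                # known depth beats unknown
            return new_depth < cur_depth if status == "FAIL" else new_depth > cur_depth
        return new_kind in ("fst", "vcd")                  # prefer real traces

    print(better((10, 0, None), (4, 1, "vcd"), "FAIL"))    # True: earlier failure
    print(better((10, 0, None), (12, 1, None), "PASS"))    # True: deeper pass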
@@ -342,3 +509,109 @@ def combine_statuses(statuses):
     statuses.discard("UNKNOWN")

     return ",".join(sorted(statuses))
+
+def parse_status_data_row(raw: sqlite3.Row):
+    row_dict = dict(raw)
+    row_dict["name"] = json.loads(row_dict.get("name", "null"))
+    row_dict["data"] = json.loads(row_dict.get("data") or "{}")
+    return row_dict
+
+fmtline_columns = [
+    "time",
+    "task_name",
+    "mode",
+    "engine",
+    "name",
+    "location",
+    "kind",
+    "status",
+    "trace",
+    "depth",
+]
+
+def format_status_data_fmtline(row: dict|None, fmt: str = "csv") -> str:
+    if row is None:
+        data = None
+    else:
+        engine = row['data'].get('engine', row['data'].get('source'))
+        name = row['hdlname']
+        depth = row['data'].get('step')
+
+        data = {
+            "task_name": row['task_name'],
+            "mode": row['mode'],
+            "engine": engine,
+            "name": name or pretty_path(row['name']),
+            "location": row['location'],
+            "kind": row['kind'],
+            "status": row['status'] or "UNKNOWN",
+            "depth": depth,
+        }
+        try:
+            data["trace"] = str(Path(row['workdir']) / row['path'])
+        except TypeError:
+            pass
+        try:
+            data['time'] = round(row['status_created'] - row['created'], 2)
+        except TypeError:
+            pass
+    if fmt == "csv":
+        if data is None:
+            csv_line = fmtline_columns
+        else:
+            csv_line = [data.get(column) for column in fmtline_columns]
+        def csv_field(value):
+            if value is None:
+                return ""
+            value = str(value).replace('"', '""')
+            if any(c in value for c in '",\n'):
+                value = f'"{value}"'
+            return value
+        return ','.join(map(csv_field, csv_line))
+    elif fmt == "jsonl":
+        if data is None:
+            return ""
+        # field order
+        data = {column: data[column] for column in fmtline_columns if data.get(column)}
+        return json.dumps(data)
+
+def filter_latest_task_ids(all_tasks: dict[int, dict[str]]):
+    latest: dict[str, int] = {}
+    for task_id, task_dict in all_tasks.items():
+        latest[task_dict["workdir"]] = task_id
+    return list(latest.values())
+
+def remove_db(path):
+    path = Path(path)
+    lock_exts = [".sqlite-wal", ".sqlite-shm"]
+    maybe_locked = False
+    for lock_file in [path.with_suffix(ext) for ext in lock_exts]:
+        if lock_file.exists():
+            # lock file may be a false positive if it wasn't cleaned up
+            maybe_locked = True
+            break
+
+    if not maybe_locked:
+        # safe to delete
+        os.remove(path)
+        return
+
+    # test database directly
+    with sqlite3.connect(path, isolation_level="EXCLUSIVE", timeout=1) as con:
+        cur = con.cursor()
+        # single result rows
+        cur.row_factory = lambda _, r: r[0]
+
+        # changing journal_mode is disallowed if there are multiple connections
+        try:
+            cur.execute("PRAGMA journal_mode=DELETE")
+        except sqlite3.OperationalError as err:
+            if "database is locked" in err.args[0]:
+                raise FileInUseError(file=path)
+            else:
+                raise
+
+        # no other connections, delete all tables
+        drop_script = cur.execute("SELECT name FROM sqlite_master WHERE type = 'table';").fetchall()
+        for table in drop_script:
+            cur.execute(f"DROP TABLE {table}")
yowasp_yosys/yosys.wasm
CHANGED
Binary file