siliconcompiler-0.26.5-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (251)
  1. siliconcompiler/__init__.py +24 -0
  2. siliconcompiler/__main__.py +12 -0
  3. siliconcompiler/_common.py +49 -0
  4. siliconcompiler/_metadata.py +36 -0
  5. siliconcompiler/apps/__init__.py +0 -0
  6. siliconcompiler/apps/_common.py +76 -0
  7. siliconcompiler/apps/sc.py +92 -0
  8. siliconcompiler/apps/sc_dashboard.py +94 -0
  9. siliconcompiler/apps/sc_issue.py +178 -0
  10. siliconcompiler/apps/sc_remote.py +199 -0
  11. siliconcompiler/apps/sc_server.py +39 -0
  12. siliconcompiler/apps/sc_show.py +142 -0
  13. siliconcompiler/apps/smake.py +232 -0
  14. siliconcompiler/checklists/__init__.py +0 -0
  15. siliconcompiler/checklists/oh_tapeout.py +41 -0
  16. siliconcompiler/core.py +3221 -0
  17. siliconcompiler/data/RobotoMono/LICENSE.txt +202 -0
  18. siliconcompiler/data/RobotoMono/RobotoMono-Regular.ttf +0 -0
  19. siliconcompiler/data/heartbeat.v +18 -0
  20. siliconcompiler/data/logo.png +0 -0
  21. siliconcompiler/flowgraph.py +570 -0
  22. siliconcompiler/flows/__init__.py +0 -0
  23. siliconcompiler/flows/_common.py +67 -0
  24. siliconcompiler/flows/asicflow.py +180 -0
  25. siliconcompiler/flows/asictopflow.py +38 -0
  26. siliconcompiler/flows/dvflow.py +86 -0
  27. siliconcompiler/flows/fpgaflow.py +202 -0
  28. siliconcompiler/flows/generate_openroad_rcx.py +66 -0
  29. siliconcompiler/flows/lintflow.py +35 -0
  30. siliconcompiler/flows/screenshotflow.py +51 -0
  31. siliconcompiler/flows/showflow.py +59 -0
  32. siliconcompiler/flows/signoffflow.py +53 -0
  33. siliconcompiler/flows/synflow.py +128 -0
  34. siliconcompiler/fpgas/__init__.py +0 -0
  35. siliconcompiler/fpgas/lattice_ice40.py +42 -0
  36. siliconcompiler/fpgas/vpr_example.py +109 -0
  37. siliconcompiler/issue.py +300 -0
  38. siliconcompiler/libs/__init__.py +0 -0
  39. siliconcompiler/libs/asap7sc7p5t.py +8 -0
  40. siliconcompiler/libs/gf180mcu.py +8 -0
  41. siliconcompiler/libs/nangate45.py +8 -0
  42. siliconcompiler/libs/sky130hd.py +8 -0
  43. siliconcompiler/libs/sky130io.py +8 -0
  44. siliconcompiler/package.py +412 -0
  45. siliconcompiler/pdks/__init__.py +0 -0
  46. siliconcompiler/pdks/asap7.py +8 -0
  47. siliconcompiler/pdks/freepdk45.py +8 -0
  48. siliconcompiler/pdks/gf180.py +8 -0
  49. siliconcompiler/pdks/skywater130.py +8 -0
  50. siliconcompiler/remote/__init__.py +36 -0
  51. siliconcompiler/remote/client.py +891 -0
  52. siliconcompiler/remote/schema.py +106 -0
  53. siliconcompiler/remote/server.py +507 -0
  54. siliconcompiler/remote/server_schema/requests/cancel_job.json +51 -0
  55. siliconcompiler/remote/server_schema/requests/check_progress.json +61 -0
  56. siliconcompiler/remote/server_schema/requests/check_server.json +38 -0
  57. siliconcompiler/remote/server_schema/requests/delete_job.json +51 -0
  58. siliconcompiler/remote/server_schema/requests/get_results.json +48 -0
  59. siliconcompiler/remote/server_schema/requests/remote_run.json +40 -0
  60. siliconcompiler/remote/server_schema/responses/cancel_job.json +18 -0
  61. siliconcompiler/remote/server_schema/responses/check_progress.json +30 -0
  62. siliconcompiler/remote/server_schema/responses/check_server.json +32 -0
  63. siliconcompiler/remote/server_schema/responses/delete_job.json +18 -0
  64. siliconcompiler/remote/server_schema/responses/get_results.json +21 -0
  65. siliconcompiler/remote/server_schema/responses/remote_run.json +25 -0
  66. siliconcompiler/report/__init__.py +13 -0
  67. siliconcompiler/report/html_report.py +74 -0
  68. siliconcompiler/report/report.py +355 -0
  69. siliconcompiler/report/streamlit_report.py +137 -0
  70. siliconcompiler/report/streamlit_viewer.py +944 -0
  71. siliconcompiler/report/summary_image.py +117 -0
  72. siliconcompiler/report/summary_table.py +105 -0
  73. siliconcompiler/report/utils.py +163 -0
  74. siliconcompiler/scheduler/__init__.py +2092 -0
  75. siliconcompiler/scheduler/docker_runner.py +253 -0
  76. siliconcompiler/scheduler/run_node.py +138 -0
  77. siliconcompiler/scheduler/send_messages.py +178 -0
  78. siliconcompiler/scheduler/slurm.py +208 -0
  79. siliconcompiler/scheduler/validation/email_credentials.json +54 -0
  80. siliconcompiler/schema/__init__.py +7 -0
  81. siliconcompiler/schema/schema_cfg.py +4014 -0
  82. siliconcompiler/schema/schema_obj.py +1841 -0
  83. siliconcompiler/schema/utils.py +93 -0
  84. siliconcompiler/sphinx_ext/__init__.py +0 -0
  85. siliconcompiler/sphinx_ext/dynamicgen.py +1006 -0
  86. siliconcompiler/sphinx_ext/schemagen.py +221 -0
  87. siliconcompiler/sphinx_ext/utils.py +166 -0
  88. siliconcompiler/targets/__init__.py +0 -0
  89. siliconcompiler/targets/asap7_demo.py +68 -0
  90. siliconcompiler/targets/asic_demo.py +38 -0
  91. siliconcompiler/targets/fpgaflow_demo.py +47 -0
  92. siliconcompiler/targets/freepdk45_demo.py +59 -0
  93. siliconcompiler/targets/gf180_demo.py +77 -0
  94. siliconcompiler/targets/skywater130_demo.py +70 -0
  95. siliconcompiler/templates/email/general.j2 +66 -0
  96. siliconcompiler/templates/email/summary.j2 +43 -0
  97. siliconcompiler/templates/issue/README.txt +26 -0
  98. siliconcompiler/templates/issue/run.sh +6 -0
  99. siliconcompiler/templates/report/bootstrap.min.css +7 -0
  100. siliconcompiler/templates/report/bootstrap.min.js +7 -0
  101. siliconcompiler/templates/report/bootstrap_LICENSE.md +24 -0
  102. siliconcompiler/templates/report/sc_report.j2 +427 -0
  103. siliconcompiler/templates/slurm/run.sh +9 -0
  104. siliconcompiler/templates/tcl/manifest.tcl.j2 +137 -0
  105. siliconcompiler/tools/__init__.py +0 -0
  106. siliconcompiler/tools/_common/__init__.py +432 -0
  107. siliconcompiler/tools/_common/asic.py +115 -0
  108. siliconcompiler/tools/_common/sdc/sc_constraints.sdc +76 -0
  109. siliconcompiler/tools/_common/tcl/sc_pin_constraints.tcl +63 -0
  110. siliconcompiler/tools/bambu/bambu.py +32 -0
  111. siliconcompiler/tools/bambu/convert.py +77 -0
  112. siliconcompiler/tools/bluespec/bluespec.py +40 -0
  113. siliconcompiler/tools/bluespec/convert.py +103 -0
  114. siliconcompiler/tools/builtin/_common.py +155 -0
  115. siliconcompiler/tools/builtin/builtin.py +26 -0
  116. siliconcompiler/tools/builtin/concatenate.py +85 -0
  117. siliconcompiler/tools/builtin/join.py +27 -0
  118. siliconcompiler/tools/builtin/maximum.py +46 -0
  119. siliconcompiler/tools/builtin/minimum.py +57 -0
  120. siliconcompiler/tools/builtin/mux.py +70 -0
  121. siliconcompiler/tools/builtin/nop.py +38 -0
  122. siliconcompiler/tools/builtin/verify.py +83 -0
  123. siliconcompiler/tools/chisel/SCDriver.scala +10 -0
  124. siliconcompiler/tools/chisel/build.sbt +27 -0
  125. siliconcompiler/tools/chisel/chisel.py +37 -0
  126. siliconcompiler/tools/chisel/convert.py +140 -0
  127. siliconcompiler/tools/execute/exec_input.py +41 -0
  128. siliconcompiler/tools/execute/execute.py +17 -0
  129. siliconcompiler/tools/genfasm/bitstream.py +61 -0
  130. siliconcompiler/tools/genfasm/genfasm.py +40 -0
  131. siliconcompiler/tools/ghdl/convert.py +87 -0
  132. siliconcompiler/tools/ghdl/ghdl.py +41 -0
  133. siliconcompiler/tools/icarus/compile.py +87 -0
  134. siliconcompiler/tools/icarus/icarus.py +36 -0
  135. siliconcompiler/tools/icepack/bitstream.py +20 -0
  136. siliconcompiler/tools/icepack/icepack.py +43 -0
  137. siliconcompiler/tools/klayout/export.py +117 -0
  138. siliconcompiler/tools/klayout/klayout.py +119 -0
  139. siliconcompiler/tools/klayout/klayout_export.py +205 -0
  140. siliconcompiler/tools/klayout/klayout_operations.py +363 -0
  141. siliconcompiler/tools/klayout/klayout_show.py +242 -0
  142. siliconcompiler/tools/klayout/klayout_utils.py +176 -0
  143. siliconcompiler/tools/klayout/operations.py +194 -0
  144. siliconcompiler/tools/klayout/screenshot.py +98 -0
  145. siliconcompiler/tools/klayout/show.py +101 -0
  146. siliconcompiler/tools/magic/drc.py +49 -0
  147. siliconcompiler/tools/magic/extspice.py +19 -0
  148. siliconcompiler/tools/magic/magic.py +85 -0
  149. siliconcompiler/tools/magic/sc_drc.tcl +96 -0
  150. siliconcompiler/tools/magic/sc_extspice.tcl +54 -0
  151. siliconcompiler/tools/magic/sc_magic.tcl +47 -0
  152. siliconcompiler/tools/montage/montage.py +30 -0
  153. siliconcompiler/tools/montage/tile.py +66 -0
  154. siliconcompiler/tools/netgen/count_lvs.py +132 -0
  155. siliconcompiler/tools/netgen/lvs.py +90 -0
  156. siliconcompiler/tools/netgen/netgen.py +36 -0
  157. siliconcompiler/tools/netgen/sc_lvs.tcl +46 -0
  158. siliconcompiler/tools/nextpnr/apr.py +24 -0
  159. siliconcompiler/tools/nextpnr/nextpnr.py +59 -0
  160. siliconcompiler/tools/openfpgaloader/openfpgaloader.py +39 -0
  161. siliconcompiler/tools/openroad/__init__.py +0 -0
  162. siliconcompiler/tools/openroad/cts.py +45 -0
  163. siliconcompiler/tools/openroad/dfm.py +66 -0
  164. siliconcompiler/tools/openroad/export.py +131 -0
  165. siliconcompiler/tools/openroad/floorplan.py +70 -0
  166. siliconcompiler/tools/openroad/openroad.py +977 -0
  167. siliconcompiler/tools/openroad/physyn.py +27 -0
  168. siliconcompiler/tools/openroad/place.py +41 -0
  169. siliconcompiler/tools/openroad/rcx_bench.py +95 -0
  170. siliconcompiler/tools/openroad/rcx_extract.py +34 -0
  171. siliconcompiler/tools/openroad/route.py +45 -0
  172. siliconcompiler/tools/openroad/screenshot.py +60 -0
  173. siliconcompiler/tools/openroad/scripts/sc_apr.tcl +499 -0
  174. siliconcompiler/tools/openroad/scripts/sc_cts.tcl +64 -0
  175. siliconcompiler/tools/openroad/scripts/sc_dfm.tcl +20 -0
  176. siliconcompiler/tools/openroad/scripts/sc_export.tcl +98 -0
  177. siliconcompiler/tools/openroad/scripts/sc_floorplan.tcl +413 -0
  178. siliconcompiler/tools/openroad/scripts/sc_metrics.tcl +158 -0
  179. siliconcompiler/tools/openroad/scripts/sc_physyn.tcl +7 -0
  180. siliconcompiler/tools/openroad/scripts/sc_place.tcl +84 -0
  181. siliconcompiler/tools/openroad/scripts/sc_procs.tcl +423 -0
  182. siliconcompiler/tools/openroad/scripts/sc_rcx.tcl +63 -0
  183. siliconcompiler/tools/openroad/scripts/sc_rcx_bench.tcl +20 -0
  184. siliconcompiler/tools/openroad/scripts/sc_rcx_extract.tcl +12 -0
  185. siliconcompiler/tools/openroad/scripts/sc_route.tcl +133 -0
  186. siliconcompiler/tools/openroad/scripts/sc_screenshot.tcl +21 -0
  187. siliconcompiler/tools/openroad/scripts/sc_write.tcl +5 -0
  188. siliconcompiler/tools/openroad/scripts/sc_write_images.tcl +361 -0
  189. siliconcompiler/tools/openroad/show.py +94 -0
  190. siliconcompiler/tools/openroad/templates/pex.tcl +8 -0
  191. siliconcompiler/tools/opensta/__init__.py +101 -0
  192. siliconcompiler/tools/opensta/report_libraries.py +28 -0
  193. siliconcompiler/tools/opensta/scripts/sc_procs.tcl +47 -0
  194. siliconcompiler/tools/opensta/scripts/sc_report_libraries.tcl +74 -0
  195. siliconcompiler/tools/opensta/scripts/sc_timing.tcl +268 -0
  196. siliconcompiler/tools/opensta/timing.py +214 -0
  197. siliconcompiler/tools/slang/__init__.py +49 -0
  198. siliconcompiler/tools/slang/lint.py +101 -0
  199. siliconcompiler/tools/surelog/__init__.py +123 -0
  200. siliconcompiler/tools/surelog/parse.py +183 -0
  201. siliconcompiler/tools/surelog/templates/output.v +7 -0
  202. siliconcompiler/tools/sv2v/convert.py +46 -0
  203. siliconcompiler/tools/sv2v/sv2v.py +37 -0
  204. siliconcompiler/tools/template/template.py +125 -0
  205. siliconcompiler/tools/verilator/compile.py +139 -0
  206. siliconcompiler/tools/verilator/lint.py +19 -0
  207. siliconcompiler/tools/verilator/parse.py +27 -0
  208. siliconcompiler/tools/verilator/verilator.py +172 -0
  209. siliconcompiler/tools/vivado/__init__.py +7 -0
  210. siliconcompiler/tools/vivado/bitstream.py +21 -0
  211. siliconcompiler/tools/vivado/place.py +21 -0
  212. siliconcompiler/tools/vivado/route.py +21 -0
  213. siliconcompiler/tools/vivado/scripts/sc_bitstream.tcl +6 -0
  214. siliconcompiler/tools/vivado/scripts/sc_place.tcl +2 -0
  215. siliconcompiler/tools/vivado/scripts/sc_route.tcl +4 -0
  216. siliconcompiler/tools/vivado/scripts/sc_run.tcl +45 -0
  217. siliconcompiler/tools/vivado/scripts/sc_syn_fpga.tcl +25 -0
  218. siliconcompiler/tools/vivado/syn_fpga.py +20 -0
  219. siliconcompiler/tools/vivado/vivado.py +147 -0
  220. siliconcompiler/tools/vpr/_json_constraint.py +63 -0
  221. siliconcompiler/tools/vpr/_xml_constraint.py +109 -0
  222. siliconcompiler/tools/vpr/place.py +137 -0
  223. siliconcompiler/tools/vpr/route.py +124 -0
  224. siliconcompiler/tools/vpr/screenshot.py +54 -0
  225. siliconcompiler/tools/vpr/show.py +88 -0
  226. siliconcompiler/tools/vpr/vpr.py +357 -0
  227. siliconcompiler/tools/xyce/xyce.py +36 -0
  228. siliconcompiler/tools/yosys/lec.py +56 -0
  229. siliconcompiler/tools/yosys/prepareLib.py +59 -0
  230. siliconcompiler/tools/yosys/sc_lec.tcl +84 -0
  231. siliconcompiler/tools/yosys/sc_syn.tcl +79 -0
  232. siliconcompiler/tools/yosys/syn_asic.py +565 -0
  233. siliconcompiler/tools/yosys/syn_asic.tcl +377 -0
  234. siliconcompiler/tools/yosys/syn_asic_fpga_shared.tcl +31 -0
  235. siliconcompiler/tools/yosys/syn_fpga.py +146 -0
  236. siliconcompiler/tools/yosys/syn_fpga.tcl +233 -0
  237. siliconcompiler/tools/yosys/syn_strategies.tcl +81 -0
  238. siliconcompiler/tools/yosys/techmaps/lcu_kogge_stone.v +39 -0
  239. siliconcompiler/tools/yosys/templates/abc.const +2 -0
  240. siliconcompiler/tools/yosys/yosys.py +147 -0
  241. siliconcompiler/units.py +259 -0
  242. siliconcompiler/use.py +177 -0
  243. siliconcompiler/utils/__init__.py +423 -0
  244. siliconcompiler/utils/asic.py +158 -0
  245. siliconcompiler/utils/showtools.py +25 -0
  246. siliconcompiler-0.26.5.dist-info/LICENSE +190 -0
  247. siliconcompiler-0.26.5.dist-info/METADATA +195 -0
  248. siliconcompiler-0.26.5.dist-info/RECORD +251 -0
  249. siliconcompiler-0.26.5.dist-info/WHEEL +5 -0
  250. siliconcompiler-0.26.5.dist-info/entry_points.txt +12 -0
  251. siliconcompiler-0.26.5.dist-info/top_level.txt +1 -0
siliconcompiler/remote/schema.py
@@ -0,0 +1,106 @@
+ from siliconcompiler.schema.schema_cfg import scparam
+ from siliconcompiler.schema import Schema
+
+
+ SCHEMA_VERSION = '0.0.1'
+
+
+ def schema_cfg():
+     # Basic schema setup
+     cfg = {}
+
+     scparam(cfg, ['schemaversion'],
+             sctype='str',
+             scope='global',
+             defvalue=SCHEMA_VERSION,
+             require='all',
+             shorthelp="Schema version number",
+             lock=True,
+             switch="-schemaversion <str>",
+             example=["api: server.get('schemaversion')"],
+             schelp="""SiliconCompiler server schema version number.""")
+
+     scparam(cfg, ['option', 'port'],
+             sctype='int',
+             scope='global',
+             defvalue=8080,
+             require='all',
+             shorthelp="Port number to run the server on.",
+             switch="-port <int>",
+             example=["cli: -port 8000",
+                      "api: server.set('option', 'port', 8080)"],
+             schelp="""Port number to run the server on.""")
+
+     scparam(cfg, ['option', 'cluster'],
+             sctype='enum',
+             enum=['local', 'slurm'],
+             scope='global',
+             defvalue='local',
+             require='all',
+             shorthelp="Type of compute cluster to use.",
+             switch="-cluster <str>",
+             example=["cli: -cluster slurm",
+                      "api: server.set('option', 'cluster', 'slurm')"],
+             schelp="""Type of compute cluster to use.""")
+
+     scparam(cfg, ['option', 'nfsmount'],
+             sctype='dir',
+             scope='global',
+             defvalue='/nfs/sc_compute',
+             require='all',
+             shorthelp="Directory of mounted shared NFS storage.",
+             switch="-nfsmount <dir>",
+             example=["cli: -nfsmount ~/sc_server",
+                      "api: server.set('option', 'nfsmount', '~/sc_server')"],
+             schelp="""Directory of mounted shared NFS storage.""")
+
+     scparam(cfg, ['option', 'auth'],
+             sctype='bool',
+             scope='global',
+             defvalue=False,
+             require='all',
+             shorthelp="Flag determining whether to enable authenticated and encrypted jobs.",
+             switch="-auth <bool>",
+             example=["cli: -auth true",
+                      "api: server.set('option', 'auth', True)"],
+             schelp="""Flag determining whether to enable authenticated and encrypted jobs.""")
+
+     scparam(cfg, ['option', 'cfg'],
+             sctype='[file]',
+             scope='job',
+             shorthelp="Configuration manifest",
+             switch="-cfg <file>",
+             example=["cli: -cfg mypdk.json",
+                      "api: chip.set('option', 'cfg', 'mypdk.json')"],
+             schelp="""
+             List of filepaths to JSON formatted schema configuration
+             manifests. The files are read in automatically when using the
+             command line application. In Python programs, JSON manifests
+             can be merged into the current working manifest using the
+             read_manifest() method.""")
+
+     scparam(cfg, ['option', 'loglevel'],
+             sctype='enum',
+             enum=["info", "warning", "error", "critical", "debug"],
+             pernode='optional',
+             scope='job',
+             defvalue='info',
+             shorthelp="Logging level",
+             switch="-loglevel <str>",
+             example=[
+                 "cli: -loglevel info",
+                 "api: server.set('option', 'loglevel', 'info')"],
+             schelp="""
+             Provides explicit control over the level of debug logging printed.""")
+
+     return cfg
+
+
+ class ServerSchema(Schema):
+     def __init__(self, cfg=None, manifest=None, logger=None):
+         super().__init__(cfg=cfg,
+                          manifest=manifest,
+                          logger=logger)
+
+     def _init_schema_cfg(self):
+         return schema_cfg()
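
The schema above only declares server-side options (port, cluster, nfsmount, auth, cfg, loglevel). As a minimal usage sketch, assuming the Schema base class exposes get()/set() the way the Server wrapper in server.py below uses them; the printed values are just the defaults declared by schema_cfg():

    import logging

    from siliconcompiler.remote.schema import ServerSchema

    schema = ServerSchema(logger=logging.getLogger('sc_server_demo'))

    # Read back the defaults declared by schema_cfg().
    print(schema.get('option', 'port'))      # 8080
    print(schema.get('option', 'cluster'))   # 'local'
    print(schema.get('option', 'nfsmount'))  # '/nfs/sc_compute'

    # Override a value; clobber semantics mirror the Chip schema.
    schema.set('option', 'port', 8000)
    print(schema.get('option', 'port'))      # 8000
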
siliconcompiler/remote/server.py
@@ -0,0 +1,507 @@
+ # Copyright 2020 Silicon Compiler Authors. All Rights Reserved.
+
+ from aiohttp import web
+ import threading
+ import json
+ import logging as log
+ import os
+ import shutil
+ import uuid
+ import tarfile
+ import sys
+ import fastjsonschema
+ from pathlib import Path
+ from fastjsonschema import JsonSchemaException
+ import io
+
+ from siliconcompiler import Chip, Schema
+ from siliconcompiler.schema import utils as schema_utils
+ from siliconcompiler._metadata import version as sc_version
+ from siliconcompiler.schema import SCHEMA_VERSION as sc_schema_version
+ from siliconcompiler.remote.schema import ServerSchema
+ from siliconcompiler.remote import banner, JobStatus
+ from siliconcompiler.scheduler.slurm import get_configuration_directory
+ from siliconcompiler.flowgraph import nodes_to_execute
+
+
+ # Compile validation code for API request bodies.
+ api_dir = Path(__file__).parent / 'server_schema' / 'requests'
+
+ # 'remote_run': Run a stage of a job using the server's cluster settings.
+ with open(api_dir / 'remote_run.json') as schema:
+     validate_remote_run = fastjsonschema.compile(json.loads(schema.read()))
+
+ # 'check_progress': Check whether a given job stage is currently running.
+ with open(api_dir / 'check_progress.json') as schema:
+     validate_check_progress = fastjsonschema.compile(json.loads(schema.read()))
+
+ # 'check_server': Check the server's status and version information.
+ with open(api_dir / 'check_server.json') as schema:
+     validate_check_server = fastjsonschema.compile(json.loads(schema.read()))
+
+ # 'cancel_job': Cancel a running job.
+ with open(api_dir / 'cancel_job.json') as schema:
+     validate_cancel_job = fastjsonschema.compile(json.loads(schema.read()))
+
+ # 'delete_job': Delete a job and remove it from server-side storage.
+ with open(api_dir / 'delete_job.json') as schema:
+     validate_delete_job = fastjsonschema.compile(json.loads(schema.read()))
+
+ # 'get_results': Fetch the results of a job run.
+ # Currently, the 'job_hash' is included in the URL for this call.
+ with open(api_dir / 'get_results.json') as schema:
+     validate_get_results = fastjsonschema.compile(json.loads(schema.read()))
+
+
+ class Server:
+     """
+     The core class for the siliconcompiler 'gateway' server, which can run
+     locally or on a remote host. Its job is to process requests for
+     asynchronous siliconcompiler jobs, by using the slurm HPC daemon to
+     schedule work on available compute nodes. It can also be configured to
+     launch new compute nodes in the cloud, for on-demand jobs.
+     """
+
+     __version__ = '0.0.1'
+
+     ####################
+     def __init__(self, loglevel="info"):
+         '''
+         Init method for Server object
+         '''
+
+         # Initialize logger
+         self.logger = log.getLogger(f'sc_server_{id(self)}')
+         handler = log.StreamHandler(stream=sys.stdout)
+         formatter = log.Formatter('%(asctime)s | %(levelname)-8s | %(message)s')
+         handler.setFormatter(formatter)
+         self.logger.addHandler(handler)
+         self.logger.setLevel(schema_utils.translate_loglevel(loglevel))
+
+         self.schema = ServerSchema(logger=self.logger)
+
+         # Set up a dictionary to track running jobs.
+         self.sc_jobs = {}
+
+     def run(self):
+         if not os.path.exists(self.nfs_mount):
+             raise FileNotFoundError(f'{self.nfs_mount} could not be found.')
+
+         self.logger.info(f"Running in: {self.nfs_mount}")
+         # If authentication is enabled, try connecting to the SQLite3 database.
+         # (An empty one will be created if it does not exist.)
+
+         # If authentication is enabled, load [username : password/key] mappings.
+         # Remember, this development server is intended to be a minimal
+         # demonstration of the API, and should only be used for testing purposes.
+         # You should NEVER store plaintext passwords in a production
+         # server implementation.
+         if self.get('option', 'auth'):
+             self.user_keys = {}
+             json_path = os.path.join(self.nfs_mount, 'users.json')
+             try:
+                 with open(json_path, 'r') as users_file:
+                     users_json = json.loads(users_file.read())
+                 for mapping in users_json['users']:
+                     username = mapping['username']
+                     self.user_keys[username] = {
+                         'password': mapping['password'],
+                         'compute_time': 0,
+                         'bandwidth': 0
+                     }
+                     if 'compute_time' in mapping:
+                         self.user_keys[username]['compute_time'] = mapping['compute_time']
+                     if 'bandwidth' in mapping:
+                         self.user_keys[username]['bandwidth'] = mapping['bandwidth']
+             except Exception:
+                 self.logger.warning("Could not find well-formatted 'users.json' "
+                                     "file in the server's working directory. "
+                                     "(User : Key) mappings were not imported.")
+
+         # Create a minimal web server to process the 'remote_run' API call.
+         self.app = web.Application()
+         self.app.add_routes([
+             web.post('/remote_run/', self.handle_remote_run),
+             web.post('/check_progress/', self.handle_check_progress),
+             web.post('/check_server/', self.handle_check_server),
+             web.post('/delete_job/', self.handle_delete_job),
+             web.post('/get_results/{job_hash}.tar.gz', self.handle_get_results),
+         ])
+         # TODO: Put zip files in a different directory.
+         # For security reasons, this is not a good public-facing solution.
+         # There's no access control on which files can be downloaded.
+         # But this is an example server which only implements a minimal API.
+         self.app.router.add_static('/get_results/', self.nfs_mount)
+
+         # Start the async server.
+         web.run_app(self.app, port=self.get('option', 'port'))
+
+     def create_cmdline(self, progname, description=None, switchlist=None, additional_args=None):
+         def print_banner():
+             print(banner)
+             print("Version:", Server.__version__, "\n")
+             print("-" * 80)
+
+         return self.schema.create_cmdline(
+             progname=progname,
+             description=description,
+             switchlist=switchlist,
+             input_map=None,
+             additional_args=additional_args,
+             version=Server.__version__,
+             print_banner=print_banner,
+             logger=self.logger)
+
+     ####################
+     async def handle_remote_run(self, request):
+         '''
+         API handler for 'remote_run' commands. This method delegates
+         a 'Chip.run(...)' method to a compute node using slurm.
+         '''
+
+         # Temporary file path to store streamed data.
+         tmp_file = os.path.join(self.nfs_mount, uuid.uuid4().hex)
+
+         # Set up a multipart reader to read in the large file, and param data.
+         reader = await request.multipart()
+         while True:
+             # Get the next part; if it doesn't exist, we're done.
+             part = await reader.next()
+             if part is None:
+                 break
+
+             # Save the initial 'import' step archive. Note: production server
+             # implementations may want to encrypt data before storing it on disk.
+             if part.name == 'import':
+                 with open(tmp_file, 'wb') as f:
+                     while True:
+                         chunk = await part.read_chunk()
+                         if not chunk:
+                             break
+                         f.write(chunk)
+
+             # Retrieve JSON request parameters.
+             elif part.name == 'params':
+                 # Get the job parameters.
+                 params = await part.json()
+
+         if 'chip_cfg' not in params:
+             return self.__response('Manifest not provided.', status=400)
+         chip_cfg = params['chip_cfg']
+
+         # Process input parameters
+         job_params, response = self._check_request(params['params'],
+                                                    validate_remote_run)
+         if response is not None:
+             return response
+
+         # Create a dummy Chip object to make schema traversal easier.
+         # start with a dummy name, as this will be overwritten
+         chip = Chip('server')
+         # Add provided schema
+         chip.schema = Schema(cfg=chip_cfg)
+
+         # Fetch some common values.
+         design = chip.design
+         job_name = chip.get('option', 'jobname')
+         job_hash = uuid.uuid4().hex
+         chip.set('record', 'remoteid', job_hash)
+
+         # Ensure that the job's root directory exists.
+         job_root = os.path.join(self.nfs_mount, job_hash)
+         job_dir = os.path.join(job_root, design, job_name)
+         os.makedirs(job_dir, exist_ok=True)
+
+         # Move the uploaded archive and un-zip it.
+         # (Contents will be encrypted for authenticated jobs)
+         with tarfile.open(tmp_file, "r:gz") as tar:
+             tar.extractall(path=job_dir)
+
+         # Delete the temporary file if it still exists.
+         if os.path.exists(tmp_file):
+             os.remove(tmp_file)
+
+         # Create the working directory for the given 'job hash' if necessary.
+         chip.set('option', 'builddir', job_root)
+
+         # Remove 'remote' JSON config value to run locally on compute node.
+         chip.set('option', 'remote', False)
+
+         # Write JSON config to shared compute storage.
+         os.makedirs(os.path.join(job_root, 'configs'), exist_ok=True)
+
+         # Run the job with the configured clustering option. (Non-blocking)
+         job_proc = threading.Thread(target=self.remote_sc,
+                                     args=[
+                                         chip,
+                                         job_params['username']])
+         job_proc.start()
+
+         # Return a response to the client.
+         return web.json_response({'message': f"Starting job: {job_hash}",
+                                   'interval': 30,
+                                   'job_hash': job_hash})
+
+     ####################
+     async def handle_get_results(self, request):
+         '''
+         API handler to redirect 'get_results' POST calls.
+         '''
+
+         # Process input parameters
+         params = await request.json()
+         job_params, response = self._check_request(params,
+                                                    validate_get_results)
+         job_params['job_hash'] = request.match_info.get('job_hash', '')
+         if response is not None:
+             return response
+
+         job_hash = job_params['job_hash']
+         node = job_params['node'] if 'node' in job_params else None
+
+         resp = web.StreamResponse(
+             status=200,
+             reason='OK',
+             headers={
+                 'Content-Type': 'application/x-tar',
+                 'Content-Disposition': f'attachment; filename="{job_hash}_{node}.tar.gz"'
+             },
+         )
+         await resp.prepare(request)
+
+         zipfn = os.path.join(self.nfs_mount, job_hash, f'{job_hash}_{node}.tar.gz')
+         if not node:
+             with tarfile.open(zipfn, 'w:gz') as tar:
+                 text = "Done"
+                 metadata_file = io.BytesIO(text.encode('ascii'))
+                 tarinfo = tarfile.TarInfo(f'{job_hash}/done')
+                 tarinfo.size = metadata_file.getbuffer().nbytes
+                 tar.addfile(tarinfo=tarinfo, fileobj=metadata_file)
+
+         with open(zipfn, 'rb') as zipf:
+             await resp.write(zipf.read())
+
+         await resp.write_eof()
+
+         return resp
+
+     ####################
+     async def handle_delete_job(self, request):
+         '''
+         API handler for 'delete_job' requests. Delete a job from shared
+         cloud compute storage.
+         '''
+
+         # Process input parameters
+         job_params, response = self._check_request(await request.json(),
+                                                    validate_delete_job)
+         if response is not None:
+             return response
+
+         job_hash = job_params['job_hash']
+
+         # Determine if the job is running.
+         for job in self.sc_jobs:
+             if job_hash in job:
+                 return self.__response("Error: job is still running.", status=400)
+
+         # Delete job hash directory, only if it exists.
+         # TODO: This assumes no malicious input.
+         # Again, we will need a more mature server framework to implement
+         # good access control and security policies for a public-facing service.
+         build_dir = os.path.join(self.nfs_mount, job_hash)
+         check_dir = os.path.dirname(build_dir)
+         if check_dir == self.nfs_mount:
+             if os.path.exists(build_dir):
+                 shutil.rmtree(build_dir)
+
+             tar_file = f'{build_dir}.tar.gz'
+             if os.path.exists(tar_file):
+                 os.remove(tar_file)
+
+         return web.Response(text="Job deleted.")
+
+     ####################
+     async def handle_check_progress(self, request):
+         '''
+         API handler for the 'check progress' endpoint. Currently,
+         it only returns a response containing a 'still running', 'done', or
+         'not found' message. In the future, it can respond with up-to-date
+         information about the job's progress and intermediary outputs.
+         '''
+
+         # Process input parameters
+         job_params, response = self._check_request(await request.json(),
+                                                    validate_check_progress)
+         if response is not None:
+             return response
+
+         job_hash = job_params['job_hash']
+         username = job_params['username']
+
+         jobname = self.job_name(username, job_hash)
+
+         # Determine if the job is running.
+         # TODO: Return information about individual flowgraph nodes.
+         if jobname in self.sc_jobs:
+             resp = {
+                 'status': JobStatus.RUNNING,
+                 'message': 'Job is currently running on the server.',
+             }
+         else:
+             resp = {
+                 'status': JobStatus.COMPLETED,
+                 'message': 'Job has no running steps.',
+             }
+         return web.json_response(resp)
+
+     ####################
+     async def handle_check_server(self, request):
+         '''
+         API handler for the 'check server' endpoint.
+         '''
+
+         # Process input parameters
+
+         job_params, response = self._check_request(await request.json(),
+                                                    validate_check_server)
+         if response is not None:
+             return response
+
+         resp = {
+             'status': 'ready',
+             'versions': {
+                 'sc': sc_version,
+                 'sc_schema': sc_schema_version,
+                 'sc_server': Server.__version__,
+             },
+             'progress_interval': 30
+         }
+
+         username = job_params['username']
+         if username:
+             resp['user_info'] = {
+                 'compute_time': self.user_keys[username]['compute_time'],
+                 'bandwidth_kb': self.user_keys[username]['bandwidth'],
+             }
+
+         return web.json_response(resp)
+
+     def job_name(self, username, job_hash):
+         if username:
+             return f'{username}_{job_hash}'
+         else:
+             return job_hash
+
+     ####################
+     def remote_sc(self, chip, username):
+         '''
+         Async method to delegate a 'Chip.run()' command to a host,
+         and send an email notification when the job completes.
+         '''
+
+         # Assemble core job parameters.
+         job_hash = chip.get('record', 'remoteid')
+
+         # Mark the job run as busy.
+         sc_job_name = self.job_name(username, job_hash)
+         self.sc_jobs[sc_job_name] = 'busy'
+
+         build_dir = os.path.join(self.nfs_mount, job_hash)
+         chip.set('option', 'builddir', build_dir)
+         chip.set('option', 'remote', False)
+
+         job_cfg_dir = get_configuration_directory(chip)
+         os.makedirs(job_cfg_dir, exist_ok=True)
+         chip.write_manifest(f"{job_cfg_dir}/chip{chip.get('option', 'jobname')}.json")
+
+         if self.get('option', 'cluster') == 'slurm':
+             # Run the job with slurm clustering.
+             chip.set('option', 'scheduler', 'name', 'slurm')
+
+         # Run the job.
+         chip.run()
+
+         # Archive each task.
+         for (step, index) in nodes_to_execute(chip):
+             chip.cwd = os.path.join(chip.get('option', 'builddir'), '..')
+             tf = tarfile.open(os.path.join(self.nfs_mount,
+                                            job_hash,
+                                            f'{job_hash}_{step}{index}.tar.gz'),
+                               mode='w:gz')
+             chip._archive_node(tf, step=step, index=index)
+             tf.close()
+
+         # (Email notifications can be sent here using your preferred API)
+
+         # Mark the job hash as being done.
+         self.sc_jobs.pop(sc_job_name)
+
+     ####################
+     def __auth_password(self, username, password):
+         '''
+         Helper method to authenticate a username : password combination.
+         This minimal implementation of the API uses a simple string match, because
+         we don't have a database/signup process/etc for the development server.
+         But production server implementations should ALWAYS hash and salt
+         passwords before storing them.
+         NEVER store production passwords as plaintext!
+         '''
+
+         if username not in self.user_keys:
+             return False
+         return password == self.user_keys[username]['password']
+
+     def _check_request(self, request, json_validator):
+         params = {}
+         if request:
+             try:
+                 params = json_validator(request)
+             except JsonSchemaException as e:
+                 return (params, self.__response(f"Error: Invalid parameters: {e}.", status=400))
+
+         if not request:
+             return (params, self.__response("Error: Invalid parameters.", status=400))
+
+         # Check for authentication parameters.
+         if self.get('option', 'auth'):
+             if ('username' in params) and ('key' in params):
+                 # Authenticate the user.
+                 if not self.__auth_password(params['username'], params['key']):
+                     return (params, self.__response("Authentication error.", status=403))
+
+             else:
+                 return (params,
+                         self.__response("Error: some authentication parameters are missing.",
+                                         status=400))
+
+         if 'username' not in params:
+             params['username'] = None
+
+         return (params, None)
+
+     ###################
+     def __response(self, message, status=200):
+         return web.json_response({'message': message}, status=status)
+
+     ###################
+     @property
+     def nfs_mount(self):
+         # Ensure that NFS mounting path is absolute.
+         return os.path.abspath(self.get('option', 'nfsmount'))
+
+     def get(self, *keypath, field='value'):
+         return self.schema.get(*keypath, field=field)
+
+     def set(self, *args, field='value', clobber=True):
+         keypath = args[:-1]
+         value = args[-1]
+
+         if keypath == ('option', 'loglevel') and field == 'value':
+             self.logger.setLevel(schema_utils.translate_loglevel(value))
+
+         self.schema.set(*keypath, value, field=field, clobber=clobber)
+
+     def write_configuration(self, filepath):
+         with open(filepath, 'w') as f:
+             self.schema.write_json(f)
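
For orientation, a minimal launch sketch for the Server class above. This is an assumption-laden example rather than the packaged server entry point (see apps/sc_server.py in the file list): the NFS path is illustrative and must exist before run(), which blocks inside aiohttp's event loop and serves the /remote_run/, /check_progress/, /check_server/, /delete_job/ and /get_results/ routes registered in run():

    from siliconcompiler.remote.server import Server

    server = Server(loglevel='info')
    server.set('option', 'nfsmount', '/tmp/sc_server')  # hypothetical path; must already exist
    server.set('option', 'port', 8080)                  # schema default
    server.set('option', 'cluster', 'local')            # or 'slurm' to delegate to the HPC daemon
    server.set('option', 'auth', False)                 # no users.json required in this mode
    server.run()                                        # blocking; Ctrl-C to stop
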
siliconcompiler/remote/server_schema/requests/cancel_job.json
@@ -0,0 +1,51 @@
+ {
+     "title": "cancel_job/",
+     "description": "Schema describing parameters for canceling an ongoing job run.",
+     "examples": [
+         {
+             "job_hash": "0123456789abcdeffedcba9876543210"
+         },
+         {
+             "username": "valid_user",
+             "key": "valid_base64_encoded_key",
+             "job_hash": "0123456789abcdeffedcba9876543210"
+         }
+     ],
+
+     "type": "object",
+     "additionalProperties": false,
+     "properties": {
+         "username": {
+             "title": "Username",
+             "description": "User account ID. Required for authentication if the data was originally imported by a valid user.",
+             "examples": ["my_user", "account1234"],
+
+             "type": "string",
+             "pattern": "^[^\\s;]*$"
+         },
+
+         "key": {
+             "title": "Authentication Key",
+             "description": "Base64-encoded decryption key for the user account's public key. Required if 'username' is provided.",
+             "examples": ["PHlvdXJfa2V5X2hlcmU+"],
+
+             "type": "string"
+         },
+
+         "job_hash": {
+             "title": "Job Hash",
+             "description": "UUID associated with the data that should be cancelled.",
+             "examples": ["01234567890abcdeffedcba0987654321"],
+
+             "type": "string",
+             "pattern": "^[0-9a-f]{32}$"
+         }
+     },
+
+     "required": ["job_hash"],
+
+     "dependencies": {
+         "username": ["key"],
+         "key": ["username"]
+     }
+ }
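
This request schema is consumed by the fastjsonschema validators compiled at the top of server.py. A small validation sketch, assuming the schema file is read from the installed package layout shown in the file list above:

    import json
    from pathlib import Path

    import fastjsonschema
    from fastjsonschema import JsonSchemaException

    schema_path = (Path('siliconcompiler') / 'remote' / 'server_schema'
                   / 'requests' / 'cancel_job.json')
    validate_cancel_job = fastjsonschema.compile(json.loads(schema_path.read_text()))

    good = {'job_hash': '0123456789abcdeffedcba9876543210'}
    bad = {'job_hash': 'not-a-32-char-hex-string'}

    print(validate_cancel_job(good))  # returns the validated data on success

    try:
        validate_cancel_job(bad)
    except JsonSchemaException as e:
        print(f'rejected: {e}')       # 'job_hash' must match ^[0-9a-f]{32}$
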