siliconcompiler 0.34.0__py3-none-any.whl → 0.34.1__py3-none-any.whl
This diff compares the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their public registries.
- siliconcompiler/_metadata.py +1 -1
- siliconcompiler/apps/_common.py +1 -1
- siliconcompiler/apps/sc.py +1 -1
- siliconcompiler/apps/sc_issue.py +1 -1
- siliconcompiler/apps/sc_remote.py +3 -3
- siliconcompiler/apps/sc_show.py +2 -2
- siliconcompiler/apps/utils/replay.py +4 -4
- siliconcompiler/checklist.py +203 -2
- siliconcompiler/core.py +28 -246
- siliconcompiler/data/templates/email/general.j2 +3 -3
- siliconcompiler/data/templates/email/summary.j2 +1 -1
- siliconcompiler/data/templates/issue/README.txt +1 -1
- siliconcompiler/data/templates/report/sc_report.j2 +7 -7
- siliconcompiler/design.py +148 -54
- siliconcompiler/flowgraph.py +50 -15
- siliconcompiler/optimizer/vizier.py +2 -2
- siliconcompiler/pdk.py +5 -5
- siliconcompiler/remote/client.py +18 -12
- siliconcompiler/remote/server.py +2 -2
- siliconcompiler/report/dashboard/cli/__init__.py +6 -6
- siliconcompiler/report/dashboard/cli/board.py +3 -3
- siliconcompiler/report/dashboard/web/components/__init__.py +5 -5
- siliconcompiler/report/dashboard/web/components/flowgraph.py +4 -4
- siliconcompiler/report/dashboard/web/components/graph.py +2 -2
- siliconcompiler/report/dashboard/web/state.py +1 -1
- siliconcompiler/report/dashboard/web/utils/__init__.py +5 -5
- siliconcompiler/report/html_report.py +1 -1
- siliconcompiler/report/report.py +4 -4
- siliconcompiler/report/summary_table.py +2 -2
- siliconcompiler/report/utils.py +5 -5
- siliconcompiler/scheduler/docker.py +3 -8
- siliconcompiler/scheduler/run_node.py +2 -7
- siliconcompiler/scheduler/scheduler.py +14 -11
- siliconcompiler/scheduler/schedulernode.py +136 -126
- siliconcompiler/scheduler/send_messages.py +3 -3
- siliconcompiler/scheduler/slurm.py +5 -3
- siliconcompiler/scheduler/taskscheduler.py +8 -7
- siliconcompiler/schema/baseschema.py +1 -2
- siliconcompiler/schema/namedschema.py +26 -2
- siliconcompiler/tool.py +398 -175
- siliconcompiler/tools/__init__.py +2 -0
- siliconcompiler/tools/builtin/_common.py +5 -5
- siliconcompiler/tools/builtin/concatenate.py +5 -5
- siliconcompiler/tools/builtin/minimum.py +4 -4
- siliconcompiler/tools/builtin/mux.py +4 -4
- siliconcompiler/tools/builtin/nop.py +4 -4
- siliconcompiler/tools/builtin/verify.py +7 -7
- siliconcompiler/tools/execute/exec_input.py +1 -1
- siliconcompiler/tools/genfasm/genfasm.py +1 -6
- siliconcompiler/tools/openroad/_apr.py +5 -1
- siliconcompiler/tools/openroad/antenna_repair.py +1 -1
- siliconcompiler/tools/openroad/macro_placement.py +1 -1
- siliconcompiler/tools/openroad/power_grid.py +1 -1
- siliconcompiler/tools/openroad/scripts/common/procs.tcl +5 -0
- siliconcompiler/tools/opensta/timing.py +26 -3
- siliconcompiler/tools/slang/__init__.py +2 -2
- siliconcompiler/tools/surfer/__init__.py +0 -0
- siliconcompiler/tools/surfer/show.py +53 -0
- siliconcompiler/tools/surfer/surfer.py +30 -0
- siliconcompiler/tools/vpr/route.py +27 -14
- siliconcompiler/tools/vpr/vpr.py +23 -6
- siliconcompiler/tools/yosys/__init__.py +1 -1
- siliconcompiler/tools/yosys/scripts/procs.tcl +143 -0
- siliconcompiler/tools/yosys/{sc_synth_asic.tcl → scripts/sc_synth_asic.tcl} +4 -0
- siliconcompiler/tools/yosys/{sc_synth_fpga.tcl → scripts/sc_synth_fpga.tcl} +24 -77
- siliconcompiler/tools/yosys/syn_fpga.py +14 -0
- siliconcompiler/toolscripts/_tools.json +8 -12
- siliconcompiler/toolscripts/rhel9/install-vpr.sh +0 -2
- siliconcompiler/toolscripts/ubuntu22/install-surfer.sh +33 -0
- siliconcompiler/toolscripts/ubuntu24/install-surfer.sh +33 -0
- siliconcompiler/utils/__init__.py +2 -1
- siliconcompiler/utils/flowgraph.py +24 -23
- siliconcompiler/utils/issue.py +23 -29
- siliconcompiler/utils/logging.py +35 -6
- siliconcompiler/utils/showtools.py +6 -1
- {siliconcompiler-0.34.0.dist-info → siliconcompiler-0.34.1.dist-info}/METADATA +15 -25
- {siliconcompiler-0.34.0.dist-info → siliconcompiler-0.34.1.dist-info}/RECORD +84 -82
- siliconcompiler/tools/yosys/procs.tcl +0 -71
- siliconcompiler/toolscripts/rhel9/install-yosys-parmys.sh +0 -68
- siliconcompiler/toolscripts/ubuntu22/install-yosys-parmys.sh +0 -68
- siliconcompiler/toolscripts/ubuntu24/install-yosys-parmys.sh +0 -68
- /siliconcompiler/tools/yosys/{sc_lec.tcl → scripts/sc_lec.tcl} +0 -0
- /siliconcompiler/tools/yosys/{sc_screenshot.tcl → scripts/sc_screenshot.tcl} +0 -0
- /siliconcompiler/tools/yosys/{syn_strategies.tcl → scripts/syn_strategies.tcl} +0 -0
- {siliconcompiler-0.34.0.dist-info → siliconcompiler-0.34.1.dist-info}/WHEEL +0 -0
- {siliconcompiler-0.34.0.dist-info → siliconcompiler-0.34.1.dist-info}/entry_points.txt +0 -0
- {siliconcompiler-0.34.0.dist-info → siliconcompiler-0.34.1.dist-info}/licenses/LICENSE +0 -0
- {siliconcompiler-0.34.0.dist-info → siliconcompiler-0.34.1.dist-info}/top_level.txt +0 -0
siliconcompiler/tools/yosys/scripts/procs.tcl (new file)

@@ -0,0 +1,143 @@
+# This file contains a set of procedures that are shared
+# between syn_asic.tcl and syn_fpga.tcl
+
+proc post_techmap { { opt_args "" } } {
+    # perform techmap in case previous techmaps introduced constructs
+    # that need techmapping
+    yosys techmap
+    # Quick optimization
+    yosys opt {*}$opt_args -purge
+}
+
+proc sc_map_memory { lib_file techmap_file do_rom } {
+    set design_mod 0
+
+    if { $lib_file != "" } {
+        yosys memory_libmap -lib $lib_file
+        set design_mod 1
+    }
+
+    if { $do_rom } {
+        yosys memory_map -rom-only
+        set design_mod 1
+    }
+
+    if { $techmap_file != "" } {
+        yosys techmap -map $techmap_file
+        set design_mod 1
+    }
+
+    return $design_mod
+}
+
+proc sc_apply_params { } {
+    global sc_design
+
+    yosys chparam -list $sc_design
+    if { [sc_cfg_exists option param] } {
+        yosys echo off
+        set module_params [yosys tee -q -s result.string chparam -list $sc_design]
+        yosys echo on
+
+        dict for {key value} [sc_cfg_get option param] {
+            if { ![string is integer $value] } {
+                set value [concat \"$value\"]
+            }
+
+            if { [string first $key $module_params] != -1 } {
+                yosys chparam -set $key $value $sc_design
+            } else {
+                puts "Warning: $key is not a defined parameter in $sc_design"
+            }
+        }
+    }
+}
+
+proc sc_get_scratchpad { name } {
+    yosys echo off
+    set value [yosys tee -q -s result.string scratchpad -get $name]
+    yosys echo on
+
+    return $value
+}
+
+proc sc_load_plugin { name } {
+    catch { yosys tee -q -s sc.load.test plugin -i $name }
+    set load_test [sc_get_scratchpad sc.load.test]
+    if { [string first "ERROR" $load_test] == -1 } {
+        return 1
+    }
+    return 0
+}
+
+proc sc_fpga_legalize_flops { feature_set } {
+    set legalize_flop_types []
+
+    if {
+        [lsearch -exact $feature_set enable] >= 0 &&
+        [lsearch -exact $feature_set async_set] >= 0 &&
+        [lsearch -exact $feature_set async_reset] >= 0
+    } {
+        lappend legalize_flop_types \$_DFF_P_
+        lappend legalize_flop_types \$_DFF_PN?_
+        lappend legalize_flop_types \$_DFFE_PP_
+        lappend legalize_flop_types \$_DFFE_PN?P_
+        lappend legalize_flop_types \$_DFFSR_PNN_
+        lappend legalize_flop_types \$_DFFSRE_PNNP_
+    } elseif {
+        [lsearch -exact $feature_set enable] >= 0 &&
+        [lsearch -exact $feature_set async_set] >= 0
+    } {
+        lappend legalize_flop_types \$_DFF_P_
+        lappend legalize_flop_types \$_DFF_PN1_
+        lappend legalize_flop_types \$_DFFE_PP_
+        lappend legalize_flop_types \$_DFFE_PN1P_
+    } elseif {
+        [lsearch -exact $feature_set enable] >= 0 &&
+        [lsearch -exact $feature_set async_reset] >= 0
+    } {
+        lappend legalize_flop_types \$_DFF_P_
+        lappend legalize_flop_types \$_DFF_PN0_
+        lappend legalize_flop_types \$_DFFE_PP_
+        lappend legalize_flop_types \$_DFFE_PN0P_
+    } elseif { [lsearch -exact $feature_set enable] >= 0 } {
+        lappend legalize_flop_types \$_DFF_P_
+        lappend legalize_flop_types \$_DFF_P??_
+        lappend legalize_flop_types \$_DFFE_PP_
+        lappend legalize_flop_types \$_DFFE_P??P_
+    } elseif {
+        [lsearch -exact $feature_set async_set] >= 0 &&
+        [lsearch -exact $feature_set async_reset] >= 0
+    } {
+        lappend legalize_flop_types \$_DFF_P_
+        lappend legalize_flop_types \$_DFF_PN?_
+        lappend legalize_flop_types \$_DFFSR_PNN_
+    } elseif { [lsearch -exact $feature_set async_set] >= 0 } {
+        lappend legalize_flop_types \$_DFF_P_
+        lappend legalize_flop_types \$_DFF_PN1_
+    } elseif { [lsearch -exact $feature_set async_reset] >= 0 } {
+        lappend legalize_flop_types \$_DFF_P_
+        lappend legalize_flop_types \$_DFF_PN0_
+    } else {
+        # Choose to legalize to async resets even though they
+        # won't tech map. Goal is to get the user to fix
+        # their code and put in synchronous resets
+        lappend legalize_flop_types \$_DFF_P_
+        lappend legalize_flop_types \$_DFF_P??_
+    }
+
+    set legalize_list []
+    foreach flop_type $legalize_flop_types {
+        lappend legalize_list -cell $flop_type 01
+    }
+    yosys log "Legalize list: $legalize_list"
+    yosys dfflegalize {*}$legalize_list
+}
+
+proc sc_fpga_get_dsp_options { sc_syn_dsp_options } {
+    set option_text [list]
+    foreach dsp_option $sc_syn_dsp_options {
+        lappend option_text -D $dsp_option
+    }
+    return $option_text
+}
siliconcompiler/tools/yosys/scripts/sc_synth_asic.tcl

@@ -507,6 +507,10 @@ yosys abc {*}$abc_args {*}$abc_dont_use
 ########################################################

 yosys clean -purge
+
+# Recheck hierarchy to remove all unused modules
+yosys hierarchy -top $sc_design
+
 yosys setundef -zero

 yosys splitnets
siliconcompiler/tools/yosys/scripts/sc_synth_fpga.tcl

@@ -57,9 +57,12 @@ if { [file exists $input_verilog] } {
     yosys read_slang \
         -D SYNTHESIS \
         --keep-hierarchy \
+        --ignore-assertions \
+        --allow-use-before-declare \
         --top $sc_design \
         {*}$slang_params \
         $input_verilog
+    yosys setattr -unset init
 } else {
     # Use -noblackbox to correctly interpret empty modules as empty,
     # actual black boxes are read in later
@@ -74,81 +77,6 @@ if { [file exists $input_verilog] } {
     }
 }

-####################
-# Helper functions
-####################
-proc legalize_flops { feature_set } {
-    set legalize_flop_types []
-
-    if {
-        [lsearch -exact $feature_set enable] >= 0 &&
-        [lsearch -exact $feature_set async_set] >= 0 &&
-        [lsearch -exact $feature_set async_reset] >= 0
-    } {
-        lappend legalize_flop_types \$_DFF_P_
-        lappend legalize_flop_types \$_DFF_PN?_
-        lappend legalize_flop_types \$_DFFE_PP_
-        lappend legalize_flop_types \$_DFFE_PN?P_
-        lappend legalize_flop_types \$_DFFSR_PNN_
-        lappend legalize_flop_types \$_DFFSRE_PNNP_
-    } elseif {
-        [lsearch -exact $feature_set enable] >= 0 &&
-        [lsearch -exact $feature_set async_set] >= 0
-    } {
-        lappend legalize_flop_types \$_DFF_P_
-        lappend legalize_flop_types \$_DFF_PN1_
-        lappend legalize_flop_types \$_DFFE_PP_
-        lappend legalize_flop_types \$_DFFE_PN1P_
-    } elseif {
-        [lsearch -exact $feature_set enable] >= 0 &&
-        [lsearch -exact $feature_set async_reset] >= 0
-    } {
-        lappend legalize_flop_types \$_DFF_P_
-        lappend legalize_flop_types \$_DFF_PN0_
-        lappend legalize_flop_types \$_DFFE_PP_
-        lappend legalize_flop_types \$_DFFE_PN0P_
-    } elseif { [lsearch -exact $feature_set enable] >= 0 } {
-        lappend legalize_flop_types \$_DFF_P_
-        lappend legalize_flop_types \$_DFF_P??_
-        lappend legalize_flop_types \$_DFFE_PP_
-        lappend legalize_flop_types \$_DFFE_P??P_
-    } elseif {
-        [lsearch -exact $feature_set async_set] >= 0 &&
-        [lsearch -exact $feature_set async_reset] >= 0
-    } {
-        lappend legalize_flop_types \$_DFF_P_
-        lappend legalize_flop_types \$_DFF_PN?_
-        lappend legalize_flop_types \$_DFFSR_PNN_
-    } elseif { [lsearch -exact $feature_set async_set] >= 0 } {
-        lappend legalize_flop_types \$_DFF_P_
-        lappend legalize_flop_types \$_DFF_PN1_
-    } elseif { [lsearch -exact $feature_set async_reset] >= 0 } {
-        lappend legalize_flop_types \$_DFF_P_
-        lappend legalize_flop_types \$_DFF_PN0_
-    } else {
-        # Choose to legalize to async resets even though they
-        # won't tech map. Goal is to get the user to fix
-        # their code and put in synchronous resets
-        lappend legalize_flop_types \$_DFF_P_
-        lappend legalize_flop_types \$_DFF_P??_
-    }
-
-    set legalize_list []
-    foreach flop_type $legalize_flop_types {
-        lappend legalize_list -cell $flop_type 01
-    }
-    yosys log "Legalize list: $legalize_list"
-    yosys dfflegalize {*}$legalize_list
-}
-
-proc get_dsp_options { sc_syn_dsp_options } {
-    set option_text [list]
-    foreach dsp_option $sc_syn_dsp_options {
-        lappend option_text -D $dsp_option
-    }
-    return $option_text
-}
-
 set sc_partname [sc_cfg_get fpga partname]
 set build_dir [sc_cfg_get option builddir]
 set job_name [sc_cfg_get option jobname]
@@ -182,6 +110,25 @@ yosys hierarchy -top $sc_design

 if { [string match {ice*} $sc_partname] } {
     yosys synth_ice40 -top $sc_design -json "${sc_design}.netlist.json"
+} elseif {
+    [sc_cfg_exists fpga $sc_partname file yosys_fpga_config] &&
+    [llength [sc_cfg_get fpga $sc_partname file yosys_fpga_config]] != 0 &&
+    [sc_load_plugin yosys-syn]
+} {
+    set synth_fpga_args []
+    if { [lindex [sc_cfg_tool_task_get var synth_fpga_opt_mode] 0] != "none" } {
+        lappend synth_fpga_args \
+            -opt [lindex [sc_cfg_tool_task_get var synth_fpga_opt_mode] 0]
+    }
+    if { [lindex [sc_cfg_tool_task_get var synth_fpga_insert_buffers] 0] == "true" } {
+        lappend synth_fpga_args -insbuf
+    }
+
+    yosys synth_fpga \
+        -config [lindex [sc_cfg_get fpga $sc_partname file yosys_fpga_config] 0] \
+        -show_config \
+        -top $sc_design \
+        {*}$synth_fpga_args
 } else {
     # Pre-processing step: if DSPs instance are hard-coded into
     # the user's design, we can use a blackbox flow for DSP mapping
@@ -252,7 +199,7 @@ if { [string match {ice*} $sc_partname] } {
         [sc_cfg_get fpga $sc_partname file yosys_dsp_techmap]

     yosys log "Run techmap flow for DSP Blocks"
-    set formatted_dsp_options [get_dsp_options $sc_syn_dsp_options]
+    set formatted_dsp_options [sc_fpga_get_dsp_options $sc_syn_dsp_options]
     yosys techmap -map +/mul2dsp.v -map $sc_syn_dsp_library \
         {*}$formatted_dsp_options

@@ -290,7 +237,7 @@ if { [string match {ice*} $sc_partname] } {
     yosys demuxmap
     yosys simplemap

-    legalize_flops $sc_syn_feature_set
+    sc_fpga_legalize_flops $sc_syn_feature_set

     if { [sc_cfg_exists fpga $sc_partname file yosys_flop_techmap] } {
         set sc_syn_flop_library \
siliconcompiler/tools/yosys/syn_fpga.py

@@ -43,6 +43,20 @@ def setup(chip):
              'true/false, if true will attempt to use the slang frontend',
              field='help')

+    chip.set('tool', tool, 'task', task, 'var', 'synth_fpga_opt_mode', 'none',
+             step=step, index=index,
+             clobber=False)
+    chip.set('tool', tool, 'task', task, 'var', 'synth_fpga_opt_mode',
+             'optimization mode for the synth_fpga command',
+             field='help')
+
+    chip.set('tool', tool, 'task', task, 'var', 'synth_fpga_insert_buffers', True,
+             step=step, index=index,
+             clobber=False)
+    chip.set('tool', tool, 'task', task, 'var', 'synth_fpga_insert_buffers',
+             'insert buffers as part of the synth_fpga command',
+             field='help')
+
     # Setup FPGA params
     part_name = chip.get('fpga', 'partname')

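The two new task variables above are user-tunable. A minimal sketch of overriding one of them from a build script, assuming the Yosys FPGA synthesis task is registered as 'syn_fpga' and using a hypothetical 'syn' step:

    import siliconcompiler

    chip = siliconcompiler.Chip('top')
    # Hypothetical override: disable buffer insertion for the FPGA synthesis task.
    # The task name 'syn_fpga' and the step/index pair are assumptions, not taken from the diff.
    chip.set('tool', 'yosys', 'task', 'syn_fpga', 'var', 'synth_fpga_insert_buffers', False,
             step='syn', index='0')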
siliconcompiler/toolscripts/_tools.json

@@ -1,7 +1,7 @@
 {
     "openroad": {
         "git-url": "https://github.com/The-OpenROAD-Project/OpenROAD.git",
-        "git-commit": "
+        "git-commit": "cef8746bc887f3da67bd964ad9061ca7a8b8903a",
         "docker-cmds": [
             "# Remove OR-Tools files",
             "RUN rm -f $SC_PREFIX/Makefile $SC_PREFIX/README.md",
@@ -17,7 +17,7 @@
     },
     "opensta": {
         "git-url": "https://github.com/parallaxsw/OpenSTA.git",
-        "git-commit": "
+        "git-commit": "cda30445d652c6d41f68732675ddbf28b5efeeab",
         "auto-update": true
     },
     "netgen": {
@@ -55,7 +55,7 @@
     },
     "sv2v": {
         "git-url": "https://github.com/zachjs/sv2v.git",
-        "git-commit": "
+        "git-commit": "80a2f0cf685f6d873b4ee37b76e6c18dc60e2555",
         "auto-update": true
     },
     "verilator": {
@@ -76,7 +76,7 @@
     },
     "vpr": {
         "git-url": "https://github.com/verilog-to-routing/vtr-verilog-to-routing.git",
-        "git-commit": "
+        "git-commit": "4bb340af8243b95efdea493d4cb58aed1099151f",
         "auto-update": false
     },
     "icepack": {
@@ -151,7 +151,7 @@
     },
     "yosys-slang": {
         "git-url": "https://github.com/povik/yosys-slang.git",
-        "git-commit": "
+        "git-commit": "9d9ce7b767d2ea776e2dee0ef636a84512e6b229",
         "docker-depends": "yosys",
         "auto-update": true
     },
@@ -161,13 +161,9 @@
         "docker-depends": "yosys",
         "auto-update": true
     },
-    "
-        "git-url": "https://
-        "git-commit": "
-        "docker-depends": [
-            "yosys",
-            "vpr"
-        ],
+    "surfer": {
+        "git-url": "https://gitlab.com/surfer-project/surfer.git",
+        "git-commit": "v0.3.0",
         "auto-update": false
     }
 }
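The pinned tool data above is plain JSON, so the new surfer entry can be read back directly; the install scripts query it through toolscripts/_tools.py instead. A minimal sketch, assuming the file is opened from a source checkout:

    import json

    # Hypothetical direct read of the tool manifest.
    with open('siliconcompiler/toolscripts/_tools.json') as f:
        tools = json.load(f)

    print(tools['surfer']['git-url'])     # https://gitlab.com/surfer-project/surfer.git
    print(tools['surfer']['git-commit'])  # v0.3.0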
siliconcompiler/toolscripts/ubuntu22/install-surfer.sh (new file)

@@ -0,0 +1,33 @@
+#!/bin/sh
+
+set -ex
+
+# Get directory of script
+src_path=$(cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P)/..
+
+mkdir -p deps
+cd deps
+
+sudo apt-get install -y build-essential curl git libssl-dev openssl pkg-config
+
+USE_SUDO_INSTALL="${USE_SUDO_INSTALL:-yes}"
+if [ "${USE_SUDO_INSTALL:-yes}" = "yes" ]; then
+    SUDO_INSTALL=sudo
+else
+    SUDO_INSTALL=""
+fi
+
+curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s - -y
+export PATH="$HOME/.cargo/bin:$PATH"
+
+git clone $(python3 ${src_path}/_tools.py --tool surfer --field git-url) surfer
+cd surfer
+git checkout $(python3 ${src_path}/_tools.py --tool surfer --field git-commit)
+git submodule update --init
+
+cargo fetch --locked
+cargo build -j $(nproc) --frozen --release
+
+if [ ! -z ${PREFIX} ]; then
+    $SUDO_INSTALL install -Dm00755 target/release/surfer -t ${PREFIX}/bin
+fi
siliconcompiler/toolscripts/ubuntu24/install-surfer.sh (new file)

@@ -0,0 +1,33 @@
+#!/bin/sh
+
+set -ex
+
+# Get directory of script
+src_path=$(cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P)/..
+
+mkdir -p deps
+cd deps
+
+sudo apt-get install -y build-essential curl git libssl-dev openssl pkg-config
+
+USE_SUDO_INSTALL="${USE_SUDO_INSTALL:-yes}"
+if [ "${USE_SUDO_INSTALL:-yes}" = "yes" ]; then
+    SUDO_INSTALL=sudo
+else
+    SUDO_INSTALL=""
+fi
+
+curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s - -y
+export PATH="$HOME/.cargo/bin:$PATH"
+
+git clone $(python3 ${src_path}/_tools.py --tool surfer --field git-url) surfer
+cd surfer
+git checkout $(python3 ${src_path}/_tools.py --tool surfer --field git-commit)
+git submodule update --init
+
+cargo fetch --locked
+cargo build -j $(nproc) --frozen --release
+
+if [ ! -z ${PREFIX} ]; then
+    $SUDO_INSTALL install -Dm00755 target/release/surfer -t ${PREFIX}/bin
+fi
siliconcompiler/utils/__init__.py

@@ -221,7 +221,8 @@ def default_email_credentials_file():

 @contextlib.contextmanager
 def sc_open(path, *args, **kwargs):
-    kwargs['errors'] = 'ignore'
+    if 'errors' not in kwargs:
+        kwargs['errors'] = 'ignore'
     fobj = open(path, *args, **kwargs)
     try:
         with contextlib.closing(fobj):
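With the change above, sc_open only supplies errors='ignore' as a fallback, so a caller-supplied error handler is honored rather than replaced. A minimal usage sketch (the log path is hypothetical):

    from siliconcompiler.utils import sc_open

    # Default behaviour: undecodable bytes are ignored.
    with sc_open('build/top/job0/syn/0/sc_syn.log') as f:
        text = f.read()

    # An explicit errors= argument is now passed through to open().
    with sc_open('build/top/job0/syn/0/sc_syn.log', errors='strict') as f:
        strict_text = f.read()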
siliconcompiler/utils/flowgraph.py

@@ -16,16 +16,16 @@ def _check_flowgraph_io(chip, nodes=None):
     flow = chip.get('option', 'flow')

     runtime_full = RuntimeFlowgraph(
-        chip.
+        chip.get("flowgraph", flow, field='schema'),
         to_steps=chip.get('option', 'to'),
         prune_nodes=chip.get('option', 'prune'))
     runtime_flow = RuntimeFlowgraph(
-        chip.
+        chip.get("flowgraph", flow, field='schema'),
         args=(chip.get('arg', 'step'), chip.get('arg', 'index')),
         from_steps=chip.get('option', 'from'),
         to_steps=chip.get('option', 'to'),
         prune_nodes=chip.get('option', 'prune'))
-    record = chip.
+    record = chip.get("record", field='schema')

     if not nodes:
         nodes = runtime_flow.get_nodes()
@@ -62,25 +62,25 @@ def _check_flowgraph_io(chip, nodes=None):
             manifest = f'{design}.pkg.json'
             inputs = [inp for inp in os.listdir(in_step_out_dir) if inp != manifest]
         else:
-            in_tool,
-            task_class = chip.get("tool", in_tool, field="schema")
-            task_class.set_runtime(chip, step=in_step, index=in_index)
+            in_tool, in_task = get_tool_task(chip, in_step, in_index, flow=flow)
+            task_class = chip.get("tool", in_tool, "task", in_task, field="schema")

-
+            with task_class.runtime(chip, step=in_step, index=in_index) as task:
+                inputs = task.get_output_files()

         for inp in inputs:
             node_inp = input_file_node_name(inp, in_step, in_index)
             if node_inp in requirements:
                 inp = node_inp
             if inp in all_inputs:
-                chip.logger.error(f'Invalid flow: {step}{index} '
+                chip.logger.error(f'Invalid flow: {step}/{index} '
                                   f'receives {inp} from multiple input tasks')
                 return False
             all_inputs.add(inp)

         for requirement in requirements:
             if requirement not in all_inputs:
-                chip.logger.error(f'Invalid flow: {step}{index} will '
+                chip.logger.error(f'Invalid flow: {step}/{index} will '
                                   f'not receive required input {requirement}.')
                 return False

@@ -95,7 +95,7 @@ def _get_flowgraph_information(chip, flow, io=True):
     chip.schema = chip.schema.copy()

     # Setup nodes
-    node_exec_order = chip.
+    node_exec_order = chip.get("flowgraph", flow, field="schema").get_execution_order()
     if io:
         prev_flow = chip.get("option", "flow")
         chip.set("option", "flow", flow)
@@ -107,12 +107,12 @@ def _get_flowgraph_information(chip, flow, io=True):
     node_rank = {}
     for rank, rank_nodes in enumerate(node_exec_order):
         for step, index in rank_nodes:
-            node_rank[f'{step}{index}'] = rank
+            node_rank[f'{step}/{index}'] = rank

     graph_inputs = {}
     all_graph_inputs = set()
     if io:
-        for step, index in chip.
+        for step, index in chip.get("flowgraph", flow, field="schema").get_nodes():
             tool, task = get_tool_task(chip, step, index, flow=flow)
             for keypath in chip.get('tool', tool, 'task', task, 'require', step=step, index=index):
                 key = tuple(keypath.split(','))
@@ -122,7 +122,7 @@ def _get_flowgraph_information(chip, flow, io=True):
     for inputs in graph_inputs.values():
         all_graph_inputs.update(inputs)

-    exit_nodes = [f'{step}{index}' for step, index in chip.
+    exit_nodes = [f'{step}/{index}' for step, index in chip.get(
         "flowgraph", flow, field="schema").get_exit_nodes()]

     nodes = {}
@@ -135,11 +135,11 @@ def _get_flowgraph_information(chip, flow, io=True):
         return label.replace("<", r"\<").replace(">", r"\>")

     all_nodes = [(step, index) for step, index in sorted(
-        chip.
+        chip.get("flowgraph", flow, field="schema").get_nodes())
         if chip.get('record', 'status', step=step, index=index) != NodeStatus.SKIPPED]

-    runtime_flow = RuntimeFlowgraph(chip.
-    record = chip.
+    runtime_flow = RuntimeFlowgraph(chip.get("flowgraph", flow, field='schema'))
+    record = chip.get("record", field='schema')

     for step, index in all_nodes:
         tool, task = get_tool_task(chip, step, index, flow=flow)
@@ -153,7 +153,7 @@ def _get_flowgraph_information(chip, flow, io=True):
         inputs = []
         outputs = []

-        node = f'{step}{index}'
+        node = f'{step}/{index}'
         if io and (step, index) in graph_inputs:
             inputs.extend(graph_inputs[(step, index)])

@@ -173,11 +173,12 @@ def _get_flowgraph_information(chip, flow, io=True):

         rank_diff = {}
         for in_step, in_index in runtime_flow.get_node_inputs(step, index, record=record):
-
+            in_node_name = f'{in_step}/{in_index}'
+            rank_diff[in_node_name] = node_rank[node] - node_rank[in_node_name]
         nodes[node]["rank_diff"] = rank_diff

     for step, index in all_nodes:
-        node = f'{step}{index}'
+        node = f'{step}/{index}'
         if io:
             # get inputs
             edge_stats = {}
@@ -189,9 +190,9 @@ def _get_flowgraph_information(chip, flow, io=True):
                     infile = input_file_node_name(infile, in_step, in_index)
                 if infile not in nodes[node]["file_inputs"]:
                     continue
-                in_node_name = f"{in_step}{in_index}"
+                in_node_name = f"{in_step}/{in_index}"
                 outlabel = f"{in_node_name}:output-{clean_label(outfile)}"
-                inlabel = f"{step}{index}:input-{clean_label(infile)}"
+                inlabel = f"{step}/{index}:input-{clean_label(infile)}"

                 if in_node_name not in edge_stats:
                     edge_stats[in_node_name] = {
@@ -229,12 +230,12 @@ def _get_flowgraph_information(chip, flow, io=True):

             if (step, index) in graph_inputs:
                 for key in graph_inputs[(step, index)]:
-                    inlabel = f"{step}{index}:input-{clean_label(key)}"
+                    inlabel = f"{step}/{index}:input-{clean_label(key)}"
                     edges.append((key, inlabel, 1))
         else:
             all_inputs = []
             for in_step, in_index in chip.get('flowgraph', flow, step, index, 'input'):
-                all_inputs.append(f'{in_step}{in_index}')
+                all_inputs.append(f'{in_step}/{in_index}')
             for item in all_inputs:
                 edges.append((item, node, 1 if node in exit_nodes else 2))
