opencos-eda 0.2.48__py3-none-any.whl → 0.2.50__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. opencos/__init__.py +4 -2
  2. opencos/_version.py +10 -7
  3. opencos/commands/flist.py +8 -7
  4. opencos/commands/multi.py +14 -15
  5. opencos/commands/sim.py +5 -0
  6. opencos/commands/sweep.py +3 -2
  7. opencos/deps/__init__.py +0 -0
  8. opencos/deps/defaults.py +69 -0
  9. opencos/deps/deps_commands.py +419 -0
  10. opencos/deps/deps_file.py +326 -0
  11. opencos/deps/deps_processor.py +670 -0
  12. opencos/deps_schema.py +7 -8
  13. opencos/eda.py +84 -64
  14. opencos/eda_base.py +585 -316
  15. opencos/eda_config.py +85 -14
  16. opencos/eda_config_defaults.yml +36 -4
  17. opencos/eda_extract_targets.py +22 -14
  18. opencos/eda_tool_helper.py +33 -7
  19. opencos/export_helper.py +166 -86
  20. opencos/export_json_convert.py +31 -23
  21. opencos/files.py +2 -1
  22. opencos/hw/__init__.py +0 -0
  23. opencos/{oc_cli.py → hw/oc_cli.py} +9 -4
  24. opencos/names.py +0 -4
  25. opencos/peakrdl_cleanup.py +13 -7
  26. opencos/seed.py +19 -11
  27. opencos/tests/helpers.py +3 -2
  28. opencos/tests/test_deps_helpers.py +35 -32
  29. opencos/tests/test_eda.py +36 -29
  30. opencos/tests/test_eda_elab.py +7 -4
  31. opencos/tests/test_eda_synth.py +1 -1
  32. opencos/tests/test_oc_cli.py +1 -1
  33. opencos/tests/test_tools.py +4 -2
  34. opencos/tools/iverilog.py +2 -2
  35. opencos/tools/modelsim_ase.py +24 -2
  36. opencos/tools/questa.py +5 -3
  37. opencos/tools/questa_fse.py +57 -0
  38. opencos/tools/riviera.py +1 -1
  39. opencos/tools/slang.py +9 -3
  40. opencos/tools/surelog.py +1 -1
  41. opencos/tools/verilator.py +26 -1
  42. opencos/tools/vivado.py +34 -27
  43. opencos/tools/yosys.py +4 -3
  44. opencos/util.py +532 -474
  45. opencos/utils/__init__.py +0 -0
  46. opencos/utils/markup_helpers.py +98 -0
  47. opencos/utils/str_helpers.py +111 -0
  48. opencos/utils/subprocess_helpers.py +108 -0
  49. {opencos_eda-0.2.48.dist-info → opencos_eda-0.2.50.dist-info}/METADATA +1 -1
  50. opencos_eda-0.2.50.dist-info/RECORD +89 -0
  51. {opencos_eda-0.2.48.dist-info → opencos_eda-0.2.50.dist-info}/entry_points.txt +1 -1
  52. opencos/deps_helpers.py +0 -1346
  53. opencos_eda-0.2.48.dist-info/RECORD +0 -79
  54. /opencos/{pcie.py → hw/pcie.py} +0 -0
  55. {opencos_eda-0.2.48.dist-info → opencos_eda-0.2.50.dist-info}/WHEEL +0 -0
  56. {opencos_eda-0.2.48.dist-info → opencos_eda-0.2.50.dist-info}/licenses/LICENSE +0 -0
  57. {opencos_eda-0.2.48.dist-info → opencos_eda-0.2.50.dist-info}/licenses/LICENSE.spdx +0 -0
  58. {opencos_eda-0.2.48.dist-info → opencos_eda-0.2.50.dist-info}/top_level.txt +0 -0
opencos/export_helper.py CHANGED
@@ -1,38 +1,49 @@
+''' opencos.export_helper: package used by command sim and synth,
+
+to handle common tasks with "exporting" a DEPS target. An exported target copies all
+source files (deps, reqs) and attempts to resolve included files so they are all relative
+to +incdir+.
+'''
+
 import os
 import shutil
 import json
 
 from opencos import util
 from opencos.util import debug, info, warning, error
+from opencos.utils.markup_helpers import yaml_safe_writer
 
+# pylint: disable=dangerous-default-value
 
-_include_iteration_max_depth = 128 # Depth to look for nested included files.
-_remove_DEPS_yml_defines = [
+
+SV_INCLUDE_ITERATION_MAX_DEPTH = 128 # Depth to look for nested included files.
+REMOVE_DEPS_YML_DEFINES = [
     'OC_SEED',
     'OC_ROOT',
 ]
 
 
-def json_paths_to_jsonl(json_file_paths:list, output_json_path:str,
-                        assert_json_types=[dict]) -> None:
+def json_paths_to_jsonl(
+        json_file_paths:list, output_json_path:str, assert_json_types=[dict]
+) -> None:
     '''Given a list of .json filepath strs, save a single .jsonl (newline separated json(s)).
 
     errors if one of json_file_paths content's type is not in assert_json_types
     (assert_json_types can be empty list to avoid check).
     '''
 
-    if len(json_file_paths) == 0:
+    if not json_file_paths:
         error(f'{json_file_paths=} cannot be empty list')
 
 
     output_json_dir = os.path.split(output_json_path)[0]
     util.safe_mkdir(output_json_dir)
 
-    with open(output_json_path, 'w') as outf:
+    with open(output_json_path, 'w', encoding='utf-8') as outf:
 
         # jsonl is every line of the file is a json.
         for json_file_path in json_file_paths:
-            with open(json_file_path) as f:
+            with open(json_file_path, encoding='utf-8') as f:
                 data = json.load(f)
                 if len(assert_json_types) > 0 and type(data) not in assert_json_types:
                     error(f'{json_file_path=} JSON data is not a Table (py dict) {type(data)=}')
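For context, the "jsonl" layout these helpers emit is simply one complete JSON document per line. A minimal stdlib-only sketch of the round trip (the out.jsonl name is illustrative, not part of the package):

    import json

    tests = [{'name': 'test_a'}, {'name': 'test_b'}]

    # Write: one JSON object per line, the JSONL convention.
    with open('out.jsonl', 'w', encoding='utf-8') as outf:
        for test in tests:
            outf.write(json.dumps(test) + '\n')

    # Read: parse each non-blank line independently.
    with open('out.jsonl', encoding='utf-8') as f:
        loaded = [json.loads(line) for line in f if line.strip()]
    assert loaded == tests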
@@ -41,8 +52,9 @@ def json_paths_to_jsonl(json_file_paths:list, output_json_path:str,
     info(f'Wrote {len(json_file_paths)} tests to {output_json_path=}')
 
 
-def json_paths_to_single_json(json_file_paths:list, output_json_path:str,
-                              assert_json_types=[dict]) -> None:
+def json_paths_to_single_json(
+        json_file_paths:list, output_json_path:str, assert_json_types=[dict]
+) -> None:
     '''Given a list of .json filepath strs, save a single .json with key 'tests' and a list.
 
     errors if one of json_file_paths content's type is not in assert_json_types
@@ -56,13 +68,13 @@ def json_paths_to_single_json(json_file_paths:list, output_json_path:str,
     output_json_dir = os.path.split(output_json_path)[0]
     util.safe_mkdir(output_json_dir)
 
-    with open(output_json_path, 'w') as outf:
+    with open(output_json_path, 'w', encoding='utf-8') as outf:
 
         out_json_data = {
-            'tests': list(),
+            'tests': [],
         }
         for json_file_path in json_file_paths:
-            with open(json_file_path) as f:
+            with open(json_file_path, encoding='utf-8') as f:
                 data = json.load(f)
                 if len(assert_json_types) > 0 and type(data) not in assert_json_types:
                     error(f'{json_file_path=} JSON data is not a Table (py dict) {type(data)=}')
@@ -72,35 +84,22 @@ def json_paths_to_single_json(json_file_paths:list, output_json_path:str,
     info(f'Wrote {len(json_file_paths)} tests {output_json_path=}')
 
 
-def find_sv_included_files_within_file(filename:str,
-                                       known_incdir_paths:list,
-                                       warnings:bool=True,
-                                       modify_files_and_save_to_path=None,
-                                       unmodified_files_copy_to_path=None) -> list:
-    '''Given a filename (full path) and a list of known incdir paths, returns
-    a list of included files (full path).
+def traverse_sv_file_for_includes(filename: str) -> (dict, set):
+    '''Lazily parses a SV <filename> looking for `includes
 
-    (Optional) modify_files_and_save_to_path (str: directory/path) if you wish
-    to strip all path information on the `include "(path)" for example:
-       `include "foo.svh" -- no modifications
-       `include "../bar.svh" -- is modified to become `include "bar.svh"
-    (Optional) unmodified_files_copy_to_path (str: directory/path) if you wish
-    to copy unmodified files to this path.
+    Returns tuple:
+    - dict of modified_lines: {linenum (int): line (str, modified line value), ...}
+    - set of found included files
     '''
 
-    found_included_files = set()
-
     assert any(filename.endswith(x) for x in ['.v', '.sv', '.vh', '.svh']), \
         f'{filename=} does not have a supported extension, refusing to parse it'
     assert os.path.exists(filename), f'{filename=} does not exist'
 
-    modified_lines = dict() # {linenum (int): line (str, modified line value), ...}
-
-    filename_no_path = os.path.split(filename)[1]
-
-    debug(f'export_helper: {filename=} {modify_files_and_save_to_path=} {unmodified_files_copy_to_path=}')
+    found_included_files = set()
+    modified_lines = {}
 
-    with open(filename) as f:
+    with open(filename, encoding='utf-8') as f:
 
         for linenum, line in enumerate(f.readlines()):
             line_modified = False
@@ -112,7 +111,7 @@ def find_sv_included_files_within_file(filename:str,
             parts = line.split("//")
             words = parts[0].split() # only use what's on the left of the comments
             prev_word_is_tick_include = False
-            for iter,word in enumerate(words):
+            for i,word in enumerate(words):
                 word = word.rstrip('\n')
                 if word == '`include':
                     # don't print this word, wait until next word
@@ -125,11 +124,10 @@ def find_sv_included_files_within_file(filename:str,
 
                     # strip the path information and keep track that
                     # we would like to modify this line of filename
-                    if modify_files_and_save_to_path:
-                        include_fname_no_path = os.path.split(include_fname)[1]
-                        if include_fname != include_fname_no_path:
-                            words[iter] = '"' + include_fname_no_path + '"'
-                            line_modified = True
+                    _, include_fname_no_path = os.path.split(include_fname)
+                    if include_fname != include_fname_no_path:
+                        words[i] = '"' + include_fname_no_path + '"'
+                        line_modified = True
 
                     if include_fname not in found_included_files:
                         # this has path information, perhaps relative, perhaps absolute, or
@@ -139,33 +137,85 @@ def find_sv_included_files_within_file(filename:str,
             if line_modified:
                 modified_lines[linenum] = ' '.join(words)
 
-    debug(f'export_helper: {filename=} {modified_lines=}')
+    return modified_lines, found_included_files
+
+
+def write_modified_lines(
+        src_filename: str,
+        modified_lines: dict,
+        modify_files_and_save_to_path: str = '',
+        unmodified_files_copy_to_path: str = ''
+) -> None:
+    '''Given a dict of modified lines, walk the src_filename contents and write modifications
+
+    to the dst_filename. The modified_lines dict is:
+    {linenum (int): line (str, modified line value), ...}
+    '''
+
+    debug(f'export_helper: {src_filename=} {modified_lines=}')
+
+    _, src_filename_no_path = os.path.split(src_filename)
+
     # Optionally write out modified files (flatten the path information
     # on `include "../bar.svh" )
-    if len(modified_lines) > 0 and modify_files_and_save_to_path:
-        dst = os.path.join(modify_files_and_save_to_path, filename_no_path)
+    if modified_lines and modify_files_and_save_to_path:
+        dst = os.path.join(modify_files_and_save_to_path, src_filename_no_path)
         if not os.path.exists(dst):
-            with open(filename) as f, open(dst, 'w') as outf:
+            with open(src_filename, encoding='utf-8') as f, \
+                 open(dst, 'w', encoding='utf-8') as outf:
                 for linenum, line in enumerate(f.readlines()):
                     if linenum in modified_lines:
                         new_line = modified_lines[linenum]
                         outf.write(new_line + '\n')
-                        debug(f'export_helper: Modified {filename=} as {dst=}: {linenum=} {new_line=}')
+                        debug(f'export_helper: Modified {src_filename=} as {dst=}:',
+                              f'{linenum=} {new_line=}')
                     else:
                         outf.write(line)
 
     # Copy unmodified files to some path.
-    if len(modified_lines) == 0 and unmodified_files_copy_to_path:
+    if not modified_lines and unmodified_files_copy_to_path:
         if os.path.isdir(unmodified_files_copy_to_path):
-            dst = os.path.join(unmodified_files_copy_to_path, filename_no_path)
+            dst = os.path.join(unmodified_files_copy_to_path, src_filename_no_path)
             if not os.path.exists(dst):
-                debug(f'export_helper: Copied unmodified {filename=} to {dst=}')
-                shutil.copy(src=filename, dst=dst)
+                debug(f'export_helper: Copied unmodified {src_filename=} to {dst=}')
+                shutil.copy(src=src_filename, dst=dst)
 
 
+def find_sv_included_files_within_file(
+        filename: str,
+        known_incdir_paths: list,
+        warnings: bool = True,
+        modify_files_and_save_to_path: str = '',
+        unmodified_files_copy_to_path: str = ''
+) -> list:
+    '''Given a filename (full path) and a list of known incdir paths, returns
+    a list of included files (full path).
+
+    (Optional) modify_files_and_save_to_path (str: directory/path) if you wish
+    to strip all path information on the `include "(path)" for example:
+       `include "foo.svh" -- no modifications
+       `include "../bar.svh" -- is modified to become `include "bar.svh"
+    (Optional) unmodified_files_copy_to_path (str: directory/path) if you wish
+    to copy unmodified files to this path.
+    '''
+
+    modified_lines, found_included_files = traverse_sv_file_for_includes(filename)
+
+    debug(f'export_helper: {filename=} {modify_files_and_save_to_path=}',
+          f'{unmodified_files_copy_to_path=}')
+
+    if modify_files_and_save_to_path or unmodified_files_copy_to_path:
+        # Save outputs to these paths:
+        write_modified_lines(
+            src_filename=filename,
+            modified_lines=modified_lines,
+            modify_files_and_save_to_path=modify_files_and_save_to_path,
+            unmodified_files_copy_to_path=unmodified_files_copy_to_path
+        )
+
     # Back to the list found_included_files that we observed within our filename, we
     # still need to return all the included files.
-    ret = list()
+    ret = []
     for fname in found_included_files:
         # Does this file exist, using our known_incdir_paths?
         found = False
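The refactor above splits the old monolithic helper into a pure scanner (traverse_sv_file_for_includes) plus a writer (write_modified_lines), with find_sv_included_files_within_file composing the two. A hedged usage sketch, where all paths below are hypothetical:

    from opencos.export_helper import find_sv_included_files_within_file

    # Hypothetical source file and the incdirs the build already knows about:
    included = find_sv_included_files_within_file(
        filename='/work/rtl/top.sv',
        known_incdir_paths=['/work/rtl', '/work/rtl/includes'],
        modify_files_and_save_to_path='/work/export',  # flattens `include "../x.svh"
        unmodified_files_copy_to_path='/work/export',
    )
    print(included)  # full paths of files `include'd by top.sv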
@@ -175,7 +225,8 @@ def find_sv_included_files_within_file(filename:str,
                 if try_file_path not in ret:
                     ret.append(try_file_path)
                 found = True
-                debug(f'export_helper: Include observed in {filename=} will use {try_file_path=} for export')
+                debug(f'export_helper: Include observed in {filename=} will use',
+                      f'{try_file_path=} for export')
                 break # we can only match one possible file out of N possible incdir paths.
 
 
@@ -183,18 +234,20 @@ def find_sv_included_files_within_file(filename:str,
             # file doesn't exist in any included directory, we only warn here b/c
             # it will eventually fail compile.
             include_fname = fname
-            warning( f'export_helper: {include_fname=} does not exist in any of {known_incdir_paths=},' \
-                     + f'was included within source files: {filename=}'
-            )
+            warning(f'export_helper: {include_fname=} does not exist in any of'
+                    f'{known_incdir_paths=}, was included within source files: {filename=}')
 
     return ret
 
 
-
-def get_list_sv_included_files(all_src_files:list, known_incdir_paths:list, target:str='',
-                               warnings:bool=True,
-                               modify_files_and_save_to_path=None,
-                               unmodified_files_copy_to_path=None) -> list:
+def get_list_sv_included_files(
+        all_src_files: list,
+        known_incdir_paths: list,
+        target: str = '',
+        warnings: bool = True,
+        modify_files_and_save_to_path: str = '',
+        unmodified_files_copy_to_path: str = ''
+) -> list:
     ''' Given a list of all_src_files, and list of known_incdir_paths, returns a list
     of all included files (fullpath). This is recurisve if an included file includes another file.
 
@@ -213,7 +266,7 @@ def get_list_sv_included_files(all_src_files:list, known_incdir_paths:list, targ
     # order shouldn't matter, these will get added to the testrunner's filelist and
     # be included with +incdir+.
 
-    sv_included_files_dict = dict() # key, value is if we've traversed it (bool)
+    sv_included_files_dict = {} # key, value is if we've traversed it (bool)
 
     for fname in all_src_files:
         included_files_list = find_sv_included_files_within_file(
@@ -228,7 +281,7 @@ def get_list_sv_included_files(all_src_files:list, known_incdir_paths:list, targ
             if f not in sv_included_files_dict:
                 sv_included_files_dict[f] = False # add entry, mark it not traversed.
 
-    for _ in range(_include_iteration_max_depth):
+    for _ in range(SV_INCLUDE_ITERATION_MAX_DEPTH):
         # do these for a a depth of recurisve levels, in case `include'd file includes another file.
         # If we have more than N levels of `include hunting, then rethink this.
         # For example, some codebases would do their file dependencies as `include
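The loop bounded by SV_INCLUDE_ITERATION_MAX_DEPTH is a capped fixed-point iteration: each file maps to a traversed flag, and scanning repeats until no un-traversed entries remain. A self-contained sketch of the pattern, where scan_for_includes stands in for the real per-file scanner:

    MAX_DEPTH = 128

    def collect_includes(seed_files: list, scan_for_includes) -> list:
        '''Re-scan un-traversed files until no new includes appear (or the depth cap hits).'''
        seen = {f: False for f in seed_files}  # path -> traversed?
        for _ in range(MAX_DEPTH):
            pending = [f for f, done in seen.items() if not done]
            if not pending:
                break  # fixed point: nothing new was discovered
            for fname in pending:
                for inc in scan_for_includes(fname):
                    seen.setdefault(inc, False)  # new file, not yet traversed
                seen[fname] = True
        return list(seen)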
@@ -251,11 +304,11 @@ def get_list_sv_included_files(all_src_files:list, known_incdir_paths:list, targ
     if not all(sv_included_files_dict.values()):
         # we had some that we're traversed.
         not_traversed = [k for k,v in sv_included_files_dict.items() if not v]
-        error(f'Depth {_include_iteration_max_depth=} exceeded in looking for `includes,' \
+        error(f'Depth {SV_INCLUDE_ITERATION_MAX_DEPTH=} exceeded in looking for `includes,' \
               + f' {target=} {not_traversed=}')
 
 
-    ret = list()
+    ret = []
     for fname,traversed in sv_included_files_dict.items():
         if traversed:
             # add all the included files (should be traversed!) to our return list
@@ -265,6 +318,11 @@ def get_list_sv_included_files(all_src_files:list, known_incdir_paths:list, targ
 
 
 class ExportHelper:
+    '''ExportHelper is an object that command handlers can use to assist in creating
+
+    a directory with all exported sources, args, incdirs, defines, and output of
+    what was exported.
+    '''
 
     def __init__(self, cmd_design_obj, eda_command='export', out_dir=None, target=''):
         self.cmd_design_obj = cmd_design_obj
@@ -273,19 +331,21 @@ class ExportHelper:
         self.target = target
 
         self.args = self.cmd_design_obj.args # lazy alias.
-        self.included_files = list()
+        self.included_files = []
         self.out_deps_file = None
 
-        # TODO(drew) It would be neat if I could export an "eda multi" command, like
-        # CommandMulti that only gave me the list of all targets from a wildcard?
-        # Because then I could create exports for each individual target, but lump
-        # all files together and have a single exported DEPS.yml with unique targets.
 
-    def run(self, check_if_overwrite:bool=False,
-            deps_file_args:list=list(),
-            export_json_eda_config:dict=dict(), **kwargs):
+    def run(
+            self, check_if_overwrite:bool=False,
+            deps_file_args:list=[],
+            export_json_eda_config:dict={}, **kwargs
+    ) -> None:
+        '''main entrypoint for ExportHelper object. Creates output directory, writes files
 
-        self.make_out_dir(check_if_overwrite)
+        to it, creates a DEPS.yml in output directory, and optional output JSON file
+        '''
+
+        self.create_out_dir(check_if_overwrite)
         self.write_files_to_out_dir()
         self.create_deps_yml_in_out_dir(deps_file_args=deps_file_args)
@@ -295,8 +355,16 @@ class ExportHelper:
 
         info(f'export_helper: done - wrote to: {self.out_dir}')
 
-    def make_out_dir(self, check_if_overwrite:bool=False):
-        assert self.args.get('top', ''), f'Need "top" to be set'
+    def create_out_dir(self, check_if_overwrite: bool= False) -> None:
+        '''Creates output directory for exported files, requires a 'top' to be
+
+        set by the original target, or inferred from target or files.
+        '''
+
+        if not self.args.get('top', ''):
+            error('export_helper.py internal error, args[top] is not set, cannot create',
+                  'output directory for export',
+                  f'{self.args=} {self.target=} {self.eda_command=} {self.out_dir=}')
 
         if not self.out_dir:
             if self.args.get('output', '') == "":
@@ -304,13 +372,20 @@ class ExportHelper:
 
         if check_if_overwrite and self.args.get('force', False):
             if os.path.exists(self.out_dir):
-                util.error(f"export_helper: output directory {out_dir} exists, use --force to overwrite")
+                error(f"export_helper: output directory {self.out_dir} exists, use --force",
+                      "to overwrite")
 
         if not os.path.exists(self.out_dir):
             info(f"export_helper: Creating {self.out_dir} for exported file tree")
             util.safe_mkdir(self.out_dir)
 
+
     def write_files_to_out_dir(self):
+        '''Called by self.run(), writes all files to output directory. Has to determine
+
+        the includes files used by SV/Verilog to unravel nested or relative included
+        paths.
+        '''
 
         # Also sets our list of included files.
         self.included_files = get_list_sv_included_files(
@@ -328,7 +403,9 @@ class ExportHelper:
             if not os.path.exists(dst):
                 shutil.copy(src=filename, dst=dst)
 
-    def create_deps_yml_in_out_dir(self, deps_file_args:list=list()):
+
+    def create_deps_yml_in_out_dir(self, deps_file_args:list=[]):
+        '''Creates ouput exported directory DEPS.yml file with the exported target'''
         if not self.target:
             self.target = 'test'
         else:
@@ -339,7 +416,7 @@ class ExportHelper:
         info(f'export_helper: Creating DEPS.yml for {self.target=} in {self.out_dir=}')
 
         # Need to strip path information from our files_sv and files_v:
-        deps_files = list()
+        deps_files = []
         for fullpath in self.cmd_design_obj.files_sv + self.cmd_design_obj.files_v:
             filename = os.path.split(fullpath)[1]
             deps_files.append(filename)
@@ -361,7 +438,7 @@ class ExportHelper:
 
         if self.cmd_design_obj.defines:
             data[self.target]['defines'] = self.cmd_design_obj.defines.copy()
-            for define in _remove_DEPS_yml_defines:
+            for define in REMOVE_DEPS_YML_DEFINES:
                 # Remove defines keys for OC_ROOT and OC_SEED. Change OC_SEED to _ORIG_OC_SEED
                 if define in data[self.target]['defines']:
                     data[self.target]['defines'].pop(define)
@@ -369,7 +446,7 @@ class ExportHelper:
         reqs_fullpath_list = self.included_files + self.cmd_design_obj.files_non_source
         if reqs_fullpath_list:
             # Need to strip path information from non-source files:
-            data[self.target]['reqs'] = list()
+            data[self.target]['reqs'] = []
             for fullpath in reqs_fullpath_list:
                 filename = os.path.split(fullpath)[1]
                 data[self.target]['reqs'].append(filename)
@@ -377,10 +454,13 @@ class ExportHelper:
 
         dst = os.path.join(self.out_dir, 'DEPS.yml')
         self.out_deps_file = dst
-        util.yaml_safe_writer(data=data, filepath=dst)
+        yaml_safe_writer(data=data, filepath=dst)
 
 
-    def create_export_json_in_out_dir(self, eda_config:dict=dict(), **kwargs):
+    def create_export_json_in_out_dir( # pylint: disable=unused-argument
+            self, eda_config:dict={}, **kwargs
+    ) -> None:
+        '''Optionally creates an exported JSON file in the output directory'''
 
         if not self.eda_command:
             return
@@ -396,17 +476,17 @@ class ExportHelper:
             'multi': False, # Not yet implemented.
             'command': self.eda_command,
             'targets': [self.target],
-            'args': list(),
+            'args': [],
             'waves': self.args.get('waves', False),
             # tool - eda.CommandSimVerilator has this set in self.args:
             'tool': self.args.get('tool', None),
             },
-            'files': list(),
+            'files': [],
         }
 
         # allow caller to override eda - tool, or eda - args, etc.
         for k,v in eda_config.items():
-            if k in data['eda'].keys() and v is not None:
+            if k in data['eda'] and v is not None:
                 data['eda'][k] = v
 
         # Note that args may already be set via:
@@ -434,10 +514,10 @@ class ExportHelper:
             if os.path.exists(out_dir_filename):
                 somefile = out_dir_filename
             else:
-                raise(f'export.json: {self.target=} Missing exported file, orig: {somefile=}')
+                error(f'export.json: {self.target=} Missing exported file, orig: {somefile=}')
 
             assert os.path.exists(somefile)
-            with open(somefile) as f:
+            with open(somefile, encoding='utf-8') as f:
                 filestr = ''.join(f.readlines())
                 data['files'].append({
                     'name': os.path.split(somefile)[1],
@@ -446,7 +526,7 @@ class ExportHelper:
 
 
         dst = os.path.join(self.out_dir, 'export.json')
-        with open(dst, 'w') as f:
+        with open(dst, 'w', encoding='utf-8') as f:
             json.dump(data, f)
             f.write('\n')
             info(f'export_helper: Wrote {dst=}')
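Taken together, create_export_json_in_out_dir emits an export.json whose top level pairs an 'eda' block with a 'files' list. A hedged sketch of the shape, with values invented for illustration (the per-file fields beyond 'name' are cut off in this diff):

    export_json = {
        'eda': {
            'multi': False,       # "Not yet implemented" per the code comment
            'command': 'sim',     # invented; comes from self.eda_command
            'targets': ['test'],
            'args': [],
            'waves': False,
            'tool': None,
        },
        'files': [
            {'name': 'top.sv'},   # remaining per-file keys are truncated in this diff
        ],
    }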
opencos/export_json_convert.py CHANGED
@@ -1,53 +1,60 @@
+'''Converts eda style export.json or export.jsonl to an alternate schema.'''
 
-'''Converts eda style export.json or export.jsonl to a format suitable for test runner schema.'''
-
-import uuid
-import json
-import yaml
 import argparse
-import sys
+import json
 import os
+import sys
+import uuid
 
-def convert(
-    input_json_fname: str,
-    output_json_fname: str,
-    correlation_ids: list = [],
-    output_top_is_eda_target_name:bool = False
-) -> None:
+import yaml
 
+def read_input_json_fname(input_json_fname: str) -> list:
+    '''Returns list (or tests) from input JSON or JSONL, helper function used by convert(...)'''
     data = None
-    new_tests_list = list()
     assert os.path.exists(input_json_fname), f'{input_json_fname=} does not exist'
-    with open(input_json_fname) as f:
+    with open(input_json_fname, encoding='utf-8') as f:
 
         if input_json_fname.lower().endswith('.jsonl'):
-            data = list()
+            data = []
            for line in f.readlines():
                if line.rstrip():
                    data.append(json.loads(line.rstrip()))
         else:
             data = json.load(f)
 
-    if type(data) is dict and 'eda' in data:
+    if isinstance(data, dict) and 'eda' in data:
         # 1 test, make it a list:
         data = [data]
-    elif type(data) is dict and 'tests' in data and type(data['tests']) is list:
+    elif isinstance(data, dict) and 'tests' in data and isinstance(data['tests'], list):
         data = data['tests']
 
+    assert data is not None and isinstance(data, list), f'unknown schmea for {input_json_fname=}'
+    return data
 
-    assert data is not None and type(data) is list, f'unknown schmea for {input_json_fname=}'
+
+
+def convert( # pylint: disable=dangerous-default-value, too-many-locals
+    input_json_fname: str,
+    output_json_fname: str,
+    correlation_ids: list = [],
+    correlation_id_key: str = 'correlation_id',
+    output_top_is_eda_target_name:bool = False
+) -> None:
+    '''Returns None, takes an input json filename, writes to output_json_fname'''
+
+    data = read_input_json_fname(input_json_fname)
+    new_tests_list = []
 
     for index,test in enumerate(data):
 
         if correlation_ids and index < len(correlation_ids):
             correlation_id = correlation_ids[index]
         else:
-            correlation_id = str(uuid.uuid4())
+            correlation_id = str(uuid.uuid4()) # somewhat uuid per entry.
 
         new_test_item = {
-            'top': '', # TODO(drew): eventually change to "targets" list
-            'files_list': list(),
-            'correlation_id': correlation_id,
+            'top': '',
+            'files_list': [],
+            correlation_id_key: correlation_id,
         }
 
         assert 'files' in test
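The new correlation_id_key parameter lets callers rename the ID field in each emitted test record, while correlation IDs themselves fall back to uuid4 when not supplied. A sketch of the intended call, with filenames invented:

    from opencos.export_json_convert import convert

    convert(
        input_json_fname='export.jsonl',         # invented path; .json also accepted
        output_json_fname='tests.json',
        correlation_ids=['run-001', 'run-002'],  # optional, applied positionally per test
        correlation_id_key='trace_id',           # defaults to 'correlation_id'
    )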
@@ -86,11 +93,12 @@ def convert(
         'tests': new_tests_list
     }
 
-    with open(output_json_fname, 'w') as f:
+    with open(output_json_fname, 'w', encoding='utf-8') as f:
         json.dump(new_data, f)
 
     print(f'Wrote: {output_json_fname=}')
 
+
 if __name__ == '__main__':
     parser = argparse.ArgumentParser(prog='export_json_convert', add_help=True, allow_abbrev=False)
opencos/files.py CHANGED
@@ -37,7 +37,8 @@ def get_source_file(target:str) -> (bool, str, str):
     if '@' in target:
         for p in ALL_FORCED_PREFIXES:
             if p in target:
-                fpath = ''.join(target.split(p)) # essentially just removing the "sv@" or whatever it is
+                # essentially removing the leading "sv@" or whatever prefix.
+                fpath = ''.join(target.split(p))
                 if os.path.isfile(fpath):
                     return True, fpath, FORCE_PREFIX_DICT.get(p)
 
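The touched block strips a forced-language prefix such as "sv@" from a target before checking the remainder as a file path. A standalone sketch of that split; the prefix-to-language mapping here is illustrative, not the package's actual FORCE_PREFIX_DICT contents:

    FORCE_PREFIX_DICT = {'sv@': 'systemverilog', 'v@': 'verilog'}  # illustrative mapping

    def strip_forced_prefix(target: str):
        '''Return (path, forced_language) if target carries a prefix like "sv@".'''
        for prefix, language in FORCE_PREFIX_DICT.items():
            if prefix in target:
                fpath = ''.join(target.split(prefix))  # drops every occurrence of the prefix
                return fpath, language
        return target, None

    print(strip_forced_prefix('sv@rtl/top.sv'))  # -> ('rtl/top.sv', 'systemverilog')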
opencos/hw/__init__.py ADDED
File without changes
opencos/{oc_cli.py → hw/oc_cli.py} CHANGED
@@ -19,8 +19,9 @@ import socket
 import serial # maybe at some point, pull this in, as PySerial is pure python.
 import serial.tools.list_ports
 
-from opencos import util, pcie
+from opencos import util
 from opencos import names as opencos_names
+from opencos.hw import pcie
 
 
 util.progname_in_message = False # too noise for this use case
@@ -2552,7 +2553,7 @@ def main(*args):
     #port = "COM1" if os.name == 'nt' else "/dev/ttyUSB2"
     port = None
     pcie = None
-    for opt,arg in opts:
+    for opt, arg in opts:
         if opt in ['-h', '--help']:
             return usage(do_exit=False, error_code=0)
         if opt in ['-p', '--pcie']:
@@ -2561,8 +2562,12 @@ def main(*args):
             port = arg
         if opt in ['-b', '--baud']:
             baud = arg
-        if util.process_token(opt):
-            if arg: util.proces_token(arg)
+        if opt:
+            # attempt to process util args individually:
+            if arg:
+                util.process_token([opt, arg])
+            else:
+                util.process_token([opt])
 
     if port == None and pcie == None and getpass.getuser() == 'root':
         # we have not been given a connection method, infer the best one
opencos/names.py CHANGED
@@ -1,7 +1,3 @@
-#!/usr/bin/python3
-
-# SPDX-License-Identifier: MPL-2.0
-
 ''' names.py
 
 This is a human-maintained but ideally machine parsable mapping of names