harnice-0.3.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- harnice/__init__.py +0 -0
- harnice/__main__.py +4 -0
- harnice/cli.py +234 -0
- harnice/fileio.py +295 -0
- harnice/gui/launcher.py +426 -0
- harnice/lists/channel_map.py +182 -0
- harnice/lists/circuits_list.py +302 -0
- harnice/lists/disconnect_map.py +237 -0
- harnice/lists/formboard_graph.py +63 -0
- harnice/lists/instances_list.py +280 -0
- harnice/lists/library_history.py +40 -0
- harnice/lists/manifest.py +93 -0
- harnice/lists/post_harness_instances_list.py +66 -0
- harnice/lists/rev_history.py +325 -0
- harnice/lists/signals_list.py +135 -0
- harnice/products/__init__.py +1 -0
- harnice/products/cable.py +152 -0
- harnice/products/chtype.py +80 -0
- harnice/products/device.py +844 -0
- harnice/products/disconnect.py +225 -0
- harnice/products/flagnote.py +139 -0
- harnice/products/harness.py +522 -0
- harnice/products/macro.py +10 -0
- harnice/products/part.py +640 -0
- harnice/products/system.py +125 -0
- harnice/products/tblock.py +270 -0
- harnice/state.py +57 -0
- harnice/utils/appearance.py +51 -0
- harnice/utils/circuit_utils.py +326 -0
- harnice/utils/feature_tree_utils.py +183 -0
- harnice/utils/formboard_utils.py +973 -0
- harnice/utils/library_utils.py +333 -0
- harnice/utils/note_utils.py +417 -0
- harnice/utils/svg_utils.py +819 -0
- harnice/utils/system_utils.py +563 -0
- harnice-0.3.0.dist-info/METADATA +32 -0
- harnice-0.3.0.dist-info/RECORD +41 -0
- harnice-0.3.0.dist-info/WHEEL +5 -0
- harnice-0.3.0.dist-info/entry_points.txt +3 -0
- harnice-0.3.0.dist-info/licenses/LICENSE +19 -0
- harnice-0.3.0.dist-info/top_level.txt +1 -0
harnice/products/device.py
@@ -0,0 +1,844 @@
import os
import runpy
import sexpdata
import json
import csv
from harnice import fileio, state
from harnice.lists import signals_list, rev_history
from harnice.products import chtype

default_desc = "DEVICE, FUNCTION, ATTRIBUTES, etc."

device_feature_tree_utils_default = """
from harnice.lists import signals_list
from harnice.products import chtype

ch_type_ids = {
    "in": (1, "https://github.com/harnice/harnice"),
    "out": (4, "https://github.com/harnice/harnice"),
    "chassis": (5, "https://github.com/harnice/harnice")
}

xlr_pinout = {
    "pos": 2,
    "neg": 3,
    "chassis": 1
}

connector_mpns = {
    "XLR3F": ["in1", "in2"],
    "XLR3M": ["out1", "out2"]
}

def mpn_for_connector(connector_name):
    for mpn, conn_list in connector_mpns.items():
        if connector_name in conn_list:
            return mpn
    return None

signals_list.new()

for connector_name in ["in1", "in2", "out1", "out2"]:
    if connector_name.startswith("in"):
        channel_type = ch_type_ids["in"]
    elif connector_name.startswith("out"):
        channel_type = ch_type_ids["out"]
    else:
        continue

    channel_name = connector_name
    connector_mpn = mpn_for_connector(connector_name)

    for signal in chtype.signals(channel_type):
        signals_list.append(
            channel_id=channel_name,
            signal=signal,
            connector_name=connector_name,
            cavity=xlr_pinout.get(signal),
            channel_type=channel_type,
            connector_mpn=connector_mpn
        )

    # Add shield row
    signals_list.append(
        channel_id=f"{channel_name}-shield",
        signal="chassis",
        connector_name=connector_name,
        cavity=xlr_pinout.get("chassis"),
        channel_type=ch_type_ids["chassis"],
        connector_mpn=connector_mpn
    )

"""


def file_structure():
    return {
        f"{state.partnumber('pn-rev')}-feature_tree.py": "feature tree",
        f"{state.partnumber('pn-rev')}-signals_list.tsv": "signals list",
        f"{state.partnumber('pn-rev')}-attributes.json": "attributes",
    }


# define these here because they exist outside the rev folder you're currently working in and fileio.path cant handle that
def path(target_value):
    if target_value == "library file":
        return os.path.join(dirpath("kicad"), f"{state.partnumber('pn')}.kicad_sym")
    return fileio.path(target_value)


def dirpath(target_value):
    if target_value == "kicad":
        return os.path.join(fileio.part_directory(), "kicad")
    return fileio.dirpath(target_value)


def generate_structure():
    os.makedirs(dirpath("kicad"), exist_ok=True)


def _make_new_library_file():
    """Create a bare .kicad_sym file with only library header info."""

    symbol_lib = [
        sexpdata.Symbol("kicad_symbol_lib"),
        [sexpdata.Symbol("version"), 20241209],
        [sexpdata.Symbol("generator"), "kicad_symbol_editor"],
        [sexpdata.Symbol("generator_version"), "9.0"],
    ]

    with open(path("library file"), "w", encoding="utf-8") as f:
        sexpdata.dump(symbol_lib, f, pretty=True)


def _parse_kicad_sym_file():
    """
    Load a KiCad .kicad_sym file and return its parsed sexp data.
    """
    with open(path("library file"), "r", encoding="utf-8") as f:
        data = sexpdata.load(f)
    return data


def _symbol_exists(kicad_library_data, target_symbol_name):
    """
    Check if a symbol with a given name exists in a KiCad library.

    Args:
        kicad_library_data: Parsed sexpdata of the .kicad_sym file.
        target_symbol_name: The symbol name string to search for.

    Returns:
        True if the symbol exists, False otherwise.
    """
    for element in kicad_library_data:
        # Each element could be a list like: ["symbol", "sym_name", ...]
        if isinstance(element, list) and len(element) > 1:
            if element[0] == sexpdata.Symbol("symbol"):
                if str(element[1]) == target_symbol_name:
                    return True
    return False


def _make_property(name, value, id_counter=None, hide=False):
    # adds a property to the current rev symbol of the library
    builtins = {"Reference", "Value", "Footprint", "Datasheet", "Description"}
    prop = [
        sexpdata.Symbol("property"),
        name,
        value,  # always a string
    ]
    if name not in builtins:
        if id_counter is None:
            raise ValueError(f"Custom property {name} requires an id_counter")
        prop.append([sexpdata.Symbol("id"), id_counter])
    prop.append([sexpdata.Symbol("at"), 0, 0, 0])
    effects = [
        sexpdata.Symbol("effects"),
        [sexpdata.Symbol("font"), [sexpdata.Symbol("size"), 1.27, 1.27]],
    ]
    if hide:
        effects.append([sexpdata.Symbol("hide"), sexpdata.Symbol("yes")])
    prop.append(effects)
    return prop
|
+
|
|
165
|
+
|
|
166
|
+
def _add_blank_symbol(sym_name, value="", footprint="", datasheet="", description=""):
|
|
167
|
+
"""Append a blank symbol into the .kicad_sym at fileio.path('library file')."""
|
|
168
|
+
|
|
169
|
+
lib_path = path("library file")
|
|
170
|
+
|
|
171
|
+
# Load the existing s-expression
|
|
172
|
+
with open(lib_path, "r", encoding="utf-8") as f:
|
|
173
|
+
data = sexpdata.load(f)
|
|
174
|
+
|
|
175
|
+
# Build symbol s-expression
|
|
176
|
+
if rev_history.info(field="library_repo") in ["", None]:
|
|
177
|
+
lib_repo_to_write = "local"
|
|
178
|
+
else:
|
|
179
|
+
lib_repo_to_write = _get_attribute("library_repo")
|
|
180
|
+
symbol = [
|
|
181
|
+
sexpdata.Symbol("symbol"),
|
|
182
|
+
sym_name,
|
|
183
|
+
[sexpdata.Symbol("exclude_from_sim"), sexpdata.Symbol("no")],
|
|
184
|
+
[sexpdata.Symbol("in_bom"), sexpdata.Symbol("yes")],
|
|
185
|
+
[sexpdata.Symbol("on_board"), sexpdata.Symbol("yes")],
|
|
186
|
+
_make_property("Reference", _get_attribute("default_refdes")),
|
|
187
|
+
_make_property("Value", value),
|
|
188
|
+
_make_property("Footprint", footprint, hide=True),
|
|
189
|
+
_make_property("Datasheet", datasheet, hide=True),
|
|
190
|
+
_make_property("Description", _get_attribute("desc"), hide=True),
|
|
191
|
+
_make_property("MFG", _get_attribute("manufacturer"), hide=False, id_counter=0),
|
|
192
|
+
_make_property(
|
|
193
|
+
"MPN", _get_attribute("manufacturer_part_number"), hide=False, id_counter=1
|
|
194
|
+
),
|
|
195
|
+
_make_property("lib_repo", lib_repo_to_write, hide=True, id_counter=2),
|
|
196
|
+
_make_property(
|
|
197
|
+
"lib_subpath", _get_attribute("library_subpath"), hide=True, id_counter=3
|
|
198
|
+
),
|
|
199
|
+
_make_property("rev", state.partnumber("rev"), hide=True, id_counter=4),
|
|
200
|
+
[sexpdata.Symbol("embedded_fonts"), sexpdata.Symbol("no")],
|
|
201
|
+
]
|
|
202
|
+
|
|
203
|
+
# Append to the library data
|
|
204
|
+
data.append(symbol)
|
|
205
|
+
|
|
206
|
+
# Write back out
|
|
207
|
+
with open(lib_path, "w", encoding="utf-8") as f:
|
|
208
|
+
sexpdata.dump(data, f, pretty=True)
|
|
209
|
+
|
|
210
|
+
|
|
211
|
+
def _overwrite_or_create_property_in_symbol(prop_name, value, hide=False):
|
|
212
|
+
"""
|
|
213
|
+
Overwrite or create a property inside the target symbol block
|
|
214
|
+
in the KiCad .kicad_sym library file.
|
|
215
|
+
|
|
216
|
+
- File is always path("library file")
|
|
217
|
+
- Symbol to modify is always named state.partnumber("pn-rev")
|
|
218
|
+
|
|
219
|
+
Args:
|
|
220
|
+
prop_name (str): Name of the property
|
|
221
|
+
value (str): Value to set (will always be forced to string)
|
|
222
|
+
hide (bool): Whether to hide the property
|
|
223
|
+
"""
|
|
224
|
+
|
|
225
|
+
target_symbol_name = state.partnumber("pn-rev")
|
|
226
|
+
|
|
227
|
+
# Ensure value is a string (KiCad requirement)
|
|
228
|
+
if value is None:
|
|
229
|
+
value = ""
|
|
230
|
+
else:
|
|
231
|
+
value = str(value)
|
|
232
|
+
|
|
233
|
+
# Load the library file
|
|
234
|
+
with open(path("library file"), "r", encoding="utf-8") as f:
|
|
235
|
+
data = sexpdata.load(f)
|
|
236
|
+
|
|
237
|
+
def next_id(symbol):
|
|
238
|
+
"""Find the next available id number among custom properties."""
|
|
239
|
+
max_id = -1
|
|
240
|
+
for elem in symbol:
|
|
241
|
+
if (
|
|
242
|
+
isinstance(elem, list)
|
|
243
|
+
and len(elem) >= 4
|
|
244
|
+
and isinstance(elem[0], sexpdata.Symbol)
|
|
245
|
+
and elem[0].value() == "property"
|
|
246
|
+
):
|
|
247
|
+
for sub in elem:
|
|
248
|
+
if isinstance(sub, list) and len(sub) == 2:
|
|
249
|
+
if (
|
|
250
|
+
isinstance(sub[0], sexpdata.Symbol)
|
|
251
|
+
and sub[0].value() == "id"
|
|
252
|
+
and isinstance(sub[1], int)
|
|
253
|
+
):
|
|
254
|
+
max_id = max(max_id, sub[1])
|
|
255
|
+
return max_id + 1
|
|
256
|
+
|
|
257
|
+
def overwrite_or_create(symbol):
|
|
258
|
+
# Try to overwrite existing property
|
|
259
|
+
for elem in symbol:
|
|
260
|
+
if (
|
|
261
|
+
isinstance(elem, list)
|
|
262
|
+
and len(elem) >= 3
|
|
263
|
+
and isinstance(elem[0], sexpdata.Symbol)
|
|
264
|
+
and elem[0].value() == "property"
|
|
265
|
+
and elem[1] == prop_name
|
|
266
|
+
):
|
|
267
|
+
elem[2] = value # force overwrite as string
|
|
268
|
+
return symbol
|
|
269
|
+
|
|
270
|
+
# If missing, create new one with next id
|
|
271
|
+
new_id = next_id(symbol)
|
|
272
|
+
new_prop = _make_property(prop_name, value, id_counter=new_id, hide=hide)
|
|
273
|
+
symbol.append(new_prop)
|
|
274
|
+
return symbol
|
|
275
|
+
|
|
276
|
+
# Traverse to the right (symbol ...) block
|
|
277
|
+
for i, elem in enumerate(data):
|
|
278
|
+
if (
|
|
279
|
+
isinstance(elem, list)
|
|
280
|
+
and isinstance(elem[0], sexpdata.Symbol)
|
|
281
|
+
and elem[0].value() == "symbol"
|
|
282
|
+
and elem[1] == target_symbol_name
|
|
283
|
+
):
|
|
284
|
+
data[i] = overwrite_or_create(elem)
|
|
285
|
+
|
|
286
|
+
# Save file back
|
|
287
|
+
with open(path("library file"), "w", encoding="utf-8") as f:
|
|
288
|
+
sexpdata.dump(data, f)
|
|
289
|
+
|
|
290
|
+
|
|
291
|
+
def _extract_pins_from_symbol(kicad_lib, symbol_name):
|
|
292
|
+
"""
|
|
293
|
+
Extract all pin info for the given symbol (and its subsymbols).
|
|
294
|
+
Returns a list of dicts like {"name": ..., "number": ..., "type": ..., "shape": ...}.
|
|
295
|
+
"""
|
|
296
|
+
|
|
297
|
+
def sym_to_str(obj):
|
|
298
|
+
"""Convert sexpdata.Symbol to string, pass through everything else."""
|
|
299
|
+
if isinstance(obj, sexpdata.Symbol):
|
|
300
|
+
return obj.value()
|
|
301
|
+
return obj
|
|
302
|
+
|
|
303
|
+
pins = []
|
|
304
|
+
|
|
305
|
+
def recurse(node, inside_target=False):
|
|
306
|
+
if not isinstance(node, list) or not node:
|
|
307
|
+
return
|
|
308
|
+
|
|
309
|
+
tag = sym_to_str(node[0])
|
|
310
|
+
|
|
311
|
+
if tag == "symbol":
|
|
312
|
+
sym_name = sym_to_str(node[1])
|
|
313
|
+
new_inside = inside_target or (sym_name == symbol_name)
|
|
314
|
+
for sub in node[2:]:
|
|
315
|
+
recurse(sub, inside_target=new_inside)
|
|
316
|
+
|
|
317
|
+
elif tag == "pin" and inside_target:
|
|
318
|
+
pin_type = sym_to_str(node[1]) if len(node) > 1 else None
|
|
319
|
+
pin_shape = sym_to_str(node[2]) if len(node) > 2 else None
|
|
320
|
+
name_val = None
|
|
321
|
+
number_val = None
|
|
322
|
+
|
|
323
|
+
for entry in node[3:]:
|
|
324
|
+
if isinstance(entry, list) and entry:
|
|
325
|
+
etag = sym_to_str(entry[0])
|
|
326
|
+
if etag == "name":
|
|
327
|
+
name_val = sym_to_str(entry[1])
|
|
328
|
+
elif etag == "number":
|
|
329
|
+
number_val = sym_to_str(entry[1])
|
|
330
|
+
|
|
331
|
+
pin_info = {
|
|
332
|
+
"name": name_val,
|
|
333
|
+
"number": number_val,
|
|
334
|
+
"type": pin_type,
|
|
335
|
+
"shape": pin_shape,
|
|
336
|
+
}
|
|
337
|
+
pins.append(pin_info)
|
|
338
|
+
|
|
339
|
+
else:
|
|
340
|
+
for sub in node[1:]:
|
|
341
|
+
recurse(sub, inside_target=inside_target)
|
|
342
|
+
|
|
343
|
+
recurse(kicad_lib, inside_target=False)
|
|
344
|
+
return pins
|
|
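For orientation (illustrative only, not part of the packaged file): for a symbol containing one pin of the kind this module itself writes, the return value looks like the sketch below; the symbol name, pin name, and number are made-up values.

pins = _extract_pins_from_symbol(kicad_lib, "EXAMPLE-PN-A")  # hypothetical symbol name
# pins == [{"name": "in1", "number": "1", "type": "unspecified", "shape": "line"}]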


def _validate_pins(pins, unique_connectors_in_signals_list):
    """Validate pins for uniqueness, type conformity, and check required pins.

    Returns:
        tuple:
            missing (set): Any missing pin names from unique_connectors_in_signals_list.
            used_pin_numbers (set): Numbers already assigned to pins.
    Raises:
        ValueError: On duplicate names/numbers or invalid types.
    """
    seen_names = set()
    seen_numbers = set()

    for pin in pins:
        name = pin.get("name")
        number = pin.get("number")
        ptype = pin.get("type")

        # Duplicate name
        if name in seen_names:
            raise ValueError(f"Duplicate pin name found: {name}")
        seen_names.add(name)

        # Duplicate number
        if number in seen_numbers:
            raise ValueError(f"Duplicate pin number found: {number}")
        seen_numbers.add(number)

        # Type check
        if ptype != "unspecified":
            raise ValueError(
                f"Pin {name} ({number}) has invalid type: {ptype}. Harnice requires all pins to have type 'unspecified'."
            )

    # Set comparison for 1:1 match
    required = set(unique_connectors_in_signals_list)
    pin_names = seen_names

    missing = required - pin_names
    extra = pin_names - required
    if extra:
        raise ValueError(
            f"The following pin(s) exist in KiCad symbol but not Signals List: {', '.join(sorted(extra))}"
        )

    return missing, seen_numbers


def _append_missing_pin(pin_name, pin_number, spacing=3.81):
    """
    Append a pin to the KiCad symbol whose name matches state.partnumber('pn-rev').
    Immediately writes the updated symbol back to path("library file").
    """
    file_path = path("library file")
    pin_number = str(pin_number)
    target_name = state.partnumber("pn-rev")

    import sexpdata

    # --- Load file ---
    with open(file_path, "r", encoding="utf-8") as f:
        symbol_data = sexpdata.load(f)

    # --- Find the symbol with matching name ---
    target_symbol = None
    for item in symbol_data:
        if (
            isinstance(item, list)
            and len(item) >= 2
            and isinstance(item[0], sexpdata.Symbol)
            and item[0].value() == "symbol"
        ):
            # (symbol "Name" ...)
            name_token = item[1]
            if isinstance(name_token, str) and name_token.strip() == target_name:
                target_symbol = item
                break

    if target_symbol is None:
        raise ValueError(f"No symbol named '{target_name}' found in {file_path}")

    # --- Skip if duplicate already present ---
    for elem in target_symbol[2:]:
        if (
            isinstance(elem, list)
            and isinstance(elem[0], sexpdata.Symbol)
            and elem[0].value() == "pin"
        ):
            name_entry = next(
                (
                    x
                    for x in elem
                    if isinstance(x, list)
                    and len(x) > 1
                    and isinstance(x[0], sexpdata.Symbol)
                    and x[0].value() == "name"
                ),
                None,
            )
            num_entry = next(
                (
                    x
                    for x in elem
                    if isinstance(x, list)
                    and len(x) > 1
                    and isinstance(x[0], sexpdata.Symbol)
                    and x[0].value() == "number"
                ),
                None,
            )
            if (
                name_entry
                and name_entry[1] == pin_name
                and num_entry
                and num_entry[1] == pin_number
            ):
                return symbol_data  # already present

    # --- Find max Y among existing pins ---
    max_y = -spacing
    for elem in target_symbol[2:]:
        if (
            isinstance(elem, list)
            and isinstance(elem[0], sexpdata.Symbol)
            and elem[0].value() == "pin"
        ):
            at_entry = next(
                (
                    x
                    for x in elem
                    if isinstance(x, list)
                    and len(x) >= 3
                    and isinstance(x[0], sexpdata.Symbol)
                    and x[0].value() == "at"
                ),
                None,
            )
            if at_entry:
                y_val = float(at_entry[2])
                max_y = max(max_y, y_val)

    new_y = max_y + spacing

    # --- Build new pin ---
    new_pin = [
        sexpdata.Symbol("pin"),
        sexpdata.Symbol("unspecified"),
        sexpdata.Symbol("line"),
        [sexpdata.Symbol("at"), 0, new_y, 0],
        [sexpdata.Symbol("length"), 2.54],
        [
            sexpdata.Symbol("name"),
            pin_name,
            [
                sexpdata.Symbol("effects"),
                [sexpdata.Symbol("font"), [sexpdata.Symbol("size"), 1.27, 1.27]],
            ],
        ],
        [
            sexpdata.Symbol("number"),
            pin_number,
            [
                sexpdata.Symbol("effects"),
                [sexpdata.Symbol("font"), [sexpdata.Symbol("size"), 1.27, 1.27]],
            ],
        ],
    ]

    target_symbol.append(new_pin)

    # --- Write back ---
    with open(file_path, "w", encoding="utf-8") as f:
        sexpdata.dump(symbol_data, f)

    print(
        f"Appended pin {pin_name} ({pin_number}) to symbol '{target_name}' in {os.path.basename(file_path)}"
    )
    return symbol_data


def _remove_details_from_signals_list():
    """Remove the specified channel-related columns from the signals list."""
    old_list = fileio.read_tsv("signals list")

    COLUMNS_TO_DROP = {"channel_id", "signal", "cavity"}

    new_list = []
    for row in old_list:
        filtered = {k: v for k, v in row.items() if k not in COLUMNS_TO_DROP}
        new_list.append(filtered)

    # Rewrite the TSV
    path = fileio.path("signals list")
    with open(path, "w", newline="", encoding="utf-8") as f:
        writer = csv.DictWriter(f, fieldnames=signals_list.COLUMNS, delimiter="\t")
        writer.writeheader()
        writer.writerows(new_list)


def _next_free_number(seen_numbers, start=1):
    """Find the next unused pin number as a string."""
    n = start
    while True:
        if str(n) not in seen_numbers:
            return str(n)
        n += 1


def _validate_kicad_library():
    """
    Validate that the KiCad .kicad_sym library has:
    0. The .kicad_sym file exists (create if missing).
    1. A symbol matching the current part number.
    2. Pins that match the connectors in the signals list.
    """

    if not os.path.exists(path("library file")):
        _make_new_library_file()
        print("New Kicad symbol file created.")

    kicad_library_data = _parse_kicad_sym_file()

    if not _symbol_exists(kicad_library_data, state.partnumber("pn-rev")):
        _add_blank_symbol(
            sym_name=state.partnumber("pn-rev"),
        )

    # Step 1. Collect unique connectors from the signals list
    unique_connectors_in_signals_list = set()
    for signal in fileio.read_tsv("signals list"):
        connector_name = signal.get("connector_name")
        if connector_name:
            unique_connectors_in_signals_list.add(connector_name)

    # Step 2. Validate pins
    kicad_lib = _parse_kicad_sym_file()
    pins = _extract_pins_from_symbol(kicad_lib, state.partnumber("pn-rev"))
    missing, seen_numbers = _validate_pins(pins, unique_connectors_in_signals_list)

    kicad_library_data = _parse_kicad_sym_file()

    # Step 3. Append missing pins
    for pin_name in missing:
        # find the next available number
        pin_number = _next_free_number(seen_numbers)
        # append it
        _append_missing_pin(pin_name, pin_number)
        # mark number as used
        seen_numbers.add(pin_number)

    # Step 4. Overwrite symbol properties
    _overwrite_or_create_property_in_symbol(
        "Reference", _get_attribute("default_refdes"), hide=False
    )
    _overwrite_or_create_property_in_symbol(
        "Description", _get_attribute("desc"), hide=False
    )
    _overwrite_or_create_property_in_symbol("MFG", _get_attribute("mfg"), hide=True)
    _overwrite_or_create_property_in_symbol("MPN", _get_attribute("pn"), hide=False)

    if rev_history.info(field="library_repo") in ["", None]:
        _overwrite_or_create_property_in_symbol("lib_repo", "local", hide=True)
    else:
        _overwrite_or_create_property_in_symbol(
            "lib_repo", _get_attribute("library_repo"), hide=True
        )
    _overwrite_or_create_property_in_symbol(
        "lib_subpath", _get_attribute("library_subpath"), hide=True
    )
    _overwrite_or_create_property_in_symbol("rev", state.partnumber("rev"), hide=True)


def _validate_attributes_json():
    """Ensure an attributes JSON file exists with default values if missing."""

    default_attributes = {"default_refdes": "DEVICE"}

    attributes_path = fileio.path("attributes")

    # If attributes file does not exist, create it with defaults
    if not os.path.exists(attributes_path):
        with open(attributes_path, "w", encoding="utf-8") as f:
            json.dump(default_attributes, f, indent=4)

    # If it exists, load it and verify required keys
    else:
        with open(attributes_path, "r", encoding="utf-8") as f:
            try:
                attributes = json.load(f)
            except json.JSONDecodeError:
                raise ValueError(f"Invalid JSON in attributes file: {attributes_path}")

        updated = False
        for key, value in default_attributes.items():
            if key not in attributes:
                attributes[key] = value
                updated = True

        if updated:
            with open(attributes_path, "w", encoding="utf-8") as f:
                json.dump(attributes, f, indent=4)
            print(f"Updated attributes file with missing defaults at {attributes_path}")


def _get_attribute(attribute_key):
    # find an attribute from either revision history tsv or attributes json
    if attribute_key in rev_history.COLUMNS:
        return rev_history.info(field=attribute_key)

    else:
        with open(fileio.path("attributes"), "r", encoding="utf-8") as f:
            return json.load(f).get(attribute_key)


def configurations(sig_list):
    """
    Returns a dict of each configuration variable and each of its allowed options.
    {number} represents any number and can be used in a string like "{number}V".
    You can also say "0<={number}<10V" to describe bounds.

    Args:
        signals_list (dictionary form)

    Returns:
        {
            "config_col_1": {"opt1", "opt2", ""},
            "config_col_2": {"5V", "12V", ""},
            ...
        }
    """

    # collect headers
    headers = set()
    for item in sig_list:
        headers.update(item.keys())
        break  # only need first row for headers

    # find configuration columns
    configuration_cols = (
        headers - set(signals_list.DEVICE_COLUMNS) - {"config_variable"}
    )

    # initialize root dict
    configuration_vars = {col: set() for col in configuration_cols}

    # populate unique values (INCLUDING blanks)
    for row in sig_list:
        for col in configuration_cols:
            val = row.get(col)

            # normalize everything to string
            if val is None:
                val = ""
            else:
                val = str(val).strip()

            configuration_vars[col].add(val)

    return configuration_vars
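A minimal sketch of configurations() in use (hypothetical rows; it assumes signals_list.DEVICE_COLUMNS covers the standard device columns such as channel_id and signal, which is defined outside this file):

rows = [
    {"channel_id": "in1", "signal": "pos", "voltage": "5V"},
    {"channel_id": "in2", "signal": "pos", "voltage": ""},
    {"channel_id": "out1", "signal": "pos", "voltage": "12V"},
]
# Only "voltage" survives the column subtraction, so:
# configurations(rows) == {"voltage": {"5V", "", "12V"}}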


def _validate_signals_list():
    print("--------------------------------")
    print("Validating signals list...")
    if not os.path.exists(fileio.path("signals list")):
        raise FileNotFoundError("Signals list was not generated.")

    with open(fileio.path("signals list"), "r", encoding="utf-8") as f:
        reader = csv.DictReader(f, delimiter="\t")
        headers = reader.fieldnames
        signals_list = list(reader)

    if not headers:
        raise ValueError("Signals list has no header row.")

    config_vars = configurations(signals_list)

    print(json.dumps(config_vars, indent=4))
    # NEXT UP: WAIT UNTIL YOU HAVE A GOOD USE CASE OF CONFIGURED DEVICES.
    # CONFIRM THAT THIS PRINTS A DICTIONARY OF ALL THE VALID CONFIGURATION VARIABLES AND THEIR DEFINED STATES
    # THEN MAKE A LIST OF EVERY SINGLE FACTORIAL COMBINATION OF THE CONFIGURATION VARIABLES
    # THEN ITERATE THROUGH THAT LIST AND VALIDATE EACH CONFIGURATION

    counter = 2
    for signal in signals_list:
        print("Looking at csv row:", counter)
        channel_type = chtype.parse(signal.get("channel_type"))
        expected_signals = chtype.signals(channel_type)
        found_signals = set()
        connector_names = set()

        # make sure all the fields are there
        if signal.get("channel_id") in ["", None]:
            raise ValueError("channel_id is blank")
        if signal.get("signal") in ["", None]:
            raise ValueError("signal is blank")
        if signal.get("connector_name") in ["", None]:
            raise ValueError("connector_name is blank")
        if signal.get("cavity") in ["", None]:
            raise ValueError("cavity is blank")
        if signal.get("connector_mpn") in ["", None]:
            raise ValueError("connector_mpn is blank")
        if signal.get("channel_type") in ["", None]:
            raise ValueError("channel_type is blank")

        # make sure signal is a valid signal of its channel type
        if signal.get("signal") not in chtype.signals(channel_type):
            raise ValueError(
                f"Signal {signal.get('signal')} is not a valid signal of its channel type"
            )

        # make sure all the signals of each channel type are present
        for expected_signal in expected_signals:
            for signal2 in signals_list:
                if (
                    signal2.get("channel_id") == signal.get("channel_id")
                    and signal2.get("signal") == expected_signal
                ):
                    found_signals.add(expected_signal)
                    connector_names.add(signal2.get("connector_name"))

        missing_signals = set(expected_signals) - found_signals
        if missing_signals:
            raise ValueError(
                f"Channel {signal.get('channel_id')} is missing signals: {', '.join(missing_signals)}"
            )

        # make sure no channels are spread across multiple connectors
        if len(connector_names) > 1:
            raise ValueError(
                f"Channel {signal.get('channel_id')} has signals spread across multiple connectors: "
                f"{', '.join(connector_names)}"
            )

        counter += 1

    # make sure no duplicate cavities are present
    seen_cavities = set()
    for signal in signals_list:
        cavity_key = f"{signal.get('connector_name')}-{signal.get('cavity')}"
        if cavity_key in seen_cavities:
            raise ValueError(
                f"Duplicate cavity '{signal.get('cavity')}' found on connector '{signal.get('connector_name')}'"
            )
        seen_cavities.add(cavity_key)

    print(f"Signals list of {state.partnumber('pn')} is valid.\n")


def render(lightweight=False):
    signals_list.set_list_type("device")
    _validate_attributes_json()

    # make a new signals list
    if not os.path.exists(fileio.path("signals list")):
        if lightweight:
            signals_list.new()
            signals_list.write_signal(
                connector_name="J1",
                channel_type=0,
                signal="placeholder",
                cavity=1,
                connector_mpn="DB9_F",
            )
        else:
            with open(fileio.path("feature tree"), "w", encoding="utf-8") as f:
                f.write(device_feature_tree_utils_default)

    if os.path.exists(fileio.path("feature tree")):
        runpy.run_path(fileio.path("feature tree"))
        print("Successfully rebuilt signals list per feature tree.")

    if not lightweight:
        _validate_signals_list()

    if lightweight:
        # don't want to map things that have not been mapped completely yet
        _remove_details_from_signals_list()

    path_nickname = ""
    subpath_nickname = ""
    if rev_history.info(field="library_repo"):
        path_nickname = f"{os.path.basename(rev_history.info(field='library_repo'))}/"
    if rev_history.info(field="library_subpath"):
        subpath_nickname = f"{rev_history.info(field='library_subpath')}"

    if path_nickname == "":
        print(
            "Add this to 'PROJECT SPECIFIC LIBRARIES' not 'global libraries' in Kicad because it doesn't look like you're working in a Harnice library path"
        )

    print(
        "\n"
        f"Nickname: {path_nickname}{subpath_nickname}{state.partnumber('pn')}\n"
        f"Library path: {path('library file')}\n"
    )

    _validate_kicad_library()
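For orientation, a minimal sketch of driving this module's entry point (hypothetical; in practice harnice's CLI establishes the part/rev working context through state and fileio before render() is called):

from harnice.products import device

device.generate_structure()       # make sure the kicad/ subfolder exists
device.render(lightweight=False)  # rebuild and validate the signals list, then sync the .kicad_sym symbol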
|