harnice-0.3.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. harnice/__init__.py +0 -0
  2. harnice/__main__.py +4 -0
  3. harnice/cli.py +234 -0
  4. harnice/fileio.py +295 -0
  5. harnice/gui/launcher.py +426 -0
  6. harnice/lists/channel_map.py +182 -0
  7. harnice/lists/circuits_list.py +302 -0
  8. harnice/lists/disconnect_map.py +237 -0
  9. harnice/lists/formboard_graph.py +63 -0
  10. harnice/lists/instances_list.py +280 -0
  11. harnice/lists/library_history.py +40 -0
  12. harnice/lists/manifest.py +93 -0
  13. harnice/lists/post_harness_instances_list.py +66 -0
  14. harnice/lists/rev_history.py +325 -0
  15. harnice/lists/signals_list.py +135 -0
  16. harnice/products/__init__.py +1 -0
  17. harnice/products/cable.py +152 -0
  18. harnice/products/chtype.py +80 -0
  19. harnice/products/device.py +844 -0
  20. harnice/products/disconnect.py +225 -0
  21. harnice/products/flagnote.py +139 -0
  22. harnice/products/harness.py +522 -0
  23. harnice/products/macro.py +10 -0
  24. harnice/products/part.py +640 -0
  25. harnice/products/system.py +125 -0
  26. harnice/products/tblock.py +270 -0
  27. harnice/state.py +57 -0
  28. harnice/utils/appearance.py +51 -0
  29. harnice/utils/circuit_utils.py +326 -0
  30. harnice/utils/feature_tree_utils.py +183 -0
  31. harnice/utils/formboard_utils.py +973 -0
  32. harnice/utils/library_utils.py +333 -0
  33. harnice/utils/note_utils.py +417 -0
  34. harnice/utils/svg_utils.py +819 -0
  35. harnice/utils/system_utils.py +563 -0
  36. harnice-0.3.0.dist-info/METADATA +32 -0
  37. harnice-0.3.0.dist-info/RECORD +41 -0
  38. harnice-0.3.0.dist-info/WHEEL +5 -0
  39. harnice-0.3.0.dist-info/entry_points.txt +3 -0
  40. harnice-0.3.0.dist-info/licenses/LICENSE +19 -0
  41. harnice-0.3.0.dist-info/top_level.txt +1 -0
harnice/lists/instances_list.py
@@ -0,0 +1,280 @@
+ import csv
+ import os
+ import inspect
+ from threading import Lock
+ from harnice import fileio, state
+
+ COLUMNS = [
+     "net",  # documentation needed
+     "instance_name",  # documentation needed
+     "print_name",  # documentation needed
+     "bom_line_number",  # documentation needed
+     "mfg",  # documentation needed
+     "mpn",  # unique part identifier (manufacturer + part number concatenated)
+     "item_type",  # connector, backshell, etc.
+     "parent_instance",  # general-purpose reference
+     "location_type",  # each instance is either better represented by one or the other
+     "segment_group",  # the group of segments that this instance is part of
+     "segment_order",  # the sequential id of this item in its segment group
+     "connector_group",  # a group of co-located parts (connectors, backshells, nodes)
+     "channel_group",  # documentation needed
+     "circuit_id",  # which signal this component is electrically connected to
+     "circuit_port_number",  # the sequential id of this item in its signal chain
+     "node_at_end_a",  # derived from formboard definition
+     "node_at_end_b",  # derived from formboard definition
+     "parent_csys_instance_name",  # the other instance upon which this instance's location is based
+     "parent_csys_outputcsys_name",  # the specific output coordinate system of the parent that this instance's location is based on
+     "translate_x",  # derived from parent_csys and parent_csys_name
+     "translate_y",  # derived from parent_csys and parent_csys_name
+     "rotate_csys",  # derived from parent_csys and parent_csys_name
+     "absolute_rotation",  # manual add, not nominally used unless it's a flagnote, segment, or node
+     "csys_children",  # imported csys children from library attributes file
+     "cable_group",  # documentation needed
+     "cable_container",  # documentation needed
+     "cable_identifier",  # documentation needed
+     "length",  # derived from formboard definition; the length of a segment
+     "diameter",  # apparent diameter of a segment (TODO: change to print_diameter)
+     "appearance",  # see harnice.utils.appearance for details
+     "note_type",  # build_note, rev_note, etc.
+     "note_number",  # if there is a counter involved (rev, bom, build_note, etc.)
+     "note_parent",  # the instance the note applies to; typically not used in the instances list, just in note_utils
+     "note_text",  # the content of the note
+     "note_affected_instances",  # list of instances that are affected by the note
+     "lib_repo",  # documentation needed
+     "lib_subpath",  # documentation needed
+     "lib_desc",  # documentation needed
+     "lib_latest_rev",  # documentation needed
+     "lib_rev_used_here",  # documentation needed
+     "lib_status",  # documentation needed
+     "lib_releaseticket",  # documentation needed
+     "lib_datestarted",  # documentation needed
+     "lib_datemodified",  # documentation needed
+     "lib_datereleased",  # documentation needed
+     "lib_drawnby",  # documentation needed
+     "lib_checkedby",  # documentation needed
+     "project_editable_lib_modified",  # documentation needed
+     "lib_build_notes",  # documentation needed
+     "lib_tools",  # documentation needed
+     "this_instance_mating_device_refdes",  # if connector, refdes of the device it plugs into
+     "this_instance_mating_device_connector",  # if connector, name of the connector it plugs into
+     "this_instance_mating_device_connector_mpn",  # if connector, mpn of the connector it plugs into
+     "this_net_from_device_refdes",  # documentation needed
+     "this_net_from_device_channel_id",  # documentation needed
+     "this_net_from_device_connector_name",  # documentation needed
+     "this_net_to_device_refdes",  # documentation needed
+     "this_net_to_device_channel_id",  # documentation needed
+     "this_net_to_device_connector_name",  # documentation needed
+     "this_channel_from_device_refdes",  # if channel, refdes of the device on one side of the channel
+     "this_channel_from_device_channel_id",  # documentation needed
+     "this_channel_to_device_refdes",  # if channel, refdes of the device on the other side of the channel
+     "this_channel_to_device_channel_id",  # documentation needed
+     "this_channel_from_channel_type",  # documentation needed
+     "this_channel_to_channel_type",  # documentation needed
+     "signal_of_channel_type",  # documentation needed
+     "debug",  # documentation needed
+     "debug_cutoff",  # documentation needed
+ ]
+
+
+ def new_instance(instance_name, instance_data, ignore_duplicates=False):
+     """
+     New Instance
+
+     instances_list.new_instance(
+         instance_name,
+         instance_data,
+         ignore_duplicates=False,
+     )
+
+     Add a new instance to your instances list.
+
+     instance_name is a string and must be unique within the list.
+     instance_data is a dictionary keyed by the columns above. You may include instance_name in this dict, but if you do and it does not match the argument, the call raises an error.
+     Setting ignore_duplicates to True makes the call return silently (with -1) when an instance with the same instance_name already exists; with the default of False, adding a duplicate instance_name raises an error.
+
+     Args:
+         instance_name: string, must be unique within the list
+         instance_data: dictionary of columns (above)
+         ignore_duplicates: boolean, default False
+
+     Returns:
+         -1 if a duplicate was skipped (ignore_duplicates=True); None on success; otherwise raises an error
+     """
+     if instance_name in ["", None]:
+         raise ValueError(
+             "Argument 'instance_name' is blank and required to identify a unique instance"
+         )
+
+     if (
+         "instance_name" in instance_data
+         and instance_data["instance_name"] != instance_name
+     ):
+         raise ValueError(
+             f"Inconsistent instance_name: argument='{instance_name}' vs data['instance_name']='{instance_data['instance_name']}'"
+         )
+
+     if any(
+         row.get("instance_name") == instance_name
+         for row in fileio.read_tsv("instances list")
+     ):
+         if not ignore_duplicates:
+             raise ValueError(
+                 f"An instance with the name '{instance_name}' already exists"
+             )
+         else:
+             return -1
+
+     if instance_data.get("net") is None:
+         try:
+             instance_data["net"] = state.net
+         except AttributeError:  # no net has been set
+             pass
+
+     # Record the call chain for debugging
+     instance_data["debug"] = _get_call_chain_str()
+     instance_data["debug_cutoff"] = " "
+
+     # Carry the instance_name argument into the row that gets written
+     instance_data["instance_name"] = instance_name
+
+     with open(fileio.path("instances list"), "a", newline="", encoding="utf-8") as f:
+         writer = csv.DictWriter(f, fieldnames=COLUMNS, delimiter="\t")
+         writer.writerow({key: instance_data.get(key, "") for key in COLUMNS})
+
+
+ _instances_lock = Lock()
+
+
+ def modify(instance_name, instance_data):
+     with _instances_lock:
+         path = fileio.path("instances list")
+
+         # --- Read once ---
+         with open(path, newline="", encoding="utf-8") as f:
+             reader = csv.DictReader(f, delimiter="\t")
+             rows = list(reader)
+             fieldnames = reader.fieldnames or []
+
+         # --- Add debug info before updating ---
+         instance_data["debug"] = _get_call_chain_str()
+         instance_data["debug_cutoff"] = " "
+
+         # Ensure any new keys are part of the header
+         for key in instance_data:
+             if key not in fieldnames:
+                 fieldnames.append(key)
+
+         # --- Modify in place ---
+         found = False
+         for row in rows:
+             if row.get("instance_name") == instance_name:
+                 row.update(instance_data)
+                 found = True
+                 break
+
+         if not found:
+             raise ValueError(f"Instance '{instance_name}' not found")
+
+         # --- Write atomically ---
+         tmp = path + ".tmp"
+         with open(tmp, "w", newline="", encoding="utf-8") as f:
+             writer = csv.DictWriter(f, fieldnames=fieldnames, delimiter="\t")
+             writer.writeheader()
+             writer.writerows(rows)
+             f.flush()
+             os.fsync(f.fileno())
+
+         os.replace(tmp, path)
+
+
+ def remove_instance(instance_to_delete):
+     instances_list = fileio.read_tsv("instances list")
+     new_list = []
+     for instance in instances_list:
+         if instance.get("instance_name") == instance_to_delete.get("instance_name"):
+             continue
+         new_list.append(instance)
+
+     with open(fileio.path("instances list"), "w", newline="", encoding="utf-8") as f:
+         writer = csv.DictWriter(f, fieldnames=COLUMNS, delimiter="\t")
+         writer.writeheader()
+         writer.writerows(new_list)
+
+
+ def new():
+     with open(fileio.path("instances list"), "w", newline="", encoding="utf-8") as f:
+         writer = csv.DictWriter(f, fieldnames=COLUMNS, delimiter="\t")
+         writer.writeheader()
+         writer.writerows([])
+
+
+ def assign_bom_line_numbers():
+     bom = []
+     for instance in fileio.read_tsv("instances list"):
+         if instance.get("bom_line_number") == "True":
+             if instance.get("mpn") == "":
+                 raise ValueError(
+                     f"You've chosen to add {instance.get('instance_name')} to the bom, but haven't specified an MPN"
+                 )
+             if instance.get("mpn") not in bom:
+                 bom.append(instance.get("mpn"))
+
+     bom_line_number = 1
+     for bom_item in bom:
+         for instance in fileio.read_tsv("instances list"):
+             if instance.get("mpn") == bom_item:
+                 modify(
+                     instance.get("instance_name"), {"bom_line_number": bom_line_number}
+                 )
+         bom_line_number += 1
+
+
+ def attribute_of(target_instance, attribute):
+     for instance in fileio.read_tsv("instances list"):
+         if instance.get("instance_name") == target_instance:
+             return instance.get(attribute)
+
+
+ def instance_in_connector_group_with_item_type(connector_group, item_type):
+     if connector_group in ["", None]:
+         raise ValueError("Connector group is blank")
+     if item_type in ["", None]:
+         raise ValueError("Item type is blank")
+     match = 0
+     output = None
+     for instance in fileio.read_tsv("instances list"):
+         if instance.get("connector_group") == connector_group:
+             if instance.get("item_type") == item_type:
+                 match = match + 1
+                 output = instance
+     if match == 0:
+         return 0
+     if match > 1:
+         raise ValueError(
+             f"Multiple instances found in connector_group '{connector_group}' with item type '{item_type}'."
+         )
+     return output
+
+
+ def _get_call_chain_str():
+     """
+     Returns the call chain as a readable string:
+     filename:line in function -> filename:line in function ...
+     """
+     stack = inspect.stack()
+     chain_parts = []
+     for frame_info in reversed(stack[1:]):  # skip this function itself
+         filename = os.path.basename(frame_info.filename)
+         lineno = frame_info.lineno
+         function = frame_info.function
+         chain_parts.append(f"{filename}:{lineno} in {function}()")
+     return " -> ".join(chain_parts)
+
+
+ def list_of_uniques(attribute):
+     output = []
+     for instance in fileio.read_tsv("instances list"):
+         if instance.get(attribute) not in output:
+             if instance.get(attribute) not in [None, ""]:
+                 output.append(instance.get(attribute))
+     return output
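
The instances_list module above is the package's central TSV-backed table: new_instance() appends a row keyed by instance_name, modify() rewrites the file atomically under a lock, and attribute_of() / list_of_uniques() are simple readers. A minimal usage sketch of that API (the instance name, MPN, and column values are hypothetical, and it assumes a harnice project where fileio.path("instances list") resolves):

from harnice.lists import instances_list

instances_list.new()  # write a fresh instances list containing only the COLUMNS header
instances_list.new_instance(
    "X1",  # hypothetical connector instance
    {"item_type": "connector", "mpn": "ACME-123", "bom_line_number": "True"},
)
instances_list.modify("X1", {"connector_group": "X1_group"})
instances_list.assign_bom_line_numbers()
print(instances_list.attribute_of("X1", "bom_line_number"))  # "1" -- values round-trip through the TSV as strings
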
harnice/lists/library_history.py
@@ -0,0 +1,40 @@
+ import csv
+ from harnice import fileio
+
+ COLUMNS = [
+     "instance_name",  # documentation needed
+     "mpn",  # documentation needed
+     "item_type",  # documentation needed
+     "lib_repo",  # documentation needed
+     "lib_subpath",  # documentation needed
+     "lib_desc",  # documentation needed
+     "lib_latest_rev",  # documentation needed
+     "lib_rev_used_here",  # documentation needed
+     "lib_status",  # documentation needed
+     "lib_releaseticket",  # documentation needed
+     "lib_datestarted",  # documentation needed
+     "lib_datemodified",  # documentation needed
+     "lib_datereleased",  # documentation needed
+     "lib_drawnby",  # documentation needed
+     "lib_checkedby",  # documentation needed
+     "project_editable_lib_modified",  # documentation needed
+ ]
+
+
+ def new():
+     with open(fileio.path("library history"), "w", newline="", encoding="utf-8") as f:
+         writer = csv.DictWriter(f, fieldnames=COLUMNS, delimiter="\t")
+         writer.writeheader()
+         writer.writerows([])
+
+
+ def append(instance_name, instance_data):
+     instance_data["instance_name"] = instance_name
+     for row in fileio.read_tsv("library history"):
+         if row.get("instance_name") == instance_name:
+             raise ValueError(
+                 f"You're trying to import something with instance_name '{instance_name}' but it has already been imported."
+             )
+     with open(fileio.path("library history"), "a", newline="", encoding="utf-8") as f:
+         writer = csv.DictWriter(f, fieldnames=COLUMNS, delimiter="\t")
+         writer.writerow({key: instance_data.get(key, "") for key in COLUMNS})
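
library_history above mirrors the lib_* columns of the instances list and enforces one row per imported instance_name. A short sketch (the values are hypothetical):

from harnice.lists import library_history

library_history.new()
library_history.append("X1", {"mpn": "ACME-123", "lib_repo": "parts-library", "lib_rev_used_here": "B"})
# Appending "X1" a second time raises ValueError, since each import is recorded exactly once.
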
harnice/lists/manifest.py
@@ -0,0 +1,93 @@
+ import os
+ import csv
+ from harnice import fileio
+
+ COLUMNS = [
+     "net",  # documentation needed
+     "harness_pn",  # documentation needed
+ ]
+
+
+ def new():
+     """
+     Synchronize the system harness manifest with the system connector list:
+     - Remove nets that no longer exist in the connector list
+     - Add nets that appear in the connector list but not yet in the manifest
+     - Preserve all other column data for nets that still exist
+     """
+     # Load connector list and extract unique nets
+     connector_nets = {
+         row.get("net", "").strip()
+         for row in fileio.read_tsv("system connector list")
+         if row.get("net")
+     }
+
+     manifest_path = fileio.path("harness manifest")
+
+     # Load existing manifest if present
+     existing_manifest = []
+     manifest_nets = set()
+     try:
+         existing_manifest = fileio.read_tsv("harness manifest")
+         manifest_nets = {
+             row.get("net", "").strip() for row in existing_manifest if row.get("net")
+         }
+     except FileNotFoundError:
+         existing_manifest = []
+         manifest_nets = set()
+
+     # Determine differences
+     nets_to_add = connector_nets - manifest_nets
+     nets_to_keep = manifest_nets & connector_nets
+
+     # Preserve existing info for kept nets
+     updated_manifest = [
+         row for row in existing_manifest if row.get("net") in nets_to_keep
+     ]
+
+     # Add new rows for new nets
+     for net in sorted(nets_to_add):
+         updated_manifest.append({"net": net})
+
+     # Sort by net name for consistency
+     updated_manifest = sorted(updated_manifest, key=lambda r: r.get("net", ""))
+
+     # Write the updated manifest
+     with open(manifest_path, "w", newline="", encoding="utf-8") as f:
+         writer = csv.DictWriter(f, fieldnames=COLUMNS, delimiter="\t")
+         writer.writeheader()
+         for row in updated_manifest:
+             full_row = {col: row.get(col, "") for col in COLUMNS}
+             writer.writerow(full_row)
+
+
+ def update_upstream(path_to_system_rev, system_pn_rev, manifest_nets, harness_pn):
+     manifest_path = os.path.join(
+         path_to_system_rev,
+         "lists",
+         f"{system_pn_rev[0]}-{system_pn_rev[1]}-harness_manifest.tsv",
+     )
+
+     # Read the existing manifest
+     with open(manifest_path, newline="", encoding="utf-8") as f:
+         reader = csv.DictReader(f, delimiter="\t")
+         manifest = list(reader)
+         fieldnames = reader.fieldnames
+
+     # --- Pass 1: update matching nets ---
+     for net in manifest_nets:
+         for row in manifest:
+             if row.get("net") == net:
+                 row["harness_pn"] = harness_pn
+                 break
+
+     # --- Pass 2: remove outdated links ---
+     for row in manifest:
+         if row.get("harness_pn") == harness_pn and row.get("net") not in manifest_nets:
+             row["harness_pn"] = ""
+
+     # Write back the updated manifest
+     with open(manifest_path, "w", newline="", encoding="utf-8") as f:
+         writer = csv.DictWriter(f, fieldnames=fieldnames, delimiter="\t")
+         writer.writeheader()
+         writer.writerows(manifest)
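
manifest.new() above reconciles the harness manifest against the system connector list (stale nets are dropped, new nets added, existing harness_pn assignments kept), while update_upstream() records which harness covers which nets in a system rev's manifest file. A sketch of how the two calls fit together (the path, part numbers, rev, and net names are hypothetical):

from harnice.lists import manifest

manifest.new()  # the manifest now lists exactly the nets found in the system connector list

# From a harness build, claim two nets for harness "HARN-001" in system "SYS-001" rev "A":
manifest.update_upstream("/path/to/system/rev", ("SYS-001", "A"), {"net_power", "net_can"}, "HARN-001")
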
harnice/lists/post_harness_instances_list.py
@@ -0,0 +1,66 @@
+ import os
+ import csv
+ import shutil
+ from harnice import fileio
+ from harnice.lists import instances_list
+
+
+ def rebuild():
+     """
+     Build the 'post harness instances list' by merging instance data from:
+     - Each harness's instances list, if the harness_pn is defined and the file exists
+     - Otherwise, the system-level instances list for matching nets
+
+     Writes a clean TSV using instances_list.COLUMNS.
+     """
+
+     post_harness_instances = []
+
+     # --- Iterate through manifest rows ---
+     for harness in fileio.read_tsv("harness manifest"):
+         harness_pn = (harness.get("harness_pn") or "").strip()
+         net = (harness.get("net") or "").strip()
+         if not net:
+             continue
+
+         # Case 1: harness_pn missing -> import from system instances
+         if not harness_pn:
+             for system_instance in fileio.read_tsv("instances list"):
+                 if system_instance.get("net", "").strip() == net:
+                     post_harness_instances.append(system_instance)
+             continue
+
+         # Case 2: harness_pn provided -> try to load that harness's instances list
+         harness_instances_list_path = os.path.join(
+             fileio.dirpath("harnesses"),
+             f"{harness_pn}-instances_list.tsv",
+         )
+
+         if os.path.exists(harness_instances_list_path):
+             post_harness_instances.extend(fileio.read_tsv(harness_instances_list_path))
+         else:
+             # Fall back to system-level instances for the same net
+             for system_instance in fileio.read_tsv("instances list"):
+                 if system_instance.get("net", "").strip() == net:
+                     post_harness_instances.append(system_instance)
+
+     # --- Write TSV file, normalizing every row to the shared column set ---
+     with open(
+         fileio.path("post harness instances list"), "w", newline="", encoding="utf-8"
+     ) as f:
+         writer = csv.DictWriter(f, fieldnames=instances_list.COLUMNS, delimiter="\t")
+         writer.writeheader()
+         for instance in post_harness_instances:
+             writer.writerow({k: instance.get(k, "") for k in instances_list.COLUMNS})
+
+
+ def push(path_to_system_rev, system_pn_rev):
+     path_to_harness_dir_of_system = os.path.join(
+         path_to_system_rev, f"{system_pn_rev[0]}-{system_pn_rev[1]}", "harnesses"
+     )
+     shutil.copy(fileio.path("instances list"), path_to_harness_dir_of_system)
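
Finally, post_harness_instances_list.rebuild() above folds the per-harness instances lists back into one system-level TSV, falling back to the system instances list for nets whose harness file does not exist yet, and push() copies the current instances list into a system rev's harnesses directory. A sketch (the system path and part/rev pair are hypothetical):

from harnice.lists import post_harness_instances_list

post_harness_instances_list.rebuild()
post_harness_instances_list.push("/path/to/system/rev", ("SYS-001", "A"))
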