harnice-0.3.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. harnice/__init__.py +0 -0
  2. harnice/__main__.py +4 -0
  3. harnice/cli.py +234 -0
  4. harnice/fileio.py +295 -0
  5. harnice/gui/launcher.py +426 -0
  6. harnice/lists/channel_map.py +182 -0
  7. harnice/lists/circuits_list.py +302 -0
  8. harnice/lists/disconnect_map.py +237 -0
  9. harnice/lists/formboard_graph.py +63 -0
  10. harnice/lists/instances_list.py +280 -0
  11. harnice/lists/library_history.py +40 -0
  12. harnice/lists/manifest.py +93 -0
  13. harnice/lists/post_harness_instances_list.py +66 -0
  14. harnice/lists/rev_history.py +325 -0
  15. harnice/lists/signals_list.py +135 -0
  16. harnice/products/__init__.py +1 -0
  17. harnice/products/cable.py +152 -0
  18. harnice/products/chtype.py +80 -0
  19. harnice/products/device.py +844 -0
  20. harnice/products/disconnect.py +225 -0
  21. harnice/products/flagnote.py +139 -0
  22. harnice/products/harness.py +522 -0
  23. harnice/products/macro.py +10 -0
  24. harnice/products/part.py +640 -0
  25. harnice/products/system.py +125 -0
  26. harnice/products/tblock.py +270 -0
  27. harnice/state.py +57 -0
  28. harnice/utils/appearance.py +51 -0
  29. harnice/utils/circuit_utils.py +326 -0
  30. harnice/utils/feature_tree_utils.py +183 -0
  31. harnice/utils/formboard_utils.py +973 -0
  32. harnice/utils/library_utils.py +333 -0
  33. harnice/utils/note_utils.py +417 -0
  34. harnice/utils/svg_utils.py +819 -0
  35. harnice/utils/system_utils.py +563 -0
  36. harnice-0.3.0.dist-info/METADATA +32 -0
  37. harnice-0.3.0.dist-info/RECORD +41 -0
  38. harnice-0.3.0.dist-info/WHEEL +5 -0
  39. harnice-0.3.0.dist-info/entry_points.txt +3 -0
  40. harnice-0.3.0.dist-info/licenses/LICENSE +19 -0
  41. harnice-0.3.0.dist-info/top_level.txt +1 -0
harnice/lists/circuits_list.py
@@ -0,0 +1,302 @@
+ import os
+ import csv
+ from harnice import fileio
+ from harnice.lists import signals_list
+ from harnice.products import chtype
+
+ COLUMNS = [
+     "net", #documentation needed
+     "circuit_id", #documentation needed
+     "signal", #documentation needed
+     "net_from_refdes", #documentation needed
+     "net_from_channel_id", #documentation needed
+     "net_from_connector_name", #documentation needed
+     "net_from_cavity", #documentation needed
+     "net_to_refdes", #documentation needed
+     "net_to_channel_id", #documentation needed
+     "net_to_connector_name", #documentation needed
+     "net_to_cavity", #documentation needed
+     "from_side_device_refdes", #documentation needed
+     "from_side_device_chname", #documentation needed
+     "to_side_device_refdes", #documentation needed
+     "to_side_device_chname", #documentation needed
+     "from_channel_type", #documentation needed
+     "to_channel_type", #documentation needed
+ ]
+
+
+ def new():
+     """
+     Builds the circuits list from the channel map and disconnect map. Overwrites any existing circuits list.
+
+     Args: none
+
+     Returns: none
+     """
+     # --- helper: first non-empty field ---
+     def first_nonempty(row, *candidate_names):
+         for name in candidate_names:
+             value = (row.get(name) or "").strip()
+             if value:
+                 return value
+         return ""
+
+     # --- load disconnect map and build index ---
+     disconnect_index = {}
+
+     for row in fileio.read_tsv("disconnect map"):
+         a_refdes = first_nonempty(
+             row,
+             "A-side_device_refdes",
+             "from_destination_device_refdes",
+             "from_device_refdes",
+         )
+         if not a_refdes:
+             continue  # skip "available channel" rows
+
+         a_channel_id = first_nonempty(
+             row,
+             "A-side_device_channel_id",
+             "from_destination_device_channel_id",
+             "from_device_channel_id",
+         )
+         b_refdes = first_nonempty(
+             row,
+             "B-side_device_refdes",
+             "to_destination_device_refdes",
+             "to_device_refdes",
+         )
+         b_channel_id = first_nonempty(
+             row,
+             "B-side_device_channel_id",
+             "to_destination_device_channel_id",
+             "to_device_channel_id",
+         )
+
+         disconnect_refdes = first_nonempty(row, "disconnect_refdes")
+         disconnect_channel_id = first_nonempty(
+             row, "disconnect_channel_id"
+         )
+
+         key_forward = (
+             a_refdes,
+             a_channel_id,
+             b_refdes,
+             b_channel_id,
+             disconnect_refdes,
+         )
+         key_reverse = (
+             b_refdes,
+             b_channel_id,
+             a_refdes,
+             a_channel_id,
+             disconnect_refdes,
+         )
+
+         disconnect_index[key_forward] = disconnect_channel_id
+         disconnect_index[key_reverse] = disconnect_channel_id
+
+     circuits_list = []
+     circuit_id = 0
+
+     # --- resolvers ---
+     def resolve_device_endpoint(refdes, channel_id, signal):
+         device_signals_list_path = os.path.join(
+             fileio.dirpath("instance_data"),
+             "device",
+             refdes,
+             f"{refdes}-signals_list.tsv",
+         )
+         connector_name = (
+             signals_list.connector_name_of_channel(channel_id, device_signals_list_path)
+             if channel_id
+             else ""
+         )
+         cavity = (
+             signals_list.cavity_of_signal(channel_id, signal, device_signals_list_path)
+             if channel_id
+             else ""
+         )
+         return {
+             "refdes": refdes,
+             "channel_id": channel_id,
+             "connector_name": connector_name,
+             "cavity": cavity,
+         }
+
+     def resolve_disconnect_endpoint(refdes, side, signal, channel_id):
+         disconnect_signals_list_path = os.path.join(
+             fileio.dirpath("instance_data"), "disconnect", refdes, f"{refdes}-signals_list.tsv"
+         )
+
+         row = None
+         for disconnect_signal_row in fileio.read_tsv(disconnect_signals_list_path):
+             if disconnect_signal_row.get("signal", "").strip() == signal.strip():
+                 if (
+                     disconnect_signal_row.get("channel_id", "").strip()
+                     == channel_id.strip()
+                 ):
+                     row = disconnect_signal_row
+                     break
+
+         if row is None:
+             raise ValueError(
+                 f"Signal '{signal}' of channel '{channel_id}' not found in {disconnect_signals_list_path}"
+             )
+
+         cavity = (row.get(f"{side}_cavity") or "").strip()
+         return {
+             "refdes": refdes,
+             "channel_id": channel_id,
+             "connector_name": side,
+             "cavity": cavity,
+         }
+
+     # --- iterate channel map rows ---
+     for row in fileio.read_tsv("channel map"):
+         if not row.get("from_device_channel_id"):
+             continue
+         if not row.get("to_device_refdes") and not row.get("multi_ch_junction_id"):
+             continue
+
+         from_refdes = row["from_device_refdes"].strip()
+         from_channel_id = row["from_device_channel_id"].strip()
+         to_refdes = row["to_device_refdes"].strip()
+         to_channel_id = row["to_device_channel_id"].strip()
+
+         signals = chtype.signals(row.get("from_channel_type"))
+
+         # --- parse disconnect requirement ---
+         disconnect_chain = []
+         if row.get("disconnect_refdes_requirement"):
+             for token in row["disconnect_refdes_requirement"].split(";"):
+                 token = token.strip()
+                 if token:
+                     refdes, sides = token.split("(", 1)
+                     refdes = refdes.strip()
+                     sides = sides.rstrip(")")
+                     side_from, side_to = [s.strip() for s in sides.split(",")]
+                     disconnect_chain.append((refdes, side_from, side_to))
+         disconnect_set = {d[0] for d in disconnect_chain}
+
+         # --- nets list ---
+         nets = [n.strip() for n in row.get("chain_of_nets", "").split(";") if n.strip()]
+
+         # --- connection steps (disconnects + final device) ---
+         connection_steps = disconnect_chain + [(to_refdes, None, None)]
+
+         if len(connection_steps) != len(nets):
+             step_labels = [s[0] for s in connection_steps]
+             raise ValueError(
+                 f"While building circuits from channel_id '{from_channel_id}' of device '{from_refdes}' "
+                 f"to channel_id '{to_channel_id}' of device '{to_refdes}', "
+                 f"found {len(connection_steps)} connection steps: "
+                 f"{', '.join(step_labels) or 'none'}, "
+                 f"but expected {len(nets)} because there are {len(nets)} nets "
+                 f"from channel end '{from_channel_id}' to channel end '{to_channel_id}' "
+                 f"({'; '.join(nets) or 'no nets listed'}). "
+                 "Each net should correspond to one physical connection segment between devices or disconnects. "
+                 "Check the channel map for missing or unexpected info in cells, or whether the disconnect requirements match the disconnect map."
+             )
+
+         # --- iterate signals ---
+         for signal in signals:
+             current_refdes = from_refdes
+             current_side = None
+             current_channel_id = from_channel_id
+
+             for step, net in zip(connection_steps, nets):
+                 refdes, side_from, side_to = step
+
+                 if side_from is not None:
+                     # disconnect step
+                     disconnect_key = (
+                         from_refdes,
+                         from_channel_id,
+                         to_refdes,
+                         to_channel_id,
+                         refdes,
+                     )
+                     mapped_channel_id = disconnect_index[disconnect_key]
+
+                     if current_refdes in disconnect_set:
+                         left = resolve_disconnect_endpoint(
+                             current_refdes, current_side, signal, current_channel_id
+                         )
+                     else:
+                         left = resolve_device_endpoint(
+                             current_refdes, current_channel_id, signal
+                         )
+
+                     right = resolve_disconnect_endpoint(
+                         refdes, side_from, signal, mapped_channel_id
+                     )
+
+                     circuits_list.append(
+                         {
+                             "net": net,
+                             "circuit_id": circuit_id,
+                             "from_channel_type": row.get("from_channel_type"),
+                             "to_channel_type": row.get("to_channel_type"),
+                             "signal": signal,
+                             "net_from_refdes": left["refdes"],
+                             "net_from_channel_id": left["channel_id"],
+                             "net_from_connector_name": left["connector_name"],
+                             "net_from_cavity": left["cavity"],
+                             "net_to_refdes": right["refdes"],
+                             "net_to_channel_id": right["channel_id"],
+                             "net_to_connector_name": right["connector_name"],
+                             "net_to_cavity": right["cavity"],
+                             "from_side_device_refdes": from_refdes,
+                             "from_side_device_chname": from_channel_id,
+                             "to_side_device_refdes": to_refdes,
+                             "to_side_device_chname": to_channel_id,
+                         }
+                     )
+                     circuit_id += 1
+
+                     current_refdes = refdes
+                     current_side = side_to
+                     current_channel_id = mapped_channel_id
+
+                 else:
+                     # final device step
+                     if current_refdes in disconnect_set:
+                         left = resolve_disconnect_endpoint(
+                             current_refdes, current_side, signal, current_channel_id
+                         )
+                     else:
+                         left = resolve_device_endpoint(
+                             current_refdes, current_channel_id, signal
+                         )
+
+                     right = resolve_device_endpoint(refdes, to_channel_id, signal)
+
+                     circuits_list.append(
+                         {
+                             "net": net,
+                             "circuit_id": circuit_id,
+                             "signal": signal,
+                             "from_channel_type": row.get("from_channel_type"),
+                             "to_channel_type": row.get("to_channel_type"),
+                             "net_from_refdes": left["refdes"],
+                             "net_from_channel_id": left["channel_id"],
+                             "net_from_connector_name": left["connector_name"],
+                             "net_from_cavity": left["cavity"],
+                             "net_to_refdes": right["refdes"],
+                             "net_to_channel_id": right["channel_id"],
+                             "net_to_connector_name": right["connector_name"],
+                             "net_to_cavity": right["cavity"],
+                             "from_side_device_refdes": from_refdes,
+                             "from_side_device_chname": from_channel_id,
+                             "to_side_device_refdes": to_refdes,
+                             "to_side_device_chname": to_channel_id,
+                         }
+                     )
+                     circuit_id += 1
+
+     # --- write circuits list ---
+     with open(fileio.path("circuits list"), "w", newline="", encoding="utf-8") as f:
+         writer = csv.DictWriter(f, fieldnames=COLUMNS, delimiter="\t")
+         writer.writeheader()
+         writer.writerows(circuits_list)
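
A minimal driving sketch for circuits_list above, assuming the project's fileio lookups ("channel map", "disconnect map", "circuits list") resolve to TSV files in the current working project, as the code implies; the row values in the comment are hypothetical:

    from harnice.lists import circuits_list

    # A channel-map row is expected to carry, for example:
    #   from_device_refdes=J1, from_device_channel_id=CH1,
    #   to_device_refdes=ECU1, to_device_channel_id=CH4,
    #   from_channel_type=<channel type>, chain_of_nets="NET1; NET2",
    #   disconnect_refdes_requirement="X1(A,B)"
    # with one net per physical segment (J1 -> X1, X1 -> ECU1).
    circuits_list.new()  # rebuilds the circuits list TSV from the channel and disconnect maps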
harnice/lists/disconnect_map.py
@@ -0,0 +1,237 @@
+ import os
+ import csv
+ from harnice import fileio
+
+ COLUMNS = [
+     "A-side_device_refdes", #documentation needed
+     "A-side_device_channel_id", #documentation needed
+     "A-side_device_channel_type", #documentation needed
+     "B-side_device_refdes", #documentation needed
+     "B-side_device_channel_id", #documentation needed
+     "B-side_device_channel_type", #documentation needed
+     "disconnect_refdes", #documentation needed
+     "disconnect_channel_id", #documentation needed
+     "A-port_channel_type", #documentation needed
+     "B-port_channel_type", #documentation needed
+     "manual_map_channel_python_equiv", #documentation needed
+ ]
+
+
+ def new():
+     disconnect_map_rows = []
+
+     for channel in fileio.read_tsv("channel map"):
+         raw = (channel.get("disconnect_refdes_requirement") or "").strip()
+         if not raw:
+             continue
+
+         # split on semicolon -> one row per disconnect_refdes requirement
+         disconnects = [item.strip() for item in raw.split(";") if item.strip()]
+
+         for requirement in disconnects:
+             # requirement looks like "X1(A,B)" or "X2(B,A)"
+             refdes, ports = requirement.split("(")
+             ports = ports.rstrip(")")
+             first_port, second_port = [p.strip() for p in ports.split(",")]
+
+             # orientation: (A,B) means from_device is A-side, (B,A) means from_device is B-side
+             if (first_port, second_port) == ("A", "B"):
+                 a_refdes = channel.get("from_device_refdes", "")
+                 a_chan_id = channel.get("from_device_channel_id", "")
+                 a_chan_type_id = channel.get("from_channel_type", "")
+                 b_refdes = channel.get("to_device_refdes", "")
+                 b_chan_id = channel.get("to_device_channel_id", "")
+                 b_chan_type_id = channel.get("to_channel_type", "")
+             elif (first_port, second_port) == ("B", "A"):
+                 b_refdes = channel.get("from_device_refdes", "")
+                 b_chan_id = channel.get("from_device_channel_id", "")
+                 b_chan_type_id = channel.get("from_channel_type", "")
+                 a_refdes = channel.get("to_device_refdes", "")
+                 a_chan_id = channel.get("to_device_channel_id", "")
+                 a_chan_type_id = channel.get("to_channel_type", "")
+             else:
+                 raise ValueError(f"Unexpected port order: {requirement}")
+
+             disconnect_map_rows.append(
+                 {
+                     "A-side_device_refdes": a_refdes,
+                     "A-side_device_channel_id": a_chan_id,
+                     "A-side_device_channel_type": a_chan_type_id,
+                     "B-side_device_refdes": b_refdes,
+                     "B-side_device_channel_id": b_chan_id,
+                     "B-side_device_channel_type": b_chan_type_id,
+                     "disconnect_refdes": refdes.strip(),
+                 }
+             )
+
+     for item in fileio.read_tsv("bom"):
+         if item.get("disconnect"):
+             disconnect_signals_list_path = os.path.join(
+                 fileio.dirpath("instance_data"),
+                 "disconnect",
+                 item.get("device_refdes"),
+                 f"{item.get('device_refdes')}-signals_list.tsv",
+             )
+
+             available_disconnect_channels = set()
+             for signal in fileio.read_tsv(disconnect_signals_list_path):
+                 if signal.get("channel_id") in available_disconnect_channels:
+                     continue
+                 available_disconnect_channels.add(signal.get("channel_id"))
+
+                 disconnect_map_rows.append(
+                     {
+                         "disconnect_refdes": item.get("device_refdes"),
+                         "disconnect_channel_id": signal.get("channel_id"),
+                         "A-port_channel_type": signal.get("A_channel_type"),
+                         "B-port_channel_type": signal.get("B_channel_type"),
+                     }
+                 )
+
+     with open(fileio.path("disconnect map"), "w", newline="", encoding="utf-8") as f:
+         writer = csv.DictWriter(f, fieldnames=COLUMNS, delimiter="\t")
+         writer.writeheader()
+         writer.writerows(disconnect_map_rows)
+
+     # initialize mapped disconnect channels set (empty TSV)
+     with open(
+         fileio.path("mapped disconnects set"), "w", newline="", encoding="utf-8"
+     ) as f:
+         pass
+     with open(
+         fileio.path("mapped A-side channels through disconnects set"),
+         "w",
+         newline="",
+         encoding="utf-8",
+     ) as f:
+         pass
+
+
+ def assign(a_side_key, disconnect_key):
+     # a_side_key is the (device refdes, channel_id) that is on the A-side of the disconnect
+     channels = fileio.read_tsv("disconnect map")
+     if channel_is_already_assigned_through_disconnect(a_side_key, disconnect_key[0]):
+         raise ValueError(
+             f"channel {a_side_key} is already assigned through disconnect {disconnect_key[0]}"
+         )
+
+     if disconnect_is_already_assigned(disconnect_key):
+         raise ValueError(f"disconnect {disconnect_key} already assigned")
+
+     # Find the disconnect row we want to merge
+     disconnect_info = None
+     for row in channels:
+         if (
+             row.get("disconnect_refdes") == disconnect_key[0]
+             and row.get("disconnect_channel_id") == disconnect_key[1]
+             and row.get("A-side_device_refdes") in [None, ""]
+         ):
+             disconnect_info = row
+             break
+
+     updated_channels = []
+     for row in channels:
+         if (
+             row.get("A-side_device_refdes") == a_side_key[0]
+             and row.get("A-side_device_channel_id") == a_side_key[1]
+             and row.get("disconnect_refdes") == disconnect_key[0]
+         ):
+             row["disconnect_channel_id"] = disconnect_key[1]
+             row["A-port_channel_type"] = disconnect_info.get("A-port_channel_type", "")
+             row["B-port_channel_type"] = disconnect_info.get("B-port_channel_type", "")
+             row["manual_map_channel_python_equiv"] = (
+                 f"disconnect_map.assign({a_side_key}, {disconnect_key})"
+             )
+
+         elif (
+             row.get("disconnect_refdes") == disconnect_key[0]
+             and row.get("disconnect_channel_id") == disconnect_key[1]
+             and row.get("A-side_device_refdes") in [None, ""]
+         ):
+             continue
+
+         updated_channels.append(row)
+
+     already_assigned_channels_through_disconnects_set_append(
+         a_side_key, disconnect_key[0]
+     )
+     already_assigned_disconnects_set_append(disconnect_key)
+
+     with open(fileio.path("disconnect map"), "w", newline="", encoding="utf-8") as f:
+         writer = csv.DictWriter(f, fieldnames=COLUMNS, delimiter="\t")
+         writer.writeheader()
+         writer.writerows(updated_channels)
+
+
+ def already_assigned_channels_through_disconnects_set_append(key, disconnect_refdes):
+     item = f"{key}:{disconnect_refdes}"
+     items = set(already_assigned_channels_through_disconnects_set())
+     if item in items:
+         raise ValueError(
+             f"channel {key} through disconnect {disconnect_refdes} already assigned"
+         )
+     with open(
+         fileio.path("mapped A-side channels through disconnects set"),
+         "a",
+         newline="",
+         encoding="utf-8",
+     ) as f:
+         writer = csv.writer(f, delimiter="\t")
+         writer.writerow([item])
+
+
+ def already_assigned_disconnects_set_append(key):
+     items = set(already_assigned_disconnects_set())
+     if str(key) in items:
+         raise ValueError(f"disconnect {key} already assigned to a channel")
+     items.add(str(key))
+     with open(
+         fileio.path("mapped disconnects set"), "w", newline="", encoding="utf-8"
+     ) as f:
+         writer = csv.writer(f, delimiter="\t")
+         for item in sorted(items):
+             writer.writerow([item])
+
+
+ def already_assigned_channels_through_disconnects_set():
+     items = []
+     with open(
+         fileio.path("mapped A-side channels through disconnects set"),
+         newline="",
+         encoding="utf-8",
+     ) as f:
+         reader = csv.reader(f, delimiter="\t")
+         for row in reader:
+             if row and row[0].strip():  # skip blank lines
+                 items.append(row[0].strip())
+     return items
+
+
+ def already_assigned_disconnects_set():
+     items = []
+     with open(fileio.path("mapped disconnects set"), newline="", encoding="utf-8") as f:
+         reader = csv.reader(f, delimiter="\t")
+         for row in reader:
+             if row and row[0].strip():
+                 items.append(row[0].strip())
+     return items
+
+
+ def channel_is_already_assigned_through_disconnect(key, disconnect_refdes):
+     if (
+         f"{str(key)}:{disconnect_refdes}"
+         in already_assigned_channels_through_disconnects_set()
+     ):
+         return True
+     else:
+         return False
+
+
+ def disconnect_is_already_assigned(key):
+     if str(key) in already_assigned_disconnects_set():
+         return True
+     else:
+         return False
+
+
+ def ensure_requirements_met():
+     for row in fileio.read_tsv("disconnect map"):
+         if row.get("A-side_device_refdes") not in [None, ""] and row.get("disconnect_channel_id") in [None, ""]:
+             raise ValueError(
+                 f"Channel '{row.get('A-side_device_refdes')}.{row.get('A-side_device_channel_id')}' "
+                 f"to '{row.get('B-side_device_refdes')}.{row.get('B-side_device_channel_id')}' "
+                 f"could not find a compatible disconnect channel through '{row.get('disconnect_refdes')}'"
+             )
harnice/lists/formboard_graph.py
@@ -0,0 +1,63 @@
+ import csv
+ import os
+ from harnice import fileio
+
+ COLUMNS = [
+     "segment_id", #documentation needed
+     "node_at_end_a", #documentation needed
+     "node_at_end_b", #documentation needed
+     "length", #documentation needed
+     "angle", #documentation needed
+     "diameter", #documentation needed
+ ]
+
+
+ def new():
+     with open(
+         fileio.path("formboard graph definition"),
+         "w",
+         newline="",
+         encoding="utf-8",
+     ) as f:
+         writer = csv.DictWriter(
+             f, fieldnames=COLUMNS, delimiter="\t", lineterminator="\n"
+         )
+         writer.writeheader()
+
+
+ def append(segment_id, segment_data):
+     if not segment_id:
+         raise ValueError(
+             "Argument 'segment_id' is blank and required to identify a unique segment"
+         )
+
+     segment_data["segment_id"] = segment_id
+
+     # Prevent duplicates
+     if any(
+         row.get("segment_id") == segment_id
+         for row in fileio.read_tsv("formboard graph definition")
+     ):
+         return True
+
+     # Ensure the file exists
+     path = fileio.path("formboard graph definition")
+     if not os.path.exists(path):
+         new()
+
+     # Append safely
+     with open(path, "a+", newline="", encoding="utf-8") as f:
+         # ---- Ensure file ends with a newline before writing ----
+         f.seek(0, os.SEEK_END)
+         if f.tell() > 0:  # file is non-empty
+             f.seek(f.tell() - 1)
+             if f.read(1) != "\n":
+                 f.write("\n")
+         # --------------------------------------------------------
+
+         writer = csv.DictWriter(
+             f, fieldnames=COLUMNS, delimiter="\t", lineterminator="\n"
+         )
+         writer.writerow({key: segment_data.get(key, "") for key in COLUMNS})
+
+     return False
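
A short usage sketch for the segment API above; segment and node names are hypothetical, and values are passed as strings since the writer emits them verbatim into the TSV:

    from harnice.lists import formboard_graph

    formboard_graph.new()  # write a header-only formboard graph definition TSV
    duplicate = formboard_graph.append(
        "SEG1",
        {"node_at_end_a": "N1", "node_at_end_b": "N2", "length": "120", "angle": "45", "diameter": "6"},
    )
    # append() returns True when segment_id already exists (nothing is written),
    # and False after a new row has been appended.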