sinter 1.15.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of sinter might be problematic; see the registry's advisory listing for more details.

Files changed (62)
  1. sinter/__init__.py +47 -0
  2. sinter/_collection/__init__.py +10 -0
  3. sinter/_collection/_collection.py +480 -0
  4. sinter/_collection/_collection_manager.py +581 -0
  5. sinter/_collection/_collection_manager_test.py +287 -0
  6. sinter/_collection/_collection_test.py +317 -0
  7. sinter/_collection/_collection_worker_loop.py +35 -0
  8. sinter/_collection/_collection_worker_state.py +259 -0
  9. sinter/_collection/_collection_worker_test.py +222 -0
  10. sinter/_collection/_mux_sampler.py +56 -0
  11. sinter/_collection/_printer.py +65 -0
  12. sinter/_collection/_sampler_ramp_throttled.py +66 -0
  13. sinter/_collection/_sampler_ramp_throttled_test.py +144 -0
  14. sinter/_command/__init__.py +0 -0
  15. sinter/_command/_main.py +39 -0
  16. sinter/_command/_main_collect.py +350 -0
  17. sinter/_command/_main_collect_test.py +482 -0
  18. sinter/_command/_main_combine.py +84 -0
  19. sinter/_command/_main_combine_test.py +153 -0
  20. sinter/_command/_main_plot.py +817 -0
  21. sinter/_command/_main_plot_test.py +445 -0
  22. sinter/_command/_main_predict.py +75 -0
  23. sinter/_command/_main_predict_test.py +36 -0
  24. sinter/_data/__init__.py +20 -0
  25. sinter/_data/_anon_task_stats.py +89 -0
  26. sinter/_data/_anon_task_stats_test.py +35 -0
  27. sinter/_data/_collection_options.py +106 -0
  28. sinter/_data/_collection_options_test.py +24 -0
  29. sinter/_data/_csv_out.py +74 -0
  30. sinter/_data/_existing_data.py +173 -0
  31. sinter/_data/_existing_data_test.py +41 -0
  32. sinter/_data/_task.py +311 -0
  33. sinter/_data/_task_stats.py +244 -0
  34. sinter/_data/_task_stats_test.py +140 -0
  35. sinter/_data/_task_test.py +38 -0
  36. sinter/_decoding/__init__.py +16 -0
  37. sinter/_decoding/_decoding.py +419 -0
  38. sinter/_decoding/_decoding_all_built_in_decoders.py +25 -0
  39. sinter/_decoding/_decoding_decoder_class.py +161 -0
  40. sinter/_decoding/_decoding_fusion_blossom.py +193 -0
  41. sinter/_decoding/_decoding_mwpf.py +302 -0
  42. sinter/_decoding/_decoding_pymatching.py +81 -0
  43. sinter/_decoding/_decoding_test.py +480 -0
  44. sinter/_decoding/_decoding_vacuous.py +38 -0
  45. sinter/_decoding/_perfectionist_sampler.py +38 -0
  46. sinter/_decoding/_sampler.py +72 -0
  47. sinter/_decoding/_stim_then_decode_sampler.py +222 -0
  48. sinter/_decoding/_stim_then_decode_sampler_test.py +192 -0
  49. sinter/_plotting.py +619 -0
  50. sinter/_plotting_test.py +108 -0
  51. sinter/_predict.py +381 -0
  52. sinter/_predict_test.py +227 -0
  53. sinter/_probability_util.py +519 -0
  54. sinter/_probability_util_test.py +281 -0
  55. sinter-1.15.0.data/data/README.md +332 -0
  56. sinter-1.15.0.data/data/readme_example_plot.png +0 -0
  57. sinter-1.15.0.data/data/requirements.txt +4 -0
  58. sinter-1.15.0.dist-info/METADATA +354 -0
  59. sinter-1.15.0.dist-info/RECORD +62 -0
  60. sinter-1.15.0.dist-info/WHEEL +5 -0
  61. sinter-1.15.0.dist-info/entry_points.txt +2 -0
  62. sinter-1.15.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,482 @@
1
+ import collections
2
+ import pathlib
3
+ import tempfile
4
+
5
+ import stim
6
+
7
+ import pytest
8
+ import sinter
9
+ from sinter._command._main import main
10
+ from sinter._command._main_combine import ExistingData
11
+ from sinter._plotting import split_by
12
+
13
+
14
def test_split_by():
    """Runs are split wherever the predicate's truth value flips."""
    expected = [list('ab'), list('c'), list('def'), list('ccc'), list('ghi')]
    assert split_by('abcdefcccghi', lambda e: e == 'c') == expected
16
+
17
+
18
def test_main_collect():
    """End-to-end `sinter collect`: gathers stats, then resuming is a no-op."""
    with tempfile.TemporaryDirectory() as tmp_dir:
        tmp_dir = pathlib.Path(tmp_dir)
        circuit_paths = []
        for distance in [3, 5, 7]:
            circuit = stim.Circuit.generated(
                'repetition_code:memory',
                rounds=3,
                distance=distance,
                after_clifford_depolarization=0.02)
            circuit_path = tmp_dir / f'{distance}.stim'
            circuit_path.write_text(f'{circuit}\n')
            circuit_paths.append(str(circuit_path))

        # Arguments shared by all three invocations below.
        common_args = [
            "collect",
            "--circuits",
            *circuit_paths,
            "--max_shots",
            "1000",
            "--max_errors",
            "10",
            "--decoders",
            "pymatching",
            "--processes",
            "4",
            "--quiet",
        ]

        # Collects requested stats.
        main(command_line_args=[
            *common_args,
            "--save_resume_filepath",
            str(tmp_dir / "out.csv"),
        ])
        data = ExistingData.from_file(tmp_dir / "out.csv").data
        assert len(data) == 3
        for stats in data.values():
            assert stats.discards == 0
            assert stats.errors <= 50
            assert stats.shots >= 1000

        # No more work when existing stats at merge location are sufficient.
        contents1 = (tmp_dir / "out.csv").read_text()
        main(command_line_args=[
            *common_args,
            "--save_resume_filepath",
            str(tmp_dir / "out.csv"),
        ])
        contents2 = (tmp_dir / "out.csv").read_text()
        assert contents1 == contents2

        # No more work when existing work is sufficient.
        main(command_line_args=[
            *common_args,
            "--existing_data_filepaths",
            str(tmp_dir / "out.csv"),
            "--save_resume_filepath",
            str(tmp_dir / "out2.csv"),
        ])
        data2 = ExistingData.from_file(tmp_dir / "out2.csv").data
        assert len(data2) == 0
104
+
105
+
106
class AlternatingPredictionsDecoder(sinter.Decoder):
    """Test decoder that predicts an observable flip on every third shot.

    Used below to verify that custom decoders can be registered via the
    `--custom_decoders_module_function` command line flag.
    """

    def decode_via_files(self,
                         *,
                         num_shots: int,
                         num_dets: int,
                         num_obs: int,
                         dem_path: pathlib.Path,
                         dets_b8_in_path: pathlib.Path,
                         obs_predictions_b8_out_path: pathlib.Path,
                         tmp_dir: pathlib.Path,
                         ) -> None:
        # One prediction record per shot, packed 8 observables per byte.
        bytes_per_shot = (num_obs + 7) // 8
        predictions = b''.join(
            int(shot_index % 3 == 0).to_bytes(length=bytes_per_shot, byteorder='little')
            for shot_index in range(num_shots)
        )
        obs_predictions_b8_out_path.write_bytes(predictions)
121
+
122
+
123
def _make_custom_decoders():
    """Factory referenced by `--custom_decoders_module_function` in tests below."""
    decoders = {'alternate': AlternatingPredictionsDecoder()}
    return decoders
125
+
126
+
127
def test_main_collect_with_custom_decoder():
    """Custom decoders resolve by name; unrecognized decoder names are rejected."""
    with tempfile.TemporaryDirectory() as d:
        d = pathlib.Path(d)
        # (Plain string literal; this was previously an f-string with no placeholders.)
        with open(d / 'tmp.stim', 'w') as f:
            print("""
                M(0.1) 0
                DETECTOR rec[-1]
                OBSERVABLE_INCLUDE(0) rec[-1]
            """, file=f)

        # Arguments shared by both invocations; only the decoder name differs.
        base_args = [
            "collect",
            "--circuits",
            str(d / "tmp.stim"),
            "--max_shots",
            "1000",
            "--custom_decoders_module_function",
            "sinter._command._main_collect_test:_make_custom_decoders",
            "--processes",
            "2",
            "--quiet",
            "--save_resume_filepath",
            str(d / "out.csv"),
        ]

        # A decoder name that isn't built in and isn't in the custom dict fails.
        with pytest.raises(ValueError, match="Not a recognized decoder"):
            main(command_line_args=[*base_args, "--decoders", "NOTEXIST"])

        # Collects requested stats using the custom decoder.
        main(command_line_args=[*base_args, "--decoders", "alternate"])
        data = ExistingData.from_file(d / "out.csv").data
        assert len(data) == 1
        v, = data.values()
        assert v.shots == 1000
        # The decoder predicts a flip on 1/3 of shots while the circuit flips
        # the observable ~10% of the time, so many but not all shots mismatch.
        assert 50 < v.errors < 500
        assert v.discards == 0
178
+
179
+
180
def test_main_collect_post_select_observables():
    """Shots where a postselected observable fires are discarded, not decoded."""
    with tempfile.TemporaryDirectory() as d:
        d = pathlib.Path(d)
        # (Plain string literal; this was previously an f-string with no placeholders.)
        with open(d / 'circuit.stim', 'w') as f:
            print("""
                M(0.125) 0 1
                OBSERVABLE_INCLUDE(0) rec[-1]
                OBSERVABLE_INCLUDE(11) rec[-1] rec[-2]
            """, file=f)

        # Collects requested stats, postselecting on observable index 11.
        main(command_line_args=[
            "collect",
            "--postselected_observables_predicate",
            "index == 11",
            "--circuits",
            str(d / "circuit.stim"),
            "--max_shots",
            "10000",
            "--max_errors",
            "10000",
            "--decoders",
            "pymatching",
            "--processes",
            "4",
            "--quiet",
            "--save_resume_filepath",
            str(d / "out.csv"),
        ])
        data = sinter.stats_from_csv_files(d / "out.csv")
        assert len(data) == 1
        stats, = data
        assert stats.shots == 10000
        # With each measurement flipping independently at p=0.125, observable 11
        # (the parity of both) fires at 2*0.125*0.875 = 0.21875 of shots.
        assert 0.21875 - 0.1 < stats.discards / stats.shots < 0.21875 + 0.1
        # Expected residual error rate constant derives from 0.125**2 = 0.015625.
        assert 0.015625 - 0.01 <= stats.errors / (stats.shots - stats.discards) <= 0.015625 + 0.02
215
+
216
+
217
def test_main_collect_comma_separated_key_values():
    """Metadata is parsed out of `k=v,...` style file names via --metadata_func."""
    with tempfile.TemporaryDirectory() as tmp_dir:
        tmp_dir = pathlib.Path(tmp_dir)
        circuit_paths = []
        for distance in [3, 5, 7]:
            circuit = stim.Circuit.generated(
                'repetition_code:memory',
                rounds=3,
                distance=distance,
                after_clifford_depolarization=0.02)
            circuit_path = tmp_dir / f'd={distance},p=0.02,r=3.0,c=rep_code.stim'
            circuit_paths.append(str(circuit_path))
            circuit_path.write_text(f'{circuit}\n')

        # Collects requested stats.
        main(command_line_args=[
            "collect",
            "--circuits",
            *circuit_paths,
            "--max_shots",
            "1000",
            "--metadata_func",
            "sinter.comma_separated_key_values(path)",
            "--max_errors",
            "10",
            "--decoders",
            "pymatching",
            "--processes",
            "4",
            "--quiet",
            "--save_resume_filepath",
            str(tmp_dir / "out.csv"),
        ])
        collected = sinter.stats_from_csv_files(tmp_dir / "out.csv")
        seen_metadata = frozenset(repr(e.json_metadata) for e in collected)
        assert seen_metadata == frozenset([
            "{'c': 'rep_code', 'd': 3, 'p': 0.02, 'r': 3.0}",
            "{'c': 'rep_code', 'd': 5, 'p': 0.02, 'r': 3.0}",
            "{'c': 'rep_code', 'd': 7, 'p': 0.02, 'r': 3.0}",
        ])
258
+
259
+
260
def test_main_collect_count_observable_error_combos():
    """`--count_observable_error_combos` records which observable masks were mispredicted."""
    with tempfile.TemporaryDirectory() as tmp_dir:
        tmp_dir = pathlib.Path(tmp_dir)
        circuit_path = tmp_dir / 'a=3.stim'
        with open(circuit_path, 'w') as f:
            print("""
                X_ERROR(0.1) 0
                X_ERROR(0.2) 1
                M 0 1
                OBSERVABLE_INCLUDE(0) rec[-1]
                OBSERVABLE_INCLUDE(1) rec[-2]
            """, file=f)

        # Collects requested stats.
        main(command_line_args=[
            "collect",
            "--circuits",
            str(circuit_path),
            "--max_shots",
            "100000",
            "--metadata_func",
            "sinter.comma_separated_key_values(path)",
            "--max_errors",
            "10000",
            "--decoders",
            "pymatching",
            "--count_observable_error_combos",
            "--processes",
            "4",
            "--quiet",
            "--save_resume_filepath",
            str(tmp_dir / "out.csv"),
        ])
        collected = sinter.stats_from_csv_files(tmp_dir / "out.csv")
        assert len(collected) == 1
        item, = collected
        assert set(item.custom_counts.keys()) == {"obs_mistake_mask=E_", "obs_mistake_mask=_E", "obs_mistake_mask=EE"}
        # Independent 10% and 20% flips determine the expected mask frequencies.
        for mask, expected_rate in [('_E', 0.1 * 0.8), ('E_', 0.9 * 0.2), ('EE', 0.1 * 0.2)]:
            observed_rate = item.custom_counts[f'obs_mistake_mask={mask}'] / item.shots
            assert expected_rate - 0.01 < observed_rate < expected_rate + 0.01
299
+
300
+
301
def test_main_collect_count_detection_events():
    """`--count_detection_events` tracks detectors checked and detection events seen."""
    with tempfile.TemporaryDirectory() as tmp_dir:
        tmp_dir = pathlib.Path(tmp_dir)
        circuit_path = tmp_dir / 'a=3.stim'
        with open(circuit_path, 'w') as f:
            print("""
                X_ERROR(0.1) 0
                X_ERROR(0.2) 1
                M 0 1
                OBSERVABLE_INCLUDE(0) rec[-1]
                OBSERVABLE_INCLUDE(1) rec[-2]
                DETECTOR rec[-2]
            """, file=f)

        # Collects requested stats.
        main(command_line_args=[
            "collect",
            "--circuits",
            str(circuit_path),
            "--max_shots",
            "100000",
            "--metadata_func",
            "sinter.comma_separated_key_values(path)",
            "--decoders",
            "pymatching",
            "--count_detection_events",
            "--processes",
            "4",
            "--quiet",
            "--save_resume_filepath",
            str(tmp_dir / "out.csv"),
        ])
        collected = sinter.stats_from_csv_files(tmp_dir / "out.csv")
        assert len(collected) == 1
        item, = collected
        assert set(item.custom_counts.keys()) == {"detection_events", "detectors_checked"}
        # One detector per shot, firing ~10% of the time (the X_ERROR(0.1) qubit).
        assert item.custom_counts['detectors_checked'] == 100000
        assert 100000 * 0.1 * 0.5 < item.custom_counts['detection_events'] < 100000 * 0.1 * 1.5
338
+
339
+
340
def test_cpu_pin():
    """`--allowed_cpu_affinity_ids` accepts ints, ranges, and lists without breaking collection."""
    with tempfile.TemporaryDirectory() as tmp_dir:
        tmp_dir = pathlib.Path(tmp_dir)
        circuit_path = tmp_dir / 'a=3.stim'
        with open(circuit_path, 'w') as f:
            print("""
                X_ERROR(0.1) 0
                X_ERROR(0.2) 1
                M 0 1
                OBSERVABLE_INCLUDE(0) rec[-1]
                OBSERVABLE_INCLUDE(1) rec[-2]
                DETECTOR rec[-2]
            """, file=f)

        # Collects requested stats while restricting worker CPU affinity.
        main(command_line_args=[
            "collect",
            "--circuits",
            str(circuit_path),
            "--max_shots",
            "100000",
            "--metadata_func",
            "auto",
            "--decoders",
            "pymatching",
            "--count_detection_events",
            "--processes",
            "4",
            "--quiet",
            "--save_resume_filepath",
            str(tmp_dir / "out.csv"),
            "--allowed_cpu_affinity_ids",
            "0",
            "range(1, 9, 2)",
            "[4, 20]",
        ])
        collected = sinter.stats_from_csv_files(tmp_dir / "out.csv")
        assert len(collected) == 1
        item, = collected
        assert set(item.custom_counts.keys()) == {"detection_events", "detectors_checked"}
        assert item.custom_counts['detectors_checked'] == 100000
        assert 100000 * 0.1 * 0.5 < item.custom_counts['detection_events'] < 100000 * 0.1 * 1.5
381
+
382
+
383
def test_custom_error_stopping_count():
    """`--custom_error_count_key` stops collection based on a custom count, not decoder errors."""
    with tempfile.TemporaryDirectory() as tmp_dir:
        tmp_dir = pathlib.Path(tmp_dir)
        circuit_path = tmp_dir / 'a=3.stim'
        stim.Circuit.generated(
            'repetition_code:memory',
            rounds=25,
            distance=5,
            after_clifford_depolarization=0.1,
        ).to_file(circuit_path)

        # Collects requested stats, counting detection events as "errors".
        main(command_line_args=[
            "collect",
            "--circuits",
            str(circuit_path),
            "--max_shots",
            "100_000_000_000_000",
            "--quiet",
            "--max_errors",
            "1_000_000",
            "--metadata_func",
            "auto",
            "--decoders",
            "vacuous",
            "--count_detection_events",
            "--processes",
            "4",
            "--save_resume_filepath",
            str(tmp_dir / "out.csv"),
            "--custom_error_count_key",
            "detection_events",
        ])
        collected = sinter.stats_from_csv_files(tmp_dir / "out.csv")
        assert len(collected) == 1
        item, = collected
        # Would normally need >1_000_000 shots to see 1_000_000 errors.
        assert item.shots < 100_000
        assert item.errors < 90_000
        assert item.custom_counts['detection_events'] > 1_000_000
422
+
423
+
424
def test_auto_processes():
    """`--processes auto` picks a worker count automatically and still collects."""
    with tempfile.TemporaryDirectory() as tmp_dir:
        tmp_dir = pathlib.Path(tmp_dir)
        circuit_path = tmp_dir / 'a=3.stim'
        stim.Circuit.generated(
            'repetition_code:memory',
            rounds=5,
            distance=3,
            after_clifford_depolarization=0.1,
        ).to_file(circuit_path)

        # Collects requested stats.
        main(command_line_args=[
            "collect",
            "--circuits",
            str(circuit_path),
            "--max_shots",
            "200",
            "--quiet",
            "--metadata_func",
            "auto",
            "--decoders",
            "vacuous",
            "--processes",
            "auto",
            "--save_resume_filepath",
            str(tmp_dir / "out.csv"),
        ])
        collected = sinter.stats_from_csv_files(tmp_dir / "out.csv")
        assert len(collected) == 1
453
+
454
+
455
def test_implicit_auto_processes():
    """Omitting `--processes` entirely still works (implicit auto)."""
    with tempfile.TemporaryDirectory() as tmp_dir:
        tmp_dir = pathlib.Path(tmp_dir)
        circuit_path = tmp_dir / 'a=3.stim'
        stim.Circuit.generated(
            'repetition_code:memory',
            rounds=5,
            distance=3,
            after_clifford_depolarization=0.1,
        ).to_file(circuit_path)

        # Collects requested stats (no --processes flag given).
        main(command_line_args=[
            "collect",
            "--circuits",
            str(circuit_path),
            "--max_shots",
            "200",
            "--quiet",
            "--metadata_func",
            "auto",
            "--decoders",
            "perfectionist",
            "--save_resume_filepath",
            str(tmp_dir / "out.csv"),
        ])
        collected = sinter.stats_from_csv_files(tmp_dir / "out.csv")
        assert len(collected) == 1
        # The perfectionist decoder discards any shot with detection events.
        assert collected[0].discards > 0
@@ -0,0 +1,84 @@
1
+ import argparse
2
+ import collections
3
+ import json
4
+
5
+ import sys
6
+ from typing import List, Any
7
+
8
+ import sinter
9
+ from sinter._data import CSV_HEADER, ExistingData
10
+ from sinter._plotting import better_sorted_str_terms
11
+
12
+
13
def main_combine(*, command_line_args: List[str]):
    """Entry point for `sinter combine`.

    Reads statistics from the given CSV paths (or from stdin when no paths are
    given), merges rows describing the same task, optionally strips custom
    counts, sorts according to --order, and writes the combined CSV to stdout.

    Args:
        command_line_args: The command line arguments after the `combine`
            subcommand itself.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--order',
                        choices=('preserve', 'metadata', 'error'),
                        default='metadata',
                        help='Determines the order of output rows.\n'
                             ' metadata (default): sort ascending by metadata.'
                             ' preserve: match order of input rows.\n'
                             ' error: sort ascending by error rate')
    parser.add_argument('--strip_custom_counts',
                        help='Removes custom counts from the output.',
                        action='store_true')
    parser.add_argument('rest',
                        nargs=argparse.REMAINDER,
                        type=str,
                        help='Paths to CSV files containing sample statistics.')
    args = parser.parse_args(command_line_args)

    if args.rest:
        total = ExistingData()
        for path in args.rest:
            total += ExistingData.from_file(path)
    else:
        # No paths given: read CSV rows from stdin instead.
        total = ExistingData.from_file(sys.stdin)

    def _rebuild(task: sinter.TaskStats) -> sinter.TaskStats:
        # Rebuilds a stats row, either dropping its custom counts entirely or
        # re-sorting their keys into a human-friendly order. (Previously this
        # construction was duplicated across two nearly identical branches.)
        extra = {}
        if not args.strip_custom_counts:
            extra['custom_counts'] = collections.Counter(dict(sorted(
                task.custom_counts.items(), key=better_sorted_str_terms)))
        return sinter.TaskStats(
            strong_id=task.strong_id,
            decoder=task.decoder,
            json_metadata=task.json_metadata,
            shots=task.shots,
            errors=task.errors,
            discards=task.discards,
            seconds=task.seconds,
            **extra,
        )

    total = [_rebuild(task) for task in total.data.values()]

    if args.order == 'metadata':
        # Canonical JSON form gives a stable, human-sensible metadata ordering.
        output = sorted(total, key=lambda e: better_sorted_str_terms(json.dumps(e.json_metadata, separators=(',', ':'), sort_keys=True)))
    elif args.order == 'preserve':
        output = list(total)
    elif args.order == 'error':
        def err_rate_key(stats: sinter.TaskStats) -> Any:
            # Primary key: logical error rate per kept shot; tie-break on
            # discard rate. Guards against division by zero on empty rows.
            num_kept = stats.shots - stats.discards
            err_rate = 0 if num_kept == 0 else stats.errors / num_kept
            discard_rate = 0 if stats.shots == 0 else stats.discards / stats.shots
            return err_rate, discard_rate
        output = sorted(total, key=err_rate_key)
    else:
        raise NotImplementedError(f'order={args.order}')

    print(CSV_HEADER)
    for value in output:
        print(value.to_csv_line())