pygraph-sp 2026.3__cp312-none-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
graph_sp/__init__.py ADDED
@@ -0,0 +1,5 @@
1
+ from .graph_sp import *
2
+
3
+ __doc__ = graph_sp.__doc__
4
+ if hasattr(graph_sp, "__all__"):
5
+ __all__ = graph_sp.__all__
Binary file
@@ -0,0 +1,704 @@
1
+ Metadata-Version: 2.1
2
+ Name: pygraph-sp
3
+ Version: 2026.3
4
+ Summary: A pure Rust graph executor supporting implicit node connections, branching, and config sweeps
5
+ Keywords: graph,dag,execution,pipeline,workflow
6
+ Home-Page: https://github.com/briday1/graph-sp
7
+ Author: briday1 <your-email@example.com>
8
+ Author-email: briday1 <your-email@example.com>
9
+ License: MIT
10
+ Requires-Python: >=3.8
11
+ Description-Content-Type: text/markdown; charset=UTF-8; variant=GFM
12
+ Project-URL: Source Code, https://github.com/briday1/graph-sp
13
+
14
+ # graph-sp
15
+
16
+ graph-sp is a pure Rust grid/node graph executor and optimizer. The project focuses on representing directed dataflow graphs, computing port mappings by graph inspection, and executing nodes efficiently in-process with parallel CPU execution.
17
+
18
+ ## Core Features
19
+
20
+ - **Implicit Node Connections**: Nodes automatically connect based on execution order
21
+ - **Parallel Branching**: Create fan-out execution paths with `.branch()`
22
+ - **Configuration Variants**: Use `.variant()` to create parameter sweeps
23
+ - **DAG Analysis**: Automatic inspection and optimization of execution paths
24
+ - **Mermaid Visualization**: Generate diagrams with `.to_mermaid()`
25
+ - **In-process Execution**: Parallel execution using rayon
26
+
27
+ ## Installation
28
+
29
+ ### Rust
30
+
31
+ Add to your `Cargo.toml`:
32
+
33
+ ```toml
34
+ [dependencies]
35
+ graph-sp = "0.1.0"
36
+
37
+ # Optional: For radar signal processing examples with ndarray and FFT support
38
+ [features]
39
+ radar_examples = ["graph-sp/radar_examples"]
40
+ ```
41
+
42
+ For radar signal processing with ndarray and complex number support, enable the `radar_examples` feature.
43
+
44
+ ### Python
45
+
46
+ The library can also be used from Python via PyO3 bindings:
47
+
48
+ ```bash
49
+ pip install pygraph-sp
50
+ ```
51
+
52
+ Or build from source:
53
+
54
+ ```bash
55
+ pip install maturin
56
+ maturin build --release --features python
57
+ pip install target/wheels/pygraph_sp-*.whl
58
+ ```
59
+
60
+ ## Quick Start
61
+
62
+ ### Rust
63
+
64
+ #### Basic Sequential Pipeline
65
+
66
+ ```rust
67
+ use graph_sp::{Graph, GraphData};
68
+ use std::collections::HashMap;
69
+
70
+ fn data_source(_: &HashMap<String, GraphData>, _: &HashMap<String, GraphData>) -> HashMap<String, GraphData> {
71
+ let mut result = HashMap::new();
72
+ result.insert("value".to_string(), GraphData::int(42));
73
+ result
74
+ }
75
+
76
+ fn multiply(inputs: &HashMap<String, GraphData>, _: &HashMap<String, GraphData>) -> HashMap<String, GraphData> {
77
+ let mut result = HashMap::new();
78
+ if let Some(val) = inputs.get("x").and_then(|d| d.as_int()) {
79
+ result.insert("doubled".to_string(), GraphData::int(val * 2));
80
+ }
81
+ result
82
+ }
83
+
84
+ fn main() {
85
+ let mut graph = Graph::new();
86
+
87
+ // Add source node
88
+ graph.add(data_source, Some("DataSource"), None, Some(vec![("value", "data")]));
89
+
90
+ // Add processing node
91
+ graph.add(multiply, Some("Multiply"), Some(vec![("data", "x")]), Some(vec![("doubled", "result")]));
92
+
93
+ let dag = graph.build();
94
+ let context = dag.execute(false, None);
95
+
96
+ println!("Result: {}", context.get("result").unwrap().to_string_repr());
97
+ }
98
+ ```
99
+
100
+ ### Python
101
+
102
+ #### Basic Sequential Pipeline
103
+
104
+ ```python
105
+ import graph_sp
106
+
107
+ def data_source(inputs, variant_params):
108
+ return {"value": "42"}
109
+
110
+ def multiply(inputs, variant_params):
111
+ val = int(inputs.get("x", "0"))
112
+ return {"doubled": str(val * 2)}
113
+
114
+ # Create graph
115
+ graph = graph_sp.PyGraph()
116
+
117
+ # Add source node
118
+ graph.add(
119
+ function=data_source,
120
+ label="DataSource",
121
+ inputs=None,
122
+ outputs=[("value", "data")]
123
+ )
124
+
125
+ # Add processing node
126
+ graph.add(
127
+ function=multiply,
128
+ label="Multiply",
129
+ inputs=[("data", "x")],
130
+ outputs=[("doubled", "result")]
131
+ )
132
+
133
+ # Build and execute
134
+ dag = graph.build()
135
+ context = dag.execute()
136
+
137
+ print(f"Result: {context['result']}")
138
+ ```
139
+
140
+ **Mermaid visualization output:**
141
+
142
+ ```mermaid
143
+ graph TD
144
+ 0["DataSource"]
145
+ 1["Multiply"]
146
+ 0 -->|data → x| 1
147
+ ```
148
+
149
+ ### Parallel Branching (Fan-Out)
150
+
151
+ ```rust
152
+ let mut graph = Graph::new();
153
+
154
+ // Source node
155
+ graph.add(source_fn, Some("Source"), None, Some(vec![("data", "data")]));
156
+
157
+ // Create parallel branches
158
+ graph.branch();
159
+ graph.add(stats_fn, Some("Statistics"), Some(vec![("data", "input")]), Some(vec![("mean", "stats")]));
160
+
161
+ graph.branch();
162
+ graph.add(model_fn, Some("MLModel"), Some(vec![("data", "input")]), Some(vec![("prediction", "model")]));
163
+
164
+ graph.branch();
165
+ graph.add(viz_fn, Some("Visualization"), Some(vec![("data", "input")]), Some(vec![("plot", "viz")]));
166
+
167
+ let dag = graph.build();
168
+ ```
169
+
170
+ **Mermaid visualization output:**
171
+
172
+ ```mermaid
173
+ graph TD
174
+ 0["Source"]
175
+ 1["Statistics"]
176
+ 2["MLModel"]
177
+ 3["Visualization"]
178
+ 0 -->|data → input| 1
179
+ 0 -->|data → input| 2
180
+ 0 -->|data → input| 3
181
+ style 1 fill:#e1f5ff
182
+ style 2 fill:#e1f5ff
183
+ style 3 fill:#e1f5ff
184
+ ```
185
+
186
+ **DAG Statistics:**
187
+ - Nodes: 4
188
+ - Depth: 2 levels
189
+ - Max Parallelism: 3 nodes (all branches execute in parallel)
190
+
191
+ ### Parameter Sweep with Variants
192
+
193
+ ```rust
194
+ use graph_sp::{Graph, Linspace};
195
+
196
+ let mut graph = Graph::new();
197
+
198
+ // Source node
199
+ graph.add(source_fn, Some("DataSource"), None, Some(vec![("value", "data")]));
200
+
201
+ // Create variants for different learning rates
202
+ let learning_rates = vec![0.001, 0.01, 0.1, 1.0];
203
+ graph.variant("learning_rate", learning_rates);
204
+ graph.add(scale_fn, Some("ScaleLR"), Some(vec![("data", "input")]), Some(vec![("scaled", "output")]));
205
+
206
+ let dag = graph.build();
207
+ ```
208
+
209
+ **Mermaid visualization output:**
210
+
211
+ ```mermaid
212
+ graph TD
213
+ 0["DataSource"]
214
+ 1["ScaleLR (v0)"]
215
+ 2["ScaleLR (v1)"]
216
+ 3["ScaleLR (v2)"]
217
+ 4["ScaleLR (v3)"]
218
+ 0 -->|data → input| 1
219
+ 0 -->|data → input| 2
220
+ 0 -->|data → input| 3
221
+ 0 -->|data → input| 4
222
+ style 1 fill:#e1f5ff
223
+ style 2 fill:#e1f5ff
224
+ style 3 fill:#e1f5ff
225
+ style 4 fill:#e1f5ff
226
+ style 1 fill:#ffe1e1
227
+ style 2 fill:#e1ffe1
228
+ style 3 fill:#ffe1ff
229
+ style 4 fill:#ffffe1
230
+ ```
231
+
232
+ **DAG Statistics:**
233
+ - Nodes: 5
234
+ - Depth: 2 levels
235
+ - Max Parallelism: 4 nodes
236
+ - Variants: 4 (all execute in parallel)
237
+
238
+ ## Radar Signal Processing Example
239
+
240
+ This example demonstrates a complete radar signal processing pipeline using GraphData with ndarray arrays and complex numbers. The pipeline implements:
241
+
242
+ 1. **LFM Pulse Generation** - Creates a Linear Frequency Modulation chirp signal
243
+ 2. **Pulse Stacking** - Accumulates multiple pulses with Doppler shifts
244
+ 3. **Range Compression** - FFT-based matched filtering
245
+ 4. **Doppler Compression** - Creates Range-Doppler map
246
+
247
+ ### Rust Implementation
248
+
249
+ ```rust
250
+ use graph_sp::{Graph, GraphData};
251
+ use ndarray::Array1;
252
+ use num_complex::Complex;
253
+ use std::collections::HashMap;
254
+
255
+ // LFM pulse generator node
256
+ fn lfm_generator(_inputs: &HashMap<String, GraphData>, params: &HashMap<String, GraphData>)
257
+ -> HashMap<String, GraphData> {
258
+ let num_samples = params.get("num_samples")
259
+ .and_then(|d| d.as_int())
260
+ .unwrap_or(256) as usize;
261
+
262
+ let bandwidth = params.get("bandwidth")
263
+ .and_then(|d| d.as_float())
264
+ .unwrap_or(100e6); // 100 MHz
265
+
266
+ let pulse_width = params.get("pulse_width")
267
+ .and_then(|d| d.as_float())
268
+ .unwrap_or(1e-6); // 1 microsecond
269
+
270
+ // Generate LFM chirp signal
271
+ let sample_rate = 100e6;
272
+ let chirp_rate = bandwidth / pulse_width;
273
+ let mut signal = Array1::<Complex<f64>>::zeros(num_samples);
274
+
275
+ // ... signal generation code ...
276
+
277
+ let mut output = HashMap::new();
278
+ output.insert("pulse".to_string(), GraphData::complex_array(signal));
279
+ output.insert("num_samples".to_string(), GraphData::int(num_samples as i64));
280
+ output
281
+ }
282
+
283
+ // Stack pulses node
284
+ fn stack_pulses(inputs: &HashMap<String, GraphData>, params: &HashMap<String, GraphData>)
285
+ -> HashMap<String, GraphData> {
286
+ let num_pulses = params.get("num_pulses")
287
+ .and_then(|d| d.as_int())
288
+ .unwrap_or(128) as usize;
289
+
290
+ // Get input pulse as ComplexArray
291
+ let pulse = inputs.get("pulse")
292
+ .and_then(|d| d.as_complex_array())
293
+ .unwrap().clone();
294
+
295
+ // Stack with Doppler shifts
296
+ // ... stacking logic ...
297
+
298
+ let mut output = HashMap::new();
299
+ output.insert("stacked".to_string(), GraphData::complex_array(stacked_data));
300
+ output.insert("num_pulses".to_string(), GraphData::int(num_pulses as i64));
301
+ output
302
+ }
303
+
304
+ fn main() {
305
+ let mut graph = Graph::new();
306
+
307
+ // Add LFM generator
308
+ graph.add(
309
+ lfm_generator,
310
+ Some("LFMGenerator"),
311
+ None,
312
+ Some(vec![("pulse", "lfm_pulse"), ("num_samples", "num_samples")])
313
+ );
314
+
315
+ // Add pulse stacking
316
+ graph.add(
317
+ stack_pulses,
318
+ Some("StackPulses"),
319
+ Some(vec![("lfm_pulse", "pulse")]),
320
+ Some(vec![("stacked", "stacked_data"), ("num_pulses", "num_pulses")])
321
+ );
322
+
323
+ // Add range compression
324
+ graph.add(
325
+ range_compress,
326
+ Some("RangeCompress"),
327
+ Some(vec![("stacked_data", "data"), ("lfm_pulse", "reference")]),
328
+ Some(vec![("compressed", "compressed_data")])
329
+ );
330
+
331
+ // Add Doppler compression
332
+ graph.add(
333
+ doppler_compress,
334
+ Some("DopplerCompress"),
335
+ Some(vec![
336
+ ("compressed_data", "data"),
337
+ ("num_pulses", "num_pulses"),
338
+ ("num_samples", "num_samples")
339
+ ]),
340
+ Some(vec![
341
+ ("range_doppler", "range_doppler_map"),
342
+ ("peak_value", "peak"),
343
+ ("peak_doppler_bin", "peak_doppler"),
344
+ ("peak_range_bin", "peak_range")
345
+ ])
346
+ );
347
+
348
+ let dag = graph.build();
349
+ let context = dag.execute(false, None);
350
+
351
+ // Display results
352
+ if let Some(peak) = context.get("peak").and_then(|d| d.as_float()) {
353
+ println!("Peak magnitude: {:.2}", peak);
354
+ }
355
+ if let Some(doppler) = context.get("peak_doppler").and_then(|d| d.as_int()) {
356
+ println!("Peak Doppler bin: {}", doppler);
357
+ }
358
+ if let Some(range) = context.get("peak_range").and_then(|d| d.as_int()) {
359
+ println!("Peak Range bin: {}", range);
360
+ }
361
+ }
362
+ ```
363
+
364
+ **Run the example:**
365
+
366
+ ```bash
367
+ cargo run --example radar_demo --features radar_examples
368
+ ```
369
+
370
+ **Mermaid visualization output:**
371
+
372
+ ```mermaid
373
+ graph TD
374
+ 0["LFMGenerator"]
375
+ 1["StackPulses"]
376
+ 2["RangeCompress"]
377
+ 3["DopplerCompress"]
378
+ 0 -->|lfm_pulse → pulse| 1
379
+ 1 -->|stacked_data → data| 2
380
+ 2 -->|compressed_data → data| 3
381
+ ```
382
+
383
+ **DAG Statistics:**
384
+ - Nodes: 4
385
+ - Depth: 4 levels
386
+ - Max Parallelism: 1 node
387
+
388
+ **Execution Output:**
389
+
390
+ ```
391
+ LFMGenerator: Generated 256 sample LFM pulse
392
+ StackPulses: Stacked 128 pulses with Doppler shifts
393
+ RangeCompress: Performed matched filtering on 32768 samples
394
+ DopplerCompress: Created Range-Doppler map of shape (128, 256)
395
+ Peak at Doppler bin 13, Range bin 255
396
+ Magnitude: 11974.31
397
+
398
+ Peak magnitude: 11974.31
399
+ Peak Doppler bin: 13
400
+ Peak Range bin: 255
401
+ ```
402
+
403
+ ### Python Implementation
404
+
405
+ ```python
406
+ import graph_sp
407
+ import numpy as np
408
+
409
+ def lfm_generator(inputs, variant_params):
410
+ """Generate LFM pulse with rectangular envelope."""
411
+ num_samples = 256
412
+ bandwidth = 100e6 # 100 MHz
413
+ pulse_width = 1e-6 # 1 microsecond
414
+ sample_rate = 100e6
415
+
416
+ # Generate LFM chirp
417
+ chirp_rate = bandwidth / pulse_width
418
+ signal = np.zeros(num_samples, dtype=complex)
419
+
420
+ # ... signal generation code ...
421
+
422
+ # Return numpy array directly (no conversion needed)
423
+ return {
424
+ "pulse": signal, # Can pass numpy arrays directly
425
+ "num_samples": num_samples
426
+ }
427
+
428
+ def stack_pulses(inputs, variant_params):
429
+ """Stack multiple pulses with Doppler shifts."""
430
+ num_pulses = 128
431
+
432
+ # Get pulse data directly as complex array (implicit handling)
433
+ pulse_data = inputs.get("pulse", [])
434
+ pulse = np.array(pulse_data, dtype=complex)
435
+
436
+ # Stack with Doppler shifts
437
+ # ... stacking logic ...
438
+
439
+ # Return numpy array directly (no conversion needed)
440
+ return {
441
+ "stacked": stacked, # Can pass numpy arrays directly
442
+ "num_pulses": num_pulses
443
+ }
444
+
445
+ # Create graph
446
+ graph = graph_sp.PyGraph()
447
+
448
+ # Add nodes
449
+ graph.add(
450
+ function=lfm_generator,
451
+ label="LFMGenerator",
452
+ inputs=None,
453
+ outputs=[("pulse", "lfm_pulse"), ("num_samples", "num_samples")]
454
+ )
455
+
456
+ graph.add(
457
+ function=stack_pulses,
458
+ label="StackPulses",
459
+ inputs=[("lfm_pulse", "pulse")],
460
+ outputs=[("stacked", "stacked_data"), ("num_pulses", "num_pulses")]
461
+ )
462
+
463
+ graph.add(
464
+ function=range_compress,
465
+ label="RangeCompress",
466
+ inputs=[("stacked_data", "data"), ("lfm_pulse", "reference")],
467
+ outputs=[("compressed", "compressed_data")]
468
+ )
469
+
470
+ graph.add(
471
+ function=doppler_compress,
472
+ label="DopplerCompress",
473
+ inputs=[
474
+ ("compressed_data", "data"),
475
+ ("num_pulses", "num_pulses"),
476
+ ("num_samples", "num_samples")
477
+ ],
478
+ outputs=[
479
+ ("range_doppler", "range_doppler_map"),
480
+ ("peak_value", "peak"),
481
+ ("peak_doppler_bin", "peak_doppler"),
482
+ ("peak_range_bin", "peak_range")
483
+ ]
484
+ )
485
+
486
+ # Build and execute
487
+ dag = graph.build()
488
+ context = dag.execute()
489
+
490
+ print(f"Peak magnitude: {context['peak']}")
491
+ print(f"Peak Doppler bin: {context['peak_doppler']}")
492
+ print(f"Peak Range bin: {context['peak_range']}")
493
+ ```
494
+
495
+ **Run the example:**
496
+
497
+ ```bash
498
+ python examples/python_radar_demo.py
499
+ ```
500
+
501
+ ### Key Features Demonstrated
502
+
503
+ - **Native Type Support**: Uses `GraphData::complex_array()` for signal data, `GraphData::int()` for metadata
504
+ - **No String Conversions**: Numeric data stays in native format (i64, f64, Complex<f64>)
505
+ - **Implicit Complex Number Handling**: Python complex numbers (numpy.complex128, built-in complex) are automatically converted to/from GraphData::Complex without manual real/imag splitting
506
+ - **Direct Numpy Array Support**: Pass numpy ndarrays directly without `.tolist()` conversion - automatic detection and conversion
507
+ - **Type Safety**: Accessor methods (`.as_complex_array()`, `.as_int()`, `.as_float()`) provide safe type extraction
508
+ - **Complex Signal Processing**: Full FFT-based radar processing with ndarray integration
509
+
510
+ ### Adding Plotting Nodes
511
+
512
+ Plotting and visualization functions can be added as terminal nodes that take input but produce no output:
513
+
514
+ ```rust
515
+ fn plot_range_doppler(inputs: &HashMap<String, GraphData>, _params: &HashMap<String, GraphData>)
516
+ -> HashMap<String, GraphData> {
517
+ // Extract data for plotting
518
+ if let Some(map) = inputs.get("range_doppler").and_then(|d| d.as_complex_array()) {
519
+ // Generate plot (save to file, display, etc.)
520
+ println!("Generating Range-Doppler map plot...");
521
+ // ... plotting code using matplotlib, plotters, etc. ...
522
+ }
523
+
524
+ // No outputs - this is a terminal/visualization node
525
+ HashMap::new()
526
+ }
527
+
528
+ // Add to graph
529
+ graph.add(
530
+ plot_range_doppler,
531
+ Some("PlotRangeDoppler"),
532
+ Some(vec![("range_doppler_map", "range_doppler")]),
533
+ None // No outputs for visualization nodes
534
+ );
535
+ ```
536
+
537
+ This pattern allows visualization and logging nodes to be integrated into the pipeline without affecting data flow.
538
+
539
+ ## API Overview
540
+
541
+ ### Rust API
542
+
543
+ ### Graph Construction
544
+
545
+ - `Graph::new()` - Create a new graph
546
+ - `graph.add(fn, name, inputs, outputs)` - Add a node
547
+ - `fn`: Node function with signature `fn(&HashMap<String, GraphData>, &HashMap<String, GraphData>) -> HashMap<String, GraphData>`
548
+ - `name`: Optional node name
549
+ - `inputs`: Optional vector of `(broadcast_var, impl_var)` tuples for input mappings
550
+ - `outputs`: Optional vector of `(impl_var, broadcast_var)` tuples for output mappings
551
+ - `graph.branch()` - Create a new parallel branch
552
+ - `graph.variant(param_name, values)` - Create parameter sweep variants
553
+ - `graph.build()` - Build the DAG
554
+
555
+ ### DAG Operations
556
+
557
+ - `dag.execute()` - Execute the graph and return execution context
558
+ - `dag.stats()` - Get DAG statistics (nodes, depth, parallelism, branches, variants)
559
+ - `dag.to_mermaid()` - Generate Mermaid diagram representation
560
+
561
+ ### Python API
562
+
563
+ The Python bindings provide a similar API with proper GIL handling:
564
+
565
+ #### Graph Construction
566
+
567
+ - `PyGraph()` - Create a new graph
568
+ - `graph.add(function, label, inputs, outputs)` - Add a node
569
+ - `function`: Python callable with signature `fn(inputs: dict, variant_params: dict) -> dict`
570
+ - `label`: Optional node name (str)
571
+ - `inputs`: Optional list of `(broadcast_var, impl_var)` tuples or dict
572
+ - `outputs`: Optional list of `(impl_var, broadcast_var)` tuples or dict
573
+ - `graph.branch(subgraph)` - Create a new parallel branch with a subgraph
574
+ - `graph.build()` - Build the DAG and return a PyDag
575
+
576
+ #### DAG Operations
577
+
578
+ - `dag.execute()` - Execute the graph and return execution context (dict)
579
+ - `dag.execute_parallel()` - Execute with parallel execution where possible (dict)
580
+ - `dag.to_mermaid()` - Generate Mermaid diagram representation (str)
581
+
582
+ #### GIL Handling
583
+
584
+ The Python bindings are designed with proper GIL handling:
585
+
586
+ - **GIL Release**: The Rust executor runs without holding the GIL, allowing true parallelism
587
+ - **GIL Acquisition**: Python callables used as node functions acquire the GIL only during their execution
588
+ - **Thread Safety**: The bindings use `pyo3::prepare_freethreaded_python()` (via auto-initialize) for multi-threaded safety
589
+
590
+ This means that while Python functions execute sequentially (due to the GIL), the Rust graph traversal and coordination happen in parallel without GIL contention.
591
+
592
+ ## Development
593
+
594
+ ### Rust Development
595
+
596
+ Prerequisites:
597
+ - Rust (stable toolchain) installed: https://www.rust-lang.org/tools/install
598
+
599
+ Build and run tests:
600
+
601
+ ```bash
602
+ cargo build --release
603
+ cargo test
604
+ ```
605
+
606
+ Run examples:
607
+
608
+ ```bash
609
+ cargo run --example comprehensive_demo
610
+ cargo run --example parallel_execution_demo
611
+ cargo run --example variant_demo_full
612
+ cargo run --example radar_demo --features radar_examples
613
+ ```
614
+
615
+ ### Python Development
616
+
617
+ Prerequisites:
618
+ - Python 3.8+ installed
619
+ - Rust toolchain installed
620
+
621
+ Build Python bindings:
622
+
623
+ ```bash
624
+ # Create virtual environment
625
+ python -m venv .venv
626
+ source .venv/bin/activate # On Windows: .venv\Scripts\activate
627
+
628
+ # Install maturin
629
+ pip install maturin==1.2.0
630
+
631
+ # Build and install in development mode
632
+ maturin develop --release --features python
633
+
634
+ # Run Python example
635
+ python examples/python_demo.py
636
+ ```
637
+
638
+ Build wheel for distribution:
639
+
640
+ ```bash
641
+ maturin build --release --features python
642
+ # Wheel will be in target/wheels/
643
+ ```
644
+
645
+ ## Publishing
646
+
647
+ This repository is configured with GitHub Actions workflows to automatically publish to [crates.io](https://crates.io) and [PyPI](https://pypi.org) when a release tag is pushed.
648
+
649
+ ### Required Repository Secrets
650
+
651
+ To enable automatic publishing, the repository owner must configure the following secrets in GitHub Settings → Secrets and variables → Actions:
652
+
653
+ - **`CRATES_IO_TOKEN`**: Your crates.io API token (obtain from https://crates.io/me)
654
+ - **`PYPI_API_TOKEN`**: Your PyPI API token (obtain from https://pypi.org/manage/account/token/)
655
+
656
+ ### Publishing Process
657
+
658
+ The publish workflow (`.github/workflows/publish.yml`) will automatically run when:
659
+
660
+ 1. A tag matching `v*` is pushed (e.g., `v0.1.0`, `v1.0.0`)
661
+ 2. The workflow is manually triggered via workflow_dispatch
662
+
663
+ **Creating a release:**
664
+
665
+ ```bash
666
+ # Ensure version numbers in Cargo.toml and pyproject.toml are correct
667
+ git tag -a v0.1.0 -m "Release v0.1.0"
668
+ git push origin v0.1.0
669
+ ```
670
+
671
+ The workflow will:
672
+
673
+ 1. **Build Python wheels** for Python 3.8-3.11 on Linux, macOS, and Windows
674
+ 2. **Upload wheel artifacts** to the GitHub Actions run (always, even without secrets)
675
+ 3. **Publish to PyPI** (only if `PYPI_API_TOKEN` is set) - prebuilt wheels mean end users do not need Rust
676
+ 4. **Publish to crates.io** (only if `CRATES_IO_TOKEN` is set)
677
+
678
+ **Important notes:**
679
+
680
+ - Installing from PyPI with `pip install pygraph-sp` will **not require Rust** on the target machine because prebuilt platform-specific wheels are published
681
+ - Both crates.io and PyPI will reject duplicate version numbers - update versions before tagging
682
+ - The workflow will continue even if tokens are not set, allowing you to download artifacts for manual publishing
683
+ - For local testing, you can build wheels with `maturin build --release --features python`
684
+
685
+ ### Manual Publishing
686
+
687
+ If you prefer to publish manually or need to publish from a local machine:
688
+
689
+ **To crates.io:**
690
+
691
+ ```bash
692
+ cargo publish --token YOUR_CRATES_IO_TOKEN
693
+ ```
694
+
695
+ **To PyPI:**
696
+
697
+ ```bash
698
+ # Install maturin
699
+ pip install maturin==1.2.0
700
+
701
+ # Build and publish wheels
702
+ maturin publish --username __token__ --password YOUR_PYPI_API_TOKEN --features python
703
+ ```
704
+
@@ -0,0 +1,5 @@
1
+ pygraph_sp-2026.3.dist-info/METADATA,sha256=KiiPKCAhkFMK2EDkFc7skWNov7gW01jooiTbf_D3Pak,20827
2
+ pygraph_sp-2026.3.dist-info/WHEEL,sha256=kZgfOa0mdA1pG__wNamgNDx5FxDT5dGuN5SCS--d7iQ,95
3
+ graph_sp/__init__.py,sha256=xhKDfKdv8a2aPIlaskNZS4LNy00Uy4EuwE31glx0wYA,115
4
+ graph_sp/graph_sp.cp312-win_amd64.pyd,sha256=agaF9WaErWv5hu1lYFHsRztOEgVNZu26NdJqdWmGdoI,406016
5
+ pygraph_sp-2026.3.dist-info/RECORD,,
@@ -0,0 +1,4 @@
1
+ Wheel-Version: 1.0
2
+ Generator: maturin (1.2.0)
3
+ Root-Is-Purelib: false
4
+ Tag: cp312-none-win_amd64