typhoon_rainflow-0.2.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
typhoon/__init__.py ADDED
@@ -0,0 +1,5 @@
+ from .typhoon import *
+
+ __doc__ = typhoon.__doc__
+ if hasattr(typhoon, "__all__"):
+     __all__ = typhoon.__all__
typhoon/__init__.pyi ADDED
@@ -0,0 +1,17 @@
+ from .typhoon import *
+ from .woehler import (
+     MinerType,
+     WoehlerCurveParams,
+     woehler_loads_basic,
+     woehler_loads,
+     woehler_log_space,
+ )
+ from .helper import (
+     CycleKey,
+     CycleCounter,
+     merge_cycle_counters,
+     add_residual_half_cycles,
+     counter_to_full_interval_df,
+ )
+
+ __all__: list[str]
typhoon/helper.py ADDED
@@ -0,0 +1,102 @@
+ from __future__ import annotations
+
+ from collections import Counter
+ from typing import Iterable, Literal, Mapping, Tuple, cast
+
+ import numpy as np
+ import pandas as pd
+
+
+ CycleKey = Tuple[float, float]
+ CycleCounter = Counter
+
+
+ def merge_cycle_counters(counters: Iterable[Mapping[CycleKey, float]]) -> CycleCounter:
+     """Merge multiple rainflow() cycle dicts/Counter objects using Counter.
+
+     Each input mapping should be like the first return value from ``rainflow``:
+     ``{(s_lower, s_upper): count}``.
+     """
+
+     total: CycleCounter = Counter()
+     for c in counters:
+         total.update(c)
+     return total
+
+
+ def add_residual_half_cycles(
+     counter: Mapping[CycleKey, float],
+     residual_peaks: np.ndarray,
+ ) -> CycleCounter:
+     """Add half-cycles from residual waveform peaks to an existing Counter.
+
+     The residual peaks array is expected to be the second return value from
+     ``rainflow``. It represents half-cycles between adjacent peaks.
+
+     Each half-cycle contributes 0.5 to the count for its (from, to) key.
+     """
+
+     result: CycleCounter = Counter(counter)
+
+     if residual_peaks.size < 2:
+         return result
+
+     for i in range(len(residual_peaks) - 1):
+         f = float(residual_peaks[i])
+         t = float(residual_peaks[i + 1])
+         key: CycleKey = (f, t)
+         result[key] += 0.5  # type: ignore
+
+     return result
+
+
+ def counter_to_full_interval_df(
+     counter: Mapping[CycleKey, float],
+     bin_size: float = 0.1,
+     closed: Literal["left", "right"] = "right",
+     round_decimals: int = 12,
+ ) -> pd.DataFrame:
+     """Convert a (from, to): count mapping to a full 2D interval DataFrame.
+
+     The returned DataFrame has a MultiIndex of (from_interval, to_interval)
+     covering the entire range, with zero counts where no cycles exist.
+     """
+
+     if not counter:
+         # Return empty but well-formed DataFrame
+         return pd.DataFrame(
+             [],
+             index=pd.MultiIndex.from_arrays(
+                 [pd.IntervalIndex([], name="from"), pd.IntervalIndex([], name="to")]
+             ),
+             columns=["value"],
+         )
+
+     half = bin_size / 2.0
+
+     from_vals = sorted({f for (f, _) in counter.keys()})
+     to_vals = sorted({t for (_, t) in counter.keys()})
+
+     min_val = min(min(from_vals), min(to_vals))
+     max_val = max(max(from_vals), max(to_vals))
+
+     centers = np.arange(min_val, max_val + bin_size / 2.0, bin_size)
+
+     def make_interval(c: float) -> pd.Interval:
+         left = round(c - half, round_decimals)
+         right = round(c + half, round_decimals)
+         return pd.Interval(left, right, closed=closed)
+
+     from_bins = pd.IntervalIndex([make_interval(cast(float, c)) for c in centers], name="from")
+     to_bins = pd.IntervalIndex([make_interval(cast(float, c)) for c in centers], name="to")
+
+     full_idx = pd.MultiIndex.from_product([from_bins, to_bins], names=["from", "to"])
+
+     data = {
+         (make_interval(f), make_interval(t)): float(v) for (f, t), v in counter.items()
+     }
+
+     s = pd.Series(data, name="value", dtype="float64")
+     s = s.reindex(full_idx, fill_value=0.0)
+
+     return s.to_frame()
typhoon/helper.pyi ADDED
@@ -0,0 +1,24 @@
+ from __future__ import annotations
+
+ from collections import Counter
+ from typing import Iterable, Literal, Mapping, Tuple
+
+ import numpy as np
+ import pandas as pd
+
+ CycleKey = Tuple[float, float]
+ CycleCounter = Counter[CycleKey]
+
+ def merge_cycle_counters(
+     counters: Iterable[Mapping[CycleKey, float]],
+ ) -> CycleCounter: ...
+ def add_residual_half_cycles(
+     counter: Mapping[CycleKey, float],
+     residual_peaks: np.ndarray,
+ ) -> CycleCounter: ...
+ def counter_to_full_interval_df(
+     counter: Mapping[CycleKey, float],
+     bin_size: float = 0.1,
+     closed: Literal["left", "right"] = "right",
+     round_decimals: int = 12,
+ ) -> pd.DataFrame: ...
typhoon/py.typed ADDED
(empty marker file)
typhoon/typhoon.pyi ADDED
@@ -0,0 +1,62 @@
+ from collections import Counter
+ from collections.abc import Mapping
+ from typing import Any, TypeAlias
+
+ import numpy as np
+ from numpy.typing import NDArray
+
+ Array1D: TypeAlias = NDArray[np.float32]
+
+ def init_tracing() -> None: ...
+
+
+ def rainflow(
+     waveform: Array1D,
+     last_peaks: Array1D | None = ...,
+     bin_size: float = ...,
+     threshold: float | None = ...,
+     min_chunk_size: int = ...,
+ ) -> tuple[dict[tuple[float, float], int], Array1D]:
+     ...
+
+
+ def goodman_transform(
+     cycles: Mapping[tuple[float, float], float] | Mapping[tuple[float, float], int],
+     m: float,
+     m2: float | None = ...,
+ ) -> dict[float, float]:
+     ...
+
+
+ def summed_histogram(
+     hist: Mapping[float, float] | Mapping[float, int],
+ ) -> list[tuple[float, float]]:
+     ...
+
+
+ class RainflowContext:
+     def __init__(self, bin_size: float = ..., threshold: float = ...) -> None: ...
+
+     def process(self, waveform: Array1D) -> None: ...
+     def reset(self) -> None: ...
+
+     def cycles_len(self) -> int: ...
+     def get_last_peaks(self) -> Array1D: ...
+
+     def to_dict(self) -> dict[tuple[float, float], int]: ...
+     def to_counter(self) -> Counter[tuple[float, float]]: ...
+     def to_heatmap(self) -> tuple[np.ndarray[Any, np.dtype[np.float64]], Array1D]: ...
+
+     def goodman_transform(
+         self,
+         m: float,
+         m2: float | None = ...,
+         include_half_cycles: bool = ...,
+     ) -> dict[float, float]: ...
+
+     def summed_histogram(
+         self,
+         m: float,
+         m2: float | None = ...,
+         include_half_cycles: bool = ...,
+     ) -> list[tuple[float, float]]: ...
typhoon/woehler.py ADDED
@@ -0,0 +1,280 @@
+ """Helpers for evaluating Woehler (S-N) curves.
+
+ The functions in this module are a NumPy-based translation of the TypeScript
+ implementation used in the UI project. They provide utilities for computing
+ load amplitudes for a given number of cycles and for generating a convenient
+ logarithmic cycle axis.
+
+ The central entry points are:
+
+ * :class:`WoehlerCurveParams` – container for curve parameters.
+ * :func:`woehler_loads` – probability-dependent Woehler curve.
+ * :func:`woehler_loads_basic` – Woehler curve without probability/scattering.
+ * :func:`woehler_log_space` – helper to create a log-spaced cycle axis.
+ """
+
+ from __future__ import annotations
+
+ import numpy as np
+
+ from dataclasses import dataclass
+ from enum import Enum
+ from math import log10
+ from typing import Iterable
+
+
+
+
+ class MinerType(str, Enum):
+     NONE = "none"
+     ORIGINAL = "original"
+     ELEMENTARY = "elementary"
+     HAIBACH = "haibach"
+
+
+ @dataclass
+ class WoehlerCurveParams:
+     sd: float
+     nd: float
+     k1: float
+     k2: float | None = None
+     ts: float | None = None
+     tn: float | None = None
+
+
+ _DEFAULT_FAILURE_PROBABILITY = 0.5
+
+
+ def _norm_ppf(p: float) -> float:
+     """Approximate the inverse CDF (ppf) of the standard normal distribution.
+
+     Implementation based on Peter J. Acklam's algorithm. This avoids adding a
+     dependency on SciPy while being sufficiently accurate for engineering
+     purposes.
+     """
+
+     if not (0.0 < p < 1.0):
+         raise ValueError("p must be in (0, 1)")
+
+     a = [
+         -3.969683028665376e01,
+         2.209460984245205e02,
+         -2.759285104469687e02,
+         1.383577518672690e02,
+         -3.066479806614716e01,
+         2.506628277459239e00,
+     ]
+     b = [
+         -5.447609879822406e01,
+         1.615858368580409e02,
+         -1.556989798598866e02,
+         6.680131188771972e01,
+         -1.328068155288572e01,
+     ]
+     c = [
+         -7.784894002430293e-03,
+         -3.223964580411365e-01,
+         -2.400758277161838e00,
+         -2.549732539343734e00,
+         4.374664141464968e00,
+         2.938163982698783e00,
+     ]
+     d = [
+         7.784695709041462e-03,
+         3.224671290700398e-01,
+         2.445134137142996e00,
+         3.754408661907416e00,
+     ]
+
+     plow = 0.02425
+     phigh = 1 - plow
+
+     if p < plow:
+         q = np.sqrt(-2 * np.log(p))
+         return (((((c[0] * q + c[1]) * q + c[2]) * q + c[3]) * q + c[4]) * q + c[5]) / (
+             (((d[0] * q + d[1]) * q + d[2]) * q + d[3]) * q + 1.0
+         )
+
+     if p > phigh:
+         q = np.sqrt(-2 * np.log(1 - p))
+         return -(
+             (((((c[0] * q + c[1]) * q + c[2]) * q + c[3]) * q + c[4]) * q + c[5])
+             / ((((d[0] * q + d[1]) * q + d[2]) * q + d[3]) * q + 1.0)
+         )
+
+     q = p - 0.5
+     r = q * q
+     return (
+         (((((a[0] * r + a[1]) * r + a[2]) * r + a[3]) * r + a[4]) * r + a[5]) * q
+     ) / (((((b[0] * r + b[1]) * r + b[2]) * r + b[3]) * r + b[4]) * r + 1.0)
+
+
+ _NATIVE_PPF = 0.0  # _norm_ppf(_DEFAULT_FAILURE_PROBABILITY)
+
+
+ def _scattering_range_to_std(t: float) -> float:
+     return 0.39015207303618954 * log10(t)
+
+
+ def _derive_k2(params: WoehlerCurveParams, miner: MinerType) -> float:
+     if miner is MinerType.ORIGINAL:
+         return float("inf")
+     if miner is MinerType.ELEMENTARY:
+         return params.k1
+     if miner is MinerType.HAIBACH:
+         return 2.0 * params.k1 - 1.0
+     return float("inf")
+
+
+ def _derive_ts(params: WoehlerCurveParams) -> float:
+     if params.ts is not None:
+         return params.ts
+     if params.tn is not None:
+         return float(params.tn) ** (1.0 / params.k1)
+     return 1.0
+
+
+ def _derive_tn(params: WoehlerCurveParams) -> float:
+     if params.tn is not None:
+         return params.tn
+     if params.ts is not None:
+         return float(params.ts) ** params.k1
+     return 1.0
+
+
+ def _make_k(
+     src: float, ref: float, params: WoehlerCurveParams, miner: MinerType
+ ) -> float:
+     k2_derived = _derive_k2(params, miner)
+     if src < ref:
+         return k2_derived
+     return params.k1
+
+
+ def woehler_loads_basic(
+     cycles: Iterable[float] | np.ndarray,
+     params: WoehlerCurveParams,
+     miner: MinerType = MinerType.NONE,
+ ) -> np.ndarray:
+     """Return Woehler curve loads for given cycle counts.
+
+     This variant corresponds to the "native" Woehler curve and does not apply
+     any probability or scattering transformation. It still honours the
+     ``miner`` setting and thus the possible change of slope between the
+     finite-life and the endurance region.
+     """
+
+     cyc = np.asarray(list(cycles), dtype=float)
+     if cyc.ndim != 1:
+         raise ValueError("cycles must be 1D")
+
+     sd = params.sd
+     nd_transformed = params.nd
+
+     sd_transformed = sd
+
+     ref = -nd_transformed
+     k_values = np.array(
+         [_make_k(-float(c), ref, params, miner) for c in cyc], dtype=float
+     )
+
+     loads = np.empty_like(cyc, dtype=float)
+     mask_finite = np.isfinite(k_values)
+     loads[mask_finite] = sd_transformed * (cyc[mask_finite] / nd_transformed) ** (
+         -1.0 / k_values[mask_finite]
+     )
+     loads[~mask_finite] = sd_transformed
+
+     return loads
+
+
+ def woehler_loads(
+     cycles: Iterable[float] | np.ndarray,
+     params: WoehlerCurveParams,
+     miner: MinerType = MinerType.NONE,
+     failure_probability: float = _DEFAULT_FAILURE_PROBABILITY,
+ ) -> np.ndarray:
+     """Return Woehler curve loads for given cycle counts.
+
+     Parameters
+     ----------
+     cycles:
+         Iterable of cycle counts (e.g. values from :func:`woehler_log_space`).
+     params:
+         Woehler curve parameters such as fatigue strength and slopes.
+     miner:
+         Miner damage rule variant determining the second slope ``k2``.
+     failure_probability:
+         Target failure probability :math:`P_f` in the interval ``(0, 1)``.
+
+     Notes
+     -----
+     The implementation mirrors the TypeScript logic from the UI and is
+     vectorised for NumPy arrays. It uses an internal approximation of the
+     standard normal inverse CDF and applies the same transformations to
+     ``sd`` and ``nd`` that are used in the UI.
+     """
+
+     if not (0.0 < failure_probability < 1.0):
+         raise ValueError("failure_probability must be in (0, 1)")
+
+     cyc = np.asarray(list(cycles), dtype=float)
+     if cyc.ndim != 1:
+         raise ValueError("cycles must be 1D")
+
+     goal_ppf = _norm_ppf(failure_probability)
+
+     ts_derived = _derive_ts(params)
+     tn_derived = _derive_tn(params)
+
+     sd = params.sd
+     nd = params.nd
+
+     # Transform sd
+     sd_transformed = sd / 10.0 ** (
+         (_NATIVE_PPF - goal_ppf) * _scattering_range_to_std(ts_derived)
+     )
+
+     # Transform nd
+     transformed_nd = nd / 10.0 ** (
+         (_NATIVE_PPF - goal_ppf) * _scattering_range_to_std(tn_derived)
+     )
+     if sd_transformed != 0.0:
+         nd_transformed = transformed_nd * (sd_transformed / sd) ** (-params.k1)
+     else:
+         nd_transformed = transformed_nd
+
+     ref = -nd_transformed
+     k_values = np.array(
+         [_make_k(-float(c), ref, params, miner) for c in cyc], dtype=float
+     )
+
+     loads = np.empty_like(cyc, dtype=float)
+     mask_finite = np.isfinite(k_values)
+     loads[mask_finite] = sd_transformed * (cyc[mask_finite] / nd_transformed) ** (
+         -1.0 / k_values[mask_finite]
+     )
+     loads[~mask_finite] = sd_transformed
+
+     return loads
+
+
+ def woehler_log_space(
+     minimum: float = 1.0,
+     maximum: float = 10.0**8,
+     n: int = 101,
+ ) -> np.ndarray:
+     """Return logarithmically spaced cycle counts between ``minimum`` and ``maximum``.
+
+     This is a small convenience wrapper around :func:`numpy.logspace` with
+     defaults that are suitable for typical Woehler curves.
+     """
+
+     if n < 2:
+         raise ValueError("n must be >= 2")
+
+     log_min = log10(minimum)
+     log_max = log10(maximum)
+     # step = (log_max - log_min) / (n - 1)
+     exponents = np.linspace(log_min, log_max, num=n)
+     return 10.0**exponents
typhoon/woehler.pyi ADDED
@@ -0,0 +1,39 @@
+ from __future__ import annotations
+
+ from dataclasses import dataclass
+ from enum import Enum
+ from typing import Iterable
+
+ import numpy as np
+
+ class MinerType(str, Enum):
+     NONE: str
+     ORIGINAL: str
+     ELEMENTARY: str
+     HAIBACH: str
+
+ @dataclass
+ class WoehlerCurveParams:
+     sd: float
+     nd: float
+     k1: float
+     k2: float | None = None
+     ts: float | None = None
+     tn: float | None = None
+
+ def woehler_loads_basic(
+     cycles: Iterable[float] | np.ndarray,
+     params: WoehlerCurveParams,
+     miner: MinerType = ...,
+ ) -> np.ndarray: ...
+ def woehler_loads(
+     cycles: Iterable[float] | np.ndarray,
+     params: WoehlerCurveParams,
+     miner: MinerType = ...,
+     failure_probability: float = ...,
+ ) -> np.ndarray: ...
+ def woehler_log_space(
+     minimum: float = 1.0,
+     maximum: float = 10.0**8,
+     n: int = 101,
+ ) -> np.ndarray: ...
typhoon_rainflow-0.2.0.dist-info/METADATA ADDED
@@ -0,0 +1,254 @@
+ Metadata-Version: 2.4
+ Name: typhoon-rainflow
+ Version: 0.2.0
+ Classifier: Programming Language :: Rust
+ Classifier: Programming Language :: Python :: Implementation :: CPython
+ Classifier: Programming Language :: Python :: Implementation :: PyPy
+ Classifier: Typing :: Typed
+ Requires-Dist: numpy
+ Requires-Dist: pandas
+ Requires-Dist: nox ; extra == 'test'
+ Provides-Extra: test
+ Summary: Fast rainflow counting for streaming input written in Rust
+ Author: Markus Wegmann
+ Author-email: mw@technokrat.ch
+ Requires-Python: >=3.8
+ Description-Content-Type: text/markdown
+
+ # typhoon
+ [![CI](https://github.com/technokrat/typhoon/actions/workflows/CI.yml/badge.svg)](https://github.com/technokrat/typhoon/actions/workflows/CI.yml) ![PyPI - Version](https://img.shields.io/pypi/v/typhoon-rainflow)
+
+
+ Typhoon is a rainflow counting Python module written in Rust by Markus Wegmann (mw@technokrat.ch).
+
+ It uses a new windowed four-point counting method which can be run in parallel on multiple cores and allows for chunk-based sample stream processing, preserving half cycles for future chunks.
+
+ It is therefore intended for real-time processing of load captures and serves as a crucial part of i-Spring's in-edge data processing chain.
+
+ ## Installation
+ Add the package `typhoon-rainflow` to your Python project, e.g.
+
+ ```sh
+ poetry add typhoon-rainflow
+ ```
+
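+ or, with pip:
+
+ ```sh
+ pip install typhoon-rainflow
+ ```
+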
+ ## Python API
+
+ The Python package exposes two main namespaces:
+
+ - `typhoon.typhoon`: low-level, performance-critical functions implemented in Rust.
+ - `typhoon.helper`: convenience utilities for working with the rainflow output.
+
+ The top-level package re-exports everything from `typhoon.typhoon`, so you can either
+
+ ```python
+ import typhoon  # recommended for normal use
+ from typhoon import rainflow, goodman_transform
+ ```
+
+ or
+
+ ```python
+ from typhoon.typhoon import rainflow
+ from typhoon import helper  # for helper utilities
+ ```
+
+ ### Core functions (`typhoon.typhoon`)
+
+ All arguments are keyword-compatible with the examples below.
+
+ - `init_tracing() -> None`
+   - Initialize verbose tracing/logging from the Rust implementation.
+   - Intended mainly for debugging and performance analysis; it writes to stdout.
+
+ - `rainflow(waveform, last_peaks=None, bin_size=0.0, threshold=None, min_chunk_size=64*1024)`
+   - Perform windowed four-point rainflow counting on a 1D NumPy waveform.
+   - `waveform`: 1D `numpy.ndarray` of `float32` or `float64`.
+   - `last_peaks`: optional 1D array of peaks from the previous chunk (for streaming).
+   - `bin_size`: bin width for quantizing ranges; `0.0` disables quantization.
+   - `threshold`: minimum cycle amplitude to count; default `0.0`.
+   - `min_chunk_size`: minimum chunk size for internal parallelization.
+   - Returns `(cycles, residual_peaks)` where
+     - `cycles` is a dict `{(s_lower, s_upper): count}` and
+     - `residual_peaks` is a 1D NumPy array of remaining peaks to pass to the next call.
+
+ - `goodman_transform(cycles, m, m2=None)`
+   - Apply a (piecewise) Goodman-like mean stress correction to rainflow cycles.
+   - `cycles`: mapping `{(s_lower, s_upper): count}` (e.g. first return value of `rainflow`).
+   - `m`: main slope/parameter.
+   - `m2`: optional secondary slope; defaults to `m / 3` if omitted.
+   - Returns a dict `{s_a_ers: count}` where `s_a_ers` is the equivalent range.
+
+ - `summed_histogram(hist)`
+   - Build a descending cumulative histogram from the Goodman-transformed result.
+   - `hist`: mapping `{s_a_ers: count}` such as returned from `goodman_transform`.
+   - Returns a list of `(s_a_ers, cumulative_count)` pairs sorted from high to low range.
+
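+ For intuition, the cumulative counting can be sketched in plain Python (a hypothetical `summed_histogram_py`, assuming each output entry accumulates the counts of all ranges greater than or equal to it):
+
+ ```python
+ def summed_histogram_py(hist):
+     """Plain-Python sketch of the descending cumulative histogram."""
+     total = 0.0
+     result = []
+     for s_a_ers in sorted(hist, reverse=True):  # highest range first
+         total += hist[s_a_ers]
+         result.append((s_a_ers, total))
+     return result
+
+ # e.g. {4.0: 0.5, 2.0: 1.0} -> [(4.0, 0.5), (2.0, 1.5)]
+ ```
+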
+ ### Stateful streaming (`RainflowContext`)
+
+ If you process signals chunk-by-chunk, repeatedly calling `rainflow()` and merging dicts/Counters can become a bottleneck.
+
+ `RainflowContext` keeps the accumulated cycle map and the residual `last_peaks` inside the Rust extension, so each new chunk only updates the existing state.
+
+ Key methods:
+
+ - `process(waveform)`: update the internal state from one waveform chunk.
+ - `to_counter()`: export the accumulated cycles as a Python `collections.Counter`.
+ - `to_heatmap()`: export a dense 2D NumPy array for plotting (and the corresponding bin centers).
+ - `goodman_transform(m, m2=None, include_half_cycles=False)`: Goodman transform directly on the internal state.
+   - When `include_half_cycles=True`, the current residual `last_peaks` are treated as half-cycles (each adjacent peak-pair contributes `0.5`).
+ - `summed_histogram(m, m2=None, include_half_cycles=False)`: convenience wrapper that returns the descending cumulative histogram (same format as `typhoon.summed_histogram`).
+
+ Example:
+
+ ```python
+ import numpy as np
+ import typhoon
+
+ ctx = typhoon.RainflowContext(bin_size=1.0, threshold=0.0)
+
+ for chunk in chunks:  # iterable of 1D numpy arrays
+     ctx.process(chunk)
+
+ # Export accumulated cycles
+ cycles = ctx.to_counter()
+
+ # Goodman transform (optionally including the current residual half-cycles)
+ hist = ctx.goodman_transform(m=0.3, include_half_cycles=True)
+
+ # Summed histogram directly from the context
+ summed = ctx.summed_histogram(m=0.3, include_half_cycles=True)
+
+ # Heatmap export for matplotlib
+ heatmap, bins = ctx.to_heatmap()
+
+ # Example plotting
+ # import matplotlib.pyplot as plt
+ # plt.imshow(heatmap, origin="lower")
+ # plt.xticks(range(len(bins)), bins, rotation=90)
+ # plt.yticks(range(len(bins)), bins)
+ # plt.xlabel("to")
+ # plt.ylabel("from")
+ # plt.colorbar(label="count")
+ # plt.tight_layout()
+ # plt.show()
+ ```
+
+ ### Helper utilities (`typhoon.helper`)
+
+ The helper module provides convenience tools for post-processing and analysis.
+
+ - `merge_cycle_counters(counters)`
+   - Merge multiple `dict`/`Counter` objects of the form `{(from, to): count}`.
+   - Useful when combining rainflow results from multiple chunks or channels.
+
+ - `add_residual_half_cycles(counter, residual_peaks)`
+   - Convert the trailing `residual_peaks` from `rainflow` into half-cycles and add them to an existing counter.
+   - Each adjacent pair of peaks `(p_i, p_{i+1})` contributes `0.5` to the corresponding cycle key.
+
+ - `counter_to_full_interval_df(counter, bin_size=0.1, closed="right", round_decimals=12)`
+   - Convert a sparse `(from, to): count` mapping into a dense 2D `pandas.DataFrame` over all intervals.
+   - Returns a DataFrame with a `(from, to)` `MultiIndex` of `pd.Interval` and a single `"value"` column.
+
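+ For example, a sketch of densifying a small (hypothetical) cycle counter for plotting or export:
+
+ ```python
+ from typhoon import helper
+
+ # Sparse rainflow-style counter: {(from, to): count}
+ counter = {(0.0, 2.0): 3.0, (1.0, 3.0): 1.5}
+
+ # Dense (from, to) interval grid; cells without cycles are filled with 0.0
+ df = helper.counter_to_full_interval_df(counter, bin_size=1.0)
+ print(df)
+ ```
+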
+ ### Woehler curves (`typhoon.woehler`)
+
+ The `typhoon.woehler` module provides helpers for evaluating S–N (Woehler) curves.
+
+ Key entry points are:
+
+ - `WoehlerCurveParams(sd, nd, k1, k2=None, ts=None, tn=None)`
+   - Container for the curve parameters:
+     - `sd`: fatigue strength at `nd` cycles for the reference failure probability.
+     - `nd`: reference number of cycles (e.g. 1e6).
+     - `k1`: slope in the finite-life region.
+     - `k2`: optional slope in the endurance region; derived from the Miner
+       rule if omitted.
+     - `ts` / `tn`: optional scattering parameters controlling probability
+       transforms of `sd` and `nd`.
+ - `MinerType` enum
+   - Miner damage rule variant that determines the second slope `k2`:
+     `NONE`, `ORIGINAL`, `ELEMENTARY`, `HAIBACH`.
+ - `woehler_log_space(minimum=1.0, maximum=1e8, n=101)`
+   - Convenience helper to generate a logarithmically spaced cycle axis for
+     plotting Woehler curves.
+ - `woehler_loads_basic(cycles, params, miner=MinerType.NONE)`
+   - Compute a "native" Woehler curve without probability/scattering
+     transformation, but honouring the selected Miner type.
+ - `woehler_loads(cycles, params, miner=MinerType.NONE, failure_probability=0.5)`
+   - Compute a probability-dependent Woehler curve using an internal
+     approximation of the normal inverse CDF.
+
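+ A short sketch of putting these together (the parameter values below are purely illustrative):
+
+ ```python
+ from typhoon.woehler import MinerType, WoehlerCurveParams, woehler_loads, woehler_log_space
+
+ # Hypothetical curve: strength sd at nd cycles, slope k1, scattering ts
+ params = WoehlerCurveParams(sd=100.0, nd=1e6, k1=5.0, ts=1.25)
+
+ cycles = woehler_log_space(minimum=1e3, maximum=1e8, n=51)
+
+ # Load amplitudes for 10 % and 50 % failure probability
+ loads_p10 = woehler_loads(cycles, params, miner=MinerType.HAIBACH, failure_probability=0.1)
+ loads_p50 = woehler_loads(cycles, params, miner=MinerType.HAIBACH, failure_probability=0.5)
+ ```
+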
+ ## Example Usage
+
+ ### Basic rainflow counting
+
+ ```python
+ import numpy as np
+ import typhoon
+
+ waveform = np.array([0.0, 1.0, 2.0, 1.0, 2.0, 1.0, 3.0, 4.0], dtype=np.float32)
+
+ cycles, residual_peaks = typhoon.rainflow(
+     waveform=waveform,
+     last_peaks=None,
+     bin_size=1.0,
+ )
+
+ print("Cycles:", cycles)
+ print("Residual peaks:", residual_peaks)
+ ```
+
+ ### Streaming / chunked processing with helpers
+
+ ```python
+ from collections import Counter
+
+ import numpy as np
+ import typhoon
+ from typhoon import helper
+
+ waveform1 = np.array([0.0, 1.0, 2.0, 1.0, 2.0, 1.0, 3.0, 4.0], dtype=np.float32)
+ waveform2 = np.array([3.0, 5.0, 4.0, 2.0], dtype=np.float32)
+
+ # First chunk
+ cycles1, residual1 = typhoon.rainflow(waveform1, last_peaks=None, bin_size=1.0)
+
+ # Second chunk, passing residual peaks from the first
+ cycles2, residual2 = typhoon.rainflow(waveform2, last_peaks=residual1, bin_size=1.0)
+
+ # Merge cycle counts from both chunks
+ merged = helper.merge_cycle_counters([cycles1, cycles2])
+
+ # Optionally add remaining half-cycles from the final residual peaks
+ merged_with_residuals = helper.add_residual_half_cycles(merged, residual2)
+
+ print("Merged cycles:", merged_with_residuals)
+ ```
+
+ ### Goodman transform and summed histogram
+
+ ```python
+ import numpy as np
+ import typhoon
+ from typhoon import helper
+
+ waveform = np.array([0.0, 1.0, 2.0, 1.0, 2.0, 1.0, 3.0, 4.0], dtype=np.float32)
+
+ cycles, residual_peaks = typhoon.rainflow(waveform, last_peaks=None, bin_size=1.0)
+
+ # Apply Goodman transform
+ hist = typhoon.goodman_transform(cycles, m=0.3)
+
+ # Summed histogram from the Goodman result
+ summed = typhoon.summed_histogram(hist)
+
+ print("Goodman result:", hist)
+ print("Summed histogram:", summed)
+ ```
+
+ ## Testing
+
+ ```sh
+ pipx install nox
+
+ nox -s build
+ nox -s test
+ nox -s develop
+ ```
typhoon_rainflow-0.2.0.dist-info/RECORD ADDED
@@ -0,0 +1,12 @@
+ typhoon/__init__.py,sha256=61qdDpZxeAZUZYNQSz2J5rW7yQYJweTiuEM8WAvWr0E,112
+ typhoon/__init__.pyi,sha256=eIk21D4wbD61ApVTUwbUo-yzyol72X7WuBhL8EOcx0o,319
+ typhoon/helper.py,sha256=4sCHSb14URp0h5RPVf8nqjV_-cpc2nymryaiUIz6kCE,3104
+ typhoon/helper.pyi,sha256=EkvjSMIHiCRtjn5ARjtdNpFahD4G79Evqj9EGu4J3Sc,650
+ typhoon/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ typhoon/typhoon.cpython-312-powerpc64le-linux-gnu.so,sha256=5Swf2SHXuUVT7p0YzLXjKt2WaOfSnO06u4tdAX5TVZw,1681040
+ typhoon/typhoon.pyi,sha256=vPiyHPSgT5srb6T_nzXx6SXu5HGO8xz2S0LYDrCjTkM,1597
+ typhoon/woehler.py,sha256=cMk1L4taWKWNAYbr4dGUb3Bkp3A10s9hhGCRQ6ZvcS8,7808
+ typhoon/woehler.pyi,sha256=OuejNfi8pPa58V0lJzmu7f6N4W036YuC09nmimdigUU,849
+ typhoon_rainflow-0.2.0.dist-info/METADATA,sha256=YCqpafjp6mAJ7ZyUIkPImLwyJ-fK68AKVq1Tdn8mhS0,9519
+ typhoon_rainflow-0.2.0.dist-info/WHEEL,sha256=qwDhKaIiqiFOO-9vdjqn1s6gfU4heRxHMy95GnoMwLQ,149
+ typhoon_rainflow-0.2.0.dist-info/RECORD,,
typhoon_rainflow-0.2.0.dist-info/WHEEL ADDED
@@ -0,0 +1,5 @@
+ Wheel-Version: 1.0
+ Generator: maturin (1.11.5)
+ Root-Is-Purelib: false
+ Tag: cp312-cp312-manylinux_2_17_ppc64le
+ Tag: cp312-cp312-manylinux2014_ppc64le