jasonlib-dev 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,39 @@
1
# Publish workflow: on every push to master, run the test suite, tag the
# commit with the version declared in pyproject.toml, build, and publish
# the distribution to PyPI.
name: Publish

on:
  push:
    branches: [master]

jobs:
  publish:
    runs-on: ubuntu-latest
    permissions:
      contents: write   # required to push the release tag
      id-token: write   # required for PyPI trusted publishing (OIDC)
    steps:
      - uses: actions/checkout@v4

      - name: Install uv
        uses: astral-sh/setup-uv@v5
        with:
          python-version: "3.13"

      - name: Run tests
        run: uv run --group test pytest tests/

      - name: Read version
        id: version
        run: |
          # Extract the version string from pyproject.toml for tagging.
          VERSION=$(grep '^version' pyproject.toml | sed 's/version = "\(.*\)"/\1/')
          echo "version=$VERSION" >> $GITHUB_OUTPUT

      - name: Create tag
        run: |
          # "|| true": when the version was not bumped the tag already
          # exists; keep going instead of failing the job.
          git tag "v${{ steps.version.outputs.version }}" || true
          git push origin "v${{ steps.version.outputs.version }}" || true

      - name: Build
        run: uv build

      - name: Publish to PyPI
        run: uv publish
@@ -0,0 +1,24 @@
1
# CI workflow: run the test suite on every push to, and every pull
# request targeting, main or master.
name: Test

on:
  push:
    branches: [main, master]
  pull_request:
    branches: [main, master]

jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Install uv
        uses: astral-sh/setup-uv@v5
        with:
          python-version: "3.13"

      - name: Install dependencies
        run: uv sync --group test

      - name: Run tests
        run: uv run pytest tests/
@@ -0,0 +1,8 @@
1
# Python bytecode
__pycache__/
*.pyc
*.pyo
# Virtual environment
.venv/
# Build artifacts
dist/
*.egg-info/
# Tool caches
.pytest_cache/
.ruff_cache/
@@ -0,0 +1,10 @@
1
+ Metadata-Version: 2.4
2
+ Name: jasonlib-dev
3
+ Version: 0.1.0
4
+ Summary: Core JASON pivot-based feature computation library
5
+ Requires-Python: >=3.13
6
+ Requires-Dist: numba<0.63.0,>=0.62.1
7
+ Requires-Dist: numpy<2.4.0,>=2.3.0
8
+ Requires-Dist: pandas<4.0.0,>=3.0.1
9
+ Requires-Dist: pydantic<3.0.0,>=2.11.0
10
+ Requires-Dist: tqdm<5.0.0,>=4.67.1
@@ -0,0 +1,27 @@
1
+ # jasonlib
2
+
3
+ Core JASON pivot-based feature computation library.
4
+
5
+ ## Installing as a dependency
6
+
7
+ Add to your `pyproject.toml`:
8
+
9
+ ```toml
10
+ dependencies = [
+     "jasonlib-dev",
+ ]
+
+ [tool.uv.sources]
+ jasonlib-dev = { git = "https://github.com/palinorio/jasonlib", tag = "v0.1.0" }
16
+ ```
17
+
18
+ Update the `tag` value when consuming a newer version.
19
+
20
+ ## Releasing
21
+
22
+ 1. Update the `version` in `pyproject.toml`
23
+ 2. Commit and merge the change to `master`
24
+
25
+ On merge, CI will automatically create a git tag matching the version. Consumers can then pin to the new tag.
26
+
27
+ > Merging without bumping the version will not create a new tag — the existing tag is left unchanged.
@@ -0,0 +1,24 @@
1
[project]
name = "jasonlib-dev"
# Bump this to release: CI tags v<version> on merge to master.
version = "0.1.0"
description = "Core JASON pivot-based feature computation library"
requires-python = ">=3.13"
dependencies = [
    "numpy>=2.3.0,<2.4.0",
    "pandas>=3.0.1,<4.0.0",
    "numba>=0.62.1,<0.63.0",
    "tqdm>=4.67.1,<5.0.0",
    "pydantic>=2.11.0,<3.0.0",
]

# Dev-only dependency groups (installed with `uv sync --group test`).
[dependency-groups]
test = [
    "pytest>=9.0.2,<10.0.0",
]

# Package layout: the importable package lives under src/jasonlib.
[tool.hatch.build.targets.wheel]
packages = ["src/jasonlib"]

[build-system]
requires = ["hatchling>=1.27.0"]
build-backend = "hatchling.build"
@@ -0,0 +1,53 @@
1
+ """jasonlib - Core JASON pivot-based feature computation library.
2
+
3
+ Public API::
4
+
5
+ from jasonlib import compute_jason, compute_trend_analysis, compute_pivot_analysis
6
+ from jasonlib import JAssetClass, JInterval, TrendFunction, PivotSortOption
7
+ from jasonlib import CoinDataset, TrendAnalysisResult, PivotAnalysisResult
8
+ """
9
+
10
+ from jasonlib._calculator import compute_jason
11
+ from jasonlib._models import JAssetClass, JInterval
12
+ from jasonlib._trend_analysis import compute_trend_analysis
13
+ from jasonlib._trend_models import (
14
+ ChartData,
15
+ CoinDataset,
16
+ CoinSeries,
17
+ InsufficientDataError,
18
+ MarketTrendSummary,
19
+ PlotWindow,
20
+ SummaryMetric,
21
+ TrendAnalysisResult,
22
+ TrendCharts,
23
+ TrendFunction,
24
+ )
25
+ from jasonlib._pivot_models import (
26
+ PivotSortOption,
27
+ PivotAnalysisSummary,
28
+ PivotAnalysisParameters,
29
+ PivotAnalysisResult,
30
+ )
31
+ from jasonlib._pivot_analysis import compute_pivot_analysis
32
+
33
+ __all__ = [
34
+ "compute_jason",
35
+ "compute_trend_analysis",
36
+ "JAssetClass",
37
+ "JInterval",
38
+ "TrendFunction",
39
+ "CoinDataset",
40
+ "InsufficientDataError",
41
+ "TrendAnalysisResult",
42
+ "PlotWindow",
43
+ "SummaryMetric",
44
+ "MarketTrendSummary",
45
+ "CoinSeries",
46
+ "ChartData",
47
+ "TrendCharts",
48
+ "compute_pivot_analysis",
49
+ "PivotSortOption",
50
+ "PivotAnalysisSummary",
51
+ "PivotAnalysisParameters",
52
+ "PivotAnalysisResult",
53
+ ]
@@ -0,0 +1,248 @@
1
+ """Core JASON feature computation."""
2
+
3
+ import numpy as np
4
+ import pandas as pd
5
+ from tqdm import tqdm
6
+
7
+ from jasonlib._models import JAssetClass, JInterval
8
+ from jasonlib._numba_kernels import _calculate_metrics_for_window
9
+ from jasonlib._trading_calendar import (
10
+ TRADING_DAYS_PER_YEAR,
11
+ TRADING_HOURS_PER_DAY,
12
+ get_logger,
13
+ normalize_asset_class,
14
+ )
15
+
16
logger = get_logger("jasonlib.calculator")

# Output column order for compute_jason. Must match the positional tuple
# layout returned by _calculate_metrics_for_window.
_JASON_COLUMNS = [
    "high_vol",
    "high_days",
    "high_z",
    "high_pivot",
    "high_json2",
    "high_json2_plus_1d",
    "high_json2_plus_2d",
    "last_fx",
    "low_json2",
    "low_json2_plus_1d",
    "low_json2_plus_2d",
    "low_pivot",
    "low_z",
    "low_days",
    "low_vol",
]

# Per-column decimal places applied to the result frame:
# volatility / Z-score columns -> 2, days columns -> 1.
_ROUNDING = {
    "high_vol": 2,
    "high_days": 1,
    "high_z": 2,
    "low_z": 2,
    "low_days": 1,
    "low_vol": 2,
}
44
+
45
+
46
def compute_jason(
    df: pd.DataFrame,
    interval: JInterval,
    asset_class: JAssetClass,
    lookback: int = 7,
    n_pivots: int = 5,
    min_days: int = 1,
) -> pd.DataFrame:
    """Compute JASON pivot-based features from OHLC candle data.

    Identifies high/low pivot points over a rolling lookback window,
    weights the most frequent pivots, and derives volatility-adjusted
    support/resistance levels (JSON2), Z-scores, and days-since-pivot
    metrics.

    Args:
        df: OHLCV candle data. Must be datetime-indexed (UTC
            ``DatetimeIndex``). Required columns and dtypes:

            ======== ========= ============================
            Column   Dtype     Description
            ======== ========= ============================
            ``open``  float64  Opening price of the candle
            ``high``  float64  Highest price in the candle
            ``low``   float64  Lowest price in the candle
            ``close`` float64  Closing price of the candle
            ======== ========= ============================

            Additional columns are ignored. Rows must be sorted ascending
            by timestamp; the function will sort if they are not.
        interval: Candle size. Determines the number of candles that fit
            inside the *lookback* window.
        asset_class: Trading calendar to use. Determines days per year
            and hours per day for annualization. Required — no default.
        lookback: Lookback period in trading days for pivot detection.
            Default ``7``.
        n_pivots: Number of top pivots (by frequency) to weight.
            Default ``5``.
        min_days: Minimum number of days for a pivot to be considered.
            Default ``1``.

    Returns:
        ``pd.DataFrame`` indexed by ``DatetimeIndex`` (UTC), starting from
        the first candle after the lookback window. Columns:

        ====================== ========= ==========================================
        Column                 Dtype     Description
        ====================== ========= ==========================================
        ``high_vol``            float64  Annualized volatility (%) at high pivots
        ``high_days``           float64  Trading days since the high pivot
        ``high_z``              float64  Z-score from high pivot to last close
        ``high_pivot``          float64  Weighted average high pivot price
        ``high_json2``          float64  Support level (high_pivot * exp(-2*std))
        ``high_json2_plus_1d``  float64  Support level projected +1 day
        ``high_json2_plus_2d``  float64  Support level projected +2 days
        ``last_fx``             float64  Last closing price at this timestamp
        ``low_json2``           float64  Resistance level (low_pivot * exp(+2*std))
        ``low_json2_plus_1d``   float64  Resistance level projected +1 day
        ``low_json2_plus_2d``   float64  Resistance level projected +2 days
        ``low_pivot``           float64  Weighted average low pivot price
        ``low_z``               float64  Z-score from low pivot to last close
        ``low_days``            float64  Trading days since the low pivot
        ``low_vol``             float64  Annualized volatility (%) at low pivots
        ====================== ========= ==========================================

        Volatility and Z-score columns are rounded to 2 decimal places.
        Days columns are rounded to 1 decimal place.
        ZTV and ratio columns are appended by ``_calculate_ztv``.
    """
    if df.empty:
        return pd.DataFrame()

    df = df.sort_index()

    # Calendar constants for the asset class drive all annualization math.
    normalized_ac = normalize_asset_class(asset_class)
    days_in_year = float(TRADING_DAYS_PER_YEAR[normalized_ac])
    hours_per_day = TRADING_HOURS_PER_DAY[normalized_ac]
    seconds_per_trading_day = hours_per_day * 3600

    # Quadratic variation for volatility estimation.
    # NOTE(review): the 0.511 / -0.019 / -0.383 coefficients look like the
    # Garman-Klass OHLC variance estimator — confirm against the model spec.
    m_big = np.log(df["high"] / df["open"])
    m_small = np.log(df["low"] / df["open"])
    x = np.log(df["close"] / df["open"])
    qv_series = (
        0.511 * (m_big - m_small) ** 2
        - 0.019 * (x * (m_big + m_small) - 2 * m_big * m_small)
        - 0.383 * x**2
    )

    highs = df["high"].to_numpy(dtype=np.float64)
    lows = df["low"].to_numpy(dtype=np.float64)
    closes = df["close"].to_numpy(dtype=np.float64)

    candle_seconds = interval.seconds

    # Crypto trades around the clock, so a lookback "day" is a full
    # 24 hours; other calendars use their trading-session length.
    if normalized_ac == JAssetClass.CRYPTO:
        lookback_candles = int(lookback * 24 * 3600 // candle_seconds)
    else:
        lookback_candles = int(
            lookback * seconds_per_trading_day // candle_seconds
        )

    window_size = lookback_candles + 1

    s_in_year = days_in_year * seconds_per_trading_day
    min_days_in_years = (min_days / days_in_year) - (candle_seconds / s_in_year)

    qv_values = qv_series.to_numpy(dtype=np.float64)

    # Age of each candle in the window (seconds until the window end),
    # floored at one candle, expressed as a fraction of a year.
    t_seconds = np.arange(
        window_size * candle_seconds, 0, -candle_seconds, dtype=np.float64
    )
    t_seconds = np.maximum(t_seconds, candle_seconds)
    relative_time_in_years = t_seconds / s_in_year

    num_points = len(df)
    results: list[tuple] = []

    for i in tqdm(
        range(lookback_candles, num_points),
        desc="Computing JASON",
        leave=False,
    ):
        start_idx = i - lookback_candles
        end_idx = i + 1

        # Annualized volatility from the trailing quadratic variation:
        # reversed cumulative sums give, for each candle, the QV from that
        # candle to the window end, normalized by elapsed time.
        qv_window = qv_values[start_idx:end_idx]
        qv_sum_rev = np.cumsum(qv_window[::-1])[::-1]
        w_sum_rev = np.arange(len(qv_window), 0, -1, dtype=np.float64)
        vols_window = np.sqrt(
            (qv_sum_rev * s_in_year) / (w_sum_rev * candle_seconds)
        )

        # Numba kernel does the per-window pivot detection and metric math.
        res_tuple = _calculate_metrics_for_window(
            highs[start_idx:end_idx],
            lows[start_idx:end_idx],
            closes[start_idx:end_idx],
            vols_window,
            relative_time_in_years,
            n_pivots,
            min_days_in_years,
            days_in_year,
        )
        results.append(res_tuple)

    if not results:
        return pd.DataFrame()

    result_df = pd.DataFrame(
        results, index=df.index[lookback_candles:], columns=_JASON_COLUMNS
    )
    result_df = result_df.round(_ROUNDING)
    result_df = _calculate_ztv(result_df)
    return result_df
199
+
200
+
201
+
202
+
203
+ # ============================================================================
204
+ # ZTV AND LATEST METRICS
205
+ # ============================================================================
206
+
207
+ def _calculate_ztv(df: pd.DataFrame) -> pd.DataFrame:
208
+ """
209
+ Calculates all ZTV and Days metrics and returns a single DataFrame.
210
+
211
+ ZTV (Z-Time-Volatility) is a conviction score combining Z-Score, Time, and Volatility.
212
+ It measures the total power of a price move as a percentage of theoretical maximum.
213
+
214
+ Args:
215
+ jason_df: DataFrame with JASON features including:
216
+ - high_z, low_z: Z-scores
217
+ - high_vol, low_vol: Volatility at pivots
218
+ - high_days, low_days: Days since pivots
219
+
220
+ Returns:
221
+ DataFrame with additional columns:
222
+ - high_ztv, low_ztv: Raw ZTV values
223
+ - high_ztv_ratio, low_ztv_ratio: ZTV as ratio of max (0-1)
224
+ - high_days_ratio, low_days_ratio: Days as ratio of max (0-1)
225
+ """
226
+ # if jason_df.empty:
227
+ # return pd.DataFrame()
228
+
229
+ # df = jason_df.copy()
230
+
231
+
232
+ # ZTV Metrics Logic
233
+ MAX_Z = 2.5
234
+ MAX_DAYS_ZTV = 4.0
235
+ MAX_VOL = 100.0
236
+ max_ztv = MAX_VOL * MAX_Z * np.sqrt(MAX_DAYS_ZTV)
237
+
238
+ df['high_ztv'] = df['high_z'] * df['high_vol'] * np.sqrt(df['high_days'].astype(float))
239
+ df['low_ztv'] = df['low_z'] * df['low_vol'] * np.sqrt(df['low_days'].astype(float))
240
+ df['high_ztv_ratio'] = (df['high_ztv'] / max_ztv).clip(0, 1)
241
+ df['low_ztv_ratio'] = (df['low_ztv'] / max_ztv).clip(0, 1)
242
+
243
+ # Days Metrics Logic
244
+ MAX_DAYS_RATIO = 5.0
245
+ df['high_days_ratio'] = (df['high_days'] / MAX_DAYS_RATIO).clip(0, 1)
246
+ df['low_days_ratio'] = (df['low_days'] / MAX_DAYS_RATIO).clip(0, 1)
247
+
248
+ return df
@@ -0,0 +1,52 @@
1
+ """Sigmoid utility functions for trend score calculations."""
2
+
3
+ import numpy as np
4
+ import pandas as pd
5
+
6
+
7
+ def sigmoid_z(x: pd.Series, threshold: float = 2.0, k: float = 10.0) -> pd.Series:
8
+ """Apply a sigmoid function centered around a threshold.
9
+
10
+ Args:
11
+ x: Input Series.
12
+ threshold: Center point of the sigmoid.
13
+ k: Steepness factor.
14
+
15
+ Returns:
16
+ Sigmoid-transformed Series with same index.
17
+ """
18
+ numeric_x = x.astype(float).to_numpy()
19
+ result = 1 / (1 + np.exp(-k * (numeric_x - threshold)))
20
+ return pd.Series(result, index=x.index)
21
+
22
+
23
+ def sigmoid_diff(x: pd.Series, threshold: float = 1.0, k: float = 10.0) -> pd.Series:
24
+ """Apply a differential sigmoid creating symmetric positive/negative mapping.
25
+
26
+ Args:
27
+ x: Input Series.
28
+ threshold: Threshold for the differential.
29
+ k: Steepness factor.
30
+
31
+ Returns:
32
+ Differential sigmoid-transformed Series with same index.
33
+ """
34
+ numeric_x = x.astype(float).to_numpy()
35
+ result = 1 + 1 / (1 + np.exp(-k * (numeric_x - threshold))) - 1 / (1 + np.exp(-k * (-numeric_x + threshold)))
36
+ return pd.Series(result, index=x.index)
37
+
38
+
39
+ def sigmoid_vol(x: pd.Series, hist_vol: float = 50.0, k: float = 0.2) -> pd.Series:
40
+ """Apply a sigmoid function for volatility weighting.
41
+
42
+ Args:
43
+ x: Input Series of volatility values.
44
+ hist_vol: Historical volatility center point.
45
+ k: Steepness factor.
46
+
47
+ Returns:
48
+ Sigmoid-weighted Series with same index.
49
+ """
50
+ numeric_x = x.astype(float).to_numpy()
51
+ result = 1 / (1 + np.exp(-k * (numeric_x - hist_vol)))
52
+ return pd.Series(result, index=x.index)
@@ -0,0 +1,62 @@
1
+ """JASON domain model enums."""
2
+
3
+ from enum import Enum
4
+
5
+ import pandas as pd
6
+
7
+
8
class JAssetClass(str, Enum):
    """Supported asset classes for JASON calculations.

    Determines the trading calendar used for time-aware computations:
    - CRYPTO: 365 days/year, 24 hours/day
    - EQUITIES: 252 trading days/year, 6.5 hours/day (US market hours)
    - COMMOD: 252 trading days/year, 23 hours/day (CME Globex)
    - FX: 252 trading days/year, 23 hours/day (CME Globex)
    - EQUITY_IDX: 252 trading days/year, 23 hours/day (CME Globex)
    """

    CRYPTO = "CRYPTO"
    FX = "FX"
    COMMOD = "COMMOD"
    EQUITIES = "EQUITIES"
    EQUITY_IDX = "EQUITY_IDX"
24
+
25
+
26
# Candle duration in seconds for each supported interval notation.
# Keys are the JInterval member values.
_INTERVAL_SECONDS = {
    "1m": 60,
    "5m": 300,
    "15m": 900,
    "30m": 1800,
    "1h": 3600,
    "4h": 14400,
    "12h": 43200,
    "1d": 86400,
}
36
+
37
+
38
class JInterval(str, Enum):
    """Supported candle intervals for JASON calculations.

    Uses MetaTrader-style naming: M for minutes, H for hours, D for days.
    Each member's string value is the compact interval notation (e.g. "1h").
    Use the ``seconds`` property to get the candle duration in seconds.
    """

    M1 = "1m"
    M5 = "5m"
    M15 = "15m"
    M30 = "30m"
    H1 = "1h"
    H4 = "4h"
    H12 = "12h"
    D1 = "1d"

    @property
    def seconds(self) -> int:
        """Return candle duration in seconds (looked up in _INTERVAL_SECONDS)."""
        return _INTERVAL_SECONDS[self.value]

    def to_timedelta(self) -> pd.Timedelta:
        """Convert interval to pandas Timedelta."""
        return pd.Timedelta(seconds=self.seconds)