quantjourney-bidask 0.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,8 @@
+ from .edge import edge
+ from .edge_rolling import edge_rolling
+ from .edge_expanding import edge_expanding
+ from .data_fetcher import fetch_binance_data, fetch_yfinance_data
+ from .websocket_fetcher import LiveSpreadMonitor
+ from ._version import __version__, __author__, __email__, __license__
+
+ __all__ = ['edge', 'edge_rolling', 'edge_expanding', 'fetch_binance_data', 'fetch_yfinance_data', 'LiveSpreadMonitor']
@@ -0,0 +1,7 @@
+ """Version information for quantjourney_bidask."""
+
+ __version__ = "0.1.0"
+ __author__ = "Jakub Polec"
+ __email__ = "jakub@quantjourney.pro"
+ __license__ = "Apache License 2.0"
+ __copyright__ = "Copyright (c) 2024 Jakub Polec, QuantJourney"
@@ -0,0 +1,160 @@
+ import pandas as pd
+ import requests
+ import yfinance as yf
+ from typing import Optional, List
+ from datetime import datetime
+ import time
+
+ def fetch_binance_data(
+     symbols: List[str],
+     timeframe: str,
+     start: str,
+     end: str,
+     api_key: str,
+     api_url: str = "http://localhost:8000"
+ ) -> pd.DataFrame:
+     """
+     Fetch OHLCV data from Binance using the provided FastAPI server.
+
+     Parameters
+     ----------
+     symbols : List[str]
+         List of trading pairs (e.g., ["BTCUSDT", "ETHUSDT"]).
+     timeframe : str
+         Data timeframe (e.g., "1m", "1h", "1d").
+     start : str
+         Start time in ISO 8601 format (e.g., "2024-01-01T00:00:00Z").
+     end : str
+         End time in ISO 8601 format (e.g., "2024-01-02T00:00:00Z").
+     api_key : str
+         API key for authentication.
+     api_url : str, default "http://localhost:8000"
+         Base URL of the FastAPI server.
+
+     Returns
+     -------
+     pd.DataFrame
+         DataFrame with columns ['timestamp', 'symbol', 'open', 'high', 'low', 'close', 'volume'].
+
+     Raises
+     ------
+     ValueError
+         If the API request fails or returns an error.
+     """
+     payload = {
+         "exchange": "binance",
+         "symbols": symbols,
+         "start": start,
+         "end": end,
+         "timeframe": timeframe,
+         "upload_d1": False,
+         "force": False
+     }
+     headers = {"X-API-Key": api_key}
+
+     # Initiate fetch request
+     response = requests.post(f"{api_url}/fetch", json=payload, headers=headers)
+     if response.status_code != 200:
+         raise ValueError(f"Fetch request failed: {response.text}")
+
+     task_id = response.json().get("task_id")
+     if not task_id:
+         raise ValueError("No task ID returned from fetch request")
+
+     # Poll task status until the fetch task completes or fails
+     while True:
+         status_response = requests.get(f"{api_url}/tasks/{task_id}")
+         if status_response.status_code != 200:
+             raise ValueError(f"Task status check failed: {status_response.text}")
+
+         task = status_response.json().get("task")
+         if task["status"] in ["completed", "failed"]:
+             if task["status"] == "failed":
+                 raise ValueError(f"Task failed: {task.get('message')}")
+             break
+         time.sleep(1)  # Avoid hammering the API while polling
+
+     # Query data
+     data = []
+     for symbol in symbols:
+         query_payload = {
+             "symbol": symbol,
+             "timeframe": timeframe,
+             "start": start,
+             "end": end
+         }
+         query_response = requests.post(f"{api_url}/d1/query", json=query_payload)
+         if query_response.status_code != 200:
+             raise ValueError(f"Data query failed for {symbol}: {query_response.text}")
+
+         rows = query_response.json().get("data", [])
+         df = pd.DataFrame(rows)
+         if not df.empty:
+             df['symbol'] = symbol
+             data.append(df)
+
+     if not data:
+         raise ValueError("No data retrieved for the specified parameters")
+
+     result = pd.concat(data, ignore_index=True)
+     result['timestamp'] = pd.to_datetime(result['timestamp'])
+     return result[['timestamp', 'symbol', 'open', 'high', 'low', 'close', 'volume']]
+
+ def fetch_yfinance_data(
+     tickers: List[str],
+     period: str = "1mo",
+     interval: str = "1d",
+     start: Optional[str] = None,
+     end: Optional[str] = None
+ ) -> pd.DataFrame:
+     """
+     Fetch OHLCV data from Yahoo Finance using yfinance.
+
+     Parameters
+     ----------
+     tickers : List[str]
+         List of ticker symbols (e.g., ["AAPL", "MSFT"]).
+     period : str, default "1mo"
+         Data period (e.g., "1d", "1mo", "1y"). Ignored if start and end are provided.
+     interval : str, default "1d"
+         Data interval (e.g., "1m", "1h", "1d").
+     start : str, optional
+         Start date (e.g., "2024-01-01"). Overrides period if provided.
+     end : str, optional
+         End date (e.g., "2024-01-31"). Overrides period if provided.
+
+     Returns
+     -------
+     pd.DataFrame
+         DataFrame with columns ['timestamp', 'symbol', 'open', 'high', 'low', 'close', 'volume'].
+
+     Raises
+     ------
+     ValueError
+         If no data is retrieved for the specified parameters.
+     """
+     data = []
+     for ticker in tickers:
+         stock = yf.Ticker(ticker)
+         if start and end:
+             df = stock.history(start=start, end=end, interval=interval)
+         else:
+             df = stock.history(period=period, interval=interval)
+
+         if df.empty:
+             continue
+
+         df = df.reset_index()
+         df['symbol'] = ticker
+         df = df.rename(columns={
+             'Date': 'timestamp',
+             'Datetime': 'timestamp',  # intraday intervals use a 'Datetime' index
+             'Open': 'open',
+             'High': 'high',
+             'Low': 'low',
+             'Close': 'close',
+             'Volume': 'volume'
+         })
+         data.append(df[['timestamp', 'symbol', 'open', 'high', 'low', 'close', 'volume']])
+
+     if not data:
+         raise ValueError("No data retrieved for the specified parameters")
+
+     return pd.concat(data, ignore_index=True)
@@ -0,0 +1,148 @@
+ import numpy as np
+ import warnings
+ from typing import Union, List, Tuple, Any
+
+ def edge(
+     open: Union[List[float], Any],
+     high: Union[List[float], Any],
+     low: Union[List[float], Any],
+     close: Union[List[float], Any],
+     sign: bool = False
+ ) -> float:
+     """
+     Estimate the effective bid-ask spread from open, high, low, and close (OHLC) prices.
+
+     Implements the efficient estimator described in Ardia, Guidotti, & Kroencke (2024):
+     https://doi.org/10.1016/j.jfineco.2024.103916. The estimator computes the root mean square
+     effective spread within the sample period using log-returns and indicator variables.
+
+     Parameters
+     ----------
+     open : array-like
+         Vector of open prices, sorted in ascending order of timestamp.
+     high : array-like
+         Vector of high prices, sorted in ascending order of timestamp.
+     low : array-like
+         Vector of low prices, sorted in ascending order of timestamp.
+     close : array-like
+         Vector of close prices, sorted in ascending order of timestamp.
+     sign : bool, default False
+         If True, returns signed estimates (negative values possible). If False, returns
+         absolute values to reduce small-sample bias in averaging or regression studies.
+
+     Returns
+     -------
+     float
+         Estimated bid-ask spread as a fraction of price (e.g., 0.01 = 1% spread).
+         Returns np.nan if the estimate cannot be computed (e.g., insufficient data).
+
+     Notes
+     -----
+     - Requires at least 3 observations for a valid estimate.
+     - Handles missing values (NaNs) automatically by excluding them from calculations.
+     - The estimator assumes prices are positive and non-zero to compute log-prices.
+     - For optimal results, use high-frequency data (e.g., minute or hourly) for frequently
+       traded assets, or lower frequency (e.g., daily) for less liquid assets.
+
+     Examples
+     --------
+     >>> import pandas as pd
+     >>> df = pd.read_csv("https://raw.githubusercontent.com/eguidotti/bidask/main/pseudocode/ohlc.csv")
+     >>> spread = edge(df.Open, df.High, df.Low, df.Close)
+     >>> print(f"Estimated spread: {spread:.6f}")
+     Estimated spread: 0.010185
+     """
+     # Convert inputs to numpy arrays
+     open = np.asarray(open, dtype=float)
+     high = np.asarray(high, dtype=float)
+     low = np.asarray(low, dtype=float)
+     close = np.asarray(close, dtype=float)
+
+     # Validate input lengths
+     nobs = len(open)
+     if len(high) != nobs or len(low) != nobs or len(close) != nobs:
+         raise ValueError("Open, high, low, and close must have the same length")
+
+     # Return NaN if insufficient observations
+     if nobs < 3:
+         return np.nan
+
+     # Compute log-prices, handling non-positive prices
+     with warnings.catch_warnings():
+         warnings.simplefilter("ignore", RuntimeWarning)
+         o = np.log(np.where(open > 0, open, np.nan))
+         h = np.log(np.where(high > 0, high, np.nan))
+         l = np.log(np.where(low > 0, low, np.nan))
+         c = np.log(np.where(close > 0, close, np.nan))
+         m = (h + l) / 2.0  # Mid-price log
+
+     # Shift log-prices by one period
+     h1, l1, c1, m1 = h[:-1], l[:-1], c[:-1], m[:-1]
+     o, h, l, c, m = o[1:], h[1:], l[1:], c[1:], m[1:]
+
+     # Compute log-returns
+     r1 = m - o    # Mid - Open
+     r2 = o - m1   # Open - Previous Mid
+     r3 = m - c1   # Mid - Previous Close
+     r4 = c1 - m1  # Previous Close - Previous Mid
+     r5 = o - c1   # Open - Previous Close
+
+     # Compute indicator variables
+     # tau: Indicator for valid price variation (1 if high != low or low != previous close)
+     tau = np.where(np.isnan(h) | np.isnan(l) | np.isnan(c1), np.nan,
+                    ((h != l) | (l != c1)).astype(float))
+
+     # po1: Indicator for open price not equal to high, scaled by tau
+     po1 = tau * np.where(np.isnan(o) | np.isnan(h), np.nan, (o != h).astype(float))
+
+     # po2: Indicator for open price not equal to low, scaled by tau
+     po2 = tau * np.where(np.isnan(o) | np.isnan(l), np.nan, (o != l).astype(float))
+
+     # pc1: Indicator for previous close not equal to previous high, scaled by tau
+     pc1 = tau * np.where(np.isnan(c1) | np.isnan(h1), np.nan, (c1 != h1).astype(float))
+
+     # pc2: Indicator for previous close not equal to previous low, scaled by tau
+     pc2 = tau * np.where(np.isnan(c1) | np.isnan(l1), np.nan, (c1 != l1).astype(float))
+
+     # Compute probabilities with NaN handling
+     with warnings.catch_warnings():
+         warnings.simplefilter("ignore", RuntimeWarning)
+         pt = np.nanmean(tau)
+         po = np.nanmean(po1) + np.nanmean(po2)
+         pc = np.nanmean(pc1) + np.nanmean(pc2)
+
+     # Return NaN if insufficient valid periods or probabilities are zero
+     if np.nansum(tau) < 2 or po == 0 or pc == 0:
+         return np.nan
+
+     # Compute de-meaned log-returns
+     d1 = r1 - np.nanmean(r1) / pt * tau
+     d3 = r3 - np.nanmean(r3) / pt * tau
+     d5 = r5 - np.nanmean(r5) / pt * tau
+
+     # Compute input vectors for GMM estimation
+     # x1: First moment condition combining open-high-low and close-high-low effects
+     x1 = -4.0 / po * d1 * r2 + -4.0 / pc * d3 * r4  # Scaled by probability of open/close extremes
+     # x2: Second moment condition combining open-high-low-close and close-high-low-open effects
+     x2 = -4.0 / po * d1 * r5 + -4.0 / pc * d5 * r4
+
+     # Compute expectations (means) of the moment conditions
+     e1 = np.nanmean(x1)  # First moment expectation
+     e2 = np.nanmean(x2)  # Second moment expectation
+
+     # Compute variances of the moment conditions for optimal weighting
+     v1 = np.nanmean(x1**2) - e1**2  # Variance of first moment
+     v2 = np.nanmean(x2**2) - e2**2  # Variance of second moment
+
+     # Compute squared spread estimate using optimal GMM weights
+     vt = v1 + v2  # Total variance for weighting
+     # If total variance is positive, use optimal weighted average
+     # Otherwise fall back to simple average of the two estimates
+     s2 = (v2 * e1 + v1 * e2) / vt if vt > 0 else (e1 + e2) / 2.0
+
+     # Compute signed root
+     s = np.sqrt(np.abs(s2))
+     if sign:
+         s *= np.sign(s2)
+
+     return float(s)
@@ -0,0 +1,59 @@
+ import pandas as pd
+ from typing import Union
+ from .edge import edge
+ from .edge_rolling import edge_rolling
+
+ def edge_expanding(
+     df: pd.DataFrame,
+     min_periods: int = 1,
+     sign: bool = False
+ ) -> pd.Series:
+     """
+     Compute expanding window estimates of the bid-ask spread from OHLC prices.
+
+     Uses the efficient estimator from Ardia, Guidotti, & Kroencke (2024):
+     https://doi.org/10.1016/j.jfineco.2024.103916. Calculates spreads over
+     expanding windows starting from the first observation.
+
+     Parameters
+     ----------
+     df : pd.DataFrame
+         DataFrame with columns 'open', 'high', 'low', 'close' (case-insensitive).
+     min_periods : int, default 1
+         Minimum number of observations required for an estimate. Note that
+         at least 3 observations are needed for a non-NaN result.
+     sign : bool, default False
+         If True, returns signed estimates. If False, returns absolute values.
+
+     Returns
+     -------
+     pd.Series
+         Series of expanding spread estimates, indexed by the DataFrame's index.
+         A value of 0.01 corresponds to a 1% spread. NaN for periods with
+         insufficient data.
+
+     Notes
+     -----
+     - The function leverages `edge_rolling` with a window equal to the DataFrame length.
+     - Missing values are handled automatically.
+     - The estimator is most reliable with sufficient data (e.g., 20+ observations).
+
+     Examples
+     --------
+     >>> import pandas as pd
+     >>> df = pd.read_csv("https://raw.githubusercontent.com/eguidotti/bidask/main/pseudocode/ohlc.csv")
+     >>> spreads = edge_expanding(df, min_periods=21)
+     >>> print(spreads.head())
+     """
+     # Standardize column names
+     df = df.rename(columns=str.lower).copy()
+     required_cols = ['open', 'high', 'low', 'close']
+     if not all(col in df.columns for col in required_cols):
+         raise ValueError("DataFrame must contain 'open', 'high', 'low', 'close' columns")
+
+     return edge_rolling(
+         df=df,
+         window=len(df),
+         min_periods=max(min_periods, 3),
+         sign=sign
+     )
@@ -0,0 +1,202 @@
+ import numpy as np
+ import pandas as pd
+ from typing import Union, Dict
+ from .edge import edge
+
+ def edge_rolling(
+     df: pd.DataFrame,
+     window: Union[int, str, pd.offsets.BaseOffset],
+     sign: bool = False,
+     **kwargs
+ ) -> pd.Series:
+     """
+     Compute rolling window estimates of the bid-ask spread from OHLC prices.
+
+     Uses the efficient estimator from Ardia, Guidotti, & Kroencke (2024):
+     https://doi.org/10.1016/j.jfineco.2024.103916. Optimized for fast computation
+     over rolling windows using vectorized operations.
+
+     Parameters
+     ----------
+     df : pd.DataFrame
+         DataFrame with columns 'open', 'high', 'low', 'close' (case-insensitive).
+     window : int, str, or pd.offsets.BaseOffset
+         Size of the rolling window. Can be an integer (number of periods),
+         a string (e.g., '30D' for 30 days), or a pandas offset object.
+         See pandas.DataFrame.rolling for details.
+     sign : bool, default False
+         If True, returns signed estimates. If False, returns absolute values.
+     **kwargs
+         Additional arguments to pass to pandas.DataFrame.rolling, such as
+         min_periods, step, or center.
+
+     Returns
+     -------
+     pd.Series
+         Series of rolling spread estimates, indexed by the DataFrame's index.
+         A value of 0.01 corresponds to a 1% spread. NaN for periods with
+         insufficient data.
+
+     Notes
+     -----
+     - The function accounts for missing values by masking invalid periods.
+     - The first observation is masked due to the need for lagged prices.
+     - For large datasets, this implementation is significantly faster than
+       applying `edge` repeatedly over windows.
+
+     Examples
+     --------
+     >>> import pandas as pd
+     >>> df = pd.read_csv("https://raw.githubusercontent.com/eguidotti/bidask/main/pseudocode/ohlc.csv")
+     >>> spreads = edge_rolling(df, window=21)
+     >>> print(spreads.head())
+     """
+     # Standardize column names
+     df = df.rename(columns=str.lower).copy()
+     required_cols = ['open', 'high', 'low', 'close']
+     if not all(col in df.columns for col in required_cols):
+         raise ValueError("DataFrame must contain 'open', 'high', 'low', 'close' columns")
+
+     # Compute log-prices, handling non-positive prices by replacing them with NaN
+     # This prevents errors from taking log of zero or negative values
+     o = np.log(df['open'].where(df['open'] > 0))    # Log of open prices
+     h = np.log(df['high'].where(df['high'] > 0))    # Log of high prices
+     l = np.log(df['low'].where(df['low'] > 0))      # Log of low prices
+     c = np.log(df['close'].where(df['close'] > 0))  # Log of close prices
+     m = (h + l) / 2.0  # Log of geometric mid-price each period
+
+     # Get lagged (previous period) log-prices using pandas shift
+     # These are needed to compute overnight returns and indicators
+     h1 = h.shift(1)  # Previous period's high
+     l1 = l.shift(1)  # Previous period's low
+     c1 = c.shift(1)  # Previous period's close
+     m1 = m.shift(1)  # Previous period's mid-price
+
+     # Compute log-returns:
+     r1 = m - o    # Mid-price minus open (intraday return from open to mid)
+     r2 = o - m1   # Open minus previous mid (overnight return from prev mid to open)
+     r3 = m - c1   # Mid-price minus previous close (return from prev close to mid)
+     r4 = c1 - m1  # Previous close minus previous mid (prev intraday return from mid to close)
+     r5 = o - c1   # Open minus previous close (overnight return from prev close to open)
+
+     # Compute indicator variables for price variation and extremes
+     # tau: Indicator for valid price variation (1 if high != low or low != previous close)
+     tau = np.where(np.isnan(h) | np.isnan(l) | np.isnan(c1), np.nan,
+                    ((h != l) | (l != c1)).astype(float))
+
+     # po1: Indicator for open price not equal to high, scaled by tau
+     po1 = tau * np.where(np.isnan(o) | np.isnan(h), np.nan, (o != h).astype(float))
+
+     # po2: Indicator for open price not equal to low, scaled by tau
+     po2 = tau * np.where(np.isnan(o) | np.isnan(l), np.nan, (o != l).astype(float))
+
+     # pc1: Indicator for previous close not equal to previous high, scaled by tau
+     pc1 = tau * np.where(np.isnan(c1) | np.isnan(h1), np.nan, (c1 != h1).astype(float))
+
+     # pc2: Indicator for previous close not equal to previous low, scaled by tau
+     pc2 = tau * np.where(np.isnan(c1) | np.isnan(l1), np.nan, (c1 != l1).astype(float))
+
+     # Compute base products needed for rolling means
+     # Products of log-returns for covariance calculations
+     r12 = r1 * r2  # Mid-Open × Open-PrevMid
+     r15 = r1 * r5  # Mid-Open × Open-PrevClose
+     r34 = r3 * r4  # Mid-PrevClose × PrevClose-PrevMid
+     r45 = r4 * r5  # PrevClose-PrevMid × Open-PrevClose
+
+     # Products with tau indicator for valid periods
+     tr1 = tau * r1  # Scaled Mid-Open
+     tr2 = tau * r2  # Scaled Open-PrevMid
+     tr4 = tau * r4  # Scaled PrevClose-PrevMid
+     tr5 = tau * r5  # Scaled Open-PrevClose
+
+     # Set up DataFrame for efficient rolling mean calculations
+     # Includes all products needed for moment conditions and variance calculations
+     x = pd.DataFrame({
+         # Basic return products
+         'r12': r12, 'r34': r34, 'r15': r15, 'r45': r45,
+         'tau': tau,  # Price variation indicator
+         # Individual returns
+         'r1': r1, 'tr2': tr2, 'r3': r3, 'tr4': tr4, 'r5': r5,
+         # Squared terms for variance
+         'r12_sq': r12**2, 'r34_sq': r34**2, 'r15_sq': r15**2, 'r45_sq': r45**2,
+         # Cross products for covariance
+         'r12_r34': r12 * r34, 'r15_r45': r15 * r45,
+         # Products with tau-scaled returns
+         'tr2_r2': tr2 * r2, 'tr4_r4': tr4 * r4, 'tr5_r5': tr5 * r5,
+         'tr2_r12': tr2 * r12, 'tr4_r34': tr4 * r34,
+         'tr5_r15': tr5 * r15, 'tr4_r45': tr4 * r45,
+         'tr4_r12': tr4 * r12, 'tr2_r34': tr2 * r34,
+         'tr2_r4': tr2 * r4, 'tr1_r45': tr1 * r45,
+         'tr5_r45': tr5 * r45, 'tr4_r5': tr4 * r5,
+         'tr5': tr5,
+         # Extreme price indicators
+         'po1': po1, 'po2': po2, 'pc1': pc1, 'pc2': pc2
+     }, index=df.index)
+
+     # Handle first observation and adjust window parameters
+     x.iloc[0] = np.nan  # Mask first row due to lagged values
+     if isinstance(window, (int, np.integer)):
+         window = max(0, window - 1)  # Adjust window size for lag
+     if 'min_periods' in kwargs and isinstance(kwargs['min_periods'], (int, np.integer)):
+         kwargs['min_periods'] = max(0, kwargs['min_periods'] - 1)
+
+     # Compute rolling means for all variables
+     m = x.rolling(window=window, **kwargs).mean()
+
+     # Calculate probabilities of price extremes
+     pt = m['tau']             # Probability of valid price variation
+     po = m['po1'] + m['po2']  # Probability of open being extreme
+     pc = m['pc1'] + m['pc2']  # Probability of close being extreme
+
+     # Mask periods with insufficient data or zero probabilities
+     nt = x['tau'].rolling(window=window, **kwargs).sum()
+     m[(nt < 2) | (po == 0) | (pc == 0)] = np.nan
+
+     # Compute coefficients for moment conditions
+     a1 = -4.0 / po      # Scaling for open price moments
+     a2 = -4.0 / pc      # Scaling for close price moments
+     a3 = m['r1'] / pt   # Mean-adjustment for Mid-Open
+     a4 = m['tr4'] / pt  # Mean-adjustment for PrevClose-PrevMid
+     a5 = m['r3'] / pt   # Mean-adjustment for Mid-PrevClose
+     a6 = m['r5'] / pt   # Mean-adjustment for Open-PrevClose
+
+     # Pre-compute squared and product terms
+     a12 = 2 * a1 * a2
+     a11 = a1**2
+     a22 = a2**2
+     a33 = a3**2
+     a55 = a5**2
+     a66 = a6**2
+
+     # Calculate moment condition expectations
+     e1 = a1 * (m['r12'] - a3 * m['tr2']) + a2 * (m['r34'] - a4 * m['r3'])  # First moment
+     e2 = a1 * (m['r15'] - a3 * m['tr5']) + a2 * (m['r45'] - a4 * m['r5'])  # Second moment
+
+     # Calculate variances of moment conditions
+     # v1: Variance of first moment condition
+     v1 = -e1**2 + (
+         a11 * (m['r12_sq'] - 2 * a3 * m['tr2_r12'] + a33 * m['tr2_r2']) +
+         a22 * (m['r34_sq'] - 2 * a5 * m['tr4_r34'] + a55 * m['tr4_r4']) +
+         a12 * (m['r12_r34'] - a3 * m['tr2_r34'] - a5 * m['tr4_r12'] + a3 * a5 * m['tr2_r4'])
+     )
+     # v2: Variance of second moment condition
+     v2 = -e2**2 + (
+         a11 * (m['r15_sq'] - 2 * a3 * m['tr5_r15'] + a33 * m['tr5_r5']) +
+         a22 * (m['r45_sq'] - 2 * a6 * m['tr4_r45'] + a66 * m['tr4_r4']) +
+         a12 * (m['r15_r45'] - a3 * m['tr5_r45'] - a6 * m['tr1_r45'] + a3 * a6 * m['tr4_r5'])
+     )
+
+     # Compute squared spread using optimal GMM weights
+     vt = v1 + v2  # Total variance
+     s2 = pd.Series(np.where(
+         vt > 0,
+         (v2 * e1 + v1 * e2) / vt,  # Optimal weighted average if variance is positive
+         (e1 + e2) / 2.0            # Simple average if variance is zero/negative
+     ), index=df.index)
+
+     # Compute signed root
+     s = np.sqrt(np.abs(s2))
+     if sign:
+         s *= np.sign(s2)
+
+     return pd.Series(s, index=df.index, name=f"EDGE_rolling_{window}")
@@ -0,0 +1,308 @@
+ """
+ WebSocket Live Data Fetcher
+
+ Real-time data fetching for cryptocurrency exchanges using WebSockets.
+ """
+
+ import json
+ import threading
+ import time
+ import pandas as pd
+ import numpy as np
+ from datetime import datetime, timezone
+ from typing import Dict, List, Callable, Optional
+ import websocket
+ from collections import deque
+ from .edge_rolling import edge_rolling
+
+ class LiveSpreadMonitor:
+     """
+     Real-time spread monitoring using WebSocket connections.
+
+     Supports Binance WebSocket streams for live OHLC data and real-time
+     spread calculation with configurable alerts.
+     """
+
+     def __init__(self, symbols: List[str], window: int = 20, buffer_size: int = 1000):
+         """
+         Initialize the live spread monitor.
+
+         Parameters
+         ----------
+         symbols : List[str]
+             List of trading symbols to monitor (e.g., ['BTCUSDT', 'ETHUSDT'])
+         window : int
+             Rolling window size for spread calculation
+         buffer_size : int
+             Maximum number of candles to keep in memory
+         """
+         self.symbols = [s.lower() for s in symbols]
+         self.window = window
+         self.buffer_size = buffer_size
+
+         # Data storage
+         self.data_buffers = {symbol: deque(maxlen=buffer_size) for symbol in self.symbols}
+         self.spread_buffers = {symbol: deque(maxlen=buffer_size) for symbol in self.symbols}
+
+         # WebSocket connections
+         self.ws_connections = {}
+         self.running = False
+
+         # Callbacks
+         self.data_callbacks = []
+         self.alert_callbacks = []
+
+         # Alert thresholds (in basis points)
+         self.alert_thresholds = {symbol: {'high': 100, 'low': 5} for symbol in self.symbols}
+
+     def add_data_callback(self, callback: Callable):
+         """Add callback function for new data events."""
+         self.data_callbacks.append(callback)
+
+     def add_alert_callback(self, callback: Callable):
+         """Add callback function for alert events."""
+         self.alert_callbacks.append(callback)
+
+     def set_alert_threshold(self, symbol: str, high_bps: float, low_bps: float):
+         """Set alert thresholds for a symbol (in basis points)."""
+         symbol = symbol.lower()
+         if symbol in self.alert_thresholds:
+             self.alert_thresholds[symbol] = {'high': high_bps, 'low': low_bps}
+
+     def _create_websocket_url(self, symbols: List[str]) -> str:
+         """Create Binance WebSocket URL for multiple symbols."""
+         streams = []
+         for symbol in symbols:
+             streams.append(f"{symbol}@kline_1m")  # 1-minute klines
+
+         if len(streams) == 1:
+             return f"wss://stream.binance.com:9443/ws/{streams[0]}"
+         else:
+             stream_string = "/".join(streams)
+             return f"wss://stream.binance.com:9443/stream?streams={stream_string}"
+
+     def _on_message(self, ws, message):
+         """Handle incoming WebSocket messages."""
+         try:
+             data = json.loads(message)
+
+             # Handle multi-stream format
+             if 'stream' in data:
+                 stream_data = data['data']
+                 symbol = stream_data['s'].lower()
+             else:
+                 stream_data = data
+                 symbol = stream_data['s'].lower()
+
+             # Extract kline data
+             kline = stream_data['k']
+             is_closed = kline['x']  # Whether kline is closed
+
+             if is_closed:  # Only process closed candles
+                 candle_data = {
+                     'timestamp': pd.to_datetime(kline['t'], unit='ms'),
+                     'symbol': symbol,
+                     'open': float(kline['o']),
+                     'high': float(kline['h']),
+                     'low': float(kline['l']),
+                     'close': float(kline['c']),
+                     'volume': float(kline['v'])
+                 }
+
+                 self._process_candle(candle_data)
+
+         except Exception as e:
+             print(f"Error processing message: {e}")
+
+     def _process_candle(self, candle_data: Dict):
+         """Process new candle data and update spreads."""
+         symbol = candle_data['symbol']
+
+         # Add to buffer
+         self.data_buffers[symbol].append(candle_data)
+
+         # Calculate spread if we have enough data
+         if len(self.data_buffers[symbol]) >= self.window:
+             # Convert buffer to DataFrame for spread calculation
+             df = pd.DataFrame(list(self.data_buffers[symbol])[-self.window:])
+
+             # Calculate current spread over the buffered window
+             try:
+                 current_spread = edge_rolling(df, window=min(len(df), self.window)).iloc[-1]
+
+                 if not pd.isna(current_spread):
+                     spread_bps = current_spread * 10000  # Convert to basis points
+
+                     spread_data = {
+                         'timestamp': candle_data['timestamp'],
+                         'symbol': symbol,
+                         'spread_bps': spread_bps,
+                         'price': candle_data['close']
+                     }
+
+                     self.spread_buffers[symbol].append(spread_data)
+
+                     # Check for alerts
+                     self._check_alerts(spread_data)
+
+                     # Notify callbacks
+                     for callback in self.data_callbacks:
+                         callback(candle_data, spread_data)
+
+             except Exception as e:
+                 print(f"Error calculating spread for {symbol}: {e}")
+
+     def _check_alerts(self, spread_data: Dict):
+         """Check if spread triggers any alerts."""
+         symbol = spread_data['symbol']
+         spread_bps = spread_data['spread_bps']
+         thresholds = self.alert_thresholds[symbol]
+
+         alert_type = None
+         if spread_bps > thresholds['high']:
+             alert_type = 'HIGH'
+         elif spread_bps < thresholds['low']:
+             alert_type = 'LOW'
+
+         if alert_type:
+             alert_data = {
+                 'type': alert_type,
+                 'symbol': symbol,
+                 'spread_bps': spread_bps,
+                 'threshold': thresholds[alert_type.lower()],
+                 'timestamp': spread_data['timestamp'],
+                 'price': spread_data['price']
+             }
+
+             for callback in self.alert_callbacks:
+                 callback(alert_data)
+
+     def _on_error(self, ws, error):
+         """Handle WebSocket errors."""
+         print(f"WebSocket error: {error}")
+
+     def _on_close(self, ws, close_status_code, close_msg):
+         """Handle WebSocket connection close."""
+         print("WebSocket connection closed")
+
+     def _on_open(self, ws):
+         """Handle WebSocket connection open."""
+         print(f"WebSocket connected for symbols: {', '.join(self.symbols)}")
+
+     def start(self):
+         """Start the live monitoring."""
+         if self.running:
+             print("Monitor is already running")
+             return
+
+         self.running = True
+
+         # Create WebSocket URL
+         ws_url = self._create_websocket_url(self.symbols)
+
+         # Create WebSocket connection
+         self.ws = websocket.WebSocketApp(
+             ws_url,
+             on_message=self._on_message,
+             on_error=self._on_error,
+             on_close=self._on_close,
+             on_open=self._on_open
+         )
+
+         # Start WebSocket in a separate thread
+         self.ws_thread = threading.Thread(target=self.ws.run_forever)
+         self.ws_thread.daemon = True
+         self.ws_thread.start()
+
+         print("Live spread monitoring started...")
+
+     def stop(self):
+         """Stop the live monitoring."""
+         if not self.running:
+             return
+
+         self.running = False
+         if hasattr(self, 'ws'):
+             self.ws.close()
+
+         print("Live spread monitoring stopped.")
+
+     def get_current_data(self) -> Dict[str, pd.DataFrame]:
+         """Get current data for all symbols."""
+         result = {}
+         for symbol in self.symbols:
+             if len(self.data_buffers[symbol]) > 0:
+                 result[symbol] = pd.DataFrame(list(self.data_buffers[symbol]))
+         return result
+
+     def get_current_spreads(self) -> Dict[str, pd.DataFrame]:
+         """Get current spread data for all symbols."""
+         result = {}
+         for symbol in self.symbols:
+             if len(self.spread_buffers[symbol]) > 0:
+                 result[symbol] = pd.DataFrame(list(self.spread_buffers[symbol]))
+         return result
+
+ def create_live_dashboard_example():
+     """
+     Example of creating a live dashboard (console-based).
+     """
+     def data_callback(candle_data, spread_data):
+         """Print new data to console."""
+         symbol = spread_data['symbol'].upper()
+         timestamp = spread_data['timestamp'].strftime('%H:%M:%S')
+         price = spread_data['price']
+         spread_bps = spread_data['spread_bps']
+
+         print(f"[{timestamp}] {symbol}: ${price:.2f} | Spread: {spread_bps:.2f}bps")
+
+     def alert_callback(alert_data):
+         """Print alerts to console."""
+         symbol = alert_data['symbol'].upper()
+         alert_type = alert_data['type']
+         spread_bps = alert_data['spread_bps']
+         threshold = alert_data['threshold']
+         timestamp = alert_data['timestamp'].strftime('%H:%M:%S')
+
+         print(f"🚨 [{timestamp}] {alert_type} SPREAD ALERT for {symbol}: "
+               f"{spread_bps:.2f}bps (threshold: {threshold}bps)")
+
+     # Create monitor
+     monitor = LiveSpreadMonitor(['BTCUSDT', 'ETHUSDT'], window=10)
+
+     # Set custom thresholds
+     monitor.set_alert_threshold('BTCUSDT', high_bps=50, low_bps=2)
+     monitor.set_alert_threshold('ETHUSDT', high_bps=60, low_bps=3)
+
+     # Add callbacks
+     monitor.add_data_callback(data_callback)
+     monitor.add_alert_callback(alert_callback)
+
+     return monitor
+
+ if __name__ == "__main__":
+     print("Live Spread Monitor Example")
+     print("==========================")
+     print("This example demonstrates real-time spread monitoring using WebSockets.")
+     print("Note: This requires an active internet connection and will connect to Binance WebSocket.")
+     print()
+
+     try:
+         # Create and start monitor
+         monitor = create_live_dashboard_example()
+
+         print("Starting live monitor... (Press Ctrl+C to stop)")
+         monitor.start()
+
+         # Run for a demo period
+         time.sleep(60)  # Run for 1 minute
+
+     except KeyboardInterrupt:
+         print("\nStopping monitor...")
+     finally:
+         if 'monitor' in locals():
+             monitor.stop()
+
+     print("Example completed.")
@@ -0,0 +1,183 @@
+ Metadata-Version: 2.4
+ Name: quantjourney-bidask
+ Version: 0.5.0
+ Summary: Efficient bid-ask spread estimator from OHLC prices
+ Author-email: Jakub Polec <jakub@quantjourney.pro>
+ License-Expression: MIT
+ Project-URL: Homepage, https://github.com/QuantJourneyOrg/qj_bidask
+ Project-URL: Repository, https://github.com/QuantJourneyOrg/qj_bidask
+ Project-URL: Bug Tracker, https://github.com/QuantJourneyOrg/qj_bidask/issues
+ Keywords: finance,bid-ask,spread,trading,quantitative,OHLC
+ Classifier: Development Status :: 4 - Beta
+ Classifier: Intended Audience :: Financial and Insurance Industry
+ Classifier: Operating System :: OS Independent
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3.11
+ Classifier: Programming Language :: Python :: 3.12
+ Classifier: Topic :: Office/Business :: Financial :: Investment
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
+ Requires-Python: <3.15,>=3.11
+ Description-Content-Type: text/markdown
+ License-File: LICENSE
+ Requires-Dist: numpy>=1.20
+ Requires-Dist: pandas>=1.5
+ Requires-Dist: requests>=2.28
+ Requires-Dist: yfinance>=0.2
+ Requires-Dist: matplotlib>=3.5
+ Requires-Dist: plotly>=5.0
+ Requires-Dist: websocket-client>=1.0
+ Provides-Extra: dev
+ Requires-Dist: pytest>=7.0; extra == "dev"
+ Requires-Dist: pytest-mock>=3.10; extra == "dev"
+ Requires-Dist: pytest-cov>=4.0; extra == "dev"
+ Requires-Dist: ruff>=0.1; extra == "dev"
+ Requires-Dist: mypy>=1.0; extra == "dev"
+ Requires-Dist: black>=22.0; extra == "dev"
+ Requires-Dist: isort>=5.0; extra == "dev"
+ Provides-Extra: examples
+ Requires-Dist: jupyter>=1.0; extra == "examples"
+ Requires-Dist: ipywidgets>=7.0; extra == "examples"
+ Dynamic: license-file
+
+ # QuantJourney Bid-Ask Spread Estimator
+
+ ![PyPI](https://img.shields.io/pypi/v/quantjourney-bidask)
+ ![License](https://img.shields.io/github/license/quantjourney/bidask)
+ ![Tests](https://img.shields.io/github/workflow/status/quantjourney/bidask/Test)
+
+ The `quantjourney-bidask` library provides an efficient estimator for calculating bid-ask spreads from open, high, low, and close (OHLC) prices, based on the methodology described in:
+
+ > Ardia, D., Guidotti, E., Kroencke, T.A. (2024). Efficient Estimation of Bid-Ask Spreads from Open, High, Low, and Close Prices. *Journal of Financial Economics*, 161, 103916. [doi:10.1016/j.jfineco.2024.103916](https://doi.org/10.1016/j.jfineco.2024.103916)
+
+ This library is designed for quantitative finance professionals, researchers, and traders who need accurate and computationally efficient spread estimates for equities, cryptocurrencies, and other assets.
+
+ ## Features
+
+ - **Efficient Spread Estimation**: Implements the EDGE estimator for single, rolling, and expanding windows.
+ - **Data Integration**: Fetch OHLC data from Binance (via custom FastAPI server) and Yahoo Finance (via yfinance).
+ - **Robust Handling**: Supports missing values, non-positive prices, and various data frequencies.
+ - **Comprehensive Tests**: Extensive unit tests with known test cases from the original paper.
+ - **Clear Documentation**: Detailed docstrings and usage examples.
+
+ ## Installation
+
+ Install the library via pip:
+
+ ```bash
+ pip install quantjourney-bidask
+ ```
+
+ ## Quick Start
+
+ ### Basic Usage
+
+ ```python
+ from quantjourney_bidask import edge
+
+ # Example OHLC data (as lists or numpy arrays)
+ open_prices = [100.0, 101.5, 99.8, 102.1, 100.9]
+ high_prices = [102.3, 103.0, 101.2, 103.5, 102.0]
+ low_prices = [99.5, 100.8, 98.9, 101.0, 100.1]
+ close_prices = [101.2, 100.2, 101.8, 100.5, 101.5]
+
+ # Calculate bid-ask spread
+ spread = edge(open_prices, high_prices, low_prices, close_prices)
+ print(f"Estimated bid-ask spread: {spread:.6f}")
+ ```
+
+ ### Rolling Window Analysis
+
+ ```python
+ import pandas as pd
+ from quantjourney_bidask import edge_rolling
+
+ # edge_rolling works on a DataFrame with 'open', 'high', 'low', 'close' columns
+ df = pd.DataFrame({
+     'open': open_prices,
+     'high': high_prices,
+     'low': low_prices,
+     'close': close_prices,
+ })
+
+ # Calculate rolling spreads with a 3-period window (use e.g. window=20 on longer samples)
+ rolling_spreads = edge_rolling(df, window=3)
+ print(f"Rolling spreads: {rolling_spreads}")
+ ```
+
+ ### Data Fetching Integration
+
+ ```python
+ from quantjourney_bidask import fetch_yfinance_data, edge
+
+ # Fetch OHLC data for a stock (returns a DataFrame with lowercase column names)
+ data = fetch_yfinance_data(["AAPL"], period="1mo", interval="1h")
+
+ # Calculate spread from fetched data
+ spread = edge(data['open'], data['high'], data['low'], data['close'])
+ print(f"AAPL spread estimate: {spread:.6f}")
+ ```
+
+ ### Live Monitoring
+
+ ```python
+ import time
+ from quantjourney_bidask import LiveSpreadMonitor
+
+ # Monitor live spreads for cryptocurrency (connects to the Binance WebSocket).
+ # Spread estimates appear once `window` closed 1-minute candles have been received.
+ monitor = LiveSpreadMonitor(["BTCUSDT"], window=20)
+ monitor.start()
+
+ # Wait until enough closed candles have arrived (about 20 minutes here)
+ time.sleep(20 * 60)
+
+ # Latest spread estimates, one DataFrame per symbol (keyed by lowercase symbol)
+ spreads = monitor.get_current_spreads()
+ if "btcusdt" in spreads:
+     print(spreads["btcusdt"].tail(1))
+
+ monitor.stop()
+ ```
+
+ ## API Reference
+
+ ### Core Functions
+
+ - `edge(open, high, low, close, sign=False)`: Single-period spread estimation from OHLC arrays
+ - `edge_rolling(df, window, sign=False, **kwargs)`: Rolling window estimation on an OHLC DataFrame
+ - `edge_expanding(df, min_periods=1, sign=False)`: Expanding window estimation on an OHLC DataFrame (see the sketch below)
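+
+ For example, a minimal expanding-window sketch, reusing the OHLC DataFrame `df` built in the Rolling Window example above:
+
+ ```python
+ from quantjourney_bidask import edge_expanding
+
+ # Spread estimate using all data from the first bar up to each point in time
+ expanding_spreads = edge_expanding(df, min_periods=3)
+ print(expanding_spreads.tail())
+ ```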
+
+ ### Data Fetching
+
+ - `fetch_yfinance_data(tickers, period="1mo", interval="1d", start=None, end=None)`: Fetch OHLC data from Yahoo Finance
+ - `fetch_binance_data(symbols, timeframe, start, end, api_key, api_url)`: Fetch OHLC data from Binance via the companion FastAPI data server (see the sketch below)
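+
+ A minimal Binance sketch; the API key and server URL below are placeholders for your own deployment of the FastAPI data server:
+
+ ```python
+ from quantjourney_bidask import fetch_binance_data
+
+ btc_df = fetch_binance_data(
+     symbols=["BTCUSDT"],
+     timeframe="1h",
+     start="2024-01-01T00:00:00Z",
+     end="2024-01-02T00:00:00Z",
+     api_key="YOUR_API_KEY",           # placeholder
+     api_url="http://localhost:8000",  # placeholder FastAPI server URL
+ )
+ print(btc_df.head())
+ ```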
+
+ ### Live Monitoring
+
+ - `LiveSpreadMonitor(symbols, window=20, buffer_size=1000)`: Real-time spread monitoring via WebSocket, with data and alert callbacks (see the sketch below)
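+
+ Beyond polling `get_current_spreads()`, the monitor supports per-symbol alert thresholds (in basis points) and callbacks; a small sketch:
+
+ ```python
+ from quantjourney_bidask import LiveSpreadMonitor
+
+ monitor = LiveSpreadMonitor(["BTCUSDT"], window=20)
+
+ # Alert when the estimated spread leaves the 2-50 bps band
+ monitor.set_alert_threshold("BTCUSDT", high_bps=50, low_bps=2)
+
+ # Called for every closed candle that yields a spread estimate
+ monitor.add_data_callback(
+     lambda candle, spread: print(spread["symbol"], round(spread["spread_bps"], 2), "bps")
+ )
+ # Called whenever a threshold is crossed
+ monitor.add_alert_callback(lambda alert: print("ALERT:", alert["type"], alert["spread_bps"]))
+
+ monitor.start()
+ # ... run for a while, then:
+ monitor.stop()
+ ```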
+
+ ## Requirements
+
+ - Python >= 3.11
+ - numpy >= 1.20
+ - pandas >= 1.5
+ - requests >= 2.28
+ - yfinance >= 0.2
+ - matplotlib >= 3.5
+ - plotly >= 5.0
+ - websocket-client >= 1.0
+
+ ## Academic Citation
+
+ If you use this library in academic research, please cite:
+
+ ```bibtex
+ @article{ardia2024efficient,
+   title={Efficient Estimation of Bid-Ask Spreads from Open, High, Low, and Close Prices},
+   author={Ardia, David and Guidotti, Emanuele and Kroencke, Tim A},
+   journal={Journal of Financial Economics},
+   volume={161},
+   pages={103916},
+   year={2024},
+   publisher={Elsevier}
+ }
+ ```
+
+ ## License
+
+ This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
+
+ ## Contributing
+
+ Contributions are welcome! Please feel free to submit a Pull Request. For major changes, please open an issue first to discuss what you would like to change.
+
+ ## Support
+
+ - **Documentation**: [GitHub Repository](https://github.com/QuantJourneyOrg/qj_bidask)
+ - **Issues**: [Bug Tracker](https://github.com/QuantJourneyOrg/qj_bidask/issues)
+ - **Contact**: jakub@quantjourney.pro
@@ -0,0 +1,12 @@
+ quantjourney_bidask/__init__.py,sha256=vumoRDEDOTclYapknfSwKpCZi9IdfJbukdp7S1-kphA,409
+ quantjourney_bidask/_version.py,sha256=FG3XKw_Vb0JfvroFMn303BEuhI10eKAvkGzI0gQT-LY,235
+ quantjourney_bidask/data_fetcher.py,sha256=GMVf4wRVwIE2JJ2sYAR_CCo56JQnReNhTWTSrZc0-L0,4931
+ quantjourney_bidask/edge.py,sha256=z-uRUH3Rot6Zw-dPa2pNlQu0hY1YJu6d0c18IyqbiNs,6105
+ quantjourney_bidask/edge_expanding.py,sha256=bN6lBetJdqC2xSdRc1RTjHfSI1XXVKegl0GQaD8eanY,2047
+ quantjourney_bidask/edge_rolling.py,sha256=CAZW_wBF7G6mGLenoEwlq4yB_1x1-PsQ4TgwL-zdM7w,8910
+ quantjourney_bidask/websocket_fetcher.py,sha256=xMS_qLbSW9hCS3RbNKvkn5HTK0XGmAO4wpaAl4_Mxb4,10895
+ quantjourney_bidask-0.5.0.dist-info/licenses/LICENSE,sha256=vny3AM3KIslUu5fdooMsdxVKghoZhDKnBCsLvMDHqLg,1081
+ quantjourney_bidask-0.5.0.dist-info/METADATA,sha256=17qESjK2WZZl89pukiZF21IzX4xlbP_n14lZWNsfDbg,6234
+ quantjourney_bidask-0.5.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ quantjourney_bidask-0.5.0.dist-info/top_level.txt,sha256=rOBM4GxA87iQv-mR8-WZdu3-Yj5ESyggRICpUhJ-4Dg,20
+ quantjourney_bidask-0.5.0.dist-info/RECORD,,
@@ -0,0 +1,5 @@
+ Wheel-Version: 1.0
+ Generator: setuptools (80.9.0)
+ Root-Is-Purelib: true
+ Tag: py3-none-any
+
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2024 Jakub Polec, QuantJourney
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
@@ -0,0 +1 @@
+ quantjourney_bidask