pybinbot 0.1.6__py3-none-any.whl → 0.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pybinbot/shared/heikin_ashi.py (new file)
@@ -0,0 +1,198 @@
+ from typing import cast
+
+ from pandas import DataFrame, concat, to_datetime, to_numeric
+ from pandas.api.types import is_numeric_dtype
+
+ from pybinbot.shared.enums import ExchangeId
+
+
+ class HeikinAshi:
+     """
+     Dataframe operations shared across projects, plus the Heikin Ashi
+     candle transformation. Grouping them here avoids circular imports.
+
+     Canonical formulas applied to OHLC data:
+         HA_Close = (O + H + L + C) / 4
+         HA_Open  = (prev_HA_Open + prev_HA_Close) / 2, seed = (O0 + C0) / 2
+         HA_High  = max(H, HA_Open, HA_Close)
+         HA_Low   = min(L, HA_Open, HA_Close)
+
+     This version:
+     * Works if a 'timestamp' column exists (sorted chronologically first).
+     * Does NOT mutate the original dataframe in place; returns a copy.
+     * Validates required columns.
+     """
+
+     binance_cols = [
+         "open_time",
+         "open",
+         "high",
+         "low",
+         "close",
+         "volume",
+         "close_time",
+         "quote_asset_volume",
+         "number_of_trades",
+         "taker_buy_base_asset_volume",
+         "taker_buy_quote_asset_volume",
+     ]
+     kucoin_cols = [
+         "open_time",
+         "open",
+         "high",
+         "low",
+         "close",
+         "volume",
+         "close_time",
+         "quote_asset_volume",
+     ]
+
+     numeric_cols = [
+         "open",
+         "high",
+         "low",
+         "close",
+         "open_time",
+         "close_time",
+         "volume",
+         "quote_asset_volume",
+     ]
+
+     ohlc_cols = ["open", "high", "low", "close"]
+
+     REQUIRED_COLUMNS = kucoin_cols
+
+     def pre_process(
+         self, exchange: ExchangeId, candles: list
+     ) -> tuple[DataFrame, DataFrame, DataFrame]:
+         if exchange == ExchangeId.BINANCE:
+             # Binance API may return extra columns; only take the expected ones
+             df_raw = DataFrame(candles)
+             df = df_raw.iloc[:, : len(self.binance_cols)].copy()
+             df.columns = self.binance_cols
+             columns = self.binance_cols
+         else:
+             df = DataFrame(candles, columns=self.kucoin_cols)
+             columns = self.kucoin_cols
+
+         # Ensure the dataframe has exactly the expected columns
+         if len(df.columns) != len(columns):
+             raise ValueError(
+                 f"Column mismatch: {len(df.columns)} vs expected {len(columns)}"
+             )
+
+         # Convert only numeric columns safely
+         numeric_cols = ["open", "high", "low", "close", "volume"]
+         for col in numeric_cols:
+             df[col] = to_numeric(df[col], errors="coerce")
+
+         df = self.get_heikin_ashi(df)
+
+         # Ensure close_time is datetime and set as index for proper resampling
+         df["timestamp"] = to_datetime(df["close_time"], unit="ms")
+         df.set_index("timestamp", inplace=True)
+         df = df.sort_index()
+         df = df[~df.index.duplicated(keep="last")]
+
+         # Aggregation rules; open_time/close_time keep the first value per bucket
+         resample_aggregation = {
+             "open": "first",
+             "close": "last",
+             "high": "max",
+             "low": "min",
+             "volume": "sum",
+             "close_time": "first",
+             "open_time": "first",
+         }
+
+         # Resample to 4-hour candles for TWAP (aligned to calendar hours like MongoDB)
+         df_4h = df.resample("4h").agg(cast(dict, resample_aggregation))
+         # Overwrite open_time and close_time with the 4h bucket boundaries
+         df_4h["open_time"] = df_4h.index
+         df_4h["close_time"] = df_4h.index
+
+         # Resample to 1-hour candles for Supertrend (aligned to calendar hours like MongoDB)
+         df_1h = df.resample("1h").agg(cast(dict, resample_aggregation))
+         # Overwrite open_time and close_time with the 1h bucket boundaries
+         df_1h["open_time"] = df_1h.index
+         df_1h["close_time"] = df_1h.index
+
+         return df, df_1h, df_4h
+
+     @staticmethod
+     def post_process(df: DataFrame) -> DataFrame:
+         """
+         Post-process the DataFrame by dropping rows with missing
+         values and resetting the index.
+         """
+         df.dropna(inplace=True)
+         df.reset_index(drop=True, inplace=True)
+         return df
+
+     def ensure_ohlc(self, df: DataFrame) -> DataFrame:
+         """Validate & coerce a DataFrame into an OHLC DataFrame.
+
+         Steps:
+         - Verify all REQUIRED_COLUMNS are present (raises ValueError if missing).
+         - Coerce numeric columns (including *_time, which are expected as ms epoch).
+         - Fail early if quote_asset_volume becomes entirely NaN.
+         - Return the same underlying object (no deep copy).
+         """
+         missing = set(self.REQUIRED_COLUMNS) - set(df.columns)
+         if missing:
+             raise ValueError(f"Missing required OHLC columns: {missing}")
+
+         for col in self.numeric_cols:
+             if col in df.columns and not is_numeric_dtype(df[col]):
+                 df[col] = to_numeric(df[col], errors="coerce")
+
+         if (
+             "quote_asset_volume" in df.columns
+             and df["quote_asset_volume"].notna().sum() == 0
+         ):
+             raise ValueError(
+                 "quote_asset_volume column is entirely non-numeric after coercion; "
+                 "cannot compute quote_volume_ratio"
+             )
+
+         return df
+
+     def get_heikin_ashi(self, df: DataFrame) -> DataFrame:
+         if df.empty:
+             return df
+
+         # Validate & coerce using the type guard helper.
+         df = self.ensure_ohlc(df)
+         work = df.reset_index(drop=True).copy()
+
+         # Ensure numeric dtypes (API feeds sometimes deliver strings)
+         for c in self.ohlc_cols:
+             # Only attempt conversion if the dtype is not already numeric
+             if not is_numeric_dtype(work[c]):
+                 work.loc[:, c] = to_numeric(work[c], errors="coerce")
+
+         if work[self.ohlc_cols].isna().any().any():
+             # Drop rows that became NaN after coercion (invalid numeric data)
+             work = work.dropna(subset=self.ohlc_cols).reset_index(drop=True)
+             if work.empty:
+                 raise ValueError("All OHLC rows became NaN after numeric coercion.")
+
+         # Compute HA_Close from the ORIGINAL OHLC (still intact in 'work').
+         ha_close = (work["open"] + work["high"] + work["low"] + work["close"]) / 4.0
+
+         # Seed HA_Open with the original O & C (not HA close).
+         ha_open = ha_close.copy()
+         ha_open.iloc[0] = (work["open"].iloc[0] + work["close"].iloc[0]) / 2.0
+         for i in range(1, len(work)):
+             ha_open.iloc[i] = (ha_open.iloc[i - 1] + ha_close.iloc[i - 1]) / 2.0
+
+         # High / Low derived from max/min of (raw high/low, ha_open, ha_close)
+         ha_high = concat([work["high"], ha_open, ha_close], axis=1).max(axis=1)
+         ha_low = concat([work["low"], ha_open, ha_close], axis=1).min(axis=1)
+
+         # Assign transformed values.
+         work.loc[:, "open"] = ha_open
+         work.loc[:, "high"] = ha_high
+         work.loc[:, "low"] = ha_low
+         work.loc[:, "close"] = ha_close
+
+         return work
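
To sanity-check the new module, here is a minimal sketch run against the canonical formulas from the class docstring. The candles are synthetic and kucoin-shaped; the `pybinbot.shared.heikin_ashi` import path is taken from the RECORD section further down.

```python
from pandas import DataFrame
from pybinbot.shared.heikin_ashi import HeikinAshi

# Synthetic kucoin-shaped candles covering all REQUIRED_COLUMNS
candles = DataFrame({
    "open_time": [1, 2, 3],
    "open": [100.0, 104.0, 102.0],
    "high": [106.0, 105.0, 108.0],
    "low": [99.0, 101.0, 100.0],
    "close": [104.0, 102.0, 107.0],
    "volume": [10.0, 12.0, 9.0],
    "close_time": [2, 3, 4],
    "quote_asset_volume": [1040.0, 1224.0, 963.0],
})

ha = HeikinAshi().get_heikin_ashi(candles)

# Seed formulas: HA_Close = (O + H + L + C) / 4, HA_Open = (O0 + C0) / 2
assert ha["close"].iloc[0] == (100.0 + 106.0 + 99.0 + 104.0) / 4
assert ha["open"].iloc[0] == (100.0 + 104.0) / 2
# The input frame comes back as a transformed copy, not mutated in place
assert candles["open"].iloc[0] == 100.0
```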
pybinbot/shared/indicators.py (new file)
@@ -0,0 +1,271 @@
+ from typing import cast
+
+ from pandas import DataFrame, Series, Timedelta, concat, to_datetime
+
+
+ class Indicators:
+     """
+     Technical indicators for financial data analysis.
+     This avoids ta-lib, whose native dependencies cause
+     issues in our infrastructure.
+     """
+
+     @staticmethod
+     def moving_averages(df: DataFrame, period=7) -> DataFrame:
+         """
+         Calculate a simple moving average for the given period
+         (typically 7, 25 or 100 days); the resulting ma_* columns
+         feed the MA spreads used for take profit and stop loss.
+         """
+         df[f"ma_{period}"] = df["close"].rolling(window=period).mean()
+         return df
+
+     @staticmethod
+     def macd(df: DataFrame) -> DataFrame:
+         """
+         Moving Average Convergence Divergence (MACD) indicator
+         https://www.alpharithms.com/calculate-macd-python-272222/
+         """
+         # Get the 12-day EMA of the closing price
+         k = df["close"].ewm(span=12, min_periods=12).mean()
+         # Get the 26-day EMA of the closing price
+         d = df["close"].ewm(span=26, min_periods=26).mean()
+         # Subtract the 26-day EMA from the 12-day EMA to get the MACD line
+         macd = k - d
+         # Get the 9-day EMA of the MACD for the signal (trigger) line
+         macd_s = macd.ewm(span=9, min_periods=9).mean()
+
+         df["macd"] = macd
+         df["macd_signal"] = macd_s
+
+         return df
+
+     @staticmethod
+     def ema(
+         df: DataFrame, column: str = "close", span: int = 9, out_col: str | None = None
+     ) -> DataFrame:
+         """Exponential moving average for a given column.
+
+         Adds a new column with the EMA values and returns the DataFrame.
+         """
+         target_col = out_col or f"ema_{span}"
+         df[target_col] = df[column].ewm(span=span, adjust=False).mean()
+         return df
+
+     @staticmethod
+     def trend_ema(
+         df: DataFrame, column: str = "close", fast_span: int = 9, slow_span: int = 21
+     ) -> DataFrame:
+         """Compute fast and slow EMAs for trend analysis.
+
+         Adds 'ema_fast' and 'ema_slow' columns and returns the DataFrame.
+         """
+         df = Indicators.ema(df, column=column, span=fast_span, out_col="ema_fast")
+         df = Indicators.ema(df, column=column, span=slow_span, out_col="ema_slow")
+         return df
+
+     @staticmethod
+     def rsi(df: DataFrame, window: int = 14) -> DataFrame:
+         """
+         Relative Strength Index (RSI) indicator
+         https://www.qmr.ai/relative-strength-index-rsi-in-python/
+         """
+         change = df["close"].astype(float).diff()
+
+         # Split the price change into gains (>= 0) and losses (>= 0)
+         gain = change.mask(change < 0, 0.0)
+         loss = -change.mask(change > 0, -0.0)
+
+         # Calculate the rolling average of average up and average down
+         avg_up = gain.rolling(window).mean()
+         avg_down = loss.rolling(window).mean()
+
+         df["rsi"] = 100 * avg_up / (avg_up + avg_down)
+
+         return df
+
+     @staticmethod
+     def standard_rsi(df: DataFrame, window: int = 14) -> DataFrame:
+         delta = df["close"].diff()
+         gain = delta.where(delta > 0, 0).rolling(window=window, min_periods=1).mean()
+         loss = (-delta.where(delta < 0, 0)).rolling(window=window, min_periods=1).mean()
+         rs = gain / (loss + 1e-10)
+         df["rsi"] = 100 - (100 / (1 + rs))
+         return df
+
+     @staticmethod
+     def ma_spreads(df: DataFrame) -> DataFrame:
+         """
+         Calculates percentage spreads between moving averages,
+         for later use in take profit and stop loss.
+
+         Adds:
+         - big_ma_spread: % difference between ma_100 and ma_25
+         - small_ma_spread: % difference between ma_25 and ma_7
+         """
+         band_1 = (abs(df["ma_100"] - df["ma_25"]) / df["ma_100"]) * 100
+         band_2 = (abs(df["ma_25"] - df["ma_7"]) / df["ma_25"]) * 100
+
+         df["big_ma_spread"] = band_1
+         df["small_ma_spread"] = band_2
+
+         return df
+
+     @staticmethod
+     def bollinguer_spreads(df: DataFrame, window=20, num_std=2) -> DataFrame:
+         """
+         Calculates Bollinger bands
+
+         https://www.kaggle.com/code/blakemarterella/pandas-bollinger-bands
+         """
+         bb_df = df.copy()
+         bb_df["rolling_mean"] = bb_df["close"].rolling(window).mean()
+         bb_df["rolling_std"] = bb_df["close"].rolling(window).std()
+         bb_df["upper_band"] = bb_df["rolling_mean"] + (num_std * bb_df["rolling_std"])
+         bb_df["lower_band"] = bb_df["rolling_mean"] - (num_std * bb_df["rolling_std"])
+
+         df["bb_upper"] = bb_df["upper_band"]
+         df["bb_lower"] = bb_df["lower_band"]
+         df["bb_mid"] = bb_df["rolling_mean"]
+
+         return df
+
+     @staticmethod
+     def log_volatility(df: DataFrame, window_size=7) -> DataFrame:
+         """
+         Rolling volatility: the standard deviation of percentage returns
+         over the window, so values are comparable across assets.
+
+         Adds:
+         - perc_volatility
+         """
+         df["perc_volatility"] = (
+             Series(df["close"]).astype(float).pct_change().rolling(window_size).std()
+         )
+
+         return df
+
+     @staticmethod
+     def set_twap(df: DataFrame, periods: int = 30) -> DataFrame:
+         """
+         Time-weighted average price
+         https://stackoverflow.com/a/69517577/2454059
+
+         Periods kept at 30 by default,
+         otherwise there's not enough data
+         """
+         pre_df = df.copy()
+         pre_df["Event Time"] = to_datetime(pre_df["close_time"])
+         time_diff_td = cast(
+             "Series[Timedelta]", pre_df["Event Time"].diff(periods=periods)
+         )
+         pre_df["Time Diff"] = time_diff_td.dt.total_seconds() / 3600
+         pre_df["Weighted Value"] = pre_df["close"] * pre_df["Time Diff"]
+         # Fixed rolling window of the given interval
+         pre_df["Weighted Average"] = (
+             pre_df["Weighted Value"].rolling(periods).sum() / pre_df["Time Diff"].sum()
+         )
+         df["twap"] = pre_df["Weighted Average"]
+
+         return df
+
+     @staticmethod
+     def atr(
+         df: DataFrame,
+         window: int = 14,
+         min_periods: int | None = None,
+         col_prefix: str = "",
+     ) -> DataFrame:
+         """
+         Generic ATR (Average True Range) indicator.
+
+         Adds column: '{col_prefix}ATR'
+         """
+         if df.empty:
+             return df
+
+         if min_periods is None:
+             min_periods = window
+
+         prev_close = df["close"].shift(1)
+
+         # True range: the largest of the three candidate ranges
+         tr = concat(
+             [
+                 df["high"] - df["low"],
+                 (df["high"] - prev_close).abs(),
+                 (df["low"] - prev_close).abs(),
+             ],
+             axis=1,
+         ).max(axis=1)
+
+         df[f"{col_prefix}ATR"] = tr.rolling(
+             window=window, min_periods=min_periods
+         ).mean()
+
+         return df
+
+     @staticmethod
+     def set_supertrend(
+         df: DataFrame,
+         atr_col: str = "ATR",
+         multiplier: float = 3.0,
+         prefix: str = "",
+     ) -> DataFrame:
+         """
+         Supertrend indicator.
+
+         Requires ATR to already exist.
+         Adds:
+         - '{prefix}supertrend'
+         - '{prefix}supertrend_dir' (1 bullish, -1 bearish)
+         """
+         if df.empty or atr_col not in df:
+             return df
+
+         hl2 = (df["high"] + df["low"]) / 2
+         atr = df[atr_col]
+
+         upperband = hl2 + multiplier * atr
+         lowerband = hl2 - multiplier * atr
+
+         # Ratchet the bands: only tighten them while price stays inside
+         final_upper = upperband.copy()
+         final_lower = lowerband.copy()
+
+         for i in range(1, len(df)):
+             final_upper.iloc[i] = (
+                 min(upperband.iloc[i], final_upper.iloc[i - 1])
+                 if df["close"].iloc[i - 1] <= final_upper.iloc[i - 1]
+                 else upperband.iloc[i]
+             )
+
+             final_lower.iloc[i] = (
+                 max(lowerband.iloc[i], final_lower.iloc[i - 1])
+                 if df["close"].iloc[i - 1] >= final_lower.iloc[i - 1]
+                 else lowerband.iloc[i]
+             )
+
+         direction = [0] * len(df)
+         supertrend = [None] * len(df)
+
+         for i in range(1, len(df)):
+             if df["close"].iloc[i] > final_upper.iloc[i - 1]:
+                 direction[i] = 1
+             elif df["close"].iloc[i] < final_lower.iloc[i - 1]:
+                 direction[i] = -1
+             else:
+                 direction[i] = direction[i - 1]
+
+             # In an uptrend the lower band trails price; in a downtrend the upper band does
+             supertrend[i] = (
+                 final_lower.iloc[i] if direction[i] == 1 else final_upper.iloc[i]
+             )
+
+         df[f"{prefix}supertrend"] = supertrend
+         df[f"{prefix}supertrend_dir"] = direction
+
+         return df
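
The indicators are meant to chain on the resampled frames from `pre_process()` (1h for Supertrend, 4h for TWAP, per the comments there), and `set_supertrend()` requires `atr()` to have run first so the '{prefix}ATR' column exists. A minimal sketch with synthetic candles:

```python
from pandas import DataFrame
from pybinbot.shared.indicators import Indicators

df = DataFrame({
    "high": [10.5, 11.0, 11.4, 11.2, 11.8, 12.1, 12.0],
    "low": [9.8, 10.2, 10.9, 10.6, 11.1, 11.5, 11.4],
    "close": [10.2, 10.8, 11.1, 11.0, 11.6, 11.9, 11.7],
})

df = Indicators.atr(df, window=3)   # adds 'ATR'
df = Indicators.set_supertrend(df)  # adds 'supertrend' and 'supertrend_dir'

print(df[["close", "ATR", "supertrend_dir"]].tail())
```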
pybinbot/shared/logging_config.py
@@ -16,9 +16,7 @@ def configure_logging(
      force: bool = True,
      quiet_loggers: Iterable[str] | None = ("uvicorn", "confluent_kafka"),
  ) -> None:
-     """
-     Configure root logging consistently across API services.
-     """
+     """Configure root logging consistently across services."""
      resolved_level = str(level or os.environ.get("LOG_LEVEL", "INFO")).upper()
      logging.basicConfig(
          level=resolved_level,
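
A minimal usage sketch, assuming `configure_logging` takes a `level` argument as the hunk's body implies (falling back to the `LOG_LEVEL` environment variable, then "INFO"):

```python
from pybinbot.shared.logging_config import configure_logging

configure_logging()               # level resolved from LOG_LEVEL, defaulting to "INFO"
configure_logging(level="debug")  # upper-cased to "DEBUG" before logging.basicConfig
```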
pybinbot/shared/timestamps.py
@@ -2,11 +2,13 @@ import os
  from time import time
  import math
  from zoneinfo import ZoneInfo
- from shared.maths import round_numbers_ceiling
  from datetime import datetime
 
+ from .maths import round_numbers_ceiling
+
  format = "%Y-%m-%d %H:%M:%S"
 
+
  def timestamp() -> int:
      ts = time() * 1000
      rounded_ts = round_timestamp(ts)
@@ -44,8 +46,8 @@ def ts_to_day(ts: float | int) -> str:
          ts = ts * pow(10, 10 - digits)
 
      dt_obj = datetime.fromtimestamp(ts)
-     b_str_date = datetime.strftime(dt_obj, format)
-     return b_str_date
+     # ts_to_day returns a date string without time component
+     return datetime.strftime(dt_obj, "%Y-%m-%d")
 
 
  def ms_to_sec(ms: int) -> int:
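
The change above makes `ts_to_day` return only the date portion. A small sketch of the surrounding digit normalization, assuming `digits` is the integer digit count of `ts` computed earlier in the function (that part sits outside this hunk):

```python
import math
from datetime import datetime

ts = 1700000000000                 # millisecond epoch, 13 digits
digits = int(math.log10(ts)) + 1   # hypothetical digit count, per the assumption above
ts = ts * pow(10, 10 - digits)     # scale down to a 10-digit second epoch
print(datetime.strftime(datetime.fromtimestamp(ts), "%Y-%m-%d"))  # e.g. '2023-11-14'
```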
@@ -94,4 +96,3 @@ def timestamp_to_datetime(timestamp: str | int) -> str:
          timestamp, tz=ZoneInfo(os.getenv("TZ", "Europe/London"))
      )
      return dt.strftime(format)
-
pybinbot/shared/types.py
@@ -1,6 +1,9 @@
  from typing import Annotated
+
  from pydantic import BeforeValidator
- from shared.maths import ensure_float
+
+ from .maths import ensure_float
+
 
  Amount = Annotated[
      float,
pybinbot-0.4.0.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: pybinbot
- Version: 0.1.6
+ Version: 0.4.0
  Summary: Utility functions for the binbot project.
  Author-email: Carlos Wu <carkodw@gmail.com>
  Requires-Python: >=3.11
@@ -10,9 +10,18 @@ Requires-Dist: pydantic[email]>=2.0.0
  Requires-Dist: numpy==2.2.0
  Requires-Dist: pandas>=2.2.3
  Requires-Dist: pymongo==4.6.3
+ Requires-Dist: pandas-stubs>=2.3.3.251219
+ Requires-Dist: requests>=2.32.5
+ Requires-Dist: kucoin-universal-sdk>=1.3.0
+ Requires-Dist: aiohttp>=3.13.3
+ Requires-Dist: python-dotenv>=1.2.1
  Provides-Extra: dev
  Requires-Dist: pytest>=9.0.2; extra == "dev"
- Requires-Dist: ruff; extra == "dev"
+ Requires-Dist: ruff>=0.11.12; extra == "dev"
+ Requires-Dist: mypy>=1.19.1; extra == "dev"
+ Requires-Dist: types-requests>=2.32.4.20260107; extra == "dev"
+ Requires-Dist: httpx>=0.28.1; extra == "dev"
+ Requires-Dist: pytest-asyncio>=1.3.0; extra == "dev"
  Dynamic: license-file
 
  # PyBinbot
@@ -39,6 +48,9 @@ uv sync --extra dev
 
  ## Publishing
 
+ 1. Save your changes and do the usual Git flow (add, commit, but don't push yet).
+ 2. Bump the version; choose one of:
+
  ```bash
  make bump-patch
  ```
@@ -54,4 +66,12 @@ or
  make bump-major
  ```
 
+ 3. Tag the version for GitHub; this reads the bumped version. There's a convenience command:
+
+ ```
+ make tag
+ ```
+
+ 4. `git commit --amend` to fold these changes into the previous commit, so we don't create unnecessary duplicate commits. Then `git push`.
+
  For further commands take a look at the `Makefile`, such as testing with `make test`
pybinbot-0.4.0.dist-info/RECORD (new file)
@@ -0,0 +1,23 @@
+ pybinbot/__init__.py,sha256=4yW8FlBmbjfSiY6xcl_18_BRYppwMlDjB5s7zaxjogM,3791
+ pybinbot/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ pybinbot/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ pybinbot/models/bot_base.py,sha256=uOVM4sdNTVV3hpUj9CLh-nZQ4bQmFrkFHdZcv-K9AeQ,3592
+ pybinbot/models/deal.py,sha256=9TBqt-WgaGJ7raZE2gHUimvZohiH831oRL-mH0dvGQ8,2443
+ pybinbot/models/order.py,sha256=1I9LJuEOysfMa9qJew2QIXhpwvl3xsfAP7DKbAH24sc,3492
+ pybinbot/models/routes.py,sha256=ZyRBrlabJYXOGW05-6sxpUKyTnF1AjYpc88KQtXeRFU,126
+ pybinbot/models/signals.py,sha256=Br0q32L9m13hVw8C8A1UtBG5vQQxdQzkVREIdDE_SaA,1256
+ pybinbot/shared/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ pybinbot/shared/cache.py,sha256=iIl0IPqYVhKNbQLYuDigZYQzKfyQzPFZSWzD4oSWu_A,984
+ pybinbot/shared/enums.py,sha256=dO3KMvxcmzaEVfJqrMSi6vvJGdjc__GyCI7MbRAmRro,7303
+ pybinbot/shared/handlers.py,sha256=6BtNVSlU8WKuMN5O66oDAyS6d0M5oID7G_aCaDEsIkg,2961
+ pybinbot/shared/heikin_ashi.py,sha256=ypV31YXIv63G-02v7KyeBR2rwraMCoU2CYIolm5OC6w,7083
+ pybinbot/shared/indicators.py,sha256=_yGcxRsaPq8v-h_OXiLSg-ygvOEfse6uFSVi9ViIxBQ,8509
+ pybinbot/shared/logging_config.py,sha256=Bg38T5gL5H7BMxnOEEZNiBUQT2ccV96UZNxwJIyDc5s,1130
+ pybinbot/shared/maths.py,sha256=JjlrgV0INlJG4Zj28ahRkfzcI2Ec4noilrDwX2p82TM,3207
+ pybinbot/shared/timestamps.py,sha256=84N9t78Zs5CqKGPbIHlaJGyZzHcwrMwAw9r4hPTR0ro,2616
+ pybinbot/shared/types.py,sha256=JNMZiO3UPMa4xuo9fUO4kZff-mTNT6CaI5uQ_8WtcY4,170
+ pybinbot-0.4.0.dist-info/licenses/LICENSE,sha256=ECEAqAQ81zTT8PeN7gYqbkZtewkyeleEqQ26MxuHQxs,938
+ pybinbot-0.4.0.dist-info/METADATA,sha256=ZE1wloj_6zQPJbNAdq3QJnjvphVlvDZy2GwVbiXx4XE,2140
+ pybinbot-0.4.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ pybinbot-0.4.0.dist-info/top_level.txt,sha256=agwNV3TckcrWlAb8UpSJdWxfoMu2IvnOsmMd--F7Si8,9
+ pybinbot-0.4.0.dist-info/RECORD,,
pybinbot-0.4.0.dist-info/top_level.txt (new file)
@@ -0,0 +1 @@
+ pybinbot
pybinbot-0.1.6.dist-info/RECORD (removed)
@@ -1,15 +0,0 @@
- pybinbot.py,sha256=bLB9AYwuk3cbQ7HVlVGwTtzqNSV3FsZv4Rtbetg0Kas,1920
- models/bot_base.py,sha256=z9hSK7uVLW4oDmPdtntYozo8RnQnpZtFi8fL1r9Qd5Q,3593
- models/deal.py,sha256=jOdMSobN_K4-be3hG38l0WeZq5ln5JndA-BBgMUElsI,2402
- models/order.py,sha256=FHs7qi2JNYn-kXfWD2m5oFSSeMQDKmw0uPDwOvy5KkQ,3592
- models/signals.py,sha256=DAcV2ft6n5iJW4kqspdfEakFZ3igx97erwyTiyDMlgM,1356
- pybinbot-0.1.6.dist-info/licenses/LICENSE,sha256=ECEAqAQ81zTT8PeN7gYqbkZtewkyeleEqQ26MxuHQxs,938
- shared/enums.py,sha256=b472TAbRrnznDRDDVrLW_2I_9dURafqeC3pMu4DHQ1w,6730
- shared/logging_config.py,sha256=XZblKXH9KsLUDbIJqFRZPzI0h17-CRBZH4KktVak-TI,1144
- shared/maths.py,sha256=JjlrgV0INlJG4Zj28ahRkfzcI2Ec4noilrDwX2p82TM,3207
- shared/timestamps.py,sha256=401JkggjW--trNenxkUBEObnWyy9Cd-L3xVpqdbW8Tc,2587
- shared/types.py,sha256=KfuJzjsbMUHFcBaQ6ZXUbuSyFHbHqehgeY73Zt8lqO8,173
- pybinbot-0.1.6.dist-info/METADATA,sha256=LEBxxxJ0TBmDQ77xJpskIZYprx8_5y52oRgDFxsRf60,1358
- pybinbot-0.1.6.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- pybinbot-0.1.6.dist-info/top_level.txt,sha256=rfaU2KRcKvquGQYwN5weorBMzgHpWW4eZlITOQwjRvw,23
- pybinbot-0.1.6.dist-info/RECORD,,
pybinbot-0.1.6.dist-info/top_level.txt (removed)
@@ -1,3 +0,0 @@
- models
- pybinbot
- shared